diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 000000000..60d25648b --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1,25 @@ +# CODEOWNERS +# https://docs.github.com/en/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/about-code-owners + +# This file specifies only who must review each file, i.e., who is the primary maintainer / responsibility holder + +# araistrick will review everything except where specified below +* @araistrick + +infinigen/datagen/customgt/* @lahavlipson +infinigen/tools/ground_truth/* @lahavlipson + +infinigen/terrain/* @mazeyu + +infinigen/core/constraints/evaluator/node_impl/* @karhankayan +infinigen/core/constraints/example_solver/room/* @JerryLingjieMei + +infinigen/tools/export.py @David-Yan-1 + +infinigen/assets/utils/* @JerryLingjieMei +infinigen/assets/fluid/* @karhankayan + +# Jerry will review all object assets except where specified below +infinigen/assets/objects/* @JerryLingjieMei +infinigen/assets/objects/creatures/* @araistrick +infinigen/assets/objects/trees/* @araistrick \ No newline at end of file diff --git a/.github/ISSUE_TEMPLATE/ask-for-help.md b/.github/ISSUE_TEMPLATE/ask-for-help.md new file mode 100644 index 000000000..4b9ccdbb3 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/ask-for-help.md @@ -0,0 +1,33 @@ +--- +name: Question +about: Ask for help using the system +title: "" +labels: 'question' +assignees: '' + +--- + +## Steps to Reproduce +If this relates to running the codebase, please provide steps to reproduce: + +### What version of the code were you using? +Tell us the commit & commit hash from `git log` + +### What command did you run? + + +### What are your FULL output logs? +Provide the FULL output logs from your command as a txt file. + +### If this is your first time running Infinigen, what are the full install logs? +Run `pip install -v -e . > logs.txt 2>&1` and send logs.txt as an attachment. + + +### Platform +- OS & OS Version: +- GPU (?) : +- GPU Driver Version (?) : +- RAM (GB): + +# Additional context +Add any other context about the problem here. diff --git a/.github/ISSUE_TEMPLATE/bug-report.md b/.github/ISSUE_TEMPLATE/bug-report.md index 44988edcc..491980196 100644 --- a/.github/ISSUE_TEMPLATE/bug-report.md +++ b/.github/ISSUE_TEMPLATE/bug-report.md @@ -22,7 +22,7 @@ Tell us the commit & commit hash from `git log` Provide the FULL output logs from your command as a txt file. ### If this is your first time running Infinigen, what are the full install logs?** -Run `pip install -vv -e . > logs.txt 2>&1` and send logs.txt as an attachment. +Run `pip install -v -e . > logs.txt 2>&1` and send logs.txt as an attachment. ### Platform diff --git a/.github/ISSUE_TEMPLATE/request.md b/.github/ISSUE_TEMPLATE/suggestion.md similarity index 71% rename from .github/ISSUE_TEMPLATE/request.md rename to .github/ISSUE_TEMPLATE/suggestion.md index be2c75809..18a49c346 100644 --- a/.github/ISSUE_TEMPLATE/request.md +++ b/.github/ISSUE_TEMPLATE/suggestion.md @@ -1,6 +1,6 @@ --- -name: Request -about: Request a feature! +name: Suggestion +about: Suggest a new feature title: "" labels: 'enhancement' assignees: '' diff --git a/.github/workflows/checks.yml b/.github/workflows/checks.yml index ed99f6f1b..ebd0045b2 100644 --- a/.github/workflows/checks.yml +++ b/.github/workflows/checks.yml @@ -34,7 +34,7 @@ jobs: # stop the build if there are Python syntax errors or undefined names ruff check --output-format=github --select=E9,F63,F7,F82 .
# default set of ruff rules with GitHub Annotations - #ruff --format=github . # to be enabled in a future PR + ruff check --output-format=github . - name: Set up Python 3.10 uses: actions/setup-python@v4 diff --git a/.gitignore index d17c38493..254321a68 100644 --- a/.gitignore +++ b/.gitignore @@ -4,6 +4,8 @@ blender blender.tar.xz Blender.app +*.c + .coverage coverage.xml diff --git a/.pre-commit-config.yaml index 1c4ac1ccf..40f25e394 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,16 +1,12 @@ - repos: - -# config from https://black.readthedocs.io/en/stable/integrations/source_version_control.html -#- repo: https://github.com/psf/black -# rev: 23.7.0 -# hooks: -# - id: black -# language_version: python3.10 - -# config from https://github.com/astral-sh/ruff-pre-commit - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.0.285 + # Ruff version. + rev: v0.4.9 hooks: + # Run the linter. - id: ruff - + # Sort imports + - id: ruff + args: [--select, I, --fix] + # Run the formatter. + - id: ruff-format diff --git a/docs/CHANGELOG.md index 7376a9c3f..376f4ce11 100644 --- a/docs/CHANGELOG.md +++ b/docs/CHANGELOG.md @@ -60,4 +60,11 @@ v1.4.0 - Infinigen Indoors v1.4.1 - @David-Yan1 fix placeholder & ocmesher submodule version - @lahavlipson fix bug in surface normals of wall meshes -- @araistrick bugfix example commands & other typos \ No newline at end of file +- @araistrick bugfix example commands & other typos + +v1.5.0 +- ruff & auto-lint-fix the entire codebase +- move mesh assets into infinigen/assets/objects +- minimize pip dependencies: remove unused packages & move terrain/gt-vis packages into optional \[terrain,vis\] extras. +- add parameters for object clutter, reduce excessively cluttered / slow indoors scenes +- minorly improve infinigen-indoors performance via logging & asset hiding diff --git a/docs/ConfiguringInfinigen.md index 3561f1462..b2a35e5e4 100644 --- a/docs/ConfiguringInfinigen.md +++ b/docs/ConfiguringInfinigen.md @@ -45,7 +45,7 @@ Our `infinigen_examples/generate_nature.py` driver always loads [`infinigen_exam Now that you understand the two major python programs and how to configure them, you may notice and wonder about the many configs/overrides provided in our original one-command "Hello World" example: -``` +```bash # Original hello world command python -m infinigen.datagen.manage_jobs --output_folder outputs/hello_world --num_scenes 1 --specific_seed 0 \ --configs desert.gin simple.gin --pipeline_configs local_16GB.gin monocular.gin blender_gt.gin \ @@ -169,23 +169,23 @@ All commands below are shown with using `local_256GB` config, but you can attemp We recommend this command as a starting point for generating high quality videos. Generating multi-view consistent terrain is not computationally tractable without CUDA acceleration, so make sure to follow the CUDA Terrain instructions in Installation.md, and we recommend not to remove the `cuda_terrain` flag below.
-```` +```bash python -m infinigen.datagen.manage_jobs --output_folder outputs/my_videos --num_scenes 500 \ --pipeline_config slurm monocular_video cuda_terrain opengl_gt \ --cleanup big_files --warmup_sec 60000 --config trailer_video high_quality_terrain -```` +``` #### Creating large-scale stereo datasets -```` +```bash python -m infinigen.datagen.manage_jobs --output_folder outputs/stereo_data --num_scenes 10000 \ --pipeline_config slurm stereo cuda_terrain opengl_gt \ --cleanup big_files --warmup_sec 60000 --config high_quality_terrain -```` +``` #### Creating a few low-resolution images to your test changes -``` +```bash screen python -m infinigen.datagen.manage_jobs --output_folder outputs/dev --num_scenes 50 \ --pipeline_config slurm monocular cuda_terrain \ --cleanup big_files --warmup_sec 1200 --configs dev @@ -196,7 +196,7 @@ screen python -m infinigen.datagen.manage_jobs --output_folder outputs/dev --num These commands are intended as inspiration - please read docs above for more advice on customizing all aspects of Infinigen. Create images that always have rain: -``` +```bash python -m infinigen.datagen.manage_jobs --output_folder outputs/my_videos --num_scenes 500 \ --pipeline_config slurm monocular cuda_terrain opengl_gt \ --cleanup big_files --warmup_sec 30000 \ @@ -206,7 +206,7 @@ python -m infinigen.datagen.manage_jobs --output_folder outputs/my_videos --num_ :bulb: You can substitute the `rain_particles` in `rain_particles_chance` for any `run_stage` name argument string in `infinigen_examples/generate_nature.py`, such as `trees` or `ground_creatures`. Create images that only have terrain: -``` +```bash python -m infinigen.datagen.manage_jobs --output_folder outputs/my_videos --num_scenes 500 \ --pipeline_config slurm monocular cuda_terrain opengl_gt \ --cleanup big_files --warmup_sec 30000 --config no_assets @@ -215,7 +215,7 @@ python -m infinigen.datagen.manage_jobs --output_folder outputs/my_videos --num_ Create videos at birds-eye-view camera altitudes: -``` +```bash python -m infinigen.datagen.manage_jobs --output_folder outputs/my_videos --num_scenes 500 \ --pipeline_config slurm monocular_video cuda_terrain opengl_gt \ --cleanup big_files --warmup_sec 30000 --config trailer_video high_quality_terrain \ @@ -225,7 +225,7 @@ python -m infinigen.datagen.manage_jobs --output_folder outputs/my_videos --num_ :bulb: The command shown is overriding `infinigen_examples/configs_nature/base.gin`'s default setting of `camera.camera_pose_proposal.altitude`. You can use a similar syntax to override any number of .gin config entries. Separate multiple entries with spaces. 
Create 1 second video clips: -``` +```bash python -m infinigen.datagen.manage_jobs --output_folder outputs/my_videos --num_scenes 500 \ --pipeline_config slurm monocular_video cuda_terrain opengl_gt \ --cleanup big_files --warmup_sec 30000 --config trailer_video high_quality_terrain \ diff --git a/docs/GeneratingFluidSimulations.md b/docs/GeneratingFluidSimulations.md index 387eab50e..52756b96a 100644 --- a/docs/GeneratingFluidSimulations.md +++ b/docs/GeneratingFluidSimulations.md @@ -9,13 +9,13 @@ Before you can generate fluids, you must run an additional installation step: `b ## Example Commands #### Generate a video of a single scene with simulated fire generated on the fly -``` +```bash python -m infinigen.datagen.manage_jobs --specific_seed 3930249d --output_folder outputs/fire --num_scenes 1 --pipeline_config local_256GB.gin monocular_video.gin --cleanup none --config plain.gin fast_terrain_assets.gin use_on_the_fly_fire.gin ``` Because fluid simulation takes a long time, the fire resolution can be reduced in use_on_the_fly_fire.gin, by setting `set_obj_on_fire.resolution = {resolution}`. This will reduce the fire quality but speed up the simulation. #### Generate a video of a single valley scene with simulated river -``` +```bash python -m infinigen.datagen.manage_jobs --specific_seed 61fc881a --output_folder outputs/river --num_scenes 1 --pipeline_config local_256GB.gin monocular_video.gin opengl_gt.gin cuda_terrain.gin --pipeline_overrides iterate_scene_tasks.frame_range=[100,244] --config river.gin simulated_river.gin no_assets.gin no_creatures.gin fast_terrain_assets.gin --cleanup none ``` Similar to fire, the simulation can be sped up by reducing the resolution. In simulated_river.gin, the resolution can be modified by setting `make_river.resolution = {resolution}`. The simulation can also be sped up by reducing the simulation duration in simulated_river.gin by setting `make_river.simulation_duration = {duration}`. For instance, before running the command above, the duration can be reduced to a number greater than 200 since that is the last frame of the video. @@ -23,7 +23,7 @@ Similar to fire, the simulation can be sped up by reducing the resolution. In si Also, note that this command will produce a scene without assets to speed up the process. However, the liquids generally interact with the objects by splashing on them, and a scene like this can be produced by removing the `no_assets.gin` option in the above command. #### Generate videos of random scene types, with simulated fire generated on the fly when needed -``` +```bash python -m infinigen.datagen.manage_jobs --output_folder outputs/onthefly --num_scenes 10 \ --pipeline_config slurm_high_memory.gin monocular_video.gin \ --config fast_terrain_assets.gin use_on_the_fly_fire.gin \ @@ -31,7 +31,7 @@ python -m infinigen.datagen.manage_jobs --output_folder outputs/onthefly --num_ ``` #### Generate videos of valley scenes with simulated rivers -``` +```bash python -m infinigen.datagen.manage_jobs --output_folder /n/fs/pvl-renders/kkayan/river --num_scenes 10 \ --pipeline_config slurm_high_memory.gin monocular_video.gin opengl_gt.gin cuda_terrain.gin \ --pipeline_overrides iterate_scene_tasks.frame_range=[100,244] \ @@ -44,20 +44,21 @@ python -m infinigen.datagen.manage_jobs --output_folder /n/fs/pvl-renders/kkayan ## Using Pre-Generated Fire High-resolution fluid simulations take a long time, so assets on fire can pre-generated and imported in the scene instead of being baked on-the-fly. 
This allows for fire to be simulated once, instead of every time a scene is generated. To pre-generate fire assets of all types (bush, tree, creature, cactus, boulder), run: -``` +```bash python -m infinigen.tools.submit_asset_cache -f {fire_asset_folder} -n 1 -s -40 -d 184 ``` where `fire_asset_folder` is where you want to save the fire. The number of assets per type, start frame and duration can be adjusted. If you only want to pre-generate one type of asset once, run: -``` +```bash python -m infinigen.assets.fluid.run_asset_cache -f {fire_asset_folder} -a {asset} -s {start_frame} -d {simulation_duration} ``` where `fire_asset_folder` is where you want to save the fire. `asset` can be one of `CachedBushFactory`, `CachedTreeFactory`, `CachedCactusFactory`, `CachedCreatureFactory`, `CachedBoulderFactory`. ### Import pre-generated fire when generating a scene After fire is pre-generated with one of the previous commands, edit config/use_cached_fire.gin and set the `FireCachingSystem.asset_folder` variable to `fire_asset_folder` you used when pre-generating fire. After this `use_cached_fire.gin` can be used instead of `use_on_the_fly_fire.gin` when generating a scene. This will import the fire from the folder it is saved instead of simulating it on-the-fly. + #### Example Command -``` +```bash python -m infinigen.datagen.manage_jobs --specific_seed 3930249d --output_folder outputs/fire --num_scenes 1 --pipeline_config local_256GB.gin monocular_video.gin --cleanup none --config plain.gin fast_terrain_assets.gin use_cached_fire.gin ``` diff --git a/docs/GroundTruthAnnotations.md b/docs/GroundTruthAnnotations.md index 723ed8299..21f077e0b 100644 --- a/docs/GroundTruthAnnotations.md +++ b/docs/GroundTruthAnnotations.md @@ -8,6 +8,14 @@ **Want annotations that we don't currently support? [Fill out a request!](https://github.com/princeton-vl/infinigen/issues/new?assignees=&labels=&projects=&template=request.md&title=%5BREQUEST%5D)** +### Visualization dependencies + +To run the visualization scripts below you will need to install extra dependencies + +```bash +pip install .[vis] +``` + ## Default Annotations from Blender Infinigen can produce some dense annotations using Blender's built-in render passes. Users may prefer to use these annotations over our extended annotation system's since it requires only the bare-minimum installation. It is also able to run without a GPU. 
diff --git a/docs/HelloRoom.md b/docs/HelloRoom.md index 244ed1cf6..aaeea6cfa 100644 --- a/docs/HelloRoom.md +++ b/docs/HelloRoom.md @@ -76,7 +76,7 @@ We also provide an OpenGL-based ground truth extractor which offers additional g To generate a single scene in one command, you can run the following: ```bash -screen python -m infinigen.datagen.manage_jobs --output_folder outputs/my_dataset --num_scenes 1000 --pipeline_configs local_256GB.gin monocular.gin blender_gt.gin indoor_background_configs.gin --configs singleroom.gin --pipeline_overrides get_cmd.driver_script='infinigen_examples.generate_indoors' manage_datagen_jobs.num_concurrent=16 --overrides compose_indoors.restrict_single_supported_roomtype=True +screen python -m infinigen.datagen.manage_jobs --output_folder outputs/my_dataset --num_scenes 1 --pipeline_configs local_256GB.gin monocular.gin blender_gt.gin indoor_background_configs.gin --configs singleroom.gin --pipeline_overrides get_cmd.driver_script='infinigen_examples.generate_indoors' manage_datagen_jobs.num_concurrent=16 --overrides compose_indoors.restrict_single_supported_roomtype=True ``` To create a large dataset of many random rooms, we recommend: diff --git a/docs/HelloWorld.md b/docs/HelloWorld.md index 76f9b3113..74d988455 100644 --- a/docs/HelloWorld.md +++ b/docs/HelloWorld.md @@ -16,7 +16,7 @@ Infinigen generates scenes by running multiple tasks (usually executed automatic :exclamation: If you encounter any missing .so files, missing dependencies (such as `gin`), or similar crashes, please check again that all steps of installation ran successfully. If you cannot resolve any issues with installation, please see our README and 'Bug Report' Git Issue template for advice on posting Git Issues to get help quickly - you must include the full installation logs in your issue so that we can help debug. -``` +```bash mkdir outputs # Generate a scene layout @@ -40,7 +40,7 @@ Output logs should indicate what the code is working on. Use `--debug` for even We provide `infinigen/datagen/manage_jobs.py`, a utility which runs similar steps automatically. -``` +```bash python -m infinigen.datagen.manage_jobs --output_folder outputs/hello_world --num_scenes 1 --specific_seed 0 \ --configs desert.gin simple.gin --pipeline_configs local_16GB.gin monocular.gin blender_gt.gin --pipeline_overrides LocalScheduleHandler.use_gpu=False ``` diff --git a/docs/ImplementingAssets.md b/docs/ImplementingAssets.md index bdca69c0a..87f76a37f 100644 --- a/docs/ImplementingAssets.md +++ b/docs/ImplementingAssets.md @@ -144,7 +144,7 @@ class MyAssetFactory(AssetFactory): You can implement the `create_asset` function however you wish so long as it produces a Blender Object as a result. Many existing assets use various different strategies, which you can use as examples: - `assets/flower.py` uses mostly auto-generated code from transpiling a hand-designed geometry node-graph. -- `assets/grassland/grass_tuft.py` uses pure NumPy code to create and define a mesh. +- `assets/objects/grassland/grass_tuft.py` uses pure NumPy code to create and define a mesh. - `assets/trees/infinigen_examples/generate_nature.py` combines transpiled materials & leaves with a python-only space colonization algorithm.
The simplest implementation for a new asset is to create a geometry nodes equivelant, transpile it similarly to as shown above, copy the code into the same file as the template shown above, and implement the `create_asset` function as shown: diff --git a/docs/Installation.md b/docs/Installation.md index dc18dbc01..867bd4a73 100644 --- a/docs/Installation.md +++ b/docs/Installation.md @@ -72,10 +72,11 @@ Then, install the infinigen package using one of the options below: INFINIGEN_MINIMAL_INSTALL=True pip install -e . # Full install (Terrain & OpenGL-GT enabled, needed for Infinigen-Nature HelloWorld) -pip install -e . +pip install -e .[terrain,vis] # Developer install (includes pytest, ruff, other recommended dev tools) -pip install -e ".[dev]" +pip install -e ".[dev,terrain,vis]" +pre-commit install ``` :exclamation: If you encounter any issues with the above, please add `-vv > logs.txt 2>&1` to the end of your command and run again, then provide the resulting logs.txt file as an attachment when making a Github Issue. @@ -94,7 +95,7 @@ Then, install using one of the options below: # Minimal installation (recommended setting for use in the Blender UI) INFINIGEN_MINIMAL_INSTALL=True bash scripts/install/interactive_blender.sh -# Normal install (includes CPU Terrain, and CUDA Terrain if available) +# Normal install bash scripts/install/interactive_blender.sh # Enable OpenGL GT diff --git a/infinigen/OcMesher b/infinigen/OcMesher index d3d1441ab..2cdcbacbe 160000 --- a/infinigen/OcMesher +++ b/infinigen/OcMesher @@ -1 +1 @@ -Subproject commit d3d1441ab57c48db3ec40c621fc3d0c323579e8a +Subproject commit 2cdcbacbe62ef79dc6031e0131f916266b7372e3 diff --git a/infinigen/__init__.py b/infinigen/__init__.py index 9af1e6f0d..816527ea4 100644 --- a/infinigen/__init__.py +++ b/infinigen/__init__.py @@ -1,3 +1,8 @@ import logging +from pathlib import Path -__version__ = "1.4.1" +__version__ = "1.5.0" + + +def repo_root(): + return Path(__file__).parent.parent diff --git a/infinigen/assets/appliances/beverage_fridge.py b/infinigen/assets/appliances/beverage_fridge.py deleted file mode 100644 index 9ab8b5278..000000000 --- a/infinigen/assets/appliances/beverage_fridge.py +++ /dev/null @@ -1,735 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
- -# Authors: Hongyu Wen - -import bpy -import random -import mathutils -import numpy as np -from numpy.random import uniform as U, normal as N, randint as RI - -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core.util.blender import delete -from infinigen.core.util.bevelling import get_bevel_edges, add_bevel, complete_bevel, complete_no_bevel -from infinigen.core import surface -from infinigen.core.util import blender as butil - -from infinigen.core.util.math import FixedSeed -from infinigen.core.placement.factory import AssetFactory -from infinigen.assets.material_assignments import AssetList - -class BeverageFridgeFactory(AssetFactory): - - def __init__(self, factory_seed, coarse=False, dimensions=[1., 1., 1.]): - super(BeverageFridgeFactory, self).__init__(factory_seed, coarse=coarse) - - self.dimensions = dimensions - with FixedSeed(factory_seed): - self.params = self.sample_parameters(dimensions) - self.material_params, self.scratch, self.edge_wear = self.get_material_params() - self.params.update(self.material_params) - - def get_material_params(self): - material_assignments = AssetList['BeverageFridgeFactory']() - params = { - "Surface": material_assignments['surface'].assign_material(), - "Front": material_assignments['front'].assign_material(), - "Handle": material_assignments['handle'].assign_material(), - "Back": material_assignments['back'].assign_material(), - } - wrapped_params = { - k: surface.shaderfunc_to_material(v) for k, v in params.items() - } - - scratch_prob, edge_wear_prob = material_assignments['wear_tear_prob'] - scratch, edge_wear = material_assignments['wear_tear'] - - is_scratch = np.random.uniform() < scratch_prob - is_edge_wear = np.random.uniform() < edge_wear_prob - if not is_scratch: - scratch = None - - if not is_edge_wear: - edge_wear = None - - return wrapped_params, scratch, edge_wear - - @staticmethod - def sample_parameters(dimensions): - depth = 1 + N(0, 0.1) - width = 1 + N(0, 0.1) - height = 1 + N(0, 0.1) - # depth, width, height = dimensions - door_thickness = U(0.05, 0.1) * depth - door_rotation = 0 # Set to 0 for now - - rack_radius = U(0.01, 0.02) * depth - rack_h_amount = RI(2, 4) - rack_d_amount = RI(4, 6) - brand_name = "BrandName" - - params = { - "Depth": depth, - "Width": width, - "Height": height, - "DoorThickness": door_thickness, - "DoorRotation": door_rotation, - "RackRadius": rack_radius, - "RackHAmount": rack_h_amount, - "RackDAmount": rack_d_amount, - "BrandName": brand_name, - } - return params - - def create_asset(self, **params): - obj = butil.spawn_cube() - butil.modify_mesh(obj, 'NODES', node_group=nodegroup_beverage_fridge_geometry(preprocess=True), ng_inputs=self.params, apply=True) - bevel_edges = get_bevel_edges(obj) - delete(obj) - obj = butil.spawn_cube() - butil.modify_mesh(obj, 'NODES', node_group=nodegroup_beverage_fridge_geometry(), ng_inputs=self.params, apply=True) - obj = add_bevel(obj, bevel_edges, offset=0.01) - - return obj - - def finalize_assets(self, assets): - if self.scratch: - self.scratch.apply(assets) - if self.edge_wear: - self.edge_wear.apply(assets) - -@node_utils.to_nodegroup('nodegroup_oven_rack', singleton=False, type='GeometryNodeTree') -def nodegroup_oven_rack(nw: NodeWrangler): - # Code generated using version 2.6.5 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloatDistance', 'Width', 2.0000), - 
('NodeSocketFloatDistance', 'Height', 2.0000), - ('NodeSocketFloatDistance', 'Radius', 0.0200), - ('NodeSocketInt', 'Amount', 5)]) - - quadrilateral = nw.new_node('GeometryNodeCurvePrimitiveQuadrilateral', - input_kwargs={'Width': group_input.outputs["Width"], 'Height': group_input.outputs["Height"]}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Height"], 1: -0.5000}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': multiply}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Height"]}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_4 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': multiply_1}) - - curve_line = nw.new_node(Nodes.CurveLine, input_kwargs={'Start': combine_xyz_3, 'End': combine_xyz_4}) - - geometry_to_instance = nw.new_node('GeometryNodeGeometryToInstance', input_kwargs={'Geometry': curve_line}) - - duplicate_elements = nw.new_node(Nodes.DuplicateElements, - input_kwargs={'Geometry': geometry_to_instance, 'Amount': group_input.outputs["Amount"]}, - attrs={'domain': 'INSTANCE'}) - - multiply_2 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Width"]}, attrs={'operation': 'MULTIPLY'}) - - divide = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_2, 1: group_input.outputs["Amount"]}, - attrs={'operation': 'DIVIDE'}) - - multiply_3 = nw.new_node(Nodes.Math, - input_kwargs={0: duplicate_elements.outputs["Duplicate Index"], 1: divide}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply_3}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': duplicate_elements.outputs["Geometry"], 'Offset': combine_xyz}) - - duplicate_elements_1 = nw.new_node(Nodes.DuplicateElements, - input_kwargs={'Geometry': geometry_to_instance, 'Amount': group_input.outputs["Amount"]}, - attrs={'domain': 'INSTANCE'}) - - multiply_4 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Width"], 1: -0.5000}, attrs={'operation': 'MULTIPLY'}) - - divide_1 = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_4, 1: group_input.outputs["Amount"]}, - attrs={'operation': 'DIVIDE'}) - - multiply_5 = nw.new_node(Nodes.Math, - input_kwargs={0: duplicate_elements_1.outputs["Duplicate Index"], 1: divide_1}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply_5}) - - set_position_1 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': duplicate_elements_1.outputs["Geometry"], 'Offset': combine_xyz_1}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [quadrilateral, set_position, set_position_1]}) - - curve_circle = nw.new_node(Nodes.CurveCircle, input_kwargs={'Radius': group_input.outputs["Radius"]}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': join_geometry, 'Profile Curve': curve_circle.outputs["Curve"], 'Fill Caps': True}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Mesh': curve_to_mesh}, attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_text', singleton=False, type='GeometryNodeTree') -def nodegroup_text(nw: NodeWrangler): - # Code generated using version 2.6.5 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVectorTranslation', 'Translation', (1.5000, 0.0000, 0.0000)), - ('NodeSocketString', 'String', 'BrandName'), - ('NodeSocketFloatDistance', 'Size', 0.0500), - 
('NodeSocketFloat', 'Offset Scale', 0.0020)]) - - string_to_curves = nw.new_node('GeometryNodeStringToCurves', - input_kwargs={'String': group_input.outputs["String"], 'Size': group_input.outputs["Size"]}, - attrs={'align_y': 'BOTTOM_BASELINE', 'align_x': 'CENTER'}) - - fill_curve = nw.new_node(Nodes.FillCurve, input_kwargs={'Curve': string_to_curves.outputs["Curve Instances"]}) - - extrude_mesh = nw.new_node(Nodes.ExtrudeMesh, - input_kwargs={'Mesh': fill_curve, 'Offset Scale': group_input.outputs["Offset Scale"]}) - - transform_1 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': extrude_mesh.outputs["Mesh"], 'Translation': group_input.outputs["Translation"], 'Rotation': (1.5708, 0.0000, 1.5708)}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': transform_1}, attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_handle', singleton=False, type='GeometryNodeTree') -def nodegroup_handle(nw: NodeWrangler): - # Code generated using version 2.6.5 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'width', 0.0000), - ('NodeSocketFloat', 'length', 0.0000), - ('NodeSocketFloat', 'thickness', 0.0200)]) - - cube = nw.new_node(Nodes.MeshCube, input_kwargs={'Size': group_input.outputs["width"]}) - - store_named_attribute = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': cube.outputs["Mesh"], 'Name': 'uv_map', 3: cube.outputs["UV Map"]}, - attrs={'domain': 'CORNER', 'data_type': 'FLOAT_VECTOR'}) - - cube_1 = nw.new_node(Nodes.MeshCube, input_kwargs={'Size': group_input.outputs["width"]}) - - store_named_attribute_1 = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': cube_1.outputs["Mesh"], 'Name': 'uv_map', 3: cube_1.outputs["UV Map"]}, - attrs={'domain': 'CORNER', 'data_type': 'FLOAT_VECTOR'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': group_input.outputs["length"]}) - - transform = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': store_named_attribute_1, 'Translation': combine_xyz}) - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [store_named_attribute, transform]}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["width"]}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': multiply}) - - transform_2 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': join_geometry_1, 'Translation': combine_xyz_3}) - - add = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["length"], 1: group_input.outputs["width"]}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': group_input.outputs["width"], 'Y': add, 'Z': group_input.outputs["thickness"]}) - - cube_2 = nw.new_node(Nodes.MeshCube, input_kwargs={'Size': combine_xyz_1}) - - store_named_attribute_2 = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': cube_2.outputs["Mesh"], 'Name': 'uv_map', 3: cube_2.outputs["UV Map"]}, - attrs={'domain': 'CORNER', 'data_type': 'FLOAT_VECTOR'}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["length"]}, attrs={'operation': 'MULTIPLY'}) - - multiply_2 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["thickness"]}, attrs={'operation': 'MULTIPLY'}) - - add_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["width"], 1: multiply_2}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': multiply_1, 'Z': add_1}) - - transform_1 = 
nw.new_node(Nodes.Transform, input_kwargs={'Geometry': store_named_attribute_2, 'Translation': combine_xyz_2}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [transform_2, transform_1]}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': join_geometry}, attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_center', singleton=False, type='GeometryNodeTree') -def nodegroup_center(nw: NodeWrangler): - # Code generated using version 2.6.5 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketVector', 'Vector', (0.0000, 0.0000, 0.0000)), - ('NodeSocketFloat', 'MarginX', 0.5000), - ('NodeSocketFloat', 'MarginY', 0.0000), - ('NodeSocketFloat', 'MarginZ', 0.0000)]) - - bounding_box = nw.new_node(Nodes.BoundingBox, input_kwargs={'Geometry': group_input.outputs["Geometry"]}) - - subtract = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["Vector"], 1: bounding_box.outputs["Min"]}, - attrs={'operation': 'SUBTRACT'}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': subtract.outputs["Vector"]}) - - greater_than = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz.outputs["X"], 1: group_input.outputs["MarginX"]}, - attrs={'operation': 'GREATER_THAN', 'use_clamp': True}) - - subtract_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: bounding_box.outputs["Max"], 1: group_input.outputs["Vector"]}, - attrs={'operation': 'SUBTRACT'}) - - separate_xyz_1 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': subtract_1.outputs["Vector"]}) - - greater_than_1 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_1.outputs["X"], 1: group_input.outputs["MarginX"]}, - attrs={'operation': 'GREATER_THAN', 'use_clamp': True}) - - op_and = nw.new_node(Nodes.BooleanMath, input_kwargs={0: greater_than, 1: greater_than_1}) - - greater_than_2 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz.outputs["Y"], 1: group_input.outputs["MarginY"]}, - attrs={'operation': 'GREATER_THAN'}) - - greater_than_3 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_1.outputs["Y"], 1: group_input.outputs["MarginY"]}, - attrs={'operation': 'GREATER_THAN', 'use_clamp': True}) - - op_and_1 = nw.new_node(Nodes.BooleanMath, input_kwargs={0: greater_than_2, 1: greater_than_3}) - - op_and_2 = nw.new_node(Nodes.BooleanMath, input_kwargs={0: op_and, 1: op_and_1}) - - greater_than_4 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz.outputs["Z"], 1: group_input.outputs["MarginZ"]}, - attrs={'operation': 'GREATER_THAN', 'use_clamp': True}) - - greater_than_5 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_1.outputs["Z"], 1: group_input.outputs["MarginZ"]}, - attrs={'operation': 'GREATER_THAN', 'use_clamp': True}) - - op_and_3 = nw.new_node(Nodes.BooleanMath, input_kwargs={0: greater_than_4, 1: greater_than_5}) - - op_and_4 = nw.new_node(Nodes.BooleanMath, input_kwargs={0: op_and_2, 1: op_and_3}) - - op_not = nw.new_node(Nodes.BooleanMath, input_kwargs={0: op_and_4}, attrs={'operation': 'NOT'}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'In': op_and_4, 'Out': op_not}, attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_cube', singleton=False, type='GeometryNodeTree') -def nodegroup_cube(nw: NodeWrangler): - # Code generated using version 2.6.5 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVectorTranslation', 
'Size', (0.1000, 10.0000, 4.0000)), - ('NodeSocketVector', 'Pos', (0.0000, 0.0000, 0.0000)), - ('NodeSocketInt', 'Resolution', 2)]) - - cube = nw.new_node(Nodes.MeshCube, - input_kwargs={'Size': group_input.outputs["Size"], 'Vertices X': group_input.outputs["Resolution"], 'Vertices Y': group_input.outputs["Resolution"], 'Vertices Z': group_input.outputs["Resolution"]}) - - store_named_attribute_1 = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': cube.outputs["Mesh"], 'Name': 'uv_map', 3: cube.outputs["UV Map"]}, - attrs={'domain': 'CORNER', 'data_type': 'FLOAT_VECTOR'}) - - store_named_attribute = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': store_named_attribute_1, 'Name': 'uv_map'}, - attrs={'domain': 'CORNER', 'data_type': 'FLOAT_VECTOR'}) - - multiply_add = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["Size"], 1: (0.5000, 0.5000, 0.5000), 2: group_input.outputs["Pos"]}, - attrs={'operation': 'MULTIPLY_ADD'}) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': store_named_attribute, 'Translation': multiply_add.outputs["Vector"]}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': transform}, attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_hollow_cube', singleton=False, type='GeometryNodeTree') -def nodegroup_hollow_cube(nw: NodeWrangler): - # Code generated using version 2.6.5 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVectorTranslation', 'Size', (0.1000, 10.0000, 4.0000)), - ('NodeSocketVector', 'Pos', (0.0000, 0.0000, 0.0000)), - ('NodeSocketInt', 'Resolution', 2), - ('NodeSocketFloat', 'Thickness', 0.0000), - ('NodeSocketBool', 'Switch1', False), - ('NodeSocketBool', 'Switch2', False), - ('NodeSocketBool', 'Switch3', False), - ('NodeSocketBool', 'Switch4', False), - ('NodeSocketBool', 'Switch5', False), - ('NodeSocketBool', 'Switch6', False)]) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': group_input.outputs["Size"]}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Thickness"], 1: 2.0000}, - attrs={'operation': 'MULTIPLY'}) - - subtract = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz.outputs["Y"], 1: multiply}, attrs={'operation': 'SUBTRACT'}) - - subtract_1 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz.outputs["Z"], 1: multiply}, attrs={'operation': 'SUBTRACT'}) - - combine_xyz_4 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': group_input.outputs["Thickness"], 'Y': subtract, 'Z': subtract_1}) - - cube_2 = nw.new_node(Nodes.MeshCube, - input_kwargs={'Size': combine_xyz_4, 'Vertices X': 2, 'Vertices Y': 2, 'Vertices Z': 2}) - - store_named_attribute_1 = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': cube_2.outputs["Mesh"], 'Name': 'uv_map', 3: cube_2.outputs["UV Map"]}, - attrs={'domain': 'CORNER', 'data_type': 'FLOAT_VECTOR'}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Thickness"]}, attrs={'operation': 'MULTIPLY'}) - - separate_xyz_1 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': group_input.outputs["Pos"]}) - - add = nw.new_node(Nodes.Math, input_kwargs={0: multiply_1, 1: separate_xyz_1.outputs["X"]}) - - scale = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["Size"], 'Scale': 0.5000}, - attrs={'operation': 'SCALE'}) - - separate_xyz_2 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': scale.outputs["Vector"]}) - - add_1 = 
nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz_2.outputs["Y"], 1: separate_xyz_1.outputs["Y"]}) - - subtract_2 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_2.outputs["Z"], 1: separate_xyz_1.outputs["Z"]}, - attrs={'operation': 'SUBTRACT'}) - - combine_xyz_5 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': add, 'Y': add_1, 'Z': subtract_2}) - - transform_2 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': store_named_attribute_1, 'Translation': combine_xyz_5}) - - switch_2 = nw.new_node(Nodes.Switch, input_kwargs={1: group_input.outputs["Switch3"], 14: transform_2}) - - subtract_3 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz.outputs["Y"], 1: multiply}, attrs={'operation': 'SUBTRACT'}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': separate_xyz.outputs["X"], 'Y': subtract_3, 'Z': group_input.outputs["Thickness"]}) - - cube_1 = nw.new_node(Nodes.MeshCube, - input_kwargs={'Size': combine_xyz_2, 'Vertices X': 2, 'Vertices Y': 2, 'Vertices Z': 2}) - - store_named_attribute_4 = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': cube_1.outputs["Mesh"], 'Name': 'uv_map', 3: cube_1.outputs["UV Map"]}, - attrs={'domain': 'CORNER', 'data_type': 'FLOAT_VECTOR'}) - - add_2 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz_2.outputs["X"], 1: separate_xyz_1.outputs["X"]}) - - add_3 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz_2.outputs["Y"], 1: separate_xyz_1.outputs["Y"]}) - - subtract_4 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz.outputs["Z"], 1: multiply_1}, attrs={'operation': 'SUBTRACT'}) - - combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': add_2, 'Y': add_3, 'Z': subtract_4}) - - transform_1 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': store_named_attribute_4, 'Translation': combine_xyz_3}) - - switch_1 = nw.new_node(Nodes.Switch, input_kwargs={1: group_input.outputs["Switch2"], 14: transform_1}) - - subtract_5 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz.outputs["Y"], 1: multiply}, attrs={'operation': 'SUBTRACT'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': separate_xyz.outputs["X"], 'Y': subtract_5, 'Z': group_input.outputs["Thickness"]}) - - cube = nw.new_node(Nodes.MeshCube, - input_kwargs={'Size': combine_xyz, 'Vertices X': 2, 'Vertices Y': 2, 'Vertices Z': 2}) - - store_named_attribute = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': cube.outputs["Mesh"], 'Name': 'uv_map', 3: cube.outputs["UV Map"]}, - attrs={'domain': 'CORNER', 'data_type': 'FLOAT_VECTOR'}) - - add_4 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz_2.outputs["X"], 1: separate_xyz_1.outputs["X"]}) - - add_5 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz_2.outputs["Y"], 1: separate_xyz_1.outputs["Y"]}) - - add_6 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_1, 1: separate_xyz_1.outputs["Z"]}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': add_4, 'Y': add_5, 'Z': add_6}) - - transform = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': store_named_attribute, 'Translation': combine_xyz_1}) - - switch = nw.new_node(Nodes.Switch, input_kwargs={1: group_input.outputs["Switch1"], 14: transform}) - - subtract_6 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz.outputs["Y"], 1: multiply}, attrs={'operation': 'SUBTRACT'}) - - subtract_7 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz.outputs["Z"], 1: multiply}, attrs={'operation': 'SUBTRACT'}) - - combine_xyz_6 = 
nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': group_input.outputs["Thickness"], 'Y': subtract_6, 'Z': subtract_7}) - - cube_3 = nw.new_node(Nodes.MeshCube, - input_kwargs={'Size': combine_xyz_6, 'Vertices X': 2, 'Vertices Y': 2, 'Vertices Z': 2}) - - store_named_attribute_5 = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': cube_3.outputs["Mesh"], 'Name': 'uv_map', 3: cube_3.outputs["UV Map"]}, - attrs={'domain': 'CORNER', 'data_type': 'FLOAT_VECTOR'}) - - subtract_8 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz.outputs["X"], 1: multiply_1}, attrs={'operation': 'SUBTRACT'}) - - add_7 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz_2.outputs["Y"], 1: separate_xyz_1.outputs["Y"]}) - - subtract_9 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_2.outputs["Z"], 1: separate_xyz_1.outputs["Z"]}, - attrs={'operation': 'SUBTRACT'}) - - combine_xyz_7 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': subtract_8, 'Y': add_7, 'Z': subtract_9}) - - transform_3 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': store_named_attribute_5, 'Translation': combine_xyz_7}) - - switch_3 = nw.new_node(Nodes.Switch, input_kwargs={1: group_input.outputs["Switch4"], 14: transform_3}) - - combine_xyz_9 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': separate_xyz.outputs["X"], 'Y': group_input.outputs["Thickness"], 'Z': separate_xyz.outputs["Z"]}) - - cube_4 = nw.new_node(Nodes.MeshCube, - input_kwargs={'Size': combine_xyz_9, 'Vertices X': 2, 'Vertices Y': 2, 'Vertices Z': 2}) - - store_named_attribute_2 = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': cube_4.outputs["Mesh"], 'Name': 'uv_map', 3: cube_4.outputs["UV Map"]}, - attrs={'domain': 'CORNER', 'data_type': 'FLOAT_VECTOR'}) - - add_8 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz_1.outputs["X"], 1: separate_xyz_2.outputs["X"]}) - - add_9 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz_1.outputs["Y"], 1: multiply_1}) - - add_10 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz_1.outputs["Z"], 1: separate_xyz_2.outputs["Z"]}) - - combine_xyz_8 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': add_8, 'Y': add_9, 'Z': add_10}) - - transform_4 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': store_named_attribute_2, 'Translation': combine_xyz_8}) - - switch_4 = nw.new_node(Nodes.Switch, input_kwargs={1: group_input.outputs["Switch5"], 14: transform_4}) - - combine_xyz_10 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': separate_xyz.outputs["X"], 'Y': group_input.outputs["Thickness"], 'Z': separate_xyz.outputs["Z"]}) - - cube_5 = nw.new_node(Nodes.MeshCube, - input_kwargs={'Size': combine_xyz_10, 'Vertices X': 2, 'Vertices Y': 2, 'Vertices Z': 2}) - - store_named_attribute_3 = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': cube_5.outputs["Mesh"], 'Name': 'uv_map', 3: cube_5.outputs["UV Map"]}, - attrs={'domain': 'CORNER', 'data_type': 'FLOAT_VECTOR'}) - - add_11 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz_2.outputs["X"], 1: separate_xyz_1.outputs["X"]}) - - subtract_10 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz.outputs["Y"], 1: multiply_1}, attrs={'operation': 'SUBTRACT'}) - - add_12 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz_2.outputs["Z"], 1: separate_xyz_1.outputs["Z"]}) - - combine_xyz_11 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': add_11, 'Y': subtract_10, 'Z': add_12}) - - transform_5 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': store_named_attribute_3, 'Translation': 
combine_xyz_11}) - - switch_5 = nw.new_node(Nodes.Switch, input_kwargs={1: group_input.outputs["Switch6"], 14: transform_5}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [switch_2.outputs[6], switch_1.outputs[6], switch.outputs[6], switch_3.outputs[6], switch_4.outputs[6], switch_5.outputs[6]]}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': join_geometry}, attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_beverage_fridge_geometry', singleton=False, type='GeometryNodeTree') -def nodegroup_beverage_fridge_geometry(nw: NodeWrangler, preprocess: bool = False): - # Code generated using version 2.6.5 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Depth', 1.0000), - ('NodeSocketFloat', 'Width', 1.0000), - ('NodeSocketFloat', 'Height', 1.0000), - ('NodeSocketFloat', 'DoorThickness', 0.0700), - ('NodeSocketFloat', 'DoorRotation', 0.0000), - ('NodeSocketFloatDistance', 'RackRadius', 0.0100), - ('NodeSocketInt', 'RackDAmount', 5), - ('NodeSocketInt', 'RackHAmount', 2), - ('NodeSocketString', 'BrandName', 'BrandName'), - ('NodeSocketMaterial', 'Surface', None), - ('NodeSocketMaterial', 'Front', None), - ('NodeSocketMaterial', 'Handle', None), - ('NodeSocketMaterial', 'Back', None)]) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': group_input.outputs["Depth"], 'Y': group_input.outputs["Width"], 'Z': group_input.outputs["Height"]}) - - hollowcube = nw.new_node(nodegroup_hollow_cube().name, - input_kwargs={'Size': combine_xyz, 'Thickness': group_input.outputs["DoorThickness"], 'Switch2': True, 'Switch4': True}) - - - set_material_1 = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': hollowcube, 'Material': group_input.outputs["Surface"]}) - - subdivide_mesh = nw.new_node(Nodes.SubdivideMesh, input_kwargs={'Mesh': set_material_1, 'Level': 0}) - - # set_shade_smooth_2 = nw.new_node(Nodes.SetShadeSmooth, input_kwargs={'Geometry': subdivide_mesh}) - - body = nw.new_node(Nodes.Reroute, input_kwargs={'Input': subdivide_mesh}, label='Body') - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': group_input.outputs["DoorThickness"], 'Y': group_input.outputs["Width"], 'Z': group_input.outputs["Height"]}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': group_input.outputs["Depth"]}) - - cube = nw.new_node(nodegroup_cube().name, input_kwargs={'Size': combine_xyz_1, 'Pos': combine_xyz_2}) - - position = nw.new_node(Nodes.InputPosition) - - center = nw.new_node(nodegroup_center().name, - input_kwargs={'Geometry': cube, 'Vector': position, 'MarginX': -1.0000, 'MarginY': 0.1000, 'MarginZ': 0.1500}) - - set_material_2 = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': cube, 'Selection': center.outputs["In"], 'Material': group_input.outputs["Front"]}) - - set_material_3 = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': set_material_2, 'Selection': center.outputs["Out"], 'Material': group_input.outputs["Surface"]}) - - # set_shade_smooth = nw.new_node(Nodes.SetShadeSmooth, input_kwargs={'Geometry': set_material_3}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Width"], 1: 0.0500}, attrs={'operation': 'MULTIPLY'}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Height"], 1: 0.8000}, attrs={'operation': 'MULTIPLY'}) - - multiply_2 = nw.new_node(Nodes.Math, input_kwargs={0: multiply}, attrs={'operation': 'MULTIPLY'}) - - handle = 
nw.new_node(nodegroup_handle().name, - input_kwargs={'width': multiply, 'length': multiply_1, 'thickness': multiply_2}) - - add = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Depth"], 1: group_input.outputs["DoorThickness"]}) - - multiply_3 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Width"], 1: 0.1000}, attrs={'operation': 'MULTIPLY'}) - - multiply_4 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Height"], 1: 0.9000}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_13 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': add, 'Y': multiply_3, 'Z': multiply_4}) - - transform_1 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': handle, 'Translation': combine_xyz_13, 'Rotation': (0.0000, 1.5708, 0.0000)}) - - set_material_8 = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': transform_1, 'Material': group_input.outputs["Handle"]}) - - geometry_to_instance_4 = nw.new_node('GeometryNodeGeometryToInstance', input_kwargs={'Geometry': set_material_8}) - - rotate_instances_2 = nw.new_node(Nodes.RotateInstances, - input_kwargs={'Instances': geometry_to_instance_4, 'Rotation': (-1.5708, 0.0000, 0.0000), 'Pivot Point': combine_xyz_13}) - - add_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Depth"], 1: group_input.outputs["DoorThickness"]}) - - multiply_5 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Width"]}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_12 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': add_1, 'Y': multiply_5, 'Z': 0.0300}) - - multiply_6 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Height"], 1: 0.0500}, attrs={'operation': 'MULTIPLY'}) - - text = nw.new_node(nodegroup_text().name, - input_kwargs={'Translation': combine_xyz_12, 'String': group_input.outputs["BrandName"], 'Size': multiply_6, 'Offset Scale': 0.0020}) - - text = complete_no_bevel(nw, text, preprocess) - - set_material_9 = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': text, 'Material': group_input.outputs["Handle"]}) - - rotate_instances_2 = complete_bevel(nw, rotate_instances_2, preprocess) - - join_geometry_3 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [set_material_3, rotate_instances_2, set_material_9]}) - - geometry_to_instance = nw.new_node('GeometryNodeGeometryToInstance', input_kwargs={'Geometry': join_geometry_3}) - - z = nw.scalar_multiply(group_input.outputs["DoorRotation"], 1 if not preprocess else 0) - - combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': z}) - - combine_xyz_4 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': group_input.outputs["Depth"], 'Y': group_input.outputs["Width"]}) - - rotate_instances = nw.new_node(Nodes.RotateInstances, - input_kwargs={'Instances': geometry_to_instance, 'Rotation': combine_xyz_3, 'Pivot Point': combine_xyz_4}) - - door = nw.new_node(Nodes.Reroute, input_kwargs={'Input': nw.new_node(Nodes.RealizeInstances, [rotate_instances])}, label='door') - - multiply_7 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["DoorThickness"], 1: 2.1000}, - attrs={'operation': 'MULTIPLY'}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Depth"], 1: multiply_7}, - attrs={'operation': 'SUBTRACT'}) - - multiply_8 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["DoorThickness"], 1: 2.1000}, - attrs={'operation': 'MULTIPLY'}) - - subtract_1 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Width"], 1: multiply_8}, - attrs={'operation': 
'SUBTRACT'}) - - ovenrack = nw.new_node(nodegroup_oven_rack().name, - input_kwargs={'Width': subtract, 'Height': subtract_1, 'Radius': group_input.outputs["RackRadius"], 'Amount': group_input.outputs["RackDAmount"]}) - - geometry_to_instance_1 = nw.new_node('GeometryNodeGeometryToInstance', input_kwargs={'Geometry': ovenrack}) - - duplicate_elements = nw.new_node(Nodes.DuplicateElements, - input_kwargs={'Geometry': geometry_to_instance_1, 'Amount': group_input.outputs["RackHAmount"]}, - attrs={'domain': 'INSTANCE'}) - - multiply_9 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Depth"]}, attrs={'operation': 'MULTIPLY'}) - - multiply_10 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Width"]}, attrs={'operation': 'MULTIPLY'}) - - add_2 = nw.new_node(Nodes.Math, input_kwargs={0: duplicate_elements.outputs["Duplicate Index"], 1: 1.0000}) - - multiply_11 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["DoorThickness"], 1: 2.0000}, - attrs={'operation': 'MULTIPLY'}) - - subtract_2 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Height"], 1: multiply_11}, - attrs={'operation': 'SUBTRACT'}) - - add_3 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["RackHAmount"], 1: 1.0000}) - - divide = nw.new_node(Nodes.Math, input_kwargs={0: subtract_2, 1: add_3}, attrs={'operation': 'DIVIDE'}) - - multiply_12 = nw.new_node(Nodes.Math, input_kwargs={0: add_2, 1: divide}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_5 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply_9, 'Y': multiply_10, 'Z': multiply_12}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': duplicate_elements.outputs["Geometry"], 'Offset': combine_xyz_5}) - - set_material = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': set_position, 'Material': group_input.outputs["Handle"]}) - - racks = nw.new_node(Nodes.Reroute, input_kwargs={'Input': nw.new_node(Nodes.RealizeInstances, [set_material])}, label='racks') - - add_4 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Depth"], 1: group_input.outputs["DoorThickness"]}) - - reroute_10 = nw.new_node(Nodes.Reroute, input_kwargs={'Input': add_4}) - - reroute_11 = nw.new_node(Nodes.Reroute, input_kwargs={'Input': group_input.outputs["Width"]}) - - reroute_8 = nw.new_node(Nodes.Reroute, input_kwargs={'Input': group_input.outputs["DoorThickness"]}) - - combine_xyz_6 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': reroute_10, 'Y': reroute_11, 'Z': reroute_8}) - - reroute_9 = nw.new_node(Nodes.Reroute, input_kwargs={'Input': group_input.outputs["Height"]}) - - combine_xyz_7 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': reroute_9}) - - cube_1 = nw.new_node(nodegroup_cube().name, input_kwargs={'Size': combine_xyz_6, 'Pos': combine_xyz_7}) - - set_material_5 = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': cube_1, 'Material': group_input.outputs["Back"]}) - - # set_shade_smooth_1 = nw.new_node(Nodes.SetShadeSmooth, input_kwargs={'Geometry': set_material_5}) - - join_geometry_2 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': set_material_5}) - - heater = nw.new_node(Nodes.Reroute, input_kwargs={'Input': join_geometry_2}, label='heater') - - join_geometry = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [body, door, racks, heater]}) - - geometry = nw.new_node(Nodes.RealizeInstances,[join_geometry]) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': geometry}) diff --git 
a/infinigen/assets/appliances/dishwasher.py b/infinigen/assets/appliances/dishwasher.py deleted file mode 100644 index 0b1694a3b..000000000 --- a/infinigen/assets/appliances/dishwasher.py +++ /dev/null @@ -1,929 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Hongyu Wen - -import bpy -import random -import mathutils -import numpy as np -from numpy.random import uniform as U, normal as N, randint as RI - -from infinigen.assets.materials import metal -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core.util.blender import delete -from infinigen.core.util.bevelling import get_bevel_edges, add_bevel, complete_bevel, complete_no_bevel -from infinigen.core import surface -from infinigen.core.util import blender as butil - -from infinigen.core.util.math import FixedSeed -from infinigen.core.placement.factory import AssetFactory -from infinigen.assets.material_assignments import AssetList - -class DishwasherFactory(AssetFactory): - def __init__(self, factory_seed, coarse=False, dimensions=[1., 1., 1.]): - super(DishwasherFactory, self).__init__(factory_seed, coarse=coarse) - - self.dimensions = dimensions - with FixedSeed(factory_seed): - self.params = self.sample_parameters(dimensions) - self.material_params, self.scratch, self.edge_wear = self.get_material_params() - self.params.update(self.material_params) - - def get_material_params(self): - material_assignments = AssetList['DishwasherFactory']() - params = { - "Surface": material_assignments['surface'].assign_material(), - "Front": material_assignments['front'].assign_material(), - "WhiteMetal": material_assignments['white_metal'].assign_material(), - "Top": material_assignments['top'].assign_material(), - 'NameMaterial': material_assignments['name_material'].assign_material(), - } - wrapped_params = { - k: surface.shaderfunc_to_material(v) for k, v in params.items() - } - - scratch_prob, edge_wear_prob = material_assignments['wear_tear_prob'] - scratch, edge_wear = material_assignments['wear_tear'] - - is_scratch = np.random.uniform() < scratch_prob - is_edge_wear = np.random.uniform() < edge_wear_prob - if not is_scratch: - scratch = None - - if not is_edge_wear: - edge_wear = None - - return wrapped_params, scratch, edge_wear - - @staticmethod - def sample_parameters(dimensions): - # depth, width, height = dimensions - depth = 1 + N(0, 0.1) - width = 1 + N(0, 0.1) - height = 1 + N(0, 0.1) - door_thickness = U(0.05, 0.1) * depth - door_rotation = 0 # Set to 0 for now - - rack_radius = U(0.01, 0.02) * depth - rack_h_amount = RI(2, 3) - brand_name = "BrandName" - - params = { - "Depth": depth, - "Width": width, - "Height": height, - "DoorThickness": door_thickness, - "DoorRotation": door_rotation, - "RackRadius": rack_radius, - "RackAmount": rack_h_amount, - "BrandName": brand_name, - } - return params - - def create_asset(self, **params): - obj = butil.spawn_cube() - butil.modify_mesh(obj, 'NODES', node_group=nodegroup_dishwasher_geometry(preprocess=True), ng_inputs=self.params, apply=True) - bevel_edges = get_bevel_edges(obj) - delete(obj) - obj = butil.spawn_cube() - butil.modify_mesh(obj, 'NODES', node_group=nodegroup_dishwasher_geometry(), ng_inputs=self.params, apply=True) - obj = add_bevel(obj, bevel_edges, offset=0.01) - - return obj - - def 
finalize_assets(self, assets): - if self.scratch: - self.scratch.apply(assets) - if self.edge_wear: - self.edge_wear.apply(assets) - -@node_utils.to_nodegroup('nodegroup_dish_rack', singleton=False, type='GeometryNodeTree') -def nodegroup_dish_rack(nw: NodeWrangler): - # Code generated using version 2.6.5 of the node_transpiler - - quadrilateral = nw.new_node('GeometryNodeCurvePrimitiveQuadrilateral') - - curve_line = nw.new_node(Nodes.CurveLine, - input_kwargs={'Start': (0.0000, -1.0000, 0.0000), 'End': (0.0000, 1.0000, 0.0000)}) - - group_input = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketFloatDistance', 'Depth', 2.0000), - ('NodeSocketFloatDistance', 'Width', 2.0000), ('NodeSocketFloatDistance', 'Radius', 0.0200), - ('NodeSocketInt', 'Amount', 5), ('NodeSocketFloat', 'Height', 0.5000)]) - - combine_xyz_4 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'Y': -1.0000, 'Z': group_input.outputs["Height"]}) - - curve_line_1 = nw.new_node(Nodes.CurveLine, - input_kwargs={'Start': (0.0000, -1.0000, 0.0000), 'End': combine_xyz_4}) - - geometry_to_instance_1 = nw.new_node('GeometryNodeGeometryToInstance', - input_kwargs={'Geometry': curve_line_1}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Amount"], 1: 2.0000}, - attrs={'operation': 'MULTIPLY'}) - - duplicate_elements_2 = nw.new_node(Nodes.DuplicateElements, - input_kwargs={'Geometry': geometry_to_instance_1, 'Amount': multiply}, - attrs={'domain': 'INSTANCE'}) - - divide = nw.new_node(Nodes.Math, input_kwargs={0: 1.0000, 1: group_input.outputs["Amount"]}, - attrs={'operation': 'DIVIDE'}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: duplicate_elements_2.outputs["Duplicate Index"], 1: divide}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': multiply_1}) - - set_position_2 = nw.new_node(Nodes.SetPosition, input_kwargs={ - 'Geometry': duplicate_elements_2.outputs["Geometry"], - 'Offset': combine_xyz_3 - }) - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [curve_line, set_position_2]}) - - geometry_to_instance = nw.new_node('GeometryNodeGeometryToInstance', - input_kwargs={'Geometry': join_geometry_1}) - - duplicate_elements = nw.new_node(Nodes.DuplicateElements, - input_kwargs={'Geometry': geometry_to_instance, 'Amount': multiply}, - attrs={'domain': 'INSTANCE'}) - - subtract = nw.new_node(Nodes.Math, input_kwargs={ - 0: duplicate_elements.outputs["Duplicate Index"], - 1: group_input.outputs["Amount"] - }, attrs={'operation': 'SUBTRACT'}) - - multiply_2 = nw.new_node(Nodes.Math, input_kwargs={0: subtract, 1: divide}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply_2}) - - set_position = nw.new_node(Nodes.SetPosition, input_kwargs={ - 'Geometry': duplicate_elements.outputs["Geometry"], - 'Offset': combine_xyz - }) - - transform_1 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': set_position, 'Rotation': (0.0000, 0.0000, 1.5708)}) - - duplicate_elements_1 = nw.new_node(Nodes.DuplicateElements, - input_kwargs={'Geometry': geometry_to_instance, 'Amount': multiply}, - attrs={'domain': 'INSTANCE'}) - - subtract_1 = nw.new_node(Nodes.Math, input_kwargs={ - 0: duplicate_elements_1.outputs["Duplicate Index"], - 1: group_input.outputs["Amount"] - }, attrs={'operation': 'SUBTRACT'}) - - multiply_3 = nw.new_node(Nodes.Math, input_kwargs={0: subtract_1, 1: divide}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz_1 = 
nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply_3}) - - set_position_1 = nw.new_node(Nodes.SetPosition, input_kwargs={ - 'Geometry': duplicate_elements_1.outputs["Geometry"], - 'Offset': combine_xyz_1 - }) - - quadrilateral_1 = nw.new_node('GeometryNodeCurvePrimitiveQuadrilateral') - - multiply_4 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Height"], 1: 0.8000}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz_5 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': multiply_4}) - - transform_2 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': quadrilateral_1, 'Translation': combine_xyz_5}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, input_kwargs={ - 'Geometry': [quadrilateral, transform_1, set_position_1, transform_2] - }) - - curve_circle = nw.new_node(Nodes.CurveCircle, input_kwargs={'Radius': group_input.outputs["Radius"]}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, input_kwargs={ - 'Curve': join_geometry, - 'Profile Curve': curve_circle.outputs["Curve"], - 'Fill Caps': True - }) - - multiply_5 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Depth"]}, - attrs={'operation': 'MULTIPLY'}) - - multiply_6 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Width"]}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply_5, 'Y': multiply_6, 'Z': 0.5000}) - - transform = nw.new_node(Nodes.Transform, input_kwargs={ - 'Geometry': curve_to_mesh, - 'Rotation': (0.0000, 0.0000, 1.5708), - 'Scale': combine_xyz_2 - }) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Mesh': transform}, - attrs={'is_active_output': True}) - - -@node_utils.to_nodegroup('nodegroup_text', singleton=False, type='GeometryNodeTree') -def nodegroup_text(nw: NodeWrangler): - # Code generated using version 2.6.5 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, expose_input=[ - ('NodeSocketVectorTranslation', 'Translation', (1.5000, 0.0000, 0.0000)), - ('NodeSocketString', 'String', 'BrandName'), ('NodeSocketFloatDistance', 'Size', 0.0500), - ('NodeSocketFloat', 'Offset Scale', 0.0020)]) - - string_to_curves = nw.new_node('GeometryNodeStringToCurves', input_kwargs={ - 'String': group_input.outputs["String"], - 'Size': group_input.outputs["Size"] - }, attrs={'align_y': 'BOTTOM_BASELINE', 'align_x': 'CENTER'}) - - fill_curve = nw.new_node(Nodes.FillCurve, - input_kwargs={'Curve': string_to_curves.outputs["Curve Instances"]}) - - extrude_mesh = nw.new_node(Nodes.ExtrudeMesh, input_kwargs={ - 'Mesh': fill_curve, - 'Offset Scale': group_input.outputs["Offset Scale"] - }) - - transform_1 = nw.new_node(Nodes.Transform, input_kwargs={ - 'Geometry': extrude_mesh.outputs["Mesh"], - 'Translation': group_input.outputs["Translation"], - 'Rotation': (1.5708, 0.0000, 1.5708) - }) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': transform_1}, - attrs={'is_active_output': True}) - - -@node_utils.to_nodegroup('nodegroup_handle', singleton=False, type='GeometryNodeTree') -def nodegroup_handle(nw: NodeWrangler): - # Code generated using version 2.6.5 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketFloat', 'width', 0.0000), - ('NodeSocketFloat', 'length', 0.0000), ('NodeSocketFloat', 'thickness', 0.0200)]) - - cube = nw.new_node(Nodes.MeshCube, input_kwargs={'Size': group_input.outputs["width"]}) - - store_named_attribute = nw.new_node(Nodes.StoreNamedAttribute, input_kwargs={ - 'Geometry': 
cube.outputs["Mesh"], - 'Name': 'uv_map', - 3: cube.outputs["UV Map"] - }, attrs={'domain': 'CORNER', 'data_type': 'FLOAT_VECTOR'}) - - cube_1 = nw.new_node(Nodes.MeshCube, input_kwargs={'Size': group_input.outputs["width"]}) - - store_named_attribute_1 = nw.new_node(Nodes.StoreNamedAttribute, input_kwargs={ - 'Geometry': cube_1.outputs["Mesh"], - 'Name': 'uv_map', - 3: cube_1.outputs["UV Map"] - }, attrs={'domain': 'CORNER', 'data_type': 'FLOAT_VECTOR'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': group_input.outputs["length"]}) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': store_named_attribute_1, 'Translation': combine_xyz}) - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [store_named_attribute, transform]}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["width"]}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': multiply}) - - transform_2 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': join_geometry_1, 'Translation': combine_xyz_3}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["length"], 1: group_input.outputs["width"]}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={ - 'X': group_input.outputs["width"], - 'Y': add, - 'Z': group_input.outputs["thickness"] - }) - - cube_2 = nw.new_node(Nodes.MeshCube, input_kwargs={'Size': combine_xyz_1}) - - store_named_attribute_2 = nw.new_node(Nodes.StoreNamedAttribute, input_kwargs={ - 'Geometry': cube_2.outputs["Mesh"], - 'Name': 'uv_map', - 3: cube_2.outputs["UV Map"] - }, attrs={'domain': 'CORNER', 'data_type': 'FLOAT_VECTOR'}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["length"]}, - attrs={'operation': 'MULTIPLY'}) - - multiply_2 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["thickness"]}, - attrs={'operation': 'MULTIPLY'}) - - add_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["width"], 1: multiply_2}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': multiply_1, 'Z': add_1}) - - transform_1 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': store_named_attribute_2, 'Translation': combine_xyz_2}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [transform_2, transform_1]}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': join_geometry}, - attrs={'is_active_output': True}) - - - -@node_utils.to_nodegroup('nodegroup_center', singleton=False, type='GeometryNodeTree') -def nodegroup_center(nw: NodeWrangler): - # Code generated using version 2.6.5 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketVector', 'Vector', (0.0000, 0.0000, 0.0000)), ('NodeSocketFloat', 'MarginX', 0.5000), - ('NodeSocketFloat', 'MarginY', 0.0000), ('NodeSocketFloat', 'MarginZ', 0.0000)]) - - bounding_box = nw.new_node(Nodes.BoundingBox, input_kwargs={'Geometry': group_input.outputs["Geometry"]}) - - subtract = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["Vector"], 1: bounding_box.outputs["Min"]}, - attrs={'operation': 'SUBTRACT'}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': subtract.outputs["Vector"]}) - - greater_than = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz.outputs["X"], 1: group_input.outputs["MarginX"]}, - attrs={'operation': 'GREATER_THAN', 
'use_clamp': True}) - - subtract_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: bounding_box.outputs["Max"], 1: group_input.outputs["Vector"]}, - attrs={'operation': 'SUBTRACT'}) - - separate_xyz_1 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': subtract_1.outputs["Vector"]}) - - greater_than_1 = nw.new_node(Nodes.Math, input_kwargs={ - 0: separate_xyz_1.outputs["X"], - 1: group_input.outputs["MarginX"] - }, attrs={'operation': 'GREATER_THAN', 'use_clamp': True}) - - op_and = nw.new_node(Nodes.BooleanMath, input_kwargs={0: greater_than, 1: greater_than_1}) - - greater_than_2 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz.outputs["Y"], 1: group_input.outputs["MarginY"]}, - attrs={'operation': 'GREATER_THAN'}) - - greater_than_3 = nw.new_node(Nodes.Math, input_kwargs={ - 0: separate_xyz_1.outputs["Y"], - 1: group_input.outputs["MarginY"] - }, attrs={'operation': 'GREATER_THAN', 'use_clamp': True}) - - op_and_1 = nw.new_node(Nodes.BooleanMath, input_kwargs={0: greater_than_2, 1: greater_than_3}) - - op_and_2 = nw.new_node(Nodes.BooleanMath, input_kwargs={0: op_and, 1: op_and_1}) - - greater_than_4 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz.outputs["Z"], 1: group_input.outputs["MarginZ"]}, - attrs={'operation': 'GREATER_THAN', 'use_clamp': True}) - - greater_than_5 = nw.new_node(Nodes.Math, input_kwargs={ - 0: separate_xyz_1.outputs["Z"], - 1: group_input.outputs["MarginZ"] - }, attrs={'operation': 'GREATER_THAN', 'use_clamp': True}) - - op_and_3 = nw.new_node(Nodes.BooleanMath, input_kwargs={0: greater_than_4, 1: greater_than_5}) - - op_and_4 = nw.new_node(Nodes.BooleanMath, input_kwargs={0: op_and_2, 1: op_and_3}) - - op_not = nw.new_node(Nodes.BooleanMath, input_kwargs={0: op_and_4}, attrs={'operation': 'NOT'}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'In': op_and_4, 'Out': op_not}, - attrs={'is_active_output': True}) - - -@node_utils.to_nodegroup('nodegroup_cube', singleton=False, type='GeometryNodeTree') -def nodegroup_cube(nw: NodeWrangler): - # Code generated using version 2.6.5 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVectorTranslation', 'Size', (0.1000, 10.0000, 4.0000)), - ('NodeSocketVector', 'Pos', (0.0000, 0.0000, 0.0000)), - ('NodeSocketInt', 'Resolution', 2)]) - - cube = nw.new_node(Nodes.MeshCube, input_kwargs={ - 'Size': group_input.outputs["Size"], - 'Vertices X': group_input.outputs["Resolution"], - 'Vertices Y': group_input.outputs["Resolution"], - 'Vertices Z': group_input.outputs["Resolution"] - }) - - store_named_attribute_1 = nw.new_node(Nodes.StoreNamedAttribute, input_kwargs={ - 'Geometry': cube.outputs["Mesh"], - 'Name': 'uv_map', - 3: cube.outputs["UV Map"] - }, attrs={'domain': 'CORNER', 'data_type': 'FLOAT_VECTOR'}) - - store_named_attribute = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': store_named_attribute_1, 'Name': 'uv_map'}, - attrs={'domain': 'CORNER', 'data_type': 'FLOAT_VECTOR'}) - - multiply_add = nw.new_node(Nodes.VectorMath, input_kwargs={ - 0: group_input.outputs["Size"], - 1: (0.5000, 0.5000, 0.5000), - 2: group_input.outputs["Pos"] - }, attrs={'operation': 'MULTIPLY_ADD'}) - - transform = nw.new_node(Nodes.Transform, input_kwargs={ - 'Geometry': store_named_attribute, - 'Translation': multiply_add.outputs["Vector"] - }) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': transform}, - attrs={'is_active_output': True}) - - - -@node_utils.to_nodegroup('nodegroup_hollow_cube', 
singleton=False, type='GeometryNodeTree') -def nodegroup_hollow_cube(nw: NodeWrangler): - # Code generated using version 2.6.5 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVectorTranslation', 'Size', (0.1000, 10.0000, 4.0000)), - ('NodeSocketVector', 'Pos', (0.0000, 0.0000, 0.0000)), - ('NodeSocketInt', 'Resolution', 2), - ('NodeSocketFloat', 'Thickness', 0.0000), - ('NodeSocketBool', 'Switch1', False), - ('NodeSocketBool', 'Switch2', False), - ('NodeSocketBool', 'Switch3', False), - ('NodeSocketBool', 'Switch4', False), - ('NodeSocketBool', 'Switch5', False), - ('NodeSocketBool', 'Switch6', False)]) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': group_input.outputs["Size"]}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Thickness"], 1: 2.0000}, - attrs={'operation': 'MULTIPLY'}) - - subtract = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz.outputs["Y"], 1: multiply}, - attrs={'operation': 'SUBTRACT'}) - - subtract_1 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz.outputs["Z"], 1: multiply}, - attrs={'operation': 'SUBTRACT'}) - - combine_xyz_4 = nw.new_node(Nodes.CombineXYZ, input_kwargs={ - 'X': group_input.outputs["Thickness"], - 'Y': subtract, - 'Z': subtract_1 - }) - - cube_2 = nw.new_node(Nodes.MeshCube, - input_kwargs={'Size': combine_xyz_4, 'Vertices X': group_input.outputs["Resolution"], 'Vertices Y': group_input.outputs["Resolution"], 'Vertices Z': group_input.outputs["Resolution"]}) - - store_named_attribute_1 = nw.new_node(Nodes.StoreNamedAttribute, input_kwargs={ - 'Geometry': cube_2.outputs["Mesh"], - 'Name': 'uv_map', - 3: cube_2.outputs["UV Map"] - }, attrs={'domain': 'CORNER', 'data_type': 'FLOAT_VECTOR'}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Thickness"]}, - attrs={'operation': 'MULTIPLY'}) - - separate_xyz_1 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': group_input.outputs["Pos"]}) - - add = nw.new_node(Nodes.Math, input_kwargs={0: multiply_1, 1: separate_xyz_1.outputs["X"]}) - - scale = nw.new_node(Nodes.VectorMath, input_kwargs={0: group_input.outputs["Size"], 'Scale': 0.5000}, - attrs={'operation': 'SCALE'}) - - separate_xyz_2 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': scale.outputs["Vector"]}) - - add_1 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_2.outputs["Y"], 1: separate_xyz_1.outputs["Y"]}) - - subtract_2 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_2.outputs["Z"], 1: separate_xyz_1.outputs["Z"]}, - attrs={'operation': 'SUBTRACT'}) - - combine_xyz_5 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': add, 'Y': add_1, 'Z': subtract_2}) - - transform_2 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': store_named_attribute_1, 'Translation': combine_xyz_5}) - - switch_2 = nw.new_node(Nodes.Switch, input_kwargs={1: group_input.outputs["Switch3"], 14: transform_2}) - - subtract_3 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz.outputs["Y"], 1: multiply}, - attrs={'operation': 'SUBTRACT'}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={ - 'X': separate_xyz.outputs["X"], - 'Y': subtract_3, - 'Z': group_input.outputs["Thickness"] - }) - - cube_1 = nw.new_node(Nodes.MeshCube, - input_kwargs={'Size': combine_xyz_2, 'Vertices X': group_input.outputs["Resolution"], 'Vertices Y': group_input.outputs["Resolution"], 'Vertices Z': group_input.outputs["Resolution"]}) - - store_named_attribute_4 = 
nw.new_node(Nodes.StoreNamedAttribute, input_kwargs={ - 'Geometry': cube_1.outputs["Mesh"], - 'Name': 'uv_map', - 3: cube_1.outputs["UV Map"] - }, attrs={'domain': 'CORNER', 'data_type': 'FLOAT_VECTOR'}) - - add_2 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_2.outputs["X"], 1: separate_xyz_1.outputs["X"]}) - - add_3 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_2.outputs["Y"], 1: separate_xyz_1.outputs["Y"]}) - - subtract_4 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz.outputs["Z"], 1: multiply_1}, - attrs={'operation': 'SUBTRACT'}) - - combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': add_2, 'Y': add_3, 'Z': subtract_4}) - - transform_1 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': store_named_attribute_4, 'Translation': combine_xyz_3}) - - switch_1 = nw.new_node(Nodes.Switch, input_kwargs={1: group_input.outputs["Switch2"], 14: transform_1}) - - subtract_5 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz.outputs["Y"], 1: multiply}, - attrs={'operation': 'SUBTRACT'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={ - 'X': separate_xyz.outputs["X"], - 'Y': subtract_5, - 'Z': group_input.outputs["Thickness"] - }) - - cube = nw.new_node(Nodes.MeshCube, - input_kwargs={'Size': combine_xyz, 'Vertices X': group_input.outputs["Resolution"], 'Vertices Y': group_input.outputs["Resolution"], 'Vertices Z': group_input.outputs["Resolution"]}) - - store_named_attribute = nw.new_node(Nodes.StoreNamedAttribute, input_kwargs={ - 'Geometry': cube.outputs["Mesh"], - 'Name': 'uv_map', - 3: cube.outputs["UV Map"] - }, attrs={'domain': 'CORNER', 'data_type': 'FLOAT_VECTOR'}) - - add_4 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_2.outputs["X"], 1: separate_xyz_1.outputs["X"]}) - - add_5 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_2.outputs["Y"], 1: separate_xyz_1.outputs["Y"]}) - - add_6 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_1, 1: separate_xyz_1.outputs["Z"]}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': add_4, 'Y': add_5, 'Z': add_6}) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': store_named_attribute, 'Translation': combine_xyz_1}) - - switch = nw.new_node(Nodes.Switch, input_kwargs={1: group_input.outputs["Switch1"], 14: transform}) - - subtract_6 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz.outputs["Y"], 1: multiply}, - attrs={'operation': 'SUBTRACT'}) - - subtract_7 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz.outputs["Z"], 1: multiply}, - attrs={'operation': 'SUBTRACT'}) - - combine_xyz_6 = nw.new_node(Nodes.CombineXYZ, input_kwargs={ - 'X': group_input.outputs["Thickness"], - 'Y': subtract_6, - 'Z': subtract_7 - }) - - cube_3 = nw.new_node(Nodes.MeshCube, - input_kwargs={'Size': combine_xyz_6, 'Vertices X': group_input.outputs["Resolution"], 'Vertices Y': group_input.outputs["Resolution"], 'Vertices Z': group_input.outputs["Resolution"]}) - - store_named_attribute_5 = nw.new_node(Nodes.StoreNamedAttribute, input_kwargs={ - 'Geometry': cube_3.outputs["Mesh"], - 'Name': 'uv_map', - 3: cube_3.outputs["UV Map"] - }, attrs={'domain': 'CORNER', 'data_type': 'FLOAT_VECTOR'}) - - subtract_8 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz.outputs["X"], 1: multiply_1}, - attrs={'operation': 'SUBTRACT'}) - - add_7 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_2.outputs["Y"], 1: separate_xyz_1.outputs["Y"]}) - - subtract_9 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_2.outputs["Z"], 1: 
separate_xyz_1.outputs["Z"]}, - attrs={'operation': 'SUBTRACT'}) - - combine_xyz_7 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': subtract_8, 'Y': add_7, 'Z': subtract_9}) - - transform_3 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': store_named_attribute_5, 'Translation': combine_xyz_7}) - - switch_3 = nw.new_node(Nodes.Switch, input_kwargs={1: group_input.outputs["Switch4"], 14: transform_3}) - - combine_xyz_9 = nw.new_node(Nodes.CombineXYZ, input_kwargs={ - 'X': separate_xyz.outputs["X"], - 'Y': group_input.outputs["Thickness"], - 'Z': separate_xyz.outputs["Z"] - }) - - cube_4 = nw.new_node(Nodes.MeshCube, - input_kwargs={'Size': combine_xyz_9, 'Vertices X': group_input.outputs["Resolution"], 'Vertices Y': group_input.outputs["Resolution"], 'Vertices Z': group_input.outputs["Resolution"]}) - - store_named_attribute_2 = nw.new_node(Nodes.StoreNamedAttribute, input_kwargs={ - 'Geometry': cube_4.outputs["Mesh"], - 'Name': 'uv_map', - 3: cube_4.outputs["UV Map"] - }, attrs={'domain': 'CORNER', 'data_type': 'FLOAT_VECTOR'}) - - add_8 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_1.outputs["X"], 1: separate_xyz_2.outputs["X"]}) - - add_9 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz_1.outputs["Y"], 1: multiply_1}) - - add_10 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_1.outputs["Z"], 1: separate_xyz_2.outputs["Z"]}) - - combine_xyz_8 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': add_8, 'Y': add_9, 'Z': add_10}) - - transform_4 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': store_named_attribute_2, 'Translation': combine_xyz_8}) - - switch_4 = nw.new_node(Nodes.Switch, input_kwargs={1: group_input.outputs["Switch5"], 14: transform_4}) - - combine_xyz_10 = nw.new_node(Nodes.CombineXYZ, input_kwargs={ - 'X': separate_xyz.outputs["X"], - 'Y': group_input.outputs["Thickness"], - 'Z': separate_xyz.outputs["Z"] - }) - - cube_5 = nw.new_node(Nodes.MeshCube, - input_kwargs={'Size': combine_xyz_10, 'Vertices X': group_input.outputs["Resolution"], 'Vertices Y': group_input.outputs["Resolution"], 'Vertices Z': group_input.outputs["Resolution"]}) - - store_named_attribute_3 = nw.new_node(Nodes.StoreNamedAttribute, input_kwargs={ - 'Geometry': cube_5.outputs["Mesh"], - 'Name': 'uv_map', - 3: cube_5.outputs["UV Map"] - }, attrs={'domain': 'CORNER', 'data_type': 'FLOAT_VECTOR'}) - - add_11 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_2.outputs["X"], 1: separate_xyz_1.outputs["X"]}) - - subtract_10 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz.outputs["Y"], 1: multiply_1}, - attrs={'operation': 'SUBTRACT'}) - - add_12 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_2.outputs["Z"], 1: separate_xyz_1.outputs["Z"]}) - - combine_xyz_11 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': add_11, 'Y': subtract_10, 'Z': add_12}) - - transform_5 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': store_named_attribute_3, 'Translation': combine_xyz_11}) - - switch_5 = nw.new_node(Nodes.Switch, input_kwargs={1: group_input.outputs["Switch6"], 14: transform_5}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, input_kwargs={ - 'Geometry': [switch_2.outputs[6], switch_1.outputs[6], switch.outputs[6], switch_3.outputs[6], - switch_4.outputs[6], switch_5.outputs[6]] - }) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': join_geometry}, - attrs={'is_active_output': True}) - - - -@node_utils.to_nodegroup('nodegroup_dishwasher_geometry', singleton=False, type='GeometryNodeTree') -def 
nodegroup_dishwasher_geometry(nw: NodeWrangler, preprocess: bool = False): - # Code generated using version 2.6.5 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Depth', 1.0000), - ('NodeSocketFloat', 'Width', 1.0000), - ('NodeSocketFloat', 'Height', 1.0000), - ('NodeSocketFloat', 'DoorThickness', 0.0700), - ('NodeSocketFloat', 'DoorRotation', 0.0000), - ('NodeSocketFloatDistance', 'RackRadius', 0.0100), - ('NodeSocketInt', 'RackAmount', 2), - ('NodeSocketString', 'BrandName', 'BrandName'), - ('NodeSocketMaterial', 'Surface', None), - ('NodeSocketMaterial', 'Front', None), - ('NodeSocketMaterial', 'Top', None), - ('NodeSocketMaterial', 'WhiteMetal', None), - ('NodeSocketMaterial', 'NameMaterial', None)]) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={ - 'X': group_input.outputs["Depth"], - 'Y': group_input.outputs["Width"], - 'Z': group_input.outputs["Height"] - }) - - hollowcube = nw.new_node(nodegroup_hollow_cube().name, input_kwargs={ - 'Size': combine_xyz, - 'Thickness': group_input.outputs["DoorThickness"], - 'Switch2': True, - 'Switch4': True - }) - - set_material_1 = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': hollowcube, 'Material': group_input.outputs["Surface"]}) - - subdivide_mesh = nw.new_node(Nodes.SubdivideMesh, input_kwargs={'Mesh': set_material_1, 'Level': 0}) - - # set_shade_smooth_2 = nw.new_node(Nodes.SetShadeSmooth, input_kwargs={'Geometry': subdivide_mesh}) - - body = nw.new_node(Nodes.Reroute, input_kwargs={'Input': subdivide_mesh}, label='Body') - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={ - 'X': group_input.outputs["DoorThickness"], - 'Y': group_input.outputs["Width"], - 'Z': group_input.outputs["Height"] - }) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': group_input.outputs["Depth"]}) - - cube = nw.new_node(nodegroup_cube().name, input_kwargs={'Size': combine_xyz_1, 'Pos': combine_xyz_2}) - - position = nw.new_node(Nodes.InputPosition) - - center = nw.new_node(nodegroup_center().name, input_kwargs={ - 'Geometry': cube, - 'Vector': position, - 'MarginX': -1.0000, - 'MarginY': 0.1000, - 'MarginZ': 0.1500 - }) - - set_material_2 = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': cube, 'Selection': center.outputs["In"], 'Material': group_input.outputs["Front"]}) - - set_material_3 = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': set_material_2, 'Selection': center.outputs["Out"], 'Material': group_input.outputs["Surface"]}) - - - # set_shade_smooth = nw.new_node(Nodes.SetShadeSmooth, input_kwargs={'Geometry': set_material_3}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Width"], 1: 0.0500}, - attrs={'operation': 'MULTIPLY'}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Width"], 1: 0.8000}, - attrs={'operation': 'MULTIPLY'}) - - multiply_2 = nw.new_node(Nodes.Math, input_kwargs={0: multiply}, attrs={'operation': 'MULTIPLY'}) - - handle = nw.new_node(nodegroup_handle().name, - input_kwargs={'width': multiply, 'length': multiply_1, 'thickness': multiply_2}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Depth"], 1: group_input.outputs["DoorThickness"]}) - - multiply_3 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Width"], 1: 0.1000}, - attrs={'operation': 'MULTIPLY'}) - - multiply_4 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Height"], 1: 0.9500}, - attrs={'operation': 'MULTIPLY'}) - - 
combine_xyz_13 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': add, 'Y': multiply_3, 'Z': multiply_4}) - - transform_1 = nw.new_node(Nodes.Transform, input_kwargs={ - 'Geometry': handle, - 'Translation': combine_xyz_13, - 'Rotation': (0.0000, 1.5708, 0.0000) - }) - - set_material_8 = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': transform_1, 'Material': group_input.outputs["WhiteMetal"]}) - - add_1 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Depth"], 1: group_input.outputs["DoorThickness"]}) - - multiply_5 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Width"]}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz_12 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': add_1, 'Y': multiply_5, 'Z': 0.0300}) - - multiply_6 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Height"], 1: 0.0500}, - attrs={'operation': 'MULTIPLY'}) - - text = nw.new_node(nodegroup_text().name, input_kwargs={ - 'Translation': combine_xyz_12, - 'String': group_input.outputs["BrandName"], - 'Size': multiply_6 - }) - - text = complete_no_bevel(nw, text, preprocess) - - set_material_9 = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': text, 'Material': group_input.outputs["NameMaterial"]}) - - set_material_8 = complete_bevel(nw, set_material_8, preprocess) - - join_geometry_3 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [set_material_3, set_material_8, set_material_9]}) - - geometry_to_instance = nw.new_node('GeometryNodeGeometryToInstance', - input_kwargs={'Geometry': join_geometry_3}) - - y = nw.scalar_multiply(group_input.outputs["DoorRotation"], 1 if not preprocess else 0) - - combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': y}) - - combine_xyz_4 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': group_input.outputs["Depth"]}) - - rotate_instances = nw.new_node(Nodes.RotateInstances, input_kwargs={ - 'Instances': geometry_to_instance, - 'Rotation': combine_xyz_3, - 'Pivot Point': combine_xyz_4 - }) - - rotate_instances = nw.new_node(Nodes.RealizeInstances, [rotate_instances]) - - door = nw.new_node(Nodes.Reroute, input_kwargs={'Input': rotate_instances}, label='door') - - multiply_7 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["DoorThickness"], 1: 2.1000}, - attrs={'operation': 'MULTIPLY'}) - - subtract = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Depth"], 1: multiply_7}, - attrs={'operation': 'SUBTRACT'}) - - multiply_8 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["DoorThickness"], 1: 2.1000}, - attrs={'operation': 'MULTIPLY'}) - - subtract_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Width"], 1: multiply_8}, - attrs={'operation': 'SUBTRACT'}) - - dishrack = nw.new_node(nodegroup_dish_rack().name, input_kwargs={ - 'Depth': subtract_1, - 'Width': subtract, - 'Radius': group_input.outputs["RackRadius"], - 'Amount': 4, - 'Height': 0.1000 - }) - - geometry_to_instance_1 = nw.new_node('GeometryNodeGeometryToInstance', input_kwargs={'Geometry': dishrack}) - - duplicate_elements = nw.new_node(Nodes.DuplicateElements, input_kwargs={ - 'Geometry': geometry_to_instance_1, - 'Amount': group_input.outputs["RackAmount"] - }, attrs={'domain': 'INSTANCE'}) - - multiply_9 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Depth"]}, - attrs={'operation': 'MULTIPLY'}) - - multiply_10 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Width"]}, - attrs={'operation': 'MULTIPLY'}) - - add_2 = nw.new_node(Nodes.Math, 
input_kwargs={0: duplicate_elements.outputs["Duplicate Index"], 1: 1.0000}) - - multiply_11 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["DoorThickness"], 1: 2.0000}, - attrs={'operation': 'MULTIPLY'}) - - subtract_2 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Height"], 1: multiply_11}, - attrs={'operation': 'SUBTRACT'}) - - add_3 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["RackAmount"], 1: 1.0000}) - - divide = nw.new_node(Nodes.Math, input_kwargs={0: subtract_2, 1: add_3}, attrs={'operation': 'DIVIDE'}) - - multiply_12 = nw.new_node(Nodes.Math, input_kwargs={0: add_2, 1: divide}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_5 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': multiply_9, 'Y': multiply_10, 'Z': multiply_12}) - - set_position = nw.new_node(Nodes.SetPosition, input_kwargs={ - 'Geometry': duplicate_elements.outputs["Geometry"], - 'Offset': combine_xyz_5 - }) - - set_material = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': set_position, 'Material': group_input.outputs["Surface"]}) - - set_material = nw.new_node(Nodes.RealizeInstances, [set_material]) - - racks = nw.new_node(Nodes.Reroute, input_kwargs={'Input': set_material}, label='racks') - - add_4 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Depth"], 1: group_input.outputs["DoorThickness"]}) - - reroute_10 = nw.new_node(Nodes.Reroute, input_kwargs={'Input': add_4}) - - reroute_11 = nw.new_node(Nodes.Reroute, input_kwargs={'Input': group_input.outputs["Width"]}) - - reroute_8 = nw.new_node(Nodes.Reroute, input_kwargs={'Input': group_input.outputs["DoorThickness"]}) - - combine_xyz_6 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': reroute_10, 'Y': reroute_11, 'Z': reroute_8}) - - reroute_9 = nw.new_node(Nodes.Reroute, input_kwargs={'Input': group_input.outputs["Height"]}) - - combine_xyz_7 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': reroute_9}) - - cube_1 = nw.new_node(nodegroup_cube().name, input_kwargs={'Size': combine_xyz_6, 'Pos': combine_xyz_7}) - - set_material_5 = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': cube_1, 'Material': group_input.outputs["Top"]}) - - # set_shade_smooth_1 = nw.new_node(Nodes.SetShadeSmooth, input_kwargs={'Geometry': set_material_5}) - - join_geometry_2 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': set_material_5}) - - heater = nw.new_node(Nodes.Reroute, input_kwargs={'Input': join_geometry_2}, label='heater') - - join_geometry = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [body, door, racks, heater]}) - - geometry = nw.new_node(Nodes.RealizeInstances, [join_geometry]) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': geometry}) diff --git a/infinigen/assets/appliances/microwave.py b/infinigen/assets/appliances/microwave.py deleted file mode 100644 index 3e21353b3..000000000 --- a/infinigen/assets/appliances/microwave.py +++ /dev/null @@ -1,447 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
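(Editorial note on the removed appliance generators: `DishwasherFactory`, `MicrowaveFactory`, and `OvenFactory` all build their meshes with the same two-pass pattern in `create_asset` — the geometry node group is first applied with `preprocess=True` so that `get_bevel_edges` can record which edges should be beveled, the temporary object is deleted, and the final object is rebuilt and beveled. The sketch below restates that pattern as it appears in the removed `dishwasher.py`; it assumes a Blender + Infinigen environment and is not new behavior.)

```python
# Sketch of the shared create_asset() pattern from the removed appliance factories.
# Assumes Blender + Infinigen are installed; `self.params` holds the sampled node-group inputs.
from infinigen.core.util import blender as butil
from infinigen.core.util.blender import delete
from infinigen.core.util.bevelling import get_bevel_edges, add_bevel

def create_asset(self, **params):
    # Pass 1: build a throwaway mesh with preprocess=True just to record bevel edges.
    obj = butil.spawn_cube()
    butil.modify_mesh(obj, 'NODES', node_group=nodegroup_dishwasher_geometry(preprocess=True),
                      ng_inputs=self.params, apply=True)
    bevel_edges = get_bevel_edges(obj)
    delete(obj)
    # Pass 2: rebuild the real mesh and apply the recorded bevels.
    obj = butil.spawn_cube()
    butil.modify_mesh(obj, 'NODES', node_group=nodegroup_dishwasher_geometry(),
                      ng_inputs=self.params, apply=True)
    return add_bevel(obj, bevel_edges, offset=0.01)
```

The microwave variant calls `add_bevel(obj, bevel_edges)` without an explicit `offset`, and the oven variant additionally carves out and joins the gas grates after beveling when `UseGas` is set.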
- -# Authors: Hongyu Wen - -import bpy -import random -import mathutils -import numpy as np -from numpy.random import uniform as U, normal as N, randint as RI - -from infinigen.assets.utils.misc import generate_text -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface -from infinigen.core.util import blender as butil -from infinigen.core.util.blender import delete -from infinigen.core.util.bevelling import get_bevel_edges, add_bevel, complete_bevel, complete_no_bevel -from infinigen.core.util.math import FixedSeed -from infinigen.core.placement.factory import AssetFactory -from infinigen.assets.material_assignments import AssetList - - -class MicrowaveFactory(AssetFactory): - def __init__(self, factory_seed, coarse=False, dimensions=[1., 1., 1.]): - super(MicrowaveFactory, self).__init__(factory_seed, coarse=coarse) - - self.dimensions = dimensions - with FixedSeed(factory_seed): - self.params = self.sample_parameters(dimensions) - self.material_params, self.scratch, self.edge_wear = self.get_material_params() - self.params.update(self.material_params) - - def get_material_params(self): - material_assignments = AssetList['MicrowaveFactory']() - params = { - "Surface": material_assignments['surface'].assign_material(), - "Back": material_assignments['back'].assign_material(), - "BlackGlass": material_assignments['black_glass'].assign_material(), - "Glass": material_assignments['glass'].assign_material(), - } - wrapped_params = { - k: surface.shaderfunc_to_material(v) for k, v in params.items() - } - - scratch_prob, edge_wear_prob = material_assignments['wear_tear_prob'] - scratch, edge_wear = material_assignments['wear_tear'] - - is_scratch = np.random.uniform() < scratch_prob - is_edge_wear = np.random.uniform() < edge_wear_prob - if not is_scratch: - scratch = None - - if not is_edge_wear: - edge_wear = None - - return wrapped_params, scratch, edge_wear - - @staticmethod - def sample_parameters(dimensions): - depth = U(0.5, 0.7) - width = U(0.6, 1.0) - height = U(0.35, 0.45) - panel_width = U(0.2, 0.4) - margin_z = U(0.05, 0.1) - door_thickness = U(0.02, 0.04) - door_margin = U(0.03, 0.1) - door_rotation = 0 # Set to 0 for now - brand_name = generate_text() - params = { - "Depth": depth, - "Width": width, - "Height": height, - "PanelWidth": panel_width, - "MarginZ": margin_z, - "DoorThickness": door_thickness, - "DoorMargin": door_margin, - "DoorRotation": door_rotation, - "BrandName": brand_name, - } - return params - - def create_asset(self, **params): - obj = butil.spawn_cube() - butil.modify_mesh(obj, 'NODES', node_group=nodegroup_microwave_geometry(preprocess=True), ng_inputs=self.params, apply=True) - bevel_edges = get_bevel_edges(obj) - delete(obj) - obj = butil.spawn_cube() - butil.modify_mesh(obj, 'NODES', node_group=nodegroup_microwave_geometry(), ng_inputs=self.params, apply=True) - obj = add_bevel(obj, bevel_edges) - - return obj - - def finalize_assets(self, assets): - if self.scratch: - self.scratch.apply(assets) - if self.edge_wear: - self.edge_wear.apply(assets) - - -@node_utils.to_nodegroup('nodegroup_plate', singleton=False, type='GeometryNodeTree') -def nodegroup_plate(nw: NodeWrangler): - # Code generated using version 2.6.5 of the node_transpiler - - curve_circle = nw.new_node(Nodes.CurveCircle, input_kwargs={'Resolution': 128}) - - bezier_segment = nw.new_node(Nodes.CurveBezierSegment, - input_kwargs={'Start Handle': 
(0.0000, 0.0000, 0.0000), 'End': (1.0000, 0.0000, 0.4000)}) - - transform = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': bezier_segment, 'Rotation': (1.5708, 0.0000, 0.0000)}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, input_kwargs={'Curve': curve_circle.outputs["Curve"], 'Profile Curve': transform}) - - group_input = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketVectorXYZ', 'Scale', (1.0000, 1.0000, 1.0000))]) - - transform_1 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': curve_to_mesh, 'Scale': group_input.outputs["Scale"]}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Mesh': transform_1}, attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_text', singleton=False, type='GeometryNodeTree') -def nodegroup_text(nw: NodeWrangler): - # Code generated using version 2.6.5 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVectorTranslation', 'Translation', (1.5000, 0.0000, 0.0000)), - ('NodeSocketString', 'String', 'BrandName'), - ('NodeSocketFloatDistance', 'Size', 0.0500), - ('NodeSocketFloat', 'Offset Scale', 0.0020)]) - - string_to_curves = nw.new_node('GeometryNodeStringToCurves', - input_kwargs={'String': group_input.outputs["String"], 'Size': group_input.outputs["Size"]}, - attrs={'align_y': 'BOTTOM_BASELINE', 'align_x': 'CENTER'}) - - fill_curve = nw.new_node(Nodes.FillCurve, input_kwargs={'Curve': string_to_curves.outputs["Curve Instances"]}) - - extrude_mesh = nw.new_node(Nodes.ExtrudeMesh, - input_kwargs={'Mesh': fill_curve, 'Offset Scale': group_input.outputs["Offset Scale"]}) - - transform_1 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': extrude_mesh.outputs["Mesh"], 'Translation': group_input.outputs["Translation"], 'Rotation': (1.5708, 0.0000, 1.5708)}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': transform_1}, attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_center', singleton=False, type='GeometryNodeTree') -def nodegroup_center(nw: NodeWrangler): - # Code generated using version 2.6.5 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketVector', 'Vector', (0.0000, 0.0000, 0.0000)), - ('NodeSocketFloat', 'MarginX', 0.5000), - ('NodeSocketFloat', 'MarginY', 0.0000), - ('NodeSocketFloat', 'MarginZ', 0.0000)]) - - bounding_box = nw.new_node(Nodes.BoundingBox, input_kwargs={'Geometry': group_input.outputs["Geometry"]}) - - subtract = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["Vector"], 1: bounding_box.outputs["Min"]}, - attrs={'operation': 'SUBTRACT'}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': subtract.outputs["Vector"]}) - - greater_than = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz.outputs["X"], 1: group_input.outputs["MarginX"]}, - attrs={'operation': 'GREATER_THAN', 'use_clamp': True}) - - subtract_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: bounding_box.outputs["Max"], 1: group_input.outputs["Vector"]}, - attrs={'operation': 'SUBTRACT'}) - - separate_xyz_1 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': subtract_1.outputs["Vector"]}) - - greater_than_1 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_1.outputs["X"], 1: group_input.outputs["MarginX"]}, - attrs={'operation': 'GREATER_THAN', 'use_clamp': True}) - - op_and = nw.new_node(Nodes.BooleanMath, input_kwargs={0: greater_than, 1: greater_than_1}) 
- - greater_than_2 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz.outputs["Y"], 1: group_input.outputs["MarginY"]}, - attrs={'operation': 'GREATER_THAN'}) - - greater_than_3 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_1.outputs["Y"], 1: group_input.outputs["MarginY"]}, - attrs={'operation': 'GREATER_THAN', 'use_clamp': True}) - - op_and_1 = nw.new_node(Nodes.BooleanMath, input_kwargs={0: greater_than_2, 1: greater_than_3}) - - op_and_2 = nw.new_node(Nodes.BooleanMath, input_kwargs={0: op_and, 1: op_and_1}) - - greater_than_4 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz.outputs["Z"], 1: group_input.outputs["MarginZ"]}, - attrs={'operation': 'GREATER_THAN', 'use_clamp': True}) - - greater_than_5 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_1.outputs["Z"], 1: group_input.outputs["MarginZ"]}, - attrs={'operation': 'GREATER_THAN', 'use_clamp': True}) - - op_and_3 = nw.new_node(Nodes.BooleanMath, input_kwargs={0: greater_than_4, 1: greater_than_5}) - - op_and_4 = nw.new_node(Nodes.BooleanMath, input_kwargs={0: op_and_2, 1: op_and_3}) - - op_not = nw.new_node(Nodes.BooleanMath, input_kwargs={0: op_and_4}, attrs={'operation': 'NOT'}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'In': op_and_4, 'Out': op_not}, attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_cube', singleton=False, type='GeometryNodeTree') -def nodegroup_cube(nw: NodeWrangler): - # Code generated using version 2.6.5 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVectorTranslation', 'Size', (0.1000, 10.0000, 4.0000)), - ('NodeSocketVector', 'Pos', (0.0000, 0.0000, 0.0000)), - ('NodeSocketInt', 'Resolution', 10)]) - - cube = nw.new_node(Nodes.MeshCube, - input_kwargs={'Size': group_input.outputs["Size"], 'Vertices X': group_input.outputs["Resolution"], 'Vertices Y': group_input.outputs["Resolution"], 'Vertices Z': group_input.outputs["Resolution"]}) - - store_named_attribute_1 = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': cube.outputs["Mesh"], 'Name': 'uv_map', 3: cube.outputs["UV Map"]}, - attrs={'domain': 'CORNER', 'data_type': 'FLOAT_VECTOR'}) - - store_named_attribute = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': store_named_attribute_1, 'Name': 'uv_map'}, - attrs={'domain': 'CORNER', 'data_type': 'FLOAT_VECTOR'}) - - multiply_add = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["Size"], 1: (0.5000, 0.5000, 0.5000), 2: group_input.outputs["Pos"]}, - attrs={'operation': 'MULTIPLY_ADD'}) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': store_named_attribute, 'Translation': multiply_add.outputs["Vector"]}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': transform}, attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_microwave_geometry', singleton=False, type='GeometryNodeTree') -def nodegroup_microwave_geometry(nw: NodeWrangler, preprocess: bool=False): - # Code generated using version 2.6.5 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Depth', 0.0000), - ('NodeSocketFloat', 'Width', 0.0000), - ('NodeSocketFloat', 'Height', 0.0000), - ('NodeSocketFloat', 'PanelWidth', 0.5000), - ('NodeSocketFloat', 'MarginZ', 0.0000), - ('NodeSocketFloat', 'DoorThickness', 0.0000), - ('NodeSocketFloat', 'DoorMargin', 0.0500), - ('NodeSocketFloat', 'DoorRotation', 0.0000), - ('NodeSocketString', 'BrandName', 
'BrandName'), - ('NodeSocketMaterial', 'Surface', None), - ('NodeSocketMaterial', 'Back', None), - ('NodeSocketMaterial', 'BlackGlass', None), - ('NodeSocketMaterial', 'Glass', None), - ]) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': group_input.outputs["Depth"], 'Y': group_input.outputs["Width"], 'Z': group_input.outputs["Height"]}) - - cube = nw.new_node(nodegroup_cube().name, input_kwargs={'Size': combine_xyz}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Width"], 1: group_input.outputs["PanelWidth"]}, - attrs={'operation': 'SUBTRACT'}) - - subtract_1 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Height"], 1: group_input.outputs["MarginZ"]}, - attrs={'operation': 'SUBTRACT'}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': group_input.outputs["Depth"], 'Y': subtract, 'Z': subtract_1}) - - scale = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["MarginZ"], 'Scale': 0.5000}, - attrs={'operation': 'SCALE'}) - - cube_1 = nw.new_node(nodegroup_cube().name, input_kwargs={'Size': combine_xyz_1, 'Pos': scale.outputs["Vector"]}) - - difference = nw.new_node(Nodes.MeshBoolean, input_kwargs={'Mesh 1': cube, 'Mesh 2': cube_1}) - - cube_2 = nw.new_node(nodegroup_cube().name, - input_kwargs={'Size': (0.0300, 0.0300, 0.0100), 'Pos': (0.1000, 0.0000, 0.0500), 'Resolution': 2}) - - geometry_to_instance_1 = nw.new_node('GeometryNodeGeometryToInstance', input_kwargs={'Geometry': cube_2}) - - duplicate_elements = nw.new_node(Nodes.DuplicateElements, - input_kwargs={'Geometry': geometry_to_instance_1, 'Amount': 10}, - attrs={'domain': 'INSTANCE'}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: duplicate_elements.outputs["Duplicate Index"], 1: 0.0400}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz_7 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply}) - - set_position_1 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': duplicate_elements.outputs["Geometry"], 'Offset': combine_xyz_7}) - - duplicate_elements_1 = nw.new_node(Nodes.DuplicateElements, - input_kwargs={'Geometry': set_position_1, 'Amount': 7}, - attrs={'domain': 'INSTANCE'}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: duplicate_elements_1.outputs["Duplicate Index"], 1: 0.0200}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz_8 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': multiply_1}) - - set_position_2 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': duplicate_elements_1.outputs["Geometry"], 'Offset': combine_xyz_8}) - - difference_1 = nw.new_node(Nodes.MeshBoolean, - input_kwargs={'Mesh 1': difference.outputs["Mesh"], 'Mesh 2': [duplicate_elements_1.outputs["Geometry"], set_position_2]}) - - set_material_1 = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': difference_1.outputs["Mesh"], 'Material': group_input.outputs["Back"]}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': group_input.outputs["DoorThickness"], 'Y': group_input.outputs["Width"], 'Z': group_input.outputs["Height"]}) - - combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': group_input.outputs["Depth"]}) - - cube_3 = nw.new_node(nodegroup_cube().name, input_kwargs={'Size': combine_xyz_2, 'Pos': combine_xyz_3, 'Resolution': 10}) - - position = nw.new_node(Nodes.InputPosition) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': position}) - - subtract_2 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Width"], 1: 
group_input.outputs["PanelWidth"]}, - attrs={'operation': 'SUBTRACT'}) - - multiply_2 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["MarginZ"]}, attrs={'operation': 'MULTIPLY'}) - - add = nw.new_node(Nodes.Math, input_kwargs={0: subtract_2, 1: multiply_2}) - - less_than = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz.outputs["Y"], 1: add}, attrs={'operation': 'LESS_THAN'}) - - separate_geometry = nw.new_node(Nodes.SeparateGeometry, - input_kwargs={'Geometry': cube_3, 'Selection': less_than}, - attrs={'domain': 'FACE'}) - - convex_hull = nw.new_node(Nodes.ConvexHull, input_kwargs={'Geometry': separate_geometry.outputs["Selection"]}) - - subdivide_mesh = nw.new_node(Nodes.SubdivideMesh, input_kwargs={'Mesh': convex_hull, 'Level': 0}) - - position_1 = nw.new_node(Nodes.InputPosition) - - center = nw.new_node(nodegroup_center().name, - input_kwargs={'Geometry': subdivide_mesh, 'Vector': position_1, 'MarginX': -1.0000, 'MarginZ': group_input.outputs["DoorMargin"]}) - - set_material_3 = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': subdivide_mesh, 'Selection': center.outputs["In"], 'Material': group_input.outputs["BlackGlass"]}) - - set_material_2 = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': set_material_3, 'Selection': center.outputs["Out"], 'Material': group_input.outputs["Surface"]}) - - add_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Depth"], 1: group_input.outputs["DoorThickness"]}) - - bounding_box_1 = nw.new_node(Nodes.BoundingBox, input_kwargs={'Geometry': subdivide_mesh}) - - add_2 = nw.new_node(Nodes.VectorMath, input_kwargs={0: bounding_box_1.outputs["Min"], 1: bounding_box_1.outputs["Max"]}) - - scale_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: add_2.outputs["Vector"], 'Scale': 0.5000}, - attrs={'operation': 'SCALE'}) - - separate_xyz_3 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': scale_1.outputs["Vector"]}) - - separate_xyz_4 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': bounding_box_1.outputs["Min"]}) - - add_3 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz_4.outputs["Z"], 1: group_input.outputs["DoorMargin"]}) - - combine_xyz_5 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': add_1, 'Y': separate_xyz_3.outputs["Y"], 'Z': add_3}) - - text = nw.new_node(nodegroup_text().name, - input_kwargs={'Translation': combine_xyz_5, 'String': group_input.outputs["BrandName"], 'Size': 0.0300, 'Offset Scale': 0.0020}) - - text = complete_no_bevel(nw, text, preprocess) - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [set_material_2, text]}) - - geometry_to_instance = nw.new_node('GeometryNodeGeometryToInstance', input_kwargs={'Geometry': join_geometry_1}) - - z = nw.scalar_multiply(group_input.outputs["DoorRotation"], 1 if not preprocess else 0) - - combine_xyz_6 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': z}) - - rotate_instances = nw.new_node(Nodes.RotateInstances, - input_kwargs={'Instances': geometry_to_instance, 'Rotation': combine_xyz_6, 'Pivot Point': combine_xyz_3}) - - plate = nw.new_node(nodegroup_plate().name, input_kwargs={'Scale': (0.1000, 0.1000, 0.1000)}) - - multiply_add = nw.new_node(Nodes.VectorMath, - input_kwargs={0: combine_xyz_1, 1: (0.5000, 0.5000, 0.0000), 2: scale.outputs["Vector"]}, - attrs={'operation': 'MULTIPLY_ADD'}) - - set_position = nw.new_node(Nodes.SetPosition, input_kwargs={'Geometry': plate, 'Offset': multiply_add.outputs["Vector"]}) - - set_material = nw.new_node(Nodes.SetMaterial, - 
input_kwargs={'Geometry': set_position, 'Material': group_input.outputs["Glass"]}) - - convex_hull_1 = nw.new_node(Nodes.ConvexHull, input_kwargs={'Geometry': separate_geometry.outputs["Inverted"]}) - - subdivide_mesh_1 = nw.new_node(Nodes.SubdivideMesh, input_kwargs={'Mesh': convex_hull_1, 'Level': 0}) - - position_2 = nw.new_node(Nodes.InputPosition) - - center_1 = nw.new_node(nodegroup_center().name, - input_kwargs={'Geometry': subdivide_mesh_1, 'Vector': position_2, 'MarginX': -1.0000, 'MarginY': 0.0010, 'MarginZ': group_input.outputs["DoorMargin"]}) - - set_material_4 = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': subdivide_mesh_1, 'Selection': center_1.outputs["In"], 'Material': group_input.outputs["BlackGlass"]}) - - set_material_5 = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': set_material_4, 'Selection': center_1.outputs["Out"], 'Material': group_input.outputs["Surface"]}) - - add_4 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Depth"], 1: group_input.outputs["DoorThickness"]}) - - bounding_box = nw.new_node(Nodes.BoundingBox, input_kwargs={'Geometry': subdivide_mesh_1}) - - add_5 = nw.new_node(Nodes.VectorMath, input_kwargs={0: bounding_box.outputs["Min"], 1: bounding_box.outputs["Max"]}) - - scale_2 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: add_5.outputs["Vector"], 'Scale': 0.5000}, - attrs={'operation': 'SCALE'}) - - separate_xyz_1 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': scale_2.outputs["Vector"]}) - - separate_xyz_2 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': bounding_box.outputs["Max"]}) - - subtract_3 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_2.outputs["Z"], 1: group_input.outputs["DoorMargin"]}, - attrs={'operation': 'SUBTRACT'}) - - add_6 = nw.new_node(Nodes.Math, input_kwargs={0: subtract_3, 1: -0.1000}) - - combine_xyz_4 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': add_4, 'Y': separate_xyz_1.outputs["Y"], 'Z': add_6}) - - text_1 = nw.new_node(nodegroup_text().name, - input_kwargs={'Translation': combine_xyz_4, 'String': '12:01', 'Offset Scale': 0.0050}) - - text_1 = complete_no_bevel(nw, text_1, preprocess) - - join_geometry = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [set_material_1, rotate_instances, set_material, set_material_5, text_1]}) - geometry =nw.new_node(Nodes.RealizeInstances,[join_geometry]) - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': geometry}, attrs={'is_active_output': True}) diff --git a/infinigen/assets/appliances/oven.py b/infinigen/assets/appliances/oven.py deleted file mode 100644 index 16eeff2da..000000000 --- a/infinigen/assets/appliances/oven.py +++ /dev/null @@ -1,1215 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
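(Editorial note: the removed factories also share the same `get_material_params` logic — materials are looked up in `AssetList`, wrapped via `surface.shaderfunc_to_material`, and the scratch / edge-wear maps are then kept or dropped independently according to their sampled probabilities, with the survivors applied later in `finalize_assets`. Below is a small, self-contained sketch of that gating step using numpy only; the wear values are placeholders, not the real Infinigen shaders.)

```python
import numpy as np

def gate_wear(scratch, edge_wear, scratch_prob, edge_wear_prob, rng=np.random):
    """Keep each wear map only if its independent probability check passes
    (mirrors the gating in the removed get_material_params methods)."""
    if rng.uniform() >= scratch_prob:
        scratch = None
    if rng.uniform() >= edge_wear_prob:
        edge_wear = None
    return scratch, edge_wear

# Example with placeholder wear objects:
scratch, edge_wear = gate_wear('scratch_shader', 'edge_wear_shader', 0.3, 0.5)
```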
- -# Authors: Hongyu Wen - -import bpy -import random -import mathutils -import numpy as np -from numpy.random import uniform as U, normal as N, randint as RI - -from infinigen.assets.utils.misc import generate_text -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core.util.blender import delete -from infinigen.core.util.bevelling import get_bevel_edges, add_bevel, complete_bevel, complete_no_bevel -from infinigen.core import surface -from infinigen.core.util import blender as butil - -from infinigen.core.util.math import FixedSeed -from infinigen.core.placement.factory import AssetFactory -from infinigen.assets.material_assignments import AssetList -from infinigen.assets.utils.object import new_bbox -from infinigen.core.surface import write_attr_data -from infinigen.assets.utils.decorate import read_normal -from infinigen.core.tagging import PREFIX -from infinigen.core import tagging, tags as t - -class OvenFactory(AssetFactory): - def __init__(self, factory_seed, coarse=False, dimensions=[1., 1., 1.]): - super(OvenFactory, self).__init__(factory_seed, coarse=coarse) - - self.dimensions = dimensions - with FixedSeed(factory_seed): - self.params, self.geometry_node_params = self.sample_parameters(dimensions) - self.material_params, self.scratch, self.edge_wear = self.get_material_params() - self.geometry_node_params.update(self.material_params) - - def get_material_params(self): - material_assignments = AssetList['OvenFactory']() - params = { - "Surface": material_assignments['surface'].assign_material(), - "Back": material_assignments['back'].assign_material(), - "WhiteMetal": material_assignments['white_metal'].assign_material(), - "SuperBlackGlass": material_assignments['black_glass'].assign_material(), - "Glass": material_assignments['glass'].assign_material(), - } - wrapped_params = { - k: surface.shaderfunc_to_material(v) for k, v in params.items() - } - - scratch_prob, edge_wear_prob = material_assignments['wear_tear_prob'] - scratch, edge_wear = material_assignments['wear_tear'] - - is_scratch = np.random.uniform() < scratch_prob - is_edge_wear = np.random.uniform() < edge_wear_prob - if not is_scratch: - scratch = None - - if not is_edge_wear: - edge_wear = None - - return wrapped_params, scratch, edge_wear - - @staticmethod - def sample_parameters(dimensions): - # depth, width, height = dimensions - depth = 1 + N(0, 0.1) - width = 1 + N(0, 0.1) - height = 1 + N(0, 0.1) - door_thickness = U(0.05, 0.1) * depth - door_rotation = 0 # Set to 0 for now - - rack_radius = U(0.01, 0.02) * depth - rack_h_amount = RI(2, 4) - rack_d_amount = RI(4, 6) - - panel_height = U(0.2, 0.4) * height - panel_thickness = U(0.15, 0.25) * depth - botton_amount = RI(1, 3) * 2 - botton_radius = U(0.05, 0.1) * width - botton_thickness = U(0.02, 0.04) * depth - heat_radius_ratio = U(0.1, 0.2) - brand_name = generate_text() - - use_gas = RI(2) - n_grids = RI(2, 5) - grids = [RI(1, 4) for i in range(n_grids)] - branches = 2 * RI(2, 9) - grate_thickness = U(0.01, 0.03) - center_ratio = U(0.05, 0.15) - middle_ratio = U(0.5, 0.7) - - params = { - "UseGas": use_gas, - "Grids": grids, - "Branches": branches, - "GrateThickness": grate_thickness, - "CenterRatio": center_ratio, - "MiddleRatio": middle_ratio, - "Depth": depth, - "Width": width, - "Height": height, - "DoorThickness": door_thickness, - "DoorRotation": door_rotation, - "RackRadius": rack_radius, - "RackHAmount": 
rack_h_amount, - "RackDAmount": rack_d_amount, - "PanelHeight": panel_height, - "PanelThickness": panel_thickness, - "BottonAmount": botton_amount, - "BottonRadius": botton_radius, - "BottonThickness": botton_thickness, - "HeaterRadiusRatio": heat_radius_ratio, - "BrandName": brand_name, - } - geometry_node_params = {k: params[k] for k in params.keys() if k not in [ - "UseGas", - "Grids", - "Branches", - "GrateThickness", - "CenterRatio", - "MiddleRatio", - ]} - return params, geometry_node_params - - def create_placeholder(self, **kwargs) -> bpy.types.Object: - # x, y, z = self.params["Depth"], self.params["Width"], self.params["Height"] - # box = new_bbox(-x/2 - 0.05, x/2 + self.params["DoorThickness"] + 0.1, -y/2, y/2, 0, z + 0.1) - # tagging.tag_object(box, f'{PREFIX}{t.Subpart.SupportSurface.value}', read_normal(box)[:, -1] > .5) - # box_top = new_bbox(-x/2 - 0.05, -x/2 - 0.05 + self.params["PanelThickness"], -y/2, y/2, z + 0.1, z+ 0.1 + 0.5) - # box_top.rotation_euler[1] = -0.1 - #box = butil.join_objects([box, box_top]) - obj = butil.spawn_cube() - return butil.modify_mesh(obj, 'NODES', node_group=nodegroup_oven_geometry(use_gas=self.params["UseGas"], is_placeholder=True), ng_inputs=self.geometry_node_params, apply=True) - - def create_asset(self, **params): - obj = butil.spawn_cube() - butil.modify_mesh(obj, 'NODES', node_group=nodegroup_oven_geometry(preprocess=True, use_gas=self.params["UseGas"]), ng_inputs=self.geometry_node_params, apply=True) - bevel_edges = get_bevel_edges(obj) - delete(obj) - obj = butil.spawn_cube() - butil.modify_mesh(obj, 'NODES', node_group=nodegroup_oven_geometry(use_gas=self.params["UseGas"]), ng_inputs=self.geometry_node_params, apply=True) - obj = add_bevel(obj, bevel_edges, offset=0.01) - if not self.params["UseGas"]: return obj - width, depth = self.params["Width"], self.params["Depth"] + 2 * self.params["DoorThickness"] - grate_width, grate_depth = width * 0.8, depth * 0.6 - grate_thickness = self.params["GrateThickness"] - grates = gas_grates(width, depth, grate_width, grate_depth, self.params["Height"] + self.params["DoorThickness"] - grate_thickness, grate_thickness, self.params["Grids"], self.params["Branches"], self.params["CenterRatio"], self.params["MiddleRatio"]) - grates.data.materials.append(self.geometry_node_params["WhiteMetal"]) - obj.data.materials.append(self.geometry_node_params["Back"]) - with butil.SelectObjects(obj): - obj.active_material_index = len(obj.material_slots) - 1 - for i in range(len(obj.material_slots)): bpy.ops.object.material_slot_move(direction='UP') - hollow= butil.spawn_cube( - size=1, - location=(depth / 2, width / 2, self.params["Height"] + self.params["DoorThickness"]), - scale=(grate_depth + grate_thickness, grate_width + grate_thickness, grate_thickness * 2), - ) - with butil.SelectObjects(hollow): - bpy.ops.object.modifier_add(type='BEVEL') - bpy.context.object.modifiers["Bevel"].segments = 8 - bpy.context.object.modifiers["Bevel"].width = grate_thickness - bpy.ops.object.modifier_apply(modifier="Bevel") - with butil.SelectObjects(obj): - bpy.ops.object.modifier_add(type='BOOLEAN') - bpy.context.object.modifiers["Boolean"].object = hollow - bpy.context.object.modifiers["Boolean"].use_hole_tolerant = True - bpy.ops.object.modifier_apply(modifier="Boolean") - butil.delete(hollow) - butil.join_objects([obj, grates], check_attributes=True) - - return obj - - def finalize_assets(self, assets): - if self.scratch: - self.scratch.apply(assets) - if self.edge_wear: - self.edge_wear.apply(assets) - -def 
gas_grates(width, depth, grate_width, grate_depth, height, thickness, grids, branches, center_ratio, middle_ratio): - high_height = height + thickness * 0.9 - grates = [] - for i, n in enumerate(grids): - cubes = [ - butil.spawn_cube(size=1, location=(depth / 2, grate_width / len(grids) * i + (width - grate_width) / 2 + thickness / 2, height), scale=(grate_depth + thickness, thickness, thickness), name=None), - butil.spawn_cube(size=1, location=(depth / 2, grate_width / len(grids) * (i+1) + (width - grate_width) / 2 - thickness / 2, height), scale=(grate_depth + thickness, thickness, thickness), name=None), - ] - for j in range(n+1): - cubes.append(butil.spawn_cube( - size=1, - location=(grate_depth / n * j + (depth - grate_depth) / 2, grate_width / len(grids) * (i+0.5) + (width - grate_width) / 2, high_height), - scale=(thickness, grate_width / len(grids), thickness), - )) - for j in range(n): - min_dist = min(grate_width / len(grids) / 2, grate_depth / n / 2) - line_len = max(grate_width / len(grids) / 2, grate_depth / n / 2) - min_dist - center_dist = min_dist * center_ratio - middle_dist = min_dist * middle_ratio - if grate_width / len(grids) / 2 > grate_depth / n / 2: - x_center, y_center = center_dist, line_len + center_dist - x_middle, y_middle = middle_dist, line_len + middle_dist - x_full, y_full = min_dist, line_len + min_dist - else: - x_center, y_center = center_dist + line_len, center_dist - x_middle, y_middle = middle_dist + line_len, middle_dist - x_full, y_full = min_dist + line_len, min_dist - center = (grate_depth / n * (j+0.5) + (depth - grate_depth) / 2), grate_width / len(grids) * (i+0.5) + (width - grate_width) / 2 - for k in range(branches): - angle = 2 * np.pi / branches * k - x0, y0 = x_center * np.cos(angle), y_center * np.sin(angle) - x1, y1 = x_middle * np.cos(angle), y_middle * np.sin(angle) - location = center[0] + (x0 + x1) / 2, center[1] + (y0 + y1) / 2, high_height - scale = ((x0 - x1) ** 2 + (y0 - y1) ** 2) ** 0.5, thickness, thickness - actual_angle = np.arctan2(y1-y0, x1-x0) - obj = butil.spawn_cube(size=1, location=location, scale=scale) - bpy.context.object.rotation_euler[2] = actual_angle - cubes.append(obj) - x0, y0 = x1, y1 - if x_full - abs(x0) < y_full - abs(y0): - x1, y1 = x_full * np.sign(x0), y0 - else: - x1, y1 = x0, y_full * np.sign(y0) - location = center[0] + (x0 + x1) / 2, center[1] + (y0 + y1) / 2, high_height - scale = ((x0 - x1) ** 2 + (y0 - y1) ** 2) ** 0.5, thickness, thickness - actual_angle = np.arctan2(y1-y0, x1-x0) - obj = butil.spawn_cube(size=1, location=location, scale=scale) - bpy.context.object.rotation_euler[2] = actual_angle - cubes.append(obj) - grates.append(butil.spawn_cylinder(center_dist + thickness, thickness / 2, location=(center[0], center[1], height))) - obj = butil.boolean(cubes) - for i in range(1, len(cubes)): - butil.delete(cubes[i]) - with butil.SelectObjects(obj): - bpy.ops.object.modifier_add(type='REMESH') - remesh_type = "VOXEL" - bpy.context.object.modifiers["Remesh"].mode = remesh_type - bpy.context.object.modifiers["Remesh"].voxel_size = 0.004 - bpy.ops.object.modifier_apply(modifier="Remesh") - bpy.ops.object.modifier_add(type='SMOOTH') - bpy.context.object.modifiers["Smooth"].iterations = 8 - bpy.context.object.modifiers["Smooth"].factor = 1 - bpy.ops.object.modifier_apply(modifier="Smooth") - grates.append(obj) - obj = butil.boolean(grates) - for i in range(1, len(grates)): - butil.delete(grates[i]) - return obj - -@node_utils.to_nodegroup('nodegroup_hollow_cube', singleton=False, 
type='GeometryNodeTree') -def nodegroup_hollow_cube(nw: NodeWrangler): - # Code generated using version 2.6.5 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVectorTranslation', 'Size', (0.1000, 10.0000, 4.0000)), - ('NodeSocketVector', 'Pos', (0.0000, 0.0000, 0.0000)), - ('NodeSocketInt', 'Resolution', 2), - ('NodeSocketFloat', 'Thickness', 0.0000), - ('NodeSocketBool', 'Switch1', False), - ('NodeSocketBool', 'Switch2', False), - ('NodeSocketBool', 'Switch3', False), - ('NodeSocketBool', 'Switch4', False), - ('NodeSocketBool', 'Switch5', False), - ('NodeSocketBool', 'Switch6', False)]) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': group_input.outputs["Size"]}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Thickness"], 1: 2.0000}, - attrs={'operation': 'MULTIPLY'}) - - subtract = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz.outputs["Y"], 1: multiply}, attrs={'operation': 'SUBTRACT'}) - - subtract_1 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz.outputs["Z"], 1: multiply}, attrs={'operation': 'SUBTRACT'}) - - combine_xyz_4 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': group_input.outputs["Thickness"], 'Y': subtract, 'Z': subtract_1}) - - cube_2 = nw.new_node(Nodes.MeshCube, - input_kwargs={'Size': combine_xyz_4, 'Vertices X': group_input.outputs["Resolution"], 'Vertices Y': group_input.outputs["Resolution"], 'Vertices Z': group_input.outputs["Resolution"]}) - - store_named_attribute_1 = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': cube_2.outputs["Mesh"], 'Name': 'uv_map', 3: cube_2.outputs["UV Map"]}, - attrs={'domain': 'CORNER', 'data_type': 'FLOAT_VECTOR'}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Thickness"]}, attrs={'operation': 'MULTIPLY'}) - - separate_xyz_1 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': group_input.outputs["Pos"]}) - - add = nw.new_node(Nodes.Math, input_kwargs={0: multiply_1, 1: separate_xyz_1.outputs["X"]}) - - scale = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["Size"], 'Scale': 0.5000}, - attrs={'operation': 'SCALE'}) - - separate_xyz_2 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': scale.outputs["Vector"]}) - - add_1 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz_2.outputs["Y"], 1: separate_xyz_1.outputs["Y"]}) - - subtract_2 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_2.outputs["Z"], 1: separate_xyz_1.outputs["Z"]}, - attrs={'operation': 'SUBTRACT'}) - - combine_xyz_5 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': add, 'Y': add_1, 'Z': subtract_2}) - - transform_2 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': store_named_attribute_1, 'Translation': combine_xyz_5}) - - switch_2 = nw.new_node(Nodes.Switch, input_kwargs={1: group_input.outputs["Switch3"], 14: transform_2}) - - subtract_3 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz.outputs["Y"], 1: multiply}, attrs={'operation': 'SUBTRACT'}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': separate_xyz.outputs["X"], 'Y': subtract_3, 'Z': group_input.outputs["Thickness"]}) - - cube_1 = nw.new_node(Nodes.MeshCube, - input_kwargs={'Size': combine_xyz_2, 'Vertices X': group_input.outputs["Resolution"], 'Vertices Y': group_input.outputs["Resolution"], 'Vertices Z': group_input.outputs["Resolution"]}) - - store_named_attribute_4 = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': 
cube_1.outputs["Mesh"], 'Name': 'uv_map', 3: cube_1.outputs["UV Map"]}, - attrs={'domain': 'CORNER', 'data_type': 'FLOAT_VECTOR'}) - - add_2 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz_2.outputs["X"], 1: separate_xyz_1.outputs["X"]}) - - add_3 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz_2.outputs["Y"], 1: separate_xyz_1.outputs["Y"]}) - - subtract_4 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz.outputs["Z"], 1: multiply_1}, attrs={'operation': 'SUBTRACT'}) - - combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': add_2, 'Y': add_3, 'Z': subtract_4}) - - transform_1 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': store_named_attribute_4, 'Translation': combine_xyz_3}) - - switch_1 = nw.new_node(Nodes.Switch, input_kwargs={1: group_input.outputs["Switch2"], 14: transform_1}) - - subtract_5 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz.outputs["Y"], 1: multiply}, attrs={'operation': 'SUBTRACT'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': separate_xyz.outputs["X"], 'Y': subtract_5, 'Z': group_input.outputs["Thickness"]}) - - cube = nw.new_node(Nodes.MeshCube, - input_kwargs={'Size': combine_xyz, 'Vertices X': group_input.outputs["Resolution"], 'Vertices Y': group_input.outputs["Resolution"], 'Vertices Z': group_input.outputs["Resolution"]}) - - store_named_attribute = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': cube.outputs["Mesh"], 'Name': 'uv_map', 3: cube.outputs["UV Map"]}, - attrs={'domain': 'CORNER', 'data_type': 'FLOAT_VECTOR'}) - - add_4 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz_2.outputs["X"], 1: separate_xyz_1.outputs["X"]}) - - add_5 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz_2.outputs["Y"], 1: separate_xyz_1.outputs["Y"]}) - - add_6 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_1, 1: separate_xyz_1.outputs["Z"]}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': add_4, 'Y': add_5, 'Z': add_6}) - - transform = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': store_named_attribute, 'Translation': combine_xyz_1}) - - switch = nw.new_node(Nodes.Switch, input_kwargs={1: group_input.outputs["Switch1"], 14: transform}) - - subtract_6 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz.outputs["Y"], 1: multiply}, attrs={'operation': 'SUBTRACT'}) - - subtract_7 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz.outputs["Z"], 1: multiply}, attrs={'operation': 'SUBTRACT'}) - - combine_xyz_6 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': group_input.outputs["Thickness"], 'Y': subtract_6, 'Z': subtract_7}) - - cube_3 = nw.new_node(Nodes.MeshCube, - input_kwargs={'Size': combine_xyz_6, 'Vertices X': group_input.outputs["Resolution"], 'Vertices Y': group_input.outputs["Resolution"], 'Vertices Z': group_input.outputs["Resolution"]}) - - store_named_attribute_5 = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': cube_3.outputs["Mesh"], 'Name': 'uv_map', 3: cube_3.outputs["UV Map"]}, - attrs={'domain': 'CORNER', 'data_type': 'FLOAT_VECTOR'}) - - subtract_8 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz.outputs["X"], 1: multiply_1}, attrs={'operation': 'SUBTRACT'}) - - add_7 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz_2.outputs["Y"], 1: separate_xyz_1.outputs["Y"]}) - - subtract_9 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_2.outputs["Z"], 1: separate_xyz_1.outputs["Z"]}, - attrs={'operation': 'SUBTRACT'}) - - combine_xyz_7 = nw.new_node(Nodes.CombineXYZ, 
input_kwargs={'X': subtract_8, 'Y': add_7, 'Z': subtract_9}) - - transform_3 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': store_named_attribute_5, 'Translation': combine_xyz_7}) - - switch_3 = nw.new_node(Nodes.Switch, input_kwargs={1: group_input.outputs["Switch4"], 14: transform_3}) - - combine_xyz_9 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': separate_xyz.outputs["X"], 'Y': group_input.outputs["Thickness"], 'Z': separate_xyz.outputs["Z"]}) - - cube_4 = nw.new_node(Nodes.MeshCube, - input_kwargs={'Size': combine_xyz_9, 'Vertices X': group_input.outputs["Resolution"], 'Vertices Y': group_input.outputs["Resolution"], 'Vertices Z': group_input.outputs["Resolution"]}) - - store_named_attribute_2 = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': cube_4.outputs["Mesh"], 'Name': 'uv_map', 3: cube_4.outputs["UV Map"]}, - attrs={'domain': 'CORNER', 'data_type': 'FLOAT_VECTOR'}) - - add_8 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz_1.outputs["X"], 1: separate_xyz_2.outputs["X"]}) - - add_9 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz_1.outputs["Y"], 1: multiply_1}) - - add_10 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz_1.outputs["Z"], 1: separate_xyz_2.outputs["Z"]}) - - combine_xyz_8 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': add_8, 'Y': add_9, 'Z': add_10}) - - transform_4 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': store_named_attribute_2, 'Translation': combine_xyz_8}) - - switch_4 = nw.new_node(Nodes.Switch, input_kwargs={1: group_input.outputs["Switch5"], 14: transform_4}) - - combine_xyz_10 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': separate_xyz.outputs["X"], 'Y': group_input.outputs["Thickness"], 'Z': separate_xyz.outputs["Z"]}) - - cube_5 = nw.new_node(Nodes.MeshCube, - input_kwargs={'Size': combine_xyz_10, 'Vertices X': group_input.outputs["Resolution"], 'Vertices Y': group_input.outputs["Resolution"], 'Vertices Z': group_input.outputs["Resolution"]}) - - store_named_attribute_3 = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': cube_5.outputs["Mesh"], 'Name': 'uv_map', 3: cube_5.outputs["UV Map"]}, - attrs={'domain': 'CORNER', 'data_type': 'FLOAT_VECTOR'}) - - add_11 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz_2.outputs["X"], 1: separate_xyz_1.outputs["X"]}) - - subtract_10 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz.outputs["Y"], 1: multiply_1}, attrs={'operation': 'SUBTRACT'}) - - add_12 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz_2.outputs["Z"], 1: separate_xyz_1.outputs["Z"]}) - - combine_xyz_11 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': add_11, 'Y': subtract_10, 'Z': add_12}) - - transform_5 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': store_named_attribute_3, 'Translation': combine_xyz_11}) - - switch_5 = nw.new_node(Nodes.Switch, input_kwargs={1: group_input.outputs["Switch6"], 14: transform_5}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [switch_2.outputs[6], switch_1.outputs[6], switch.outputs[6], switch_3.outputs[6], switch_4.outputs[6], switch_5.outputs[6]]}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': join_geometry}, attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_o', singleton=False, type='GeometryNodeTree') -def nodegroup_o(nw: NodeWrangler): - # Code generated using version 2.6.5 of the node_transpiler - - curve_line = nw.new_node(Nodes.CurveLine, input_kwargs={'End': (0.0000, 0.0000, 0.0020)}) - - 
group_input = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketFloatDistance', 'Size', 1.0000)]) - - curve_circle_1 = nw.new_node(Nodes.CurveCircle, input_kwargs={'Radius': group_input.outputs["Size"]}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': curve_line, 'Profile Curve': curve_circle_1.outputs["Curve"]}) - - extrude_mesh = nw.new_node(Nodes.ExtrudeMesh, input_kwargs={'Mesh': curve_to_mesh, 'Offset Scale': 0.0030}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Mesh': extrude_mesh.outputs["Mesh"]}, attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_heater', singleton=False, type='GeometryNodeTree') -def nodegroup_heater(nw: NodeWrangler): - # Code generated using version 2.6.5 of the node_transpiler - - curve_line_1 = nw.new_node(Nodes.CurveLine, input_kwargs={'End': (0.0000, 0.0000, 0.0010)}) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'width', 0.5000), - ('NodeSocketFloat', 'depth', 0.0000), - ('NodeSocketFloat', 'radius_ratio', 0.2000), - ('NodeSocketFloat', 'arrangement_ratio', 0.5000), - ('NodeSocketShader', 'SuperBlackGlass', None)]) - - minimum = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["width"], 1: group_input.outputs["depth"]}, - attrs={'operation': 'MINIMUM'}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: minimum, 1: group_input.outputs["radius_ratio"]}, - label='Multiply', - attrs={'operation': 'MULTIPLY'}) - - curve_circle_1 = nw.new_node(Nodes.CurveCircle, input_kwargs={'Radius': multiply}) - - curve_to_mesh_1 = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': curve_line_1, 'Profile Curve': curve_circle_1.outputs["Curve"], 'Fill Caps': True}) - - set_material = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': curve_to_mesh_1, 'Material': group_input.outputs["SuperBlackGlass"]}) - - geometry_to_instance = nw.new_node('GeometryNodeGeometryToInstance', input_kwargs={'Geometry': set_material}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: minimum, 1: group_input.outputs["arrangement_ratio"]}, - label='Multiply', - attrs={'operation': 'MULTIPLY'}) - - divide = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["width"], 1: multiply_1}, attrs={'operation': 'DIVIDE'}) - - floor = nw.new_node(Nodes.Math, input_kwargs={0: divide}, attrs={'operation': 'FLOOR'}) - - divide_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["depth"], 1: multiply_1}, attrs={'operation': 'DIVIDE'}) - - floor_1 = nw.new_node(Nodes.Math, input_kwargs={0: divide_1}, attrs={'operation': 'FLOOR'}) - - multiply_2 = nw.new_node(Nodes.Math, input_kwargs={0: floor, 1: floor_1}, attrs={'operation': 'MULTIPLY'}) - - duplicate_elements = nw.new_node(Nodes.DuplicateElements, - input_kwargs={'Geometry': geometry_to_instance, 'Amount': multiply_2}, - attrs={'domain': 'INSTANCE'}) - - divide_2 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["depth"], 1: floor_1}, attrs={'operation': 'DIVIDE'}) - - divide_3 = nw.new_node(Nodes.Math, - input_kwargs={0: duplicate_elements.outputs["Duplicate Index"], 1: floor}, - attrs={'operation': 'DIVIDE'}) - - floor_2 = nw.new_node(Nodes.Math, input_kwargs={0: divide_3}, attrs={'operation': 'FLOOR'}) - - multiply_3 = nw.new_node(Nodes.Math, input_kwargs={0: floor_2, 1: divide_2}, attrs={'operation': 'MULTIPLY'}) - - multiply_add = nw.new_node(Nodes.Math, input_kwargs={0: divide_2, 2: multiply_3}, attrs={'operation': 'MULTIPLY_ADD'}) - - divide_4 = 
nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["width"], 1: floor}, attrs={'operation': 'DIVIDE'}) - - modulo = nw.new_node(Nodes.Math, - input_kwargs={0: duplicate_elements.outputs["Duplicate Index"], 1: floor}, - attrs={'operation': 'MODULO'}) - - multiply_4 = nw.new_node(Nodes.Math, input_kwargs={0: modulo, 1: divide_4}, attrs={'operation': 'MULTIPLY'}) - - multiply_add_1 = nw.new_node(Nodes.Math, input_kwargs={0: divide_4, 2: multiply_4}, attrs={'operation': 'MULTIPLY_ADD'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply_add, 'Y': multiply_add_1}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': duplicate_elements.outputs["Geometry"], 'Offset': combine_xyz}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Mesh': set_position}, attrs={'is_active_output': True}) - - -@node_utils.to_nodegroup('nodegroup_oven_rack', singleton=False, type='GeometryNodeTree') -def nodegroup_oven_rack(nw: NodeWrangler): - # Code generated using version 2.6.5 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloatDistance', 'Width', 2.0000), - ('NodeSocketFloatDistance', 'Height', 2.0000), - ('NodeSocketFloatDistance', 'Radius', 0.0200), - ('NodeSocketInt', 'Amount', 5)]) - - quadrilateral = nw.new_node('GeometryNodeCurvePrimitiveQuadrilateral', - input_kwargs={'Width': group_input.outputs["Width"], 'Height': group_input.outputs["Height"]}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Height"], 1: -0.5000}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': multiply}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Height"]}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_4 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': multiply_1}) - - curve_line = nw.new_node(Nodes.CurveLine, input_kwargs={'Start': combine_xyz_3, 'End': combine_xyz_4}) - - geometry_to_instance = nw.new_node('GeometryNodeGeometryToInstance', input_kwargs={'Geometry': curve_line}) - - duplicate_elements = nw.new_node(Nodes.DuplicateElements, - input_kwargs={'Geometry': geometry_to_instance, 'Amount': group_input.outputs["Amount"]}, - attrs={'domain': 'INSTANCE'}) - - multiply_2 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Width"]}, attrs={'operation': 'MULTIPLY'}) - - divide = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_2, 1: group_input.outputs["Amount"]}, - attrs={'operation': 'DIVIDE'}) - - multiply_3 = nw.new_node(Nodes.Math, - input_kwargs={0: duplicate_elements.outputs["Duplicate Index"], 1: divide}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply_3}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': duplicate_elements.outputs["Geometry"], 'Offset': combine_xyz}) - - duplicate_elements_1 = nw.new_node(Nodes.DuplicateElements, - input_kwargs={'Geometry': geometry_to_instance, 'Amount': group_input.outputs["Amount"]}, - attrs={'domain': 'INSTANCE'}) - - multiply_4 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Width"], 1: -0.5000}, attrs={'operation': 'MULTIPLY'}) - - divide_1 = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_4, 1: group_input.outputs["Amount"]}, - attrs={'operation': 'DIVIDE'}) - - multiply_5 = nw.new_node(Nodes.Math, - input_kwargs={0: duplicate_elements_1.outputs["Duplicate Index"], 1: divide_1}, - attrs={'operation': 'MULTIPLY'}) 
- - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply_5}) - - set_position_1 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': duplicate_elements_1.outputs["Geometry"], 'Offset': combine_xyz_1}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [quadrilateral, set_position, set_position_1]}) - - curve_circle = nw.new_node(Nodes.CurveCircle, input_kwargs={'Radius': group_input.outputs["Radius"]}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': join_geometry, 'Profile Curve': curve_circle.outputs["Curve"], 'Fill Caps': True}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Mesh': curve_to_mesh}, attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_text', singleton=False, type='GeometryNodeTree') -def nodegroup_text(nw: NodeWrangler): - # Code generated using version 2.6.5 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVectorTranslation', 'Translation', (1.5000, 0.0000, 0.0000)), - ('NodeSocketString', 'String', 'BrandName'), - ('NodeSocketFloatDistance', 'Size', 0.0500), - ('NodeSocketFloat', 'Offset Scale', 0.0020)]) - - string_to_curves = nw.new_node('GeometryNodeStringToCurves', - input_kwargs={'String': group_input.outputs["String"], 'Size': group_input.outputs["Size"]}, - attrs={'align_y': 'BOTTOM_BASELINE', 'align_x': 'CENTER'}) - - fill_curve = nw.new_node(Nodes.FillCurve, input_kwargs={'Curve': string_to_curves.outputs["Curve Instances"]}) - - extrude_mesh = nw.new_node(Nodes.ExtrudeMesh, - input_kwargs={'Mesh': fill_curve, 'Offset Scale': group_input.outputs["Offset Scale"]}) - - transform_1 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': extrude_mesh.outputs["Mesh"], 'Translation': group_input.outputs["Translation"], 'Rotation': (1.5708, 0.0000, 1.5708)}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': transform_1}, attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_handle', singleton=False, type='GeometryNodeTree') -def nodegroup_handle(nw: NodeWrangler): - # Code generated using version 2.6.5 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'width', 0.0000), - ('NodeSocketFloat', 'length', 0.0000), - ('NodeSocketFloat', 'thickness', 0.0200)]) - - cube = nw.new_node(Nodes.MeshCube, input_kwargs={'Size': group_input.outputs["width"]}) - - store_named_attribute = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': cube.outputs["Mesh"], 'Name': 'uv_map', 3: cube.outputs["UV Map"]}, - attrs={'domain': 'CORNER', 'data_type': 'FLOAT_VECTOR'}) - - cube_1 = nw.new_node(Nodes.MeshCube, input_kwargs={'Size': group_input.outputs["width"]}) - - store_named_attribute_1 = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': cube_1.outputs["Mesh"], 'Name': 'uv_map', 3: cube_1.outputs["UV Map"]}, - attrs={'domain': 'CORNER', 'data_type': 'FLOAT_VECTOR'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': group_input.outputs["length"]}) - - transform = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': store_named_attribute_1, 'Translation': combine_xyz}) - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [store_named_attribute, transform]}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["width"]}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': 
multiply}) - - transform_2 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': join_geometry_1, 'Translation': combine_xyz_3}) - - add = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["length"], 1: group_input.outputs["width"]}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': group_input.outputs["width"], 'Y': add, 'Z': group_input.outputs["thickness"]}) - - cube_2 = nw.new_node(Nodes.MeshCube, input_kwargs={'Size': combine_xyz_1}) - - store_named_attribute_2 = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': cube_2.outputs["Mesh"], 'Name': 'uv_map', 3: cube_2.outputs["UV Map"]}, - attrs={'domain': 'CORNER', 'data_type': 'FLOAT_VECTOR'}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["length"]}, attrs={'operation': 'MULTIPLY'}) - - multiply_2 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["thickness"]}, attrs={'operation': 'MULTIPLY'}) - - add_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["width"], 1: multiply_2}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': multiply_1, 'Z': add_1}) - - transform_1 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': store_named_attribute_2, 'Translation': combine_xyz_2}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [transform_2, transform_1]}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': join_geometry}, attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_center', singleton=False, type='GeometryNodeTree') -def nodegroup_center(nw: NodeWrangler): - # Code generated using version 2.6.5 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketVector', 'Vector', (0.0000, 0.0000, 0.0000)), - ('NodeSocketFloat', 'MarginX', 0.5000), - ('NodeSocketFloat', 'MarginY', 0.0000), - ('NodeSocketFloat', 'MarginZ', 0.0000)]) - - bounding_box = nw.new_node(Nodes.BoundingBox, input_kwargs={'Geometry': group_input.outputs["Geometry"]}) - - subtract = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["Vector"], 1: bounding_box.outputs["Min"]}, - attrs={'operation': 'SUBTRACT'}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': subtract.outputs["Vector"]}) - - greater_than = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz.outputs["X"], 1: group_input.outputs["MarginX"]}, - attrs={'operation': 'GREATER_THAN', 'use_clamp': True}) - - subtract_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: bounding_box.outputs["Max"], 1: group_input.outputs["Vector"]}, - attrs={'operation': 'SUBTRACT'}) - - separate_xyz_1 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': subtract_1.outputs["Vector"]}) - - greater_than_1 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_1.outputs["X"], 1: group_input.outputs["MarginX"]}, - attrs={'operation': 'GREATER_THAN', 'use_clamp': True}) - - op_and = nw.new_node(Nodes.BooleanMath, input_kwargs={0: greater_than, 1: greater_than_1}) - - greater_than_2 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz.outputs["Y"], 1: group_input.outputs["MarginY"]}, - attrs={'operation': 'GREATER_THAN'}) - - greater_than_3 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_1.outputs["Y"], 1: group_input.outputs["MarginY"]}, - attrs={'operation': 'GREATER_THAN', 'use_clamp': True}) - - op_and_1 = nw.new_node(Nodes.BooleanMath, input_kwargs={0: greater_than_2, 1: 
greater_than_3}) - - op_and_2 = nw.new_node(Nodes.BooleanMath, input_kwargs={0: op_and, 1: op_and_1}) - - greater_than_4 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz.outputs["Z"], 1: group_input.outputs["MarginZ"]}, - attrs={'operation': 'GREATER_THAN', 'use_clamp': True}) - - greater_than_5 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_1.outputs["Z"], 1: group_input.outputs["MarginZ"]}, - attrs={'operation': 'GREATER_THAN', 'use_clamp': True}) - - op_and_3 = nw.new_node(Nodes.BooleanMath, input_kwargs={0: greater_than_4, 1: greater_than_5}) - - op_and_4 = nw.new_node(Nodes.BooleanMath, input_kwargs={0: op_and_2, 1: op_and_3}) - - op_not = nw.new_node(Nodes.BooleanMath, input_kwargs={0: op_and_4}, attrs={'operation': 'NOT'}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'In': op_and_4, 'Out': op_not}, attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_cube', singleton=False, type='GeometryNodeTree') -def nodegroup_cube(nw: NodeWrangler): - # Code generated using version 2.6.5 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVectorTranslation', 'Size', (0.1000, 10.0000, 4.0000)), - ('NodeSocketVector', 'Pos', (0.0000, 0.0000, 0.0000)), - ('NodeSocketInt', 'Resolution', 2)]) - - cube = nw.new_node(Nodes.MeshCube, - input_kwargs={'Size': group_input.outputs["Size"], 'Vertices X': group_input.outputs["Resolution"], 'Vertices Y': group_input.outputs["Resolution"], 'Vertices Z': group_input.outputs["Resolution"]}) - - store_named_attribute_1 = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': cube.outputs["Mesh"], 'Name': 'uv_map', 3: cube.outputs["UV Map"]}, - attrs={'domain': 'CORNER', 'data_type': 'FLOAT_VECTOR'}) - - store_named_attribute = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': store_named_attribute_1, 'Name': 'uv_map'}, - attrs={'domain': 'CORNER', 'data_type': 'FLOAT_VECTOR'}) - - multiply_add = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["Size"], 1: (0.5000, 0.5000, 0.5000), 2: group_input.outputs["Pos"]}, - attrs={'operation': 'MULTIPLY_ADD'}) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': store_named_attribute, 'Translation': multiply_add.outputs["Vector"]}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': transform}, attrs={'is_active_output': True}) - - -@node_utils.to_nodegroup('nodegroup_oven_geometry', singleton=False, type='GeometryNodeTree') -def nodegroup_oven_geometry(nw: NodeWrangler, preprocess: bool=False, use_gas: bool=False, is_placeholder: bool=False): - # Code generated using version 2.6.5 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Depth', 1.0000), - ('NodeSocketFloat', 'Width', 1.0000), - ('NodeSocketFloat', 'Height', 1.0000), - ('NodeSocketFloat', 'DoorThickness', 0.0700), - ('NodeSocketFloat', 'DoorRotation', 0.0000), - ('NodeSocketFloatDistance', 'RackRadius', 0.0100), - ('NodeSocketInt', 'RackHAmount', 2), - ('NodeSocketInt', 'RackDAmount', 5), - ('NodeSocketFloat', 'PanelHeight', 0.3000), - ('NodeSocketFloat', 'PanelThickness', 0.2000), - ('NodeSocketInt', 'BottonAmount', 4), - ('NodeSocketFloatDistance', 'BottonRadius', 0.0500), - ('NodeSocketFloat', 'BottonThickness', 0.0300), - ('NodeSocketFloat', 'HeaterRadiusRatio', 0.1500), - ('NodeSocketString', 'BrandName', 'BrandName'), - ('NodeSocketMaterial', 'Glass', None), - ('NodeSocketMaterial', 'Surface', None), - 
('NodeSocketMaterial', 'WhiteMetal', None), - ('NodeSocketMaterial', 'SuperBlackGlass', None), - ('NodeSocketMaterial', 'Back', None), - ('NodeSocketBool', 'is_placeholder', is_placeholder)]) - - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': group_input.outputs["DoorThickness"], 'Y': group_input.outputs["Width"], 'Z': group_input.outputs["Height"]}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': group_input.outputs["Depth"]}) - - cube = nw.new_node(nodegroup_cube().name, input_kwargs={'Size': combine_xyz_1, 'Pos': combine_xyz_2}) - - position = nw.new_node(Nodes.InputPosition) - - center = nw.new_node(nodegroup_center().name, - input_kwargs={'Geometry': cube, 'Vector': position, 'MarginX': -1.0000, 'MarginY': 0.1000, 'MarginZ': 0.1500}) - - set_material_2 = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': cube, 'Selection': center.outputs["In"], 'Material': group_input.outputs["Glass"]}) - - set_material_3 = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': set_material_2, 'Selection': center.outputs["Out"], 'Material': group_input.outputs["Surface"]}) - - # set_shade_smooth = nw.new_node(Nodes.SetShadeSmooth, input_kwargs={'Geometry': set_material_3}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Width"], 1: 0.0500}, attrs={'operation': 'MULTIPLY'}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Width"], 1: 0.8000}, attrs={'operation': 'MULTIPLY'}) - - multiply_2 = nw.new_node(Nodes.Math, input_kwargs={0: multiply}, attrs={'operation': 'MULTIPLY'}) - - handle = nw.new_node(nodegroup_handle().name, - input_kwargs={'width': multiply, 'length': multiply_1, 'thickness': multiply_2}) - - add = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Depth"], 1: group_input.outputs["DoorThickness"]}) - - multiply_3 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Width"]}, attrs={'operation': 'MULTIPLY'}) - - multiply_4 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_1, 1: -0.5000}, attrs={'operation': 'MULTIPLY'}) - - add_1 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_3, 1: multiply_4}) - - multiply_5 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Height"], 1: 0.9200}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_13 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': add, 'Y': add_1, 'Z': multiply_5}) - - transform_1 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': handle, 'Translation': combine_xyz_13, 'Rotation': (0.0000, 1.5708, 0.0000)}) - - set_material_8 = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': transform_1, 'Material': group_input.outputs["WhiteMetal"]}) - - add_2 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Depth"], 1: group_input.outputs["DoorThickness"]}) - - multiply_6 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Width"]}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_12 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': add_2, 'Y': multiply_6, 'Z': 0.0300}) - - multiply_7 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Height"], 1: 0.0500}, attrs={'operation': 'MULTIPLY'}) - - text = nw.new_node(nodegroup_text().name, - input_kwargs={'Translation': combine_xyz_12, 'String': group_input.outputs["BrandName"], 'Size': multiply_7}) - - text = complete_no_bevel(nw, text, preprocess) - - set_material_9 = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': text, 'Material': group_input.outputs["WhiteMetal"]}) - - 
set_material_8 = complete_bevel(nw, set_material_8, preprocess) - - join_geometry_3 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [set_material_3, set_material_8, set_material_9]}) - - geometry_to_instance = nw.new_node('GeometryNodeGeometryToInstance', input_kwargs={'Geometry': join_geometry_3}) - - y = nw.scalar_multiply(group_input.outputs["DoorRotation"], 1 if not preprocess else 0) - - combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': y}) - - combine_xyz_4 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': group_input.outputs["Depth"]}) - - rotate_instances = nw.new_node(Nodes.RotateInstances, - input_kwargs={'Instances': geometry_to_instance, 'Rotation': combine_xyz_3, 'Pivot Point': combine_xyz_4}) - - rotate_instances = nw.new_node(Nodes.RealizeInstances, [rotate_instances]) - - door = nw.new_node(Nodes.Reroute, input_kwargs={'Input': rotate_instances}, label='door') - - multiply_8 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["DoorThickness"], 1: 2.1000}, - attrs={'operation': 'MULTIPLY'}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Depth"], 1: multiply_8}, - attrs={'operation': 'SUBTRACT'}) - - multiply_9 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["DoorThickness"], 1: 2.1000}, - attrs={'operation': 'MULTIPLY'}) - - subtract_1 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Width"], 1: multiply_9}, - attrs={'operation': 'SUBTRACT'}) - - ovenrack = nw.new_node(nodegroup_oven_rack().name, - input_kwargs={'Width': subtract, 'Height': subtract_1, 'Radius': group_input.outputs["RackRadius"], 'Amount': group_input.outputs["RackDAmount"]}) - - geometry_to_instance_1 = nw.new_node('GeometryNodeGeometryToInstance', input_kwargs={'Geometry': ovenrack}) - - duplicate_elements = nw.new_node(Nodes.DuplicateElements, - input_kwargs={'Geometry': geometry_to_instance_1, 'Amount': group_input.outputs["RackHAmount"]}, - attrs={'domain': 'INSTANCE'}) - - multiply_10 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Depth"]}, attrs={'operation': 'MULTIPLY'}) - - multiply_11 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Width"]}, attrs={'operation': 'MULTIPLY'}) - - add_3 = nw.new_node(Nodes.Math, input_kwargs={0: duplicate_elements.outputs["Duplicate Index"], 1: 1.0000}) - - multiply_12 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["DoorThickness"], 1: 2.0000}, - attrs={'operation': 'MULTIPLY'}) - - subtract_2 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Height"], 1: multiply_12}, - attrs={'operation': 'SUBTRACT'}) - - add_4 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["RackHAmount"], 1: 1.0000}) - - divide = nw.new_node(Nodes.Math, input_kwargs={0: subtract_2, 1: add_4}, attrs={'operation': 'DIVIDE'}) - - multiply_13 = nw.new_node(Nodes.Math, input_kwargs={0: add_3, 1: divide}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_5 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply_10, 'Y': multiply_11, 'Z': multiply_13}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': duplicate_elements.outputs["Geometry"], 'Offset': combine_xyz_5}) - - set_material = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': set_position, 'Material': group_input.outputs["Surface"]}) - - set_material = nw.new_node(Nodes.RealizeInstances, [set_material]) - - racks = nw.new_node(Nodes.Reroute, input_kwargs={'Input': set_material}, label='racks') - - add_5 = 
nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Depth"], 1: group_input.outputs["DoorThickness"]}) - - reroute_10 = nw.new_node(Nodes.Reroute, input_kwargs={'Input': add_5}) - - reroute_11 = nw.new_node(Nodes.Reroute, input_kwargs={'Input': group_input.outputs["Width"]}) - - reroute_8 = nw.new_node(Nodes.Reroute, input_kwargs={'Input': group_input.outputs["DoorThickness"]}) - - combine_xyz_6 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': reroute_10, 'Y': reroute_11, 'Z': reroute_8}) - - reroute_9 = nw.new_node(Nodes.Reroute, input_kwargs={'Input': group_input.outputs["Height"]}) - - combine_xyz_7 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': reroute_9}) - - cube_1 = nw.new_node(nodegroup_cube().name, input_kwargs={'Size': combine_xyz_6, 'Pos': combine_xyz_7}) - - set_material_5 = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': cube_1, 'Material': group_input.outputs["Back"]}) - - # set_shade_smooth_1 = nw.new_node(Nodes.SetShadeSmooth, input_kwargs={'Geometry': set_material_5}) - - subtract_3 = nw.new_node(Nodes.Math, - input_kwargs={0: reroute_10, 1: group_input.outputs["PanelThickness"]}, - attrs={'operation': 'SUBTRACT'}) - - multiply_add = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["HeaterRadiusRatio"], 1: 2.0000, 2: 0.1000}, - attrs={'operation': 'MULTIPLY_ADD'}) - - heater = nw.new_node(nodegroup_heater().name, - input_kwargs={'width': reroute_11, 'depth': subtract_3, 'radius_ratio': group_input.outputs["HeaterRadiusRatio"], 'arrangement_ratio': multiply_add}) - - add_6 = nw.new_node(Nodes.Math, input_kwargs={0: reroute_8, 1: reroute_9}) - - combine_xyz_15 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': group_input.outputs["PanelThickness"], 'Z': add_6}) - - transform_2 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': heater, 'Translation': combine_xyz_15}) - - transform_2 = complete_no_bevel(nw, transform_2, preprocess) - - if use_gas: - join_geometry_2 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [set_material_5]}) - else: - join_geometry_2 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [set_material_5, transform_2]}) - - heater_1 = nw.new_node(Nodes.Reroute, input_kwargs={'Input': join_geometry_2}, label='heater') - - reroute_14 = nw.new_node(Nodes.Reroute, input_kwargs={'Input': group_input.outputs["Width"]}) - - combine_xyz_9 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': group_input.outputs["PanelThickness"], 'Y': reroute_14, 'Z': group_input.outputs["PanelHeight"]}) - - add_7 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Height"], 1: group_input.outputs["DoorThickness"]}) - - combine_xyz_8 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': add_7}) - - cube_2 = nw.new_node(nodegroup_cube().name, input_kwargs={'Size': combine_xyz_9, 'Pos': combine_xyz_8}) - - position_1 = nw.new_node(Nodes.InputPosition) - - center_1 = nw.new_node(nodegroup_center().name, - input_kwargs={'Geometry': cube_2, 'Vector': position_1, 'MarginX': -1.0000, 'MarginY': 0.0500, 'MarginZ': 0.0500}) - - set_material_4 = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': cube_2, 'Selection': center_1.outputs["In"], 'Material': group_input.outputs["Back"]}) - - set_material_7 = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': set_material_4, 'Selection': center_1.outputs["Out"], 'Material': group_input.outputs["Surface"]}) - - # set_shade_smooth_3 = nw.new_node(Nodes.SetShadeSmooth, input_kwargs={'Geometry': set_material_7}) - - reroute_13 = nw.new_node(Nodes.Reroute, 
input_kwargs={'Input': group_input.outputs["PanelThickness"]}) - - multiply_14 = nw.new_node(Nodes.Math, input_kwargs={0: reroute_14}, attrs={'operation': 'MULTIPLY'}) - - bounding_box = nw.new_node(Nodes.BoundingBox, input_kwargs={'Geometry': cube_2}) - - add_8 = nw.new_node(Nodes.VectorMath, input_kwargs={0: bounding_box.outputs["Min"], 1: bounding_box.outputs["Max"]}) - - scale = nw.new_node(Nodes.VectorMath, - input_kwargs={0: add_8.outputs["Vector"], 'Scale': 0.5000}, - attrs={'operation': 'SCALE'}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': scale.outputs["Vector"]}) - - combine_xyz_16 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': reroute_13, 'Y': multiply_14, 'Z': separate_xyz.outputs["Z"]}) - - multiply_15 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["PanelHeight"], 1: 0.2000}, - attrs={'operation': 'MULTIPLY'}) - - text_1 = nw.new_node(nodegroup_text().name, - input_kwargs={'Translation': combine_xyz_16, 'String': '12:01', 'Size': multiply_15}) - - set_material_7 = complete_bevel(nw, set_material_7, preprocess) - text_1 = complete_no_bevel(nw, text_1, preprocess) - - join_geometry_5 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [set_material_7, text_1]}) - - combine_xyz_21 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': group_input.outputs["BottonThickness"]}) - - curve_line = nw.new_node(Nodes.CurveLine, input_kwargs={'End': combine_xyz_21}) - - reroute_12 = nw.new_node(Nodes.Reroute, input_kwargs={'Input': group_input.outputs["BottonRadius"]}) - - curve_circle = nw.new_node(Nodes.CurveCircle, input_kwargs={'Radius': reroute_12}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': curve_line, 'Profile Curve': curve_circle.outputs["Curve"], 'Fill Caps': True}) - - add_9 = nw.new_node(Nodes.Math, input_kwargs={0: reroute_12, 1: 0.0050}) - - o = nw.new_node(nodegroup_o().name, input_kwargs={'Size': add_9}) - - join_geometry_4 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [curve_to_mesh, o]}) - - combine_xyz_10 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': reroute_13, 'Z': separate_xyz.outputs["Z"]}) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': join_geometry_4, 'Translation': combine_xyz_10, 'Rotation': (0.0000, 1.5708, 0.0000)}) - - reroute_16 = nw.new_node(Nodes.Reroute, input_kwargs={'Input': separate_xyz.outputs["Z"]}) - - reroute_15 = nw.new_node(Nodes.Reroute, input_kwargs={'Input': group_input.outputs["BottonRadius"]}) - - multiply_16 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["PanelHeight"], 1: 0.0500}, - attrs={'operation': 'MULTIPLY'}) - - multiply_add_1 = nw.new_node(Nodes.Math, input_kwargs={0: reroute_15, 1: 1.0000, 2: multiply_16}, attrs={'operation': 'MULTIPLY_ADD'}) - - add_10 = nw.new_node(Nodes.Math, input_kwargs={0: reroute_16, 1: multiply_add_1}) - - combine_xyz_17 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': reroute_13, 'Z': add_10}) - - multiply_17 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["BottonRadius"], 1: 0.2500}, - attrs={'operation': 'MULTIPLY'}) - - text_2 = nw.new_node(nodegroup_text().name, - input_kwargs={'Translation': combine_xyz_17, 'String': 'Off', 'Size': multiply_17}) - - multiply_add_2 = nw.new_node(Nodes.Math, input_kwargs={0: reroute_15, 1: 0.7000, 2: multiply_16}, attrs={'operation': 'MULTIPLY_ADD'}) - - add_11 = nw.new_node(Nodes.Math, input_kwargs={0: reroute_16, 1: multiply_add_2}) - - combine_xyz_18 = nw.new_node(Nodes.CombineXYZ, 
input_kwargs={'X': reroute_13, 'Y': multiply_add_2, 'Z': add_11}) - - text_3 = nw.new_node(nodegroup_text().name, - input_kwargs={'Translation': combine_xyz_18, 'String': 'High', 'Size': multiply_17}) - - multiply_18 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_16, 1: -1.0000}, attrs={'operation': 'MULTIPLY'}) - - multiply_add_3 = nw.new_node(Nodes.Math, - input_kwargs={0: reroute_15, 1: -0.7000, 2: multiply_18}, - attrs={'operation': 'MULTIPLY_ADD'}) - - combine_xyz_19 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': reroute_13, 'Y': multiply_add_3, 'Z': add_11}) - - text_4 = nw.new_node(nodegroup_text().name, - input_kwargs={'Translation': combine_xyz_19, 'String': 'Low', 'Size': multiply_17}) - - add_12 = nw.new_node(Nodes.Math, input_kwargs={0: reroute_13, 1: group_input.outputs["BottonThickness"]}) - - combine_xyz_20 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': add_12, 'Z': separate_xyz.outputs["Z"]}) - - multiply_19 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["BottonThickness"], 1: 0.1000}, - attrs={'operation': 'MULTIPLY'}) - - text_5 = nw.new_node(nodegroup_text().name, - input_kwargs={'Translation': combine_xyz_20, 'String': '1', 'Size': group_input.outputs["BottonRadius"], 'Offset Scale': multiply_19}) - - join_geometry_6 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [transform, text_2, text_3, text_4, text_5]}) - - geometry_to_instance_2 = nw.new_node('GeometryNodeGeometryToInstance', input_kwargs={'Geometry': join_geometry_6}) - - add_13 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["BottonAmount"], 1: 2.0000}) - - reroute_6 = nw.new_node(Nodes.Reroute, input_kwargs={'Input': add_13}) - - duplicate_elements_1 = nw.new_node(Nodes.DuplicateElements, - input_kwargs={'Geometry': geometry_to_instance_2, 'Amount': reroute_6}, - attrs={'domain': 'INSTANCE'}) - - add_14 = nw.new_node(Nodes.Math, input_kwargs={0: duplicate_elements_1.outputs["Duplicate Index"], 1: 1.0000}) - - add_15 = nw.new_node(Nodes.Math, input_kwargs={0: reroute_6, 1: 1.0000}) - - divide_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Width"], 1: add_15}, attrs={'operation': 'DIVIDE'}) - - multiply_20 = nw.new_node(Nodes.Math, input_kwargs={0: add_14, 1: divide_1}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_11 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': multiply_20}) - - set_position_1 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': duplicate_elements_1.outputs["Geometry"], 'Offset': combine_xyz_11}) - - multiply_21 = nw.new_node(Nodes.Math, input_kwargs={0: add_13}, attrs={'operation': 'MULTIPLY'}) - - add_16 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_21, 1: -1.0100}) - - greater_than = nw.new_node(Nodes.Math, - input_kwargs={0: duplicate_elements_1.outputs["Duplicate Index"], 1: add_16}, - attrs={'operation': 'GREATER_THAN'}) - - add_17 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_21, 1: 0.9900}) - - less_than = nw.new_node(Nodes.Math, - input_kwargs={0: duplicate_elements_1.outputs["Duplicate Index"], 1: add_17}, - attrs={'operation': 'LESS_THAN'}) - - minimum = nw.new_node(Nodes.Math, input_kwargs={0: greater_than, 1: less_than}, attrs={'operation': 'MINIMUM'}) - - delete_geometry = nw.new_node(Nodes.DeleteGeometry, - input_kwargs={'Geometry': set_position_1, 'Selection': minimum}, - attrs={'domain': 'INSTANCE'}) - - set_material_6 = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': delete_geometry, 'Material': group_input.outputs["WhiteMetal"]}) - - botton = 
nw.new_node(Nodes.Reroute, input_kwargs={'Input': set_material_6}, label='botton') - - botton = complete_no_bevel(nw, botton, preprocess) - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [join_geometry_5, botton]}) - - geometry_to_instance_3 = nw.new_node('GeometryNodeGeometryToInstance', input_kwargs={'Geometry': join_geometry_1}) - - combine_xyz_14 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': group_input.outputs["Height"]}) - - panel_bbox = nw.new_node(Nodes.BoundingBox,input_kwargs={'Geometry': geometry_to_instance_3}) - - switch_1 = nw.new_node(Nodes.Switch, input_kwargs={'Switch': group_input.outputs["is_placeholder"], 'False': geometry_to_instance_3, 'True': panel_bbox}) - - rotate_instances_1 = nw.new_node(Nodes.RotateInstances, - input_kwargs={'Instances': switch_1, 'Rotation': (0.0000, -0.1745, 0.0000), 'Pivot Point': combine_xyz_14}) - - rotate_instances_1 = nw.new_node(Nodes.RealizeInstances, [rotate_instances_1]) - - panel = nw.new_node(Nodes.Reroute, input_kwargs={'Input': rotate_instances_1}, label='panel') - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': group_input.outputs["Depth"], 'Y': group_input.outputs["Width"], 'Z': group_input.outputs["Height"]}) - - hollowcube = nw.new_node(nodegroup_hollow_cube().name, - input_kwargs={'Size': combine_xyz, 'Thickness': group_input.outputs["DoorThickness"], 'Switch2': True, 'Switch4': True}) - - set_material_1 = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': hollowcube, 'Material': group_input.outputs["Surface"]}) - - subdivide_mesh = nw.new_node(Nodes.SubdivideMesh, input_kwargs={'Mesh': set_material_1, 'Level': 0}) - - # set_shade_smooth_2 = nw.new_node(Nodes.SetShadeSmooth, input_kwargs={'Geometry': subdivide_mesh}) - - body = nw.new_node(Nodes.Reroute, input_kwargs={'Input': subdivide_mesh}, label='Body') - - join_geometry = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [door, racks, heater_1, panel, body]}) - - join_geometry_2 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [door, racks, heater_1, body]}) - body_bbox = nw.new_node(Nodes.BoundingBox,input_kwargs={'Geometry': join_geometry_2}) - join_geometry_3 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [body_bbox, panel]}) - - - switch_2 = nw.new_node(Nodes.Switch, input_kwargs={'Switch': group_input.outputs["is_placeholder"], 'False': join_geometry, 'True': join_geometry_3}) - geometry = nw.new_node(Nodes.RealizeInstances,[switch_2]) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': geometry}) diff --git a/infinigen/assets/appliances/tv.py b/infinigen/assets/appliances/tv.py deleted file mode 100644 index 2a1dcebc6..000000000 --- a/infinigen/assets/appliances/tv.py +++ /dev/null @@ -1,221 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory -# of this source tree. 
- -# Authors: -# - Lingjie Mei: primary author -# - Karhan Kayan: fix rotation - -import bpy -import bmesh -import numpy as np -from numpy.random import uniform - -from infinigen.assets.utils.decorate import read_co, write_attribute, write_co, read_area, mirror, read_normal -from infinigen.assets.utils.nodegroup import geo_radius -from infinigen.assets.utils.object import data2mesh, join_objects, mesh2obj, new_bbox, new_cube, new_plane -from infinigen.assets.utils.uv import compute_uv_direction, face_corner2faces, unwrap_faces -from infinigen.core import surface -from infinigen.core.placement.factory import AssetFactory -from infinigen.core.surface import read_attr_data, write_attr_data -from infinigen.core.util.blender import deep_clone_obj -from infinigen.core.util.math import FixedSeed -from infinigen.core.util.random import log_uniform -from infinigen.core.util import blender as butil -from infinigen.assets.material_assignments import AssetList -from infinigen.assets.materials.text import Text - - -class TVFactory(AssetFactory): - def __init__(self, factory_seed, coarse=False): - super(TVFactory, self).__init__(factory_seed, coarse) - with FixedSeed(self.factory_seed): - self.aspect_ratio = np.random.choice([9 / 16, 3 / 4]) - self.width = uniform(0.6, 2.1) - self.screen_bevel_width = uniform(0, .01) - self.side_margin = log_uniform(.005, .01) - self.bottom_margin = uniform(.005, .03) - self.depth = uniform(.02, .04) - self.has_depth_extrude = uniform() < .4 - if self.has_depth_extrude: - self.depth_extrude = self.depth * uniform(2, 5) - else: - self.depth_extrude = self.depth * 1.5 - self.leg_type = np.random.choice(['two-legged', 'single-legged']) # 'none', - self.leg_length = uniform(.1, .2) - self.leg_length_y = uniform(.1, .15) - self.leg_radius = uniform(.008, .015) - self.leg_width = uniform(.5, .8) - self.leg_bevel_width = uniform(.01, .02) - - materials = self.get_material_params() - self.surface = materials['surface'] - self.scratch = materials['scratch'] - self.edge_wear = materials['edge_wear'] - self.screen_surface = materials['screen_surface'] - self.support_surface = materials['support'] - - def get_material_params(self): - material_assignments = AssetList['TVFactory']() - surface = material_assignments['surface'].assign_material() - scratch_prob, edge_wear_prob = material_assignments['wear_tear_prob'] - scratch, edge_wear = material_assignments['wear_tear'] - - is_scratch = np.random.uniform() < scratch_prob - is_edge_wear = np.random.uniform() < edge_wear_prob - if not is_scratch: - scratch = None - - if not is_edge_wear: - edge_wear = None - - args = (self.factory_seed, False) - kwargs = {'emission': 0.01 if uniform() < 0.1 else uniform(2, 3)} - screen_surface = material_assignments['screen_surface'].assign_material() - if screen_surface == Text: - screen_surface = screen_surface(*args, **kwargs) - support = material_assignments['support'].assign_material() - return { - 'surface': surface, 'scratch': scratch, 'edge_wear': edge_wear, 'screen_surface': screen_surface, - 'support': support - } - - @property - def height(self): - return self.aspect_ratio * self.width - - @property - def total_width(self): - return self.width + 2 * self.side_margin - - @property - def total_height(self): - return self.height + self.side_margin + self.bottom_margin - - def create_placeholder(self, **kwargs) -> bpy.types.Object: - match self.leg_type: - case 'two-legged': - max_x = self.leg_length_y / 2 - (1 - self.leg_width) * self.depth_extrude - case _: - max_x = self.leg_length_y / 2 - 
self.depth_extrude / 2 - return new_bbox( - - self.depth_extrude - self.depth, max_x, -self.total_width / 2, - self.total_width / 2, -self.leg_length - self.leg_radius / 2, self.total_height - ) - - def create_asset(self, **params) -> bpy.types.Object: - obj = self.make_base() - self.make_screen(obj) - parts = [obj] - match self.leg_type: - case 'two-legged': - legs = self.add_two_legs() - case _: - legs = self.add_single_leg() - for l in legs: - write_attribute(l, 1, 'leg', 'FACE', 'INT') - parts.extend(legs) - obj = join_objects(parts) - obj.rotation_euler[2] = np.pi / 2 - butil.apply_transform(obj) - return obj - - def make_screen(self, obj): - cutter = new_cube() - cutter.location = 0, -1, 1 - butil.apply_transform(cutter, True) - cutter.scale = self.width / 2, 1, self.height / 2 - cutter.location = 0, 1e-3, self.bottom_margin - butil.apply_transform(cutter, True) - butil.modify_mesh(obj, 'BOOLEAN', object=cutter, operation='DIFFERENCE') - butil.delete(cutter) - areas = read_area(obj) - screen = np.zeros(len(areas), int) - y = read_normal(obj)[:, 1] < 0 - screen[np.argmax(areas + 1e5 * y)] = 1 - fc2f = face_corner2faces(obj) - unwrap_faces(obj, screen) - bbox = compute_uv_direction(obj, 'x', 'z', screen[fc2f]) - write_attr_data(obj, 'screen', screen, domain='FACE', type='INT') - self.screen_surface.apply(obj, 'screen', bbox) - - def make_base(self): - obj = new_cube() - obj.location = 0, 1, 1 - butil.apply_transform(obj, True) - obj.scale = self.total_width / 2, self.depth / 2, self.total_height / 2 - butil.apply_transform(obj) - butil.modify_mesh(obj, 'BEVEL', width=self.screen_bevel_width, segments=8) - if not self.has_depth_extrude: - return obj - with butil.ViewportMode(obj, 'EDIT'): - bm = bmesh.from_edit_mesh(obj.data) - geom = [f for f in bm.faces if f.normal[1] > .5] - bmesh.ops.delete(bm, geom=geom, context='FACES_KEEP_BOUNDARY') - bmesh.update_edit_mesh(obj.data) - bpy.ops.mesh.select_mode(type='EDGE') - bpy.ops.mesh.select_all(action='SELECT') - bpy.ops.mesh.region_to_loop() - height_min, height_max = self.total_height * uniform(.1, .3), self.total_height * uniform(.5, .7) - width = self.total_width * uniform(.3, .6) - extra = new_plane() - extra.scale = width / 2, (height_max - height_min) / 2, 1 - extra.rotation_euler[0] = -np.pi / 2 - extra.location = 0, self.depth_extrude + self.depth, self.total_height / 2 - obj = join_objects([obj, extra]) - with butil.ViewportMode(obj, 'EDIT'): - bpy.ops.mesh.select_mode(type='EDGE') - bpy.ops.mesh.bridge_edge_loops(number_cuts=32, profile_shape_factor=-uniform(.0, .4)) - x, y, z = read_co(obj).T - z += (height_max + height_min - self.total_height) / 2 * np.clip( - y - self.depth, 0, - None - ) / self.depth_extrude - write_co(obj, np.stack([x, y, z], -1)) - return obj - - def add_two_legs(self): - vertices = (-self.total_width / 2 * self.leg_width * uniform(0, .6), 0, self.total_height * uniform(.3, .5)), ( - 0, 0, -self.leg_length), ( - 0, self.leg_length_y / 2, -self.leg_length), (0, -self.leg_length_y / 2, -self.leg_length) - edges = (0, 1), (1, 2), (1, 3) - leg = mesh2obj(data2mesh(vertices, edges)) - surface.add_geomod(leg, geo_radius, apply=True, input_args=[self.leg_radius, 16]) - x, y, z = read_co(leg).T - write_co(leg, np.stack([x, y, np.maximum(z, -self.leg_length - self.leg_radius * uniform(.0, .6))], -1)) - leg_ = deep_clone_obj(leg) - butil.select_none() - leg.location = self.total_width / 2 * self.leg_width, (1 - self.leg_width) * self.depth_extrude, 0 - butil.apply_transform(leg, True) - mirror(leg_) - leg_.location = 
-self.total_width / 2 * self.leg_width, (1 - self.leg_width) * self.depth_extrude, 0 - butil.apply_transform(leg_, True) - return [leg, leg_] - - def add_single_leg(self): - leg = new_cube() - leg.location = 0, 1, 1 - butil.apply_transform(leg, True) - leg.location = 0, self.depth_extrude / 2, -self.leg_length - leg.scale = [self.total_width * uniform(.05, .1), self.leg_radius, - (self.leg_length + self.total_height * uniform(.3, .5)) / 2] - butil.apply_transform(leg, True) - butil.modify_mesh(leg, 'BEVEL', width=self.leg_bevel_width, segments=8) - base = new_cube() - base.location = 0, self.depth_extrude / 2, -self.leg_length - base.scale = [self.total_width * uniform(.15, .3), self.leg_length_y / 2, self.leg_radius] - butil.apply_transform(base, True) - butil.modify_mesh(base, 'BEVEL', width=self.leg_bevel_width, segments=8) - return [leg, base] - - def finalize_assets(self, assets): - self.surface.apply(assets, selection='!screen', rough=True, metal_color='bw') - self.support_surface.apply(assets, selection='leg', rough=True, metal_color='bw') - - -class MonitorFactory(TVFactory): - def __init__(self, factory_seed, coarse=False): - super(MonitorFactory, self).__init__(factory_seed, coarse) - with FixedSeed(self.factory_seed): - self.width = log_uniform(.4, .8) - self.leg_type = 'single-legged' diff --git a/infinigen/assets/bathroom/toilet.py b/infinigen/assets/bathroom/toilet.py deleted file mode 100644 index 2e82f638e..000000000 --- a/infinigen/assets/bathroom/toilet.py +++ /dev/null @@ -1,292 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Lingjie Mei -import bpy -import numpy as np -from numpy.random import uniform - -from infinigen.assets.utils.decorate import ( - read_center, read_co, read_edge_center, read_edges, read_normal, - select_edges, select_faces, select_vertices, subsurf, write_attribute, write_co, -) -from infinigen.assets.utils.draw import align_bezier -from infinigen.assets.utils.object import join_objects, new_bbox, new_cube, new_cylinder -from infinigen.core.placement.factory import AssetFactory -from infinigen.core.util import blender as butil -from infinigen.core.util.blender import deep_clone_obj -from infinigen.core.util.math import normalize, FixedSeed -from infinigen.core.util.random import log_uniform -from infinigen.assets.material_assignments import AssetList - - -class ToiletFactory(AssetFactory): - def __init__(self, factory_seed, coarse=False): - super().__init__(factory_seed, coarse) - with FixedSeed(self.factory_seed): - self.size = uniform(.4, .5) - self.width = self.size * uniform(.7, .8) - self.height = self.size * uniform(.8, .9) - self.size_mid = uniform(.6, .65) - self.curve_scale = log_uniform(.8, 1.2, 4) - self.depth = self.size * uniform(.5, .6) - self.tube_scale = uniform(.25, .3) - self.thickness = uniform(.05, .06) - self.extrude_height = uniform(.015, .02) - self.stand_depth = self.depth * uniform(.85, .95) - self.stand_scale = uniform(.7, .85) - self.bottom_offset = uniform(.5, 1.5) - self.back_thickness = self.thickness * uniform(0, .8) - self.back_size = self.size * uniform(.55, .65) - self.back_scale = uniform(.8, 1.) - self.seat_thickness = uniform(.1, .3) * self.thickness - self.seat_size = self.thickness * uniform(1.2, 1.6) - self.has_seat_cut = uniform() < .1 - self.tank_width = self.width * uniform(1., 1.2) - self.tank_height = self.height * uniform(.6, 1.) 
- self.tank_size = self.back_size - self.seat_size - uniform(.02, .03) - self.tank_cap_height = uniform(.03, .04) - self.tank_cap_extrude = 0 if uniform() < .5 else uniform(.005, .01) - self.cover_rotation = - uniform(0, np.pi / 2) - self.hardware_type = np.random.choice(['button', 'handle']) - self.hardware_cap = uniform(.01, .015) - self.hardware_radius = uniform(.015, .02) - self.hardware_length = uniform(.04, .05) - self.hardware_on_side = uniform() < .5 - material_assignments = AssetList['ToiletFactory']() - self.surface = material_assignments['surface'].assign_material() - self.hardware_surface = material_assignments['hardware_surface'].assign_material() - - is_scratch = uniform() < material_assignments['wear_tear_prob'][0] - is_edge_wear = uniform() < material_assignments['wear_tear_prob'][1] - self.scratch = material_assignments['wear_tear'][0] if is_scratch else None - self.edge_wear = material_assignments['wear_tear'][1] if is_edge_wear else None - - @property - def mid_offset(self): - return (1 - self.size_mid) * self.size - - def create_placeholder(self, **kwargs) -> bpy.types.Object: - return new_bbox( - -self.mid_offset - self.back_size - self.tank_cap_extrude, - self.size_mid * self.size + self.thickness + self.thickness, - -self.width / 2 - self.thickness * 1.1, self.width / 2 + self.thickness * 1.1, -self.height, - max( - self.tank_height, - -np.sin(self.cover_rotation) * (self.seat_size + self.size + self.thickness + self.thickness) - ) - ) - - def create_asset(self, **params) -> bpy.types.Object: - upper = self.build_curve() - lower = deep_clone_obj(upper) - lower.scale = [self.tube_scale] * 3 - lower.location = 0, self.tube_scale * self.mid_offset / 2, -self.depth - butil.apply_transform(lower, True) - bottom = deep_clone_obj(upper) - bottom.scale = [self.stand_scale] * 3 - bottom.location = 0, self.tube_scale * ( - 1 - self.size_mid) * self.size / 2 * self.bottom_offset, -self.height - butil.apply_transform(bottom, True) - - obj = self.make_tube(lower, upper) - seat, cover = self.make_seat(obj) - stand = self.make_stand(obj, bottom) - back = self.make_back(obj) - tank = self.make_tank() - butil.modify_mesh(obj, 'BEVEL', segments=2) - match self.hardware_type: - case 'button': - hardware = self.add_button() - case _: - hardware = self.add_handle() - write_attribute(hardware, 1, 'hardware', 'FACE') - obj = join_objects([obj, seat, cover, stand, back, tank, hardware]) - obj.rotation_euler[-1] = np.pi / 2 - butil.apply_transform(obj) - return obj - - def build_curve(self): - x_anchors = [0, self.width / 2, 0] - y_anchors = [-self.size_mid * self.size, 0, self.mid_offset] - axes = [np.array([1, 0, 0]), np.array([0, 1, 0]), np.array([1, 0, 0])] - obj = align_bezier([x_anchors, y_anchors, 0], axes, self.curve_scale) - butil.modify_mesh(obj, 'MIRROR', use_axis=(True, False, False)) - return obj - - def make_tube(self, lower, upper): - obj = join_objects([upper, lower]) - with butil.ViewportMode(obj, 'EDIT'): - bpy.ops.mesh.select_mode(type='EDGE') - bpy.ops.mesh.select_all(action='SELECT') - bpy.ops.mesh.bridge_edge_loops( - number_cuts=np.random.randint(12, 16), - profile_shape_factor=uniform(.1, .2), interpolation='SURFACE' - ) - butil.modify_mesh( - obj, 'SOLIDIFY', thickness=self.thickness, offset=1, solidify_mode='NON_MANIFOLD', - nonmanifold_boundary_mode='FLAT' - ) - normal = read_normal(obj) - select_faces(obj, normal[:, -1] > .9) - with butil.ViewportMode(obj, 'EDIT'): - bpy.ops.mesh.extrude_region_move( - TRANSFORM_OT_translate={'value': (0, 0, self.thickness + 
self.extrude_height)} - ) - x, y, z = read_co(obj).T - write_co(obj, np.stack([x, y, np.clip(z, None, self.extrude_height)], -1)) - return obj - - def make_seat(self, obj): - seat = self.make_plane(obj) - cover = deep_clone_obj(seat) - butil.modify_mesh(seat, 'SOLIDIFY', thickness=self.extrude_height, offset=1) - if self.has_seat_cut: - cutter = new_cube() - cutter.scale = [self.thickness] * 3 - cutter.location = 0, -self.thickness / 2 - self.size_mid * self.size, 0 - butil.apply_transform(cutter, True) - butil.select_none() - butil.modify_mesh(seat, 'BOOLEAN', object=cutter, operation='DIFFERENCE') - butil.delete(cutter) - butil.modify_mesh(seat, 'BEVEL', segments=2) - - x, y, _ = read_edge_center(cover).T - i = np.argmin(np.abs(x) + np.abs(y)) - selection = np.full(len(x), False) - selection[i] = True - select_edges(cover, selection) - with butil.ViewportMode(cover, 'EDIT'): - bpy.ops.mesh.loop_multi_select() - bpy.ops.mesh.fill_grid() - butil.modify_mesh(cover, 'SOLIDIFY', thickness=self.extrude_height, offset=1) - cover.location = [0, -self.mid_offset - self.seat_size + self.extrude_height / 2, - -self.extrude_height / 2] - butil.apply_transform(cover, True) - cover.rotation_euler[0] = self.cover_rotation - cover.location = [0, self.mid_offset + self.seat_size - self.extrude_height / 2, - self.extrude_height * 1.5] - butil.apply_transform(cover, True) - butil.modify_mesh(cover, 'BEVEL', segments=2) - return seat, cover - - def make_plane(self, obj): - select_faces(obj, lambda x, y, z: z > self.extrude_height * 2 / 3) - with butil.ViewportMode(obj, 'EDIT'): - bpy.ops.mesh.duplicate_move() - bpy.ops.mesh.separate(type='SELECTED') - seat = next(o for o in bpy.context.selected_objects if o != obj) - butil.select_none() - select_vertices(seat, lambda x, y, z: y > self.mid_offset + self.seat_thickness) - with butil.ViewportMode(seat, 'EDIT'): - bpy.ops.mesh.extrude_edges_move( - TRANSFORM_OT_translate={'value': (0, self.seat_size + self.thickness * 2, 0)} - ) - x, y, z = read_co(seat).T - write_co(seat, np.stack([x, np.clip(y, None, self.mid_offset + self.seat_size), z], -1)) - return seat - - def make_stand(self, obj, bottom): - co = read_co(obj)[read_edges(obj).reshape(-1)].reshape(-1, 2, 3) - horizontal = np.abs(normalize(co[:, 0] - co[:, 1])[:, -1]) < .1 - x, y, z = read_edge_center(obj).T - under_depth = z < -self.stand_depth - i = np.argmin(y - horizontal - under_depth) - selection = np.full(len(co), False) - selection[i] = True - select_edges(obj, selection) - with butil.ViewportMode(obj, 'EDIT'): - bpy.ops.mesh.loop_multi_select() - bpy.ops.mesh.duplicate_move() - bpy.ops.mesh.separate(type='SELECTED') - stand = next(o for o in bpy.context.selected_objects if o != obj) - stand = join_objects([stand, bottom]) - with butil.ViewportMode(stand, 'EDIT'): - bpy.ops.mesh.select_mode(type='EDGE') - bpy.ops.mesh.select_all(action='SELECT') - bpy.ops.mesh.bridge_edge_loops( - number_cuts=np.random.randint(12, 16), - profile_shape_factor=uniform(.0, .15) - ) - return stand - - def make_back(self, obj): - back = read_center(obj)[:, 1] > self.mid_offset - self.back_thickness - back_facing = read_normal(obj)[:, 1] > .1 - butil.select_none() - select_faces(obj, back & back_facing) - with butil.ViewportMode(obj, 'EDIT'): - bpy.ops.mesh.region_to_loop() - bpy.ops.mesh.duplicate_move() - bpy.ops.mesh.separate(type='SELECTED') - back = next(o for o in bpy.context.selected_objects if o != obj) - butil.modify_mesh(back, 'CORRECTIVE_SMOOTH') - butil.select_none() - with butil.ViewportMode(back, 'EDIT'): - 
bpy.ops.mesh.select_all(action='SELECT') - bpy.ops.mesh.extrude_edges_move( - TRANSFORM_OT_translate={'value': (0, self.back_size + self.thickness * 2, 0)} - ) - bpy.ops.transform.resize(value=(self.back_scale, 1, 1)) - bpy.ops.mesh.edge_face_add() - back.location[1] -= .01 - butil.apply_transform(back, True) - x, y, z = read_co(back).T - write_co(back, np.stack([x, np.clip(y, None, self.mid_offset + self.back_size), z], -1)) - return back - - def make_tank(self): - tank = new_cube() - tank.scale = self.tank_width / 2, self.tank_size / 2, self.tank_height / 2 - tank.location = 0, self.mid_offset + self.back_size - self.tank_size / 2, self.tank_height / 2 - butil.apply_transform(tank, True) - subsurf(tank, 2, True) - butil.modify_mesh(tank, 'BEVEL', segments=2) - cap = new_cube() - cap.scale = self.tank_width / 2 + self.tank_cap_extrude, self.tank_size / 2 + self.tank_cap_extrude, \ - self.tank_cap_height / 2 - cap.location = 0, self.mid_offset + self.back_size - self.tank_size / 2, self.tank_height - butil.apply_transform(cap, True) - butil.modify_mesh(cap, 'BEVEL', width=uniform(0, self.extrude_height), segments=4) - tank = join_objects([tank, cap]) - return tank - - def add_button(self): - obj = new_cylinder() - obj.scale = self.hardware_radius, self.hardware_radius, self.tank_cap_height / 2 + 1e-3 - obj.location = 0, self.mid_offset + self.back_size - self.tank_size / 2, self.tank_height - butil.apply_transform(obj, True) - return obj - - def add_handle(self): - obj = new_cylinder() - obj.scale = self.hardware_radius, self.hardware_radius, self.hardware_cap - obj.rotation_euler[0] = np.pi / 2 - butil.apply_transform(obj, True) - lever = new_cylinder() - lever.scale = self.hardware_radius / 2, self.hardware_radius / 2, self.hardware_length - lever.rotation_euler[1] = np.pi / 2 - lever.location = [-self.hardware_radius * uniform(0, .5), -self.hardware_cap, - -self.hardware_radius * uniform(0, .5)] - butil.apply_transform(lever, True) - obj = join_objects([obj, lever]) - if self.hardware_on_side: - obj.location = [-self.tank_width / 2 + self.hardware_radius + uniform(.01, .02), - self.mid_offset + self.back_size - self.tank_size, - self.tank_height - self.hardware_radius - uniform(.02, .03)] - else: - obj.location = [-self.tank_width / 2, - self.mid_offset + self.back_size - self.tank_size + self.hardware_radius + uniform(.01, .02), - self.tank_height - self.hardware_radius - uniform(.02, .03)] - obj.rotation_euler[-1] = -np.pi / 2 - butil.apply_transform(obj, True) - butil.modify_mesh(obj, 'BEVEL', width=uniform(.005, .01), segments=2) - return obj - - def finalize_assets(self, assets): - self.surface.apply(assets, clear=True, metal_color='plain') - self.hardware_surface.apply(assets, 'hardware', metal_color='natural') - if self.scratch: - self.scratch.apply(assets) - if self.edge_wear: - self.edge_wear.apply(assets) diff --git a/infinigen/assets/cactus/columnar.py b/infinigen/assets/cactus/columnar.py deleted file mode 100644 index 84480da89..000000000 --- a/infinigen/assets/cactus/columnar.py +++ /dev/null @@ -1,99 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
- -# Authors: Lingjie Mei - - -import bpy -import numpy as np -from numpy.random import uniform - -from infinigen.assets.utils.decorate import geo_extension -from infinigen.assets.utils.nodegroup import align_tilt -from infinigen.core.nodes.node_info import Nodes -from infinigen.core.nodes.node_wrangler import NodeWrangler -from infinigen.core import surface -from infinigen.assets.cactus.base import BaseCactusFactory -from infinigen.assets.trees.tree import build_radius_tree -from infinigen.core import tagging - - -class ColumnarBaseCactusFactory(BaseCactusFactory): - spike_distance = .08 - - @staticmethod - def radius_fn(base_radius, size, resolution): - radius_decay = uniform(.5, .8) - radius_decay_root = uniform(.7, .9) - leaf_alpha = uniform(2, 3) - radius = base_radius * radius_decay * np.ones(size * resolution) - radius[:resolution] *= radius_decay_root ** (1 - np.arange(resolution) / resolution) - radius[-resolution:] *= (1 - (np.arange(resolution) / resolution) ** leaf_alpha) ** (1 / leaf_alpha) - return radius - - @property - def branch_config(self): - n_major = 16 - n_minor = np.random.randint(10, 14) - b_minor = np.random.randint(2, 4) - while True: - angles = uniform(0, np.pi * 2, b_minor) - s = np.sort(angles) - if (np.concatenate([s[1:], [s[0] + np.pi * 2]]) - s > np.pi / 3).all(): - break - minor_config = { - 'n': b_minor, - 'path_kargs': lambda idx: { - 'n_pts': n_minor, - 'std': .4, - 'momentum': .1, - 'sz': .2, - 'pull_dir': [0, 0, 1], - 'pull_init': 0., - 'pull_factor': 4. - }, - 'spawn_kargs': lambda idx: { - 'ang_min': np.pi / 2.5, - 'ang_max': np.pi / 2, - 'rng': [.2, .6], - 'axis2': [np.cos(angles[idx]), np.sin(angles[idx]), 0] - }, - 'children': [] - } - major_config = { - 'n': 1, - 'path_kargs': lambda idx: {'n_pts': n_major, 'std': .4, 'momentum': .99, 'sz': .3}, - 'spawn_kargs': lambda idx: {'init_vec': [0, 0, 1]}, - 'children': [minor_config] - } - return major_config - - def create_asset(self, face_size=.01, **params) -> bpy.types.Object: - resolution = 16 - base_radius = .25 - obj = build_radius_tree(self.radius_fn, self.branch_config, base_radius, resolution, True) - surface.add_geomod(obj, self.geo_star, apply=True, input_attributes=[None, 'radius'], - attributes=['selection']) - surface.add_geomod(obj, geo_extension, apply=True, input_kwargs={'musgrave_dimensions': '2D'}) - return obj - - @staticmethod - def geo_star(nw: NodeWrangler): - perturb = .1 - curve, radius = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketFloat', 'Radius', None)]).outputs[:2] - star_resolution = np.random.randint(5, 8) - circle = nw.new_node(Nodes.MeshCircle, [star_resolution * 3]) - circle = nw.new_node(Nodes.SetPosition, [circle, None, None, nw.uniform([-perturb] * 3, [perturb] * 3)]) - circle = nw.new_node(Nodes.Transform, [circle], input_kwargs={'Scale': [*uniform(.8, 1., 2), 1]}) - selection = nw.compare('EQUAL', nw.math('MODULO', nw.new_node(Nodes.Index), 2), 0) - circle, _, selection = nw.new_node(Nodes.CaptureAttribute, [circle, None, selection]).outputs[:3] - circle = nw.new_node(Nodes.SetPosition, [circle, selection, - nw.scale(nw.new_node(Nodes.InputPosition), uniform(1.15, 1.25))]) - profile_curve = nw.new_node(Nodes.MeshToCurve, [circle]) - - curve = nw.new_node(Nodes.MeshToCurve, [curve]) - curve = align_tilt(nw, curve, noise_strength=uniform(np.pi / 4, np.pi /2)) - curve = nw.new_node(Nodes.SetCurveRadius, [curve, None, radius]) - geometry = nw.curve2mesh(curve, profile_curve) - nw.new_node(Nodes.GroupOutput, 
input_kwargs={'Geometry': geometry, 'Selection': selection})
diff --git a/infinigen/assets/cactus/globular.py b/infinigen/assets/cactus/globular.py
deleted file mode 100644
index b78785f93..000000000
--- a/infinigen/assets/cactus/globular.py
+++ /dev/null
@@ -1,58 +0,0 @@
-# Copyright (c) Princeton University.
-# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree.
-
-# Authors: Lingjie Mei
-
-
-import bpy
-from numpy.random import uniform
-
-from infinigen.assets.cactus.base import BaseCactusFactory
-import numpy as np
-
-from infinigen.assets.utils.object import new_cube
-from infinigen.assets.utils.decorate import geo_extension
-from infinigen.core.util.random import log_uniform
-from infinigen.core.nodes.node_info import Nodes
-from infinigen.core.nodes.node_wrangler import NodeWrangler
-from infinigen.core import surface
-from infinigen.core.util import blender as butil
-from infinigen.core import tagging
-
-
-class GlobularBaseCactusFactory(BaseCactusFactory):
-    spike_distance = .08
-
-    @staticmethod
-    def geo_globular(nw: NodeWrangler):
-        star_resolution = np.random.randint(6, 12)
-        resolution = 64
-        frequency = uniform(-.2, .2)
-        circle = nw.new_node(Nodes.MeshCircle, [star_resolution * 3])
-        selection = nw.compare('EQUAL', nw.math('MODULO', nw.new_node(Nodes.Index), 2), 0)
-        circle, _, selection = nw.new_node(Nodes.CaptureAttribute, [circle, None, selection]).outputs[:3]
-        circle = nw.new_node(Nodes.SetPosition,
-            [circle, selection, nw.scale(nw.new_node(Nodes.InputPosition), uniform(1.1, 1.2))])
-        profile_curve = nw.new_node(Nodes.MeshToCurve, [circle])
-        curve = nw.new_node(Nodes.ResampleCurve, [nw.new_node(Nodes.CurveLine), None, resolution])
-        anchors = [(0, uniform(.2, .4)), (uniform(.4, .6), log_uniform(.5, .8)),
-            (uniform(.8, .85), uniform(.4, .6)), (1., .05)]
-        radius = nw.scalar_multiply(nw.build_float_curve(nw.new_node(Nodes.SplineParameter), anchors, 'AUTO'),
-            log_uniform(.5, 1.))
-        curve = nw.new_node(Nodes.SetCurveRadius, [curve, None, radius])
-        curve = nw.new_node(Nodes.SetCurveTilt, [curve, None,
-            nw.scalar_multiply(nw.new_node(Nodes.SplineParameter), 2 * np.pi * frequency)])
-        geometry = nw.curve2mesh(curve, profile_curve)
-        nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': geometry, 'Selection': selection})
-
-    def create_asset(self, face_size=.01, **params) -> bpy.types.Object:
-
-        obj = new_cube()
-        surface.add_geomod(obj, self.geo_globular, apply=True, attributes=['selection'])
-        surface.add_geomod(obj, geo_extension, apply=True, input_kwargs={'musgrave_dimensions': '2D'})
-
-        obj.scale = uniform(.8, 1.5, 3)
-        obj.rotation_euler[-1] = uniform(0, np.pi * 2)
-        butil.apply_transform(obj)
-
-        return obj
diff --git a/infinigen/assets/cactus/kalidium.py b/infinigen/assets/cactus/kalidium.py
deleted file mode 100644
index e0dc9c0e9..000000000
--- a/infinigen/assets/cactus/kalidium.py
+++ /dev/null
@@ -1,98 +0,0 @@
-# Copyright (c) Princeton University.
-# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree.
- -# Authors: Lingjie Mei - - -import bpy -import numpy as np -from numpy.random import uniform - -from infinigen.assets.trees.tree import TreeVertices, build_radius_tree, recursive_path -from infinigen.assets.utils.nodegroup import geo_radius -from infinigen.assets.utils.object import data2mesh, mesh2obj, new_cube, origin2lowest, separate_loose -from infinigen.assets.utils.decorate import displace_vertices, geo_extension, read_co, remove_vertices, \ - subsurface2face_size -from infinigen.assets.utils.shortest_path import geo_shortest_path -from infinigen.core.nodes.node_info import Nodes - -from infinigen.core.placement.factory import AssetFactory, make_asset_collection -from infinigen.core.nodes.node_wrangler import NodeWrangler -from infinigen.core import surface -from infinigen.assets.cactus.base import BaseCactusFactory -from infinigen.core.util import blender as butil -from infinigen.core.tagging import tag_object, tag_nodegroup - - -class KalidiumBaseCactusFactory(BaseCactusFactory): - cap_percentage = .0 - noise_strength = .0 - density = .0 - - @staticmethod - def build_twig(i): - branch_config = { - 'n': 1, - 'path_kargs': lambda idx: {'n_pts': 5, 'std': .5, 'momentum': .85, 'sz': .01}, - 'spawn_kargs': lambda idx: {'init_vec': (0, 0, 1)} - } - obj = build_radius_tree(None, branch_config, .005) - surface.add_geomod(obj, geo_radius, apply=True, input_args=['radius']) - return obj - - def create_asset(self, face_size=.01, **params) -> bpy.types.Object: - resolution = 20 - obj = new_cube(location=(1, 1, 1)) - butil.modify_mesh(obj, 'ARRAY', count=resolution, relative_offset_displace=(1, 0, 0), - use_merge_vertices=True) - butil.modify_mesh(obj, 'ARRAY', count=resolution, relative_offset_displace=(0, 1, 0), - use_merge_vertices=True) - butil.modify_mesh(obj, 'ARRAY', count=resolution, relative_offset_displace=(0, 0, 1), - use_merge_vertices=True) - obj.scale = [1 / resolution] * 3 - obj.location = -1, -1, -.1 - butil.apply_transform(obj, loc=True) - remove_vertices(obj, - lambda x, y, z: (x ** 2 + y ** 2 + (z - 1) ** 2 > 1.1) | (uniform(0, 1, len(x)) < .05)) - end_indices = np.nonzero(read_co(obj)[:, -1] < 5 / resolution)[0] - end_index = lambda nw: nw.build_index_case(np.random.choice(end_indices, 5)) - displace_vertices(obj, lambda x, y, z: uniform(-.8 / resolution, .8 / resolution, (3, len(x)))) - with butil.ViewportMode(obj, 'EDIT'): - bpy.ops.mesh.quads_convert_to_tris(quad_method='BEAUTY', ngon_method='BEAUTY') - surface.add_geomod(obj, geo_extension, apply=True) - - weight = lambda nw: nw.scalar_multiply( - nw.vector_math('DISTANCE', *nw.new_node(Nodes.InputEdgeVertices).outputs[2:]), nw.uniform(.8, 1)) - surface.add_geomod(obj, geo_shortest_path, apply=True, input_args=[end_index, weight, .05]) - surface.add_geomod(obj, geo_radius, apply=True, input_args=[.006]) - - twigs = make_asset_collection(self.build_twig, 5, verbose=False) - surface.add_geomod(obj, self.geo_twigs, apply=True, input_args=[twigs]) - butil.delete_collection(twigs) - obj = separate_loose(obj) - - obj.scale = uniform(.8, 1.2, 3) - butil.apply_transform(obj) - subsurface2face_size(obj, face_size) - origin2lowest(obj) - tag_object(obj, 'kalidium_cactus') - return obj - - @staticmethod - def geo_twigs(nw: NodeWrangler, instances): - geometry = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketGeometry', 'Geometry', None)]) - - points, _, rotation = nw.new_node(Nodes.DistributePointsOnFaces, [geometry], - input_kwargs={'Density': 2e3}).outputs[:3] - points = nw.new_node(Nodes.MergeByDistance, [points, 
None, .005]) - perturb = .4 - rotation = nw.new_node(Nodes.AlignEulerToVector, - [nw.add(rotation, nw.uniform([-perturb] * 3, [perturb] * 3)), - nw.uniform(.2, .5)], attrs={'axis': 'Z'}) - instances = nw.new_node(Nodes.CollectionInfo, [instances, True, True]) - - twigs = nw.new_node(Nodes.RealizeInstances, [nw.new_node(Nodes.InstanceOnPoints, - [points, None, instances, True, None, rotation, - nw.combine(1, 1, nw.uniform(1., 1.5))])]) - geometry = nw.new_node(Nodes.JoinGeometry, [[geometry, twigs]]) - nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': geometry}) diff --git a/infinigen/assets/cactus/spike.py b/infinigen/assets/cactus/spike.py deleted file mode 100644 index 527d9a584..000000000 --- a/infinigen/assets/cactus/spike.py +++ /dev/null @@ -1,130 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Lingjie Mei - - -import colorsys -import numpy as np -from numpy.random import uniform - -from infinigen.core.util.color import hsv2rgba -from infinigen.core.util import blender as butil -from infinigen.assets.utils.misc import assign_material, sample_direction, toggle_show, toggle_hide -from infinigen.assets.utils.nodegroup import geo_radius -from infinigen.core.placement.factory import AssetFactory, make_asset_collection -from infinigen.core.nodes.node_wrangler import NodeWrangler, Nodes -from infinigen.core import surface -from infinigen.assets.trees.tree import build_radius_tree -import infinigen.core.util.blender as butil -from infinigen.core.util.blender import deep_clone_obj -from infinigen.core.tagging import tag_object, tag_nodegroup, COMBINED_ATTR_NAME - -def build_spikes(base_radius=.002, **kwargs): - n_branch = 4 - n_major = 9 - branch_config = { - 'n': n_branch, - 'path_kargs': lambda idx: {'n_pts': n_major, 'std': .5, 'momentum': .85, 'sz': uniform(.005, .01)}, - 'spawn_kargs': lambda idx: {'init_vec': sample_direction(.8)} - } - - radius_fn = lambda base_radius, size, resolution: base_radius * .5 ** ( - np.arange(size * resolution) / (size * resolution)) - obj = build_radius_tree(radius_fn, branch_config, base_radius) - surface.add_geomod(obj, geo_radius, apply=True, input_args=['radius', None, .001]) - return obj - - -def make_default_selections(spike_distance, cap_percentage, density): - def selection(nw: NodeWrangler, selected, geometry): - z = nw.separate(nw.new_node(Nodes.InputPosition))[-1] - z_stat = nw.new_node(Nodes.AttributeStatistic, [geometry, None, z]).outputs - percentage = nw.scalar_divide(nw.scalar_sub(z_stat['Max'], z), z_stat['Range']) - is_cap = nw.bernoulli(nw.build_float_curve(percentage, [(0, 1), (cap_percentage, .5), (1, 0)])) - cap = nw.new_node(Nodes.SeparateGeometry, [geometry, is_cap]) - cap = nw.new_node(Nodes.MergeByDistance, [cap, None, spike_distance / 2]) - - points = nw.new_node(Nodes.DistributePointsOnFaces, - input_kwargs={'Mesh': geometry, 'Selection': selected, 'Density': density - }).outputs['Points'] - points = nw.new_node(Nodes.MergeByDistance, [points, None, spike_distance]) - - all_points = nw.new_node(Nodes.JoinGeometry, [[cap, points]]) - return all_points - - return selection - - -def geo_spikes(nw: NodeWrangler, spikes, points_fn=None, realize=True): - - geometry, selection = nw.new_node( - Nodes.GroupInput, - expose_input=[ - ('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketFloat', 'Selection', None) - ] - ).outputs[:2] - - capture = nw.new_node(Nodes.CaptureAttribute, - 
input_kwargs={'Geometry': geometry, 'Value': nw.new_node(Nodes.InputNormal)}) - - selected = nw.compare('GREATER_THAN', selection, .8) - spikes = nw.new_node(Nodes.CollectionInfo, [spikes, True, True]) - - rotation = nw.new_node(Nodes.AlignEulerToVector, input_kwargs={'Vector': (capture, 'Attribute')}, - attrs={'axis': 'Z'}) - rotation = nw.new_node(Nodes.RotateEuler, - input_kwargs={'Rotation': rotation, 'Angle': nw.uniform(0, 2 * np.pi)}, - attrs={'type': 'AXIS_ANGLE', 'space': 'LOCAL'}) - rotation = nw.new_node(Nodes.AlignEulerToVector, [rotation, nw.uniform(.2, .5)], attrs={'axis': 'Z'}) - rotation = nw.add(rotation, nw.uniform([-.05] * 3, [.05] * 3)) - - points = surface.eval_argument(nw, points_fn, selected=selected, geometry=capture.outputs['Geometry']) - spikes = nw.new_node(Nodes.InstanceOnPoints, input_kwargs={ - 'Points': points, - 'Instance': spikes, - 'Pick Instance': True, - 'Rotation': rotation, - 'Scale': nw.uniform([.5] * 3, [1.] * 3) - }) - if realize: - realize_instances = nw.new_node(Nodes.RealizeInstances, [spikes]) - else: - realize_instances = spikes - - nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': realize_instances}) - - -def shader_spikes(nw: NodeWrangler): - roughness = .8 - specular = .25 - mix_ratio = .9 - color = hsv2rgba(uniform(.2, .4), uniform(.1, .3), .8) - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, input_kwargs={ - 'Base Color': color, - 'Roughness': roughness, - 'Specular': specular, - 'Subsurface': .1 - }) - transparent_bsdf = nw.new_node(Nodes.TranslucentBSDF, [color]) - mix_rgb = nw.new_node(Nodes.MixShader, [mix_ratio, principled_bsdf, transparent_bsdf]) - return mix_rgb - - -def apply(obj, points_fn, base_radius=.002, realize=True): - spikes = deep_clone_obj(obj) - - if COMBINED_ATTR_NAME in spikes.data.attributes: - spikes.data.attributes.remove(spikes.data.attributes[COMBINED_ATTR_NAME]) - - instances = make_asset_collection(build_spikes, 5, 'spikes', verbose=False, base_radius=base_radius) - mat = surface.shaderfunc_to_material(shader_spikes) - toggle_show(instances) - for o in instances.objects: - assign_material(o, mat) - toggle_hide(instances) - surface.add_geomod(spikes, geo_spikes, apply=realize, input_args=[instances, points_fn, realize], - input_attributes=[None, 'selection']) - butil.delete_collection(instances) - return spikes diff --git a/infinigen/assets/clothes/shirt.py b/infinigen/assets/clothes/shirt.py deleted file mode 100644 index 9c5b1c7ec..000000000 --- a/infinigen/assets/clothes/shirt.py +++ /dev/null @@ -1,71 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
- -# Authors: Lingjie Mei -import bpy -import numpy as np -from numpy.random import uniform - -from infinigen.assets.utils.decorate import read_center, read_normal, remove_faces, subsurf, write_co -from infinigen.assets.utils.draw import remesh_fill -from infinigen.assets.utils.object import new_circle -from infinigen.assets.utils.uv import wrap_front_back -from infinigen.core.placement.factory import AssetFactory -from infinigen.core.util.random import log_uniform -from infinigen.core.util import blender as butil -from infinigen.assets.material_assignments import AssetList -from infinigen.assets.materials.art import ArtFabric - -class ShirtFactory(AssetFactory): - def __init__(self, factory_seed, coarse=False): - super(ShirtFactory, self).__init__(factory_seed, coarse) - self.width = log_uniform(.45, .55) - self.size = self.width + uniform(.25, .3) - self.size_neck = uniform(.1, .15) * self.size - self.type = np.random.choice(['short', 'long']) - match self.type: - case 'short': - self.sleeve_length = self.size / 2 + uniform(-.35, -.3) - case _: - self.sleeve_length = self.size / 2 + uniform(-.05, .0) - self.sleeve_width = uniform(.14, .18) - self.sleeve_angle = uniform(np.pi / 6, np.pi / 4) - self.thickness = log_uniform(.02, .03) - materials = AssetList['ShirtFactory']() - self.surface = materials['surface'].assign_material() - if self.surface == ArtFabric: - self.surface = self.surface(self.factory_seed) - - def create_asset(self, **params) -> bpy.types.Object: - x_anchors = 0, self.width / 2, self.width / 2, self.width / 2 + self.sleeve_length * np.sin( - self.sleeve_angle), self.width / 2 + self.sleeve_length * np.sin( - self.sleeve_angle) + self.sleeve_width * np.cos( - self.sleeve_angle), self.width / 2, self.width / 4, 0 - - y_anchors = 0, 0, self.size - self.sleeve_width / np.sin( - self.sleeve_angle), self.size - self.sleeve_width / np.sin( - self.sleeve_angle) - self.sleeve_length * np.cos( - self.sleeve_angle), self.size - self.sleeve_width / np.sin( - self.sleeve_angle) - self.sleeve_length * np.cos(self.sleeve_angle) + self.sleeve_width * np.sin( - self.sleeve_angle), self.size, self.size + self.size_neck, self.size + self.size_neck * uniform(.3, - .7) - - obj = new_circle(vertices=len(x_anchors)) - with butil.ViewportMode(obj, 'EDIT'): - bpy.ops.mesh.select_all(action='SELECT') - bpy.ops.mesh.edge_face_add() - bpy.ops.mesh.flip_normals() - write_co(obj, np.stack([x_anchors, y_anchors, np.zeros_like(x_anchors)], -1)) - butil.modify_mesh(obj, 'MIRROR', use_axis=(True, False, False)) - remesh_fill(obj, .02) - butil.modify_mesh(obj, 'SOLIDIFY', thickness=self.thickness) - x, y, z = read_center(obj).T - x_, y_, z_ = read_normal(obj).T - remove_faces(obj, (y_ < -.5) | ((y_ > .5) & (x_ * x < 0))) - with butil.ViewportMode(obj, 'EDIT'), butil.Suppress(): - bpy.ops.mesh.select_all(action='SELECT') - bpy.ops.mesh.remove_doubles(threshold=1e-3) - butil.modify_mesh(obj, 'BEVEL', width=self.sleeve_width * uniform(.1, .15)) - subsurf(obj, 1) - wrap_front_back(obj, self.surface) - return obj diff --git a/infinigen/assets/color_fits.py b/infinigen/assets/color_fits.py index a04a3b40f..a4d6ba8ad 100644 --- a/infinigen/assets/color_fits.py +++ b/infinigen/assets/color_fits.py @@ -3,74 +3,69 @@ # Authors: Stamatis Alexandropoulos, Meenal Parakh -import os + import numpy as np -from matplotlib import pyplot as plt -import time from infinigen.core.util.color import hsv2rgba manual_fits = { - 'sofa_fabric': { - 'means': np.array([[0.1, 0.25], [0.5, 0.7], [0.65, 0.15]]), - 'covariances': [ - 
0.7*np.array([[0.01, 0], [0, 0.04]]), - 0.7*np.array([[0.02, 0], [0, 0.02]]), - 0.7*np.array([[0.03, 0], [-0.01, 0.012]]) + "sofa_fabric": { + "means": np.array([[0.1, 0.25], [0.5, 0.7], [0.65, 0.15]]), + "covariances": [ + 0.7 * np.array([[0.01, 0], [0, 0.04]]), + 0.7 * np.array([[0.02, 0], [0, 0.02]]), + 0.7 * np.array([[0.03, 0], [-0.01, 0.012]]), ], - 'probabilities': [0.5, 0.3, 0.2], - 'n_components': 3 + "probabilities": [0.5, 0.3, 0.2], + "n_components": 3, }, - 'sofa_leather': { - 'means': np.array([[0.07, 0.45], [0.6, 0.3]]), - 'covariances': [ - 0.7*np.array([[0.005, 0], [0, 0.09]]), - 0.7*np.array([[0.015, 0], [0, 0.04]]) + "sofa_leather": { + "means": np.array([[0.07, 0.45], [0.6, 0.3]]), + "covariances": [ + 0.7 * np.array([[0.005, 0], [0, 0.09]]), + 0.7 * np.array([[0.015, 0], [0, 0.04]]), ], - 'min_val': [0.04, 0.04], - 'max_val': [0.75, 0.85], - 'probabilities': [0.7, 0.3], - 'n_components': 2 + "min_val": [0.04, 0.04], + "max_val": [0.75, 0.85], + "probabilities": [0.7, 0.3], + "n_components": 2, }, - 'sofa_linen': { - 'means': np.array([[0.12, 0.5], [0.6, 0.4], [0.9, 0.2]]), - 'covariances': [ - 0.7*np.array([[0.01, 0], [0, 0.12]]), - 0.7*np.array([[0.01, 0], [0, 0.09]]), - 0.7*np.array([[0.01, 0], [0, 0.02]]) + "sofa_linen": { + "means": np.array([[0.12, 0.5], [0.6, 0.4], [0.9, 0.2]]), + "covariances": [ + 0.7 * np.array([[0.01, 0], [0, 0.12]]), + 0.7 * np.array([[0.01, 0], [0, 0.09]]), + 0.7 * np.array([[0.01, 0], [0, 0.02]]), ], - 'probabilities': [0.8, 0.15, 0.05], - 'n_components': 3 + "probabilities": [0.8, 0.15, 0.05], + "n_components": 3, }, - 'sofa_velvet': { - 'means': np.array([[0.52, 0.45]]), - 'covariances': [ - np.array([[0.2, 0], [0, 0.2]]) - ], - 'probabilities': [1.0], - 'n_components': 1 + "sofa_velvet": { + "means": np.array([[0.52, 0.45]]), + "covariances": [np.array([[0.2, 0], [0, 0.2]])], + "probabilities": [1.0], + "n_components": 1, }, - 'bedding_sheet': { - 'means': np.array([[0.1, 0.4], [0.6, 0.2]]), - 'covariances': [ - 0.7*np.array([[0.01, 0], [0, 0.1]]), - 0.7*np.array([[0.03, 0], [-0.01, 0.02]]) + "bedding_sheet": { + "means": np.array([[0.1, 0.4], [0.6, 0.2]]), + "covariances": [ + 0.7 * np.array([[0.01, 0], [0, 0.1]]), + 0.7 * np.array([[0.03, 0], [-0.01, 0.02]]), ], - 'probabilities': [0.9, 0.1], - 'n_components': 2 - } + "probabilities": [0.9, 0.1], + "n_components": 2, + }, } val_params = { - 'bedding_sheet': {'min_val': 0.15, 'max_val': 0.94, 'mu': 0.66, 'std': 0.17}, - 'sofa_fabric': {'min_val': 0.10, 'max_val': 0.88, 'mu': 0.47, 'std': 0.23}, - 'sofa_leather': {'min_val': 0.06, 'max_val': 0.93, 'mu': 0.40, 'std': 0.2}, - 'sofa_linen': {'min_val': 0.15, 'max_val': 0.86, 'mu': 0.55, 'std': 0.2}, - 'sofa_velvet': {'min_val': 0.11, 'max_val': 0.70, 'mu': 0.35, 'std': 0.18}, + "bedding_sheet": {"min_val": 0.15, "max_val": 0.94, "mu": 0.66, "std": 0.17}, + "sofa_fabric": {"min_val": 0.10, "max_val": 0.88, "mu": 0.47, "std": 0.23}, + "sofa_leather": {"min_val": 0.06, "max_val": 0.93, "mu": 0.40, "std": 0.2}, + "sofa_linen": {"min_val": 0.15, "max_val": 0.86, "mu": 0.55, "std": 0.2}, + "sofa_velvet": {"min_val": 0.11, "max_val": 0.70, "mu": 0.35, "std": 0.18}, } - def get_val(mu=0.5, std=0.2, min_val=0.1, max_val=0.9): val = np.random.normal(mu, std) val = np.clip(val, min_val, max_val) @@ -79,20 +74,20 @@ def get_val(mu=0.5, std=0.2, min_val=0.1, max_val=0.9): def real_color_distribution(name): params = manual_fits[name] - - num_gaussians = params['n_components'] - idx = np.random.choice(num_gaussians, p=params['probabilities']) - - mu = 
params['means'][idx] - cov = params['covariances'][idx] - + + num_gaussians = params["n_components"] + idx = np.random.choice(num_gaussians, p=params["probabilities"]) + + mu = params["means"][idx] + cov = params["covariances"][idx] + h, s = np.random.multivariate_normal(mu, cov) - min_val = params.get('min_val', 0.0) - max_val = params.get('max_val', 1.0) - + min_val = params.get("min_val", 0.0) + max_val = params.get("max_val", 1.0) + h, s = np.clip([h, s], min_val, max_val) - + v = get_val(**(val_params[name])) * 0.1 rgba = hsv2rgba([h, s, v]) - + return rgba diff --git a/infinigen/assets/corals/__init__.py b/infinigen/assets/corals/__init__.py deleted file mode 100644 index c3d2503e3..000000000 --- a/infinigen/assets/corals/__init__.py +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Lingjie Mei - - -from .diff_growth import DiffGrowthBaseCoralFactory, TableBaseCoralFactory, LeatherBaseCoralFactory -from .generate import CoralFactory, LeatherCoralFactory, TableCoralFactory, CauliflowerCoralFactory, \ - BrainCoralFactory, HoneycombCoralFactory, BushCoralFactory, TwigCoralFactory, TubeCoralFactory, \ - FanCoralFactory, ElkhornCoralFactory, StarCoralFactory -from .laplacian import CauliflowerBaseCoralFactory -from .elkhorn import ElkhornBaseCoralFactory -from .reaction_diffusion import BrainBaseCoralFactory, HoneycombBaseCoralFactory, \ - ReactionDiffusionBaseCoralFactory -from .tree import BushBaseCoralFactory, TreeBaseCoralFactory, TwigBaseCoralFactory -from .tube import TubeBaseCoralFactory -from .fan import FanBaseCoralFactory -from .star import StarBaseCoralFactory diff --git a/infinigen/assets/corals/generate.py b/infinigen/assets/corals/generate.py deleted file mode 100644 index aa476581a..000000000 --- a/infinigen/assets/corals/generate.py +++ /dev/null @@ -1,185 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Lingjie Mei - - -import colorsys - -import bpy -import numpy as np -from numpy.random import uniform - -import infinigen.core.util.blender as butil -from infinigen.core.util.color import hsv2rgba -from infinigen.core.util.random import log_uniform -from .fan import FanBaseCoralFactory -from infinigen.assets.utils.misc import assign_material -from infinigen.assets.utils.object import join_objects -from infinigen.core.util.math import FixedSeed -from infinigen.core.nodes.node_info import Nodes -from infinigen.core.nodes.node_wrangler import NodeWrangler -from infinigen.core.placement.detail import remesh_with_attrs -from infinigen.core.placement.factory import AssetFactory -from infinigen.core import surface -from .base import BaseCoralFactory -from .diff_growth import DiffGrowthBaseCoralFactory, LeatherBaseCoralFactory, TableBaseCoralFactory -from .laplacian import CauliflowerBaseCoralFactory -from .reaction_diffusion import BrainBaseCoralFactory, HoneycombBaseCoralFactory, \ - ReactionDiffusionBaseCoralFactory -from .elkhorn import ElkhornBaseCoralFactory -from .tree import BushBaseCoralFactory, TreeBaseCoralFactory, TwigBaseCoralFactory -from .tube import TubeBaseCoralFactory -from .star import StarBaseCoralFactory -from . 
import tentacles -from infinigen.core.tagging import tag_object, tag_nodegroup -from infinigen.core.nodes.node_utils import build_color_ramp - - -class CoralFactory(AssetFactory): - - def __init__(self, factory_seed, coarse=False, factory_method=None): - super(CoralFactory, self).__init__(factory_seed, coarse) - with FixedSeed(factory_seed): - self.factory_methods = [DiffGrowthBaseCoralFactory, ReactionDiffusionBaseCoralFactory, - TubeBaseCoralFactory, TreeBaseCoralFactory, CauliflowerBaseCoralFactory, - ElkhornBaseCoralFactory, StarBaseCoralFactory] - weights = np.array([.15, .2, .15, .2, .2, .15, .2]) - self.weights = weights / weights.sum() - if factory_method is None: - factory_method = np.random.choice(self.factory_methods, p=self.weights) - self.factory: BaseCoralFactory = factory_method(factory_seed, coarse) - self.base_hue = self.build_base_hue() - self.material = surface.shaderfunc_to_material(self.shader_coral, self.base_hue) - - def create_asset(self, face_size=0.01, realize=True, **params): - obj = self.factory.create_asset(**params) - obj.scale = 2 * np.array(self.factory.default_scale) / max(obj.dimensions[:2]) * uniform(.8, 1.2, 3) - butil.apply_transform(obj) - remesh_with_attrs(obj, face_size) - assign_material(obj, self.material) - - has_bump = uniform(0, 1) < self.factory.bump_prob - if self.factory.noise_strength > 0: - if has_bump: - self.apply_noise_texture(obj) - else: - self.apply_bump(obj) - - tag_object(obj, 'coral') - - if uniform(0, 1) < self.factory.tentacle_prob and not has_bump: - t = tentacles.apply(obj, self.factory.points_fn, self.factory.density, realize, self.base_hue) - obj = join_objects([obj, t]) - - return obj - - def apply_noise_texture(self, obj): - t = np.random.choice(['STUCCI', 'MARBLE']) - texture = bpy.data.textures.new(name='coral', type=t) - texture.noise_scale = log_uniform(.01, .02) - butil.modify_mesh(obj, 'DISPLACE', True, strength=self.factory.noise_strength * uniform(.9, 1.2), - mid_level=0, texture=texture) - - def apply_bump(self, obj): - texture = bpy.data.textures.new(name='coral', type='VORONOI') - texture.noise_scale = log_uniform(.02, .03) - texture.noise_intensity = log_uniform(1.5, 2) - texture.distance_metric = 'MINKOVSKY' - texture.minkovsky_exponent = uniform(1, 1.5) - butil.modify_mesh(obj, 'DISPLACE', True, strength=-self.factory.noise_strength * uniform(1, 2), - mid_level=1, texture=texture) - - @staticmethod - def build_base_hue(): - if uniform(0, 1) < .25: - base_hue = uniform(0, 1) - else: - base_hue = uniform(-.2, .3) % 1 - return base_hue - - @staticmethod - def shader_coral(nw: NodeWrangler, base_hue): - shift = uniform(.05, .1) * (-1) ** np.random.randint(2) - subsurface_color = hsv2rgba(uniform(0, 1), uniform(0, 1), 1.) 
- bright_color = hsv2rgba((base_hue + shift) % 1, uniform(.7, .9), .2) - dark_color = hsv2rgba(base_hue, uniform(.5, .7), .1) - light_color = hsv2rgba((base_hue + uniform(-.2, .2)) % 1, uniform(.2, .4), .4) - specular = uniform(.25, .5) - - color = build_color_ramp(nw, nw.musgrave(uniform(10, 20)), [.0, .3, .7, 1.], - [dark_color, dark_color, bright_color, bright_color]) - color = nw.new_node(Nodes.MixRGB, [ - nw.build_float_curve(nw.musgrave(uniform(10, 20)), [(0, 1), (uniform(.3, .4), 0), (1, 0)]), color, - light_color]) - - noise_texture = nw.new_node(Nodes.NoiseTexture, input_kwargs={'Scale': 50}) - roughness = nw.build_float_curve(noise_texture, [(0, .5), (1, 1.)]) - subsurface_ratio = uniform(0, .05) if uniform(0, 1) > .5 else 0 - subsurface_radius = [uniform(.05, .2)] * 3 - bsdf = nw.new_node(Nodes.PrincipledBSDF, input_kwargs={ - 'Base Color': color, - 'Roughness': roughness, - 'Specular': specular, - 'Subsurface': subsurface_ratio, - 'Subsurface Radius': subsurface_radius, - 'Subsurface Color': subsurface_color, - }) - return bsdf - - -class LeatherCoralFactory(CoralFactory): - def __init__(self, factory_seed, coarse=False): - super(LeatherCoralFactory, self).__init__(factory_seed, coarse, LeatherBaseCoralFactory) - - -class TableCoralFactory(CoralFactory): - def __init__(self, factory_seed, coarse=False): - super(TableCoralFactory, self).__init__(factory_seed, coarse, TableBaseCoralFactory) - - -class CauliflowerCoralFactory(CoralFactory): - def __init__(self, factory_seed, coarse=False): - super(CauliflowerCoralFactory, self).__init__(factory_seed, coarse, CauliflowerBaseCoralFactory) - - -class BrainCoralFactory(CoralFactory): - def __init__(self, factory_seed, coarse=False): - super(BrainCoralFactory, self).__init__(factory_seed, coarse, BrainBaseCoralFactory) - - -class HoneycombCoralFactory(CoralFactory): - def __init__(self, factory_seed, coarse=False): - super(HoneycombCoralFactory, self).__init__(factory_seed, coarse, HoneycombBaseCoralFactory) - - -class BushCoralFactory(CoralFactory): - def __init__(self, factory_seed, coarse=False): - super(BushCoralFactory, self).__init__(factory_seed, coarse, BushBaseCoralFactory) - - -class TwigCoralFactory(CoralFactory): - def __init__(self, factory_seed, coarse=False): - super(TwigCoralFactory, self).__init__(factory_seed, coarse, TwigBaseCoralFactory) - - -class TubeCoralFactory(CoralFactory): - def __init__(self, factory_seed, coarse=False): - super(TubeCoralFactory, self).__init__(factory_seed, coarse, TubeBaseCoralFactory) - - -class FanCoralFactory(CoralFactory): - def __init__(self, factory_seed, coarse=False): - super(FanCoralFactory, self).__init__(factory_seed, coarse, FanBaseCoralFactory) - - -class ElkhornCoralFactory(CoralFactory): - - def __init__(self, factory_seed, coarse=False): - super(ElkhornCoralFactory, self).__init__(factory_seed, coarse, ElkhornBaseCoralFactory) - - -class StarCoralFactory(CoralFactory): - - def __init__(self, factory_seed, coarse=False): - super(StarCoralFactory, self).__init__(factory_seed, coarse, StarBaseCoralFactory) diff --git a/infinigen/assets/corals/star.py b/infinigen/assets/corals/star.py deleted file mode 100644 index 9f5c791f1..000000000 --- a/infinigen/assets/corals/star.py +++ /dev/null @@ -1,129 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
- -# Authors: Lingjie Mei - - -import bpy -import bmesh -import numpy as np -from mathutils import Vector -from numpy.random import uniform - -import infinigen.core.util.blender as butil -from infinigen.assets.corals.base import BaseCoralFactory -from infinigen.assets.utils.decorate import displace_vertices, geo_extension -from infinigen.assets.utils.object import join_objects, new_empty, new_icosphere -from infinigen.core.nodes.node_info import Nodes -from infinigen.core.nodes.node_wrangler import NodeWrangler -from infinigen.core import surface -from infinigen.core.util.blender import deep_clone_obj -from infinigen.core.tagging import tag_object, tag_nodegroup - -class StarBaseCoralFactory(BaseCoralFactory): - tentacle_prob = 1. - noise_strength = .002 - density = 3000 - - @staticmethod - def points_fn(nw: NodeWrangler, points): - points = nw.new_node(Nodes.SeparateGeometry, [points, nw.new_node(Nodes.NamedAttribute, ['outermost'])]) - return points - - def __init__(self, factory_seed, coarse=False): - super(StarBaseCoralFactory, self).__init__(factory_seed, coarse) - self.points_fn = StarBaseCoralFactory.points_fn - - @staticmethod - def geo_dual_mesh(nw: NodeWrangler): - geometry = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketGeometry', 'Geometry', None)]) - perturb = .05 - geometry = nw.new_node(Nodes.SetPosition, - [geometry, None, None, nw.uniform([-perturb] * 3, [perturb] * 3)]) - - geometry = nw.new_node(Nodes.DualMesh, input_kwargs={'Mesh': geometry}) - nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': geometry}) - - @staticmethod - def geo_separate_faces(nw: NodeWrangler): - geometry = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketGeometry', 'Geometry', None)]) - selection = nw.compare('GREATER_THAN', nw.separate(nw.new_node(Nodes.InputPosition))[-1], 0) - geometry = nw.new_node(Nodes.SeparateGeometry, [geometry, selection]) - geometry = nw.new_node(Nodes.SplitEdges, [geometry]) - scale = nw.uniform(.9, 1.2) - geometry = nw.new_node(Nodes.ScaleElements, [geometry, None, scale]) - geometry = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': geometry, 'Name': 'custom_normal', 'Value': nw.new_node(Nodes.InputNormal)}, - attrs={'data_type': 'FLOAT_VECTOR'}) - nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': geometry}) - - @staticmethod - def geo_flower(nw: NodeWrangler, size, resolution, anchor): - geometry = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketGeometry', 'Geometry', None)]) - t = nw.scalar_divide(nw.math('FLOOR', nw.scalar_divide(nw.new_node(Nodes.Index), size)), resolution) - offset = nw.build_float_curve(t, [(0, 0), anchor, (1, 0)], 'AUTO') - normal = nw.new_node(Nodes.NamedAttribute, ['custom_normal'], attrs={'data_type': 'FLOAT_VECTOR'}) - geometry = nw.new_node(Nodes.SetPosition, [geometry, None, None, nw.scale(offset, normal)]) - outer = nw.boolean_math('AND', nw.compare('GREATER_THAN', t, .4), nw.compare('LESS_THAN', t, .6)) - geometry = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': geometry, 'Name': 'outermost', 'Value': outer}) - nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': geometry}) - - def create_asset(self, face_size=0.01, **params): - obj = new_icosphere(subdivisions=3) - obj.location[-1] = uniform(.25, .5) - butil.apply_transform(obj, loc=True) - surface.add_geomod(obj, self.geo_dual_mesh, apply=True) - displace_vertices(obj, lambda x, y, z: (0, 0, -.9 * np.clip(z, None, 0))) - - rings = deep_clone_obj(obj) - levels = 3 - butil.modify_mesh(obj, 'SUBSURF', 
levels=levels, render_levels=levels) - butil.modify_mesh(rings, 'SHRINKWRAP', target=obj) - - surface.add_geomod(rings, self.geo_separate_faces, apply=True) - levels = 3 - butil.modify_mesh(rings, 'SUBSURF', levels=levels, render_levels=levels) - - butil.select_none() - with butil.ViewportMode(rings, 'EDIT'): - bpy.ops.mesh.select_all(action='SELECT') - bpy.ops.mesh.region_to_loop() - bpy.ops.mesh.select_all(action='INVERT') - bpy.ops.mesh.delete(type='VERT') - - flowers = [] - resolution = 16 - - for ring in butil.split_object(rings): - size = len(ring.data.vertices) - center = np.mean([v.co for v in ring.data.vertices], 0) - empty = new_empty(scale=[uniform(.3, .5) ** (1 / resolution)] * 3) - butil.modify_mesh(ring, 'ARRAY', apply=True, use_relative_offset=False, use_object_offset=True, - count=resolution + 1, offset_object=empty) - butil.delete(empty) - - with butil.ViewportMode(ring, 'EDIT'): - bpy.ops.mesh.select_all(action='SELECT') - bpy.ops.mesh.bridge_edge_loops() - - bm = bmesh.from_edit_mesh(ring.data) - bm.verts.ensure_lookup_table() - for i in range(1, resolution + 1): - c = np.mean([v.co for v in bm.verts[i * size:(i + 1) * size]], 0) - for j in range(i * size, (i + 1) * size): - bm.verts[j].co += Vector(center - c) - bmesh.update_edit_mesh(ring.data) - - bpy.ops.mesh.select_all(action='SELECT') - bpy.ops.mesh.region_to_loop() - bpy.ops.mesh.bridge_edge_loops() - - anchor = uniform(.4, .6), uniform(.08, .15) - surface.add_geomod(ring, self.geo_flower, apply=True, input_args=[size, resolution, anchor]) - flowers.append(ring) - - obj = join_objects([obj, *flowers]) - surface.add_geomod(obj, geo_extension, apply=True) - tag_object(obj, 'star_coral') - return obj diff --git a/infinigen/assets/corals/tentacles.py b/infinigen/assets/corals/tentacles.py deleted file mode 100644 index f18bb54a3..000000000 --- a/infinigen/assets/corals/tentacles.py +++ /dev/null @@ -1,117 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
- -# Authors: Lingjie Mei - - -import colorsys -import numpy as np -from numpy.random import uniform - -from infinigen.assets.utils.misc import assign_material, sample_direction -from infinigen.assets.utils.nodegroup import geo_radius -from infinigen.core.util.color import hsv2rgba -from infinigen.core.placement.factory import make_asset_collection -from infinigen.core.nodes.node_wrangler import NodeWrangler, Nodes -from infinigen.core import surface -from infinigen.assets.trees.tree import build_radius_tree -import infinigen.core.util.blender as butil -from infinigen.core.util.blender import deep_clone_obj -from infinigen.core.tagging import tag_object, tag_nodegroup, COMBINED_ATTR_NAME - -def build_tentacles(**kwargs): - n_branch = 5 - n_major = 8 - branch_config = { - 'n': n_branch, - 'path_kargs': lambda idx: {'n_pts': n_major, 'std': .5, 'momentum': .5, 'sz': .008}, - 'spawn_kargs': lambda idx: {'init_vec': sample_direction(.6)}} - - obj = build_radius_tree(None, branch_config, uniform(.002, .004)) - surface.add_geomod(obj, geo_radius, apply=True, input_args=['radius']) - return obj - - -def make_min_distance_points_fn(min_distance): - def points_fn(nw: NodeWrangler, points): - return nw.new_node(Nodes.MergeByDistance, input_kwargs={'Geometry': points, 'Distance': min_distance}) - - return points_fn - - -def make_radius_points_fn(min_distance, radius_threshold): - def points_fn(nw: NodeWrangler, points): - radius = nw.vector_math('DISTANCE', nw.new_node(Nodes.InputPosition), [0] * 3) - points = nw.new_node(Nodes.MergeByDistance, input_kwargs={ - 'Geometry': points, - 'Selection': nw.compare('LESS_THAN', radius, radius_threshold * 1.5), - 'Distance': min_distance * 2}) - points = nw.new_node(Nodes.MergeByDistance, input_kwargs={'Geometry': points, 'Distance': min_distance}) - points = nw.new_node(Nodes.SeparateGeometry, - [points, nw.compare('GREATER_THAN', radius, radius_threshold)]) - return points - - return points_fn - - -def make_upward_points_fn(min_distance, max_angle): - def points_fn(nw: NodeWrangler, points, normal): - points = nw.new_node(Nodes.SeparateGeometry, - [points, nw.compare_direction('LESS_THAN', normal, [0, 0, 1], max_angle)]) - return nw.new_node(Nodes.MergeByDistance, input_kwargs={'Geometry': points, 'Distance': min_distance}) - - return points_fn - - -def geo_tentacles(nw: NodeWrangler, tentacles, points_fn=None, density=500, realize=True): - geometry = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketGeometry', 'Geometry', None)]) - tentacles = nw.new_node(Nodes.CollectionInfo, [tentacles, True, True]) - - points, normal, rotation = nw.new_node(Nodes.DistributePointsOnFaces, - input_kwargs={'Mesh': geometry, 'Density': density}).outputs - rotation = nw.new_node(Nodes.RotateEuler, - input_kwargs={'Rotation': rotation, 'Angle': nw.uniform(0, 2 * np.pi)}, - attrs={'type': 'AXIS_ANGLE', 'space': 'LOCAL'}) - - points = surface.eval_argument(nw, points_fn, points=points, normal=normal) - tentacles = nw.new_node(Nodes.InstanceOnPoints, input_kwargs={ - 'Points': points, - 'Instance': tentacles, - 'Pick Instance': True, - 'Rotation': rotation, - 'Scale': nw.uniform([.6] * 3, [1.] 
* 3, data_type='FLOAT_VECTOR')}) - if realize: - realize_instances = nw.new_node(Nodes.RealizeInstances, input_kwargs={'Geometry': tentacles}) - else: - realize_instances = tentacles - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': realize_instances}) - - -def shader_tentacles(nw: NodeWrangler, base_hue=.3): - roughness = .8 - specular = .25 - color = hsv2rgba((base_hue + uniform(-0.1, 0.1)) % 1, uniform(.4, .6), .5) - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, input_kwargs={ - 'Base Color': color, - 'Roughness': roughness, - 'Specular': specular, - 'Subsurface': .01}) - fresnel_color = hsv2rgba(uniform(0, 1), .6, .6) - fresnel_bdsf = nw.new_node(Nodes.PrincipledBSDF, [fresnel_color]) - mixed_shader = nw.new_node(Nodes.MixShader, [nw.new_node(Nodes.Fresnel), principled_bsdf, fresnel_bdsf]) - return mixed_shader - - -def apply(obj, points_fn, density, realize=True, base_hue=.3): - tentacles = deep_clone_obj(obj) - if COMBINED_ATTR_NAME in tentacles.data.attributes: - tentacles.data.attributes.remove(tentacles.data.attributes[COMBINED_ATTR_NAME]) - - instances = make_asset_collection(build_tentacles, 5, 'spikes', verbose=False) - surface.add_geomod(tentacles, geo_tentacles, apply=realize, - input_args=[instances, points_fn, density, realize]) - - butil.delete_collection(instances) - assign_material(tentacles, surface.shaderfunc_to_material(shader_tentacles, base_hue)) - return tentacles diff --git a/infinigen/assets/corals/tree.py b/infinigen/assets/corals/tree.py deleted file mode 100644 index 09a9bbbff..000000000 --- a/infinigen/assets/corals/tree.py +++ /dev/null @@ -1,156 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
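The `tentacles.py` module removed above exposes `apply(...)` together with several `make_*_points_fn` builders. A minimal, hypothetical usage sketch, assuming the pre-move `infinigen.assets.corals` import path; `decorate_with_tentacles` is an invented wrapper name and the numeric arguments are illustrative only:

```python
# Hypothetical wrapper around the deleted helpers; only the apply() and
# make_upward_points_fn() signatures are taken from the module above.
import numpy as np

from infinigen.assets.corals.tentacles import apply, make_upward_points_fn


def decorate_with_tentacles(coral_obj):
    # Scatter tentacle instances on roughly upward-facing faces,
    # keeping instances at least 2cm apart.
    points_fn = make_upward_points_fn(min_distance=0.02, max_angle=np.pi / 3)
    return apply(coral_obj, points_fn, density=500, realize=True, base_hue=0.3)
```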
- -# Authors: Lingjie Mei - - -import math -import bpy -import numpy as np -from numpy.random import uniform - -from infinigen.assets.corals.base import BaseCoralFactory -from infinigen.assets.corals.tentacles import make_radius_points_fn -from infinigen.assets.utils.object import mesh2obj, data2mesh, separate_loose -from infinigen.assets.utils.nodegroup import geo_radius -import infinigen.core.util.blender as butil -from infinigen.core.placement.detail import remesh_with_attrs -from infinigen.core.util.math import FixedSeed -from infinigen.core import surface -from infinigen.assets.trees.tree import build_radius_tree, recursive_path, FineTreeVertices -from infinigen.core.tagging import tag_object, tag_nodegroup - -class TreeBaseCoralFactory(BaseCoralFactory): - default_scale = [1] * 3 - tentacle_prob = .8 - noise_strength = .01 - - def __init__(self, factory_seed, coarse=False, method=None): - super(TreeBaseCoralFactory, self).__init__(factory_seed, coarse) - self.tip = .4 - self.configs = { - 'twig': {'radius': .08, 'branch_config': self.twig_config}, - 'bush': {'radius': .08, 'branch_config': self.bush_config}} - self.weights = [.5, .5] - with FixedSeed(self.factory_seed): - if method is None: - method = np.random.choice(list(self.configs.keys()), p=self.weights) - self.radius, self.branch_config = map(self.configs[method].get, ['radius', 'branch_config']) - self.points_fn = make_radius_points_fn(.05, .4) - - @property - def bush_config(self): - n_branch = np.random.randint(6, 8) - n_major = np.random.randint(4, 5) - n_minor = np.random.randint(4, 5) - n_detail = np.random.randint(3, 4) - span = uniform(.4, .5) - detail_config = { - 'n': n_minor, - 'path_kargs': lambda idx: { - 'n_pts': n_detail + 1, - 'std': .4, - 'momentum': .6, - 'sz': .01 * (1.5 * n_detail - idx)}, - 'spawn_kargs': lambda idx: { - 'rnd_idx': idx + 1, - 'ang_min': np.pi / 12, - 'ang_max': np.pi / 8, - 'axis2': [0, 0, 1]}, - 'children': []} - minor_config = { - 'n': n_major, - 'path_kargs': lambda idx: { - 'n_pts': n_minor + 1, - 'std': .4, - 'momentum': .4, - 'sz': .03 * (1.2 * n_minor - idx)}, - 'spawn_kargs': lambda idx: { - 'rnd_idx': idx + 1, - 'ang_min': np.pi / 12, - 'ang_max': np.pi / 8, - 'axis2': [0, 0, 1]}, - 'children': [detail_config]} - major_config = { - 'n': n_branch, - 'path_kargs': lambda idx: {'n_pts': n_major + 1, 'std': .4, 'momentum': .4, 'sz': uniform(.08, .1)}, - 'spawn_kargs': lambda idx: { - 'init_vec': [span * np.cos(2 * np.pi * idx / n_branch + uniform(-np.pi / 9, np.pi / 9)), - span * np.sin(2 * np.pi * idx / n_branch + uniform(-np.pi / 9, np.pi / 9)), - math.sqrt(1 - span * span)]}, - 'children': [minor_config]} - return major_config - - @property - def twig_config(self): - n_branch = np.random.randint(6, 8) - n_major = np.random.randint(4, 5) - n_minor = np.random.randint(4, 5) - n_detail = np.random.randint(3, 4) - span = uniform(.7, .8) - detail_config = { - 'n': n_minor, - 'path_kargs': lambda idx: { - 'n_pts': n_detail * 2 + 1, - 'std': .4, - 'momentum': .6, - 'sz': .01 * (2.5 * n_detail - idx)}, - 'spawn_kargs': lambda idx: { - 'rnd_idx': 2 * idx + 1, - 'ang_min': np.pi / 8, - 'ang_max': np.pi / 6, - 'axis2': [0, 0, 1]}, - 'children': []} - minor_config = { - 'n': n_major, - 'path_kargs': lambda idx: { - 'n_pts': n_minor * 2 + 1, - 'std': .4, - 'momentum': .4, - 'sz': .03 * (2.2 * n_minor - idx)}, - 'spawn_kargs': lambda idx: { - 'rnd_idx': 2 * idx + 1, - 'ang_min': np.pi / 8, - 'ang_max': np.pi / 6, - 'axis2': [0, 0, 1]}, - 'children': [detail_config]} - major_config = { - 'n': 
n_branch, - 'path_kargs': lambda idx: { - 'n_pts': n_major * 2 + 1, - 'std': .4, - 'momentum': .4, - 'sz': uniform(.08, .1)}, - 'spawn_kargs': lambda idx: { - 'init_vec': [span * np.cos(2 * np.pi * idx / n_branch + uniform(-np.pi / 9, np.pi / 9)), - span * np.sin(2 * np.pi * idx / n_branch + uniform(-np.pi / 9, np.pi / 9)), - math.sqrt(1 - span * span)]}, - 'children': [minor_config]} - return major_config - - @staticmethod - def radius_fn(base_radius, size, resolution): - radius_decay_root = .85 - radius_decay_leaf = uniform(.4, .6) - radius = base_radius * radius_decay_root ** (np.arange(size * resolution) / resolution) - radius[-resolution:] *= radius_decay_leaf ** (np.arange(resolution) / resolution) - return radius - - def create_asset(self, face_size=0.01, **params) -> bpy.types.Object: - resolution = 16 - obj = build_radius_tree(self.radius_fn, self.branch_config, self.radius, resolution) - obj.scale = 2 * np.array(self.default_scale) / max(obj.dimensions[:2]) - butil.apply_transform(obj) - surface.add_geomod(obj, geo_radius, apply=True, input_args=['radius', 32]) - tag_object(obj, 'tree_coral') - return obj - - -class TwigBaseCoralFactory(TreeBaseCoralFactory): - def __init__(self, factory_seed, coarse=False): - super().__init__(factory_seed, coarse, method='twig') - - -class BushBaseCoralFactory(TreeBaseCoralFactory): - def __init__(self, factory_seed, coarse=False): - super().__init__(factory_seed, coarse, method='bush') diff --git a/infinigen/assets/corals/tube.py b/infinigen/assets/corals/tube.py deleted file mode 100644 index 07115eee4..000000000 --- a/infinigen/assets/corals/tube.py +++ /dev/null @@ -1,79 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
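`TreeBaseCoralFactory` above drives all of its geometry from nested `branch_config` dictionaries fed to `build_radius_tree`. A stripped-down, single-level sketch of that config format; the numeric values are arbitrary, only the key structure mirrors the deleted code, and the pre-move import path is assumed:

```python
import numpy as np
from numpy.random import uniform

from infinigen.assets.trees.tree import build_radius_tree


def single_level_config(n_branch=6, n_pts=8, span=0.45):
    return {
        'n': n_branch,  # branches spawned at this level
        # per-branch random-walk parameters: point count, noise, inertia, step size
        'path_kargs': lambda idx: {'n_pts': n_pts, 'std': .4, 'momentum': .4, 'sz': uniform(.08, .1)},
        # fan the branches evenly around +Z
        'spawn_kargs': lambda idx: {'init_vec': [
            span * np.cos(2 * np.pi * idx / n_branch),
            span * np.sin(2 * np.pi * idx / n_branch),
            np.sqrt(1 - span * span)]},
        'children': [],  # deeper levels nest further configs here
    }


# skeleton = build_radius_tree(None, single_level_config(), uniform(.002, .004))
```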
- -# Authors: Lingjie Mei - - -import bpy -import numpy as np - -from infinigen.assets.corals.base import BaseCoralFactory -from infinigen.assets.corals.tentacles import make_radius_points_fn -import infinigen.core.util.blender as butil -from infinigen.assets.utils.object import new_icosphere -from infinigen.core.nodes.node_info import Nodes -from infinigen.core.nodes.node_wrangler import NodeWrangler -from infinigen.core import surface -from infinigen.core.tagging import tag_object, tag_nodegroup - -class TubeBaseCoralFactory(BaseCoralFactory): - default_scale = [.7] * 3 - - def __init__(self, factory_seed, coarse=False): - super(TubeBaseCoralFactory, self).__init__(factory_seed, coarse) - self.points_fn = make_radius_points_fn(.05, .4) - - def create_asset(self, face_size=0.01, **params) -> bpy.types.Object: - obj = new_icosphere(subdivisions=2) - obj.name = 'tube_coral' - surface.add_geomod(obj, self.geo_coral_tube, apply=True) - butil.modify_mesh(obj, 'BEVEL', True, offset_type='PERCENT', width_pct=10, segments=1) - butil.modify_mesh(obj, 'SOLIDIFY', True, thickness=.05) - butil.modify_mesh(obj, 'SUBSURF', True, levels=2, render_levels=2) - butil.modify_mesh(obj, 'DISPLACE', True, strength=0.1, - texture=bpy.data.textures.new(name='tube_coral', type='STUCCI'), mid_level=0) - tag_object(obj, 'tube_coral') - return obj - - @staticmethod - def geo_coral_tube(nw: NodeWrangler): - ico_sphere_perturb = .2 - growth_z = 1 - short_length_range = .2, .4 - long_length_range = .4, 1.2 - angles = np.linspace(np.pi * 2 / 5, np.pi / 10, 6) - scales = np.linspace(1, .9, 6) - face_perturb = .4 - growth_prob = .75 - seed = np.random.randint(1e3) - ico_sphere = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketGeometry', 'Geometry', None)]) - perturbed_ico_sphere = nw.new_node(Nodes.SetPosition, input_kwargs={ - 'Geometry': ico_sphere, - 'Offset': nw.uniform([-ico_sphere_perturb] * 3, [ico_sphere_perturb] * 3, seed) - }) - mesh = nw.new_node(Nodes.DualMesh, input_kwargs={'Mesh': perturbed_ico_sphere}) - normal = nw.new_node(Nodes.InputNormal) - top = nw.boolean_math('AND', nw.compare_direction('LESS_THAN', normal, (0, 0, 1), angles[0]), - nw.bernoulli(growth_prob, seed)) - - for i, (angle, scale) in enumerate(zip(angles, scales)): - direction = nw.vector_math('NORMALIZE', nw.add( - nw.add(normal, nw.combine(0, 0, nw.uniform(0, growth_z, seed + i))), - nw.uniform([face_perturb] * 3, [-face_perturb] * 3, seed + i))) - length = nw.switch(nw.compare_direction('LESS_THAN', normal, (0, 0, 1), angle), - nw.uniform(*long_length_range, seed + i), - nw.uniform(*short_length_range, seed + i)) - mesh, top = nw.new_node(Nodes.ExtrudeMesh, input_kwargs={ - 'Mesh': mesh, - 'Selection': top, - 'Offset': direction, - 'Offset Scale': length - }).outputs[:2] - mesh = nw.new_node(Nodes.ScaleElements, - input_kwargs={'Geometry': mesh, 'Selection': top, 'Scale': scale}) - - geometry_without_top = nw.new_node(Nodes.DeleteGeometry, - input_kwargs={'Geometry': mesh, 'Selection': top}, - attrs={'domain': 'FACE'}) - - nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': geometry_without_top}) diff --git a/infinigen/assets/creatures/beetle.py b/infinigen/assets/creatures/beetle.py deleted file mode 100644 index 0eb5a1225..000000000 --- a/infinigen/assets/creatures/beetle.py +++ /dev/null @@ -1,201 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
- -# Authors: Alexander Raistrick - - -import bpy - -import logging - -import numpy as np -from numpy.random import uniform as U, normal as N, randint -import gin - -from infinigen.assets.creatures.util import genome -from infinigen.assets.creatures.util.genome import Joint -from infinigen.assets.creatures import parts -from infinigen.assets.creatures.util.creature_util import offset_center -from infinigen.assets.creatures.util import creature, hair as creature_hair, joining -from infinigen.assets.creatures.util.animation import run_cycle as creature_animation -from infinigen.assets.creatures.util.boid_swarm import BoidSwarmFactory - -from infinigen.core.placement.factory import AssetFactory, make_asset_collection -from infinigen.core.util.math import lerp, clip_gaussian, FixedSeed -from infinigen.core import surface -import infinigen.assets.materials.chitin -from infinigen.core.tagging import tag_object, tag_nodegroup - - -logger = logging.getLogger(__name__) - -def insect_hair_params(): - - mat_roughness = U(0.7, 1) - - length = U(0.01, 0.04) - puff = U(0.7, 1) - - return { - 'density': 4000, - 'clump_n': 1, - 'avoid_features_dist': 0.01, - 'grooming': { - 'Length MinMaxScale': np.array((length, length * N(2, 0.5), U(15, 60)), dtype=np.float32), - 'Puff MinMaxScale': np.array((puff, puff * N(3, 0.5), U(15, 60)), dtype=np.float32), - 'Combing': U(0, 0.2), - 'Strand Random Mag': 0.0, - 'Strand Perlin Mag': 0.0, - 'Strand Perlin Scale': U(15, 45), - 'Tuft Spread': 0.0, - 'Tuft Clumping': 0.0, - 'Root Radius': 0.001, - 'Post Clump Noise Mag': 0, - 'Hair Length Pct Min': U(0.7, 1) - }, - 'material': { - 'Roughness': mat_roughness, - 'Radial Roughness': mat_roughness + N(0, 0.07), - 'Random Roughness': 0, - 'IOR': 1.55 - } - } - -def beetle_postprocessing(body_parts, extras, params): - - main_template = surface.registry.sample_registry(params['surface_registry']) - main_template.apply(body_parts) - -def beetle_genome(): - - fac = parts.generic_nurbs.NurbsBody(prefix='body_insect', tags=['body', 'rigid'], var=2) - if U() < 0.5: - n = len(fac.params['proportions']) - noise = U(1, 3, n) - noise[-n//3:] = 1 - fac.params['proportions'] *= noise - - body = genome.part(fac) - - l = fac.params['proportions'].sum () * fac.params['length'] - - leg_fac = parts.leg.InsectLeg() - n_leg_pairs = int(np.clip(l * clip_gaussian(3, 2, 2, 6), 2, 15)) - leg_fac.params['length_rad1_rad2'][0] /= n_leg_pairs / 1.8 - splay = U(30, 60) - for t in np.linspace(0.15, 0.6, n_leg_pairs): - for side in [-1, 1]: - leg = genome.part(leg_fac) - xrot = lerp(70, -100, t) - genome.attach(leg, body, coord=(t, splay/180, 1), - joint=Joint((xrot, 5, 90)), side=side) - - head = genome.part(parts.generic_nurbs.NurbsHead(prefix='head_insect', tags=['head', 'rigid'])) - genome.attach(head, body, coord=(1, 0, 0), joint=Joint((0, -15, 0))) - - if U() < 0.7: - mandible_fac = parts.head_detail.InsectMandible() - rot = np.array((120, 20, 80)) * N(1, 0.15) - for side in [-1, 1]: - genome.attach(genome.part(mandible_fac), head, coord=(0.75, 0.5, 0.1), - joint=Joint(rot), side=side) - - return genome.CreatureGenome( - parts=body, - postprocess_params=dict( - surface_registry=[ - (infinigen.assets.materials.chitin, 1) - ], - hair=insect_hair_params() - ) - ) - -@gin.configurable -class BeetleFactory(AssetFactory): - - def __init__(self, factory_seed=None, bvh=None, coarse=False, animation_mode=None, **kwargs): - super().__init__(factory_seed, coarse) - self.bvh = bvh - self.animation_mode = animation_mode - - def create_asset(self, i, hair=False, 
**kwargs): - genome = beetle_genome() - root, parts = creature.genome_to_creature(genome, name=f'beetle({self.factory_seed}, {i})') - tag_object(root, 'beetle') - offset_center(root) - joined, extras, arma, ik_targets = joining.join_and_rig_parts(root, parts, genome, - rigging=(self.animation_mode is not None), - postprocess_func=beetle_postprocessing, **kwargs) - if self.animation_mode == 'walk_cycle': - creature_animation.animate_run(root, arma, ik_targets, steps_per_sec=N(2, 0.2)) - if hair and U() < 0.5: - creature_hair.configure_hair(joined, root, genome.postprocess_params['hair']) - return root - -class AntSwarmFactory(BoidSwarmFactory): - - def ant_swarm_settings(self, mode=None): - - boids_settings = dict( - use_flight = False, - use_land = True, - use_climb = True, - - land_speed_max = U(0.5, 2), - land_acc_max = U(0.7, 1), - land_personal_space = 0.05, - land_jump_speed = U(0, 0.05), - - bank = 0, - pitch = 0, - - rule_fuzzy = U(0.6, 0.9) - ) - - if mode is None: - mode = np.random.choice(['queues', 'goal_swarm', 'random_swarm']) - logger.debug(f'Randomly chose ant_swarm_settings {mode=}') - - if mode == 'queues': - boids_settings['rules'] = [ - dict(type='FOLLOW_LEADER', use_line=True, queue_count=100, distance=0.0), - ] - elif mode == 'goal_swarm': - boids_settings['rules'] = [ - dict(type='SEPARATE'), - dict(type='GOAL', use_predict=True), - dict(type='FLOCK') - ] - elif mode == 'random_swarm': - boids_settings['rules'] = [ - dict(type='SEPARATE'), - dict(type='AVERAGE_SPEED'), - dict(type='FLOCK') - ] - else: - raise ValueError(f'Unrecognized {mode=}') - - return dict( - particle_size=U(0.02, 0.1), - size_random=U(0.3, 0.7), - - use_rotation_instance=True, - lifetime=bpy.context.scene.frame_end - bpy.context.scene.frame_start, - warmup_frames=1, emit_duration=0, # all particles appear immediately - emit_from='VOLUME', - mass = 2, - use_multiply_size_mass=True, - boids_settings=boids_settings - ) - - def __init__(self, factory_seed, bvh, coarse=False): - with FixedSeed(factory_seed): - settings = self.ant_swarm_settings() - col = make_asset_collection(BeetleFactory(factory_seed=randint(1e7), animation_mode='walk_cycle'), n=1) - super(AntSwarmFactory, self).__init__( - factory_seed, child_col=col, - collider_col=bpy.data.collections.get('colliders'), - settings=settings, bvh=bvh, - volume=N(0.1, 0.015), - coarse=coarse - ) \ No newline at end of file diff --git a/infinigen/assets/creatures/bird.py b/infinigen/assets/creatures/bird.py deleted file mode 100644 index 885d2b11f..000000000 --- a/infinigen/assets/creatures/bird.py +++ /dev/null @@ -1,338 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
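`beetle_genome` above illustrates the general creature-genome idiom: build part factories, then `attach` child parts to a parent at a `(lengthwise, splay, radius)` coordinate with a rest `Joint`. A condensed sketch with arbitrary counts and angles; only the call structure is taken from the deleted code:

```python
import numpy as np

import infinigen.assets.materials.chitin
from infinigen.assets.creatures import parts
from infinigen.assets.creatures.util import genome
from infinigen.assets.creatures.util.genome import Joint


def minimal_insect_genome():
    body = genome.part(parts.generic_nurbs.NurbsBody(
        prefix='body_insect', tags=['body', 'rigid'], var=2))
    leg_fac = parts.leg.InsectLeg()
    for t in np.linspace(0.2, 0.6, 3):  # three leg pairs along the body
        for side in [-1, 1]:            # mirrored left/right
            genome.attach(genome.part(leg_fac), body,
                          coord=(t, 0.25, 1),  # (lengthwise pct, splay, radius pct)
                          joint=Joint((0, 5, 90)), side=side)
    return genome.CreatureGenome(
        parts=body,
        postprocess_params=dict(surface_registry=[(infinigen.assets.materials.chitin, 1)]))
```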
- -# Authors: -# - Alexander Raistrick: regular bird, hair params -# - Beining Han: adapt to create flying bird - - -import pdb -import bpy -import mathutils - -import numpy as np -from numpy.random import normal as N, uniform as U -import gin - -from infinigen.assets.creatures.util import genome -from infinigen.assets.creatures.util.genome import Joint -from infinigen.assets.creatures import parts - -from infinigen.assets.creatures.util.creature_util import euler - -import infinigen.assets.materials.basic_bsdf -import infinigen.assets.materials.spot_sparse_attr -import infinigen.assets.materials.reptile_brown_circle_attr -import infinigen.assets.materials.reptile_two_color_attr -import infinigen.assets.materials.bird - -from infinigen.assets.materials import bone, tongue, eyeball, beak -from infinigen.core.util.math import clip_gaussian, FixedSeed -from infinigen.core.util.random import random_general as rg -from infinigen.core.util import blender as butil -from infinigen.core import surface - -from infinigen.core.placement.factory import AssetFactory -from infinigen.assets.creatures.util.creature_util import offset_center -from infinigen.assets.creatures.util import creature, hair as creature_hair, joining -from infinigen.assets.creatures.util.animation.driver_wiggle import animate_wiggle_bones -from infinigen.assets.creatures.util.animation import idle, run_cycle -from infinigen.core.tagging import tag_object, tag_nodegroup - -from infinigen.core.placement import animation_policy - -def bird_hair_params(flying=True): - - length = U(0.01, 0.025) if flying else U(0.03, 0.06) - puff = U(0.03, 0.2) - - return { - 'density': 70000, - 'clump_n': 10, - 'avoid_features_dist': 0.02, - 'grooming': { - 'Length MinMaxScale': np.array((length, length * N(2, 0.5), U(15, 60)), dtype=np.float32), - 'Puff MinMaxScale': np.array((puff, puff * N(1.5, 0.5), U(15, 60)), dtype=np.float32), - 'Combing': U(0.6, 1), - 'Strand Random Mag': 0.0, - 'Strand Perlin Mag': U(0, 0.003), - 'Strand Perlin Scale': 30.0, - 'Tuft Spread': 0.01, - 'Tuft Clumping': U(0.5, 1), - 'Root Radius': 0.006, - 'Post Clump Noise Mag': 0.001, - 'Hair Length Pct Min': U(0.5, 0.9) - }, - 'material': { - 'Roughness': U(0, 0.4), - 'Radial Roughness': U(0.1, 0.3), - 'Random Roughness': U(0, 0.2), - 'IOR': 1.55 - } - } - - -def bird_postprocessing(body_parts, extras, params): - - get_extras = lambda k: [o for o in extras if k in o.name] - - main_template = surface.registry.sample_registry(params['surface_registry']) - main_template.apply(body_parts + get_extras('BodyExtra') + get_extras('Feather')) - - tongue.apply(get_extras('Tongue')) - bone.apply(get_extras('Teeth') + get_extras('Claws')) - eyeball.apply(get_extras('Eyeball'), shader_kwargs={"coord": "X"}) - beak.apply(get_extras('Beak')) - -def duck_genome(mode): - body_lrr = np.array((0.85, 0.25, 0.38)) * N(1, 0.2) * N(1, 0.2, 3) - body_fac = parts.generic_nurbs.NurbsBody(prefix='body_bird', tags=['body', 'rigid'], var=U(0.3, 1)) - body = genome.part(body_fac) - l = body_fac.params['length'][0] - - tail = genome.part(parts.wings.BirdTail()) - genome.attach(tail, body, coord=(0.2, 1, 0.5), joint=Joint(rest=(0, 170 * N(1, 0.1), 0))) - - shoulder_bounds = np.array([[-20, -20, -20], [20, 20, 20]]) - foot_fac = parts.foot.Foot({ - 'length_rad1_rad2': np.array((l * 0.1, 0.025, 0.04)) * N(1, 0.1) * N(1, 0.1, 3), - 'Toe Length Rad1 Rad2': np.array((l * N(0.4, 0.07), 0.03, 0.02)) * N(1, 0.1) * N(1, 0.1, 3), - 'Toe Splay': 35 * N(1, 0.2), - 'Toebean Radius': 0.03 * N(1, 0.1), - 'Toe Rotate': (0., 
-1.57, 0.), - 'Claw Curl Deg': 12 * N(1, 0.2), - 'Claw Pct Length Rad1 Rad2': np.array((0.13, 0.64, 0.05)) * N(1, 0.1) * N(1, 0.1, 3), - 'Thumb Pct': np.array((0.61, 1.17, 1.5)) * N(1, 0.1) * N(1, 0.1, 3), - 'Toe Curl Scalar': 0.34 * N(1, 0.2) - }, bald=True) - - leg_fac = parts.leg.BirdLeg({'length_rad1_rad2': (l * 0.5 * N(1, 0.05), 0.09 * N(1, 0.1), 0.06 * N(1, 0.1))}) - leg_coord = (N(0.5, 0.05), N(0.7, 0.05), N(0.95, 0.05)) - for side in [-1, 1]: - leg = genome.attach(genome.part(foot_fac), genome.part(leg_fac), coord=(0.9, 0, 0), joint=Joint(rest=(0,0,0))) - genome.attach(leg, body, coord=leg_coord, joint=Joint(rest=(0, 90, 0), bounds=shoulder_bounds), side=side) - - extension = U(0.7, 1) if mode == 'flying' else U(0.01, 0.1) - wing_len = l * 0.5 * clip_gaussian(1.2, 0.7, 0.5, 2.5) - wing_fac = parts.wings.BirdWing({ - 'length_rad1_rad2': np.array((wing_len, 0.1 * N(1, 0.1), 0.02 * N(1, 0.2))), - 'Extension': extension - }) - - - wing_coord = (N(0.7, 0.02), 110/180 * N(1, 0.1), 0.95) - if wing_fac.params['Extension'] > 0.5: - wing_rot = (90, 0, 90) - else: - wing_rot = (90, 40, 90) - for side in [-1, 1]: - wing = genome.part(wing_fac) - genome.attach(wing, body, coord=wing_coord, joint=Joint(rest=wing_rot), side=side) - - head_fac = parts.head.BirdHead() - head = genome.part(head_fac) - - beak = genome.part(parts.beak.BirdBeak()) - genome.attach(beak, head, coord=(0.75, 0, 0.5), joint=Joint(rest=(0, 0, 0))) - - eye_fac = parts.eye.MammalEye({'Radius': N(0.03, 0.005)}) - t, splay = U(0.6, 0.85), U(80, 110)/180 - r = 0.85 - rot = np.array([0, 0, 90]) * N(1, 0.1, 3) - for side in [-1, 1]: - eye = genome.part(eye_fac) - genome.attach(eye, head, coord=(t, splay, r), joint=Joint(rest=(0,0,0)), rotation_basis='normal', side=side) - - genome.attach(head, body, coord=(1, 0, 0), joint=Joint(rest=(0, 0, 0))) - - return genome.CreatureGenome( - parts=body, - postprocess_params=dict( - animation=dict(), - hair=bird_hair_params(flying=False), - surface_registry=[ - (infinigen.assets.materials.spot_sparse_attr, 4), - (infinigen.assets.materials.reptile_brown_circle_attr, 0.5), - (infinigen.assets.materials.reptile_two_color_attr, 0.5), - (infinigen.assets.materials.bird, 5) - ] - ) - ) - -def flying_bird_genome(mode): - - body_lrr = np.array((0.95, 0.13, 0.18)) * N(1.0, 0.05, size=(3,)) - body = genome.part(parts.body.BirdBody({'length_rad1_rad2': body_lrr})) - l = body_lrr[0] - - tail = genome.part(parts.wings.FlyingBirdTail()) - genome.attach(tail, body, coord=(U(0.08, 0.15), 1, 0.5), joint=Joint(rest=(0, 180 * N(1, 0.1), 0))) - - shoulder_bounds = np.array([[-20, -20, -20], [20, 20, 20]]) - foot_fac = parts.foot.Foot({ - 'length_rad1_rad2': np.array((l * 0.2, 0.01, 0.02)) * N(1, 0.1, 3), - 'Toe Length Rad1 Rad2': np.array((l * N(0.4, 0.02), 0.02, 0.01)) * N(1, 0.1) * N(1, 0.1, 3), - 'Toe Splay': 8 * N(1, 0.2), - 'Toe Rotate': (0., -N(0.55, 0.1), 0.), - 'Toebean Radius': 0.01 * N(1, 0.1), - 'Claw Curl Deg': 12 * N(1, 0.2), - 'Claw Pct Length Rad1 Rad2': np.array((0.13, 0.64, 0.05)) * N(0.5, 0.05) * N(1, 0.1, 3), - 'Thumb Pct': np.array((0.4, 0.5, 0.75)) * N(1, 0.1) * N(1, 0.1, 3), - 'Toe Curl Scalar': 0.34 * N(1, 0.2) - }, bald=True) - - leg_fac = parts.leg.BirdLeg({'length_rad1_rad2': (l * 0.5 * N(1, 0.05), 0.04 * N(1, 0.1), 0.02 * N(1, 0.1)), - 'Thigh Rad1 Rad2 Fullness': np.array((0.12, 0.04, 1.26)) * N(1, 0.1, 3), - 'Shin Rad1 Rad2 Fullness': np.array((0.1, 0.04, 5.0)) * N(1, 0.1, 3)}) - leg_coord = (N(0.5, 0.05), N(0.2, 0.04), N(0.8, 0.05)) - for side in [-1, 1]: - leg = 
genome.attach(genome.part(foot_fac), genome.part(leg_fac), coord=(0.9, 0, 0), joint=Joint(rest=(0, 0, 0))) - genome.attach(leg, body, coord=leg_coord, joint=Joint(rest=(0, U(135, 175), 0), bounds=shoulder_bounds), side=side) - - extension = U(0.8, 1) - wing_len = l * clip_gaussian(1.0, 0.2, 0.6, 1.5) * 0.8 - wing_fac = parts.wings.FlyingBirdWing({ - 'length_rad1_rad2': np.array((wing_len, U(0.08, 0.15), 0.02 * N(1, 0.2))), - 'Extension': extension, - 'feather_density': U(25, 40) - }) - - wing_coord = (N(0.68, 0.02), 150 / 180 * N(1, 0.1), 0.8) - if wing_fac.params['Extension'] > 0.5: - wing_rot = (90, 0, 90) - else: - wing_rot = (90, 40, 90) - for side in [-1, 1]: - wing = genome.part(wing_fac) - genome.attach(wing, body, coord=wing_coord, joint=Joint(rest=wing_rot), side=side) - - head_fac = parts.head.FlyingBirdHead() - head = genome.part(head_fac) - - beak = genome.part(parts.beak.FlyingBirdBeak()) - genome.attach(beak, head, coord=(0.85, 0, 0.5), joint=Joint(rest=(0, 0, 0))) - - eye_fac = parts.eye.MammalEye({'Radius': N(0.02, 0.005)}) - t, splay = U(0.7, 0.85), U(80, 110) / 180 - r = 0.85 - rot = np.array([0, 0, 90]) * N(1, 0.1, 3) - for side in [-1, 1]: - eye = genome.part(eye_fac) - genome.attach(eye, head, coord=(t, splay, r), joint=Joint(rest=(0, 0, 0)), rotation_basis='normal', side=side) - - genome.attach(head, body, coord=(U(0.84, 0.85), 0, U(1.05, 1.15)), joint=Joint(rest=(0, N(18, 5), 0))) - - return genome.CreatureGenome( - parts=body, - postprocess_params=dict( - animation=dict(), - hair=bird_hair_params(flying=True), - surface_registry=[ - #(infinigen.assets.materials.spot_sparse_attr, 4), - #(infinigen.assets.materials.reptile_brown_circle_attr, 0.5), - #(infinigen.assets.materials.reptile_two_color_attr, 0.5), - (infinigen.assets.materials.bird, 5) - ] - ) - ) - -@gin.configurable -class BirdFactory(AssetFactory): - - def __init__(self, factory_seed=None, coarse=False, bvh=None, animation_mode=None, **kwargs): - super().__init__(factory_seed, coarse) - self.bvh = bvh - self.animation_mode = animation_mode - - def create_asset(self, i, placeholder, hair=True, **kwargs): - - dynamic = self.animation_mode is not None - - genome = duck_genome(mode=self.animation_mode) - root, parts = creature.genome_to_creature(genome, name=f'bird({self.factory_seed}, {i})') - tag_object(root, 'bird') - offset_center(root) - joined, extras, arma, ik_targets = joining.join_and_rig_parts(root, parts, genome, - rigging=dynamic, - postprocess_func=bird_postprocessing, **kwargs) - - joined_extras = butil.join_objects(extras) - joined_extras.parent = joined - - butil.parent_to(root, placeholder, no_inverse=True) - - if hair: - creature_hair.configure_hair(joined, root, genome.postprocess_params['hair']) - if dynamic: - if self.animation_mode == 'run': - run_cycle.animate_run(root, arma, ik_targets) - elif self.animation_mode == 'idle': - idle.snap_iks_to_floor(ik_targets, self.bvh) - idle.idle_body_noise_drivers(ik_targets, wing_mag=U(0, 0.3)) - elif self.animation_mode == 'swim': - spine = [b for b in arma.pose.bones if 'Body' in b.name] - tail = [b for b in arma.pose.bones if 'Tail' in b.name] - animate_wiggle_bones(arma=arma, bones=tail, mag_deg=U(0, 30), freq=U(0.5, 2)) - else: - raise ValueError(f'Unrecognized mode {self.animation_mode=}') - return root - -@gin.configurable -class FlyingBirdFactory(AssetFactory): - - max_expected_radius = 1 - max_distance = 40 - - def __init__(self, factory_seed=None, coarse=False, bvh=None, animation_mode=None, altitude=("uniform", 15, 30)): - 
super().__init__(factory_seed, coarse) - self.animation_mode = animation_mode - self.altitude = altitude - self.bvh = bvh - with FixedSeed(factory_seed): - self.policy = animation_policy.AnimPolicyRandomForwardWalk( - forward_vec=(1, 0, 0), speed=U(7, 15), - step_range=(5, 40), yaw_dist=("normal", 0, 15)) - - def create_placeholder(self, i, loc, rot): - - p = butil.spawn_cube(size=3) - p.name = f"{self}.create_placeholder({i})" - p.location = loc - p.rotation_euler = rot - - if self.bvh is None: - return p - - altitude = rg(self.altitude) - p.location.z += altitude - curve = animation_policy.policy_create_bezier_path(p, self.bvh, self.policy, retry_rotation=True, max_full_retries=30, fatal=True) - curve.name = f'animhelper:{self}.create_placeholder({i}).path' - - # animate the placeholder to the APPROX location of the snake, so the camera can follow itcurve.location = (0, 0, 0) - run_cycle.follow_path(p, curve, use_curve_follow=True, offset=0, - duration=bpy.context.scene.frame_end-bpy.context.scene.frame_start) - p.rotation_euler.z += np.pi / 2 - curve.data.twist_mode = 'Z_UP' - curve.data.driver_add('eval_time').driver.expression = 'frame' - - return p - - def create_asset(self, i, placeholder, hair=True, animate=False,**kwargs): - - genome = flying_bird_genome(self.animation_mode) - root, parts = creature.genome_to_creature(genome, name=f'flying_bird({self.factory_seed}, {i})') - joined, extras, arma, ik_targets = joining.join_and_rig_parts(root, parts, genome, - rigging=self.animation_mode is not None, postprocess_func=bird_postprocessing, **kwargs) - - joined_extras = butil.join_objects(extras) - joined_extras.parent = joined - - if hair: - creature_hair.configure_hair(joined, root, genome.postprocess_params['hair']) - if self.animation_mode is not None: - if self.animation_mode == 'idle': - idle.idle_body_noise_drivers(ik_targets, body_mag=0.0, foot_motion_chance=1.0, head_benddown=0) - else: - raise ValueError(f'Unrecognized {self.animation_mode=}') - - return root \ No newline at end of file diff --git a/infinigen/assets/creatures/carnivore.py b/infinigen/assets/creatures/carnivore.py deleted file mode 100644 index 7b84135c1..000000000 --- a/infinigen/assets/creatures/carnivore.py +++ /dev/null @@ -1,246 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
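`FlyingBirdFactory.create_placeholder` above shows the placeholder-path idiom: spawn a proxy cube, plan a random forward walk over the scene BVH, and animate the proxy along the resulting curve. A condensed sketch assuming `bvh` is a prebuilt terrain BVH tree; the wrapper name and constants are illustrative:

```python
import bpy

from infinigen.assets.creatures.util.animation import run_cycle
from infinigen.core.placement import animation_policy
from infinigen.core.util import blender as butil


def make_flying_placeholder(bvh, altitude=20.0):
    p = butil.spawn_cube(size=3)
    p.location.z += altitude
    policy = animation_policy.AnimPolicyRandomForwardWalk(
        forward_vec=(1, 0, 0), speed=10, step_range=(5, 40), yaw_dist=("normal", 0, 15))
    # Plan a bezier path over the terrain, then make the placeholder follow it.
    curve = animation_policy.policy_create_bezier_path(
        p, bvh, policy, retry_rotation=True, max_full_retries=30, fatal=True)
    run_cycle.follow_path(
        p, curve, use_curve_follow=True, offset=0,
        duration=bpy.context.scene.frame_end - bpy.context.scene.frame_start)
    return p
```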
- -# Authors: Alexander Raistrick - -import numpy as np -from numpy.random import uniform as U, normal as N -import gin -import bpy, mathutils - -from infinigen.assets.creatures.util import genome -from infinigen.assets.creatures.util.genome import Joint -from infinigen.assets.creatures import parts -from infinigen.core.util.math import clip_gaussian - -import infinigen.assets.materials.tiger_attr -import infinigen.assets.materials.giraffe_attr -import infinigen.assets.materials.spot_sparse_attr -from infinigen.core import surface - -from infinigen.assets.materials import bone, tongue, eyeball, nose - -from infinigen.core.placement.factory import AssetFactory -from infinigen.assets.creatures.util.creature_util import offset_center -from infinigen.assets.creatures.util import creature, joining -from infinigen.assets.creatures.util import hair as creature_hair, cloth_sim -from infinigen.assets.creatures.util.animation import idle, run_cycle - -from infinigen.core.tagging import tag_object, tag_nodegroup - -from infinigen.core.util import blender as butil - - -def tiger_hair_params(): - - mat_roughness = U(0.4, 0.7) - - length = clip_gaussian(0.022, 0.03, 0.01, 0.1) - puff = U(0.14, 0.4) - - return { - 'density': 500000, - 'clump_n': np.random.randint(5, 70), - 'avoid_features_dist': 0.01, - 'grooming': { - 'Length MinMaxScale': np.array((length, length * N(2, 0.5), U(15, 60)), dtype=np.float32), - 'Puff MinMaxScale': np.array((puff, puff * N(3, 0.5), U(15, 60)), dtype=np.float32), - 'Combing': U(0.7, 1), - 'Strand Random Mag': 0.0, - 'Strand Perlin Mag': U(0, 0.006), - 'Strand Perlin Scale': U(15, 45), - 'Tuft Spread': N(0.01, 0.002), - 'Tuft Clumping': U(0.2, 0.8), - 'Root Radius': 0.001, - 'Post Clump Noise Mag': 0.0005 * N(1, 0.15), - 'Hair Length Pct Min': U(0.5, 0.9) - }, - 'material': { - 'Roughness': mat_roughness, - 'Radial Roughness': mat_roughness + N(0, 0.07), - 'Random Roughness': 0, - 'IOR': 1.55 - } - } - -def tiger_skin_sim_params(): - return { - 'bending_stiffness_max': 450.0, - 'compression_stiffness_max': 80.0, - 'goal_spring': 0.8, - 'pin_stiffness': 1, - 'shear_stiffness': 15.0, - 'shear_stiffness_max': 80.0, - 'tension_stiffness_max': 80.0, - 'uniform_pressure_force': 5.0, - 'use_pressure': True, - } - -def tiger_postprocessing(body_parts, extras, params): - - get_extras = lambda k: [o for o in extras if k in o.name] - - main_template = surface.registry.sample_registry(params['surface_registry']) - main_template.apply(body_parts + get_extras('BodyExtra')) - - tongue.apply(get_extras('Tongue')) - bone.apply(get_extras('Teeth') + get_extras('Claws')) - eyeball.apply(get_extras('Eyeball'), shader_kwargs={"coord": "X"}) - nose.apply(get_extras('Nose')) - -def tiger_genome(): - - body_fac = parts.generic_nurbs.NurbsBody(prefix='body_feline', tags=['body'], var=0.7, temperature=0.2) - body_fac.params['thetas'][-3] *= N(1, 0.1) - body = genome.part(body_fac) - - tail = genome.part(parts.tail.Tail()) - genome.attach(tail, body, coord=(0.07, 1, 1), joint=Joint(rest=(N(0, 10), 180, 0))) - - if U() < 0.5: - - head_length_rad1_rad2 = np.array((0.36, 0.20, 0.18)) * N(1, 0.1, 3) - head_fac = parts.head.CarnivoreHead({'length_rad1_rad2': head_length_rad1_rad2}) - head = genome.part(head_fac) - - jaw_pct = np.array((1.05, 0.55, 0.5)) - jaw = genome.part(parts.head.CarnivoreJaw({'length_rad1_rad2': head_length_rad1_rad2 * jaw_pct})) - genome.attach(jaw, head, coord=(0.2 * N(1, 0.1), 0, 0.35 * N(1, 0.1)), joint=Joint(rest=(0, U(10, 35), 0), pose=(0,0,0))) - - else: - head_fac = 
parts.generic_nurbs.NurbsHead(prefix='head_carnivore', tags=['head'], var=0.5) - head = genome.part(head_fac) - - headl = head_fac.params['length'][0] - head_length_rad1_rad2 = np.array((headl, 0.20, 0.18)) * N(1, 0.1, 3) - - jaw_pct = np.array((0.7, 0.55, 0.5)) - jaw = genome.part(parts.head.CarnivoreJaw({'length_rad1_rad2': head_length_rad1_rad2 * jaw_pct})) - genome.attach(jaw, head, coord=(0.12, 0, 0.3 * N(1, 0.1)), joint=Joint(rest=(0, U(10, 35), 0), pose=(0,0,0))) - - eye_fac = parts.eye.MammalEye({'Radius': N(0.027, 0.009)}) - eye_t, splay = U(0.61, 0.64), U(90, 140)/180 - r = U(0.8, 0.9) - rot = np.array([0, 0, 0]) - for side in [-1, 1]: - eye = genome.part(eye_fac) - genome.attach(eye, head, coord=(eye_t, splay, r), joint=Joint(rest=rot), rotation_basis='normal', side=side) - - nose = genome.part(parts.head_detail.CatNose()) - genome.attach(nose, head, coord=(U(0.9, 0.96), 1, U(0.5, 0.7)), joint=Joint(rest=(0, 20, 0))) - - ear_fac = parts.head_detail.CatEar() - t, splay = N(0.33, 0.07), U(100, 150)/180 - rot = np.array([-20, -10, -23]) + N(0, 4, 3) - for side in [-1, 1]: - ear = genome.part(ear_fac) - genome.attach(ear, head, coord=(t, splay, 1), joint=Joint(rest=rot), rotation_basis='normal', side=side) - - neck_t = 0.7 - shoulder_bounds = np.array([[-20, -20, -20], [20, 20, 20]]) - splay = clip_gaussian(130, 7, 90, 130)/180 - shoulder_t = clip_gaussian(0.12, 0.05, 0.08, 0.12) - params = {'length_rad1_rad2': np.array((1.6, 0.1, 0.05)) * N(1, (0.15, 0.05, 0.05), 3)} - - foot_fac = parts.foot.Foot() - backleg_fac = parts.leg.QuadrupedBackLeg(params=params) - for side in [-1, 1]: - back_leg = genome.attach(genome.part(foot_fac), genome.part(backleg_fac), coord=(0.9, 0, 0), joint=Joint(rest=(0, 0, 0))) - genome.attach(back_leg, body, coord=(shoulder_t, splay, 1.2), - joint=Joint(rest=(0, 90, 0), bounds=shoulder_bounds), - rotation_basis='global', side=side)#, smooth_rad=0.06)#, bridge_rad=0.1) - - frontleg_fac = parts.leg.QuadrupedFrontLeg(params=params) - for side in [-1, 1]: - front_leg = genome.attach(genome.part(foot_fac), genome.part(frontleg_fac), coord=(0.9, 0, 0), joint=Joint(rest=(0, 0, 0))) - genome.attach(front_leg, body, coord=(neck_t - shoulder_t, splay, 0.8), - joint=Joint(rest=(0, 90, 0)), rotation_basis='global', side=side)#, smooth_rad=0.06)#, bridge_rad=0.1) - - #neck_lrr = np.array((body_lrr[0], body_lrr[-1], body_lrr[-1])) * np.array((0.45, 0.5, 0.25)) * N(1, 0.05, 3) - #neck = genome.part(parts.head.Neck({'length_rad1_rad2': neck_lrr})) - genome.attach(head, body, coord=(N(0.97, 0.01), 0, 0), - joint=Joint(rest=(0, N(20, 5), 0)), - rotation_basis='global')#, bridge_rad=0.1) - #genome.attach(neck, body, coord=(0.8, 0, 0.1), joint=Joint(rest=(0, -N(15, 2), 0))) - - return genome.CreatureGenome( - parts=body, - postprocess_params=dict( - hair=tiger_hair_params(), - skin=tiger_skin_sim_params(), - surface_registry=[ - (infinigen.assets.materials.tiger_attr, 3), - (infinigen.assets.materials.giraffe_attr, 0.2), - (infinigen.assets.materials.spot_sparse_attr, 2) - ] - ) - ) - -@gin.configurable -class CarnivoreFactory(AssetFactory): - - def __init__( - self, - factory_seed=None, - bvh: mathutils.bvhtree.BVHTree = None, - coarse: bool = False, - animation_mode: str = None, - hair: bool = True, - clothsim_skin: bool = False, - **kwargs - ): - super().__init__(factory_seed, coarse) - self.bvh = bvh - self.animation_mode = animation_mode - self.hair = hair - self.clothsim_skin = clothsim_skin - - if self.hair and (self.animation_mode is not None or self.clothsim_skin): - 
raise NotImplementedError( - 'Dynamic hair is not yet fully working. ' - 'Please disable either hair or both of animation/clothsim' - ) - - def create_placeholder(self, **kwargs): - return butil.spawn_cube(size=4) - - def create_asset(self, i, placeholder, **kwargs): - - genome = tiger_genome() - root, parts = creature.genome_to_creature(genome, name=f'carnivore({self.factory_seed}, {i})') - # tag_object(root, 'carnivore') - offset_center(root) - - dynamic = self.animation_mode is not None - - joined, extras, arma, ik_targets = joining.join_and_rig_parts( - root, parts, genome, rigging=dynamic, - postprocess_func=tiger_postprocessing, **kwargs) - - butil.parent_to(root, placeholder, no_inverse=True) - - if self.hair: - creature_hair.configure_hair( - joined, - root, - genome.postprocess_params['hair'], - is_dynamic=dynamic - ) - - if dynamic: - if self.animation_mode == 'run': - run_cycle.animate_run(root, arma, ik_targets) - elif self.animation_mode == 'idle': - idle.snap_iks_to_floor(ik_targets, self.bvh) - idle.idle_body_noise_drivers(ik_targets) - elif self.animation_mode == 'tpose': - pass - else: - raise ValueError(f'Unrecognized mode {self.animation_mode=}') - if self.clothsim_skin: - rigidity = surface.write_vertex_group( - joined, cloth_sim.local_pos_rigity_mask, apply=True) - cloth_sim.bake_cloth(joined, genome.postprocess_params['skin'], - attributes=dict(vertex_group_mass=rigidity)) - - return root \ No newline at end of file diff --git a/infinigen/assets/creatures/crustacean.py b/infinigen/assets/creatures/crustacean.py deleted file mode 100644 index 09f9e7f1b..000000000 --- a/infinigen/assets/creatures/crustacean.py +++ /dev/null @@ -1,337 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
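The `*_postprocessing` functions above share one idiom: sample a material module from a weighted registry, then apply it to the joined body parts. A condensed sketch; the module/weight pairs are copied from the deleted `tiger_genome`, and `apply_body_material` is an invented name:

```python
import infinigen.assets.materials.spot_sparse_attr
import infinigen.assets.materials.tiger_attr
from infinigen.core import surface


def apply_body_material(body_parts):
    registry = [
        (infinigen.assets.materials.tiger_attr, 3),
        (infinigen.assets.materials.spot_sparse_attr, 2),
    ]
    template = surface.registry.sample_registry(registry)  # weighted random pick
    template.apply(body_parts)
```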
- -# Authors: Lingjie Mei - - -import colorsys -from collections import defaultdict - -import bpy -import gin -import numpy as np -from numpy.random import uniform - -from infinigen.assets.creatures.util.creature import genome_to_creature -from infinigen.assets.creatures.util.joining import join_and_rig_parts -from infinigen.assets.creatures.util.genome import CreatureGenome, Joint, attach, part -from infinigen.assets.creatures.parts.crustacean.antenna import LobsterAntennaFactory, SpinyLobsterAntennaFactory -from infinigen.assets.creatures.parts.crustacean.claw import CrabClawFactory, LobsterClawFactory -from infinigen.assets.creatures.parts.crustacean.eye import CrustaceanEyeFactory -from infinigen.assets.creatures.parts.crustacean.fin import CrustaceanFinFactory -from infinigen.assets.creatures.parts.crustacean.leg import CrabLegFactory, LobsterLegFactory -from infinigen.assets.creatures.parts.crustacean.body import CrabBodyFactory, LobsterBodyFactory -from infinigen.assets.creatures.parts.crustacean.tail import CrustaceanTailFactory -from infinigen.assets.utils.decorate import read_material_index, write_material_index -from infinigen.assets.utils.misc import assign_material -from infinigen.core.util.color import hsv2rgba -from infinigen.core.util.random import log_uniform -from infinigen.core.nodes.node_info import Nodes -from infinigen.core.nodes.node_wrangler import NodeWrangler -from infinigen.core.placement.factory import AssetFactory -from infinigen.core.surface import read_attr_data, shaderfunc_to_material -from infinigen.core.util import blender as butil -from infinigen.core.util.math import FixedSeed -from infinigen.core.nodes.node_utils import build_color_ramp - -n_legs = 4 -n_limbs = 5 -n_side_fin = 2 - - -def crustacean_genome(sp): - body_fac = sp['body_fn']() - obj = part(body_fac) - # Add legs - leg_x_length = sp['leg_x_length'](body_fac.params) - leg_x_lengths = np.sort(uniform(.6, 1, 4))[:: -1] * leg_x_length - leg_angle = sp['leg_angle'] - x_legs = sp['x_legs'] - leg_joints_x, leg_joints_y, leg_joints_z = sp['leg_joint'] - - shared_leg_params = ['bottom_flat', 'bottom_cutoff'] - leg_fn = sp['leg_fn'] - leg_params = {k: v for k, v in leg_fn().params.items() if k in shared_leg_params} - leg_fac = [leg_fn({**leg_params, 'x_length': leg_x_lengths[i]}) for i in range(n_legs)] - for i in range(n_legs): - for side in [1, -1]: - attach(part(leg_fac[i]), obj, (x_legs[i + 1], leg_angle, .99), - Joint((leg_joints_x[i], leg_joints_y[i], leg_joints_z[i])), side=side) - # Add claws - claw_angle = sp['claw_angle'] - claw_fn = sp['claw_fn'] - claw_fac = claw_fn({'x_length': sp['claw_x_length'](body_fac.params)}) - - for side in [1, -1]: - attach(part(claw_fac), obj, (x_legs[0] + sp['x_claw_offset'], claw_angle, .99), Joint(sp['claw_joint']), - side=side) - # Add tails - tail_fac = sp['tail_fn'] - if tail_fac is not None: - shared_params = ['bottom_shift', 'bottom_cutoff', 'top_shift', 'top_cutoff', 'y_length', 'z_length'] - tail_fac = tail_fac({**{k: v for k, v in body_fac.params.items() if k in shared_params}, - 'x_length': sp['tail_x_length'](body_fac.params), - }) - tail = part(tail_fac) - attach(tail, obj, (0, 0, 0), Joint((0, 0, 180))) - fin_fn = sp['fin_fn'] - if fin_fn is not None: - fin_fn = sp['fin_fn'] - x_fins = sp['x_fins'] - fin_joints_x, fin_joints_y, fin_joints_z = sp['fin_joints'] - fin_x_length = sp['fin_x_length'](body_fac.params) - fin_x_lengths = np.sort(uniform(.6, 1, 4))[:: -1] * fin_x_length - fin_fac = [fin_fn({'x_length': fin_x_lengths[i]}) for i in 
range(n_side_fin + 1)] - - for i in range(n_side_fin): - for side in [1, -1]: - attach(part(fin_fac[i]), tail, (x_fins[i], .5, .99), - Joint((fin_joints_x[i], fin_joints_y[i], fin_joints_z[i])), side=side) - attach(part(fin_fac[-1]), tail, (.99, .5, .9), Joint((0, 0, 0))) - - # Add eyes - x_eye = sp['x_eye'] - eye_angle = sp['eye_angle'] - eye_joint_x, eye_joint_y, eye_joint_z = sp['eye_joint'] - eye_fac = CrustaceanEyeFactory() - for side in [1, -1]: - attach(part(eye_fac), obj, (x_eye, eye_angle, .99), Joint((eye_joint_x, eye_joint_y, eye_joint_z)), - side=side) - # Add antenna - antenna_fn = sp['antenna_fn'] - if antenna_fn is not None: - x_antenna = sp['x_antenna'] - antenna_angle = sp['antenna_angle'] - antenna_fac = antenna_fn({'x_length': sp['antenna_x_length'](body_fac.params)}) - for side in [1, -1]: - attach(part(antenna_fac), obj, (x_antenna, antenna_angle, .99), Joint(sp['antenna_joint']), - side=side) - - anim_params = {k: v for k, v in sp.items() if 'curl' in k or 'rot' in k} - anim_params['freq'] = sp['freq'] - postprocess_params = dict(material={'base_hue': sp['base_hue']}, anim=anim_params) - return CreatureGenome(obj, postprocess_params) - - -def build_base_hue(): - if uniform(0, 1) < .6: - return uniform(0, .05) - else: - return uniform(.4, .45) - - -def shader_crustacean(nw: NodeWrangler, params): - value_shift = log_uniform(2, 10) - base_hue = params['base_hue'] - bright_color = hsv2rgba(base_hue, uniform(.8, 1.), log_uniform(.02, .05) * value_shift) - dark_color = hsv2rgba((base_hue + uniform(-.05, .05)) % 1, uniform(.8, 1.), - log_uniform(.01, .02) * value_shift) - light_color = hsv2rgba(base_hue, uniform(.0, .4), log_uniform(.2, 1.)) - specular = uniform(.6, .8) - specular_tint = uniform(0, 1) - clearcoat = uniform(.2, .8) - roughness = uniform(.1, .3) - metallic = uniform(.6, .8) - x, y, z = nw.separate(nw.new_node(Nodes.NewGeometry).outputs['Position']) - color = build_color_ramp(nw, nw.new_node(Nodes.MapRange, [ - nw.new_node(Nodes.MusgraveTexture, [nw.combine(x, nw.math('ABSOLUTE', y), z)], - input_kwargs={'Scale': log_uniform(5, 8)}), -1, 1, 0, 1]), [.0, .3, .7, 1.], - [bright_color, bright_color, dark_color, dark_color], ) - ratio = nw.new_node(Nodes.Attribute, attrs={'attribute_name': 'ratio'}).outputs['Fac'] - color = nw.new_node(Nodes.MixRGB, [ratio, light_color, color]) - bsdf = nw.new_node(Nodes.PrincipledBSDF, input_kwargs={ - 'Base Color': color, - 'Metallic': metallic, - 'Roughness': roughness, - 'Specular': specular, - 'Specular Tint': specular_tint, - 'Clearcoat': clearcoat - }) - return bsdf - - -def shader_eye(nw: NodeWrangler): - return nw.new_node(Nodes.PrincipledBSDF, input_kwargs={'Base Color': (0.1, 0.1, 0.1, 1), 'Specular': 0}) - - -def crustacean_postprocessing(body_parts, extras, params): - tag_list = ['body', 'claw', 'leg'] - materials = [shaderfunc_to_material(shader_crustacean, params['material']) for _, t in enumerate(tag_list)] - tag_list.append('eye') - materials.append(shaderfunc_to_material(shader_eye)) - assign_material(body_parts + extras, materials) - - for part in body_parts: - material_indices = read_material_index(part) - for i, tag_name in enumerate(tag_list): - if f'tag_{tag_name}' in part.data.attributes.keys(): - part.data.attributes.active = part.data.attributes[f'tag_{tag_name}'] - with butil.SelectObjects(part): - bpy.ops.geometry.attribute_convert(domain='FACE') - has_tag = read_attr_data(part, f'tag_{tag_name}', 'FACE') - material_indices[np.nonzero(has_tag)[0]] = i - write_material_index(part, material_indices) - for 
extra in extras: - material_indices = read_material_index(extra) - material_indices.fill(tag_list.index('claw')) - write_material_index(extra, material_indices) - - -def animate_crustacean_move(arma, params): - groups = defaultdict(list) - for bone in arma.pose.bones.values(): - groups[(bone.bone['factory_class'], bone.bone['index'])].append(bone) - for (factory_name, part_id), bones in groups.items(): - eval(factory_name).animate_bones(arma, bones, params) - - -@gin.configurable -class CrustaceanFactory(AssetFactory): - max_expected_radius = 1 - max_distance = 40 - - def __init__(self, factory_seed, coarse=False, **_): - super().__init__(factory_seed, coarse) - with FixedSeed(factory_seed): - self.species_params = { - 'lobster': self.lobster_params, - 'crab': self.crab_params, - 'spiny_lobster': self.spiny_lobster_params - } - self.species = np.random.choice(list(self.species_params.keys())) - - def create_asset(self, i, animate=True, rigging=True, cloth=False, **kwargs): - genome = crustacean_genome(self.species_params[self.species]()) - root, parts = genome_to_creature(genome, name=f'crustacean({self.factory_seed}, {i})') - for p in parts: - if p.obj.name.split("=")[-1] == "CrustaceanEyeFactor": - assign_material(p.obj, shaderfunc_to_material(shader_eye)) - joined, extras, arma, ik_targets = join_and_rig_parts(root, parts, genome, - postprocess_func=crustacean_postprocessing, - rigging=rigging, min_remesh_size=.005, - face_size=kwargs['face_size'], - roll='GLOBAL_POS_Z') - if animate and arma is not None: - animate_crustacean_move(arma, genome.postprocess_params['anim']) - else: - butil.join_objects([joined] + extras) - return root - - def crab_params(self): - base_leg_curl = uniform(-np.pi * .15, np.pi * .15) - return { - 'body_fn': CrabBodyFactory, - 'leg_fn': CrabLegFactory, - 'claw_fn': CrabClawFactory, - 'tail_fn': None, - 'antenna_fn': None, - 'fin_fn': None, - 'leg_x_length': lambda p: p['y_length'] * log_uniform(2., 3.), - 'claw_x_length': lambda p: p['y_length'] * log_uniform(1.5, 1.8), - 'tail_x_length': lambda p: 0, - 'antenna_x_length': lambda p: 0, - 'fin_x_length': lambda p: 0, - 'x_legs': (np.linspace(uniform(.08, .1), uniform(.55, .6), n_limbs) + np.arange(n_limbs) * .02)[ - ::-1], - 'leg_angle': uniform(.42, .44), - 'leg_joint': ( - np.sort(uniform(-5, 5, n_legs))[::1 if uniform(0, 1) > .5 else -1], np.sort(uniform(0, 10, n_legs)), - np.sort(uniform(65, 105, n_legs) + uniform(-8, 8)) + np.arange(n_legs) * 2), - 'x_claw_offset': uniform(.08, .1), - 'claw_angle': uniform(.44, .46), - 'claw_joint': (uniform(-50, -40), uniform(-20, 20), uniform(10, 20)), - 'x_eye': uniform(.92, .96), - 'eye_angle': uniform(.8, .85), - 'eye_joint': (0, uniform(-60, -0), uniform(10, 70)), - 'x_antenna': 0, - 'antenna_angle': 0, - 'antenna_joint': (0, 0, 0), - 'x_fins': 0, - 'fin_joints': ([0] * n_side_fin, [0] * n_side_fin, [0] * n_side_fin), - 'leg_rot': (uniform(np.pi * .8, np.pi * 1.1), 0, 0), - 'leg_curl': ( - (-np.pi * 1.1, -np.pi * .7), 0, (base_leg_curl - np.pi * .02, base_leg_curl + np.pi * .02)), - 'claw_curl': ((-np.pi * .2, np.pi * .1), 0, (-np.pi * .1, np.pi * .1)), - 'claw_lower_curl': ((-np.pi * .1, np.pi * .1), 0, 0), - 'tail_curl': (0, 0, 0), - 'antenna_curl': (0, 0, 0), - 'base_hue': build_base_hue(), - 'freq': 1 / log_uniform(100, 200), - } - - def lobster_params(self): - base_leg_curl = uniform(-np.pi * .4, np.pi * .4) - return { - 'body_fn': LobsterBodyFactory, - 'leg_fn': LobsterLegFactory, - 'claw_fn': LobsterClawFactory, - 'tail_fn': CrustaceanTailFactory, - 
'antenna_fn': LobsterAntennaFactory, - 'fin_fn': CrustaceanFinFactory, - 'leg_x_length': lambda p: p['x_length'] * log_uniform(.6, .8), - 'claw_x_length': lambda p: p['x_length'] * log_uniform(1.2, 1.5), - 'tail_x_length': lambda p: p['x_length'] * log_uniform(1.2, 1.8), - 'antenna_x_length': lambda p: p['x_length'] * log_uniform(1.6, 3.), - 'fin_x_length': lambda p: p['y_length'] * log_uniform(1.2, 2.5), - 'x_legs': (np.linspace(.05, uniform(.2, .25), n_limbs) + np.arange(n_limbs) * .02)[::-1], - 'leg_angle': uniform(.3, .35), - 'leg_joint': ( - uniform(-5, 5, n_legs), uniform(0, 10, n_legs), np.sort(uniform(95, 110, n_legs) + uniform(-8, 8))), - 'x_claw_offset': uniform(.08, .1), - 'claw_angle': uniform(.4, .5), - 'claw_joint': (uniform(-80, -70), uniform(-10, 10), uniform(10, 20)), - 'x_eye': uniform(.8, .88), - 'eye_angle': uniform(.8, .85), - 'eye_joint': (0, uniform(-60, -0), uniform(10, 70)), - 'x_antenna': uniform(.76, .8), - 'antenna_angle': uniform(.6, .7), - 'antenna_joint': (uniform(70, 110), uniform(-40, -30), uniform(20, 40)), - 'x_fins': np.sort(uniform(.85, .95, n_side_fin)), - 'fin_joints': ( - np.sort(uniform(0, 30, n_side_fin))[::1 if uniform(0, 1) < .5 else -1], [0] * n_side_fin, - np.sort(uniform(10, 30, n_side_fin))), - 'leg_rot': (uniform(np.pi * .8, np.pi * 1.1), 0, 0), - 'leg_curl': ( - (-np.pi * 1.1, -np.pi * .7), 0, (base_leg_curl - np.pi * .02, base_leg_curl + np.pi * .02)), - 'claw_curl': ((-np.pi * .1, np.pi * .2), 0, 0), - 'claw_lower_curl': ((-np.pi * .1, np.pi * .1), 0, 0), - 'tail_curl': ((-np.pi * .6, 0), 0, 0), - 'antenna_curl': ((np.pi * .1, np.pi * .3), 0, (0, np.pi * .8)), - 'base_hue': build_base_hue(), - 'freq': 1 / log_uniform(400, 500), - } - - def spiny_lobster_params(self): - lobster_params = self.lobster_params() - leg_joint_x, leg_joint_y, leg_joint_z = lobster_params['leg_joint'] - leg_joint_z_min = np.min(leg_joint_z) + uniform(-10, -5) - return {**lobster_params, - 'antenna_fn': SpinyLobsterAntennaFactory, - 'claw_fn': LobsterLegFactory, - 'claw_x_length': lobster_params['leg_x_length'], - 'claw_angle': lobster_params['leg_angle'], - 'claw_joint': (uniform(10, 40), uniform(0, 10), leg_joint_z_min), - 'x_antenna': uniform(.7, .75), - 'antenna_angle': uniform(.4, .5), - } - - -@gin.configurable -class CrabFactory(CrustaceanFactory): - def __init__(self, factory_seed, coarse=False, **_): - super().__init__(factory_seed, coarse) - self.species = 'crab' - - -@gin.configurable -class LobsterFactory(CrustaceanFactory): - def __init__(self, factory_seed, coarse=False, **_): - super().__init__(factory_seed, coarse) - self.species = 'lobster' - - -@gin.configurable -class SpinyLobsterFactory(CrustaceanFactory): - def __init__(self, factory_seed, coarse=False, **_): - super().__init__(factory_seed, coarse) - self.species = 'spiny_lobster' diff --git a/infinigen/assets/creatures/fish.py b/infinigen/assets/creatures/fish.py deleted file mode 100644 index de37a396f..000000000 --- a/infinigen/assets/creatures/fish.py +++ /dev/null @@ -1,338 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
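The crustacean module removed above also exposes species-specific subclasses (`CrabFactory`, `LobsterFactory`, `SpinyLobsterFactory`). A hypothetical standalone usage sketch, assuming the pre-move import path and an active Blender session; note that `create_asset` reads `face_size` from its kwargs:

```python
from infinigen.assets.creatures.crustacean import CrabFactory

# Must be run inside Blender with Infinigen installed.
factory = CrabFactory(factory_seed=0)
crab = factory.create_asset(i=0, face_size=0.01)  # face_size is required downstream
crab.location.x += 3  # offset for inspection
```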
- -# Authors: -# - Alexander Raistrick: FishSchoolFactory, basic version of FishFactory, anim & simulation -# - Mingzhe Wang: Fin placement - - -from collections import defaultdict - -import bpy -import gin -import numpy as np -from numpy.random import uniform as U, normal as N, randint - -import infinigen.assets.materials.scale -import infinigen.assets.materials.fishbody -from infinigen.assets.materials import fishfin, eyeball -from infinigen.core import surface -from infinigen.assets.materials.utils.surface_utils import sample_range - -from infinigen.core.placement.factory import AssetFactory, make_asset_collection - -from infinigen.assets.creatures.util import genome -from infinigen.assets.creatures.util.genome import Joint -from infinigen.assets.creatures import parts -from infinigen.assets.creatures.util import creature, joining -from infinigen.assets.creatures.util import cloth_sim -from infinigen.assets.creatures.util.boid_swarm import BoidSwarmFactory - -from infinigen.core.util import blender as butil -from infinigen.core.util.math import clip_gaussian, FixedSeed -from infinigen.assets.creatures.util.animation.driver_wiggle import animate_wiggle_bones -from infinigen.assets.creatures.util.creature_util import offset_center - -from infinigen.core.tagging import tag_object, tag_nodegroup - -from infinigen.assets.materials import fish_eye_shader - -def fin_params(scale=(1, 1, 1), dorsal=False): - # scale = np.array((0.2, 1, 0.4)) * np.array((l / l_mean, 1, rad/r_mean)) * np.array(scale) - noise = np.array( - (clip_gaussian(1, 0.1, 0.8, 1.2), 1, 0.8 * clip_gaussian(1, 0.1, 0.8, 1.2))) - scale *= noise - scale = scale.astype(np.float32) - if dorsal: - #if U() < 0.8: - # for dorsal fins, change the shape via RoundWeight - RoundWeight = sample_range(0.8, 1) - RoundingWeight = 1 - #else: - # RoundWeight = sample_range(0.4, 0.5) - # RoundingWeight = sample_range(0.04, 0.06) - AffineZ = sample_range(0, 0.1) - OffsetWeightZ = sample_range(0.6, 1) - OffsetWeightY = 1 - Freq = U(100, 150) - else: - RoundWeight = 1 - RoundingWeight = sample_range(0.02, 0.07) - AffineZ = sample_range(0.8, 1.2) - OffsetWeightZ = sample_range(0.05, 0.2) - OffsetWeightY = sample_range(0.2, 1) - Freq = U(60, 80) - - return { - 'FinScale': scale, - 'RoundWeight': RoundWeight, - 'RoundingWeight': RoundingWeight, - 'AffineZ': AffineZ, - 'OffsetWeightZ': OffsetWeightZ, - 'OffsetWeightY': OffsetWeightY, - 'Freq': Freq - } - -def fish_postprocessing(body_parts, extras, params): - - get_extras = lambda k: [o for o in extras if k in o.name] - main_template = surface.registry.sample_registry(params['surface_registry']) - main_template.apply(body_parts + get_extras('BodyExtra')) - - mat = body_parts[0].active_material - gold = (mat is not None and 'gold' in mat.name) - body_parts[0].active_material.name.lower() or U() < 0.1 - fishfin.apply(get_extras('Fin'), shader_kwargs={'goldfish': gold }) - - fish_eye_shader.apply(get_extras('Eyeball')) - #eyeball.apply(get_extras('Eyeball'), shader_kwargs={"coord": "X"}) - -def fish_fin_cloth_sim_params(): - - res = dict( - compression_stiffness= 1200, - tension_stiffness = 1200, - shear_stiffness = 1200, - bending_stiffness = 3000, - - tension_damping=100, - compression_damping=100, - shear_damping=100, - bending_damping=100, - - air_damping = 5, - mass = 0.3, - ) - - for k, v in res.items(): - res[k] = clip_gaussian(1, 0.2, 0.2, 3) * v - - return res - -def fish_genome(): - - temp_dict = defaultdict(lambda: 0.1, {'body_fish_eel': 0.01, 'body_fish_puffer': 0.001}) - body = 
genome.part(parts.generic_nurbs.NurbsBody( - prefix='body_fish', tags=['body'], var=U(0.3, 1), - temperature=temp_dict, - shoulder_ik_ts=[0.0, 0.3, 0.6, 1.0], - n_bones=15, - rig_reverse_skeleton=True - )) - - if U() < 0.9: - n_dorsal = 1 #if U() < 0.6 else randint(1, 4) - coord = (U(0.3, 0.45), 1, 0.7) - for i in range(n_dorsal): - dorsal_fin = parts.ridged_fin.FishFin(fin_params((U(0.4, 0.6), 0.5, 0.2), dorsal=True), rig=False) - genome.attach(genome.part(dorsal_fin), body, coord=coord, joint=Joint(rest=(0, -100, 0))) - - rot = lambda r: np.array((20, r, -205)) + N(0, 7, 3) - - if U() < 0.8: - pectoral_fin = parts.ridged_fin.FishFin(fin_params((0.1, 0.5, 0.3))) - coord = (U(0.65, 0.8), U(55, 65) / 180, .9) - for side in [-1, 1]: - genome.attach(genome.part(pectoral_fin), body, coord=coord, - joint=Joint(rest=rot(-13)), side=side) - - if U() < 0.8: - pelvic_fin = parts.ridged_fin.FishFin(fin_params((0.08, 0.5, 0.25))) - coord = (U(0.5, 0.65), U(8, 15)/180, .8) - for side in [-1, 1]: - genome.attach(genome.part(pelvic_fin), body, coord=coord, joint=Joint(rest=rot(28)), side=side) - - if U() < 0.8: - hind_fin = parts.ridged_fin.FishFin(fin_params((0.1, 0.5, 0.3))) - coord = (U(0.2, 0.3), N(36, 5)/180, .9) - for side in [-1, 1]: - genome.attach(genome.part(hind_fin), body, coord=coord, joint=Joint(rest=rot(28)), side=side) - - angle = U(140, 170) - tail_fin = parts.ridged_fin.FishFin(fin_params((0.12, 0.5, 0.35)), rig=False) - for vdir in [-1, 1]: - genome.attach(genome.part(tail_fin), body, coord=(0.05, 0, 0), joint=Joint((0, -angle * vdir, 0))) - - eye_fac = parts.eye.MammalEye({'Eyelids': False, 'Radius': N(0.036, 0.01)}) - coord = (0.9, 0.6, 0.9) - for side in [-1, 1]: - genome.attach(genome.part(eye_fac), body, coord=coord, - joint=Joint(rest=(0,0,0)), side=side, rotation_basis='normal') - - if U() < 0: - jaw = genome.part(parts.head.CarnivoreJaw({'length_rad1_rad2': (0.2, 0.1, 0.06)})) - genome.attach(jaw, body, coord=(0.8, 0, 0.7), joint=Joint(rest=(0, U(-30, -80), 0)), rotation_basis="normal") - - return genome.CreatureGenome( - parts=body, - postprocess_params=dict( - cloth=fish_fin_cloth_sim_params(), - anim=fish_swim_params(), - surface_registry=[ - (infinigen.assets.materials.fishbody, 3), - #(infinigen.assets.materials.scale, 1), - ] - ) - ) - -def fish_swim_params(): - swim_freq = 3 * clip_gaussian(1, 0.3, 0.1, 2) - swim_mag = N(20, 3) - return dict( - swim_mag=swim_mag, - swim_freq=swim_freq, - flipper_freq = 3 * clip_gaussian(1, 0.5, 0.1, 3) * swim_freq, - flipper_mag = 0.35 * N(1, 0.1) * swim_mag, - flipper_var = U(0, 0.2), - ) - -def animate_fish_swim(arma, params): - - spine = [b for b in arma.pose.bones if 'Body' in b.name] - fin_bones = [b for b in arma.pose.bones if 'extra_bone(Fin' in b.name] - - global_offset = U(0, 1000) # so swimming animations dont sync across fish - animate_wiggle_bones( - arma=arma, bones=spine, - off=global_offset, - mag_deg=params['swim_mag'], freq=params['swim_freq'], wavelength=U(0.5, 2)) - v = params['flipper_var'] - for b in fin_bones: - animate_wiggle_bones( - arma=arma, bones=[b], off=global_offset+U(0, 1), - mag_deg=params['flipper_mag']*N(1, v), - freq=params['flipper_mag']*N(1, v)) - -def simulate_fish_cloth(joined, extras, cloth_params, rigidity='cloth_pin_rigidity'): - - for e in [joined] + extras: - assert e.type == 'MESH' - if 'Fin' in e.name: - assert rigidity in e.data.attributes - else: - surface.write_attribute(joined, lambda nw: 1, data_type='FLOAT', - name=rigidity, apply=True) - joined = butil.join_objects([joined] + extras) 
- - cloth_sim.bake_cloth(joined, settings=cloth_params, - attributes=dict(vertex_group_mass=rigidity)) - - return joined - -@gin.configurable -class FishFactory(AssetFactory): - - max_distance = 40 - - def __init__( - self, - factory_seed=None, - bvh=None, - coarse=False, - animation_mode=None, - species_variety=None, - clothsim_skin: bool = False, - **_ - ): - super().__init__(factory_seed, coarse) - self.bvh = bvh - self.animation_mode = animation_mode - self.clothsim_skin = clothsim_skin - - with FixedSeed(factory_seed): - self.species_genome = fish_genome() - self.species_variety = species_variety if species_variety is not None else clip_gaussian(0.2, 0.1, 0.05, 0.45) - - def create_asset(self, i, **kwargs): - - instance_genome = genome.interp_genome(self.species_genome, fish_genome(), self.species_variety) - - root, parts = creature.genome_to_creature(instance_genome, name=f'fish({self.factory_seed}, {i})') - offset_center(root, x=True, z=False) - - # Force material consistency across a whole species of fish - # TODO: Replace once Generator class is stnadardized - def seeded_fish_postprocess(*args, **kwargs): - with FixedSeed(self.factory_seed): - fish_postprocessing(*args, **kwargs) - - joined, extras, arma, ik_targets = joining.join_and_rig_parts( - root, parts, instance_genome, rigging=(self.animation_mode is not None), rig_before_subdiv=True, - postprocess_func=seeded_fish_postprocess, adapt_mode='subdivide', **kwargs) - if self.animation_mode is not None and arma is not None: - if self.animation_mode == 'idle' or self.animation_mode == 'roam': - animate_fish_swim(arma, instance_genome.postprocess_params['anim']) - else: - raise ValueError(f'Unrecognized {self.animation_mode=}') - - if self.clothsim_skin: - joined = simulate_fish_cloth(joined, extras, instance_genome.postprocess_params['cloth']) - else: - joined = butil.join_objects([joined] + extras) - joined.parent = root - - tag_object(root, 'fish') - - return root - - -class FishSchoolFactory(BoidSwarmFactory): - - @gin.configurable - def fish_school_params(self): - - boids_settings = dict( - use_flight = True, - use_land = False, - use_climb = False, - - rules = [ - dict(type='SEPARATE'), - dict(type='GOAL'), - dict(type='FLOCK'), - ], - - air_speed_max = U(5, 10), - air_acc_max = U(0.7, 1), - air_personal_space = U(0.15, 2), - bank = 0, # fish dont tip over / roll - pitch = 0.4, # - rule_fuzzy = U(0.6, 0.9) - ) - - return dict( - particle_size=U(0.3, 1), - size_random=U(0.1, 0.7), - - use_rotation_instance=True, - - lifetime=bpy.context.scene.frame_end - bpy.context.scene.frame_start, - warmup_frames=1, emit_duration=0, # all particles appear immediately - emit_from='VOLUME', - mass = 2, - use_multiply_size_mass=True, - effect_gravity=0, - - boids_settings=boids_settings - ) - - def __init__(self, factory_seed, bvh=None, coarse=False): - with FixedSeed(factory_seed): - settings = self.fish_school_params() - col = make_asset_collection(FishFactory(factory_seed=randint(1e7), animation_mode='idle'), n=3) - super().__init__( - factory_seed, child_col=col, - collider_col=bpy.data.collections.get('colliders'), - settings=settings, bvh=bvh, - volume=("uniform", 3, 10), - coarse=coarse - ) - -if __name__ == "__main__": - import os - for i in range(3): - factory = FishFactory(i) - root = factory.create_asset(i) - root.location[0] = i * 3 - - bpy.ops.wm.save_as_mainfile(filepath=os.path.join(os.path.abspath(os.curdir), "dev_fish5.blend")) \ No newline at end of file diff --git a/infinigen/assets/creatures/herbivore.py 
b/infinigen/assets/creatures/herbivore.py deleted file mode 100644 index 074220f3c..000000000 --- a/infinigen/assets/creatures/herbivore.py +++ /dev/null @@ -1,250 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Alexander Raistrick - - -from collections import defaultdict -import bpy, mathutils - -import gin -import numpy as np -from numpy.random import uniform as U, normal as N - -from infinigen.assets.creatures.util import genome -from infinigen.assets.creatures.util.genome import Joint -from infinigen.assets.creatures import parts -from infinigen.core.util.math import clip_gaussian - -from infinigen.core import surface - -import infinigen.assets.materials.tiger_attr -import infinigen.assets.materials.giraffe_attr -import infinigen.assets.materials.spot_sparse_attr -import infinigen.assets.materials.reptile_brown_circle_attr -import infinigen.assets.materials.reptile_gray_attr - -from infinigen.core.placement.factory import AssetFactory -from infinigen.assets.creatures.util.creature_util import offset_center -from infinigen.assets.creatures.util import creature, joining -from infinigen.assets.creatures.util import hair as creature_hair, cloth_sim -from infinigen.assets.creatures.util.animation import idle, run_cycle - -from infinigen.assets.materials import bone, tongue, eyeball, nose, horn -from infinigen.core.tagging import tag_object, tag_nodegroup - -from infinigen.core.util import blender as butil - -def herbivore_hair(): - - mat_roughness = U(0.5, 0.9) - - puff = U(0.14, 0.4) - length = clip_gaussian(0.035, 0.03, 0.01, 0.1) - - return { - 'density': 500000, - 'clump_n': np.random.randint(10, 300), - 'avoid_features_dist': 0.06, - 'grooming': { - 'Length MinMaxScale': np.array((length, length * U(1.5, 4), U(15, 60)), dtype=np.float32), - 'Puff MinMaxScale': np.array((puff, U(0.5, 1.3), U(15, 60)), dtype=np.float32), - 'Combing': U(0.5, 1), - 'Strand Random Mag': U(0, 0.003) if U() < 0.5 else 0, - 'Strand Perlin Mag': U(0, 0.006), - 'Strand Perlin Scale': U(15, 45), - 'Tuft Spread': N(0.06, 0.025), - 'Tuft Clumping': U(0.7, 0.95), - 'Root Radius': 0.0025, - 'Post Clump Noise Mag': 0.001 * N(1, 0.15), - 'Hair Length Pct Min': U(0.5, 0.9) - }, - 'material': { - 'Roughness': mat_roughness, - 'Radial Roughness': mat_roughness + N(0, 0.07), - 'Random Roughness': 0, - 'IOR': 1.55 - } - } - -def herbivore_postprocessing(body_parts, extras, params): - - get_extras = lambda k: [o for o in extras if k in o.name] - - main_template = surface.registry.sample_registry(params['surface_registry']) - main_template.apply(body_parts + get_extras('BodyExtra')) - - tongue.apply(get_extras('Tongue')) - bone.apply(get_extras('Teeth') + get_extras('Claws')) - horn.apply(get_extras('Horn')) - eyeball.apply(get_extras('Eyeball'), shader_kwargs={"coord": "X"}) - nose.apply(get_extras('Nose')) - -def herbivore_genome(): - - temp_dict = defaultdict(lambda: 0.2, {'body_herbivore_giraffe': 0.02, 'body_herbivore_llama': 0.1}) - body = genome.part(parts.generic_nurbs.NurbsBody(prefix='body_herbivore', tags=['body'], var=1, temperature=temp_dict)) - - neck_t = 0.67 - shoulder_bounds = np.array([[-20, -20, -20], [20, 20, 20]]) - splay = clip_gaussian(130, 7, 90, 130)/180 - shoulder_t = clip_gaussian(0.1, 0.05, 0.05, 0.2) - params = {'length_rad1_rad2': np.array((1.8, 0.1, 0.05)) * N(1, (0.1, 0.05, 0.05), 3)} - - leg_rest = (0, 90, 0) #(0, 90, 0) - foot_rest = (0, -90, 0) - 
foot_fac = parts.hoof.HoofAnkle() - claw_fac = parts.hoof.HoofClaw() - backleg_fac = parts.leg.QuadrupedBackLeg(params=params) - frontleg_fac = parts.leg.QuadrupedFrontLeg(params=params) - - if U() < 0.15: - lenscale = U(1, 1.3) - backleg_fac.params['length_rad1_rad2'][0] *= lenscale - frontleg_fac.params['length_rad1_rad2'][0] *= lenscale - - for side in [-1, 1]: - # foot = genome.part(claw_fac) - foot = genome.attach(genome.part(claw_fac), genome.part(foot_fac), coord=(0.7, -1, 0), joint=Joint(rest=(0, 90, 0)), rotation_basis='global') - back_leg = genome.attach(foot, genome.part(backleg_fac), coord=(0.95, 1, 0.2), joint=Joint(rest=foot_rest), rotation_basis='global') - genome.attach(back_leg, body, coord=(shoulder_t, splay, 1), - joint=Joint(rest=leg_rest, bounds=shoulder_bounds), rotation_basis='global', side=side) - - for side in [-1, 1]: - # foot = genome.part(claw_fac) - foot = genome.attach(genome.part(claw_fac), genome.part(foot_fac), coord=(0.7, 1, 0), joint=Joint(rest=(0, 90, 0)), rotation_basis='normal') - front_leg = genome.attach(foot, genome.part(frontleg_fac), coord=(0.95, 0, 0.5), joint=Joint(rest=(0, -70, 0))) - genome.attach(front_leg, body, coord=(neck_t - shoulder_t, splay + 0/180, 0.9), - joint=Joint(rest=leg_rest), rotation_basis='global', side=side) - - temp_dict = defaultdict(lambda: 0.2, {'body_herbivore_giraffe': 0.02}) - head_fac = parts.generic_nurbs.NurbsHead(prefix='head_herbivore', tags=['head'], var=0.5, temperature=temp_dict) - head = genome.part(head_fac) - - eye_fac = parts.eye.MammalEye({'Radius': N(0.035, 0.01)}) - eye_t, splay = U(0.34, 0.45), U(80, 140)/180 - r = U(0.7, 0.9) - rot = np.array([0, 0, 0]) - for side in [-1, 1]: - eye = genome.part(eye_fac) - genome.attach(eye, head, coord=(eye_t, splay, r), joint=Joint(rest=rot), rotation_basis='normal', side=side) - - jaw = genome.part(parts.head.CarnivoreJaw({'length_rad1_rad2': (0.6 * head_fac.params['length'], 0.12, 0.08), 'Canine Length': 0})) - genome.attach(jaw, head, coord=(0.25 * N(1, 0.1), 0, 0.35 * N(1, 0.1)), joint=Joint(rest=(0, 10 * N(1, 0.1), 0))) - - if U() < 0.7: - nose = genome.part(parts.head_detail.CatNose()) - genome.attach(nose, head, coord=(0.95, 1, 0.45), joint=Joint(rest=(0, 20, 0))) - - t, splay = U(0.15, eye_t - 0.07), N(125, 15)/180 - ear_fac = parts.head_detail.CatEar({}) - ear_fac.params['length_rad1_rad2'] *= N(1.2, 0.1, 3) - rot = np.array([0, -10, -23]) * N(1, 0.1, 3) - for side in [-1, 1]: - ear = genome.part(ear_fac) - genome.attach(ear, head, coord=(t, splay, 1), joint=Joint(rest=rot), rotation_basis='normal', side=side) - - if U() < 0.7: - horn_fac = parts.horn.Horn() - horn_fac.params['length'] *= U(0.1, 2) - horn_fac.params['rad1'] *= U(0.07, 1.5) - horn_fac.params['rad2'] *= U(0.07, 1.5) - t, splay = U(0.25, t), U(splay + 20/180, 130/180) - rot = np.array([U(-40, 0), 0, N(120, 10)]) - for side in [-1, 1]: - horn = genome.part(horn_fac) - genome.attach(horn, head, coord=(t, splay, 0.5), joint=Joint(rest=rot), rotation_basis='global', side=side) - elif U() < 0: - horn_fac = parts.horn.Horn() - horn_fac.params['length'] *= U(0.3, 1) - horn_fac.params['rotation_x'] = 0 - horn = genome.part(horn_fac) - genome.attach(horn, head, coord=(U(0.3, 0.9), 1, 0.6), joint=Joint(rest=(0,-90,-90)), rotation_basis='global') - - genome.attach(head, body, coord=(0.97, 0, 0.2), joint=Joint(rest=(0, 20, 0))) - - if U() < 1: - hair = herbivore_hair() - registry = [ - (infinigen.assets.materials.giraffe_attr, 1), - (infinigen.assets.materials.spot_sparse_attr, 3) - ] - else: - hair = 
None - registry = [ - (infinigen.assets.materials.reptile_brown_circle_attr, 1), - (infinigen.assets.materials.reptile_gray_attr, 1) - ] - - return genome.CreatureGenome( - parts=body, - postprocess_params=dict( - animation=dict(), - hair=hair, - surface_registry=registry - ) - ) - -@gin.configurable -class HerbivoreFactory(AssetFactory): - - max_distance = 40 - - def __init__( - self, - factory_seed=None, - bvh: mathutils.bvhtree.BVHTree = None, - coarse: bool = False, - animation_mode: str = None, - hair: bool = True, - clothsim_skin: bool = False, - **kwargs - ): - super().__init__(factory_seed, coarse) - self.bvh = bvh - self.animation_mode = animation_mode - self.hair = hair - self.clothsim_skin = clothsim_skin - - if self.hair and (self.animation_mode is not None or self.clothsim_skin): - raise NotImplementedError( - 'Dynamic hair is not yet fully working. ' - 'Please disable either hair or both of animation/clothsim' - ) - - - def create_placeholder(self, **kwargs): - return butil.spawn_cube(size=4) - - def create_asset(self, i, placeholder, **kwargs): - genome = herbivore_genome() - root, parts = creature.genome_to_creature(genome, name=f'herbivore({self.factory_seed}, {i})') - # tag_object(root, 'herbivore') - offset_center(root) - - dynamic = self.animation_mode is not None - - joined, extras, arma, ik_targets = joining.join_and_rig_parts( - root, parts, genome, rigging=dynamic, - postprocess_func=herbivore_postprocessing, **kwargs) - - butil.parent_to(root, placeholder, no_inverse=True) - - if self.hair: - creature_hair.configure_hair( - joined, root, genome.postprocess_params['hair']) - if dynamic: - if self.animation_mode == 'run': - run_cycle.animate_run(root, arma, ik_targets) - elif self.animation_mode == 'idle': - idle.snap_iks_to_floor(ik_targets, self.bvh) - idle.idle_body_noise_drivers(ik_targets) - else: - raise ValueError(f'Unrecognized mode {self.animation_mode=}') - if self.clothsim_skin: - rigidity = surface.write_vertex_group( - joined, cloth_sim.local_pos_rigity_mask, apply=True) - cloth_sim.bake_cloth(joined, genome.postprocess_params['skin'], - attributes=dict(vertex_group_mass=rigidity)) - - return root \ No newline at end of file diff --git a/infinigen/assets/creatures/insects/__init__.py b/infinigen/assets/creatures/insects/__init__.py deleted file mode 100644 index e05ce4f40..000000000 --- a/infinigen/assets/creatures/insects/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from .dragonfly import DragonflyFactory \ No newline at end of file diff --git a/infinigen/assets/creatures/insects/dragonfly.py b/infinigen/assets/creatures/insects/dragonfly.py deleted file mode 100644 index 8afdf0e56..000000000 --- a/infinigen/assets/creatures/insects/dragonfly.py +++ /dev/null @@ -1,319 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
- -# Authors: Yiming Zuo - - -import bpy -import mathutils -import gin -import numpy as np -from numpy.random import uniform as U, normal as N, randint - -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category, hsv2rgba -from infinigen.core import surface - -from .utils.geom_utils import nodegroup_symmetric_clone -from .parts.head.dragonfly_head import nodegroup_dragon_fly_head -from .parts.body.dragonfly_body import nodegroup_dragonfly_body -from .parts.tail.dragonfly_tail import nodegroup_dragonfly_tail -from .parts.leg.dragonfly_leg import nodegroup_leg_control, nodegroup_dragonfly_leg -from .parts.wing.dragonfly_wing import nodegroup_dragonfly_wing - -from infinigen.core.placement import animation_policy - -from infinigen.core.util.math import FixedSeed -from infinigen.core.placement.factory import AssetFactory -from infinigen.core.util import blender as butil - -def geometry_dragonfly(nw: NodeWrangler, **kwargs): - # Code generated using version 2.4.3 of the node_transpiler - value_head_scale = nw.new_node(Nodes.Value) - value_head_scale.outputs[0].default_value = kwargs["Head Scale"] - - dragonflyhead = nw.new_node(nodegroup_dragon_fly_head(base_color=kwargs["Base Color"], eye_color=kwargs["Eye Color"], v=kwargs["V"]).name) - - combine_xyz_8 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': kwargs["Head Roll"], 'Y': kwargs["Head Pitch"], 'Z': 1.5708}) - - transform_8 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': dragonflyhead, 'Translation': (0.0, -0.3, 0.0), 'Rotation': combine_xyz_8, 'Scale': value_head_scale}) - - transform_13 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': transform_8, 'Scale': (1.1, 1.0, 1.0)}) - - dragonflybody = nw.new_node(nodegroup_dragonfly_body(base_color=kwargs["Body Color"], v=kwargs["V"]).name, - input_kwargs={'Body Length': kwargs["Body Length"], 'Random Seed': kwargs["Body Seed"]}) - - store_named_attribute = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': dragonflybody.outputs["Geometry"], 'Name': 'spline parameter', 'Value': dragonflybody.outputs["spline parameter"]}) - - store_named_attribute_1 = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': store_named_attribute, 'Name': 'body seed', 'Value': kwargs["Body Seed"]}) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': store_named_attribute_1, 'Rotation': (1.5708, 0.0, 0.0)}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: kwargs["Tail Length"]}, - attrs={'operation': 'MULTIPLY'}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: kwargs["Tail Tip Z"], 1: -0.5}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': multiply, 'Z': multiply_1}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': kwargs["Tail Length"], 'Z': kwargs["Tail Tip Z"]}) - - dragonflytail = nw.new_node(nodegroup_dragonfly_tail(base_color=kwargs["Base Color"], v=kwargs["V"], ring_length=kwargs['Ring Length']).name, - input_kwargs={'Middle': combine_xyz_1, 'End': combine_xyz, 'Segment Length': 0.38, 'Random Seed': kwargs["Tail Seed"], 'Radius': kwargs["Tail Radius"]}) - - value = nw.new_node(Nodes.Value) - value.outputs[0].default_value = 10.0 - - transform_1 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': dragonflytail, 'Translation': (0.0, -10.2, 0.0), 'Rotation': (0.0, 0.0, -1.5708), 'Scale': value}) - - join_geometry = 
nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [transform, transform_1]}) - - nodegroup = nw.new_node(nodegroup_leg_control().name, - input_kwargs={'Openness': kwargs["Leg Openness 3"]}) - - dragonflyleg = nw.new_node(nodegroup_dragonfly_leg().name, - input_kwargs={'Rot claw': 0.18, 'Rot Tarsus': nodegroup.outputs["Tarsus"], 'Rot Femur': nodegroup.outputs["Femur"]}) - - value_leg_scale = nw.new_node(Nodes.Value) - value_leg_scale.outputs[0].default_value = kwargs["Leg Scale"] - - transform_15 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': dragonflyleg, 'Rotation': (0.0, 0.0, -0.5236), 'Scale': value_leg_scale}) - - combine_xyz_6 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'Y': nodegroup.outputs["Shoulder"], 'Z': -0.5861}) - - transform_2 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': transform_15, 'Translation': (0.38, 0.0, 0.0), 'Rotation': combine_xyz_6}) - - symmetric_clone = nw.new_node(nodegroup_symmetric_clone().name, - input_kwargs={'Geometry': transform_2, 'Scale': (-1.0, 1.0, 1.0)}) - - value_1 = nw.new_node(Nodes.Value) - value_1.outputs[0].default_value = 1.2 - - transform_3 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': symmetric_clone.outputs["Both"], 'Translation': (0.0, -4.6, -2.26), 'Scale': value_1}) - - nodegroup_1 = nw.new_node(nodegroup_leg_control().name, - input_kwargs={'Openness': kwargs["Leg Openness 2"]}) - - dragonflyleg_1 = nw.new_node(nodegroup_dragonfly_leg().name, - input_kwargs={'Rot claw': 0.18, 'Rot Tarsus': nodegroup_1.outputs["Tarsus"], 'Rot Femur': nodegroup_1.outputs["Femur"]}) - - transform_16 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': dragonflyleg_1, 'Rotation': (0.0, 0.0, -0.1745), 'Scale': value_leg_scale}) - - combine_xyz_5 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'Y': nodegroup_1.outputs["Shoulder"], 'Z': 0.174}) - - transform_5 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': transform_16, 'Translation': (0.38, 0.0, 0.0), 'Rotation': combine_xyz_5}) - - symmetric_clone_1 = nw.new_node(nodegroup_symmetric_clone().name, - input_kwargs={'Geometry': transform_5, 'Scale': (-1.0, 1.0, 1.0)}) - - value_2 = nw.new_node(Nodes.Value) - value_2.outputs[0].default_value = 1.18 - - transform_4 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': symmetric_clone_1.outputs["Both"], 'Translation': (0.0, -3.62, -2.26), 'Scale': value_2}) - - nodegroup_2 = nw.new_node(nodegroup_leg_control().name, - input_kwargs={'Openness': kwargs["Leg Openness 1"]}) - - dragonflyleg_2 = nw.new_node(nodegroup_dragonfly_leg().name, - input_kwargs={'Rot claw': 1.0, 'Rot Tarsus': nodegroup_2.outputs["Tarsus"], 'Rot Femur': nodegroup_2.outputs["Femur"]}) - - transform_14 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': dragonflyleg_2, 'Rotation': (0.0, 0.0, 0.3491), 'Scale': value_leg_scale}) - - combine_xyz_4 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'Y': nodegroup_2.outputs["Shoulder"], 'Z': 0.663}) - - transform_6 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': transform_14, 'Translation': (0.38, 0.0, 0.0), 'Rotation': combine_xyz_4}) - - symmetric_clone_2 = nw.new_node(nodegroup_symmetric_clone().name, - input_kwargs={'Geometry': transform_6, 'Scale': (-1.0, 1.0, 1.0)}) - - value_3 = nw.new_node(Nodes.Value) - value_3.outputs[0].default_value = 1.04 - - transform_7 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': symmetric_clone_2.outputs["Both"], 'Translation': (0.0, -2.66, -2.26), 'Scale': value_3}) - - join_geometry_1 = 
nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [join_geometry, transform_3, transform_4, transform_7]}) - - join_geometry_2 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [transform_13, join_geometry_1]}) - - dragonflywing = nw.new_node(nodegroup_dragonfly_wing().name) - - scene_time = nw.new_node('GeometryNodeInputSceneTime') - multiply_2 = nw.new_node(Nodes.Math, - input_kwargs={0: scene_time.outputs["Seconds"], 1: 2 * np.pi * kwargs["Flap Freq"]}, - attrs={'operation': 'MULTIPLY'}) - sine = nw.new_node(Nodes.Math, input_kwargs={0: multiply_2}, attrs={'operation': 'SINE'}) - wing_roll = nw.new_node(Nodes.Math, input_kwargs={0: sine, 1: kwargs["Flap Mag"]}, attrs={'operation': 'MULTIPLY'}) - - value_wing_yaw = nw.new_node(Nodes.Value) - value_wing_yaw.outputs[0].default_value = kwargs["Wing Yaw"] - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'Y': wing_roll, 'Z': value_wing_yaw}) - - value_wing_scale = nw.new_node(Nodes.Value) - value_wing_scale.outputs[0].default_value = kwargs["Wing Scale"] - - transform_9 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': dragonflywing, 'Translation': (0.22, 0.0, 0.0), 'Rotation': combine_xyz_2, 'Scale': value_wing_scale}) - - symmetric_clone_3 = nw.new_node(nodegroup_symmetric_clone().name, - input_kwargs={'Geometry': transform_9, 'Scale': (-1.0, 1.0, 1.0)}) - - value_5 = nw.new_node(Nodes.Value) - value_5.outputs[0].default_value = 5.4 - - transform_10 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': symmetric_clone_3.outputs["Both"], 'Translation': (0.0, -2.4, 1.8), 'Scale': value_5}) - - dragonflywing_1 = nw.new_node(nodegroup_dragonfly_wing().name) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: wing_roll, 1: 0.0524}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={1: value_wing_yaw}, - attrs={'operation': 'SUBTRACT'}) - - combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'Y': add, 'Z': subtract}) - - transform_12 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': dragonflywing_1, 'Translation': (0.22, 0.0, 0.0), 'Rotation': combine_xyz_3, 'Scale': value_wing_scale}) - - symmetric_clone_4 = nw.new_node(nodegroup_symmetric_clone().name, - input_kwargs={'Geometry': transform_12, 'Scale': (-1.0, 1.0, 1.0)}) - - value_6 = nw.new_node(Nodes.Value) - value_6.outputs[0].default_value = 6.0 - - transform_11 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': symmetric_clone_4.outputs["Both"], 'Translation': (0.0, -4.18, 1.8), 'Scale': value_6}) - - join_geometry_3 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [join_geometry_2, transform_10, transform_11]}) - - realize_instances = nw.new_node(Nodes.RealizeInstances, input_kwargs={'Geometry': join_geometry_3}) - - # TODO replace this hacky postprocess transform - result = nw.new_node(Nodes.Transform, input_kwargs={ - 'Geometry': realize_instances, - 'Translation': (0.6, 0, 0), # position origin at ~center of dragonfly - 'Rotation': (0, 0, -np.pi / 2), - 'Scale': (kwargs['PostprocessScale'],) * 3 - }) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': result}) - - -@gin.configurable -class DragonflyFactory(AssetFactory): - - - def __init__(self, factory_seed, coarse=False, bvh=None, **_): - super(DragonflyFactory, self).__init__(factory_seed, coarse=coarse) - self.bvh = bvh - with FixedSeed(factory_seed): - self.genome = self.sample_geo_genome() - y = U(20, 60) - self.policy = animation_policy.AnimPolicyRandomForwardWalk( - forward_vec=(1, 0, 0), speed=U(7, 10), - 
step_range=(0.2, 7), yaw_dist=("uniform", -y, y), rot_vars=[0,0,0]) - - @staticmethod - def sample_geo_genome(): - base_color = np.array((U(0.1, 0.6), 0.9, 0.8)) - base_color[1] += N(0.0, 0.05) - base_color[2] += N(0.0, 0.05) - base_color_rgba = hsv2rgba(base_color) - - eye_color = np.copy(base_color) - eye_color[0] += N(0.0, 0.1) - eye_color[1] += N(0.0, 0.05) - eye_color[2] += N(0.0, 0.05) - eye_color_rgba = hsv2rgba(eye_color) - - body_color = np.copy(base_color) - body_color[0] += N(0.0, 0.1) - body_color[1] += N(0.0, 0.05) - body_color[2] += N(0.0, 0.05) - body_color_rgba = hsv2rgba(body_color) - - return { - 'Tail Length': U(2.5, 3.5), - 'Tail Tip Z': U(-0.4, 0.3), - 'Tail Seed': U(-100, 100), - 'Tail Radius': U(0.7, 0.9), - 'Body Length': U(8.0, 10.0), - 'Body Seed': U(-100, 100), - 'Flap Freq': U(20, 50), - 'Flap Mag': U(0.15, 0.25), - 'Wing Yaw': U(0.43, 0.7), - 'Wing Scale': U(0.9, 1.1), - 'Leg Scale': U(0.9, 1.1), - 'Leg Openness 1': U(0.0, 1.0), - 'Leg Openness 2': U(0.0, 1.0), - 'Leg Openness 3': U(0.0, 1.0), - 'Head Scale': U(1.6, 1.8), - 'Head Roll': U(-0.2, 0.2), - 'Head Pitch': U(-0.6, 0.6), - 'Base Color': base_color_rgba, - 'Body Color': body_color_rgba, - 'Eye Color': eye_color_rgba, - 'V': U(0.0, 0.5), - 'Ring Length': U(0.0, 0.3), - 'PostprocessScale': 0.015 * N(1, 0.1), - } - - def create_placeholder(self, i, loc, rot): - - p = butil.spawn_cube(size=1) - p.location = loc - p.rotation_euler = rot - - if self.bvh is not None: - p.location.z += U(0.5, 2) - animation_policy.animate_trajectory(p, self.bvh, self.policy) - - return p - - def create_asset(self, placeholder, **params): - - bpy.ops.mesh.primitive_plane_add( - size=2, enter_editmode=False, align='WORLD', location=(0, 0, 0), scale=(1, 1, 1)) - obj = bpy.context.active_object - - phenome = self.genome.copy() - - surface.add_geomod(obj, geometry_dragonfly, apply=False, input_kwargs=phenome) - obj.parent = placeholder - - return obj \ No newline at end of file diff --git a/infinigen/assets/creatures/insects/parts/antenna/dragonfly_antenna.py b/infinigen/assets/creatures/insects/parts/antenna/dragonfly_antenna.py deleted file mode 100644 index 35abfe9eb..000000000 --- a/infinigen/assets/creatures/insects/parts/antenna/dragonfly_antenna.py +++ /dev/null @@ -1,34 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
- -# Authors: Yiming Zuo - - -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface - -from infinigen.assets.creatures.insects.utils.geom_utils import nodegroup_simple_tube_v2 - -@node_utils.to_nodegroup('nodegroup_dragonfly_antenna', singleton=False, type='GeometryNodeTree') -def nodegroup_dragonfly_antenna(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'length_rad1_rad2', (1.24, 0.02, 0.01)), - ('NodeSocketVector', 'angles_deg', (0.0, -63.9, 31.39)), - ('NodeSocketFloat', 'Carapace Rad Pct', 1.4), - ('NodeSocketVector', 'spike_length_rad1_rad2', (0.1, 0.025, 0.0))]) - - simple_tube_v2 = nw.new_node(nodegroup_simple_tube_v2().name, - input_kwargs={'length_rad1_rad2': group_input.outputs["length_rad1_rad2"], 'angles_deg': group_input.outputs["angles_deg"], 'proportions': (0.2533, 0.3333, -0.2267), 'do_bezier': False}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': simple_tube_v2.outputs["Geometry"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': join_geometry, 'Skeleton Curve': simple_tube_v2.outputs["Skeleton Curve"], 'Endpoint': simple_tube_v2.outputs["Endpoint"]}) \ No newline at end of file diff --git a/infinigen/assets/creatures/insects/parts/body/dragonfly_body.py b/infinigen/assets/creatures/insects/parts/body/dragonfly_body.py deleted file mode 100644 index 3ee74fe41..000000000 --- a/infinigen/assets/creatures/insects/parts/body/dragonfly_body.py +++ /dev/null @@ -1,231 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
- -# Authors: Yiming Zuo - - -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface - -from infinigen.assets.creatures.insects.utils.geom_utils import nodegroup_circle_cross_section, nodegroup_surface_bump, nodegroup_random_rotation_scale, nodegroup_instance_on_points -from infinigen.assets.creatures.insects.parts.hair.principled_hair import nodegroup_principled_hair -from infinigen.assets.creatures.insects.utils.shader_utils import shader_black_w_noise_shader, nodegroup_add_noise, nodegroup_color_noise - -def shader_dragonfly_body_shader(nw: NodeWrangler, base_color, v): - # Code generated using version 2.4.3 of the node_transpiler - - attribute = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'pos'}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': attribute.outputs["Vector"]}) - - absolute = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz.outputs["X"]}, - attrs={'operation': 'ABSOLUTE'}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz.outputs["Z"], 1: 3.0}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': absolute, 'Y': separate_xyz.outputs["Y"], 'Z': multiply}) - - attribute_1 = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'body seed'}) - - musgrave_texture = nw.new_node(Nodes.MusgraveTexture, - input_kwargs={'Vector': combine_xyz, 'W': attribute_1.outputs["Fac"], 'Scale': 0.5, 'Dimension': 1.0, 'Lacunarity': 1.0}, - attrs={'musgrave_dimensions': '4D'}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': musgrave_texture, 1: -0.26, 2: 0.06}) - - attribute_2 = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'spline parameter'}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': attribute_2.outputs["Fac"]}) - - group = nw.new_node(nodegroup_add_noise().name, - input_kwargs={'Vector': combine_xyz_1, 'Scale': 0.5, 'amount': (0.16, 0.26, 0.0), 'Noise Eval Position': combine_xyz}) - - separate_xyz_1 = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': group}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': separate_xyz_1.outputs["X"], 'Y': attribute_1.outputs["Fac"]}) - - voronoi_texture = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': combine_xyz_2, 'Scale': 10.0}, - attrs={'voronoi_dimensions': '2D'}) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': voronoi_texture.outputs["Distance"], 1: 0.14, 2: 0.82}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: map_range.outputs["Result"], 1: map_range_1.outputs["Result"]}) - - map_range_2 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': add, 1: 0.7, 3: 1.0, 4: 0.0}) - - rgb_1 = nw.new_node(Nodes.RGB) - rgb_1.outputs[0].default_value = base_color - - group_2 = nw.new_node(nodegroup_color_noise().name, - input_kwargs={'Scale': 1.34, 'Color': rgb_1, 'Value From Max': 0.7, 'Value To Min': 0.18}) - - hue_saturation_value = nw.new_node('ShaderNodeHueSaturation', - input_kwargs={'Value': v, 'Color': rgb_1}) - - mix = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': map_range_2.outputs["Result"], 'Color1': group_2, 'Color2': hue_saturation_value}) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': mix, 'Metallic': 0.2182, 'Specular': 0.8318, 
'Roughness': 0.1545}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': principled_bsdf}) - -@node_utils.to_nodegroup('nodegroup_dragonfly_body', singleton=False, type='GeometryNodeTree') -def nodegroup_dragonfly_body(nw: NodeWrangler, - curve_control_points=[(0.0, 0.15), (0.1586, 0.4688), (0.36, 0.66), (0.7427, 0.4606), (0.9977, 0.2562)], - base_color=(0.2789, 0.3864, 0.0319, 1.0), - v=0.3, - ): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Body Length', 10.0), - ('NodeSocketFloat', 'Random Seed', 0.0), - ('NodeSocketFloat', 'Hair Density', 200.0)]) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'Z': group_input.outputs["Body Length"]}) - - curve_line = nw.new_node(Nodes.CurveLine, - input_kwargs={'End': combine_xyz}) - - resample_curve = nw.new_node(Nodes.ResampleCurve, - input_kwargs={'Curve': curve_line, 'Count': 128}) - - spline_parameter = nw.new_node(Nodes.SplineParameter) - - capture_attribute = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': resample_curve, 2: spline_parameter.outputs["Factor"]}) - - float_curve = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': spline_parameter.outputs["Factor"]}) - node_utils.assign_curve(float_curve.mapping.curves[0], curve_control_points) - - set_curve_radius = nw.new_node(Nodes.SetCurveRadius, - input_kwargs={'Curve': capture_attribute.outputs["Geometry"], 'Radius': float_curve}) - - circlecrosssection = nw.new_node(nodegroup_circle_cross_section().name, - input_kwargs={'random seed': group_input.outputs["Random Seed"], 'noise amount': 1.26, 'radius': 4.0}) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': circlecrosssection, 'Rotation': (0.0, 0.0, 1.5708)}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': set_curve_radius, 'Profile Curve': transform, 'Fill Caps': True}) - - normal = nw.new_node(Nodes.InputNormal) - - position_2 = nw.new_node(Nodes.InputPosition) - - multiply = nw.new_node(Nodes.VectorMath, - input_kwargs={0: position_2, 1: (1.0, 0.2, 0.8)}, - attrs={'operation': 'MULTIPLY'}) - - voronoi_texture = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': multiply.outputs["Vector"], 'W': group_input.outputs["Random Seed"], 'Scale': 0.5}, - attrs={'voronoi_dimensions': '4D'}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': voronoi_texture.outputs["Distance"], 4: 0.4}, - attrs={'clamp': False}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: map_range.outputs["Result"], 1: -1.0}, - attrs={'operation': 'MULTIPLY'}) - - scale = nw.new_node(Nodes.VectorMath, - input_kwargs={0: normal, 'Scale': multiply_1}, - attrs={'operation': 'SCALE'}) - - add = nw.new_node(Nodes.VectorMath, - input_kwargs={1: scale.outputs["Vector"]}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': curve_to_mesh, 'Offset': add.outputs["Vector"]}) - - set_material = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': set_position, 'Material': surface.shaderfunc_to_material(shader_dragonfly_body_shader, base_color, v)}) - - surfacebump = nw.new_node(nodegroup_surface_bump().name, - input_kwargs={'Geometry': set_material, 'Displacement': -0.12, 'Scale': 75.8, 'seed': group_input.outputs["Random Seed"]}) - - position = nw.new_node(Nodes.InputPosition) - - store_named_attribute = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': surfacebump, 'Name': 'pos', 2: 
position}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': position}) - - greater_than = nw.new_node(Nodes.Compare, - input_kwargs={0: separate_xyz.outputs["Y"], 1: 0.5}) - - reroute = nw.new_node(Nodes.Reroute, - input_kwargs={'Input': capture_attribute.outputs[2]}) - - less_than = nw.new_node(Nodes.Compare, - input_kwargs={0: reroute, 1: 0.4}, - attrs={'operation': 'LESS_THAN'}) - - op_and = nw.new_node(Nodes.BooleanMath, - input_kwargs={0: greater_than, 1: less_than}) - - reroute_1 = nw.new_node(Nodes.Reroute, - input_kwargs={'Input': group_input.outputs["Hair Density"]}) - - distribute_points_on_faces = nw.new_node(Nodes.DistributePointsOnFaces, - input_kwargs={'Mesh': store_named_attribute, 'Selection': op_and, 'Density': reroute_1}) - - randomrotationscale = nw.new_node(nodegroup_random_rotation_scale().name, - input_kwargs={'random seed': -2.4, 'rot mean': (-1.0, 0.0, 0.0), 'rot std z': -10.2, 'scale mean': 0.03}) - - leghair = nw.new_node(nodegroup_principled_hair().name, - input_kwargs={'Resolution': 2}) - - transform_3 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': leghair, 'Scale': (1.0, 1.0, 5.0)}) - - set_material_2 = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': transform_3, 'Material': surface.shaderfunc_to_material(shader_black_w_noise_shader)}) - - instanceonpoints = nw.new_node(nodegroup_instance_on_points().name, - input_kwargs={'rotation base': distribute_points_on_faces.outputs["Rotation"], 'rotation delta': randomrotationscale.outputs["Vector"], 'translation': (0.0, 0.0, 0.0), 'scale': randomrotationscale.outputs["Value"], 'Points': distribute_points_on_faces.outputs["Points"], 'Instance': set_material_2}) - - multiply_2 = nw.new_node(Nodes.Math, - input_kwargs={0: reroute_1, 1: 0.3}, - attrs={'operation': 'MULTIPLY'}) - - distribute_points_on_faces_1 = nw.new_node(Nodes.DistributePointsOnFaces, - input_kwargs={'Mesh': store_named_attribute, 'Density': multiply_2, 'Seed': 1}) - - instanceonpoints_1 = nw.new_node(nodegroup_instance_on_points().name, - input_kwargs={'rotation base': distribute_points_on_faces_1.outputs["Rotation"], 'rotation delta': randomrotationscale.outputs["Vector"], 'translation': (0.0, 0.0, 0.0), 'scale': randomrotationscale.outputs["Value"], 'Points': distribute_points_on_faces_1.outputs["Points"], 'Instance': set_material_2}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [store_named_attribute, instanceonpoints, instanceonpoints_1]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': join_geometry, 'Skeleton Curve': resample_curve, 'spline parameter': reroute}) \ No newline at end of file diff --git a/infinigen/assets/creatures/insects/parts/eye/dragonfly_eye.py b/infinigen/assets/creatures/insects/parts/eye/dragonfly_eye.py deleted file mode 100644 index ad3f078aa..000000000 --- a/infinigen/assets/creatures/insects/parts/eye/dragonfly_eye.py +++ /dev/null @@ -1,75 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
- -# Authors: Yiming Zuo - - -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface - -from infinigen.assets.creatures.insects.utils.shader_utils import nodegroup_color_noise - -def shader_dragonfly_eye_shader(nw: NodeWrangler, base_color, v): - # Code generated using version 2.4.3 of the node_transpiler - - musgrave_texture = nw.new_node(Nodes.MusgraveTexture, - input_kwargs={'Scale': 2.0, 'Detail': 1.0}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': musgrave_texture, 1: -1.0, 2: 0.2}) - - rgb = nw.new_node(Nodes.RGB) - rgb.outputs[0].default_value = base_color - - hue_saturation_value = nw.new_node('ShaderNodeHueSaturation', - input_kwargs={'Value': v, 'Color': rgb}) - - group_1 = nw.new_node(nodegroup_color_noise().name, - input_kwargs={'Scale': 1.34, 'Color': rgb, 'Value From Max': 0.7, 'Value To Min': 0.18}) - - mix = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': map_range.outputs["Result"], 'Color1': hue_saturation_value, 'Color2': group_1}) - - voronoi_texture = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Scale': 1000.0}, - attrs={'feature': 'DISTANCE_TO_EDGE'}) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': voronoi_texture.outputs["Distance"], 1: 0.03, 2: 0.2, 3: 1.0, 4: -0.78}) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': mix, 'Specular': map_range_1.outputs["Result"], 'Roughness': 0.0}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': principled_bsdf}) - -@node_utils.to_nodegroup('nodegroup_dragonfly_eye', singleton=False, type='GeometryNodeTree') -def nodegroup_dragonfly_eye(nw: NodeWrangler, - base_color=(0.2789, 0.3864, 0.0319, 1.0), - v=0.3, - ): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketInt', 'Rings', 16)]) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Rings"], 1: 2.0}, - attrs={'operation': 'MULTIPLY'}) - - uv_sphere = nw.new_node(Nodes.MeshUVSphere, - input_kwargs={'Segments': multiply, 'Rings': group_input.outputs["Rings"]}) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': uv_sphere, 'Scale': (1.0, 1.0, 1.3)}) - - set_material = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': transform, 'Material': surface.shaderfunc_to_material(shader_dragonfly_eye_shader, base_color, v)}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_material}) \ No newline at end of file diff --git a/infinigen/assets/creatures/insects/parts/hair/principled_hair.py b/infinigen/assets/creatures/insects/parts/hair/principled_hair.py deleted file mode 100644 index e907e5dc4..000000000 --- a/infinigen/assets/creatures/insects/parts/hair/principled_hair.py +++ /dev/null @@ -1,37 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
- -# Authors: Yiming Zuo - - -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface - -from infinigen.assets.creatures.insects.utils.geom_utils import nodegroup_shape_quadratic, nodegroup_circle_cross_section - -@node_utils.to_nodegroup('nodegroup_principled_hair', singleton=False, type='GeometryNodeTree') -def nodegroup_principled_hair(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketIntUnsigned', 'Resolution', 4)]) - - crosssection = nw.new_node(nodegroup_circle_cross_section().name, - input_kwargs={'Resolution': group_input.outputs["Resolution"], 'radius': 0.5}) - - value = nw.new_node(Nodes.Value) - value.outputs[0].default_value = 2.0 - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': crosssection, 'Scale': value}) - - shapequadraticleghair = nw.new_node(nodegroup_shape_quadratic(radius_control_points=[(0.0, 0.1125), (0.625, 0.1), (1.0, 0.0531)]).name, - input_kwargs={'Profile Curve': transform, 'noise amount tilt': 0.0, 'Resolution': 8, 'Start': (0.0, 0.0, 0.0), 'Middle': (-0.2, 0.0, 1.0), 'End': (0.0, 0.0, 2.66)}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Mesh': shapequadraticleghair.outputs["Mesh"]}) \ No newline at end of file diff --git a/infinigen/assets/creatures/insects/parts/head/dragonfly_head.py b/infinigen/assets/creatures/insects/parts/head/dragonfly_head.py deleted file mode 100644 index a6ee63719..000000000 --- a/infinigen/assets/creatures/insects/parts/head/dragonfly_head.py +++ /dev/null @@ -1,184 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
- -# Authors: Yiming Zuo - - -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface - -from infinigen.assets.creatures.insects.utils.geom_utils import nodegroup_add_hair, nodegroup_attach_part, nodegroup_symmetric_clone, nodegroup_surface_bump -from infinigen.assets.creatures.insects.parts.mouth.dragonfly_mouth import nodegroup_dragonfly_mouth -from infinigen.assets.creatures.insects.parts.eye.dragonfly_eye import nodegroup_dragonfly_eye -from infinigen.assets.creatures.insects.parts.antenna.dragonfly_antenna import nodegroup_dragonfly_antenna -from infinigen.assets.creatures.insects.parts.hair.principled_hair import nodegroup_principled_hair -from infinigen.assets.creatures.insects.utils.shader_utils import nodegroup_color_noise, shader_black_w_noise_shader - -def shader_dragonfly_head_shader(nw: NodeWrangler, base_color, v): - # Code generated using version 2.4.3 of the node_transpiler - - attribute = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'pos'}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': attribute.outputs["Vector"]}) - - absolute = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz.outputs["Y"]}, - attrs={'operation': 'ABSOLUTE'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': separate_xyz.outputs["X"], 'Y': absolute, 'Z': separate_xyz.outputs["Z"]}) - - musgrave_texture = nw.new_node(Nodes.MusgraveTexture, - input_kwargs={'Vector': combine_xyz, 'W': 28.0, 'Scale': 2.0, 'Detail': 1.0}, - attrs={'musgrave_dimensions': '4D'}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': musgrave_texture, 1: -0.28, 2: 0.48}) - - rgb = nw.new_node(Nodes.RGB) - rgb.outputs[0].default_value = base_color - - hue_saturation_value = nw.new_node('ShaderNodeHueSaturation', - input_kwargs={'Value': v, 'Color': rgb}) - - group = nw.new_node(nodegroup_color_noise().name, - input_kwargs={'Scale': 1.34, 'Color': rgb, 'Value From Max': 0.7, 'Value To Min': 0.18}) - - mix = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': map_range.outputs["Result"], 'Color1': hue_saturation_value, 'Color2': group}) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': mix, 'Specular': 0.7545, 'Roughness': 0.0636}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': principled_bsdf}) - -@node_utils.to_nodegroup('nodegroup_dragon_fly_head', singleton=False, type='GeometryNodeTree') -def nodegroup_dragon_fly_head(nw: NodeWrangler, - base_color=(0.2789, 0.3864, 0.0319, 1.0), - eye_color=(0.2789, 0.3864, 0.0319, 1.0), - v=0.3): - # Code generated using version 2.4.3 of the node_transpiler - - curve_line = nw.new_node(Nodes.CurveLine, - input_kwargs={'End': (1.8, 0.0, 0.0)}) - - resample_curve = nw.new_node(Nodes.ResampleCurve, - input_kwargs={'Curve': curve_line, 'Count': 32}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': resample_curve}) - - spline_parameter_1 = nw.new_node(Nodes.SplineParameter) - - capture_attribute = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': set_position, 2: spline_parameter_1.outputs["Factor"]}) - - float_curve_1 = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': capture_attribute.outputs[2]}) - node_utils.assign_curve(float_curve_1.mapping.curves[0], [(0.0, 0.14), (0.3055, 
0.93), (0.7018, 0.79), (0.9236, 0.455), (1.0, 0.0)]) - - set_curve_radius = nw.new_node(Nodes.SetCurveRadius, - input_kwargs={'Curve': capture_attribute.outputs["Geometry"], 'Radius': float_curve_1}) - - curve_circle = nw.new_node(Nodes.CurveCircle, - input_kwargs={'Resolution': 200, 'Radius': 1.1}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': set_curve_radius, 'Profile Curve': curve_circle.outputs["Curve"], 'Fill Caps': True}) - - set_material_1 = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': curve_to_mesh, 'Material': surface.shaderfunc_to_material(shader_dragonfly_head_shader, base_color, v)}) - - leghair = nw.new_node(nodegroup_principled_hair().name, - input_kwargs={'Resolution': 2}) - - transform_3 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': leghair, 'Scale': (1.0, 1.0, 5.0)}) - - set_material_2 = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': transform_3, 'Material': surface.shaderfunc_to_material(shader_black_w_noise_shader)}) - - addhair = nw.new_node(nodegroup_add_hair().name, - input_kwargs={'Mesh': set_material_1, 'Hair': set_material_2, 'Density': 500.0, 'rot mean': (0.36, 0.0, 0.0), 'scale mean': 0.01}) - - reroute = nw.new_node(Nodes.Reroute, - input_kwargs={'Input': addhair}) - - dragonflyeye = nw.new_node(nodegroup_dragonfly_eye(base_color=eye_color, v=0.0).name, - input_kwargs={'Rings': 128}) - - value_1 = nw.new_node(Nodes.Value) - value_1.outputs[0].default_value = 0.6 - - transform_1 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': dragonflyeye, 'Scale': value_1}) - - attach_part = nw.new_node(nodegroup_attach_part().name, - input_kwargs={'Skin Mesh': reroute, 'Skeleton Curve': set_position, 'Geometry': transform_1, 'Length Fac': 0.5625, 'Ray Rot': (1.5474, -0.3944, 1.4556), 'Rad': 0.64, 'Part Rot': (27.1, 0.0, 0.0)}) - - symmetric_clone = nw.new_node(nodegroup_symmetric_clone().name, - input_kwargs={'Geometry': attach_part.outputs["Geometry"]}) - - dragonflymouth = nw.new_node(nodegroup_dragonfly_mouth().name) - - set_material_3 = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': dragonflymouth, 'Material': surface.shaderfunc_to_material(shader_dragonfly_head_shader, base_color, v)}) - - addhair_1 = nw.new_node(nodegroup_add_hair().name, - input_kwargs={'Mesh': set_material_3, 'Hair': set_material_2, 'Density': 5.0, 'rot mean': (-0.04, 0.0, 0.0), 'scale mean': 0.1}) - - surfacebump = nw.new_node(nodegroup_surface_bump().name, - input_kwargs={'Geometry': addhair_1, 'Displacement': 0.05, 'Scale': 5.0}) - - value = nw.new_node(Nodes.Value) - value.outputs[0].default_value = 0.07 - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': surfacebump, 'Scale': value}) - - attach_part_1 = nw.new_node(nodegroup_attach_part().name, - input_kwargs={'Skin Mesh': reroute, 'Skeleton Curve': resample_curve, 'Geometry': transform, 'Length Fac': 0.9667, 'Part Rot': (0.0, 31.5, 0.0), 'Do Normal Rot': True}) - - antenna = nw.new_node(nodegroup_dragonfly_antenna().name, - input_kwargs={'length_rad1_rad2': (1.24, 0.05, 0.04), 'angles_deg': (0.0, -31.0, 0.0)}) - - surfacebump_1 = nw.new_node(nodegroup_surface_bump().name, - input_kwargs={'Geometry': antenna.outputs["Geometry"], 'Scale': 5.0}) - - set_material = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': surfacebump_1, 'Material': surface.shaderfunc_to_material(shader_black_w_noise_shader)}) - - value_2 = nw.new_node(Nodes.Value) - value_2.outputs[0].default_value = 0.48 - - transform_2 = nw.new_node(Nodes.Transform, - 
input_kwargs={'Geometry': set_material, 'Translation': (-0.02, 0.0, 0.0), 'Scale': value_2}) - - attach_part_2 = nw.new_node(nodegroup_attach_part().name, - input_kwargs={'Skin Mesh': reroute, 'Skeleton Curve': resample_curve, 'Geometry': transform_2, 'Length Fac': 0.6408, 'Ray Rot': (1.9722, -1.4364, 1.5708), 'Rad': 0.9, 'Part Rot': (108.1, -49.8, 26.7)}) - - symmetric_clone_1 = nw.new_node(nodegroup_symmetric_clone().name, - input_kwargs={'Geometry': attach_part_2.outputs["Geometry"]}) - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [symmetric_clone.outputs["Both"], reroute, attach_part_1.outputs["Geometry"], symmetric_clone_1.outputs["Both"]]}) - - position = nw.new_node(Nodes.InputPosition) - - store_named_attribute = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': join_geometry_1, 'Name': 'pos', "Value": position}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': store_named_attribute}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': join_geometry}) \ No newline at end of file diff --git a/infinigen/assets/creatures/insects/parts/leg/dragonfly_leg.py b/infinigen/assets/creatures/insects/parts/leg/dragonfly_leg.py deleted file mode 100644 index 05e52d5c9..000000000 --- a/infinigen/assets/creatures/insects/parts/leg/dragonfly_leg.py +++ /dev/null @@ -1,192 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Yiming Zuo - - -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface - -from infinigen.assets.creatures.insects.utils.shader_utils import shader_black_w_noise_shader -from infinigen.assets.creatures.insects.utils.geom_utils import nodegroup_shape_quadratic, nodegroup_surface_bump -from infinigen.assets.creatures.insects.parts.hair.principled_hair import nodegroup_principled_hair - -@node_utils.to_nodegroup('nodegroup_leg_control', singleton=False, type='GeometryNodeTree') -def nodegroup_leg_control(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Openness', 1.0)]) - - reroute_2 = nw.new_node(Nodes.Reroute, - input_kwargs={'Input': group_input.outputs["Openness"]}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': reroute_2, 3: 0.6, 4: 1.44}) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': reroute_2, 3: -0.26, 4: 0.16}) - - map_range_2 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': reroute_2, 3: 1.68, 4: 1.88}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Femur': map_range.outputs["Result"], 'Tarsus': map_range_1.outputs["Result"], 'Shoulder': map_range_2.outputs["Result"]}) - -@node_utils.to_nodegroup('nodegroup_dragonfly_leg', singleton=False, type='GeometryNodeTree') -def nodegroup_dragonfly_leg(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - legcrosssection = nw.new_node(nodegroup_leg_cross_section().name) - - shapequadraticclaw = nw.new_node(nodegroup_shape_quadratic(radius_control_points=[(0.0, 0.0031), (0.2682, 0.1906), (0.6364, 0.3594), 
(0.8091, 0.5031), (1.0, 0.5375)]).name, - input_kwargs={'Profile Curve': legcrosssection, 'noise amount tilt': 0.0, 'Resolution': 16, 'Start': (0.0, 0.0, 3.0), 'Middle': (-1.2, 0.0, 1.5), 'End': (0.2, 0.0, 0.0)}) - - value = nw.new_node(Nodes.Value) - value.outputs[0].default_value = 0.3 - - transform_2 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': shapequadraticclaw, 'Translation': (-0.38, 0.0, 1.0), 'Rotation': (0.0, 0.4318, 0.0), 'Scale': value}) - - value_1 = nw.new_node(Nodes.Value) - value_1.outputs[0].default_value = 0.5 - - transform_3 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': shapequadraticclaw, 'Translation': (0.1, 0.0, 0.04), 'Rotation': (0.0, -0.0262, 0.0), 'Scale': value_1}) - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [shapequadraticclaw, transform_2, transform_3]}) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Rot claw', 1.82), - ('NodeSocketFloat', 'Rot Tarsus', 0.02), - ('NodeSocketFloat', 'Rot Femur', 1.42)]) - - legpart = nw.new_node(nodegroup_leg_part().name, - input_kwargs={'NextJoint': join_geometry_1, 'NextJoint Y rot': group_input.outputs["Rot claw"], 'NextJoint Scale': 0.4, 'Num Hairs': 10}) - - legpart_1 = nw.new_node(nodegroup_leg_part().name, - input_kwargs={'NextJoint': legpart, 'NextJoint Y rot': group_input.outputs["Rot Tarsus"], 'NextJoint Scale': 0.45, 'Cross Section Scale': 0.8}) - - legpart_2 = nw.new_node(nodegroup_leg_part().name, - input_kwargs={'NextJoint': legpart_1, 'NextJoint Y rot': group_input.outputs["Rot Femur"], 'NextJoint Scale': 0.75, 'Cross Section Scale': 1.2, 'Num Hairs': 30, 'Hair Scale Max': 0.15}) - - surfacebump = nw.new_node(nodegroup_surface_bump().name, - input_kwargs={'Geometry': legpart_2, 'Displacement': 0.03, 'Scale': 5.0}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': surfacebump}) - -@node_utils.to_nodegroup('nodegroup_leg_part', singleton=False, type='GeometryNodeTree') -def nodegroup_leg_part(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - legcrosssection = nw.new_node(nodegroup_leg_cross_section().name) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'NextJoint', None), - ('NodeSocketFloat', 'NextJoint Y rot', 0.0), - ('NodeSocketFloat', 'NextJoint Scale', 1.0), - ('NodeSocketFloat', 'Cross Section Scale', 1.0), - ('NodeSocketInt', 'Num Hairs', 15), - ('NodeSocketFloat', 'Hair Scale Min', 0.18), - ('NodeSocketFloat', 'Hair Scale Max', 0.22)]) - - transform_4 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': legcrosssection, 'Rotation': (0.0, 0.0, 3.1416), 'Scale': group_input.outputs["Cross Section Scale"]}) - - tarsus_end = nw.new_node(Nodes.Vector, - label='tarsus end') - tarsus_end.vector = (0.2, 0.0, 6.0) - - shapequadratictarsus = nw.new_node(nodegroup_shape_quadratic(radius_control_points=[(0.0, 0.3125), (0.0841, 0.3469), (0.45, 0.4125), (0.55, 0.3719), (0.9045, 0.325), (1.0, 0.125)]).name, - input_kwargs={'Profile Curve': transform_4, 'noise amount tilt': 0.0, 'Resolution': 128, 'Start': (0.0, 0.0, 0.0), 'Middle': (-0.4, 0.0, 3.0), 'End': tarsus_end}) - - spline_parameter_1 = nw.new_node(Nodes.SplineParameter) - - capture_attribute_1 = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': shapequadratictarsus.outputs["Mesh"], 2: spline_parameter_1.outputs["Factor"]}) - - curve_to_points_1 = nw.new_node(Nodes.CurveToPoints, - input_kwargs={'Curve': capture_attribute_1.outputs["Geometry"], 
'Count': group_input.outputs["Num Hairs"]}) - - greater_than = nw.new_node(Nodes.Compare, - input_kwargs={0: capture_attribute_1.outputs[2], 1: 0.9}) - - delete_geometry_1 = nw.new_node(Nodes.DeleteGeometry, - input_kwargs={'Geometry': curve_to_points_1.outputs["Points"], 'Selection': greater_than}) - - leghair = nw.new_node(nodegroup_principled_hair().name) - - random_value_3 = nw.new_node(Nodes.RandomValue, - input_kwargs={2: 0.88}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'Y': random_value_3.outputs[1]}) - - random_value_2 = nw.new_node(Nodes.RandomValue, - input_kwargs={2: group_input.outputs["Hair Scale Min"], 3: group_input.outputs["Hair Scale Max"]}) - - instance_on_points_1 = nw.new_node(Nodes.InstanceOnPoints, - input_kwargs={'Points': delete_geometry_1, 'Instance': leghair, 'Rotation': combine_xyz_1, 'Scale': random_value_2.outputs[1]}) - - subtract = nw.new_node(Nodes.VectorMath, - input_kwargs={0: tarsus_end, 1: (0.0, 0.0, 0.05)}, - attrs={'operation': 'SUBTRACT'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'Y': group_input.outputs["NextJoint Y rot"]}) - - transform_5 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': group_input.outputs["NextJoint"], 'Translation': subtract.outputs["Vector"], 'Rotation': combine_xyz, 'Scale': group_input.outputs["NextJoint Scale"]}) - - join_geometry_3 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [shapequadratictarsus.outputs["Mesh"], transform_5]}) - - join_geometry_4 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [instance_on_points_1, join_geometry_3]}) - - set_material = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': join_geometry_4, 'Material': surface.shaderfunc_to_material(shader_black_w_noise_shader)}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_material}) - -@node_utils.to_nodegroup('nodegroup_leg_cross_section', singleton=False, type='GeometryNodeTree') -def nodegroup_leg_cross_section(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketIntUnsigned', 'Resolution', 8)]) - - bezier_segment = nw.new_node(Nodes.CurveBezierSegment, - input_kwargs={'Resolution': group_input.outputs["Resolution"], 'Start Handle': (-0.9, 0.7, 0.0), 'End Handle': (0.9, 0.38, 0.0)}) - - reroute = nw.new_node(Nodes.Reroute, - input_kwargs={'Input': bezier_segment}) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': reroute, 'Scale': (1.0, -1.0, 1.0)}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [transform, reroute]}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': join_geometry}) - - merge_by_distance = nw.new_node(Nodes.MergeByDistance, - input_kwargs={'Geometry': curve_to_mesh}) - - mesh_to_curve = nw.new_node(Nodes.MeshToCurve, - input_kwargs={'Mesh': merge_by_distance}) - - transform_1 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': mesh_to_curve, 'Rotation': (0.0, 0.0, 1.5708), 'Scale': (0.6, 1.0, 0.6)}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': transform_1}) \ No newline at end of file diff --git a/infinigen/assets/creatures/insects/parts/mouth/dragonfly_mouth.py b/infinigen/assets/creatures/insects/parts/mouth/dragonfly_mouth.py deleted file mode 100644 index 02ac73655..000000000 --- a/infinigen/assets/creatures/insects/parts/mouth/dragonfly_mouth.py +++ /dev/null @@ -1,73 +0,0 @@ -# 
Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Yiming Zuo - - -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface - -from infinigen.assets.creatures.insects.utils.geom_utils import nodegroup_simple_tube_v2 - -@node_utils.to_nodegroup('nodegroup_dragonfly_mouth', singleton=False, type='GeometryNodeTree') -def nodegroup_dragonfly_mouth(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - value = nw.new_node(Nodes.Value) - value.outputs[0].default_value = 1.5 - - simple_tube_v2 = nw.new_node(nodegroup_simple_tube_v2().name, - input_kwargs={'length_rad1_rad2': (9.5, 9.36, 5.54), 'proportions': (1.0, 1.0, 1.0), 'aspect': value, 'do_bezier': False, 'fullness': 7.9}) - - transform_1 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': simple_tube_v2.outputs["Geometry"], 'Translation': (0.0, 0.0, -9.1), 'Rotation': (0.0, 1.7645, 0.0), 'Scale': (1.0, 1.2, 1.0)}) - - simple_tube_v2_1 = nw.new_node(nodegroup_simple_tube_v2().name, - input_kwargs={'length_rad1_rad2': (9.64, 5.46, 9.04), 'proportions': (1.0, 1.0, 1.0), 'aspect': value, 'do_bezier': False, 'fullness': 7.9}) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': simple_tube_v2_1.outputs["Geometry"], 'Rotation': (0.0, 1.5708, 0.0), 'Scale': (1.0, 1.2, 1.0)}) - - simple_tube_v2_2 = nw.new_node(nodegroup_simple_tube_v2().name, - input_kwargs={'length_rad1_rad2': (8.4, 6.16, 4.7), 'proportions': (1.0, 1.0, 1.0), 'aspect': value, 'do_bezier': False, 'fullness': 7.9}) - - transform_2 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': simple_tube_v2_2.outputs["Geometry"], 'Translation': (-1.1, 0.0, -17.2), 'Rotation': (0.0, 2.6005, 0.0), 'Scale': (1.0, 1.2, 1.0)}) - - simple_tube_v2_3 = nw.new_node(nodegroup_simple_tube_v2().name, - input_kwargs={'length_rad1_rad2': (10.1, 4.28, 6.7), 'angles_deg': (4.64, 0.0, 0.0), 'proportions': (1.0, 1.0, 1.0), 'aspect': 2.1, 'do_bezier': False, 'fullness': 7.9}) - - transform_4 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': simple_tube_v2_3.outputs["Geometry"], 'Translation': (-6.56, 0.0, 5.34), 'Rotation': (0.0, 0.8126, 0.0), 'Scale': (1.0, 1.2, 1.0)}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [transform_1, transform, transform_2, transform_4]}) - - transform_3 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': join_geometry}) - - normal = nw.new_node(Nodes.InputNormal) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Scale': 0.5}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': noise_texture.outputs["Fac"], 4: 0.3}) - - scale = nw.new_node(Nodes.VectorMath, - input_kwargs={0: normal, 'Scale': map_range.outputs["Result"]}, - attrs={'operation': 'SCALE'}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': transform_3, 'Offset': scale.outputs["Vector"]}) - - subdivision_surface = nw.new_node(Nodes.SubdivisionSurface, - input_kwargs={'Mesh': set_position, 'Level': 2}) - - group_output_1 = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': subdivision_surface}) \ No newline at end of file diff --git 
a/infinigen/assets/creatures/insects/parts/tail/dragonfly_tail.py b/infinigen/assets/creatures/insects/parts/tail/dragonfly_tail.py deleted file mode 100644 index 57d9d2116..000000000 --- a/infinigen/assets/creatures/insects/parts/tail/dragonfly_tail.py +++ /dev/null @@ -1,384 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Yiming Zuo - - -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface - -from infinigen.assets.creatures.insects.utils.shader_utils import nodegroup_color_noise, nodegroup_add_noise -from infinigen.assets.creatures.insects.utils.geom_utils import nodegroup_random_rotation_scale, nodegroup_circle_cross_section, nodegroup_shape_quadratic, nodegroup_surface_bump, nodegroup_instance_on_points - -def shader_dragonfly_tail_shader(nw: NodeWrangler, base_color, v, ring_length): - # Code generated using version 2.4.3 of the node_transpiler - - attribute = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'cross section parameter'}) - - colorramp = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': attribute.outputs["Fac"]}) - colorramp.color_ramp.elements.new(0) - colorramp.color_ramp.elements.new(0) - colorramp.color_ramp.elements[0].position = 0.0 - colorramp.color_ramp.elements[0].color = (1.0, 1.0, 1.0, 1.0) - colorramp.color_ramp.elements[1].position = 0.4455 - colorramp.color_ramp.elements[1].color = (0.0, 0.0, 0.0, 1.0) - colorramp.color_ramp.elements[2].position = 0.5045 - colorramp.color_ramp.elements[2].color = (0.0, 0.0, 0.0, 1.0) - colorramp.color_ramp.elements[3].position = 1.0 - colorramp.color_ramp.elements[3].color = (1.0, 1.0, 1.0, 1.0) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': colorramp.outputs["Color"], 1: 0.02, 2: 0.38}) - - attribute_1 = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'spline parameter'}) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': attribute_1.outputs["Fac"], 1: 0.18, 2: 0.42}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': map_range.outputs["Result"], 'Y': map_range_1.outputs["Result"]}) - - texture_coordinate = nw.new_node(Nodes.TextureCoord) - - group = nw.new_node(nodegroup_add_noise().name, - input_kwargs={'Vector': combine_xyz, 'amount': (1.0, 1.0, 0.0), 'Noise Eval Position': texture_coordinate.outputs["Object"]}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': group}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz.outputs["X"], 1: separate_xyz.outputs["Y"]}) - - voronoi_texture = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'W': attribute_1.outputs["Fac"], 'Scale': 5.34, 'Randomness': 0.0}, - attrs={'voronoi_dimensions': '1D'}) - - map_range_2 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': voronoi_texture.outputs["Distance"], 2: 0.1, 3: 1.0, 4: 0.0}) - - maximum = nw.new_node(Nodes.Math, - input_kwargs={0: add, 1: map_range_2.outputs["Result"]}, - attrs={'operation': 'MAXIMUM'}) - - value = nw.new_node(Nodes.Value) - value.outputs[0].default_value = ring_length - - add_1 = nw.new_node(Nodes.Math, - input_kwargs={0: value, 1: 0.05}) - - map_range_4 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': 
attribute_1.outputs["Fac"], 1: value, 2: add_1}) - - minimum = nw.new_node(Nodes.Math, - input_kwargs={0: maximum, 1: map_range_4.outputs["Result"]}, - attrs={'operation': 'MINIMUM'}) - - rgb = nw.new_node(Nodes.RGB) - rgb.outputs[0].default_value = base_color - - hue_saturation_value = nw.new_node('ShaderNodeHueSaturation', - input_kwargs={'Value': v, 'Color': rgb}) - - group_2 = nw.new_node(nodegroup_color_noise().name, - input_kwargs={'Scale': 1.34, 'Color': rgb, 'Value From Max': 0.7, 'Value To Min': 0.18}) - - mix_1 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': minimum, 'Color1': hue_saturation_value, 'Color2': group_2}) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': mix_1, 'Metallic': 0.5, 'Specular': 0.5114, 'Roughness': 0.2568}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': principled_bsdf}) - - -@node_utils.to_nodegroup('nodegroup_dragonfly_tail', singleton=False, type='GeometryNodeTree') -def nodegroup_dragonfly_tail(nw: NodeWrangler, - base_color=(0.2789, 0.3864, 0.0319, 1.0), - v=0.3, - ring_length=0.3 - ): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVectorTranslation', 'Start', (0.0, 0.0, 0.0)), - ('NodeSocketVectorTranslation', 'Middle', (1.84, 0.0, 0.14)), - ('NodeSocketVectorTranslation', 'End', (3.14, 0.0, -0.32)), - ('NodeSocketFloatDistance', 'Segment Length', 0.44), - ('NodeSocketFloat', 'Segment Scale', 0.25), - ('NodeSocketFloat', 'Random Seed', 3.2), - ('NodeSocketFloat', 'Radius', 0.9)]) - - quadratic_bezier = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Start': group_input.outputs["Start"], 'Middle': group_input.outputs["Middle"], 'End': group_input.outputs["End"]}) - - spline_parameter = nw.new_node(Nodes.SplineParameter) - - capture_attribute = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': quadratic_bezier, 2: spline_parameter.outputs["Factor"]}) - - curve_to_points = nw.new_node(Nodes.CurveToPoints, - input_kwargs={'Curve': capture_attribute.outputs["Geometry"], 'Length': group_input.outputs["Segment Length"]}, - attrs={'mode': 'LENGTH'}) - - reroute_1 = nw.new_node(Nodes.Reroute, - input_kwargs={'Input': group_input.outputs["Segment Scale"]}) - - randomrotationscale = nw.new_node(nodegroup_random_rotation_scale().name, - input_kwargs={'rot std z': 0.0, 'scale mean': reroute_1, 'scale std': 0.05}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': capture_attribute.outputs[2], 3: 1.0, 4: 0.8}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: randomrotationscale.outputs["Value"], 1: map_range.outputs["Result"]}, - attrs={'operation': 'MULTIPLY'}) - - droplast = nw.new_node(nodegroup_droplast().name, - input_kwargs={'Geometry': curve_to_points.outputs["Points"]}) - - integer = nw.new_node(Nodes.Integer, - attrs={'integer': 128}) - integer.integer = 128 - - circlecrosssection = nw.new_node(nodegroup_circle_cross_section().name, - input_kwargs={'random seed': 23.4, 'noise amount': 0.9, 'Resolution': integer, 'radius': group_input.outputs["Radius"]}) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': circlecrosssection, 'Rotation': (0.0, 0.0, 1.5708)}) - - spline_parameter_1 = nw.new_node(Nodes.SplineParameter) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: spline_parameter_1.outputs["Factor"]}, - attrs={'operation': 'SUBTRACT'}) - - absolute = nw.new_node(Nodes.Math, - input_kwargs={0: subtract}, - 
attrs={'operation': 'ABSOLUTE'}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: absolute, 1: 2.0}, - attrs={'operation': 'MULTIPLY'}) - - store_named_attribute_2 = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': transform, 'Name': 'cross section parameter', 'Value': multiply_1}) - - reroute = nw.new_node(Nodes.Reroute, - input_kwargs={'Input': store_named_attribute_2}) - - shapequadratic_001 = nw.new_node(nodegroup_shape_quadratic(radius_control_points=[(0.0, 0.3906), (0.1795, 0.4656), (0.5, 0.4563), (0.8795, 0.45), (1.0, 0.4344)]).name, - input_kwargs={'Profile Curve': reroute, 'noise amount tilt': 0.0, 'Resolution': integer, 'Start': (0.0, 0.0, -1.5), 'End': (0.0, 0.0, 0.68)}) - - store_named_attribute_1 = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': shapequadratic_001.outputs["Mesh"], 'Name': 'spline parameter', 'Value': shapequadratic_001.outputs["spline parameter"]}) - - value_1 = nw.new_node(Nodes.Value) - value_1.outputs[0].default_value = 0.02 - - surfacebump = nw.new_node(nodegroup_surface_bump().name, - input_kwargs={'Geometry': store_named_attribute_1, 'Displacement': value_1, 'Scale': 20.0, 'seed': group_input.outputs["Random Seed"]}) - - addverticalstripes = nw.new_node(nodegroup_add_vertical_stripes().name, - input_kwargs={'Geometry': surfacebump, 'Seed': group_input.outputs["Random Seed"]}) - - instanceonpoints = nw.new_node(nodegroup_instance_on_points().name, - input_kwargs={'rotation base': curve_to_points.outputs["Rotation"], 'rotation delta': randomrotationscale.outputs["Vector"], 'translation': (0.0, 0.0, 0.0), 'scale': multiply, 'Points': droplast.outputs["Others"], 'Instance': addverticalstripes}) - - shapequadratic_003 = nw.new_node(nodegroup_shape_quadratic(radius_control_points=[(0.0, 0.3312), (0.1773, 0.4281), (0.4318, 0.5031), (0.5886, 0.3562), (0.7864, 0.2687), (1.0, 0.0)]).name, - input_kwargs={'Profile Curve': reroute, 'noise amount tilt': 0.0, 'Resolution': integer, 'Start': (0.26, 0.0, -1.5), 'Middle': (0.32, 0.0, 0.0), 'End': (-0.04, 0.0, 1.5)}) - - transform_1 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': shapequadratic_003.outputs["Mesh"], 'Translation': (0.0, 0.28, 0.0), 'Rotation': (0.0, 0.0, -1.5708)}) - - store_named_attribute = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': transform_1, 'Name': 'spline parameter', 'Value': shapequadratic_003.outputs["spline parameter"]}) - - surfacebump_1 = nw.new_node(nodegroup_surface_bump().name, - input_kwargs={'Geometry': store_named_attribute, 'Displacement': value_1, 'Scale': 20.0}) - - addverticalstripes_1 = nw.new_node(nodegroup_add_vertical_stripes().name, - input_kwargs={'Geometry': surfacebump_1, 'Seed': group_input.outputs["Random Seed"]}) - - instance_on_points = nw.new_node(Nodes.InstanceOnPoints, - input_kwargs={'Points': droplast.outputs["Last"], 'Instance': addverticalstripes_1, 'Rotation': curve_to_points.outputs["Rotation"], 'Scale': reroute_1}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [instanceonpoints, instance_on_points]}) - - set_material = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': join_geometry, 'Material': surface.shaderfunc_to_material(shader_dragonfly_tail_shader, base_color, v, ring_length)}) - - realize_instances = nw.new_node(Nodes.RealizeInstances, - input_kwargs={'Geometry': set_material}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': realize_instances}) - -@node_utils.to_nodegroup('nodegroup_droplast', 
singleton=False, type='GeometryNodeTree') -def nodegroup_droplast(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None)]) - - index = nw.new_node(Nodes.Index) - - domain_size = nw.new_node(Nodes.DomainSize, - input_kwargs={'Geometry': group_input.outputs["Geometry"]}, - attrs={'component': 'POINTCLOUD'} - ) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: domain_size.outputs["Point Count"], 1: 1.0}, - attrs={'operation': 'SUBTRACT'}) - - equal = nw.new_node(Nodes.Compare, - input_kwargs={2: index, 3: subtract}, - attrs={'data_type': 'INT', 'operation': 'EQUAL'}) - - op_not = nw.new_node(Nodes.BooleanMath, - input_kwargs={0: equal}, - attrs={'operation': 'NOT'}) - - delete_geometry_1 = nw.new_node(Nodes.DeleteGeometry, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 'Selection': op_not}) - - delete_geometry = nw.new_node(Nodes.DeleteGeometry, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 'Selection': equal}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Last': delete_geometry_1, 'Others': delete_geometry}) - -@node_utils.to_nodegroup('nodegroup_add_vertical_stripes', singleton=False, type='GeometryNodeTree') -def nodegroup_add_vertical_stripes(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketFloat', 'Scale', 5.0), - ('NodeSocketFloat', 'Seed', 0.0)]) - - position = nw.new_node(Nodes.InputPosition) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': position}) - - absolute = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz.outputs["X"]}, - attrs={'operation': 'ABSOLUTE'}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz.outputs["Z"], 1: 0.05}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': absolute, 'Y': separate_xyz.outputs["Y"], 'Z': multiply}) - - voronoi_texture = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': combine_xyz, 'W': group_input.outputs["Seed"], 'Scale': group_input.outputs["Scale"]}, - attrs={'voronoi_dimensions': '4D', 'feature': 'DISTANCE_TO_EDGE'}) - - reroute_1 = nw.new_node(Nodes.Reroute, - input_kwargs={'Input': voronoi_texture.outputs["Distance"]}) - - store_named_attribute_3 = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 'Name': 'tail vertical strips', 'Value': reroute_1}) - - normal = nw.new_node(Nodes.InputNormal) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: reroute_1, 1: 0.1}, - attrs={'operation': 'MULTIPLY'}) - - scale = nw.new_node(Nodes.VectorMath, - input_kwargs={0: normal, 'Scale': multiply_1}, - attrs={'operation': 'SCALE'}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': store_named_attribute_3, 'Offset': scale.outputs["Vector"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_position}) -## old version -# def shader_dragonfly_tail_shader(nw: NodeWrangler): -# # Code generated using version 2.4.3 of the node_transpiler - -# texture_coordinate = nw.new_node(Nodes.TextureCoord) - -# attribute_1 = nw.new_node(Nodes.Attribute, -# attrs={'attribute_name': 'cross section parameter'}) - -# colorramp_1 = nw.new_node(Nodes.ColorRamp, -# input_kwargs={'Fac': 
attribute_1.outputs["Fac"]}) -# colorramp_1.color_ramp.elements.new(0) -# colorramp_1.color_ramp.elements.new(0) -# colorramp_1.color_ramp.elements[0].position = 0.0 -# colorramp_1.color_ramp.elements[0].color = (1.0, 1.0, 1.0, 1.0) -# colorramp_1.color_ramp.elements[1].position = 0.4455 -# colorramp_1.color_ramp.elements[1].color = (0.0, 0.0, 0.0, 1.0) -# colorramp_1.color_ramp.elements[2].position = 0.5045 -# colorramp_1.color_ramp.elements[2].color = (0.0, 0.0, 0.0, 1.0) -# colorramp_1.color_ramp.elements[3].position = 1.0 -# colorramp_1.color_ramp.elements[3].color = (1.0, 1.0, 1.0, 1.0) - -# map_range_1 = nw.new_node(Nodes.MapRange, -# input_kwargs={'Value': colorramp_1.outputs["Color"], 1: 0.02, 2: 0.38}) - -# attribute = nw.new_node(Nodes.Attribute, -# attrs={'attribute_name': 'spline parameter'}) - -# map_range = nw.new_node(Nodes.MapRange, -# input_kwargs={'Value': attribute.outputs["Fac"], 1: 0.18, 2: 0.42}) - -# combine_xyz = nw.new_node(Nodes.CombineXYZ, -# input_kwargs={'X': map_range_1.outputs["Result"], 'Y': map_range.outputs["Result"]}) - -# group_2 = nw.new_node(nodegroup_add_noise().name, -# input_kwargs={'Vector': combine_xyz, 'amount': (1.0, 1.0, 0.0), 'Noise Eval Position': texture_coordinate.outputs["Object"],}) - -# separate_xyz = nw.new_node(Nodes.SeparateXYZ, -# input_kwargs={'Vector': group_2}) - -# add = nw.new_node(Nodes.Math, -# input_kwargs={0: separate_xyz.outputs["X"], 1: separate_xyz.outputs["Y"]}) - -# voronoi_texture = nw.new_node(Nodes.VoronoiTexture, -# input_kwargs={'W': attribute.outputs["Fac"], 'Scale': 5.34, 'Randomness': 0.0}, -# attrs={'voronoi_dimensions': '1D'}) - -# map_range_2 = nw.new_node(Nodes.MapRange, -# input_kwargs={'Value': voronoi_texture.outputs["Distance"], 2: 0.1, 3: 1.0, 4: 0.0}) - -# maximum = nw.new_node(Nodes.Math, -# input_kwargs={0: add, 1: map_range_2.outputs["Result"]}, -# attrs={'operation': 'MAXIMUM'}) - -# group_1 = nw.new_node(nodegroup_color_noise().name, -# input_kwargs={'Scale': 6.4, 'Color': (0.1582, 0.291, 1.0, 1.0), 'Value To Min': 0.4}) - -# attribute_2 = nw.new_node(Nodes.Attribute, -# attrs={'attribute_name': 'tail vertical strips'}) - -# map_range_3 = nw.new_node(Nodes.MapRange, -# input_kwargs={'Value': attribute_2.outputs["Fac"], 1: 0.16, 2: 0.34}) - -# mix_1 = nw.new_node(Nodes.MixRGB, -# input_kwargs={'Fac': 0.0, 'Color1': (0.0144, 0.016, 0.0152, 1.0), 'Color2': (0.544, 0.5299, 0.5841, 1.0)}) - -# mix = nw.new_node(Nodes.MixRGB, -# input_kwargs={'Fac': maximum, 'Color1': group_1, 'Color2': mix_1}) - -# principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, -# input_kwargs={'Base Color': mix, 'Metallic': 0.9, 'Specular': 0.5114, 'Roughness': 0.2568}) - -# material_output = nw.new_node(Nodes.MaterialOutput, -# input_kwargs={'Surface': principled_bsdf}) \ No newline at end of file diff --git a/infinigen/assets/creatures/insects/parts/wing/dragonfly_wing.py b/infinigen/assets/creatures/insects/parts/wing/dragonfly_wing.py deleted file mode 100644 index f0eb8706c..000000000 --- a/infinigen/assets/creatures/insects/parts/wing/dragonfly_wing.py +++ /dev/null @@ -1,286 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
- -# Authors: Yiming Zuo - - -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface - -from infinigen.assets.creatures.insects.utils.shader_utils import nodegroup_add_noise - -@node_utils.to_nodegroup('nodegroup_dragonfly_wing', singleton=False, type='GeometryNodeTree') -def nodegroup_dragonfly_wing(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - resolution = nw.new_node(Nodes.Integer, - label='resolution', - attrs={'integer': 32}) - resolution.integer = 32 - - pivot1 = nw.new_node(Nodes.Vector, - label='pivot1') - pivot1.vector = (1.84, -0.28, 0.0) - - add = nw.new_node(Nodes.VectorMath, - input_kwargs={0: pivot1}) - - reroute = nw.new_node(Nodes.Reroute, - input_kwargs={'Input': add.outputs["Vector"]}) - - quadratic_bezier = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Resolution': resolution, 'Start': (0.0, 0.0, 0.0), 'Middle': (1.2, -0.16, 0.0), 'End': reroute}) - - pivot2 = nw.new_node(Nodes.Vector, - label='pivot2') - pivot2.vector = (3.98, -0.78, 0.0) - - quadratic_bezier_1 = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Resolution': resolution, 'Start': reroute, 'Middle': (3.98, -0.32, 0.0), 'End': pivot2}) - - pivot3 = nw.new_node(Nodes.Vector, - label='pivot3') - pivot3.vector = (2.54, -1.14, 0.0) - - quadratic_bezier_2 = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Resolution': resolution, 'Start': pivot2, 'Middle': (4.0, -1.1, 0.0), 'End': pivot3}) - - pivot4 = nw.new_node(Nodes.Vector, - label='pivot4') - pivot4.vector = (-0.06, -0.74, 0.0) - - quadratic_bezier_3 = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Resolution': resolution, 'Start': pivot3, 'Middle': (0.28, -1.34, 0.0), 'End': pivot4}) - - pivot5 = nw.new_node(Nodes.Vector, - label='pivot5') - pivot5.vector = (0.0, -0.14, 0.0) - - bezier_segment = nw.new_node(Nodes.CurveBezierSegment, - input_kwargs={'Resolution': resolution, 'Start': pivot4, 'Start Handle': (0.16, -0.44, 0.0), 'End Handle': (-0.24, -0.34, 0.0), 'End': pivot5}) - - resample_curve = nw.new_node(Nodes.ResampleCurve, - input_kwargs={'Curve': bezier_segment, 'Count': resolution}) - - quadratic_bezier_4 = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Resolution': resolution, 'Start': pivot5, 'Middle': (-0.18, -0.04, 0.0), 'End': (0.0, 0.0, 0.0)}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [quadratic_bezier, quadratic_bezier_1, quadratic_bezier_2, quadratic_bezier_3, resample_curve, quadratic_bezier_4]}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': join_geometry}) - - merge_by_distance = nw.new_node(Nodes.MergeByDistance, - input_kwargs={'Geometry': curve_to_mesh}) - - mesh_to_curve = nw.new_node(Nodes.MeshToCurve, - input_kwargs={'Mesh': merge_by_distance}) - - fill_curve = nw.new_node(Nodes.FillCurve, - input_kwargs={'Curve': mesh_to_curve}) - - subdivide_mesh = nw.new_node(Nodes.SubdivideMesh, - input_kwargs={'Mesh': fill_curve}) - - curve_to_mesh_1 = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': quadratic_bezier_2}) - - geometry_proximity = nw.new_node(Nodes.Proximity, - input_kwargs={'Target': curve_to_mesh_1}, - attrs={'target_element': 'EDGES'}) - - store_named_attribute = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': subdivide_mesh, 'Name': 'distance to 
edge', 'Value': geometry_proximity.outputs["Distance"]}) - - spline_parameter = nw.new_node(Nodes.SplineParameter) - - capture_attribute = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': quadratic_bezier_1, 2: spline_parameter.outputs["Factor"]}) - - curve_to_mesh_2 = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': capture_attribute.outputs["Geometry"]}) - - less_than = nw.new_node(Nodes.Compare, - input_kwargs={0: capture_attribute.outputs[2], 1: 0.65}, - attrs={'operation': 'LESS_THAN'}) - - greater_than = nw.new_node(Nodes.Compare, - input_kwargs={0: capture_attribute.outputs[2], 1: 0.84}) - - op_or = nw.new_node(Nodes.BooleanMath, - input_kwargs={0: less_than, 1: greater_than}, - attrs={'operation': 'OR'}) - - delete_geometry = nw.new_node(Nodes.DeleteGeometry, - input_kwargs={'Geometry': curve_to_mesh_2, 'Selection': op_or}) - - geometry_proximity_1 = nw.new_node(Nodes.Proximity, - input_kwargs={'Target': delete_geometry}, - attrs={'target_element': 'EDGES'}) - - store_named_attribute_2 = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': store_named_attribute, 'Name': 'stripes coordinate', 'Value': geometry_proximity_1.outputs["Distance"]}) - - position = nw.new_node(Nodes.InputPosition) - - store_named_attribute_1 = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': store_named_attribute_2, 'Name': 'pos', 'Value': position}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - set_material = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': store_named_attribute_1, 'Material': surface.shaderfunc_to_material(shader_wing_shader)}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_material}) - -def shader_wing_shader(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - attribute_2 = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'stripes coordinate'}) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': attribute_2.outputs["Fac"], 1: 0.04, 2: 0.54}) - - attribute_1 = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'pos'}) - - vector_rotate = nw.new_node(Nodes.VectorRotate, - input_kwargs={'Vector': attribute_1.outputs["Vector"], 'Angle': 0.1047}) - - value = nw.new_node(Nodes.Value) - value.outputs[0].default_value = 0.08 - - group = nw.new_node(nodegroup_add_noise().name, - input_kwargs={'Vector': vector_rotate, 'amount': value}) - - voronoi_texture_2 = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': group, 'Scale': 12.0, 'Randomness': 0.7}, - attrs={'voronoi_dimensions': '2D', 'feature': 'DISTANCE_TO_EDGE'}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: voronoi_texture_2.outputs["Distance"], 1: 2.34}, - attrs={'operation': 'MULTIPLY'}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': group}) - - voronoi_texture = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'W': separate_xyz.outputs["Y"], 'Scale': 14.96, 'Randomness': 0.5}, - attrs={'voronoi_dimensions': '1D', 'feature': 'DISTANCE_TO_EDGE'}) - - value_2 = nw.new_node(Nodes.Value) - value_2.outputs[0].default_value = -0.18 - - less_than = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz.outputs["Y"], 1: value_2}, - attrs={'operation': 'LESS_THAN'}) - - maximum = nw.new_node(Nodes.Math, - input_kwargs={0: voronoi_texture.outputs["Distance"], 1: less_than}, - attrs={'operation': 'MAXIMUM'}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: maximum, 1: 0.56}, - attrs={'operation': 'MULTIPLY'}) - - vector_rotate_1 = 
nw.new_node(Nodes.VectorRotate, - input_kwargs={'Vector': attribute_1.outputs["Vector"], 'Angle': 0.2485}) - - value_1 = nw.new_node(Nodes.Value) - value_1.outputs[0].default_value = 0.08 - - group_1 = nw.new_node(nodegroup_add_noise().name, - input_kwargs={'Vector': vector_rotate_1, 'amount': value_1}) - - separate_xyz_1 = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': group_1}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz.outputs["Y"], 1: value_2}, - attrs={'operation': 'SUBTRACT'}) - - multiply_2 = nw.new_node(Nodes.Math, - input_kwargs={0: subtract, 1: -0.74}, - attrs={'operation': 'MULTIPLY'}) - - power = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_2, 1: 2.22}, - attrs={'operation': 'POWER'}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_1.outputs["Y"], 1: power}) - - voronoi_texture_1 = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'W': add, 'Scale': 10.02}, - attrs={'voronoi_dimensions': '1D', 'feature': 'DISTANCE_TO_EDGE'}) - - greater_than = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz.outputs["Y"], 1: value_2}, - attrs={'operation': 'GREATER_THAN'}) - - maximum_1 = nw.new_node(Nodes.Math, - input_kwargs={0: voronoi_texture_1.outputs["Distance"], 1: greater_than}, - attrs={'operation': 'MAXIMUM'}) - - less_than_1 = nw.new_node(Nodes.Math, - input_kwargs={0: add, 1: -0.48}, - attrs={'operation': 'LESS_THAN'}) - - maximum_2 = nw.new_node(Nodes.Math, - input_kwargs={0: maximum_1, 1: less_than_1}, - attrs={'operation': 'MAXIMUM'}) - - multiply_3 = nw.new_node(Nodes.Math, - input_kwargs={0: maximum_2, 1: 3.0}, - attrs={'operation': 'MULTIPLY'}) - - minimum = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_1, 1: multiply_3}, - attrs={'operation': 'MINIMUM'}) - - minimum_1 = nw.new_node(Nodes.Math, - input_kwargs={0: multiply, 1: minimum}, - attrs={'operation': 'MINIMUM'}) - - attribute = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'distance to edge'}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': attribute.outputs["Color"], 3: 0.1, 4: 0.0}) - - maximum_3 = nw.new_node(Nodes.Math, - input_kwargs={0: minimum_1, 1: map_range.outputs["Result"]}, - attrs={'operation': 'MAXIMUM'}) - - minimum_2 = nw.new_node(Nodes.Math, - input_kwargs={0: map_range_1.outputs["Result"], 1: maximum_3}, - attrs={'operation': 'MINIMUM'}) - - colorramp = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': minimum_2}) - colorramp.color_ramp.elements[0].position = 0.0 - colorramp.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) - colorramp.color_ramp.elements[1].position = 0.1136 - colorramp.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - - reroute = nw.new_node(Nodes.Reroute, - input_kwargs={'Input': colorramp.outputs["Color"]}) - - transparent_bsdf_1 = nw.new_node(Nodes.TransparentBSDF, - input_kwargs={'Color': reroute}) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': reroute}) - - mix_shader = nw.new_node(Nodes.MixShader, - input_kwargs={'Fac': 0.1, 1: transparent_bsdf_1, 2: principled_bsdf}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': mix_shader}) \ No newline at end of file diff --git a/infinigen/assets/creatures/insects/utils/geom_utils.py b/infinigen/assets/creatures/insects/utils/geom_utils.py deleted file mode 100644 index 552424dc8..000000000 --- a/infinigen/assets/creatures/insects/utils/geom_utils.py +++ /dev/null @@ -1,838 +0,0 @@ -# Copyright (c) Princeton University. 
-# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Yiming Zuo - - -import bpy -import mathutils -from numpy.random import uniform, normal, randint - -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface - -from .shader_utils import nodegroup_add_noise, nodegroup_color_noise - -@node_utils.to_nodegroup('nodegroup_symmetric_clone', singleton=False, type='GeometryNodeTree') -def nodegroup_symmetric_clone(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketVectorXYZ', 'Scale', (1.0, -1.0, 1.0))]) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 'Scale': group_input.outputs["Scale"]}) - - flip_faces = nw.new_node(Nodes.FlipFaces, - input_kwargs={'Mesh': transform}) - - join_geometry_2 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [group_input.outputs["Geometry"], flip_faces]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Both': join_geometry_2, 'Orig': group_input.outputs["Geometry"], 'Inverted': flip_faces}) - -@node_utils.to_nodegroup('nodegroup_add_hair', singleton=False, type='GeometryNodeTree') -def nodegroup_add_hair(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Mesh', None), - ('NodeSocketGeometry', 'Hair', None), - ('NodeSocketFloat', 'Density', 100.0), - ('NodeSocketVector', 'rot mean', (1.18, 0.0, 0.0)), - ('NodeSocketFloat', 'scale mean', 0.05)]) - - distribute_points_on_faces = nw.new_node(Nodes.DistributePointsOnFaces, - input_kwargs={'Mesh': group_input.outputs["Mesh"], 'Density': group_input.outputs["Density"]}) - - randomrotationscale = nw.new_node(nodegroup_random_rotation_scale().name, - input_kwargs={'random seed': -2.4, 'rot mean': group_input.outputs["rot mean"], 'scale mean': group_input.outputs["scale mean"]}) - - instanceonpoints = nw.new_node(nodegroup_instance_on_points().name, - input_kwargs={'rotation base': distribute_points_on_faces.outputs["Rotation"], 'rotation delta': randomrotationscale.outputs["Vector"], 'scale': randomrotationscale.outputs["Value"], 'Points': distribute_points_on_faces.outputs["Points"], 'Instance': group_input.outputs["Hair"]}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [instanceonpoints, group_input.outputs["Mesh"]]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Instances': join_geometry}) - -@node_utils.to_nodegroup('nodegroup_attach_part', singleton=False, type='GeometryNodeTree') -def nodegroup_attach_part(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Skin Mesh', None), - ('NodeSocketGeometry', 'Skeleton Curve', None), - ('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketFloatFactor', 'Length Fac', 0.0), - ('NodeSocketVectorEuler', 'Ray Rot', (0.0, 0.0, 0.0)), - ('NodeSocketFloat', 'Rad', 0.0), - ('NodeSocketVector', 'Part Rot', (0.0, 0.0, 0.0)), - ('NodeSocketBool', 'Do Normal Rot', False), - ('NodeSocketBool', 'Do Tangent Rot', False)]) - - part_surface 
= nw.new_node(nodegroup_part_surface().name, - input_kwargs={'Skeleton Curve': group_input.outputs["Skeleton Curve"], 'Skin Mesh': group_input.outputs["Skin Mesh"], 'Length Fac': group_input.outputs["Length Fac"], 'Ray Rot': group_input.outputs["Ray Rot"], 'Rad': group_input.outputs["Rad"]}) - - deg2rad = nw.new_node(nodegroup_deg2_rad().name, - input_kwargs={'Deg': group_input.outputs["Part Rot"]}) - - raycast_rotation = nw.new_node(nodegroup_raycast_rotation().name, - input_kwargs={'Rotation': deg2rad, 'Hit Normal': part_surface.outputs["Hit Normal"], 'Curve Tangent': part_surface.outputs["Tangent"], 'Do Normal Rot': group_input.outputs["Do Normal Rot"], 'Do Tangent Rot': group_input.outputs["Do Tangent Rot"]}) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 'Translation': part_surface.outputs["Position"], 'Rotation': raycast_rotation}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': transform, 'Position': part_surface.outputs["Position"], 'Rotation': raycast_rotation}) - - -@node_utils.to_nodegroup('nodegroup_random_rotation_scale', singleton=False, type='GeometryNodeTree') -def nodegroup_random_rotation_scale(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'random seed', 0.0), - ('NodeSocketFloat', 'noise scale', 10.0), - ('NodeSocketVector', 'rot mean', (0.0, 0.0, 0.0)), - ('NodeSocketFloat', 'rot std z', 1.0), - ('NodeSocketFloat', 'scale mean', 0.35), - ('NodeSocketFloat', 'scale std', 0.1)]) - - position_3 = nw.new_node(Nodes.InputPosition) - - add = nw.new_node(Nodes.VectorMath, - input_kwargs={0: position_3, 1: group_input.outputs["random seed"]}) - - noise_texture_2 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': add.outputs["Vector"], 'Scale': group_input.outputs["noise scale"]}) - - value_2 = nw.new_node(Nodes.Value) - value_2.outputs[0].default_value = 0.5 - - subtract = nw.new_node(Nodes.VectorMath, - input_kwargs={0: noise_texture_2.outputs["Color"], 1: value_2}, - attrs={'operation': 'SUBTRACT'}) - - separate_xyz_2 = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': subtract.outputs["Vector"]}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_2.outputs["X"], 1: group_input.outputs["rot std z"]}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'Z': multiply}) - - add_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["rot mean"], 1: combine_xyz}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_2.outputs["Y"], 1: group_input.outputs["scale std"]}, - attrs={'operation': 'MULTIPLY'}) - - add_2 = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_1, 1: group_input.outputs["scale mean"]}, - attrs={'use_clamp': True}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Vector': add_1.outputs["Vector"], 'Value': add_2}) - -@node_utils.to_nodegroup('nodegroup_instance_on_points', singleton=False, type='GeometryNodeTree') -def nodegroup_instance_on_points(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVectorEuler', 'rotation base', (0.0, 0.0, 0.0)), - ('NodeSocketVectorEuler', 'rotation delta', (-1.5708, 0.0, 0.0)), - ('NodeSocketVectorTranslation', 'translation', (0.0, -0.5, 0.0)), - ('NodeSocketFloat', 'scale', 0.0), - 
('NodeSocketGeometry', 'Points', None), - ('NodeSocketGeometry', 'Instance', None)]) - - rotate_euler_1 = nw.new_node(Nodes.RotateEuler, - input_kwargs={'Rotation': group_input.outputs["rotation base"], 'Rotate By': group_input.outputs["rotation delta"]}, - attrs={'space': 'LOCAL'}) - - instance_on_points_1 = nw.new_node(Nodes.InstanceOnPoints, - input_kwargs={'Points': group_input.outputs["Points"], 'Instance': group_input.outputs["Instance"], 'Rotation': rotate_euler_1, 'Scale': group_input.outputs["scale"]}) - - translate_instances = nw.new_node(Nodes.TranslateInstances, - input_kwargs={'Instances': instance_on_points_1, 'Translation': group_input.outputs["translation"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Instances': translate_instances}) - -def shader_dragonfly_body_shader(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - attribute_1 = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'pos'}) - - separate_xyz_1 = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': attribute_1.outputs["Vector"]}) - - absolute = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_1.outputs["X"]}, - attrs={'operation': 'ABSOLUTE'}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_1.outputs["Z"], 1: 3.0}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': absolute, 'Y': separate_xyz_1.outputs["Y"], 'Z': multiply}) - - attribute_2 = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'body seed'}) - - musgrave_texture = nw.new_node(Nodes.MusgraveTexture, - input_kwargs={'Vector': combine_xyz_1, 'W': attribute_2.outputs["Fac"], 'Scale': 0.5, 'Dimension': 1.0, 'Lacunarity': 1.0}, - attrs={'musgrave_dimensions': '4D'}) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': musgrave_texture, 1: -0.26, 2: 0.06}) - - attribute = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'spline parameter'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': attribute.outputs["Fac"]}) - - group = nw.new_node(nodegroup_add_noise().name, - input_kwargs={'Vector': combine_xyz, 'Scale': 0.5, 'amount': (0.16, 0.26, 0.0), 'Noise Eval Position': combine_xyz_1}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': group}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': separate_xyz.outputs["X"], 'Y': attribute_2.outputs["Fac"]}) - - voronoi_texture = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': combine_xyz_2, 'Scale': 10.0}, - attrs={'voronoi_dimensions': '2D'}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': voronoi_texture.outputs["Distance"], 1: 0.14, 2: 0.82}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: map_range_1.outputs["Result"], 1: map_range.outputs["Result"]}) - - colorramp = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': add}) - colorramp.color_ramp.elements[0].position = 0.7386 - colorramp.color_ramp.elements[0].color = (0.4397, 0.5841, 0.011, 1.0) - colorramp.color_ramp.elements[1].position = 1.0 - colorramp.color_ramp.elements[1].color = (0.008, 0.0065, 0.0116, 1.0) - - group_1 = nw.new_node(nodegroup_color_noise().name, - input_kwargs={'Color': colorramp.outputs["Color"], 'Value To Min': 0.4}) - - principled_bsdf_1 = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': group_1, 'Metallic': 0.2182, 'Specular': 0.8318, 'Roughness': 0.1545}) - - material_output = nw.new_node(Nodes.MaterialOutput, - 
input_kwargs={'Surface': principled_bsdf_1}) - - -@node_utils.to_nodegroup('nodegroup_surface_bump', singleton=False, type='GeometryNodeTree') -def nodegroup_surface_bump(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketFloat', 'Displacement', 0.02), - ('NodeSocketFloat', 'Scale', 50.0), - ('NodeSocketFloat', 'seed', 0.0)]) - - normal = nw.new_node(Nodes.InputNormal) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'W': group_input.outputs["seed"], 'Scale': group_input.outputs["Scale"]}, - attrs={'noise_dimensions': '4D'}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: noise_texture.outputs["Fac"]}, - attrs={'operation': 'SUBTRACT'}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: subtract, 1: group_input.outputs["Displacement"]}, - attrs={'operation': 'MULTIPLY'}) - - multiply_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: normal, 1: multiply}, - attrs={'operation': 'MULTIPLY'}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 'Offset': multiply_1.outputs["Vector"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_position}) - -@node_utils.to_nodegroup('nodegroup_circle_cross_section', singleton=False, type='GeometryNodeTree') -def nodegroup_circle_cross_section(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'random seed', 0.0), - ('NodeSocketFloat', 'noise scale', 0.5), - ('NodeSocketFloat', 'noise amount', 0.0), - ('NodeSocketInt', 'Resolution', 256), - ('NodeSocketFloat', 'radius', 1.0), - ('NodeSocketBool', 'symmetric noise', False)]) - - curve_circle = nw.new_node(Nodes.CurveCircle, - input_kwargs={'Resolution': group_input.outputs["Resolution"]}) - - normal = nw.new_node(Nodes.InputNormal) - - position = nw.new_node(Nodes.InputPosition) - - separate_xyz_1 = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': position}) - - absolute = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_1.outputs["Y"]}, - attrs={'operation': 'ABSOLUTE'}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': separate_xyz_1.outputs["X"], 'Y': absolute, 'Z': separate_xyz_1.outputs["Z"]}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': combine_xyz_1, 'W': group_input.outputs["random seed"], 'Scale': group_input.outputs["noise scale"]}, - attrs={'noise_dimensions': '4D'}) - - subtract = nw.new_node(Nodes.VectorMath, - input_kwargs={0: noise_texture.outputs["Color"], 1: (0.5, 0.5, 0.5)}, - attrs={'operation': 'SUBTRACT'}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': subtract.outputs["Vector"]}) - - absolute_1 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz.outputs["Y"]}, - attrs={'operation': 'ABSOLUTE'}) - - scale = nw.new_node(Nodes.VectorMath, - input_kwargs={0: normal, 'Scale': absolute_1}, - attrs={'operation': 'SCALE'}) - - scale_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: scale.outputs["Vector"], 'Scale': group_input.outputs["noise amount"]}, - attrs={'operation': 'SCALE'}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': curve_circle.outputs["Curve"], 'Offset': scale_1.outputs["Vector"]}) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': 
set_position, 'Scale': group_input.outputs["radius"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': transform}) - -@node_utils.to_nodegroup('nodegroup_deg2_rad', singleton=False, type='GeometryNodeTree') -def nodegroup_deg2_rad(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'Deg', (0.0, 0.0, 0.0))]) - - multiply = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["Deg"], 1: (0.0175, 0.0175, 0.0175)}, - attrs={'operation': 'MULTIPLY'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Rad': multiply.outputs["Vector"]}) - -@node_utils.to_nodegroup('nodegroup_raycast_rotation', singleton=False, type='GeometryNodeTree') -def nodegroup_raycast_rotation(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVectorEuler', 'Rotation', (0.0, 0.0, 0.0)), - ('NodeSocketVector', 'Hit Normal', (0.0, 0.0, 1.0)), - ('NodeSocketVector', 'Curve Tangent', (0.0, 0.0, 1.0)), - ('NodeSocketBool', 'Do Normal Rot', False), - ('NodeSocketBool', 'Do Tangent Rot', False)]) - - align_euler_to_vector = nw.new_node(Nodes.AlignEulerToVector, - input_kwargs={'Vector': group_input.outputs["Hit Normal"]}) - - rotate_euler = nw.new_node(Nodes.RotateEuler, - input_kwargs={'Rotation': group_input.outputs["Rotation"], 'Rotate By': align_euler_to_vector}) - - if_normal_rot = nw.new_node(Nodes.Switch, - input_kwargs={0: group_input.outputs["Do Normal Rot"], 8: group_input.outputs["Rotation"], 9: rotate_euler}, - label='if_normal_rot', - attrs={'input_type': 'VECTOR'}) - - align_euler_to_vector_1 = nw.new_node(Nodes.AlignEulerToVector, - input_kwargs={'Rotation': group_input.outputs["Rotation"], 'Vector': group_input.outputs["Curve Tangent"]}) - - rotate_euler_1 = nw.new_node(Nodes.RotateEuler, - input_kwargs={'Rotation': align_euler_to_vector_1, 'Rotate By': group_input.outputs["Rotation"]}, - attrs={'space': 'LOCAL'}) - - if_tangent_rot = nw.new_node(Nodes.Switch, - input_kwargs={0: group_input.outputs["Do Tangent Rot"], 8: if_normal_rot.outputs[3], 9: rotate_euler_1}, - label='if_tangent_rot', - attrs={'input_type': 'VECTOR'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Output': if_tangent_rot.outputs[3]}) - -@node_utils.to_nodegroup('nodegroup_part_surface', singleton=False, type='GeometryNodeTree') -def nodegroup_part_surface(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Skeleton Curve', None), - ('NodeSocketGeometry', 'Skin Mesh', None), - ('NodeSocketFloatFactor', 'Length Fac', 0.0), - ('NodeSocketVectorEuler', 'Ray Rot', (0.0, 0.0, 0.0)), - ('NodeSocketFloat', 'Rad', 0.0)]) - - sample_curve = nw.new_node(Nodes.SampleCurve, - input_kwargs={'Curve': group_input.outputs["Skeleton Curve"], 'Factor': group_input.outputs["Length Fac"]}, - attrs={'mode': 'FACTOR'}) - - vector_rotate = nw.new_node(Nodes.VectorRotate, - input_kwargs={'Vector': sample_curve.outputs["Tangent"], 'Rotation': group_input.outputs["Ray Rot"]}, - attrs={'rotation_type': 'EULER_XYZ'}) - - raycast = nw.new_node(Nodes.Raycast, - input_kwargs={'Target Geometry': group_input.outputs["Skin Mesh"], 'Source Position': sample_curve.outputs["Position"], 'Ray Direction': vector_rotate, 'Ray Length': 5.0}) - - lerp = 
nw.new_node(Nodes.MapRange, - input_kwargs={'Vector': group_input.outputs["Rad"], 9: sample_curve.outputs["Position"], 10: raycast.outputs["Hit Position"]}, - label='lerp', - attrs={'data_type': 'FLOAT_VECTOR', 'clamp': False}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Position': lerp.outputs["Vector"], 'Hit Normal': raycast.outputs["Hit Normal"], 'Tangent': sample_curve.outputs["Tangent"], 'Skeleton Pos': sample_curve.outputs["Position"]}) - -@node_utils.to_nodegroup('nodegroup_shape_quadratic', singleton=False, type='GeometryNodeTree') -def nodegroup_shape_quadratic(nw: NodeWrangler, radius_control_points=[(0.0, 0.5), (1.0, 0.5)]): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Profile Curve', None), - ('NodeSocketFloat', 'random seed tilt', 0.5), - ('NodeSocketFloat', 'noise scale tilt', 0.5), - ('NodeSocketFloat', 'noise amount tilt', 5.0), - ('NodeSocketFloat', 'random seed pos', 0.0), - ('NodeSocketFloat', 'noise scale pos', 0.0), - ('NodeSocketFloat', 'noise amount pos', 0.0), - ('NodeSocketIntUnsigned', 'Resolution', 256), - ('NodeSocketVectorTranslation', 'Start', (0.0, 0.15, -1.5)), - ('NodeSocketVectorTranslation', 'Middle', (0.0, 0.0, 0.0)), - ('NodeSocketVectorTranslation', 'End', (0.0, 0.0, 1.5))]) - - quadratic_bezier = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Resolution': group_input.outputs["Resolution"], 'Start': group_input.outputs["Start"], 'Middle': group_input.outputs["Middle"], 'End': group_input.outputs["End"]}) - - spline_parameter_2 = nw.new_node(Nodes.SplineParameter) - - capture_attribute = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': quadratic_bezier, 2: spline_parameter_2.outputs["Factor"]}) - - curve_tangent = nw.new_node(Nodes.CurveTangent) - - capture_attribute_1 = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': capture_attribute.outputs["Geometry"], 1: curve_tangent}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - position = nw.new_node(Nodes.InputPosition) - - add = nw.new_node(Nodes.VectorMath, - input_kwargs={0: position, 1: group_input.outputs["random seed pos"]}) - - noise_texture_3 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': add.outputs["Vector"], 'Scale': group_input.outputs["noise scale pos"]}) - - value_1 = nw.new_node(Nodes.Value) - value_1.outputs[0].default_value = 0.5 - - subtract = nw.new_node(Nodes.VectorMath, - input_kwargs={0: noise_texture_3.outputs["Color"], 1: value_1}, - attrs={'operation': 'SUBTRACT'}) - - scale = nw.new_node(Nodes.VectorMath, - input_kwargs={0: subtract.outputs["Vector"], 'Scale': spline_parameter_2.outputs["Factor"]}, - attrs={'operation': 'SCALE'}) - - scale_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: scale.outputs["Vector"], 'Scale': group_input.outputs["noise amount pos"]}, - attrs={'operation': 'SCALE'}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': capture_attribute_1.outputs["Geometry"], 'Offset': scale_1.outputs["Vector"]}) - - spline_parameter = nw.new_node(Nodes.SplineParameter) - - add_1 = nw.new_node(Nodes.Math, - input_kwargs={0: spline_parameter.outputs["Factor"], 1: group_input.outputs["random seed tilt"]}) - - noise_texture_1 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'W': add_1, 'Scale': group_input.outputs["noise scale tilt"]}, - attrs={'noise_dimensions': '1D'}) - - subtract_1 = nw.new_node(Nodes.Math, - input_kwargs={0: noise_texture_1.outputs["Fac"]}, - 
attrs={'operation': 'SUBTRACT'}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: subtract_1, 1: group_input.outputs["noise amount tilt"]}, - attrs={'operation': 'MULTIPLY'}) - - set_curve_tilt = nw.new_node(Nodes.SetCurveTilt, - input_kwargs={'Curve': set_position, 'Tilt': multiply}) - - spline_parameter_1 = nw.new_node(Nodes.SplineParameter) - - float_curve = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': spline_parameter_1.outputs["Factor"]}) - node_utils.assign_curve(float_curve.mapping.curves[0], radius_control_points) - - set_curve_radius = nw.new_node(Nodes.SetCurveRadius, - input_kwargs={'Curve': set_curve_tilt, 'Radius': float_curve}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': set_curve_radius, 'Profile Curve': group_input.outputs["Profile Curve"], 'Fill Caps': True}) - - curve_to_points = nw.new_node(Nodes.CurveToPoints, - input_kwargs={'Curve': set_position}, - attrs={'mode': 'EVALUATED'}) - - geometry_proximity = nw.new_node(Nodes.Proximity, - input_kwargs={'Target': curve_to_points.outputs["Points"]}, - attrs={'target_element': 'POINTS'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Mesh': curve_to_mesh, 'spline parameter': capture_attribute.outputs[2], 'spline tangent': capture_attribute_1.outputs["Attribute"], 'radius to center': geometry_proximity.outputs["Distance"]}) - -@node_utils.to_nodegroup('nodegroup_polar_to_cart', singleton=False, type='GeometryNodeTree') -def nodegroup_polar_to_cart(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Angle', 0.5), - ('NodeSocketFloat', 'Length', 0.0), - ('NodeSocketVector', 'Origin', (0.0, 0.0, 0.0))]) - - cosine = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Angle"]}, - attrs={'operation': 'COSINE'}) - - sine = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Angle"]}, - attrs={'operation': 'SINE'}) - - construct_unit_vector = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': cosine, 'Z': sine}, - label='Construct Unit Vector') - - offset_polar = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["Length"], 1: construct_unit_vector, 2: group_input.outputs["Origin"]}, - label='Offset Polar', - attrs={'operation': 'MULTIPLY_ADD'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Vector': offset_polar.outputs["Vector"]}) - -@node_utils.to_nodegroup('nodegroup_switch4', singleton=False, type='GeometryNodeTree') -def nodegroup_switch4(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketInt', 'Arg', 0), - ('NodeSocketVector', 'Arg == 0', (0.0, 0.0, 0.0)), - ('NodeSocketVector', 'Arg == 1', (0.0, 0.0, 0.0)), - ('NodeSocketVector', 'Arg == 2', (0.0, 0.0, 0.0)), - ('NodeSocketVector', 'Arg == 3', (0.0, 0.0, 0.0))]) - - greater_equal = nw.new_node(Nodes.Compare, - input_kwargs={2: group_input.outputs["Arg"], 3: 2}, - attrs={'data_type': 'INT', 'operation': 'GREATER_EQUAL'}) - - greater_equal_1 = nw.new_node(Nodes.Compare, - input_kwargs={2: group_input.outputs["Arg"], 3: 1}, - attrs={'data_type': 'INT', 'operation': 'GREATER_EQUAL'}) - - switch_1 = nw.new_node(Nodes.Switch, - input_kwargs={0: greater_equal_1, 8: group_input.outputs["Arg == 0"], 9: group_input.outputs["Arg == 1"]}, - attrs={'input_type': 'VECTOR'}) - - greater_equal_2 = nw.new_node(Nodes.Compare, - input_kwargs={2: 
group_input.outputs["Arg"], 3: 3}, - attrs={'data_type': 'INT', 'operation': 'GREATER_EQUAL'}) - - switch_2 = nw.new_node(Nodes.Switch, - input_kwargs={0: greater_equal_2, 8: group_input.outputs["Arg == 2"], 9: group_input.outputs["Arg == 3"]}, - attrs={'input_type': 'VECTOR'}) - - switch = nw.new_node(Nodes.Switch, - input_kwargs={0: greater_equal, 8: switch_1.outputs[3], 9: switch_2.outputs[3]}, - attrs={'input_type': 'VECTOR'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Output': switch.outputs[3]}) - -@node_utils.to_nodegroup('nodegroup_smooth_taper', singleton=False, type='GeometryNodeTree') -def nodegroup_smooth_taper(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - spline_parameter = nw.new_node(Nodes.SplineParameter) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: spline_parameter.outputs["Factor"], 1: 3.1416}, - attrs={'operation': 'MULTIPLY'}) - - sine = nw.new_node(Nodes.Math, - input_kwargs={0: multiply}, - attrs={'operation': 'SINE'}) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'start_rad', 0.29), - ('NodeSocketFloat', 'end_rad', 0.0), - ('NodeSocketFloat', 'fullness', 2.5)]) - - divide = nw.new_node(Nodes.Math, - input_kwargs={0: 1.0, 1: group_input.outputs["fullness"]}, - attrs={'operation': 'DIVIDE'}) - - power = nw.new_node(Nodes.Math, - input_kwargs={0: sine, 1: divide}, - attrs={'operation': 'POWER'}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': spline_parameter.outputs["Factor"], 3: group_input.outputs["start_rad"], 4: group_input.outputs["end_rad"]}, - attrs={'clamp': False}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: power, 1: map_range.outputs["Result"]}, - attrs={'operation': 'MULTIPLY'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Value': multiply_1}) - -@node_utils.to_nodegroup('nodegroup_aspect_to_dim', singleton=False, type='GeometryNodeTree') -def nodegroup_aspect_to_dim(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Aspect Ratio', 1.0)]) - - greater_than = nw.new_node(Nodes.Compare, - input_kwargs={0: group_input.outputs["Aspect Ratio"], 1: 1.0}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': group_input.outputs["Aspect Ratio"], 'Y': 1.0}) - - divide = nw.new_node(Nodes.Math, - input_kwargs={0: 1.0, 1: group_input.outputs["Aspect Ratio"]}, - attrs={'operation': 'DIVIDE'}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': 1.0, 'Y': divide}) - - switch = nw.new_node(Nodes.Switch, - input_kwargs={0: greater_than, 8: combine_xyz_1, 9: combine_xyz_2}, - attrs={'input_type': 'VECTOR'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'XY Scale': switch.outputs[3]}) - -@node_utils.to_nodegroup('nodegroup_warped_circle_curve', singleton=False, type='GeometryNodeTree') -def nodegroup_warped_circle_curve(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'Position', (0.0, 0.0, 0.0)), - ('NodeSocketInt', 'Vertices', 32)]) - - mesh_circle = nw.new_node(Nodes.MeshCircle, - input_kwargs={'Vertices': group_input.outputs["Vertices"]}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': mesh_circle, 'Position': group_input.outputs["Position"]}) - - mesh_to_curve = 
nw.new_node(Nodes.MeshToCurve, - input_kwargs={'Mesh': set_position}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Curve': mesh_to_curve}) - -@node_utils.to_nodegroup('nodegroup_vector_sum', singleton=False, type='GeometryNodeTree') -def nodegroup_vector_sum(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'Vector', (0.0, 0.0, 0.0))]) - - separate_xyz_1 = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': group_input.outputs["Vector"]}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_1.outputs["X"], 1: separate_xyz_1.outputs["Y"]}) - - add_1 = nw.new_node(Nodes.Math, - input_kwargs={0: add, 1: separate_xyz_1.outputs["Z"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Sum': add_1}) - -@node_utils.to_nodegroup('nodegroup_polar_bezier', singleton=False, type='GeometryNodeTree') -def nodegroup_polar_bezier(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketIntUnsigned', 'Resolution', 32), - ('NodeSocketVector', 'Origin', (0.0, 0.0, 0.0)), - ('NodeSocketVector', 'angles_deg', (0.0, 0.0, 0.0)), - ('NodeSocketVector', 'Seg Lengths', (0.3, 0.3, 0.3)), - ('NodeSocketBool', 'Do Bezier', True)]) - - mesh_line = nw.new_node(Nodes.MeshLine, - input_kwargs={'Count': 4}) - - index = nw.new_node(Nodes.Index) - - deg2_rad = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["angles_deg"], 'Scale': 0.0175}, - label='Deg2Rad', - attrs={'operation': 'SCALE'}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': deg2_rad.outputs["Vector"]}) - - reroute = nw.new_node(Nodes.Reroute, - input_kwargs={'Input': separate_xyz.outputs["X"]}) - - separate_xyz_1 = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': group_input.outputs["Seg Lengths"]}) - - polartocart = nw.new_node(nodegroup_polar_to_cart().name, - input_kwargs={'Angle': reroute, 'Length': separate_xyz_1.outputs["X"], 'Origin': group_input.outputs["Origin"]}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: reroute, 1: separate_xyz.outputs["Y"]}) - - polartocart_1 = nw.new_node(nodegroup_polar_to_cart().name, - input_kwargs={'Angle': add, 'Length': separate_xyz_1.outputs["Y"], 'Origin': polartocart}) - - add_1 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz.outputs["Z"], 1: add}) - - polartocart_2 = nw.new_node(nodegroup_polar_to_cart().name, - input_kwargs={'Angle': add_1, 'Length': separate_xyz_1.outputs["Z"], 'Origin': polartocart_1}) - - switch4 = nw.new_node(nodegroup_switch4().name, - input_kwargs={'Arg': index, 'Arg == 0': group_input.outputs["Origin"], 'Arg == 1': polartocart, 'Arg == 2': polartocart_1, 'Arg == 3': polartocart_2}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': mesh_line, 'Position': switch4}) - - mesh_to_curve = nw.new_node(Nodes.MeshToCurve, - input_kwargs={'Mesh': set_position}) - - subdivide_curve_1 = nw.new_node(Nodes.SubdivideCurve, - input_kwargs={'Curve': mesh_to_curve, 'Cuts': group_input.outputs["Resolution"]}) - - integer = nw.new_node(Nodes.Integer, - attrs={'integer': 2}) - integer.integer = 2 - - bezier_segment = nw.new_node(Nodes.CurveBezierSegment, - input_kwargs={'Resolution': integer, 'Start': group_input.outputs["Origin"], 'Start Handle': polartocart, 'End Handle': polartocart_1, 'End': polartocart_2}) - - divide = nw.new_node(Nodes.Math, 
- input_kwargs={0: group_input.outputs["Resolution"], 1: integer}, - attrs={'operation': 'DIVIDE'}) - - subdivide_curve = nw.new_node(Nodes.SubdivideCurve, - input_kwargs={'Curve': bezier_segment, 'Cuts': divide}) - - switch = nw.new_node(Nodes.Switch, - input_kwargs={1: group_input.outputs["Do Bezier"], 14: subdivide_curve_1, 15: subdivide_curve}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Curve': switch.outputs[6], 'Endpoint': polartocart_2}) - -@node_utils.to_nodegroup('nodegroup_profile_part', singleton=False, type='GeometryNodeTree') -def nodegroup_profile_part(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Skeleton Curve', None), - ('NodeSocketGeometry', 'Profile Curve', None), - ('NodeSocketFloatDistance', 'Radius Func', 1.0)]) - - set_curve_radius = nw.new_node(Nodes.SetCurveRadius, - input_kwargs={'Curve': group_input.outputs["Skeleton Curve"], 'Radius': group_input.outputs["Radius Func"]}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': set_curve_radius, 'Profile Curve': group_input.outputs["Profile Curve"], 'Fill Caps': True}) - - set_shade_smooth = nw.new_node(Nodes.SetShadeSmooth, - input_kwargs={'Geometry': curve_to_mesh, 'Shade Smooth': False}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_shade_smooth}) - - -@node_utils.to_nodegroup('nodegroup_simple_tube_v2', singleton=False, type='GeometryNodeTree') -def nodegroup_simple_tube_v2(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'length_rad1_rad2', (1.0, 0.5, 0.3)), - ('NodeSocketVector', 'angles_deg', (0.0, 0.0, 0.0)), - ('NodeSocketVector', 'proportions', (0.3333, 0.3333, 0.3333)), - ('NodeSocketFloat', 'aspect', 1.0), - ('NodeSocketBool', 'do_bezier', True), - ('NodeSocketFloat', 'fullness', 4.0), - ('NodeSocketVector', 'Origin', (0.0, 0.0, 0.0))]) - - vector_sum = nw.new_node(nodegroup_vector_sum().name, - input_kwargs={'Vector': group_input.outputs["proportions"]}) - - divide = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["proportions"], 1: vector_sum}, - attrs={'operation': 'DIVIDE'}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': group_input.outputs["length_rad1_rad2"]}) - - scale = nw.new_node(Nodes.VectorMath, - input_kwargs={0: divide.outputs["Vector"], 'Scale': separate_xyz.outputs["X"]}, - attrs={'operation': 'SCALE'}) - - polarbezier = nw.new_node(nodegroup_polar_bezier().name, - input_kwargs={'Resolution': 25, 'Origin': group_input.outputs["Origin"], 'angles_deg': group_input.outputs["angles_deg"], 'Seg Lengths': scale.outputs["Vector"], 'Do Bezier': group_input.outputs["do_bezier"]}) - - aspect_to_dim = nw.new_node(nodegroup_aspect_to_dim().name, - input_kwargs={'Aspect Ratio': group_input.outputs["aspect"]}) - - position = nw.new_node(Nodes.InputPosition) - - multiply = nw.new_node(Nodes.VectorMath, - input_kwargs={0: aspect_to_dim, 1: position}, - attrs={'operation': 'MULTIPLY'}) - - warped_circle_curve = nw.new_node(nodegroup_warped_circle_curve().name, - input_kwargs={'Position': multiply.outputs["Vector"], 'Vertices': 40}) - - smoothtaper = nw.new_node(nodegroup_smooth_taper().name, - input_kwargs={'start_rad': separate_xyz.outputs["Y"], 'end_rad': separate_xyz.outputs["Z"], 'fullness': group_input.outputs["fullness"]}) - - 
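For reference, the taper and polar-offset node groups above reduce to a few lines of ordinary math. Below is a minimal standalone NumPy sketch of that math (illustrative only, with assumed example values; the real node groups run inside Blender's geometry-node evaluator):

```python
# Standalone sketch of the math behind nodegroup_smooth_taper and
# nodegroup_polar_to_cart above; example inputs are assumptions.
import numpy as np

def smooth_taper(t, start_rad=0.29, end_rad=0.0, fullness=2.5):
    """Radius along the spline factor t in [0, 1]."""
    envelope = np.sin(np.pi * t) ** (1.0 / fullness)      # sine bulge, flattened by 'fullness'
    base = start_rad + (end_rad - start_rad) * t          # unclamped lerp (MapRange with clamp=False)
    return envelope * base

def polar_to_cart(angle, length, origin=(0.0, 0.0, 0.0)):
    """Offset 'origin' by 'length' along a unit vector in the XZ plane (CombineXYZ X=cos, Z=sin)."""
    unit = np.array([np.cos(angle), 0.0, np.sin(angle)])
    return np.asarray(origin, dtype=float) + length * unit

# The polar bezier chains three segments, accumulating the per-segment angles:
angles = np.deg2rad([10.0, -5.0, 15.0])     # stand-in for 'angles_deg'
seg = [0.3, 0.3, 0.3]                       # stand-in for 'Seg Lengths'
p0 = np.zeros(3)
p1 = polar_to_cart(angles[0], seg[0], p0)
p2 = polar_to_cart(angles[0] + angles[1], seg[1], p1)
p3 = polar_to_cart(angles[0] + angles[1] + angles[2], seg[2], p2)   # the 'Endpoint' output

radius_profile = smooth_taper(np.linspace(0, 1, 25))    # one radius per skeleton sample
```

Large `fullness` values push the exponent toward zero, so the envelope stays near 1 and the tube is nearly cylindrical; values near 1 give a pointed spindle.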
profilepart = nw.new_node(nodegroup_profile_part().name, - input_kwargs={'Skeleton Curve': polarbezier.outputs["Curve"], 'Profile Curve': warped_circle_curve, 'Radius Func': smoothtaper}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': profilepart, 'Skeleton Curve': polarbezier.outputs["Curve"], 'Endpoint': polarbezier.outputs["Endpoint"]}) \ No newline at end of file diff --git a/infinigen/assets/creatures/insects/utils/shader_utils.py b/infinigen/assets/creatures/insects/utils/shader_utils.py deleted file mode 100644 index 30b2db7cf..000000000 --- a/infinigen/assets/creatures/insects/utils/shader_utils.py +++ /dev/null @@ -1,93 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Yiming Zuo - - -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface - -def shader_black_w_noise_shader(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group = nw.new_node(nodegroup_color_noise().name, - input_kwargs={'Scale': 10.0, 'Color': (0.0779, 0.0839, 0.0809, 1.0)}) - - principled_bsdf_1 = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': group, 'Metallic': 0.9, 'Specular': 0.5114, 'Roughness': 0.2568}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': principled_bsdf_1}) - -@node_utils.to_nodegroup('nodegroup_add_noise', singleton=False, type='ShaderNodeTree') -def nodegroup_add_noise(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'Vector', (0.0, 0.0, 0.0)), - ('NodeSocketFloat', 'Scale', 10.0), - ('NodeSocketVector', 'amount', (0.1, 0.26, 0.0)), - ('NodeSocketFloat', 'seed', 0.0), - ('NodeSocketVector', 'Noise Eval Position', None)]) - - noise_texture_1 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': group_input.outputs["Noise Eval Position"], 'W': group_input.outputs["seed"], 'Scale': group_input.outputs["Scale"]}, - attrs={'noise_dimensions': '4D'}) - - subtract = nw.new_node(Nodes.VectorMath, - input_kwargs={0: noise_texture_1.outputs["Color"], 1: (0.5, 0.5, 0.5)}, - attrs={'operation': 'SUBTRACT'}) - - multiply = nw.new_node(Nodes.VectorMath, - input_kwargs={0: subtract.outputs["Vector"], 1: group_input.outputs["amount"]}, - attrs={'operation': 'MULTIPLY'}) - - add = nw.new_node(Nodes.VectorMath, - input_kwargs={0: multiply.outputs["Vector"], 1: group_input.outputs["Vector"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Vector': add.outputs["Vector"]}) - -@node_utils.to_nodegroup('nodegroup_color_noise', singleton=False, type='ShaderNodeTree') -def nodegroup_color_noise(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - texture_coordinate = nw.new_node(Nodes.TextureCoord) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Scale', 0.8), - ('NodeSocketColor', 'Color', (0.0147, 0.0156, 0.0152, 1.0)), - ('NodeSocketFloat', 'Hue From Min', 0.4), - ('NodeSocketFloat', 'Hue From Max', 0.7), - ('NodeSocketFloat', 'Hue To Min', 0.48), - ('NodeSocketFloat', 'Hue To Max', 0.55), - ('NodeSocketFloat', 'Value From Min', 
0.4), - ('NodeSocketFloat', 'Value From Max', 0.78), - ('NodeSocketFloat', 'Value To Min', -0.56), - ('NodeSocketFloat', 'Value To Max', 1.0)]) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': texture_coordinate.outputs["Object"], 'Scale': group_input.outputs["Scale"], 'Detail': 10.0, 'Roughness': 0.7}) - - separate_rgb = nw.new_node(Nodes.SeparateColor, - input_kwargs={'Color': noise_texture.outputs["Color"]}, - attrs={'mode': 'HSV'}) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': separate_rgb.outputs["Green"], 1: group_input.outputs["Hue From Min"], 2: group_input.outputs["Hue From Max"], 3: group_input.outputs["Hue To Min"], 4: group_input.outputs["Hue To Max"]}, - attrs={'interpolation_type': 'SMOOTHSTEP'}) - - map_range_2 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': separate_rgb.outputs["Blue"], 1: group_input.outputs["Value From Min"], 2: group_input.outputs["Value From Max"], 3: group_input.outputs["Value To Min"], 4: group_input.outputs["Value To Max"]}, - attrs={'interpolation_type': 'SMOOTHSTEP'}) - - hue_saturation_value = nw.new_node('ShaderNodeHueSaturation', - input_kwargs={'Hue': map_range_1.outputs["Result"], 'Value': map_range_2.outputs["Result"], 'Color': group_input.outputs["Color"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Color': hue_saturation_value}) \ No newline at end of file diff --git a/infinigen/assets/creatures/jellyfish.py b/infinigen/assets/creatures/jellyfish.py deleted file mode 100644 index a9db482e5..000000000 --- a/infinigen/assets/creatures/jellyfish.py +++ /dev/null @@ -1,322 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
- -# Authors: Lingjie Mei - - - -import colorsys - -import numpy as np -import bpy -from mathutils import Vector -from numpy.random import uniform -from scipy.interpolate import interp1d - -from infinigen.assets.creatures.util.animation.driver_repeated import repeated_driver -from infinigen.assets.utils.mesh import polygon_angles -from infinigen.assets.utils.nodegroup import geo_base_selection -from infinigen.assets.utils.object import data2mesh, join_objects, mesh2obj, new_circle, new_empty, \ - new_icosphere, origin2highest -from infinigen.assets.utils.decorate import geo_extension, read_co, remove_vertices, \ - subsurface2face_size, write_attribute, write_co -from infinigen.assets.utils.misc import assign_material -from infinigen.core.util.color import hsv2rgba -from infinigen.core.util.random import log_uniform -from infinigen.core.nodes.node_info import Nodes -from infinigen.core.nodes.node_wrangler import NodeWrangler -from infinigen.core.placement.factory import AssetFactory -from infinigen.core import surface -from infinigen.core.surface import read_attr_data, shaderfunc_to_material, write_attr_data -import infinigen.core.util.blender as butil - -from infinigen.core.util.blender import deep_clone_obj -from infinigen.core.util.math import FixedSeed -from infinigen.core.tagging import tag_object, tag_nodegroup - - -class JellyfishFactory(AssetFactory): - - def __init__(self, factory_seed, coarse=False): - super().__init__(factory_seed, coarse) - with FixedSeed(factory_seed): - self.base_hue = np.random.normal(0.57, 0.15) - self.outside_material = self.make_transparent() if uniform(0, 1) < .8 else self.make_dotted() - self.inside_material = self.make_transparent() if uniform(0, 1) < .8 else self.make_opaque() - self.tentacle_material = self.make_transparent() - self.arm_mat_transparent = self.make_transparent() - self.arm_mat_opaque = self.make_opaque() - self.arm_mat_solid = self.make_solid() - - self.has_arm = uniform(0, 1) < .5 - arm_radius = uniform(0, .3) - self.arm_radius_range = arm_radius, arm_radius + uniform(.1, .4) - self.arm_height_range = -uniform(.4, .5), -uniform(0, .2) - self.arm_min_distance = uniform(.06, .08) - self.arm_size = uniform(.03, .06) - self.arm_length = log_uniform(2, 5) - self.arm_bend_angle = uniform(0, np.pi / 60) - self.arm_displace_range = uniform(0, .4), uniform(.4, .8) - - self.tentacle_min_distance = uniform(.04, .06) - self.tentacle_size = uniform(.005, .01) - self.tentacle_length = log_uniform(1.5, 2.5) - self.tentacle_bend_angle = uniform(0, np.pi / 12) - - self.cap_thickness = uniform(.05, .6) - self.cap_inner_radius = uniform(.6, .8) - self.cap_z_scale = log_uniform(.4, 1.5) - self.cap_dent = uniform(.15, .3) if uniform(0, 1) < .5 else 0 - - self.length_scale = log_uniform(.25, 2.) 
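Because every draw above happens inside `with FixedSeed(factory_seed)`, each factory instance gets one deterministic set of these parameters, and `log_uniform` spreads the multiplicative ranges (e.g. arm length between 2x and 5x) evenly in log space. A minimal stand-in, assuming the common exp-of-uniform definition (the actual helper lives in `infinigen.core.util.random` and may differ in detail):

```python
# Assumed stand-in for infinigen.core.util.random.log_uniform, for illustration only.
import numpy as np

def log_uniform(low, high, size=None):
    """Sample uniformly in log space, so ratios rather than offsets are uniform."""
    return np.exp(np.random.uniform(np.log(low), np.log(high), size))

arm_length = log_uniform(2, 5)            # mirrors self.arm_length above
anim_freq = 1 / log_uniform(25, 100)      # mirrors self.anim_freq above
```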
- self.anim_freq = 1 / log_uniform(25, 100) - self.move_freq = 1 / log_uniform(500, 1000) - - def create_asset(self, face_size, **params): - obj, radius = self.build_cap(face_size) - - assign_material(obj, [self.outside_material, self.inside_material]) - for axis in 'XY': - butil.modify_mesh(obj, 'SIMPLE_DEFORM', deform_method='TWIST', angle=uniform(-np.pi / 3, np.pi / 3), - deform_axis=axis) - for axis in 'XY': - butil.modify_mesh(obj, 'SIMPLE_DEFORM', deform_method='BEND', angle=uniform(-np.pi / 3, np.pi / 3), - deform_axis=axis) - - def selection(nw: NodeWrangler): - x, y, z = nw.separate(nw.new_node(Nodes.InputPosition)) - r = nw.math('POWER', nw.add(nw.math('POWER', x, 2), nw.math('POWER', y, 2)), .5) - center = nw.boolean_math('AND', nw.compare('GREATER_THAN', r, self.arm_radius_range[0] * radius), - nw.compare('LESS_THAN', r, self.arm_radius_range[1] * radius)) - down = nw.compare('LESS_THAN', nw.separate(nw.new_node(Nodes.InputNormal))[-1], 0) - inside = nw.new_node(Nodes.NamedAttribute, ["inside"]) - return nw.boolean_math('AND', nw.boolean_math('AND', center, down), inside) - - if self.has_arm: - long_arms = self.place_tentacles(obj, selection, self.arm_min_distance, self.arm_size, - self.arm_length, self.arm_bend_angle, displace=True) - for a in long_arms: - assign_material(a, np.random.choice( - [self.arm_mat_opaque, self.arm_mat_transparent, self.arm_mat_solid])) - else: - long_arms = [] - - tentacles = self.place_tentacles(obj, 'boundary', self.tentacle_min_distance, self.tentacle_size, - self.tentacle_length, self.tentacle_bend_angle) - assign_material(tentacles, self.tentacle_material) - - obj = join_objects([obj] + long_arms + tentacles) - head_z = np.amax(read_co(obj)[:, -1]) - tail_z = -np.amin(read_co(obj)[:, -1]) - self.animate_expansion(obj, head_z, tail_z) - self.animate_movement(obj) - tag_object(obj, 'jellyfish') - - return obj - - def animate_movement(self, obj): - offset = uniform(0, 1) - seed = np.random.randint(1e5) - driver_x, driver_y, driver_z = [_.driver for _ in obj.driver_add('location')] - driver_x.expression = repeated_driver(uniform(-.2, .2), uniform(-.2, .2), self.move_freq, offset, seed) - driver_y.expression = repeated_driver(uniform(-.2, .2), uniform(-.2, .2), self.move_freq, offset, seed) - driver_z.expression = repeated_driver(uniform(-1.5, -.5), uniform(.5, 1.5), self.move_freq, offset, - seed) - driver_rot = obj.driver_add('rotation_euler')[-1].driver - twist_range = uniform(0, np.pi / 60) - driver_rot.expression = repeated_driver(-twist_range, twist_range, self.move_freq, offset, seed) - - obj, mod = butil.modify_mesh(obj, 'SIMPLE_DEFORM', False, deform_method='TWIST', deform_axis='Z', - return_mod=True) - twist_driver = mod.driver_add('angle').driver - twist_driver.expression = repeated_driver(-np.pi / 30, np.pi / 30, self.move_freq, offset, seed) - - def animate_expansion(self, obj, head_z, tail_z): - obj.shape_key_add(name='Base') - offset = uniform(0, 1) - seed = np.random.randint(1e5) - self.animate_radius(obj, offset, seed, head_z, tail_z) - self.animate_height(obj, offset, seed, head_z, tail_z) - self.animate_arms(obj, tail_z) - - def animate_height(self, obj, offset, seed, head_z, tail_z): - x, y, z = read_co(obj).T - obj.active_shape_key_index = 0 - key_block_z = obj.shape_key_add(name='Height') - z_anchors = -tail_z, 0, head_z - z_disp = 1, 1, uniform(.6, .8) - z_curve = interp1d(z_anchors, z_disp, fill_value='extrapolate') - co = np.stack([x, y, z_curve(z) * z], -1) - key_block_z.data.foreach_set('co', co.reshape(-1)) - dr = 
key_block_z.driver_add('value').driver - dr.expression = repeated_driver(0, 1, self.anim_freq, offset + uniform(.05, .15), seed) - - def animate_radius(self, obj, offset, seed, head_z, tail_z): - obj.active_shape_key_index = 0 - x, y, z = read_co(obj).T - key_block_r = obj.shape_key_add(name='Radius') - z_anchors = -tail_z, -head_z * 2, -head_z, 0, head_z - r_scale = uniform(.7, .9), uniform(.85, .95), 1, uniform(1.2, 1.4), 1 - r_curve = interp1d(z_anchors, r_scale, 'quadratic', fill_value='extrapolate') - co = np.stack([r_curve(z) * x, r_curve(z) * y, z], -1) - key_block_r.data.foreach_set('co', co.reshape(-1)) - dr = key_block_r.driver_add('value').driver - dr.expression = repeated_driver(0, 1, self.anim_freq, offset, seed) - - def animate_arms(self, obj, tail_z): - def geo_musgrave_texture(nw: NodeWrangler, axis): - geometry = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketGeometry', 'Geometry', None)]) - z = nw.separate(nw.new_node(Nodes.InputPosition))[-1] - musgrave = nw.new_node(Nodes.MusgraveTexture, input_kwargs={'Scale': uniform(1, 2)}, - attrs={'musgrave_dimensions': '2D'}) - offset = nw.scalar_multiply(log_uniform(.1, .4), nw.new_node(Nodes.CombineXYZ, input_kwargs={ - axis: nw.scalar_divide(nw.scalar_multiply(musgrave, z), -tail_z) - })) - geometry = nw.new_node(Nodes.SetPosition, [geometry, - nw.boolean_math('NOT', nw.new_node(Nodes.NamedAttribute, ['pin'])), None, offset]) - nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': geometry}) - - for i, axis in enumerate('XY'): - obj.active_shape_key_index = 0 - key_block_r = obj.shape_key_add(name=f'Arm_{i}') - temp = deep_clone_obj(obj) - temp.shape_key_clear() - surface.add_geomod(temp, geo_musgrave_texture, apply=True, input_args=[axis]) - key_block_r.data.foreach_set('co', read_co(temp).reshape(-1)) - butil.delete(temp) - dr = key_block_r.driver_add('value').driver - dr.expression = repeated_driver(0, 1, self.anim_freq) - - def place_tentacles(self, obj, selection, min_distance, size, length, bend_angle, displace=False): - temp = butil.spawn_vert('temp') - surface.add_geomod(temp, geo_base_selection, apply=True, input_args=[obj, selection, min_distance]) - locations = read_co(temp) - if displace: - locations[:, -1] -= uniform(*self.arm_displace_range, len(locations)) - butil.delete(temp) - n = min(10, len(locations)) - arms = [self.build_arm(size, length, bend_angle) for _ in range(n)] - arms += [deep_clone_obj(np.random.choice(arms)) for _ in range(len(locations) - n)] - for arm, loc in zip(arms, locations): - arm.rotation_euler[-1] = np.arctan2(loc[1], loc[0]) + uniform(-np.pi / 6, np.pi / 6) + np.pi - arm.location = loc - return arms - - def build_cap(self, face_size): - obj = new_icosphere(subdivisions=6) - write_attribute(obj, lambda nw, position: 0, 'material_index', 'FACE') - - d = np.sqrt(1 - self.cap_inner_radius ** 2) + 1 - self.cap_thickness - r = (d * d + self.cap_inner_radius ** 2) / (2 * d) - - cutter = new_icosphere(subdivisions=6, radius=r) - write_attribute(cutter, lambda nw, position: 1, 'material_index', 'FACE') - cutter.location[-1] = 1 - self.cap_thickness - r - butil.modify_mesh(obj, 'BOOLEAN', object=cutter, operation='DIFFERENCE') - co = read_co(obj) - outside = np.abs(np.linalg.norm(co, axis=-1) - 1) < 1e-6 - co[:, -1] -= cutter.location[-1] - inside = np.abs(np.linalg.norm(co, axis=-1) - r) < 1e-6 - write_attr_data(obj, 'inside', inside.astype(float)) - write_attr_data(obj, 'boundary', ((~inside) & (~outside)).astype(float)) - butil.delete(cutter) - - if self.cap_dent > 0: - 
self.apply_cap_dent(obj) - - surface.add_geomod(obj, geo_extension, apply=True, - input_args=[log_uniform(.2, .4), log_uniform(.5, 1.), '2D']) - obj.scale *= Vector(uniform(.4, .6, 3)) - obj.scale[-1] *= self.cap_z_scale - radius = self.cap_inner_radius * min(obj.scale[:2]) - butil.apply_transform(obj) - subsurface2face_size(obj, face_size) - - obj.vertex_groups.new(name='pin') - tag_object(obj, 'cap') - return obj, radius - - def apply_cap_dent(self, obj): - n_dent = np.random.randint(6, 12) - angles = polygon_angles(n_dent) - angles = np.concatenate([angles, angles + 2 * np.pi]) - dent = uniform(1 - self.cap_dent, 1, n_dent) - margin = uniform(np.pi * .02, np.pi * .05, n_dent) - x, y, z = read_co(obj).T - a = np.arctan2(y, x) + np.pi * 1.5 - difference = np.abs(a[:, np.newaxis] - angles[np.newaxis, :]) - index = np.argmin(difference, 1) % n_dent - dent_ = np.take(dent, index) - margin_ = np.take(margin, index) - s = np.exp(np.log(dent_) / margin_ * np.clip(margin_ - np.min(difference, 1), 0, None)) - co = np.stack([s * x, s * y, z]).T - write_co(obj, co) - - def build_arm(self, radius, length, bend_angle): - obj = new_circle(vertices=16) - obj.scale = radius, radius * uniform(0, 1), 1 - butil.apply_transform(obj) - remove_vertices(obj, lambda x, y, z: y * (-1) ** np.random.randint(2) > 0) - steps = 256 - - empty = new_empty(location=(0, 0, 1), rotation=(0, -uniform(0, np.pi / 24), 0)) - butil.modify_mesh(obj, 'SCREW', angle=log_uniform(.5, 3) * np.pi * (-1) ** int(uniform(0, 1)), - screw_offset=-length * self.length_scale * uniform(.5, 1.), object=empty, steps=steps, - render_steps=steps) - butil.delete(empty) - butil.modify_mesh(obj, 'SIMPLE_DEFORM', deform_method='TAPER', factor=uniform(.5, 1.), deform_axis='Z') - texture = bpy.data.textures.new(name='arm', type='MARBLE') - texture.noise_scale = log_uniform(.1, .2) - butil.modify_mesh(obj, 'DISPLACE', texture=texture, strength=uniform(.01, .02), direction='Y') - texture = bpy.data.textures.new(name='arm', type='MARBLE') - texture.noise_scale = log_uniform(.1, 2.) 
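The `apply_cap_dent` method above scales each vertex's XY radius toward a per-dent factor, with the effect decaying to nothing once the angular distance to the nearest dent centre exceeds that dent's margin. A standalone NumPy sketch of that scaling (dent centres are evenly spaced here for simplicity, whereas the source draws them with `polygon_angles`; everything else follows the same formula):

```python
# Illustrative sketch of the radial dent scaling used by apply_cap_dent above.
import numpy as np

def dent_scale(vert_angles, dent_angles, dent, margin):
    diff = np.abs(vert_angles[:, None] - dent_angles[None, :])   # angular distance to every dent centre
    idx = np.argmin(diff, 1) % len(dent)                         # nearest dent (modulo the duplicated band)
    d, m = dent[idx], margin[idx]
    # d ** (x / m) == exp(log(d) / m * x), where x is how far inside the margin the vertex sits;
    # outside the margin the clip gives 0 and the scale is exactly 1.
    return np.exp(np.log(d) / m * np.clip(m - diff.min(1), 0, None))

n_dent = 8
base = np.linspace(0, 2 * np.pi, n_dent, endpoint=False)
dent_angles = np.concatenate([base, base + 2 * np.pi])           # duplicated band handles wrap-around
dent = np.random.uniform(0.7, 1.0, n_dent)                       # target radial scale at each dent centre
margin = np.random.uniform(0.02 * np.pi, 0.05 * np.pi, n_dent)
x, y, z = np.random.uniform(-1, 1, (3, 100))
s = dent_scale(np.arctan2(y, x) + 1.5 * np.pi, dent_angles, dent, margin)
x, y = s * x, s * y                                              # z is left unchanged
```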
- butil.modify_mesh(obj, 'DISPLACE', texture=texture, strength=log_uniform(.1, .2), direction='X') - butil.modify_mesh(obj, 'SIMPLE_DEFORM', deform_method='BEND', angle=bend_angle * log_uniform(.5, 1.5), - deform_axis='Y') - co = read_co(obj) - x, y, z = co.T - center = np.mean(co[z > -.01], 0) - obj.location[0] -= center[0] - obj.location[1] -= center[1] - butil.apply_transform(obj, loc=True) - tag_object(obj, 'arm') - return obj - - @staticmethod - def shader_jellyfish(nw: NodeWrangler, base_hue, saturation, transparency): - layerweight = nw.build_float_curve(nw.new_node(Nodes.LayerWeight, input_kwargs={'Blend': 0.3}), - [(0, 0), (.4, 0), (uniform(.6, .9), 1), (1, 1)]) - emission_color = hsv2rgba(base_hue, uniform(.4, .6), 1) - transparent_color = hsv2rgba((base_hue + uniform(-.1, .1)) % 1, saturation, 1) - emission = nw.new_node(Nodes.Emission, [emission_color]) - glossy = nw.new_node(Nodes.GlossyBSDF, - input_kwargs={'Color': transparent_color, 'Roughness': uniform(0.8, 1)}) - transparent = nw.new_node(Nodes.TransparentBSDF, [transparent_color]) - mix_shader = nw.new_node(Nodes.MixShader, [0.5, glossy, transparent]) - mix_shader = nw.new_node(Nodes.MixShader, [layerweight, emission, mix_shader]) - transparent = nw.new_node(Nodes.TransparentBSDF, [transparent_color]) - transparency = surface.eval_argument(nw, transparency) - mix_shader = nw.new_node(Nodes.MixShader, [transparency, mix_shader, transparent]) - return mix_shader - - def make_transparent(self): - hue = (self.base_hue + uniform(-.1, .1)) % 1 - return shaderfunc_to_material(self.shader_jellyfish, hue, uniform(.1, .3), uniform(.88, .92)) - - def make_opaque(self): - hue = (self.base_hue + uniform(-.1, .1)) % 1 - return shaderfunc_to_material(self.shader_jellyfish, hue, uniform(.3, .6), uniform(.75, .8)) - - def make_solid(self): - hue = (self.base_hue + uniform(-.1, .1)) % 1 - return shaderfunc_to_material(self.shader_jellyfish, hue, uniform(.5, .8), uniform(.4, .5)) - - def make_dotted(self): - def transparency(nw: NodeWrangler): - return nw.build_float_curve( - nw.new_node(Nodes.NoiseTexture, input_kwargs={'Scale': uniform(20, 50)}), - [(0, uniform(.92, .96)), (.62, uniform(.92, .96)), (.65, uniform(.5, .6)), - (1, uniform(.5, .6))]) - - hue = (self.base_hue + uniform(-.1, .1)) % 1 - return shaderfunc_to_material(self.shader_jellyfish, hue, uniform(.5, .8), transparency) diff --git a/infinigen/assets/creatures/parts/__init__.py b/infinigen/assets/creatures/parts/__init__.py deleted file mode 100644 index ae0951026..000000000 --- a/infinigen/assets/creatures/parts/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -from . import body, foot, wings, leg, head, \ - head_detail, tail, \ - fin_old, ridged_fin, \ - beak, horn, \ - generic_nurbs, hoof, eye, reptile_detail, chameleon \ No newline at end of file diff --git a/infinigen/assets/creatures/parts/beak.py b/infinigen/assets/creatures/parts/beak.py deleted file mode 100644 index 31a29044c..000000000 --- a/infinigen/assets/creatures/parts/beak.py +++ /dev/null @@ -1,368 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
- -# Authors: Hongyu Wen - - -import bpy -import numpy as np -from math import sin, cos, pi, exp - -from infinigen.assets.creatures.util.creature import PartFactory, Part -from infinigen.assets.creatures.util.genome import Joint, IKParams -from infinigen.assets.creatures.util import part_util -from infinigen.core.util import blender as butil - -from infinigen.assets.creatures.util.geometry import nurbs as nurbs_util -from infinigen.core.tagging import tag_object, tag_nodegroup - -def square(x): - return x * x - -class Beak(): - def __init__(self, **kwargs): - self.__dict__.update(kwargs) - self.hook_x = lambda x, theta: self.hook(self.hook_scale_x, self.hook_a, self.hook_b, self.hook_pos_x, self.hook_thickness_x, x, theta) - self.hook_z = lambda x, theta: self.hook(self.hook_scale_z, self.hook_a, self.hook_b, self.hook_pos_z, self.hook_thickness_z, x, theta) - - self.crown_z = lambda x, theta: self.crown(self.crown_scale_z, self.crown_a, self.crown_b, self.crown_pos_z, x, theta) - self.bump_z = lambda x, theta: self.bump(self.bump_scale_z, x, self.bump_l, self.bump_r) * max(sin(theta), 0) - - def cx(self, x): - return x - - def cy(self, x): - return 1 - exp(self.cy_a * (x - 1)) - - def cz(self, x): - return 1 - (x ** self.cz_a) - - def sigmoid(self, x): - return 1 / (1 + exp(-x)) - - def exp(self, a, b, x): - return a * exp(b * x) - - def hook(self, scale, a, b, p, t, x, theta): - return scale * self.exp(a, b, x - p - (1 - x) * t * sin(theta)) - - def bump(self, scale, x, l, r): - if x < l or x > r: - return 0 - x = (x - l) / (r - l) * pi - return scale * sin(x) - - def crown(self, scale, a, b, p, x, theta): - return scale * self.exp(a, b, p - x) * max(sin(theta), 0) - - def dx(self, x, theta): - hook = self.hook_x(x, theta) - sharp = self.sharpness * max(x - 0.95, 0) - return hook + sharp - - def dy(self, x): - return 0 - - def dz(self, x, theta): - hook = self.hook_z(x, theta) - crown = self.crown_z(x, theta) - bump = self.bump_z(x, theta) - return hook + crown + bump - - def generate(self): - self.n = int(self.n) - self.m = int(self.m) - ctrls = np.zeros((self.n, self.m, 3)) - for i in range(self.n): - for j in range(self.m): - p = i / (self.n - 1) - theta = 2 * pi * j / (self.m) - ctrls[i][j][0] = self.sx * self.cx(p) + self.dx(p, theta) - ctrls[i][j][1] = self.sy * self.cy(p) * self.r * cos(theta) + self.dy(p) - ctrls[i][j][2] = self.reverse * (self.sz * self.cz(p) * self.r * max(sin(theta), 0) + self.dz(p, theta)) - - method = 'blender' if False else 'geomdl' - return nurbs_util.nurbs(ctrls, method, face_size=0.02) - - -class BirdBeak(PartFactory): - - param_templates = {} - tags = ['head_detail', 'rigid'] - unit_scale = (0.5, 0.5, 0.5) - - def sample_params(self, select=None, var=1): - weights = part_util.random_convex_coord(self.param_templates.keys(), select=select) - params = part_util.rdict_comb(self.param_templates, weights) - # params = np.random.choice(list(self.param_templates.values())) - - N = lambda m, v: np.random.normal(m, v * var) - U = lambda l, r: np.random.uniform(l, r) - # add additional noise to params - for key in params['upper']: - if key in params['range']: - l, r = params['range'][key] - noise = N(0, 0.05 * (r - l)) - params['upper'][key] += noise - params['lower'][key] += noise - params['upper'][key] = max(min(params['upper'][key], r), l) - params['lower'][key] = max(min(params['lower'][key], r), l) - params['lower']['sx'] = min(params['lower']['sx'], params['upper']['sx'] * (params['upper']['hook_pos_x'] - params['upper']['hook_thickness_x'] / 2)) - - return 
params - - def rescale(self, params, scale): - params['sx'] *= scale - params['sy'] *= scale - params['sz'] *= scale - return params - - def make_part(self, params): - - obj = butil.spawn_vert('beak_parent_temp') - upper = Beak(**params['upper']).generate() - upper.parent = obj - upper.name = 'BeakUpper' - - lower = Beak(**params['lower']).generate() - lower.parent = obj - lower.name = 'BeakLower' - - upper.scale = self.unit_scale - lower.scale = self.unit_scale - butil.apply_transform([upper, lower], scale=True) - - part = Part(skeleton=np.zeros((1, 3)), obj=obj, joints={}, iks={}) - tag_object(part.obj, 'bird_beak') - - return part - - -class FlyingBirdBeak(BirdBeak): - def sample_params(self, select='normal', var=1): - return super().sample_params(select=select) - - def make_part(self, params): - obj = butil.spawn_vert('beak_parent_temp') - params['upper'] = self.rescale(params['upper'], 0.4) - params['lower'] = self.rescale(params['lower'], 0.4) - upper = Beak(**params['upper']).generate() - upper.parent = obj - upper.name = 'BeakUpper' - - lower = Beak(**params['lower']).generate() - lower.parent = obj - lower.name = 'BeakLower' - - upper.scale = self.unit_scale - lower.scale = self.unit_scale - butil.apply_transform([upper, lower], scale=True) - - return Part(skeleton=np.zeros((1, 3)), obj=obj, joints={}, iks={}) - - - -default_beak = { - 'n': 20, - 'm': 20, - 'r': 1.0, - 'sx': 1.0, - 'sy': 1.0, - 'sz': 1.0, - 'cy_a': 1.0, - 'cz_a': 2.0, - 'reverse': 1, - 'hook_a': 0.1, - 'hook_b': 5.0, - 'hook_scale_x': 0.0, - 'hook_pos_x': 0.0, - 'hook_thickness_x': 0.0, - 'hook_scale_z': 0.0, - 'hook_pos_z': 0.0, - 'hook_thickness_z': 0.0, - 'crown_scale_z': 0.0, - 'crown_a': 0.5, - 'crown_b': 0.5, - 'crown_pos_z': 0.5, - 'bump_scale_z': 0.0, - 'bump_l': 0.5, - 'bump_r': 0.5, - 'sharpness': 0.0, -} - -scales = { - 'r': [0.3, 1], - 'sx': [0.2, 1], - 'sy': [0.2, 1], - 'sz': [0.2, 1], - 'cy_a': [1, 10], - 'cz_a': [1, 5], - 'hook_a': [0.1, 0.8], - 'hook_b': [1, 5], - 'hook_scale_x': [-0.5, 0.5], - 'hook_pos_x': [0.5, 1], - 'hook_thickness_x': [0, 0.5], - 'hook_scale_z': [-0.5, 0.5], - 'hook_pos_z': [0.5, 1], - 'hook_thickness_z': [0, 0.5], - 'crown_scale_z': [0, 0.3], - 'crown_a': [0.1, 0.8], - 'crown_b': [0, 2], - 'crown_pos_z': [0, 0.5], - 'bump_scale_z': [0, 0.03], - 'bump_l': [0, 0.4], - 'bump_r': [0.6, 1], - 'sharpness': [-0.5, 0.5], -} -for k, v in scales.items(): - scales[k] = np.array(v) - -eagle_upper = default_beak | { - 'r': 0.4, - 'sx': 0.8, - 'sy': 0.4, - 'sz': 1.0, - 'hook_a': 0.1, - 'hook_b': 5.0, - 'hook_scale_x': -1.0, - 'hook_pos_x': 0.72, - 'hook_thickness_x': 0.35, - 'hook_scale_z': -0.8, - 'hook_pos_z': 0.7, - 'hook_thickness_z': 0.0, -} - -eagle_lower = default_beak | { - 'r': 0.4, - 'sx': 0.4, - 'sy': 0.4, - 'sz': 0.2, - 'reverse': -1, - 'hook_a': 0.1, - 'hook_b': 5.0, - 'hook_scale_x': 0.0, - 'hook_pos_x': 0.72, - 'hook_thickness_x': 0.35, - 'hook_scale_z': 0.1, - 'hook_pos_z': 0.6, - 'hook_thickness_z': -0.2, -} - -normal_upper = default_beak | { - 'r': 0.4, - 'sx': 0.7, - 'sy': 0.3, - 'sz': 0.5, - 'hook_a': 0.1, - 'hook_b': 2.0, - 'hook_scale_x': 0.0, - 'hook_pos_x': 0.72, - 'hook_thickness_x': 0.35, - 'hook_scale_z': -0.8, - 'hook_pos_z': 0.7, - 'hook_thickness_z': 0.0, -} - -normal_lower = default_beak | { - 'r': 0.4, - 'sx': 0.7, - 'sy': 0.3, - 'sz': 0.3, - 'reverse': -1, - 'hook_a': 0.1, - 'hook_b': 2.0, - 'hook_scale_x': 0.0, - 'hook_pos_x': 0.72, - 'hook_thickness_x': 0.35, - 'hook_scale_z': 0.8, - 'hook_pos_z': 0.7, - 'hook_thickness_z': 0.0, -} - -duck_upper = 
default_beak | { - 'n': 50, - 'r': 0.4, - 'sx': 1.0, - 'sy': 0.4, - 'sz': 0.5, - 'cy_a': 10.0, - 'hook_a': 0.1, - 'hook_b': 2.0, - 'hook_scale_x': -1.5, - 'hook_pos_x': 0.9, - 'hook_thickness_x': 0.0, - 'hook_scale_z': 0.4, - 'hook_pos_z': 0.6, - 'hook_thickness_z': 0.2, - 'crown_scale_z': 0.3, - 'crown_a': 0.1, - 'crown_b': 5.0, - 'crown_pos_z': 0.3, - 'bump_scale_z': 0.02, - 'bump_l': 0.4, - 'bump_r': 1.0, - 'sharpness': -0.5 -} - -duck_lower = default_beak | { - 'n': 50, - 'r': 0.4, - 'sx': 0.97, - 'sy': 0.4, - 'sz': 0.1, - 'cy_a': 10.0, - 'reverse': -1, - 'hook_a': 0.1, - 'hook_b': 2.0, - 'hook_scale_x': -1.5, - 'hook_pos_x': 0.9, - 'hook_thickness_x': 0.0, - 'hook_scale_z': -0.4, - 'hook_pos_z': 0.6, - 'hook_thickness_z': 0.0, - 'crown_scale_z': 0.1, - 'crown_a': 0.1, - 'crown_b': 5.0, - 'crown_pos_z': 0.3, - 'bump_scale_z': 0.03, - 'bump_l': 0.3, - 'bump_r': 1.0, - 'sharpness': -0.5 -} - -short_upper = default_beak | { - 'r': 0.4, - 'sx': 0.25, - 'sy': 0.3, - 'sz': 0.3, - 'hook_a': 0.1, - 'hook_b': 2.0, - 'hook_scale_x': -0.5, - 'hook_pos_x': 0.8, - 'hook_thickness_x': 0.35, - 'hook_scale_z': -0.15, - 'hook_pos_z': 0.7, - 'hook_thickness_z': 0.0, -} -short_lower = default_beak | { - 'r': 0.4, - 'sx': 0.25, - 'sy': 0.3, - 'sz': 0.3, - 'cy_a': 1.0, - 'cz_a': 1.1, - 'reverse': -1, - 'hook_a': 0.1, - 'hook_b': 2.0, - 'hook_scale_x': -0.5, - 'hook_pos_x': 0.8, - 'hook_thickness_x': 0.35, - 'hook_scale_z': 0.15, - 'hook_pos_z': 0.7, - 'hook_thickness_z': 0.0, -} - -BirdBeak.param_templates['normal'] = {'upper': normal_upper, 'lower': normal_lower, 'range': scales} -BirdBeak.param_templates['duck'] = {'upper': duck_upper, 'lower': duck_lower, 'range': scales} -BirdBeak.param_templates['eagle'] = {'upper': eagle_upper, 'lower': eagle_lower, 'range': scales} -BirdBeak.param_templates['short'] = {'upper': short_upper, 'lower': short_lower, 'range': scales} diff --git a/infinigen/assets/creatures/parts/body.py b/infinigen/assets/creatures/parts/body.py deleted file mode 100644 index 90b72ec1a..000000000 --- a/infinigen/assets/creatures/parts/body.py +++ /dev/null @@ -1,223 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
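The parameter templates above (eagle, normal, duck, short) all feed `Beak.generate`, which builds an n x m grid of NURBS control points: rows sweep the beak from base (p = 0) to tip (p = 1), columns sweep the cross-section angle. A simplified standalone sketch of that grid construction, omitting the hook/crown/bump displacements and the lower-beak `reverse` flip:

```python
# Simplified sketch of Beak.generate's control-point grid (illustrative only).
import numpy as np

def beak_ctrl_grid(n=20, m=20, r=0.4, sx=0.7, sy=0.3, sz=0.5, cy_a=1.0, cz_a=2.0):
    ctrls = np.zeros((n, m, 3))
    for i in range(n):
        for j in range(m):
            p = i / (n - 1)                    # position along the beak, 0 = base, 1 = tip
            theta = 2 * np.pi * j / m          # angle around the cross section
            cx = p                             # centreline profiles, as in Beak.cx / cy / cz
            cy = 1 - np.exp(cy_a * (p - 1))
            cz = 1 - p ** cz_a
            ctrls[i, j] = (sx * cx,
                           sy * cy * r * np.cos(theta),
                           sz * cz * r * max(np.sin(theta), 0.0))   # flat underside, curved top
    return ctrls

grid = beak_ctrl_grid()   # the real class then feeds the displaced grid to nurbs_util.nurbs(ctrls, ..., face_size=0.02)
```

The duck templates raise `n` to 50 simply to give the longer, flatter bill more longitudinal resolution.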
- -# Authors: Alexander Raistrick - - -import numpy as np -from numpy.random import normal as N, uniform - -from infinigen.assets.creatures.util import creature_util as cutil -from infinigen.assets.creatures.util.creature import Part, PartFactory -from infinigen.assets.creatures.util.geometry import nurbs - -from infinigen.core.nodes import node_utils -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler - -from infinigen.assets.creatures.util.genome import Joint, IKParams -from infinigen.assets.creatures.util.nodegroups.curve import nodegroup_polar_bezier, nodegroup_simple_tube_v2 -from infinigen.assets.creatures.util.nodegroups.geometry import nodegroup_symmetric_clone -from infinigen.assets.creatures.util.nodegroups.attach import nodegroup_surface_muscle - -from infinigen.assets.creatures.util import part_util -from infinigen.assets.creatures.util.geometry import lofting, nurbs -from infinigen.core.tagging import tag_object, tag_nodegroup - -@node_utils.to_nodegroup('nodegroup_quadruped_body', singleton=False, type='GeometryNodeTree') -def nodegroup_quadruped_body(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input_1 = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'length_rad1_rad2', (0.0, 0.0, 0.0)), - ('NodeSocketVector', 'Pct Ribcage', (0.0, 0.0, 0.0)), - ('NodeSocketVector', 'Pct Backpart', (0.0, 0.0, 0.0)), - ('NodeSocketVector', 'Spine StartRad, EndRad, Fullness', (0.05, 0.05, 3.0)), - ('NodeSocketVector', 'Belly StartRad, EndRad, Fullness', (0.07, 0.15, 2.5)), - ('NodeSocketVector', 'Belly ProfileHeight, StartTilt, EndTilt', (0.5, 114.0, 114.0)), - ('NodeSocketVector', 'TopFlank StartRad, EndRad, Fullness', (0.2, 0.28, 2.5)), - ('NodeSocketVector', 'TopFlank ProfileHeight, StartTilt, EndTilt', (0.6, 72.0, 8.0)), - ('NodeSocketVector', 'BackFlank StartRad, EndRad, Fullness', (0.15, 0.15, 2.5)), - ('NodeSocketVector', 'BackFlank ProfileHeight, StartTilt, EndTilt', (0.6, 53.0, 53.0)), - ('NodeSocketVector', 'BottomFlank StartRad, EndRad, Fullness', (0.14, 0.27, 2.5)), - ('NodeSocketVector', 'BottomFlank0 ProfileHeight, StartTilt, EndTilt', (0.6, -29.0, 48.0)), - ('NodeSocketVector', 'BottomFlank1 ProfileHeight, StartTilt, EndTilt', (0.5, -44.0, -17.4)), - ('NodeSocketFloat', 'aspect', 1.0)]) - - multiply = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input_1.outputs["length_rad1_rad2"], 1: group_input_1.outputs["Pct Ribcage"]}, - attrs={'operation': 'MULTIPLY'}) - - simple_tube_v2 = nw.new_node(nodegroup_simple_tube_v2().name, - input_kwargs={'length_rad1_rad2': multiply.outputs["Vector"], 'angles_deg': (0.0, -1.0, 4.0), 'proportions': (0.3333, 0.45, 0.3), 'aspect': group_input_1.outputs["aspect"], 'fullness': 3.0, 'Origin': (0.48, 0.0, -0.07)}) - - multiply_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input_1.outputs["length_rad1_rad2"], 1: group_input_1.outputs["Pct Backpart"]}, - attrs={'operation': 'MULTIPLY'}) - - vector = nw.new_node(Nodes.Vector) - vector.vector = (-0.01, 0.0, 0.02) - - simple_tube_v2_1 = nw.new_node(nodegroup_simple_tube_v2().name, - input_kwargs={'length_rad1_rad2': multiply_1.outputs["Vector"], 'angles_deg': (0.94, -3.94, 11.66), 'proportions': (0.3, 0.6, 0.2), 'aspect': group_input_1.outputs["aspect"], 'fullness': 7.0, 'Origin': vector}) - - union = nw.new_node(Nodes.MeshBoolean, - input_kwargs={'Mesh 2': [simple_tube_v2.outputs["Geometry"], simple_tube_v2_1.outputs["Geometry"]]}, - attrs={'operation': 'UNION'}) - - quadratic_bezier = 
nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Start': vector, 'Middle': simple_tube_v2_1.outputs["Endpoint"], 'End': simple_tube_v2.outputs["Endpoint"]}) - - bottom_flank_0 = nw.new_node(nodegroup_surface_muscle().name, - input_kwargs={'Skin Mesh': union, 'Skeleton Curve': quadratic_bezier, 'Coord 0': (0.16, 0.91, 0.66), 'Coord 1': (0.38, 0.37, 1.0), 'Coord 2': (0.67, -0.42, 0.6), 'StartRad, EndRad, Fullness': group_input_1.outputs["BottomFlank StartRad, EndRad, Fullness"], 'ProfileHeight, StartTilt, EndTilt': group_input_1.outputs["BottomFlank0 ProfileHeight, StartTilt, EndTilt"]}, - label='Bottom Flank 0') - - top_flank = nw.new_node(nodegroup_surface_muscle().name, - input_kwargs={'Skin Mesh': union, 'Skeleton Curve': quadratic_bezier, 'Coord 0': (0.25, 4.91, 0.5), 'Coord 1': (0.65, -0.35, 1.0), 'Coord 2': (0.88, 0.47, 0.7), 'StartRad, EndRad, Fullness': group_input_1.outputs["TopFlank StartRad, EndRad, Fullness"], 'ProfileHeight, StartTilt, EndTilt': group_input_1.outputs["TopFlank ProfileHeight, StartTilt, EndTilt"]}, - label='Top Flank') - - bottom_flank_1 = nw.new_node(nodegroup_surface_muscle().name, - input_kwargs={'Skin Mesh': union, 'Skeleton Curve': quadratic_bezier, 'Coord 0': (0.36, 1.03, 0.95), 'Coord 1': (0.6, 0.85, 1.0), 'Coord 2': (0.9, -0.01, 0.71), 'StartRad, EndRad, Fullness': group_input_1.outputs["BottomFlank StartRad, EndRad, Fullness"], 'ProfileHeight, StartTilt, EndTilt': group_input_1.outputs["BottomFlank1 ProfileHeight, StartTilt, EndTilt"]}, - label='Bottom Flank 1') - - back_flank = nw.new_node(nodegroup_surface_muscle().name, - input_kwargs={'Skin Mesh': union, 'Skeleton Curve': quadratic_bezier, 'Coord 0': (0.02, -0.9, 0.53), 'Coord 1': (0.2, -0.85, 0.85), 'Coord 2': (0.61, -0.99, 0.7), 'StartRad, EndRad, Fullness': group_input_1.outputs["BackFlank StartRad, EndRad, Fullness"], 'ProfileHeight, StartTilt, EndTilt': group_input_1.outputs["BackFlank ProfileHeight, StartTilt, EndTilt"]}, - label='Back Flank') - - belly = nw.new_node(nodegroup_surface_muscle().name, - input_kwargs={'Skin Mesh': union, 'Skeleton Curve': quadratic_bezier, 'Coord 0': (0.24, 1.52, 0.7), 'Coord 1': (0.48, 1.24, 1.42), 'Coord 2': (0.92, 1.41, 0.97), 'StartRad, EndRad, Fullness': group_input_1.outputs["Belly StartRad, EndRad, Fullness"], 'ProfileHeight, StartTilt, EndTilt': group_input_1.outputs["Belly ProfileHeight, StartTilt, EndTilt"]}, - label='Belly') - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [bottom_flank_0, top_flank, bottom_flank_1, back_flank, belly]}) - - symmetric_clone = nw.new_node(nodegroup_symmetric_clone().name, - input_kwargs={'Geometry': join_geometry_1}) - - spine = nw.new_node(nodegroup_surface_muscle().name, - input_kwargs={'Skin Mesh': union, 'Skeleton Curve': quadratic_bezier, 'Coord 0': (0.05, -1.5708, 1.0), 'Coord 1': (0.5, -1.5708, 1.2), 'Coord 2': (0.95, -1.5708, 1.0), 'StartRad, EndRad, Fullness': group_input_1.outputs["Spine StartRad, EndRad, Fullness"], 'ProfileHeight, StartTilt, EndTilt': (1.0, 0.0, 0.0)}, - label='Spine') - - join_geometry = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [union, symmetric_clone.outputs["Both"], spine]}) - - reroute = nw.new_node(Nodes.Reroute, - input_kwargs={'Input': quadratic_bezier}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': join_geometry, 'Skeleton Curve': reroute, 'Base Mesh': union}) - -class QuadrupedBody(PartFactory): - - tags = ['body', 'head'] - - def sample_params(self): - return { - 'length_rad1_rad2': np.array((1.7, 
0.65, 0.65)) * N(1, 0.15, 3), - 'Pct Ribcage': (0.76, 0.56, 0.56) * N(1, 0.1, 3), - 'Pct Backpart': (0.64, 0.25, 0.4) * N(1, 0.1, 3), - 'Spine StartRad, EndRad, Fullness': np.array((0.05, 0.05, 3.0)) * N(1, 0.1, 3), - 'Belly StartRad, EndRad, Fullness': np.array((0.07, 0.15, 2.5)) * N(1, 0.1, 3), - 'Belly ProfileHeight, StartTilt, EndTilt': (0.5, 114.0, 114.0), - 'TopFlank StartRad, EndRad, Fullness': (0.2, 0.28, 2.5), - 'TopFlank ProfileHeight, StartTilt, EndTilt': (0.6, 72.0, 8.0), - 'BackFlank StartRad, EndRad, Fullness': (0.15, 0.15, 2.5), - 'BackFlank ProfileHeight, StartTilt, EndTilt': (0.6, 53.0, 53.0), - 'BottomFlank StartRad, EndRad, Fullness': (0.14, 0.27, 2.5), - 'BottomFlank0 ProfileHeight, StartTilt, EndTilt': (0.6, -29.0, 48.0), - 'BottomFlank1 ProfileHeight, StartTilt, EndTilt': (0.5, -44.0, -17.4), - 'aspect': N(1, 0.1) - } - - def make_part(self, params): - - part = part_util.nodegroup_to_part(nodegroup_quadruped_body, params) - part.joints = { - i: Joint(rest=(0,0,0), bounds=np.array([[-30, 0, -30], [30, 0, 30]])) - for i in np.linspace(0, 1, 4, endpoint=True) - } - part.iks = { - 0.0: IKParams(name='hip', mode='pin', target_size=0.3), - 1.0: IKParams(name='shoulder', rotation_weight=0.1, target_size=0.4) - } - tag_object(part.obj, 'quadruped_body') - return part - -@node_utils.to_nodegroup('nodegroup_fish_body', singleton=False, type='GeometryNodeTree') -def nodegroup_fish_body(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'length_rad1_rad2', (0.89, 0.2, 0.29)), - ('NodeSocketVector', 'angles_deg', (7.0, 0.51, -9.02)), - ('NodeSocketFloat', 'aspect', 0.56), - ('NodeSocketFloat', 'fullness', 3.43)]) - - simple_tube_v2 = nw.new_node(nodegroup_simple_tube_v2().name, - input_kwargs={'length_rad1_rad2': group_input.outputs["length_rad1_rad2"], 'angles_deg': group_input.outputs["angles_deg"], 'aspect': group_input.outputs["aspect"], 'fullness': group_input.outputs["fullness"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': simple_tube_v2.outputs["Geometry"], 'Skeleton Curve': simple_tube_v2.outputs["Skeleton Curve"], 'Endpoint': simple_tube_v2.outputs["Endpoint"]}) - -class FishBody(PartFactory): - - tags = ['body'] - - def sample_params(self): - return {} - - def make_part(self, params): - part = part_util.nodegroup_to_part(nodegroup_fish_body, params) - part.joints = { - i: Joint(rest=(0,0,0), bounds=np.array([[-30, 0, -30], [30, 0, 30]])) - for i in np.linspace(0, 1, 4, endpoint=True) - } - part.iks = { - 0.0: IKParams(name='hip', mode='pin', target_size=0.3), - 1.0: IKParams(name='shoulder', rotation_weight=0.1, target_size=0.4) - } - tag_object(part.obj, 'fish_body') - return part - -@node_utils.to_nodegroup('nodegroup_bird_body', singleton=False, type='GeometryNodeTree') -def nodegroup_bird_body(nw: NodeWrangler): - # Code generated using version 2.5.1 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'length_rad1_rad2', (1.0000, 0.5000, 0.3000)), - ('NodeSocketFloat', 'aspect', 1.0000), - ('NodeSocketFloat', 'fullness', 2.0000)]) - - simple_tube_v2 = nw.new_node(nodegroup_simple_tube_v2().name, - input_kwargs={'length_rad1_rad2': group_input.outputs["length_rad1_rad2"], - 'proportions': (0.1000, 0.1000, 0.1000), - 'aspect': group_input.outputs["aspect"], - 'fullness': group_input.outputs["fullness"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - 
input_kwargs={'Geometry': simple_tube_v2.outputs["Geometry"], - 'Skeleton Curve': simple_tube_v2.outputs["Skeleton Curve"]}) - -class BirdBody(PartFactory): - - tags = ['body'] - - def sample_params(self): - return { - 'length_rad1_rad2': np.array((0.95, 0.15, 0.2)) * N(1.0, 0.05, size=(3,)), - 'aspect': N(1.2, 0.02), - 'fullness': N(2, 0.1) - } - - def make_part(self, params): - part = part_util.nodegroup_to_part(nodegroup_bird_body, params) - part.joints = { - i: Joint(rest=(0,0,0), bounds=np.array([[-30, 0, -30], [30, 0, 30]])) - for i in np.linspace(0, 1, 4, endpoint=True) - } - part.iks = { - 0.0: IKParams(name='hip', mode='pin', target_size=0.3), - 1.0: IKParams(name='shoulder', rotation_weight=0.1, target_size=0.4) - } - tag_object(part.obj, 'bird_body') - return part - diff --git a/infinigen/assets/creatures/parts/chameleon.py b/infinigen/assets/creatures/parts/chameleon.py deleted file mode 100644 index 31821dafa..000000000 --- a/infinigen/assets/creatures/parts/chameleon.py +++ /dev/null @@ -1,1599 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Hongyu Wen -# Acknowledgement: This file draws inspiration from https://www.youtube.com/watch?v=LJD3nvFXCLE by Redjam9 - - -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface - -from infinigen.assets.creatures.util.creature import PartFactory, Part -from infinigen.assets.creatures.util.genome import Joint, IKParams -from infinigen.assets.creatures.util import part_util -from infinigen.core.util import blender as butil -from scipy.interpolate import interp1d -from infinigen.assets.creatures.util.part_util import nodegroup_to_part - -from infinigen.assets.creatures.util.geometry import nurbs as nurbs_util -from infinigen.core import surface -import logging -import numpy as np - -@node_utils.to_nodegroup('nodegroup_chameleon_toe', singleton=False, type='GeometryNodeTree') -def nodegroup_chameleon_toe(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - spiral = nw.new_node('GeometryNodeCurveSpiral', - input_kwargs={'Rotations': 0.1000, 'Start Radius': 0.1000, 'End Radius': 0.3000, 'Height': 0.0000}) - - spline_parameter = nw.new_node(Nodes.SplineParameter) - - float_curve = nw.new_node(Nodes.FloatCurve, input_kwargs={'Value': spline_parameter.outputs["Factor"]}) - node_utils.assign_curve(float_curve.mapping.curves[0], [(0.0000, 1.0000), (1.0000, 0.0000)]) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: float_curve, 1: 0.4000}, attrs={'operation': 'MULTIPLY'}) - - set_curve_radius = nw.new_node(Nodes.SetCurveRadius, input_kwargs={'Curve': spiral, 'Radius': multiply}) - - spline_parameter_1 = nw.new_node(Nodes.SplineParameter) - - capture_attribute = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': set_curve_radius, 2: spline_parameter_1.outputs["Factor"]}) - - curve_circle = nw.new_node(Nodes.CurveCircle, input_kwargs={'Radius': 0.1000}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': capture_attribute.outputs["Geometry"], 'Profile Curve': curve_circle.outputs["Curve"]}) - - store_named_attribute = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': curve_to_mesh, 'Name': 
'Ridge', 'Value': capture_attribute.outputs[2]}, - attrs={'data_type': 'FLOAT', 'domain': 'POINT'}) - - sample_curve = nw.new_node(Nodes.SampleCurve, input_kwargs={'Curve': set_curve_radius}, attrs={'mode': 'FACTOR'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': store_named_attribute, 'Position': sample_curve.outputs["Position"]}, - attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_floor_ceil', singleton=False, type='GeometryNodeTree') -def nodegroup_floor_ceil(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketFloat', 'Value', 0.0000)]) - - float_to_integer = nw.new_node(Nodes.FloatToInt, input_kwargs={'Float': group_input.outputs["Value"]}, attrs={'rounding_mode': 'FLOOR'}) - - float_to_integer_1 = nw.new_node(Nodes.FloatToInt, - input_kwargs={'Float': group_input.outputs["Value"]}, - attrs={'rounding_mode': 'CEILING'}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Value"], 1: float_to_integer}, - attrs={'operation': 'SUBTRACT'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Floor': float_to_integer, 'Ceil': float_to_integer_1, 'Remainder': subtract}, - attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_clamp_or_wrap', singleton=False, type='GeometryNodeTree') -def nodegroup_clamp_or_wrap(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketInt', 'Value', 0), - ('NodeSocketFloat', 'Max', 0.5000), - ('NodeSocketBool', 'Use Wrap', False)]) - - clamp = nw.new_node(Nodes.Clamp, input_kwargs={'Value': group_input.outputs["Value"], 'Max': group_input.outputs["Max"]}) - - wrap = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Value"], 1: group_input.outputs["Max"], 2: 0.0000}, - attrs={'operation': 'WRAP'}) - - switch = nw.new_node(Nodes.Switch, - input_kwargs={0: group_input.outputs["Use Wrap"], 4: clamp, 5: wrap}, - attrs={'input_type': 'INT'}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Output': switch.outputs[1]}, attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_chameleon_claw_shape', singleton=False, type='GeometryNodeTree') -def nodegroup_chameleon_claw_shape(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - quadratic_bezier = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Resolution': 32, 'Start': (0.0000, 0.0000, 0.0000), 'Middle': (0.5000, 0.5000, 0.0000), 'End': (0.7000, 0.3000, 0.0000)}) - - simpletube = nw.new_node(nodegroup_simple_tube().name, - input_kwargs={'Curve': quadratic_bezier, 'RadStartEnd': (0.2000, 0.2000, 1.0000)}) - - quadratic_bezier_1 = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Resolution': 10, 'Start': (0.9500, 0.2500, 0.0000), 'Middle': (1.0000, 0.5000, 0.0000), 'End': (0.9500, 0.7500, 0.0000)}) - - curveparametercurve = nw.new_node(nodegroup_curve_parameter_curve().name, - input_kwargs={'Surface': simpletube.outputs["Mesh"], 'UVCurve': quadratic_bezier_1, 'CtrlptsU': 32, 'CtrlptsW': 32}) - - curvesculpt = nw.new_node(nodegroup_curve_sculpt().name, - input_kwargs={'Target': simpletube.outputs["Mesh"], 'Curve': curveparametercurve, 'Base Radius': 0.1000, 'Base Factor': 0.0200, 'Attr': True}) - - chameleon_toe = nw.new_node(nodegroup_chameleon_toe().name) - - sample_curve = nw.new_node(Nodes.SampleCurve, - 
input_kwargs={'Curve': simpletube.outputs["Curve"], 'Factor': 1.0000}, - attrs={'mode': 'FACTOR'}) - - add = nw.new_node(Nodes.VectorMath, input_kwargs={0: sample_curve.outputs["Position"]}) - - subtract = nw.new_node(Nodes.VectorMath, - input_kwargs={0: add.outputs["Vector"], 1: chameleon_toe.outputs["Position"]}, - attrs={'operation': 'SUBTRACT'}) - - transform_1 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': chameleon_toe.outputs["Geometry"], 'Translation': subtract.outputs["Vector"], 'Rotation': (0.1745, -0.1745, 0.8727)}) - - chameleon_toe_1 = nw.new_node(nodegroup_chameleon_toe().name) - - add_1 = nw.new_node(Nodes.VectorMath, input_kwargs={0: sample_curve.outputs["Position"]}) - - subtract_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: add_1.outputs["Vector"], 1: chameleon_toe_1.outputs["Position"]}, - attrs={'operation': 'SUBTRACT'}) - - transform_2 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': chameleon_toe_1.outputs["Geometry"], 'Translation': subtract_1.outputs["Vector"], 'Rotation': (0.0000, 0.1745, 0.8727)}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [curvesculpt.outputs["Geometry"], transform_1, transform_2]}) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVectorEuler', 'Rotation', (0.0000, 1.0472, 0.0000)), - ('NodeSocketVectorXYZ', 'Scale', (0.2000, 0.2000, 0.4000))]) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': join_geometry, 'Rotation': group_input.outputs["Rotation"], 'Scale': group_input.outputs["Scale"]}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': transform}, attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_u_v_param_to_vert_idxs', singleton=False, type='GeometryNodeTree') -def nodegroup_u_v_param_to_vert_idxs(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Value', 0.5000), - ('NodeSocketInt', 'Size', 0), - ('NodeSocketBool', 'Cyclic', False)]) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Value"], 1: group_input.outputs["Size"]}, - attrs={'operation': 'MULTIPLY'}) - - floorceil = nw.new_node(nodegroup_floor_ceil().name, input_kwargs={'Value': multiply}) - - clamporwrap = nw.new_node(nodegroup_clamp_or_wrap().name, - input_kwargs={'Value': floorceil.outputs["Floor"], 'Max': group_input.outputs["Size"], 'Use Wrap': group_input.outputs["Cyclic"]}) - - clamporwrap_1 = nw.new_node(nodegroup_clamp_or_wrap().name, - input_kwargs={'Value': floorceil.outputs["Ceil"], 'Max': group_input.outputs["Size"], 'Use Wrap': group_input.outputs["Cyclic"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Floor': clamporwrap, 'Ceil': clamporwrap_1, 'Remainder': floorceil.outputs["Remainder"]}, - attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_chameleon_foot_shape', singleton=False, type='GeometryNodeTree') -def nodegroup_chameleon_foot_shape(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - chameleon_claw_shape = nw.new_node(nodegroup_chameleon_claw_shape().name, input_kwargs={'Rotation': (0.0000, 0.0000, 0.0000)}) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVectorEuler', 'ouRotation', (0.0000, 1.0472, 0.0000)), - ('NodeSocketVectorEuler', 'inRotation', (0.0000, 2.0944, 3.1416)), - ('NodeSocketVectorXYZ', 'ouScale', (1.0000, 1.0000, 1.0000)), - 
('NodeSocketVectorXYZ', 'inScale', (1.0000, 1.0000, 1.0000))]) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': chameleon_claw_shape, 'Rotation': group_input.outputs["ouRotation"], 'Scale': group_input.outputs["ouScale"]}) - - chameleon_claw_shape_1 = nw.new_node(nodegroup_chameleon_claw_shape().name, input_kwargs={'Rotation': (0.0000, 0.0000, 0.0000)}) - - transform_1 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': chameleon_claw_shape_1, 'Rotation': group_input.outputs["inRotation"], 'Scale': group_input.outputs["inScale"]}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [transform, transform_1]}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': join_geometry}, attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_bilinear_interp_index_transfer', singleton=False, type='GeometryNodeTree') -def nodegroup_bilinear_interp_index_transfer(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Source', None), - ('NodeSocketFloat', 'U', 0.5000), - ('NodeSocketFloat', 'V', 0.5000), - ('NodeSocketVector', 'Attribute', (0.0000, 0.0000, 0.0000)), - ('NodeSocketInt', 'SizeU', 0), - ('NodeSocketInt', 'SizeV', 0), - ('NodeSocketBool', 'CyclicU', False), - ('NodeSocketBool', 'CyclicV', False)]) - - uvparamtovertidxs = nw.new_node(nodegroup_u_v_param_to_vert_idxs().name, - input_kwargs={'Value': group_input.outputs["V"], 'Size': group_input.outputs["SizeV"], 'Cyclic': group_input.outputs["CyclicV"]}) - - uvparamtovertidxs_1 = nw.new_node(nodegroup_u_v_param_to_vert_idxs().name, - input_kwargs={'Value': group_input.outputs["U"], 'Size': group_input.outputs["SizeU"], 'Cyclic': group_input.outputs["CyclicU"]}) - - floor_floor = nw.new_node(Nodes.Math, - input_kwargs={0: uvparamtovertidxs_1.outputs["Floor"], 1: group_input.outputs["SizeV"], 2: uvparamtovertidxs.outputs["Floor"]}, - label='FloorFloor', - attrs={'operation': 'MULTIPLY_ADD'}) - - transfer_attribute = nw.new_node(Nodes.SampleIndex, - input_kwargs={'Geometry': group_input, 'Value': group_input.outputs["Attribute"], 'Index': floor_floor}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - ceil_floor = nw.new_node(Nodes.Math, - input_kwargs={0: uvparamtovertidxs_1.outputs["Ceil"], 1: group_input.outputs["SizeV"], 2: uvparamtovertidxs.outputs["Floor"]}, - label='CeilFloor', - attrs={'operation': 'MULTIPLY_ADD'}) - - transfer_attribute_1 = nw.new_node(Nodes.SampleIndex, - input_kwargs={'Geometry': group_input, 'Value': group_input.outputs["Attribute"], 'Index': ceil_floor}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Vector': uvparamtovertidxs_1.outputs["Remainder"], 9: (transfer_attribute, 'Value'), 10: (transfer_attribute_1, 'Value')}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - floor_ceil = nw.new_node(Nodes.Math, - input_kwargs={0: uvparamtovertidxs_1.outputs["Floor"], 1: group_input.outputs["SizeV"], 2: uvparamtovertidxs.outputs["Ceil"]}, - label='FloorCeil', - attrs={'operation': 'MULTIPLY_ADD'}) - - transfer_attribute_2 = nw.new_node(Nodes.SampleIndex, - input_kwargs={'Geometry': group_input, 'Value': group_input.outputs["Attribute"], 'Index': floor_ceil}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - ceil_ceil = nw.new_node(Nodes.Math, - input_kwargs={0: uvparamtovertidxs_1.outputs["Ceil"], 1: group_input.outputs["SizeV"], 2: uvparamtovertidxs.outputs["Ceil"]}, - 
label='CeilCeil', - attrs={'operation': 'MULTIPLY_ADD'}) - - transfer_attribute_3 = nw.new_node(Nodes.SampleIndex, - input_kwargs={'Geometry': group_input, 'Value': group_input.outputs["Attribute"], 'Index': ceil_ceil}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Vector': uvparamtovertidxs_1.outputs["Remainder"], 9: (transfer_attribute_2, 'Value'), 10: (transfer_attribute_3, 'Value')}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - map_range_2 = nw.new_node(Nodes.MapRange, - input_kwargs={'Vector': uvparamtovertidxs.outputs["Remainder"], 9: map_range.outputs["Vector"], 10: map_range_1.outputs["Vector"]}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Vector': map_range_2.outputs["Vector"]}, - attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_polar_to_cart', singleton=False, type='GeometryNodeTree') -def nodegroup_polar_to_cart(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Angle', 0.5000), - ('NodeSocketFloat', 'Length', 0.0000), - ('NodeSocketVector', 'Origin', (0.0000, 0.0000, 0.0000))]) - - cosine = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Angle"]}, attrs={'operation': 'COSINE'}) - - sine = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Angle"]}, attrs={'operation': 'SINE'}) - - construct_unit_vector = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': cosine, 'Z': sine}, label='Construct Unit Vector') - - offset_polar = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["Length"], 1: construct_unit_vector, 2: group_input.outputs["Origin"]}, - label='Offset Polar', - attrs={'operation': 'MULTIPLY_ADD'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Vector': offset_polar.outputs["Vector"]}, - attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_switch4', singleton=False, type='GeometryNodeTree') -def nodegroup_switch4(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketInt', 'Arg', 0), - ('NodeSocketVector', 'Arg == 0', (0.0000, 0.0000, 0.0000)), - ('NodeSocketVector', 'Arg == 1', (0.0000, 0.0000, 0.0000)), - ('NodeSocketVector', 'Arg == 2', (0.0000, 0.0000, 0.0000)), - ('NodeSocketVector', 'Arg == 3', (0.0000, 0.0000, 0.0000))]) - - greater_equal = nw.new_node(Nodes.Compare, - input_kwargs={2: group_input.outputs["Arg"], 3: 2}, - attrs={'data_type': 'INT', 'operation': 'GREATER_EQUAL'}) - - greater_equal_1 = nw.new_node(Nodes.Compare, - input_kwargs={2: group_input.outputs["Arg"], 3: 1}, - attrs={'data_type': 'INT', 'operation': 'GREATER_EQUAL'}) - - switch_1 = nw.new_node(Nodes.Switch, - input_kwargs={0: greater_equal_1, 8: group_input.outputs["Arg == 0"], 9: group_input.outputs["Arg == 1"]}, - attrs={'input_type': 'VECTOR'}) - - greater_equal_2 = nw.new_node(Nodes.Compare, - input_kwargs={2: group_input.outputs["Arg"], 3: 3}, - attrs={'data_type': 'INT', 'operation': 'GREATER_EQUAL'}) - - switch_2 = nw.new_node(Nodes.Switch, - input_kwargs={0: greater_equal_2, 8: group_input.outputs["Arg == 2"], 9: group_input.outputs["Arg == 3"]}, - attrs={'input_type': 'VECTOR'}) - - switch = nw.new_node(Nodes.Switch, - input_kwargs={0: greater_equal, 8: switch_1.outputs[3], 9: switch_2.outputs[3]}, - attrs={'input_type': 'VECTOR'}) - - group_output = 
nw.new_node(Nodes.GroupOutput, input_kwargs={'Output': switch.outputs[3]}, attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_symmetric_clone', singleton=False, type='GeometryNodeTree') -def nodegroup_symmetric_clone(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketVectorXYZ', 'Scale', (1.0000, -1.0000, 1.0000))]) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 'Scale': group_input.outputs["Scale"]}) - - flip_faces = nw.new_node(Nodes.FlipFaces, input_kwargs={'Mesh': transform}) - - join_geometry_2 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [group_input.outputs["Geometry"], flip_faces]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Both': join_geometry_2, 'Orig': group_input.outputs["Geometry"], 'Inverted': flip_faces}, - attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_scale_bump', singleton=False, type='GeometryNodeTree') -def nodegroup_scale_bump(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketFloat', 'Density', 50.0000), - ('NodeSocketFloat', 'Depth', 0.0050), - ('NodeSocketFloat', 'Bump', 0.0100), - ('NodeSocketInt', 'Level', 2), - ('NodeSocketBool', 'Selection', True)]) - - subdivide_mesh = nw.new_node(Nodes.SubdivideMesh, - input_kwargs={'Mesh': group_input.outputs["Geometry"], 'Level': group_input.outputs["Level"]}) - - normal = nw.new_node(Nodes.InputNormal) - - position = nw.new_node(Nodes.InputPosition) - - noise_texture_1 = nw.new_node(Nodes.NoiseTexture, input_kwargs={'Vector': position}) - - scale = nw.new_node(Nodes.VectorMath, - input_kwargs={0: noise_texture_1.outputs["Color"], 'Scale': 0.2000}, - attrs={'operation': 'SCALE'}) - - add = nw.new_node(Nodes.VectorMath, input_kwargs={0: scale.outputs["Vector"], 1: position}) - - voronoi_texture_1 = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': add.outputs["Vector"], 'Scale': group_input.outputs["Density"], 'Randomness': 0.5000}, - attrs={'feature': 'DISTANCE_TO_EDGE'}) - - colorramp_1 = nw.new_node(Nodes.ColorRamp, input_kwargs={'Fac': voronoi_texture_1.outputs["Distance"]}) - colorramp_1.color_ramp.elements[0].position = 0.0000 - colorramp_1.color_ramp.elements[0].color = [0.0000, 0.0000, 0.0000, 1.0000] - colorramp_1.color_ramp.elements[1].position = 0.9909 - colorramp_1.color_ramp.elements[1].color = [1.0000, 1.0000, 1.0000, 1.0000] - - scale_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: colorramp_1.outputs["Color"], 'Scale': group_input.outputs["Bump"]}, - attrs={'operation': 'SCALE'}) - - scale_2 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: normal, 'Scale': scale_1.outputs["Vector"]}, - attrs={'operation': 'SCALE'}) - - colorramp = nw.new_node(Nodes.ColorRamp, input_kwargs={'Fac': voronoi_texture_1.outputs["Distance"]}) - colorramp.color_ramp.elements[0].position = 0.0000 - colorramp.color_ramp.elements[0].color = [0.0000, 0.0000, 0.0000, 1.0000] - colorramp.color_ramp.elements[1].position = 0.0591 - colorramp.color_ramp.elements[1].color = [1.0000, 1.0000, 1.0000, 1.0000] - - scale_3 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: colorramp.outputs["Color"], 'Scale': group_input.outputs["Depth"]}, - attrs={'operation': 'SCALE'}) - - scale_4 = 
nw.new_node(Nodes.VectorMath, - input_kwargs={0: normal, 'Scale': scale_3.outputs["Vector"]}, - attrs={'operation': 'SCALE'}) - - add_1 = nw.new_node(Nodes.VectorMath, input_kwargs={0: scale_2.outputs["Vector"], 1: scale_4.outputs["Vector"]}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': subdivide_mesh, 'Selection': group_input.outputs["Selection"], 'Offset': add_1.outputs["Vector"]}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': set_position}, attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_chameleon_leg_raw_shape', singleton=False, type='GeometryNodeTree') -def nodegroup_chameleon_leg_raw_shape(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'thigh_length', 0.6000), - ('NodeSocketFloat', 'calf_length', 0.5000), - ('NodeSocketFloat', 'thigh_body_rotation', 0.5000), - ('NodeSocketFloat', 'calf_body_rotation', 0.5000), - ('NodeSocketFloat', 'thigh_calf_rotation', 20.0000), - ('NodeSocketFloat', 'toe_toe_rotation', 20.0000), - ('NodeSocketVectorXYZ', 'thigh_scale', (1.0000, 0.6500, 1.0000)), - ('NodeSocketVectorXYZ', 'calf_scale', (1.0000, 0.6500, 1.0000)), - ('NodeSocketVectorXYZ', 'ouScale', (1.0000, 1.0000, 1.0000)), - ('NodeSocketVectorXYZ', 'inScale', (1.0000, 1.0000, 1.0000))]) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["thigh_length"]}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': group_input.outputs["thigh_length"]}) - - quadratic_bezier = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Resolution': 64, 'Start': (0.0000, 0.0000, 0.0000), 'Middle': combine_xyz_3, 'End': combine_xyz_2}) - - simpletube = nw.new_node(nodegroup_simple_tube().name, - input_kwargs={'Curve': quadratic_bezier, 'RadStartEnd': (0.1500, 0.2000, 0.9000), 'Resolution': 64}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["thigh_calf_rotation"], 1: -1.0000}, - attrs={'operation': 'MULTIPLY'}) - - add = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["thigh_body_rotation"], 1: 180.0000}) - - combine_xyz_7 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': multiply_1, 'Z': add}) - - scale = nw.new_node(Nodes.VectorMath, input_kwargs={0: combine_xyz_7, 'Scale': 0.0174}, attrs={'operation': 'SCALE'}) - - transform_geometry = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': simpletube.outputs["Mesh"], 'Rotation': scale.outputs["Vector"], 'Scale': group_input.outputs["thigh_scale"]}) - - round_bump = nw.new_node(nodegroup_round_bump().name, - input_kwargs={'Geometry': transform_geometry, 'Distance': 0.0070, 'Offset Scale': 0.0020}) - - multiply_2 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["calf_length"]}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_4 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply_2}) - - combine_xyz_5 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': group_input.outputs["calf_length"]}) - - quadratic_bezier_1 = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Resolution': 64, 'Start': (0.0000, 0.0000, 0.0000), 'Middle': combine_xyz_4, 'End': combine_xyz_5}) - - simpletube_1 = nw.new_node(nodegroup_simple_tube().name, - input_kwargs={'Curve': quadratic_bezier_1, 'RadStartEnd': (0.1500, 0.1000, 0.9000), 'Resolution': 64}) - - add_1 = 
nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["calf_body_rotation"], 1: 180.0000}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': group_input.outputs["thigh_calf_rotation"], 'Z': add_1}) - - scale_1 = nw.new_node(Nodes.VectorMath, input_kwargs={0: combine_xyz, 'Scale': 0.0174}, attrs={'operation': 'SCALE'}) - - transform_geometry_1 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': simpletube_1.outputs["Mesh"], 'Rotation': scale_1.outputs["Vector"], 'Scale': group_input.outputs["calf_scale"]}) - - round_bump_1 = nw.new_node(nodegroup_round_bump().name, - input_kwargs={'Geometry': transform_geometry_1, 'Distance': 0.0070, 'Offset Scale': 0.0020}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: 180.0000, 1: group_input.outputs["thigh_calf_rotation"]}, - attrs={'operation': 'SUBTRACT'}) - - multiply_3 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["toe_toe_rotation"], 1: -1.0000}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': subtract, 'Z': multiply_3}) - - scale_2 = nw.new_node(Nodes.VectorMath, input_kwargs={0: combine_xyz_1, 'Scale': 0.0174}, attrs={'operation': 'SCALE'}) - - add_2 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["toe_toe_rotation"], 1: 180.0000}) - - combine_xyz_6 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': group_input.outputs["thigh_calf_rotation"], 'Z': add_2}) - - scale_3 = nw.new_node(Nodes.VectorMath, input_kwargs={0: combine_xyz_6, 'Scale': 0.0174}, attrs={'operation': 'SCALE'}) - - chameleon_foot_shape = nw.new_node(nodegroup_chameleon_foot_shape().name, - input_kwargs={'ouRotation': scale_2.outputs["Vector"], 'inRotation': scale_3.outputs["Vector"], 'ouScale': group_input.outputs["ouScale"], 'inScale': group_input.outputs["inScale"]}) - - transform_geometry_2 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': simpletube_1.outputs["Curve"], 'Rotation': scale_1.outputs["Vector"], 'Scale': (1.0000, 0.6500, 1.0000)}) - - sample_curve = nw.new_node(Nodes.SampleCurve, - input_kwargs={'Curve': transform_geometry_2, 'Factor': 0.8500}, - attrs={'mode': 'FACTOR'}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': chameleon_foot_shape, 'Offset': sample_curve.outputs["Position"]}) - - round_bump_2 = nw.new_node(nodegroup_round_bump().name, - input_kwargs={'Geometry': set_position, 'Distance': 0.0050, 'Offset Scale': 0.0020}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [round_bump, round_bump_1, round_bump_2]}) - - transform_geometry_3 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': simpletube.outputs["Curve"], 'Rotation': scale.outputs["Vector"], 'Scale': group_input.outputs["thigh_scale"]}) - - sample_curve_1 = nw.new_node(Nodes.SampleCurve, - input_kwargs={'Curve': transform_geometry_3, 'Factor': 1.0000}, - attrs={'mode': 'FACTOR'}) - - scale_4 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: sample_curve_1.outputs["Position"], 'Scale': -1.0000}, - attrs={'operation': 'SCALE'}) - - set_position_1 = nw.new_node(Nodes.SetPosition, input_kwargs={'Geometry': join_geometry, 'Offset': scale_4.outputs["Vector"]}) - - subdivision_surface = nw.new_node(Nodes.SubdivisionSurface, input_kwargs={'Mesh': set_position_1}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Mesh': subdivision_surface}, attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_chameleon_tail_shape', singleton=False, type='GeometryNodeTree') -def 
nodegroup_chameleon_tail_shape(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - quadratic_bezier = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Resolution': 64, 'Middle': (0.0000, 0.2000, 0.0000), 'End': (2.0000, -0.5000, 0.0000)}) - - simpletube = nw.new_node(nodegroup_simple_tube().name, - input_kwargs={'Curve': quadratic_bezier, 'RadStartEnd': (0.4000, 0.0000, 0.9000), 'Resolution': 64}) - - quadratic_bezier_1 = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Resolution': 64, 'Start': (0.2000, 0.0000, 0.0000), 'Middle': (0.6000, 0.0000, 0.0100), 'End': (0.8000, 0.0000, 0.0200)}) - - curveparametercurve = nw.new_node(nodegroup_curve_parameter_curve().name, - input_kwargs={'Surface': simpletube.outputs["Mesh"], 'UVCurve': quadratic_bezier_1, 'CtrlptsU': 64, 'CtrlptsW': 64}) - - curvesculpt = nw.new_node(nodegroup_curve_sculpt().name, - input_kwargs={'Target': simpletube.outputs["Mesh"], 'Curve': curveparametercurve, 'Base Radius': 0.0200, 'SymmY': False, 'Attr': True}) - - subdivision_surface = nw.new_node(Nodes.SubdivisionSurface, input_kwargs={'Mesh': curvesculpt.outputs["Geometry"], 'Level': 2}) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': subdivision_surface, 'Translation': (1.0000, 0.0000, 0.1000), 'Rotation': (-1.5708, 0.0000, 0.0000)}) - - transform_1 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': quadratic_bezier, 'Translation': (1.0000, 0.0000, 0.0000), 'Rotation': (-1.5708, 0.0000, 0.0000)}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Mesh': transform, 'Curve': transform_1}, - attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_back_bump1', singleton=False, type='GeometryNodeTree') -def nodegroup_back_bump1(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketGeometry', 'Surface', None)]) - - quadratic_bezier_1 = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Resolution': 25, 'Start': (0.0000, 0.7500, 0.1000), 'Middle': (0.6000, 0.7500, 0.0000), 'End': (1.0000, 0.7500, 0.1000)}) - - curveparametercurve = nw.new_node(nodegroup_curve_parameter_curve().name, - input_kwargs={'Surface': group_input.outputs["Surface"], 'UVCurve': quadratic_bezier_1, 'CtrlptsU': 64, 'CtrlptsW': 64}) - - curvesculpt = nw.new_node(nodegroup_curve_sculpt().name, - input_kwargs={'Target': group_input.outputs["Surface"], 'Curve': curveparametercurve, 'Base Radius': 0.3000, 'Base Factor': 0.0300, 'Name': ''}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': curvesculpt.outputs["Geometry"]}, - attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_back_bump2', singleton=False, type='GeometryNodeTree') -def nodegroup_back_bump2(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketGeometry', 'Surface', None)]) - - quadratic_bezier_1 = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Resolution': 64, 'Start': (0.1000, 0.7500, 0.1000), 'Middle': (0.4000, 0.7500, 0.0000), 'End': (0.9000, 0.7500, 0.1000)}) - - curveparametercurve = nw.new_node(nodegroup_curve_parameter_curve().name, - input_kwargs={'Surface': group_input.outputs["Surface"], 'UVCurve': quadratic_bezier_1, 'CtrlptsU': 64, 'CtrlptsW': 64}) - - curvesculpt = nw.new_node(nodegroup_curve_sculpt().name, - input_kwargs={'Target': group_input.outputs["Surface"], 'Curve': 
curveparametercurve, 'Base Radius': 0.1500, 'Base Factor': 0.1000}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': curvesculpt.outputs["Geometry"]}, - attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_back_bump3', singleton=False, type='GeometryNodeTree') -def nodegroup_back_bump3(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketGeometry', 'Surface', None)]) - - quadratic_bezier_1 = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Resolution': 25, 'Start': (0.1500, 0.7500, 0.0600), 'Middle': (0.6000, 0.7500, 0.0000), 'End': (0.9000, 0.7500, 0.0600)}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': quadratic_bezier_1}) - - curveparametercurve = nw.new_node(nodegroup_curve_parameter_curve().name, - input_kwargs={'Surface': group_input.outputs["Surface"], 'UVCurve': join_geometry, 'CtrlptsU': 64, 'CtrlptsW': 64}) - - curvesculpt = nw.new_node(nodegroup_curve_sculpt().name, - input_kwargs={'Target': group_input.outputs["Surface"], 'Curve': curveparametercurve, 'Base Radius': 0.1000, 'Attr': True}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': curvesculpt.outputs["Geometry"]}, - attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_belly_sunken1', singleton=False, type='GeometryNodeTree') -def nodegroup_belly_sunken1(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketGeometry', 'Surface', None)]) - - quadratic_bezier_1 = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Resolution': 30, 'Start': (0.0000, 0.2500, 0.0000), 'Middle': (0.6000, 0.2500, 0.0000), 'End': (1.0000, 0.2500, 0.0000)}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': quadratic_bezier_1}) - - curveparametercurve = nw.new_node(nodegroup_curve_parameter_curve().name, - input_kwargs={'Surface': group_input.outputs["Surface"], 'UVCurve': join_geometry, 'CtrlptsU': 64, 'CtrlptsW': 64}) - - curvesculpt = nw.new_node(nodegroup_curve_sculpt().name, - input_kwargs={'Target': group_input.outputs["Surface"], 'Curve': curveparametercurve, 'Base Radius': 0.0300, 'Base Factor': 0.0200, 'Name': ''}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': curvesculpt.outputs["Geometry"]}, - attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_shouder_sunken', singleton=False, type='GeometryNodeTree') -def nodegroup_shouder_sunken(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketGeometry', 'Surface', None)]) - - quadratic_bezier_1 = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Resolution': 25, 'Start': (0.1500, 0.2500, 0.1000), 'Middle': (0.2000, 0.2500, 0.0000), 'End': (0.3000, 0.2500, 0.1000)}) - - curveparametercurve = nw.new_node(nodegroup_curve_parameter_curve().name, - input_kwargs={'Surface': group_input.outputs["Surface"], 'UVCurve': quadratic_bezier_1, 'CtrlptsU': 64, 'CtrlptsW': 64}) - - curvesculpt = nw.new_node(nodegroup_curve_sculpt().name, - input_kwargs={'Target': group_input.outputs["Surface"], 'Curve': curveparametercurve, 'Base Radius': 0.2000, 'Base Factor': -0.0300, 'SymmY': False}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': curvesculpt.outputs["Geometry"]}, - 
attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_neck_bump', singleton=False, type='GeometryNodeTree') -def nodegroup_neck_bump(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketGeometry', 'Surface', None)]) - - quadratic_bezier_1 = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Resolution': 25, 'Start': (0.0000, 0.2500, 0.0000), 'Middle': (0.0500, 0.2500, 0.0000), 'End': (0.0700, 0.2500, 0.1000)}) - - curveparametercurve = nw.new_node(nodegroup_curve_parameter_curve().name, - input_kwargs={'Surface': group_input.outputs["Surface"], 'UVCurve': quadratic_bezier_1, 'CtrlptsU': 64, 'CtrlptsW': 64}) - - curvesculpt = nw.new_node(nodegroup_curve_sculpt().name, - input_kwargs={'Target': group_input.outputs["Surface"], 'Curve': curveparametercurve, 'Base Radius': 0.2000, 'SymmY': False}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': curvesculpt.outputs["Geometry"]}, - attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_neck_bump2', singleton=False, type='GeometryNodeTree') -def nodegroup_neck_bump2(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketGeometry', 'Surface', None)]) - - quadratic_bezier_1 = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Resolution': 25, 'Start': (0.0000, 0.2500, 0.0000), 'Middle': (0.0250, 0.2500, 0.1000), 'End': (0.0500, 0.2500, 0.2000)}) - - curveparametercurve = nw.new_node(nodegroup_curve_parameter_curve().name, - input_kwargs={'Surface': group_input.outputs["Surface"], 'UVCurve': quadratic_bezier_1, 'CtrlptsU': 64, 'CtrlptsW': 64}) - - curvesculpt = nw.new_node(nodegroup_curve_sculpt().name, - input_kwargs={'Target': group_input.outputs["Surface"], 'Curve': curveparametercurve, 'Base Radius': 0.2000, 'SymmY': False}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': curvesculpt.outputs["Geometry"]}, - attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_curve_parameter_curve', singleton=False, type='GeometryNodeTree') -def nodegroup_curve_parameter_curve(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Surface', None), - ('NodeSocketGeometry', 'UVCurve', None), - ('NodeSocketInt', 'CtrlptsU', 0), - ('NodeSocketInt', 'CtrlptsW', 0)]) - - normal = nw.new_node(Nodes.InputNormal) - - position = nw.new_node(Nodes.InputPosition) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': position}) - - position_1 = nw.new_node(Nodes.InputPosition) - - bilinearinterpindextransfer = nw.new_node(nodegroup_bilinear_interp_index_transfer().name, - input_kwargs={'Source': group_input.outputs["Surface"], 'U': separate_xyz.outputs["X"], 'V': separate_xyz.outputs["Y"], 'Attribute': position_1, 'SizeU': group_input.outputs["CtrlptsU"], 'SizeV': group_input.outputs["CtrlptsW"], 'CyclicV': True}) - - transfer_attribute = nw.new_node(Nodes.SampleNearestSurface, - input_kwargs={'Mesh': group_input.outputs["Surface"], 'Value': normal, 'Sample Position': bilinearinterpindextransfer}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - multiply_add = nw.new_node(Nodes.VectorMath, - input_kwargs={0: (transfer_attribute, 'Value'), 1: separate_xyz.outputs["Z"], 2: bilinearinterpindextransfer}, - attrs={'operation': 
'MULTIPLY_ADD'}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': group_input.outputs["UVCurve"], 'Position': multiply_add.outputs["Vector"]}) - - normal_1 = nw.new_node(Nodes.InputNormal) - - dot_product = nw.new_node(Nodes.VectorMath, input_kwargs={0: normal, 1: normal_1}, attrs={'operation': 'DOT_PRODUCT'}) - - arcsine = nw.new_node(Nodes.Math, input_kwargs={0: dot_product.outputs["Value"]}, attrs={'operation': 'ARCSINE'}) - - set_curve_tilt = nw.new_node(Nodes.SetCurveTilt, input_kwargs={'Curve': set_position, 'Tilt': arcsine}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': set_curve_tilt}, attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_polar_bezier', singleton=False, type='GeometryNodeTree') -def nodegroup_polar_bezier(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketIntUnsigned', 'Resolution', 32), - ('NodeSocketVector', 'Origin', (0.0000, 0.0000, 0.0000)), - ('NodeSocketVector', 'angles_deg', (0.0000, 0.0000, 0.0000)), - ('NodeSocketVector', 'Seg Lengths', (0.3000, 0.3000, 0.3000)), - ('NodeSocketBool', 'Do Bezier', True)]) - - mesh_line = nw.new_node(Nodes.MeshLine, input_kwargs={'Count': 4}) - - index = nw.new_node(Nodes.Index) - - deg2_rad = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["angles_deg"], 'Scale': 0.0175}, - label='Deg2Rad', - attrs={'operation': 'SCALE'}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': deg2_rad.outputs["Vector"]}) - - separate_xyz_1 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': group_input.outputs["Seg Lengths"]}) - - polartocart = nw.new_node(nodegroup_polar_to_cart().name, - input_kwargs={'Angle': separate_xyz, 'Length': separate_xyz_1.outputs["X"], 'Origin': group_input.outputs["Origin"]}) - - add = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz, 1: separate_xyz.outputs["Y"]}) - - polartocart_1 = nw.new_node(nodegroup_polar_to_cart().name, - input_kwargs={'Angle': add, 'Length': separate_xyz_1.outputs["Y"], 'Origin': polartocart}) - - add_1 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz.outputs["Z"], 1: add}) - - polartocart_2 = nw.new_node(nodegroup_polar_to_cart().name, - input_kwargs={'Angle': add_1, 'Length': separate_xyz_1.outputs["Z"], 'Origin': polartocart_1}) - - switch4 = nw.new_node(nodegroup_switch4().name, - input_kwargs={'Arg': index, 'Arg == 0': group_input.outputs["Origin"], 'Arg == 1': polartocart, 'Arg == 2': polartocart_1, 'Arg == 3': polartocart_2}) - - set_position = nw.new_node(Nodes.SetPosition, input_kwargs={'Geometry': mesh_line, 'Position': switch4}) - - mesh_to_curve = nw.new_node(Nodes.MeshToCurve, input_kwargs={'Mesh': set_position}) - - subdivide_curve_1 = nw.new_node(Nodes.SubdivideCurve, input_kwargs={'Curve': mesh_to_curve, 'Cuts': group_input.outputs["Resolution"]}) - - integer = nw.new_node(Nodes.Integer) - integer.integer = 2 - - bezier_segment = nw.new_node(Nodes.CurveBezierSegment, - input_kwargs={'Resolution': integer, 'Start': group_input.outputs["Origin"], 'Start Handle': polartocart, 'End Handle': polartocart_1, 'End': polartocart_2}) - - divide = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Resolution"], 1: integer}, - attrs={'operation': 'DIVIDE'}) - - subdivide_curve = nw.new_node(Nodes.SubdivideCurve, input_kwargs={'Curve': bezier_segment, 'Cuts': divide}) - - switch = nw.new_node(Nodes.Switch, - input_kwargs={1: 
group_input.outputs["Do Bezier"], 14: subdivide_curve_1, 15: subdivide_curve}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Curve': switch.outputs[6], 'Endpoint': polartocart_2}, - attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_simple_tube', singleton=False, type='GeometryNodeTree') -def nodegroup_simple_tube(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Curve', None), - ('NodeSocketVector', 'RadStartEnd', (0.0500, 0.0500, 1.0000)), - ('NodeSocketInt', 'Resolution', 32)]) - - spline_parameter = nw.new_node(Nodes.SplineParameter) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: 1.0000, 1: spline_parameter.outputs["Factor"]}, - attrs={'operation': 'SUBTRACT'}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: subtract, 1: spline_parameter.outputs["Factor"]}, - attrs={'operation': 'MULTIPLY'}) - - sqrt = nw.new_node(Nodes.Math, input_kwargs={0: multiply}, attrs={'operation': 'SQRT'}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': group_input.outputs["RadStartEnd"]}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': spline_parameter.outputs["Factor"], 3: separate_xyz.outputs["X"], 4: separate_xyz.outputs["Y"]}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: sqrt, 1: map_range.outputs["Result"]}, attrs={'operation': 'MULTIPLY'}) - - set_curve_radius = nw.new_node(Nodes.SetCurveRadius, input_kwargs={'Curve': group_input.outputs["Curve"], 'Radius': multiply_1}) - - curve_circle = nw.new_node(Nodes.CurveCircle, input_kwargs={'Resolution': group_input.outputs["Resolution"]}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': 1.0000, 'Y': separate_xyz.outputs["Z"]}) - - transform = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': curve_circle.outputs["Curve"], 'Scale': combine_xyz}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, input_kwargs={'Curve': set_curve_radius, 'Profile Curve': transform}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Mesh': curve_to_mesh, 'Curve': set_curve_radius}, - attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_curve_sculpt', singleton=False, type='GeometryNodeTree') -def nodegroup_curve_sculpt(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input_1 = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Target', None), - ('NodeSocketGeometry', 'Curve', None), - ('NodeSocketFloat', 'Base Radius', 0.0500), - ('NodeSocketFloat', 'Base Factor', 0.0500), - ('NodeSocketBool', 'SymmY', True), - ('NodeSocketGeometry', 'StrokeRadFacModifier', None), - ('NodeSocketBool', 'Switch', True), - ('NodeSocketBool', 'Attr', False), - ('NodeSocketString', 'Name', 'Ridge')]) - - normal = nw.new_node(Nodes.InputNormal) - - symmetric_clone = nw.new_node(nodegroup_symmetric_clone().name, input_kwargs={'Geometry': group_input_1.outputs["Curve"]}) - - switch = nw.new_node(Nodes.Switch, - input_kwargs={1: group_input_1.outputs["SymmY"], 14: group_input_1.outputs["Curve"], 15: symmetric_clone.outputs["Both"]}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, input_kwargs={'Curve': switch.outputs[6]}) - - geometry_proximity = nw.new_node(Nodes.Proximity, input_kwargs={'Target': curve_to_mesh}, attrs={'target_element': 'POINTS'}) - - curve_to_mesh_1 = nw.new_node(Nodes.CurveToMesh, input_kwargs={'Curve': 
group_input_1.outputs["StrokeRadFacModifier"]}) - - position = nw.new_node(Nodes.InputPosition) - - index = nw.new_node(Nodes.Index) - - transfer_attribute = nw.new_node(Nodes.SampleIndex, - input_kwargs={'Geometry': curve_to_mesh_1, 'Value': position, 'Index': index}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': (transfer_attribute, 'Value')}) - - add = nw.new_node(Nodes.Math, input_kwargs={0: group_input_1.outputs["Base Radius"], 1: separate_xyz.outputs["X"]}) - - map_range = nw.new_node(Nodes.MapRange, input_kwargs={'Value': geometry_proximity.outputs["Distance"], 2: add}) - - float_curve_1 = nw.new_node(Nodes.FloatCurve, input_kwargs={'Value': map_range.outputs["Result"]}) - node_utils.assign_curve(float_curve_1.mapping.curves[0], [(0.0000, 1.0000), (0.4364, 0.9212), (0.6182, 0.0787), (1.0000, 0.0000)], handles=['VECTOR', 'AUTO', 'AUTO', 'VECTOR']) - - float_curve = nw.new_node(Nodes.FloatCurve, input_kwargs={'Value': map_range.outputs["Result"]}) - node_utils.assign_curve(float_curve.mapping.curves[0], [(0.0000, 1.0000), (0.2500, 0.9588), (0.7455, 0.0475), (1.0000, 0.0000)], handles=['VECTOR', 'AUTO', 'AUTO', 'VECTOR']) - - switch_2 = nw.new_node(Nodes.Switch, - input_kwargs={0: group_input_1.outputs["Switch"], 2: float_curve_1, 3: float_curve}, - attrs={'input_type': 'FLOAT'}) - - add_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input_1.outputs["Base Factor"], 1: separate_xyz.outputs["Y"]}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: switch_2.outputs["Output"], 1: add_1}, attrs={'operation': 'MULTIPLY'}) - - scale = nw.new_node(Nodes.VectorMath, input_kwargs={0: normal, 'Scale': multiply}, attrs={'operation': 'SCALE'}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': group_input_1.outputs["Target"], 'Offset': scale.outputs["Vector"]}) - - named_attribute = nw.new_node(Nodes.NamedAttribute, input_kwargs={'Name': group_input_1.outputs["Name"]}) - - maximum = nw.new_node(Nodes.Math, - input_kwargs={0: named_attribute.outputs[1], 1: switch_2.outputs["Output"]}, - attrs={'use_clamp': True, 'operation': 'MAXIMUM'}) - - store_named_attribute = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': set_position, 'Name': group_input_1.outputs["Name"], 'Value': maximum}, - attrs={'data_type': 'FLOAT', 'domain': 'POINT'}) - - switch_3 = nw.new_node(Nodes.Switch, - input_kwargs={1: group_input_1.outputs["Attr"], 14: set_position, 15: store_named_attribute}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': switch_3.outputs[6], 'Result': switch_2.outputs["Output"]}, - attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_chameleon_eye', singleton=False, type='GeometryNodeTree') -def nodegroup_chameleon_eye(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - polarbezier = nw.new_node(nodegroup_polar_bezier().name, - input_kwargs={'Resolution': 1024, 'angles_deg': (0.0000, 0.0000, 10.0000), 'Seg Lengths': (0.1500, 0.1500, 0.1500)}) - - simpletube = nw.new_node(nodegroup_simple_tube().name, - input_kwargs={'Curve': polarbezier.outputs["Curve"], 'RadStartEnd': (0.4000, 0.4000, 1.0000), 'Resolution': 1024}) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': simpletube.outputs["Mesh"], 'Scale': (4.0000, 4.5000, 4.5000)}) - - quadratic_bezier_25 = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Resolution': 256, 'Start': (0.9900, 0.0000, 0.0000), 'Middle': (0.9900, 0.5000, 
0.0000), 'End': (0.9900, 1.0000, 0.0000)}) - - curveparametercurve = nw.new_node(nodegroup_curve_parameter_curve().name, - input_kwargs={'Surface': transform, 'UVCurve': quadratic_bezier_25, 'CtrlptsU': 1024, 'CtrlptsW': 1024}) - - curvesculpt = nw.new_node(nodegroup_curve_sculpt().name, - input_kwargs={'Target': transform, 'Curve': curveparametercurve, 'Base Factor': 0.1000}) - - quadratic_bezier_26 = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Resolution': 200, 'Start': (1.0000, 0.0000, 0.0000), 'Middle': (1.0000, 0.5000, 0.0000), 'End': (1.0000, 1.0000, 0.0000)}) - - curveparametercurve_1 = nw.new_node(nodegroup_curve_parameter_curve().name, - input_kwargs={'Surface': curvesculpt.outputs["Geometry"], 'UVCurve': quadratic_bezier_26, 'CtrlptsU': 1024, 'CtrlptsW': 1024}) - - group_input = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketFloat', 'pupil_radius', 0.2200)]) - - add = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["pupil_radius"], 1: 0.0300}) - - curvesculpt_1 = nw.new_node(nodegroup_curve_sculpt().name, - input_kwargs={'Target': curvesculpt.outputs["Geometry"], 'Curve': curveparametercurve_1, 'Base Radius': add, 'Base Factor': 0.0000, 'Switch': False, 'Attr': True}) - - quadratic_bezier_27 = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Resolution': 256, 'Start': (1.0000, 0.0000, 0.0000), 'Middle': (1.0000, 0.5000, 0.0000), 'End': (1.0000, 1.0000, 0.0000)}) - - curveparametercurve_2 = nw.new_node(nodegroup_curve_parameter_curve().name, - input_kwargs={'Surface': curvesculpt_1.outputs["Geometry"], 'UVCurve': quadratic_bezier_27, 'CtrlptsU': 1024, 'CtrlptsW': 1024}) - - curvesculpt_2 = nw.new_node(nodegroup_curve_sculpt().name, - input_kwargs={'Target': curvesculpt_1.outputs["Geometry"], 'Curve': curveparametercurve_2, 'Base Radius': group_input.outputs["pupil_radius"], 'Base Factor': 0.0000, 'Switch': False, 'Attr': True, 'Name': 'Pupil'}) - - op_or = nw.new_node(Nodes.BooleanMath, - input_kwargs={0: curvesculpt_1.outputs["Result"], 1: curvesculpt_2.outputs["Result"]}, - attrs={'operation': 'OR'}) - - op_not = nw.new_node(Nodes.BooleanMath, input_kwargs={0: op_or}, attrs={'operation': 'NOT'}) - - scale_bump = nw.new_node(nodegroup_scale_bump().name, - input_kwargs={'Geometry': curvesculpt_2.outputs["Geometry"], 'Density': 20.0000, 'Depth': 0.1000, 'Bump': 0.0200, 'Level': 0, 'Selection': op_not}) - - normal = nw.new_node(Nodes.InputNormal) - - position = nw.new_node(Nodes.InputPosition) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': position}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'W': separate_xyz.outputs["X"], 'Scale': 12.0000, 'Detail': 10.0000, 'Roughness': 0.0000}, - attrs={'noise_dimensions': '1D'}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: noise_texture.outputs["Fac"], 1: 0.0300}, - attrs={'use_clamp': True, 'operation': 'MULTIPLY'}) - - scale = nw.new_node(Nodes.VectorMath, input_kwargs={0: normal, 'Scale': multiply}, attrs={'operation': 'SCALE'}) - - set_position = nw.new_node(Nodes.SetPosition, input_kwargs={'Geometry': scale_bump, 'Offset': scale.outputs["Vector"]}) - - set_material = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': set_position, 'Material': surface.shaderfunc_to_material(shader_chameleon_eye)}) - - transform_1 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': set_material, 'Scale': (0.0500, 0.0600, 0.0600)}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': transform_1}, attrs={'is_active_output': 
True}) - -def shader_chameleon(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - attribute_1 = nw.new_node(Nodes.Attribute, attrs={'attribute_name': 'Ridge', 'attribute_type': 'GEOMETRY'}) - - # map_range = nw.new_node(Nodes.MapRange, input_kwargs={'Value': attribute_1.outputs["Fac"], 2: 0.0010}) - - colorramp_2 = nw.new_node(Nodes.ColorRamp, input_kwargs={'Fac': attribute_1.outputs["Fac"]}) - colorramp_2.color_ramp.elements[0].position = 0.0091 - colorramp_2.color_ramp.elements[0].color = [0.0000, 0.0000, 0.0000, 1.0000] - colorramp_2.color_ramp.elements[1].position = 0.9841 - colorramp_2.color_ramp.elements[1].color = [1.0000, 1.0000, 1.0000, 1.0000] - - texture_coordinate = nw.new_node(Nodes.TextureCoord) - - noise_texture_1 = nw.new_node(Nodes.NoiseTexture, input_kwargs={'Vector': texture_coordinate.outputs["Generated"], 'Scale': 10.0000, 'Distortion': 2.0000}) - - colorramp_3 = nw.new_node(Nodes.ColorRamp, input_kwargs={'Fac': noise_texture_1.outputs["Fac"]}) - colorramp_3.color_ramp.elements.new(0) - colorramp_3.color_ramp.elements[0].position = 0.2773 - colorramp_3.color_ramp.elements[0].color = [0.0660, 0.1203, 0.0151, 1.0000] - colorramp_3.color_ramp.elements[1].position = 0.6386 - colorramp_3.color_ramp.elements[1].color = [0.0405, 0.0397, 0.0064, 1.0000] - colorramp_3.color_ramp.elements[2].position = 1.0000 - colorramp_3.color_ramp.elements[2].color = [0.0069, 0.0278, 0.0000, 1.0000] - - noise_texture = nw.new_node(Nodes.NoiseTexture, input_kwargs={'Vector': texture_coordinate.outputs["Generated"], 'W': 1.0000}, attrs={'noise_dimensions': '4D'}) - - colorramp_1 = nw.new_node(Nodes.ColorRamp, input_kwargs={'Fac': noise_texture.outputs["Fac"]}) - colorramp_1.color_ramp.elements.new(0) - colorramp_1.color_ramp.elements[0].position = 0.2818 - colorramp_1.color_ramp.elements[0].color = [0.3390, 0.1458, 0.0277, 1.0000] - colorramp_1.color_ramp.elements[1].position = 0.5795 - colorramp_1.color_ramp.elements[1].color = [0.1295, 0.0542, 0.0220, 1.0000] - colorramp_1.color_ramp.elements[2].position = 1.0000 - colorramp_1.color_ramp.elements[2].color = [0.2549, 0.1495, 0.0318, 1.0000] - - mix_1 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': colorramp_2.outputs["Color"], 'Color1': colorramp_3.outputs["Color"], 'Color2': colorramp_1.outputs["Color"]}) - - separate_color = nw.new_node(Nodes.SeparateColor, input_kwargs={'Color': mix_1}) - - texture_coordinate_1 = nw.new_node(Nodes.TextureCoord) - - noise_texture_2 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': texture_coordinate_1.outputs["Normal"], 'Scale': 20.0000, 'Detail': 200.0000, 'Roughness': 0.0000}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: separate_color.outputs["Red"], 1: noise_texture_2.outputs["Fac"]}, - attrs={'use_clamp': True, 'operation': 'MULTIPLY'}) - - combine_color = nw.new_node('ShaderNodeCombineColor', - input_kwargs={'Red': multiply, 'Green': separate_color.outputs["Green"], 'Blue': separate_color.outputs["Blue"]}) - - principled_bsdf_1 = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': combine_color, 'Specular': 0.3000, 'Roughness': 0.6000}) - - material_output_1 = nw.new_node(Nodes.MaterialOutput, input_kwargs={'Surface': principled_bsdf_1}, attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_chameleon_leg_shape', singleton=False, type='GeometryNodeTree') -def nodegroup_chameleon_leg_shape(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = 
nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'body_length', 0.5000), - ('NodeSocketFloat', 'body_position', 0.1000), - ('NodeSocketFloat', 'body_thickness', 0.0500), - ('NodeSocketFloat', 'body_height', -0.1000), - ('NodeSocketVectorEuler', 'Rotation', (0.0000, -0.6981, 0.0000)), - ('NodeSocketFloat', 'thigh_length', 0.6000), - ('NodeSocketFloat', 'calf_length', 0.5000), - ('NodeSocketFloat', 'thigh_body_rotation', 25.0000), - ('NodeSocketFloat', 'calf_body_rotation', 15.0000), - ('NodeSocketFloat', 'thigh_calf_rotation', 20.0000), - ('NodeSocketFloat', 'toe_toe_rotation', 20.0000), - ('NodeSocketVectorXYZ', 'thigh_scale', (1.0000, 0.6500, 1.0000)), - ('NodeSocketVectorXYZ', 'calf_scale', (1.0000, 0.6500, 1.0000)), - ('NodeSocketVectorXYZ', 'ouScale', (1.0000, 1.0000, 1.0000)), - ('NodeSocketVectorXYZ', 'inScale', (0.6000, 1.0000, 1.0000))]) - - chameleon_leg_raw_shape = nw.new_node(nodegroup_chameleon_leg_raw_shape().name, - input_kwargs={'thigh_length': group_input.outputs["thigh_length"], 'calf_length': group_input.outputs["calf_length"], 'thigh_body_rotation': group_input.outputs["thigh_body_rotation"], 'calf_body_rotation': group_input.outputs["calf_body_rotation"], 'thigh_calf_rotation': group_input.outputs["thigh_calf_rotation"], 'toe_toe_rotation': group_input.outputs["toe_toe_rotation"], 'thigh_scale': group_input.outputs["thigh_scale"], 'calf_scale': group_input.outputs["calf_scale"], 'ouScale': group_input.outputs["ouScale"], 'inScale': group_input.outputs["inScale"]}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["body_length"], 1: group_input.outputs["body_position"]}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': multiply, 'Y': group_input.outputs["body_thickness"], 'Z': group_input.outputs["body_height"]}) - - transform_2 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': chameleon_leg_raw_shape, 'Translation': combine_xyz, 'Rotation': group_input.outputs["Rotation"]}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': transform_2}, attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_chameleon_tail', singleton=False, type='GeometryNodeTree') -def nodegroup_chameleon_tail(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'RadStartEnd', (0.4000, 0.2500, 0.9000)), - ('NodeSocketFloat', 'body_length', 0.5000), - ('NodeSocketFloat', 'body_position', 0.5000)]) - - chameleon_tail_shape = nw.new_node(nodegroup_chameleon_tail_shape().name) - - sample_curve = nw.new_node(Nodes.SampleCurve, - input_kwargs={'Curve': chameleon_tail_shape.outputs["Curve"]}, - attrs={'mode': 'FACTOR'}) - - scale = nw.new_node(Nodes.VectorMath, - input_kwargs={0: sample_curve.outputs["Position"], 'Scale': -1.0000}, - attrs={'operation': 'SCALE'}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': chameleon_tail_shape.outputs["Mesh"], 'Offset': scale.outputs["Vector"]}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["body_length"], 1: group_input.outputs["body_position"]}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply, 'Z': 0.1000}) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': set_position, 'Translation': combine_xyz, 'Rotation': (0.0000, 0.1745, 0.3491), 'Scale': (1.0000, 0.8000, 
1.0000)}) - - subdivide_mesh = nw.new_node(Nodes.SubdivideMesh, input_kwargs={'Mesh': transform, 'Level': 2}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': subdivide_mesh}, attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_chameleon_body_shape', singleton=False, type='GeometryNodeTree') -def nodegroup_chameleon_body_shape(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVectorXYZ', 'Scale', (0.9000, 0.7000, 0.8000)), - ('NodeSocketFloat', 'length', 1.4000)]) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["length"]}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply, 'Y': 0.1000}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': group_input.outputs["length"], 'Y': 0.3000}) - - quadratic_bezier = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Resolution': 64, 'Start': (0.0000, 0.0000, 0.0000), 'Middle': combine_xyz_1, 'End': combine_xyz}) - - simpletube = nw.new_node(nodegroup_simple_tube().name, - input_kwargs={'Curve': quadratic_bezier, 'RadStartEnd': (0.6000, 0.6000, 1.0000), 'Resolution': 64}) - - transform_geometry = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': simpletube.outputs["Mesh"], 'Scale': group_input.outputs["Scale"]}) - - back_bump1 = nw.new_node(nodegroup_back_bump1().name, input_kwargs={'Surface': transform_geometry}) - - back_bump2 = nw.new_node(nodegroup_back_bump2().name, input_kwargs={'Surface': back_bump1}) - - back_bump3 = nw.new_node(nodegroup_back_bump3().name, input_kwargs={'Surface': back_bump2}) - - belly_sunken1 = nw.new_node(nodegroup_belly_sunken1().name, input_kwargs={'Surface': back_bump3}) - - shouder_sunken = nw.new_node(nodegroup_shouder_sunken().name, input_kwargs={'Surface': belly_sunken1}) - - neck_bump = nw.new_node(nodegroup_neck_bump().name, input_kwargs={'Surface': shouder_sunken}) - - neck_bump2 = nw.new_node(nodegroup_neck_bump2().name, input_kwargs={'Surface': neck_bump}) - - subdivision_surface = nw.new_node(Nodes.SubdivisionSurface, input_kwargs={'Mesh': neck_bump2}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': subdivision_surface}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Mesh': join_geometry}, attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_chameleon_head_shape', singleton=False, type='GeometryNodeTree') -def nodegroup_chameleon_head_shape(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - polarbezier = nw.new_node(nodegroup_polar_bezier().name, - input_kwargs={'Resolution': 64, 'angles_deg': (0.0000, 0.0000, -5.0000), 'Seg Lengths': (0.1000, 0.2400, 0.1000)}) - - simpletube = nw.new_node(nodegroup_simple_tube().name, - input_kwargs={'Curve': polarbezier.outputs["Curve"], 'RadStartEnd': (0.4000, 0.1800, 0.7800), 'Resolution': 64}) - - group_input_2 = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Crown', 0.2000), - ('NodeSocketFloat', 'EyeBrow', 0.0200), - ('NodeSocketVectorXYZ', 'Scale', (1.0000, 1.0000, 1.0000))]) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': simpletube.outputs["Mesh"], 'Scale': group_input_2.outputs["Scale"]}) - - quadratic_bezier_17 = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Resolution': 32, 'Start': (0.2000, 0.2500, 0.1000), 'Middle': (0.6000, 0.2500, 0.0000), 'End': 
(0.7900, 0.2500, 0.0000)}) - - curveparametercurve = nw.new_node(nodegroup_curve_parameter_curve().name, - input_kwargs={'Surface': transform, 'UVCurve': quadratic_bezier_17, 'CtrlptsU': 64, 'CtrlptsW': 64}) - - curvesculpt = nw.new_node(nodegroup_curve_sculpt().name, - input_kwargs={'Target': transform, 'Curve': curveparametercurve, 'Base Radius': 0.1500, 'Base Factor': 0.0200, 'SymmY': False}) - - quadratic_bezier_22 = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Resolution': 32, 'Start': (0.7500, 0.7500, 0.1000), 'Middle': (0.7200, 0.7500, 0.0000), 'End': (0.7000, 0.7500, 0.0000)}) - - curveparametercurve_1 = nw.new_node(nodegroup_curve_parameter_curve().name, - input_kwargs={'Surface': curvesculpt.outputs["Geometry"], 'UVCurve': quadratic_bezier_22, 'CtrlptsU': 64, 'CtrlptsW': 64}) - - curvesculpt_1 = nw.new_node(nodegroup_curve_sculpt().name, - input_kwargs={'Target': curvesculpt.outputs["Geometry"], 'Curve': curveparametercurve_1, 'Base Radius': 0.1700, 'Base Factor': 0.0300, 'SymmY': False}) - - quadratic_bezier_26 = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Resolution': 32, 'Start': (0.8000, 0.6800, 0.0300), 'Middle': (0.6500, 0.6800, 0.0000), 'End': (0.5000, 0.6000, 0.0500)}) - - curveparametercurve_2 = nw.new_node(nodegroup_curve_parameter_curve().name, - input_kwargs={'Surface': curvesculpt_1.outputs["Geometry"], 'UVCurve': quadratic_bezier_26, 'CtrlptsU': 64, 'CtrlptsW': 64}) - - curvesculpt_2 = nw.new_node(nodegroup_curve_sculpt().name, - input_kwargs={'Target': curvesculpt_1.outputs["Geometry"], 'Curve': curveparametercurve_2, 'Base Factor': 0.0300}) - - quadratic_bezier_1 = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Resolution': 32, 'Start': (0.7000, 0.5500, 0.0300), 'Middle': (0.7000, 0.5500, 0.0300), 'End': (0.7500, 0.5700, -0.0200)}) - - curveparametercurve_3 = nw.new_node(nodegroup_curve_parameter_curve().name, - input_kwargs={'Surface': curvesculpt_2.outputs["Geometry"], 'UVCurve': quadratic_bezier_1, 'CtrlptsU': 64, 'CtrlptsW': 64}) - - curvesculpt_3 = nw.new_node(nodegroup_curve_sculpt().name, - input_kwargs={'Target': curvesculpt_2.outputs["Geometry"], 'Curve': curveparametercurve_3, 'Base Radius': 0.1000, 'Base Factor': -0.0200}) - - quadratic_bezier_3 = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Resolution': 32, 'Start': (0.7000, 0.5800, 0.0100), 'Middle': (0.7500, 0.5800, 0.0100), 'End': (0.7700, 0.5300, 0.0100)}) - - curveparametercurve_4 = nw.new_node(nodegroup_curve_parameter_curve().name, - input_kwargs={'Surface': curvesculpt_3.outputs["Geometry"], 'UVCurve': quadratic_bezier_3, 'CtrlptsU': 64, 'CtrlptsW': 64}) - - curvesculpt_4 = nw.new_node(nodegroup_curve_sculpt().name, - input_kwargs={'Target': curvesculpt_3.outputs["Geometry"], 'Curve': curveparametercurve_4, 'Base Radius': 0.0400, 'Base Factor': -0.0100}) - - quadratic_bezier_4 = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Resolution': 32, 'Start': (0.3000, 0.2500, 0.0000), 'Middle': (0.4000, 0.2500, 0.0000), 'End': (0.7000, 0.2500, 0.0000)}) - - curveparametercurve_5 = nw.new_node(nodegroup_curve_parameter_curve().name, - input_kwargs={'Surface': curvesculpt_4.outputs["Geometry"], 'UVCurve': quadratic_bezier_4, 'CtrlptsU': 64, 'CtrlptsW': 64}) - - curvesculpt_5 = nw.new_node(nodegroup_curve_sculpt().name, - input_kwargs={'Target': curvesculpt_4.outputs["Geometry"], 'Curve': curveparametercurve_5, 'Base Radius': 0.2000, 'Base Factor': 0.0100, 'SymmY': False}) - - quadratic_bezier_9 = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Resolution': 32, 'Start': 
(0.3000, 0.2500, 0.0000), 'Middle': (0.4000, 0.2500, 0.0000), 'End': (0.5000, 0.2500, 0.0000)}) - - curveparametercurve_6 = nw.new_node(nodegroup_curve_parameter_curve().name, - input_kwargs={'Surface': curvesculpt_5.outputs["Geometry"], 'UVCurve': quadratic_bezier_9, 'CtrlptsU': 64, 'CtrlptsW': 64}) - - curvesculpt_6 = nw.new_node(nodegroup_curve_sculpt().name, - input_kwargs={'Target': curvesculpt_5.outputs["Geometry"], 'Curve': curveparametercurve_6, 'Base Radius': 0.2000, 'Base Factor': 0.0100, 'SymmY': False}) - - quadratic_bezier_5 = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Resolution': 40, 'Start': (0.5000, 0.6000, 0.0000), 'Middle': (0.7000, 0.7000, 0.0000), 'End': (1.0000, 0.6500, 0.0100)}) - - quadratic_bezier_6 = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Start': (0.5000, 0.6000, 0.0000), 'Middle': (0.3000, 0.5500, 0.0000), 'End': (0.2000, 0.7000, 0.0200)}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [quadratic_bezier_5, quadratic_bezier_6]}) - - curveparametercurve_7 = nw.new_node(nodegroup_curve_parameter_curve().name, - input_kwargs={'Surface': curvesculpt_6.outputs["Geometry"], 'UVCurve': join_geometry, 'CtrlptsU': 64, 'CtrlptsW': 64}) - - curvesculpt_7 = nw.new_node(nodegroup_curve_sculpt().name, - input_kwargs={'Target': curvesculpt_6.outputs["Geometry"], 'Curve': curveparametercurve_7, 'Base Radius': 0.0150, 'Base Factor': group_input_2.outputs["EyeBrow"]}) - - quadratic_bezier_7 = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Resolution': 64, 'Start': (0.6400, 0.7600, 0.0200), 'Middle': (0.4400, 0.8800, 0.0000), 'End': (0.5100, 0.9200, 0.0000)}) - - curveparametercurve_8 = nw.new_node(nodegroup_curve_parameter_curve().name, - input_kwargs={'Surface': curvesculpt_7.outputs["Geometry"], 'UVCurve': quadratic_bezier_7, 'CtrlptsU': 64, 'CtrlptsW': 64}) - - curvesculpt_8 = nw.new_node(nodegroup_curve_sculpt().name, - input_kwargs={'Target': curvesculpt_7.outputs["Geometry"], 'Curve': curveparametercurve_8, 'Base Radius': 0.1100, 'Base Factor': -0.0100}) - - quadratic_bezier_8 = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Resolution': 64, 'Start': (0.6500, 0.7500, 0.0200), 'Middle': (0.3000, 0.7500, 0.0100), 'End': (0.1000, 0.7500, 0.0000)}) - - quadratic_bezier_12 = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Resolution': 12, 'Start': (0.1500, 0.6000, 0.0200), 'Middle': (0.2000, 0.7000, 0.0100), 'End': (0.1000, 0.7500, 0.0050)}) - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [quadratic_bezier_8, quadratic_bezier_12]}) - - curveparametercurve_9 = nw.new_node(nodegroup_curve_parameter_curve().name, - input_kwargs={'Surface': curvesculpt_8.outputs["Geometry"], 'UVCurve': join_geometry_1, 'CtrlptsU': 64, 'CtrlptsW': 64}) - - curvesculpt_9 = nw.new_node(nodegroup_curve_sculpt().name, - input_kwargs={'Target': curvesculpt_8.outputs["Geometry"], 'Curve': curveparametercurve_9, 'Base Radius': 0.0300, 'Base Factor': group_input_2.outputs["Crown"]}) - - quadratic_bezier_18 = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Resolution': 200, 'Start': (0.9000, 0.2500, 0.0500), 'Middle': (0.8000, 0.2500, 0.0000), 'End': (0.6000, 0.2500, 0.0400)}) - - curveparametercurve_10 = nw.new_node(nodegroup_curve_parameter_curve().name, - input_kwargs={'Surface': curvesculpt_9.outputs["Geometry"], 'UVCurve': quadratic_bezier_18, 'CtrlptsU': 64, 'CtrlptsW': 64}) - - curvesculpt_10 = nw.new_node(nodegroup_curve_sculpt().name, - input_kwargs={'Target': curvesculpt_9.outputs["Geometry"], 
'Curve': curveparametercurve_10, 'Base Radius': 0.1000, 'Base Factor': 0.0200}) - - quadratic_bezier_16 = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Resolution': 32, 'Start': (0.7000, 0.3500, 0.0500), 'Middle': (0.6000, 0.4000, 0.0000), 'End': (0.4000, 0.3500, 0.0400)}) - - curveparametercurve_11 = nw.new_node(nodegroup_curve_parameter_curve().name, - input_kwargs={'Surface': curvesculpt_10.outputs["Geometry"], 'UVCurve': quadratic_bezier_16, 'CtrlptsU': 64, 'CtrlptsW': 64}) - - curvesculpt_11 = nw.new_node(nodegroup_curve_sculpt().name, - input_kwargs={'Target': curvesculpt_10.outputs["Geometry"], 'Curve': curveparametercurve_11, 'Base Radius': 0.1500, 'Base Factor': 0.0200}) - - quadratic_bezier_15 = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Resolution': 20, 'Start': (0.9000, 0.2500, 0.0100), 'Middle': (0.6000, 0.2500, 0.0000), 'End': (0.2000, 0.2500, 0.0000)}) - - curveparametercurve_12 = nw.new_node(nodegroup_curve_parameter_curve().name, - input_kwargs={'Surface': curvesculpt_11.outputs["Geometry"], 'UVCurve': quadratic_bezier_15, 'CtrlptsU': 64, 'CtrlptsW': 64}) - - curvesculpt_12 = nw.new_node(nodegroup_curve_sculpt().name, - input_kwargs={'Target': curvesculpt_11.outputs["Geometry"], 'Curve': curveparametercurve_12, 'Base Radius': 0.0200, 'Base Factor': 0.0300, 'SymmY': False}) - - quadratic_bezier_19 = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Resolution': 32, 'Start': (1.0000, 0.4000, 0.0100), 'Middle': (0.5000, 0.4500, 0.0000), 'End': (0.4500, 0.4000, 0.0100)}) - - curveparametercurve_13 = nw.new_node(nodegroup_curve_parameter_curve().name, - input_kwargs={'Surface': curvesculpt_12.outputs["Geometry"], 'UVCurve': quadratic_bezier_19, 'CtrlptsU': 64, 'CtrlptsW': 64}) - - curvesculpt_13 = nw.new_node(nodegroup_curve_sculpt().name, - input_kwargs={'Target': curvesculpt_12.outputs["Geometry"], 'Curve': curveparametercurve_13, 'Base Radius': 0.0200, 'Base Factor': 0.0100, 'Switch': False}) - - quadratic_bezier_14 = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Resolution': 64, 'Start': (0.8000, 0.7500, 0.0000), 'Middle': (0.5000, 0.7500, 0.0000), 'End': (0.1000, 0.7500, 0.0000)}) - - quadratic_bezier_13 = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Resolution': 12, 'Start': (0.1500, 0.6000, 0.0000), 'Middle': (0.2000, 0.7000, 0.0000), 'End': (0.1000, 0.7500, 0.0000)}) - - join_geometry_2 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [quadratic_bezier_14, quadratic_bezier_13]}) - - curveparametercurve_14 = nw.new_node(nodegroup_curve_parameter_curve().name, - input_kwargs={'Surface': curvesculpt_13.outputs["Geometry"], 'UVCurve': join_geometry_2, 'CtrlptsU': 64, 'CtrlptsW': 64}) - - curvesculpt_14 = nw.new_node(nodegroup_curve_sculpt().name, - input_kwargs={'Target': curvesculpt_13.outputs["Geometry"], 'Curve': curveparametercurve_14, 'Base Radius': 0.0300, 'Base Factor': 0.0000, 'Attr': True}) - - quadratic_bezier_23 = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Resolution': 40, 'Start': (0.6000, 0.6000, 0.0000), 'Middle': (0.9000, 0.7300, 0.0000), 'End': (1.0000, 0.6500, 0.0000)}) - - quadratic_bezier_24 = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Start': (0.6000, 0.6000, 0.0000), 'Middle': (0.5000, 0.5500, 0.0000), 'End': (0.2000, 0.6200, 0.0000)}) - - join_geometry_3 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [quadratic_bezier_23, quadratic_bezier_24]}) - - curveparametercurve_15 = nw.new_node(nodegroup_curve_parameter_curve().name, - input_kwargs={'Surface': 
curvesculpt_14.outputs["Geometry"], 'UVCurve': join_geometry_3, 'CtrlptsU': 64, 'CtrlptsW': 64}) - - curvesculpt_15 = nw.new_node(nodegroup_curve_sculpt().name, - input_kwargs={'Target': curvesculpt_14.outputs["Geometry"], 'Curve': curveparametercurve_15, 'Base Radius': 0.0200, 'Base Factor': 0.0000, 'Attr': True}) - - quadratic_bezier_25 = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Resolution': 64, 'Start': (1.0000, 0.4000, 0.0000), 'Middle': (0.7000, 0.4500, 0.0000), 'End': (0.4500, 0.4000, 0.0000)}) - - curveparametercurve_16 = nw.new_node(nodegroup_curve_parameter_curve().name, - input_kwargs={'Surface': curvesculpt_15.outputs["Geometry"], 'UVCurve': quadratic_bezier_25, 'CtrlptsU': 64, 'CtrlptsW': 64}) - - curvesculpt_16 = nw.new_node(nodegroup_curve_sculpt().name, - input_kwargs={'Target': curvesculpt_15.outputs["Geometry"], 'Curve': curveparametercurve_16, 'Base Radius': 0.0150, 'Base Factor': 0.0000, 'Switch': False, 'Attr': True}) - - merge_by_distance_1 = nw.new_node(Nodes.MergeByDistance, - input_kwargs={'Geometry': curvesculpt_16.outputs["Geometry"], 'Distance': 0.0000}) - - subdivision_surface = nw.new_node(Nodes.SubdivisionSurface, input_kwargs={'Mesh': merge_by_distance_1, 'Level': 3}) - - set_position = nw.new_node(Nodes.SetPosition, input_kwargs={'Geometry': subdivision_surface}) - - set_shade_smooth = nw.new_node(Nodes.SetShadeSmooth, input_kwargs={'Geometry': set_position, 'Shade Smooth': False}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': set_shade_smooth}, attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_round_bump', singleton=False, type='GeometryNodeTree') -def nodegroup_round_bump(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketFloatDistance', 'Distance', 0.0200), - ('NodeSocketFloat', 'Offset Scale', 0.0100), - ('NodeSocketInt', 'Level', 1)]) - - subdivide_mesh = nw.new_node(Nodes.SubdivideMesh, - input_kwargs={'Mesh': group_input.outputs["Geometry"], 'Level': group_input.outputs["Level"]}) - - merge_by_distance = nw.new_node(Nodes.MergeByDistance, - input_kwargs={'Geometry': subdivide_mesh, 'Distance': group_input.outputs["Distance"]}) - # merge_by_distance = nw.new_node(Nodes.MergeByDistance, - # input_kwargs={'Geometry': subdivide_mesh, 'Distance': 2}) - - dual_mesh = nw.new_node(Nodes.DualMesh, input_kwargs={'Mesh': merge_by_distance}) - - split_edges = nw.new_node(Nodes.SplitEdges, input_kwargs={'Mesh': dual_mesh}) - - scale_elements = nw.new_node(Nodes.ScaleElements, input_kwargs={'Geometry': split_edges, 'Scale': 0.9000}) - - extrude_mesh = nw.new_node(Nodes.ExtrudeMesh, - input_kwargs={'Mesh': scale_elements, 'Offset Scale': group_input.outputs["Offset Scale"], 'Individual': False}) - - subdivision_surface_1 = nw.new_node(Nodes.SubdivisionSurface, input_kwargs={'Mesh': extrude_mesh.outputs["Mesh"]}) - - set_shade_smooth = nw.new_node(Nodes.SetShadeSmooth, input_kwargs={'Geometry': subdivision_surface_1}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [set_shade_smooth, group_input.outputs["Geometry"]]}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': join_geometry}, attrs={'is_active_output': True}) - -def shader_chameleon_eye(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - attribute_2 = nw.new_node(Nodes.Attribute, attrs={'attribute_name': 
'Pupil'}) - - colorramp_4 = nw.new_node(Nodes.ColorRamp, input_kwargs={'Fac': attribute_2.outputs["Fac"]}) - colorramp_4.color_ramp.elements[0].position = 0.0091 - colorramp_4.color_ramp.elements[0].color = [0.0000, 0.0000, 0.0000, 1.0000] - colorramp_4.color_ramp.elements[1].position = 0.9841 - colorramp_4.color_ramp.elements[1].color = [1.0000, 1.0000, 1.0000, 1.0000] - - attribute_1 = nw.new_node(Nodes.Attribute, attrs={'attribute_name': 'Ridge'}) - - colorramp_2 = nw.new_node(Nodes.ColorRamp, input_kwargs={'Fac': attribute_1.outputs["Fac"]}) - colorramp_2.color_ramp.elements[0].position = 0.0091 - colorramp_2.color_ramp.elements[0].color = [0.0000, 0.0000, 0.0000, 1.0000] - colorramp_2.color_ramp.elements[1].position = 0.9841 - colorramp_2.color_ramp.elements[1].color = [1.0000, 1.0000, 1.0000, 1.0000] - - texture_coordinate = nw.new_node(Nodes.TextureCoord) - - voronoi_texture = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': texture_coordinate.outputs["Generated"], 'Scale': 300.0000, 'Smoothness': 0.0000}, - attrs={'feature': 'SMOOTH_F1'}) - - mapping_1 = nw.new_node(Nodes.Mapping, input_kwargs={'Vector': voronoi_texture.outputs["Distance"]}) - - colorramp = nw.new_node(Nodes.ColorRamp, input_kwargs={'Fac': mapping_1}) - colorramp.color_ramp.interpolation = "CONSTANT" - colorramp.color_ramp.elements[0].position = 0.0000 - colorramp.color_ramp.elements[0].color = [1.0000, 1.0000, 1.0000, 1.0000] - colorramp.color_ramp.elements[1].position = 0.3159 - colorramp.color_ramp.elements[1].color = [0.0000, 0.0000, 0.0000, 1.0000] - - mapping = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': texture_coordinate.outputs["Generated"], 'Location': (1.0000, 0.0000, 0.0000)}) - - noise_texture_1 = nw.new_node(Nodes.NoiseTexture, input_kwargs={'Vector': mapping, 'Scale': 3.0000}) - - colorramp_3 = nw.new_node(Nodes.ColorRamp, input_kwargs={'Fac': noise_texture_1.outputs["Fac"]}) - colorramp_3.color_ramp.elements.new(0) - colorramp_3.color_ramp.elements[0].position = 0.2773 - colorramp_3.color_ramp.elements[0].color = [0.0353, 0.0942, 0.0136, 1.0000] - colorramp_3.color_ramp.elements[1].position = 0.6000 - colorramp_3.color_ramp.elements[1].color = [0.0580, 0.0276, 0.0020, 1.0000] - colorramp_3.color_ramp.elements[2].position = 0.6386 - colorramp_3.color_ramp.elements[2].color = [0.0405, 0.0397, 0.0064, 1.0000] - - mix = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': colorramp.outputs["Color"], 'Color1': colorramp_3.outputs["Color"], 'Color2': (0.1421, 0.1015, 0.0241, 1.0000)}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, input_kwargs={'Vector': mapping, 'W': 1.0000}, attrs={'noise_dimensions': '4D'}) - - colorramp_1 = nw.new_node(Nodes.ColorRamp, input_kwargs={'Fac': noise_texture.outputs["Fac"]}) - colorramp_1.color_ramp.elements[0].position = 0.0000 - colorramp_1.color_ramp.elements[0].color = [0.6990, 0.5484, 0.1189, 1.0000] - colorramp_1.color_ramp.elements[1].position = 1.0000 - colorramp_1.color_ramp.elements[1].color = [0.2549, 0.1495, 0.0318, 1.0000] - - mix_1 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': colorramp_2.outputs["Color"], 'Color1': mix, 'Color2': colorramp_1.outputs["Color"]}) - - mix_2 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': colorramp_4.outputs["Color"], 'Color1': mix_1, 'Color2': (0.0082, 0.0082, 0.0082, 1.0000)}) - - separate_color = nw.new_node(Nodes.SeparateColor, input_kwargs={'Color': mix_2}) - - texture_coordinate_1 = nw.new_node(Nodes.TextureCoord) - - noise_texture_2 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': 
texture_coordinate_1.outputs["Normal"], 'Scale': 20.0000, 'Detail': 200.0000, 'Roughness': 0.0000}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: separate_color.outputs["Red"], 1: noise_texture_2.outputs["Fac"]}, - attrs={'use_clamp': True, 'operation': 'MULTIPLY'}) - - combine_color = nw.new_node('ShaderNodeCombineColor', - input_kwargs={'Red': multiply, 'Green': separate_color.outputs["Green"], 'Blue': separate_color.outputs["Blue"]}) - - principled_bsdf_1 = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': combine_color, 'Specular': 0.3000, 'Roughness': 0.6000}) - - material_output_1 = nw.new_node(Nodes.MaterialOutput, input_kwargs={'Surface': principled_bsdf_1}, attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_chameleon', singleton=False, type='GeometryNodeTree') -def nodegroup_chameleon(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'body_length', 1.4000), - ('NodeSocketFloat', 'head_crown', 0.2000), - ('NodeSocketFloat', 'head_eyebrow', 0.0200), - ('NodeSocketVectorXYZ', 'head_scale', (1.0000, 1.0000, 1.0000)), - ('NodeSocketVectorEuler', 'left_eye_rotation', (0.0000, 0.0000, -1.5)), - ('NodeSocketVectorEuler', 'right_eye_rotation', (0.0000, 0.0000, 1.5)), - ('NodeSocketFloat', 'pupil_radius', 0.2200), - ('NodeSocketFloat', 'front_leg_position', 0.0800), - ('NodeSocketFloat', 'back_leg_position', 0.8500)]) - - chameleon_head_shape = nw.new_node(nodegroup_chameleon_head_shape().name, - input_kwargs={'Crown': group_input.outputs["head_crown"], 'EyeBrow': group_input.outputs["head_eyebrow"], 'Scale': group_input.outputs["head_scale"]}) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': chameleon_head_shape, 'Translation': (0.1000, 0.0000, 0.0000), 'Rotation': (0.0000, 0.0000, 3.1416)}) - - round_bump = nw.new_node(nodegroup_round_bump().name, - input_kwargs={'Geometry': transform, 'Distance': 0.0080, 'Offset Scale': 0.0030}) - - chameleon_body_shape = nw.new_node(nodegroup_chameleon_body_shape().name, input_kwargs={'length': group_input.outputs["body_length"]}) - - round_bump_1 = nw.new_node(nodegroup_round_bump().name, - input_kwargs={'Geometry': chameleon_body_shape.outputs["Mesh"], 'Distance': 0.0080, 'Offset Scale': 0.0030}) - - chameleon_tail = nw.new_node(nodegroup_chameleon_tail().name, - input_kwargs={'body_length': group_input.outputs["body_length"], 'body_position': 0.4500}) - - round_bump_2 = nw.new_node(nodegroup_round_bump().name, - input_kwargs={'Geometry': chameleon_tail.outputs["Geometry"], 'Distance': 0.0080, 'Offset Scale': 0.0030}) - - chameleon_leg_shape = nw.new_node(nodegroup_chameleon_leg_shape().name, - input_kwargs={'body_length': group_input.outputs["body_length"], 'body_position': group_input.outputs["back_leg_position"], 'body_thickness': 0.2500, 'Rotation': (0.0000, -1.0472, 3.1416), 'thigh_length': 0.4000, 'thigh_body_rotation': -35.0000, 'calf_body_rotation': -30.0000, 'thigh_calf_rotation': 10.0000, 'ouScale': (0.6000, 1.0000, 1.0000), 'inScale': (1.0000, 1.0000, 1.0000)}) - - chameleon_leg_shape_1 = nw.new_node(nodegroup_chameleon_leg_shape().name, - input_kwargs={'body_length': group_input.outputs["body_length"], 'body_position': group_input.outputs["back_leg_position"], 'body_thickness': 0.1500, 'Rotation': (0.0000, -1.0472, 3.1416), 'thigh_length': 0.4000, 'thigh_body_rotation': 50.0000, 'calf_body_rotation': 5.0000, 'thigh_calf_rotation': 5.0000}) - - 
chameleon_leg_shape_2 = nw.new_node(nodegroup_chameleon_leg_shape().name, - input_kwargs={'body_length': group_input.outputs["body_length"], 'body_position': group_input.outputs["front_leg_position"], 'body_thickness': 0.0800, 'thigh_body_rotation': 35.0000, 'thigh_calf_rotation': 15.0000}) - - chameleon_leg_shape_3 = nw.new_node(nodegroup_chameleon_leg_shape().name, - input_kwargs={'body_length': group_input.outputs["body_length"], 'body_position': group_input.outputs["front_leg_position"], 'body_thickness': -0.0300, 'thigh_body_rotation': -25.0000, 'calf_body_rotation': -15.0000, 'thigh_calf_rotation': 15.0000, 'ouScale': (0.6000, 1.0000, 1.0000), 'inScale': (1.0000, 1.0000, 1.0000)}) - - join_geometry_2 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [chameleon_leg_shape, chameleon_leg_shape_1, chameleon_leg_shape_2, chameleon_leg_shape_3]}) - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [round_bump, round_bump_1, round_bump_2, join_geometry_2]}) - - set_material = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': join_geometry_1, 'Material': surface.shaderfunc_to_material(shader_chameleon)}) - - chameleon_eye = nw.new_node(nodegroup_chameleon_eye().name, input_kwargs={'pupil_radius': group_input.outputs["pupil_radius"]}) - - transform_2 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': chameleon_eye, 'Translation': (-0.2000, -0.0300, 0.0200), 'Rotation': group_input.outputs["left_eye_rotation"]}) - - chameleon_eye_1 = nw.new_node(nodegroup_chameleon_eye().name, input_kwargs={'pupil_radius': group_input.outputs["pupil_radius"]}) - - transform_1 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': chameleon_eye_1, 'Translation': (-0.2000, 0.0300, 0.0200), 'Rotation': group_input.outputs["right_eye_rotation"]}) - - join_geometry_3 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [transform_2, transform_1]}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [set_material, join_geometry_3]}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': join_geometry}, attrs={'is_active_output': True}) - -class Chameleon(PartFactory): - param_templates = {} - tags = [] - - def sample_params(self, select=None, var=1): - return {} - - def make_part(self, params): - part = nodegroup_to_part(nodegroup_chameleon, params) - - return part \ No newline at end of file diff --git a/infinigen/assets/creatures/parts/crustacean/antenna.py b/infinigen/assets/creatures/parts/crustacean/antenna.py deleted file mode 100644 index c63a0b544..000000000 --- a/infinigen/assets/creatures/parts/crustacean/antenna.py +++ /dev/null @@ -1,59 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
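The deleted `Chameleon` factory above builds its geometry via `nodegroup_to_part(nodegroup_chameleon, params)`, and its `sample_params` returns an empty dict, so every exposed socket of `nodegroup_chameleon` keeps its default. As a standalone sketch (not part of this patch; the ranges are hypothetical), a parameter dict for such a factory is simply keyed by the exposed socket names:

```python
import numpy as np

def sample_chameleon_params(rng=None):
    # Hypothetical jitter around the socket defaults exposed by
    # nodegroup_chameleon; omitted keys fall back to those defaults.
    rng = rng or np.random.default_rng()
    return {
        "body_length": float(rng.uniform(1.2, 1.6)),     # socket default 1.4
        "head_crown": float(rng.uniform(0.15, 0.25)),    # socket default 0.2
        "head_eyebrow": float(rng.uniform(0.01, 0.03)),  # socket default 0.02
        "pupil_radius": float(rng.uniform(0.18, 0.26)),  # socket default 0.22
        "front_leg_position": 0.08,                      # socket default 0.08
        "back_leg_position": 0.85,                       # socket default 0.85
    }

print(sample_chameleon_params())
```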
- -# Authors: Lingjie Mei - - -import numpy as np -from numpy.random import uniform - -from infinigen.assets.creatures.util.animation.driver_repeated import bend_bones_lerp -from infinigen.assets.creatures.util.creature import Part -from infinigen.assets.creatures.util.genome import Joint -from infinigen.assets.creatures.parts.crustacean.leg import CrabLegFactory -from infinigen.assets.utils.decorate import displace_vertices -from infinigen.assets.utils.object import join_objects -from infinigen.core.util.random import log_uniform - - -class LobsterAntennaFactory(CrabLegFactory): - tag = ['claw'] - - def make_part(self, params) -> Part: - x_length, z_length = params['x_length'], params['z_length'] - segments, x_cuts = self.make_segments(params) - displace_vertices(segments[-1], lambda x, y, z: ( - 0, 0, params['antenna_bend'] * (x / x_length - x_cuts[-2]) ** 2 * params['z_length'])) - obj = join_objects(segments) - - skeleton = np.zeros((2, 3)) - skeleton[1, 0] = x_length - joints = {x: Joint(rest=(0, 0, 0)) for x in x_cuts[1:]} - return Part(skeleton, obj, joints=joints) - - @staticmethod - def animate_bones(arma, bones, params): - bend_bones_lerp(arma, bones, params['antenna_curl'], params['freq']) - - def sample_params(self): - y_length = uniform(.01, .015) - z_length = y_length * log_uniform(1, 1.2) - x_mid_first = uniform(.1, .15) - x_mid_second = uniform(.25, .3) - antenna_bend = uniform(2, 5) - return {**super().sample_params(), - 'y_length': y_length, - 'z_length': z_length, - 'x_mid_first': x_mid_first, - 'x_mid_second': x_mid_second, - 'antenna_bend': antenna_bend, - } - - -class SpinyLobsterAntennaFactory(LobsterAntennaFactory): - tag = ['claw'] - - def sample_params(self): - y_length = uniform(.05, .08) - z_length = y_length * log_uniform(1, 1.2) - return {**super().sample_params(), 'y_length': y_length, 'z_length': z_length} diff --git a/infinigen/assets/creatures/parts/crustacean/body.py b/infinigen/assets/creatures/parts/crustacean/body.py deleted file mode 100644 index 56be383ab..000000000 --- a/infinigen/assets/creatures/parts/crustacean/body.py +++ /dev/null @@ -1,294 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory -# of this source tree. 
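In `LobsterAntennaFactory.make_part` above, the final antenna segment receives a quadratic upward bend through `displace_vertices`. A standalone NumPy sketch of that offset, with values picked from the sampled ranges rather than from any particular scene:

```python
import numpy as np

def antenna_bend_offset(x, x_length, last_cut, antenna_bend, z_length):
    # Mirrors the displace_vertices lambda above: the z offset grows with the
    # squared normalized distance past the second-to-last cut (x_cuts[-2]).
    return antenna_bend * (x / x_length - last_cut) ** 2 * z_length

# antenna_bend ~ U(2, 5), z_length ~ 0.01-0.018, x_cuts[-2] ~ U(.25, .3)
x = np.linspace(0.0, 1.0, 5)
print(antenna_bend_offset(x, x_length=1.0, last_cut=0.28,
                          antenna_bend=3.0, z_length=0.012))
```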
- -# Authors: Lingjie Mei - - -import numpy as np -from numpy.random import uniform -from scipy.interpolate import interp1d - -from infinigen.assets.creatures.util.creature import Part, PartFactory -from infinigen.assets.creatures.util.genome import Joint -from infinigen.assets.creatures.parts.utils.draw import geo_symmetric_texture -from infinigen.assets.utils.decorate import distance2boundary, displace_vertices, read_co, write_co -from infinigen.assets.utils.draw import leaf, spin -from infinigen.assets.utils.misc import log_uniform -from infinigen.assets.utils.object import join_objects, new_line -from infinigen.core.nodes.node_info import Nodes -from infinigen.core.nodes.node_wrangler import NodeWrangler -from infinigen.core.placement.placement import placeholder_locs -from infinigen.core import surface -from infinigen.core.surface import read_attr_data, write_attr_data -from infinigen.core.util import blender as butil - - -class CrabBodyFactory(PartFactory): - tags = ['body'] - min_spike_distance = .1 - min_spike_radius = .02 - - def make_part(self, params) -> Part: - x_length, x_tip, bend_height = map(params.get, ['x_length', 'x_tip', 'bend_height']) - upper = self.make_surface(params) - lower = butil.deep_clone_obj(upper) - self.make_surface_side(upper, params, 'upper') - self.make_surface_side(lower, params, 'lower') - self.add_spikes(upper, params) - self.add_mouth(lower, params) - obj = join_objects([upper, lower]) - - x, y, z = read_co(obj).T - write_attr_data(obj, 'ratio', np.where(z > np.min(z) * params['color_cutoff'], 1, 0)) - butil.modify_mesh(obj, 'WELD', merge_threshold=.001) - - height_scale = interp1d([0, -x_tip + .01, -x_tip - .01, -1], [0, bend_height, bend_height, 0], - 'quadratic', fill_value="extrapolate") - displace_vertices(obj, lambda x, y, z: (0, 0, height_scale(x / x_length))) - self.add_head(obj, params) - - line = new_line(x_length) - line.location[0] -= x_length - butil.apply_transform(line, loc=True) - - line.rotation_euler[1] = np.pi / 2 - butil.apply_transform(line) - butil.modify_mesh(line, 'SIMPLE_DEFORM', deform_method='BEND', angle=-params['bend_angle'], - deform_axis='Y') - line.rotation_euler[1] = -np.pi / 2 - butil.apply_transform(line) - skeleton = read_co(line) - butil.delete(line) - - obj.rotation_euler[1] = np.pi / 2 - butil.apply_transform(obj) - butil.modify_mesh(obj, 'SIMPLE_DEFORM', deform_method='BEND', angle=-params['bend_angle'], - deform_axis='Y') - obj.rotation_euler[1] = -np.pi / 2 - butil.apply_transform(obj) - joints = {i: Joint((0, 0, 0), bounds=np.array([[0, 0, 0], [0, 0, 0]])) for i in - np.linspace(0, 1, 5, endpoint=True)} - return Part(skeleton, obj, joints=joints) - - def add_head(self, obj, params): - def offset(nw: NodeWrangler, vector): - head = nw.scalar_add(1, nw.scalar_divide(nw.separate(nw.new_node(Nodes.InputPosition))[0], - params['x_length'])) - texture = nw.new_node(Nodes.MusgraveTexture, [vector], - input_kwargs={'Scale': params['noise_scale']}) - return nw.combine(nw.scalar_multiply(head, nw.scalar_multiply(texture, params['noise_strength'])), - 0, 0) - - surface.add_geomod(obj, geo_symmetric_texture, input_args=[offset], apply=True) - - @staticmethod - def make_surface(params): - x_length, y_length, x_tip, y_tail = map(params.get, ['x_length', 'y_length', 'x_tip', 'y_tail']) - x_anchors = np.array([0, 0, -x_tip / 2, -x_tip, -x_tip, -x_tip, -(x_tip + 1) / 2, -1, -1]) * x_length - y_anchors = np.array( - [0, .1, params['front_midpoint'], 1, 1, 1, params['back_midpoint'], y_tail, 0]) * y_length - tip_size = 
params['tip_size'] - if params['has_sharp_tip']: - front_angle = params['front_angle'] - back_angle = params['back_angle'] - x_anchors[3] += tip_size * np.sin(front_angle) * x_length - x_anchors[5] -= tip_size * np.sin(back_angle) * x_length - y_anchors[3] += tip_size * (1 - np.cos(front_angle)) * x_length - y_anchors[4] += tip_size * x_length - y_anchors[5] += tip_size * (1 - np.cos(back_angle)) * x_length - vector_locations = [4] - else: - x_anchors[3] += .05 * x_tip * x_length - x_anchors[5] -= .05 * (1 - x_tip) * x_length - vector_locations = [] - obj = leaf(x_anchors, y_anchors, vector_locations) - butil.modify_mesh(obj, 'SUBSURF', levels=1, render_levels=1) - distance2boundary(obj) - return obj - - def make_surface_side(self, obj, params, prefix="upper"): - distance = read_attr_data(obj, 'distance') - height_scale = interp1d([0, .5, 1], [0, params[f'{prefix}_alpha'], 1], 'quadratic') - displace_vertices(obj, lambda x, y, z: ( - 0, 0, (1 if prefix == 'upper' else -1) * height_scale(distance) * params[f'{prefix}_z'])) - displace_vertices(obj, lambda x, y, z: (params[f'{prefix}_shift'] * z, 0, 0)) - offset = lambda nw, vector, distance: nw.combine(0, 0, nw.scalar_multiply(distance, nw.scalar_multiply( - nw.new_node(Nodes.MusgraveTexture, [vector], input_kwargs={'Scale': params['noise_scale'] - }), params[f'noise_strength']))) - surface.add_geomod(obj, geo_symmetric_texture, input_args=[offset], apply=True) - return obj - - def add_spikes(self, obj, params): - def selection(nw: NodeWrangler): - x, y, z = nw.separate(nw.new_node(Nodes.InputPosition)) - return nw.boolean_math('AND', nw.compare('GREATER_THAN', y, 0), nw.compare('GREATER_THAN', z, .02)) - - locations = placeholder_locs(obj, params['spike_density'], selection, self.min_spike_distance, 0) - locations_ = locations.copy() - locations_[:, 1] = -locations_[:, 1] - locations = np.concatenate([locations, locations_], 0) - if len(locations) == 0: return - x, y, z = read_co(obj).T - dist = np.amin(np.linalg.norm(read_co(obj)[np.newaxis] - locations[:, np.newaxis], axis=-1), 0) - extrude = params['spike_height'] * np.clip(1 - dist / self.min_spike_radius, 0, None) - d = np.stack([x + params['spike_center'] * params['x_length'], y, z + params['spike_depth']], -1) - d = d / np.linalg.norm(d, axis=-1, keepdims=True) - displace_vertices(obj, lambda x, y, z: (d * extrude[:, np.newaxis]).T) - - def add_mouth(self, obj, params): - def selection(nw: NodeWrangler): - x, y, z = nw.separate(nw.new_node(Nodes.InputPosition)) - z_length = params['lower_z'] if 'lower_z' in params else params['z_length'] - z_range = nw.boolean_math('AND', nw.compare('GREATER_THAN', z, -params['mouth_z'] * z_length), - nw.compare('LESS_THAN', z, 0)) - x_range = nw.compare('GREATER_THAN', x, -params['mouth_x'] * params['x_length']) - return nw.boolean_math('AND', z_range, x_range) - - def offset(nw: NodeWrangler, vector, distance): - wave_texture = nw.new_node(Nodes.WaveTexture, [vector], input_kwargs={ - 'Scale': params['mouth_noise_scale'], - 'Distortion': 20, - 'Detail': 0 - }) - ratio = nw.scalar_multiply(distance, - nw.build_float_curve(distance, [(0, 0), (.001, 0), (.005, 1), (1, 1)])) - return nw.scale( - nw.scalar_multiply(ratio, nw.scalar_multiply(wave_texture, params['mouth_noise_strength'])), - nw.new_node(Nodes.InputNormal)) - - surface.add_geomod(obj, geo_symmetric_texture, input_args=[offset, selection], apply=True) - - def sample_params(self): - x_length = uniform(.8, 1.2) - y_length = x_length * uniform(.5, .7) - x_tip = uniform(.3, .6) - y_tail = 
uniform(.1, .3) - has_sharp_tip = uniform(0, 1) < .4 - front_midpoint = uniform(.7, .9) - back_midpoint = uniform(.7, .9) - front_angle = uniform(np.pi / 12, np.pi / 8) - back_angle = uniform(np.pi / 6, np.pi / 4) - tip_size = uniform(.05, .15) - upper_z = x_length * uniform(.15, .3) - upper_alpha = uniform(.8, .9) - upper_shift = uniform(-.6, -.4) - noise_strength = uniform(.02, .03) - noise_scale = uniform(8, 15) - lower_alpha = uniform(.96, .98) - lower_z = x_length * uniform(.3, .4) - lower_shift = uniform(.1, .2) - spike_height = uniform(.05, .2) if uniform(0, 1) < .5 else 0 - spike_depth = log_uniform(.4, 2) - spike_center = uniform(.3, .7) - spike_density = log_uniform(100, 500) - mouth_z = uniform(.5, .8) - mouth_x = uniform(.1, .15) - mouth_noise_scale = uniform(10, 15) - mouth_noise_strength = uniform(.1, .2) - bend_angle = uniform(0, np.pi / 3) - bend_height = uniform(.08, .12) - color_cutoff = uniform(0, .5) - return { - 'x_length': x_length, - 'y_length': y_length, - 'x_tip': x_tip, - 'y_tail': y_tail, - 'has_sharp_tip': has_sharp_tip, - 'front_midpoint': front_midpoint, - 'back_midpoint': back_midpoint, - 'front_angle': front_angle, - 'back_angle': back_angle, - 'tip_size': tip_size, - 'upper_z': upper_z, - 'upper_alpha': upper_alpha, - 'upper_shift': upper_shift, - 'noise_strength': noise_strength, - 'noise_scale': noise_scale, - 'lower_z': lower_z, - 'lower_alpha': lower_alpha, - 'lower_shift': lower_shift, - 'spike_height': spike_height, - 'spike_depth': spike_depth, - 'spike_density': spike_density, - 'spike_center': spike_center, - 'mouth_z': mouth_z, - 'mouth_x': mouth_x, - 'mouth_noise_scale': mouth_noise_scale, - 'mouth_noise_strength': mouth_noise_strength, - 'bend_angle': bend_angle, - 'bend_height': bend_height, - 'color_cutoff': color_cutoff, - } - - -class LobsterBodyFactory(CrabBodyFactory): - tags = ['body'] - min_spike_distance = .08 - min_spike_radius = .01 - - def make_part(self, params) -> Part: - x_length, y_length, z_length = map(params.get, ['x_length', 'y_length', 'z_length']) - x_anchors = np.array([0, 0, 1 / 3, 2 / 3, 1, 1]) * x_length - y_anchors = np.array([0, 1, params['midpoint_second'], params['midpoint_first'], .01, 0]) * y_length - obj = spin([x_anchors, y_anchors, 0], [1, 4], axis=(1, 0, 0)) - self.add_mouth(obj, params) - - height_fn = interp1d([0, 1 / 2, 1], [0, params['z_shift_midpoint'] / 2, params['z_shift']], - fill_value='extrapolate') - displace_vertices(obj, lambda x, y, z: (0, 0, height_fn(x / x_length) * y_length)) - - z = read_co(obj).T[-1] - write_attr_data(obj, 'ratio', 1 + np.where(z > 0, 0, uniform(1, 1.5) * z / y_length)) - displace_vertices(obj, lambda x, y, z: ( - 0, 0, -np.clip(z + y_length * params['bottom_cutoff'], None, 0) * (1 - params['bottom_shift']))) - - obj.scale[-1] = z_length / y_length - butil.apply_transform(obj) - - offset = lambda nw, vector: nw.scale(nw.scalar_multiply( - nw.new_node(Nodes.MusgraveTexture, [vector], input_kwargs={'Scale': params['noise_scale'] - }), params[f'noise_strength']), nw.new_node(Nodes.InputNormal)) - surface.add_geomod(obj, geo_symmetric_texture, input_args=[offset], apply=True) - - n_segments = 4 - co = read_co(obj) - skeleton = np.zeros((n_segments, 3)) - skeleton[:, 0] = np.linspace(0, x_length, n_segments) - head_z = co[np.argmax(co[:, 0])][-1] - skeleton[:, -1] = np.linspace(0, head_z, n_segments) - return Part(skeleton, obj) - - def sample_params(self): - x_length = uniform(.6, .8) - y_length = uniform(.15, .2) - z_length = y_length * uniform(1, 1.2) - midpoint_first = 
uniform(.65, .75) - midpoint_second = uniform(.95, 1.05) - z_shift = uniform(.4, .6) - z_shift_midpoint = uniform(.2, .3) - noise_strength = uniform(.02, .04) - noise_scale = uniform(5, 8) - bottom_shift = uniform(.3, .5) - bottom_cutoff = uniform(.2, .3) - mouth_z = uniform(.5, .8) - mouth_x = uniform(.1, .15) - 1 - mouth_noise_scale = uniform(10, 15) - mouth_noise_strength = uniform(.2, .3) - return { - 'x_length': x_length, - 'y_length': y_length, - 'z_length': z_length, - 'midpoint_first': midpoint_first, - 'midpoint_second': midpoint_second, - 'z_shift': z_shift, - 'z_shift_midpoint': z_shift_midpoint, - 'noise_strength': noise_strength, - 'noise_scale': noise_scale, - 'bottom_shift': bottom_shift, - 'bottom_cutoff': bottom_cutoff, - 'mouth_z': mouth_z, - 'mouth_x': mouth_x, - 'mouth_noise_scale': mouth_noise_scale, - 'mouth_noise_strength': mouth_noise_strength, - } diff --git a/infinigen/assets/creatures/parts/crustacean/claw.py b/infinigen/assets/creatures/parts/crustacean/claw.py deleted file mode 100644 index 9773e69d4..000000000 --- a/infinigen/assets/creatures/parts/crustacean/claw.py +++ /dev/null @@ -1,176 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Lingjie Mei - - -import numpy as np -from numpy.random import uniform -from scipy.interpolate import interp1d - -from infinigen.assets.creatures.util.animation.driver_repeated import bend_bones_lerp -from infinigen.assets.creatures.util.creature import Part -from infinigen.assets.creatures.util.genome import Joint -from infinigen.assets.creatures.parts.crustacean.leg import CrabLegFactory -from infinigen.assets.creatures.parts.utils.draw import decorate_segment -from infinigen.assets.utils.decorate import displace_vertices, read_co, remove_vertices -from infinigen.assets.utils.object import join_objects -from infinigen.assets.utils.draw import spin -from infinigen.core.util.random import log_uniform -from infinigen.assets.utils.nodegroup import geo_base_selection -from infinigen.core.nodes.node_info import Nodes -from infinigen.core.nodes.node_wrangler import NodeWrangler -from infinigen.core import surface -from infinigen.core.surface import write_attr_data -from infinigen.core.util import blender as butil - - -class CrabClawFactory(CrabLegFactory): - tags = ['claw'] - min_spike_radius = .01 - - def make_part(self, params) -> Part: - x_length = params['x_length'] - segments, x_cuts = self.make_segments(params) - butil.delete(segments[-1]) - claw, lower = self.make_claw(params) - segments[-1] = claw - obj = join_objects(segments) - lower.parent = obj - - skeleton = np.zeros((2, 3)) - skeleton[1, 0] = x_length - joints = {x: Joint(rest=(0, 0, 0)) for x in x_cuts[1:]} - return Part(skeleton, obj, joints=joints, settings={'rig_extras': True}) - - def make_claw(self, params): - x_length, y_length, z_length, x_mid, y_mid = map(params.get, - ['x_length', 'y_length', 'z_length', 'x_mid_second', - 'y_mid_second']) - xs = x_mid, (x_mid + 1) / 2, (x_mid + 3) / 4, 1 - ys = y_mid, y_mid * params['claw_y_first'], y_mid * params['claw_y_second'], .01 - obj = spin([np.array([xs[0], *xs, xs[-1]]) * x_length, np.array([0, *ys, 0]) * y_length, .0], - [1, len(xs)], axis=(1, 0, 0)) - - bottom_cutoff = params['bottom_cutoff'] - claw_x_depth = params['claw_x_depth'] - displace_vertices(obj, lambda x, y, z: (0, 0, -np.clip( - z + y_length * bottom_cutoff + y_length * (y_mid - bottom_cutoff) * ( - x / 
x_length - x_mid) / claw_x_depth, None, 0) * (1 - params['bottom_shift']))) - width_scale = interp1d([x_mid, x_mid + claw_x_depth, - x_mid + claw_x_depth + params['claw_x_turn'] * (1 - x_mid - claw_x_depth), - 1], [0, 0, params['claw_z_width'], 0], 'cubic', fill_value='extrapolate') - displace_vertices(obj, lambda x, y, z: (0, 0, - np.where(x > (x_mid + claw_x_depth) * x_length, width_scale(x / x_length) * y_mid * y_length, 0))) - displace_vertices(obj, lambda x, y, z: (0, 0, - np.where(z > 0, np.clip(params['top_cutoff'] * y_length - np.abs(y), 0, None) * params['top_shift'], - 0))) - z = read_co(obj).T[-1] - write_attr_data(obj, 'ratio', 1 + np.where(z > 0, 0, uniform(.5, 1.) * z / params['y_length'])) - - def selection(nw: NodeWrangler): - x, y, z = nw.separate(nw.new_node(Nodes.InputPosition)) - lower = nw.compare('LESS_THAN', nw.separate(nw.new_node(Nodes.InputNormal))[-1], 0) - x_range = nw.boolean_math('AND', - nw.compare('GREATER_THAN', x, (x_mid + claw_x_depth * 1.5) * x_length), - nw.compare('LESS_THAN', x, x_length * .98)) - center = nw.compare('LESS_THAN', nw.math('ABSOLUTE', y), params['y_length'] * .5) - return nw.boolean_math('AND', nw.boolean_math('AND', lower, x_range), center) - - temp = butil.spawn_vert('temp') - surface.add_geomod(temp, geo_base_selection, apply=True, - input_args=[obj, selection, params['claw_spike_distance']]) - locations = read_co(temp) - np.random.shuffle(locations) - locations = locations[:100] - butil.delete(temp) - if len(locations) > 0: - dist = np.amin(np.linalg.norm(read_co(obj)[np.newaxis] - locations[:, np.newaxis], axis=-1), 0) - extrude = params['claw_spike_strength'] * np.clip(1 - dist / self.min_spike_radius, 0, None) - displace_vertices(obj, lambda x, y, z: (0, 0, -extrude)) - - decorate_segment(obj, params, x_mid, 1) - obj.scale[-1] = z_length / y_length - butil.apply_transform(obj) - - lower_scale = params['lower_scale'] - lower = butil.deep_clone_obj(obj) - remove_vertices(lower, lambda x, y, z: x < (x_mid + claw_x_depth) * x_length) - lower.location[0] = -(x_mid + claw_x_depth) * x_length - butil.apply_transform(lower, loc=True) - lower.scale = lower_scale, lower_scale, -lower_scale * params['lower_z_scale'] - lower.rotation_euler[1] = uniform(np.pi / 12, np.pi / 4) - butil.apply_transform(lower) - lower.location[0] = (x_mid + claw_x_depth) * x_length - lower.location[-1] = params['lower_z_offset'] * z_length - butil.apply_transform(lower, loc=True) - butil.modify_mesh(lower, 'WELD', merge_threshold=.001) - return obj, lower - - @staticmethod - def animate_bones(arma, bones, params): - main_bones = [b for b in bones if 'extra' not in b.name] - bend_bones_lerp(arma, main_bones, params['claw_curl'], params['freq'], symmetric=False) - extra_bones = [b for b in bones if 'extra' in b.name] - bend_bones_lerp(arma, extra_bones, params['claw_lower_curl'], params['freq'], symmetric=False) - - def sample_params(self): - params = super().sample_params() - z_length = params['y_length'] * uniform(1, 1.2) - x_mid_first = uniform(.2, .25) - x_mid_second = uniform(.4, .6) - y_mid_first = uniform(1.5, 2.) 
- y_mid_second = y_mid_first * log_uniform(1., 1.5) - y_expand = uniform(1.4, 1.5) - noise_strength = uniform(.01, .02) - top_shift = uniform(.6, .8) - claw_y_first = uniform(.6, 1.5) - claw_y_second = claw_y_first * uniform(.4, .6) - claw_x_depth = (1 - x_mid_second) * uniform(.3, .5) - claw_x_turn = uniform(.2, .4) - claw_z_width = uniform(.2, .3) - claw_spike_strength = uniform(.02, .03) - claw_spike_distance = uniform(.03, .06) - lower_z_scale = uniform(.4, .6) - lower_scale = uniform(.75, .9) - lower_z_offset = uniform(-.5, .5) - return {**params, - 'z_length': z_length, - 'x_mid_first': x_mid_first, - 'x_mid_second': x_mid_second, - 'y_mid_first': y_mid_first, - 'y_mid_second': y_mid_second, - 'y_expand': y_expand, - 'noise_strength': noise_strength, - 'top_shift': top_shift, - 'claw_y_first': claw_y_first, - 'claw_y_second': claw_y_second, - 'claw_x_depth': claw_x_depth, - 'claw_x_turn': claw_x_turn, - 'claw_z_width': claw_z_width, - 'claw_spike_distance': claw_spike_distance, - 'claw_spike_strength': claw_spike_strength, - 'lower_z_scale': lower_z_scale, - 'lower_scale': lower_scale, - 'lower_z_offset': lower_z_offset, - } - - -class LobsterClawFactory(CrabClawFactory): - def sample_params(self): - y_expand = uniform(1.4, 1.5) - y_mid_first = uniform(1.5, 2.) - y_mid_second = y_mid_first * log_uniform(1.2, 1.6) - claw_y_first = uniform(1.2, 1.5) - claw_y_second = claw_y_first * uniform(.7, .8) - noise_strength = uniform(.01, .02) - claw_spike_strength = uniform(.01, .02) - return {**super().sample_params(), - 'y_expand': y_expand, - 'y_mid_first': y_mid_first, - 'y_mid_second': y_mid_second, - 'claw_y_first': claw_y_first, - 'claw_y_second': claw_y_second, - 'noise_strength': noise_strength, - 'claw_spike_strength': claw_spike_strength, - } diff --git a/infinigen/assets/creatures/parts/crustacean/leg.py b/infinigen/assets/creatures/parts/crustacean/leg.py deleted file mode 100644 index b16e359c7..000000000 --- a/infinigen/assets/creatures/parts/crustacean/leg.py +++ /dev/null @@ -1,85 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
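The claw outline in `CrabClawFactory.make_claw` above is driven by a cubic `interp1d` through four knots: zero width up to the claw depth, a peak of `claw_z_width` at the turn point, then a taper back to zero at the tip. A standalone sketch of that profile, with values picked from the sampled ranges:

```python
import numpy as np
from scipy.interpolate import interp1d

x_mid, claw_x_depth, claw_x_turn, claw_z_width = 0.5, 0.2, 0.3, 0.25
knots_x = [
    x_mid,
    x_mid + claw_x_depth,
    x_mid + claw_x_depth + claw_x_turn * (1 - x_mid - claw_x_depth),
    1.0,
]
# same knot layout as the width_scale interpolator in make_claw
width_scale = interp1d(knots_x, [0.0, 0.0, claw_z_width, 0.0],
                       kind="cubic", fill_value="extrapolate")
print(np.round(width_scale(np.linspace(x_mid, 1.0, 6)), 3))
```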
- -# Authors: Lingjie Mei - - -import numpy as np -from numpy.random import uniform - -from infinigen.assets.creatures.util.animation.driver_repeated import bend_bones_lerp -from infinigen.assets.creatures.util.creature import Part, PartFactory -from infinigen.assets.creatures.util.genome import Joint -from infinigen.assets.creatures.parts.utils.draw import make_segments -from infinigen.assets.utils.decorate import read_co -from infinigen.assets.utils.object import join_objects -from infinigen.core.util.random import log_uniform -from infinigen.core.surface import write_attr_data - - -class CrabLegFactory(PartFactory): - tags = ['leg'] - - def make_part(self, params) -> Part: - x_length = params['x_length'] - segments, x_cuts = self.make_segments(params) - obj = join_objects(segments) - - skeleton = np.zeros((2, 3)) - skeleton[1, 0] = x_length - joints = {x: Joint(rest=(0, 0, 0)) for x in x_cuts[1:-1]} - return Part(skeleton, obj, joints=joints) - - def make_segments(self, params): - x_cuts = [0, params['x_mid_first'], params['x_mid_second'], 1] - y_cuts = [1, params['y_mid_first'], params['y_mid_second'], .01] - x_anchors = lambda u, v: (u, u + 1e-2, (u + v) / 2, v - 1e-2, v) - y_anchors = lambda u, v: (u * .9, u, (u + v) / 2 * params['y_expand'], v, v * .9) - segments = make_segments(x_cuts, y_cuts, x_anchors, y_anchors, params) - for obj in segments: - z = read_co(obj).T[-1] - write_attr_data(obj, 'ratio', 1 + np.where(z > 0, 0, uniform(.8, 1.5) * z / params['y_length'])) - return segments, x_cuts - - def sample_params(self): - x_length = uniform(.8, 1.2) - y_length = uniform(.025, .035) - z_length = y_length * uniform(1., 1.5) - x_mid_first = uniform(.3, .4) - x_mid_second = uniform(.6, .7) - y_mid_first = uniform(.7, 1.) - y_mid_second = y_mid_first / 2 * uniform(1.1, 1.3) - y_expand = uniform(1.1, 1.3) - noise_strength = uniform(.005, .01) - noise_scale = log_uniform(5, 10) - bottom_shift = uniform(.3, .5) - bottom_cutoff = uniform(.2, .5) - top_shift = uniform(.2, .4) - top_cutoff = uniform(.6, .8) - return { - 'x_length': x_length, - 'y_length': y_length, - 'z_length': z_length, - 'x_mid_first': x_mid_first, - 'x_mid_second': x_mid_second, - 'y_mid_first': y_mid_first, - 'y_mid_second': y_mid_second, - 'y_expand': y_expand, - 'noise_strength': noise_strength, - 'noise_scale': noise_scale, - 'bottom_shift': bottom_shift, - 'bottom_cutoff': bottom_cutoff, - 'top_shift': top_shift, - 'top_cutoff': top_cutoff, - } - - @staticmethod - def animate_bones(arma, bones, params): - bend_bones_lerp(arma, bones, params['leg_curl'], params['freq'], rot=params['leg_rot']) - - -class LobsterLegFactory(CrabLegFactory): - def sample_params(self): - y_length = uniform(.01, .015) - z_length = y_length * log_uniform(1, 1.2) - return {**super(LobsterLegFactory, self).sample_params(), 'y_length': y_length, 'z_length': z_length} diff --git a/infinigen/assets/creatures/parts/crustacean/tail.py b/infinigen/assets/creatures/parts/crustacean/tail.py deleted file mode 100644 index b961cae23..000000000 --- a/infinigen/assets/creatures/parts/crustacean/tail.py +++ /dev/null @@ -1,89 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
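`CrabLegFactory.make_segments` above turns two lists of cut fractions into per-segment profile anchors: each segment pinches near the joints (the `1e-2` offsets and the `* .9` radii) and bulges at its midpoint by `y_expand`. A standalone sketch in normalized coordinates, with values picked from the sampled ranges:

```python
import numpy as np

x_cuts = [0.0, 0.35, 0.65, 1.0]    # [0, x_mid_first, x_mid_second, 1]
y_cuts = [1.0, 0.85, 0.5, 0.01]    # [1, y_mid_first, y_mid_second, .01]
y_expand = 1.2

x_anchors = lambda u, v: (u, u + 1e-2, (u + v) / 2, v - 1e-2, v)
y_anchors = lambda u, v: (u * .9, u, (u + v) / 2 * y_expand, v, v * .9)

for (x0, x1), (y0, y1) in zip(zip(x_cuts, x_cuts[1:]), zip(y_cuts, y_cuts[1:])):
    # five anchor points per segment: pinched ends, expanded middle
    print(np.round(x_anchors(x0, x1), 3), np.round(y_anchors(y0, y1), 3))
```

The interior cut fractions also become the keys of the `joints` dict, so each segment boundary doubles as a bend location for the rig.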
- -# Authors: Lingjie Mei - - -import numpy as np -from numpy.random import uniform -from scipy.interpolate import interp1d - -from infinigen.assets.creatures.util.animation.driver_repeated import bend_bones_lerp -from infinigen.assets.creatures.util.creature import Part, PartFactory -from infinigen.assets.creatures.util.genome import Joint -from infinigen.assets.creatures.parts.utils.draw import make_segments -from infinigen.assets.utils.decorate import read_co -from infinigen.assets.utils.object import join_objects -from infinigen.core.util.random import log_uniform -from infinigen.core.surface import write_attr_data - - -class CrustaceanTailFactory(PartFactory): - tags = ['body'] - - def make_part(self, params) -> Part: - x_length = params['x_length'] - segments, x_cuts = self.make_segments(params) - obj = join_objects(segments) - - skeleton = np.zeros((2, 3)) - skeleton[1, 0] = x_length - joints = {x: Joint(rest=(0, 0, 0)) for x in x_cuts[1:]} - return Part(skeleton, obj, joints=joints) - - def make_segments(self, params): - n = params['n_segments'] - decay = np.exp(np.log(params['x_decay']) / n) - x_cuts = np.cumsum(decay ** np.arange(n)) - x_cuts = [0, *x_cuts / x_cuts[-1]] - y_cuts_scale = interp1d([0, 1 / 3, 2 / 3, 1], [1 / params['shell_ratio'], params['y_midpoint_first'], - params['y_midpoint_second'], .1], fill_value='extrapolate') - y_cuts = y_cuts_scale(x_cuts) - x_anchors = lambda u, v: (u, (u + v) / 2, v) - y_anchors = lambda u, v: (u, np.sqrt(u * v), v * params['shell_ratio']) - segments = make_segments(x_cuts, y_cuts, x_anchors, y_anchors, params) - height = uniform(.5, 1.) - for obj in segments: - z = read_co(obj).T[-1] - write_attr_data(obj, 'ratio', 1 + np.where(z > 0, 0, height * z / params['y_length'])) - return segments, x_cuts - - def sample_params(self): - x_length = uniform(1., 1.5) - y_length = uniform(.15, .2) - z_length = y_length * uniform(1, 1.2) - y_expand = uniform(1.1, 1.3) - y_midpoint_first = uniform(.85, .95) - y_midpoint_second = uniform(.7, .8) - noise_strength = uniform(.01, .02) - noise_scale = log_uniform(10, 20) - bottom_shift = uniform(.3, .5) - bottom_cutoff = uniform(.2, .5) - top_shift = 0 - top_cutoff = 1 - n_segments = np.random.randint(6, 10) - x_decay = log_uniform(.2, .3) - shell_ratio = uniform(1.05, 1.08) - fin_x_length = uniform(.5, .8) - return { - 'x_length': x_length, - 'y_length': y_length, - 'z_length': z_length, - 'y_expand': y_expand, - 'noise_strength': noise_strength, - 'noise_scale': noise_scale, - 'bottom_shift': bottom_shift, - 'bottom_cutoff': bottom_cutoff, - 'top_shift': top_shift, - 'top_cutoff': top_cutoff, - 'n_segments': n_segments, - 'x_decay': x_decay, - 'shell_ratio': shell_ratio, - 'y_midpoint_first': y_midpoint_first, - 'y_midpoint_second': y_midpoint_second, - 'fin_x_length': fin_x_length, - } - - @staticmethod - def animate_bones(arma, bones, params): - bend_bones_lerp(arma, bones, params['tail_curl'], params['freq']) diff --git a/infinigen/assets/creatures/parts/eye.py b/infinigen/assets/creatures/parts/eye.py deleted file mode 100644 index e3ee25d3f..000000000 --- a/infinigen/assets/creatures/parts/eye.py +++ /dev/null @@ -1,168 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
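`CrustaceanTailFactory.make_segments` above spaces the tail cuts geometrically: consecutive segment lengths shrink by the constant factor `x_decay ** (1 / n)`, and the cumulative positions are normalized to `[0, 1]`. A standalone worked sketch:

```python
import numpy as np

n_segments, x_decay = 8, 0.25   # sampled from randint(6, 10), log_uniform(.2, .3)
decay = np.exp(np.log(x_decay) / n_segments)
x_cuts = np.cumsum(decay ** np.arange(n_segments))
x_cuts = np.concatenate([[0.0], x_cuts / x_cuts[-1]])
print(np.round(x_cuts, 3))      # spacing shrinks toward the tail tip
```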
- -# Authors: Alexander Raistrick - - -import bpy -import mathutils - -import numpy as np -from numpy.random import uniform, normal as N, randint - -from infinigen.core.util.math import clip_gaussian -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface -from infinigen.assets.materials.utils.surface_utils import nodegroup_norm_value, nodegroup_norm_vec - -from infinigen.assets.creatures.util.nodegroups.curve import nodegroup_simple_tube, nodegroup_warped_circle_curve, nodegroup_smooth_taper, nodegroup_profile_part -from infinigen.assets.creatures.util.nodegroups.math import nodegroup_aspect_to_dim - - -from infinigen.assets.creatures.util.creature import PartFactory -from infinigen.assets.creatures.util import part_util -from infinigen.core.tagging import tag_object, tag_nodegroup - -@node_utils.to_nodegroup('nodegroup_eyelid', singleton=True, type='GeometryNodeTree') -def nodegroup_eyelid(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Eyeball Radius', 1.0), - ('NodeSocketFloat', 'Aspect Ratio', 0.34999999999999998), - ('NodeSocketFloat', 'fullness', 2.0), - ('NodeSocketVector', 'TearDuctCoord', (0.0, -1.5, -0.20000000000000001)), - ('NodeSocketVector', 'PeakCoord', (1.2, -0.20000000000000001, 2.0)), - ('NodeSocketVector', 'EyelidEndCoord', (0.0, 1.5, 0.29999999999999999)), - ('NodeSocketFloat', 'StartRadPct', 0.5), - ('NodeSocketFloat', 'EndRadPct', 0.5), - ('NodeSocketFloatAngle', 'Tilt', -0.34910000000000002)]) - - scale = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["TearDuctCoord"], 'Scale': group_input.outputs["Eyeball Radius"]}, - attrs={'operation': 'SCALE'}) - - scale_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["PeakCoord"], 'Scale': group_input.outputs["Eyeball Radius"]}, - attrs={'operation': 'SCALE'}) - - scale_2 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["EyelidEndCoord"], 'Scale': group_input.outputs["Eyeball Radius"]}, - attrs={'operation': 'SCALE'}) - - quadratic_bezier = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Start': scale.outputs["Vector"], 'Middle': scale_1.outputs["Vector"], 'End': scale_2.outputs["Vector"]}) - - set_curve_tilt = nw.new_node(Nodes.SetCurveTilt, - input_kwargs={'Curve': quadratic_bezier, 'Tilt': group_input.outputs["Tilt"]}) - - position = nw.new_node(Nodes.InputPosition) - - aspect_to_dim = nw.new_node(nodegroup_aspect_to_dim().name, - input_kwargs={'Aspect Ratio': group_input.outputs["Aspect Ratio"]}) - - multiply = nw.new_node(Nodes.VectorMath, - input_kwargs={0: position, 1: aspect_to_dim}, - attrs={'operation': 'MULTIPLY'}) - - warped_circle_curve = nw.new_node(nodegroup_warped_circle_curve().name, - input_kwargs={'Position': multiply.outputs["Vector"]}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Eyeball Radius"], 1: group_input.outputs["StartRadPct"]}, - attrs={'operation': 'MULTIPLY'}) - - multiply_2 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Eyeball Radius"], 1: group_input.outputs["EndRadPct"]}, - attrs={'operation': 'MULTIPLY'}) - - smoothtaper = nw.new_node(nodegroup_smooth_taper().name, - input_kwargs={'start_rad': multiply_1, 'end_rad': multiply_2, 'fullness': group_input.outputs["fullness"]}) - - profilepart = 
nw.new_node(nodegroup_profile_part().name, - input_kwargs={'Skeleton Curve': set_curve_tilt, 'Profile Curve': warped_circle_curve, 'Radius Func': smoothtaper}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': tag_nodegroup(nw, profilepart, 'eyelid')}) - -@node_utils.to_nodegroup('nodegroup_mammal_eye', singleton=True, type='GeometryNodeTree') -def nodegroup_mammal_eye(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloatDistance', 'Radius', 0.050000000000000003), - ('NodeSocketFloat', 'Eyelid Thickness Ratio', 0.34999999999999998), - ('NodeSocketFloat', 'Eyelid Fullness', 2.0), - ('NodeSocketBool', 'Eyelids', True)]) - - eyelid = nw.new_node(nodegroup_eyelid().name, - input_kwargs={'Eyeball Radius': group_input.outputs["Radius"], 'Aspect Ratio': group_input.outputs["Eyelid Thickness Ratio"], 'fullness': group_input.outputs["Eyelid Fullness"], 'TearDuctCoord': (0.0, -1.2, -0.20000000000000001), 'PeakCoord': (1.2, 0.40000000000000002, -1.7), 'EyelidEndCoord': (0.0, 1.2, 0.31), 'Tilt': 0.69810000000000005}) - - eyelid_1 = nw.new_node(nodegroup_eyelid().name, - input_kwargs={'Eyeball Radius': group_input.outputs["Radius"], 'Aspect Ratio': group_input.outputs["Eyelid Thickness Ratio"], 'fullness': group_input.outputs["Eyelid Fullness"], 'PeakCoord': (1.2, -0.20000000000000001, 1.8)}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [eyelid, eyelid_1]}) - - switch = nw.new_node(Nodes.Switch, - input_kwargs={1: group_input.outputs["Eyelids"], 15: join_geometry}) - - uv_sphere = nw.new_node(Nodes.MeshUVSphere, - input_kwargs={'Radius': group_input.outputs["Radius"]}) - - scale = nw.new_node(Nodes.VectorMath, - input_kwargs={0: (0.10000000000000001, 0.0, 0.0), 'Scale': group_input.outputs["Radius"]}, - attrs={'operation': 'SCALE'}) - - transform_1 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': uv_sphere, 'Translation': scale.outputs["Vector"], 'Rotation': (0.0, 1.5708, 0.0), 'Scale': (1.0, 1.0, 0.69999999999999996)}) - - scale_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: (-1.7, 0.0, 0.0), 'Scale': group_input.outputs["Radius"]}, - attrs={'operation': 'SCALE'}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Radius"], 1: 6.0}, - attrs={'operation': 'MULTIPLY'}) - - scale_2 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: (0.33000000000000002, 0.33000000000000002, 0.33000000000000002), 'Scale': multiply}, - attrs={'operation': 'SCALE'}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Radius"], 1: 3.0}, - attrs={'operation': 'MULTIPLY'}) - - simple_tube = nw.new_node(nodegroup_simple_tube().name, - input_kwargs={'Origin': scale_1.outputs["Vector"], 'Angles Deg': (0.0, 0.0, 0.0), 'Seg Lengths': scale_2.outputs["Vector"], 'Start Radius': group_input.outputs["Radius"], 'End Radius': multiply_1, 'Fullness': 0.29999999999999999, 'Do Bezier': False, 'Aspect Ratio': 1.1000000000000001}) - - transform_2 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': simple_tube.outputs["Geometry"], 'Rotation': (0.0, 0.0, 0.34910000000000002)}) - - eyeball = nw.new_node(Nodes.SubdivisionSurface, - input_kwargs={'Mesh': transform_1, 'Level': 2}) - - position_2 = nw.new_node(Nodes.InputPosition) - - normvec = nw.new_node(nodegroup_norm_vec().name, input_kwargs={'Geometry': eyeball, 'Name': 'EyeballPosition', 'Vector': position_2}) - - group_output = 
nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': None, 'BodyExtra_Lid': switch.outputs[6], 'Eyeballl': normvec, 'ParentCutter': transform_2}) - -class MammalEye(PartFactory): - - tags = ['head_detail', 'eye_socket'] - - def sample_params(self): - return { - 'Radius': 0.03 * N(1, 0.1), - 'Eyelid Thickness Ratio': 0.35 * N(1, 0.05), - 'Eyelid Fullness': 2.0 * N(1, 0.1) - } - - def make_part(self, params): - part = part_util.nodegroup_to_part(nodegroup_mammal_eye, params) - tag_object(part.obj, 'mammal_eye') - return part diff --git a/infinigen/assets/creatures/parts/eye_new.py b/infinigen/assets/creatures/parts/eye_new.py deleted file mode 100644 index b1f06e3e4..000000000 --- a/infinigen/assets/creatures/parts/eye_new.py +++ /dev/null @@ -1,2432 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Mingzhe Wang -# Acknowledgement: This file draws inspiration from https://www.youtube.com/watch?v=EfNzAaqKHXQ by PixelicaCG, https://www.youtube.com/watch?v=JcHX4AT1vtg by CGCookie and https://www.youtube.com/watch?v=E0JyyWeptSA by CGRogue - - -import os, sys -import numpy as np -import math as ma -from infinigen.assets.materials.utils.surface_utils import clip, sample_range, sample_ratio, sample_color, geo_voronoi_noise -import bpy -import mathutils -from numpy.random import uniform as U, normal as N, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface - -from infinigen.assets.creatures.util.creature import PartFactory -from infinigen.assets.creatures.util import part_util - -@node_utils.to_nodegroup('nodegroup_circle', singleton=False, type='GeometryNodeTree') -def nodegroup_circle(nw: NodeWrangler): - # Code generated using version 2.6.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'R', 0.5000), - ('NodeSocketInt', 'Resolution', 512)]) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: group_input, 1: -1.0000}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_4 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply}) - - combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': group_input}) - - combine_xyz_5 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': group_input}) - - curve_circle = nw.new_node(Nodes.CurveCircle, - input_kwargs={'Resolution': group_input.outputs["Resolution"], 'Point 1': combine_xyz_4, 'Point 2': combine_xyz_3, 'Point 3': combine_xyz_5, 'Radius': 2.0000}, - attrs={'mode': 'POINTS'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Curve': curve_circle.outputs["Curve"]}, - attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_eyeball', singleton=False, type='GeometryNodeTree') -def nodegroup_eyeball(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Value', 1.0), - ('NodeSocketInt', 'Resolution', 32)]) - - uv_sphere = nw.new_node(Nodes.MeshUVSphere, - input_kwargs={'Segments': group_input.outputs["Resolution"], 'Rings': group_input.outputs["Resolution"]}) - - position_1 = nw.new_node(Nodes.InputPosition) - - separate_xyz_1 = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': position_1}) - - multiply = nw.new_node(Nodes.Math, - 
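`MammalEye.sample_params` above uses the multiplicative-jitter idiom `nominal * N(1, sigma)`, so each parameter varies in proportion to its nominal value. A standalone sketch of the same pattern, using `numpy.random.default_rng` in place of the module-level `normal` alias:

```python
import numpy as np

rng = np.random.default_rng(0)
N = lambda mu, sigma: rng.normal(mu, sigma)

params = {
    "Radius": 0.03 * N(1, 0.1),
    "Eyelid Thickness Ratio": 0.35 * N(1, 0.05),
    "Eyelid Fullness": 2.0 * N(1, 0.1),
}
print({k: round(v, 4) for k, v in params.items()})
```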
input_kwargs={0: group_input.outputs["Value"], 1: group_input.outputs["Value"]}, - attrs={'operation': 'MULTIPLY'}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: 1.0, 1: multiply}, - attrs={'operation': 'SUBTRACT'}) - - sqrt = nw.new_node(Nodes.Math, - input_kwargs={0: subtract}, - attrs={'operation': 'SQRT'}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: sqrt, 1: 1.02}, - attrs={'operation': 'MULTIPLY'}) - - subtract_1 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_1.outputs["Y"], 1: multiply_1}, - attrs={'operation': 'SUBTRACT', 'use_clamp': True}) - - multiply_2 = nw.new_node(Nodes.Math, - input_kwargs={0: subtract_1, 1: 0.5}, - attrs={'operation': 'MULTIPLY'}) - - subtract_2 = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_2, 1: subtract_1}, - attrs={'operation': 'SUBTRACT'}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'Y': subtract_2}) - - set_position_1 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': uv_sphere, 'Offset': combine_xyz_1}) - - greater_than = nw.new_node(Nodes.Math, - input_kwargs={0: subtract_1, 1: 0.0}, - attrs={'operation': 'GREATER_THAN'}) - - store_named_attribute = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': set_position_1, 'Name': 'Iris', 3: greater_than}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': store_named_attribute}) - -@node_utils.to_nodegroup('nodegroup_cornea', singleton=False, type='GeometryNodeTree') -def nodegroup_cornea(nw: NodeWrangler): - # Code generated using version 2.6.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'ScaleX', 0.5000), - ('NodeSocketFloat', 'Height', 2.0000), - ('NodeSocketFloatFactor', 'ScaleZ', 0.0000), - ('NodeSocketFloat', 'Y', 20.0000), - ('NodeSocketInt', 'Resolution', 128)]) - - uv_sphere_1 = nw.new_node(Nodes.MeshUVSphere, - input_kwargs={'Segments': group_input.outputs["Resolution"], 'Rings': group_input.outputs["Resolution"]}) - - subtract = nw.new_node(Nodes.Math, input_kwargs={0: 3.0000, 1: group_input.outputs["Height"]}, attrs={'operation': 'SUBTRACT'}) - - divide = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["ScaleX"], 1: subtract}, attrs={'operation': 'DIVIDE'}) - - combine_color = nw.new_node('FunctionNodeCombineColor', - input_kwargs={'Red': group_input.outputs["ScaleX"], 'Green': divide, 'Blue': group_input.outputs["ScaleZ"]}) - - transform = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': uv_sphere_1, 'Scale': combine_color}) - - position_2 = nw.new_node(Nodes.InputPosition) - - separate_xyz_2 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': position_2}) - - greater_than = nw.new_node(Nodes.Compare, input_kwargs={0: separate_xyz_2.outputs["Y"]}) - - separate_geometry = nw.new_node(Nodes.SeparateGeometry, input_kwargs={'Geometry': transform, 'Selection': greater_than}) - - noise_texture = nw.new_node(Nodes.NoiseTexture) - - subtract_1 = nw.new_node(Nodes.Math, input_kwargs={0: noise_texture.outputs["Fac"]}, attrs={'operation': 'SUBTRACT'}) - - normal = nw.new_node(Nodes.InputNormal) - - multiply = nw.new_node(Nodes.VectorMath, input_kwargs={0: subtract_1, 1: normal}, attrs={'operation': 'MULTIPLY'}) - - value = nw.new_node(Nodes.Value) - value.outputs[0].default_value = 0.0200 - - multiply_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: multiply.outputs["Vector"], 1: value}, - attrs={'operation': 'MULTIPLY'}) - - set_position = nw.new_node(Nodes.SetPosition, - 
input_kwargs={'Geometry': separate_geometry.outputs["Selection"], 'Offset': multiply_1.outputs["Vector"]}) - - multiply_2 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["ScaleX"], 1: group_input.outputs["ScaleX"]}, - attrs={'operation': 'MULTIPLY'}) - - subtract_2 = nw.new_node(Nodes.Math, input_kwargs={0: 1.0000, 1: multiply_2}, attrs={'operation': 'SUBTRACT'}) - - sqrt = nw.new_node(Nodes.Math, input_kwargs={0: subtract_2}, attrs={'operation': 'SQRT'}) - - multiply_3 = nw.new_node(Nodes.Math, input_kwargs={0: sqrt, 1: 0.9500}, attrs={'operation': 'MULTIPLY'}) - - combine_color_1 = nw.new_node('FunctionNodeCombineColor', input_kwargs={'Green': multiply_3}) - - multiply_4 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Y"], 1: -1.0000}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': multiply_4}) - - transform_1 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': set_position, 'Translation': combine_color_1, 'Rotation': combine_xyz}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': transform_1}, attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_eyelid_radius', singleton=False, type='GeometryNodeTree') -def nodegroup_eyelid_radius(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - curve_line = nw.new_node(Nodes.CurveLine, - input_kwargs={'End': (0.0, 0.8, 0.0)}) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'OuterControl', 0.3), - ('NodeSocketFloat', 'InnerControl1', 5.4), - ('NodeSocketFloat', 'InnerControl2', 0.3), - ('NodeSocketInt', 'Resolution', 32)]) - - resample_curve = nw.new_node(Nodes.ResampleCurve, - input_kwargs={'Curve': curve_line, 'Count': group_input.outputs["Resolution"]}) - - position = nw.new_node(Nodes.InputPosition) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': position}) - - capture_attribute = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': resample_curve, 2: separate_xyz.outputs["Y"]}) - - reroute = nw.new_node(Nodes.Reroute, - input_kwargs={'Input': separate_xyz.outputs["Y"]}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: reroute, 1: 0.4}, - attrs={'operation': 'SUBTRACT'}) - - power = nw.new_node(Nodes.Math, - input_kwargs={0: subtract, 1: 2.0}, - attrs={'operation': 'POWER'}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: power, 1: -0.7}, - attrs={'operation': 'MULTIPLY'}) - - greater_than = nw.new_node(Nodes.Math, - input_kwargs={0: reroute, 1: group_input.outputs["InnerControl2"]}, - attrs={'operation': 'GREATER_THAN'}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: multiply, 1: greater_than}, - attrs={'operation': 'MULTIPLY'}) - - subtract_1 = nw.new_node(Nodes.Math, - input_kwargs={0: 1.0, 1: greater_than}, - attrs={'operation': 'SUBTRACT'}) - - reroute_3 = nw.new_node(Nodes.Reroute, - input_kwargs={'Input': group_input.outputs["OuterControl"]}) - - subtract_2 = nw.new_node(Nodes.Math, - input_kwargs={0: reroute_3, 1: reroute}, - attrs={'operation': 'SUBTRACT'}) - - multiply_2 = nw.new_node(Nodes.Math, - input_kwargs={0: subtract_1, 1: subtract_2}, - attrs={'operation': 'MULTIPLY'}) - - power_1 = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_2, 1: 2.0}, - attrs={'operation': 'POWER'}) - - multiply_3 = nw.new_node(Nodes.Math, - input_kwargs={0: power_1, 1: group_input.outputs["InnerControl1"]}, - attrs={'operation': 'MULTIPLY'}) - - add = nw.new_node(Nodes.Math, - 
input_kwargs={0: multiply_1, 1: multiply_3}) - - subtract_3 = nw.new_node(Nodes.Math, - input_kwargs={0: add, 1: 0.0}, - attrs={'operation': 'SUBTRACT'}) - - reroute_1 = nw.new_node(Nodes.Reroute, - input_kwargs={'Input': group_input.outputs["OuterControl"]}) - - subtract_4 = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_3, 1: reroute_1}, - attrs={'operation': 'SUBTRACT'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': subtract_3, 'Y': subtract_4}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': capture_attribute.outputs["Geometry"], 'Offset': combine_xyz}) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': set_position, 'Scale': (1.5, 1.5, 1.5)}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': transform, 'Attribute': capture_attribute.outputs[2]}) - -@node_utils.to_nodegroup('nodegroup_eyelid_circle', singleton=False, type='GeometryNodeTree') -def nodegroup_eyelid_circle(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'ShapeW', 0.0), - ('NodeSocketFloat', 'ShapeH', 0.0), - ('NodeSocketInt', 'Resolution', 32)]) - - reroute_3 = nw.new_node(Nodes.Reroute, - input_kwargs={'Input': group_input.outputs["ShapeW"]}) - - circle = nw.new_node(nodegroup_circle().name, - input_kwargs={'R': reroute_3, 'Resolution': group_input.outputs["Resolution"]}) - - spline_parameter = nw.new_node(Nodes.SplineParameter) - - capture_attribute = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': circle, 2: spline_parameter.outputs["Factor"]}) - - position_1 = nw.new_node(Nodes.InputPosition) - - capture_attribute_1 = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': capture_attribute.outputs["Geometry"], 1: position_1}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - position = nw.new_node(Nodes.InputPosition) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': position}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz.outputs["X"], 1: -0.5}, - attrs={'operation': 'SUBTRACT'}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: subtract, 1: subtract}, - attrs={'operation': 'MULTIPLY'}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: multiply, 1: -0.02}, - attrs={'operation': 'MULTIPLY'}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz.outputs["Y"], 1: multiply_1}) - - multiply_2 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["ShapeH"], 1: group_input.outputs["ShapeW"]}, - attrs={'operation': 'MULTIPLY'}) - - reroute_1 = nw.new_node(Nodes.Reroute, - input_kwargs={'Input': multiply_2}) - - multiply_3 = nw.new_node(Nodes.Math, - input_kwargs={0: reroute_1, 1: reroute_1}, - attrs={'operation': 'MULTIPLY'}) - - greater_than = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz.outputs["X"], 1: 0.0}, - attrs={'operation': 'GREATER_THAN'}) - - multiply_4 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz.outputs["X"], 1: 1.0}, - attrs={'operation': 'MULTIPLY'}) - - multiply_5 = nw.new_node(Nodes.Math, - input_kwargs={0: greater_than, 1: multiply_4}, - attrs={'operation': 'MULTIPLY'}) - - less_than = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz.outputs["X"], 1: 0.0}, - attrs={'operation': 'LESS_THAN'}) - - multiply_6 = nw.new_node(Nodes.Math, - input_kwargs={0: less_than, 1: separate_xyz.outputs["X"]}, - attrs={'operation': 'MULTIPLY'}) - - add_1 = 
nw.new_node(Nodes.Math, - input_kwargs={0: multiply_5, 1: multiply_6}) - - reroute = nw.new_node(Nodes.Reroute, - input_kwargs={'Input': add_1}) - - multiply_7 = nw.new_node(Nodes.Math, - input_kwargs={0: reroute, 1: reroute}, - attrs={'operation': 'MULTIPLY'}) - - subtract_1 = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_3, 1: multiply_7}, - attrs={'operation': 'SUBTRACT'}) - - sqrt = nw.new_node(Nodes.Math, - input_kwargs={0: subtract_1}, - attrs={'operation': 'SQRT'}) - - multiply_8 = nw.new_node(Nodes.Math, - input_kwargs={0: reroute_3, 1: reroute_3}, - attrs={'operation': 'MULTIPLY'}) - - subtract_2 = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_3, 1: multiply_8}, - attrs={'operation': 'SUBTRACT'}) - - sqrt_1 = nw.new_node(Nodes.Math, - input_kwargs={0: subtract_2}, - attrs={'operation': 'SQRT'}) - - reroute_2 = nw.new_node(Nodes.Reroute, - input_kwargs={'Input': sqrt_1}) - - subtract_3 = nw.new_node(Nodes.Math, - input_kwargs={0: sqrt, 1: reroute_2}, - attrs={'operation': 'SUBTRACT'}) - - sign = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz.outputs["Z"], 1: 0.0}, - attrs={'operation': 'SIGN'}) - - multiply_9 = nw.new_node(Nodes.Math, - input_kwargs={0: subtract_3, 1: sign}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': separate_xyz.outputs["X"], 'Y': add, 'Z': multiply_9}) - - set_position_1 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': capture_attribute_1.outputs["Geometry"], 'Position': combine_xyz_1}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'W': 50.0, 'Scale': 0.5}, - attrs={'noise_dimensions': '4D'}) - - subtract_4 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: noise_texture.outputs["Color"], 1: (0.5, 0.5, 0.5)}, - attrs={'operation': 'SUBTRACT'}) - - value = nw.new_node(Nodes.Value) - value.outputs[0].default_value = 0.5 - - multiply_10 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: subtract_4.outputs["Vector"], 1: value}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'Z': multiply_10.outputs["Vector"]}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': set_position_1, 'Offset': combine_xyz}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_position, "Attribute": capture_attribute.outputs[2], "Attribute1": capture_attribute_1.outputs["Attribute"]}) - -@node_utils.to_nodegroup('nodegroup_eye_ball', singleton=False, type='GeometryNodeTree') -def nodegroup_eye_ball(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'CorneaScaleX', 0.52), - ('NodeSocketFloat', 'Height', 1.2), - ('NodeSocketFloatFactor', 'CorneaScaleZ', 0.8), - ('NodeSocketFloat', 'Y', 20.0), - ('NodeSocketInt', 'EyeballResolution', 32), - ('NodeSocketInt', 'CorneaResolution', 128)]) - - cornea_008 = nw.new_node(nodegroup_cornea().name, - input_kwargs={'ScaleX': group_input.outputs["CorneaScaleX"], 'Height': group_input.outputs["Height"], 'ScaleZ': group_input.outputs["CorneaScaleZ"], 'Y': group_input.outputs["Y"], 'Resolution': group_input.outputs["CorneaResolution"]}) - - store_named_attribute = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': cornea_008, 'Name': 'tag_cornea', 5: True}, - attrs={'data_type': 'BOOLEAN'}) - - eyeball_009 = nw.new_node(nodegroup_eyeball().name, - input_kwargs={'Value': group_input.outputs["CorneaScaleX"], 
'Resolution': group_input.outputs["EyeballResolution"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Cornea': store_named_attribute, 'Eyeball': eyeball_009}) - -@node_utils.to_nodegroup('nodegroup_raycast_append', singleton=False, type='GeometryNodeTree') -def nodegroup_raycast_append(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketGeometry', 'Target Geometry', None), - ('NodeSocketVector', 'Ray Direction', (-1.0, 0.0, 0.0)), - ('NodeSocketFloat', 'Default Offset', -0.005)]) - - raycast = nw.new_node(Nodes.Raycast, - input_kwargs={'Target Geometry': group_input.outputs["Target Geometry"], 'Ray Direction': group_input.outputs["Ray Direction"], 'Ray Length': 0.1}) - - less_than = nw.new_node(Nodes.Math, - input_kwargs={0: raycast.outputs["Hit Distance"], 1: 0.07}, - attrs={'operation': 'LESS_THAN'}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: raycast.outputs["Hit Distance"], 1: less_than}, - attrs={'operation': 'MULTIPLY'}) - - named_attribute = nw.new_node(Nodes.NamedAttribute, - input_kwargs={'Name': 'pos'}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - distance = nw.new_node(Nodes.VectorMath, - input_kwargs={0: named_attribute.outputs["Attribute"]}, - attrs={'operation': 'DISTANCE'}) - - value_1 = nw.new_node(Nodes.Value) - value_1.outputs[0].default_value = 1.2 - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: distance.outputs["Value"], 1: value_1}, - attrs={'operation': 'SUBTRACT', 'use_clamp': True}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: subtract, 1: 1.5}, - attrs={'operation': 'MULTIPLY', 'use_clamp': True}) - - multiply_2 = nw.new_node(Nodes.Math, - input_kwargs={0: multiply, 1: multiply_1}, - attrs={'operation': 'MULTIPLY'}) - - subtract_1 = nw.new_node(Nodes.Math, - input_kwargs={0: 1.0, 1: multiply_1}, - attrs={'operation': 'SUBTRACT', 'use_clamp': True}) - - multiply_3 = nw.new_node(Nodes.Math, - input_kwargs={0: subtract_1, 1: group_input.outputs["Default Offset"]}, - attrs={'operation': 'MULTIPLY'}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_2, 1: multiply_3}) - - length = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["Ray Direction"]}, - attrs={'operation': 'LENGTH'}) - - divide = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["Ray Direction"], 1: length.outputs["Value"]}, - attrs={'operation': 'DIVIDE'}) - - multiply_4 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: add, 1: divide.outputs["Vector"]}, - attrs={'operation': 'MULTIPLY'}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 'Offset': multiply_4.outputs["Vector"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_position}) - -@node_utils.to_nodegroup('nodegroup_vector_sum', singleton=False, type='GeometryNodeTree') -def nodegroup_vector_sum(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'Vector', (0.0, 0.0, 0.0))]) - - separate_xyz_1 = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': group_input.outputs["Vector"]}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_1.outputs["X"], 1: separate_xyz_1.outputs["Y"]}) - - add_1 = nw.new_node(Nodes.Math, - input_kwargs={0: add, 1: separate_xyz_1.outputs["Z"]}) - 
- group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Sum': add_1}) - -def shader_material(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': (0.8, 0.0, 0.6028, 1.0)}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': principled_bsdf}) - -@node_utils.to_nodegroup('nodegroup_part_surface_simple', singleton=False, type='GeometryNodeTree') -def nodegroup_part_surface_simple(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Skeleton Curve', None), - ('NodeSocketGeometry', 'Skin Mesh', None), - ('NodeSocketVector', 'Length, Yaw, Rad', (0.0, 0.0, 0.0))]) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': group_input.outputs["Length, Yaw, Rad"]}) - - clamp_1 = nw.new_node(Nodes.Clamp, - input_kwargs={'Value': separate_xyz.outputs["X"]}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': 1.5708, 'Y': separate_xyz.outputs["Y"], 'Z': 1.5708}) - - part_surface = nw.new_node(nodegroup_part_surface().name, - input_kwargs={'Skeleton Curve': group_input.outputs["Skeleton Curve"], 'Skin Mesh': group_input.outputs["Skin Mesh"], 'Length Fac': clamp_1, 'Ray Rot': combine_xyz, 'Rad': separate_xyz.outputs["Z"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Position': part_surface.outputs["Position"], 'Hit Normal': part_surface.outputs["Hit Normal"], 'Tangent': part_surface.outputs["Tangent"]}) - -@node_utils.to_nodegroup('nodegroup_aspect_to_dim', singleton=False, type='GeometryNodeTree') -def nodegroup_aspect_to_dim(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Aspect Ratio', 1.0)]) - - greater_than = nw.new_node(Nodes.Compare, - input_kwargs={0: group_input.outputs["Aspect Ratio"], 1: 1.0}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': group_input.outputs["Aspect Ratio"], 'Y': 1.0}) - - divide = nw.new_node(Nodes.Math, - input_kwargs={0: 1.0, 1: group_input.outputs["Aspect Ratio"]}, - attrs={'operation': 'DIVIDE'}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': 1.0, 'Y': divide}) - - switch = nw.new_node(Nodes.Switch, - input_kwargs={0: greater_than, 8: combine_xyz_1, 9: combine_xyz_2}, - attrs={'input_type': 'VECTOR'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'XY Scale': switch.outputs[3]}) - -@node_utils.to_nodegroup('nodegroup_polar_to_cart', singleton=False, type='GeometryNodeTree') -def nodegroup_polar_to_cart(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Angle', 0.5), - ('NodeSocketFloat', 'Length', 0.0), - ('NodeSocketVector', 'Origin', (0.0, 0.0, 0.0))]) - - cosine = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Angle"]}, - attrs={'operation': 'COSINE'}) - - sine = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Angle"]}, - attrs={'operation': 'SINE'}) - - construct_unit_vector = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': cosine, 'Z': sine}, - label='Construct Unit Vector') - - offset_polar = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["Length"], 1: construct_unit_vector, 2: 
group_input.outputs["Origin"]}, - label='Offset Polar', - attrs={'operation': 'MULTIPLY_ADD'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Vector': offset_polar.outputs["Vector"]}) - -@node_utils.to_nodegroup('nodegroup_switch4', singleton=False, type='GeometryNodeTree') -def nodegroup_switch4(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketInt', 'Arg', 0), - ('NodeSocketVector', 'Arg == 0', (0.0, 0.0, 0.0)), - ('NodeSocketVector', 'Arg == 1', (0.0, 0.0, 0.0)), - ('NodeSocketVector', 'Arg == 2', (0.0, 0.0, 0.0)), - ('NodeSocketVector', 'Arg == 3', (0.0, 0.0, 0.0))]) - - greater_equal = nw.new_node(Nodes.Compare, - input_kwargs={2: group_input.outputs["Arg"], 3: 2}, - attrs={'data_type': 'INT', 'operation': 'GREATER_EQUAL'}) - - greater_equal_1 = nw.new_node(Nodes.Compare, - input_kwargs={2: group_input.outputs["Arg"], 3: 1}, - attrs={'data_type': 'INT', 'operation': 'GREATER_EQUAL'}) - - switch_1 = nw.new_node(Nodes.Switch, - input_kwargs={0: greater_equal_1, 8: group_input.outputs["Arg == 0"], 9: group_input.outputs["Arg == 1"]}, - attrs={'input_type': 'VECTOR'}) - - greater_equal_2 = nw.new_node(Nodes.Compare, - input_kwargs={2: group_input.outputs["Arg"], 3: 3}, - attrs={'data_type': 'INT', 'operation': 'GREATER_EQUAL'}) - - switch_2 = nw.new_node(Nodes.Switch, - input_kwargs={0: greater_equal_2, 8: group_input.outputs["Arg == 2"], 9: group_input.outputs["Arg == 3"]}, - attrs={'input_type': 'VECTOR'}) - - switch = nw.new_node(Nodes.Switch, - input_kwargs={0: greater_equal, 8: switch_1.outputs[3], 9: switch_2.outputs[3]}, - attrs={'input_type': 'VECTOR'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Output': switch.outputs[3]}) - -def shader_eyeball_fish(nw: NodeWrangler, rand=True, **input_kwargs): - # Code generated using version 2.6.3 of the node_transpiler - - attribute_2 = nw.new_node(Nodes.Attribute, attrs={'attribute_name': 'tag_cornea'}) - - attribute_1 = nw.new_node(Nodes.Attribute, attrs={'attribute_name': 'EyeballPosition'}) - - mapping = nw.new_node(Nodes.Mapping, input_kwargs={'Vector': attribute_1, 'Scale': (1.2000, 1.0000, 0.4000)}) - - noise_texture_2 = nw.new_node(Nodes.NoiseTexture, input_kwargs={'Vector': mapping, 'Scale': 50.0000}) - - mix_3 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': 0.0200, 'Color1': mapping, 'Color2': noise_texture_2.outputs["Color"]}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': mix_3}) - - value = nw.new_node(Nodes.Value) - value.outputs[0].default_value = 0.0000 - - group = nw.new_node(nodegroup_rotate2_d().name, - input_kwargs={0: separate_xyz.outputs["X"], 1: separate_xyz.outputs["Z"], 2: value}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: group.outputs[1], 1: 0.3000}, attrs={'operation': 'MULTIPLY'}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: multiply}, attrs={'operation': 'MULTIPLY'}) - - multiply_2 = nw.new_node(Nodes.Math, input_kwargs={0: group.outputs["Value"], 1: 0.8000}, attrs={'operation': 'MULTIPLY'}) - - multiply_3 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_2, 1: multiply_2}, attrs={'operation': 'MULTIPLY'}) - - add = nw.new_node(Nodes.Math, input_kwargs={0: multiply_1, 1: multiply_3}) - - add_1 = nw.new_node(Nodes.Math, input_kwargs={0: add, 1: 0.6300}) - - colorramp = nw.new_node(Nodes.ColorRamp, input_kwargs={'Fac': add_1}) - colorramp.color_ramp.elements[0].position = 0.6400 - 
colorramp.color_ramp.elements[0].color = [1.0000, 1.0000, 1.0000, 1.0000] - colorramp.color_ramp.elements[1].position = 0.6591 - colorramp.color_ramp.elements[1].color = [0.0000, 0.0000, 0.0000, 1.0000] - - mapping_1 = nw.new_node(Nodes.Mapping, input_kwargs={'Vector': attribute_1, 'Scale': (1.0000, 100.0000, 1.0000)}) - - mix_4 = nw.new_node(Nodes.MixRGB, input_kwargs={'Fac': 0.3000, 'Color1': mapping_1, 'Color2': attribute_1}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, input_kwargs={'Vector': mix_4, 'Scale': 10.0000}) - - mix = nw.new_node(Nodes.MixRGB, input_kwargs={'Fac': 0.7000, 'Color1': noise_texture.outputs["Fac"], 'Color2': mix_4}) - - voronoi_texture = nw.new_node(Nodes.VoronoiTexture, input_kwargs={'Vector': mix, 'Scale': 20.0000}) - - multiply_4 = nw.new_node(Nodes.Math, - input_kwargs={0: voronoi_texture.outputs["Distance"], 1: voronoi_texture.outputs["Distance"], 2: 0.0000}, - attrs={'operation': 'MULTIPLY'}) - - mapping_2 = nw.new_node(Nodes.Mapping, input_kwargs={'Vector': attribute_1, 'Scale': (1.0000, 20.0000, 1.0000)}) - - mix_8 = nw.new_node(Nodes.MixRGB, input_kwargs={'Fac': 0.3000, 'Color1': mapping_2, 'Color2': attribute_1}) - - noise_texture_3 = nw.new_node(Nodes.NoiseTexture, input_kwargs={'Vector': mix_8, 'Scale': 10.0000}) - - mix_1 = nw.new_node(Nodes.MixRGB, input_kwargs={'Fac': 0.7000, 'Color1': noise_texture_3.outputs["Fac"], 'Color2': mix_8}) - - voronoi_texture_1 = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': mix_1, 'W': 4.5000, 'Scale': 1.0000}, - attrs={'voronoi_dimensions': '4D'}) - - multiply_5 = nw.new_node(Nodes.Math, - input_kwargs={0: voronoi_texture_1.outputs["Distance"], 1: voronoi_texture_1.outputs["Distance"], 2: 0.0000}, - attrs={'operation': 'MULTIPLY'}) - - mix_9 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': 1.0000, 'Color1': multiply_4, 'Color2': multiply_5}, - attrs={'blend_type': 'OVERLAY'}) - - bright_contrast = nw.new_node('ShaderNodeBrightContrast', input_kwargs={'Color': mix_9, 'Bright': 0.6000, 'Contrast': 1.5000}) - - multiply_6 = nw.new_node(Nodes.Math, input_kwargs={0: group.outputs[1], 1: 0.6000}, attrs={'operation': 'MULTIPLY'}) - - multiply_7 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_6, 1: multiply_6}, attrs={'operation': 'MULTIPLY'}) - - multiply_8 = nw.new_node(Nodes.Math, input_kwargs={0: group.outputs["Value"], 1: 0.6000}, attrs={'operation': 'MULTIPLY'}) - - multiply_9 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_8, 1: multiply_8}, attrs={'operation': 'MULTIPLY'}) - - add_2 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_7, 1: multiply_9}) - - add_3 = nw.new_node(Nodes.Math, input_kwargs={0: add_2}) - - colorramp_1 = nw.new_node(Nodes.ColorRamp, input_kwargs={'Fac': add_3}) - colorramp_1.color_ramp.elements[0].position = 0.6159 - colorramp_1.color_ramp.elements[0].color = [1.0000, 1.0000, 1.0000, 1.0000] - colorramp_1.color_ramp.elements[1].position = 0.6591 - colorramp_1.color_ramp.elements[1].color = [0.0000, 0.0000, 0.0000, 1.0000] - - colorramp_5 = nw.new_node(Nodes.ColorRamp, input_kwargs={'Fac': colorramp_1.outputs["Color"]}) - colorramp_5.color_ramp.elements[0].position = 0.0295 - colorramp_5.color_ramp.elements[0].color = [0.0000, 0.0000, 0.0000, 1.0000] - colorramp_5.color_ramp.elements[1].position = 0.0523 - colorramp_5.color_ramp.elements[1].color = [1.0000, 1.0000, 1.0000, 1.0000] - - add_4 = nw.new_node(Nodes.Math, - input_kwargs={0: bright_contrast, 1: colorramp_5.outputs["Color"]}, - attrs={'use_clamp': True}) - - multiply_10 = nw.new_node(Nodes.Math, 
input_kwargs={0: group.outputs[1]}, attrs={'operation': 'MULTIPLY'}) - - multiply_11 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_10, 1: multiply_10}, attrs={'operation': 'MULTIPLY'}) - - multiply_12 = nw.new_node(Nodes.Math, input_kwargs={0: group.outputs["Value"], 1: 0.7000}, attrs={'operation': 'MULTIPLY'}) - - multiply_13 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_12, 1: multiply_12}, attrs={'operation': 'MULTIPLY'}) - - add_5 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_11, 1: multiply_13}) - - add_6 = nw.new_node(Nodes.Math, input_kwargs={0: add_5, 1: 0.1800}) - - colorramp_2 = nw.new_node(Nodes.ColorRamp, input_kwargs={'Fac': add_6}) - colorramp_2.color_ramp.elements[0].position = 0.4773 - colorramp_2.color_ramp.elements[0].color = [1.0000, 1.0000, 1.0000, 1.0000] - colorramp_2.color_ramp.elements[1].position = 0.6659 - colorramp_2.color_ramp.elements[1].color = [0.0000, 0.0000, 0.0000, 1.0000] - - noise_texture_1 = nw.new_node(Nodes.NoiseTexture, input_kwargs={'W': 1.0000}, attrs={'noise_dimensions': '4D'}) - - colorramp_4 = nw.new_node(Nodes.ColorRamp, input_kwargs={'Fac': noise_texture_1.outputs["Color"]}) - colorramp_4.color_ramp.interpolation = "CARDINAL" - colorramp_4.color_ramp.elements[0].position = 0.2886 - colorramp_4.color_ramp.elements[0].color = [1.0000, 0.5767, 0.0000, 1.0000] - colorramp_4.color_ramp.elements[1].position = 0.5455 - colorramp_4.color_ramp.elements[1].color = [1.0000, 0.0000, 0.0112, 1.0000] - - mix_7 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': colorramp_2.outputs["Color"], 'Color1': (0.7384, 0.5239, 0.2703, 1.0000), 'Color2': colorramp_4.outputs["Color"]}) - - mix_6 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': colorramp_1.outputs["Color"], 'Color1': mix_7, 'Color2': (0.0000, 0.0000, 0.0000, 1.0000)}) - - mix_5 = nw.new_node(Nodes.MixRGB, input_kwargs={'Fac': add_4, 'Color1': (0.0000, 0.0000, 0.0000, 1.0000), 'Color2': mix_6}) - - mix_2 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': colorramp.outputs["Color"], 'Color1': mix_5, 'Color2': (0.0000, 0.0000, 0.0000, 1.0000)}) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, input_kwargs={'Base Color': mix_2, 'Specular': 0.0000, 'Roughness': 0.0000}) - - principled_bsdf_1 = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Specular': 1.0000, 'Roughness': 0.0000, 'IOR': 1.3500, 'Transmission': 1.0000}) - - transparent_bsdf = nw.new_node(Nodes.TransparentBSDF) - - mix_shader_1 = nw.new_node(Nodes.MixShader, input_kwargs={'Fac': 0.1577, 1: principled_bsdf_1, 2: transparent_bsdf}) - - mix_shader = nw.new_node(Nodes.MixShader, - input_kwargs={'Fac': attribute_2.outputs["Color"], 1: principled_bsdf, 2: mix_shader_1}) - - material_output = nw.new_node(Nodes.MaterialOutput, input_kwargs={'Surface': mix_shader}, attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_eyeball_eyelid_inner', singleton=False, type='GeometryNodeTree') -def nodegroup_eyeball_eyelid_inner(nw: NodeWrangler): - # Code generated using version 2.6.3 of the node_transpiler - - group_input_2 = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'EyeRot', 0.5000), - ('NodeSocketVector', 'EyelidCircleShape(W, H)', (0.0000, 0.0000, 0.0000)), - ('NodeSocketVector', 'EyelidRadiusShape(Out, In1, In2)', (0.0000, 0.0000, 0.0000)), - ('NodeSocketVector', 'EyelidResolution(Circle, Radius)', (0.0000, 0.0000, 0.0000)), - ('NodeSocketVector', 'CorneaScale(W, H, Thickness)', (0.0000, 0.0000, 0.0000)), - ('NodeSocketVector', 'EyeballResolution(White, Cornea)', (0.0000, 0.0000, 
0.0000)), - ('NodeSocketVectorXYZ', 'Scale', (1.0000, 1.0000, 1.0000))]) - - separate_xyz_6 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': group_input_2.outputs["CorneaScale(W, H, Thickness)"]}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: group_input_2.outputs["EyeRot"], 1: 0.0175}, - attrs={'operation': 'MULTIPLY'}) - - separate_xyz_7 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': group_input_2.outputs["EyeballResolution(White, Cornea)"]}) - - eyeball = nw.new_node(nodegroup_eye_ball().name, - input_kwargs={'CorneaScaleX': separate_xyz_6.outputs["X"], 'Height': separate_xyz_6.outputs["Y"], 'CorneaScaleZ': separate_xyz_6.outputs["Z"], 'Y': multiply, 'EyeballResolution': separate_xyz_7.outputs["X"], 'CorneaResolution': separate_xyz_7.outputs["Y"]}) - - join_geometry_2 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [eyeball.outputs["Cornea"], eyeball.outputs["Eyeball"]]}) - - set_material_1 = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': join_geometry_2, 'Material': surface.shaderfunc_to_material(shader_eyeball_tiger)}) - - value_5 = nw.new_node(Nodes.Value) - value_5.outputs[0].default_value = 1.5000 - - transform_2 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': set_material_1, 'Translation': (0.0000, -1.3500, -0.0500), 'Scale': value_5}) - - position_2 = nw.new_node(Nodes.InputPosition) - - store_named_attribute = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': transform_2, 'Name': 'EyeballPosition', 2: position_2}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - join_geometry_3 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': store_named_attribute}) - - separate_xyz_3 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': group_input_2.outputs["EyelidCircleShape(W, H)"]}) - - separate_xyz_5 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': group_input_2.outputs["EyelidResolution(Circle, Radius)"]}) - - eyelidcircle = nw.new_node(nodegroup_eyelid_circle().name, - input_kwargs={'ShapeW': separate_xyz_3.outputs["X"], 'ShapeH': separate_xyz_3.outputs["Y"], 'Resolution': separate_xyz_5.outputs["X"]}) - - value_1 = nw.new_node(Nodes.Value) - value_1.outputs[0].default_value = 0.6000 - - transform_1 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': eyelidcircle.outputs["Geometry"], 'Scale': value_1}) - - separate_xyz_4 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': group_input_2.outputs["EyelidRadiusShape(Out, In1, In2)"]}) - - eyelidradis = nw.new_node(nodegroup_eyelid_radius().name, - input_kwargs={'OuterControl': separate_xyz_4.outputs["X"], 'InnerControl1': separate_xyz_4.outputs["Y"], 'InnerControl2': separate_xyz_4.outputs["Z"], 'Resolution': separate_xyz_5.outputs["Y"]}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': transform_1, 'Profile Curve': eyelidradis.outputs["Geometry"]}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, input_kwargs={'Scale': 0.7000}) - - subtract = nw.new_node(Nodes.VectorMath, - input_kwargs={0: noise_texture.outputs["Fac"], 1: (0.5000, 0.5000, 0.5000)}, - attrs={'operation': 'SUBTRACT'}) - - normal = nw.new_node(Nodes.InputNormal) - - multiply_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: subtract.outputs["Vector"], 1: normal}, - attrs={'operation': 'MULTIPLY'}) - - value_2 = nw.new_node(Nodes.Value) - value_2.outputs[0].default_value = 0.1000 - - multiply_2 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: multiply_1.outputs["Vector"], 1: value_2}, - attrs={'operation': 'MULTIPLY'}) - - 
set_position_1 = nw.new_node(Nodes.SetPosition, input_kwargs={'Geometry': curve_to_mesh, 'Offset': multiply_2.outputs["Vector"]}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': eyelidcircle}) - - less_than = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz.outputs["Z"], 1: 0.0000}, attrs={'operation': 'LESS_THAN'}) - - absolute = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz.outputs["X"], 1: 0.0000}, attrs={'operation': 'ABSOLUTE'}) - - subtract_1 = nw.new_node(Nodes.Math, input_kwargs={0: -0.0000, 1: absolute}, attrs={'operation': 'SUBTRACT', 'use_clamp': True}) - - multiply_3 = nw.new_node(Nodes.Math, input_kwargs={0: less_than, 1: subtract_1}, attrs={'operation': 'MULTIPLY'}) - - greater_than = nw.new_node(Nodes.Math, input_kwargs={0: eyelidradis, 1: 0.6000}, attrs={'operation': 'GREATER_THAN'}) - - multiply_4 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_3, 1: greater_than}, attrs={'operation': 'MULTIPLY'}) - - multiply_5 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_4, 1: -1.2000}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': multiply_5}) - - set_position_2 = nw.new_node(Nodes.SetPosition, input_kwargs={'Geometry': set_position_1, 'Offset': combine_xyz_2}) - - transform_3 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': set_position_2, 'Scale': group_input_2.outputs["Scale"]}) - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': transform_3}) - - position = nw.new_node(Nodes.InputPosition) - - separate_xyz_2 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': position}) - - cosine = nw.new_node(Nodes.Math, input_kwargs={0: multiply}, attrs={'operation': 'COSINE'}) - - multiply_6 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz_2.outputs["X"], 1: cosine}, attrs={'operation': 'MULTIPLY'}) - - sine = nw.new_node(Nodes.Math, input_kwargs={0: multiply}, attrs={'operation': 'SINE'}) - - multiply_7 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz_2.outputs["Z"], 1: sine}, attrs={'operation': 'MULTIPLY'}) - - subtract_2 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_6, 1: multiply_7}, attrs={'operation': 'SUBTRACT'}) - - multiply_8 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz_2.outputs["Z"], 1: cosine}, attrs={'operation': 'MULTIPLY'}) - - multiply_9 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz_2.outputs["X"], 1: sine}, attrs={'operation': 'MULTIPLY'}) - - add = nw.new_node(Nodes.Math, input_kwargs={0: multiply_8, 1: multiply_9}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': subtract_2, 'Y': separate_xyz_2.outputs["Y"], 'Z': add}) - - set_position = nw.new_node(Nodes.SetPosition, input_kwargs={'Geometry': join_geometry_1, 'Position': combine_xyz}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Eyeball': join_geometry_3, 'Eyelid': set_position}, - attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_append_eye', singleton=False, type='GeometryNodeTree') -def nodegroup_append_eye(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Target Geometry', None), - ('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketVector', 'Translation', (0.0, 0.0, 0.0)), - ('NodeSocketFloat', 'Scale', 0.0), - ('NodeSocketVectorEuler', 'Rotation', (0.1745, 0.0, -1.3963)), - ('NodeSocketVector', 'Ray Direction', (-1.0, 0.0, 0.0)), - ('NodeSocketFloat', 
'Default Offset', -0.002)]) - - position = nw.new_node(Nodes.InputPosition) - - store_named_attribute = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 'Name': 'pos', 2: position}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': store_named_attribute, 'Translation': group_input.outputs["Translation"], 'Rotation': group_input.outputs["Rotation"], 'Scale': group_input.outputs["Scale"]}) - - raycastappend = nw.new_node(nodegroup_raycast_append().name, - input_kwargs={'Geometry': transform, 'Target Geometry': group_input.outputs["Target Geometry"], 'Ray Direction': group_input.outputs["Ray Direction"], 'Default Offset': group_input.outputs["Default Offset"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': raycastappend}) - -@node_utils.to_nodegroup('nodegroup_eye_sockets', singleton=False, type='GeometryNodeTree') -def nodegroup_eye_sockets(nw: NodeWrangler): - # Code generated using version 2.6.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Skin Mesh', None), - ('NodeSocketGeometry', 'Skeleton Curve', None), - ('NodeSocketGeometry', 'Base Mesh', None), - ('NodeSocketVector', 'Length/Yaw/Rad', (0.5000, 0.0000, 1.0000)), - ('NodeSocketVector', 'Part Rot', (0.0000, 0.0000, 53.7000)), - ('NodeSocketVectorXYZ', 'Scale', (2.0000, 2.0000, 2.0000))]) - - eyehole_cutter = nw.new_node(nodegroup_simple_tube().name, - input_kwargs={'Origin': (-0.1000, 0.0000, 0.0000), 'Angles Deg': (0.0000, 0.0000, 0.0000), 'Seg Lengths': (0.0500, 0.0500, 0.0900), 'Start Radius': 0.0200, 'Fullness': 0.3000}, - label='Eyehole Cutter') - - part_surface_simple = nw.new_node(nodegroup_part_surface_simple().name, - input_kwargs={'Skeleton Curve': group_input.outputs["Skeleton Curve"], 'Skin Mesh': group_input.outputs["Base Mesh"], 'Length, Yaw, Rad': group_input.outputs["Length/Yaw/Rad"]}) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': eyehole_cutter.outputs["Geometry"], 'Translation': part_surface_simple.outputs["Position"], 'Rotation': group_input.outputs["Part Rot"], 'Scale': group_input.outputs["Scale"]}) - - symmetric_clone = nw.new_node(nodegroup_symmetric_clone().name, input_kwargs={'Geometry': transform}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': group_input.outputs["Skin Mesh"], 'Mesh': symmetric_clone.outputs["Both"], 'Position': part_surface_simple.outputs["Position"]}, - attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_simple_tube_v2', singleton=False, type='GeometryNodeTree') -def nodegroup_simple_tube_v2(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'length_rad1_rad2', (1.0, 0.5, 0.3)), - ('NodeSocketVector', 'angles_deg', (0.0, 0.0, 0.0)), - ('NodeSocketVector', 'proportions', (0.3333, 0.3333, 0.3333)), - ('NodeSocketFloat', 'aspect', 1.0), - ('NodeSocketBool', 'do_bezier', True), - ('NodeSocketFloat', 'fullness', 4.0), - ('NodeSocketVector', 'Origin', (0.0, 0.0, 0.0))]) - - vector_sum = nw.new_node(nodegroup_vector_sum().name, - input_kwargs={'Vector': group_input.outputs["proportions"]}) - - divide = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["proportions"], 1: vector_sum}, - attrs={'operation': 'DIVIDE'}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - 
input_kwargs={'Vector': group_input.outputs["length_rad1_rad2"]}) - - scale = nw.new_node(Nodes.VectorMath, - input_kwargs={0: divide.outputs["Vector"], 'Scale': separate_xyz.outputs["X"]}, - attrs={'operation': 'SCALE'}) - - polarbezier = nw.new_node(nodegroup_polar_bezier().name, - input_kwargs={'Resolution': 25, 'Origin': group_input.outputs["Origin"], 'angles_deg': group_input.outputs["angles_deg"], 'Seg Lengths': scale.outputs["Vector"], 'Do Bezier': group_input.outputs["do_bezier"]}) - - aspect_to_dim = nw.new_node(nodegroup_aspect_to_dim().name, - input_kwargs={'Aspect Ratio': group_input.outputs["aspect"]}) - - position = nw.new_node(Nodes.InputPosition) - - multiply = nw.new_node(Nodes.VectorMath, - input_kwargs={0: aspect_to_dim, 1: position}, - attrs={'operation': 'MULTIPLY'}) - - warped_circle_curve = nw.new_node(nodegroup_warped_circle_curve().name, - input_kwargs={'Position': multiply.outputs["Vector"], 'Vertices': 40}) - - smoothtaper = nw.new_node(nodegroup_smooth_taper().name, - input_kwargs={'start_rad': separate_xyz.outputs["Y"], 'end_rad': separate_xyz.outputs["Z"], 'fullness': group_input.outputs["fullness"]}) - - profilepart = nw.new_node(nodegroup_profile_part().name, - input_kwargs={'Skeleton Curve': polarbezier.outputs["Curve"], 'Profile Curve': warped_circle_curve, 'Radius Func': smoothtaper}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': profilepart, 'Skeleton Curve': polarbezier.outputs["Curve"], 'Endpoint': polarbezier.outputs["Endpoint"]}) - -@node_utils.to_nodegroup('nodegroup_surface_muscle', singleton=False, type='GeometryNodeTree') -def nodegroup_surface_muscle(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Skin Mesh', None), - ('NodeSocketGeometry', 'Skeleton Curve', None), - ('NodeSocketVector', 'Coord 0', (0.4, 0.0, 1.0)), - ('NodeSocketVector', 'Coord 1', (0.5, 0.0, 1.0)), - ('NodeSocketVector', 'Coord 2', (0.6, 0.0, 1.0)), - ('NodeSocketVector', 'StartRad, EndRad, Fullness', (0.0, 0.0, 0.0)), - ('NodeSocketVector', 'ProfileHeight, StartTilt, EndTilt', (0.0, 0.0, 0.0)), - ('NodeSocketBool', 'Debug Points', False)]) - - cube = nw.new_node(Nodes.MeshCube, - input_kwargs={'Size': (0.03, 0.03, 0.03)}) - - part_surface_simple = nw.new_node(nodegroup_part_surface_simple().name, - input_kwargs={'Skeleton Curve': group_input.outputs["Skeleton Curve"], 'Skin Mesh': group_input.outputs["Skin Mesh"], 'Length, Yaw, Rad': group_input.outputs["Coord 0"]}) - - transform_2 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': cube, 'Translation': part_surface_simple.outputs["Position"]}) - - part_surface_simple_1 = nw.new_node(nodegroup_part_surface_simple().name, - input_kwargs={'Skeleton Curve': group_input.outputs["Skeleton Curve"], 'Skin Mesh': group_input.outputs["Skin Mesh"], 'Length, Yaw, Rad': group_input.outputs["Coord 1"]}) - - transform_1 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': cube, 'Translation': part_surface_simple_1.outputs["Position"]}) - - part_surface_simple_2 = nw.new_node(nodegroup_part_surface_simple().name, - input_kwargs={'Skeleton Curve': group_input.outputs["Skeleton Curve"], 'Skin Mesh': group_input.outputs["Skin Mesh"], 'Length, Yaw, Rad': group_input.outputs["Coord 2"]}) - - transform_3 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': cube, 'Translation': part_surface_simple_2.outputs["Position"]}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, - 
input_kwargs={'Geometry': [transform_2, transform_1, transform_3]}) - - switch = nw.new_node(Nodes.Switch, - input_kwargs={1: group_input.outputs["Debug Points"], 15: join_geometry}) - - set_material = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': switch.outputs[6], 'Material': surface.shaderfunc_to_material(shader_material)}) - - u_resolution = nw.new_node(Nodes.Integer, - label='U Resolution', - attrs={'integer': 16}) - u_resolution.integer = 16 - - quadratic_bezier = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Resolution': u_resolution, 'Start': part_surface_simple.outputs["Position"], 'Middle': part_surface_simple_1.outputs["Position"], 'End': part_surface_simple_2.outputs["Position"]}) - - spline_parameter = nw.new_node(Nodes.SplineParameter) - - separate_xyz_1 = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': group_input.outputs["ProfileHeight, StartTilt, EndTilt"]}) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': spline_parameter.outputs["Factor"], 3: separate_xyz_1.outputs["Y"], 4: separate_xyz_1.outputs["Z"]}) - - deg2rad = nw.new_node(nodegroup_deg2_rad().name, - input_kwargs={'Deg': map_range_1.outputs["Result"]}) - - set_curve_tilt = nw.new_node(Nodes.SetCurveTilt, - input_kwargs={'Curve': quadratic_bezier, 'Tilt': deg2rad}) - - position = nw.new_node(Nodes.InputPosition) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': separate_xyz_1.outputs["X"], 'Y': 1.0, 'Z': 1.0}) - - multiply = nw.new_node(Nodes.VectorMath, - input_kwargs={0: position, 1: combine_xyz}, - attrs={'operation': 'MULTIPLY'}) - - v_resolution = nw.new_node(Nodes.Integer, - label='V resolution', - attrs={'integer': 10}) - v_resolution.integer = 10 - - warped_circle_curve = nw.new_node(nodegroup_warped_circle_curve().name, - input_kwargs={'Position': multiply.outputs["Vector"], 'Vertices': v_resolution}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': group_input.outputs["StartRad, EndRad, Fullness"]}) - - smoothtaper = nw.new_node(nodegroup_smooth_taper().name, - input_kwargs={'start_rad': separate_xyz.outputs["X"], 'end_rad': separate_xyz.outputs["Y"], 'fullness': separate_xyz.outputs["Z"]}) - - profilepart = nw.new_node(nodegroup_profile_part().name, - input_kwargs={'Skeleton Curve': set_curve_tilt, 'Profile Curve': warped_circle_curve, 'Radius Func': smoothtaper}) - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [set_material, profilepart]}) - - switch_1 = nw.new_node(Nodes.Switch, - input_kwargs={1: True, 15: join_geometry_1}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': switch_1.outputs[6]}) - -@node_utils.to_nodegroup('nodegroup_simple_tube', singleton=False, type='GeometryNodeTree') -def nodegroup_simple_tube(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'Origin', (0.0, 0.0, 0.0)), - ('NodeSocketVector', 'Angles Deg', (30.0, -1.5, 11.0)), - ('NodeSocketVector', 'Seg Lengths', (0.02, 0.02, 0.02)), - ('NodeSocketFloat', 'Start Radius', 0.06), - ('NodeSocketFloat', 'End Radius', 0.03), - ('NodeSocketFloat', 'Fullness', 8.17), - ('NodeSocketBool', 'Do Bezier', True), - ('NodeSocketFloat', 'Aspect Ratio', 1.0)]) - - polarbezier = nw.new_node(nodegroup_polar_bezier().name, - input_kwargs={'Resolution': 25, 'Origin': group_input.outputs["Origin"], 'angles_deg': group_input.outputs["Angles Deg"], 'Seg Lengths': 
group_input.outputs["Seg Lengths"], 'Do Bezier': group_input.outputs["Do Bezier"]}) - - aspect_to_dim = nw.new_node(nodegroup_aspect_to_dim().name, - input_kwargs={'Aspect Ratio': group_input.outputs["Aspect Ratio"]}) - - position = nw.new_node(Nodes.InputPosition) - - multiply = nw.new_node(Nodes.VectorMath, - input_kwargs={0: aspect_to_dim, 1: position}, - attrs={'operation': 'MULTIPLY'}) - - warped_circle_curve = nw.new_node(nodegroup_warped_circle_curve().name, - input_kwargs={'Position': multiply.outputs["Vector"], 'Vertices': 40}) - - smoothtaper = nw.new_node(nodegroup_smooth_taper().name, - input_kwargs={'start_rad': group_input.outputs["Start Radius"], 'end_rad': group_input.outputs["End Radius"], 'fullness': group_input.outputs["Fullness"]}) - - profilepart = nw.new_node(nodegroup_profile_part().name, - input_kwargs={'Skeleton Curve': polarbezier.outputs["Curve"], 'Profile Curve': warped_circle_curve, 'Radius Func': smoothtaper}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': profilepart, 'Skeleton Curve': polarbezier.outputs["Curve"], 'Endpoint': polarbezier.outputs["Endpoint"]}) - -@node_utils.to_nodegroup('nodegroup_smooth_taper', singleton=False, type='GeometryNodeTree') -def nodegroup_smooth_taper(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - spline_parameter = nw.new_node(Nodes.SplineParameter) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: spline_parameter.outputs["Factor"], 1: 3.1416}, - attrs={'operation': 'MULTIPLY'}) - - sine = nw.new_node(Nodes.Math, - input_kwargs={0: multiply}, - attrs={'operation': 'SINE'}) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'start_rad', 0.29), - ('NodeSocketFloat', 'end_rad', 0.0), - ('NodeSocketFloat', 'fullness', 2.5)]) - - divide = nw.new_node(Nodes.Math, - input_kwargs={0: 1.0, 1: group_input.outputs["fullness"]}, - attrs={'operation': 'DIVIDE'}) - - power = nw.new_node(Nodes.Math, - input_kwargs={0: sine, 1: divide}, - attrs={'operation': 'POWER'}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': spline_parameter.outputs["Factor"], 3: group_input.outputs["start_rad"], 4: group_input.outputs["end_rad"]}, - attrs={'clamp': False}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: power, 1: map_range.outputs["Result"]}, - attrs={'operation': 'MULTIPLY'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Value': multiply_1}) - -@node_utils.to_nodegroup('nodegroup_warped_circle_curve', singleton=False, type='GeometryNodeTree') -def nodegroup_warped_circle_curve(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'Position', (0.0, 0.0, 0.0)), - ('NodeSocketInt', 'Vertices', 32)]) - - mesh_circle = nw.new_node(Nodes.MeshCircle, - input_kwargs={'Vertices': group_input.outputs["Vertices"]}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': mesh_circle, 'Position': group_input.outputs["Position"]}) - - mesh_to_curve = nw.new_node(Nodes.MeshToCurve, - input_kwargs={'Mesh': set_position}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Curve': mesh_to_curve}) - -@node_utils.to_nodegroup('nodegroup_profile_part', singleton=False, type='GeometryNodeTree') -def nodegroup_profile_part(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - 
expose_input=[('NodeSocketGeometry', 'Skeleton Curve', None), - ('NodeSocketGeometry', 'Profile Curve', None), - ('NodeSocketFloatDistance', 'Radius Func', 1.0)]) - - set_curve_radius = nw.new_node(Nodes.SetCurveRadius, - input_kwargs={'Curve': group_input.outputs["Skeleton Curve"], 'Radius': group_input.outputs["Radius Func"]}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': set_curve_radius, 'Profile Curve': group_input.outputs["Profile Curve"], 'Fill Caps': True}) - - set_shade_smooth = nw.new_node(Nodes.SetShadeSmooth, - input_kwargs={'Geometry': curve_to_mesh, 'Shade Smooth': False}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_shade_smooth}) - -@node_utils.to_nodegroup('nodegroup_polar_bezier', singleton=False, type='GeometryNodeTree') -def nodegroup_polar_bezier(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketIntUnsigned', 'Resolution', 32), - ('NodeSocketVector', 'Origin', (0.0, 0.0, 0.0)), - ('NodeSocketVector', 'angles_deg', (0.0, 0.0, 0.0)), - ('NodeSocketVector', 'Seg Lengths', (0.3, 0.3, 0.3)), - ('NodeSocketBool', 'Do Bezier', True)]) - - mesh_line = nw.new_node(Nodes.MeshLine, - input_kwargs={'Count': 4}) - - index = nw.new_node(Nodes.Index) - - deg2_rad = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["angles_deg"], 'Scale': 0.0175}, - label='Deg2Rad', - attrs={'operation': 'SCALE'}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': deg2_rad.outputs["Vector"]}) - - reroute = nw.new_node(Nodes.Reroute, - input_kwargs={'Input': separate_xyz.outputs["X"]}) - - separate_xyz_1 = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': group_input.outputs["Seg Lengths"]}) - - polartocart = nw.new_node(nodegroup_polar_to_cart().name, - input_kwargs={'Angle': reroute, 'Length': separate_xyz_1.outputs["X"], 'Origin': group_input.outputs["Origin"]}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: reroute, 1: separate_xyz.outputs["Y"]}) - - polartocart_1 = nw.new_node(nodegroup_polar_to_cart().name, - input_kwargs={'Angle': add, 'Length': separate_xyz_1.outputs["Y"], 'Origin': polartocart}) - - add_1 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz.outputs["Z"], 1: add}) - - polartocart_2 = nw.new_node(nodegroup_polar_to_cart().name, - input_kwargs={'Angle': add_1, 'Length': separate_xyz_1.outputs["Z"], 'Origin': polartocart_1}) - - switch4 = nw.new_node(nodegroup_switch4().name, - input_kwargs={'Arg': index, 'Arg == 0': group_input.outputs["Origin"], 'Arg == 1': polartocart, 'Arg == 2': polartocart_1, 'Arg == 3': polartocart_2}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': mesh_line, 'Position': switch4}) - - mesh_to_curve = nw.new_node(Nodes.MeshToCurve, - input_kwargs={'Mesh': set_position}) - - subdivide_curve_1 = nw.new_node(Nodes.SubdivideCurve, - input_kwargs={'Curve': mesh_to_curve, 'Cuts': group_input.outputs["Resolution"]}) - - integer = nw.new_node(Nodes.Integer, - attrs={'integer': 2}) - integer.integer = 2 - - bezier_segment = nw.new_node(Nodes.CurveBezierSegment, - input_kwargs={'Resolution': integer, 'Start': group_input.outputs["Origin"], 'Start Handle': polartocart, 'End Handle': polartocart_1, 'End': polartocart_2}) - - divide = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Resolution"], 1: integer}, - attrs={'operation': 'DIVIDE'}) - - subdivide_curve = 
nw.new_node(Nodes.SubdivideCurve, - input_kwargs={'Curve': bezier_segment, 'Cuts': divide}) - - switch = nw.new_node(Nodes.Switch, - input_kwargs={1: group_input.outputs["Do Bezier"], 14: subdivide_curve_1, 15: subdivide_curve}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Curve': switch.outputs[6], 'Endpoint': polartocart_2}) - -@node_utils.to_nodegroup('nodegroup_solidify', singleton=False, type='GeometryNodeTree') -def nodegroup_solidify(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Mesh', None), - ('NodeSocketFloatDistance', 'Distance', 0.0)]) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Distance"]}, - attrs={'operation': 'MULTIPLY'}) - - extrude_mesh = nw.new_node(Nodes.ExtrudeMesh, - input_kwargs={'Mesh': group_input.outputs["Mesh"], 'Offset Scale': multiply, 'Individual': False}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Distance"], 1: -0.5}, - attrs={'operation': 'MULTIPLY'}) - - extrude_mesh_1 = nw.new_node(Nodes.ExtrudeMesh, - input_kwargs={'Mesh': group_input.outputs["Mesh"], 'Offset Scale': multiply_1, 'Individual': False}) - - flip_faces = nw.new_node(Nodes.FlipFaces, - input_kwargs={'Mesh': extrude_mesh_1.outputs["Mesh"]}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [extrude_mesh.outputs["Mesh"], flip_faces]}) - - merge_by_distance = nw.new_node(Nodes.MergeByDistance, - input_kwargs={'Geometry': join_geometry, 'Distance': 0.0}) - - set_shade_smooth = nw.new_node(Nodes.SetShadeSmooth, - input_kwargs={'Geometry': merge_by_distance, 'Shade Smooth': False}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_shade_smooth}) - -@node_utils.to_nodegroup('nodegroup_taper', singleton=False, type='GeometryNodeTree') -def nodegroup_taper(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketVector', 'Start', (1.0, 0.63, 0.72)), - ('NodeSocketVector', 'End', (1.0, 1.0, 1.0))]) - - position = nw.new_node(Nodes.InputPosition) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': position}) - - attribute_statistic = nw.new_node(Nodes.AttributeStatistic, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 2: separate_xyz.outputs["X"]}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Vector': separate_xyz.outputs["X"], 7: attribute_statistic.outputs["Min"], 8: attribute_statistic.outputs["Max"], 9: group_input.outputs["Start"], 10: group_input.outputs["End"]}, - attrs={'data_type': 'FLOAT_VECTOR', 'clamp': False}) - - multiply = nw.new_node(Nodes.VectorMath, - input_kwargs={0: position, 1: map_range.outputs["Vector"]}, - attrs={'operation': 'MULTIPLY'}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 'Position': multiply.outputs["Vector"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_position}) - -@node_utils.to_nodegroup('nodegroup_raycast_rotation', singleton=False, type='GeometryNodeTree') -def nodegroup_raycast_rotation(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVectorEuler', 'Rotation', (0.0, 0.0, 0.0)), - 
('NodeSocketVector', 'Hit Normal', (0.0, 0.0, 1.0)), - ('NodeSocketVector', 'Curve Tangent', (0.0, 0.0, 1.0)), - ('NodeSocketBool', 'Do Normal Rot', False), - ('NodeSocketBool', 'Do Tangent Rot', False)]) - - align_euler_to_vector = nw.new_node(Nodes.AlignEulerToVector, - input_kwargs={'Vector': group_input.outputs["Hit Normal"]}) - - rotate_euler = nw.new_node(Nodes.RotateEuler, - input_kwargs={'Rotation': group_input.outputs["Rotation"], 'Rotate By': align_euler_to_vector}) - - if_normal_rot = nw.new_node(Nodes.Switch, - input_kwargs={0: group_input.outputs["Do Normal Rot"], 8: group_input.outputs["Rotation"], 9: rotate_euler}, - label='if_normal_rot', - attrs={'input_type': 'VECTOR'}) - - align_euler_to_vector_1 = nw.new_node(Nodes.AlignEulerToVector, - input_kwargs={'Rotation': group_input.outputs["Rotation"], 'Vector': group_input.outputs["Curve Tangent"]}) - - rotate_euler_1 = nw.new_node(Nodes.RotateEuler, - input_kwargs={'Rotation': align_euler_to_vector_1, 'Rotate By': group_input.outputs["Rotation"]}, - attrs={'space': 'LOCAL'}) - - if_tangent_rot = nw.new_node(Nodes.Switch, - input_kwargs={0: group_input.outputs["Do Tangent Rot"], 8: if_normal_rot.outputs[3], 9: rotate_euler_1}, - label='if_tangent_rot', - attrs={'input_type': 'VECTOR'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Output': if_tangent_rot.outputs[3]}) - -@node_utils.to_nodegroup('nodegroup_part_surface', singleton=False, type='GeometryNodeTree') -def nodegroup_part_surface(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Skeleton Curve', None), - ('NodeSocketGeometry', 'Skin Mesh', None), - ('NodeSocketFloatFactor', 'Length Fac', 0.0), - ('NodeSocketVectorEuler', 'Ray Rot', (0.0, 0.0, 0.0)), - ('NodeSocketFloat', 'Rad', 0.0)]) - - sample_curve = nw.new_node(Nodes.SampleCurve, - input_kwargs={'Curve': group_input.outputs["Skeleton Curve"], 'Factor': group_input.outputs["Length Fac"]}, - attrs={'mode': 'FACTOR'}) - - vector_rotate = nw.new_node(Nodes.VectorRotate, - input_kwargs={'Vector': sample_curve.outputs["Tangent"], 'Rotation': group_input.outputs["Ray Rot"]}, - attrs={'rotation_type': 'EULER_XYZ'}) - - raycast = nw.new_node(Nodes.Raycast, - input_kwargs={'Target Geometry': group_input.outputs["Skin Mesh"], 'Source Position': sample_curve.outputs["Position"], 'Ray Direction': vector_rotate, 'Ray Length': 5.0}) - - lerp = nw.new_node(Nodes.MapRange, - input_kwargs={'Vector': group_input.outputs["Rad"], 9: sample_curve.outputs["Position"], 10: raycast.outputs["Hit Position"]}, - label='lerp', - attrs={'data_type': 'FLOAT_VECTOR', 'clamp': False}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Position': lerp.outputs["Vector"], 'Hit Normal': raycast.outputs["Hit Normal"], 'Tangent': sample_curve.outputs["Tangent"], 'Skeleton Pos': sample_curve.outputs["Position"]}) - -@node_utils.to_nodegroup('nodegroup_eyeball_eyelid', singleton=False, type='GeometryNodeTree') -def nodegroup_eyeball_eyelid(nw: NodeWrangler): - # Code generated using version 2.6.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Skin Mesh', None), - ('NodeSocketGeometry', 'Base Mesh', None), - ('NodeSocketGeometry', 'Skeleton Curve', None), - ('NodeSocketVector', 'Length/Yaw/Rad', (0.5000, 0.0000, 1.0000)), - ('NodeSocketGeometry', 'Target Geometry', None), - ('NodeSocketFloat', 'EyeRot', -23.0000), - ('NodeSocketVector', 
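For reference, `nodegroup_part_surface` above samples the skeleton curve at `Length Fac`, rotates the tangent by `Ray Rot`, raycasts toward the skin mesh, and then places the attachment point with the MapRange node the transpiler labeled `lerp`: `Rad = 0` sits on the skeleton, `Rad = 1` sits on the skin, and values outside that range over/undershoot because clamping is disabled. A standalone sketch of just that placement step (the raycast hit is assumed given):

```python
import numpy as np

def part_surface_position(skeleton_pos, hit_pos, rad):
    """Linear interpolation between the sampled skeleton point and the raycast
    hit on the skin mesh, as in the 'lerp'-labeled MapRange node."""
    skeleton_pos = np.asarray(skeleton_pos, dtype=float)
    hit_pos = np.asarray(hit_pos, dtype=float)
    return skeleton_pos + rad * (hit_pos - skeleton_pos)

# Halfway between skeleton and skin; Rad > 1 (e.g. the jaw cutter's 1.25) pushes past the surface.
print(part_surface_position((0.0, 0.0, 0.0), (0.0, 0.2, 0.1), 0.5))
```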
'EyelidCircleShape(W, H)', (2.0000, 1.4500, 0.0000)), - ('NodeSocketVector', 'EyelidRadiusShape(Out, In1, In2)', (0.4000, 5.3000, 0.4000)), - ('NodeSocketVector', 'EyelidResolution(Circle, Radius)', (32.0000, 32.0000, 0.0000)), - ('NodeSocketVector', 'CorneaScale(W, H, Thickness)', (0.8000, 0.8000, 0.5500)), - ('NodeSocketVector', 'EyeballResolution(White, Cornea)', (32.0000, 128.0000, 0.0000)), - ('NodeSocketVector', 'OffsetPreAppending', (0.0120, 0.0000, 0.0000)), - ('NodeSocketFloat', 'Scale', 1.0), - ('NodeSocketVectorEuler', 'Rotation', (0.1745, 0.0000, -1.3963)), - ('NodeSocketVector', 'RayDirection', (-1.0000, 0.0000, 0.0000)), - ('NodeSocketFloat', 'DefaultAppendDistance', -0.0020), - ('NodeSocketVector', 'EyeSocketRot', (0.0000, 0.0000, 0.0000)), - ('NodeSocketVectorXYZ', 'EyelidScale', (1.0000, 1.0000, 1.0000))]) - - eyesockets = nw.new_node(nodegroup_eye_sockets().name, - input_kwargs={'Skin Mesh': group_input.outputs["Skin Mesh"], 'Skeleton Curve': group_input.outputs["Skeleton Curve"], 'Base Mesh': group_input.outputs["Base Mesh"], 'Length/Yaw/Rad': group_input.outputs["Length/Yaw/Rad"], 'Part Rot': group_input.outputs["EyeSocketRot"], 'Scale': group_input.outputs["Scale"]}) - - #transform = nw.new_node(Nodes.Transform, - # input_kwargs={'Geometry': eyesockets.outputs["Mesh"], 'Scale': group_input.outputs["Scale"]}) - - tigereyeinner = nw.new_node(nodegroup_eyeball_eyelid_inner().name, - input_kwargs={'EyeRot': group_input.outputs["EyeRot"], 'EyelidCircleShape(W, H)': group_input.outputs["EyelidCircleShape(W, H)"], 'EyelidRadiusShape(Out, In1, In2)': group_input.outputs["EyelidRadiusShape(Out, In1, In2)"], 'EyelidResolution(Circle, Radius)': group_input.outputs["EyelidResolution(Circle, Radius)"], 'CorneaScale(W, H, Thickness)': group_input.outputs["CorneaScale(W, H, Thickness)"], 'EyeballResolution(White, Cornea)': group_input.outputs["EyeballResolution(White, Cornea)"], 'Scale': group_input.outputs["EyelidScale"]}) - - add = nw.new_node(Nodes.VectorMath, - input_kwargs={0: eyesockets.outputs["Position"], 1: group_input.outputs["OffsetPreAppending"]}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Scale"], 1: 0.0170}, attrs={'operation': 'MULTIPLY'}) - - appendeye = nw.new_node(nodegroup_append_eye().name, - input_kwargs={'Target Geometry': group_input.outputs["Target Geometry"], 'Geometry': tigereyeinner.outputs["Eyeball"], 'Translation': add, 'Scale': multiply, 'Rotation': group_input.outputs['Rotation'], 'Ray Direction': group_input.outputs["RayDirection"], 'Default Offset': group_input.outputs["DefaultAppendDistance"]}) - - appendeye_1 = nw.new_node(nodegroup_append_eye().name, - input_kwargs={'Target Geometry': group_input.outputs["Target Geometry"], 'Geometry': tigereyeinner.outputs["Eyelid"], 'Translation': add, 'Scale': multiply, 'Rotation': group_input.outputs['Rotation'], 'Ray Direction': group_input.outputs["RayDirection"], 'Default Offset': group_input.outputs["DefaultAppendDistance"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': None, 'ParentCutter': eyesockets.outputs["Mesh"], 'Eyeballl': appendeye, 'BodyExtra_Lid': appendeye_1}, - attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_carnivore__face_structure', singleton=False, type='GeometryNodeTree') -def nodegroup_carnivore__face_structure(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'Skull Length Width1 
Width2', (0.0, 0.0, 0.0)), - ('NodeSocketVector', 'Snout Length Width1 Width2', (0.0, 0.0, 0.0)), - ('NodeSocketFloat', 'Snout Y Scale', 0.62), - ('NodeSocketVectorXYZ', 'Nose Bridge Scale', (1.0, 0.35, 0.9)), - ('NodeSocketVector', 'Jaw Muscle Middle Coord', (0.24, 0.41, 1.3)), - ('NodeSocketVector', 'Jaw StartRad, EndRad, Fullness', (0.06, 0.11, 1.5)), - ('NodeSocketVector', 'Jaw ProfileHeight, StartTilt, EndTilt', (0.8, 33.1, 0.0)), - ('NodeSocketVector', 'Lip Muscle Middle Coord', (0.95, 0.0, 1.5)), - ('NodeSocketVector', 'Lip StartRad, EndRad, Fullness', (0.05, 0.09, 1.48)), - ('NodeSocketVector', 'Lip ProfileHeight, StartTilt, EndTilt', (0.8, 0.0, -17.2)), - ('NodeSocketVector', 'Forehead Muscle Middle Coord', (0.7, -1.32, 1.31)), - ('NodeSocketVector', 'Forehead StartRad, EndRad, Fullness', (0.06, 0.05, 2.5)), - ('NodeSocketVector', 'Forehead ProfileHeight, StartTilt, EndTilt', (0.3, 60.6, 66.0)), - ('NodeSocketFloat', 'aspect', 1.0)]) - - vector = nw.new_node(Nodes.Vector) - vector.vector = (-0.07, 0.0, 0.05) - - simple_tube_v2 = nw.new_node(nodegroup_simple_tube_v2().name, - input_kwargs={'length_rad1_rad2': group_input.outputs["Skull Length Width1 Width2"], 'angles_deg': (-5.67, 0.0, 0.0), 'aspect': group_input.outputs["aspect"], 'fullness': 3.63, 'Origin': vector}) - - snout_origin = nw.new_node(Nodes.VectorMath, - input_kwargs={0: simple_tube_v2.outputs["Endpoint"], 1: (-0.1, 0.0, 0.0)}, - label='Snout Origin') - - split_length_width1_width2 = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': group_input.outputs["Snout Length Width1 Width2"]}, - label='Split Length / Width1 / Width2') - - snout_seg_lengths = nw.new_node(Nodes.VectorMath, - input_kwargs={0: (0.33, 0.33, 0.33), 'Scale': split_length_width1_width2.outputs["X"]}, - label='Snout Seg Lengths', - attrs={'operation': 'SCALE'}) - - bridge = nw.new_node(nodegroup_simple_tube().name, - input_kwargs={'Origin': snout_origin.outputs["Vector"], 'Angles Deg': (-4.0, -4.5, -5.61), 'Seg Lengths': snout_seg_lengths.outputs["Vector"], 'Start Radius': 0.17, 'End Radius': 0.1, 'Fullness': 5.44}, - label='Bridge') - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': bridge.outputs["Geometry"], 'Translation': (0.0, 0.0, 0.03), 'Scale': group_input.outputs["Nose Bridge Scale"]}) - - snout = nw.new_node(nodegroup_simple_tube().name, - input_kwargs={'Origin': snout_origin.outputs["Vector"], 'Angles Deg': (-3.0, -4.5, -5.61), 'Seg Lengths': snout_seg_lengths.outputs["Vector"], 'Start Radius': split_length_width1_width2.outputs["Y"], 'End Radius': split_length_width1_width2.outputs["Z"], 'Fullness': 2.0}, - label='Snout') - - transform_1 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': snout.outputs["Geometry"], 'Translation': (0.0, 0.0, 0.03), 'Scale': (1.0, 0.7, 0.7)}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': 1.0, 'Y': group_input.outputs["Snout Y Scale"], 'Z': 1.0}) - - transform_2 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': transform_1, 'Scale': combine_xyz}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [transform, transform_2]}) - - union = nw.new_node(Nodes.MeshBoolean, - input_kwargs={'Mesh 2': [join_geometry, simple_tube_v2.outputs["Geometry"]], 'Self Intersection': True}, - attrs={'operation': 'UNION'}) - - curve_line_1 = nw.new_node(Nodes.CurveLine, - input_kwargs={'Start': vector, 'End': snout.outputs["Endpoint"]}) - - jaw_muscle = nw.new_node(nodegroup_surface_muscle().name, - input_kwargs={'Skin Mesh': 
union.outputs["Mesh"], 'Skeleton Curve': curve_line_1, 'Coord 0': (0.19, -0.41, 0.78), 'Coord 1': group_input.outputs["Jaw Muscle Middle Coord"], 'Coord 2': (0.67, 1.26, 0.52), 'StartRad, EndRad, Fullness': group_input.outputs["Jaw StartRad, EndRad, Fullness"], 'ProfileHeight, StartTilt, EndTilt': group_input.outputs["Jaw ProfileHeight, StartTilt, EndTilt"]}, - label='Jaw Muscle') - - lip = nw.new_node(nodegroup_surface_muscle().name, - input_kwargs={'Skin Mesh': union.outputs["Mesh"], 'Skeleton Curve': curve_line_1, 'Coord 0': (0.51, -0.13, 0.02), 'Coord 1': group_input.outputs["Lip Muscle Middle Coord"], 'Coord 2': (0.99, 10.57, 0.1), 'StartRad, EndRad, Fullness': group_input.outputs["Lip StartRad, EndRad, Fullness"], 'ProfileHeight, StartTilt, EndTilt': group_input.outputs["Lip ProfileHeight, StartTilt, EndTilt"]}, - label='Lip') - - forehead = nw.new_node(nodegroup_surface_muscle().name, - input_kwargs={'Skin Mesh': simple_tube_v2.outputs["Geometry"], 'Skeleton Curve': simple_tube_v2.outputs["Skeleton Curve"], 'Coord 0': (0.31, -1.06, 0.97), 'Coord 1': group_input.outputs["Forehead Muscle Middle Coord"], 'Coord 2': (0.95, -1.52, 0.9), 'StartRad, EndRad, Fullness': group_input.outputs["Forehead StartRad, EndRad, Fullness"], 'ProfileHeight, StartTilt, EndTilt': group_input.outputs["Forehead ProfileHeight, StartTilt, EndTilt"]}, - label='Forehead') - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [jaw_muscle, lip, forehead]}) - - symmetric_clone = nw.new_node(nodegroup_symmetric_clone().name, - input_kwargs={'Geometry': join_geometry_1}) - - scale = nw.new_node(Nodes.VectorMath, - input_kwargs={0: (0.33, 0.33, 0.33)}, - attrs={'operation': 'SCALE'}) - - jaw_cutter = nw.new_node(nodegroup_simple_tube().name, - input_kwargs={'Origin': (0.0, 0.0, 0.09), 'Angles Deg': (0.0, 0.0, 0.0), 'Seg Lengths': scale.outputs["Vector"], 'Start Radius': 0.13}, - label='Jaw Cutter') - - attach_part = nw.new_node(nodegroup_attach_part().name, - input_kwargs={'Skin Mesh': union.outputs["Mesh"], 'Skeleton Curve': curve_line_1, 'Geometry': jaw_cutter.outputs["Geometry"], 'Length Fac': 0.2, 'Ray Rot': (0.0, 1.5708, 0.0), 'Rad': 1.25, 'Part Rot': (0.0, -8.5, 0.0), 'Do Tangent Rot': True}) - - difference = nw.new_node(Nodes.MeshBoolean, - input_kwargs={'Mesh 1': union.outputs["Mesh"], 'Mesh 2': attach_part.outputs["Geometry"], 'Self Intersection': True}) - - join_geometry_2 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [symmetric_clone.outputs["Both"], difference.outputs["Mesh"]]}) - - subdivide_curve = nw.new_node(Nodes.SubdivideCurve, - input_kwargs={'Curve': curve_line_1, 'Cuts': 10}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': join_geometry_2, 'Skeleton Curve': subdivide_curve, 'Base Mesh': union.outputs["Mesh"], 'Cranium Skeleton': simple_tube_v2.outputs["Skeleton Curve"]}) - -@node_utils.to_nodegroup('nodegroup_rotate2_d', singleton=False, type='ShaderNodeTree') -def nodegroup_rotate2_d(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Value', 0.5), - ('NodeSocketFloat', 'Value', 0.0175), - ('NodeSocketFloat', 'Value2', 0.5)]) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs[2], 1: 0.0175}, - attrs={'operation': 'MULTIPLY'}) - - reroute_3 = nw.new_node(Nodes.Reroute, - input_kwargs={'Input': multiply}) - - sine = nw.new_node(Nodes.Math, - input_kwargs={0: reroute_3}, - 
attrs={'operation': 'SINE'}) - - reroute_5 = nw.new_node(Nodes.Reroute, - input_kwargs={'Input': group_input.outputs[1]}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: sine, 1: reroute_5}, - attrs={'operation': 'MULTIPLY'}) - - reroute_4 = nw.new_node(Nodes.Reroute, - input_kwargs={'Input': group_input.outputs["Value"]}) - - cosine = nw.new_node(Nodes.Math, - input_kwargs={0: reroute_3}, - attrs={'operation': 'COSINE'}) - - multiply_2 = nw.new_node(Nodes.Math, - input_kwargs={0: reroute_4, 1: cosine}, - attrs={'operation': 'MULTIPLY'}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_1, 1: multiply_2}, - attrs={'operation': 'SUBTRACT'}) - - multiply_3 = nw.new_node(Nodes.Math, - input_kwargs={0: reroute_5, 1: cosine}, - attrs={'operation': 'MULTIPLY'}) - - multiply_4 = nw.new_node(Nodes.Math, - input_kwargs={0: reroute_4, 1: sine}, - attrs={'operation': 'MULTIPLY'}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_3, 1: multiply_4}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={"Value": subtract, "Value1": add}) - -@node_utils.to_nodegroup('nodegroup_carnivore_jaw', singleton=False, type='GeometryNodeTree') -def nodegroup_carnivore_jaw(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'length_rad1_rad2', (0.0, 0.0, 0.0)), - ('NodeSocketFloatFactor', 'Width Shaping', 0.6764), - ('NodeSocketFloat', 'Canine Length', 0.05), - ('NodeSocketFloat', 'Incisor Size', 0.01), - ('NodeSocketFloat', 'Tooth Crookedness', 0.0), - ('NodeSocketFloatFactor', 'Tongue Shaping', 1.0), - ('NodeSocketFloat', 'Tongue X Scale', 0.9)]) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': group_input.outputs["length_rad1_rad2"]}) - - scale = nw.new_node(Nodes.VectorMath, - input_kwargs={0: (0.33, 0.33, 0.33), 'Scale': separate_xyz.outputs["X"]}, - attrs={'operation': 'SCALE'}) - - polarbezier = nw.new_node(nodegroup_polar_bezier().name, - input_kwargs={'angles_deg': (0.0, 0.0, 13.0), 'Seg Lengths': scale.outputs["Vector"]}) - - position = nw.new_node(Nodes.InputPosition) - - vector_curves = nw.new_node(Nodes.VectorCurve, - input_kwargs={'Vector': position}) - node_utils.assign_curve(vector_curves.mapping.curves[0], [(-1.0, -1.0), (0.0036, 0.0), (0.2436, 0.21), (1.0, 1.0)]) - node_utils.assign_curve(vector_curves.mapping.curves[1], [(-1.0, 0.12), (-0.7745, 0.06), (-0.6509, -0.44), (-0.3673, -0.4), (-0.0545, -0.01), (0.1055, 0.02), (0.5273, 0.5), (0.7964, 0.64), (1.0, 1.0)], handles=['AUTO', 'AUTO', 'AUTO', 'AUTO_CLAMPED', 'AUTO', 'AUTO', 'VECTOR', 'AUTO', 'AUTO']) - node_utils.assign_curve(vector_curves.mapping.curves[2], [(-1.0, -1.0), (1.0, 1.0)]) - - warped_circle_curve = nw.new_node(nodegroup_warped_circle_curve().name, - input_kwargs={'Position': vector_curves}) - - spline_parameter = nw.new_node(Nodes.SplineParameter) - - float_curve = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Factor': group_input.outputs["Width Shaping"], 'Value': spline_parameter.outputs["Factor"]}) - node_utils.assign_curve(float_curve.mapping.curves[0], [(0.0, 0.955), (0.4255, 0.785), (0.6545, 0.535), (0.9491, 0.75), (1.0, 0.595)], handles=['AUTO', 'AUTO', 'AUTO', 'AUTO_CLAMPED', 'AUTO']) - - smoothtaper = nw.new_node(nodegroup_smooth_taper().name, - input_kwargs={'start_rad': separate_xyz.outputs["Y"], 'end_rad': separate_xyz.outputs["Z"], 'fullness': 2.6}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: float_curve, 1: 
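Traced through, the `nodegroup_rotate2_d` shader group just defined converts its third input from degrees to radians (the 0.0175 factor) and combines the first two inputs with the sine and cosine of that angle. A plain-Python illustration of what the chain evaluates to, not a claim about the Blender API:

```python
import math

def rotate2d(x, y, angle_deg):
    """theta = angle_deg * 0.0175 (~pi/180);
    output "Value"  = y*sin(theta) - x*cos(theta)
    output "Value1" = y*cos(theta) + x*sin(theta)
    Up to the sign of the first output this is a standard 2D rotation of (x, y)."""
    theta = angle_deg * 0.0175
    return (y * math.sin(theta) - x * math.cos(theta),
            y * math.cos(theta) + x * math.sin(theta))

# The eyeball shader feeds in the X and Z of the (noise-perturbed) eye position with angle 30:
print(rotate2d(0.3, 0.1, 30.0))
```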
smoothtaper}, - attrs={'operation': 'MULTIPLY'}) - - profilepart = nw.new_node(nodegroup_profile_part().name, - input_kwargs={'Skeleton Curve': polarbezier.outputs["Curve"], 'Profile Curve': warped_circle_curve, 'Radius Func': multiply}) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': profilepart, 'Scale': (1.0, 1.7, 1.0)}) - - scale_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: (0.33, 0.33, 0.33), 'Scale': group_input.outputs["Canine Length"]}, - attrs={'operation': 'SCALE'}) - - canine_tooth = nw.new_node(nodegroup_simple_tube().name, - input_kwargs={'Seg Lengths': scale_1.outputs["Vector"], 'Start Radius': 0.015, 'End Radius': 0.003}, - label='Canine Tooth') - - attach_part = nw.new_node(nodegroup_attach_part().name, - input_kwargs={'Skin Mesh': transform, 'Skeleton Curve': polarbezier.outputs["Curve"], 'Geometry': canine_tooth.outputs["Geometry"], 'Length Fac': 0.9, 'Ray Rot': (1.5708, 0.1204, 1.5708), 'Rad': 1.0, 'Part Rot': (-17.6, -53.49, 0.0)}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': attach_part.outputs["Geometry"]}) - - symmetric_clone = nw.new_node(nodegroup_symmetric_clone().name, - input_kwargs={'Geometry': join_geometry}) - - add = nw.new_node(Nodes.VectorMath, - input_kwargs={0: attach_part.outputs["Position"], 1: (0.015, -0.05, 0.0)}) - - multiply_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: add.outputs["Vector"], 1: (1.0, -1.0, 1.0)}, - attrs={'operation': 'MULTIPLY'}) - - add_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: add.outputs["Vector"], 1: multiply_1.outputs["Vector"]}) - - multiply_add = nw.new_node(Nodes.VectorMath, - input_kwargs={0: add_1.outputs["Vector"], 1: (0.5, 0.5, 0.5), 2: (-0.02, 0.0, 0.0), 'Scale': 0.5}, - attrs={'operation': 'MULTIPLY_ADD'}) - - quadratic_bezier = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Resolution': 6, 'Start': add.outputs["Vector"], 'Middle': multiply_add.outputs["Vector"], 'End': multiply_1.outputs["Vector"]}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': quadratic_bezier}) - - transform_1 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': curve_to_mesh}) - - scale_2 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: (3.0, 1.0, 0.6), 'Scale': group_input.outputs["Incisor Size"]}, - attrs={'operation': 'SCALE'}) - - cube = nw.new_node(Nodes.MeshCube, - input_kwargs={'Size': scale_2.outputs["Vector"]}) - - subdivision_surface = nw.new_node(Nodes.SubdivisionSurface, - input_kwargs={'Mesh': cube, 'Level': 3}) - - transform_2 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': subdivision_surface}) - - instance_on_points = nw.new_node(Nodes.InstanceOnPoints, - input_kwargs={'Points': transform_1, 'Instance': transform_2, 'Rotation': (0.0, -1.5708, 0.0)}) - - subtract = nw.new_node(Nodes.VectorMath, - input_kwargs={0: (2.0, 2.0, 2.0), 1: group_input.outputs["Tooth Crookedness"]}, - attrs={'operation': 'SUBTRACT'}) - - random_value = nw.new_node(Nodes.RandomValue, - input_kwargs={0: subtract.outputs["Vector"], 1: group_input.outputs["Tooth Crookedness"]}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - scale_instances = nw.new_node(Nodes.ScaleInstances, - input_kwargs={'Instances': instance_on_points, 'Scale': random_value.outputs["Value"]}) - - scale_3 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: (-3.0, -3.0, -3.0), 'Scale': group_input.outputs["Tooth Crookedness"]}, - attrs={'operation': 'SCALE'}) - - scale_4 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: (3.0, 3.0, 3.0), 'Scale': 
group_input.outputs["Tooth Crookedness"]}, - attrs={'operation': 'SCALE'}) - - random_value_1 = nw.new_node(Nodes.RandomValue, - input_kwargs={0: scale_3.outputs["Vector"], 1: scale_4.outputs["Vector"]}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - deg2rad = nw.new_node(nodegroup_deg2_rad().name, - input_kwargs={'Deg': random_value_1.outputs["Value"]}) - - rotate_instances = nw.new_node(Nodes.RotateInstances, - input_kwargs={'Instances': scale_instances, 'Rotation': deg2rad}) - - realize_instances = nw.new_node(Nodes.RealizeInstances, - input_kwargs={'Geometry': rotate_instances}) - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [symmetric_clone.outputs["Both"], realize_instances]}) - - resample_curve = nw.new_node(Nodes.ResampleCurve, - input_kwargs={'Curve': polarbezier.outputs["Curve"]}) - - spline_parameter_1 = nw.new_node(Nodes.SplineParameter) - - float_curve_1 = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Factor': group_input.outputs["Tongue Shaping"], 'Value': spline_parameter_1.outputs["Factor"]}) - node_utils.assign_curve(float_curve_1.mapping.curves[0], [(0.0, 1.0), (0.6982, 0.55), (0.9745, 0.35), (1.0, 0.175)]) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={3: separate_xyz.outputs["Y"], 4: separate_xyz.outputs["Z"]}, - attrs={'clamp': False}) - - multiply_2 = nw.new_node(Nodes.Math, - input_kwargs={0: float_curve_1, 1: map_range.outputs["Result"]}, - attrs={'operation': 'MULTIPLY'}) - - multiply_3 = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_2, 1: 1.0}, - attrs={'operation': 'MULTIPLY'}) - - set_curve_radius = nw.new_node(Nodes.SetCurveRadius, - input_kwargs={'Curve': resample_curve, 'Radius': multiply_3}) - - quadratic_bezier_1 = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Resolution': 3, 'Middle': (0.0, 0.7, 0.0)}) - - curve_to_mesh_1 = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': set_curve_radius, 'Profile Curve': quadratic_bezier_1, 'Fill Caps': True}) - - solidify = nw.new_node(nodegroup_solidify().name, - input_kwargs={'Mesh': curve_to_mesh_1, 'Distance': 0.02}) - - set_shade_smooth = nw.new_node(Nodes.SetShadeSmooth, - input_kwargs={'Geometry': solidify, 'Shade Smooth': False}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': group_input.outputs["Tongue X Scale"], 'Y': 1.0, 'Z': 1.0}) - - transform_3 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': set_shade_smooth, 'Rotation': (0.0, -0.0159, 0.0), 'Scale': combine_xyz}) - - subdivision_surface_1 = nw.new_node(Nodes.SubdivisionSurface, - input_kwargs={'Mesh': transform_3, 'Level': 2}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': transform, 'Skeleton Curve': polarbezier.outputs["Curve"], 'Teeth': join_geometry_1, 'Tongue': subdivision_surface_1}) - -@node_utils.to_nodegroup('nodegroup_deg2_rad', singleton=False, type='GeometryNodeTree') -def nodegroup_deg2_rad(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'Deg', (0.0, 0.0, 0.0))]) - - multiply = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["Deg"], 1: (0.0175, 0.0175, 0.0175)}, - attrs={'operation': 'MULTIPLY'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Rad': multiply.outputs["Vector"]}) - -@node_utils.to_nodegroup('nodegroup_cat_ear', singleton=False, type='GeometryNodeTree') -def nodegroup_cat_ear(nw: NodeWrangler): - # Code generated using version 2.4.3 of the 
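Several of these groups, including `nodegroup_deg2_rad` just defined, convert degrees to radians by scaling with the baked-in constant 0.0175, a rounded pi/180 (~0.01745, so roughly 0.3% high). A one-line reference check:

```python
import math

DEG2RAD_NODE = 0.0175                      # constant baked into the transpiled node graphs
print(math.pi / 180)                       # 0.017453..., the exact factor
print(math.radians(30), 30 * DEG2RAD_NODE) # 0.5236 vs 0.525
```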
node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'length_rad1_rad2', (0.0, 0.0, 0.0)), - ('NodeSocketFloat', 'Depth', 0.0), - ('NodeSocketFloatDistance', 'Thickness', 0.0), - ('NodeSocketFloatDistance', 'Curl Deg', 0.0)]) - - multiply = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["Curl Deg"], 1: (-1.0, 1.0, 1.0)}, - attrs={'operation': 'MULTIPLY'}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': group_input.outputs["length_rad1_rad2"]}) - - divide = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz.outputs["X"], 1: 3.0}, - attrs={'operation': 'DIVIDE'}) - - polarbezier = nw.new_node(nodegroup_polar_bezier().name, - input_kwargs={'Origin': (-0.07, 0.0, 0.0), 'angles_deg': multiply.outputs["Vector"], 'Seg Lengths': divide}) - - spline_parameter = nw.new_node(Nodes.SplineParameter) - - float_curve = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': spline_parameter.outputs["Factor"]}) - node_utils.assign_curve(float_curve.mapping.curves[0], [(0.0, 0.0), (0.3236, 0.98), (0.7462, 0.63), (1.0, 0.0)]) - - set_curve_radius = nw.new_node(Nodes.SetCurveRadius, - input_kwargs={'Curve': polarbezier.outputs["Curve"], 'Radius': float_curve}) - - set_curve_tilt = nw.new_node(Nodes.SetCurveTilt, - input_kwargs={'Curve': set_curve_radius}) - - multiply_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: separate_xyz.outputs["Y"], 1: (-0.5, 0.0, 0.0)}, - attrs={'operation': 'MULTIPLY'}) - - multiply_2 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["Depth"], 1: (0.0, -1.0, 0.0)}, - attrs={'operation': 'MULTIPLY'}) - - multiply_3 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: separate_xyz.outputs["Y"], 1: (0.5, 0.0, 0.0)}, - attrs={'operation': 'MULTIPLY'}) - - quadratic_bezier = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Start': multiply_1.outputs["Vector"], 'Middle': multiply_2.outputs["Vector"], 'End': multiply_3.outputs["Vector"]}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': set_curve_tilt, 'Profile Curve': quadratic_bezier}) - - solidify = nw.new_node(nodegroup_solidify().name, - input_kwargs={'Mesh': curve_to_mesh, 'Distance': group_input.outputs["Thickness"]}) - - merge_by_distance = nw.new_node(Nodes.MergeByDistance, - input_kwargs={'Geometry': solidify, 'Distance': 0.005}) - - subdivision_surface = nw.new_node(Nodes.SubdivisionSurface, - input_kwargs={'Mesh': merge_by_distance}) - - set_shade_smooth = nw.new_node(Nodes.SetShadeSmooth, - input_kwargs={'Geometry': subdivision_surface, 'Shade Smooth': False}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Mesh': set_shade_smooth}) - -@node_utils.to_nodegroup('nodegroup_symmetric_clone', singleton=False, type='GeometryNodeTree') -def nodegroup_symmetric_clone(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketVectorXYZ', 'Scale', (1.0, -1.0, 1.0))]) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 'Scale': group_input.outputs["Scale"]}) - - flip_faces = nw.new_node(Nodes.FlipFaces, - input_kwargs={'Mesh': transform}) - - join_geometry_2 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [group_input.outputs["Geometry"], flip_faces]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Both': join_geometry_2, 'Orig': 
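`nodegroup_symmetric_clone` above mirrors a part by scaling with (1, -1, 1), flipping the faces so the mirrored half keeps outward-facing normals, and joining both halves. The same idea on a raw triangle mesh, as a hedged NumPy sketch (not the Blender implementation):

```python
import numpy as np

def symmetric_clone(verts, faces, scale=(1.0, -1.0, 1.0)):
    """Mimic Transform(scale) + FlipFaces + JoinGeometry: mirror across Y by
    default and reverse the winding of the mirrored faces."""
    verts = np.asarray(verts, dtype=float)
    faces = np.asarray(faces, dtype=int)
    mirrored_verts = verts * np.asarray(scale)
    mirrored_faces = faces[:, ::-1] + len(verts)   # reversed winding == flipped normals
    return np.vstack([verts, mirrored_verts]), np.vstack([faces, mirrored_faces])

v = [(0.0, 0.2, 0.0), (1.0, 0.2, 0.0), (0.0, 0.2, 1.0)]
f = [(0, 1, 2)]
print(symmetric_clone(v, f))
```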
group_input.outputs["Geometry"], 'Inverted': flip_faces}) - -@node_utils.to_nodegroup('nodegroup_cat_nose', singleton=False, type='GeometryNodeTree') -def nodegroup_cat_nose(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloatDistance', 'Nose Radius', 0.06), - ('NodeSocketFloatDistance', 'Nostril Size', 0.025), - ('NodeSocketFloatFactor', 'Crease', 0.008), - ('NodeSocketVectorXYZ', 'Scale', (1.2, 1.0, 1.0))]) - - cube = nw.new_node(Nodes.MeshCube, - input_kwargs={'Size': group_input.outputs["Nose Radius"]}) - - subdivision_surface = nw.new_node(Nodes.SubdivisionSurface, - input_kwargs={'Mesh': cube, 'Level': 3}) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': subdivision_surface, 'Scale': group_input.outputs["Scale"]}) - - uv_sphere = nw.new_node(Nodes.MeshUVSphere, - input_kwargs={'Radius': group_input.outputs["Nostril Size"]}) - - transform_1 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': uv_sphere, 'Translation': (0.04, 0.025, 0.015), 'Rotation': (0.5643, 0.0, 0.0), 'Scale': (1.0, 0.87, 0.31)}) - - symmetric_clone = nw.new_node(nodegroup_symmetric_clone().name, - input_kwargs={'Geometry': transform_1}) - - difference = nw.new_node(Nodes.MeshBoolean, - input_kwargs={'Mesh 1': transform, 'Mesh 2': symmetric_clone.outputs["Both"], 'Self Intersection': True}) - - taper = nw.new_node(nodegroup_taper().name, - input_kwargs={'Geometry': difference.outputs["Mesh"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': taper}) - -@node_utils.to_nodegroup('nodegroup_attach_part', singleton=False, type='GeometryNodeTree') -def nodegroup_attach_part(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Skin Mesh', None), - ('NodeSocketGeometry', 'Skeleton Curve', None), - ('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketFloatFactor', 'Length Fac', 0.0), - ('NodeSocketVectorEuler', 'Ray Rot', (0.0, 0.0, 0.0)), - ('NodeSocketFloat', 'Rad', 0.0), - ('NodeSocketVector', 'Part Rot', (0.0, 0.0, 0.0)), - ('NodeSocketBool', 'Do Normal Rot', False), - ('NodeSocketBool', 'Do Tangent Rot', False)]) - - part_surface = nw.new_node(nodegroup_part_surface().name, - input_kwargs={'Skeleton Curve': group_input.outputs["Skeleton Curve"], 'Skin Mesh': group_input.outputs["Skin Mesh"], 'Length Fac': group_input.outputs["Length Fac"], 'Ray Rot': group_input.outputs["Ray Rot"], 'Rad': group_input.outputs["Rad"]}) - - deg2rad = nw.new_node(nodegroup_deg2_rad().name, - input_kwargs={'Deg': group_input.outputs["Part Rot"]}) - - raycast_rotation = nw.new_node(nodegroup_raycast_rotation().name, - input_kwargs={'Rotation': deg2rad, 'Hit Normal': part_surface.outputs["Hit Normal"], 'Curve Tangent': part_surface.outputs["Tangent"], 'Do Normal Rot': group_input.outputs["Do Normal Rot"], 'Do Tangent Rot': group_input.outputs["Do Tangent Rot"]}) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 'Translation': part_surface.outputs["Position"], 'Rotation': raycast_rotation}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': transform, 'Position': part_surface.outputs["Position"], 'Rotation': raycast_rotation}) - -@node_utils.to_nodegroup('nodegroup_carnivore_head', singleton=False, type='GeometryNodeTree') -def nodegroup_carnivore_head(nw: NodeWrangler): - # 
Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'length_rad1_rad2', (0.5, 0.0, 0.0)), - ('NodeSocketVector', 'snout_length_rad1_rad2', (0.0, 0.0, 0.0)), - ('NodeSocketFloat', 'Snout Y Scale', 0.62), - ('NodeSocketVector', 'eye_coord', (0.96, -0.95, 0.79)), - ('NodeSocketVectorXYZ', 'Nose Bridge Scale', (1.0, 0.35, 0.9)), - ('NodeSocketVector', 'Jaw Muscle Middle Coord', (0.24, 0.41, 1.3)), - ('NodeSocketVector', 'Jaw StartRad, EndRad, Fullness', (0.06, 0.11, 1.5)), - ('NodeSocketVector', 'Jaw ProfileHeight, StartTilt, EndTilt', (0.8, 33.1, 0.0)), - ('NodeSocketVector', 'Lip Muscle Middle Coord', (0.95, 0.0, 1.5)), - ('NodeSocketVector', 'Lip StartRad, EndRad, Fullness', (0.05, 0.09, 1.48)), - ('NodeSocketVector', 'Lip ProfileHeight, StartTilt, EndTilt', (0.8, 0.0, -17.2)), - ('NodeSocketVector', 'Forehead Muscle Middle Coord', (0.7, -1.32, 1.31)), - ('NodeSocketVector', 'Forehead StartRad, EndRad, Fullness', (0.06, 0.05, 2.5)), - ('NodeSocketVector', 'Forehead ProfileHeight, StartTilt, EndTilt', (0.3, 60.6, 66.0)), - ('NodeSocketFloat', 'aspect', 1.0)]) - - carnivore_face_structure = nw.new_node(nodegroup_carnivore__face_structure().name, - input_kwargs={'Skull Length Width1 Width2': group_input.outputs["length_rad1_rad2"], 'Snout Length Width1 Width2': group_input.outputs["snout_length_rad1_rad2"], 'Snout Y Scale': group_input.outputs["Snout Y Scale"], 'Nose Bridge Scale': group_input.outputs["Nose Bridge Scale"], 'Jaw Muscle Middle Coord': group_input.outputs["Jaw Muscle Middle Coord"], 'Jaw StartRad, EndRad, Fullness': group_input.outputs["Jaw StartRad, EndRad, Fullness"], 'Jaw ProfileHeight, StartTilt, EndTilt': group_input.outputs["Jaw ProfileHeight, StartTilt, EndTilt"], 'Lip Muscle Middle Coord': group_input.outputs["Lip Muscle Middle Coord"], 'Lip StartRad, EndRad, Fullness': group_input.outputs["Lip StartRad, EndRad, Fullness"], 'Lip ProfileHeight, StartTilt, EndTilt': group_input.outputs["Lip ProfileHeight, StartTilt, EndTilt"], 'Forehead Muscle Middle Coord': group_input.outputs["Forehead Muscle Middle Coord"], 'Forehead StartRad, EndRad, Fullness': group_input.outputs["Forehead StartRad, EndRad, Fullness"], 'Forehead ProfileHeight, StartTilt, EndTilt': group_input.outputs["Forehead ProfileHeight, StartTilt, EndTilt"], 'aspect': group_input.outputs["aspect"]}) - - tigereye = nw.new_node(nodegroup_eyeball_eyelid().name, - input_kwargs={ - 'Skin Mesh': carnivore_face_structure.outputs["Geometry"], - 'Base Mesh': carnivore_face_structure.outputs["Base Mesh"], - 'Skeleton Curve': carnivore_face_structure.outputs["Cranium Skeleton"], - 'Length/Yaw/Rad': group_input.outputs["eye_coord"], - 'Target Geometry': carnivore_face_structure.outputs["Geometry"], - 'EyelidCircleShape(W, H)': (2.0, 1.35, 0.0), - 'CorneaScale(W, H, Thickness)': (0.8, 0.8, 0.7), - 'DefaultAppendDistance': 0.002, - 'EyelidScale': (1.1, 1.6, 1.6), - 'Scale': 1.0, - }) - - difference = nw.new_node(Nodes.MeshBoolean, - input_kwargs={'Mesh 1': carnivore_face_structure.outputs["Geometry"], 'Mesh 2': tigereye.outputs["ParentCutter"], 'Self Intersection': True}) - - symmetric_clone = nw.new_node(nodegroup_symmetric_clone().name, - input_kwargs={'Geometry': tigereye.outputs["Eyeballl"]}) - - symmetric_clone_1 = nw.new_node(nodegroup_symmetric_clone().name, - input_kwargs={'Geometry': tigereye.outputs["BodyExtra_Lid"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': difference.outputs["Mesh"], 
'Skeleton Curve': carnivore_face_structure.outputs["Skeleton Curve"], 'Base Mesh': carnivore_face_structure.outputs["Base Mesh"], 'LeftEye': symmetric_clone.outputs["Orig"], 'RightEye': symmetric_clone.outputs["Inverted"], 'Eyelid': symmetric_clone_1.outputs["Both"]}) - -def shader_eyeball_tiger(nw: NodeWrangler, **input_kwargs): - # Code generated using version 2.4.3 of the node_transpiler - - attribute_2 = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'tag_cornea'}) - - attribute_1 = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'EyeballPosition'}) - - reroute_8 = nw.new_node(Nodes.Reroute, - input_kwargs={'Input': attribute_1.outputs["Color"]}) - - reroute = nw.new_node(Nodes.Reroute, - input_kwargs={'Input': reroute_8}) - - noise_texture_2 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': reroute, 'Scale': 50.0}) - - mix_3 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': 0.02, 'Color1': reroute, 'Color2': noise_texture_2.outputs["Color"]}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': mix_3}) - - value = nw.new_node(Nodes.Value) - value.outputs[0].default_value = 30.0 - - group = nw.new_node(nodegroup_rotate2_d().name, - input_kwargs={0: separate_xyz.outputs["X"], 1: separate_xyz.outputs["Z"], 2: value}) - - w_offset = U(0, 0.2) - iris_scale = U(0.4, 0.8) - scale2 = iris_scale*1.7+N(0, 0.05) - scale3 = iris_scale*1.75+N(0, 0.05) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: group.outputs[1], 1: iris_scale}, - attrs={'operation': 'MULTIPLY'}) - - reroute_2 = nw.new_node(Nodes.Reroute, - input_kwargs={'Input': multiply}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: reroute_2, 1: reroute_2}, - attrs={'operation': 'MULTIPLY'}) - - multiply_2 = nw.new_node(Nodes.Math, - input_kwargs={0: group.outputs["Value"], 1: iris_scale+w_offset}, - attrs={'operation': 'MULTIPLY'}) - - reroute_1 = nw.new_node(Nodes.Reroute, - input_kwargs={'Input': multiply_2}) - - multiply_3 = nw.new_node(Nodes.Math, - input_kwargs={0: reroute_1, 1: reroute_1}, - attrs={'operation': 'MULTIPLY'}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_1, 1: multiply_3}) - - add_1 = nw.new_node(Nodes.Math, - input_kwargs={0: add, 1: 0.63}) - - colorramp = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': add_1}) - colorramp.color_ramp.elements[0].position = 0.64 - colorramp.color_ramp.elements[0].color = (1.0, 1.0, 1.0, 1.0) - colorramp.color_ramp.elements[1].position = 0.6591 - colorramp.color_ramp.elements[1].color = (0.0, 0.0, 0.0, 1.0) - - mapping_1 = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': reroute_8, 'Scale': (1.0, U(1, 200), 1.0)}, - attrs={'vector_type': 'NORMAL'}) - - mix_4 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': U(0.2, 0.4), 'Color1': mapping_1, 'Color2': reroute_8}) - - reroute_3 = nw.new_node(Nodes.Reroute, - input_kwargs={'Input': mix_4}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': reroute_3, 'Scale': 10.0}) - - mix = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': U(0.5, 0.9), 'Color1': noise_texture.outputs["Fac"], 'Color2': reroute_3}) - - voronoi_texture = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': mix, 'Scale': 10.0+N(0, 2)}) - - multiply_4 = nw.new_node(Nodes.Math, - input_kwargs={0: voronoi_texture.outputs["Distance"], 1: voronoi_texture.outputs["Distance"], 2: 0.0}, - attrs={'operation': 'MULTIPLY'}) - - bright_contrast = nw.new_node('ShaderNodeBrightContrast', - input_kwargs={'Color': multiply_4, 'Bright': 0.6, 'Contrast': U(0.8, 
1.2)}) - - multiply_5 = nw.new_node(Nodes.Math, - input_kwargs={0: group.outputs[1], 1: scale3}, - attrs={'operation': 'MULTIPLY'}) - - reroute_6 = nw.new_node(Nodes.Reroute, - input_kwargs={'Input': multiply_5}) - - multiply_6 = nw.new_node(Nodes.Math, - input_kwargs={0: reroute_6, 1: reroute_6}, - attrs={'operation': 'MULTIPLY'}) - - multiply_7 = nw.new_node(Nodes.Math, - input_kwargs={0: group.outputs["Value"], 1: scale3+w_offset}, - attrs={'operation': 'MULTIPLY'}) - - reroute_7 = nw.new_node(Nodes.Reroute, - input_kwargs={'Input': multiply_7}) - - multiply_8 = nw.new_node(Nodes.Math, - input_kwargs={0: reroute_7, 1: reroute_7}, - attrs={'operation': 'MULTIPLY'}) - - add_2 = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_6, 1: multiply_8}) - - add_3 = nw.new_node(Nodes.Math, - input_kwargs={0: add_2, 1: 0.18}) - - colorramp_3 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': add_3}) - colorramp_3.color_ramp.elements[0].position = 0.5955 - colorramp_3.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) - colorramp_3.color_ramp.elements[1].position = 1.0 - colorramp_3.color_ramp.elements[1].color = (0.7896, 0.7896, 0.7896, 1.0) - - add_4 = nw.new_node(Nodes.Math, - input_kwargs={0: bright_contrast, 1: colorramp_3.outputs["Color"]}) - - multiply_9 = nw.new_node(Nodes.Math, - input_kwargs={0: group.outputs[1], 1: scale2}, - attrs={'operation': 'MULTIPLY'}) - - reroute_4 = nw.new_node(Nodes.Reroute, - input_kwargs={'Input': multiply_9}) - - multiply_10 = nw.new_node(Nodes.Math, - input_kwargs={0: reroute_4, 1: reroute_4}, - attrs={'operation': 'MULTIPLY'}) - - multiply_11 = nw.new_node(Nodes.Math, - input_kwargs={0: group.outputs["Value"], 1: scale2+w_offset}, - attrs={'operation': 'MULTIPLY'}) - - reroute_5 = nw.new_node(Nodes.Reroute, - input_kwargs={'Input': multiply_11}) - - multiply_12 = nw.new_node(Nodes.Math, - input_kwargs={0: reroute_5, 1: reroute_5}, - attrs={'operation': 'MULTIPLY'}) - - add_5 = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_10, 1: multiply_12}) - - add_6 = nw.new_node(Nodes.Math, - input_kwargs={0: add_5}) - - colorramp_1 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': add_6}) - colorramp_1.color_ramp.elements[0].position = 0.6159 - colorramp_1.color_ramp.elements[0].color = (1.0, 1.0, 1.0, 1.0) - colorramp_1.color_ramp.elements[1].position = 0.6591 - colorramp_1.color_ramp.elements[1].color = (0.0, 0.0, 0.0, 1.0) - - colorramp_2 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': add_3}) - colorramp_2.color_ramp.elements[0].position = 0.4773 - colorramp_2.color_ramp.elements[0].color = (1.0, 1.0, 1.0, 1.0) - colorramp_2.color_ramp.elements[1].position = 0.6659 - colorramp_2.color_ramp.elements[1].color = (0.0, 0.0, 0.0, 1.0) - - mix_7 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': colorramp_2.outputs["Color"], 'Color1': (U(0.5, 0.9), U(0.3, 0.8), U(0.3, 0.7), 1.0), 'Color2': (U(0.2, 0.6), U(0.15, 0.6), U(0.1, 0.4), 1.0)}) - - mix_6 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': colorramp_1.outputs["Color"], 'Color1': mix_7, 'Color2': (U(0.1, 0.55), U(0.1, 0.55), U(0.1, 0.55), 1.0)}) - - color1 = max(0, N(0.125, 0.05)) - mix_5 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': add_4, 'Color1': (color1, U(0, color1), U(0, color1), 1.0), 'Color2': mix_6}) - - mix_2 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': colorramp.outputs["Color"], 'Color1': mix_5, 'Color2': (0.0, 0.0, 0.0, 1.0)}) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': mix_2, 'Specular': 0.0, 'Roughness': 0.0}) - - 
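The eyeball shader builds its concentric regions (pupil, iris, outer eye) from one quantity: the two `rotate2_d` outputs are scaled, squared, summed, and offset, and each `ColorRamp` then thresholds that squared radius. Because the two components get slightly different scales (`iris_scale` vs `iris_scale + w_offset`), the iso-curves are ellipses rotated by the 30-degree angle. A standalone sketch of that masking logic (function and parameter names are illustrative):

```python
import math

def ring_mask(x, z, scale, w_offset, bias, threshold, angle_deg=30.0):
    """Squared elliptical 'radius' as in the shader; the ColorRamp acts as a
    near-hard step on this value."""
    theta = angle_deg * 0.0175
    u = z * math.sin(theta) - x * math.cos(theta)        # rotate2_d output "Value"
    v = z * math.cos(theta) + x * math.sin(theta)        # rotate2_d output "Value1"
    r2 = (v * scale) ** 2 + (u * (scale + w_offset)) ** 2 + bias
    return r2 < threshold                                 # inside vs outside the region

# e.g. the outermost ramp flips around r2 + 0.63 in the 0.64-0.66 band:
print(ring_mask(0.05, 0.02, scale=0.6, w_offset=0.1, bias=0.63, threshold=0.65))
```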
principled_bsdf_1 = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Specular': 1.0, 'Roughness': 0.0, 'IOR': 1.35, 'Transmission': 1.0}) - - transparent_bsdf = nw.new_node(Nodes.TransparentBSDF) - - mix_shader = nw.new_node(Nodes.MixShader, - input_kwargs={'Fac': 0.1577, 1: principled_bsdf_1, 2: transparent_bsdf}) - - mix_shader_1 = nw.new_node(Nodes.MixShader, - input_kwargs={'Fac': attribute_2.outputs["Color"], 1: principled_bsdf, 2: mix_shader}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': mix_shader_1}) - -def geometry_tiger_head(nw: NodeWrangler, input_kwargs={}): - # Code generated using version 2.4.3 of the node_transpiler - - carnivorehead = nw.new_node(nodegroup_carnivore_head().name, - input_kwargs={'length_rad1_rad2': (0.36, 0.2, 0.18), 'snout_length_rad1_rad2': (0.25, 0.15, 0.15), 'eye_coord': (0.96, -0.85, 0.79), 'Lip Muscle Middle Coord': (0.95, -0.45, 2.03)}) - - nose_radius = nw.new_node(nodegroup_cat_nose().name, - input_kwargs={'Nose Radius': 0.11, 'Nostril Size': 0.03, 'Crease': 0.237}, - label='NoseRadius ~ N(0.1, 0.02)') - - attach_nose = nw.new_node(nodegroup_attach_part().name, - input_kwargs={'Skin Mesh': carnivorehead.outputs["Base Mesh"], 'Skeleton Curve': carnivorehead.outputs["Skeleton Curve"], 'Geometry': nose_radius, 'Length Fac': 0.9017, 'Ray Rot': (0.0, -1.3277, 0.0), 'Rad': 0.56, 'Part Rot': (0.0, 26.86, 0.0), 'Do Normal Rot': True, 'Do Tangent Rot': True}, - label='Attach Nose') - - cat_ear = nw.new_node(nodegroup_cat_ear().name, - input_kwargs={'length_rad1_rad2': (0.2, 0.1, 0.0), 'Depth': 0.06, 'Thickness': 0.01, 'Curl Deg': 49.0}) - - deg2rad = nw.new_node(nodegroup_deg2_rad().name, - input_kwargs={'Deg': (90.0, -44.0, 90.0)}) - - attach_ear = nw.new_node(nodegroup_attach_part().name, - input_kwargs={'Skin Mesh': carnivorehead.outputs["Base Mesh"], 'Skeleton Curve': carnivorehead.outputs["Skeleton Curve"], 'Geometry': cat_ear, 'Length Fac': 0.3328, 'Ray Rot': deg2rad, 'Rad': 1.0, 'Part Rot': (-43.3, -9.5, -29.6), 'Do Normal Rot': True}, - label='Attach Ear') - - symmetric_clone = nw.new_node(nodegroup_symmetric_clone().name, - input_kwargs={'Geometry': attach_ear.outputs["Geometry"]}) - - carnivore_jaw = nw.new_node(nodegroup_carnivore_jaw().name, - input_kwargs={'length_rad1_rad2': (0.4, 0.12, 0.08), 'Width Shaping': 1.0, 'Tooth Crookedness': 1.2}) - - join_geometry_3 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [carnivore_jaw.outputs["Geometry"], carnivore_jaw.outputs["Teeth"], carnivore_jaw.outputs["Tongue"]]}) - - attach_jaw = nw.new_node(nodegroup_attach_part().name, - input_kwargs={'Skin Mesh': carnivorehead.outputs["Base Mesh"], 'Skeleton Curve': carnivorehead.outputs["Skeleton Curve"], 'Geometry': join_geometry_3, 'Length Fac': 0.2, 'Ray Rot': (0.0, 1.5708, 0.0), 'Rad': 0.36, 'Part Rot': (0.0, 21.1, 0.0), 'Do Normal Rot': True, 'Do Tangent Rot': True}, - label='Attach Jaw') - - join_geometry_2 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [carnivorehead.outputs["Geometry"], attach_nose.outputs["Geometry"], carnivorehead.outputs["LeftEye"], symmetric_clone.outputs["Both"], attach_jaw.outputs["Geometry"], carnivorehead.outputs["RightEye"], carnivorehead.outputs["Eyelid"]]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': join_geometry_2}) - -class Eye(PartFactory): - - tags = ['head_detail', 'eye_socket'] - - def sample_params(self): - return { - 'Skin Mesh': None, - 'Base Mesh': None, - 'Skeleton Curve': None, - 'Length/Yaw/Rad': (0.5, 0.0, 
1.0), - 'Target Geometry': None, - 'EyeRot': -U(15, 35), - 'EyelidCircleShape(W, H)': (2.0, U(1.3, 1.5), 0.0), - 'EyelidRadiusShape(Out, In1, In2)': (0.4, 5.3, 0.4), - 'EyelidResolution(Circle, Radius)': (32.0, 32.0, 0.0), - 'CorneaScale(W, H, Thickness)': (0.8, 0.8, 0.55), - 'EyeballResolution(White, Cornea)': (32.0, 128.0, 0.0), - 'OffsetPreAppending': (0.012, 0.0, 0.0), - 'Scale': (0.9, 1.1), - 'Rotation': (0.1745, 0.0, -1.3963), - 'RayDirection': (-1.0, 0.0, 0.0), - 'DefaultAppendDistance': -0.002, - } - - def sample_params_fish(self): - return { - 'Skin Mesh': None, - 'Base Mesh': None, - 'Skeleton Curve': None, - 'Length/Yaw/Rad': (0.8800, -0.6000, 1.0000), - 'Target Geometry': None, - 'EyeRot': 0.0000, - 'EyelidCircleShape(W, H)': (2.0000, 1.0000, 0.0000), - 'EyelidRadiusShape(Out, In1, In2)': (0.4000, 5.3000, 0.3000), - 'CorneaScale(W, H, Thickness)': (0.8000, 0.8000, 0.8500), - 'OffsetPreAppending': (0.0000, 0.0100, 0.0000), - 'Scale': 1.5000, - 'Rotation': (0.0873, 0.0000, -0.2618), - 'RayDirection': (-0.3000, -0.8000, 0.0000), - 'DefaultAppendDistance': 0.0050, - 'EyeSocketRot': (0.0000, 0.0000, 80.0000) - } - - def make_part(self, params): - part = part_util.nodegroup_to_part(nodegroup_eyeball_eyelid, params) - return part - -def apply(obj, geo_kwargs=None, shader_kwargs=None, **kwargs): - surface.add_geomod(obj, geometry_tiger_head, apply=False, input_kwargs=geo_kwargs) - -if __name__ == "__main__": - mat = 'tigereye' - for i in range(1): - bpy.ops.wm.open_mainfile(filepath='test.blend') - apply(bpy.data.objects['SolidModel'], geo_kwargs={}, shader_kwargs={}) - fn = os.path.join(os.path.abspath(os.curdir), 'tigereye_test.blend') - bpy.ops.wm.save_as_mainfile(filepath=fn) - #bpy.context.scene.render.filepath = os.path.join('surfaces/surface_thumbnails', '%s_%d.jpg'%(mat, i)) - #bpy.context.scene.render.image_settings.file_format='JPEG' - #bpy.ops.render.render(write_still=True) \ No newline at end of file diff --git a/infinigen/assets/creatures/parts/fin_old.py b/infinigen/assets/creatures/parts/fin_old.py deleted file mode 100644 index 510e5bf77..000000000 --- a/infinigen/assets/creatures/parts/fin_old.py +++ /dev/null @@ -1,114 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
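The `Eye` class above follows the creature `PartFactory` contract used throughout these deleted modules: `sample_params` draws a randomized parameter dict (the `U`/`N` calls are uniform/normal samples) and `make_part` hands the node group plus those parameters to `part_util.nodegroup_to_part`. A plain-Python sketch of that contract, with all names below hypothetical rather than the real infinigen classes:

```python
import random

class MinimalPartFactory:
    """Illustrative stand-in for the PartFactory pattern seen above:
    subclasses provide sample_params() and make_part()."""

    def sample_params(self):
        # Randomized socket values, analogous to 'EyeRot': -U(15, 35) above.
        return {"EyeRot": -random.uniform(15, 35), "Scale": 1.0}

    def make_part(self, params):
        # The real implementation calls part_util.nodegroup_to_part(nodegroup, params)
        # inside Blender; here we just return the inputs to show the flow.
        return {"nodegroup": "nodegroup_eyeball_eyelid", "params": params}

factory = MinimalPartFactory()
print(factory.make_part(factory.sample_params()))
```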
- -# Authors: Alexander Raistrick - - -import bpy - -import numpy as np -from numpy.random import uniform, normal - -from infinigen.assets.creatures.util.genome import Joint, IKParams - -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.assets.creatures.util.nodegroups.curve import nodegroup_simple_tube_v2 -from infinigen.assets.creatures.util.nodegroups.attach import nodegroup_attach_part - - -from infinigen.assets.creatures.util.creature import PartFactory -from infinigen.assets.creatures.util.part_util import nodegroup_to_part -from infinigen.core.tagging import tag_object, tag_nodegroup - -@node_utils.to_nodegroup('nodegroup_fish_fin', singleton=False, type='GeometryNodeTree') -def nodegroup_fish_fin(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'length_rad1_rad2', (0.34, 0.07, 0.04)), - ('NodeSocketVector', 'angles_deg', (0.0, 0.0, 0.0)), - ('NodeSocketVector', 'proportions', (0.3333, 0.3333, 0.3333)), - ('NodeSocketFloat', 'aspect', 2.65), - ('NodeSocketFloat', 'fullness', 4.0)]) - - simple_tube_v2 = nw.new_node(nodegroup_simple_tube_v2().name, - input_kwargs={'length_rad1_rad2': group_input.outputs["length_rad1_rad2"], 'angles_deg': group_input.outputs["angles_deg"], 'proportions': group_input.outputs["proportions"], 'aspect': group_input.outputs["aspect"], 'fullness': group_input.outputs["fullness"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': simple_tube_v2.outputs["Geometry"], 'Skeleton Curve': simple_tube_v2.outputs["Skeleton Curve"], 'Endpoint': simple_tube_v2.outputs["Endpoint"]}) - -class FishFin(PartFactory): - - tags = ['limb', 'fin'] - - def sample_params(self): - return { - 'length_rad1_rad2': (0.34, 0.07, 0.04), - 'angles_deg': (0.0, 0.0, 0.0), - 'proportions': (0.3333, 0.3333, 0.3333), - 'aspect': 2.65, - 'fullness': 4. 
- } - - def make_part(self, params): - part = nodegroup_to_part(nodegroup_fish_fin, params) - part.joints = { - 0: Joint(rest=(0,0,0), bounds=np.array([[-35, 0, -70], [35, 0, 70]])), # shoulder - 0.6: Joint(rest=(0,0,0), bounds=np.array([[-35, 0, -70], [35, 0, 70]])) # elbow - } - tag_object(part.obj, 'fish_fin') - return part - -@node_utils.to_nodegroup('nodegroup_fish_tail', singleton=False, type='GeometryNodeTree') -def nodegroup_fish_tail(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'length_rad1_rad2', (0.5, 0.18, 0.04)), - ('NodeSocketVector', 'angles_deg', (0.0, -4.6, 0.0)), - ('NodeSocketFloat', 'aspect', 0.46), - ('NodeSocketFloat', 'fullness', 4.0)]) - - simple_tube_v2 = nw.new_node(nodegroup_simple_tube_v2().name, - input_kwargs={'length_rad1_rad2': group_input.outputs["length_rad1_rad2"], 'angles_deg': group_input.outputs["angles_deg"], 'aspect': group_input.outputs["aspect"], 'fullness': group_input.outputs["fullness"], 'Origin': (-0.07, 0.0, 0.0)}) - - fishfin = nw.new_node(nodegroup_fish_fin().name, - input_kwargs={'length_rad1_rad2': (0.34, 0.07, 0.11), 'aspect': 4.7}) - - attach_part = nw.new_node(nodegroup_attach_part().name, - input_kwargs={'Skin Mesh': simple_tube_v2.outputs["Geometry"], 'Skeleton Curve': simple_tube_v2.outputs["Skeleton Curve"], 'Geometry': fishfin.outputs["Geometry"], 'Length Fac': 0.775, 'Part Rot': (90.0, -20.7, 0.0)}) - - fishfin_1 = nw.new_node(nodegroup_fish_fin().name, - input_kwargs={'length_rad1_rad2': (0.34, 0.07, 0.11), 'aspect': 4.7}) - - attach_part_1 = nw.new_node(nodegroup_attach_part().name, - input_kwargs={'Skin Mesh': simple_tube_v2.outputs["Geometry"], 'Skeleton Curve': simple_tube_v2.outputs["Skeleton Curve"], 'Geometry': fishfin_1.outputs["Geometry"], 'Length Fac': 0.775, 'Part Rot': (90.0, 18.64, 0.0)}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [attach_part.outputs["Geometry"], simple_tube_v2.outputs["Geometry"], attach_part_1.outputs["Geometry"]]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': join_geometry, 'Skeleton Curve': simple_tube_v2.outputs["Skeleton Curve"], 'Endpoint': simple_tube_v2.outputs["Endpoint"]}) - -class FishTail(PartFactory): - - tags = ['tail'] - - def sample_params(self): - return { - 'length_rad1_rad2': (0.5, 0.18, 0.04), - 'angles_deg': (0.0, -4.6, 0.0), - 'aspect': 0.46, - 'fullness': 4. - } - - def make_part(self, params): - part = nodegroup_to_part(nodegroup_fish_tail, params) - part.joints = { - t: Joint(rest=(0,0,0), bounds=np.array([[-35, 0, -70], [35, 0, 70]])) - for t in np.linspace(0, 0.7, 4) - } - part.iks = {1.0: IKParams('tail', rotation_weight=0, chain_parts=1)} - tag_object(part.obj, 'fish_tail') - return part diff --git a/infinigen/assets/creatures/parts/foot.py b/infinigen/assets/creatures/parts/foot.py deleted file mode 100644 index 724f6159b..000000000 --- a/infinigen/assets/creatures/parts/foot.py +++ /dev/null @@ -1,294 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
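For orientation, `FishTail.make_part` in the deleted file above keys its joints by normalized position along the skeleton curve: `np.linspace(0, 0.7, 4)` places four joints between the base and 70% of the tail, each with the same rotation bounds, plus an IK target at the tip. A small NumPy-only sketch of that joint layout:

```python
import numpy as np

# Per-joint rotation bounds in degrees, rows are [min_xyz] and [max_xyz],
# matching np.array([[-35, 0, -70], [35, 0, 70]]) above.
BOUNDS = np.array([[-35, 0, -70], [35, 0, 70]])

def tail_joint_layout(n_joints=4, last_t=0.7):
    """Joints keyed by curve factor t in [0, 1] along the skeleton, mirroring
    the {t: Joint(...) for t in np.linspace(0, 0.7, 4)} pattern."""
    return {round(float(t), 4): BOUNDS for t in np.linspace(0.0, last_t, n_joints)}

print(list(tail_joint_layout().keys()))   # [0.0, 0.2333, 0.4667, 0.7]
```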
- -# Authors: Alexander Raistrick - - -import bpy - -import numpy as np -from numpy.random import uniform, normal as N - -from infinigen.assets.creatures.util.genome import Joint, IKParams - -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.assets.creatures.util.nodegroups.curve import nodegroup_simple_tube, nodegroup_simple_tube_v2 -from infinigen.assets.creatures.util.nodegroups.attach import nodegroup_surface_muscle, nodegroup_attach_part -from infinigen.assets.creatures.util.nodegroups.math import nodegroup_deg2_rad - -from infinigen.assets.creatures.util.creature import Part, PartFactory -from infinigen.assets.creatures.util.part_util import nodegroup_to_part -from infinigen.core.tagging import tag_object, tag_nodegroup - -@node_utils.to_nodegroup('nodegroup_tiger_toe', singleton=False, type='GeometryNodeTree') -def nodegroup_tiger_toe(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'length_rad1_rad2', (0.18, 0.045, 0.024)), - ('NodeSocketFloatDistance', 'Toebean Radius', 0.03), - ('NodeSocketFloat', 'Claw Curl Deg', 30.0), - ('NodeSocketVector', 'Claw Pct Length Rad1 Rad2', (0.0, 0.0, 0.0)), - ('NodeSocketFloat', 'Toe Curl Scalar', 1.0)]) - - scale = nw.new_node(Nodes.VectorMath, - input_kwargs={0: (-50.0, 25.0, 35.0), 'Scale': group_input.outputs["Toe Curl Scalar"]}, - attrs={'operation': 'SCALE'}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': group_input.outputs["length_rad1_rad2"]}) - - scale_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: separate_xyz.outputs["X"], 'Scale': 0.18}, - attrs={'operation': 'SCALE'}) - - toe = nw.new_node(nodegroup_simple_tube().name, - input_kwargs={'Origin': (-0.05, 0.0, 0.0), 'Angles Deg': scale.outputs["Vector"], 'Seg Lengths': scale_1.outputs["Vector"], 'Start Radius': separate_xyz.outputs["Y"], 'End Radius': separate_xyz.outputs["Z"]}, - label='Toe') - - uv_sphere = nw.new_node(Nodes.MeshUVSphere, - input_kwargs={'Segments': 16, 'Rings': 8, 'Radius': group_input.outputs["Toebean Radius"]}) - - transform_1 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': uv_sphere, 'Scale': (1.5, 1.0, 0.6)}) - - attach_part = nw.new_node(nodegroup_attach_part().name, - input_kwargs={'Skin Mesh': toe.outputs["Geometry"], 'Skeleton Curve': toe.outputs["Skeleton Curve"], 'Geometry': transform_1, 'Length Fac': 0.5037, 'Ray Rot': (0.0, 1.5708, 0.0), 'Rad': 0.9}) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': uv_sphere, 'Scale': (1.0, 0.7, 0.6)}) - - attach_part_1 = nw.new_node(nodegroup_attach_part().name, - input_kwargs={'Skin Mesh': toe.outputs["Geometry"], 'Skeleton Curve': toe.outputs["Skeleton Curve"], 'Geometry': transform, 'Length Fac': 0.8, 'Ray Rot': (0.0, 1.5708, 0.0), 'Rad': 0.7}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': separate_xyz.outputs["Z"], 'Y': separate_xyz.outputs["Z"], 'Z': 3.0}) - - toe_top = nw.new_node(nodegroup_surface_muscle().name, - input_kwargs={'Skin Mesh': toe.outputs["Geometry"], 'Skeleton Curve': toe.outputs["Skeleton Curve"], 'Coord 0': (0.56, -1.5708, 0.3), 'Coord 1': (0.7, -1.5708, 1.0), 'Coord 2': (0.95, -1.5708, 0.0), 'StartRad, EndRad, Fullness': combine_xyz, 'ProfileHeight, StartTilt, EndTilt': (0.9, 0.0, 0.0)}, - label='Toe Top') - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [toe.outputs["Geometry"], 
attach_part.outputs["Geometry"], attach_part_1.outputs["Geometry"], toe_top]}) - - scale_2 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: (1.0, -2.0, -1.0), 'Scale': group_input.outputs["Claw Curl Deg"]}, - attrs={'operation': 'SCALE'}) - - multiply = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["length_rad1_rad2"], 1: group_input.outputs["Claw Pct Length Rad1 Rad2"]}, - attrs={'operation': 'MULTIPLY'}) - - separate_xyz_1 = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': multiply.outputs["Vector"]}) - - scale_3 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: (0.33, 0.33, 0.33), 'Scale': separate_xyz_1.outputs["X"]}, - attrs={'operation': 'SCALE'}) - - claw = nw.new_node(nodegroup_simple_tube().name, - input_kwargs={'Origin': (-0.007, 0.0, 0.0), 'Angles Deg': scale_2.outputs["Vector"], 'Seg Lengths': scale_3.outputs["Vector"], 'Start Radius': separate_xyz_1.outputs["Y"], 'End Radius': separate_xyz_1.outputs["Z"]}, - label='Claw') - - attach_part_2 = nw.new_node(nodegroup_attach_part().name, - input_kwargs={'Skin Mesh': toe.outputs["Geometry"], 'Skeleton Curve': toe.outputs["Skeleton Curve"], 'Geometry': claw.outputs["Geometry"], 'Length Fac': 0.85}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': join_geometry_1, 'Skeleton Curve': toe.outputs["Skeleton Curve"], 'Claw': attach_part_2.outputs["Geometry"]}) - - -@node_utils.to_nodegroup('nodegroup_foot', singleton=False, type='GeometryNodeTree') -def nodegroup_foot(nw: NodeWrangler): - # Code generated using version 2.5.1 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketInt', 'Num Toes', 3), - ('NodeSocketVector', 'length_rad1_rad2', (0.2700, 0.0400, 0.0900)), - ('NodeSocketVector', 'Toe Rotate', (0.0000, -1.57, 0.0000)), - ('NodeSocketVector', 'Toe Length Rad1 Rad2', (0.3000, 0.0450, 0.0250)), - ('NodeSocketFloat', 'Toe Splay', 0.0000), - ('NodeSocketFloatDistance', 'Toebean Radius', 0.0300), - ('NodeSocketFloat', 'Claw Curl Deg', 30.0000), - ('NodeSocketVector', 'Claw Pct Length Rad1 Rad2', (0.3000, 0.5000, 0.0000)), - ('NodeSocketVector', 'Thumb Pct', (1.0000, 1.0000, 1.0000)), - ('NodeSocketFloat', 'Toe Curl Scalar', 1.0000)]) - - simple_tube_v2 = nw.new_node(nodegroup_simple_tube_v2().name, - input_kwargs={'length_rad1_rad2': group_input.outputs["length_rad1_rad2"], - 'angles_deg': (10.0000, 8.0000, -25.0000)}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': group_input.outputs["length_rad1_rad2"]}) - - multiply_add = nw.new_node(Nodes.VectorMath, - input_kwargs={0: separate_xyz.outputs["Z"], 1: (0.0000, -0.4500, 0.1000), - 2: (-0.0700, 0.0000, 0.0000)}, - attrs={'operation': 'MULTIPLY_ADD'}) - - add = nw.new_node(Nodes.VectorMath, - input_kwargs={0: simple_tube_v2.outputs["Endpoint"], 1: multiply_add.outputs["Vector"]}) - - multiply_add_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: separate_xyz.outputs["Z"], 1: (0.0000, 0.4500, 0.1000), - 2: (-0.0700, 0.0000, 0.0000)}, - attrs={'operation': 'MULTIPLY_ADD'}) - - add_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: simple_tube_v2.outputs["Endpoint"], 1: multiply_add_1.outputs["Vector"]}) - - mesh_line = nw.new_node(Nodes.MeshLine, - input_kwargs={'Count': group_input.outputs["Num Toes"], - 'Start Location': add.outputs["Vector"], 'Offset': add_1.outputs["Vector"]}, - attrs={'mode': 'END_POINTS'}) - - tigertoe = nw.new_node(nodegroup_tiger_toe().name, - input_kwargs={'length_rad1_rad2': group_input.outputs["Toe Length Rad1 Rad2"], - 
'Toebean Radius': group_input.outputs["Toebean Radius"], - 'Claw Curl Deg': group_input.outputs["Claw Curl Deg"], - 'Claw Pct Length Rad1 Rad2': group_input.outputs["Claw Pct Length Rad1 Rad2"], - 'Toe Curl Scalar': group_input.outputs["Toe Curl Scalar"]}) - - instance_on_points = nw.new_node(Nodes.InstanceOnPoints, - input_kwargs={'Points': mesh_line, 'Instance': tigertoe.outputs["Geometry"]}) - - rotate_instances_1 = nw.new_node(Nodes.RotateInstances, - input_kwargs={'Instances': instance_on_points, - 'Rotation': group_input.outputs["Toe Rotate"]}) - - index = nw.new_node(Nodes.Index) - - add_2 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Num Toes"], 1: -1.0000}) - - divide = nw.new_node(Nodes.Math, - input_kwargs={0: index, 1: add_2}, - attrs={'operation': 'DIVIDE'}) - - scale = nw.new_node(Nodes.VectorMath, - input_kwargs={0: (0.0000, 0.0000, -1.0000), 'Scale': group_input.outputs["Toe Splay"]}, - attrs={'operation': 'SCALE'}) - - scale_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: (0.0000, 0.0000, 1.0000), 'Scale': group_input.outputs["Toe Splay"]}, - attrs={'operation': 'SCALE'}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Vector': divide, 9: scale.outputs["Vector"], 10: scale_1.outputs["Vector"]}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - deg2rad = nw.new_node(nodegroup_deg2_rad().name, - input_kwargs={'Deg': map_range.outputs["Vector"]}) - - rotate_instances = nw.new_node(Nodes.RotateInstances, - input_kwargs={'Instances': rotate_instances_1, 'Rotation': deg2rad}) - - realize_instances = nw.new_node(Nodes.RealizeInstances, - input_kwargs={'Geometry': rotate_instances}) - - uv_sphere = nw.new_node(Nodes.MeshUVSphere, - input_kwargs={'Segments': 16, 'Rings': 8, 'Radius': 0.01500}) - - add_3 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: simple_tube_v2.outputs["Endpoint"], 1: (-0.0200, 0.0000, 0.0000)}) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': uv_sphere, 'Translation': add_3.outputs["Vector"], - 'Scale': (0.7000, 1.0000, 1.0000)}) - - reroute = nw.new_node(Nodes.Reroute, - input_kwargs={'Input': simple_tube_v2.outputs["Geometry"]}) - - reroute_1 = nw.new_node(Nodes.Reroute, - input_kwargs={'Input': simple_tube_v2.outputs["Skeleton Curve"]}) - - multiply = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["Toe Length Rad1 Rad2"], - 1: group_input.outputs["Thumb Pct"]}, - attrs={'operation': 'MULTIPLY'}) - - tigertoe_1 = nw.new_node(nodegroup_tiger_toe().name, - input_kwargs={'length_rad1_rad2': multiply.outputs["Vector"], - 'Toebean Radius': group_input.outputs["Toebean Radius"], - 'Claw Curl Deg': group_input.outputs["Claw Curl Deg"], - 'Claw Pct Length Rad1 Rad2': group_input.outputs[ - "Claw Pct Length Rad1 Rad2"], - 'Toe Curl Scalar': group_input.outputs["Toe Curl Scalar"]}) - - value_2 = nw.new_node(Nodes.Value) - value_2.outputs[0].default_value = 0.3000 - - vector_1 = nw.new_node(Nodes.Vector) - vector_1.vector = (90.0000, 90.0000, 90.0000) - - value_1 = nw.new_node(Nodes.Value) - value_1.outputs[0].default_value = 0.8000 - - vector = nw.new_node(Nodes.Vector) - vector.vector = (90.0000, 1.4300, -55.6800) - - attach_part = nw.new_node(nodegroup_attach_part().name, - input_kwargs={'Skin Mesh': reroute, 'Skeleton Curve': reroute_1, - 'Geometry': tigertoe_1.outputs["Geometry"], 'Length Fac': value_2, - 'Ray Rot': vector_1, 'Rad': value_1, 'Part Rot': vector, - 'Do Tangent Rot': True}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, - input_kwargs={ - 'Geometry': 
[realize_instances, transform, attach_part.outputs["Geometry"], - simple_tube_v2.outputs["Geometry"]]}) - - instance_on_points_1 = nw.new_node(Nodes.InstanceOnPoints, - input_kwargs={'Points': mesh_line, 'Instance': tigertoe.outputs["Claw"]}) - - rotate_instances_2 = nw.new_node(Nodes.RotateInstances, - input_kwargs={'Instances': instance_on_points_1, - 'Rotation': group_input.outputs["Toe Rotate"]}) - - rotate_instances_3 = nw.new_node(Nodes.RotateInstances, - input_kwargs={'Instances': rotate_instances_2, 'Rotation': deg2rad}) - - realize_instances_1 = nw.new_node(Nodes.RealizeInstances, - input_kwargs={'Geometry': rotate_instances_3}) - - attach_part_1 = nw.new_node(nodegroup_attach_part().name, - input_kwargs={'Skin Mesh': reroute, 'Skeleton Curve': reroute_1, - 'Geometry': tigertoe_1.outputs["Claw"], 'Length Fac': value_2, - 'Ray Rot': vector_1, 'Rad': value_1, 'Part Rot': vector, - 'Do Tangent Rot': True}) - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [realize_instances_1, attach_part_1.outputs["Geometry"]]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': join_geometry, - 'Skeleton Curve': simple_tube_v2.outputs["Skeleton Curve"], - 'Base Mesh': simple_tube_v2.outputs["Geometry"], 'Claws': join_geometry_1}) - -class Foot(PartFactory): - - def __init__(self, params=None, bald=False): - super().__init__(params) - self.tags = ['foot'] - if bald: - self.tags.append('bald') - - def sample_params(self): - return { - 'length_rad1_rad2': np.array((0.27, 0.04, 0.09)) * N(1, (0.2, 0.05, 0.05), 3), - 'Num Toes': max(int(N(4, 1)), 2), - 'Toe Length Rad1 Rad2': np.array((0.3, 0.045, 0.025)) * N(1, 0.1, 3), - 'Toe Rotate': (0., -N(0.7, 0.15), 0.), - 'Toe Splay': 20.0 * N(1, 0.2), - 'Toebean Radius': 0.03 * N(1, 0.2), - 'Claw Curl Deg': 30 * N(1, 0.4), - 'Claw Pct Length Rad1 Rad2': np.array((0.3, 0.5, 0.0)) * N(1, 0.1, 3) - } - - def make_part(self, params): - - part = nodegroup_to_part(nodegroup_foot, params, split_extras=True) - part.iks = {1.0: IKParams('foot', rotation_weight=0.1, chain_parts=2, chain_length=-1)} - part.settings['rig_extras'] = True - tag_object(part.obj, 'foot') - return part \ No newline at end of file diff --git a/infinigen/assets/creatures/parts/generic_nurbs.py b/infinigen/assets/creatures/parts/generic_nurbs.py deleted file mode 100644 index 9e92d917d..000000000 --- a/infinigen/assets/creatures/parts/generic_nurbs.py +++ /dev/null @@ -1,162 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
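Editorial note, not part of the patch: `Foot.sample_params` above relies on the multiplicative Gaussian jitter idiom (`N` is `numpy.random.normal`) used throughout these factories. The sketch below is a self-contained numpy illustration of that idiom only; the exact values are the defaults from the deleted code.

```python
# Minimal numpy sketch of the parameter-jitter idiom in Foot.sample_params above
# (not part of the patch). Multiplying a base vector by N(1, stds, 3) applies
# independent per-component relative noise around the nominal value.
import numpy as np
from numpy.random import normal as N

base_len_rad1_rad2 = np.array((0.27, 0.04, 0.09))
jittered = base_len_rad1_rad2 * N(1, (0.2, 0.05, 0.05), 3)  # ~±20% length, ~±5% radii

num_toes = max(int(N(4, 1)), 2)  # gaussian toe count, clamped so a foot never has fewer than 2 toes
print(jittered, num_toes)
```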
- -# Authors: Alexander Raistrick - - -import pdb - -import bpy -from pathlib import Path -import numpy as np -from infinigen.assets.creatures.util.creature import Part, PartFactory - -from infinigen.core.nodes import node_utils -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler - -from infinigen.assets.creatures.util.genome import Joint, IKParams -from infinigen.assets.creatures.util import part_util -from infinigen.assets.creatures.util.geometry import lofting - -from infinigen.core.util import blender as butil -from infinigen.core.util.logging import Suppress -from infinigen.core.tagging import tag_object, tag_nodegroup - -NURBS_BASE_PATH = Path(__file__).parent/'nurbs_data' -NURBS_KEYS = [p.stem for p in NURBS_BASE_PATH.iterdir()] -def load_nurbs(name:str): - return np.load(NURBS_BASE_PATH/(name + '.npy'))[..., :3] - -def decompose_nurbs_handles(handles): - - skeleton, ts, profiles = lofting.factorize_nurbs_handles(handles) - - rads = np.linalg.norm(profiles, axis=2, keepdims=True).mean(axis=1, keepdims=True) - rads = np.clip(rads, 1e-3, 1e5) - profiles_norm = profiles / rads - - skeleton_root = skeleton[[0]] - dirs = np.diff(skeleton, axis=0) - - lens = np.linalg.norm(dirs, axis=-1) - length = lens.sum() - proportions = lens / length - - thetas = np.arctan2(dirs[:, 2], dirs[:, 0]) - thetas = np.rad2deg(thetas) - skeleton_yoffs = dirs[:, 1] / lens - - return { - 'ts': ts, - 'rads': rads, - 'skeleton_root': skeleton_root, - 'skeleton_yoffs': skeleton_yoffs, - 'length': length, - 'proportions': proportions, - 'thetas': thetas, - 'profiles_norm': profiles_norm - } - -def recompose_nurbs_handles(params): - - lens = params['length'] * params['proportions'] - thetas = np.deg2rad(params['thetas']) - skeleton_offs = np.stack([ - lens * np.cos(thetas), - lens * params['skeleton_yoffs'], - lens * np.sin(thetas) - ], axis=-1) - skeleton = np.concatenate([params['skeleton_root'], skeleton_offs], axis=0) - skeleton = np.cumsum(skeleton, axis=0) - - handles = lofting.compute_profile_verts( - skeleton, params['ts'], - params['profiles_norm'] * params['rads'], profile_as_points=True) - - return handles - -class NurbsPart(PartFactory): - - def __init__(self, params=None, prefix=None, tags=None, temperature=0.3, var=1, exps=None): - self.prefix = prefix - self.tags = tags or [] - self.temperature = temperature - self.var = var - self.exps = exps - super(NurbsPart, self).__init__(params) - - def sample_params(self, select=None): - - if self.prefix is None: - # for compatibility with interp which will not init prefix but does not need sample_params - return {} # TODO hacky, replace - - N = lambda u, v, d=1: np.random.normal(u, np.array(v) * self.var, d) - - target_keys = [k for k in NURBS_KEYS if self.prefix is None or k.startswith(self.prefix)] - weights = part_util.random_convex_coord(target_keys, select=select, temp=self.temperature) - if self.exps is not None: - for k, exp in self.exps.items(): - weights[k] = weights[k] ** exp - - handles = sum(w * load_nurbs(k) for k, w in weights.items()) - decomp = decompose_nurbs_handles(handles) - - sz = N(1, 0.1) - decomp['length'] *= sz * N(1, 0.1) - decomp['rads'] *= sz * N(1, 0.1) * N(1, 0.15, decomp['rads'].shape) - decomp['proportions'] *= N(1, 0.15) - - ang_noise = N(0, 7, decomp['thetas'].shape) - ang_noise -= ang_noise.mean() - decomp['thetas'] += ang_noise - - n, m, d = decomp['profiles_norm'].shape - profile_noise = N(1, 0.07, (1, m, 1)) * N(1, 0.15, (n, m, 1)) - profile_noise[:, :m//2-1] = profile_noise[:, m//2:-1][:, ::-1] # 
symmetrize noise - decomp['profiles_norm'] *= profile_noise # profiles are 0-centered so multiplication is sensible - - return decomp - - def make_part(self, params): - handles = recompose_nurbs_handles(params) - part = part_util.nurbs_to_part(handles) - with butil.ViewportMode(part.obj, mode='EDIT'), Suppress(): - bpy.ops.mesh.select_all() - bpy.ops.mesh.remove_doubles() - bpy.ops.mesh.normals_make_consistent(inside=False) - return part - -class NurbsBody(NurbsPart): - - def __init__(self, *args, shoulder_ik_ts=[0.0, 0.6], n_bones=8, rig_reverse_skeleton=False, **kwargs): - super().__init__(*args, **kwargs) - self.shoulder_ik_ts = shoulder_ik_ts - self.n_bones = n_bones - self.rig_reverse_skeleton = rig_reverse_skeleton - - def make_part(self, params): - part = super().make_part(params) - part.joints = { - i: Joint((0,0,0), bounds=np.array([[-30, -30, -30], [30, 30, 30]])) - for i in np.linspace(0, 1, self.n_bones, endpoint=True) - } - part.iks = { - t: IKParams(name=f'body_{i}', mode='pin' if i == 0 else 'iksolve', - rotation_weight=0, target_size=0.3) - for i, t in enumerate(self.shoulder_ik_ts) - } - part.settings['rig_reverse_skeleton'] = self.rig_reverse_skeleton - tag_object(part.obj, 'body') - return part - -class NurbsHead(NurbsPart): - - def make_part(self, params): - part = super().make_part(params) - part.iks = { - 1.0: IKParams(name='head', rotation_weight=0.1, target_size=0.4, chain_length=1) - } - part.settings['rig_extras'] = True - tag_object(part.obj, 'head') - return part \ No newline at end of file diff --git a/infinigen/assets/creatures/parts/head.py b/infinigen/assets/creatures/parts/head.py deleted file mode 100644 index caf2b7af6..000000000 --- a/infinigen/assets/creatures/parts/head.py +++ /dev/null @@ -1,576 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
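Editorial note, not part of the patch: `NurbsPart.sample_params` above blends several stored NURBS control-point grids with convex weights before decomposing and re-jittering them. The sketch below illustrates only that convex-blend step with fake data; the Dirichlet draw is a stand-in assumption for `part_util.random_convex_coord`'s temperature-weighted coordinates.

```python
# Self-contained numpy sketch (not part of the patch) of the blending idea in
# NurbsPart.sample_params: combine exemplar NURBS handle grids with convex weights
# to get an in-between part shape. Keys and shapes here are fake placeholder data.
import numpy as np

rng = np.random.default_rng(0)
exemplars = {k: rng.normal(size=(8, 12, 3)) for k in ("body_a", "body_b", "body_c")}

weights = rng.dirichlet(alpha=np.full(len(exemplars), 0.3))        # >= 0 and sums to 1
blended = sum(w * h for w, h in zip(weights, exemplars.values()))  # same (n, m, 3) grid shape
assert blended.shape == (8, 12, 3)
```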
- -# Authors: Alexander Raistrick - - -import bpy -import numpy as np -from numpy.random import uniform, normal as N, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface - -from infinigen.core.util.math import clip_gaussian -from infinigen.assets.creatures.util.genome import Joint, IKParams - -from infinigen.assets.creatures.util.nodegroups.curve import nodegroup_simple_tube, nodegroup_polar_bezier, nodegroup_simple_tube_v2, nodegroup_warped_circle_curve -from infinigen.assets.creatures.util.nodegroups.attach import nodegroup_surface_muscle, nodegroup_part_surface_simple, nodegroup_attach_part, nodegroup_smooth_taper, nodegroup_profile_part -from infinigen.assets.creatures.util.nodegroups.geometry import nodegroup_solidify, nodegroup_symmetric_clone -from infinigen.assets.creatures.util.nodegroups.math import nodegroup_deg2_rad - -from infinigen.assets.creatures.util.creature import PartFactory -from infinigen.assets.creatures.util import part_util -from infinigen.assets.creatures.parts.eye import nodegroup_mammal_eye -from infinigen.core.tagging import tag_object, tag_nodegroup - -@node_utils.to_nodegroup('nodegroup_carnivore_jaw', singleton=True, type='GeometryNodeTree') -def nodegroup_carnivore_jaw(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'length_rad1_rad2', (0.0, 0.0, 0.0)), - ('NodeSocketFloatFactor', 'Width Shaping', 0.6764), - ('NodeSocketFloat', 'Canine Length', 0.050000000000000003), - ('NodeSocketFloat', 'Incisor Size', 0.01), - ('NodeSocketFloat', 'Tooth Crookedness', 0.0), - ('NodeSocketFloatFactor', 'Tongue Shaping', 1.0), - ('NodeSocketFloat', 'Tongue X Scale', 0.90000000000000002)]) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': group_input.outputs["length_rad1_rad2"]}) - - scale = nw.new_node(Nodes.VectorMath, - input_kwargs={0: (0.33000000000000002, 0.33000000000000002, 0.33000000000000002), 'Scale': separate_xyz.outputs["X"]}, - attrs={'operation': 'SCALE'}) - - polarbezier = nw.new_node(nodegroup_polar_bezier().name, - input_kwargs={'angles_deg': (0.0, 0.0, 13.0), 'Seg Lengths': scale.outputs["Vector"]}) - - position = nw.new_node(Nodes.InputPosition) - - vector_curves = nw.new_node(Nodes.VectorCurve, - input_kwargs={'Vector': position}) - node_utils.assign_curve(vector_curves.mapping.curves[0], [(-1.0, -1.0), (0.0035999999999999999, 0.0), (0.24360000000000001, 0.20999999999999999), (1.0, 1.0)]) - node_utils.assign_curve(vector_curves.mapping.curves[1], [(-1.0, 0.12), (-0.77449999999999997, 0.059999999999999998), (-0.65090000000000003, -0.44), (-0.36730000000000002, -0.40000000000000002), (-0.0545, -0.01), (0.1055, 0.02), (0.52729999999999999, 0.5), (0.7964, 0.64000000000000001), (1.0, 1.0)], handles=['AUTO', 'AUTO', 'AUTO', 'AUTO_CLAMPED', 'AUTO', 'AUTO', 'VECTOR', 'AUTO', 'AUTO']) - node_utils.assign_curve(vector_curves.mapping.curves[2], [(-1.0, -1.0), (1.0, 1.0)]) - - warped_circle_curve = nw.new_node(nodegroup_warped_circle_curve().name, - input_kwargs={'Position': vector_curves}) - - spline_parameter = nw.new_node(Nodes.SplineParameter) - - float_curve = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Factor': group_input.outputs["Width Shaping"], 'Value': spline_parameter.outputs["Factor"]}) - node_utils.assign_curve(float_curve.mapping.curves[0], [(0.0, 
0.95499999999999996), (0.42549999999999999, 0.78500000000000003), (0.65449999999999997, 0.53500000000000003), (0.94910000000000005, 0.75), (1.0, 0.59499999999999997)], handles=['AUTO', 'AUTO', 'AUTO', 'AUTO_CLAMPED', 'AUTO']) - - smoothtaper = nw.new_node(nodegroup_smooth_taper().name, - input_kwargs={'start_rad': separate_xyz.outputs["Y"], 'end_rad': separate_xyz.outputs["Z"], 'fullness': 2.6000000000000001}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: float_curve, 1: smoothtaper}, - attrs={'operation': 'MULTIPLY'}) - - profilepart = nw.new_node(nodegroup_profile_part().name, - input_kwargs={'Skeleton Curve': polarbezier.outputs["Curve"], 'Profile Curve': warped_circle_curve, 'Radius Func': multiply}) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': profilepart, 'Scale': (1.0, 1.7, 1.0)}) - - greater_than = nw.new_node(Nodes.Compare, - input_kwargs={0: group_input.outputs["Canine Length"]}) - - scale_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: (0.33000000000000002, 0.33000000000000002, 0.33000000000000002), 'Scale': group_input.outputs["Canine Length"]}, - attrs={'operation': 'SCALE'}) - - canine_tooth = nw.new_node(nodegroup_simple_tube().name, - input_kwargs={'Seg Lengths': scale_1.outputs["Vector"], 'Start Radius': 0.014999999999999999, 'End Radius': 0.0030000000000000001}, - label='Canine Tooth') - - attach_part = nw.new_node(nodegroup_attach_part().name, - input_kwargs={'Skin Mesh': transform, 'Skeleton Curve': polarbezier.outputs["Curve"], 'Geometry': canine_tooth.outputs["Geometry"], 'Length Fac': 0.90000000000000002, 'Ray Rot': (1.5708, 0.12039999999999999, 1.5708), 'Rad': 1.0, 'Part Rot': (-17.600000000000001, -53.490000000000002, 0.0)}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': attach_part.outputs["Geometry"]}) - - symmetric_clone = nw.new_node(nodegroup_symmetric_clone().name, - input_kwargs={'Geometry': join_geometry}) - - switch_1 = nw.new_node(Nodes.Switch, - input_kwargs={1: greater_than, 15: symmetric_clone.outputs["Both"]}) - - greater_than_1 = nw.new_node(Nodes.Compare, - input_kwargs={0: group_input.outputs["Incisor Size"]}) - - add = nw.new_node(Nodes.VectorMath, - input_kwargs={0: attach_part.outputs["Position"], 1: (0.014999999999999999, -0.050000000000000003, 0.0)}) - - multiply_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: add.outputs["Vector"], 1: (1.0, -1.0, 1.0)}, - attrs={'operation': 'MULTIPLY'}) - - add_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: add.outputs["Vector"], 1: multiply_1.outputs["Vector"]}) - - multiply_add = nw.new_node(Nodes.VectorMath, - input_kwargs={0: add_1.outputs["Vector"], 1: (0.5, 0.5, 0.5), 2: (-0.02, 0.0, 0.0)}, - attrs={'operation': 'MULTIPLY_ADD'}) - - quadratic_bezier = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Resolution': 6, 'Start': add.outputs["Vector"], 'Middle': multiply_add.outputs["Vector"], 'End': multiply_1.outputs["Vector"]}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': quadratic_bezier}) - - transform_1 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': curve_to_mesh}) - - scale_2 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: (3.0, 1.0, 0.59999999999999998), 'Scale': group_input.outputs["Incisor Size"]}, - attrs={'operation': 'SCALE'}) - - cube = nw.new_node(Nodes.MeshCube, - input_kwargs={'Size': scale_2.outputs["Vector"]}) - - subdivision_surface = nw.new_node(Nodes.SubdivisionSurface, - input_kwargs={'Mesh': cube, 'Level': 3}) - - transform_2 = 
nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': subdivision_surface}) - - instance_on_points = nw.new_node(Nodes.InstanceOnPoints, - input_kwargs={'Points': transform_1, 'Instance': transform_2, 'Rotation': (0.0, -1.5708, 0.0)}) - - subtract = nw.new_node(Nodes.VectorMath, - input_kwargs={0: (2.0, 2.0, 2.0), 1: group_input.outputs["Tooth Crookedness"]}, - attrs={'operation': 'SUBTRACT'}) - - random_value = nw.new_node(Nodes.RandomValue, - input_kwargs={0: subtract.outputs["Vector"], 1: group_input.outputs["Tooth Crookedness"]}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - scale_instances = nw.new_node(Nodes.ScaleInstances, - input_kwargs={'Instances': instance_on_points, 'Scale': random_value.outputs["Value"]}) - - scale_3 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: (-3.0, -3.0, -3.0), 'Scale': group_input.outputs["Tooth Crookedness"]}, - attrs={'operation': 'SCALE'}) - - scale_4 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: (3.0, 3.0, 3.0), 'Scale': group_input.outputs["Tooth Crookedness"]}, - attrs={'operation': 'SCALE'}) - - random_value_1 = nw.new_node(Nodes.RandomValue, - input_kwargs={0: scale_3.outputs["Vector"], 1: scale_4.outputs["Vector"]}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - deg2rad = nw.new_node(nodegroup_deg2_rad().name, - input_kwargs={'Deg': random_value_1.outputs["Value"]}) - - rotate_instances = nw.new_node(Nodes.RotateInstances, - input_kwargs={'Instances': scale_instances, 'Rotation': deg2rad}) - - realize_instances = nw.new_node(Nodes.RealizeInstances, - input_kwargs={'Geometry': rotate_instances}) - - switch = nw.new_node(Nodes.Switch, - input_kwargs={1: greater_than_1, 15: realize_instances}) - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [switch_1.outputs[6], switch.outputs[6]]}) - - resample_curve = nw.new_node(Nodes.ResampleCurve, - input_kwargs={'Curve': polarbezier.outputs["Curve"]}) - - spline_parameter_1 = nw.new_node(Nodes.SplineParameter) - - float_curve_1 = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Factor': group_input.outputs["Tongue Shaping"], 'Value': spline_parameter_1.outputs["Factor"]}) - node_utils.assign_curve(float_curve_1.mapping.curves[0], [(0.0, 1.0), (0.69820000000000004, 0.55000000000000004), (0.97450000000000003, 0.34999999999999998), (1.0, 0.17499999999999999)]) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={3: separate_xyz.outputs["Y"], 4: separate_xyz.outputs["Z"]}, - attrs={'clamp': False}) - - multiply_2 = nw.new_node(Nodes.Math, - input_kwargs={0: float_curve_1, 1: map_range.outputs["Result"]}, - attrs={'operation': 'MULTIPLY'}) - - multiply_3 = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_2, 1: 1.0}, - attrs={'operation': 'MULTIPLY'}) - - set_curve_radius = nw.new_node(Nodes.SetCurveRadius, - input_kwargs={'Curve': resample_curve, 'Radius': multiply_3}) - - quadratic_bezier_1 = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Resolution': 3, 'Middle': (0.0, 0.69999999999999996, 0.0)}) - - curve_to_mesh_1 = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': set_curve_radius, 'Profile Curve': quadratic_bezier_1, 'Fill Caps': True}) - - solidify = nw.new_node(nodegroup_solidify().name, - input_kwargs={'Mesh': curve_to_mesh_1, 'Distance': 0.02}) - - set_shade_smooth = nw.new_node(Nodes.SetShadeSmooth, - input_kwargs={'Geometry': solidify, 'Shade Smooth': False}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': group_input.outputs["Tongue X Scale"], 'Y': 1.0, 'Z': 1.0}) - - transform_3 = nw.new_node(Nodes.Transform, - 
input_kwargs={'Geometry': set_shade_smooth, 'Rotation': (0.0, -0.015900000000000001, 0.0), 'Scale': combine_xyz}) - - subdivision_surface_1 = nw.new_node(Nodes.SubdivisionSurface, - input_kwargs={'Mesh': transform_3, 'Level': 2}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': transform, 'Skeleton Curve': polarbezier.outputs["Curve"], 'Teeth': join_geometry_1, 'Tongue': subdivision_surface_1}) - -@node_utils.to_nodegroup('nodegroup_carnivore_head', singleton=False, type='GeometryNodeTree') -def nodegroup_carnivore_head(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'length_rad1_rad2', (0.0, 0.0, 0.0)), - ('NodeSocketVector', 'snout_length_rad1_rad2', (0.0, 0.0, 0.0)), - ('NodeSocketFloat', 'snout_y_scale', 0.62), - ('NodeSocketVectorXYZ', 'Nose Bridge Scale', (1.0, 0.35, 0.9)), - ('NodeSocketVector', 'Jaw Muscle Middle Coord', (0.24, 0.41, 1.3)), - ('NodeSocketVector', 'Jaw StartRad, EndRad, Fullness', (0.06, 0.11, 1.5)), - ('NodeSocketVector', 'Jaw ProfileHeight, StartTilt, EndTilt', (0.8, 33.1, 0.0)), - ('NodeSocketVector', 'Lip Muscle Middle Coord', (0.95, 0.0, 1.5)), - ('NodeSocketVector', 'Lip StartRad, EndRad, Fullness', (0.05, 0.09, 1.48)), - ('NodeSocketVector', 'Lip ProfileHeight, StartTilt, EndTilt', (0.8, 0.0, -17.2)), - ('NodeSocketVector', 'Forehead Muscle Middle Coord', (0.7, -1.32, 1.31)), - ('NodeSocketVector', 'Forehead StartRad, EndRad, Fullness', (0.06, 0.05, 2.5)), - ('NodeSocketVector', 'Forehead ProfileHeight, StartTilt, EndTilt', (0.3, 60.6, 66.0)), - ('NodeSocketFloat', 'aspect', 1.0), - ('NodeSocketFloatDistance', 'EyeRad', 0.03), - ('NodeSocketVector', 'EyeOffset', (-0.2, 0.5, 0.2))]) - - vector = nw.new_node(Nodes.Vector) - vector.vector = (-0.07, 0.0, 0.05) - - simple_tube_v2 = nw.new_node(nodegroup_simple_tube_v2().name, - input_kwargs={'length_rad1_rad2': group_input.outputs["length_rad1_rad2"], 'angles_deg': (-5.67, 0.0, 0.0), 'aspect': group_input.outputs["aspect"], 'fullness': 3.63, 'Origin': vector}) - - snout_origin = nw.new_node(Nodes.VectorMath, - input_kwargs={0: simple_tube_v2.outputs["Endpoint"], 1: (-0.1, 0.0, 0.0)}, - label='Snout Origin') - - split_length_width1_width2 = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': group_input.outputs["snout_length_rad1_rad2"]}, - label='Split Length / Width1 / Width2') - - snout_seg_lengths = nw.new_node(Nodes.VectorMath, - input_kwargs={0: (0.33, 0.33, 0.33), 'Scale': split_length_width1_width2.outputs["X"]}, - label='Snout Seg Lengths', - attrs={'operation': 'SCALE'}) - - bridge = nw.new_node(nodegroup_simple_tube().name, - input_kwargs={'Origin': snout_origin.outputs["Vector"], 'Angles Deg': (-4.0, -4.5, -5.61), 'Seg Lengths': snout_seg_lengths.outputs["Vector"], 'Start Radius': 0.17, 'End Radius': 0.1, 'Fullness': 5.44}, - label='Bridge') - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': bridge.outputs["Geometry"], 'Translation': (0.0, 0.0, 0.03), 'Scale': group_input.outputs["Nose Bridge Scale"]}) - - snout = nw.new_node(nodegroup_simple_tube().name, - input_kwargs={'Origin': snout_origin.outputs["Vector"], 'Angles Deg': (-3.0, -4.5, -5.61), 'Seg Lengths': snout_seg_lengths.outputs["Vector"], 'Start Radius': split_length_width1_width2.outputs["Y"], 'End Radius': split_length_width1_width2.outputs["Z"], 'Fullness': 2.0}, - label='Snout') - - transform_1 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': 
snout.outputs["Geometry"], 'Translation': (0.0, 0.0, 0.03), 'Scale': (1.0, 0.7, 0.7)}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': 1.0, 'Y': group_input.outputs["snout_y_scale"], 'Z': 1.0}) - - transform_2 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': transform_1, 'Scale': combine_xyz}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [transform, transform_2]}) - - union = nw.new_node(Nodes.MeshBoolean, - input_kwargs={'Mesh 2': [join_geometry, simple_tube_v2.outputs["Geometry"]], 'Self Intersection': True}, - attrs={'operation': 'UNION'}) - - curve_line_1 = nw.new_node(Nodes.CurveLine, - input_kwargs={'Start': vector, 'End': snout.outputs["Endpoint"]}) - - scale = nw.new_node(Nodes.VectorMath, - input_kwargs={0: (0.33, 0.33, 0.33)}, - attrs={'operation': 'SCALE'}) - - jaw_cutter = nw.new_node(nodegroup_simple_tube().name, - input_kwargs={'Origin': (0.0, 0.0, 0.09), 'Angles Deg': (0.0, 0.0, 0.0), 'Seg Lengths': scale.outputs["Vector"], 'Start Radius': 0.13}, - label='Jaw Cutter') - - attach_part = nw.new_node(nodegroup_attach_part().name, - input_kwargs={'Skin Mesh': union.outputs["Mesh"], 'Skeleton Curve': curve_line_1, 'Geometry': jaw_cutter.outputs["Geometry"], 'Length Fac': 0.2, 'Ray Rot': (0.0, 1.5708, 0.0), 'Rad': 1.25, 'Part Rot': (0.0, -8.5, 0.0), 'Do Tangent Rot': True}) - - mammaleye = nw.new_node(nodegroup_mammal_eye().name, - input_kwargs={'Radius': group_input.outputs["EyeRad"]}) - - reroute_4 = nw.new_node(Nodes.Reroute, - input_kwargs={'Input': group_input.outputs["length_rad1_rad2"]}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': reroute_4}) - - reroute_3 = nw.new_node(Nodes.Reroute, - input_kwargs={'Input': simple_tube_v2.outputs["Endpoint"]}) - - multiply_add = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["EyeOffset"], 1: separate_xyz.outputs["Z"], 2: reroute_3}, - attrs={'operation': 'MULTIPLY_ADD'}) - - transform_4 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': mammaleye.outputs["ParentCutter"], 'Translation': multiply_add.outputs["Vector"]}) - - symmetric_clone = nw.new_node(nodegroup_symmetric_clone().name, - input_kwargs={'Geometry': transform_4}) - - difference = nw.new_node(Nodes.MeshBoolean, - input_kwargs={'Mesh 1': union.outputs["Mesh"], 'Mesh 2': [attach_part.outputs["Geometry"], symmetric_clone.outputs["Both"]], 'Self Intersection': True}) - - jaw_muscle = nw.new_node(nodegroup_surface_muscle().name, - input_kwargs={'Skin Mesh': union.outputs["Mesh"], 'Skeleton Curve': curve_line_1, 'Coord 0': (0.19, -0.41, 0.78), 'Coord 1': group_input.outputs["Jaw Muscle Middle Coord"], 'Coord 2': (0.67, 1.26, 0.52), 'StartRad, EndRad, Fullness': group_input.outputs["Jaw StartRad, EndRad, Fullness"], 'ProfileHeight, StartTilt, EndTilt': group_input.outputs["Jaw ProfileHeight, StartTilt, EndTilt"]}, - label='Jaw Muscle') - - lip = nw.new_node(nodegroup_surface_muscle().name, - input_kwargs={'Skin Mesh': union.outputs["Mesh"], 'Skeleton Curve': curve_line_1, 'Coord 0': (0.51, -0.13, 0.02), 'Coord 1': group_input.outputs["Lip Muscle Middle Coord"], 'Coord 2': (0.99, 10.57, 0.1), 'StartRad, EndRad, Fullness': group_input.outputs["Lip StartRad, EndRad, Fullness"], 'ProfileHeight, StartTilt, EndTilt': group_input.outputs["Lip ProfileHeight, StartTilt, EndTilt"]}, - label='Lip') - - forehead = nw.new_node(nodegroup_surface_muscle().name, - input_kwargs={'Skin Mesh': simple_tube_v2.outputs["Geometry"], 'Skeleton Curve': 
simple_tube_v2.outputs["Skeleton Curve"], 'Coord 0': (0.31, -1.06, 0.97), 'Coord 1': group_input.outputs["Forehead Muscle Middle Coord"], 'Coord 2': (0.95, -1.52, 0.9), 'StartRad, EndRad, Fullness': group_input.outputs["Forehead StartRad, EndRad, Fullness"], 'ProfileHeight, StartTilt, EndTilt': group_input.outputs["Forehead ProfileHeight, StartTilt, EndTilt"]}, - label='Forehead') - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [jaw_muscle, lip, forehead]}) - - symmetric_clone_1 = nw.new_node(nodegroup_symmetric_clone().name, - input_kwargs={'Geometry': join_geometry_1}) - - join_geometry_2 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [difference.outputs["Mesh"], symmetric_clone_1.outputs["Both"]]}) - - subdivide_curve = nw.new_node(Nodes.SubdivideCurve, - input_kwargs={'Curve': curve_line_1, 'Cuts': 10}) - - transform_3 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': mammaleye.outputs["Eyeballl"], 'Translation': multiply_add.outputs["Vector"]}) - - symmetric_clone_2 = nw.new_node(nodegroup_symmetric_clone().name, - input_kwargs={'Geometry': transform_3}) - - transform_5 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': mammaleye.outputs["BodyExtra_Lid"], 'Translation': multiply_add.outputs["Vector"]}) - - symmetric_clone_3 = nw.new_node(nodegroup_symmetric_clone().name, - input_kwargs={'Geometry': transform_5}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': join_geometry_2, 'Skeleton Curve': subdivide_curve, 'Base Mesh': union.outputs["Mesh"], 'Eyeball_Left': symmetric_clone_2.outputs["Orig"], 'Eyeball_Right': symmetric_clone_2.outputs["Inverted"], 'BodyExtra_Lid': symmetric_clone_3.outputs["Both"]}) - -@node_utils.to_nodegroup('nodegroup_neck', singleton=True, type='GeometryNodeTree') -def nodegroup_neck(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'length_rad1_rad2', (1.0, 0.5, 0.3)), - ('NodeSocketVector', 'angles_deg', (0.0, 3.2, -18.11)), - ('NodeSocketVector', 'Muscle StartRad, EndRad, Fullness', (0.17, 0.17, 2.5)), - ('NodeSocketVector', 'ProfileHeight, StartTilt, EndTilt', (0.5, 0.0, 66.0)), - ('NodeSocketFloat', 'fullness', 5.0), - ('NodeSocketFloat', 'aspect', 1.0)]) - - simple_tube_v2 = nw.new_node(nodegroup_simple_tube_v2().name, - input_kwargs={'length_rad1_rad2': group_input.outputs["length_rad1_rad2"], 'angles_deg': group_input.outputs["angles_deg"], 'aspect': group_input.outputs["aspect"], 'fullness': group_input.outputs["fullness"]}) - - rear_top = nw.new_node(nodegroup_surface_muscle().name, - input_kwargs={'Skin Mesh': simple_tube_v2.outputs["Geometry"], 'Skeleton Curve': simple_tube_v2.outputs["Skeleton Curve"], 'Coord 0': (0.1, 0.0, 0.9), 'Coord 1': (0.48, -0.77, 1.0), 'Coord 2': (0.87, -1.5708, 0.8), 'StartRad, EndRad, Fullness': group_input.outputs["Muscle StartRad, EndRad, Fullness"], 'ProfileHeight, StartTilt, EndTilt': group_input.outputs["ProfileHeight, StartTilt, EndTilt"]}, - label='Rear Top') - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': rear_top}) - - symmetric_clone = nw.new_node(nodegroup_symmetric_clone().name, - input_kwargs={'Geometry': join_geometry_1}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [symmetric_clone.outputs["Both"], simple_tube_v2.outputs["Geometry"]]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': join_geometry, 
'Skeleton Curve': simple_tube_v2.outputs["Skeleton Curve"], 'Base Mesh': simple_tube_v2.outputs["Geometry"]}) - -class Neck(PartFactory): - - tags = ['neck'] - - def sample_params(self): - return { - 'length_rad1_rad2': np.array((0.65, 0.35, 0.16)) * N(1, (0.2, 0, 0), 3), - 'angles_deg': np.array((0.0, 3.2, -18.11)) * N(1, 0.2, 3), - 'Muscle StartRad, EndRad, Fullness': (0.17, 0.17, 2.5), - 'ProfileHeight, StartTilt, EndTilt': (0.5, 0.0, 66.0), - 'fullness': 5.0, - 'aspect': 1.0 * N(1, 0.05) - } - - def make_part(self, params): - part = part_util.nodegroup_to_part(nodegroup_neck, params) - part.joints = { - i: Joint(rest=(0,0,0), bounds=np.array([[-30, 0, -30], [30, 0, 30]])) - for i in np.linspace(0, 1, 4, endpoint=True) - } - tag_object(part.obj, 'neck') - return part - -class CarnivoreHead(PartFactory): - - tags = ['head'] - - def sample_params(self): - params = { - 'length_rad1_rad2': np.array((0.36, 0.20, 0.18)) * N(1, 0.2, 3), - 'snout_length_rad1_rad2': np.array((0.22, 0.15, 0.15)) * N(1, 0.2, 3), - 'aspect': N(1, 0.2), - } - - muscle_params = { - 'Nose Bridge Scale': (1.0, 0.35, 0.9), - 'Jaw Muscle Middle Coord': (0.24, 0.41, 1.3), - 'Jaw StartRad, EndRad, Fullness': (0.06, 0.11, 1.5), - 'Jaw ProfileHeight, StartTilt, EndTilt': (0.8, 33.1, 0.0), - 'Lip Muscle Middle Coord': (0.95, 0.0, 1.5), - 'Lip StartRad, EndRad, Fullness': (0.05, 0.09, 1.48), - 'Lip ProfileHeight, StartTilt, EndTilt': (0.8, 0.0, -17.2), - 'Forehead Muscle Middle Coord': (0.7, -1.32, 1.31), - 'Forehead StartRad, EndRad, Fullness': (0.06, 0.05, 2.5), - 'Forehead ProfileHeight, StartTilt, EndTilt': (0.3, 60.6, 66.0) - } - - for k, v in muscle_params.items(): - v = np.array(v) - v *= N(1, 0.05, len(v)) - params[k] = v - - params.update(muscle_params) - params['EyeRad'] = 0.023 * N(1, 0.3) - params['EyeOffset'] = np.array((-0.25, 0.45, 0.3)) + N(0, (0, 0.02, 0.03)) - - return params - - def make_part(self, params): - part = part_util.nodegroup_to_part(nodegroup_carnivore_head, params) - part.iks = {1.0: IKParams('head', rotation_weight=0.1, chain_length=1)} - part.settings['rig_extras'] = True - tag_object(part.obj, 'carnivore_head') - return part - -class CarnivoreJaw(PartFactory): - - tags = ['head', 'jaw'] - - def sample_params(self): - return { - 'length_rad1_rad2': np.array((0.4, 0.12, 0.08)) * N(1, 0.1, 3), - 'Width Shaping': 1.0 * clip_gaussian(1, 0.1, 0.5, 1), - 'Canine Length': 0.05 * N(1, 0.2), - 'Incisor Size': 0.01 * N(1, 0.2), - 'Tooth Crookedness': 1.2 * N(1, 0.3), - 'Tongue Shaping': 1 * clip_gaussian(1, 0.1, 0.5, 1), - 'Tongue X Scale': 0.9 * clip_gaussian(1, 0.1, 0.5, 1) - } - - def make_part(self, params): - part = part_util.nodegroup_to_part(nodegroup_carnivore_jaw, params) - tag_object(part.obj, 'carnivore_jaw') - return part - -@node_utils.to_nodegroup('nodegroup_flying_bird_head', singleton=True, type='GeometryNodeTree') -def nodegroup_flying_bird_head(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'length_rad1_rad2', (0.34999999999999998, 0.11, 0.17000000000000001)), - ('NodeSocketVector', 'angles_deg', (0.0, -24.0, -20.0)), - ('NodeSocketVector', 'eye_coord', (0.5, 0.0, 1.0)), - ('NodeSocketFloatDistance', 'Radius', 0.040000000000000001)]) - - simple_tube_v2 = nw.new_node(nodegroup_simple_tube_v2().name, - input_kwargs={'length_rad1_rad2': group_input.outputs["length_rad1_rad2"], 'angles_deg': group_input.outputs["angles_deg"], 'aspect': N(0.9, 0.05), 'fullness': 0.9, 
'Origin': (-0.13, 0.0, 0.1)}) - - simple_tube_v2_1 = nw.new_node(nodegroup_simple_tube_v2().name, - input_kwargs={'length_rad1_rad2': group_input.outputs["length_rad1_rad2"], 'angles_deg': group_input.outputs["angles_deg"], 'aspect': 1.1899999999999999, 'fullness': 2.25, 'Origin': (-0.13, 0.0, 0.1-0.040000000000000001)}) - - union = nw.new_node(Nodes.MeshBoolean, - input_kwargs={'Mesh 2': [simple_tube_v2.outputs["Geometry"], simple_tube_v2_1.outputs["Geometry"]]}, - attrs={'operation': 'UNION'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': simple_tube_v2.outputs["Geometry"], 'Skeleton Curve': simple_tube_v2.outputs["Skeleton Curve"]}) - -@node_utils.to_nodegroup('nodegroup_bird_head', singleton=True, type='GeometryNodeTree') -def nodegroup_bird_head(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'length_rad1_rad2', (0.34999999999999998, 0.11, 0.17000000000000001)), - ('NodeSocketVector', 'angles_deg', (0.0, -24.0, -20.0)), - ('NodeSocketVector', 'eye_coord', (0.5, 0.0, 1.0)), - ('NodeSocketFloatDistance', 'Radius', 0.040000000000000001)]) - - simple_tube_v2 = nw.new_node(nodegroup_simple_tube_v2().name, - input_kwargs={'length_rad1_rad2': group_input.outputs["length_rad1_rad2"], 'angles_deg': group_input.outputs["angles_deg"], 'aspect': 0.85999999999999999, 'fullness': 1.7, 'Origin': (-0.13, 0.0, 0.1)}) - - simple_tube_v2_1 = nw.new_node(nodegroup_simple_tube_v2().name, - input_kwargs={'length_rad1_rad2': group_input.outputs["length_rad1_rad2"], 'angles_deg': group_input.outputs["angles_deg"], 'aspect': 1.1899999999999999, 'fullness': 2.25, 'Origin': (-0.13, 0.0, 0.1-0.040000000000000001)}) - - union = nw.new_node(Nodes.MeshBoolean, - input_kwargs={'Mesh 2': [simple_tube_v2.outputs["Geometry"], simple_tube_v2_1.outputs["Geometry"]]}, - attrs={'operation': 'UNION'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': union.outputs["Mesh"], 'Skeleton Curve': simple_tube_v2.outputs["Skeleton Curve"]}) - -class BirdHead(PartFactory): - - tags = ['head'] - - def sample_params(self): - return { - 'length_rad1_rad2': np.array((0.35, 0.11, 0.13)) * N(1, 0.05) * N(1, 0.1, 3), - 'angles_deg': N(0, 5, 3), - 'eye_coord': np.array((0.65, -0.32, 0.95)) * N(1, (0.1, 0.2, 0), 3), - 'Radius': 0.025 * N(1, 0.05) - } - - def make_part(self, params): - part = part_util.nodegroup_to_part(nodegroup_bird_head, params) - part.iks = {1.0: IKParams('head', rotation_weight=0.1, chain_parts=2)} - part.settings['rig_extras'] = True - tag_object(part.obj, 'bird_head') - return part - -class FlyingBirdHead(PartFactory): - - tags = ['head'] - - def sample_params(self): - return { - 'length_rad1_rad2': np.array((0.3, 0.04, 0.12)) * N(1, 0.05, size=(3,)), - 'angles_deg': N(0, 0.1, 3), - 'eye_coord': np.array((0.65, -0.32, 0.95)) * N(1, (0.1, 0.2, 0), 3), - 'Radius': 0.03 * N(1, 0.05) - } - - def make_part(self, params): - part = part_util.nodegroup_to_part(nodegroup_flying_bird_head, params) - part.iks = {1.0: IKParams('head', rotation_weight=0.1, chain_parts=2)} - part.settings['rig_extras'] = True - tag_object(part.obj, 'bird_head') - return part \ No newline at end of file diff --git a/infinigen/assets/creatures/parts/head_detail.py b/infinigen/assets/creatures/parts/head_detail.py deleted file mode 100644 index f1fb163fe..000000000 --- a/infinigen/assets/creatures/parts/head_detail.py +++ /dev/null @@ -1,203 +0,0 @@ -# Copyright (c) Princeton 
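Editorial note, not part of the patch: in `nodegroup_carnivore_head` above, the eye cutter/eyeball placement comes from a `MULTIPLY_ADD` node (`EyeOffset * rad2 + head endpoint`) followed by a symmetric clone. The numpy sketch below restates that arithmetic; the endpoint value is made up, and the Y-axis mirror is an assumption about what the symmetric clone does.

```python
# Numpy sketch (not part of the patch) of the eye placement in nodegroup_carnivore_head:
# MULTIPLY_ADD computes EyeOffset * rad2 + head_endpoint, then the symmetric clone
# mirrors the result to the other side (assumed here to be a mirror across Y).
import numpy as np

length_rad1_rad2 = np.array((0.36, 0.20, 0.18))
head_endpoint = np.array((0.36, 0.0, 0.02))        # made-up endpoint of the head tube
eye_offset = np.array((-0.25, 0.45, 0.3))          # CarnivoreHead.sample_params base value

rad2 = length_rad1_rad2[2]
eye_left = eye_offset * rad2 + head_endpoint       # MULTIPLY_ADD: offset scaled by end radius
eye_right = eye_left * np.array((1.0, -1.0, 1.0))  # assumed mirror applied by the symmetric clone
print(eye_left, eye_right)
```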
University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Alexander Raistrick - - -import bpy - -from platform import node -import numpy as np -from numpy.random import normal as N, uniform as U - -from infinigen.assets.creatures.util.creature import PartFactory -from infinigen.assets.creatures.util.genome import Joint, IKParams -from infinigen.assets.creatures.util.part_util import nodegroup_to_part - -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.assets.creatures.util.nodegroups.curve import nodegroup_polar_bezier, nodegroup_simple_tube_v2 -from infinigen.assets.creatures.util.nodegroups.attach import nodegroup_surface_muscle -from infinigen.assets.creatures.util.nodegroups.geometry import nodegroup_solidify, nodegroup_symmetric_clone, nodegroup_taper -from infinigen.core.util.math import clip_gaussian -from infinigen.core.util import blender as butil -from infinigen.core.tagging import tag_object, tag_nodegroup - -@node_utils.to_nodegroup('nodegroup_cat_ear', singleton=False, type='GeometryNodeTree') -def nodegroup_cat_ear(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'length_rad1_rad2', (0.0, 0.0, 0.0)), - ('NodeSocketFloat', 'Depth', 0.0), - ('NodeSocketFloatDistance', 'Thickness', 0.0), - ('NodeSocketFloatDistance', 'Curl Deg', 0.0)]) - - multiply = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["Curl Deg"], 1: (-1.0, 1.0, 1.0)}, - attrs={'operation': 'MULTIPLY'}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': group_input.outputs["length_rad1_rad2"]}) - - divide = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz.outputs["X"], 1: 3.0}, - attrs={'operation': 'DIVIDE'}) - - polarbezier = nw.new_node(nodegroup_polar_bezier().name, - input_kwargs={'Origin': (-0.07, 0.0, 0.0), 'angles_deg': multiply.outputs["Vector"], 'Seg Lengths': divide}) - - spline_parameter = nw.new_node(Nodes.SplineParameter) - - float_curve = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': spline_parameter.outputs["Factor"]}) - node_utils.assign_curve(float_curve.mapping.curves[0], [(0.0, 0.0), (0.3236, 0.98), (0.7462, 0.63), (1.0, 0.0)]) - - set_curve_radius = nw.new_node(Nodes.SetCurveRadius, - input_kwargs={'Curve': polarbezier.outputs["Curve"], 'Radius': float_curve}) - - set_curve_tilt = nw.new_node(Nodes.SetCurveTilt, - input_kwargs={'Curve': set_curve_radius}) - - multiply_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: separate_xyz.outputs["Y"], 1: (-0.5, 0.0, 0.0)}, - attrs={'operation': 'MULTIPLY'}) - - multiply_2 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["Depth"], 1: (0.0, -1.0, 0.0)}, - attrs={'operation': 'MULTIPLY'}) - - multiply_3 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: separate_xyz.outputs["Y"], 1: (0.5, 0.0, 0.0)}, - attrs={'operation': 'MULTIPLY'}) - - quadratic_bezier = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Start': multiply_1.outputs["Vector"], 'Middle': multiply_2.outputs["Vector"], 'End': multiply_3.outputs["Vector"]}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': set_curve_tilt, 'Profile Curve': quadratic_bezier}) - - solidify = nw.new_node(nodegroup_solidify().name, - input_kwargs={'Mesh': curve_to_mesh, 'Distance': 
group_input.outputs["Thickness"]}) - - merge_by_distance = nw.new_node(Nodes.MergeByDistance, - input_kwargs={'Geometry': solidify, 'Distance': 0.005}) - - subdivision_surface = nw.new_node(Nodes.SubdivisionSurface, - input_kwargs={'Mesh': merge_by_distance}) - - set_shade_smooth = nw.new_node(Nodes.SetShadeSmooth, - input_kwargs={'Geometry': subdivision_surface, 'Shade Smooth': False}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_shade_smooth}) - -class CatEar(PartFactory): - - tags = ['head_detail'] - - def sample_params(self): - size = clip_gaussian(1, 0.1, 0.2, 5) - return { - 'length_rad1_rad2': np.array((0.25, 0.1, 0.0)) * N(1, (0.1, 0.05, 0.05)), - 'Depth': 0.06 * N(1, 0.1), - 'Thickness': 0.01, - 'Curl Deg': 49.0 * N(1, 0.2) - } - - def make_part(self, params): - part = nodegroup_to_part(nodegroup_cat_ear, params) - tag_object(part.obj, 'cat_ear') - return part - -@node_utils.to_nodegroup('nodegroup_cat_nose', singleton=False, type='GeometryNodeTree') -def nodegroup_cat_nose(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloatDistance', 'Nose Radius', 0.06), - ('NodeSocketFloatDistance', 'Nostril Size', 0.025), - ('NodeSocketFloatFactor', 'Crease', 0.008), - ('NodeSocketVectorXYZ', 'Scale', (1.2, 1.0, 1.0))]) - - cube = nw.new_node(Nodes.MeshCube, - input_kwargs={'Size': group_input.outputs["Nose Radius"]}) - - subdivision_surface = nw.new_node(Nodes.SubdivisionSurface, - input_kwargs={'Mesh': cube, 'Level': 4, 'Edge Crease': group_input.outputs["Crease"]}) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': subdivision_surface, 'Scale': group_input.outputs["Scale"]}) - - uv_sphere = nw.new_node(Nodes.MeshUVSphere, - input_kwargs={'Radius': group_input.outputs["Nostril Size"]}) - - transform_1 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': uv_sphere, 'Translation': (0.04, 0.025, 0.015), 'Rotation': (0.5643, 0.0, 0.0), 'Scale': (1.0, 0.87, 0.31)}) - - symmetric_clone = nw.new_node(nodegroup_symmetric_clone().name, - input_kwargs={'Geometry': transform_1}) - - difference = nw.new_node(Nodes.MeshBoolean, - input_kwargs={'Mesh 1': transform, 'Mesh 2': symmetric_clone.outputs["Both"], 'Self Intersection': True}) - - taper = nw.new_node(nodegroup_taper().name, - input_kwargs={'Geometry': difference}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': taper}) - -class CatNose(PartFactory): - - tags = ['head_detail'] - - def sample_params(self): - size_mult = N(0.7, 0.05) - return { - 'Nose Radius': 0.11 * size_mult, - 'Nostril Size': 0.03 * size_mult * N(1, 0.1), - 'Crease': 0.237 * N(1, 0.1) - } - - def make_part(self, params): - part = nodegroup_to_part(nodegroup_cat_nose, params) - nose = part.obj - nose.name = 'Nose' - part.obj = butil.spawn_vert('nose_parent') - nose.parent = part.obj - tag_object(part.obj, 'cat_nose') - return part - -@node_utils.to_nodegroup('nodegroup_mandible', singleton=False, type='GeometryNodeTree') -def nodegroup_mandible(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'length_rad1_rad2', (1.1, 0.1, 0.02)), - ('NodeSocketVector', 'angles_deg', (-4.4, 58.22, 77.96)), - ('NodeSocketFloat', 'aspect', 0.52)]) - - simple_tube_v2 = nw.new_node(nodegroup_simple_tube_v2().name, - input_kwargs={'length_rad1_rad2': 
group_input.outputs["length_rad1_rad2"], 'angles_deg': group_input.outputs["angles_deg"], 'aspect': group_input.outputs["aspect"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': simple_tube_v2.outputs["Geometry"], 'Skeleton Curve': simple_tube_v2.outputs["Skeleton Curve"], 'Endpoint': simple_tube_v2.outputs["Endpoint"]}) - -class InsectMandible(PartFactory): - - tags = ['head_detail', 'rigid', 'bald'] - - def sample_params(self): - return { - 'length_rad1_rad2': (1.1 * U(0.2, 1), 0.1 * N(1, 0.2), 0.02 * N(1, 0.1) ), - 'angles_deg': np.array((-4.4, 58.22, 77.96)) * N(1, 0.2, 3), - 'aspect': U(0.3, 1) - } - - def make_part(self, params): - part = nodegroup_to_part(nodegroup_mandible, params) - part.joints = { - 0.4: Joint(rest=(0,0,0), bounds=np.zeros((2, 3))) - } - tag_object(part.obj, 'insect_mandible') - return part \ No newline at end of file diff --git a/infinigen/assets/creatures/parts/hoof.py b/infinigen/assets/creatures/parts/hoof.py deleted file mode 100644 index 9662c085c..000000000 --- a/infinigen/assets/creatures/parts/hoof.py +++ /dev/null @@ -1,208 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Hongyu Wen - - -import bpy -import bmesh -import mathutils - -import numpy as np -from math import sin, cos, pi, exp -from numpy.random import uniform as U, normal as N - -from infinigen.assets.creatures.util.creature import PartFactory, Part -from infinigen.assets.creatures.util.genome import Joint, IKParams -from infinigen.assets.creatures.util import part_util -from infinigen.core.util import blender as butil - -from infinigen.assets.creatures.util.nodegroups.curve import nodegroup_simple_tube, nodegroup_simple_tube_v2 -from infinigen.assets.creatures.util.nodegroups.attach import nodegroup_surface_muscle -from infinigen.assets.creatures.util.part_util import nodegroup_to_part - -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils - -from infinigen.assets.creatures.util.geometry import nurbs as nurbs_util -from infinigen.core.tagging import tag_object, tag_nodegroup - -def square(x): - return x * x - -def tri(x): - return x ** 3 - -class Hoof(): - def __init__(self, **kwargs): - self.__dict__.update(kwargs) - - def scale(self, p): - return 1 - 0.2 * p - - def transform(self, p): - return -0.6 * p - - def down(self, p, theta): - return 0.4 * p * cos(theta) - - def get_shape(self): - points = [] - r = self.r - N = lambda m, v: np.random.normal(m, v) - for i in range(self.m): - theta = 2 * pi * i / (self.m) - nx = N(0, 0.01) - ny = N(0, 0.01) - if i >= self.m - r or i <= r: - points.append((-0.2 * cos(theta) + nx, 0.05 * sin(theta) + ny)) - elif i >= self.m - 2 * r or i <= 2 * r: - points.append((cos(theta) + nx, 0.2 * sin(theta) + ny)) - # elif i >= self.m - 4 * r or i <= 4 * r: - # points.append((cos(theta) + nx, 0.6 * sin(theta) + ny)) - else: - points.append((cos(theta) + nx, sin(theta) + ny)) - return points - - def make_face(self, obj): - bm = bmesh.new() - for v in obj.data.vertices: - x, y, z = obj.matrix_world @ v.co - if z == 0: - bm.verts.new((x, y, z)) - bm.faces.new(bm.verts) - bm.normal_update() - bm.from_mesh(obj.data) - butil.delete(obj) - - me = bpy.data.meshes.new("face") - bm.to_mesh(me) - # add bmesh to scene - ob = bpy.data.objects.new("face", me) - bpy.context.scene.collection.objects.link(ob) - return ob - - def generate(self): - 
self.n = int(self.n) - self.m = int(self.m) - - points = self.get_shape() - ctrls = np.zeros((self.n, self.m, 3)) - for i in range(self.n): - for j in range(self.m): - p = i / (self.n - 1) - theta = 2 * pi * j / (self.m) - ctrls[i][j][0] = self.scale(p) * points[j][0] + self.transform(p) - ctrls[i][j][1] = self.scale(p) * points[j][1] # + self.transform(p) - ctrls[i][j][2] = p + self.down(p, theta) - ctrls[i][j][0] *= self.sx - ctrls[i][j][1] *= self.sy - ctrls[i][j][2] *= self.sz - - method = 'blender' if False else 'geomdl' - - obj = nurbs_util.nurbs(ctrls, method, face_size=0.01) - obj = self.make_face(obj) - - top_pos = mathutils.Vector(ctrls[-1].mean(axis=0)) - with butil.CursorLocation(top_pos), butil.SelectObjects(obj): - bpy.ops.object.origin_set(type='ORIGIN_CURSOR') - obj.location = (0,0,0) - - obj.rotation_euler.y -= np.pi / 2 - butil.apply_transform(obj, rot=True) - tag_object(obj, 'hoof') - - return obj - -class HoofClaw(PartFactory): - - param_templates = {} - tags = ['head_detail', 'rigid'] - - def sample_params(self, select=None, var=1): - params = { - 'n': 20, - 'm': 20, - 'sx': 0.1 * N(1, 0.05), - 'sy': 0.1 * N(1, 0.05), - 'sz': 0.08 * N(1, 0.05), - 'r': 0.5 + N(0, 1) - } - return params - - def make_part(self, params): - obj = butil.spawn_vert('hoofclaw_parent_temp') - - hoof = Hoof(**params).generate() - hoof.parent = obj - hoof.name = 'HoofClaw' - - part = Part(skeleton=np.zeros((1, 3)), obj=obj, joints={}, iks={}) - tag_object(part.obj, 'hoof_claw') - return part - -@node_utils.to_nodegroup('nodegroup_hoof', singleton=False, type='GeometryNodeTree') -def nodegroup_hoof(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'length_rad1_rad2', (1.4299999999999999, 0.10000000000000001, 0.10000000000000001)), - ('NodeSocketVector', 'angles_deg', (-20.0, 16.0, 9.1999999999999993)), - ('NodeSocketFloat', 'aspect', 1.0), - ('NodeSocketVector', 'Upper Rad1 Rad2 Fullness', (0.22, 0.0, 0.0)), - ('NodeSocketVector', 'Lower Rad1 Rad2 Fullness', (0.0, 0.0, 0.0)), - ('NodeSocketVector', 'Height, Tilt1, Tilt2', (0.73999999999999999, 0.0, 0.0))]) - - simple_tube_v2_001 = nw.new_node(nodegroup_simple_tube_v2().name, - input_kwargs={'length_rad1_rad2': group_input.outputs["length_rad1_rad2"], 'angles_deg': group_input.outputs["angles_deg"], 'aspect': group_input.outputs["aspect"], 'fullness': 2.5}) - - shoulder = nw.new_node(nodegroup_surface_muscle().name, - input_kwargs={'Skin Mesh': simple_tube_v2_001.outputs["Geometry"], 'Skeleton Curve': simple_tube_v2_001.outputs["Skeleton Curve"], 'Coord 0': (0.0, 0.0, 0.0), 'Coord 1': (0.20000000000000001, 0.0, 0.0), 'Coord 2': (0.55000000000000004, 0.0, 0.0), 'StartRad, EndRad, Fullness': group_input.outputs["Lower Rad1 Rad2 Fullness"], 'ProfileHeight, StartTilt, EndTilt': group_input.outputs["Height, Tilt1, Tilt2"]}, - label='Shoulder') - - shoulder_1 = nw.new_node(nodegroup_surface_muscle().name, - input_kwargs={'Skin Mesh': simple_tube_v2_001.outputs["Geometry"], 'Skeleton Curve': simple_tube_v2_001.outputs["Skeleton Curve"], 'Coord 0': (1.0, 0.0, 0.0), 'Coord 1': (0.20000000000000001, 0.0, 0.0), 'Coord 2': (0.80000000000000004, 0.0, 0.0), 'StartRad, EndRad, Fullness': group_input.outputs["Upper Rad1 Rad2 Fullness"], 'ProfileHeight, StartTilt, EndTilt': group_input.outputs["Height, Tilt1, Tilt2"]}, - label='Shoulder') - - join_geometry = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [shoulder, 
simple_tube_v2_001.outputs["Geometry"], shoulder_1]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': join_geometry, 'Skeleton Curve': simple_tube_v2_001.outputs["Skeleton Curve"]}) - - -class HoofAnkle(PartFactory): - - tags = ['foot_detail', 'rigid'] - ankle_scale = (0.8, 0.8, 0.8) - - def sample_params(self, var=1): - ankle = { - 'length_rad1_rad2': (0.45 * N(1, 0.05), 0.07 * N(1, 0.05), 0.1 * N(1, 0.05)), - 'angles_deg': (-90.0 + N(0, 5), 40.0 + N(0, 5), N(0, 5)), - 'aspect': 1.0, - 'Upper Rad1 Rad2 Fullness': (0.2, 0.0, 4), - 'Lower Rad1 Rad2 Fullness': (0.15, 0.0, 4), - 'Height, Tilt1, Tilt2': (1, 0.0, 0.0) - } - return ankle - - def make_part(self, params): - obj = butil.spawn_vert('hoof_parent_temp') - - part = nodegroup_to_part(nodegroup_hoof, params) - ankle = part.obj - with butil.SelectObjects(ankle): - bpy.ops.object.shade_flat() - butil.modify_mesh(ankle, 'SUBSURF', apply=True, levels=2) - ankle.parent = obj - ankle.name = "HoofAnkle" - - ankle.scale = self.ankle_scale - butil.apply_transform(ankle, scale=True) - tag_object(part.obj, 'hoof_ankle') - - part.iks = {1.0: IKParams('foot', rotation_weight=0.1, chain_parts=2, chain_length=-1)} - - return part \ No newline at end of file diff --git a/infinigen/assets/creatures/parts/horn.py b/infinigen/assets/creatures/parts/horn.py deleted file mode 100644 index 271d76e6b..000000000 --- a/infinigen/assets/creatures/parts/horn.py +++ /dev/null @@ -1,274 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Hongyu Wen -# Acknowledgement: This file draws inspiration from https://www.youtube.com/watch?v=5BXvwqVyCQw by Artisans of Vaul - - -from re import M -import bpy -import math - -from platform import node -import numpy as np -from numpy.random import normal as N, uniform as U - -from infinigen.assets.creatures.util.creature import PartFactory -from infinigen.assets.creatures.util.genome import Joint, IKParams -from infinigen.assets.creatures.util.part_util import nodegroup_to_part -from infinigen.assets.creatures.util import part_util - -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util import blender as butil -from infinigen.core.tagging import tag_object, tag_nodegroup - -@node_utils.to_nodegroup('nodegroup_noise', singleton=False, type='GeometryNodeTree') -def nodegroup_noise(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketFloat', 'Scale', 0.05), - ('NodeSocketFloat', 'W', 0.0)]) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'W': group_input.outputs["W"], 'Roughness': 0.0}, - attrs={'noise_dimensions': '4D'}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: noise_texture.outputs["Color"]}, - attrs={'operation': 'SUBTRACT'}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: subtract, 1: group_input.outputs["Scale"]}, - attrs={'operation': 'MULTIPLY'}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 'Offset': multiply}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_position}) - -@node_utils.to_nodegroup('nodegroup_ridge', singleton=False, type='GeometryNodeTree') -def nodegroup_ridge(nw: 
NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'thickness', 4.0), - ('NodeSocketFloat', 'depth_of_ridge', 0.2), - ('NodeSocketInt', 'number_of_ridge', 150), - ('NodeSocketGeometry', 'geometry', None)]) - - resample_curve = nw.new_node(Nodes.ResampleCurve, - input_kwargs={'Curve': group_input.outputs["geometry"], 'Count': group_input.outputs["number_of_ridge"]}) - - spline_parameter = nw.new_node(Nodes.SplineParameter) - - float_curve = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': spline_parameter.outputs["Factor"]}) - node_utils.assign_curve(float_curve.mapping.curves[0], [(0.0, 1.0), (0.2, 0.9), (0.3705, 0.7406), (0.55, 0.5938), (0.6886, 0.4188), (0.85, 0.1844), (1.0, 0.0)]) - - modulo = nw.new_node(Nodes.Math, - input_kwargs={0: spline_parameter.outputs["Index"], 1: 5.0}, - attrs={'operation': 'MODULO'}) - - power = nw.new_node(Nodes.Math, - input_kwargs={0: -1.0, 1: modulo}, - attrs={'operation': 'POWER'}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["depth_of_ridge"], 1: power}, - attrs={'operation': 'MULTIPLY'}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: 1.0, 1: multiply}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: float_curve, 1: add}, - attrs={'operation': 'MULTIPLY'}) - - noise_texture = nw.new_node(Nodes.NoiseTexture) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: noise_texture.outputs["Color"]}, - attrs={'operation': 'SUBTRACT'}) - - multiply_2 = nw.new_node(Nodes.Math, - input_kwargs={0: subtract, 1: group_input.outputs["depth_of_ridge"]}, - attrs={'operation': 'MULTIPLY'}) - - add_1 = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_1, 1: multiply_2}) - - multiply_3 = nw.new_node(Nodes.Math, - input_kwargs={0: add_1, 1: group_input.outputs["thickness"]}, - attrs={'operation': 'MULTIPLY'}) - - set_curve_radius = nw.new_node(Nodes.SetCurveRadius, - input_kwargs={'Curve': resample_curve, 'Radius': multiply_3}) - - noise = nw.new_node(nodegroup_noise().name, - input_kwargs={'Geometry': set_curve_radius, 'Scale': 0.02}, - label='Noise') - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': noise}) - -@node_utils.to_nodegroup('nodegroup_horn', singleton=False, type='GeometryNodeTree') -def nodegroup_horn(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[ - ('NodeSocketFloat', 'length', 0.0), - ('NodeSocketFloat', 'rad1', 0.0), - ('NodeSocketFloat', 'rad2', 0.0), - ('NodeSocketFloat', 'thickness', 4.0), - ('NodeSocketFloat', 'density_of_ridge', 0.0), - ('NodeSocketFloat', 'depth_of_ridge', 0.2), - ('NodeSocketFloatDistance', 'height', 2.5), - ('NodeSocketFloat', 'rotation_x', 0)]) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["length"], 1: group_input.outputs["density_of_ridge"]}, - attrs={'operation': 'MULTIPLY'}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["rad1"], 1: group_input.outputs["rad2"]}) - - # divide = nw.new_node(Nodes.Math, - # input_kwargs={0: add, 1: 2.0}, - # attrs={'operation': 'DIVIDE'}) - - divide_1 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["length"], 1: add}, - attrs={'operation': 'DIVIDE'}) - - divide_2 = nw.new_node(Nodes.Math, - input_kwargs={0: divide_1, 1: 3.1415}, - attrs={'operation': 'DIVIDE'}) - - spiral = nw.new_node('GeometryNodeCurveSpiral', - 
input_kwargs={'Resolution': 150, 'Rotations': divide_2, 'Start Radius': group_input.outputs["rad1"], 'End Radius': group_input.outputs["rad2"], 'Height': group_input.outputs["height"]}) - - ridge = nw.new_node(nodegroup_ridge().name, - input_kwargs={'thickness': group_input.outputs["thickness"], 'depth_of_ridge': group_input.outputs["depth_of_ridge"], 'number_of_ridge': multiply, 'geometry': spiral}) - - curve_circle_2 = nw.new_node(Nodes.CurveCircle, - input_kwargs={'Resolution': 10, 'Radius': 0.5}) - - noise = nw.new_node(nodegroup_noise().name, - input_kwargs={'Geometry': curve_circle_2.outputs["Curve"], 'Scale': 0.2}, - label='Noise') - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': ridge, 'Profile Curve': noise}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["rad1"], 1: -1.0}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': multiply_1}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': curve_to_mesh, 'Offset': combine_xyz}) - - transform_1 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': set_position, 'Rotation': (-0.8, 0.0, 2.6)}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': group_input.outputs["rotation_x"]}) - - transform_2 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': transform_1, 'Rotation': combine_xyz_2}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': transform_2}) - - -class Horn(PartFactory): - param_templates = {} - tags = ['head_detail', 'rigid'] - - def sample_params(self, select=None, var=1): - N = lambda m, v: np.random.normal(m, v * var) - U = lambda l, r: np.random.uniform(l, r) - weights = part_util.random_convex_coord(self.param_templates.keys(), select=select) - params = part_util.rdict_comb(self.param_templates, weights) - - for key in params['horn']: - if key in params['range']: - l, r = params['range'][key] - noise = N(0, 0.02 * (r - l)) - params['horn'][key] += noise - return params['horn'] - - def make_part(self, params): - part = nodegroup_to_part(nodegroup_horn, params) - horn = part.obj - - # postprocess horn - with butil.SelectObjects(horn): - bpy.ops.object.shade_flat() - horn.name = 'Horn' - butil.modify_mesh(horn, 'SUBSURF', apply=True, levels=2) - - # swap the horn to be an extra so it doesnt get remeshed etc - part.obj = butil.spawn_vert('horn_parent') - horn.parent = part.obj - tag_object(part.obj, 'horn') - - return part - -goat_horn = { - 'length': 0.5, - 'rad1': 0.18, - 'rad2': 0.3, - 'thickness': 0.15, - 'density_of_ridge': 250, - 'depth_of_ridge': 0.02, - 'height': 0.1, - 'rotation_x': 0, -} - -gazelle_horn = { - 'length': 0.4, - 'rad1': 0.7, - 'rad2': 0.5, - 'thickness': 0.1, - 'density_of_ridge': 150, - 'depth_of_ridge': 0.1, - 'height': 0.1, - 'rotation_x': 0, -} - -bull_horn = { - 'length': 0.1, - 'rad1': 0.5, - 'rad2': 0.1, - 'thickness': 0.1, - 'density_of_ridge': 150, - 'depth_of_ridge': 0.01, - 'height': -0.1, - 'rotation_x': -1 -} - -scales = { - 'length': [0.1, 0.6], - 'rad1': [0.1, 1], - 'rad2': [0.1, 1], - 'thickness': [0.05, 0.3], - 'density_of_ridge': [100, 300], - 'depth_of_ridge': [0.01, 0.1], - 'height': [-0.3, 0.3], - 'rotation_x': [-1, 1] -} - -for k, v in scales.items(): - scales[k] = np.array(v) - -Horn.param_templates['bull'] = {'horn': bull_horn, 'range': scales} -Horn.param_templates['gazelle'] = {'horn': gazelle_horn, 'range': scales} -Horn.param_templates['goat'] = {'horn': goat_horn, 'range': 
scales} diff --git a/infinigen/assets/creatures/parts/leg.py b/infinigen/assets/creatures/parts/leg.py deleted file mode 100644 index afe40be65..000000000 --- a/infinigen/assets/creatures/parts/leg.py +++ /dev/null @@ -1,274 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Alexander Raistrick - - -from itertools import chain -import bpy - -import numpy as np -from numpy.random import uniform as U, normal as N - -from infinigen.core.util.math import clip_gaussian - -from infinigen.assets.creatures.util.genome import Joint, IKParams - -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.assets.creatures.util.nodegroups.curve import nodegroup_simple_tube, nodegroup_simple_tube_v2 -from infinigen.assets.creatures.util.nodegroups.attach import nodegroup_surface_muscle - -from infinigen.assets.creatures.util.creature import PartFactory -from infinigen.assets.creatures.util.part_util import nodegroup_to_part -from infinigen.core.tagging import tag_object, tag_nodegroup - -@node_utils.to_nodegroup('nodegroup_quadruped_back_leg', singleton=False, type='GeometryNodeTree') -def nodegroup_quadruped_back_leg(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'length_rad1_rad2', (1.8, 0.1, 0.05)), - ('NodeSocketVector', 'angles_deg', (30.0, -100.0, 81.0)), - ('NodeSocketVector', 'Thigh Rad1 Rad2 Fullness', (0.0, 0.0, 0.0)), - ('NodeSocketVector', 'Calf Rad1 Rad2 Fullness', (0.0, 0.0, 0.0)), - ('NodeSocketVector', 'Thigh Height Tilt1 Tilt2', (0.6, 0.0, 0.0)), - ('NodeSocketVector', 'Calf Height Tilt1 Tilt2', (0.8, 0.0, 0.0)), - ('NodeSocketFloat', 'fullness', 50.0), - ('NodeSocketFloat', 'aspect', 1.0)]) - - simple_tube_v2 = nw.new_node(nodegroup_simple_tube_v2().name, - input_kwargs={'length_rad1_rad2': group_input.outputs["length_rad1_rad2"], 'angles_deg': group_input.outputs["angles_deg"], 'aspect': group_input.outputs["aspect"], 'fullness': group_input.outputs["fullness"], 'Origin': (-0.05, 0.0, 0.0)}) - - thigh = nw.new_node(nodegroup_surface_muscle().name, - input_kwargs={'Skin Mesh': simple_tube_v2.outputs["Geometry"], 'Skeleton Curve': simple_tube_v2.outputs["Skeleton Curve"], 'Coord 0': (0.02, 3.1416, 3.0), 'Coord 1': (0.1, -0.14, 1.47), 'Coord 2': (0.73, 4.71, 1.13), 'StartRad, EndRad, Fullness': group_input.outputs["Thigh Rad1 Rad2 Fullness"], 'ProfileHeight, StartTilt, EndTilt': group_input.outputs["Thigh Height Tilt1 Tilt2"]}, - label='Thigh') - - calf = nw.new_node(nodegroup_surface_muscle().name, - input_kwargs={'Skin Mesh': simple_tube_v2.outputs["Geometry"], 'Skeleton Curve': simple_tube_v2.outputs["Skeleton Curve"], 'Coord 0': (0.51, 18.91, 0.4), 'Coord 1': (0.69, 0.26, 0.0), 'Coord 2': (0.94, 1.5708, 1.13), 'StartRad, EndRad, Fullness': group_input.outputs["Calf Rad1 Rad2 Fullness"], 'ProfileHeight, StartTilt, EndTilt': group_input.outputs["Calf Height Tilt1 Tilt2"]}, - label='Calf') - - thigh_2 = nw.new_node(nodegroup_surface_muscle().name, - input_kwargs={'Skin Mesh': simple_tube_v2.outputs["Geometry"], 'Skeleton Curve': simple_tube_v2.outputs["Skeleton Curve"], 'Coord 0': (0.04, 3.1416, 0.0), 'Coord 1': (0.01, 3.46, -0.05), 'Coord 2': (0.73, 4.71, 0.9), 'StartRad, EndRad, Fullness': group_input.outputs["Thigh Rad1 Rad2 Fullness"], 'ProfileHeight, 
StartTilt, EndTilt': group_input.outputs["Thigh Height Tilt1 Tilt2"]}, - label='Thigh 2') - - join_geometry = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [thigh, calf, thigh_2]}) - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [join_geometry, simple_tube_v2.outputs["Geometry"]]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': join_geometry_1, 'Skeleton Curve': simple_tube_v2.outputs["Skeleton Curve"]}) - -class QuadrupedBackLeg(PartFactory): - - tags = ['leg'] - - def sample_params(self): - return { - 'length_rad1_rad2': np.array((1.8, 0.1, 0.05)) * N(1, (0.2, 0, 0), 3), - 'angles_deg': np.array((40.0, -120.0, 100)), - 'fullness': 50.0, - 'aspect': 1.0, - 'Thigh Rad1 Rad2 Fullness': np.array((0.33, 0.15, 2.5),) * N(1, 0.1, 3), - 'Calf Rad1 Rad2 Fullness': np.array((0.17, 0.07, 2.5),) * N(1, 0.1, 3), - 'Thigh Height Tilt1 Tilt2': np.array((0.6, 0.0, 0.0),) + N(0, [0.05, 2, 10]), - 'Calf Height Tilt1 Tilt2': np.array((0.8, 0.0, 0.0)) + N(0, [0.05, 10, 10]) - } - - def make_part(self, params): - part = nodegroup_to_part(nodegroup_quadruped_back_leg, params) - part.joints = { - 0: Joint(rest=(0,0,0), bounds=np.array([[-35, 0, -70], [35, 0, 70]])), # shoulder - 0.5: Joint(rest=(0,0,0), bounds=np.array([[-35, 0, -70], [35, 0, 70]])), # elbow - } - tag_object(part.obj, 'quadruped_back_leg') - return part - -@node_utils.to_nodegroup('nodegroup_quadruped_front_leg', singleton=False, type='GeometryNodeTree') -def nodegroup_quadruped_front_leg(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'length_rad1_rad2', (1.43, 0.1, 0.1)), - ('NodeSocketVector', 'angles_deg', (-20.0, 16.0, 9.2)), - ('NodeSocketFloat', 'aspect', 1.0), - ('NodeSocketVector', 'Shoulder Rad1 Rad2 Fullness', (0.22, 0.0, 0.0)), - ('NodeSocketVector', 'Calf Rad1 Rad2 Fullness', (0.0, 0.0, 0.0)), - ('NodeSocketVector', 'Elbow Rad1 Rad2 Fullness', (0.0, 0.0, 0.0)), - ('NodeSocketVector', 'Shoulder Height, Tilt1, Tilt2', (0.74, 0.0, 0.0)), - ('NodeSocketVector', 'Elbow Height, Tilt1, Tilt2', (0.9, 0.0, 0.0)), - ('NodeSocketVector', 'Calf Height, Tilt1, Tilt2', (0.74, 0.0, 0.0))]) - - simple_tube_v2 = nw.new_node(nodegroup_simple_tube_v2().name, - input_kwargs={'length_rad1_rad2': group_input.outputs["length_rad1_rad2"], 'angles_deg': group_input.outputs["angles_deg"], 'aspect': group_input.outputs["aspect"], 'fullness': 2.5, 'Origin': (-0.15, 0.0, 0.09)}) - - shoulder = nw.new_node(nodegroup_surface_muscle().name, - input_kwargs={'Skin Mesh': simple_tube_v2.outputs["Geometry"], 'Skeleton Curve': simple_tube_v2.outputs["Skeleton Curve"], 'Coord 0': (0.0, 0.0, 0.0), 'Coord 1': (0.2, 0.0, 0.0), 'Coord 2': (0.55, 0.0, 0.0), 'StartRad, EndRad, Fullness': group_input.outputs["Shoulder Rad1 Rad2 Fullness"], 'ProfileHeight, StartTilt, EndTilt': group_input.outputs["Shoulder Height, Tilt1, Tilt2"]}, - label='Shoulder') - - elbow_2 = nw.new_node(nodegroup_surface_muscle().name, - input_kwargs={'Skin Mesh': simple_tube_v2.outputs["Geometry"], 'Skeleton Curve': simple_tube_v2.outputs["Skeleton Curve"], 'Coord 0': (0.53, 1.5708, 1.69), 'Coord 1': (0.57, 0.0, 0.0), 'Coord 2': (0.95, 0.0, 0.0), 'StartRad, EndRad, Fullness': group_input.outputs["Elbow Rad1 Rad2 Fullness"], 'ProfileHeight, StartTilt, EndTilt': group_input.outputs["Elbow Height, Tilt1, Tilt2"]}, - label='Elbow 2') - - elbow_1 = nw.new_node(nodegroup_surface_muscle().name, - input_kwargs={'Skin 
Mesh': simple_tube_v2.outputs["Geometry"], 'Skeleton Curve': simple_tube_v2.outputs["Skeleton Curve"], 'Coord 0': (0.22, 1.5708, 1.0), 'Coord 1': (0.4, 0.0, 0.0), 'Coord 2': (0.57, 1.571, 1.7), 'StartRad, EndRad, Fullness': group_input.outputs["Elbow Rad1 Rad2 Fullness"], 'ProfileHeight, StartTilt, EndTilt': group_input.outputs["Elbow Height, Tilt1, Tilt2"]}, - label='Elbow 1') - - forearm = nw.new_node(nodegroup_surface_muscle().name, - input_kwargs={'Skin Mesh': simple_tube_v2.outputs["Geometry"], 'Skeleton Curve': simple_tube_v2.outputs["Skeleton Curve"], 'Coord 0': (0.41, -1.7008, 0.6), 'Coord 1': (0.57, 0.0, 0.8), 'Coord 2': (0.95, 0.0, 0.0), 'StartRad, EndRad, Fullness': group_input.outputs["Calf Rad1 Rad2 Fullness"], 'ProfileHeight, StartTilt, EndTilt': group_input.outputs["Calf Height, Tilt1, Tilt2"]}, - label='Forearm') - - join_geometry = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [shoulder, elbow_2, elbow_1, forearm, simple_tube_v2.outputs["Geometry"]]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': join_geometry, 'Skeleton Curve': simple_tube_v2.outputs["Skeleton Curve"]}) - -class QuadrupedFrontLeg(PartFactory): - - tags = ['leg'] - - def sample_params(self): - return { - 'length_rad1_rad2': np.array((1.43, 0.1, 0.1)) * N(1, (0.2, 0, 0), 3), - 'angles_deg': np.array((-40.0, 120.0, -100)), - 'aspect': 1.0, - 'Shoulder Rad1 Rad2 Fullness': np.array((0.22, 0.22, 2.5)) * N(1, 0.1, 3), - 'Calf Rad1 Rad2 Fullness': np.array((0.08, 0.08, 2.5)) * N(1, 0.1, 3), - 'Elbow Rad1 Rad2 Fullness': np.array((0.12, 0.1, 2.5) * N(1, 0.1, 3)), - 'Shoulder Height, Tilt1, Tilt2': np.array((0.74, 0.0, 0.0)) + N(0, [0.05, 10, 10]), - 'Elbow Height, Tilt1, Tilt2': np.array((0.9, 0.0, 0.0)) + N(0, [0.05, 10, 10]), - 'Calf Height, Tilt1, Tilt2': np.array((0.74, 0.0, 0.0)) + N(0, [0.05, 10, 10]), - } - - def make_part(self, params): - part = nodegroup_to_part(nodegroup_quadruped_front_leg, params) - part.joints = { - 0: Joint(rest=(0,0,0), bounds=np.array([[-35, 0, -70], [35, 0, 70]])), # shoulder - 0.6: Joint(rest=(0,0,0), bounds=np.array([[-35, 0, -70], [35, 0, 70]])) # elbow - } - tag_object(part.obj, 'quadruped_front_leg') - return part - -@node_utils.to_nodegroup('nodegroup_bird_leg', singleton=False, type='GeometryNodeTree') -def nodegroup_bird_leg(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'length_rad1_rad2', (1.0, 0.09, 0.06)), - ('NodeSocketVector', 'angles_deg', (-70.0, 90.0, -2.0)), - ('NodeSocketFloat', 'aspect', 1.0), - ('NodeSocketFloat', 'fullness', 8.0), - ('NodeSocketVector', 'Thigh Rad1 Rad2 Fullness', (0.18, 0.1, 1.26)), - ('NodeSocketVector', 'Shin Rad1 Rad2 Fullness', (0.07, 0.06, 5.0))]) - - simple_tube_v2 = nw.new_node(nodegroup_simple_tube_v2().name, - input_kwargs={'length_rad1_rad2': group_input.outputs["length_rad1_rad2"], 'angles_deg': group_input.outputs["angles_deg"], 'aspect': group_input.outputs["aspect"], 'fullness': group_input.outputs["fullness"]}) - - surface_muscle = nw.new_node(nodegroup_surface_muscle().name, - input_kwargs={'Skin Mesh': simple_tube_v2.outputs["Geometry"], 'Skeleton Curve': simple_tube_v2.outputs["Skeleton Curve"], 'Coord 0': (0.0, 0.0, 0.0), 'Coord 1': (0.2, 0.0, 0.0), 'Coord 2': (0.4, 1.5708, 1.0), 'StartRad, EndRad, Fullness': group_input.outputs["Thigh Rad1 Rad2 Fullness"], 'ProfileHeight, StartTilt, EndTilt': (0.72, -21.05, 0.0)}) - - surface_muscle_1 = 
nw.new_node(nodegroup_surface_muscle().name, - input_kwargs={'Skin Mesh': simple_tube_v2.outputs["Geometry"], 'Skeleton Curve': simple_tube_v2.outputs["Skeleton Curve"], 'Coord 0': (0.32, 0.0, 0.0), 'Coord 1': (0.5, 1.5708, 0.0), 'Coord 2': (0.74, 1.32, 0.29), 'StartRad, EndRad, Fullness': group_input.outputs["Shin Rad1 Rad2 Fullness"], 'ProfileHeight, StartTilt, EndTilt': (0.72, -21.05, 0.0)}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [surface_muscle, surface_muscle_1, simple_tube_v2.outputs["Geometry"]]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': join_geometry, 'Skeleton Curve': simple_tube_v2.outputs["Skeleton Curve"]}) - -class BirdLeg(PartFactory): - - tags = ['leg'] - - def sample_params(self): - return { - 'length_rad1_rad2': np.array((1, 0.09, 0.06)) * np.array((clip_gaussian(1, 0.3, 0.2, 1.5), *N(1, 0.1, 2))), - 'angles_deg': np.array((-70.0, 90.0, -2.0)), - 'aspect': N(1, 0.05), - 'fullness': 8.0 * N(1, 0.1), - 'Thigh Rad1 Rad2 Fullness': np.array((0.18, 0.1, 1.26)) * N(1, 0.1, 3), - 'Shin Rad1 Rad2 Fullness': np.array((0.07, 0.06, 5.0)) * N(1, 0.1, 3) - } - - def make_part(self, params): - part = nodegroup_to_part(nodegroup_bird_leg, params) - part.joints = { - 0: Joint(rest=(0,0,0), bounds=np.array([[-35, 0, -70], [35, 0, 70]])), # shoulder - 0.5: Joint(rest=(0,0,0), bounds=np.array([[-35, 0, -70], [35, 0, 70]])), # elbow - } - part.iks = {} - tag_object(part.obj, 'bird_leg') - return part - -@node_utils.to_nodegroup('nodegroup_insect_leg', singleton=False, type='GeometryNodeTree') -def nodegroup_insect_leg(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'length_rad1_rad2', (1.24, 0.02, 0.01)), - ('NodeSocketVector', 'angles_deg', (0.0, -63.9, 31.39)), - ('NodeSocketFloat', 'Carapace Rad Pct', 1.4), - ('NodeSocketVector', 'spike_length_rad1_rad2', (0.1, 0.025, 0.0))]) - - simple_tube_v2 = nw.new_node(nodegroup_simple_tube_v2().name, - input_kwargs={'length_rad1_rad2': group_input.outputs["length_rad1_rad2"], 'angles_deg': group_input.outputs["angles_deg"], 'proportions': (0.2533, 0.3333, 0.1333), 'do_bezier': False}) - - scale = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["length_rad1_rad2"], 'Scale': group_input.outputs["Carapace Rad Pct"]}, - attrs={'operation': 'SCALE'}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': scale.outputs["Vector"]}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': separate_xyz.outputs["Y"], 'Y': separate_xyz.outputs["Y"], 'Z': 30.0}) - - surface_muscle = nw.new_node(nodegroup_surface_muscle().name, - input_kwargs={'Skin Mesh': simple_tube_v2.outputs["Geometry"], 'Skeleton Curve': simple_tube_v2.outputs["Skeleton Curve"], 'Coord 0': (0.0, 0.0, 0.0), 'Coord 1': (0.01, 0.0, 0.0), 'Coord 2': (0.35, 0.0, 0.0), 'StartRad, EndRad, Fullness': combine_xyz, 'ProfileHeight, StartTilt, EndTilt': (0.73, 0.0, 0.0)}) - - trim_curve = nw.new_node(Nodes.TrimCurve, - input_kwargs={'Curve': simple_tube_v2.outputs["Skeleton Curve"], 'Start': 0.4892, 'End': 0.725}) - - resample_curve = nw.new_node(Nodes.ResampleCurve, - input_kwargs={'Curve': trim_curve, 'Count': 4}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': resample_curve}) - - simple_tube_v2_1 = nw.new_node(nodegroup_simple_tube_v2().name, - input_kwargs={'length_rad1_rad2': group_input.outputs["spike_length_rad1_rad2"], 
'angles_deg': (0.0, -40.0, 0.0)}) - - instance_on_points = nw.new_node(Nodes.InstanceOnPoints, - input_kwargs={'Points': curve_to_mesh, 'Instance': simple_tube_v2_1.outputs["Geometry"], 'Rotation': (0.0, 0.1239, 0.0)}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [simple_tube_v2.outputs["Geometry"], surface_muscle, instance_on_points]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': join_geometry, 'Skeleton Curve': simple_tube_v2.outputs["Skeleton Curve"], 'Endpoint': simple_tube_v2.outputs["Endpoint"]}) - -class InsectLeg(PartFactory): - - tags = ['leg', 'rigid'] - - def sample_params(self): - return { - 'length_rad1_rad2': np.array((1, 0.02, 0.01)) * N(1, 0.25, 3) , - 'angles_deg': np.array((0.0, -63.9, 31.39)) + N(0, 10, 3), - 'Carapace Rad Pct': 1.4 * U(0.5, 2), - 'spike_length_rad1_rad2': np.array((0.2, 0.025, 0.0)) * N(1, (0.2, 0.1, 0.1), 3), - } - - def make_part(self, params): - part = nodegroup_to_part(nodegroup_insect_leg, params) - part.joints = { - 0: Joint(rest=(0,0,0), bounds=np.array([[-35, 0, -70], [35, 0, 70]])), # shoulder - 0.3: Joint(rest=(0,0,0), bounds=np.array([[-35, 0, -70], [35, 0, 70]])), - 0.7: Joint(rest=(0,0,0), bounds=np.array([[-35, 0, -70], [35, 0, 70]])) - } - part.iks = {1.0: IKParams('foot', rotation_weight=0.1, chain_parts=1)} - tag_object(part.obj, 'insect_leg') - return part \ No newline at end of file diff --git a/infinigen/assets/creatures/parts/ridged_fin.py b/infinigen/assets/creatures/parts/ridged_fin.py deleted file mode 100644 index 6c8cb5171..000000000 --- a/infinigen/assets/creatures/parts/ridged_fin.py +++ /dev/null @@ -1,480 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
- -# Authors: Mingzhe Wang - - -import bpy - -import numpy as np -from numpy.random import uniform, normal -import random - -from infinigen.assets.creatures.util.genome import Joint, IKParams - -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.surface import set_geomod_inputs -from infinigen.assets.materials.utils.surface_utils import clip, sample_range, sample_ratio - -from infinigen.assets.creatures.util.nodegroups.curve import nodegroup_simple_tube_v2 -from infinigen.assets.creatures.util.nodegroups.attach import nodegroup_attach_part -from infinigen.assets.creatures.util.creature import PartFactory, Part -from infinigen.assets.creatures.util.part_util import nodegroup_to_part -from infinigen.core.util import blender as butil -from infinigen.core.tagging import tag_object, tag_nodegroup - -@node_utils.to_nodegroup('nodegroup_mix2_values', singleton=True, type='GeometryNodeTree') -def nodegroup_mix2_values(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Ratio', 0.5), - ('NodeSocketFloat', 'Value1', 0.5), - ('NodeSocketFloat', 'Value2', 0.5)]) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Value1"], 1: group_input.outputs["Ratio"]}, - attrs={'operation': 'MULTIPLY'}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: 1.0, 1: group_input.outputs["Ratio"]}, - attrs={'operation': 'SUBTRACT'}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: subtract, 1: group_input.outputs["Value2"]}, - attrs={'operation': 'MULTIPLY'}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: multiply, 1: multiply_1}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Value': add}) - -@node_utils.to_nodegroup('nodegroup_fish_fin', singleton=False, type='GeometryNodeTree') -def nodegroup_fish_fin(nw: NodeWrangler): - # Code generated using version 2.5.1 of the node_transpiler - - grid = nw.new_node(Nodes.MeshGrid, - input_kwargs={'Vertices X': 100, 'Vertices Y': 100}) - - transform_3 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': grid, 'Rotation': (1.5708, 0.0000, 0.0000)}) - - position_3 = nw.new_node(Nodes.InputPosition) - - sep_z = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': position_3}, - label='sep_z') - - z_stats = nw.new_node(Nodes.AttributeStatistic, - input_kwargs={'Geometry': transform_3, 2: sep_z.outputs["Z"]}, - label='z_stats') - - norm_z = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': sep_z.outputs["Z"], 1: z_stats.outputs["Min"], 2: z_stats.outputs["Max"]}, - label='norm_z') - - remap_z = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': norm_z.outputs["Result"]}, - label='remap_z') - node_utils.assign_curve(remap_z.mapping.curves[0], [(0.1727, 0.9875), (0.5182, 0.2438), (1.0000, 0.0063)]) - - capture_z_rigidity = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': transform_3, 2: remap_z}, - label='capture_z_rigidity') - - position = nw.new_node(Nodes.InputPosition) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': position}) - - greater_than = nw.new_node(Nodes.Compare, - input_kwargs={0: separate_xyz.outputs["Y"]}) - - op_and = nw.new_node(Nodes.BooleanMath, - input_kwargs={1: greater_than}) - - delete_geometry = nw.new_node(Nodes.DeleteGeometry, - input_kwargs={'Geometry': capture_z_rigidity.outputs["Geometry"], 'Selection': op_and}) - - capture_attribute = 
nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': delete_geometry, 1: position}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - position_1 = nw.new_node(Nodes.InputPosition) - - add = nw.new_node(Nodes.VectorMath, - input_kwargs={0: position_1, 1: (0.5000, 0.0000, 0.5000)}) - - separate_xyz_1 = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': add.outputs["Vector"]}) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVectorXYZ', 'FinScale', (1.0000, 1.0000, 0.5000)), - ('NodeSocketFloat', 'RoundWeight', 1.0000), - ('NodeSocketFloat', 'Freq', 69.1150), - ('NodeSocketFloat', 'OffsetWeightZ', 1.0000), - ('NodeSocketVector', 'PatternRotation', (4.0000, 0.0000, 2.0000)), - ('NodeSocketFloat', 'OffsetWeightY', 1.0000), - ('NodeSocketFloat', 'RoundingWeight', 0.0000), - ('NodeSocketFloat', 'AffineX', 0.0000), - ('NodeSocketFloat', 'AffineZ', 0.0000), - ('NodeSocketFloat', 'Value', 0.5000), - ('NodeSocketFloat', 'NoiseWeight', 0.0000), - ('NodeSocketFloat', 'BumpX', 0.0000), - ('NodeSocketFloat', 'BumpZ', 0.0000), - ('NodeSocketFloat', 'NoiseRatioZ', 1.0000), - ('NodeSocketFloat', 'NoiseRatioX', 1.0000)]) - - add_1 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_1.outputs["Z"], 1: group_input.outputs["NoiseWeight"]}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: add_1, 1: separate_xyz_1.outputs["X"]}, - attrs={'operation': 'MULTIPLY'}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: multiply, 1: group_input.outputs["AffineZ"]}, - attrs={'operation': 'MULTIPLY'}) - - separate_xyz_2 = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': capture_attribute.outputs["Attribute"]}) - - add_2 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_2.outputs["X"]}) - - float_curve = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': add_2}) - node_utils.assign_curve(float_curve.mapping.curves[0], [(0.0068, 0.0000), (0.0455, 0.3812), (0.1091, 0.5419), (0.1955, 0.6437), (0.3205, 0.7300), (0.4955, 0.7719), (0.7545, 0.7350), (0.8705, 0.6562), (1.0000, 0.4413)]) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: float_curve, 1: 0.7000}, - attrs={'operation': 'SUBTRACT'}) - - multiply_2 = nw.new_node(Nodes.Math, - input_kwargs={0: subtract, 1: group_input.outputs["RoundWeight"]}, - attrs={'operation': 'MULTIPLY'}) - - add_3 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_1.outputs["X"], 1: group_input.outputs["Value"]}) - - multiply_3 = nw.new_node(Nodes.Math, - input_kwargs={0: add_3, 1: separate_xyz_1.outputs["Z"]}, - attrs={'operation': 'MULTIPLY'}) - - multiply_4 = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_3, 1: group_input.outputs["AffineX"]}, - attrs={'operation': 'MULTIPLY'}) - - add_4 = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_2, 1: multiply_4}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': multiply_1, 'Z': add_4}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': capture_attribute.outputs["Geometry"], 'Offset': combine_xyz}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Scale': 3.0000}) - - subtract_1 = nw.new_node(Nodes.Math, - input_kwargs={0: noise_texture.outputs["Fac"]}, - attrs={'operation': 'SUBTRACT'}) - - nodegroup_mix2_values_no_gc = nw.new_node(nodegroup_mix2_values().name, - input_kwargs={'Ratio': group_input.outputs["NoiseRatioX"], 'Value1': separate_xyz_2.outputs["X"], 'Value2': subtract_1}) - - add_5 = nw.new_node(Nodes.Math, - input_kwargs={0: nodegroup_mix2_values_no_gc, 1: 10.0000}) - - 
separate_xyz_3 = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': group_input.outputs["PatternRotation"]}) - - multiply_5 = nw.new_node(Nodes.Math, - input_kwargs={0: add_5, 1: 0.1000}, - attrs={'operation': 'MULTIPLY'}) - - subtract_2 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_3.outputs["X"], 1: multiply_5}, - attrs={'operation': 'SUBTRACT'}) - - add_6 = nw.new_node(Nodes.Math, - input_kwargs={0: add_5, 1: subtract_2}) - - power = nw.new_node(Nodes.Math, - input_kwargs={0: add_6, 1: 2.0000}, - attrs={'operation': 'POWER'}) - - nodegroup_mix2_values_no_gc_1 = nw.new_node(nodegroup_mix2_values().name, - input_kwargs={'Ratio': group_input.outputs["NoiseRatioZ"], 'Value1': separate_xyz_2.outputs["Z"], 'Value2': subtract_1}) - - add_7 = nw.new_node(Nodes.Math, - input_kwargs={0: nodegroup_mix2_values_no_gc_1, 1: 1.0000}) - - multiply_6 = nw.new_node(Nodes.Math, - input_kwargs={0: add_7, 1: 0.1000}, - attrs={'operation': 'MULTIPLY'}) - - subtract_3 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_3.outputs["Z"], 1: multiply_6}, - attrs={'operation': 'SUBTRACT'}) - - add_8 = nw.new_node(Nodes.Math, - input_kwargs={0: add_7, 1: subtract_3}) - - multiply_7 = nw.new_node(Nodes.Math, - input_kwargs={0: add_8}, - attrs={'operation': 'MULTIPLY'}) - - power_1 = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_7, 1: 2.0000}, - attrs={'operation': 'POWER'}) - - add_9 = nw.new_node(Nodes.Math, - input_kwargs={0: power, 1: power_1}) - - sqrt = nw.new_node(Nodes.Math, - input_kwargs={0: add_9}, - attrs={'operation': 'SQRT'}) - - multiply_8 = nw.new_node(Nodes.Math, - input_kwargs={0: sqrt, 1: group_input.outputs["Freq"]}, - attrs={'operation': 'MULTIPLY'}) - - sine = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_8}, - attrs={'operation': 'SINE'}) - - power_2 = nw.new_node(Nodes.Math, - input_kwargs={0: sine, 1: 2.1000}, - attrs={'operation': 'POWER'}) - - capture_attribute_1 = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': set_position, 2: power_2}) - - multiply_9 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_2.outputs["X"], 1: group_input.outputs["BumpX"]}, - attrs={'operation': 'MULTIPLY'}) - - multiply_10 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_2.outputs["Z"], 1: group_input.outputs["BumpZ"]}, - attrs={'operation': 'MULTIPLY'}) - - add_10 = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_9, 1: multiply_10}) - - subtract_4 = nw.new_node(Nodes.Math, - input_kwargs={1: add_10}, - attrs={'operation': 'SUBTRACT'}) - - capture_attribute_2 = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': capture_attribute_1.outputs["Geometry"], 2: subtract_4}) - - noise_texture_1 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Scale': 100.0000}) - - subtract_5 = nw.new_node(Nodes.Math, - input_kwargs={0: noise_texture_1.outputs["Fac"]}, - attrs={'operation': 'SUBTRACT'}) - - normal = nw.new_node(Nodes.InputNormal) - - multiply_11 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: subtract_5, 1: normal}, - attrs={'operation': 'MULTIPLY'}) - - value = nw.new_node(Nodes.Value) - value.outputs[0].default_value = 0.0010 - - multiply_12 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: multiply_11.outputs["Vector"], 1: value}, - attrs={'operation': 'MULTIPLY'}) - - set_position_1 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': capture_attribute_2.outputs["Geometry"], 'Offset': multiply_12.outputs["Vector"]}) - - subtract_6 = nw.new_node(Nodes.Math, - input_kwargs={1: separate_xyz_2.outputs["X"]}, - 
attrs={'operation': 'SUBTRACT'}) - - multiply_13 = nw.new_node(Nodes.Math, - input_kwargs={0: sine, 1: subtract_6}, - attrs={'operation': 'MULTIPLY'}) - - multiply_14 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["OffsetWeightZ"], 1: -0.0200}, - attrs={'operation': 'MULTIPLY'}) - - multiply_15 = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_13, 1: multiply_14}, - attrs={'operation': 'MULTIPLY'}) - - sign = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_2.outputs["Y"]}, - attrs={'operation': 'SIGN'}) - - multiply_16 = nw.new_node(Nodes.Math, - input_kwargs={0: power_2, 1: sign}, - attrs={'operation': 'MULTIPLY'}) - - multiply_17 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["OffsetWeightY"], 1: 0.0060}, - attrs={'operation': 'MULTIPLY'}) - - multiply_18 = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_17, 1: subtract_4}, - attrs={'operation': 'MULTIPLY'}) - - multiply_19 = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_16, 1: multiply_18}, - attrs={'operation': 'MULTIPLY'}) - - multiply_20 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["OffsetWeightZ"], 1: 0.0300}, - attrs={'operation': 'MULTIPLY'}) - - multiply_21 = nw.new_node(Nodes.Math, - input_kwargs={0: sine, 1: multiply_20}, - attrs={'operation': 'MULTIPLY'}) - - add_11 = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_21, 1: 0.0000}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': multiply_15, 'Y': multiply_19, 'Z': add_11}) - - set_position_2 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': set_position_1, 'Offset': combine_xyz_1}) - - noise_texture_2 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'W': -0.6000, 'Scale': 0.8000}, - attrs={'noise_dimensions': '4D'}) - - subtract_7 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: noise_texture_2.outputs["Color"], 1: (0.5000, 0.5000, 0.5000)}, - attrs={'operation': 'SUBTRACT'}) - - multiply_22 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: subtract_7.outputs["Vector"], 1: group_input.outputs["NoiseWeight"]}, - attrs={'operation': 'MULTIPLY'}) - - set_position_3 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': set_position_2, 'Offset': multiply_22.outputs["Vector"]}) - - position_2 = nw.new_node(Nodes.InputPosition) - - separate_xyz_4 = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': position_2}) - - subtract_8 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_4.outputs["Z"]}, - attrs={'operation': 'SUBTRACT'}) - - absolute = nw.new_node(Nodes.Math, - input_kwargs={0: subtract_8}, - attrs={'operation': 'ABSOLUTE'}) - - power_3 = nw.new_node(Nodes.Math, - input_kwargs={0: absolute, 1: 1.0000}, - attrs={'operation': 'POWER'}) - - multiply_23 = nw.new_node(Nodes.Math, - input_kwargs={0: power_3, 1: 0.0000}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'Z': multiply_23}) - - set_position_4 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': set_position_3, 'Offset': combine_xyz_2}) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': set_position_4, 'Translation': (0.0000, 0.0000, 0.4000)}) - - transform_1 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': transform, 'Rotation': (0.0000, 0.0000, -1.5708), 'Scale': group_input.outputs["FinScale"]}) - - transform_2 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': transform_1, 'Rotation': (1.5708, 0.0000, 1.5708)}) - - multiply_24 = nw.new_node(Nodes.Math, - input_kwargs={0: 
capture_attribute_1.outputs[2], 1: capture_z_rigidity.outputs[2]}, - attrs={'operation': 'MULTIPLY'}) - - multiply_25 = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_24, 1: 1.600}, - attrs={'operation': 'MULTIPLY'}) - - add_final_rigidity = nw.new_node(Nodes.Math, - input_kwargs={0: capture_z_rigidity.outputs[2], 1: multiply_25}, - label='add_final_rigidity', - attrs={'use_clamp': True}) - - store_cloth_pin = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={ - 'Geometry': transform_2, - 'Name': 'cloth_pin_rigidity', - 'Value': add_final_rigidity - }, - label='store_cloth_pin') - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': store_cloth_pin, 'Bump': capture_attribute_1.outputs[2], 'BumpMask': capture_attribute_2.outputs[2]}) - -class FishFin(PartFactory): - - tags = ['limb', 'fin'] - - def __init__(self, *args, rig=True, **kwargs): - super().__init__(*args, **kwargs) - self.rig = rig - - def sample_params(self): - params = { - 'FinScale': np.array((1.0, 1.0, 0.5), dtype=np.float32), - 'RoundWeight': sample_range(0, 1), - 'Freq': sample_range(50, 100), - 'OffsetWeightZ': sample_range(0.1, 0.5), - 'PatternRotation': np.array((4.0 if random.random()<0.5 else -4, 0.0, 2.0), dtype=np.float32), - 'OffsetWeightY': sample_range(0.5, 1), - 'RoundingWeight': sample_range(0.02, 0.07), - 'AffineX': sample_range(0, 0.3), - 'AffineZ': sample_range(0, 1), - 'Value': 0.5, - 'Value': 0.5, - 'NoiseWeight': 0.0, - 'BumpX': 0.0, - 'BumpZ': 1.0, - 'NoiseRatioZ': 1.0, - 'NoiseRatioX': sample_range(0.9, 0.95) - } - return params - - def make_part(self, params): - - part = Part(skeleton=np.zeros((2, 3), dtype=float), obj=butil.spawn_vert('fin_parent')) - - fin = butil.spawn_vert('Fin') - fin.parent = part.obj - - _, mod = butil.modify_mesh(fin, 'NODES', apply=False, return_mod=True, node_group=nodegroup_fish_fin()) - set_geomod_inputs(mod, params) - - id1 = mod.node_group.outputs['Bump'].identifier - mod[f'{id1}_attribute_name'] = 'Bump' - id2 = mod.node_group.outputs['BumpMask'].identifier - mod[f'{id2}_attribute_name'] = 'BumpMask' - - butil.apply_modifiers(fin, mod) - - part.settings['rig_extras'] = self.rig - tag_object(part.obj, 'fish_fin') - return part - -if __name__ == "__main__": - fin = FishFin() - import os - fn = os.path.join(os.path.abspath(os.curdir), 'dev_scene_test_fin.blend') - bpy.ops.wm.save_as_mainfile(filepath=fn) \ No newline at end of file diff --git a/infinigen/assets/creatures/parts/tail.py b/infinigen/assets/creatures/parts/tail.py deleted file mode 100644 index 26e6d0c82..000000000 --- a/infinigen/assets/creatures/parts/tail.py +++ /dev/null @@ -1,55 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
- -# Authors: Alexander Raistrick - - -import numpy as np -from numpy.random import normal as N - -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils - -from infinigen.assets.creatures.util.genome import Joint, IKParams -from infinigen.assets.creatures.util.nodegroups.curve import nodegroup_simple_tube_v2 -from infinigen.assets.creatures.util.nodegroups.attach import nodegroup_surface_muscle - -from infinigen.assets.creatures.util.creature import PartFactory -from infinigen.assets.creatures.util.part_util import nodegroup_to_part -from infinigen.core.tagging import tag_object, tag_nodegroup - -@node_utils.to_nodegroup('nodegroup_tail', singleton=False, type='GeometryNodeTree') -def nodegroup_tail(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'length_rad1_rad2', (1.49, 0.05, 0.02)), - ('NodeSocketVector', 'angles_deg', (31.39, 65.81, -106.93)), - ('NodeSocketFloat', 'aspect', 1.0)]) - - simple_tube_v2 = nw.new_node(nodegroup_simple_tube_v2().name, - input_kwargs={'length_rad1_rad2': group_input.outputs["length_rad1_rad2"], 'angles_deg': group_input.outputs["angles_deg"], 'aspect': group_input.outputs["aspect"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': simple_tube_v2.outputs["Geometry"], 'Skeleton Curve': simple_tube_v2.outputs["Skeleton Curve"]}) - -class Tail(PartFactory): - - tags = ['tail'] - - def sample_params(self): - return { - 'length_rad1_rad2': (N(0.5, 0.1), 0.05, 0.02), - 'angles_deg': np.array((31.39, 65.81, -106.93)) * N(1, 0.1), - 'aspect': N(1, 0.05) - } - - def make_part(self, params): - part = nodegroup_to_part(nodegroup_tail, params) - part.joints = { - i: Joint(rest=(0,0,0), bounds=np.array([[-30, 0, -30], [30, 0, 30]])) - for i in np.linspace(0, 1, 6) - } - part.iks = {1.0: IKParams(name='tail', chain_parts=1)} - tag_object(part.obj, 'tail') - return part \ No newline at end of file diff --git a/infinigen/assets/creatures/parts/utils/draw.py b/infinigen/assets/creatures/parts/utils/draw.py deleted file mode 100644 index 44f022501..000000000 --- a/infinigen/assets/creatures/parts/utils/draw.py +++ /dev/null @@ -1,62 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
- -# Authors: Lingjie Mei - - -import numpy as np - -from infinigen.assets.utils.decorate import displace_vertices -from infinigen.assets.utils.draw import spin -from infinigen.core.nodes.node_info import Nodes -from infinigen.core.nodes.node_wrangler import NodeWrangler -from infinigen.core import surface -from infinigen.core.util import blender as butil - - -def make_segments(x_cuts, y_cuts, x_anchors, y_anchors, params): - x_length, y_length, z_length = map(params.get, ['x_length', 'y_length', 'z_length']) - segments = [] - for i in range(len(x_cuts) - 1): - x_start, x_end = x_cuts[i], x_cuts[i + 1] - y_start, y_end = y_cuts[i], y_cuts[i + 1] - xs = x_anchors(x_start, x_end) - ys = y_anchors(y_start, y_end) - obj = spin([np.array([xs[0], *xs, xs[-1]]) * x_length, np.array([0, *ys, 0]) * y_length, .0], - [1, len(xs)], axis=(1, 0, 0)) - - y_base = y_length * y_start - displace_vertices(obj, lambda x, y, z: ( - 0, 0, -np.clip(z + y_base * params['bottom_cutoff'], None, 0) * (1 - params['bottom_shift']))) - displace_vertices(obj, lambda x, y, z: (0, 0, - np.where(z > 0, np.clip(params['top_cutoff'] * y_base - np.abs(y), 0, None) * params['top_shift'], 0))) - - decorate_segment(obj, params, x_start, x_end) - obj.scale[-1] = params['z_length'] / y_length - butil.apply_transform(obj) - segments.append(obj) - return segments - - -def decorate_segment(obj, params, x_start, x_end): - def offset(nw: NodeWrangler, vector): - noise_texture = nw.new_node(Nodes.NoiseTexture, [vector], input_kwargs={'Scale': params['noise_scale']}) - x = nw.separate(nw.new_node(Nodes.InputPosition))[0] - ratio = nw.build_float_curve(nw.scalar_divide(x, params['x_length']), - [(x_start, 1), (x_end - .01, 1), (x_end, 0), (x_end + .01, 0)]) - return nw.scale(nw.scalar_multiply(ratio, nw.scalar_multiply(noise_texture, params['noise_strength'])), - nw.new_node(Nodes.InputNormal)) - - surface.add_geomod(obj, geo_symmetric_texture, input_args=[offset], apply=True) - butil.modify_mesh(obj, 'WELD', merge_threshold=.001) - - -def geo_symmetric_texture(nw: NodeWrangler, offset, selection=None): - geometry = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketGeometry', 'Geometry', None)]) - pos = nw.new_node(Nodes.InputPosition) - x, y, z = nw.separate(pos) - vector = nw.combine(x, nw.math('ABSOLUTE', y), z) - distance = nw.new_node(Nodes.NamedAttribute, ['distance']) - geometry = nw.new_node(Nodes.SetPosition, [geometry, surface.eval_argument(nw, selection), None, - surface.eval_argument(nw, offset, vector=vector, distance=distance)]) - nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': geometry}) diff --git a/infinigen/assets/creatures/parts/wings.py b/infinigen/assets/creatures/parts/wings.py deleted file mode 100644 index 4331db191..000000000 --- a/infinigen/assets/creatures/parts/wings.py +++ /dev/null @@ -1,675 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
- -# Authors: -# - Alexander Raistrick: base version -# - Beining Han: flying variant - - -import bpy - -import numpy as np -from numpy.random import uniform as U, normal as N - -from infinigen.core.util.math import clip_gaussian - -from infinigen.assets.creatures.util.genome import Joint, IKParams - -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.assets.creatures.util.nodegroups.curve import nodegroup_simple_tube, nodegroup_simple_tube_v2 -from infinigen.assets.creatures.util.nodegroups.attach import nodegroup_surface_muscle -from infinigen.assets.creatures.util.nodegroups.math import nodegroup_deg2_rad -from infinigen.assets.creatures.util.nodegroups.geometry import nodegroup_symmetric_clone - -from infinigen.assets.creatures.util.creature import PartFactory -from infinigen.assets.creatures.util.part_util import nodegroup_to_part -from infinigen.core.tagging import tag_object, tag_nodegroup - -@node_utils.to_nodegroup('nodegroup_feather', singleton=False, type='GeometryNodeTree') -def nodegroup_feather(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'Length Rad1 Rad2', (0.5, 0.1, 0.1))]) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': group_input.outputs["Length Rad1 Rad2"]}) - - scale = nw.new_node(Nodes.VectorMath, - input_kwargs={0: (1.0, 0.0, 0.0), 'Scale': separate_xyz.outputs["X"]}, - attrs={'operation': 'SCALE'}) - - curve_line = nw.new_node(Nodes.CurveLine, - input_kwargs={'End': scale.outputs["Vector"]}) - - subdivide_curve = nw.new_node(Nodes.SubdivideCurve, - input_kwargs={'Curve': curve_line, 'Cuts': 30}) - - spline_parameter = nw.new_node(Nodes.SplineParameter) - - float_curve = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': spline_parameter.outputs["Factor"]}) - node_utils.assign_curve(float_curve.mapping.curves[0], [(0.0, 0.0), (0.2327, 0.985), (0.8909, 0.6), (1.0, 0.0)]) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': spline_parameter.outputs["Factor"], 3: separate_xyz.outputs["Y"], 4: separate_xyz.outputs["Z"]}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: float_curve, 1: map_range.outputs["Result"]}, - attrs={'operation': 'MULTIPLY'}) - - set_curve_radius = nw.new_node(Nodes.SetCurveRadius, - input_kwargs={'Curve': subdivide_curve, 'Radius': multiply}) - - curve_line_1 = nw.new_node(Nodes.CurveLine, - input_kwargs={'Start': (0.0, -1.0, 0.0), 'End': (0.0, 1.0, 0.0)}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': set_curve_radius, 'Profile Curve': curve_line_1}) - - subdivide_curve_1 = nw.new_node(Nodes.SubdivideCurve, - input_kwargs={'Curve': curve_line, 'Cuts': 4}) - - trim_curve = nw.new_node(Nodes.TrimCurve, - input_kwargs={'Curve': subdivide_curve_1, 'End': 0.8742}) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': spline_parameter.outputs["Factor"], 3: 0.15, 4: 0.05}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: map_range_1.outputs["Result"], 1: separate_xyz.outputs["Y"]}, - attrs={'operation': 'MULTIPLY'}) - - set_curve_radius_1 = nw.new_node(Nodes.SetCurveRadius, - input_kwargs={'Curve': trim_curve, 'Radius': multiply_1}) - - curve_circle = nw.new_node(Nodes.CurveCircle, - input_kwargs={'Resolution': 6}) - - curve_to_mesh_1 = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': set_curve_radius_1, 'Profile Curve': 
curve_circle.outputs["Curve"]}) - - #join_geometry = nw.new_node(Nodes.JoinGeometry, - # input_kwargs={'Geometry': [curve_to_mesh, curve_to_mesh_1]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Mesh': tag_nodegroup(nw, curve_to_mesh, 'feather')}) - -@node_utils.to_nodegroup('nodegroup_bird_tail', singleton=False, type='GeometryNodeTree') -def nodegroup_bird_tail(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - simple_tube = nw.new_node(nodegroup_simple_tube().name, - input_kwargs={'Angles Deg': (0.0, 0.0, 0.0), 'Seg Lengths': (0.11, 0.11, 0.11), 'Start Radius': 0.07, 'End Radius': 0.02, 'Fullness': 3.0}) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'Feather Length Rad1 Rad2', (0.5, 0.08, 0.1)), - ('NodeSocketVector', 'Feather Rot Extent', (136.51, -11.8, 34.0)), - ('NodeSocketVector', 'Feather Rot Rand Bounds', (5.0, 5.0, 5.0)), - ('NodeSocketIntUnsigned', 'N Feathers', 16)]) - - quadratic_bezier = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Resolution': group_input.outputs["N Feathers"], 'Start': (0.0, 0.0, -0.1), 'Middle': (0.0, 0.15, -0.05), 'End': (0.0, 0.15, 0.11)}) - - feather = nw.new_node(nodegroup_feather().name, - input_kwargs={'Length Rad1 Rad2': group_input.outputs["Feather Length Rad1 Rad2"]}) - - index = nw.new_node(Nodes.Index) - - divide = nw.new_node(Nodes.Math, - input_kwargs={0: index, 1: group_input.outputs["N Feathers"]}, - attrs={'operation': 'DIVIDE'}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Vector': divide, 9: (-90.0, -14.88, 4.01), 10: group_input.outputs["Feather Rot Extent"]}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - scale = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["Feather Rot Rand Bounds"], 'Scale': -1.0}, - attrs={'operation': 'SCALE'}) - - random_value = nw.new_node(Nodes.RandomValue, - input_kwargs={0: scale.outputs["Vector"], 1: group_input.outputs["Feather Rot Rand Bounds"]}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - add = nw.new_node(Nodes.VectorMath, - input_kwargs={0: map_range.outputs["Vector"], 1: random_value.outputs["Value"]}) - - deg2rad = nw.new_node(nodegroup_deg2_rad().name, - input_kwargs={'Deg': add.outputs["Vector"]}) - - instance_on_points = nw.new_node(Nodes.InstanceOnPoints, - input_kwargs={'Points': quadratic_bezier, 'Instance': feather, 'Rotation': deg2rad}) - - realize_instances = nw.new_node(Nodes.RealizeInstances, - input_kwargs={'Geometry': instance_on_points}) - - symmetric_clone = nw.new_node(nodegroup_symmetric_clone().name, - input_kwargs={'Geometry': realize_instances}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': simple_tube.outputs["Geometry"], 'Skeleton Curve': simple_tube.outputs["Skeleton Curve"], 'TailFeathers': symmetric_clone.outputs["Both"]}) - -@node_utils.to_nodegroup('nodegroup_bird_wing', singleton=False, type='GeometryNodeTree') -def nodegroup_bird_wing(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'length_rad1_rad2', (1.0, 0.26, 0.0)), - ('NodeSocketFloat', 'feather_density', 18.7), - ('NodeSocketFloat', 'aspect', 1.0), - ('NodeSocketFloat', 'fullness', 4.0), - ('NodeSocketFloatFactor', 'Wing Shape Sculpting', 1.0), - ('NodeSocketVector', 'Feather length_rad1_rad2', (0.6, 0.04, 0.04)), - ('NodeSocketFloat', 'Extension', 1.68)]) - - map_range_3 = nw.new_node(Nodes.MapRange, - input_kwargs={'Vector': 
group_input.outputs["Extension"], 9: (-83.46, 154.85, -155.38), 10: (-15.04, 60.5, -41.1)}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - simple_tube_v2 = nw.new_node(nodegroup_simple_tube_v2().name, - input_kwargs={'length_rad1_rad2': group_input.outputs["length_rad1_rad2"], 'angles_deg': map_range_3.outputs["Vector"], 'proportions': (0.2, 0.27, 0.3), 'aspect': group_input.outputs["aspect"], 'do_bezier': False, 'fullness': group_input.outputs["fullness"]}) - - curve_length = nw.new_node(Nodes.CurveLength, - input_kwargs={'Curve': simple_tube_v2.outputs["Skeleton Curve"]}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: curve_length, 1: group_input.outputs["feather_density"]}, - attrs={'operation': 'MULTIPLY'}) - - resample_curve = nw.new_node(Nodes.ResampleCurve, - input_kwargs={'Curve': simple_tube_v2.outputs["Skeleton Curve"], 'Count': multiply}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': resample_curve}) - - reroute = nw.new_node(Nodes.Reroute, - input_kwargs={'Input': curve_to_mesh}) - - feather = nw.new_node(nodegroup_feather().name, - input_kwargs={'Length Rad1 Rad2': group_input.outputs["Feather length_rad1_rad2"]}) - - index = nw.new_node(Nodes.Index) - - attribute_statistic = nw.new_node(Nodes.AttributeStatistic, - input_kwargs={'Geometry': curve_to_mesh, 2: index}) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': index, 1: attribute_statistic.outputs["Min"], 2: attribute_statistic.outputs["Max"]}) - - transfer_attribute_index = nw.new_node(Nodes.SampleNearest, - input_kwargs={'Geometry': curve_to_mesh, 'Sample Position': map_range_1.outputs["Result"]}) - - transfer_attribute = nw.new_node(Nodes.SampleIndex, - input_kwargs={'Geometry': curve_to_mesh, 'Index': transfer_attribute_index}) - - float_curve = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Factor': group_input.outputs["Wing Shape Sculpting"], 'Value': (transfer_attribute, 'Value')}) - node_utils.assign_curve(float_curve.mapping.curves[0], [(0.0, 0.0), (0.5164, 0.245), (0.7564, 0.625), (1.0, 1.0)]) - - map_range_2 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': group_input.outputs["Extension"], 3: 115.65, 4: 0.0}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'Y': map_range_2.outputs["Result"]}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Vector': float_curve, 9: (0.0, 80.0, 0.0), 10: combine_xyz}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - add = nw.new_node(Nodes.VectorMath, - input_kwargs={0: map_range.outputs["Vector"], 1: (-5.0, 0.0, -1.0)}) - - deg2rad = nw.new_node(nodegroup_deg2_rad().name, - input_kwargs={'Deg': add.outputs["Vector"]}) - - vector_curves = nw.new_node(Nodes.VectorCurve, - input_kwargs={'Fac': group_input.outputs["Wing Shape Sculpting"], 'Vector': transfer_attribute}) - node_utils.assign_curve(vector_curves.mapping.curves[0], [(-1.0, -0.0), (0.0036, 0.0), (0.0473, 0.6), (0.3527, 0.54), (0.6, 0.9), (0.8836, 0.92), (1.0, 0.58)], handles=['AUTO', 'VECTOR', 'AUTO', 'AUTO', 'VECTOR', 'AUTO', 'AUTO']) - node_utils.assign_curve(vector_curves.mapping.curves[1], [(-1.0, 1.0), (1.0, 1.0)]) - node_utils.assign_curve(vector_curves.mapping.curves[2], [(-1.0, 1.0), (1.0, 1.0)]) - - instance_on_points = nw.new_node(Nodes.InstanceOnPoints, - input_kwargs={'Points': reroute, 'Instance': feather, 'Rotation': deg2rad, 'Scale': vector_curves}) - - add_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: map_range.outputs["Vector"], 1: (-5.0, 0.0, 0.0)}) - - deg2rad_1 = nw.new_node(nodegroup_deg2_rad().name, - 
input_kwargs={'Deg': add_1.outputs["Vector"]}) - - multiply_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: vector_curves, 1: (0.75, 1.0, 1.0)}, - attrs={'operation': 'MULTIPLY'}) - - instance_on_points_1 = nw.new_node(Nodes.InstanceOnPoints, - input_kwargs={'Points': reroute, 'Instance': feather, 'Rotation': deg2rad_1, 'Scale': multiply_1.outputs["Vector"]}) - - add_2 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: map_range.outputs["Vector"], 1: (-10.3, 0.0, 1.0)}) - - deg2rad_2 = nw.new_node(nodegroup_deg2_rad().name, - input_kwargs={'Deg': add_2.outputs["Vector"]}) - - multiply_2 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: vector_curves, 1: (0.45, 1.0, 1.0)}, - attrs={'operation': 'MULTIPLY'}) - - instance_on_points_2 = nw.new_node(Nodes.InstanceOnPoints, - input_kwargs={'Points': reroute, 'Instance': feather, 'Rotation': deg2rad_2, 'Scale': multiply_2.outputs["Vector"]}) - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [instance_on_points, instance_on_points_1, instance_on_points_2]}) - - realize_instances = nw.new_node(Nodes.RealizeInstances, - input_kwargs={'Geometry': join_geometry_1}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': simple_tube_v2.outputs["Geometry"], 'Skeleton Curve': simple_tube_v2.outputs["Skeleton Curve"], 'Feathers': realize_instances}) - -class BirdTail(PartFactory): - - tags = ['tail', 'wing'] - - def sample_params(self): - return { - 'Feather Length Rad1 Rad2': np.array((0.4, 0.06, 0.04)) * N(1, 0.1) * N(1, 0.1, 3), - 'Feather Rot Extent': np.array((25, -10, -16)) * N(1, 0.1, 3), - 'Feather Rot Rand Bounds': np.array((5.0, 5.0, 5.0)) * N(1, 0.1) * N(1, 0.05, 3), - 'N Feathers': int(N(16, 3)) - } - - def make_part(self, params): - part = nodegroup_to_part(nodegroup_bird_tail, params) - return part - -class BirdWing(PartFactory): - - tags = ['limb', 'wing'] - - def sample_params(self): - return { - 'length_rad1_rad2': np.array((clip_gaussian(1.2, 0.7, 0.4, 2), 0.1, 0.02)), - 'feather_density': 30, - 'aspect': N(0.4, 0.05), - 'fullness': N(4, 0.1), - 'Wing Shape Sculpting': U(0.6, 1), - 'Feather length_rad1_rad2': np.array((0.7 * N(1, 0.2), 0.04, 0.04)), - 'Extension': U(0, 0.05) if U() < 0.8 else U(0.7, 1) - } - - def make_part(self, params): - # split extras is essential to make automatic rigging work. 
We will join them back together later - part = nodegroup_to_part(nodegroup_bird_wing, params, split_extras=True) - part.joints = { - 0: Joint(rest=(0,0,0), bounds=np.array([[-35, 0, -70], [35, 0, 70]])), # shoulder - 0.27: Joint(rest=(0,0,0), bounds=np.array([[-35, 0, -70], [35, 0, 70]])), - 0.65: Joint(rest=(0,0,0), bounds=np.array([[-35, 0, -70], [35, 0, 70]])) # elbow - } - part.iks = {1.0: IKParams(name='wingtip', chain_parts=1)} - tag_object(part.obj, 'bird_wing') - part.settings['parent_extras_rigid'] = True - return part - - -@node_utils.to_nodegroup('nodegroup_flying_feather', singleton=False, type='GeometryNodeTree') -def nodegroup_flying_feather(nw: NodeWrangler): - # Code generated using version 2.5.1 of the node_transpiler - - vector = nw.new_node(Nodes.Vector) - vector.vector = (0.5000, 0.0500, 0.0000) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'Length Rad1 Rad2', (0.5000, 0.1000, 0.1000))]) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': group_input.outputs["Length Rad1 Rad2"]}) - - scale = nw.new_node(Nodes.VectorMath, - input_kwargs={0: vector, 'Scale': separate_xyz.outputs["X"]}, - attrs={'operation': 'SCALE'}) - - scale_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: (1.0000, 0.0000, 0.0000), 'Scale': separate_xyz.outputs["X"]}, - attrs={'operation': 'SCALE'}) - - quadratic_bezier = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Resolution': 32, 'Start': (0.0000, 0.0000, 0.0000), - 'Middle': scale.outputs["Vector"], 'End': scale_1.outputs["Vector"]}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': quadratic_bezier}) - - subdivide_curve_1 = nw.new_node(Nodes.SubdivideCurve, - input_kwargs={'Curve': set_position, 'Cuts': 4}) - - trim_curve = nw.new_node(Nodes.TrimCurve, - input_kwargs={'Curve': subdivide_curve_1, 'End': 0.8742}) - - spline_parameter = nw.new_node(Nodes.SplineParameter) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': spline_parameter.outputs["Factor"], 3: 0.1500, 4: 0.0100}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: map_range_1.outputs["Result"], 1: separate_xyz.outputs["Y"]}, - attrs={'operation': 'MULTIPLY'}) - - set_curve_radius_1 = nw.new_node(Nodes.SetCurveRadius, - input_kwargs={'Curve': trim_curve, 'Radius': multiply}) - - curve_circle = nw.new_node(Nodes.CurveCircle, - input_kwargs={'Resolution': 6}) - - curve_to_mesh_1 = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': set_curve_radius_1, - 'Profile Curve': curve_circle.outputs["Curve"]}) - - subdivide_curve = nw.new_node(Nodes.SubdivideCurve, - input_kwargs={'Curve': set_position, 'Cuts': 30}) - - float_curve = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': spline_parameter.outputs["Factor"]}) - node_utils.assign_curve(float_curve.mapping.curves[0], - [(0.0000, 0.0000), (0.3373, 0.8188), (0.7182, 0.7375), (1.0000, 0.0000)]) - - white_noise_texture = nw.new_node(Nodes.WhiteNoiseTexture) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: white_noise_texture.outputs["Value"], 1: 0.1000}, - attrs={'operation': 'MULTIPLY'}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: float_curve, 1: multiply_1}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': spline_parameter.outputs["Factor"], 3: separate_xyz.outputs["Y"], - 4: separate_xyz.outputs["Z"]}) - - multiply_2 = nw.new_node(Nodes.Math, - input_kwargs={0: add, 1: map_range.outputs["Result"]}, - attrs={'operation': 'MULTIPLY'}) - - set_curve_radius = 
nw.new_node(Nodes.SetCurveRadius, - input_kwargs={'Curve': subdivide_curve, 'Radius': multiply_2}) - - curve_line_1 = nw.new_node(Nodes.CurveLine, - input_kwargs={'Start': (0.0000, -1.0000, 0.1000), 'End': (0.0000, 1.0000, 0.0000)}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': set_curve_radius, 'Profile Curve': curve_line_1, - 'Fill Caps': True}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [curve_to_mesh_1, curve_to_mesh]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Mesh': join_geometry}) - -@node_utils.to_nodegroup('nodegroup_flying_bird_tail', singleton=False, type='GeometryNodeTree') -def nodegroup_flying_bird_tail(nw: NodeWrangler): - # Code generated using version 2.5.1 of the node_transpiler - - simple_tube = nw.new_node(nodegroup_simple_tube().name, - input_kwargs={'Angles Deg': (0.0000, 0.0000, 0.0000), - 'Seg Lengths': (0.00, 0.00, 0.00), 'Start Radius': 0.000, - 'End Radius': 0.000, 'Fullness': 3.0000}) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'Feather Length Rad1 Rad2', (0.5000, 0.0800, 0.1000)), - ('NodeSocketVector', 'Feather Rot Extent', (136.5100, -11.8000, 34.0000)), - ('NodeSocketVector', 'Feather Rot Rand Bounds', (5.0000, 5.0000, 5.0000)), - ('NodeSocketIntUnsigned', 'N Feathers', 16)]) - - quadratic_bezier = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Resolution': group_input.outputs["N Feathers"], - 'Start': (0.0000, 0.0000, 0.0000), 'Middle': (0.0000, 0.0500, 0.0000), - 'End': (-0.0500, 0.1000, 0.0300)}) - - feather = nw.new_node(nodegroup_flying_feather().name, - input_kwargs={'Length Rad1 Rad2': group_input.outputs["Feather Length Rad1 Rad2"]}) - - curve_tangent = nw.new_node(Nodes.CurveTangent) - - align_euler_to_vector = nw.new_node(Nodes.AlignEulerToVector, - input_kwargs={'Vector': curve_tangent}, - attrs={'axis': 'Y'}) - - instance_on_points = nw.new_node(Nodes.InstanceOnPoints, - input_kwargs={'Points': quadratic_bezier, 'Instance': feather, - 'Rotation': align_euler_to_vector}) - - rotate_instances = nw.new_node(Nodes.RotateInstances, - input_kwargs={'Instances': instance_on_points, 'Rotation': (1.5708, 0.0000, 0.0000)}) - - random_value_1 = nw.new_node(Nodes.RandomValue, - input_kwargs={2: -0.1000, 3: 0.1000}) - - random_value_2 = nw.new_node(Nodes.RandomValue, - input_kwargs={2: -0.1000, 3: 0.1000, 'Seed': 1}) - - random_value_3 = nw.new_node(Nodes.RandomValue, - input_kwargs={2: -0.1000, 3: 0.1000}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': random_value_1.outputs[1], 'Y': random_value_2.outputs[1], - 'Z': random_value_3.outputs[1]}) - - rotate_instances_1 = nw.new_node(Nodes.RotateInstances, - input_kwargs={'Instances': rotate_instances, 'Rotation': combine_xyz}) - - index_1 = nw.new_node(Nodes.Index) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': index_1, 2: group_input.outputs["N Feathers"]}) - - float_curve = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': map_range_1.outputs["Result"]}) - - if U(0, 1) < 0.5: - control_points = [0.2, 0.3, 0.45, 0.9] - else: - control_points = [0.25, 0.3, 0.35, 0.4] - node_utils.assign_curve(float_curve.mapping.curves[0], - [(0.0136, control_points[0] + N(0., 0.02)), (0.3273, control_points[1] + N(0., 0.02)), - (0.7500, control_points[2] + N(0., 0.03)), (1.0000, control_points[3] + N(0., 0.04))]) - - multiply_add = nw.new_node(Nodes.Math, - input_kwargs={0: float_curve, 1: 1.2000}, - attrs={'operation': 'MULTIPLY_ADD'}) - - 
combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': multiply_add, 'Y': 1.0000, 'Z': 1.0000}) - - scale_instances = nw.new_node(Nodes.ScaleInstances, - input_kwargs={'Instances': rotate_instances_1, 'Scale': combine_xyz_1}) - - realize_instances = nw.new_node(Nodes.RealizeInstances, - input_kwargs={'Geometry': scale_instances}) - - symmetric_clone = nw.new_node(nodegroup_symmetric_clone().name, - input_kwargs={'Geometry': realize_instances}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': simple_tube.outputs["Geometry"], - 'Skeleton Curve': simple_tube.outputs["Skeleton Curve"], - 'Feathers': symmetric_clone.outputs["Both"]}) - - - -@node_utils.to_nodegroup('nodegroup_flying_bird_wing', singleton=False, type='GeometryNodeTree') -def nodegroup_flying_bird_wing(nw: NodeWrangler): - # Code generated using version 2.5.1 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'length_rad1_rad2', (1.0000, 0.2600, 0.0000)), - ('NodeSocketFloat', 'feather_density', 18.7000), - ('NodeSocketFloat', 'aspect', 1.0000), - ('NodeSocketFloat', 'fullness', 4.0000), - ('NodeSocketFloatFactor', 'Wing Shape Sculpting', 1.0000), - ('NodeSocketVector', 'Length Rad1 Rad2', (0.6000, 0.0400, 0.0400)), - ('NodeSocketFloat', 'Extension', 1.6800)]) - - map_range_3 = nw.new_node(Nodes.MapRange, - input_kwargs={'Vector': group_input.outputs["Extension"], - 9: (-76.2600, 170.9500, -144.3800), 10: (10.0000, -10.0000, 0.0000)}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - simple_tube_v2 = nw.new_node(nodegroup_simple_tube_v2().name, - input_kwargs={'length_rad1_rad2': group_input.outputs["length_rad1_rad2"], - 'angles_deg': map_range_3.outputs["Vector"], - 'proportions': (0.2000, 0.2700, 0.5000), - 'aspect': group_input.outputs["aspect"], 'do_bezier': False, - 'fullness': group_input.outputs["fullness"]}) - - curve_length = nw.new_node(Nodes.CurveLength, - input_kwargs={'Curve': simple_tube_v2.outputs["Skeleton Curve"]}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: curve_length, 1: group_input.outputs["feather_density"]}, - attrs={'operation': 'MULTIPLY'}) - - resample_curve = nw.new_node(Nodes.ResampleCurve, - input_kwargs={'Curve': simple_tube_v2.outputs["Skeleton Curve"], 'Count': multiply}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': resample_curve}) - - reroute = nw.new_node(Nodes.Reroute, - input_kwargs={'Input': curve_to_mesh}) - - feather = nw.new_node(nodegroup_flying_feather().name, - input_kwargs={'Length Rad1 Rad2': group_input.outputs["Length Rad1 Rad2"]}) - - index = nw.new_node(Nodes.Index) - - attribute_statistic = nw.new_node(Nodes.AttributeStatistic, - input_kwargs={'Geometry': curve_to_mesh, 2: index}) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': index, 1: attribute_statistic.outputs["Min"], - 2: attribute_statistic.outputs["Max"]}) - - transfer_attribute_index = nw.new_node(Nodes.SampleNearest, - input_kwargs={'Geometry': curve_to_mesh, 'Sample Position': map_range_1.outputs["Result"]}) - - transfer_attribute = nw.new_node(Nodes.SampleIndex, - input_kwargs={'Geometry': curve_to_mesh, 'Index': transfer_attribute_index}) - - map_range_2 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': group_input.outputs["Extension"], 3: 115.6500, 4: 0.0000}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'Y': map_range_2.outputs["Result"]}) - - wing_feathers = [] - - for i in range(3): - float_curve = nw.new_node(Nodes.FloatCurve, - 
input_kwargs={'Factor': group_input.outputs["Wing Shape Sculpting"], - 'Value': (transfer_attribute, 'Value')}) - node_utils.assign_curve(float_curve.mapping.curves[0], - [(0.0000, 0.0000), (0.25, 0.2), (0.50, 0.4), - (0.75, 0.6), (1.0000, 0.8 - i * 0.02 + N(0., 0.02))]) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Vector': float_curve, 9: (0.0000, 80.0000, 0.0000), 10: combine_xyz}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - add = nw.new_node(Nodes.VectorMath, - input_kwargs={0: map_range.outputs["Vector"], 1: (0., -5 + 5 * i, (i - 1) * 8.)}) - - deg2rad = nw.new_node(nodegroup_deg2_rad().name, - input_kwargs={'Deg': add.outputs["Vector"]}) - - vector_curves = nw.new_node(Nodes.VectorCurve, - input_kwargs={'Fac': group_input.outputs["Wing Shape Sculpting"], - 'Vector': (transfer_attribute, 'Value')}) - node_utils.assign_curve(vector_curves.mapping.curves[0], - [(-1.0000, -0.0000), (0.0218, 0.4), (0.20, 0.45), - (0.5, 0.5), (0.65000, 0.6), (0.80, 0.7), (1.0000, 0.78 + N(0., 0.02))], - handles=['AUTO', 'VECTOR', 'AUTO', 'AUTO', 'VECTOR', 'AUTO', 'AUTO']) - node_utils.assign_curve(vector_curves.mapping.curves[1], [(-1.0000, 1.0000), (1.0000, 1.0000)]) - node_utils.assign_curve(vector_curves.mapping.curves[2], [(-1.0000, 1.0000), (1.0000, 1.0000)]) - - scale = nw.new_node(Nodes.VectorMath, - input_kwargs={0: vector_curves, 'Scale': U(1.6, 2.0) - i * 0.65}, - attrs={'operation': 'SCALE'}) - - instance_on_points = nw.new_node(Nodes.InstanceOnPoints, - input_kwargs={'Points': reroute, 'Instance': feather, 'Rotation': deg2rad, - 'Scale': scale.outputs["Vector"]}) - - random_value_1 = nw.new_node(Nodes.RandomValue, - input_kwargs={2: -0.01, 3: 0.01}) - - random_value_2 = nw.new_node(Nodes.RandomValue, - input_kwargs={2: -0.03, 3: 0.03, 'Seed': 1}) - - random_value_3 = nw.new_node(Nodes.RandomValue, - input_kwargs={2: -0.01, 3: 0.01, 'Seed': 2}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': random_value_1.outputs[1], 'Y': random_value_2.outputs[1], - 'Z': random_value_3.outputs[1]}) - - rotate_instances_1 = nw.new_node(Nodes.RotateInstances, - input_kwargs={'Instances': instance_on_points, 'Rotation': combine_xyz}) - wing_feathers.append(rotate_instances_1) - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': wing_feathers}) - - realize_instances = nw.new_node(Nodes.RealizeInstances, - input_kwargs={'Geometry': join_geometry_1}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': simple_tube_v2.outputs["Geometry"], - 'Skeleton Curve': simple_tube_v2.outputs["Skeleton Curve"], - 'Feathers': realize_instances}) - - -class FlyingBirdTail(PartFactory): - - tags = ['tail', 'wing'] - - def sample_params(self): - return { - 'Feather Length Rad1 Rad2': np.array((0.4, 0.06, 0.04)) * N(1, 0.1) * N(1, 0.1, 3), - 'Feather Rot Extent': np.array((25, -10, -16)) * N(1, 0.1, 3), - 'Feather Rot Rand Bounds': np.array((5.0, 5.0, 5.0)) * N(1, 0.1) * N(1, 0.05, 3), - 'N Feathers': int(N(16, 3)) - } - - def make_part(self, params): - part = nodegroup_to_part(nodegroup_flying_bird_tail, params) - return part - - -class FlyingBirdWing(PartFactory): - - tags = ['limb', 'wing'] - - def sample_params(self): - return { - 'length_rad1_rad2': np.array((clip_gaussian(1.2, 0.7, 0.4, 2), U(0.08, 0.13), 0.02)), - 'feather_density': 40, - 'aspect': N(0.35, 0.04), - 'fullness': N(4, 0.1), - 'Wing Shape Sculpting': U(0.6, 1), - 'Length Rad1 Rad2': np.array((0.6 * N(1, 0.2), 0.04, 0.04)), - 'Extension': U(0, 0.05) if U() < 0.8 else U(0.7, 1) 
- } - - def make_part(self, params): - # split extras is essential to make automatic rigging work. We will join them back together later - part = nodegroup_to_part(nodegroup_flying_bird_wing, params, split_extras=True) - part.joints = { - 0: Joint(rest=(0,0,0), bounds=np.array([[-35, 0, -70], [35, 0, 70]])), # shoulder - 0.27: Joint(rest=(0,0,0), bounds=np.array([[-35, 0, -70], [35, 0, 70]])), - 0.65: Joint(rest=(0,0,0), bounds=np.array([[-35, 0, -70], [35, 0, 70]])) # elbow - } - part.iks = {1.0: IKParams(name='wingtip', chain_length=3)} - part.settings['parent_extras_rigid'] = True - return part diff --git a/infinigen/assets/creatures/util/animation/curve_slither.py b/infinigen/assets/creatures/util/animation/curve_slither.py deleted file mode 100644 index 34682dffc..000000000 --- a/infinigen/assets/creatures/util/animation/curve_slither.py +++ /dev/null @@ -1,214 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Alexander Raistrick - - -import logging - -import bpy -import bpy_types -from mathutils import Vector - -import numpy as np -from numpy.random import uniform as U, normal as N - -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface - -import pdb -from infinigen.core.util import blender as butil - -@node_utils.to_nodegroup('nodegroup_add_wiggles', singleton=True, type='GeometryNodeTree') -def nodegroup_add_wiggles(nw: NodeWrangler): - # Code generated using version 2.6.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketFloat', 'Magnitude', 1.6800), - ('NodeSocketFloat', 'MagRandom', 0.5000), - ('NodeSocketVector', 'Up', (0.0000, 0.0000, 1.0000))]) - - curve_tangent = nw.new_node(Nodes.CurveTangent) - - cross_product = nw.new_node(Nodes.VectorMath, - input_kwargs={0: curve_tangent, 1: group_input.outputs["Up"]}, - attrs={'operation': 'CROSS_PRODUCT'}) - - index = nw.new_node(Nodes.Index) - - modulo = nw.new_node(Nodes.Math, input_kwargs={0: index, 1: 4.0000}, attrs={'operation': 'MODULO'}) - - less_than = nw.new_node(Nodes.Math, input_kwargs={0: modulo, 1: 2.0000}, attrs={'operation': 'LESS_THAN'}) - - map_range = nw.new_node(Nodes.MapRange, input_kwargs={'Value': less_than, 3: -1.0000}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: map_range.outputs["Result"], 1: group_input.outputs["Magnitude"]}, - attrs={'operation': 'MULTIPLY'}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: 1.0000, 1: group_input.outputs["MagRandom"]}, - attrs={'operation': 'SUBTRACT'}) - - random_value = nw.new_node(Nodes.RandomValue, input_kwargs={2: subtract}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: random_value.outputs[1]}, attrs={'operation': 'MULTIPLY'}) - - scale = nw.new_node(Nodes.VectorMath, - input_kwargs={0: cross_product.outputs["Vector"], 'Scale': multiply_1}, - attrs={'operation': 'SCALE'}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 'Offset': scale.outputs["Vector"]}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': set_position}, attrs={'is_active_output': True}) - 
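# A minimal usage sketch (illustrative assumptions: an existing Blender curve object
# named `curve_obj` and plausible input values). A transpiled nodegroup like the one
# above is typically attached as a GeometryNodes modifier, mirroring how
# `add_curve_slithers` further below applies `nodegroup_wiggles`:
#
#   from infinigen.core.util import blender as butil
#   butil.modify_mesh(curve_obj, 'NODES', node_group=nodegroup_add_wiggles(),
#                     ng_inputs={'Magnitude': 0.1, 'MagRandom': 0.5, 'Up': (0.0, 0.0, 1.0)},
#                     apply=False, show_viewport=True)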
-@node_utils.to_nodegroup('nodegroup_add_loopbacks', singleton=True, type='GeometryNodeTree') -def nodegroup_add_loopbacks(nw: NodeWrangler): - # Code generated using version 2.6.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketVector', 'Vector', (0.0000, 0.0000, 0.0000)), - ('NodeSocketFloat', 'Amount', 0.5800), - ('NodeSocketFloat', 'Randomness', 0.0000)]) - - index_1 = nw.new_node(Nodes.Index) - - add = nw.new_node(Nodes.Math, input_kwargs={0: index_1, 1: 1.0000}) - - modulo = nw.new_node(Nodes.Math, input_kwargs={0: add, 1: 2.0000}, attrs={'operation': 'MODULO'}) - - less_than = nw.new_node(Nodes.Math, input_kwargs={0: modulo}, attrs={'operation': 'LESS_THAN'}) - - map_range_1 = nw.new_node(Nodes.MapRange, input_kwargs={'Value': less_than, 3: -1.0000}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: map_range_1.outputs["Result"], 1: group_input.outputs["Amount"]}, - attrs={'operation': 'MULTIPLY'}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: 1.0000, 1: group_input.outputs["Randomness"]}, - attrs={'operation': 'SUBTRACT'}) - - random_value = nw.new_node(Nodes.RandomValue, input_kwargs={2: subtract, 'ID': index_1}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: random_value.outputs[1]}, attrs={'operation': 'MULTIPLY'}) - - scale = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["Vector"], 'Scale': multiply_1}, - attrs={'operation': 'SCALE'}) - - set_position_1 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 'Offset': scale.outputs["Vector"]}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': set_position_1}, attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_wiggles', singleton=True, type='GeometryNodeTree') -def nodegroup_wiggles(nw: NodeWrangler): - # Code generated using version 2.6.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketFloatDistance', 'Wavelength', 2.3300), - ('NodeSocketFloat', 'Magnitude', 1.6800), - ('NodeSocketFloat', 'MagRandom', 1.0000), - ('NodeSocketFloat', 'Loopyness', 0.5800), - ('NodeSocketFloat', 'LoopRandom', 0.0000), - ('NodeSocketFloat', 'AltitudeOffset', 0.00), - ('NodeSocketVector', 'Up', (0.0000, 0.0000, 1.0000))]) - - curve_tangent_1 = nw.new_node(Nodes.CurveTangent) - - capture_attribute = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 1: curve_tangent_1}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - divide = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Wavelength"], 1: 4.0000}, - attrs={'operation': 'DIVIDE'}) - - resample_curve = nw.new_node(Nodes.ResampleCurve, - input_kwargs={'Curve': capture_attribute.outputs["Geometry"], 'Length': divide}, - attrs={'mode': 'LENGTH'}) - - addwiggles = nw.new_node(nodegroup_add_wiggles().name, - input_kwargs={'Geometry': resample_curve, 'Magnitude': group_input.outputs["Magnitude"], 'MagRandom': group_input.outputs["MagRandom"], 'Up': group_input.outputs["Up"]}) - - addloopbacks = nw.new_node(nodegroup_add_loopbacks().name, - input_kwargs={'Geometry': addwiggles, 'Vector': capture_attribute.outputs["Attribute"], 'Amount': group_input.outputs["Loopyness"], 'Randomness': group_input.outputs["LoopRandom"]}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, input_kwargs={'Curve': addloopbacks, 'Fill 
Caps': True}) - - subdivision_surface = nw.new_node(Nodes.SubdivisionSurface, input_kwargs={'Mesh': curve_to_mesh, 'Level': 3}) - - mesh_to_curve = nw.new_node(Nodes.MeshToCurve, input_kwargs={'Mesh': subdivision_surface}) - - off = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': group_input.outputs['AltitudeOffset']}) - result = nw.new_node(Nodes.SetPosition, input_kwargs={'Geometry': mesh_to_curve, 'Offset': off}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': result}, attrs={'is_active_output': True}) - - -def add_curve_slithers(curve, snake_length): - params = { - 'Wavelength': snake_length / U(2, 4), - 'Magnitude': snake_length * 0.05 * N(1, 0.2), - 'MagRandom': U(0, 0.7), - 'Loopyness': 0, - 'LoopRandom': 0, - 'AltitudeOffset': 0.02 - } - butil.modify_mesh(curve, 'NODES', node_group=nodegroup_wiggles(), - ng_inputs=params, apply=False, show_viewport=True) - with butil.SelectObjects(curve): - bpy.ops.object.convert(target='MESH') - bpy.ops.object.convert(target='CURVE') - return curve - -def slither_along_path(obj, curve, speed, zoff_pct=0.7, orig_len=None): - - if not curve.type == 'CURVE': - with butil.SelectObjects(curve): - bpy.ops.object.convert(target='CURVE') - curve = bpy.context.active_object - if curve.type != 'CURVE': - message = f'slither_along_path failed, {curve.name=} had {curve.type=} but expected CURVE' - if curve.type == 'MESH': - message += f'. {len(curve.data.vertices)=}' - logging.warning(message) - return - - curve.data.twist_mode = 'Z_UP' - - xmax = max(v[0] for v in obj.bound_box) - - l = curve.data.splines[0].calc_length() - - zoff = zoff_pct * obj.dimensions[-1] / 2 - obj.location = (xmax,0,zoff) - obj.keyframe_insert(data_path="location", frame=0) - obj.location = (l, 0, zoff) - obj.keyframe_insert(data_path="location", frame=bpy.context.scene.frame_end) - - for fc in obj.animation_data.action.fcurves: - for k in fc.keyframe_points: - k.interpolation = 'LINEAR' - - butil.modify_mesh(obj, 'CURVE', object=curve, apply=False, show_viewport=True) - obj.rotation_euler = (0, 0, np.pi) - -def snap_curve_to_floor(curve, bvh, step_height=1): - - s = curve.data.splines[0] - for p in s.points: - raystart = Vector(p.co[:3]) + Vector((0, 0, step_height)) - loc, *_ = bvh.ray_cast(raystart, Vector((0, 0, -1))) - if loc is not None: - p.co = (*loc, 1) \ No newline at end of file diff --git a/infinigen/assets/creatures/util/animation/idle.py b/infinigen/assets/creatures/util/animation/idle.py deleted file mode 100644 index 7d9adac33..000000000 --- a/infinigen/assets/creatures/util/animation/idle.py +++ /dev/null @@ -1,153 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
- -# Authors: Alexander Raistrick - - -import logging - -import bpy -import bpy_types -import mathutils - -import numpy as np -from numpy.random import uniform as U, normal as N - -import pdb - -from infinigen.assets.creatures.util import creature, creature_util as cutil -from infinigen.core.util.math import clip_gaussian, randomspacing, lerp -from infinigen.core.util import blender as butil - -def compute_ik_length_height(targets): - bounds = [] - for i in range(3): - vmin = min(t.matrix_world.translation[i] for t in targets) - vmax = max(t.matrix_world.translation[i] for t in targets) - bounds.append([vmin, vmax]) - return np.array(bounds) - -def snap_iks_to_floor(targets, floor_bvh, minweight=0.7): - - assert floor_bvh is not None - - bpy.context.view_layer.update() - - get_targets = lambda k: [t for t in targets if k in t.name] - - bounds = compute_ik_length_height(targets) - - def find_floor_offset(t): - ray_origin = mathutils.Vector((t.matrix_world.translation.x, t.matrix_world.translation.y, bounds[2, 1])) - location, normal, index, dist = floor_bvh.ray_cast(ray_origin, mathutils.Vector((0, 0, -1))) - if location is None: - return None - return location - t.matrix_world.translation - - feet = get_targets('foot') - feet_offsets = [find_floor_offset(f) for f in feet] - - if any(off is None for off in feet_offsets): - logging.warning(f'snap_iks_to_floor found {feet_offsets=}, aborting snap operation') - return - - # dont allow the pose diff to be too large (ie, prevent weird behavior at cliffs) - for i, o in enumerate(feet_offsets): - if o.length > bounds[2, 1] - bounds[2, 0]: - logging.warning(f'snap_iks_to_floor ignoring too-long offset {o.length=}') - feet_offsets[i] = mathutils.Vector() - - for f, fo, in zip(feet, feet_offsets): - f.location += fo - - hips = get_targets('body') - if len(feet) == len(hips) * 2: - - # hips seem coupled to pairs of feet, take that into consideration - # TODO: Restructure to make detecting this more robust - - hip_offsets = [] - for i in range(len(feet) // 2): - o1, o2 = feet_offsets[2*i], feet_offsets[2*i + 1] - hip_off = minweight * min(o1, o2) + (1 - minweight) * max(o1, o2) - hip_offsets.append(hip_off) - - for h, ho in zip(hips, hip_offsets): - h.location += ho - - for o in get_targets('head'): # front-associated - o.location += hip_offsets[-1] - for o in get_targets('tail'): # back associated - o.location += hip_offsets[0] - - else: - logging.warning(f'Couldnt establish feet-hip mapping') - off = mathutils.Vector(np.array(feet_offsets).mean(axis=0)) - for o in targets: - if o in feet: - continue - o.location += off - -def idle_body_noise_drivers(targets, foot_motion_chance=0.2, head_benddown=1.0, body_mag=1.0, wing_mag=1.0): - - # all magnitudes are determined as multiples of the creatures overall length/height/width - bounds = compute_ik_length_height(targets) - ls = bounds[:, 1] - bounds[:, 0] - - # scalars for the whole creature - freq_scalar = N(1, 0.15) - mag_scalar = N(1, 0.15) - - def add_noise(t, k, axis, mag, freq, off=0, mode='noise', seeds=None): - d = t.driver_add(k, axis) - p = getattr(t, k)[axis] - - if k == 'location': - mag *= ls[axis] - - freq = freq / bpy.context.scene.render.fps - - freq *= freq_scalar - mag *= mag_scalar - - if mode == 'noise': - s1, s2 = seeds if seeds is not None else U(0, 1000, 2) # random offsets as 'seeds' - varying = f'noise.noise(({freq:.6f}*frame, {s1:.2f}, {s2:.2f}))' - elif mode == 'sin': - varying = f'sin({freq:6f}*frame*2*pi)' - else: - raise ValueError(mode) - - d.driver.expression = 
f'{p:.4f}+{mag:.4f}*({off:.4f}+{varying})' - - get_targets = lambda k: [t for t in targets if k in t.name] - - for i, t in enumerate(get_targets('body')): - add_noise(t, 'location', 0, mag=body_mag*0.025*N(1, 0.2), freq=0.25*N(1, 0.2)) - if i != 0: - add_noise(t, 'location', 2, mag=body_mag*0.015*N(1, 0.2), freq=0.5*N(1, 0.2), mode='sin') - - for t in get_targets('foot'): - if U() < foot_motion_chance: - add_noise(t, 'location', 0, mag=0.07*N(1, 0.1), freq=U(0.2, 0.7)) - add_noise(t, 'location', 2, mag=0.04*N(1, 0.1), freq=U(0.2, 0.7)) - - for t in get_targets('head'): - headfreq = 0.4 - add_noise(t, 'location', 0, mag=0.07*N(1, 0.1), freq=headfreq, off=-0.5*head_benddown) - add_noise(t, 'location', 1, mag=0.03*N(1, 0.1), freq=headfreq) - add_noise(t, 'location', 2, mag=0.2*N(1, 0.1), freq=headfreq/2, off=-0.7*head_benddown) - #add_noise(t, 'rotation_euler', 0, mag=0.4*N(1, 0.1), freq=U(0.1, 0.4)) - #add_noise(t, 'rotation_euler', 1, mag=0.4*N(1, 0.1), freq=U(0.1, 0.4)) - - seeds = U(0, 1000, 2) # synchronize wing motion a little bit - for t in get_targets('wingtip'): - add_noise(t, 'location', 0, mag=wing_mag*0.1*N(1, 0.1), freq=U(0.6, 4), seeds=seeds+N(0, 0.2, 2)) - add_noise(t, 'location', 2, mag=wing_mag*0.2*N(1, 0.1), freq=U(0.6, 4), seeds=seeds+N(0, 0.2, 2)) - - for t in get_targets('tail'): - for i in range(3): - add_noise(t, 'location', 0, mag=0.07*N(1, 0.1), freq=headfreq, off=-0.5) - -def head_look_around(targets): - pass \ No newline at end of file diff --git a/infinigen/assets/creatures/util/cloth_sim.py b/infinigen/assets/creatures/util/cloth_sim.py deleted file mode 100644 index c68915a90..000000000 --- a/infinigen/assets/creatures/util/cloth_sim.py +++ /dev/null @@ -1,127 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Alexander Raistrick -# Acknowledgment: This file draws inspiration from https://www.youtube.com/watch?v=YDrbyITWMGU by Mr. 
Cheebs - - -import pdb -import logging - -from numpy.random import normal, uniform - -import bpy - -from infinigen.core.surface import attribute_to_vertex_group - -from infinigen.core.util import blender as butil -from infinigen.core.util.math import dict_convex_comb -from infinigen.core.util.logging import Timer -from infinigen.core.nodes.node_wrangler import NodeWrangler, Nodes - -logger = logging.getLogger(__name__) - -def local_pos_rigity_mask(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketFloat', 'To Min', 0.4), - ('NodeSocketFloat', 'To Max', 0.9)]) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': nw.expose_input('Local Pos', attribute='local_pos')}) - - clamp = nw.new_node(Nodes.Clamp, - input_kwargs={'Value': nw.expose_input("Radius", attribute='skeleton_rad'), 'Min': 0.03, 'Max': 0.49}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: clamp, 1: -1.0}, - attrs={'operation': 'MULTIPLY'}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: clamp, 1: 1.5}, - attrs={'operation': 'MULTIPLY'}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': separate_xyz.outputs["Z"], 1: multiply, 2: multiply_1}) - - musgrave_texture = nw.new_node(Nodes.MusgraveTexture, - input_kwargs={'W': uniform(1e3), 'Scale': normal(10, 1)}, - attrs={'musgrave_dimensions': '4D'}) - - multiply_2 = nw.new_node(Nodes.Math, - input_kwargs={0: musgrave_texture, 1: normal(0.07, 0.007)}, - attrs={'operation': 'MULTIPLY'}) - - musgrave_texture_1 = nw.new_node(Nodes.MusgraveTexture, - input_kwargs={'Scale': normal(5, 0.5), 'W': uniform(1e3)}, - attrs={'musgrave_dimensions': '4D'}) - - multiply_3 = nw.new_node(Nodes.Math, - input_kwargs={0: musgrave_texture_1, 1: normal(0.12, 0.01)}, - attrs={'operation': 'MULTIPLY'}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_2, 1: multiply_3}) - - add_1 = nw.new_node(Nodes.Math, - input_kwargs={0: map_range.outputs["Result"], 1: add}) - - colorramp = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': add_1}) - colorramp.color_ramp.elements.new(1) - colorramp.color_ramp.elements[0].position = normal(0.23, 0.05) - colorramp.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) - colorramp.color_ramp.elements[1].position = normal(0.6, 0.05) - colorramp.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - colorramp.color_ramp.elements[2].position = 1.0 - colorramp.color_ramp.elements[2].color = (0.0, 0.0, 0.0, 1.0) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': colorramp.outputs["Color"], 3: group_input.outputs["To Min"], 4: group_input.outputs["To Max"]}) - - musgrave_texture_2 = nw.new_node(Nodes.MusgraveTexture) - - multiply_4 = nw.new_node(Nodes.Math, - input_kwargs={0: musgrave_texture_2, 1: normal(0.1, 0.02)}, - attrs={'operation': 'MULTIPLY'}) - - return nw.new_node(Nodes.Math, - input_kwargs={0: map_range_1.outputs["Result"], 1: multiply_4}) - -def bake_cloth(obj, settings=None, attributes=None, frame_start=None, frame_end=None): - - if frame_start is None: - frame_start = bpy.context.scene.frame_start - if frame_end is None: - frame_end = bpy.context.scene.frame_end - if settings is None: - settings = {} - if attributes is None: - attributes = {} - - mod = obj.modifiers.new('bake_cloth', 'CLOTH') - - mod.settings.effector_weights.gravity = settings.pop('gravity', 1) - mod.collision_settings.distance_min = 
settings.pop('distance_min', .015) - mod.collision_settings.use_self_collision = settings.pop('use_self_collision', False) - - for k, v in settings.items(): - setattr(mod.settings, k, v) - - with butil.DisableModifiers(obj): - for name, attr in attributes.items(): - vgroup = attribute_to_vertex_group(obj, attr, name=f'skin_sim.{name}') - setattr(mod.settings, name, vgroup.name) - - mod.point_cache.frame_start = frame_start - mod.point_cache.frame_end = frame_end - with butil.ViewportMode(obj, mode='OBJECT'), butil.SelectObjects(obj), Timer('Baking fish cloth'): - override = {'scene': bpy.context.scene, 'active_object': obj, 'point_cache': mod.point_cache} - bpy.ops.ptcache.bake(override, bake=True) - - return mod - - - diff --git a/infinigen/assets/creatures/util/geometry/blending.py b/infinigen/assets/creatures/util/geometry/blending.py deleted file mode 100644 index 6ad08d3fb..000000000 --- a/infinigen/assets/creatures/util/geometry/blending.py +++ /dev/null @@ -1,1181 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Jia Deng - - -""" -Module for constructing blending surfaces -See exmaple_use() for an example -""" -from __future__ import annotations -from multiprocessing.sharedctypes import Value -from typing import Callable, Iterable, Literal, Tuple -from networkx.classes import ordered -import numpy as np -import bpy -import mathutils -from geomdl import NURBS - -from infinigen.core.util.math import rotate_match_directions, normalize, project_to_unit_vector -from infinigen.core.util.math import wrap_around_cyclic_coord, new_domain_from_affine, affine_from_new_domain -from infinigen.core.util.math import FixedSeed -from infinigen.core.util import blender as butil - -from infinigen.assets.creatures.util.geometry.nurbs import blender_mesh_from_pydata, compute_cylinder_topology -from infinigen.assets.creatures.util.geometry import nurbs -from infinigen.assets.creatures.util.geometry import nurbs, lofting, skin_ops - -raise NotImplementedError ('blending.py not currently used, please re-add shapely as a dependency and delete this line') - -from shapely.geometry import Polygon, Point, LineString -import shapely -import rtree - -class CurveND: - def __init__(self, eval_fn: Callable[[np.array], Tuple[np.array, np.array]], dim, domain=(0, 1)): - self._eval_fn = eval_fn - if domain[1] <= domain[0]: - raise ValueError("invalid domain") - self._domain = domain - self._dim = dim - - @property - def domain(self): - return self._domain - - @property - def eval_fn(self): - return self._eval_fn - - @property - def dim(self): - return self._dim - - def evaluate_points_and_derivatives_at_t(self, t: np.array) -> Tuple[np.array, np.array]: - if (t < self.domain[0]).any() or (t > self.domain[1]).any(): - raise ValueError("out of domain t value") - - points, derivatives = self._eval_fn(t) - - if points.shape != t.shape + (self.dim,): - raise ValueError( - f"points {points.shape} has wrong shape, {t.shape}") - if derivatives.shape != t.shape + (self.dim,): - raise ValueError("derivatives has wrong shape") - - return (points, derivatives) - - def evaluate_points_and_derivatives(self, resolution: int) -> Tuple[np.array, np.array]: - t = np.linspace(self.domain[0], self.domain[1], resolution) - return self.evaluate_points_and_derivatives_at_t(t) - - def affine_transform_domain(self, a=1, b=0) -> CurveND: - """get a new curve (u(f(t)), v(f(t)) where f(t) = a * t + b""" - 
new_domain = new_domain_from_affine(self.domain, a, b) - - def new_eval_fn(t): - ft = a * t + b - p, d = self.eval_fn(ft) - return (p, a * d) - return CurveND(new_eval_fn, self.dim, domain=new_domain) - - def affine_new_domain(self, new_domain=(0, 1)) -> CurveND: - """ get an equivalent curve whose domain is new_domain. - Here new_domain as (1, 0) is valid: the new curve will have domain (0,1) - but with a flipped axis - """ - a, b = affine_from_new_domain(self.domain, new_domain) - return self.affine_transform_domain(a, b) - - def sub_curve(self, interval=(0, 1)) -> CurveND: - """ get a new curve restricted to interval [a,b] """ - return CurveND(self.eval_fn, self.dim, domain=interval) - - -class Curve2DFactory: - @staticmethod - def circle(center, start_pos, arc_angle=2 * np.pi) -> CurveND: - center = np.array(center) - start_pos = np.array(start_pos) - if center.shape != (2,) or start_pos.shape != (2,): - raise ValueError( - f"wrong shapes for center {center.shape} or start_pos {start_pos.shape}") - r = start_pos - center - rad = np.linalg.norm(r) - start_angle = np.arccos(r[0]/rad) - if r[1] < 0: - start_angle = 2 * np.pi - start_angle - - def eval_fn(t): - uv = np.stack([np.cos(t * arc_angle + start_angle) * rad + center[0], - np.sin(t * arc_angle + start_angle) * rad + center[1]], axis=-1) - duvdt = rad * arc_angle * np.stack([-np.sin(t * arc_angle + start_angle), - np.cos(t * arc_angle + start_angle)], axis=-1) - return (uv, duvdt) - return CurveND(eval_fn, dim=2) - - @staticmethod - def nurbs(ctrlpts: np.array, degree=3, knots: np.array = None, weights: np.array = None, make_cyclic=False) -> CurveND: - """returns a 2D curve. extra dimensions in ctrlpts are ignored - If make_cyclic is True, will create new control points and knots to wrap around - If knots is not specified, defaults to clamped uniform, unless make_cyclic is True, in which case defaults to unclamped uniform - """ - if ctrlpts.shape[-1] < 2: - raise ValueError( - f"control points have wrong shape {ctrlpts.shape}") - ctrlpts = ctrlpts[..., :2].reshape(-1, 2) - - if make_cyclic: - ctrlpts = np.concatenate([ctrlpts, ctrlpts[0:degree, :]], axis=0) - if knots is None: - knots = np.arange(len(ctrlpts) + degree + 1) - else: - knots = knots.append(knots[0:degree]) - if weights is not None: - weights = weights.append(weights[0:degree]) - else: - if knots is None: - knots = np.arange(len(ctrlpts) + degree + 1) - knots[0:degree] = knots[degree] - knots[-degree:] = knots[-degree-1] - - curve = NURBS.Curve(normalize_kv=False) - curve.degree = degree - curve.ctrlpts = ctrlpts - curve.knotvector = knots - if weights is not None: - curve.weights = weights - - def eval_fn(t): - d_tmp = np.empty(t.shape + (2, 2)) - for i in np.ndindex(t.shape): - d_tmp[i] = np.array(curve.derivatives(t[i], order=1)) - uv = d_tmp[..., 0, :] - duvdt = d_tmp[..., 1, :] - return (uv, duvdt) - - return CurveND(eval_fn, dim=2, domain=curve.domain) - - -class UVMesh: - def __init__(self, uvpoints, edges, faces, cyclic_v=False, pos_cross_edges=None, domain=None): - if uvpoints.shape[-1] != 2: - raise ValueError("wrong shape of uvpoints") - - uvpoints = uvpoints.reshape(-1, 2) - self._uvpoints = [uv for uv in uvpoints] - self._uvpoints_deleted = [False] * len(self._uvpoints) - - self._edges_of_point = {i: set() for i in range(len(self._uvpoints))} - for e in edges: - self._edges_of_point[e[0]].add(e[1]) - self._edges_of_point[e[1]].add(e[0]) - - self._faces = [list(f) for f in faces] - self._faces_deleted = [False] * len(faces) - - self._face_of_edge = { - 
(f[j-1], f[j]): i for i, f in enumerate(self._faces) for j in range(0, len(f))} - - self._cyclic_v = cyclic_v - self._pos_cross_edges = set(((e[0], e[1]) for e in pos_cross_edges)) - self._domain = domain - - self._vspan = domain[1][1] - domain[1][0] - voffsets = [np.cumsum([0] + [self._cross_edge_dir((f[i-1], f[i])) - for i in range(1, len(f))]) for f in self._faces] - - self._faces_cross_direction = [ - vs[np.argmax(vs != 0)] for vs in voffsets] - - self._polygons = [Polygon([self._uvpoints[p] + np.array([0, c * self._vspan]) - for c, p in zip(cs, f)]) for cs, f in zip(voffsets, faces)] - - self._polygons_rshift = [shapely.affinity.translate( - p, yoff=self._vspan) if self._faces_cross_direction[i] < 0 else None for i, p in enumerate(self._polygons)] - self._polygons_lshift = [shapely.affinity.translate( - p, yoff=-self._vspan) if self._faces_cross_direction[i] > 0 else None for i, p in enumerate(self._polygons)] - - self._rtree_idx = rtree.index.Index( - ((i, x.bounds, None) for ps in [self._polygons, self._polygons_rshift, self._polygons_lshift] - for i, x in enumerate(ps) if x is not None)) - - def export_uvmesh(self): - """return uvpoints, edges, faces""" - mask = ~np.array(self._uvpoints_deleted) - new_ids = np.cumsum(mask)-1 - uvpoints = np.array(self._uvpoints)[mask] - edges = [(new_ids[pt1], new_ids[pt2]) for pt1 in range(len(self._uvpoints)) - for pt2 in self._edges_of_point[pt1] - if not self._uvpoints_deleted[pt1] and not self._uvpoints_deleted[pt2]] - faces = [[new_ids[pt] for pt in f_pts] for f_id, f_pts in enumerate( - self._faces) if not self._faces_deleted[f_id]] - return (uvpoints, edges, faces) - - @staticmethod - def from_meshgrid(resolution_u: int, resolution_v: int, domain=((0, 1), (0, 1)), cyclic_v=False) -> UVMesh: - if cyclic_v and resolution_v <= 3: - raise ValueError("resoultion v is too low") - - d = domain - u = np.linspace(d[0][0], d[0][1], resolution_u) - v = np.linspace(d[1][0], d[1][1], resolution_v) - uv = np.stack(np.meshgrid(u, v, indexing='ij'), axis=-1) - - # drop the duplicates - if cyclic_v: - uv = uv[:, :-1, :] - edges, faces = compute_cylinder_topology( - resolution_u, resolution_v - cyclic_v, cyclic_v) - - cross_edges = [e for e in edges if e[0] % ( - resolution_v - 1) == resolution_v - 2 and e[1] % (resolution_v-1) == 0] if cyclic_v else [] - - return UVMesh(uv.reshape(-1, 2), edges, faces, cyclic_v, cross_edges, domain) - - def _enclosing_polygon(self, face_id, pt_coords): - point = Point(pt_coords) - for poly in [self._polygons[face_id], self._polygons_lshift[face_id], self._polygons_rshift[face_id]]: - if poly is not None and poly.covers(point): - return poly - return None - - def _intersecting_polygons_of_line(self, face_id, line: LineString): - if line is None: - return [] - res = [] - for poly in [self._polygons[face_id], self._polygons_lshift[face_id], self._polygons_rshift[face_id]]: - if poly is not None and poly.covers(line): - res.append(poly) - return res - - def _enclosing_faces_polys_edges_verts_of_point(self, coords): - if self._cyclic_v: - coords = self._wrap_around_v(coords) - if not self._within_domain(coords): - raise ValueError("coords must be within domain") - candidate_faces = self._rtree_idx.intersection(coords) - e_faces_polys = [(i, self._enclosing_polygon(i, coords)) - for i in candidate_faces if not self._faces_deleted[i]] - e_faces_polys = [(i, p) for i, p in e_faces_polys if p is not None] - e_edges = [(self._faces[f][i], self._faces[f][(i+1) % len(self._faces[f])]) for f, po in e_faces_polys for i in 
range(len(self._faces[f])) - if shapely.geometry.LineString([po.exterior.coords[i], po.exterior.coords[i+1]]).contains(Point(coords))] - e_edges = list( - set(((e[0], e[1]) if e[0] < e[1] else ((e[1], e[0])) for e in e_edges))) - e_verts = [pt for f, _ in e_faces_polys for pt in self._faces[f] if ( - np.array(coords) == np.array(self._uvpoints[pt])).all()] - e_verts = list(set(e_verts)) - return (e_faces_polys, e_edges, e_verts) - - @staticmethod - def _print_polygon(p: Polygon): - print(list(p.exterior.coords)) - - def _print_all(self): - print("points") - print([(i, p[0]) for i, p in enumerate( - zip(self._uvpoints, self._uvpoints_deleted)) if not p[1]]) - - print("edges") - print(sorted(self._edges_of_point.items())) - - print("cross edges") - print(sorted(self._pos_cross_edges)) - - print("faces") - print([(i, fd[0]) for i, fd in enumerate( - zip(self._faces, self._faces_deleted)) if not fd[1]]) - - print("face_of_edge") - print(sorted(self._face_of_edge.items())) - - print("polys") - print([list(p.exterior.coords) for p in self._polygons]) - - print("polly lshift") - print([list(p.exterior.coords) - for p in self._polygons_lshift if p is not None]) - - print("polly rshift") - print([list(p.exterior.coords) - for p in self._polygons_rshift if p is not None]) - - def _cross_edge_dir(self, e): - if e in self._pos_cross_edges: - return 1 - if (e[1], e[0]) in self._pos_cross_edges: - return -1 - return 0 - - def _get_enclosing_polygon(self, new_pt_coords, enclosing_face): - p_m = self._polygons[enclosing_face] - p_l = self._polygons_lshift[enclosing_face] - p_r = self._polygons_rshift[enclosing_face] - p_e = None - for p in [p_m, p_l, p_r]: - if p is not None and p.covers(Point(new_pt_coords)): - p_e = p - break - return p_e - - def _add_face(self, pts): - f_id = len(self._faces) - self._faces.append(pts) - self._faces_deleted.append(False) - self._face_of_edge.update( - ((pts[i-1], pts[i]), f_id) for i in range(len(pts))) - - vs = np.cumsum([0] + [self._cross_edge_dir((pts[i-1], pts[i])) - for i in range(1, len(pts))]) - - cross_dir = vs[np.argmax(vs != 0)] - self._faces_cross_direction.append(cross_dir) - - poly = Polygon([self._uvpoints[p] + np.array([0, c * self._vspan]) - for c, p in zip(vs, pts)]) - self._polygons.append(poly) - - poly_r = shapely.affinity.translate( - poly, yoff=self._vspan) if cross_dir < 0 else None - poly_l = shapely.affinity.translate( - poly, yoff=-self._vspan) if cross_dir > 0 else None - self._polygons_rshift.append(poly_r) - self._polygons_lshift.append(poly_l) - - self._rtree_idx.insert(f_id, poly.bounds, None) - if poly_r is not None: - self._rtree_idx.insert(f_id, poly_r.bounds, None) - if poly_l is not None: - self._rtree_idx.insert(f_id, poly_l.bounds, None) - - return f_id - - def _add_point(self, new_pt_coords): - new_pt = len(self._uvpoints) - self._uvpoints.append(new_pt_coords) - self._uvpoints_deleted.append(False) - self._edges_of_point[new_pt] = set() - return new_pt - - def _delete_edge(self, pt1, pt2): - self._edges_of_point[pt1].remove(pt2) - self._edges_of_point[pt2].remove(pt1) - if self._cyclic_v: - self._pos_cross_edges.discard((pt1, pt2)) - self._pos_cross_edges.discard((pt2, pt1)) - - def _add_edge(self, pt1, pt2, cross_dir=0): - self._edges_of_point[pt1].add(pt2) - self._edges_of_point[pt2].add(pt1) - if self._cyclic_v and cross_dir != 0: - self._pos_cross_edges.add( - (pt1, pt2) if cross_dir > 0 else (pt2, pt1)) - - def _get_cross_dir_from_pt(self, pt, enclosing_face, enclosing_polygon): - p_e = enclosing_polygon - pt_idx = 
self._faces[enclosing_face].index(pt) - pt = p_e.exterior.coords[pt_idx] - pt_poly_co = p_e.exterior.coords[pt_idx] - cross_dir = 1 if pt_poly_co[1] < self._domain[1][0] else ( - -1 if pt_poly_co[1] >= self._domain[1][1] else 0) - return cross_dir - - def _delete_edge_and_merge_faces(self, pt1, pt2): - f1 = self._face_of_edge.pop((pt1, pt2)) - f2 = self._face_of_edge.pop((pt2, pt1)) - - self._faces_deleted[f1] = True - self._faces_deleted[f2] = True - - f1_pts = np.array(self._faces[f1]) - f2_pts = np.array(self._faces[f2]) - - if len(set(f1_pts).intersection(set(f2_pts))) > 2: - raise ValueError( - "cannot merge faces that share more than one edge") - - new_f_pts = list(np.roll(f1_pts, -np.argmax(f1_pts == pt2)) - )[:-1] + list(np.roll(f2_pts, -np.argmax(f2_pts == pt1)))[:-1] - - self._delete_edge(pt1, pt2) - return self._add_face(new_f_pts) - - def _delete_point_and_merge_faces(self, pt): - """delete pt and its edges and faces""" - neighbor_pts = [i for i in self._edges_of_point[pt]] - faces = [self._face_of_edge[(pt, j)] for j in neighbor_pts] - new_f_pts = neighbor_pts[0:1] - face_count = 0 - while face_count < len(neighbor_pts): - cur_pt = new_f_pts[-1] - f = self._face_of_edge.get((pt, cur_pt), None) - if f is None: - break - f_pts = self._faces[f] - f_pts = list(np.roll(f_pts, -f_pts.index(pt))) - new_f_pts.extend(f_pts[2:]) - face_count += 1 - while face_count < len(neighbor_pts): - cur_pt = new_f_pts[0] - f = self._face_of_edge.get((cur_pt, pt), None) - if f is None: - break - f_pts = self._faces[f] - f_pts = list(np.roll(f_pts, -f_pts.index(cur_pt))) - new_f_pts = f_pts[2:] + new_f_pts - face_count += 1 - if face_count < len(neighbor_pts): - raise ValueError("non-manifold mesh") - if new_f_pts[-1] == new_f_pts[0]: - new_f_pts = new_f_pts[:-1] - - for f in faces: - self._faces_deleted[f] = True - for cur_pt in neighbor_pts: - self._delete_edge(pt, cur_pt) - - self._uvpoints_deleted[pt] = True - - return self._add_face(new_f_pts) - - def _split_face_with_new_edge(self, face_id, pt1, pt2): - # todo: make sure new edge is within face - # check if cross edge - self._faces_deleted[face_id] = True - - f_pts = self._faces[face_id] - r_pts = list(np.roll(f_pts, -f_pts.index(pt1))) - r_pt2_idx = r_pts.index(pt2) - - if r_pt2_idx == 1: - raise ValueError("(pt1, pt2) is already an edge") - - cross_dir = np.cumsum([self._cross_edge_dir( - (r_pts[i-1], r_pts[i]))for i in range(1, r_pt2_idx+1)])[-1] - - self._add_edge(pt1, pt2, cross_dir) - f1 = self._add_face(r_pts[:r_pt2_idx+1]) - f2 = self._add_face(r_pts[r_pt2_idx:] + [pt1]) - return (f1, f2) - - def _split_face_with_new_point(self, face_id, pt_coords): - new_pt = self._add_point(pt_coords) - f_pts = self._faces[face_id] - p_e = self._get_enclosing_polygon(pt_coords, face_id) - if p_e is None: - raise ValueError("no enclosing polygon") - - self._faces_deleted[face_id] = True - for i in range(len(f_pts)): - pt1 = f_pts[i-1] - pt2 = f_pts[i] - cross_dir1 = self._get_cross_dir_from_pt(pt1, face_id, p_e) - cross_dir2 = self._get_cross_dir_from_pt(pt2, face_id, p_e) - self._add_edge(pt1, new_pt, cross_dir1) - self._add_edge(pt2, new_pt, cross_dir2) - self._add_face([pt1, pt2, new_pt]) - return new_pt - - def _triangulate_face_from_pt(self, face_id, pt): - f_pts = self._faces[face_id] - if len(f_pts) <= 3: - return 0 - - poly = self._polygons[face_id] - - pt_idx = f_pts.index(pt) - pt_coords = poly.exterior.coords[pt_idx] - r_pts = np.roll(f_pts, -pt_idx) - - pt1_coords = poly.exterior.coords[f_pts.index(r_pts[1])] - for pt2 in r_pts[2:-1]: - 
pt2_coords = poly.exterior.coords[f_pts.index(pt2)] - line = LineString([pt_coords, pt2_coords]) - if poly.covers(line) and \ - not line.covers(Point(pt1_coords)): - new_f0, new_f = self._split_face_with_new_edge( - face_id, pt, pt2) - return 1 + self._triangulate_face_from_pt(new_f, pt) - return 0 - - def _triangulate_all_faces_of_point(self, pt): - for f in self._faces_of_point(pt): - self._triangulate_face(f) - - def _triangulate_face(self, face_id): - poly = self._polygons[face_id] - f_pts = self._faces[face_id] - if len(f_pts) <= 3: - return - for i, pt1 in enumerate(f_pts): - for j, pt2 in enumerate(f_pts): - if i != j and pt2 not in self._edges_of_point[pt1]: - line = LineString( - [poly.exterior.coords[i], poly.exterior.coords[j]]) - if poly.covers(line) and \ - not poly.exterior.covers(line): - f1, f2 = self._split_face_with_new_edge( - face_id, pt1, pt2) - self._triangulate_face(f1) - self._triangulate_face(f2) - return - - def _split_edge_with_new_point(self, pt1, pt2, pt_coords, enclosing_face=None, enclosing_polygon=None): - new_pt = self._add_point(pt_coords) - - f1 = self._face_of_edge.pop((pt1, pt2), None) - f2 = self._face_of_edge.pop((pt2, pt1), None) - - if f1 is not None: - self._faces_deleted[f1] = True - if enclosing_face is None: - enclosing_face = f1 - enclosing_polygon = self._get_enclosing_polygon( - pt_coords, enclosing_face) - if f2 is not None: - self._faces_deleted[f2] = True - if enclosing_face is None: - enclosing_face = f2 - enclosing_polygon = self._get_enclosing_polygon( - pt_coords, enclosing_face) - - self._delete_edge(pt1, pt2) - - cross_dir1 = self._get_cross_dir_from_pt( - pt1, enclosing_face, enclosing_polygon) - cross_dir2 = self._get_cross_dir_from_pt( - pt2, enclosing_face, enclosing_polygon) - self._add_edge(pt1, new_pt, cross_dir1) - self._add_edge(pt2, new_pt, cross_dir2) - - # done adding edges and points. 
now add faces - f1_pts = self._faces[f1] if f1 is not None else None - f2_pts = self._faces[f2] if f2 is not None else None - f1_pts.insert(f1_pts.index(pt1) + 1, - new_pt) if f1 is not None else None - f2_pts.insert(f2_pts.index(pt2) + 1, - new_pt) if f2 is not None else None - if f1 is not None: - new_f1 = self._add_face(f1_pts) - self._triangulate_face_from_pt(new_f1, new_pt) - if f2 is not None: - new_f2 = self._add_face(f2_pts) - self._triangulate_face_from_pt(new_f2, new_pt) - - return new_pt - - def _wrap_around_v(self, uv): - uv = np.array(uv) - new_uv = uv.copy() - new_uv[..., 1] = wrap_around_cyclic_coord(uv[..., 1], *self._domain[1]) - return new_uv - - def _within_domain(self, uv): - uv = np.array(uv) - return (self._domain[0][0] <= uv[..., 0]).all() and (uv[..., 0] < self._domain[0][1]).all() \ - and (self._domain[1][0] <= uv[..., 1]).all() and (uv[..., 1] < self._domain[1][1]).all() - - def _edges_of_face(self, f): - return [(f[i-1], f[i]) for i in range(len(self._faces[f]))] - - def _faces_of_point(self, pt): - return [self._face_of_edge[(pt, j)] for j in self._edges_of_point[pt] if (pt, j) in self._face_of_edge] - - def _poly_of_point_on_face(self, face_id, pt): - for poly in [self._polygons[face_id], self._polygons_lshift[face_id], self._polygons_rshift[face_id]]: - if poly is not None and \ - self._within_domain(np.array(poly.exterior.coords[self._faces[face_id].index(pt)])): - return poly - - def add_edge_and_remesh(self, pt1, pt2, cross_dir=0): - if self._uvpoints_deleted[pt1] or self._uvpoints_deleted[pt2]: - raise ValueError("pt1 or pt2 does not exist") - if pt2 in self._edges_of_point[pt1]: - if (pt1, pt2) in self._face_of_edge: - self._triangulate_face(self._face_of_edge[(pt1, pt2)]) - if (pt2, pt1) in self._face_of_edge: - self._triangulate_face(self._face_of_edge[(pt2, pt1)]) - return - - shared_f = set(self._faces_of_point(pt1)).intersection( - set(self._faces_of_point(pt2))) - if len(shared_f) > 0: - if len(shared_f) > 1: - raise ValueError("non-convex faces or redundant points") - f1, f2 = self._split_face_with_new_edge( - list(shared_f)[0], pt1, pt2) - self._triangulate_face(f1) - self._triangulate_face(f2) - return - - pt1_coords = self._uvpoints[pt1] - pt2_coords = self._uvpoints[pt2].copy() - pt2_coords[1] += cross_dir * self._vspan - line = LineString([pt1_coords, pt2_coords]) - - for pt in self._edges_of_point[pt1]: - pt_coords = self._uvpoints[pt].copy() - pt_coords[1] += self._cross_edge_dir((pt1, pt)) * self._vspan - if line.covers(Point(pt_coords)): - new_f = self._delete_point_and_merge_faces(pt) - self._triangulate_face_from_pt(new_f, pt1) - self.add_edge_and_remesh(pt1, pt2, cross_dir) - self._triangulate_all_faces_of_point(pt1) - return - - for f in self._faces_of_point(pt1): - f_pts = self._faces[f] - - poly = self._poly_of_point_on_face(f, pt1) - - # for numerical stability. 
get pt1_coords from poly - line = LineString( - [poly.exterior.coords[f_pts.index(pt1)], pt2_coords]) - - if poly.crosses(line): - for i in range(0, len(f_pts)): - q1 = f_pts[i] - q2 = f_pts[(i+1) % len(f_pts)] - q1_coords = poly.exterior.coords[i] - q2_coords = poly.exterior.coords[i+1] - if LineString([q1_coords, q2_coords]).crosses(line): - new_f = self._delete_edge_and_merge_faces(q1, q2) - self._triangulate_face_from_pt(new_f, pt1) - self.add_edge_and_remesh(pt1, pt2, cross_dir) - self._triangulate_all_faces_of_point(pt1) - return - raise ValueError("should never be here") - - raise ValueError("should never be here") - - def add_point_and_remesh(self, pt_coords): - pt_coords = np.array(pt_coords) - if not self._within_domain(pt_coords): - raise ValueError("pt_coords must be within domain") - e_faces_polys, e_edges, e_verts = self._enclosing_faces_polys_edges_verts_of_point( - pt_coords) - - new_pt = None - if len(e_verts) > 0: - if len(e_verts) > 1: - raise ValueError("cannot have more than 1 e_vert") - new_pt = e_verts[0] - elif len(e_edges) > 0: - if len(e_edges) > 1: - raise ValueError("cannot have more than 1 e_edge") - e = e_edges[0] - new_pt = self._split_edge_with_new_point(e[0], e[1], pt_coords) - else: - if len(e_faces_polys) != 1: - raise ValueError("must have an enclosing face") - f, _ = e_faces_polys[0] - new_pt = self._split_face_with_new_point(f, pt_coords) - return new_pt - - def add_line_and_remesh(self, start_coords, end_coords): - pt1 = self.add_point_and_remesh(self._wrap_around_v(start_coords)) - pt2 = self.add_point_and_remesh(self._wrap_around_v(end_coords)) - q1 = (start_coords[1] - self._domain[1][0]) // self._vspan - q2 = (end_coords[1] - self._domain[1][0]) // self._vspan - cross_dir = q2 - q1 - if cross_dir > 0: - cross_dir = 1 - if cross_dir < 0: - cross_dir = -1 - self.add_edge_and_remesh(pt1, pt2, cross_dir) - return (pt1, pt2) - - def add_poly_curve_and_remesh(self, uvpoints: np.array, cyclic_curve=False, vloop=False): - if uvpoints.shape[-1] != 2: - raise ValueError("wrong shape of curve") - uvpoints = uvpoints.reshape(-1, 2) - pts = [] - for i in range(len(uvpoints)-1): - pt1, pt2 = self.add_line_and_remesh(uvpoints[i], uvpoints[i+1]) - if i == 0: - pts.append(pt1) - else: - if pt1 != pts[-1]: - raise ValueError("numerical issues!") - pts.append(pt2) - if cyclic_curve: - if vloop: - cross_dir = 1 if uvpoints[-1][1] >= uvpoints[0][1] else -1 - self.add_edge_and_remesh(pts[-1], pts[0], cross_dir) - else: - self.add_line_and_remesh(uvpoints[-1], uvpoints[0]) - return pts - - def add_curve_and_remesh(self, curve: CurveND, resolution: int, cyclic_curve=False, vloop=False, - cut_inside=False, cut_outside=False) -> Iterable[int]: - t = np.linspace(*curve.domain, resolution) - if cyclic_curve: - t = t[:-1] - uvpoints, _ = curve.evaluate_points_and_derivatives_at_t(t) - pts = self.add_poly_curve_and_remesh(uvpoints, cyclic_curve, vloop) - if cyclic_curve and (cut_inside or cut_outside): - comps = self.connected_components(pts) - if len(comps) < 2: - raise ValueError("two few components") - poly_points = list(uvpoints) - if vloop: - head = uvpoints[0].copy() - tail1 = uvpoints[0].copy() - tail2 = uvpoints[-1].copy() - head[0] = self._domain[0][0] - 0.1 - tail1[1] += self._vspan - tail2[0] = self._domain[0][0] - 0.1 - poly_points = [head] + poly_points + [tail1, tail2] - poly = Polygon(poly_points) - for comp in comps: - pt = comp[0] - pt_coords = self._uvpoints[pt] - pt_coords1 = pt_coords.copy() - pt_coords1[1] += self._vspan - pt_coords2 = pt_coords.copy() - 
pt_coords2[1] -= self._vspan - if poly.covers(Point(pt_coords)) or poly.covers(Point(pt_coords1)) or poly.covers(Point(pt_coords2)): - if cut_inside: - self.remove_points(comp) - else: - if cut_outside: - self.remove_points(comp) - return pts - - def connected_components(self, boundary_pts: Iterable[int]) -> Iterable[Iterable[int]]: - color_of_pts = - np.ones(len(self._uvpoints)) - color_of_pts[boundary_pts] = -2 - color_of_pts[self._uvpoints_deleted] = -2 - cur_color = 0 - for i in range(len(color_of_pts)): - if color_of_pts[i] == -1: - stack = [i] - while len(stack) > 0: - pt = stack.pop() - color_of_pts[pt] = cur_color - for pt2 in self._edges_of_point[pt]: - if color_of_pts[pt2] == -1: - stack.append(pt2) - cur_color += 1 - res = [[i for i, c in enumerate(color_of_pts) if c == color] - for color in range(cur_color)] - return res - - def remove_points(self, pts: Iterable[int]): - for pt in pts: - self._uvpoints_deleted[pt] = True - for f in self._faces_of_point(pt): - self._faces_deleted[f] = True - for j in self._edges_of_point[pt]: - self._edges_of_point[j].remove(pt) - self._edges_of_point[pt] = set() - - -class Surface: - """ - General parametric surface S(u,v) - eval_fn: evaluation function that returns 3D points and derivatives (s(u,v), ds/du(u,v), ds/dv(u,v)) - See SurfaceFactory for examples of eval_fn - """ - - def __init__(self, eval_fn: Callable[[np.array], Tuple[np.array, np.array]], - domain=((0, 1), (0, 1)), cyclic_u=False, cyclic_v=False): - self._eval_fn = eval_fn - - if (np.array(domain)[:, 1] <= np.array(domain)[:, 0]).any(): - raise ValueError("invalid domain") - self._domain = tuple(domain) - - self._cyclic_u = cyclic_u - self._cyclic_v = cyclic_v - - @property - def eval_fn(self): - return self._eval_fn - - @property - def domain(self): - return self._domain - - @property - def cyclic_u(self): - return self._cyclic_u - - @property - def cyclic_v(self): - return self._cyclic_v - - def affine_transform_domain(self, ua=1, ub=0, va=1, vb=0) -> Surface: - """ get a reparameterized surface G(u,v) = S(ua*u + ub, va * v + vb) """ - new_domain_u = new_domain_from_affine(self.domain[0], ua, ub) - new_domain_v = new_domain_from_affine(self.domain[1], va, vb) - new_domain = (new_domain_u, new_domain_v) - - def new_eval_fn(uv): - fuv = np.stack( - [ua * uv[..., 0] + ub, va * uv[..., 1] + vb], axis=-1) - p, dsdu, dsdv = self.eval_fn(fuv) - return (p, dsdu * ua, dsdv * va) - return Surface(new_eval_fn, new_domain, self.cyclic_u, self.cyclic_v) - - def affine_new_domain(self, new_domain=((0, 1), (0, 1))) -> CurveND: - """ - get an equivalent Surface whose domain is new_domain - new_domain such as ((1, 0), (0,1)) is valid; the new surface will still - have domain ((0,1),(0,1)) but with a flipped u axis - """ - ua, ub = affine_from_new_domain(self.domain[0], new_domain[0]) - va, vb = affine_from_new_domain(self.domain[1], new_domain[1]) - return self.affine_transform_domain(ua, ub, va, vb) - - def create_mesh(self, resolution_u: int, resolution_v: int): - points, _, _ = self.evaluate_points_and_derivatives( - resolution_u, resolution_v) - points = points.reshape(-1, 3) - edges, faces = compute_cylinder_topology( - resolution_u, resolution_v, self.cyclic_v) - return blender_mesh_from_pydata(points, edges, faces) - - def create_mesh_from_uvmesh(self, uvmesh: UVMesh): - uvpoints, edges, faces = uvmesh.export_uvmesh() - points, _, _ = self.evaluate_points_and_derivatives_at_uv(uvpoints) - return blender_mesh_from_pydata(points, edges, faces) - - def 
evaluate_points_and_derivatives_at_uv(self, uv: np.array) -> Tuple[np.array, np.array, np.array]: - if uv.shape[-1] != 2: - raise ValueError("wrong uv shape") - - def check_domain(u, domain, cyclic, name): - if cyclic: - return - if (u < domain[0]).any() or (u > domain[1]).any(): - raise ValueError( - f"{name} out of domain, {domain}, {self.domain}") - - check_domain(uv[..., 0], self.domain[0], self.cyclic_u, "u") - check_domain(uv[..., 1], self.domain[1], self.cyclic_v, "v") - - eval_uv = uv.copy() - if self.cyclic_u: - eval_uv[..., 0] = wrap_around_cyclic_coord( - uv[..., 0], *self.domain[0]) - if self.cyclic_v: - eval_uv[..., 1] = wrap_around_cyclic_coord( - uv[..., 1], *self.domain[1]) - - points, derivatives_u, derivatives_v = self._eval_fn(eval_uv) - if points.shape != uv.shape[:-1] + (3,): - raise ValueError("points has wrong shape", points.shape, uv.shape) - if derivatives_u is not None and derivatives_u.shape != uv.shape[:-1] + (3,): - raise ValueError("derivatives u has wrong shape") - if derivatives_v is not None and derivatives_v.shape != uv.shape[:-1] + (3,): - raise ValueError("derivatives v has wrong shape") - return (points, derivatives_u, derivatives_v) - - def evaluate_points_and_derivatives(self, resolution_u: int, resolution_v: int) -> Tuple[np.array, np.array, np.array]: - d = self.domain - u = np.linspace(d[0][0], d[0][1], resolution_u) - v = np.linspace(d[1][0], d[1][1], resolution_v) - uv = np.stack(np.meshgrid(u, v, indexing='ij'), axis=-1) - return self.evaluate_points_and_derivatives_at_uv(uv) - - -class RailCurve: - """ - constructs a rail curve on a Surface surf S(u,v) and a Curve2D curve_uv (u(t),v(t)) - The (u(t), v(t)) should be within S(u,v)'s domain - The constructed rail curve's domain is always (0,1). curve_uv can have arbitrary domain - """ - - def __init__(self, surf: Surface, curve_uv: CurveND): - self._surf = surf - self._curve_uv = curve_uv.affine_new_domain((0, 1)) - - def evaluate_points_derivatives_normals(self, t: np.array) -> Tuple[np.array, np.array]: - uv, duvdt = self._curve_uv.evaluate_points_and_derivatives_at_t(t) - points, dsdu, dsdv = self._surf.evaluate_points_and_derivatives_at_uv( - uv) - dcdt = dsdu * duvdt[..., 0, None] + dsdv * duvdt[..., 1, None] - z = np.cross(dsdu, dsdv) - return (points, dcdt, z) - - -class SurfaceFactory: - @staticmethod - def from_blender_nurbs(s: bpy.types.Spline) -> Surface: - surf = nurbs.blender_nurbs_to_geomdl(s) - - def eval_fn(uv): - d_tmp = np.empty(uv.shape[:-1] + (2, 2, 3)) - for i in np.ndindex(uv.shape[:-1]): - d_tmp[i] = np.array(surf.derivatives(*uv[i], 1)) - points = d_tmp[..., 0, 0, :] - dsdu = d_tmp[..., 1, 0, :] - dsdv = d_tmp[..., 0, 1, :] - return (points, dsdu, dsdv) - return Surface(eval_fn, domain=surf.domain, cyclic_u=s.use_cyclic_u, cyclic_v=s.use_cyclic_v) - - @staticmethod - def plane(center, normal, domain=((-1, 1), (-1, 1))) -> Surface: - center = np.array(center) - normal = np.array(normal) - - def eval_fn(uv): - points = np.concatenate( - [uv, np.zeros(uv.shape[:-1] + (1,))], axis=-1) - dsdu = np.zeros_like(points) - dsdu[..., 0] = 1 - dsdv = np.zeros_like(points) - dsdv[..., 1] = 1 - upward = np.array([0, 0, 1]) - rot_mat = np.squeeze(rotate_match_directions( - upward[None], normal[None])) - points = np.einsum('...ij,...j->...i', rot_mat, - points) + center[None] - dsdu = np.einsum('...ij,...j->...i', rot_mat, dsdu) - dsdv = np.einsum('...ij,...j->...i', rot_mat, dsdv) - - return (points, dsdu, dsdv) - return Surface(eval_fn, domain) - - @staticmethod - def blending(r1: 
RailCurve, r2: RailCurve, alpha=(0, 0), w=(0.1, 0.1), sweep_left=(False, False)) -> Surface: - """ - Constructs a blending surface B(s,t) that spans two rail curves r1(t) and r2(t), where s,t in [0,1], - such that r1(t) = B(0,t) and r2(t) = B(1,t) for all t in [0,1], and that B(s,t) blends smoothly between them. - The blending surface smoothly sweeps one rail curve toward the other to span a new surface. - - r1(t) and r2(t) share the same t, i.e. with fixed t, r1(t) and r2(t) are "corresponding" points. - It is important that r1(t) and r2(t) travel in the same direction (e.g. counterclockwise), otherwise you will get - a twisted surface. You can flip the travel direction via CurveND.affine_new_domain - - Algorithm by Daniel Filip, "Blending parametric surfaces", ACM Trans. on Graphics, 1989, with a modification to - guarantee that we sweep a rail curve consistently to one side (left or right, but not mixed) and avoid twisted surfaces - - alpha: parameter in [0,1] that controls the direction in which to sweep a rail curve. If alpha=0, sweep more in the orthogonal direction; - if alpha=1, sweep more in the direction of the corresponding point in the other rail curve. There is an alpha for each rail curve. - - w: parameter in [0,1] that controls the curvature of the blending surface at the rail curve. If w=1, it will approximately give a - large circular arc that connects the two rail curves. Larger w results in a slower transition. One w for each rail curve. - - sweep_left: given a rail curve, we have two choices of constructing a blend: we can sweep to the left side (i.e. leaving the right side visible) - or to the right. By default we sweep right. We use the right-hand convention: z = (u cross v) points outward (toward the visible side of the base surface), - (rail_curve_tangent cross z) points to the right.
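For reference, the patch computed in eval_fn below is the cubic Hermite combination B(s,t) = H1(s)*c1(t) + H2(s)*c2(t) + H3(s)*T1(t) + H4(s)*T2(t), with H1(s) = 2s^3 - 3s^2 + 1, H2 = 1 - H1, H3(s) = s(s-1)^2 and H4(s) = s^2(s-1), where c1, c2 are the rail-curve points and T1, T2 the blending tangents; at s=0 this reduces to r1(t) and at s=1 to r2(t).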
- """ - alpha1, alpha2 = alpha - w1, w2 = w - - def eval_fn(st): - s = st[..., 0] - t = st[..., 1] - c1, dc1dt, z1 = r1.evaluate_points_derivatives_normals(t) - c2, dc2dt, z2 = r2.evaluate_points_derivatives_normals(t) - - def _compute_blending_tangent(dcdt, k, z, alpha, w, flip): - if flip: - z = -z - - # unit vector on tangent plane orthogonal to curve pointing to covered side of the curve after blending - n = normalize(np.cross(dcdt, z), disallow_zero_norm=True) - - N = normalize(n + alpha * project_to_unit_vector(k, dcdt)) - k_norm = np.linalg.norm(k, axis=-1) - g = k_norm + (k * N).sum(axis=-1) - l = np.square(k_norm) * 2 - l[g > 0] /= g[g > 0] - return l[..., None] * w * N - - k = c2 - c1 - T1 = _compute_blending_tangent( - dc1dt, k, z1, alpha1, w1, sweep_left[0]) - T2 = _compute_blending_tangent( - dc2dt, k, z2, alpha2, w2, sweep_left[1]) - - H1 = s * s * (2 * s - 3) + 1 - H2 = 1 - H1 - H3 = s * np.square(s - 1) - H4 = s * s * (s - 1) - - points = H1[..., None] * c1 + H2[..., None] * \ - c2 + H3[..., None] * T1 + H4[..., None] * T2 - return (points, None, None) - - return Surface(eval_fn) - - -def example_use(): - n = 4 - m = 10 - resolution_s = 50 - resolution_t = 50 - with FixedSeed(1103): - def make_object(): - skin = skin_ops.random_skin(1, n, m) - skeleton = np.hstack( - (np.random.normal(0, 0.2, [5, 2]), np.linspace(0, 5, 5).reshape(-1, 1))) - method = 'blender' - obj = lofting.loft(skeleton, skin, method=method, - face_size=0.1, cyclic_v=True) - return obj - obj1 = make_object() - obj2 = make_object() - obj2.location = mathutils.Vector((5, 0, 0)) - - with butil.SelectObjects([obj1, obj2]): - bpy.ops.object.transform_apply( - location=True, rotation=True, scale=True) - - # base surface 1, with domain normalized to ((0,1), (0,1)) - # domain normalization isn't required, but may be convenient for specifying the curve in uv space - surf1 = SurfaceFactory.from_blender_nurbs( - obj1.data.splines[0]).affine_new_domain(((0, 1), (0, 1))) - r1 = RailCurve( - surf1, Curve2DFactory.circle([0.5, 0.8], [0.5, 0.6])) - - # base surface 2, with domain normalized to ((0,1), (0,1)), but with a flipped u axis - surf2 = SurfaceFactory.from_blender_nurbs( - obj2.data.splines[0]).affine_new_domain(((1, 0), (0, 1))) - r2 = RailCurve( - surf2, Curve2DFactory.circle([0.5, 0.3], [0.5, 0.5])) - - b = SurfaceFactory.blending(r1, r2, alpha=( - 0, 0), w=(0.5, 0.5), sweep_left=(True, False)) - b.create_mesh(resolution_s, resolution_t) - - -def example_use2(): - n = 4 - m = 10 - resolution_s = 50 - resolution_t = 50 - with FixedSeed(1103): - def make_object(): - skin = skin_ops.random_skin(1, n, m) - skeleton = np.hstack( - (np.random.normal(0, 0.2, [5, 2]), np.linspace(0, 5, 5).reshape(-1, 1))) - method = 'blender' - obj = lofting.loft(skeleton, skin, method=method, - face_size=0.1, cyclic_v=True) - return obj - obj1 = make_object() - obj2 = make_object() - obj2.location = mathutils.Vector((5, 0, 0)) - - with butil.SelectObjects([obj1, obj2]): - bpy.ops.object.transform_apply( - location=True, rotation=True, scale=True) - - # base surface 1, with domain normalized to ((0,1), (0,1)) - # domain normalization isn't required, but may be convenient for specifying the curve in uv space - surf1 = SurfaceFactory.from_blender_nurbs( - obj1.data.splines[0]).affine_new_domain(((0, 1), (0, 1))) - ctrlpts = np.array([[1, 0], [1, 1], [0, 1], [-1, 1], [-1, 0], [-1, -1], [0, -1], [ - 1, -1]]) * np.array([-1, -1])[None] * 0.2 + np.array([0.5, 0.8])[None] - r1 = RailCurve( - surf1, Curve2DFactory.nurbs(ctrlpts, 
make_cyclic=True)) - - # base surface 2, with domain normalized to ((0,1), (0,1)), but with a flipped u axis - surf2 = SurfaceFactory.from_blender_nurbs( - obj2.data.splines[0]).affine_new_domain(((1, 0), (0, 1))) - r2 = RailCurve( - surf2, Curve2DFactory.circle([0.5, 0.3], [0.5, 0.5])) - - b = SurfaceFactory.blending(r1, r2, alpha=( - 0, 0), w=(0.5, 0.5), sweep_left=(True, False)) - b.create_mesh(resolution_s, resolution_t) - - -def example_use3(): - n = 4 - m = 10 - resolution_s = 50 - resolution_t = 50 - with FixedSeed(1103): - uv_mesh = UVMesh.from_meshgrid( - resolution_s, resolution_t, cyclic_v=True) - - def make_object(): - skin = skin_ops.random_skin(1, n, m) - skeleton = np.hstack( - (np.random.normal(0, 0.2, [5, 2]), np.linspace(0, 5, 5).reshape(-1, 1))) - method = 'blender' - obj = lofting.loft(skeleton, skin, method=method, - face_size=0.1, cyclic_v=True) - return obj - obj1 = make_object() - - with butil.SelectObjects([obj1, ]): - bpy.ops.object.transform_apply( - location=True, rotation=True, scale=True) - - surf1 = SurfaceFactory.from_blender_nurbs( - obj1.data.splines[0]).affine_new_domain(((0, 1), (0, 1))) - butil.delete([obj1, ]) - - #uv_mesh.add_line_and_remesh((0.2, -0.2), (0.29, 0.1)) - resolution_c = 50 - curve1 = Curve2DFactory.circle([0.5, 0.1], [0.5, 0.3]) - curve2 = Curve2DFactory.nurbs( - np.array([[0.5, 0.1], [0.5, 1.1]]), degree=1) - comps0 = uv_mesh.connected_components([]) - if False: - uv_mesh.add_curve_and_remesh( - curve2, resolution_c, cyclic_curve=True, vloop=True, cut_inside=True, cut_outside=False) - else: - uv_mesh.add_curve_and_remesh( - curve1, resolution_c, cyclic_curve=True, vloop=False, cut_inside=True, cut_outside=False) - obj2 = surf1.create_mesh_from_uvmesh(uv_mesh) - - -def example_use4(): - n = 4 - m = 10 - resolution_s = 50 - resolution_t = 50 - with FixedSeed(1103): - def make_object(): - skin = skin_ops.random_skin(1, n, m) - skeleton = np.hstack( - (np.random.normal(0, 0.2, [5, 2]), np.linspace(0, 5, 5).reshape(-1, 1))) - method = 'blender' - obj = lofting.loft(skeleton, skin, method=method, - face_size=0.1, cyclic_v=True) - return obj - obj1 = make_object() - obj2 = make_object() - obj2.location = mathutils.Vector((5, 0, 0)) - - with butil.SelectObjects([obj1, obj2]): - bpy.ops.object.transform_apply( - location=True, rotation=True, scale=True) - - # base surface 1, with domain normalized to ((0,1), (0,1)) - # domain normalization isn't required, but may be convenient for specifying the curve in uv space - surf1 = SurfaceFactory.from_blender_nurbs( - obj1.data.splines[0]).affine_new_domain(((0, 1), (0, 1))) - ctrlpts = np.array([[1, 0], [1, 1], [0, 1], [-1, 1], [-1, 0], [-1, -1], [0, -1], [ - 1, -1]]) * np.array([-1, -1])[None] * 0.2 + np.array([0.5, 0.8])[None] - curve1 = Curve2DFactory.nurbs(ctrlpts, make_cyclic=True) - r1 = RailCurve(surf1, curve1) - - # base surface 2, with domain normalized to ((0,1), (0,1)), but with a flipped u axis - surf2 = SurfaceFactory.from_blender_nurbs( - obj2.data.splines[0]).affine_new_domain(((1, 0), (0, 1))) - curve2 = Curve2DFactory.circle([0.5, 0.3], [0.5, 0.5]) - r2 = RailCurve(surf2, curve2) - - b = SurfaceFactory.blending(r1, r2, alpha=( - 0, 0), w=(0.5, 0.5), sweep_left=(True, False)) - b.create_mesh(resolution_s, resolution_t) - - # replace obj1, obj2 with custom mesh - butil.delete([obj1, obj2]) - uv_mesh1 = UVMesh.from_meshgrid(resolution_s, resolution_t) - uv_mesh1.add_curve_and_remesh(curve1, resolution_t, cyclic_curve=True, cut_inside=True) - uv_mesh2 = UVMesh.from_meshgrid(resolution_s, 
resolution_t) - uv_mesh2.add_curve_and_remesh(curve2, resolution_t, cyclic_curve=True, cut_inside=True) - - obj1 = surf1.create_mesh_from_uvmesh(uv_mesh1) - obj2 = surf2.create_mesh_from_uvmesh(uv_mesh2) - diff --git a/infinigen/assets/creatures/util/hair.py b/infinigen/assets/creatures/util/hair.py deleted file mode 100644 index 667f638ff..000000000 --- a/infinigen/assets/creatures/util/hair.py +++ /dev/null @@ -1,421 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Alexander Raistrick -# Acknowledgement: This file draws inspiration from https://www.youtube.com/watch?v=dCIKH649gac by Hey Pictures - -import pdb -import warnings -import logging - -import bpy -import bmesh -import mathutils - -import numpy as np -from scipy.spatial import KDTree - -from infinigen.core.util import blender as butil -from infinigen.core import surface -from infinigen.core.nodes.node_wrangler import NodeWrangler, Nodes -from infinigen.core.nodes import node_utils - -from infinigen.assets.creatures.util.nodegroups.hair import nodegroup_comb_direction, nodegroup_duplicate_to_clumps, \ - nodegroup_hair_position, nodegroup_comb_hairs, nodegroup_strand_noise, nodegroup_hair_length_rescale, \ - nodegroup_snap_roots_to_surface - -logger = logging.getLogger(__name__) - -def add_hair_particles(obj, params, props): - - _, mod = butil.modify_mesh(obj, 'PARTICLE_SYSTEM', apply=False, return_mod=True) - - settings = mod.particle_system.settings - settings.type = 'HAIR' - for k, v in params.items(): - setattr(settings, k, v) - - for k, v in props.items(): - setattr(mod.particle_system, k, v) - -def as_hair_bsdf(mat, hair_bsdf_params): - - assert mat.use_nodes - - new_mat = mat.copy() - new_mat.name = f'as_hair_bsdf({mat.name})' - ng = new_mat.node_tree - - child = lambda inp: next(link.from_node for link in ng.links if link.to_socket == inp) - - try: - out = ng.nodes['Material Output'] - shader = child(out.inputs['Surface']) - rgb = child(shader.inputs['Base Color']) - except StopIteration: - # shader didnt match expected structure, abort and use original shader - warnings.warn(f'as_hair_bsdf failed for {mat.name=}, did not match expected structure') - return new_mat - - nw = NodeWrangler(ng) - hair_bsdf = nw.new_node(Nodes.PrincipledHairBSDF, input_kwargs={'Color': rgb, **hair_bsdf_params}) - nw.new_node(Nodes.MaterialOutput, input_kwargs={'Surface': hair_bsdf}) - - return new_mat - -def compute_hair_placement_vertgroup(obj, root, avoid_features_dist): - - avoid_types = ['Eyeball', 'Teeth', 'Tongue']#, 'Nose'] - extras = [o for o in butil.iter_object_tree(root) if 'extra' in o.name] - avoid_extras = [o for o in extras if any(n in o.name for n in avoid_types)] - - avoid_verts = [] - for o in avoid_extras: - for v in o.data.vertices: - avoid_verts.append(o.matrix_world @ v.co) - avoid_verts = np.array(avoid_verts).reshape(-1, 3) - - verts = np.array([obj.matrix_world @ v.co for v in obj.data.vertices]) - if len(avoid_verts): - kd = KDTree(avoid_verts) - dists, _ = kd.query(verts, k=1) - else: - dists = np.full(len(verts), 1e5) - - tag_bald_mask = np.zeros(len(verts), dtype=np.float32) - if 'tag_bald' in obj.data.attributes: - obj.data.attributes['tag_bald'].data.foreach_get('value', tag_bald_mask) - - idxs = np.where((dists > avoid_features_dist) & (tag_bald_mask < 0.5))[0] - - group = obj.vertex_groups.new(name='hair_placement') - group.add(idxs.tolist(), 1.0, 'ADD') # .tolist() 
necessary to avoid np.int64 type error - - return group - -@node_utils.to_nodegroup('nodegroup_decode_noise', singleton=True, type='GeometryNodeTree') -def nodegroup_decode_noise(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'MinMaxScale', (0.0, 0.0, 0.0)), - ('NodeSocketGeometry', 'Source', None), - ('NodeSocketVector', 'Source Position', (0.0, 0.0, 0.0))]) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': group_input.outputs["MinMaxScale"]}) - - noise_texture = nw.new_node(Nodes.MusgraveTexture, - input_kwargs={'Scale': separate_xyz.outputs["Z"], 'Detail': 5.0}) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': noise_texture.outputs["Fac"], 3: separate_xyz.outputs["X"], 4: separate_xyz.outputs["Y"]}) - - transfer_attribute = nw.new_node(Nodes.SampleNearestSurface, - input_kwargs={ - 'Mesh': group_input.outputs["Source"], - 'Value': map_range_1.outputs["Result"], - 'Sample Position': group_input.outputs["Source Position"] - }) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Attribute': (transfer_attribute, 'Value')}) - -@node_utils.to_nodegroup('nodegroup_hair_grooming', singleton=True, type='GeometryNodeTree') -def nodegroup_hair_grooming(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketObject', 'Object', None), - ('NodeSocketVector', 'Length MinMaxScale', (0.014, 0.04, 40.0)), - ('NodeSocketVector', 'Puff MinMaxScale', (0.14, 0.40, 40.0)), - ('NodeSocketFloat', 'Combing', 0.0), - ('NodeSocketFloat', 'Strand Random Mag', 0.001), - ('NodeSocketFloat', 'Strand Perlin Mag', 0.05), - ('NodeSocketFloat', 'Strand Perlin Scale', 33.38), - ('NodeSocketInt', 'Tuft Amount', 1), - ('NodeSocketFloat', 'Tuft Spread', 0.005), - ('NodeSocketFloat', 'Tuft Clumping', 0.5), - ('NodeSocketFloat', 'Root Radius', 0.01), - ('NodeSocketFloat', 'Post Clump Noise Mag', 0.0), - ('NodeSocketFloat', 'Hair Length Pct Min', 0.7)]) - - hairposition = nw.new_node(nodegroup_hair_position().name, - input_kwargs={'Curves': group_input.outputs["Geometry"]}) - - object_info = nw.new_node(Nodes.ObjectInfo, - input_kwargs={'Object': group_input.outputs["Object"]}) - - combdirection = nw.new_node(nodegroup_comb_direction().name, - input_kwargs={'Surface': object_info.outputs["Geometry"], 'Root Positiion': hairposition.outputs["Root Position"]}) - - decode_length = nw.new_node(nodegroup_decode_noise().name, - input_kwargs={'MinMaxScale': group_input.outputs["Length MinMaxScale"], 'Source': object_info.outputs["Geometry"], 'Source Position': hairposition.outputs["Root Position"]}, - label='Decode Length') - - decode_puff = nw.new_node(nodegroup_decode_noise().name, - input_kwargs={'MinMaxScale': group_input.outputs["Puff MinMaxScale"], 'Source': object_info.outputs["Geometry"], 'Source Position': hairposition.outputs["Root Position"]}, - label='Decode Puff') - - combhairs = nw.new_node(nodegroup_comb_hairs().name, - input_kwargs={'Curves': group_input.outputs["Geometry"], 'Root Position': hairposition.outputs["Root Position"], 'Comb Dir': combdirection.outputs["Combing Direction"], 'Surface Normal': combdirection.outputs["Surface Normal"], 'Length': decode_length, 'Puiff': group_input.outputs["Combing"], 'Comb': decode_puff}) - - strandnoise = nw.new_node(nodegroup_strand_noise().name, - 
input_kwargs={'Geometry': combhairs, 'Random Mag': group_input.outputs["Strand Random Mag"], 'Perlin Mag': group_input.outputs["Strand Perlin Mag"], 'Perlin Scale': group_input.outputs["Strand Perlin Scale"]}) - - duplicatetoclumps = nw.new_node(nodegroup_duplicate_to_clumps().name, - input_kwargs={'Geometry': strandnoise, 'Surface Normal': combdirection.outputs["Surface Normal"], 'Amount': group_input.outputs["Tuft Amount"], 'Tuft Spread': group_input.outputs["Tuft Spread"], 'Tuft Clumping': group_input.outputs["Tuft Clumping"]}) - - random_value = nw.new_node(Nodes.RandomValue, - input_kwargs={0: (-1.0, -1.0, -1.0)}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - scale = nw.new_node(Nodes.VectorMath, - input_kwargs={0: random_value.outputs["Value"], 'Scale': group_input.outputs["Post Clump Noise Mag"]}, - attrs={'operation': 'SCALE'}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': duplicatetoclumps, 'Offset': scale.outputs["Vector"]}) - - hairlengthrescale = nw.new_node(nodegroup_hair_length_rescale().name, - input_kwargs={'Curves': set_position, 'Min': group_input.outputs['Hair Length Pct Min']}) - - snaprootstosurface = nw.new_node(nodegroup_snap_roots_to_surface().name, - input_kwargs={'Target': object_info.outputs["Geometry"], 'Curves': hairlengthrescale}) - - spline_parameter = nw.new_node(Nodes.SplineParameter) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': spline_parameter.outputs["Factor"], 3: group_input.outputs["Root Radius"], 4: 0.0}) - - set_curve_radius = nw.new_node(Nodes.SetCurveRadius, - input_kwargs={'Curve': snaprootstosurface, 'Radius': map_range.outputs["Result"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_curve_radius}) - -def mat_attr_dependencies(node_tree): - attrs = set() - for node in node_tree.nodes: - if node.bl_idname == Nodes.Attribute: - attrs.add(node.attribute_name) - elif node.bl_idname == "ShaderNodeGroup": - attrs = attrs | mat_attr_dependencies(node.node_tree) - - return attrs - -def geo_transfer_hair_attributes(nw, obj, attrs): - - group_input = nw.new_node(Nodes.GroupInput) - - hairposition = nw.new_node(nodegroup_hair_position().name, - input_kwargs={'Curves': group_input.outputs["Geometry"]}) - - object_info = nw.new_node(Nodes.ObjectInfo, - input_kwargs={'Object': obj}) - - attrs_out = {} - for attr_name in attrs: - if not attr_name in obj.data.attributes: - logger.warn(f'Attempted to geo_transfer_hair_attributes() including {attr_name=} which is not present on {obj=}. 
Available are {list(obj.data.attributes.keys())}') - continue - - obj_attr = obj.data.attributes[attr_name] - - named_attr = nw.new_node(Nodes.NamedAttribute, - attrs={'data_type': obj_attr.data_type}, - input_kwargs={'Name': attr_name}) - transfer = nw.new_node(Nodes.SampleNearestSurface, - attrs={'data_type': obj_attr.data_type}, - input_kwargs={ - 'Mesh': object_info.outputs['Geometry'], - "Value": named_attr, - 'Sample Position': hairposition - }) - attrs_out[attr_name] = (transfer, 'Value') - - nw.new_node(Nodes.GroupOutput, input_kwargs={ - 'Geometry': group_input.outputs['Geometry'], **attrs_out}) - -def configure_hair(obj, root, hair_genome: dict, apply=True, is_dynamic=None): - - if is_dynamic is None: - is_dynamic = any(m.type == 'ARMATURE' for m in obj.modifiers) - - # re-parameterize density params - sa = butil.surface_area(obj) - count = int(sa * hair_genome['density']) - n_guide_hairs = count // hair_genome['clump_n'] - hair_genome['grooming']['Tuft Amount'] = hair_genome['clump_n'] - - logger.debug(f'Computing hair placement vertex group') - avoid_group = compute_hair_placement_vertgroup(obj, root, - avoid_features_dist=hair_genome['avoid_features_dist']) - - logger.debug(f'Add particle system with {n_guide_hairs=}') - add_hair_particles(obj, params={'count': n_guide_hairs}, - props={'vertex_group_density': avoid_group.name}) - - logger.debug(f'Converting particles to curves') - with butil.SelectObjects(obj): - for m in obj.modifiers: - if m.type == 'PARTICLE_SYSTEM': - m.show_viewport = True - bpy.ops.curves.convert_from_particle_system() - curves = bpy.context.active_object - - with butil.SelectObjects(obj): - bpy.ops.object.particle_system_remove() - - logger.debug(f'Performing geonodes hair grooming') - with butil.DisableModifiers(obj): - _, mod = butil.modify_mesh(curves, 'NODES', apply=False, return_mod=True) - mod.node_group = nodegroup_hair_grooming() - surface.set_geomod_inputs(mod, {'Object': obj, **hair_genome['grooming']}) - - if apply: - butil.apply_modifiers(curves, mod=mod) - - curves.parent = obj - curves.matrix_parent_inverse = obj.matrix_world.inverted() # keep prexisting transform - curves.data.surface = obj - - if len(obj.material_slots) == 0: - return - - if obj.active_material is not None: - - hair_mat = as_hair_bsdf(obj.active_material, hair_genome['material']) - - logger.debug(f'Transfer material attr dependencies from surf to curves') - attr_deps = mat_attr_dependencies(hair_mat.node_tree) - attr_deps = [a for a in attr_deps if a in obj.data.attributes] - surface.add_geomod(curves, geo_transfer_hair_attributes, apply=apply, - input_kwargs=dict(obj=obj, attrs=attr_deps), attributes=attr_deps) - curves.active_material = hair_mat - - if is_dynamic: - attach_hair_to_surface(curves, obj) - - curves.name = obj.name + '.hair_curves' - - return curves - -@node_utils.to_nodegroup('nodegroup_transfer_uvs_to_curves_vec3', singleton=True) -def nodegroup_transfer_uvs_to_curves_vec3(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketObject', 'Object', None), - ('NodeSocketString', 'from_uv', None), - ('NodeSocketString', 'to_attr', None)]) - - object_info = nw.new_node(Nodes.ObjectInfo, - input_kwargs={'Object': group_input.outputs["Object"]}, - attrs={'transform_space': 'RELATIVE'}) - obj = object_info.outputs["Geometry"] - - domain = 'POINT' - uvtype = 'FLOAT_VECTOR' - - uv = nw.new_node(Nodes.NamedAttribute, - 
input_kwargs={'Name': group_input.outputs["from_uv"]}, - attrs={'data_type': uvtype}) - - capture = nw.new_node( - Nodes.CaptureAttribute, - input_kwargs={'Geometry': obj, 'Value': uv}, - attrs={'data_type': uvtype, 'domain': domain} - ) - - root_pos = nw.new_node(nodegroup_hair_position().name, [group_input.outputs['Geometry']]) - - nearest_idx = nw.new_node(Nodes.SampleNearest, input_kwargs={ - 'Geometry': capture.outputs['Geometry'], - 'Sample Position': root_pos - }, - attrs={'domain': domain}) - #transfer_attribute = nw.new_node(Nodes.SampleNearest, - # input_kwargs={ - # 'Mesh': capture.outputs['Geometry'], - # 'Value': capture.outputs["Attribute"], - # 'Sample Position': root_pos - # }, - # attrs={'data_type': 'FLOAT_VECTOR'}) - transfer_attribute = nw.new_node(Nodes.SampleIndex, input_kwargs={ - 'Geometry': capture.outputs['Geometry'], - 'Index': nearest_idx.outputs['Index'], - 'Value': capture.outputs["Attribute"] - }, - attrs={'data_type': uvtype, 'domain': domain}) - - store_named_attribute = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={ - 'Geometry': group_input.outputs["Geometry"], - 'Name': group_input.outputs['to_attr'], - 'Value': transfer_attribute}, - attrs={'data_type': 'FLOAT_VECTOR', 'domain': 'CURVE'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': store_named_attribute}) - -def transfer_uvs_to_curves(curves, target, uv_name): - - # blender doesnt seem to support writing directly to FLOAT2 uv attributes. - # lets write to a FLOAT_VECTOR then change it over to a FLOAT2 - - curve_uv_attr = 'surface_uv_coordinate' - butil.modify_mesh(curves, 'NODES', node_group=nodegroup_transfer_uvs_to_curves_vec3(), - ng_inputs={'Object': target, 'from_uv': uv_name, 'to_attr': curve_uv_attr}, apply=True) - - # rip uvs to np array - n = len(curves.data.curves) - uvs = np.empty(3 * n, dtype=np.float32) - attr = curves.data.attributes[curve_uv_attr] - assert attr.domain == 'CURVE' and attr.data_type == 'FLOAT_VECTOR' - attr.data.foreach_get('vector', uvs) - curves.data.attributes.remove(attr) - - # write back as FLOAT2 - uvs = uvs.reshape(n, 3)[:, :2].reshape(-1) - attr = curves.data.attributes.new(curve_uv_attr, type='FLOAT2', domain='CURVE') - attr.data.foreach_set('vector', uvs) - -@node_utils.to_nodegroup('nodegroup_deform_curves_on_surface', singleton=True) -def nodegroup_deform_curves_on_surface(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None)]) - - deform_curves_on_surface = nw.new_node('GeometryNodeDeformCurvesOnSurface', - input_kwargs={'Curves': group_input.outputs["Geometry"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': deform_curves_on_surface}) - - -def attach_hair_to_surface(curves, target): - - # target object needs UVMap and rest_position attribute, - # curves obj needs surface_uv_coordinate attribute - # defined in https://docs.blender.org/manual/en/latest/modeling/geometry_nodes/curve/deform_curves_on_surface.html - - surface.write_attribute(target, lambda nw: nw.new_node(Nodes.InputPosition), 'rest_position', apply=True) - with butil.ViewportMode(target, mode='EDIT'): - bpy.ops.mesh.select_all(action='SELECT') - bpy.ops.uv.smart_project(island_margin=0.03) - assert len(target.data.uv_layers) > 0 - - curves.data.surface = target - curves.data.surface_uv_map = target.data.uv_layers[-1].name - transfer_uvs_to_curves(curves, target, curves.data.surface_uv_map) - - 
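# At this point `target` carries a UV map and a 'rest_position' attribute, and `curves` carries the
# 'surface_uv_coordinate' attribute; the Deform Curves on Surface modifier added below relies on all three
# to keep hair roots attached as the underlying (e.g. armature-deformed) surface moves.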
butil.modify_mesh(curves, 'NODES', apply=False, show_viewport=True, - node_group=nodegroup_deform_curves_on_surface()) - - - - diff --git a/infinigen/assets/creatures/util/nodegroups/__init__.py b/infinigen/assets/creatures/util/nodegroups/__init__.py deleted file mode 100644 index 1f51d8fa9..000000000 --- a/infinigen/assets/creatures/util/nodegroups/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from . import geometry, attach, curve \ No newline at end of file diff --git a/infinigen/assets/creatures/util/nodegroups/attach.py b/infinigen/assets/creatures/util/nodegroups/attach.py deleted file mode 100644 index 60cb482cb..000000000 --- a/infinigen/assets/creatures/util/nodegroups/attach.py +++ /dev/null @@ -1,234 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Alexander Raistrick - - -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface -from .math import nodegroup_deg2_rad -from .curve import nodegroup_warped_circle_curve, nodegroup_smooth_taper, nodegroup_profile_part - -@node_utils.to_nodegroup('nodegroup_part_surface', singleton=True, type='GeometryNodeTree') -def nodegroup_part_surface(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Skeleton Curve', None), - ('NodeSocketGeometry', 'Skin Mesh', None), - ('NodeSocketFloatFactor', 'Length Fac', 0.0), - ('NodeSocketVectorEuler', 'Ray Rot', (0.0, 0.0, 0.0)), - ('NodeSocketFloat', 'Rad', 0.0)]) - - sample_curve = nw.new_node(Nodes.SampleCurve, - input_kwargs={'Curve': group_input.outputs["Skeleton Curve"], 'Factor': group_input.outputs["Length Fac"]}, - attrs={'mode': 'FACTOR'}) - - vector_rotate = nw.new_node(Nodes.VectorRotate, - input_kwargs={'Vector': sample_curve.outputs["Tangent"], 'Rotation': group_input.outputs["Ray Rot"]}, - attrs={'rotation_type': 'EULER_XYZ'}) - - raycast = nw.new_node(Nodes.Raycast, - input_kwargs={'Target Geometry': group_input.outputs["Skin Mesh"], 'Source Position': sample_curve.outputs["Position"], 'Ray Direction': vector_rotate, 'Ray Length': 5.0}) - - lerp = nw.new_node(Nodes.MapRange, - input_kwargs={'Vector': group_input.outputs["Rad"], 9: sample_curve.outputs["Position"], 10: raycast.outputs["Hit Position"]}, - label='lerp', - attrs={'data_type': 'FLOAT_VECTOR', 'clamp': False}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Position': lerp.outputs["Vector"], 'Hit Normal': raycast.outputs["Hit Normal"], 'Tangent': sample_curve.outputs["Tangent"], 'Skeleton Pos': sample_curve.outputs["Position"]}) - -@node_utils.to_nodegroup('nodegroup_part_surface_simple', singleton=True, type='GeometryNodeTree') -def nodegroup_part_surface_simple(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Skeleton Curve', None), - ('NodeSocketGeometry', 'Skin Mesh', None), - ('NodeSocketVector', 'Length, Yaw, Rad', (0.0, 0.0, 0.0))]) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': group_input.outputs["Length, Yaw, Rad"]}) - - clamp_1 = nw.new_node(Nodes.Clamp, - input_kwargs={'Value': 
separate_xyz.outputs["X"]}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': 1.5708, 'Y': separate_xyz.outputs["Y"], 'Z': 1.5708}) - - part_surface = nw.new_node(nodegroup_part_surface().name, - input_kwargs={'Skeleton Curve': group_input.outputs["Skeleton Curve"], 'Skin Mesh': group_input.outputs["Skin Mesh"], 'Length Fac': clamp_1, 'Ray Rot': combine_xyz, 'Rad': separate_xyz.outputs["Z"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Position': part_surface.outputs["Position"], 'Hit Normal': part_surface.outputs["Hit Normal"], 'Tangent': part_surface.outputs["Tangent"]}) - -@node_utils.to_nodegroup('nodegroup_raycast_rotation', singleton=True, type='GeometryNodeTree') -def nodegroup_raycast_rotation(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVectorEuler', 'Rotation', (0.0, 0.0, 0.0)), - ('NodeSocketVector', 'Hit Normal', (0.0, 0.0, 1.0)), - ('NodeSocketVector', 'Curve Tangent', (0.0, 0.0, 1.0)), - ('NodeSocketBool', 'Do Normal Rot', False), - ('NodeSocketBool', 'Do Tangent Rot', False)]) - - align_euler_to_vector = nw.new_node(Nodes.AlignEulerToVector, - input_kwargs={'Vector': group_input.outputs["Hit Normal"]}) - - rotate_euler = nw.new_node(Nodes.RotateEuler, - input_kwargs={'Rotation': group_input.outputs["Rotation"], 'Rotate By': align_euler_to_vector}) - - if_normal_rot = nw.new_node(Nodes.Switch, - input_kwargs={0: group_input.outputs["Do Normal Rot"], 8: group_input.outputs["Rotation"], 9: rotate_euler}, - label='if_normal_rot', - attrs={'input_type': 'VECTOR'}) - - align_euler_to_vector_1 = nw.new_node(Nodes.AlignEulerToVector, - input_kwargs={'Rotation': group_input.outputs["Rotation"], 'Vector': group_input.outputs["Curve Tangent"]}) - - rotate_euler_1 = nw.new_node(Nodes.RotateEuler, - input_kwargs={'Rotation': align_euler_to_vector_1, 'Rotate By': group_input.outputs["Rotation"]}, - attrs={'space': 'LOCAL'}) - - if_tangent_rot = nw.new_node(Nodes.Switch, - input_kwargs={0: group_input.outputs["Do Tangent Rot"], 8: if_normal_rot.outputs[3], 9: rotate_euler_1}, - label='if_tangent_rot', - attrs={'input_type': 'VECTOR'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Output': if_tangent_rot.outputs[3]}) - - -@node_utils.to_nodegroup('nodegroup_surface_muscle', singleton=True, type='GeometryNodeTree') -def nodegroup_surface_muscle(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Skin Mesh', None), - ('NodeSocketGeometry', 'Skeleton Curve', None), - ('NodeSocketVector', 'Coord 0', (0.4, 0.0, 1.0)), - ('NodeSocketVector', 'Coord 1', (0.5, 0.0, 1.0)), - ('NodeSocketVector', 'Coord 2', (0.6, 0.0, 1.0)), - ('NodeSocketVector', 'StartRad, EndRad, Fullness', (0.0, 0.0, 0.0)), - ('NodeSocketVector', 'ProfileHeight, StartTilt, EndTilt', (0.0, 0.0, 0.0)), - ('NodeSocketBool', 'Debug Points', False)]) - - cube = nw.new_node(Nodes.MeshCube, - input_kwargs={'Size': (0.03, 0.03, 0.03)}) - - part_surface_simple = nw.new_node(nodegroup_part_surface_simple().name, - input_kwargs={'Skeleton Curve': group_input.outputs["Skeleton Curve"], 'Skin Mesh': group_input.outputs["Skin Mesh"], 'Length, Yaw, Rad': group_input.outputs["Coord 0"]}) - - transform_2 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': cube, 'Translation': part_surface_simple.outputs["Position"]}) - - part_surface_simple_1 = 
nw.new_node(nodegroup_part_surface_simple().name, - input_kwargs={'Skeleton Curve': group_input.outputs["Skeleton Curve"], 'Skin Mesh': group_input.outputs["Skin Mesh"], 'Length, Yaw, Rad': group_input.outputs["Coord 1"]}) - - transform_1 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': cube, 'Translation': part_surface_simple_1.outputs["Position"]}) - - part_surface_simple_2 = nw.new_node(nodegroup_part_surface_simple().name, - input_kwargs={'Skeleton Curve': group_input.outputs["Skeleton Curve"], 'Skin Mesh': group_input.outputs["Skin Mesh"], 'Length, Yaw, Rad': group_input.outputs["Coord 2"]}) - - transform_3 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': cube, 'Translation': part_surface_simple_2.outputs["Position"]}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [transform_2, transform_1, transform_3]}) - - switch = nw.new_node(Nodes.Switch, - input_kwargs={1: group_input.outputs["Debug Points"], 15: join_geometry}) - - u_resolution = nw.new_node(Nodes.Integer, - label='U Resolution') - u_resolution.integer = 16 - - quadratic_bezier = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Resolution': u_resolution, 'Start': part_surface_simple.outputs["Position"], 'Middle': part_surface_simple_1.outputs["Position"], 'End': part_surface_simple_2.outputs["Position"]}) - - spline_parameter = nw.new_node(Nodes.SplineParameter) - - separate_xyz_1 = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': group_input.outputs["ProfileHeight, StartTilt, EndTilt"]}) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': spline_parameter.outputs["Factor"], 3: separate_xyz_1.outputs["Y"], 4: separate_xyz_1.outputs["Z"]}) - - deg2rad = nw.new_node(nodegroup_deg2_rad().name, - input_kwargs={'Deg': map_range_1.outputs["Result"]}) - - set_curve_tilt = nw.new_node(Nodes.SetCurveTilt, - input_kwargs={'Curve': quadratic_bezier, 'Tilt': deg2rad}) - - position = nw.new_node(Nodes.InputPosition) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': separate_xyz_1.outputs["X"], 'Y': 1.0, 'Z': 1.0}) - - multiply = nw.new_node(Nodes.VectorMath, - input_kwargs={0: position, 1: combine_xyz}, - attrs={'operation': 'MULTIPLY'}) - - v_resolution = nw.new_node(Nodes.Integer, - label='V resolution') - v_resolution.integer = 24 - - warped_circle_curve = nw.new_node(nodegroup_warped_circle_curve().name, - input_kwargs={'Position': multiply.outputs["Vector"], 'Vertices': v_resolution}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': group_input.outputs["StartRad, EndRad, Fullness"]}) - - smoothtaper = nw.new_node(nodegroup_smooth_taper().name, - input_kwargs={'start_rad': separate_xyz.outputs["X"], 'end_rad': separate_xyz.outputs["Y"], 'fullness': separate_xyz.outputs["Z"]}) - - profilepart = nw.new_node(nodegroup_profile_part().name, - input_kwargs={'Skeleton Curve': set_curve_tilt, 'Profile Curve': warped_circle_curve, 'Radius Func': smoothtaper}) - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [switch.outputs[6], profilepart]}) - - switch_1 = nw.new_node(Nodes.Switch, - input_kwargs={1: True, 15: join_geometry_1}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': switch_1.outputs[6]}) - -@node_utils.to_nodegroup('nodegroup_attach_part', singleton=True, type='GeometryNodeTree') -def nodegroup_attach_part(nw: NodeWrangler): - # Code generated using version 2.4.2 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - 
expose_input=[('NodeSocketGeometry', 'Skin Mesh', None), - ('NodeSocketGeometry', 'Skeleton Curve', None), - ('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketFloatFactor', 'Length Fac', 0.0), - ('NodeSocketVectorEuler', 'Ray Rot', (0.0, 0.0, 0.0)), - ('NodeSocketFloat', 'Rad', 0.0), - ('NodeSocketVector', 'Part Rot', (0.0, 0.0, 0.0)), - ('NodeSocketBool', 'Do Normal Rot', False), - ('NodeSocketBool', 'Do Tangent Rot', False)]) - - part_surface = nw.new_node(nodegroup_part_surface().name, - input_kwargs={'Skeleton Curve': group_input.outputs["Skeleton Curve"], 'Skin Mesh': group_input.outputs["Skin Mesh"], 'Length Fac': group_input.outputs["Length Fac"], 'Ray Rot': group_input.outputs["Ray Rot"], 'Rad': group_input.outputs["Rad"]}) - - deg2rad = nw.new_node(nodegroup_deg2_rad().name, - input_kwargs={'Deg': group_input.outputs["Part Rot"]}) - - raycast_rotation = nw.new_node(nodegroup_raycast_rotation().name, - input_kwargs={'Rotation': deg2rad, 'Hit Normal': part_surface.outputs["Hit Normal"], 'Curve Tangent': part_surface.outputs["Tangent"], 'Do Normal Rot': group_input.outputs["Do Normal Rot"], 'Do Tangent Rot': group_input.outputs["Do Tangent Rot"]}) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 'Translation': part_surface.outputs["Position"], 'Rotation': raycast_rotation}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': transform, 'Position': part_surface.outputs["Position"]}) - diff --git a/infinigen/assets/creatures/util/nodegroups/curve.py b/infinigen/assets/creatures/util/nodegroups/curve.py deleted file mode 100644 index a77e6bdef..000000000 --- a/infinigen/assets/creatures/util/nodegroups/curve.py +++ /dev/null @@ -1,370 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
- -# Authors: Alexander Raistrick - - -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface -from .math import nodegroup_polar_to_cart, nodegroup_aspect_to_dim, nodegroup_vector_sum, nodegroup_switch4 - -@node_utils.to_nodegroup('nodegroup_simple_tube', singleton=True, type='GeometryNodeTree') -def nodegroup_simple_tube(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'Origin', (0.0, 0.0, 0.0)), - ('NodeSocketVector', 'Angles Deg', (30.0, -1.5, 11.0)), - ('NodeSocketVector', 'Seg Lengths', (0.02, 0.02, 0.02)), - ('NodeSocketFloat', 'Start Radius', 0.06), - ('NodeSocketFloat', 'End Radius', 0.03), - ('NodeSocketFloat', 'Fullness', 8.17), - ('NodeSocketBool', 'Do Bezier', True), - ('NodeSocketFloat', 'Aspect Ratio', 1.0)]) - - polarbezier = nw.new_node(nodegroup_polar_bezier().name, - input_kwargs={'Resolution': 25, 'Origin': group_input.outputs["Origin"], 'angles_deg': group_input.outputs["Angles Deg"], 'Seg Lengths': group_input.outputs["Seg Lengths"], 'Do Bezier': group_input.outputs["Do Bezier"]}) - - aspect_to_dim = nw.new_node(nodegroup_aspect_to_dim().name, - input_kwargs={'Aspect Ratio': group_input.outputs["Aspect Ratio"]}) - - position = nw.new_node(Nodes.InputPosition) - - multiply = nw.new_node(Nodes.VectorMath, - input_kwargs={0: aspect_to_dim, 1: position}, - attrs={'operation': 'MULTIPLY'}) - - warped_circle_curve = nw.new_node(nodegroup_warped_circle_curve().name, - input_kwargs={'Position': multiply.outputs["Vector"], 'Vertices': 40}) - - smoothtaper = nw.new_node(nodegroup_smooth_taper().name, - input_kwargs={'start_rad': group_input.outputs["Start Radius"], 'end_rad': group_input.outputs["End Radius"], 'fullness': group_input.outputs["Fullness"]}) - - profilepart = nw.new_node(nodegroup_profile_part().name, - input_kwargs={'Skeleton Curve': polarbezier.outputs["Curve"], 'Profile Curve': warped_circle_curve, 'Radius Func': smoothtaper}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': profilepart, 'Skeleton Curve': polarbezier.outputs["Curve"], 'Endpoint': polarbezier.outputs["Endpoint"]}) - -@node_utils.to_nodegroup('nodegroup_simple_tube_v2', singleton=True, type='GeometryNodeTree') -def nodegroup_simple_tube_v2(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'length_rad1_rad2', (1.0, 0.5, 0.3)), - ('NodeSocketVector', 'angles_deg', (0.0, 0.0, 0.0)), - ('NodeSocketVector', 'proportions', (0.3333, 0.3333, 0.3333)), - ('NodeSocketFloat', 'aspect', 1.0), - ('NodeSocketBool', 'do_bezier', True), - ('NodeSocketFloat', 'fullness', 4.0), - ('NodeSocketVector', 'Origin', (0.0, 0.0, 0.0))]) - - vector_sum = nw.new_node(nodegroup_vector_sum().name, - input_kwargs={'Vector': group_input.outputs["proportions"]}) - - divide = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["proportions"], 1: vector_sum}, - attrs={'operation': 'DIVIDE'}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': group_input.outputs["length_rad1_rad2"]}) - - scale = nw.new_node(Nodes.VectorMath, - input_kwargs={0: divide.outputs["Vector"], 'Scale': separate_xyz.outputs["X"]}, - 
attrs={'operation': 'SCALE'}) - - polarbezier = nw.new_node(nodegroup_polar_bezier().name, - input_kwargs={'Resolution': 25, 'Origin': group_input.outputs["Origin"], 'angles_deg': group_input.outputs["angles_deg"], 'Seg Lengths': scale.outputs["Vector"], 'Do Bezier': group_input.outputs["do_bezier"]}) - - aspect_to_dim = nw.new_node(nodegroup_aspect_to_dim().name, - input_kwargs={'Aspect Ratio': group_input.outputs["aspect"]}) - - position = nw.new_node(Nodes.InputPosition) - - multiply = nw.new_node(Nodes.VectorMath, - input_kwargs={0: aspect_to_dim, 1: position}, - attrs={'operation': 'MULTIPLY'}) - - warped_circle_curve = nw.new_node(nodegroup_warped_circle_curve().name, - input_kwargs={'Position': multiply.outputs["Vector"], 'Vertices': 40}) - - smoothtaper = nw.new_node(nodegroup_smooth_taper().name, - input_kwargs={'start_rad': separate_xyz.outputs["Y"], 'end_rad': separate_xyz.outputs["Z"], 'fullness': group_input.outputs["fullness"]}) - - profilepart = nw.new_node(nodegroup_profile_part().name, - input_kwargs={'Skeleton Curve': polarbezier.outputs["Curve"], 'Profile Curve': warped_circle_curve, 'Radius Func': smoothtaper}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': profilepart, 'Skeleton Curve': polarbezier.outputs["Curve"], 'Endpoint': polarbezier.outputs["Endpoint"]}) - -@node_utils.to_nodegroup('nodegroup_smooth_taper', singleton=True, type='GeometryNodeTree') -def nodegroup_smooth_taper(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - spline_parameter = nw.new_node(Nodes.SplineParameter) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: spline_parameter.outputs["Factor"], 1: 3.1416}, - attrs={'operation': 'MULTIPLY'}) - - sine = nw.new_node(Nodes.Math, - input_kwargs={0: multiply}, - attrs={'operation': 'SINE'}) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'start_rad', 0.29), - ('NodeSocketFloat', 'end_rad', 0.0), - ('NodeSocketFloat', 'fullness', 2.5)]) - - divide = nw.new_node(Nodes.Math, - input_kwargs={0: 1.0, 1: group_input.outputs["fullness"]}, - attrs={'operation': 'DIVIDE'}) - - power = nw.new_node(Nodes.Math, - input_kwargs={0: sine, 1: divide}, - attrs={'operation': 'POWER'}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': spline_parameter.outputs["Factor"], 3: group_input.outputs["start_rad"], 4: group_input.outputs["end_rad"]}, - attrs={'clamp': False}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: power, 1: map_range.outputs["Result"]}, - attrs={'operation': 'MULTIPLY'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Value': multiply_1}) - -@node_utils.to_nodegroup('nodegroup_warped_circle_curve', singleton=True, type='GeometryNodeTree') -def nodegroup_warped_circle_curve(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'Position', (0.0, 0.0, 0.0)), - ('NodeSocketInt', 'Vertices', 32)]) - - mesh_circle = nw.new_node(Nodes.MeshCircle, - input_kwargs={'Vertices': group_input.outputs["Vertices"]}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': mesh_circle, 'Position': group_input.outputs["Position"]}) - - mesh_to_curve = nw.new_node(Nodes.MeshToCurve, - input_kwargs={'Mesh': set_position}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Curve': mesh_to_curve}) - -@node_utils.to_nodegroup('nodegroup_polar_bezier', singleton=True, 
type='GeometryNodeTree') -def nodegroup_polar_bezier(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketIntUnsigned', 'Resolution', 32), - ('NodeSocketVector', 'Origin', (0.0, 0.0, 0.0)), - ('NodeSocketVector', 'angles_deg', (0.0, 0.0, 0.0)), - ('NodeSocketVector', 'Seg Lengths', (0.3, 0.3, 0.3)), - ('NodeSocketBool', 'Do Bezier', True)]) - - mesh_line = nw.new_node(Nodes.MeshLine, - input_kwargs={'Count': 4}) - - index = nw.new_node(Nodes.Index) - - deg2_rad = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["angles_deg"], 'Scale': 0.0175}, - label='Deg2Rad', - attrs={'operation': 'SCALE'}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': deg2_rad.outputs["Vector"]}) - - reroute = nw.new_node(Nodes.Reroute, - input_kwargs={'Input': separate_xyz.outputs["X"]}) - - separate_xyz_1 = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': group_input.outputs["Seg Lengths"]}) - - polartocart = nw.new_node(nodegroup_polar_to_cart().name, - input_kwargs={'Angle': reroute, 'Length': separate_xyz_1.outputs["X"], 'Origin': group_input.outputs["Origin"]}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: reroute, 1: separate_xyz.outputs["Y"]}) - - polartocart_1 = nw.new_node(nodegroup_polar_to_cart().name, - input_kwargs={'Angle': add, 'Length': separate_xyz_1.outputs["Y"], 'Origin': polartocart}) - - add_1 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz.outputs["Z"], 1: add}) - - polartocart_2 = nw.new_node(nodegroup_polar_to_cart().name, - input_kwargs={'Angle': add_1, 'Length': separate_xyz_1.outputs["Z"], 'Origin': polartocart_1}) - - switch4 = nw.new_node(nodegroup_switch4().name, - input_kwargs={'Arg': index, 'Arg == 0': group_input.outputs["Origin"], 'Arg == 1': polartocart, 'Arg == 2': polartocart_1, 'Arg == 3': polartocart_2}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': mesh_line, 'Position': switch4}) - - mesh_to_curve = nw.new_node(Nodes.MeshToCurve, - input_kwargs={'Mesh': set_position}) - - subdivide_curve_1 = nw.new_node(Nodes.SubdivideCurve, - input_kwargs={'Curve': mesh_to_curve, 'Cuts': group_input.outputs["Resolution"]}) - - integer = nw.new_node(Nodes.Integer, - attrs={'integer': 2}) - integer.integer = 2 - - bezier_segment = nw.new_node(Nodes.BezierSegment, - input_kwargs={'Resolution': integer, 'Start': group_input.outputs["Origin"], 'Start Handle': polartocart, 'End Handle': polartocart_1, 'End': polartocart_2}) - - divide = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Resolution"], 1: integer}, - attrs={'operation': 'DIVIDE'}) - - subdivide_curve = nw.new_node(Nodes.SubdivideCurve, - input_kwargs={'Curve': bezier_segment, 'Cuts': divide}) - - switch = nw.new_node(Nodes.Switch, - input_kwargs={1: group_input.outputs["Do Bezier"], 14: subdivide_curve_1, 15: subdivide_curve}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Curve': switch.outputs[6], 'Endpoint': polartocart_2}) - -@node_utils.to_nodegroup('nodegroup_simple_tube_v2', singleton=True, type='GeometryNodeTree') -def nodegroup_simple_tube_v2(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'length_rad1_rad2', (1.0, 0.5, 0.3)), - ('NodeSocketVector', 'angles_deg', (0.0, 0.0, 0.0)), - ('NodeSocketVector', 'proportions', (0.3333, 0.3333, 0.3333)), - ('NodeSocketFloat', 
'aspect', 1.0), - ('NodeSocketBool', 'do_bezier', True), - ('NodeSocketFloat', 'fullness', 4.0), - ('NodeSocketVector', 'Origin', (0.0, 0.0, 0.0))]) - - vector_sum = nw.new_node(nodegroup_vector_sum().name, - input_kwargs={'Vector': group_input.outputs["proportions"]}) - - divide = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["proportions"], 1: vector_sum}, - attrs={'operation': 'DIVIDE'}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': group_input.outputs["length_rad1_rad2"]}) - - scale = nw.new_node(Nodes.VectorMath, - input_kwargs={0: divide.outputs["Vector"], 'Scale': separate_xyz.outputs["X"]}, - attrs={'operation': 'SCALE'}) - - polarbezier = nw.new_node(nodegroup_polar_bezier().name, - input_kwargs={'Resolution': 25, 'Origin': group_input.outputs["Origin"], 'angles_deg': group_input.outputs["angles_deg"], 'Seg Lengths': scale.outputs["Vector"], 'Do Bezier': group_input.outputs["do_bezier"]}) - - aspect_to_dim = nw.new_node(nodegroup_aspect_to_dim().name, - input_kwargs={'Aspect Ratio': group_input.outputs["aspect"]}) - - position = nw.new_node(Nodes.InputPosition) - - multiply = nw.new_node(Nodes.VectorMath, - input_kwargs={0: aspect_to_dim, 1: position}, - attrs={'operation': 'MULTIPLY'}) - - warped_circle_curve = nw.new_node(nodegroup_warped_circle_curve().name, - input_kwargs={'Position': multiply.outputs["Vector"], 'Vertices': 40}) - - smoothtaper = nw.new_node(nodegroup_smooth_taper().name, - input_kwargs={'start_rad': separate_xyz.outputs["Y"], 'end_rad': separate_xyz.outputs["Z"], 'fullness': group_input.outputs["fullness"]}) - - profilepart = nw.new_node(nodegroup_profile_part().name, - input_kwargs={'Skeleton Curve': polarbezier.outputs["Curve"], 'Profile Curve': warped_circle_curve, 'Radius Func': smoothtaper}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': profilepart, 'Skeleton Curve': polarbezier.outputs["Curve"], 'Endpoint': polarbezier.outputs["Endpoint"]}) - - -@node_utils.to_nodegroup('nodegroup_smooth_taper', singleton=True, type='GeometryNodeTree') -def nodegroup_smooth_taper(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - spline_parameter = nw.new_node(Nodes.SplineParameter) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: spline_parameter.outputs["Factor"], 1: 3.1416}, - attrs={'operation': 'MULTIPLY'}) - - sine = nw.new_node(Nodes.Math, - input_kwargs={0: multiply}, - attrs={'operation': 'SINE'}) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'start_rad', 0.29), - ('NodeSocketFloat', 'end_rad', 0.0), - ('NodeSocketFloat', 'fullness', 2.5)]) - - divide = nw.new_node(Nodes.Math, - input_kwargs={0: 1.0, 1: group_input.outputs["fullness"]}, - attrs={'operation': 'DIVIDE'}) - - power = nw.new_node(Nodes.Math, - input_kwargs={0: sine, 1: divide}, - attrs={'operation': 'POWER'}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': spline_parameter.outputs["Factor"], 3: group_input.outputs["start_rad"], 4: group_input.outputs["end_rad"]}, - attrs={'clamp': False}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: power, 1: map_range.outputs["Result"]}, - attrs={'operation': 'MULTIPLY'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Value': multiply_1}) - - -@node_utils.to_nodegroup('nodegroup_warped_circle_curve', singleton=True, type='GeometryNodeTree') -def nodegroup_warped_circle_curve(nw: NodeWrangler): - # Code generated using version 2.4.2 of the 
node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'Position', (0.0, 0.0, 0.0)), - ('NodeSocketInt', 'Vertices', 32)]) - - mesh_circle = nw.new_node(Nodes.MeshCircle, - input_kwargs={'Vertices': group_input.outputs["Vertices"]}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': mesh_circle, 'Position': group_input.outputs["Position"]}) - - mesh_to_curve = nw.new_node(Nodes.MeshToCurve, - input_kwargs={'Mesh': set_position}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Curve': mesh_to_curve}) - -@node_utils.to_nodegroup('nodegroup_profile_part', singleton=True, type='GeometryNodeTree') -def nodegroup_profile_part(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Skeleton Curve', None), - ('NodeSocketGeometry', 'Profile Curve', None), - ('NodeSocketFloatDistance', 'Radius Func', 1.0)]) - - set_curve_radius = nw.new_node(Nodes.SetCurveRadius, - input_kwargs={'Curve': group_input.outputs["Skeleton Curve"], 'Radius': group_input.outputs["Radius Func"]}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': set_curve_radius, 'Profile Curve': group_input.outputs["Profile Curve"], 'Fill Caps': True}) - - set_shade_smooth = nw.new_node(Nodes.SetShadeSmooth, - input_kwargs={'Geometry': curve_to_mesh, 'Shade Smooth': False}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_shade_smooth}) \ No newline at end of file diff --git a/infinigen/assets/creatures/util/nodegroups/geometry.py b/infinigen/assets/creatures/util/nodegroups/geometry.py deleted file mode 100644 index 50ebdbc49..000000000 --- a/infinigen/assets/creatures/util/nodegroups/geometry.py +++ /dev/null @@ -1,137 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
- -# Authors: Alexander Raistrick - - -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface - -@node_utils.to_nodegroup('nodegroup_symmetric_instance', singleton=True, type='GeometryNodeTree') -def nodegroup_symmetric_instance(nw: NodeWrangler): - # Code generated using version 2.4.1 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketVector', 'Offset', (0.0, 0.0, 0.0)), - ('NodeSocketVector', 'Reflector', (1.0, -1.0, 1.0))]) - - multiply = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["Offset"], 1: group_input.outputs["Reflector"]}, - attrs={'operation': 'MULTIPLY'}) - - mesh_line = nw.new_node(Nodes.MeshLine, - input_kwargs={'Count': 2, 'Start Location': group_input.outputs["Offset"], 'Offset': multiply.outputs["Vector"]}, - attrs={'mode': 'END_POINTS'}) - - instance_on_points = nw.new_node(Nodes.InstanceOnPoints, - input_kwargs={'Points': mesh_line, 'Instance': group_input.outputs["Geometry"]}) - - index = nw.new_node(Nodes.Index) - - equal = nw.new_node(Nodes.Compare, - input_kwargs={2: index}, - attrs={'data_type': 'INT', 'operation': 'EQUAL'}) - - scale_instances = nw.new_node(Nodes.ScaleInstances, - input_kwargs={'Instances': instance_on_points, 'Selection': equal}) - - flip_faces = nw.new_node(Nodes.FlipFaces, - input_kwargs={'Mesh': scale_instances, 'Selection': equal}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Instances': flip_faces}) - -@node_utils.to_nodegroup('nodegroup_symmetric_clone', singleton=True, type='GeometryNodeTree') -def nodegroup_symmetric_clone(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketVectorXYZ', 'Scale', (1.0, -1.0, 1.0))]) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 'Scale': group_input.outputs["Scale"]}) - - flip_faces = nw.new_node(Nodes.FlipFaces, - input_kwargs={'Mesh': transform}) - - join_geometry_2 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [group_input.outputs["Geometry"], flip_faces]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Both': join_geometry_2, 'Orig': group_input.outputs["Geometry"], 'Inverted': flip_faces}) - - -@node_utils.to_nodegroup('nodegroup_solidify', singleton=True, type='GeometryNodeTree') -def nodegroup_solidify(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Mesh', None), - ('NodeSocketFloatDistance', 'Distance', 0.0)]) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Distance"]}, - attrs={'operation': 'MULTIPLY'}) - - extrude_mesh = nw.new_node(Nodes.ExtrudeMesh, - input_kwargs={'Mesh': group_input.outputs["Mesh"], 'Offset Scale': multiply, 'Individual': False}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Distance"], 1: -0.5}, - attrs={'operation': 'MULTIPLY'}) - - extrude_mesh_1 = nw.new_node(Nodes.ExtrudeMesh, - input_kwargs={'Mesh': group_input.outputs["Mesh"], 'Offset Scale': multiply_1, 'Individual': False}) - - 
flip_faces = nw.new_node(Nodes.FlipFaces, - input_kwargs={'Mesh': extrude_mesh_1.outputs["Mesh"]}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [extrude_mesh.outputs["Mesh"], flip_faces]}) - - merge_by_distance = nw.new_node(Nodes.MergeByDistance, - input_kwargs={'Geometry': join_geometry, 'Distance': 0.0}) - - set_shade_smooth = nw.new_node(Nodes.SetShadeSmooth, - input_kwargs={'Geometry': merge_by_distance, 'Shade Smooth': False}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_shade_smooth}) - -@node_utils.to_nodegroup('nodegroup_taper', singleton=True, type='GeometryNodeTree') -def nodegroup_taper(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketVector', 'Start', (1.0, 0.63, 0.72)), - ('NodeSocketVector', 'End', (1.0, 1.0, 1.0))]) - - position = nw.new_node(Nodes.InputPosition) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': position}) - - attribute_statistic = nw.new_node(Nodes.AttributeStatistic, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 2: separate_xyz.outputs["X"]}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Vector': separate_xyz.outputs["X"], 7: attribute_statistic.outputs["Min"], 8: attribute_statistic.outputs["Max"], 9: group_input.outputs["Start"], 10: group_input.outputs["End"]}, - attrs={'data_type': 'FLOAT_VECTOR', 'clamp': False}) - - multiply = nw.new_node(Nodes.VectorMath, - input_kwargs={0: position, 1: map_range.outputs["Vector"]}, - attrs={'operation': 'MULTIPLY'}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 'Position': multiply.outputs["Vector"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_position}) \ No newline at end of file diff --git a/infinigen/assets/creatures/util/nodegroups/hair.py b/infinigen/assets/creatures/util/nodegroups/hair.py deleted file mode 100644 index 54fbde9d2..000000000 --- a/infinigen/assets/creatures/util/nodegroups/hair.py +++ /dev/null @@ -1,300 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
- -# Authors: Alexander Raistrick - - -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface - -from infinigen.assets.creatures.util.nodegroups.math import nodegroup_vector_bezier - -@node_utils.to_nodegroup('nodegroup_comb_direction', singleton=True, type='GeometryNodeTree') -def nodegroup_comb_direction(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Surface', None), - ('NodeSocketVector', 'Root Positiion', (0.0, 0.0, 0.0))]) - - normal = nw.new_node(Nodes.InputNormal) - - surface_normal = nw.new_node(Nodes.SampleNearestSurface, - input_kwargs={'Mesh': group_input.outputs["Surface"], 'Value': normal, 'Sample Position': group_input.outputs["Root Positiion"]}, - label='Surface Normal', - attrs={'data_type': 'FLOAT_VECTOR'}) - - named_attribute = nw.new_node(Nodes.NamedAttribute, - input_kwargs={'Name': 'skeleton_loc'}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - named_attribute_1 = nw.new_node(Nodes.NamedAttribute, - input_kwargs={'Name': 'parent_skeleton_loc'}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - subtract = nw.new_node(Nodes.VectorMath, - input_kwargs={0: named_attribute.outputs["Attribute"], 1: named_attribute_1.outputs["Attribute"]}, - attrs={'operation': 'SUBTRACT'}) - - normalize = nw.new_node(Nodes.VectorMath, - input_kwargs={0: subtract.outputs["Vector"]}, - attrs={'operation': 'NORMALIZE'}) - - skeleton_tangent = nw.new_node(Nodes.SampleNearestSurface, - input_kwargs={'Mesh': group_input.outputs["Surface"], 'Value': normalize.outputs["Vector"], 'Sample Position': group_input.outputs["Root Positiion"]}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - cross_product = nw.new_node(Nodes.VectorMath, - input_kwargs={0: surface_normal, 1: skeleton_tangent}, - attrs={'operation': 'CROSS_PRODUCT'}) - - cross_product_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: surface_normal, 1: cross_product.outputs["Vector"]}, - attrs={'operation': 'CROSS_PRODUCT'}) - - normalize_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: cross_product_1.outputs["Vector"]}, - attrs={'operation': 'NORMALIZE'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={ - 'Combing Direction': normalize_1.outputs["Vector"], - 'Surface Normal': (surface_normal, "Value"), - 'Skeleton Tangent': skeleton_tangent - }) - -@node_utils.to_nodegroup('nodegroup_hair_position', singleton=True, type='GeometryNodeTree') -def nodegroup_hair_position(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Curves', None)]) - - position = nw.new_node(Nodes.InputPosition) - - index = nw.new_node(Nodes.Index) - - spline_length = nw.new_node(Nodes.SplineLength) - - snap = nw.new_node(Nodes.Math, - input_kwargs={0: index, 1: spline_length.outputs["Point Count"]}, - attrs={'operation': 'SNAP'}) - - hair_root_position = nw.new_node(Nodes.SampleIndex, - input_kwargs={'Geometry': group_input.outputs["Curves"], 'Value': position, 'Index': snap}, - label='Hair Root Position', - attrs={'data_type': 'FLOAT_VECTOR'}) - - position_1 = nw.new_node(Nodes.InputPosition) - - relative_position = nw.new_node(Nodes.VectorMath, - input_kwargs={0: position_1, 1: hair_root_position}, - 
label='Relative Position', - attrs={'operation': 'SUBTRACT'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={ - 'Root Position': hair_root_position, - 'Relative Position': relative_position.outputs["Vector"] - }) - -@node_utils.to_nodegroup('nodegroup_comb_hairs', singleton=True, type='GeometryNodeTree') -def nodegroup_comb_hairs(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Curves', None), - ('NodeSocketVector', 'Root Position', (0.0, 0.0, 0.0)), - ('NodeSocketVector', 'Comb Dir', (0.0, 0.0, 0.0)), - ('NodeSocketVector', 'Surface Normal', (0.0, 0.0, 0.0)), - ('NodeSocketFloat', 'Length', 0.03), - ('NodeSocketFloat', 'Puiff', 1.0), - ('NodeSocketFloat', 'Comb', 1.0)]) - - spline_parameter = nw.new_node(Nodes.SplineParameter) - - scale = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["Surface Normal"], 'Scale': group_input.outputs["Comb"]}, - attrs={'operation': 'SCALE'}) - - scale_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["Comb Dir"], 'Scale': group_input.outputs["Puiff"]}, - attrs={'operation': 'SCALE'}) - - add = nw.new_node(Nodes.VectorMath, - input_kwargs={0: scale_1.outputs["Vector"], 1: scale.outputs["Vector"]}) - - vectorbezier = nw.new_node(nodegroup_vector_bezier().name, - input_kwargs={'t': spline_parameter.outputs["Factor"], 'b': scale.outputs["Vector"], 'c': add.outputs["Vector"]}) - - length = nw.new_node(Nodes.VectorMath, - input_kwargs={0: add.outputs["Vector"]}, - attrs={'operation': 'LENGTH'}) - - divide = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Length"], 1: length.outputs["Value"]}, - attrs={'operation': 'DIVIDE'}) - - scale_2 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: vectorbezier, 'Scale': divide}, - attrs={'operation': 'SCALE'}) - - add_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["Root Position"], 1: scale_2.outputs["Vector"]}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': group_input.outputs["Curves"], 'Position': add_1.outputs["Vector"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_position}) - -@node_utils.to_nodegroup('nodegroup_strand_noise', singleton=False, type='GeometryNodeTree') -def nodegroup_strand_noise(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketFloat', 'Random Mag', 0.001), - ('NodeSocketFloat', 'Perlin Mag', 1.0), - ('NodeSocketFloat', 'Perlin Scale', 5.0)]) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Scale': group_input.outputs["Perlin Scale"], 'Detail': 10.0, 'Roughness': 1.0}) - - subtract = nw.new_node(Nodes.VectorMath, - input_kwargs={0: noise_texture.outputs["Color"], 1: (0.5, 0.5, 0.5)}, - attrs={'operation': 'SUBTRACT'}) - - scale = nw.new_node(Nodes.VectorMath, - input_kwargs={0: subtract.outputs["Vector"], 'Scale': group_input.outputs["Perlin Mag"]}, - attrs={'operation': 'SCALE'}) - - random_value = nw.new_node(Nodes.RandomValue, - input_kwargs={0: (-1.0, -1.0, -1.0)}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - scale_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: random_value.outputs["Value"], 'Scale': group_input.outputs["Random Mag"]}, - attrs={'operation': 'SCALE'}) - - add = nw.new_node(Nodes.VectorMath, - input_kwargs={0: 
scale.outputs["Vector"], 1: scale_1.outputs["Vector"]}) - - add_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: add.outputs["Vector"]}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 'Offset': add_1.outputs["Vector"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_position}) - -@node_utils.to_nodegroup('nodegroup_duplicate_to_clumps', singleton=False, type='GeometryNodeTree') -def nodegroup_duplicate_to_clumps(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketVector', 'Surface Normal', (0.0, 0.0, 0.0)), - ('NodeSocketInt', 'Amount', 3), - ('NodeSocketFloat', 'Tuft Spread', 0.01), - ('NodeSocketFloat', 'Tuft Clumping', 0.5)]) - - duplicate_elements = nw.new_node(Nodes.DuplicateElements, - attrs={'domain': 'SPLINE'}, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 'Amount': group_input.outputs["Amount"]}) - - random_value = nw.new_node(Nodes.RandomValue, - input_kwargs={0: (-1.0, -1.0, -1.0)}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - scale = nw.new_node(Nodes.VectorMath, - input_kwargs={0: random_value.outputs["Value"], 'Scale': group_input.outputs["Tuft Spread"]}, - attrs={'operation': 'SCALE'}) - - project = nw.new_node(Nodes.VectorMath, - input_kwargs={0: scale.outputs["Vector"], 1: group_input.outputs["Surface Normal"]}, - attrs={'operation': 'PROJECT'}) - - subtract = nw.new_node(Nodes.VectorMath, - input_kwargs={0: scale.outputs["Vector"], 1: project.outputs["Vector"]}, - attrs={'operation': 'SUBTRACT'}) - - capture_attribute = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': duplicate_elements.outputs["Geometry"], 1: subtract.outputs["Vector"]}, - attrs={'domain': 'CURVE', 'data_type': 'FLOAT_VECTOR'}) - - spline_parameter = nw.new_node(Nodes.SplineParameter) - - subtract_1 = nw.new_node(Nodes.Math, - input_kwargs={0: 1.0, 1: group_input.outputs["Tuft Clumping"]}, - attrs={'operation': 'SUBTRACT'}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': spline_parameter.outputs["Factor"], 3: 1.0, 4: subtract_1}) - - scale_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: capture_attribute.outputs["Attribute"], 'Scale': map_range.outputs["Result"]}, - attrs={'operation': 'SCALE'}) - - set_position_1 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': capture_attribute.outputs["Geometry"], 'Offset': scale_1.outputs["Vector"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_position_1}) - -@node_utils.to_nodegroup('nodegroup_hair_length_rescale', singleton=False, type='GeometryNodeTree') -def nodegroup_hair_length_rescale(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Curves', None), - ('NodeSocketFloat', 'Min', 0.69999999999999996)]) - - random_value_1 = nw.new_node(Nodes.RandomValue, - input_kwargs={2: group_input.outputs["Min"]}) - - capture_attribute = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': group_input.outputs["Curves"], 2: random_value_1.outputs[1]}, - attrs={'domain': 'CURVE'}) - - hairposition = nw.new_node(nodegroup_hair_position().name, - input_kwargs={'Curves': group_input.outputs["Curves"]}) - - multiply_add = nw.new_node(Nodes.VectorMath, - input_kwargs={0: hairposition.outputs["Relative 
Position"], 1: capture_attribute.outputs[2], 2: hairposition.outputs["Root Position"]}, - attrs={'operation': 'MULTIPLY_ADD'}) - - set_position_1 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': capture_attribute.outputs["Geometry"], 'Position': multiply_add.outputs["Vector"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_position_1}) - -@node_utils.to_nodegroup('nodegroup_snap_roots_to_surface', singleton=True, type='GeometryNodeTree') -def nodegroup_snap_roots_to_surface(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Target', None), - ('NodeSocketGeometry', 'Curves', None)]) - - hair_pos = nw.new_node(nodegroup_hair_position().name, - input_kwargs={'Curves': group_input.outputs["Curves"]}) - - geometry_proximity = nw.new_node(Nodes.Proximity, - input_kwargs={'Target': group_input.outputs["Target"], 'Source Position': hair_pos.outputs["Root Position"]}) - - add = nw.new_node(Nodes.VectorMath, - input_kwargs={0: geometry_proximity.outputs["Position"], 1: hair_pos.outputs["Relative Position"]}) - - set_position_2 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': group_input.outputs["Curves"], 'Position': add.outputs["Vector"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_position_2}) \ No newline at end of file diff --git a/infinigen/assets/creatures/util/nodegroups/math.py b/infinigen/assets/creatures/util/nodegroups/math.py deleted file mode 100644 index 63aff4ab9..000000000 --- a/infinigen/assets/creatures/util/nodegroups/math.py +++ /dev/null @@ -1,203 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
- -# Authors: Alexander Raistrick - - -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface - -@node_utils.to_nodegroup('nodegroup_floor_ceil', singleton=False, type='GeometryNodeTree') -def nodegroup_floor_ceil(nw: NodeWrangler): - # Code generated using version 2.6.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketFloat', 'Value', 0.0000)]) - - float_to_integer = nw.new_node(Nodes.FloatToInt, input_kwargs={'Float': group_input.outputs["Value"]}, attrs={'rounding_mode': 'FLOOR'}) - - float_to_integer_1 = nw.new_node(Nodes.FloatToInt, - input_kwargs={'Float': group_input.outputs["Value"]}, - attrs={'rounding_mode': 'CEILING'}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Value"], 1: float_to_integer}, - attrs={'operation': 'SUBTRACT'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Floor': float_to_integer, 'Ceil': float_to_integer_1, 'Remainder': subtract}, - attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_clamp_or_wrap', singleton=False, type='GeometryNodeTree') -def nodegroup_clamp_or_wrap(nw: NodeWrangler): - # Code generated using version 2.6.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketInt', 'Value', 0), - ('NodeSocketFloat', 'Max', 0.5000), - ('NodeSocketBool', 'Use Wrap', False)]) - - clamp = nw.new_node(Nodes.Clamp, input_kwargs={'Value': group_input.outputs["Value"], 'Max': group_input.outputs["Max"]}) - - wrap = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Value"], 1: group_input.outputs["Max"], 2: 0.0000}, - attrs={'operation': 'WRAP'}) - - switch = nw.new_node(Nodes.Switch, - input_kwargs={0: group_input.outputs["Use Wrap"], 4: clamp, 5: wrap}, - attrs={'input_type': 'INT'}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Output': switch.outputs[1]}, attrs={'is_active_output': True}) - - -@node_utils.to_nodegroup('nodegroup_polar_to_cart', singleton=True, type='GeometryNodeTree') -def nodegroup_polar_to_cart(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Angle', 0.5), - ('NodeSocketFloat', 'Length', 0.0), - ('NodeSocketVector', 'Origin', (0.0, 0.0, 0.0))]) - - cosine = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Angle"]}, - attrs={'operation': 'COSINE'}) - - sine = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Angle"]}, - attrs={'operation': 'SINE'}) - - construct_unit_vector = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': cosine, 'Z': sine}, - label='Construct Unit Vector') - - offset_polar = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["Length"], 1: construct_unit_vector, 2: group_input.outputs["Origin"]}, - label='Offset Polar', - attrs={'operation': 'MULTIPLY_ADD'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Vector': offset_polar.outputs["Vector"]}) - -@node_utils.to_nodegroup('nodegroup_switch4', singleton=True, type='GeometryNodeTree') -def nodegroup_switch4(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketInt', 
'Arg', 0), - ('NodeSocketVector', 'Arg == 0', (0.0, 0.0, 0.0)), - ('NodeSocketVector', 'Arg == 1', (0.0, 0.0, 0.0)), - ('NodeSocketVector', 'Arg == 2', (0.0, 0.0, 0.0)), - ('NodeSocketVector', 'Arg == 3', (0.0, 0.0, 0.0))]) - - greater_equal = nw.new_node(Nodes.Compare, - input_kwargs={2: group_input.outputs["Arg"], 3: 2}, - attrs={'data_type': 'INT', 'operation': 'GREATER_EQUAL'}) - - greater_equal_1 = nw.new_node(Nodes.Compare, - input_kwargs={2: group_input.outputs["Arg"], 3: 1}, - attrs={'data_type': 'INT', 'operation': 'GREATER_EQUAL'}) - - switch_1 = nw.new_node(Nodes.Switch, - input_kwargs={0: greater_equal_1, 8: group_input.outputs["Arg == 0"], 9: group_input.outputs["Arg == 1"]}, - attrs={'input_type': 'VECTOR'}) - - greater_equal_2 = nw.new_node(Nodes.Compare, - input_kwargs={2: group_input.outputs["Arg"], 3: 3}, - attrs={'data_type': 'INT', 'operation': 'GREATER_EQUAL'}) - - switch_2 = nw.new_node(Nodes.Switch, - input_kwargs={0: greater_equal_2, 8: group_input.outputs["Arg == 2"], 9: group_input.outputs["Arg == 3"]}, - attrs={'input_type': 'VECTOR'}) - - switch = nw.new_node(Nodes.Switch, - input_kwargs={0: greater_equal, 8: switch_1.outputs[3], 9: switch_2.outputs[3]}, - attrs={'input_type': 'VECTOR'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Output': switch.outputs[3]}) - -@node_utils.to_nodegroup('nodegroup_deg2_rad', singleton=True, type='GeometryNodeTree') -def nodegroup_deg2_rad(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'Deg', (0.0, 0.0, 0.0))]) - - multiply = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["Deg"], 1: (0.0175, 0.0175, 0.0175)}, - attrs={'operation': 'MULTIPLY'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Rad': multiply.outputs["Vector"]}) - -@node_utils.to_nodegroup('nodegroup_aspect_to_dim', singleton=True, type='GeometryNodeTree') -def nodegroup_aspect_to_dim(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Aspect Ratio', 1.0)]) - - greater_than = nw.new_node(Nodes.Compare, - input_kwargs={0: group_input.outputs["Aspect Ratio"], 1: 1.0}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': group_input.outputs["Aspect Ratio"], 'Y': 1.0}) - - divide = nw.new_node(Nodes.Math, - input_kwargs={0: 1.0, 1: group_input.outputs["Aspect Ratio"]}, - attrs={'operation': 'DIVIDE'}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': 1.0, 'Y': divide}) - - switch = nw.new_node(Nodes.Switch, - input_kwargs={0: greater_than, 8: combine_xyz_1, 9: combine_xyz_2}, - attrs={'input_type': 'VECTOR'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'XY Scale': switch.outputs[3]}) - -@node_utils.to_nodegroup('nodegroup_vector_sum', singleton=True, type='GeometryNodeTree') -def nodegroup_vector_sum(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'Vector', (0.0, 0.0, 0.0))]) - - separate_xyz_1 = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': group_input.outputs["Vector"]}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_1.outputs["X"], 1: separate_xyz_1.outputs["Y"]}) - - add_1 = nw.new_node(Nodes.Math, - input_kwargs={0: add, 1: separate_xyz_1.outputs["Z"]}) - - group_output 
= nw.new_node(Nodes.GroupOutput, - input_kwargs={'Sum': add_1}) - -@node_utils.to_nodegroup('nodegroup_vector_bezier', singleton=True, type='GeometryNodeTree') -def nodegroup_vector_bezier(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 't', 0.0), - ('NodeSocketVector', 'a', (0.0, 0.0, 0.0)), - ('NodeSocketVector', 'b', (0.0, 0.0, 0.0)), - ('NodeSocketVector', 'c', (0.0, 0.0, 0.0))]) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Vector': group_input.outputs["t"], 9: group_input.outputs["a"], 10: group_input.outputs["b"]}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Vector': group_input.outputs["t"], 9: map_range.outputs["Vector"], 10: group_input.outputs["c"]}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Vector': map_range_1.outputs["Vector"]}) diff --git a/infinigen/assets/creatures/util/nodegroups/sculpt_v1.py b/infinigen/assets/creatures/util/nodegroups/sculpt_v1.py deleted file mode 100644 index 7e8c3cc6e..000000000 --- a/infinigen/assets/creatures/util/nodegroups/sculpt_v1.py +++ /dev/null @@ -1,251 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Alexander Raistrick - - -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface - -from infinigen.assets.creatures.util.nodegroups.math import nodegroup_floor_ceil, nodegroup_clamp_or_wrap -from infinigen.assets.creatures.util.nodegroups.geometry import nodegroup_symmetric_clone - -@node_utils.to_nodegroup('nodegroup_u_v_param_to_vert_idxs', singleton=False, type='GeometryNodeTree') -def nodegroup_u_v_param_to_vert_idxs(nw: NodeWrangler): - # Code generated using version 2.6.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Value', 0.5000), - ('NodeSocketInt', 'Size', 0), - ('NodeSocketBool', 'Cyclic', False)]) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Value"], 1: group_input.outputs["Size"]}, - attrs={'operation': 'MULTIPLY'}) - - floorceil = nw.new_node(nodegroup_floor_ceil().name, input_kwargs={'Value': multiply}) - - clamporwrap = nw.new_node(nodegroup_clamp_or_wrap().name, - input_kwargs={'Value': floorceil.outputs["Floor"], 'Max': group_input.outputs["Size"], 'Use Wrap': group_input.outputs["Cyclic"]}) - - clamporwrap_1 = nw.new_node(nodegroup_clamp_or_wrap().name, - input_kwargs={'Value': floorceil.outputs["Ceil"], 'Max': group_input.outputs["Size"], 'Use Wrap': group_input.outputs["Cyclic"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Floor': clamporwrap, 'Ceil': clamporwrap_1, 'Remainder': floorceil.outputs["Remainder"]}, - attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_bilinear_interp_index_transfer', singleton=False, type='GeometryNodeTree') -def nodegroup_bilinear_interp_index_transfer(nw: NodeWrangler): - # Code generated using version 2.6.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Source', None), - 
('NodeSocketFloat', 'U', 0.5000), - ('NodeSocketFloat', 'V', 0.5000), - ('NodeSocketVector', 'Attribute', (0.0000, 0.0000, 0.0000)), - ('NodeSocketInt', 'SizeU', 0), - ('NodeSocketInt', 'SizeV', 0), - ('NodeSocketBool', 'CyclicU', False), - ('NodeSocketBool', 'CyclicV', False)]) - - uvparamtovertidxs = nw.new_node(nodegroup_u_v_param_to_vert_idxs().name, - input_kwargs={'Value': group_input.outputs["V"], 'Size': group_input.outputs["SizeV"], 'Cyclic': group_input.outputs["CyclicV"]}) - - uvparamtovertidxs_1 = nw.new_node(nodegroup_u_v_param_to_vert_idxs().name, - input_kwargs={'Value': group_input.outputs["U"], 'Size': group_input.outputs["SizeU"], 'Cyclic': group_input.outputs["CyclicU"]}) - - floor_floor = nw.new_node(Nodes.Math, - input_kwargs={0: uvparamtovertidxs_1.outputs["Floor"], 1: group_input.outputs["SizeV"], 2: uvparamtovertidxs.outputs["Floor"]}, - label='FloorFloor', - attrs={'operation': 'MULTIPLY_ADD'}) - - transfer_attribute_1 = nw.new_node(Nodes.TransferAttribute, - input_kwargs={'Source': group_input, 1: group_input.outputs["Attribute"], 'Index': floor_floor}, - attrs={'data_type': 'FLOAT_VECTOR', 'mapping': 'INDEX'}) - - ceil_floor = nw.new_node(Nodes.Math, - input_kwargs={0: uvparamtovertidxs_1.outputs["Ceil"], 1: group_input.outputs["SizeV"], 2: uvparamtovertidxs.outputs["Floor"]}, - label='CeilFloor', - attrs={'operation': 'MULTIPLY_ADD'}) - - transfer_attribute_2 = nw.new_node(Nodes.TransferAttribute, - input_kwargs={'Source': group_input, 1: group_input.outputs["Attribute"], 'Index': ceil_floor}, - attrs={'data_type': 'FLOAT_VECTOR', 'mapping': 'INDEX'}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Vector': uvparamtovertidxs_1.outputs["Remainder"], 9: transfer_attribute_1.outputs["Attribute"], 10: transfer_attribute_2.outputs["Attribute"]}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - floor_ceil = nw.new_node(Nodes.Math, - input_kwargs={0: uvparamtovertidxs_1.outputs["Floor"], 1: group_input.outputs["SizeV"], 2: uvparamtovertidxs.outputs["Ceil"]}, - label='FloorCeil', - attrs={'operation': 'MULTIPLY_ADD'}) - - transfer_attribute_3 = nw.new_node(Nodes.TransferAttribute, - input_kwargs={'Source': group_input, 1: group_input.outputs["Attribute"], 'Index': floor_ceil}, - attrs={'data_type': 'FLOAT_VECTOR', 'mapping': 'INDEX'}) - - ceil_ceil = nw.new_node(Nodes.Math, - input_kwargs={0: uvparamtovertidxs_1.outputs["Ceil"], 1: group_input.outputs["SizeV"], 2: uvparamtovertidxs.outputs["Ceil"]}, - label='CeilCeil', - attrs={'operation': 'MULTIPLY_ADD'}) - - transfer_attribute_4 = nw.new_node(Nodes.TransferAttribute, - input_kwargs={'Source': group_input, 1: group_input.outputs["Attribute"], 'Index': ceil_ceil}, - attrs={'data_type': 'FLOAT_VECTOR', 'mapping': 'INDEX'}) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Vector': uvparamtovertidxs_1.outputs["Remainder"], 9: transfer_attribute_3.outputs["Attribute"], 10: transfer_attribute_4.outputs["Attribute"]}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - map_range_2 = nw.new_node(Nodes.MapRange, - input_kwargs={'Vector': uvparamtovertidxs.outputs["Remainder"], 9: map_range.outputs["Vector"], 10: map_range_1.outputs["Vector"]}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Vector': map_range_2.outputs["Vector"]}, - attrs={'is_active_output': True}) - - -@node_utils.to_nodegroup('nodegroup_curve_parameter_curve', singleton=False, type='GeometryNodeTree') -def nodegroup_curve_parameter_curve(nw: NodeWrangler): - # Code generated using version 
2.6.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Surface', None), - ('NodeSocketGeometry', 'UVCurve', None), - ('NodeSocketInt', 'CtrlptsU', 0), - ('NodeSocketInt', 'CtrlptsW', 0)]) - - normal = nw.new_node(Nodes.InputNormal) - - position = nw.new_node(Nodes.InputPosition) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': position}) - - position_1 = nw.new_node(Nodes.InputPosition) - - bilinearinterpindextransfer = nw.new_node(nodegroup_bilinear_interp_index_transfer().name, - input_kwargs={'Source': group_input.outputs["Surface"], 'U': separate_xyz.outputs["X"], 'V': separate_xyz.outputs["Y"], 'Attribute': position_1, 'SizeU': group_input.outputs["CtrlptsU"], 'SizeV': group_input.outputs["CtrlptsW"], 'CyclicV': True}) - - transfer_attribute = nw.new_node(Nodes.TransferAttribute, - input_kwargs={'Source': group_input.outputs["Surface"], 1: normal, 'Source Position': bilinearinterpindextransfer}, - attrs={'data_type': 'FLOAT_VECTOR', 'mapping': 'NEAREST_FACE_INTERPOLATED'}) - - multiply_add = nw.new_node(Nodes.VectorMath, - input_kwargs={0: transfer_attribute.outputs["Attribute"], 1: separate_xyz.outputs["Z"], 2: bilinearinterpindextransfer}, - attrs={'operation': 'MULTIPLY_ADD'}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': group_input.outputs["UVCurve"], 'Position': multiply_add.outputs["Vector"]}) - - normal_1 = nw.new_node(Nodes.InputNormal) - - dot_product = nw.new_node(Nodes.VectorMath, - input_kwargs={0: transfer_attribute.outputs["Attribute"], 1: normal_1}, - attrs={'operation': 'DOT_PRODUCT'}) - - arcsine = nw.new_node(Nodes.Math, input_kwargs={0: dot_product.outputs["Value"]}, attrs={'operation': 'ARCSINE'}) - - set_curve_tilt = nw.new_node(Nodes.SetCurveTilt, input_kwargs={'Curve': set_position, 'Tilt': arcsine}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': set_curve_tilt}, attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_curve_sculpt', singleton=False, type='GeometryNodeTree') -def nodegroup_curve_sculpt(nw: NodeWrangler): - # Code generated using version 2.6.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Target', None), - ('NodeSocketGeometry', 'Curve', None), - ('NodeSocketFloat', 'Base Radius', 0.0500), - ('NodeSocketFloat', 'Base Factor', 0.0500), - ('NodeSocketBool', 'SymmY', True), - ('NodeSocketGeometry', 'StrokeRadFacModifier', None)]) - - normal = nw.new_node(Nodes.InputNormal) - - symmetric_clone = nw.new_node(nodegroup_symmetric_clone().name, input_kwargs={'Geometry': group_input.outputs["Curve"]}) - - switch = nw.new_node(Nodes.Switch, - input_kwargs={1: group_input.outputs["SymmY"], 14: group_input.outputs["Curve"], 15: symmetric_clone.outputs["Both"]}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, input_kwargs={'Curve': switch.outputs[6]}) - - geometry_proximity = nw.new_node(Nodes.Proximity, input_kwargs={'Target': curve_to_mesh}, attrs={'target_element': 'POINTS'}) - - curve_to_mesh_1 = nw.new_node(Nodes.CurveToMesh, input_kwargs={'Curve': group_input.outputs["StrokeRadFacModifier"]}) - - position = nw.new_node(Nodes.InputPosition) - - index = nw.new_node(Nodes.Index) - - transfer_attribute = nw.new_node(Nodes.TransferAttribute, - input_kwargs={'Source': curve_to_mesh_1, 1: position, 'Index': index}, - attrs={'data_type': 'FLOAT_VECTOR', 'mapping': 'INDEX'}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, 
input_kwargs={'Vector': transfer_attribute.outputs["Attribute"]}) - - add = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Base Radius"], 1: separate_xyz.outputs["X"]}) - - map_range = nw.new_node(Nodes.MapRange, input_kwargs={'Value': geometry_proximity.outputs["Distance"], 2: add}) - - float_curve = nw.new_node(Nodes.FloatCurve, input_kwargs={'Value': map_range.outputs["Result"]}) - node_utils.assign_curve(float_curve.mapping.curves[0], [(0.0000, 1.0000), (0.2000, 0.9400), (0.8000, 0.0600), (1.0000, 0.0000)], handles=['VECTOR', 'AUTO', 'AUTO', 'VECTOR']) - - add_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Base Factor"], 1: separate_xyz.outputs["Y"]}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: float_curve, 1: add_1}, attrs={'operation': 'MULTIPLY'}) - - scale = nw.new_node(Nodes.VectorMath, input_kwargs={0: normal, 'Scale': multiply}, attrs={'operation': 'SCALE'}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': group_input.outputs["Target"], 'Offset': scale.outputs["Vector"]}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': set_position}, attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_simple_tube_skin', singleton=False, type='GeometryNodeTree') -def nodegroup_simple_tube_skin(nw: NodeWrangler): - # Code generated using version 2.6.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Curve', None), - ('NodeSocketVector', 'RadStartEnd', (0.0500, 0.0500, 1.0000)), - ('NodeSocketInt', 'Resolution', 32)]) - - spline_parameter = nw.new_node(Nodes.SplineParameter) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: 1.0000, 1: spline_parameter.outputs["Factor"]}, - attrs={'operation': 'SUBTRACT'}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: subtract, 1: spline_parameter.outputs["Factor"]}, - attrs={'operation': 'MULTIPLY'}) - - sqrt = nw.new_node(Nodes.Math, input_kwargs={0: multiply}, attrs={'operation': 'SQRT'}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': group_input.outputs["RadStartEnd"]}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': spline_parameter.outputs["Factor"], 3: separate_xyz.outputs["X"], 4: separate_xyz.outputs["Y"]}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: sqrt, 1: map_range.outputs["Result"]}, attrs={'operation': 'MULTIPLY'}) - - set_curve_radius = nw.new_node(Nodes.SetCurveRadius, input_kwargs={'Curve': group_input.outputs["Curve"], 'Radius': multiply_1}) - - curve_circle = nw.new_node(Nodes.CurveCircle, input_kwargs={'Resolution': group_input.outputs["Resolution"]}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': 1.0000, 'Y': separate_xyz.outputs["Z"]}) - - transform = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': curve_circle.outputs["Curve"], 'Scale': combine_xyz}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, input_kwargs={'Curve': set_curve_radius, 'Profile Curve': transform}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Mesh': curve_to_mesh}, attrs={'is_active_output': True}) diff --git a/infinigen/assets/creatures/util/nodegroups/shader.py b/infinigen/assets/creatures/util/nodegroups/shader.py deleted file mode 100644 index 3720e9e17..000000000 --- a/infinigen/assets/creatures/util/nodegroups/shader.py +++ /dev/null @@ -1,186 +0,0 @@ -# Copyright (c) Princeton University. 
-# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Mingzhe Wang and Alexander Raistrick - - -import bpy -import mathutils -from numpy.random import uniform as U, normal as N, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface - -@node_utils.to_nodegroup('nodegroup_norm_local_pos', singleton=True, type='ShaderNodeTree') -def nodegroup_norm_local_pos(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - attribute_5 = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'local_pos'}) - - attribute_6 = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'skeleton_rad'}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: attribute_6.outputs["Fac"], 1: -1.0}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'Y': multiply, 'Z': multiply}) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'X Max', 1.0)]) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': group_input.outputs["X Max"], 'Y': attribute_6.outputs["Fac"], 'Z': attribute_6.outputs["Fac"]}) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Vector': attribute_5.outputs["Vector"], 7: combine_xyz_2, 8: combine_xyz_1}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Vector': map_range_1.outputs["Vector"]}) - -@node_utils.to_nodegroup('nodegroup_abs_y', singleton=True, type='ShaderNodeTree') -def nodegroup_abs_y(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'Vector', (0.0, 0.0, 0.0))]) - - separate_xyz_4 = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': group_input.outputs["Vector"]}) - - absolute = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_4.outputs["Y"]}, - attrs={'operation': 'ABSOLUTE'}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': separate_xyz_4.outputs["X"], 'Y': absolute, 'Z': separate_xyz_4.outputs["Z"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Vector': combine_xyz_1}) - -@node_utils.to_nodegroup('nodegroup_color_mask', singleton=False, type='ShaderNodeTree') -def nodegroup_color_mask(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - attribute_2 = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'tag_body'}) - - attribute_3 = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'tag_leg'}) - - attribute_4 = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'tag_head'}) - - attribute_5 = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'local_pos'}) - - group_2 = nw.new_node(nodegroup_abs_y().name, - input_kwargs={'Vector': attribute_5.outputs["Vector"]}) - - musgrave_texture = nw.new_node(Nodes.MusgraveTexture, - input_kwargs={'Vector': group_2, 'W': U(1e4), 'Scale': N(7, 1), 'Detail': N(7, 1), 'Dimension': U(1.5, 3)}, - attrs={'musgrave_dimensions': '4D'}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: musgrave_texture, 1: 0.69999999999999996}) - - colorramp_4 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': add}) - colorramp_4.color_ramp.interpolation = "EASE" - colorramp_4.color_ramp.elements[0].position 
= 0.0 - colorramp_4.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) - colorramp_4.color_ramp.elements[1].position = 0.4864 - colorramp_4.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - - group = nw.new_node(nodegroup_norm_local_pos().name) - - separate_xyz_4 = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': group}) - - colorramp_5 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': separate_xyz_4.outputs["Z"]}) - colorramp_5.color_ramp.interpolation = "EASE" - colorramp_5.color_ramp.elements[0].position = 0.0 - colorramp_5.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) - colorramp_5.color_ramp.elements[1].position = 0.5318 - colorramp_5.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: colorramp_4.outputs["Color"], 1: colorramp_5.outputs["Color"]}, - attrs={'operation': 'MULTIPLY'}) - - mix_3 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': attribute_4.outputs["Fac"], 'Color1': (1.0, 1.0, 1.0, 1.0), 'Color2': multiply}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Scale': N(14, 2)}) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Vector': noise_texture.outputs["Color"], 9: (-0.10000000000000001, -0.10000000000000001, -0.10000000000000001), 10: (0.10000000000000001, 0.10000000000000001, 0.10000000000000001)}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - add_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group, 1: map_range_1.outputs["Vector"]}) - - separate_xyz_2 = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': add_1.outputs["Vector"]}) - - colorramp_1 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': separate_xyz_2.outputs["X"]}) - colorramp_1.color_ramp.interpolation = "EASE" - colorramp_1.color_ramp.elements[0].position = 0.3091 - colorramp_1.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) - colorramp_1.color_ramp.elements[1].position = 0.9773 - colorramp_1.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - - colorramp_2 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': separate_xyz_2.outputs["Y"]}) - colorramp_2.color_ramp.interpolation = "EASE" - colorramp_2.color_ramp.elements[0].position = 0.0955 - colorramp_2.color_ramp.elements[0].color = (1.0, 1.0, 1.0, 1.0) - colorramp_2.color_ramp.elements[1].position = 0.5318 - colorramp_2.color_ramp.elements[1].color = (0.0, 0.0, 0.0, 1.0) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: colorramp_1.outputs["Color"], 1: colorramp_2.outputs["Color"]}, - attrs={'operation': 'MULTIPLY'}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: 1.0, 1: multiply_1}, - attrs={'operation': 'SUBTRACT'}) - - mix_2 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': attribute_3.outputs["Fac"], 'Color1': mix_3, 'Color2': subtract}) - - separate_xyz_3 = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': add_1.outputs["Vector"]}) - - add_2 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_3.outputs["Z"]}) - - colorramp_3 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': add_2}) - colorramp_3.color_ramp.elements[0].position = 0.2 - colorramp_3.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) - colorramp_3.color_ramp.elements[1].position = 0.6136 - colorramp_3.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - - mix_1 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': attribute_2.outputs["Fac"], 'Color1': mix_2, 'Color2': colorramp_3.outputs["Color"]}) - - colorramp = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': mix_1}) - colorramp.color_ramp.elements.new(0) - 
colorramp.color_ramp.elements[0].position = 0.2727 - colorramp.color_ramp.elements[0].color = (1.0, 1.0, 1.0, 1.0) - colorramp.color_ramp.elements[1].position = 0.6091 - colorramp.color_ramp.elements[1].color = (0.78220000000000001, 0.78220000000000001, 0.78220000000000001, 1.0) - colorramp.color_ramp.elements[2].position = 0.9727 - colorramp.color_ramp.elements[2].color = (0.0, 0.0, 0.0, 1.0) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Color': colorramp.outputs["Color"]}) \ No newline at end of file diff --git a/infinigen/assets/debris/__init__.py b/infinigen/assets/debris/__init__.py deleted file mode 100644 index 48c459b8d..000000000 --- a/infinigen/assets/debris/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from .lichen import LichenFactory -from .moss import MossFactory -from .pine_needle import PineNeedleFactory \ No newline at end of file diff --git a/infinigen/assets/debris/lichen.py b/infinigen/assets/debris/lichen.py deleted file mode 100644 index 0e190a598..000000000 --- a/infinigen/assets/debris/lichen.py +++ /dev/null @@ -1,101 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Lingjie Mei - - -from functools import reduce - -import bpy -import colorsys -import numpy as np -from numpy.random import uniform, normal as N - -from infinigen.assets.utils.misc import assign_material -from infinigen.core.util.color import hsv2rgba -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.placement.factory import AssetFactory, make_asset_collection -from infinigen.core.placement.instance_scatter import scatter_instances -from infinigen.core import surface -from infinigen.core.placement.factory import AssetFactory -from infinigen.infinigen_gpl.extras.diff_growth import build_diff_growth -from infinigen.assets.utils.object import data2mesh -from infinigen.assets.utils.mesh import polygon_angles -from infinigen.core.util import blender as butil -from infinigen.core.tagging import tag_object, tag_nodegroup - -class LichenFactory(AssetFactory): - - def __init__(self, factory_seed): - super(LichenFactory, self).__init__(factory_seed) - self.max_polygon = 1e4 - self.base_hue = uniform(.15, .3) - - @staticmethod - def build_lichen_circle_mesh(n): - angles = polygon_angles(n) - z_jitter = N(0., .02, n) - r_jitter = np.exp(uniform(-.2, 0., n)) - vertices = np.concatenate( - [np.stack([np.cos(angles) * r_jitter, np.sin(angles) * r_jitter, z_jitter]).T, np.zeros((1, 3))], 0) - faces = np.stack([np.arange(n), np.roll(np.arange(n), 1), np.full(n, n)]).T - mesh = data2mesh(vertices, [], faces, 'circle') - return mesh - - @staticmethod - def shader_lichen(nw: NodeWrangler, base_hue=.2, **params): - h_perturb = uniform(-0.02, .02) - s_perturb = uniform(-.05, -.0) - v_perturb = uniform(1., 1.5) - - def map_perturb(h, s, v): - return hsv2rgba(h + h_perturb, s + s_perturb, v / v_perturb) - - subsurface_ratio = .02 - roughness = 1. - - cr = nw.new_node(Nodes.ColorRamp, input_kwargs={'Fac': nw.musgrave(5000)}) - elements = cr.color_ramp.elements - elements.new(1) - elements[0].position = 0. - elements[1].position = 0.5 - elements[2].position = 1.0 - elements[0].color = map_perturb(base_hue, 1, .05) - elements[1].color = map_perturb((base_hue + .05) % 1, 1, .05) - elements[2].color = 0., 0., 0., 1. 
- - background = map_perturb(base_hue, .5, .3) - mix_rgb = nw.new_node(Nodes.MixRGB, - [nw.new_node(Nodes.ObjectInfo_Shader).outputs["Random"], cr.outputs["Color"], - background]) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, input_kwargs={ - 'Base Color': mix_rgb, - 'Subsurface': subsurface_ratio, - 'Subsurface Radius': (.01, .01, .01), - 'Subsurface Color': background, - 'Roughness': roughness - }) - - return principled_bsdf - - def create_asset(self, **kwargs): - n = np.random.randint(4, 6) - mesh = self.build_lichen_circle_mesh(n) - obj = bpy.data.objects.new('lichen', mesh) - bpy.context.scene.collection.objects.link(obj) - bpy.context.view_layer.objects.active = obj - - boundary = obj.vertex_groups.new(name='Boundary') - boundary.add(list(range(n)), 1.0, 'REPLACE') - - growth_scale = 1, 1, .5 - build_diff_growth(obj, boundary.index, max_polygons=self.max_polygon * uniform(0.2, 1), - growth_scale=growth_scale, inhibit_shell=4, repulsion_radius=2, dt=.25) - obj.scale = [0.004] * 3 - butil.apply_transform(obj) - assign_material(obj, surface.shaderfunc_to_material(LichenFactory.shader_lichen, - (self.base_hue + uniform(-.04, .04)) % 1)) - - tag_object(obj, 'lichen') - return obj diff --git a/infinigen/assets/debris/moss.py b/infinigen/assets/debris/moss.py deleted file mode 100644 index b0ab11542..000000000 --- a/infinigen/assets/debris/moss.py +++ /dev/null @@ -1,94 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory -# of this source tree. - -# Authors: Lingjie Mei -import math - -import colorsys - -from numpy.random import uniform as U - -from infinigen.core.placement.instance_scatter import scatter_instances -from infinigen.assets.utils.object import new_cube -from infinigen.core.util.color import hsv2rgba -from infinigen.assets.utils.misc import assign_material -from infinigen.core.placement.factory import AssetFactory, make_asset_collection -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core import surface -from infinigen.core.tagging import tag_object, tag_nodegroup -from infinigen.core.placement.instance_scatter import scatter_instances - -from infinigen.core.nodes.node_utils import build_color_ramp - -class MossFactory(AssetFactory): - - def __init__(self, factory_seed): - super(MossFactory, self).__init__(factory_seed) - self.max_polygon = 1e4 - self.base_hue = U(.2, .24) - - @staticmethod - def shader_moss(nw: NodeWrangler, base_hue=.3): - h_perturb = U(-0.02, .02) - s_perturb = U(-.1, -.0) - v_perturb = U(1., 1.5) - - def map_perturb(h, s, v): - return hsv2rgba(h + h_perturb, s + s_perturb, v / v_perturb) - - subsurface_ratio = .05 - roughness = 1. 
- mix_ratio = .2 - - cr = build_color_ramp(nw, - nw.new_node(Nodes.NoiseTexture, input_kwargs={'Scale': 5.}).outputs["Fac"], - [0, .5, 1], - [map_perturb(base_hue, .8, .1), map_perturb(base_hue - 0.05, .8, .1), (0., 0., 0., 1.)] - ) - - background = map_perturb(base_hue, .8, .02) - mix_rgb = nw.new_node(Nodes.MixRGB, - [nw.new_node(Nodes.ObjectInfo_Shader).outputs["Random"], cr.outputs["Color"], - background]) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, input_kwargs={ - 'Base Color': mix_rgb, - 'Subsurface': subsurface_ratio, - 'Subsurface Radius': (.01, .01, .01), - 'Subsurface Color': background, - 'Roughness': roughness - }) - - translucent_bsdf = nw.new_node(Nodes.TranslucentBSDF, input_kwargs={'Color': mix_rgb}) - - mix_shader = nw.new_node(Nodes.MixShader, [mix_ratio, principled_bsdf, translucent_bsdf]) - return mix_shader - - def create_asset(self, face_size=.01, **params): - obj = new_cube() - surface.add_geomod(obj, self.geo_moss_instance, apply=True, input_args=[face_size]) - assign_material(obj, surface.shaderfunc_to_material(MossFactory.shader_moss, - (self.base_hue + U(-.02, .02) % 1))) - tag_object(obj, 'moss') - return obj - - @staticmethod - def geo_moss_instance(nw: NodeWrangler, face_size): - radius = .008 - start = (0.0, 0.0, 0.0) - start_handle = (-.03, 0.0, .02) - end = (-0.04, 0.0, U(.04, .05)) - end_handle = (end[0] + U(-.03, -.02), 0., end[2] + U(-.01, .0)) - bezier = nw.new_node(Nodes.CurveBezierSegment, input_kwargs={ - 'Resolution': 10 * math.ceil(.01 / face_size), - 'Start': start, - 'Start Handle': start_handle, - 'End Handle': end_handle, - 'End': end - }) - circle = nw.new_node(Nodes.CurveCircle, input_kwargs={'Resolution': 4, 'Radius': radius}).outputs[ - "Curve"] - mesh = nw.curve2mesh(bezier, circle) - nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': mesh}) diff --git a/infinigen/assets/debris/pine_needle.py b/infinigen/assets/debris/pine_needle.py deleted file mode 100644 index 57049e887..000000000 --- a/infinigen/assets/debris/pine_needle.py +++ /dev/null @@ -1,82 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
- -# Authors: Alexander Raistrick, Lingjie Mei - -from numpy.random import normal as N - -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category - -from infinigen.core import surface -from infinigen.core.placement.factory import AssetFactory - -from infinigen.core.util import blender as butil -from infinigen.core.tagging import tag_object - -def shader_material(nw: NodeWrangler): - # Code generated using version 2.6.3 of the node_transpiler - - object_info = nw.new_node(Nodes.ObjectInfo_Shader) - - colorramp = nw.new_node(Nodes.ColorRamp, input_kwargs={'Fac': object_info.outputs["Random"]}) - colorramp.color_ramp.elements[0].position = 0.0000 - colorramp.color_ramp.elements[0].color = color_category('pine_needle') - colorramp.color_ramp.elements[1].position = 1.0000 - colorramp.color_ramp.elements[1].color = color_category('pine_needle') - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, input_kwargs={'Base Color': colorramp}) - - material_output = nw.new_node(Nodes.MaterialOutput, input_kwargs={'Surface': principled_bsdf}) - -@node_utils.to_nodegroup('nodegroup_pine_needle', singleton=False, type='GeometryNodeTree') -def nodegroup_pine_needle(nw: NodeWrangler): - # Code generated using version 2.6.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Scale', 0.0400), - ('NodeSocketFloat', 'Bend', 0.0300), - ('NodeSocketFloatDistance', 'Radius', 0.0010)]) - - scale = nw.new_node(Nodes.VectorMath, - input_kwargs={0: (-1.0000, 0.0000, 0.0000), 'Scale': group_input.outputs["Scale"]}, - attrs={'operation': 'SCALE'}) - - scale_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: (0.0000, 1.0000, 0.0000), 'Scale': group_input.outputs["Bend"]}, - attrs={'operation': 'SCALE'}) - - scale_2 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: (1.0000, 0.0000, 0.0000), 'Scale': group_input.outputs["Scale"]}, - attrs={'operation': 'SCALE'}) - - quadratic_bezier = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Resolution': 5, 'Start': scale.outputs["Vector"], 'Middle': scale_1.outputs["Vector"], 'End': scale_2.outputs["Vector"]}) - - curve_circle = nw.new_node(Nodes.CurveCircle, input_kwargs={'Resolution': 6, 'Radius': group_input.outputs["Radius"]}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': quadratic_bezier, 'Profile Curve': curve_circle.outputs["Curve"]}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': curve_to_mesh}, attrs={'is_active_output': True}) - -class PineNeedleFactory(AssetFactory): - - def sample_params(self): - s = N(1, 0.2) - return { - 'Scale': 0.04 * s, - 'Bend': 0.03 * s * N(1, 0.2), - 'Radius': 0.001 * s * N(1, 0.2) - } - - def create_asset(self, **_): - obj = butil.spawn_vert('pine_needle') - butil.modify_mesh(obj, 'NODES', apply=True, node_group=nodegroup_pine_needle(), - ng_inputs=self.sample_params()) - tag_object(obj, 'pine_needle') - return obj - - def finalize_assets(self, objs): - surface.add_material(objs, shader_material) \ No newline at end of file diff --git a/infinigen/assets/decor/__init__.py b/infinigen/assets/decor/__init__.py deleted file mode 100644 index c24bee2f6..000000000 --- a/infinigen/assets/decor/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from .aquarium_tank import AquariumTankFactory \ No newline at end of file diff --git a/infinigen/assets/deformed_trees/base.py b/infinigen/assets/deformed_trees/base.py deleted file mode 
100644 index f0c610659..000000000 --- a/infinigen/assets/deformed_trees/base.py +++ /dev/null @@ -1,61 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory -# of this source tree. - -# Authors: Lingjie Mei - - -import colorsys - -from numpy.random import uniform - -from infinigen.assets.trees import TreeFactory -from infinigen.assets.trees.generate import GenericTreeFactory, random_species -from infinigen.core.util.color import hsv2rgba -from infinigen.core.util.random import log_uniform -from infinigen.core.nodes.node_info import Nodes -from infinigen.core.nodes.node_wrangler import NodeWrangler -from infinigen.core.placement.factory import AssetFactory -from infinigen.core import surface -from infinigen.core.surface import NoApply -from infinigen.core.util.math import FixedSeed - - -class BaseDeformedTreeFactory(AssetFactory): - - def __init__(self, factory_seed, coarse=False): - super(BaseDeformedTreeFactory, self).__init__(factory_seed, coarse) - with FixedSeed(factory_seed): - (tree_params, _, _), _ = random_species() - tree_params.skinning.update({'Scaling': .2}) - self.base_factory = GenericTreeFactory(factory_seed, tree_params, None, NoApply, coarse) - self.trunk_surface = surface.registry('bark') - self.base_hue = uniform(.02, .08) - self.material = surface.shaderfunc_to_material(self.shader_rings, self.base_hue) - - def build_tree(self, i, distance, **kwargs): - return self.base_factory.spawn_asset(i=i, distance=distance) - - @staticmethod - def geo_xyz(nw: NodeWrangler): - geometry = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketGeometry', 'Geometry', None)]) - for name, component in zip('xyz', nw.separate(nw.new_node(Nodes.InputPosition))): - geometry = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry':geometry, 'Name': name, 'Value': component}) - nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': geometry}) - - @staticmethod - def shader_rings(nw: NodeWrangler, base_hue): - position = nw.combine(*map(lambda n: nw.new_node(Nodes.Attribute, attrs={'attribute_name': n}), 'xyz')) - ratio = nw.new_node(Nodes.WaveTexture, [position], - input_kwargs={'Scale': uniform(10, 20), 'Distortion': uniform(4, 10)}, - attrs={'wave_type': 'RINGS', 'rings_direction': 'Z', 'wave_profile': 'SAW'}) - bright_color = hsv2rgba(base_hue, uniform(.4, .8), log_uniform(.2, .8)) - dark_color = *colorsys.hsv_to_rgb((base_hue + uniform(-.02, .02)) % 1, uniform(.4, .8), - log_uniform(.02, .05)), 1. - color = nw.new_node(Nodes.MixRGB, [ratio, dark_color, bright_color]) - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, input_kwargs={'Base Color': color}) - return principled_bsdf - - def create_asset(self, face_size, **params): - raise NotImplementedError diff --git a/infinigen/assets/deformed_trees/fallen.py b/infinigen/assets/deformed_trees/fallen.py deleted file mode 100644 index c84970d81..000000000 --- a/infinigen/assets/deformed_trees/fallen.py +++ /dev/null @@ -1,93 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory -# of this source tree. 
- -# Authors: Lingjie Mei - - - -import bpy -import bmesh -import numpy as np -from numpy.random import uniform - -from infinigen.assets.deformed_trees.base import BaseDeformedTreeFactory -from infinigen.assets.utils.decorate import remove_vertices -from infinigen.assets.utils.misc import assign_material -from infinigen.assets.utils.object import join_objects, separate_loose -from infinigen.assets.utils.draw import cut_plane -from infinigen.core.nodes.node_info import Nodes -from infinigen.core.nodes.node_wrangler import NodeWrangler -from infinigen.core import surface -from infinigen.core.util.blender import deep_clone_obj -from infinigen.core.util import blender as butil -from infinigen.core.tagging import tag_object, tag_nodegroup - - -class FallenTreeFactory(BaseDeformedTreeFactory): - - @staticmethod - def geo_cutter(nw: NodeWrangler, strength, scale, radius, metric_fn): - geometry = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketGeometry', 'Geometry', None)]) - x, y, z = nw.separate(nw.new_node(Nodes.InputPosition)) - selection = nw.compare('LESS_THAN', nw.scalar_add(nw.power(x, 2), nw.power(y, 2)), 1) - offset = nw.scalar_multiply(nw.new_node(Nodes.Clamp, [nw.new_node(Nodes.NoiseTexture, input_kwargs={ - 'Vector': nw.new_node(Nodes.InputPosition), - 'Scale': scale - }), .3, .7]), strength) - offset = nw.scalar_multiply(offset, nw.build_float_curve(x, [(-radius, 1), (radius, 0)])) - anchors = (-1, 0), (-.5, 0), (0, -1), (.5, 0), (1, 0) - offset = nw.scalar_multiply(offset, nw.build_float_curve(surface.eval_argument(nw, metric_fn), anchors)) - geometry = nw.new_node(Nodes.SetPosition, [geometry, selection, None, nw.combine(0, 0, offset)]) - nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': geometry}) - - def build_half(self, obj, cut_center, cut_normal, noise_strength, noise_scale, radius, is_up=True): - obj, cut = cut_plane(obj, cut_center, cut_normal, not is_up) - assign_material(cut, self.material) - obj = join_objects([obj, cut]) - with butil.ViewportMode(obj, 'EDIT'), butil.Suppress(): - bpy.ops.mesh.select_all(action='SELECT') - bpy.ops.mesh.region_to_loop() - bpy.ops.mesh.remove_doubles(threshold=1e-2) - with butil.ViewportMode(obj, 'EDIT'): - bpy.ops.mesh.select_all(action='SELECT') - bpy.ops.mesh.fill_holes() - metric_fn = lambda nw: nw.dot(nw.sub(nw.new_node(Nodes.InputPosition), cut_center), cut_normal) - surface.add_geomod(obj, self.geo_cutter, apply=True, - input_args=[noise_strength, noise_scale, radius, metric_fn]) - obj = separate_loose(obj) - surface.add_geomod(obj, self.geo_xyz, apply=True) - return obj - - def create_asset(self, i, distance=0, **params): - upper = self.build_tree(i, distance, **params) - radius = max([np.sqrt(v.co[0] ** 2 + v.co[1] ** 2) for v in upper.data.vertices if v.co[-1] < .1]) - self.trunk_surface.apply(upper) - butil.apply_modifiers(upper) - lower = deep_clone_obj(upper, keep_materials=True) - cut_center = np.array([0, 0, uniform(.6, 1.2)]) - cut_normal = np.array([uniform(.1, .2), 0, 1]) - noise_strength = uniform(.3, .5) - noise_scale = uniform(10, 15) - upper = self.build_half(upper, cut_center, cut_normal, noise_strength, noise_scale, radius, True) - lower = self.build_half(lower, cut_center, cut_normal, noise_strength, noise_scale, radius, False) - - ortho = np.array([-cut_normal[0], 0, 1]) - locations = np.array([v.co for v in lower.data.vertices]) - highest = locations[np.argmax(locations @ ortho)] + np.array( - [-uniform(.05, .15), 0, -uniform(.05, .15)]) - upper.location = - highest - butil.apply_transform(upper, 
loc=True) - - x, _, z = np.mean(np.stack([v.co for v in upper.data.vertices]), 0) - r = np.sqrt(x * x + z * z) - if r > 0: - upper.rotation_euler[1] = np.pi / 2 + np.arcsin((highest[-1] - uniform(0, .2)) / r) - np.arctan( - x / z) - upper.location = highest - butil.apply_transform(upper, loc=True) - remove_vertices(upper, lambda x, y, z: z < -.5) - upper = separate_loose(upper) - obj = join_objects([upper, lower]) - tag_object(obj, 'fallen_tree') - return obj diff --git a/infinigen/assets/deformed_trees/hollow.py b/infinigen/assets/deformed_trees/hollow.py deleted file mode 100644 index 3487057eb..000000000 --- a/infinigen/assets/deformed_trees/hollow.py +++ /dev/null @@ -1,91 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory -# of this source tree. - -# Authors: Lingjie Mei - - -import bpy -import numpy as np -from numpy.random import uniform - -from infinigen.assets.deformed_trees.base import BaseDeformedTreeFactory -from infinigen.assets.utils.decorate import read_co, read_material_index, write_material_index -from infinigen.assets.utils.misc import assign_material -from infinigen.assets.utils.object import join_objects, separate_loose -from infinigen.assets.utils.nodegroup import geo_selection -from infinigen.core.nodes.node_info import Nodes -from infinigen.core.nodes.node_wrangler import NodeWrangler -from infinigen.core import surface -from infinigen.core.util.blender import deep_clone_obj, select_none -from infinigen.core.util import blender as butil -from infinigen.core.tagging import tag_object, tag_nodegroup - - -class HollowTreeFactory(BaseDeformedTreeFactory): - - @staticmethod - def geo_texture(nw: NodeWrangler, material_index): - geometry = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketGeometry', 'Geometry', None)]) - selection = nw.compare('EQUAL', nw.new_node(Nodes.MaterialIndex), material_index) - offset = nw.scale(nw.scalar_multiply(nw.musgrave(uniform(10, 20)), -uniform(.03, .06)), - nw.new_node(Nodes.InputNormal)) - geometry = nw.new_node(Nodes.SetPosition, [geometry, selection, None, offset]) - nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': geometry}) - - @staticmethod - def filter_lower(obj): - select_none() - objs = butil.split_object(obj) - filtered = [o for o in objs if np.min(read_co(o)[:, -1]) < .5] - obj = filtered[np.argmax([len(o.data.vertices) for o in filtered])] - objs.remove(obj) - butil.delete(objs) - return obj - - def create_asset(self, i, distance=0, **params): - obj = self.build_tree(i, distance, **params) - scale = uniform(.8, 1.) 
- threshold = uniform(.36, .4) - - def selection(nw: NodeWrangler): - x, y, z = nw.separate(nw.new_node(Nodes.InputPosition)) - radius = nw.power(nw.scalar_add(nw.power(x, 2), nw.power(y, 2)), .5) - vector = nw.combine(nw.scalar_divide(x, radius), nw.scalar_divide(y, radius), z) - noise = nw.compare('GREATER_THAN', - nw.new_node(Nodes.NoiseTexture, [vector], input_kwargs={'Scale': scale}), - threshold) - r_outside = nw.compare('GREATER_THAN', nw.scalar_add(nw.power(x, 2), nw.power(y, 2)), 1) - z_lower = nw.scalar_add(.1, - nw.scale(nw.new_node(Nodes.NoiseTexture, attrs={'noise_dimensions': '2D'}), - .4)) - z_upper = nw.scalar_sub(3.5, - nw.scale(nw.new_node(Nodes.NoiseTexture, attrs={'noise_dimensions': '2D'}), - .4)) - z_outside = nw.boolean_math('OR', nw.compare('LESS_THAN', z, z_lower), - nw.compare('GREATER_THAN', z, z_upper)) - return nw.boolean_math('OR', nw.boolean_math('OR', z_outside, noise), r_outside) - - surface.add_geomod(obj, geo_selection, apply=True, input_args=[selection]) - hollow = deep_clone_obj(obj) - - self.trunk_surface.apply(obj) - butil.apply_modifiers(obj) - assign_material(hollow, self.material) - obj = join_objects([self.filter_lower(obj), self.filter_lower(hollow)]) - - with butil.ViewportMode(obj, 'EDIT'): - bpy.ops.mesh.select_all(action='SELECT') - bpy.ops.mesh.region_to_loop() - bpy.ops.mesh.bridge_edge_loops(type='PAIRS', number_cuts=10, interpolation='LINEAR') - - ring_material_index = list(obj.data.materials).index(obj.data.materials['shader_rings']) - surface.add_geomod(obj, self.geo_texture, apply=True, input_args=[ring_material_index]) - - material_indices = read_material_index(obj) - null_indices = np.array([i for i, m in enumerate(obj.data.materials) if not hasattr(m, 'name')]) - material_indices[ - np.any(material_indices[:, np.newaxis] == null_indices[np.newaxis, :], -1)] = ring_material_index - write_material_index(obj, material_indices) - tag_object(obj, 'hollow_tree') - return obj diff --git a/infinigen/assets/deformed_trees/rotten.py b/infinigen/assets/deformed_trees/rotten.py deleted file mode 100644 index 88b68c77d..000000000 --- a/infinigen/assets/deformed_trees/rotten.py +++ /dev/null @@ -1,90 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
- -# Authors: Lingjie Mei - - -import bpy -import numpy as np -from numpy.random import uniform - -from infinigen.assets.deformed_trees.base import BaseDeformedTreeFactory -from infinigen.assets.utils.decorate import read_material_index, remove_vertices, write_material_index -from infinigen.assets.utils.misc import assign_material -from infinigen.core.util.random import log_uniform -from infinigen.assets.utils.object import join_objects, new_icosphere, separate_loose -from infinigen.core.nodes.node_info import Nodes -from infinigen.core.nodes.node_wrangler import NodeWrangler -from infinigen.core import surface -from infinigen.core.util.blender import deep_clone_obj -from infinigen.core.util import blender as butil -from infinigen.core.tagging import tag_object, tag_nodegroup - -class RottenTreeFactory(BaseDeformedTreeFactory): - @staticmethod - def geo_cutter(nw: NodeWrangler, strength, scale, metric_fn): - geometry = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketGeometry', 'Geometry', None)]) - x, y, z = nw.separate(nw.new_node(Nodes.InputPosition)) - selection = nw.compare('LESS_THAN', nw.scalar_add(nw.power(x, 2), nw.power(y, 2)), 1) - offset = nw.scalar_multiply(nw.new_node(Nodes.Clamp, [nw.new_node(Nodes.NoiseTexture, input_kwargs={ - 'Vector': nw.new_node(Nodes.InputPosition), - 'Scale': scale}, attrs={'noise_dimensions': '2D'}), .3, .7]), strength) - anchors = (0, 1), (1.02, 1), (1.05, 0), (2, 0) - metric = surface.eval_argument(nw, metric_fn) - offset = nw.scalar_multiply(offset, nw.build_float_curve(metric, anchors)) - offset = nw.scalar_multiply(offset, nw.switch( - nw.compare('GREATER_THAN', nw.separate(nw.new_node(Nodes.InputNormal))[-1], 0), 1, -1)) - geometry = nw.new_node(Nodes.SetPosition, [geometry, selection, None, nw.combine(0, 0, offset)]) - nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': geometry}) - - def build_cutter(self, radius, height): - cutter = new_icosphere(subdivisions=6) - angle = uniform(-np.pi, 0) - depth = radius * uniform(.4, .9) - cutter_scale = np.array([radius * uniform(.8, 1.2), radius * uniform(.8, 1.2), log_uniform(1., 1.2)]) - cutter_location = np.array([depth * np.cos(angle), depth * np.sin(angle), height]) - cutter.scale = cutter_scale - cutter.location = cutter_location - assign_material(cutter, self.material) - metric = lambda x, y, z: np.linalg.norm( - (np.stack([x, y, z], -1) - cutter_location[np.newaxis, :]) / cutter_scale[np.newaxis, :], axis=-1) - fn = lambda x, y, z: metric(x, y, z) < 1 + 1e-4 - inverse_fn = lambda x, y, z: metric(x, y, z) > 1 + 1e-4 - metric_fn = lambda nw: nw.vector_math('LENGTH', nw.divide( - nw.sub(nw.new_node(Nodes.InputPosition), cutter_location), cutter_scale)) - return cutter, fn, inverse_fn, metric_fn - - def create_asset(self, i, distance=0, **params): - outer = self.build_tree(i, distance, **params) - radius = max([np.sqrt(v.co[0] ** 2 + v.co[1] ** 2) for v in outer.data.vertices if v.co[-1] < .1]) - height = uniform(.8, 1.6) - cutter, fn, inverse_fn, metric_fn = self.build_cutter(radius, height) - butil.modify_mesh(outer, 'BOOLEAN', object=cutter, operation='DIFFERENCE') - outer = separate_loose(outer) - inner = deep_clone_obj(outer) - remove_vertices(outer, fn) - remove_vertices(inner, inverse_fn) - self.trunk_surface.apply(outer) - butil.apply_modifiers(outer) - - obj = join_objects([outer, inner]) - with butil.ViewportMode(obj, 'EDIT'): - bpy.ops.mesh.select_all(action='SELECT') - bpy.ops.mesh.region_to_loop() - bpy.ops.mesh.bridge_edge_loops(number_cuts=10, interpolation='LINEAR') - - 
ring_material_index = list(obj.data.materials).index(obj.data.materials['shader_rings']) - material_indices = read_material_index(obj) - null_indices = np.array([i for i, m in enumerate(obj.data.materials) if not hasattr(m, 'name')]) - material_indices[ - np.any(material_indices[:, np.newaxis] == null_indices[np.newaxis, :], -1)] = ring_material_index - write_material_index(obj, material_indices) - - noise_strength = cutter.scale[-1] * uniform(.5, .8) - noise_scale = uniform(10, 15) - surface.add_geomod(obj, self.geo_cutter, apply=True, - input_args=[noise_strength, noise_scale, metric_fn]) - surface.add_geomod(obj, self.geo_xyz, apply=True) - butil.delete(cutter) - tag_object(outer, 'rotten_tree') - return outer diff --git a/infinigen/assets/deformed_trees/truncated.py b/infinigen/assets/deformed_trees/truncated.py deleted file mode 100644 index 1a398b6fc..000000000 --- a/infinigen/assets/deformed_trees/truncated.py +++ /dev/null @@ -1,46 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory -# of this source tree. - -# Authors: Lingjie Mei - - -import numpy as np -from numpy.random import uniform - -from infinigen.assets.deformed_trees import FallenTreeFactory -from infinigen.assets.utils.decorate import read_co -from infinigen.core.nodes.node_info import Nodes -from infinigen.core.nodes.node_wrangler import NodeWrangler -from infinigen.core import surface -from infinigen.core.util import blender as butil -from infinigen.core.tagging import tag_object, tag_nodegroup - - -class TruncatedTreeFactory(FallenTreeFactory): - - @staticmethod - def geo_cutter(nw: NodeWrangler, strength, scale, radius, metric_fn): - geometry = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketGeometry', 'Geometry', None)]) - offset = nw.scalar_multiply(nw.new_node(Nodes.Clamp, [nw.new_node(Nodes.NoiseTexture, input_kwargs={ - 'Vector': nw.new_node(Nodes.InputPosition), - 'Scale': scale - }), .3, .7]), strength) - anchors = (-1, 0), (-.5, 0), (0, 1), (.5, 0), (1, 0) - offset = nw.scalar_multiply(offset, nw.build_float_curve(surface.eval_argument(nw, metric_fn), anchors)) - geometry = nw.new_node(Nodes.SetPosition, [geometry, None, None, nw.combine(0, 0, offset)]) - nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': geometry}) - - def create_asset(self, i, distance=0, **params): - obj = self.build_tree(i, distance, **params) - x, y, z = read_co(obj).T - radius = np.amax(np.sqrt(x ** 2 + y ** 2)[z < .1]) - self.trunk_surface.apply(obj) - butil.apply_modifiers(obj) - cut_center = np.array([0, 0, uniform(.8, 1.5)]) - cut_normal = np.array([uniform(-.4, .4), 0, 1]) - noise_strength = uniform(.6, 1.) - noise_scale = uniform(10, 15) - obj = self.build_half(obj, cut_center, cut_normal, noise_strength, noise_scale, radius, False) - tag_object(obj, 'truncated_tree') - return obj diff --git a/infinigen/assets/elements/__init__.py b/infinigen/assets/elements/__init__.py deleted file mode 100644 index 765c5d999..000000000 --- a/infinigen/assets/elements/__init__.py +++ /dev/null @@ -1,10 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
- -# Authors: Lingjie Mei -from .staircases import * -from .doors import * -from .rug import RugFactory -from .warehouses import * -from .nature_shelf_trinkets.generate import NatureShelfTrinketsFactory -from .pillars import PillarFactory diff --git a/infinigen/assets/elements/doors/base.py b/infinigen/assets/elements/doors/base.py deleted file mode 100644 index dd44e0e8e..000000000 --- a/infinigen/assets/elements/doors/base.py +++ /dev/null @@ -1,217 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory -# of this source tree. - -# Authors: -# - Lingjie Mei: primary author - -import bpy -import gin -import numpy as np -from numpy.random import uniform - -from infinigen.assets.materials.common import unique_surface -from infinigen.assets.utils.decorate import mirror, read_co, write_attribute, write_co -from infinigen.assets.utils.draw import spin -from infinigen.assets.utils.nodegroup import geo_radius -from infinigen.assets.utils.object import data2mesh, join_objects, mesh2obj, new_cube, new_line -from infinigen.core.constraints.example_solver.room import constants -from infinigen.core.placement.factory import AssetFactory -from infinigen.core import surface -from infinigen.assets.materials import glass, metal, wood -from infinigen.core.util.bevelling import add_bevel, get_bevel_edges -from infinigen.core.util.blender import deep_clone_obj -from infinigen.core.util.math import FixedSeed -from infinigen.core.util.random import log_uniform -from infinigen.core.util import blender as butil - -from infinigen.assets.utils.autobevel import BevelSharp - -class BaseDoorFactory(AssetFactory): - - def __init__(self, factory_seed, coarse=False): - super(BaseDoorFactory, self).__init__(factory_seed, coarse) - with FixedSeed(self.factory_seed): - self.width = constants.DOOR_WIDTH - self.height = constants.DOOR_SIZE - self.depth = uniform(.04, .06) - self.panel_margin = log_uniform(.08, .12) - self.bevel_width = uniform(.005, .01) - self.out_bevel = uniform() < .7 - self.shrink_width = log_uniform(.005, .06) - - surface_fn = np.random.choice([metal, wood], p=[.2, .8]) - self.surface = unique_surface(surface_fn, self.factory_seed) - self.has_glass = False - self.glass_surface = glass - self.has_louver = False - self.louver_surface = np.random.choice([metal, wood], p=[.2, .8]) - - self.handle_type = np.random.choice(['knob', 'lever', 'pull']) - self.handle_surface = np.random.choice([metal, wood], p=[.2, .8]) - self.handle_offset = self.panel_margin * .5 - self.handle_height = self.height * uniform(.45, .5) - - self.knob_radius = uniform(.03, .04) - base_radius = uniform(1.1, 1.2) - mid_radius = uniform(.4, .5) - self.knob_radius_mid = base_radius, base_radius, mid_radius, mid_radius, 1, uniform(.6, .8), 0 - self.knob_depth = uniform(.08, .1) - self.knob_depth_mid = [0, uniform(.1, .15), uniform(.25, .3), uniform(.35, .45), uniform(.6, .8), 1, - 1 + 1e-3] - - self.lever_radius = uniform(.03, .04) - self.lever_mid_radius = uniform(.01, .02) - self.lever_depth = uniform(.05, .08) - self.lever_mid_depth = uniform(.15, .25) - self.lever_length = log_uniform(.15, .2) - self.level_type = np.random.choice(['wave', 'cylinder', 'bent']) - - self.pull_size = log_uniform(.1, .4) - self.pull_depth = uniform(.05, .08) - self.pull_width = log_uniform(.08, .15) - self.pull_extension = uniform(.05, .15) - self.to_pull_bevel = uniform() < .5 - self.pull_bevel_width = uniform(.02, .04) - self.pull_radius = uniform(.01, 
.02) - self.pull_type = np.random.choice(['u', 'tee', 'zed']) - self.is_pull_circular = uniform() < .5 or self.pull_type == 'zed' - self.panel_surface = unique_surface(surface_fn, np.random.randint(1e5)) - self.auto_bevel = BevelSharp() - self.side_bevel = log_uniform(.005,.015) - - self.metal_color = metal.sample_metal_color() - - def create_asset(self, **params) -> bpy.types.Object: - for _ in range(100): - obj = self._create_asset() - if max(obj.dimensions) < 5: - return obj - else: - raise ValueError('Bad door booleaning') - - def _create_asset(self): - obj = new_cube(location=(1, 1, 1)) - butil.apply_transform(obj, loc=True) - obj.scale = self.width / 2, self.depth / 2, self.height / 2 - butil.apply_transform(obj) - panels = self.make_panels() - extras = [] - for panel in panels: - extras.extend(panel['func'](obj, panel)) - match self.handle_type: - case 'knob': - extras.extend(self.make_knobs()) - case 'lever': - extras.extend(self.make_levers()) - case 'pull': - extras.extend(self.make_pulls()) - obj = join_objects([obj] + extras) - self.auto_bevel(obj) - obj.location = -self.width, -self.depth, 0 - butil.apply_transform(obj, True) - obj = add_bevel(obj, get_bevel_edges(obj), offset=self.side_bevel) - return obj - - def make_panels(self): - return [] - - def finalize_assets(self, assets): - self.surface.apply(assets, metal_color=self.metal_color, vertical=True) - if self.has_glass: - self.glass_surface.apply(assets, selection='glass', clear=True) - if self.has_louver: - self.louver_surface.apply(assets, selection='louver', metal_color=self.metal_color) - self.handle_surface.apply(assets, selection='handle', metal_color='natural') - - def make_knobs(self): - x_anchors = np.array(self.knob_radius_mid) * self.knob_radius - y_anchors = np.array(self.knob_depth_mid) * self.knob_depth - obj = spin([x_anchors, y_anchors, 0], [0, 2, 3], axis=(0, 1, 0)) - with butil.ViewportMode(obj, 'EDIT'): - bpy.ops.mesh.select_all(action='SELECT') - bpy.ops.mesh.region_to_loop() - bpy.ops.mesh.edge_face_add() - return self.make_handles(obj) - - def make_handles(self, obj): - write_attribute(obj, 1, 'handle', 'FACE') - obj.location = self.handle_offset, 0, self.handle_height - butil.apply_transform(obj, loc=True) - other = deep_clone_obj(obj) - obj.location[1] += self.depth - butil.apply_transform(obj, loc=True) - mirror(other, 1) - return [obj, other] - - def make_levers(self): - x_anchors = self.lever_radius, self.lever_radius, self.lever_mid_radius, self.lever_mid_radius, 0 - y_anchors = np.array([0, self.lever_mid_depth, self.lever_mid_depth, 1, 1 + 1e-3]) * self.lever_depth - obj = spin([x_anchors, y_anchors, 0], [0, 1, 2, 3], axis=(0, 1, 0)) - with butil.ViewportMode(obj, 'EDIT'): - bpy.ops.mesh.select_all(action='SELECT') - bpy.ops.mesh.region_to_loop() - bpy.ops.mesh.fill() - lever = new_line(4) - if self.level_type == 'wave': - co = read_co(lever) - co[1, -1] = -uniform(.2, .3) - co[3, -1] = uniform(.1, .15) - write_co(lever, co) - elif self.level_type == 'bent': - co = read_co(lever) - co[4, 1] = -uniform(.2, .3) - write_co(lever, co) - lever.scale = [self.lever_length] * 3 - butil.apply_transform(lever) - butil.select_none() - with butil.ViewportMode(lever, 'EDIT'): - bpy.ops.mesh.select_mode(type='EDGE') - bpy.ops.mesh.select_all(action='SELECT') - bpy.ops.mesh.extrude_edges_move(TRANSFORM_OT_translate={'value': (0, 0, self.lever_mid_radius * 2)}) - butil.modify_mesh(lever, 'SOLIDIFY', lever, thickness=self.lever_mid_radius, offset=0) - butil.modify_mesh(lever, 'SUBSURF', render_levels=1, 
levels=1) - lever.location = -self.lever_mid_radius, self.lever_depth, -self.lever_mid_radius - butil.apply_transform(lever, loc=True) - obj = join_objects([obj, lever]) - return self.make_handles(obj) - - def make_pulls(self): - if self.pull_type == 'u': - vertices = (0, 0, self.pull_size), (0, self.pull_depth, self.pull_size), (0, self.pull_depth, 0) - edges = (0, 1), (1, 2) - elif self.pull_type == 'tee': - vertices = (0, 0, self.pull_size), (0, self.pull_depth, self.pull_size), (0, self.pull_depth, 0), ( - 0, self.pull_depth, self.pull_size + self.pull_extension) - edges = (0, 1), (1, 2), (1, 3) - else: - vertices = (0, 0, self.pull_size), (0, self.pull_depth, self.pull_size), ( - self.pull_width, self.pull_depth, self.pull_size), (self.pull_width, self.pull_depth, 0), - edges = (0, 1), (1, 2), (2, 3) - obj = mesh2obj(data2mesh(vertices, edges)) - butil.modify_mesh(obj, 'MIRROR', use_axis=(False, False, True)) - if self.to_pull_bevel: - butil.modify_mesh(obj, 'BEVEL', width=self.pull_bevel_width, segments=4, affect='VERTICES') - if self.is_pull_circular: - surface.add_geomod( - obj, geo_radius, apply=True, input_args=[self.pull_radius, 32], input_kwargs={'to_align_tilt': False} - ) - else: - with butil.ViewportMode(obj, 'EDIT'): - bpy.ops.mesh.select_mode(type='EDGE') - bpy.ops.mesh.select_all(action='SELECT') - bpy.ops.mesh.extrude_edges_move(TRANSFORM_OT_translate={'value': (self.pull_radius * 2, 0, 0)}) - bpy.ops.mesh.select_all(action='SELECT') - bpy.ops.mesh.normals_make_consistent(inside=False) - obj.location = -self.pull_radius, -self.pull_radius, -self.pull_radius - butil.apply_transform(obj, loc=True) - butil.modify_mesh(obj, 'SOLIDIFY', thickness=self.pull_radius * 2, offset=0) - return self.make_handles(obj) - - @property - def casing_factory(self): - from infinigen.assets.elements import DoorCasingFactory - factory = DoorCasingFactory(self.factory_seed, self.coarse) - factory.surface = self.surface - factory.metal_color = self.metal_color - return factory diff --git a/infinigen/assets/elements/nature_shelf_trinkets/generate.py b/infinigen/assets/elements/nature_shelf_trinkets/generate.py deleted file mode 100644 index a7a2bc717..000000000 --- a/infinigen/assets/elements/nature_shelf_trinkets/generate.py +++ /dev/null @@ -1,98 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
- -# Authors: Stamatis Alexandropulos - -import colorsys - -import bpy -import numpy as np -import trimesh -import mathutils -from numpy.random import uniform - - -from infinigen.core.placement.factory import AssetFactory -from infinigen.core.util.math import FixedSeed -from infinigen.assets.corals import CoralFactory -from infinigen.assets.rocks import BlenderRockFactory -from infinigen.assets.rocks.boulder import BoulderFactory -from infinigen.assets.mollusk import MolluskFactory, AugerFactory, ClamFactory, ConchFactory, MusselFactory, ScallopFactory, VoluteFactory -from infinigen.assets.monocot import PineconeFactory -from infinigen.assets.creatures.beetle import BeetleFactory, AntSwarmFactory -from infinigen.assets.creatures.bird import BirdFactory, FlyingBirdFactory -from infinigen.assets.creatures.carnivore import CarnivoreFactory -from infinigen.assets.creatures.herbivore import HerbivoreFactory -from infinigen.assets.creatures.crustacean import CrustaceanFactory, CrabFactory, LobsterFactory, SpinyLobsterFactory -from infinigen.assets.creatures.reptile import FrogFactory -from infinigen.assets.creatures.insects.dragonfly import DragonflyFactory -from infinigen.assets.utils.decorate import remove_vertices -from infinigen.core.util import blender as butil -from infinigen.assets.utils import object as obj -from infinigen.assets.utils.object import join_objects - - - - -class NatureShelfTrinketsFactory(AssetFactory): - factories = [CoralFactory,BlenderRockFactory, BoulderFactory, PineconeFactory, MolluskFactory, - AugerFactory, ClamFactory, ConchFactory, MusselFactory, ScallopFactory, VoluteFactory, CarnivoreFactory, HerbivoreFactory] - probs = np.array([1,1,1,1,3,2,3,2,2,2,2,5,5]) - - def __init__(self, factory_seed, coarse=False): - super(NatureShelfTrinketsFactory, self).__init__(factory_seed, coarse) - with FixedSeed(self.factory_seed): - base_factory_fn = np.random.choice(self.factories, p=self.probs / self.probs.sum()) - - kwargs = {} - if base_factory_fn in [HerbivoreFactory, CarnivoreFactory]: - kwargs.update({ - 'hair': False - }) - - self.base_factory = base_factory_fn(self.factory_seed, **kwargs) - - - def create_placeholder(self, **params) -> bpy.types.Object: - size = np.random.uniform(0.1, 0.15) - bpy.ops.mesh.primitive_cube_add(size=size, location=(0,0, size/2)) - placeholder = bpy.context.active_object - return placeholder - - - def create_asset(self, i, placeholder=None, **params): - asset = self.base_factory.spawn_asset( - np.random.randint(1e7), - distance=200, - adaptive_resolution = False - ) - - if (list(asset.children)): - asset = join_objects(list(asset.children)) - - # butil.modify_mesh(asset, 'DECIMATE') - butil.apply_transform(asset,loc=True) - butil.apply_modifiers(asset) - if isinstance(self.base_factory, HerbivoreFactory) or isinstance(self.base_factory, CarnivoreFactory): - pass - else: - if not isinstance(asset, trimesh.Trimesh): - mesh = obj.obj2trimesh(asset) - stable_poses, probs = trimesh.poses.compute_stable_poses(mesh) - stable_pose = stable_poses[np.argmax(probs)] - asset.rotation_euler = mathutils.Matrix(stable_pose[:3,:3]).to_euler() - butil.apply_transform(asset,rot =True) - dim = asset.dimensions - bounding_box = placeholder.dimensions - scale = min([bounding_box[i]/dim[i] for i in range(3)]) - asset.scale = [scale for i in range(3)] - # asset.dimensions = placeholder.dimensions - butil.apply_transform(asset,loc=True) - bounds = butil.bounds(asset) - cur_loc = asset.location - new_location = [ - cur_loc[i]-(bounds[0][i] + bounds[1][i])/2 for i 
in range(3)] - new_location[2] = cur_loc[2] - (bounds[0][2] + bounding_box[2]/2) - asset.location = new_location - butil.apply_transform(asset,loc=True) - return asset \ No newline at end of file diff --git a/infinigen/assets/fluid/__init__.py b/infinigen/assets/fluid/__init__.py index 1729ae108..8886e3c3e 100644 --- a/infinigen/assets/fluid/__init__.py +++ b/infinigen/assets/fluid/__init__.py @@ -1,16 +1,11 @@ -from .fluid_scenecomp_additions import cached_fire_scenecomp_options -from .fluid import set_fire_to_assets from .asset_cache import FireCachingSystem from .cached_factory_wrappers import ( - CachedBoulderFactory, + CachedBoulderFactory, CachedBushFactory, - CachedCactusFactory, - CachedCreatureFactory, - CachedTreeFactory + CachedCactusFactory, + CachedCreatureFactory, + CachedTreeFactory, ) -from .flip_fluid import ( - make_river, - make_still_water, - make_tilted_river, - make_beach -) \ No newline at end of file +from .flip_fluid import make_beach, make_river, make_still_water, make_tilted_river +from .fluid import set_fire_to_assets +from .fluid_scenecomp_additions import cached_fire_scenecomp_options diff --git a/infinigen/assets/fluid/asset_cache.py b/infinigen/assets/fluid/asset_cache.py index 1e41d07d7..abbb74780 100644 --- a/infinigen/assets/fluid/asset_cache.py +++ b/infinigen/assets/fluid/asset_cache.py @@ -3,30 +3,20 @@ # Authors: Karhan Kayan -import argparse -import os -import sys -from pathlib import Path -from mathutils import Vector import importlib +import json +import logging +import os from collections import defaultdict - +from pathlib import Path import bpy import gin import numpy as np -import json - -from infinigen.assets.fluid.fluid import ( - find_available_cache, - set_obj_on_fire, - fire_smoke_ground_truth, -) +from mathutils import Vector -import time +from infinigen.assets.fluid.fluid import find_available_cache, set_obj_on_fire from infinigen.core.util import blender as butil -from infinigen.core.util.math import FixedSeed -import logging logger = logging.getLogger(__name__) @@ -35,12 +25,15 @@ SPECIES_MAX = 20 I_MAX = 20 + @gin.configurable class FireCachingSystem: - def __init__(self, asset_folder = None, create=False, max_fire_assets = 3, max_per_kind = 1) -> None: - if asset_folder == None: + def __init__( + self, asset_folder=None, create=False, max_fire_assets=3, max_per_kind=1 + ) -> None: + if asset_folder is None: raise ValueError("asset_folder not set for Fire") - + cache_folder = os.path.join(asset_folder, "Fire") if not os.path.exists(cache_folder): @@ -207,5 +200,3 @@ def link_fire(self, full_sim_folder, sim_folder, obj, factory): self.n_placed[factory.__class__.__name__] += 1 return dom - - diff --git a/infinigen/assets/fluid/bounding_box.py b/infinigen/assets/fluid/bounding_box.py index 771cd5b4d..509ae9f98 100644 --- a/infinigen/assets/fluid/bounding_box.py +++ b/infinigen/assets/fluid/bounding_box.py @@ -3,13 +3,9 @@ # Authors: Karhan Kayan -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category + from infinigen.core import surface +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler def geometry_geometry_nodes(nw: NodeWrangler, obj): diff --git a/infinigen/assets/fluid/cached_factory_wrappers.py b/infinigen/assets/fluid/cached_factory_wrappers.py index 78069b534..11b3c8768 100644 --- 
a/infinigen/assets/fluid/cached_factory_wrappers.py +++ b/infinigen/assets/fluid/cached_factory_wrappers.py @@ -3,22 +3,27 @@ # Authors: Karhan Kayan -from infinigen.assets.trees import TreeFactory, BushFactory -from infinigen.assets.creatures import CarnivoreFactory -from infinigen.assets.cactus import CactusFactory -from infinigen.assets.rocks.boulder import BoulderFactory +from infinigen.assets.objects.cactus import CactusFactory +from infinigen.assets.objects.creatures import CarnivoreFactory +from infinigen.assets.objects.rocks.boulder import BoulderFactory +from infinigen.assets.objects.trees import BushFactory, TreeFactory + class CachedBoulderFactory(BoulderFactory): pass + class CachedCactusFactory(CactusFactory): pass + class CachedCreatureFactory(CarnivoreFactory): pass + class CachedBushFactory(BushFactory): pass + class CachedTreeFactory(TreeFactory): - pass \ No newline at end of file + pass diff --git a/infinigen/assets/fluid/duplication_geomod.py b/infinigen/assets/fluid/duplication_geomod.py index ae0cb6dc6..e05cbd242 100644 --- a/infinigen/assets/fluid/duplication_geomod.py +++ b/infinigen/assets/fluid/duplication_geomod.py @@ -3,13 +3,9 @@ # Authors: Karhan Kayan -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category + from infinigen.core import surface +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler def duplicate(nw: NodeWrangler, obj): diff --git a/infinigen/assets/fluid/flip_fluid.py b/infinigen/assets/fluid/flip_fluid.py index 98539387f..8eddc7481 100644 --- a/infinigen/assets/fluid/flip_fluid.py +++ b/infinigen/assets/fluid/flip_fluid.py @@ -4,33 +4,30 @@ # Authors: Karhan Kayan import os -import sys from pathlib import Path -from numpy.random import uniform -from mathutils import Vector import bpy +from mathutils import Vector +# ruff: noqa: E402 os.environ["OPENCV_IO_ENABLE_OPENEXR"] = "1" # This must be done BEFORE import cv2. 
-from infinigen.assets.materials import water, lava, river_water, new_whitewater -from infinigen.assets.materials import blackbody_shader, waterfall_material, smoke_material -from infinigen.core.util.blender import deep_clone_obj -from infinigen.core.util.logging import Timer - -from infinigen.core.util import blender as butil -from infinigen.core.util.organization import AssetFile, Materials, Process -from infinigen.core.util.blender import object_from_trimesh, SelectObjects -from infinigen.terrain.utils import Mesh -import cv2 import subprocess -from infinigen.assets.fluid.fluid import find_available_cache, obj_bb_minmax -import infinigen.assets.fluid.liquid_particle_material as liquid_particle_material +import cv2 +import gin from numpy.random import normal as N -import gin -from infinigen.core.util.organization import Assets, LandTile +from infinigen.assets.fluid.fluid import find_available_cache, obj_bb_minmax +from infinigen.assets.materials import ( + new_whitewater, + river_water, + water, +) +from infinigen.core.util import blender as butil +from infinigen.core.util.logging import Timer +from infinigen.core.util.organization import AssetFile, LandTile, Materials, Process +from infinigen.terrain.utils import Mesh def get_objs_inside_domain(dom, objects): @@ -516,4 +513,4 @@ def make_tilted_river( if __name__ == "__main__": butil.clear_scene(targets=[bpy.data.objects]) - ASSET_ENV_VAR = "INFINIGEN_ASSET_FOLDER" \ No newline at end of file + ASSET_ENV_VAR = "INFINIGEN_ASSET_FOLDER" diff --git a/infinigen/assets/fluid/flip_init.py b/infinigen/assets/fluid/flip_init.py index 1e3125168..1992bfb11 100644 --- a/infinigen/assets/fluid/flip_init.py +++ b/infinigen/assets/fluid/flip_init.py @@ -5,5 +5,5 @@ import bpy -bpy.ops.preferences.addon_enable(module='flip_fluids_addon') +bpy.ops.preferences.addon_enable(module="flip_fluids_addon") bpy.ops.flip_fluid_operators.complete_installation() diff --git a/infinigen/assets/fluid/fluid.py b/infinigen/assets/fluid/fluid.py index 2a75a42fb..b94594588 100644 --- a/infinigen/assets/fluid/fluid.py +++ b/infinigen/assets/fluid/fluid.py @@ -3,45 +3,45 @@ # Authors: Karhan Kayan +import logging import os -import sys -from itertools import chain from pathlib import Path -from numpy.random import uniform, normal, randint + +import bpy +import gin +import numpy as np from mathutils import Vector -import logging -from infinigen.core.util.math import clip_gaussian +from numpy.random import uniform +from infinigen.assets.fluid import duplication_geomod +from infinigen.assets.materials import ( + blackbody_shader, + lava, + smoke_material, + water, + waterfall_material, +) from infinigen.core.nodes.node_wrangler import ( Nodes, NodeWrangler, infer_input_socket, infer_output_socket, ) -import bpy -import numpy as np - - -from infinigen.assets.materials import water, lava - -from infinigen.assets.fluid import duplication_geomod -from infinigen.assets.materials import blackbody_shader, waterfall_material, smoke_material +from infinigen.core.util import blender as butil from infinigen.core.util.blender import deep_clone_obj from infinigen.core.util.logging import Timer -import gin - -from infinigen.core.util import blender as butil - logger = logging.getLogger(__name__) FLUID_INITIALIZED = False + def check_initalize_fluids(): if FLUID_INITIALIZED: return bpy.ops.flip_fluid_operators.complete_installation() + # find next available number for fluid cache folder def find_available_cache(cache_folder): Path(cache_folder).mkdir(parents=True, exist_ok=True) @@ -127,7 
+127,6 @@ def create_liquid_domain( settings.use_collision_border_right = False settings.use_collision_border_top = False - if fluid_type == "water": if waterfall: waterfall_material.apply(obj) @@ -231,17 +230,29 @@ def create_gas_domain( size=1, resolution=64, simulation_duration=100, - adaptive_domain = True, + adaptive_domain=True, output_folder=None, noise_scale=3, dissolve_speed=0, - flame_vorticity = None, - vorticity = None + flame_vorticity=None, + vorticity=None, ): check_initalize_fluids() bpy.ops.mesh.primitive_cube_add(size=size, location=location) obj = bpy.context.object - set_gas_domain_settings(obj, start_frame, fluid_type, resolution, simulation_duration, adaptive_domain, output_folder, noise_scale, dissolve_speed, flame_vorticity, vorticity) + set_gas_domain_settings( + obj, + start_frame, + fluid_type, + resolution, + simulation_duration, + adaptive_domain, + output_folder, + noise_scale, + dissolve_speed, + flame_vorticity, + vorticity, + ) return obj @@ -256,10 +267,10 @@ def set_gas_domain_settings( adaptive_domain=True, output_folder=None, noise_scale=2, - noise_strength = None, + noise_strength=None, dissolve_speed=0, - flame_vorticity = None, - vorticity = None, + flame_vorticity=None, + vorticity=None, ): check_initalize_fluids() if "Fluid" not in obj.modifiers: @@ -331,7 +342,6 @@ def set_gas_domain_settings( @gin.configurable def create_gas_flow(location, fluid_type="fire_and_smoke", size=0.1, fuel_amount=None): - check_initalize_fluids() bpy.ops.mesh.primitive_ico_sphere_add(radius=size, location=location) @@ -362,7 +372,6 @@ def create_gas_flow(location, fluid_type="fire_and_smoke", size=0.1, fuel_amount @gin.configurable def set_gas_flow_settings(obj, fluid_type="fire_and_smoke", fuel_amount=None): - check_initalize_fluids() if "Fluid" not in obj.modifiers: @@ -542,13 +551,13 @@ def set_obj_on_fire( obj.matrix_world.translation[1], obj.matrix_world.translation[2] + uniform(2, 3), ), - turbulence_noise, - turbulence_strength + turbulence_noise, + turbulence_strength, ) butil.select_none() fire_dfs(obj, fluid_type) - + # disabled for now # with Timer('Decimating and Realizing Instance'): # instanced_obj = get_instanced_part(obj) @@ -609,7 +618,6 @@ def generate_waterfall( simulation_duration=30, fluid_type="water", ): - check_initalize_fluids() seed = np.random.randint(10000) @@ -721,7 +729,6 @@ def import_obj_simulate( resolution=300, simulation_duration=50, ): - check_initalize_fluids() # assuming we are importing to the origin @@ -747,14 +754,15 @@ def import_obj_simulate( def find_root(node): - if node.parent == None: + if node.parent is None: return node return find_root(node.parent) @gin.configurable -def set_fire_to_assets(assets, start_frame, simulation_duration, output_folder=None, max_fire_assets=1): - +def set_fire_to_assets( + assets, start_frame, simulation_duration, output_folder=None, max_fire_assets=1 +): check_initalize_fluids() if len(assets) == 0: @@ -775,7 +783,6 @@ def set_fire_to_assets(assets, start_frame, simulation_duration, output_folder=N return for i in range(max_fire_assets): - closest = obj_dist[i] obj = closest[1] logger.info(f"Setting fire to {i=} {obj.name=}") @@ -792,15 +799,16 @@ def set_fire_to_assets(assets, start_frame, simulation_duration, output_folder=N dom_scale=1.1, ) + def duplicate_fluid_obj(obj): bpy.ops.mesh.primitive_plane_add() new_obj = bpy.context.object duplication_geomod.apply(new_obj=new_obj, old_obj=obj) return new_obj + @gin.configurable def estimate_smoke_domain(obj, start_frame, simulation_duration): - 
check_initalize_fluids() bpy.ops.object.select_all(action="DESELECT") @@ -829,7 +837,6 @@ def estimate_smoke_domain(obj, start_frame, simulation_duration): def estimate_liquid_domain( location, start_frame, simulation_duration, fluid_type="water" ): - check_initalize_fluids() source = create_liquid_flow( @@ -861,7 +868,6 @@ def estimate_liquid_domain( @gin.configurable def set_fluid_to_smoke(obj, start_frame, resolution=300, simulation_duration=30): - check_initalize_fluids() new_obj = duplicate_fluid_obj(obj) @@ -892,7 +898,7 @@ def fire_smoke_ground_truth(domain): data_dir = os.path.join(cache_dir, "data") contents = [f for f in os.listdir(data_dir)] filepath = os.path.join(data_dir, contents[0]) - files = [{"name": f, "name": f} for f in contents] + files = [{"name": f} for f in contents] bpy.ops.object.volume_import(filepath=filepath, directory=data_dir, files=files) vol = bpy.context.object vol.location += translation diff --git a/infinigen/assets/fluid/fluid_scenecomp_additions.py b/infinigen/assets/fluid/fluid_scenecomp_additions.py index abc713ecf..7b9383222 100644 --- a/infinigen/assets/fluid/fluid_scenecomp_additions.py +++ b/infinigen/assets/fluid/fluid_scenecomp_additions.py @@ -4,34 +4,28 @@ # Authors: Karhan Kayan -import bpy -import mathutils -from mathutils import Vector -import gin import numpy as np -from numpy.random import uniform, normal, randint - -from infinigen.core.util.pipeline import RandomStageExecutor -from infinigen.core.placement import placement, density +from numpy.random import randint, uniform +from infinigen.assets.fluid.asset_cache import FireCachingSystem from infinigen.assets.fluid.cached_factory_wrappers import ( + CachedBoulderFactory, + CachedBushFactory, + CachedCactusFactory, CachedTreeFactory, - CachedCreatureFactory, - CachedBoulderFactory, - CachedBushFactory, - CachedCactusFactory ) -from infinigen.assets.fluid.asset_cache import FireCachingSystem -from infinigen.assets.fluid.fluid import is_fire_in_scene -from infinigen.assets.fluid.flip_fluid import create_flip_fluid_domain, set_flip_fluid_domain, create_flip_fluid_inflow, set_flip_fluid_obstacle, get_objs_inside_domain, make_beach, make_river, make_tilted_river +from infinigen.core.placement import density, placement +from infinigen.core.util.pipeline import RandomStageExecutor -def cached_fire_scenecomp_options(p: RandomStageExecutor, terrain_mesh, params, tree_species_params): - land_domain = params.get('land_domain_tags') - underwater_domain = params.get('underwater_domain_tags') - nonliving_domain = params.get('nonliving_domain_tags') +def cached_fire_scenecomp_options( + p: RandomStageExecutor, terrain_mesh, params, tree_species_params +): + land_domain = params.get("land_domain_tags") + underwater_domain = params.get("underwater_domain_tags") + nonliving_domain = params.get("nonliving_domain_tags") - if params.get('cached_fire'): + if params.get("cached_fire"): fire_cache_system = FireCachingSystem() def add_cached_fire_trees(terrain_mesh): @@ -40,11 +34,18 @@ def add_cached_fire_trees(terrain_mesh): ind = np.random.choice(len(species)) s = species[ind] fac = CachedTreeFactory(s, coarse=True) - selection = density.placement_mask(params['select_scale'], tag=land_domain) - placement.scatter_placeholders_mesh(terrain_mesh, fac, selection=selection, altitude=-0.1, - overall_density=params['density'], distance_min=params['distance_min']) - p.run_stage('cached_fire_trees', add_cached_fire_trees, terrain_mesh) - + selection = density.placement_mask(params["select_scale"], tag=land_domain) + 
placement.scatter_placeholders_mesh( + terrain_mesh, + fac, + selection=selection, + altitude=-0.1, + overall_density=params["density"], + distance_min=params["distance_min"], + ) + + p.run_stage("cached_fire_trees", add_cached_fire_trees, terrain_mesh) + def add_cached_fire_bushes(terrain_mesh): n_bush_species = randint(1, params.get("max_bush_species", 2) + 1) spec_density = params.get("bush_density", uniform(0.03, 0.12)) / n_bush_species @@ -52,12 +53,22 @@ def add_cached_fire_bushes(terrain_mesh): ind = np.random.choice(len(species)) s = species[ind] fac = CachedBushFactory(s, coarse=True) - selection = density.placement_mask(uniform(0.015, 0.2), normal_thresh=0.3, - select_thresh=uniform(0.5, 0.6), tag=land_domain) - placement.scatter_placeholders_mesh(terrain_mesh, fac, altitude=-0.05, - overall_density=spec_density, distance_min=uniform(0.05, 0.3), - selection=selection) - p.run_stage('cached_fire_bushes', add_cached_fire_bushes, terrain_mesh) + selection = density.placement_mask( + uniform(0.015, 0.2), + normal_thresh=0.3, + select_thresh=uniform(0.5, 0.6), + tag=land_domain, + ) + placement.scatter_placeholders_mesh( + terrain_mesh, + fac, + altitude=-0.05, + overall_density=spec_density, + distance_min=uniform(0.05, 0.3), + selection=selection, + ) + + p.run_stage("cached_fire_bushes", add_cached_fire_bushes, terrain_mesh) def add_cached_fire_boulders(terrain_mesh): n_boulder_species = randint(1, params.get("max_boulder_species", 5)) @@ -65,11 +76,19 @@ def add_cached_fire_boulders(terrain_mesh): ind = np.random.choice(len(species)) s = species[ind] fac = CachedBoulderFactory(s, coarse=True) - selection = density.placement_mask(0.05, tag=nonliving_domain, select_thresh=uniform(0.55, 0.6)) - placement.scatter_placeholders_mesh(terrain_mesh, fac, - overall_density=params.get("boulder_density", uniform(.02, .05)) / n_boulder_species, - selection=selection, altitude=-0.25) - p.run_stage('cached_fire_boulders', add_cached_fire_boulders, terrain_mesh) + selection = density.placement_mask( + 0.05, tag=nonliving_domain, select_thresh=uniform(0.55, 0.6) + ) + placement.scatter_placeholders_mesh( + terrain_mesh, + fac, + overall_density=params.get("boulder_density", uniform(0.02, 0.05)) + / n_boulder_species, + selection=selection, + altitude=-0.25, + ) + + p.run_stage("cached_fire_boulders", add_cached_fire_boulders, terrain_mesh) def add_cached_fire_cactus(terrain_mesh): n_cactus_species = randint(2, params.get("max_cactus_species", 4)) @@ -77,8 +96,18 @@ def add_cached_fire_cactus(terrain_mesh): ind = np.random.choice(len(species)) s = species[ind] fac = CachedCactusFactory(s, coarse=True) - selection = density.placement_mask(scale=.05, tag=land_domain, select_thresh=0.57) - placement.scatter_placeholders_mesh(terrain_mesh, fac, altitude=-0.05, - overall_density=params.get('cactus_density', uniform(.02, .1) / n_cactus_species), - selection=selection, distance_min=1) - p.run_stage('cached_fire_cactus', add_cached_fire_cactus, terrain_mesh) + selection = density.placement_mask( + scale=0.05, tag=land_domain, select_thresh=0.57 + ) + placement.scatter_placeholders_mesh( + terrain_mesh, + fac, + altitude=-0.05, + overall_density=params.get( + "cactus_density", uniform(0.02, 0.1) / n_cactus_species + ), + selection=selection, + distance_min=1, + ) + + p.run_stage("cached_fire_cactus", add_cached_fire_cactus, terrain_mesh) diff --git a/infinigen/assets/fluid/generate.py b/infinigen/assets/fluid/generate.py index 2988dfc72..b4071c5a3 100644 --- a/infinigen/assets/fluid/generate.py +++ 
b/infinigen/assets/fluid/generate.py @@ -3,29 +3,18 @@ # Authors: Karhan Kayan -import gin import bpy -from infinigen.core.placement.factory import AssetFactory - - -from infinigen.core.util import blender as butil +import gin from infinigen.assets.fluid.fluid import ( - create_liquid_domain, - create_liquid_flow, + add_field, create_gas_domain, create_gas_flow, - add_field, -) -from infinigen.assets.fluid.flip_fluid import ( - create_flip_fluid_domain, - set_flip_fluid_domain, - create_flip_fluid_inflow, - set_flip_fluid_obstacle, - get_objs_inside_domain, + create_liquid_domain, + create_liquid_flow, ) - -from infinigen.core.util.logging import Timer +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.util import blender as butil @gin.configurable @@ -65,7 +54,7 @@ def finalize_assets(self, assets): dom = obj else: flow = obj - assert dom != None and flow != None + assert dom is not None and flow is not None bpy.context.view_layer.objects.active = dom print(self.fluid_type) @@ -89,7 +78,7 @@ def finalize_assets(self, assets): dom = obj elif obj.modifiers["Fluid"].fluid_type == "FLOW": obj.hide_render = True - assert dom != None + assert dom is not None cache_dir = cache_dirs[i] mod = dom.modifiers["Fluid"] diff --git a/infinigen/assets/fluid/liquid_particle_material.py b/infinigen/assets/fluid/liquid_particle_material.py index cde9d0cf0..4f4a0f579 100644 --- a/infinigen/assets/fluid/liquid_particle_material.py +++ b/infinigen/assets/fluid/liquid_particle_material.py @@ -3,34 +3,34 @@ # Authors: Karhan Kayan -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category +from numpy.random import normal + from infinigen.core import surface +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler + def liquid_particle_material(nw: NodeWrangler): # Code generated using version 2.5.1 of the node_transpiler - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, input_kwargs={ - 'Base Color': (1.0000, 1.0000, 1.0000, 1.0000), - 'Subsurface Color': (0.7147, 0.6062, 0.8000, 1.0000), - 'Specular': 0.0886, - 'Roughness': 0.2705 + (0.1 * normal()), - 'Sheen Tint': 0.0000, - 'Clearcoat Roughness': 0.0000, - 'IOR': 1.2000, - 'Transmission': 0.2818 + (0.1 * normal()) + "Base Color": (1.0000, 1.0000, 1.0000, 1.0000), + "Subsurface Color": (0.7147, 0.6062, 0.8000, 1.0000), + "Specular": 0.0886, + "Roughness": 0.2705 + (0.1 * normal()), + "Sheen Tint": 0.0000, + "Clearcoat Roughness": 0.0000, + "IOR": 1.2000, + "Transmission": 0.2818 + (0.1 * normal()), }, - attrs={'distribution': 'MULTI_GGX'}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': principled_bsdf}) + attrs={"distribution": "MULTI_GGX"}, + ) + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf} + ) def apply(obj, selection=None, **kwargs): - surface.add_material(obj, liquid_particle_material, selection=selection) \ No newline at end of file + surface.add_material(obj, liquid_particle_material, selection=selection) diff --git a/infinigen/assets/fluid/run_asset_cache.py b/infinigen/assets/fluid/run_asset_cache.py index 48cacac68..9dbfcc932 100644 --- a/infinigen/assets/fluid/run_asset_cache.py +++ b/infinigen/assets/fluid/run_asset_cache.py @@ -3,20 +3,19 @@ # Authors: Karhan Kayan -import time 
import argparse -import numpy as np -import sys +import importlib import os +import sys +import time + import gin -import importlib -from pathlib import Path -sys.path.append(os.getcwd()) +import numpy as np -from infinigen.core import init +sys.path.append(os.getcwd()) from infinigen.assets.fluid.asset_cache import FireCachingSystem -from infinigen.core import surface +from infinigen.core import init, surface if __name__ == "__main__": time.sleep(np.random.uniform(0, 3)) @@ -29,9 +28,11 @@ parser.add_argument("-r", "--resolution", type=int) parser.add_argument("--dissolve_speed", type=int, default=25) parser.add_argument("--dom_scale", type=float, default=1) - + args = init.parse_args_blender(parser) - init.apply_gin_configs(configs=[], overrides=[], configs_folder='infinigen_examples/configs_nature') + init.apply_gin_configs( + configs=[], overrides=[], config_folders=["infinigen_examples/configs_nature"] + ) surface.registry.initialize_from_gin() factory_name = args.asset @@ -45,5 +46,5 @@ if factory is None: raise ModuleNotFoundError(f"{factory_name} not Found.") - cache_system = FireCachingSystem(asset_folder = args.asset_folder, create=True) + cache_system = FireCachingSystem(asset_folder=args.asset_folder, create=True) cache_system.create_cached_assets(factory, args) diff --git a/infinigen/assets/fluid/run_tests.py b/infinigen/assets/fluid/run_tests.py index d8069b61d..7e7494b86 100644 --- a/infinigen/assets/fluid/run_tests.py +++ b/infinigen/assets/fluid/run_tests.py @@ -4,6 +4,5 @@ # Authors: Karhan Kayan import pytest -import bpy -pytest.main(['fluid/unit_tests.py', '-rP']) \ No newline at end of file +pytest.main(["fluid/unit_tests.py", "-rP"]) diff --git a/infinigen/assets/fruits/apple.py b/infinigen/assets/fruits/apple.py deleted file mode 100644 index 03387fbaf..000000000 --- a/infinigen/assets/fruits/apple.py +++ /dev/null @@ -1,99 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
- -# Authors: Yiming Zuo - - -import bpy -import mathutils -import numpy as np -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category, hsv2rgba -from infinigen.core import surface - -from infinigen.core.util.math import FixedSeed -from infinigen.core.util import blender as butil -from infinigen.core.placement.factory import AssetFactory - -from infinigen.assets.fruits.general_fruit import FruitFactoryGeneralFruit - -class FruitFactoryApple(FruitFactoryGeneralFruit): - def __init__(self, factory_seed, scale=1.0, coarse=False): - super().__init__(factory_seed, scale=scale, coarse=coarse) - self.name = 'apple' - - def sample_cross_section_params(self, surface_resolution=256): - return { - 'cross_section_name': "circle_cross_section", - 'cross_section_func_args': {}, - 'cross_section_input_args': {'random seed': uniform(-100, 100), - 'radius': normal(1.5, 0.05), - 'Resolution': surface_resolution}, - 'cross_section_output_args': {} - } - - def sample_shape_params(self, surface_resolution=256): - return { - 'shape_name': "shape_quadratic", - 'shape_func_args': {'radius_control_points': [(0.0, 0.0), (0.1227, 0.4281), (0.4705, 0.6625), (0.8886, 0.4156), (1.0, 0.0)],}, - 'shape_input_args': {'Profile Curve': 'noderef-crosssection-Geometry', - 'noise amount tilt': 0.0, - 'noise scale pos': 0.5, - 'noise amount pos': 0.1, - 'Resolution': surface_resolution, - 'Start': (uniform(-0.1, 0.1), uniform(-0.1, 0.1), uniform(-0.9, -1.1)), - 'End': (0.0, 0.0, 1.0)}, - 'shape_output_args': {} - } - - def sample_surface_params(self): - base_color = np.array((uniform(-0.05, 0.1), 0.999, 0.799)) - base_color[1] += normal(0.0, 0.05) - base_color[2] += normal(0.0, 0.05) - base_color_rgba = hsv2rgba(base_color) - - alt_color = np.copy(base_color) - alt_color[0] += normal(0.05, 0.02) - alt_color[1] += normal(0.0, 0.05) - alt_color[2] += normal(0.0, 0.05) - alt_color_rgba = hsv2rgba(alt_color) - - return { - 'surface_name': "apple_surface", - 'surface_func_args': {'color1': base_color_rgba, - 'color2': alt_color_rgba, - 'random_seed': uniform(-100, 100)}, - 'surface_input_args': {'Geometry': 'noderef-shapequadratic-Mesh', - 'spline parameter': 'noderef-shapequadratic-spline parameter', - 'spline tangent': 'noderef-shapequadratic-spline tangent', - 'distance to center': 'noderef-shapequadratic-radius to center'}, - 'surface_output_args': {}, - 'surface_resolution': 64, - 'scale_multiplier': 1.0 - } - - def sample_stem_params(self): - stem_color = np.array((0.10, 0.96, 0.13)) - stem_color[0] += normal(0.0, 0.02) - stem_color[1] += normal(0.0, 0.05) - stem_color[2] += normal(0.0, 0.05) - stem_color_rgba = hsv2rgba(stem_color) - - return { - 'stem_name': "basic_stem", - 'stem_func_args': {'stem_color': stem_color_rgba}, - 'stem_input_args': {'quad_mid': (uniform(-0.1, 0.1), uniform(-0.1, 0.1), uniform(0.15, 0.2)), - 'quad_end': (uniform(-0.2, 0.2), uniform(-0.2, 0.2), uniform(0.3, 0.4)), - 'quad_res': 32, - 'cross_radius': uniform(0.025, 0.035), - 'cross_res': 32, - 'Translation': (0.0, 0.0, 0.6)}, - 'stem_output_args': {} - } - - - - - diff --git a/infinigen/assets/fruits/blackberry.py b/infinigen/assets/fruits/blackberry.py deleted file mode 100644 index e2ba576e3..000000000 --- a/infinigen/assets/fruits/blackberry.py +++ /dev/null @@ -1,81 +0,0 @@ -# Copyright (c) Princeton University. 
-# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Yiming Zuo - - -import bpy -import mathutils -import numpy as np -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category, hsv2rgba -from infinigen.core import surface - -from infinigen.core.util.math import FixedSeed -from infinigen.core.util import blender as butil -from infinigen.core.placement.factory import AssetFactory - -from infinigen.assets.fruits.general_fruit import FruitFactoryGeneralFruit - -class FruitFactoryBlackberry(FruitFactoryGeneralFruit): - def __init__(self, factory_seed, scale=1.0, coarse=False): - super().__init__(factory_seed, scale=scale, coarse=coarse) - self.name = 'blackberry' - - def sample_cross_section_params(self, surface_resolution=256): - return { - 'cross_section_name': "circle_cross_section", - 'cross_section_func_args': {}, - 'cross_section_input_args': {'random seed': uniform(-100, 100), - 'radius': normal(0.9, 0.05), - 'Resolution': surface_resolution}, - 'cross_section_output_args': {} - } - - def sample_shape_params(self, surface_resolution=256): - return { - 'shape_name': "shape_quadratic", - 'shape_func_args': {'radius_control_points': [(0.0, 0.0), (0.0841, 0.3469), (uniform(0.4, 0.6), 0.8), (0.9432, 0.4781), (1.0, 0.0)]}, - 'shape_input_args': {'Profile Curve': 'noderef-crosssection-Geometry', - 'Start': (uniform(-0.5, 0.5), uniform(-0.5, 0.5), uniform(-0.5, -3.0)), - 'End': (0.0, 0.0, 1.0), - 'random seed tilt': uniform(-100, 100), - 'noise amount tilt': 1.0, - 'Resolution': surface_resolution}, - 'shape_output_args': {} - } - - def sample_surface_params(self): - berry_color = np.array((0.667, 0.254, 0.0)) - berry_color[0] += np.random.normal(0.0, 0.02) - berry_color[1] += np.random.normal(0.0, 0.05) - berry_color[2] += np.random.normal(0.0, 0.005) - berry_color_rgba = hsv2rgba(berry_color) - - return { - 'surface_name': "blackberry_surface", - 'surface_func_args': {'berry_color': berry_color_rgba}, - 'surface_input_args': {'Geometry': 'noderef-shapequadratic-Mesh', - 'spline parameter': 'noderef-shapequadratic-spline parameter'}, - 'surface_output_args': {}, - 'surface_resolution': 64, - 'scale_multiplier': 0.3 - } - - def sample_stem_params(self): - stem_color = np.array((0.179, 0.836, 0.318)) - stem_color[0] += np.random.normal(0.0, 0.02) - stem_color[1] += np.random.normal(0.0, 0.05) - stem_color[2] += np.random.normal(0.0, 0.05) - stem_color_rgba = hsv2rgba(stem_color) - - return { - 'stem_name': "basic_stem", - 'stem_func_args': {'stem_color': stem_color_rgba}, - 'stem_input_args': {'cross_radius': normal(0.075, 0.005), - 'quad_mid': (uniform(-0.1, 0.1), uniform(-0.1, 0.1), uniform(0.2, 0.3)), - 'quad_end': (uniform(-0.2, 0.2), uniform(-0.2, 0.2), uniform(0.4, 0.6))}, - 'stem_output_args': {} - } \ No newline at end of file diff --git a/infinigen/assets/fruits/coconutgreen.py b/infinigen/assets/fruits/coconutgreen.py deleted file mode 100644 index 568eecf7c..000000000 --- a/infinigen/assets/fruits/coconutgreen.py +++ /dev/null @@ -1,108 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
- -# Authors: Yiming Zuo - - -import bpy -import mathutils -import numpy as np -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category, hsv2rgba -from infinigen.core import surface - -from infinigen.core.util.math import FixedSeed -from infinigen.core.util import blender as butil -from infinigen.core.placement.factory import AssetFactory - -from infinigen.assets.fruits.general_fruit import FruitFactoryGeneralFruit - -class FruitFactoryCoconutgreen(FruitFactoryGeneralFruit): - def __init__(self, factory_seed, scale=1.0, coarse=False): - super().__init__(factory_seed, scale=scale, coarse=coarse) - self.name = 'coconut_green' - - def sample_cross_section_params(self, surface_resolution=256): - rad_small = uniform(0.65, 0.75) - - return { - 'cross_section_name': "coconut_cross_section", - 'cross_section_func_args': {'control_points': [(0.0, rad_small), (0.1, rad_small), (1.0, 0.76)]}, - 'cross_section_input_args': {'random seed': uniform(-100, 100), - 'radius': normal(1.8, 0.1), - 'noise scale': 20.0, - 'noise amount': 0.02, - 'Resolution': surface_resolution}, - 'cross_section_output_args': {'crosssection_coordinate': 'noderef-crosssection-curve parameters'} - } - - def sample_shape_params(self, surface_resolution=256): - return { - 'shape_name': "shape_quadratic", - 'shape_func_args': {'radius_control_points': [(0.0, 0.0), (0.0591, 0.3156), (uniform(0.2, 0.3), 0.6125), (uniform(0.6, 0.7), 0.675), (0.9636, 0.3625), (1.0, 0.0)]}, - 'shape_input_args': {'Profile Curve': 'noderef-crosssection-Geometry', - 'Start': (uniform(-0.1, 0.1), uniform(-0.1, 0.1), normal(-1.0, 0.1)), - 'End': (0.0, 0.0, 1.0), - 'Resolution': surface_resolution}, - 'shape_output_args': {'shape_coordinate': 'noderef-shapequadratic-spline parameter'} - } - - def sample_surface_params(self): - bottom_color = np.array((0.282, 0.951, 0.266)) - bottom_color[0] += np.random.normal(0.0, 0.02) - bottom_color[1] += np.random.normal(0.0, 0.05) - bottom_color[2] += np.random.normal(0.0, 0.05) - bottom_color_rgba = hsv2rgba(bottom_color) - - basic_color = np.array((0.235, 0.989, 0.413)) - basic_color[0] += np.random.normal(0.0, 0.025) - basic_color[1] += np.random.normal(0.0, 0.05) - basic_color[2] += np.random.normal(0.0, 0.05) - basic_color_rgba = hsv2rgba(basic_color) - - return { - 'surface_name': "coconutgreen_surface", - 'surface_func_args': {'basic_color': basic_color_rgba, 'bottom_color': bottom_color_rgba}, - 'surface_input_args': {'Geometry': 'noderef-shapequadratic-Mesh', - 'spline parameter': 'noderef-shapequadratic-spline parameter', - 'spline tangent': 'noderef-shapequadratic-spline tangent', - 'distance to center': 'noderef-shapequadratic-radius to center', - 'cross section paramater': 'noderef-crosssection-curve parameters', - }, - 'surface_output_args': {}, - 'surface_resolution': 256, - 'scale_multiplier': 1.5 - } - - def sample_stem_params(self): - bottom_color = np.array((0.282, 0.951, 0.266)) - bottom_color[0] += np.random.normal(0.0, 0.02) - bottom_color[1] += np.random.normal(0.0, 0.05) - bottom_color[2] += np.random.normal(0.0, 0.05) - bottom_color_rgba = hsv2rgba(bottom_color) - - calyx_edge_color = np.array((0.039, 0.96, 0.037)) - calyx_edge_color[0] += np.random.normal(0.0, 0.02) - calyx_edge_color[1] += np.random.normal(0.0, 0.05) - calyx_edge_color[2] += np.random.normal(0.0, 0.05) - calyx_edge_color_rgba = hsv2rgba(calyx_edge_color) - - stem_x = 
uniform(-0.4, 0.4) - stem_y = uniform(-0.4, 0.4) - - return { - 'stem_name': "coconut_stem", - 'stem_func_args': {'basic_color': bottom_color_rgba, 'edge_color': calyx_edge_color_rgba}, - 'stem_input_args': { - 'Target': 'noderef-fruitsurface-Geometry', - 'radius': 0.001, - 'calyx width': uniform(0.2, 0.25), - 'Count': randint(4, 6), - 'stem_radius': normal(0.04, 0.005), - 'stem_mid': (stem_x, stem_y, 0.0), - 'stem_end': (2*stem_x, 2*stem_y, uniform(0.3, 0.5)), - }, - 'stem_output_args': {'distance to edge': 'noderef-stem-distance to edge'} - } \ No newline at end of file diff --git a/infinigen/assets/fruits/coconuthairy.py b/infinigen/assets/fruits/coconuthairy.py deleted file mode 100644 index e4e900419..000000000 --- a/infinigen/assets/fruits/coconuthairy.py +++ /dev/null @@ -1,79 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Yiming Zuo - - -import bpy -import mathutils -import numpy as np -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category, hsv2rgba -from infinigen.core import surface - -from infinigen.core.util.math import FixedSeed -from infinigen.core.util import blender as butil -from infinigen.core.placement.factory import AssetFactory - -from infinigen.assets.fruits.general_fruit import FruitFactoryGeneralFruit - -class FruitFactoryCoconuthairy(FruitFactoryGeneralFruit): - def __init__(self, factory_seed, scale=1.0, coarse=False): - super().__init__(factory_seed, scale=scale, coarse=coarse) - self.name = 'coconut_hairy' - - def sample_cross_section_params(self, surface_resolution=256): - rad_small = uniform(0.65, 0.75) - - return { - 'cross_section_name': "coconut_cross_section", - 'cross_section_func_args': {'control_points': [(0.0, rad_small), (0.1, rad_small), (1.0, 0.76)]}, - 'cross_section_input_args': {'random seed': uniform(-100, 100), - 'radius': normal(1.8, 0.1), - 'noise scale': 20.0, - 'noise amount': 0.02, - 'Resolution': surface_resolution}, - 'cross_section_output_args': {} - } - - def sample_shape_params(self, surface_resolution=256): - return { - 'shape_name': "shape_quadratic", - 'shape_func_args': {'radius_control_points': [(0.0, 0.0), (0.0591, 0.3156), (uniform(0.2, 0.3), 0.6125), (uniform(0.6, 0.7), 0.675), (0.9636, 0.3625), (1.0, 0.0)]}, - 'shape_input_args': {'Profile Curve': 'noderef-crosssection-Geometry', - 'Start': (uniform(-0.1, 0.1), uniform(-0.1, 0.1), normal(-1.0, 0.1)), - 'End': (0.0, 0.0, 1.0), - 'Resolution': surface_resolution}, - 'shape_output_args': {} - } - - def sample_surface_params(self): - basic_color = np.array((0.05, 0.97, 0.6)) - basic_color[0] += np.random.normal(0.0, 0.01) - basic_color[1] += np.random.normal(0.0, 0.05) - basic_color[2] += np.random.normal(0.0, 0.1) - basic_color_rgba = hsv2rgba(basic_color) - - return { - 'surface_name': "coconuthairy_surface", - 'surface_func_args': {'basic_color': basic_color_rgba}, - 'surface_input_args': {'Geometry': 'noderef-shapequadratic-Mesh', - 'spline parameter': 'noderef-shapequadratic-spline parameter'}, - 'surface_output_args': {}, - 'surface_resolution': 256, - 'scale_multiplier': 1.5 - } - - def sample_stem_params(self): - return { - 'stem_name': "empty_stem", - 'stem_func_args': {}, - 'stem_input_args': {}, - 'stem_output_args': {} - } - - - - diff --git 
a/infinigen/assets/fruits/compositional_fruit.py b/infinigen/assets/fruits/compositional_fruit.py deleted file mode 100644 index d72fa2a9f..000000000 --- a/infinigen/assets/fruits/compositional_fruit.py +++ /dev/null @@ -1,68 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Yiming Zuo - - -import bpy -import mathutils -import numpy as np -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category, hsv2rgba -from infinigen.core import surface - -from infinigen.core.util.math import FixedSeed -from infinigen.core.util import blender as butil -from infinigen.core.placement.factory import AssetFactory - -from infinigen.assets.fruits.general_fruit import FruitFactoryGeneralFruit -from infinigen.assets.fruits.apple import FruitFactoryApple -from infinigen.assets.fruits.pineapple import FruitFactoryPineapple -from infinigen.assets.fruits.starfruit import FruitFactoryStarfruit -from infinigen.assets.fruits.strawberry import FruitFactoryStrawberry -from infinigen.assets.fruits.blackberry import FruitFactoryBlackberry -from infinigen.assets.fruits.coconuthairy import FruitFactoryCoconuthairy -from infinigen.assets.fruits.coconutgreen import FruitFactoryCoconutgreen -from infinigen.assets.fruits.durian import FruitFactoryDurian - -fruit_names = {'Apple': FruitFactoryApple, - 'Pineapple': FruitFactoryPineapple, - 'Starfruit': FruitFactoryStarfruit, - 'Strawberry': FruitFactoryStrawberry, - 'Blackberry': FruitFactoryBlackberry, - 'Coconuthairy': FruitFactoryCoconuthairy, - 'Coconutgreen': FruitFactoryCoconutgreen, - 'Durian': FruitFactoryDurian, - } - -class FruitFactoryCompositional(FruitFactoryGeneralFruit): - def __init__(self, factory_seed, scale=1.0, coarse=False): - super(FruitFactoryCompositional, self).__init__(factory_seed, scale=scale, coarse=coarse) - - self.name = 'compositional' - self.factories = {} - - for name, factory in fruit_names.items(): - self.factories[name] = factory(factory_seed, scale, coarse) - - with FixedSeed(factory_seed): - self.cross_section_source = np.random.choice(list(fruit_names.keys())) - self.shape_source = np.random.choice(list(fruit_names.keys())) - self.surface_source = np.random.choice(list(fruit_names.keys())) - self.stem_source = np.random.choice(list(fruit_names.keys())) - - def sample_cross_section_params(self, surface_resolution=256): - return self.factories[self.cross_section_source].sample_cross_section_params(surface_resolution) - - def sample_shape_params(self, surface_resolution=256): - return self.factories[self.shape_source].sample_shape_params(surface_resolution) - - def sample_surface_params(self): - return self.factories[self.surface_source].sample_surface_params() - - def sample_stem_params(self): - return self.factories[self.stem_source].sample_stem_params() - - diff --git a/infinigen/assets/fruits/cross_section_lib.py b/infinigen/assets/fruits/cross_section_lib.py deleted file mode 100644 index def78a9e6..000000000 --- a/infinigen/assets/fruits/cross_section_lib.py +++ /dev/null @@ -1,257 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
- -# Authors: Yiming Zuo - - -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface - -from infinigen.assets.fruits.fruit_utils import nodegroup_rot_semmetry - -@node_utils.to_nodegroup('nodegroup_circle_cross_section', singleton=False, type='GeometryNodeTree') -def nodegroup_circle_cross_section(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'random seed', 0.0), - ('NodeSocketFloat', 'noise scale', 0.5), - ('NodeSocketFloat', 'noise amount', 0.1), - ('NodeSocketInt', 'Resolution', 256), - ('NodeSocketFloat', 'radius', 0.0)]) - - value = nw.new_node(Nodes.Value) - value.outputs[0].default_value = 0.5 - - curve_circle = nw.new_node(Nodes.CurveCircle, - input_kwargs={'Resolution': group_input.outputs["Resolution"]}) - - position = nw.new_node(Nodes.InputPosition) - - add = nw.new_node(Nodes.VectorMath, - input_kwargs={0: position, 1: group_input.outputs["random seed"]}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': add.outputs["Vector"], 'Scale': group_input.outputs["noise scale"]}) - - subtract = nw.new_node(Nodes.VectorMath, - input_kwargs={0: noise_texture.outputs["Color"], 1: (0.5, 0.5, 0.5)}, - attrs={'operation': 'SUBTRACT'}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': subtract.outputs["Vector"]}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': separate_xyz.outputs["X"], 'Y': separate_xyz.outputs["Y"]}) - - scale = nw.new_node(Nodes.VectorMath, - input_kwargs={0: combine_xyz, 'Scale': group_input.outputs["noise amount"]}, - attrs={'operation': 'SCALE'}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': curve_circle.outputs["Curve"], 'Offset': scale.outputs["Vector"]}) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': set_position, 'Scale': group_input.outputs["radius"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': transform, 'curve parameters': value}) - -@node_utils.to_nodegroup('nodegroup_star_cross_section', singleton=False, type='GeometryNodeTree') -def nodegroup_star_cross_section(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'random seed', 0.0), - ('NodeSocketFloat', 'noise scale', 2.4), - ('NodeSocketFloat', 'noise amount', 0.2), - ('NodeSocketInt', 'Resolution', 256), - ('NodeSocketFloat', 'radius', 1.0)]) - - curve_circle = nw.new_node(Nodes.CurveCircle, - input_kwargs={'Resolution': group_input.outputs["Resolution"]}) - - spline_parameter = nw.new_node(Nodes.SplineParameter) - - rotsemmetry = nw.new_node(nodegroup_rot_semmetry().name, - input_kwargs={'N': 5, 'spline parameter': spline_parameter.outputs["Factor"]}) - - capture_attribute = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': curve_circle.outputs["Curve"], 2: rotsemmetry.outputs["Result"]}) - - float_curve = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': rotsemmetry.outputs["Result"]}) - node_utils.assign_curve(float_curve.mapping.curves[0], [(0.0, 0.4156), (0.65, 0.8125), (1.0, 1.0)]) - - position = nw.new_node(Nodes.InputPosition) - - scale = 
nw.new_node(Nodes.VectorMath, - input_kwargs={0: position, 'Scale': float_curve}, - attrs={'operation': 'SCALE'}) - - add = nw.new_node(Nodes.VectorMath, - input_kwargs={0: position, 1: group_input.outputs["random seed"]}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': add.outputs["Vector"], 'Scale': group_input.outputs["noise scale"]}) - - subtract = nw.new_node(Nodes.VectorMath, - input_kwargs={0: noise_texture.outputs["Color"], 1: (0.5, 0.5, 0.5)}, - attrs={'operation': 'SUBTRACT'}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': subtract.outputs["Vector"]}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': separate_xyz.outputs["X"], 'Y': separate_xyz.outputs["Y"]}) - - scale_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: combine_xyz, 'Scale': group_input.outputs["noise amount"]}, - attrs={'operation': 'SCALE'}) - - add_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: scale.outputs["Vector"], 1: scale_1.outputs["Vector"]}) - - scale_2 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: add_1.outputs["Vector"], 'Scale': group_input.outputs["radius"]}, - attrs={'operation': 'SCALE'}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': capture_attribute.outputs["Geometry"], 'Position': scale_2.outputs["Vector"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_position, 'curve parameters': capture_attribute.outputs[2]}) - -@node_utils.to_nodegroup('nodegroup_cylax_cross_section', singleton=False, type='GeometryNodeTree') -def nodegroup_cylax_cross_section(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketInt', 'fork number', 10), - ('NodeSocketFloat', 'bottom radius', 0.0), - ('NodeSocketFloatDistance', 'noise random seed', 0.0), - ('NodeSocketFloat', 'noise amount', 0.4), - ('NodeSocketFloatDistance', 'radius', 1.0)]) - - curve_circle = nw.new_node(Nodes.CurveCircle, - input_kwargs={'Resolution': 256, 'Radius': group_input.outputs["radius"]}) - - position = nw.new_node(Nodes.InputPosition) - - spline_parameter = nw.new_node(Nodes.SplineParameter) - - rotsemmetry = nw.new_node(nodegroup_rot_semmetry().name, - input_kwargs={'N': group_input.outputs["fork number"], 'spline parameter': spline_parameter.outputs["Factor"]}) - - float_curve = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': rotsemmetry.outputs["Result"]}) - node_utils.assign_curve(float_curve.mapping.curves[0], [(0.0, 0.0), (0.65, 0.8125), (1.0, 1.0)]) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': float_curve, 3: group_input.outputs["bottom radius"]}) - - scale = nw.new_node(Nodes.VectorMath, - input_kwargs={0: position, 'Scale': map_range_1.outputs["Result"]}, - attrs={'operation': 'SCALE'}) - - add = nw.new_node(Nodes.VectorMath, - input_kwargs={0: position, 1: group_input.outputs["noise random seed"]}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': add.outputs["Vector"], 'Scale': 2.4}) - - value = nw.new_node(Nodes.Value) - value.outputs[0].default_value = 0.5 - - subtract = nw.new_node(Nodes.VectorMath, - input_kwargs={0: noise_texture.outputs["Color"], 1: value}, - attrs={'operation': 'SUBTRACT'}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': subtract.outputs["Vector"]}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': separate_xyz.outputs["X"], 'Y': 
separate_xyz.outputs["Y"]}) - - scale_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: combine_xyz, 'Scale': group_input.outputs["noise amount"]}, - attrs={'operation': 'SCALE'}) - - add_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: scale.outputs["Vector"], 1: scale_1.outputs["Vector"]}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': curve_circle.outputs["Curve"], 'Position': add_1.outputs["Vector"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_position}) - -@node_utils.to_nodegroup('nodegroup_coconut_cross_section', singleton=False, type='GeometryNodeTree') -def nodegroup_coconut_cross_section(nw: NodeWrangler, control_points=[(0.0, 0.7156), (0.1023, 0.7156), (1.0, 0.7594)]): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'random seed', 0.0), - ('NodeSocketFloat', 'noise scale', 2.4), - ('NodeSocketFloat', 'noise amount', 0.2), - ('NodeSocketInt', 'Resolution', 256), - ('NodeSocketFloat', 'radius', 1.0)]) - - curve_circle = nw.new_node(Nodes.CurveCircle, - input_kwargs={'Resolution': group_input.outputs["Resolution"]}) - - spline_parameter = nw.new_node(Nodes.SplineParameter) - - rot_semmetry = nw.new_node(nodegroup_rot_semmetry().name, - input_kwargs={'N': 3, 'spline parameter': spline_parameter.outputs["Factor"]}) - - capture_attribute = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': curve_circle.outputs["Curve"], 2: rot_semmetry}) - - position = nw.new_node(Nodes.InputPosition) - - float_curve_1 = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': rot_semmetry}) - node_utils.assign_curve(float_curve_1.mapping.curves[0], control_points) - - scale = nw.new_node(Nodes.VectorMath, - input_kwargs={0: position, 'Scale': float_curve_1}, - attrs={'operation': 'SCALE'}) - - add = nw.new_node(Nodes.VectorMath, - input_kwargs={0: position, 1: group_input.outputs["random seed"]}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': add.outputs["Vector"], 'Scale': group_input.outputs["noise scale"]}) - - subtract = nw.new_node(Nodes.VectorMath, - input_kwargs={0: noise_texture.outputs["Color"], 1: (0.5, 0.5, 0.5)}, - attrs={'operation': 'SUBTRACT'}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': subtract.outputs["Vector"]}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': separate_xyz.outputs["X"], 'Y': separate_xyz.outputs["Y"]}) - - scale_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: combine_xyz, 'Scale': group_input.outputs["noise amount"]}, - attrs={'operation': 'SCALE'}) - - add_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: scale.outputs["Vector"], 1: scale_1.outputs["Vector"]}) - - scale_2 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: add_1.outputs["Vector"], 'Scale': group_input.outputs["radius"]}, - attrs={'operation': 'SCALE'}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': capture_attribute.outputs["Geometry"], 'Position': scale_2.outputs["Vector"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_position, 'curve parameters': capture_attribute.outputs[2]}) - diff --git a/infinigen/assets/fruits/durian.py b/infinigen/assets/fruits/durian.py deleted file mode 100644 index 79aab98b5..000000000 --- a/infinigen/assets/fruits/durian.py +++ /dev/null @@ -1,103 +0,0 @@ -# Copyright (c) Princeton University. 
-# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Yiming Zuo - - -import bpy -import mathutils - -import gin -import numpy as np -from numpy.random import uniform, normal, randint - -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category, hsv2rgba -from infinigen.core import surface - -from infinigen.core.util.math import FixedSeed -from infinigen.core.util import blender as butil -from infinigen.core.placement.factory import AssetFactory - -from infinigen.assets.fruits.general_fruit import FruitFactoryGeneralFruit - -@gin.register -class FruitFactoryDurian(FruitFactoryGeneralFruit): - def __init__(self, factory_seed, scale=1.0, coarse=False): - super().__init__(factory_seed, scale=scale, coarse=coarse) - self.name = 'durian' - - def sample_cross_section_params(self, surface_resolution=256): - return { - 'cross_section_name': "circle_cross_section", - 'cross_section_func_args': {}, - 'cross_section_input_args': {'random seed': uniform(-100, 100), - 'radius': normal(1.2, 0.03), - 'Resolution': surface_resolution}, - 'cross_section_output_args': {} - } - - def sample_shape_params(self, surface_resolution=256): - return { - 'shape_name': "shape_quadratic", - 'shape_func_args': {'radius_control_points': [(0.0, 0.0031), (0.0841, 0.3469), (uniform(0.4, 0.6), 0.8), (0.8886, 0.6094), (1.0, 0.0)]}, - 'shape_input_args': {'Profile Curve': 'noderef-crosssection-Geometry', - 'noise amount tilt': 5.0, - 'noise scale tilt': 0.5, - 'random seed tilt': uniform(-100, 100), - 'Resolution': surface_resolution, - 'Start': (uniform(-0.3, 0.3), uniform(-0.3, 0.3), uniform(-0.5, -1.5)), - 'End': (0.0, 0.0, 1.0)}, - 'shape_output_args': {} - } - - def sample_surface_params(self): - base_color = np.array((0.15, 0.74, 0.32)) - base_color[0] += np.random.normal(0.0, 0.02) - base_color[1] += np.random.normal(0.0, 0.05) - base_color[2] += np.random.normal(0.0, 0.05) - base_color_rgba = hsv2rgba(base_color) - - peak_color = np.array((0.09, 0.87, 0.24)) - peak_color[0] += np.random.normal(0.0, 0.025) - peak_color[1] += np.random.normal(0.0, 0.05) - peak_color[2] += np.random.normal(0.0, 0.05) - peak_color_rgba = hsv2rgba(peak_color) - - return { - 'surface_name': "durian_surface", - 'surface_func_args': {'thorn_control_points': [(0.0, 0.0), (0.7318, 0.4344), (1.0, 1.0)], - 'peak_color': peak_color_rgba, - 'base_color': base_color_rgba - }, - 'surface_input_args': { - 'Geometry': 'noderef-shapequadratic-Mesh', - 'spline parameter': 'noderef-shapequadratic-spline parameter', - 'distance Min': uniform(0.07, 0.13), - 'displacement': uniform(0.25, 0.35), - 'noise amount': 0.2 - }, - 'surface_output_args': {'durian thorn coordiante': 'noderef-fruitsurface-distance to center'}, - 'surface_resolution': 512, - 'scale_multiplier': 2.0 - } - - def sample_stem_params(self): - stem_color = np.array((0.10, 0.96, 0.13)) - stem_color[0] += np.random.normal(0.0, 0.02) - stem_color[1] += np.random.normal(0.0, 0.05) - stem_color[2] += np.random.normal(0.0, 0.05) - stem_color_rgba = hsv2rgba(stem_color) - - return { - 'stem_name': "basic_stem", - 'stem_func_args': {'stem_color': stem_color_rgba}, - 'stem_input_args': { - 'cross_radius': uniform(0.07, 0.09), - 'quad_mid': (uniform(-0.1, 0.1), uniform(-0.1, 0.1), uniform(0.15, 0.2)), - 'quad_end': (uniform(-0.2, 0.2), uniform(-0.2, 0.2), uniform(0.3, 0.4)), - 'Translation': (0.0, 0.0, 
0.9) - }, - 'stem_output_args': {} - } \ No newline at end of file diff --git a/infinigen/assets/fruits/fruit_utils.py b/infinigen/assets/fruits/fruit_utils.py deleted file mode 100644 index 02ceb3710..000000000 --- a/infinigen/assets/fruits/fruit_utils.py +++ /dev/null @@ -1,696 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Yiming Zuo - - -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface - -@node_utils.to_nodegroup('nodegroup_random_rotation_scale', singleton=False, type='GeometryNodeTree') -def nodegroup_random_rotation_scale(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'random seed', 0.0), - ('NodeSocketFloat', 'noise scale', 10.0), - ('NodeSocketVector', 'rot mean', (0.0, 0.0, 0.0)), - ('NodeSocketFloat', 'rot std', 1.0), - ('NodeSocketFloat', 'scale mean', 0.35), - ('NodeSocketFloat', 'scale std', 0.1)]) - - position_3 = nw.new_node(Nodes.InputPosition) - - add = nw.new_node(Nodes.VectorMath, - input_kwargs={0: position_3, 1: group_input.outputs["random seed"]}) - - noise_texture_2 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': add.outputs["Vector"], 'Scale': group_input.outputs["noise scale"]}) - - value_2 = nw.new_node(Nodes.Value) - value_2.outputs[0].default_value = 0.5 - - subtract = nw.new_node(Nodes.VectorMath, - input_kwargs={0: noise_texture_2.outputs["Color"], 1: value_2}, - attrs={'operation': 'SUBTRACT'}) - - separate_xyz_2 = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': subtract.outputs["Vector"]}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_2.outputs["X"], 1: group_input.outputs["rot std"]}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'Z': multiply}) - - add_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["rot mean"], 1: combine_xyz}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_2.outputs["Y"], 1: group_input.outputs["scale std"]}, - attrs={'operation': 'MULTIPLY'}) - - add_2 = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_1, 1: group_input.outputs["scale mean"]}, - attrs={'use_clamp': True}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Vector': add_1.outputs["Vector"], 'Value': add_2}) - -@node_utils.to_nodegroup('nodegroup_surface_bump', singleton=False, type='GeometryNodeTree') -def nodegroup_surface_bump(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketFloat', 'Displacement', 0.02), - ('NodeSocketFloat', 'Scale', 50.0)]) - - normal = nw.new_node(Nodes.InputNormal) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Scale': group_input.outputs["Scale"]}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: noise_texture.outputs["Fac"]}, - attrs={'operation': 'SUBTRACT'}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: subtract, 1: group_input.outputs["Displacement"]}, - attrs={'operation': 'MULTIPLY'}) - - 
multiply_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: normal, 1: multiply}, - attrs={'operation': 'MULTIPLY'}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 'Offset': multiply_1.outputs["Vector"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_position}) - -@node_utils.to_nodegroup('nodegroup_point_on_mesh', singleton=False, type='GeometryNodeTree') -def nodegroup_point_on_mesh(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Mesh', None), - ('NodeSocketFloat', 'spline parameter', 0.0), - ('NodeSocketFloatDistance', 'Distance Min', 0.2), - ('NodeSocketFloat', 'parameter max', 1.0), - ('NodeSocketFloat', 'parameter min', 0.0), - ('NodeSocketFloat', 'noise amount', 1.0), - ('NodeSocketFloat', 'noise scale', 5.0)]) - - distribute_points_on_faces = nw.new_node(Nodes.DistributePointsOnFaces, - input_kwargs={'Mesh': group_input.outputs["Mesh"], 'Distance Min': group_input.outputs["Distance Min"], 'Density Max': 10000.0}, - attrs={'distribute_method': 'POISSON'}) - - greater_than = nw.new_node(Nodes.Compare, - input_kwargs={0: group_input.outputs["spline parameter"], 1: group_input.outputs["parameter min"]}) - - less_than = nw.new_node(Nodes.Compare, - input_kwargs={0: group_input.outputs["spline parameter"], 1: group_input.outputs["parameter max"]}, - attrs={'operation': 'LESS_THAN'}) - - nand = nw.new_node(Nodes.BooleanMath, - input_kwargs={0: greater_than, 1: less_than}, - attrs={'operation': 'NAND'}) - - delete_geometry = nw.new_node(Nodes.DeleteGeometry, - input_kwargs={'Geometry': distribute_points_on_faces.outputs["Points"], 'Selection': nand}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Scale': group_input.outputs["noise scale"]}) - - value = nw.new_node(Nodes.Value) - value.outputs[0].default_value = 0.5 - - subtract = nw.new_node(Nodes.VectorMath, - input_kwargs={0: noise_texture.outputs["Color"], 1: value}, - attrs={'operation': 'SUBTRACT'}) - - scale = nw.new_node(Nodes.VectorMath, - input_kwargs={0: subtract.outputs["Vector"], 'Scale': group_input.outputs["noise amount"]}, - attrs={'operation': 'SCALE'}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': delete_geometry, 'Offset': scale.outputs["Vector"]}) - - geometry_proximity = nw.new_node(Nodes.Proximity, - input_kwargs={'Target': group_input.outputs["Mesh"]}) - - set_position_1 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': set_position, 'Position': geometry_proximity.outputs["Position"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_position_1, 'Rotation': distribute_points_on_faces.outputs["Rotation"]}) - -@node_utils.to_nodegroup('nodegroup_instance_on_points', singleton=False, type='GeometryNodeTree') -def nodegroup_instance_on_points(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVectorEuler', 'rotation base', (0.0, 0.0, 0.0)), - ('NodeSocketVectorEuler', 'rotation delta', (0.0, 0.0, 0.0)), - ('NodeSocketVectorTranslation', 'translation', (0.0, 0.0, 0.0)), - ('NodeSocketFloat', 'scale', 0.0), - ('NodeSocketGeometry', 'Points', None), - ('NodeSocketGeometry', 'Instance', None)]) - - rotate_euler_1 = nw.new_node(Nodes.RotateEuler, - input_kwargs={'Rotation': group_input.outputs["rotation 
base"], 'Rotate By': group_input.outputs["rotation delta"]}, - attrs={'space': 'LOCAL'}) - - instance_on_points_1 = nw.new_node(Nodes.InstanceOnPoints, - input_kwargs={'Points': group_input.outputs["Points"], 'Instance': group_input.outputs["Instance"], 'Rotation': rotate_euler_1, 'Scale': group_input.outputs["scale"]}) - - translate_instances = nw.new_node(Nodes.TranslateInstances, - input_kwargs={'Instances': instance_on_points_1, 'Translation': group_input.outputs["translation"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Instances': translate_instances}) - -@node_utils.to_nodegroup('nodegroup_shape_quadratic', singleton=False, type='GeometryNodeTree') -def nodegroup_shape_quadratic(nw: NodeWrangler, radius_control_points=[(0.0, 0.5), (1.0, 0.5)]): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Profile Curve', None), - ('NodeSocketFloat', 'random seed tilt', 0.5), - ('NodeSocketFloat', 'noise scale tilt', 0.5), - ('NodeSocketFloat', 'noise amount tilt', 0.0), - ('NodeSocketFloat', 'random seed pos', 0.0), - ('NodeSocketFloat', 'noise scale pos', 0.0), - ('NodeSocketFloat', 'noise amount pos', 0.0), - ('NodeSocketIntUnsigned', 'Resolution', 256), - ('NodeSocketVectorTranslation', 'Start', (0.0, 0.0, -1.5)), - ('NodeSocketVectorTranslation', 'Middle', (0.0, 0.0, 0.0)), - ('NodeSocketVectorTranslation', 'End', (0.0, 0.0, 1.5))]) - - quadratic_bezier = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Resolution': group_input.outputs["Resolution"], 'Start': group_input.outputs["Start"], 'Middle': group_input.outputs["Middle"], 'End': group_input.outputs["End"]}) - - spline_parameter_2 = nw.new_node(Nodes.SplineParameter) - - capture_attribute = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': quadratic_bezier, 2: spline_parameter_2.outputs["Factor"]}) - - curve_tangent = nw.new_node(Nodes.CurveTangent) - - capture_attribute_1 = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': capture_attribute.outputs["Geometry"], 1: curve_tangent}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - position = nw.new_node(Nodes.InputPosition) - - add = nw.new_node(Nodes.VectorMath, - input_kwargs={0: position, 1: group_input.outputs["random seed pos"]}) - - noise_texture_3 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': add.outputs["Vector"], 'Scale': group_input.outputs["noise scale pos"]}) - - value_1 = nw.new_node(Nodes.Value) - value_1.outputs[0].default_value = 0.5 - - subtract = nw.new_node(Nodes.VectorMath, - input_kwargs={0: noise_texture_3.outputs["Color"], 1: value_1}, - attrs={'operation': 'SUBTRACT'}) - - scale = nw.new_node(Nodes.VectorMath, - input_kwargs={0: subtract.outputs["Vector"], 'Scale': spline_parameter_2.outputs["Factor"]}, - attrs={'operation': 'SCALE'}) - - scale_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: scale.outputs["Vector"], 'Scale': group_input.outputs["noise amount pos"]}, - attrs={'operation': 'SCALE'}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': capture_attribute_1.outputs["Geometry"], 'Offset': scale_1.outputs["Vector"]}) - - spline_parameter = nw.new_node(Nodes.SplineParameter) - - add_1 = nw.new_node(Nodes.Math, - input_kwargs={0: spline_parameter.outputs["Factor"], 1: group_input.outputs["random seed tilt"]}) - - noise_texture_1 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'W': add_1, 'Scale': group_input.outputs["noise scale tilt"]}, - 
attrs={'noise_dimensions': '1D'}) - - subtract_1 = nw.new_node(Nodes.Math, - input_kwargs={0: noise_texture_1.outputs["Fac"]}, - attrs={'operation': 'SUBTRACT'}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: subtract_1, 1: group_input.outputs["noise amount tilt"]}, - attrs={'operation': 'MULTIPLY'}) - - set_curve_tilt = nw.new_node(Nodes.SetCurveTilt, - input_kwargs={'Curve': set_position, 'Tilt': multiply}) - - spline_parameter_1 = nw.new_node(Nodes.SplineParameter) - - float_curve_2 = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': spline_parameter_1.outputs["Factor"]}) - node_utils.assign_curve(float_curve_2.mapping.curves[0], radius_control_points) - - set_curve_radius = nw.new_node(Nodes.SetCurveRadius, - input_kwargs={'Curve': set_curve_tilt, 'Radius': float_curve_2}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': set_curve_radius, 'Profile Curve': group_input.outputs["Profile Curve"], 'Fill Caps': True}) - - curve_to_points = nw.new_node(Nodes.CurveToPoints, - input_kwargs={'Curve': set_position}, - attrs={'mode': 'EVALUATED'}) - - geometry_proximity = nw.new_node(Nodes.Proximity, - input_kwargs={'Target': curve_to_points.outputs["Points"]}, - attrs={'target_element': 'POINTS'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Mesh': curve_to_mesh, 'spline parameter': capture_attribute.outputs[2], 'spline tangent': capture_attribute_1.outputs["Attribute"], 'radius to center': geometry_proximity.outputs["Distance"]}) - -@node_utils.to_nodegroup('nodegroup_add_dent', singleton=False, type='GeometryNodeTree') -def nodegroup_add_dent(nw: NodeWrangler, dent_control_points): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketFloat', 'spline parameter', 0.0), - ('NodeSocketVector', 'spline tangent', (0.0, 0.0, 0.0)), - ('NodeSocketFloat', 'distance to center', 0.0), - ('NodeSocketBool', 'bottom', False), - ('NodeSocketFloat', 'intensity', 1.0), - ('NodeSocketFloat', 'max radius', 1.0)]) - - greater_than = nw.new_node(Nodes.Compare, - input_kwargs={0: group_input.outputs["spline parameter"], 1: 0.5}) - - op_not = nw.new_node(Nodes.BooleanMath, - input_kwargs={0: greater_than}, - attrs={'operation': 'NOT'}) - - switch = nw.new_node(Nodes.Switch, - input_kwargs={0: group_input.outputs["bottom"], 6: greater_than, 7: op_not}, - attrs={'input_type': 'BOOLEAN'}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': group_input.outputs["distance to center"], 2: group_input.outputs["max radius"]}) - - float_curve_3 = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': map_range.outputs["Result"]}) - node_utils.assign_curve(float_curve_3.mapping.curves[0], dent_control_points) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': float_curve_3, 3: -1.0}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: map_range_1.outputs["Result"], 1: group_input.outputs["intensity"]}, - attrs={'operation': 'MULTIPLY'}) - - scale = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["spline tangent"], 'Scale': multiply}, - attrs={'operation': 'SCALE'}) - - set_position_2 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 'Selection': switch.outputs[2], 'Offset': scale.outputs["Vector"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_position_2}) - 
-@node_utils.to_nodegroup('nodegroup_add_crater', singleton=False, type='GeometryNodeTree') -def nodegroup_add_crater(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketGeometry', 'Points', None), - ('NodeSocketFloat', 'Strength', 1.5)]) - - normal = nw.new_node(Nodes.InputNormal) - - geometry_proximity = nw.new_node(Nodes.Proximity, - input_kwargs={'Target': group_input.outputs["Points"]}, - attrs={'target_element': 'POINTS'}) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': geometry_proximity.outputs["Distance"], 2: 0.08, 3: -0.04, 4: 0.0}) - - smooth_min = nw.new_node(Nodes.Math, - input_kwargs={0: map_range_1.outputs["Result"], 1: 0.0, 2: 0.05}, - attrs={'operation': 'SMOOTH_MIN'}) - - scale = nw.new_node(Nodes.VectorMath, - input_kwargs={0: normal, 'Scale': smooth_min}, - attrs={'operation': 'SCALE'}) - - scale_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: scale.outputs["Vector"], 'Scale': group_input.outputs["Strength"]}, - attrs={'operation': 'SCALE'}) - - set_position_1 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 'Offset': scale_1.outputs["Vector"]}) - - subdivision_surface = nw.new_node(Nodes.SubdivisionSurface, - input_kwargs={'Mesh': set_position_1}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': subdivision_surface}) - - -@node_utils.to_nodegroup('nodegroup_mix_vector', singleton=False, type='GeometryNodeTree') -def nodegroup_mix_vector(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'Vector 1', (0.0, 0.0, 0.0)), - ('NodeSocketVector', 'Vector 2', (0.0, 0.0, 0.0)), - ('NodeSocketFloat', 'alpha', 0.5)]) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: 1.0, 1: group_input.outputs["alpha"]}, - attrs={'operation': 'SUBTRACT'}) - - scale = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["Vector 1"], 'Scale': subtract}, - attrs={'operation': 'SCALE'}) - - scale_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["Vector 2"], 'Scale': group_input.outputs["alpha"]}, - attrs={'operation': 'SCALE'}) - - add = nw.new_node(Nodes.VectorMath, - input_kwargs={0: scale.outputs["Vector"], 1: scale_1.outputs["Vector"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Vector': add.outputs["Vector"]}) - -@node_utils.to_nodegroup('nodegroup_add_noise_scalar', singleton=False, type='GeometryNodeTree') -def nodegroup_add_noise_scalar(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - position = nw.new_node(Nodes.InputPosition) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'value', 0.5), - ('NodeSocketFloat', 'noise random seed', 0.0), - ('NodeSocketFloat', 'noise scale', 5.0), - ('NodeSocketFloat', 'noise amount', 0.5)]) - - add = nw.new_node(Nodes.VectorMath, - input_kwargs={0: position, 1: group_input.outputs["noise random seed"]}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': add.outputs["Vector"], 'Scale': group_input.outputs["noise scale"]}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: noise_texture.outputs["Fac"]}, - attrs={'operation': 'SUBTRACT'}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: subtract, 1: 
group_input.outputs["noise amount"]}, - attrs={'operation': 'MULTIPLY'}) - - add_1 = nw.new_node(Nodes.Math, - input_kwargs={0: multiply, 1: group_input.outputs["value"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'value': add_1}) - -@node_utils.to_nodegroup('nodegroup_attach_to_nearest', singleton=False, type='GeometryNodeTree') -def nodegroup_attach_to_nearest(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketGeometry', 'Target', None), - ('NodeSocketFloat', 'threshold', 0.0), - ('NodeSocketFloat', 'multiplier', 0.5), - ('NodeSocketVectorTranslation', 'Offset', (0.0, 0.0, 0.0))]) - - position = nw.new_node(Nodes.InputPosition) - - geometry_proximity = nw.new_node(Nodes.Proximity, - input_kwargs={'Target': group_input.outputs["Target"]}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["threshold"], 1: geometry_proximity.outputs["Distance"]}, - attrs={'operation': 'SUBTRACT'}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: subtract, 1: group_input.outputs["multiplier"]}, - attrs={'operation': 'MULTIPLY'}) - - exponent = nw.new_node(Nodes.Math, - input_kwargs={0: multiply}, - attrs={'operation': 'EXPONENT'}) - - clamp = nw.new_node(Nodes.Clamp, - input_kwargs={'Value': exponent}) - - mixvector = nw.new_node(nodegroup_mix_vector().name, - input_kwargs={'Vector 1': position, 'Vector 2': geometry_proximity.outputs["Position"], 'alpha': clamp}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 'Position': mixvector, 'Offset': group_input.outputs["Offset"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_position}) - -@node_utils.to_nodegroup('nodegroup_manhattan', singleton=False, type='GeometryNodeTree') -def nodegroup_manhattan(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'v1', (0.0, 0.0, 0.0)), - ('NodeSocketVector', 'v2', (0.0, 0.0, 0.0))]) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': group_input.outputs["v1"]}) - - separate_xyz_1 = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': group_input.outputs["v2"]}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz.outputs["X"], 1: separate_xyz_1.outputs["X"]}, - attrs={'operation': 'SUBTRACT'}) - - absolute = nw.new_node(Nodes.Math, - input_kwargs={0: subtract}, - attrs={'operation': 'ABSOLUTE'}) - - subtract_1 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz.outputs["Y"], 1: separate_xyz_1.outputs["Y"]}, - attrs={'operation': 'SUBTRACT'}) - - absolute_1 = nw.new_node(Nodes.Math, - input_kwargs={0: subtract_1}, - attrs={'operation': 'ABSOLUTE'}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: absolute, 1: absolute_1}) - - subtract_2 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz.outputs["Z"], 1: separate_xyz_1.outputs["Z"]}, - attrs={'operation': 'SUBTRACT'}) - - absolute_2 = nw.new_node(Nodes.Math, - input_kwargs={0: subtract_2}, - attrs={'operation': 'ABSOLUTE'}) - - add_1 = nw.new_node(Nodes.Math, - input_kwargs={0: add, 1: absolute_2}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Value': add_1}) - -@node_utils.to_nodegroup('nodegroup_rot_semmetry', singleton=False, type='GeometryNodeTree') -def 
-    # Code generated using version 2.4.3 of the node_transpiler
-
-    group_input = nw.new_node(Nodes.GroupInput,
-        expose_input=[('NodeSocketInt', 'N', 0),
-            ('NodeSocketFloat', 'spline parameter', 0.5)])
-
-    divide = nw.new_node(Nodes.Math,
-        input_kwargs={1: group_input.outputs["N"]},
-        attrs={'operation': 'DIVIDE'})
-
-    pingpong = nw.new_node(Nodes.Math,
-        input_kwargs={0: group_input.outputs["spline parameter"], 1: divide},
-        attrs={'operation': 'PINGPONG'})
-
-    map_range = nw.new_node(Nodes.MapRange,
-        input_kwargs={'Value': pingpong, 2: divide})
-
-    group_output = nw.new_node(Nodes.GroupOutput,
-        input_kwargs={'Result': map_range.outputs["Result"]})
-
-@node_utils.to_nodegroup('nodegroup_scale_mesh', singleton=False, type='GeometryNodeTree')
-def nodegroup_scale_mesh(nw: NodeWrangler):
-    # Code generated using version 2.4.3 of the node_transpiler
-
-    group_input = nw.new_node(Nodes.GroupInput,
-        expose_input=[('NodeSocketGeometry', 'Geometry', None),
-            ('NodeSocketFloat', 'Scale', 1.0)])
-
-    position = nw.new_node(Nodes.InputPosition)
-
-    scale = nw.new_node(Nodes.VectorMath,
-        input_kwargs={0: position, 'Scale': group_input.outputs["Scale"]},
-        attrs={'operation': 'SCALE'})
-
-    set_position = nw.new_node(Nodes.SetPosition,
-        input_kwargs={'Geometry': group_input.outputs["Geometry"], 'Position': scale.outputs["Vector"]})
-
-    group_output = nw.new_node(Nodes.GroupOutput,
-        input_kwargs={'Geometry': set_position})
-
-@node_utils.to_nodegroup('nodegroup_hair', singleton=False, type='GeometryNodeTree')
-def nodegroup_hair(nw: NodeWrangler):
-    # Code generated using version 2.4.3 of the node_transpiler
-
-    group_input = nw.new_node(Nodes.GroupInput,
-        expose_input=[('NodeSocketFloat', 'shape noise random seed', 0.0),
-            ('NodeSocketFloat', 'shape noise amount', 1.0),
-            ('NodeSocketIntUnsigned', 'length resolution', 8),
-            ('NodeSocketInt', 'cross section resolution', 4),
-            ('NodeSocketFloat', 'scale', 0.0),
-            ('NodeSocketFloatDistance', 'Radius', 0.01),
-            ('NodeSocketMaterial', 'Material', None),
-            ('NodeSocketVectorTranslation', 'Start', (0.0, 0.0, 0.0)),
-            ('NodeSocketVectorTranslation', 'Middle', (0.0, 0.3, 1.0)),
-            ('NodeSocketVectorTranslation', 'End', (0.0, -1.4, 2.0))])
-
-    quadratic_bezier_1 = nw.new_node(Nodes.QuadraticBezier,
-        input_kwargs={'Resolution': group_input.outputs["length resolution"], 'Start': group_input.outputs["Start"], 'Middle': group_input.outputs["Middle"], 'End': group_input.outputs["End"]})
-
-    subdivide_curve = nw.new_node(Nodes.SubdivideCurve,
-        input_kwargs={'Curve': quadratic_bezier_1})
-
-    position = nw.new_node(Nodes.InputPosition)
-
-    add = nw.new_node(Nodes.VectorMath,
-        input_kwargs={0: position, 1: group_input.outputs["shape noise random seed"]})
-
-    noise_texture_3 = nw.new_node(Nodes.NoiseTexture,
-        input_kwargs={'Vector': add.outputs["Vector"], 'Scale': 1.0})
-
-    value_1 = nw.new_node(Nodes.Value)
-    value_1.outputs[0].default_value = 0.5
-
-    subtract = nw.new_node(Nodes.VectorMath,
-        input_kwargs={0: noise_texture_3.outputs["Color"], 1: value_1},
-        attrs={'operation': 'SUBTRACT'})
-
-    spline_parameter_2 = nw.new_node(Nodes.SplineParameter)
-
-    scale = nw.new_node(Nodes.VectorMath,
-        input_kwargs={0: subtract.outputs["Vector"], 'Scale': spline_parameter_2.outputs["Factor"]},
-        attrs={'operation': 'SCALE'})
-
-    scale_1 = nw.new_node(Nodes.VectorMath,
-        input_kwargs={0: scale.outputs["Vector"], 'Scale': group_input.outputs["shape noise amount"]},
-        attrs={'operation': 'SCALE'})
-
-    set_position_1 = nw.new_node(Nodes.SetPosition,
-        input_kwargs={'Geometry': subdivide_curve, 'Offset': scale_1.outputs["Vector"]})
-
-    curve_circle_1 = nw.new_node(Nodes.CurveCircle,
-        input_kwargs={'Resolution': group_input.outputs["cross section resolution"], 'Radius': group_input.outputs["Radius"]})
-
-    curve_to_mesh_1 = nw.new_node(Nodes.CurveToMesh,
-        input_kwargs={'Curve': set_position_1, 'Profile Curve': curve_circle_1.outputs["Curve"], 'Fill Caps': True})
-
-    transform_1 = nw.new_node(Nodes.Transform,
-        input_kwargs={'Geometry': curve_to_mesh_1, 'Scale': group_input.outputs["scale"]})
-
-    set_material_1 = nw.new_node(Nodes.SetMaterial,
-        input_kwargs={'Geometry': transform_1, 'Material': group_input.outputs["Material"]})
-
-    group_output = nw.new_node(Nodes.GroupOutput,
-        input_kwargs={'Geometry': set_material_1})
-
-@node_utils.to_nodegroup('nodegroup_random_rotation_scale', singleton=False, type='GeometryNodeTree')
-def nodegroup_random_rotation_scale(nw: NodeWrangler):
-    # Code generated using version 2.4.3 of the node_transpiler
-
-    group_input = nw.new_node(Nodes.GroupInput,
-        expose_input=[('NodeSocketFloat', 'random seed', 0.0),
-            ('NodeSocketFloat', 'noise scale', 10.0),
-            ('NodeSocketVector', 'rot mean', (0.0, 0.0, 0.0)),
-            ('NodeSocketFloat', ' rot std z', 1.0),
-            ('NodeSocketFloat', 'scale mean', 0.35),
-            ('NodeSocketFloat', 'scale std', 0.1)])
-
-    position_3 = nw.new_node(Nodes.InputPosition)
-
-    add = nw.new_node(Nodes.VectorMath,
-        input_kwargs={0: position_3, 1: group_input.outputs["random seed"]})
-
-    noise_texture_2 = nw.new_node(Nodes.NoiseTexture,
-        input_kwargs={'Vector': add.outputs["Vector"], 'Scale': group_input.outputs["noise scale"]})
-
-    value_2 = nw.new_node(Nodes.Value)
-    value_2.outputs[0].default_value = 0.5
-
-    subtract = nw.new_node(Nodes.VectorMath,
-        input_kwargs={0: noise_texture_2.outputs["Color"], 1: value_2},
-        attrs={'operation': 'SUBTRACT'})
-
-    separate_xyz_2 = nw.new_node(Nodes.SeparateXYZ,
-        input_kwargs={'Vector': subtract.outputs["Vector"]})
-
-    multiply = nw.new_node(Nodes.Math,
-        input_kwargs={0: separate_xyz_2.outputs["X"], 1: group_input.outputs[" rot std z"]},
-        attrs={'operation': 'MULTIPLY'})
-
-    combine_xyz = nw.new_node(Nodes.CombineXYZ,
-        input_kwargs={'Z': multiply})
-
-    add_1 = nw.new_node(Nodes.VectorMath,
-        input_kwargs={0: group_input.outputs["rot mean"], 1: combine_xyz})
-
-    multiply_1 = nw.new_node(Nodes.Math,
-        input_kwargs={0: separate_xyz_2.outputs["Y"], 1: group_input.outputs["scale std"]},
-        attrs={'operation': 'MULTIPLY'})
-
-    add_2 = nw.new_node(Nodes.Math,
-        input_kwargs={0: multiply_1, 1: group_input.outputs["scale mean"]},
-        attrs={'use_clamp': True})
-
-    group_output = nw.new_node(Nodes.GroupOutput,
-        input_kwargs={'Vector': add_1.outputs["Vector"], 'Value': add_2})
-
-@node_utils.to_nodegroup('nodegroup_align_top_to_horizon', singleton=False, type='GeometryNodeTree')
-def nodegroup_align_top_to_horizon(nw: NodeWrangler):
-    # Code generated using version 2.4.3 of the node_transpiler
-
-    group_input = nw.new_node(Nodes.GroupInput,
-        expose_input=[('NodeSocketGeometry', 'Geometry', None)])
-
-    bounding_box = nw.new_node(Nodes.BoundingBox,
-        input_kwargs={'Geometry': group_input.outputs["Geometry"]})
-
-    separate_xyz = nw.new_node(Nodes.SeparateXYZ,
-        input_kwargs={'Vector': bounding_box.outputs["Max"]})
-
-    multiply = nw.new_node(Nodes.Math,
-        input_kwargs={0: separate_xyz.outputs["Z"], 1: -1.0},
-        attrs={'operation': 'MULTIPLY'})
-
-    combine_xyz = nw.new_node(Nodes.CombineXYZ,
-        input_kwargs={'Z': multiply})
-
-    set_position = nw.new_node(Nodes.SetPosition,
-        input_kwargs={'Geometry': group_input.outputs["Geometry"], 'Offset': combine_xyz})
-
-    group_output = nw.new_node(Nodes.GroupOutput,
-        input_kwargs={'Geometry': set_position})
diff --git a/infinigen/assets/fruits/general_fruit.py b/infinigen/assets/fruits/general_fruit.py
deleted file mode 100644
index ba68eaf72..000000000
--- a/infinigen/assets/fruits/general_fruit.py
+++ /dev/null
@@ -1,178 +0,0 @@
-# Copyright (c) Princeton University.
-# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree.
-
-# Authors: Yiming Zuo
-
-
-import bpy
-import mathutils
-import numpy as np
-from numpy.random import uniform, normal, randint
-from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler
-from infinigen.core.nodes import node_utils
-from infinigen.core.util.color import color_category, hsv2rgba
-from infinigen.core import surface
-
-from infinigen.core.util.math import FixedSeed
-from infinigen.core.util import blender as butil
-from infinigen.core.placement.factory import AssetFactory
-
-from infinigen.assets.fruits.fruit_utils import nodegroup_shape_quadratic, nodegroup_align_top_to_horizon
-from infinigen.assets.fruits.cross_section_lib import nodegroup_circle_cross_section, nodegroup_star_cross_section, nodegroup_coconut_cross_section
-from infinigen.assets.fruits.stem_lib import nodegroup_basic_stem, nodegroup_pineapple_stem, nodegroup_calyx_stem, nodegroup_empty_stem, nodegroup_coconut_stem
-
-from infinigen.assets.fruits.surfaces.apple_surface import nodegroup_apple_surface
-from infinigen.assets.fruits.surfaces.pineapple_surface import nodegroup_pineapple_surface
-from infinigen.assets.fruits.surfaces.starfruit_surface import nodegroup_starfruit_surface
-from infinigen.assets.fruits.surfaces.strawberry_surface import nodegroup_strawberry_surface
-from infinigen.assets.fruits.surfaces.blackberry_surface import nodegroup_blackberry_surface
-from infinigen.assets.fruits.surfaces.coconuthairy_surface import nodegroup_coconuthairy_surface
-from infinigen.assets.fruits.surfaces.coconutgreen_surface import nodegroup_coconutgreen_surface
-from infinigen.assets.fruits.surfaces.durian_surface import nodegroup_durian_surface
-from infinigen.core.tagging import tag_object, tag_nodegroup
-
-crosssectionlib = {
-    'circle_cross_section': nodegroup_circle_cross_section,
-    'star_cross_section': nodegroup_star_cross_section,
-    'coconut_cross_section': nodegroup_coconut_cross_section,
-}
-
-shapelib = {'shape_quadratic': nodegroup_shape_quadratic}
-
-surfacelib = {
-    'apple_surface': nodegroup_apple_surface,
-    'pineapple_surface': nodegroup_pineapple_surface,
-    'starfruit_surface': nodegroup_starfruit_surface,
-    'strawberry_surface': nodegroup_strawberry_surface,
-    'blackberry_surface': nodegroup_blackberry_surface,
-    'coconuthairy_surface': nodegroup_coconuthairy_surface,
-    'coconutgreen_surface': nodegroup_coconutgreen_surface,
-    'durian_surface': nodegroup_durian_surface,
-}
-
-stemlib = {
-    'basic_stem': nodegroup_basic_stem,
-    'pineapple_stem': nodegroup_pineapple_stem,
-    'calyx_stem': nodegroup_calyx_stem,
-    'empty_stem': nodegroup_empty_stem,
-    'coconut_stem': nodegroup_coconut_stem,
-}
-
-def parse_args(nodeinfo, dictionary):
-    for k1, v1 in dictionary.items():
-        if isinstance(v1, str) and v1.startswith('noderef'):
-            _, nodename, outputname = v1.split('-')
-            dictionary[k1] = nodeinfo[nodename].outputs[outputname]
-
-    return dictionary
-
-def general_fruit_geometry_nodes(nw: NodeWrangler,
-        cross_section_params, shape_params, surface_params, stem_params):
-    nodeinfo = {}
-
-    parse_args(nodeinfo, cross_section_params['cross_section_input_args'])
-    crosssection = nw.new_node(crosssectionlib[cross_section_params['cross_section_name']](**cross_section_params['cross_section_func_args']).name,
-        input_kwargs=cross_section_params['cross_section_input_args'])
-    nodeinfo['crosssection'] = crosssection
-    parse_args(nodeinfo, cross_section_params['cross_section_output_args'])
-
-    parse_args(nodeinfo, shape_params['shape_input_args'])
-    shapequadratic = nw.new_node(shapelib[shape_params['shape_name']](**shape_params['shape_func_args']).name,
-        input_kwargs=shape_params['shape_input_args'])
-    nodeinfo['shapequadratic'] = shapequadratic
-    parse_args(nodeinfo, shape_params['shape_output_args'])
-
-    parse_args(nodeinfo, surface_params['surface_input_args'])
-    fruitsurface = nw.new_node(surfacelib[surface_params['surface_name']](**surface_params['surface_func_args']).name,
-        input_kwargs=surface_params['surface_input_args'])
-    nodeinfo['fruitsurface'] = fruitsurface
-    parse_args(nodeinfo, surface_params['surface_output_args'])
-
-    parse_args(nodeinfo, stem_params['stem_input_args'])
-    stem = nw.new_node(stemlib[stem_params['stem_name']](**stem_params['stem_func_args']).name,
-        input_kwargs=stem_params['stem_input_args'])
-    nodeinfo['stem'] = stem
-    parse_args(nodeinfo, stem_params['stem_output_args'])
-
-    join_geometry = nw.new_node(Nodes.JoinGeometry,
-        input_kwargs={'Geometry': [fruitsurface, stem]})
-
-    realize_instances = nw.new_node(Nodes.RealizeInstances,
-        input_kwargs={'Geometry': join_geometry})
-
-    align = nw.new_node(nodegroup_align_top_to_horizon().name,
-        input_kwargs={'Geometry': realize_instances})
-
-    output_dict = {'Geometry': align}
-    output_dict.update(cross_section_params['cross_section_output_args'])
-    output_dict.update(shape_params['shape_output_args'])
-    output_dict.update(surface_params['surface_output_args'])
-    output_dict.update(stem_params['stem_output_args'])
-
-    group_output = nw.new_node(Nodes.GroupOutput,
-        input_kwargs=output_dict)
-
-class FruitFactoryGeneralFruit(AssetFactory):
-    def __init__(self, factory_seed, scale=1.0, coarse=False):
-        super(FruitFactoryGeneralFruit, self).__init__(factory_seed, coarse=coarse)
-
-        self.scale = scale
-
-    def sample_cross_section_params(self, surface_resolution=256):
-        raise NotImplementedError
-
-    def sample_shape_params(self, surface_resolution=256):
-        raise NotImplementedError
-
-    def sample_surface_params(self):
-        raise NotImplementedError
-
-    def sample_stem_params(self):
-        raise NotImplementedError
-
-    def sample_geo_genome(self):
-        surface_params = self.sample_surface_params()
-        surface_resolution = surface_params['surface_resolution']
-
-        cross_section_params = self.sample_cross_section_params(surface_resolution)
-        shape_params = self.sample_shape_params(surface_resolution)
-        stem_params = self.sample_stem_params()
-
-        return cross_section_params, shape_params, surface_params, stem_params
-
-    def create_asset(self, **params):
-
-        bpy.ops.mesh.primitive_plane_add(
-            size=4, enter_editmode=False, align='WORLD', location=(0, 0, 0), scale=(1, 1, 1))
-        obj = bpy.context.active_object
-
-        with FixedSeed(self.factory_seed):
-            cross_section_params, shape_params, surface_params, stem_params = self.sample_geo_genome()
-
-            scale_multiplier = surface_params['scale_multiplier']
-
-        output_list = []
-        output_list.extend(cross_section_params['cross_section_output_args'].keys())
-        output_list.extend(shape_params['shape_output_args'].keys())
-        output_list.extend(surface_params['surface_output_args'].keys())
-        output_list.extend(stem_params['stem_output_args'].keys())
-
-        surface.add_geomod(obj,
-            general_fruit_geometry_nodes,
-            attributes=output_list,
-            apply=False,
-            input_args=[cross_section_params, shape_params, surface_params, stem_params])
-
-        bpy.ops.object.convert(target='MESH')
-
-        obj = bpy.context.object
-        obj.scale *= normal(1, 0.1) * self.scale * scale_multiplier
-        butil.apply_transform(obj)
-
-        # TODO remove when empty materials from geonodes is debugged
-        butil.purge_empty_materials(obj)
-
-        tag_object(obj, 'fruit_'+self.name)
-        return obj
-
-
diff --git a/infinigen/assets/fruits/pineapple.py b/infinigen/assets/fruits/pineapple.py
deleted file mode 100644
index 8044ea85c..000000000
--- a/infinigen/assets/fruits/pineapple.py
+++ /dev/null
@@ -1,114 +0,0 @@
-# Copyright (c) Princeton University.
-# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree.
-
-# Authors: Yiming Zuo
-
-
-import bpy
-import mathutils
-import numpy as np
-from numpy.random import uniform, normal, randint
-from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler
-from infinigen.core.nodes import node_utils
-from infinigen.core.util.color import color_category, hsv2rgba
-from infinigen.core import surface
-
-from infinigen.core.util.math import FixedSeed
-from infinigen.core.util import blender as butil
-from infinigen.core.placement.factory import AssetFactory
-
-from infinigen.assets.fruits.general_fruit import FruitFactoryGeneralFruit
-
-class FruitFactoryPineapple(FruitFactoryGeneralFruit):
-    def __init__(self, factory_seed, scale=1.0, coarse=False):
-        super().__init__(factory_seed, scale=scale, coarse=coarse)
-        self.name = 'pineapple'
-
-    def sample_cross_section_params(self, surface_resolution=256):
-        return {
-            'cross_section_name': "circle_cross_section",
-            'cross_section_func_args': {},
-            'cross_section_input_args': {'random seed': uniform(-100, 100),
-                'radius': normal(1.2, 0.05),
-                'Resolution': surface_resolution},
-            'cross_section_output_args': {}
-        }
-
-    def sample_shape_params(self, surface_resolution=256):
-        return {
-            'shape_name': "shape_quadratic",
-            'shape_func_args': {'radius_control_points': [(0.0, 0.1031), (0.1182, 0.5062), (uniform(0.3, 0.7), 0.5594), (0.8364, 0.425), (0.9864, 0.1406), (1.0, 0.0)]},
-            'shape_input_args': {'Profile Curve': 'noderef-crosssection-Geometry',
-                'Start': (uniform(-0.1, 0.1), uniform(-0.1, 0.1), uniform(-0.8, -1.2)),
-                'End': (0.0, 0.0, 1.0),
-                'random seed pos': uniform(-100, 100),
-                'noise scale pos': 0.5,
-                'noise amount pos': 0.4,
-                'Resolution': surface_resolution},
-            'shape_output_args': {}
-        }
-
-    def sample_surface_params(self):
-        bottom_color = np.array((0.192, 0.898, 0.095))
-        bottom_color[0] += np.random.normal(0.0, 0.025)
-        bottom_color[1] += np.random.normal(0.0, 0.05)
-        bottom_color[2] += np.random.normal(0.0, 0.05)
-        bottom_color_rgba = hsv2rgba(bottom_color)
-
-        mid_color = np.array((0.05, 0.96, 0.55))
-        mid_color[0] += np.random.normal(0.0, 0.025)
-        mid_color[1] += np.random.normal(0.0, 0.05)
-        mid_color[2] += np.random.normal(0.0, 0.05)
-        mid_color_rgba = hsv2rgba(mid_color)
-
-        top_color = np.array((0.04, 0.99, 0.45))
-        top_color[0] += np.random.normal(0.0, 0.025)
-        top_color[1] += np.random.normal(0.0, 0.05)
-        top_color[2] += np.random.normal(0.0, 0.05)
-        top_color_rgba = hsv2rgba(top_color)
-
-        center_color = np.array((0.07, 0.63, 0.84))
-        center_color[0] += np.random.normal(0.0, 0.025)
-        center_color[1] += np.random.normal(0.0, 0.05)
-        center_color[2] += np.random.normal(0.0, 0.05)
-        center_color_rgba = hsv2rgba(center_color)
-
-        cell_distance = uniform(0.18, 0.22)
-
-        return {
-            'surface_name': "pineapple_surface",
-            'surface_func_args': {'color_bottom': bottom_color_rgba,
-                'color_mid': mid_color_rgba,
-                'color_top': top_color_rgba,
-                'color_center': center_color_rgba,},
-            'surface_input_args': {'Geometry': 'noderef-shapequadratic-Mesh',
-                'spline parameter': 'noderef-shapequadratic-spline parameter',
-                'point distance': cell_distance,
-                'cell scale': cell_distance+0.02},
-            'surface_output_args': {'radius': 'noderef-fruitsurface-spline parameter'},
-            'surface_resolution': 64,
-            'scale_multiplier': 1.8
-        }
-
-    def sample_stem_params(self):
-        leaf_color = np.array((0.32, 0.79, 0.20))
-        leaf_color[0] += np.random.normal(0.0, 0.025)
-        leaf_color[1] += np.random.normal(0.0, 0.05)
-        leaf_color[2] += np.random.normal(0.0, 0.05)
-        leaf_color_rgba = hsv2rgba(leaf_color)
-
-        return {
-            'stem_name': "pineapple_stem",
-            'stem_func_args': {'basic_color': leaf_color_rgba},
-            'stem_input_args': {'rotation base': (-uniform(0.5, 0.55), 0.0, 0.0),
-                'noise amount': 0.1,
-                'noise scale': uniform(10, 30),
-                'number of leaves': randint(40, 80),
-                'scale base': normal(0.5, 0.05),
-                'scale z base': normal(0.15, 0.03),
-                'scale z top': normal(0.62, 0.03),
-                'rot z base': normal(-0.62, 0.03),
-                'rot z top': normal(0.54, 0.03)},
-            'stem_output_args': {}
-        }
-
diff --git a/infinigen/assets/fruits/seed_lib.py b/infinigen/assets/fruits/seed_lib.py
deleted file mode 100644
index 35e9c4da6..000000000
--- a/infinigen/assets/fruits/seed_lib.py
+++ /dev/null
@@ -1,65 +0,0 @@
-# Copyright (c) Princeton University.
-# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree.
-
-# Authors: Yiming Zuo
-
-
-import bpy
-import mathutils
-from numpy.random import uniform, normal, randint
-from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler
-from infinigen.core.nodes import node_utils
-from infinigen.core.util.color import color_category
-from infinigen.core import surface
-
-def shader_seed_shader(nw: NodeWrangler):
-    # Code generated using version 2.4.3 of the node_transpiler
-
-    texture_coordinate = nw.new_node(Nodes.TextureCoord)
-
-    noise_texture = nw.new_node(Nodes.NoiseTexture,
-        input_kwargs={'Vector': texture_coordinate.outputs["Object"], 'Scale': 7.8})
-
-    mix = nw.new_node(Nodes.MixRGB,
-        input_kwargs={'Fac': noise_texture.outputs["Fac"], 'Color1': (0.807, 0.624, 0.0704, 1.0), 'Color2': (0.3467, 0.2623, 0.0296, 1.0)})
-
-    principled_bsdf = nw.new_node(Nodes.PrincipledBSDF,
-        input_kwargs={'Base Color': mix, 'Roughness': 0.5114})
-
-    material_output = nw.new_node(Nodes.MaterialOutput,
-        input_kwargs={'Surface': principled_bsdf})
-
-@node_utils.to_nodegroup('nodegroup_strawberry_seed', singleton=False, type='GeometryNodeTree')
-def nodegroup_strawberry_seed(nw: NodeWrangler):
-    # Code generated using version 2.4.3 of the node_transpiler
-
-    group_input = nw.new_node(Nodes.GroupInput,
-        expose_input=[('NodeSocketIntUnsigned', 'Resolution', 8)])
-
-    quadratic_bezier = nw.new_node(Nodes.QuadraticBezier,
-        input_kwargs={'Resolution': group_input.outputs["Resolution"], 'Start': (0.0, 0.0, -0.5), 'Middle': (0.0, 0.0, 0.0), 'End': (0.0, 0.0, 0.5)})
-
-    spline_parameter_1 = nw.new_node(Nodes.SplineParameter)
-
-    float_curve_1 = nw.new_node(Nodes.FloatCurve,
-        input_kwargs={'Value': spline_parameter_1.outputs["Factor"]})
-    node_utils.assign_curve(float_curve_1.mapping.curves[0], [(0.0, 0.0281), (0.7023, 0.2781), (1.0, 0.0281)])
-
-    multiply = nw.new_node(Nodes.Math,
-        input_kwargs={0: float_curve_1, 1: 0.9},
-        attrs={'operation': 'MULTIPLY'})
-
-    set_curve_radius = nw.new_node(Nodes.SetCurveRadius,
-        input_kwargs={'Curve': quadratic_bezier, 'Radius': multiply})
-
-    curve_circle = nw.new_node(Nodes.CurveCircle,
-        input_kwargs={'Resolution': group_input.outputs["Resolution"]})
-
-    curve_to_mesh = nw.new_node(Nodes.CurveToMesh,
-        input_kwargs={'Curve': set_curve_radius, 'Profile Curve': curve_circle.outputs["Curve"], 'Fill Caps': True})
-
-    set_material_1 = nw.new_node(Nodes.SetMaterial,
-        input_kwargs={'Geometry': curve_to_mesh, 'Material': surface.shaderfunc_to_material(shader_seed_shader)})
-
-    group_output = nw.new_node(Nodes.GroupOutput,
-        input_kwargs={'Geometry': set_material_1})
\ No newline at end of file
diff --git a/infinigen/assets/fruits/starfruit.py b/infinigen/assets/fruits/starfruit.py
deleted file mode 100644
index f553f5d7f..000000000
--- a/infinigen/assets/fruits/starfruit.py
+++ /dev/null
@@ -1,92 +0,0 @@
-# Copyright (c) Princeton University.
-# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree.
- -# Authors: Yiming Zuo - - -import bpy -import mathutils -import numpy as np -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category, hsv2rgba -from infinigen.core import surface - -from infinigen.core.util.math import FixedSeed -from infinigen.core.util import blender as butil -from infinigen.core.placement.factory import AssetFactory - -from infinigen.assets.fruits.general_fruit import FruitFactoryGeneralFruit - -class FruitFactoryStarfruit(FruitFactoryGeneralFruit): - def __init__(self, factory_seed, scale=1.0, coarse=False): - super().__init__(factory_seed, scale=scale, coarse=coarse) - self.name = 'starfruit' - - def sample_cross_section_params(self, surface_resolution=256): - return { - 'cross_section_name': "star_cross_section", - 'cross_section_func_args': {}, - 'cross_section_input_args': {'random seed': uniform(-100, 100), - 'radius': normal(1.3, 0.05), - 'Resolution': surface_resolution}, - 'cross_section_output_args': {'star parameters': 'noderef-crosssection-curve parameters'} - } - - def sample_shape_params(self, surface_resolution=256): - return { - 'shape_name': "shape_quadratic", - 'shape_func_args': {'radius_control_points': [(0.0727, 0.2), (0.2636, 0.6063), (uniform(0.45, 0.65), uniform(0.7, 0.9)), (0.8886, 0.6094), (1.0, 0.0)],}, - 'shape_input_args': {'Profile Curve': 'noderef-crosssection-Geometry', - 'Resolution': surface_resolution, - 'Start': (uniform(-0.3, 0.3), uniform(-0.3, 0.3), uniform(-1.0, -2.0)), - 'End': (0.0, 0.0, 1.0)}, - 'shape_output_args': {} - } - - def sample_surface_params(self): - base_color = np.array((0.10, 0.999, 0.799)) - base_color[0] += normal(0.0, 0.025) - base_color[1] += normal(0.0, 0.05) - base_color[2] += normal(0.0, 0.005) - base_color_rgba = hsv2rgba(base_color) - - ridge_color = np.copy(base_color) - ridge_color[0] += normal(0.04, 0.02) - ridge_color[2] += normal(-0.2, 0.02) - ridge_color_rgba = hsv2rgba(ridge_color) - - return { - 'surface_name': "starfruit_surface", - 'surface_func_args': { - 'dent_control_points': [(0.0, 0.4219), (0.0977, 0.4469), (0.2273, 0.4844), (0.5568, 0.5125), (1.0, 0.5)], - 'base_color': base_color_rgba, - 'ridge_color': ridge_color_rgba}, - 'surface_input_args': {'Geometry': 'noderef-shapequadratic-Mesh', - 'spline parameter': 'noderef-shapequadratic-spline parameter', - 'spline tangent': 'noderef-shapequadratic-spline tangent', - 'distance to center': 'noderef-shapequadratic-radius to center', - 'dent intensity': normal(1.0, 0.1) - }, - 'surface_output_args': {}, - 'surface_resolution': 256, - 'scale_multiplier': 1.0 - } - - def sample_stem_params(self): - stem_color = np.array((0.10, 0.96, 0.13)) - stem_color[0] += normal(0.0, 0.02) - stem_color[1] += normal(0.0, 0.05) - stem_color[2] += normal(0.0, 0.05) - stem_color_rgba = hsv2rgba(stem_color) - - return { - 'stem_name': "basic_stem", - 'stem_func_args': {'stem_color': stem_color_rgba}, - 'stem_input_args': {'quad_mid': (uniform(-0.1, 0.1), uniform(-0.1, 0.1), uniform(0.15, 0.2)), - 'quad_end': (uniform(-0.2, 0.2), uniform(-0.2, 0.2), uniform(0.3, 0.4)), - 'cross_radius': uniform(0.03, 0.05), - 'Translation': (0.0, 0.0, 0.8)}, - 'stem_output_args': {} - } \ No newline at end of file diff --git a/infinigen/assets/fruits/stem_lib.py b/infinigen/assets/fruits/stem_lib.py deleted file mode 100644 index 8f0260b04..000000000 --- a/infinigen/assets/fruits/stem_lib.py +++ /dev/null @@ -1,623 +0,0 @@ 
-# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Yiming Zuo - - -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface - -from infinigen.assets.fruits.fruit_utils import nodegroup_surface_bump, nodegroup_add_noise_scalar, nodegroup_attach_to_nearest, nodegroup_scale_mesh -from infinigen.assets.fruits.cross_section_lib import nodegroup_cylax_cross_section - -@node_utils.to_nodegroup('nodegroup_empty_stem', singleton=False, type='GeometryNodeTree') -def nodegroup_empty_stem(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - points = nw.new_node('GeometryNodePoints', - input_kwargs={'Count': 0}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': points}) - -def shader_basic_stem_shader(nw: NodeWrangler, stem_color): - # Code generated using version 2.4.3 of the node_transpiler - - texture_coordinate = nw.new_node(Nodes.TextureCoord) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': texture_coordinate.outputs["Object"], 'Scale': 0.8, 'Detail': 10.0, 'Roughness': 0.7}) - - separate_rgb = nw.new_node(Nodes.SeparateColor, - input_kwargs={'Color': noise_texture.outputs["Color"]}) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': separate_rgb.outputs["Green"], 1: 0.4, 2: 0.7, 3: 0.48, 4: 0.55}, - attrs={'interpolation_type': 'SMOOTHSTEP'}) - - map_range_2 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': separate_rgb.outputs["Blue"], 1: 0.4, 2: 0.7, 3: 0.4}, - attrs={'interpolation_type': 'SMOOTHSTEP'}) - - hue_saturation_value = nw.new_node('ShaderNodeHueSaturation', - input_kwargs={'Hue': map_range_1.outputs["Result"], 'Value': map_range_2.outputs["Result"], 'Color': stem_color}) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': hue_saturation_value, 'Specular': 0.1205, 'Roughness': 0.5068}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': principled_bsdf}) - -@node_utils.to_nodegroup('nodegroup_basic_stem', singleton=False, type='GeometryNodeTree') -def nodegroup_basic_stem(nw: NodeWrangler, stem_color=(0.179, 0.836, 0.318, 1.0)): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVectorTranslation', 'quad_start', (0.0, 0.0, 0.0)), - ('NodeSocketVectorTranslation', 'quad_mid', (0.0, -0.05, 0.2)), - ('NodeSocketVectorTranslation', 'quad_end', (-0.1, 0.0, 0.4)), - ('NodeSocketIntUnsigned', 'quad_res', 128), - ('NodeSocketFloatDistance', 'cross_radius', 0.08), - ('NodeSocketInt', 'cross_res', 128), - ('NodeSocketVectorTranslation', 'Translation', (0.0, 0.0, 1.0)), - ('NodeSocketVectorXYZ', 'Scale', (1.0, 1.0, 2.0))]) - - quadratic_bezier_2 = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Resolution': group_input.outputs["quad_res"], 'Start': group_input.outputs["quad_start"], 'Middle': group_input.outputs["quad_mid"], 'End': group_input.outputs["quad_end"]}) - - curve_circle_2 = nw.new_node(Nodes.CurveCircle, - input_kwargs={'Resolution': group_input.outputs["cross_res"], 'Radius': group_input.outputs["cross_radius"]}) - - curve_to_mesh_2 = 
nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': quadratic_bezier_2, 'Profile Curve': curve_circle_2.outputs["Curve"], 'Fill Caps': True}) - - surfacebump = nw.new_node(nodegroup_surface_bump().name, - input_kwargs={'Geometry': curve_to_mesh_2, 'Displacement': 0.01, 'Scale': 2.9}) - - surfacebump_1 = nw.new_node(nodegroup_surface_bump().name, - input_kwargs={'Geometry': surfacebump, 'Scale': 20.0}) - - transform_3 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': surfacebump_1, 'Translation': group_input.outputs["Translation"], 'Scale': group_input.outputs["Scale"]}) - - set_material = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': transform_3, 'Material': surface.shaderfunc_to_material(shader_basic_stem_shader, stem_color)}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_material}) - -def shader_calyx_shader(nw: NodeWrangler, stem_color): - # Code generated using version 2.4.3 of the node_transpiler - - noise_texture_1 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Scale': 2.8, 'Detail': 10.0, 'Roughness': 0.7}) - - separate_rgb = nw.new_node(Nodes.SeparateColor, - input_kwargs={'Color': noise_texture_1.outputs["Color"]}) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': separate_rgb.outputs["Green"], 1: 0.4, 2: 0.7, 3: 0.48, 4: 0.55}, - attrs={'interpolation_type': 'SMOOTHSTEP'}) - - map_range_2 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': separate_rgb.outputs["Blue"], 1: 0.4, 2: 0.7, 3: 0.4}, - attrs={'interpolation_type': 'SMOOTHSTEP'}) - - hue_saturation_value = nw.new_node('ShaderNodeHueSaturation', - input_kwargs={'Hue': map_range_1.outputs["Result"], 'Value': map_range_2.outputs["Result"], 'Color': stem_color}) - - translucent_bsdf = nw.new_node(Nodes.TranslucentBSDF, - input_kwargs={'Color': hue_saturation_value}) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': hue_saturation_value, 'Specular': 0.5136, 'Roughness': 0.7614}) - - mix_shader = nw.new_node(Nodes.MixShader, - input_kwargs={'Fac': 0.5083, 1: translucent_bsdf, 2: principled_bsdf}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': mix_shader}) - -### straberry calyx ### -@node_utils.to_nodegroup('nodegroup_calyx_stem', singleton=False, type='GeometryNodeTree') -def nodegroup_calyx_stem(nw: NodeWrangler, stem_color=(0.1678, 0.4541, 0.0397, 1.0)): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[ - ('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketInt', 'fork number', 10), - ('NodeSocketFloatDistance', 'outer radius', 1.0), - ('NodeSocketFloat', 'inner radius', 0.2), - ('NodeSocketFloat', 'cross section noise amount', 0.4), - ('NodeSocketFloat', 'z noise amount', 1.0), - ('NodeSocketFloatDistance', 'noise random seed', 0.0), - ('NodeSocketVectorTranslation', 'quad_start', (0.0, 0.0, 0.0)), - ('NodeSocketVectorTranslation', 'quad_mid', (0.0, -0.05, 0.2)), - ('NodeSocketVectorTranslation', 'quad_end', (-0.1, 0.0, 0.4)), - ('NodeSocketVectorTranslation', 'Translation', (0.0, 0.0, 1.0)), - ('NodeSocketFloatDistance', 'cross_radius', 0.04)]) - - cylaxcrosssection = nw.new_node(nodegroup_cylax_cross_section().name, - input_kwargs={'fork number': group_input.outputs["fork number"], 'bottom radius': group_input.outputs["inner radius"], 'noise random seed': group_input.outputs["noise random seed"], 'noise amount': group_input.outputs["cross section noise amount"], 'radius': 
group_input.outputs["outer radius"]}) - - fill_curve = nw.new_node(Nodes.FillCurve, - input_kwargs={'Curve': cylaxcrosssection}) - - triangulate = nw.new_node('GeometryNodeTriangulate', - input_kwargs={'Mesh': fill_curve}) - - subdivide_mesh = nw.new_node(Nodes.SubdivideMesh, - input_kwargs={'Mesh': triangulate, 'Level': 3}) - - position = nw.new_node(Nodes.InputPosition) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': position}) - - addnoisescalar = nw.new_node(nodegroup_add_noise_scalar().name, - input_kwargs={'value': separate_xyz.outputs["Z"], 'noise random seed': group_input.outputs["noise random seed"], 'noise scale': 1.0, 'noise amount': group_input.outputs["z noise amount"]}) - - length = nw.new_node(Nodes.VectorMath, - input_kwargs={0: position}, - attrs={'operation': 'LENGTH'}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: addnoisescalar, 1: length.outputs["Value"]}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': separate_xyz.outputs["X"], 'Y': separate_xyz.outputs["Y"], 'Z': multiply}) - - set_position_1 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': subdivide_mesh, 'Position': combine_xyz}) - - basicstem = nw.new_node(nodegroup_basic_stem().name, - input_kwargs={'quad_start': group_input.outputs["quad_start"], 'quad_mid': group_input.outputs["quad_mid"], 'quad_end': group_input.outputs["quad_end"], 'quad_res': 16, 'cross_radius': group_input.outputs["cross_radius"], 'cross_res': 16, 'Translation': (0.0, 0.0, 0.0)}) - - join_geometry_2 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [set_position_1, basicstem]}) - - set_material = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': join_geometry_2, 'Material': surface.shaderfunc_to_material(shader_calyx_shader, stem_color)}) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': set_material, 'Translation': group_input.outputs["Translation"], 'Scale': (1.0, 1.0, 1.0)}) - - attachtonearest = nw.new_node(nodegroup_attach_to_nearest().name, - input_kwargs={'Geometry': transform, 'Target': group_input.outputs["Geometry"], 'threshold': 0.1, 'multiplier': 10.0, 'Offset': (0.0, 0.0, 0.05)}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': attachtonearest}) - -### coconutgreen ### -@node_utils.to_nodegroup('nodegroup_jigsaw', singleton=False, type='GeometryNodeTree') -def nodegroup_jigsaw(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - spline_parameter = nw.new_node(Nodes.SplineParameter) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Value', 0.5), - ('NodeSocketFloat', 'noise scale', 30.0), - ('NodeSocketFloatFactor', 'noise randomness', 0.7), - ('NodeSocketFloat', 'From Max', 0.15), - ('NodeSocketFloat', 'To Min', 0.9)]) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={1: group_input.outputs["Value"]}, - attrs={'operation': 'SUBTRACT'}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Value"]}) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': spline_parameter.outputs["Factor"], 1: subtract, 2: add}) - - voronoi_texture = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'W': map_range_1.outputs["Result"], 'Scale': group_input.outputs["noise scale"], 'Randomness': group_input.outputs["noise randomness"]}, - attrs={'voronoi_dimensions': '1D', 'feature': 'DISTANCE_TO_EDGE'}) - - map_range = nw.new_node(Nodes.MapRange, - 
input_kwargs={'Value': voronoi_texture.outputs["Distance"], 2: group_input.outputs["From Max"], 3: group_input.outputs["To Min"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Result': map_range.outputs["Result"]}) - -def shader_coconut_calyx_shader(nw: NodeWrangler, basic_color, edge_color): - # Code generated using version 2.4.3 of the node_transpiler - - texture_coordinate = nw.new_node(Nodes.TextureCoord) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': texture_coordinate.outputs["Object"], 'Scale': 10.0, 'Detail': 10.0, 'Roughness': 0.7}) - - separate_rgb = nw.new_node(Nodes.SeparateColor, - input_kwargs={'Color': noise_texture.outputs["Color"]}) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': separate_rgb.outputs["Green"], 1: 0.4, 2: 0.7, 3: 0.45, 4: 0.52}, - attrs={'interpolation_type': 'SMOOTHSTEP'}) - - map_range_2 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': separate_rgb.outputs["Blue"], 1: 0.4, 2: 0.7, 3: 0.6}, - attrs={'interpolation_type': 'SMOOTHSTEP'}) - - attribute = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'distance to edge'}) - - noise_texture_1 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Scale': 3.0}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: noise_texture_1.outputs["Fac"]}, - attrs={'operation': 'SUBTRACT'}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: subtract, 1: 0.1}, - attrs={'operation': 'MULTIPLY'}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: attribute.outputs["Fac"], 1: multiply}) - - colorramp = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': add}) - colorramp.color_ramp.elements.new(0) - colorramp.color_ramp.elements[0].position = 0.0159 - colorramp.color_ramp.elements[0].color = edge_color # (0.0369, 0.0086, 0.0, 1.0) - colorramp.color_ramp.elements[1].position = 0.0716 - colorramp.color_ramp.elements[1].color = basic_color # (0.1119, 0.2122, 0.008, 1.0) - colorramp.color_ramp.elements[2].position = 1.0 - colorramp.color_ramp.elements[2].color = basic_color # (0.1119, 0.2122, 0.008, 1.0) - - hue_saturation_value = nw.new_node('ShaderNodeHueSaturation', - input_kwargs={'Hue': map_range_1.outputs["Result"], 'Value': map_range_2.outputs["Result"], 'Color': colorramp.outputs["Color"]}) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': hue_saturation_value, 'Roughness': 0.90}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': principled_bsdf}) - -@node_utils.to_nodegroup('nodegroup_coconut_calyx', singleton=False, type='GeometryNodeTree') -def nodegroup_coconut_calyx(nw: NodeWrangler, basic_color, edge_color): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'width', 0.5), - ('NodeSocketInt', 'resolution', 128), - ('NodeSocketFloatDistance', 'radius', 1.0), - ('NodeSocketInt', 'subdivision', 5), - ('NodeSocketFloat', 'bump displacement', 0.16), - ('NodeSocketFloat', 'bump scale', 3.22),]) - - curve_circle = nw.new_node(Nodes.CurveCircle, - input_kwargs={'Resolution': group_input.outputs["resolution"], 'Radius': group_input.outputs["radius"]}) - - jigsaw = nw.new_node(nodegroup_jigsaw().name, - input_kwargs={'Value': group_input.outputs["width"], 'noise scale': 30.22}) - - scale_mesh = nw.new_node(nodegroup_scale_mesh().name, - input_kwargs={'Geometry': curve_circle.outputs["Curve"], 'Scale': jigsaw}, - label='ScaleMesh') - - spline_parameter_1 = 
nw.new_node(Nodes.SplineParameter) - - value = nw.new_node(Nodes.Value) - value.outputs[0].default_value = 0.5 - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: spline_parameter_1.outputs["Factor"], 1: value}, - attrs={'operation': 'SUBTRACT'}) - - absolute = nw.new_node(Nodes.Math, - input_kwargs={0: subtract}, - attrs={'operation': 'ABSOLUTE'}) - - map_range_2 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': absolute, 1: value, 2: group_input.outputs["width"]}) - - float_curve = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': map_range_2.outputs["Result"]}) - node_utils.assign_curve(float_curve.mapping.curves[0], [(0.0, 0.0), (0.2409, 0.0), (0.7068, 0.275), (1.0, 0.9781)]) - - scale_mesh_1 = nw.new_node(nodegroup_scale_mesh().name, - input_kwargs={'Geometry': scale_mesh, 'Scale': float_curve}, - label='ScaleMesh') - - fill_curve = nw.new_node(Nodes.FillCurve, - input_kwargs={'Curve': scale_mesh_1}, - attrs={'mode': 'NGONS'}) - - subdivide_mesh = nw.new_node(Nodes.SubdivideMesh, - input_kwargs={'Mesh': fill_curve, 'Level': group_input.outputs["subdivision"]}) - - surfacebump = nw.new_node(nodegroup_surface_bump().name, - input_kwargs={'Geometry': subdivide_mesh, 'Displacement': group_input.outputs["bump displacement"], 'Scale': group_input.outputs["bump scale"]}) - - set_material = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': surfacebump, 'Material': surface.shaderfunc_to_material(shader_coconut_calyx_shader, basic_color, edge_color)}) - - geometry_proximity = nw.new_node(Nodes.Proximity, - input_kwargs={'Target': fill_curve}, - attrs={'target_element': 'EDGES'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_material, 'distance to edge': geometry_proximity.outputs["Distance"]}) - -@node_utils.to_nodegroup('nodegroup_coconut_stem', singleton=False, type='GeometryNodeTree') -def nodegroup_coconut_stem(nw: NodeWrangler, basic_color=(0.1119, 0.2122, 0.008, 1.0), edge_color=(0.0369, 0.0086, 0.0, 1.0)): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[ - ('NodeSocketGeometry', 'Target', None), - ('NodeSocketFloat', 'radius', 0.0), - ('NodeSocketVectorTranslation', 'Translation', (0.0, 0.0, 1.08)), - ('NodeSocketInt', 'Count', 6), - ('NodeSocketFloat', 'base scale', 0.3), - ('NodeSocketFloat', 'top scale', 0.24), - ('NodeSocketFloat', 'attach threshold', 0.1), - ('NodeSocketFloat', 'attach multiplier', 10.0), - ('NodeSocketFloat', 'calyx width', 0.5), - ('NodeSocketVectorTranslation', 'stem_mid', (0.0, 0.0, 1.0)), - ('NodeSocketVectorTranslation', 'stem_end', (0.0, 0.0, 1.0)), - ('NodeSocketFloat', 'stem_radius', 0.5), - ]) - - coconutcalyx = nw.new_node(nodegroup_coconut_calyx(basic_color=basic_color, - edge_color=edge_color).name, - input_kwargs={'width': group_input.outputs['calyx width']}) - - capture_attribute_1 = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': coconutcalyx.outputs["Geometry"], 2: coconutcalyx.outputs["distance to edge"]}) - - spiral = nw.new_node('GeometryNodeCurveSpiral', - input_kwargs={'Rotations': 1.0, 'Start Radius': group_input.outputs["radius"], 'End Radius': group_input.outputs["radius"], 'Height': 0.0}) - - spline_parameter = nw.new_node(Nodes.SplineParameter) - - capture_attribute = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': spiral, 2: spline_parameter.outputs["Factor"]}) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': capture_attribute.outputs["Geometry"], 
'Translation': group_input.outputs["Translation"]}) - - curve_to_points = nw.new_node(Nodes.CurveToPoints, - input_kwargs={'Curve': transform, 'Count': group_input.outputs["Count"]}) - - align_euler_to_vector = nw.new_node(Nodes.AlignEulerToVector, - input_kwargs={'Rotation': curve_to_points.outputs["Rotation"]}, - attrs={'axis': 'Z'}) - - map_range_2 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': capture_attribute.outputs[2], 3: group_input.outputs["base scale"], 4: group_input.outputs["top scale"]}, - attrs={'interpolation_type': 'SMOOTHERSTEP'}) - - instance_on_points = nw.new_node(Nodes.InstanceOnPoints, - input_kwargs={'Points': curve_to_points.outputs["Points"], 'Instance': capture_attribute_1.outputs["Geometry"], 'Rotation': align_euler_to_vector, 'Scale': map_range_2.outputs["Result"]}) - - realize_instances = nw.new_node(Nodes.RealizeInstances, - input_kwargs={'Geometry': instance_on_points}) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': capture_attribute.outputs[2], 4: 0.01}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'Z': map_range_1.outputs["Result"]}) - - attachtonearest = nw.new_node(nodegroup_attach_to_nearest().name, - input_kwargs={'Geometry': realize_instances, 'Target': group_input.outputs["Target"], 'threshold': group_input.outputs["attach threshold"], 'multiplier': group_input.outputs["attach multiplier"], 'Offset': combine_xyz}) - - basicstem = nw.new_node(nodegroup_basic_stem(basic_color).name, - input_kwargs={'cross_radius': group_input.outputs['stem_radius'], - 'quad_mid': group_input.outputs['stem_mid'], - 'quad_end': group_input.outputs['stem_end'], - 'Translation': (0.0, 0.0, 0.98), - 'Scale': (1.0, 1.0, 1.0)}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [basicstem, attachtonearest]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': join_geometry, 'distance to edge': capture_attribute_1.outputs[2]}) - -### pineapple ### -def shader_leaf(nw: NodeWrangler, basic_color): - # Code generated using version 2.4.3 of the node_transpiler - - texture_coordinate_1 = nw.new_node(Nodes.TextureCoord) - - noise_texture_1 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': texture_coordinate_1.outputs["Object"], 'Scale': 3.48, 'Detail': 10.0, 'Roughness': 0.7}) - - separate_rgb = nw.new_node(Nodes.SeparateColor, - input_kwargs={'Color': noise_texture_1.outputs["Color"]}) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': separate_rgb.outputs["Green"], 1: 0.4, 2: 0.7, 3: 0.48, 4: 0.55}, - attrs={'interpolation_type': 'SMOOTHSTEP'}) - - map_range_3 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': separate_rgb.outputs["Red"], 1: 0.52, 2: 0.48, 3: 0.32, 4: 0.74}, - attrs={'interpolation_type': 'SMOOTHSTEP'}) - - map_range_2 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': separate_rgb.outputs["Blue"], 1: 0.4, 2: 0.7, 3: 0.94, 4: 1.1}, - attrs={'interpolation_type': 'SMOOTHSTEP'}) - - hue_saturation_value = nw.new_node('ShaderNodeHueSaturation', - input_kwargs={'Hue': map_range_1.outputs["Result"], - 'Saturation': map_range_3.outputs["Result"], - 'Value': map_range_2.outputs["Result"], - 'Color': basic_color}) # (0.0545, 0.1981, 0.0409, 1.0) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': hue_saturation_value, 'Specular': 0.5955, 'Roughness': 1.0}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': principled_bsdf}) - 
-@node_utils.to_nodegroup('nodegroup_pineapple_leaf', singleton=False, type='GeometryNodeTree') -def nodegroup_pineapple_leaf(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketIntUnsigned', 'Resolution', 8), - ('NodeSocketVectorTranslation', 'Start', (0.0, 0.0, 0.0)), - ('NodeSocketVectorTranslation', 'Middle', (0.0, -0.32, 3.72)), - ('NodeSocketVectorTranslation', 'End', (0.0, 0.92, 4.32))]) - - quadratic_bezier_1 = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Resolution': group_input.outputs["Resolution"], 'Start': group_input.outputs["Start"], 'Middle': group_input.outputs["Middle"], 'End': group_input.outputs["End"]}) - - spline_parameter_1 = nw.new_node(Nodes.SplineParameter) - - float_curve_1 = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': spline_parameter_1.outputs["Factor"]}) - node_utils.assign_curve(float_curve_1.mapping.curves[0], [(0.0, 1.0), (0.6818, 0.5063), (1.0, 0.0)]) - - set_curve_radius_1 = nw.new_node(Nodes.SetCurveRadius, - input_kwargs={'Curve': quadratic_bezier_1, 'Radius': float_curve_1}) - - curve_circle_1 = nw.new_node(Nodes.CurveCircle, - input_kwargs={'Resolution': group_input.outputs["Resolution"]}) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': curve_circle_1.outputs["Curve"], 'Scale': (0.5, 0.1, 1.0)}) - - position = nw.new_node(Nodes.InputPosition) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': position}) - - absolute = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz.outputs["X"]}, - attrs={'operation': 'ABSOLUTE'}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: absolute}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'Y': multiply}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': transform, 'Offset': combine_xyz}) - - curve_to_mesh_1 = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': set_curve_radius_1, 'Profile Curve': set_position, 'Fill Caps': True}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': curve_to_mesh_1}) - -@node_utils.to_nodegroup('nodegroup_pineapple_crown', singleton=False, type='GeometryNodeTree') -def nodegroup_pineapple_crown(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - spiral_1 = nw.new_node('GeometryNodeCurveSpiral', - input_kwargs={'Resolution': 10, 'Rotations': 5.0, 'Start Radius': 0.01, 'End Radius': 0.01, 'Height': 0.0}) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Leaf', None), - ('NodeSocketVectorTranslation', 'translation', (0.0, 0.0, 0.7)), - ('NodeSocketVectorEuler', 'rotation base', (-0.4363, 0.0, 0.0)), - ('NodeSocketInt', 'number of leaves', 75), - ('NodeSocketFloat', 'noise amount', 0.1), - ('NodeSocketFloat', 'noise scale', 50.0), - ('NodeSocketFloat', 'scale base', 0.4), - ('NodeSocketFloat', 'scale z base', 0.12), - ('NodeSocketFloat', 'scale z top', 0.68), - ('NodeSocketFloat', 'rot z base', -0.64), - ('NodeSocketFloat', 'rot z top', 0.38)]) - - transform_4 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': spiral_1, 'Translation': group_input.outputs["translation"]}) - - resample_curve_1 = nw.new_node(Nodes.ResampleCurve, - input_kwargs={'Curve': transform_4, 'Count': group_input.outputs["number of leaves"]}) - - surfacebump = nw.new_node(nodegroup_surface_bump().name, - input_kwargs={'Geometry': resample_curve_1, 
'Displacement': group_input.outputs["noise amount"], 'Scale': group_input.outputs["noise scale"]}) - - curve_tangent_1 = nw.new_node(Nodes.CurveTangent) - - align_euler_to_vector_1 = nw.new_node(Nodes.AlignEulerToVector, - input_kwargs={'Vector': curve_tangent_1}) - - rotate_euler_3 = nw.new_node(Nodes.RotateEuler, - input_kwargs={'Rotation': align_euler_to_vector_1, 'Rotate By': group_input.outputs["rotation base"]}, - attrs={'space': 'LOCAL'}) - - spline_parameter_2 = nw.new_node(Nodes.SplineParameter) - - random_value = nw.new_node(Nodes.RandomValue, - input_kwargs={2: -0.1, 3: 0.1}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: spline_parameter_2.outputs["Factor"], 1: random_value.outputs[1]}) - - map_range_2 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': add, 3: 0.2}) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': map_range_2.outputs["Result"], 3: group_input.outputs["rot z base"], 4: group_input.outputs["rot z top"]}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': map_range_1.outputs["Result"]}) - - rotate_euler_2 = nw.new_node(Nodes.RotateEuler, - input_kwargs={'Rotation': rotate_euler_3, 'Rotate By': combine_xyz_1}, - attrs={'space': 'LOCAL'}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': map_range_2.outputs["Result"], 3: group_input.outputs["scale z base"], 4: group_input.outputs["scale z top"]}, - attrs={'interpolation_type': 'SMOOTHERSTEP'}) - - combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': group_input.outputs["scale base"], 'Y': map_range.outputs["Result"], 'Z': map_range.outputs["Result"]}) - - instance_on_points_2 = nw.new_node(Nodes.InstanceOnPoints, - input_kwargs={'Points': surfacebump, 'Instance': group_input.outputs["Leaf"], 'Rotation': rotate_euler_2, 'Scale': combine_xyz_3}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': instance_on_points_2}) - -@node_utils.to_nodegroup('nodegroup_pineapple_stem', singleton=False, type='GeometryNodeTree') -def nodegroup_pineapple_stem(nw: NodeWrangler, basic_color): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketIntUnsigned', 'Resolution', 16), - ('NodeSocketVectorTranslation', 'Start', (0.0, 0.0, 0.0)), - ('NodeSocketVectorTranslation', 'Middle', (0.0, -0.32, 3.72)), - ('NodeSocketVectorTranslation', 'End', (0.0, 0.92, 4.32)), - ('NodeSocketVectorTranslation', 'translation', (0.0, 0.0, 0.7)), - ('NodeSocketVectorEuler', 'rotation base', (-0.5236, 0.0, 0.0)), - ('NodeSocketInt', 'number of leaves', 75), - ('NodeSocketFloat', 'noise amount', 0.1), - ('NodeSocketFloat', 'noise scale', 20.0), - ('NodeSocketFloat', 'scale base', 0.5), - ('NodeSocketFloat', 'scale z base', 0.15), - ('NodeSocketFloat', 'scale z top', 0.62), - ('NodeSocketFloat', 'rot z base', -0.62), - ('NodeSocketFloat', 'rot z top', 0.54)]) - - pineappleleaf = nw.new_node(nodegroup_pineapple_leaf().name, - input_kwargs={'Resolution': group_input.outputs["Resolution"], 'Start': group_input.outputs["Start"], 'Middle': group_input.outputs["Middle"], 'End': group_input.outputs["End"]}) - - set_material_2 = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': pineappleleaf, 'Material': surface.shaderfunc_to_material(shader_leaf, basic_color)}) - - pineapplecrown = nw.new_node(nodegroup_pineapple_crown().name, - input_kwargs={'Leaf': set_material_2, 'translation': group_input.outputs["translation"], 'rotation base': group_input.outputs["rotation 
base"], 'noise amount': group_input.outputs["noise amount"], 'noise scale': group_input.outputs["noise scale"], 'scale base': group_input.outputs["scale base"], 'scale z base': group_input.outputs["scale z base"], 'scale z top': group_input.outputs["scale z top"], 'rot z base': group_input.outputs["rot z base"], 'rot z top': group_input.outputs["rot z top"], 'number of leaves': group_input.outputs['number of leaves']}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': pineapplecrown}) - - diff --git a/infinigen/assets/fruits/strawberry.py b/infinigen/assets/fruits/strawberry.py deleted file mode 100644 index c5d7f7857..000000000 --- a/infinigen/assets/fruits/strawberry.py +++ /dev/null @@ -1,102 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Yiming Zuo - - -import bpy -import mathutils -import numpy as np -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category, hsv2rgba -from infinigen.core import surface - -from infinigen.core.util.math import FixedSeed -from infinigen.core.util import blender as butil -from infinigen.core.placement.factory import AssetFactory - -from infinigen.assets.fruits.general_fruit import FruitFactoryGeneralFruit - -class FruitFactoryStrawberry(FruitFactoryGeneralFruit): - def __init__(self, factory_seed, scale=1.0, coarse=False): - super().__init__(factory_seed, scale=scale, coarse=coarse) - self.name = 'strawberry' - - def sample_cross_section_params(self, surface_resolution=256): - return { - 'cross_section_name': "circle_cross_section", - 'cross_section_func_args': {}, - 'cross_section_input_args': {'random seed': uniform(-100, 100), - 'radius': normal(1.0, 0.02), - 'Resolution': surface_resolution}, - 'cross_section_output_args': {} - } - - def sample_shape_params(self, surface_resolution=256): - return { - 'shape_name': "shape_quadratic", - 'shape_func_args': {'radius_control_points': [(0.0, 0.0), (0.0227, 0.1313), (0.2227, 0.4406), (uniform(0.55, 0.7), uniform(0.7, 0.78)), (0.925, 0.4719), (1.0, 0.0)]}, - 'shape_input_args': {'Profile Curve': 'noderef-crosssection-Geometry', - 'Start': (uniform(-0.2, 0.2), uniform(-0.2, 0.2), uniform(-0.5, -1.0)), - 'End': (0.0, 0.0, 1.0), - 'random seed pos': uniform(-100, 100), - 'Resolution': surface_resolution}, - 'shape_output_args': {} - } - - def sample_surface_params(self): - main_color = np.array((0.0, 0.995, 0.85)) - main_color[0] += np.random.normal(0.0, 0.02) - main_color[1] += np.random.normal(0.0, 0.05) - main_color[2] += np.random.normal(0.0, 0.05) - main_color_rgba = hsv2rgba(main_color) - - top_color = np.array((0.15, 0.75, 0.75)) - top_color[0] += np.random.normal(0.0, 0.02) - top_color[1] += np.random.normal(0.0, 0.05) - top_color[2] += np.random.normal(0.0, 0.05) - top_color_rgba = hsv2rgba(top_color) - - return { - 'surface_name': "strawberry_surface", - 'surface_func_args': {'top_pos': uniform(0.85, 0.95), - 'main_color': main_color_rgba, - 'top_color': top_color_rgba}, - 'surface_input_args': {'Geometry': 'noderef-shapequadratic-Mesh', - 'spline parameter': 'noderef-shapequadratic-spline parameter', - 'Distance Min': 0.15, - 'Strength': 1.5, - 'noise random seed': uniform(-100, 100)}, - 'surface_output_args': {'strawberry seed height': 'noderef-fruitsurface-curve 
parameters'}, - 'surface_resolution': 64, - 'scale_multiplier': 0.5 - } - - def sample_stem_params(self): - stem_color = np.array((0.28, 0.91, 0.45)) - stem_color[0] += np.random.normal(0.0, 0.02) - stem_color[1] += np.random.normal(0.0, 0.05) - stem_color[2] += np.random.normal(0.0, 0.05) - stem_color_rgba = hsv2rgba(stem_color) - - stem_color = np.array((0.28, 0.91, 0.45)) - stem_color[0] += np.random.normal(0.0, 0.02) - stem_color[1] += np.random.normal(0.0, 0.05) - stem_color[2] += np.random.normal(0.0, 0.05) - stem_color_rgba = hsv2rgba(stem_color) - - return { - 'stem_name': "calyx_stem", - 'stem_func_args': {'stem_color': stem_color_rgba}, - 'stem_input_args': {'Geometry': 'noderef-fruitsurface-Geometry', - 'fork number': randint(8, 13), - 'outer radius': uniform(0.7, 0.9), - 'noise random seed': uniform(-100, 100), - 'quad_mid': (uniform(-0.1, 0.1), uniform(-0.1, 0.1), uniform(0.15, 0.2)), - 'quad_end': (uniform(-0.2, 0.2), uniform(-0.2, 0.2), uniform(0.3, 0.4)), - 'cross_radius': uniform(0.035, 0.045), - 'Translation': (0.0, 0.0, 0.97)}, - 'stem_output_args': {} - } \ No newline at end of file diff --git a/infinigen/assets/fruits/surfaces/apple_surface.py b/infinigen/assets/fruits/surfaces/apple_surface.py deleted file mode 100644 index 167962e56..000000000 --- a/infinigen/assets/fruits/surfaces/apple_surface.py +++ /dev/null @@ -1,91 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Yiming Zuo - - -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface - -from infinigen.assets.fruits.fruit_utils import nodegroup_add_dent - -def shader_apple_shader(nw: NodeWrangler, color1, color2, random_seed): - # Code generated using version 2.4.3 of the node_transpiler - - texture_coordinate = nw.new_node(Nodes.TextureCoord) - - value = nw.new_node(Nodes.Value) - value.outputs[0].default_value = random_seed - - add = nw.new_node(Nodes.VectorMath, - input_kwargs={0: texture_coordinate.outputs["Object"], 1: value}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': add.outputs["Vector"]}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz.outputs["Z"], 1: 0.2}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': separate_xyz.outputs["X"], 'Y': separate_xyz.outputs["Y"], 'Z': multiply}) - - musgrave_texture_2 = nw.new_node(Nodes.MusgraveTexture, - input_kwargs={'Vector': combine_xyz, 'Scale': 10.0, 'Detail': 10.0, 'Dimension': 0.3, 'Lacunarity': 3.0}) - - musgrave_texture = nw.new_node(Nodes.MusgraveTexture, - input_kwargs={'Vector': add.outputs["Vector"], 'Scale': 0.6, 'Lacunarity': 1.0}) - - rgb = nw.new_node(Nodes.RGB) - rgb.outputs[0].default_value = color1 # - - rgb_1 = nw.new_node(Nodes.RGB) - rgb_1.outputs[0].default_value = color2 # - - mix = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': musgrave_texture, 'Color1': rgb, 'Color2': rgb_1}) - - hue_saturation_value = nw.new_node('ShaderNodeHueSaturation', - input_kwargs={'Hue': 0.55, 'Color': mix}) - - mix_3 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': musgrave_texture_2, 'Color1': mix, 'Color2': hue_saturation_value}) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, 
- input_kwargs={'Base Color': mix_3}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': principled_bsdf}) - -@node_utils.to_nodegroup('nodegroup_apple_surface', singleton=False, type='GeometryNodeTree') -def nodegroup_apple_surface(nw: NodeWrangler, - color1=(0.2881, 0.6105, 0.0709, 1.0), - color2=(0.7454, 0.6172, 0.0296, 1.0), - random_seed=0.0, - dent_control_points=[(0.0045, 0.3719), (0.0727, 0.4532), (0.2273, 0.4844), (0.5568, 0.5125), (1.0, 0.5)]): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketFloat', 'spline parameter', 0.0), - ('NodeSocketVector', 'spline tangent', (0.0, 0.0, 0.0)), - ('NodeSocketFloat', 'distance to center', 0.0)]) - - adddent = nw.new_node(nodegroup_add_dent(dent_control_points=dent_control_points).name, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 'spline parameter': group_input.outputs["spline parameter"], 'spline tangent': group_input.outputs["spline tangent"], 'distance to center': group_input.outputs["distance to center"], 'intensity': 1.5, 'max radius': 1.5}) - - adddent_1 = nw.new_node(nodegroup_add_dent(dent_control_points=dent_control_points).name, - input_kwargs={'Geometry': adddent, 'spline parameter': group_input.outputs["spline parameter"], 'spline tangent': group_input.outputs["spline tangent"], 'distance to center': group_input.outputs["distance to center"], 'bottom': True, 'intensity': -1.0, 'max radius': 1.5}) - - set_material = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': adddent_1, 'Material': surface.shaderfunc_to_material(shader_apple_shader, color1, color2, random_seed)}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_material}) - - diff --git a/infinigen/assets/fruits/surfaces/blackberry_surface.py b/infinigen/assets/fruits/surfaces/blackberry_surface.py deleted file mode 100644 index e5eedbb82..000000000 --- a/infinigen/assets/fruits/surfaces/blackberry_surface.py +++ /dev/null @@ -1,113 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
- -# Authors: Yiming Zuo - - -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface - -from infinigen.assets.fruits.fruit_utils import nodegroup_shape_quadratic, nodegroup_random_rotation_scale, nodegroup_surface_bump, nodegroup_point_on_mesh, nodegroup_instance_on_points -from infinigen.assets.fruits.cross_section_lib import nodegroup_circle_cross_section - -def shader_berry_shader(nw: NodeWrangler, berry_color): - # Code generated using version 2.4.3 of the node_transpiler - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': berry_color, 'Specular': 0.5705, 'Roughness': 0.2}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': principled_bsdf}) - -def shader_hair_shader(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - texture_coordinate = nw.new_node(Nodes.TextureCoord) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': texture_coordinate.outputs["Object"], 'Scale': 0.8, 'Detail': 10.0, 'Roughness': 0.7}) - - separate_rgb = nw.new_node(Nodes.SeparateColor, - input_kwargs={'Color': noise_texture.outputs["Color"]}, - attrs={'mode': 'HSV'}) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': separate_rgb.outputs["Green"], 1: 0.4, 2: 0.7, 3: 0.48, 4: 0.55}, - attrs={'interpolation_type': 'SMOOTHSTEP'}) - - map_range_2 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': separate_rgb.outputs["Blue"], 1: 0.4, 2: 0.7, 3: 0.4}, - attrs={'interpolation_type': 'SMOOTHSTEP'}) - - hue_saturation_value = nw.new_node('ShaderNodeHueSaturation', - input_kwargs={'Hue': map_range_1.outputs["Result"], 'Value': map_range_2.outputs["Result"], 'Color': (0.6939, 0.2307, 0.0529, 1.0)}) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': hue_saturation_value}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': principled_bsdf}) - -@node_utils.to_nodegroup('nodegroup_blackberry_surface', singleton=False, type='GeometryNodeTree') -def nodegroup_blackberry_surface(nw: NodeWrangler, berry_color=(0.0212, 0.0212, 0.0284, 1.0)): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketFloat', 'spline parameter', 0.5)]) - - surfacebump = nw.new_node(nodegroup_surface_bump().name, - input_kwargs={'Geometry': group_input.outputs['Geometry'], 'Displacement': 0.5, 'Scale': 0.5}) - - pointonmesh = nw.new_node(nodegroup_point_on_mesh().name, - input_kwargs={'Mesh': surfacebump, 'Distance Min': 0.4, 'spline parameter': group_input.outputs['spline parameter'], 'noise amount': 0.5, 'noise scale': 2.0}) - - randomrotationscale = nw.new_node(nodegroup_random_rotation_scale().name, - input_kwargs={'rot mean': (3.89, 0.0, 0.0)}) - - uv_sphere_2 = nw.new_node(Nodes.MeshUVSphere, - input_kwargs={'Segments': 32, 'Rings': 16}) - - surfacebump_1 = nw.new_node(nodegroup_surface_bump().name, - input_kwargs={'Geometry': uv_sphere_2, 'Displacement': 0.5, 'Scale': 0.3}) - - subdivision_surface = nw.new_node(Nodes.SubdivisionSurface, - input_kwargs={'Mesh': surfacebump_1}) - - set_material = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': subdivision_surface, 'Material': 
surface.shaderfunc_to_material(shader_berry_shader, berry_color)}) - - circlecrosssection_1 = nw.new_node(nodegroup_circle_cross_section().name, - input_kwargs={'noise amount': 0.0, 'Resolution': 8, 'radius': 0.15}) - - shapequadratic_1 = nw.new_node(nodegroup_shape_quadratic().name, - input_kwargs={'Profile Curve': circlecrosssection_1, 'random seed tilt': 0.0, 'noise scale tilt': 0.0, 'noise amount tilt': 0.0, 'noise scale pos': 1.0, - 'noise amount pos': 2.0, 'Resolution': 8, 'Start': (0.0, 0.0, 0.0), - 'Middle': (0.0, 0.0, -1.0), 'End': (0.0, 0.0, -2.0)}) - - value_4 = nw.new_node(Nodes.Value) - value_4.outputs[0].default_value = 0.2 - - transform_3 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': shapequadratic_1, 'Translation': (0.0, 0.0, -1.0), 'Scale': value_4}) - - set_material_3 = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': transform_3, 'Material': surface.shaderfunc_to_material(shader_hair_shader)}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [set_material, set_material_3]}) - - instanceonpoints = nw.new_node(nodegroup_instance_on_points().name, - input_kwargs={'rotation base': pointonmesh.outputs["Rotation"], 'rotation delta': randomrotationscale.outputs["Vector"], 'translation': (0.0, -0.5, 0.0), - 'scale': randomrotationscale.outputs["Value"], 'Points': pointonmesh.outputs["Geometry"], 'Instance': join_geometry}) - - realize_instances = nw.new_node(Nodes.RealizeInstances, - input_kwargs={'Geometry': instanceonpoints}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': realize_instances}) diff --git a/infinigen/assets/fruits/surfaces/coconutgreen_surface.py b/infinigen/assets/fruits/surfaces/coconutgreen_surface.py deleted file mode 100644 index 0b754bce1..000000000 --- a/infinigen/assets/fruits/surfaces/coconutgreen_surface.py +++ /dev/null @@ -1,99 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
- -# Authors: Yiming Zuo - - -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface - -from infinigen.assets.fruits.fruit_utils import nodegroup_add_dent, nodegroup_surface_bump -from infinigen.assets.fruits.surfaces.surface_utils import nodegroup_stripe_pattern - -def shader_coconut_green_shader(nw: NodeWrangler, basic_color, bottom_color): - # Code generated using version 2.4.3 of the node_transpiler - - texture_coordinate_1 = nw.new_node(Nodes.TextureCoord) - - noise_texture_1 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': texture_coordinate_1.outputs["Object"], 'Scale': 1.0, 'Detail': 10.0, 'Roughness': 0.7}) - - separate_rgb = nw.new_node(Nodes.SeparateColor, - input_kwargs={'Color': noise_texture_1.outputs["Color"]}) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': separate_rgb.outputs["Green"], 1: 0.4, 2: 0.7, 3: 0.48, 4: 0.52}, - attrs={'interpolation_type': 'SMOOTHSTEP'}) - - map_range_2 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': separate_rgb.outputs["Blue"], 1: 0.4, 2: 0.7, 3: 0.6}, - attrs={'interpolation_type': 'SMOOTHSTEP'}) - - attribute_1 = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'shape_coordinate'}) - - colorramp = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': attribute_1.outputs["Fac"]}) - colorramp.color_ramp.elements.new(0) - colorramp.color_ramp.elements[0].position = 0.0 - colorramp.color_ramp.elements[0].color = bottom_color # (0.0908, 0.2664, 0.013, 1.0) - colorramp.color_ramp.elements[1].position = 0.01 - colorramp.color_ramp.elements[1].color = bottom_color # (0.0908, 0.2664, 0.013, 1.0) - colorramp.color_ramp.elements[2].position = 1.0 - colorramp.color_ramp.elements[2].color = basic_color # (0.2462, 0.4125, 0.0044, 1.0) - - hue_saturation_value_1 = nw.new_node('ShaderNodeHueSaturation', - input_kwargs={'Hue': map_range_1.outputs["Result"], 'Value': map_range_2.outputs["Result"], 'Color': colorramp.outputs["Color"]}) - - attribute_2 = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'crosssection_coordinate'}) - - group = nw.new_node(nodegroup_stripe_pattern().name, - input_kwargs={'Color': hue_saturation_value_1, 'attribute': attribute_2.outputs["Fac"], 'seed': 10.0}) - - group_1 = nw.new_node(nodegroup_stripe_pattern().name, - input_kwargs={'Color': group, 'attribute': attribute_1.outputs["Fac"], 'voronoi scale': 10.0, 'voronoi randomness': 0.6446, 'seed': -10.0, 'noise amount': 0.48, 'hue min': 1.32, 'hue max': 0.9}) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': group_1, 'Specular': 0.4773, 'Roughness': 0.4455}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': principled_bsdf}) - -@node_utils.to_nodegroup('nodegroup_coconutgreen_surface', singleton=False, type='GeometryNodeTree') -def nodegroup_coconutgreen_surface(nw: NodeWrangler, basic_color=(0.2462, 0.4125, 0.0044, 1.0), bottom_color=(0.0908, 0.2664, 0.013, 1.0)): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketFloat', 'spline parameter', 0.0), - ('NodeSocketVector', 'spline tangent', (0.0, 0.0, 0.0)), - ('NodeSocketFloat', 'distance to center', 0.0), - ('NodeSocketFloat', 'cross section paramater', 
0.5)]) - - surfacebump = nw.new_node(nodegroup_surface_bump().name, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 'Displacement': 0.2, 'Scale': 0.5}) - - surfacebump_1 = nw.new_node(nodegroup_surface_bump().name, - input_kwargs={'Geometry': surfacebump, 'Displacement': 0.0, 'Scale': 10.0}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': group_input.outputs["distance to center"], 1: 0.05, 2: 0.2, 4: 0.68}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["cross section paramater"], 1: map_range.outputs["Result"]}, - attrs={'operation': 'MULTIPLY'}) - - adddent = nw.new_node(nodegroup_add_dent(dent_control_points=[(0.0, 0.4219), (0.0977, 0.4469), (0.2273, 0.4844), (0.5568, 0.5125), (1.0, 0.5)]).name, - input_kwargs={'Geometry': surfacebump_1, 'spline parameter': group_input.outputs["spline parameter"], 'spline tangent': group_input.outputs["spline tangent"], 'distance to center': group_input.outputs["distance to center"], 'bottom': True, 'intensity': multiply, 'max radius': 3.0}) - - set_material_3 = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': adddent, 'Material': surface.shaderfunc_to_material(shader_coconut_green_shader, basic_color, bottom_color)}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_material_3}) \ No newline at end of file diff --git a/infinigen/assets/fruits/surfaces/coconuthairy_surface.py b/infinigen/assets/fruits/surfaces/coconuthairy_surface.py deleted file mode 100644 index 828087be1..000000000 --- a/infinigen/assets/fruits/surfaces/coconuthairy_surface.py +++ /dev/null @@ -1,86 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
- -# Authors: Yiming Zuo - - -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface - -from infinigen.assets.fruits.fruit_utils import nodegroup_point_on_mesh, nodegroup_random_rotation_scale, nodegroup_hair, nodegroup_instance_on_points - -def shader_hair_shader(nw: NodeWrangler, basic_color): - # Code generated using version 2.4.3 of the node_transpiler - - texture_coordinate = nw.new_node(Nodes.TextureCoord) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': texture_coordinate.outputs["Object"], 'Scale': 0.5, 'Detail': 10.0, 'Roughness': 0.7}) - - separate_rgb = nw.new_node(Nodes.SeparateColor, - input_kwargs={'Color': noise_texture.outputs["Color"]}) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': separate_rgb.outputs["Green"], 1: 0.4, 2: 0.7, 3: 0.48, 4: 0.55}, - attrs={'interpolation_type': 'SMOOTHSTEP'}) - - map_range_2 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': separate_rgb.outputs["Blue"], 1: 0.4, 2: 0.7, 3: 0.4}, - attrs={'interpolation_type': 'SMOOTHSTEP'}) - - hue_saturation_value = nw.new_node('ShaderNodeHueSaturation', - input_kwargs={'Hue': map_range_1.outputs["Result"], 'Value': map_range_2.outputs["Result"], 'Color': basic_color}) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': hue_saturation_value}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': principled_bsdf}) - -@node_utils.to_nodegroup('nodegroup_coconuthairy_surface', singleton=False, type='GeometryNodeTree') -def nodegroup_coconuthairy_surface(nw: NodeWrangler, basic_color=(0.9473, 0.552, 0.2623, 1.0)): - # Code generated using version 2.4.3 of the node_transpiler - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketFloat', 'spline parameter', 0.0)]) - - material = nw.new_node('GeometryNodeInputMaterial') - material.material = surface.shaderfunc_to_material(shader_hair_shader, basic_color) - - set_material = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 'Material': material}) - - pointonmesh = nw.new_node(nodegroup_point_on_mesh().name, - input_kwargs={'Mesh': group_input.outputs["Geometry"], 'spline parameter': group_input.outputs["spline parameter"], 'Distance Min': 0.03, 'noise amount': 0.0, 'noise scale': 0.0}) - - randomrotationscale = nw.new_node(nodegroup_random_rotation_scale().name, - input_kwargs={'noise scale': 100.0, 'rot mean': (0.47, 0.0, 4.8), ' rot std z': 100.0, 'scale mean': 0.2, 'scale std': 0.0}) - - hair = nw.new_node(nodegroup_hair().name, - input_kwargs={'length resolution': 1, 'cross section resolution': 1, 'scale': 0.3, 'Radius': 0.03, 'Material': material, 'Middle': (0.0, 0.3, 1.0), 'End': (0.0, -1.4, 2.0)}) - - instanceonpoints = nw.new_node(nodegroup_instance_on_points().name, - input_kwargs={'rotation base': pointonmesh.outputs["Rotation"], 'rotation delta': randomrotationscale.outputs["Vector"], 'translation': (0.0, 0.0, 0.0), 'scale': randomrotationscale.outputs["Value"], 'Points': pointonmesh.outputs["Geometry"], 'Instance': hair}) - - pointonmesh_1 = nw.new_node(nodegroup_point_on_mesh().name, - input_kwargs={'Mesh': group_input.outputs["Geometry"], 'spline parameter': group_input.outputs["spline parameter"], 
'Distance Min': 0.06, 'parameter min': 0.2, 'noise amount': 0.5, 'noise scale': 2.0}) - - randomrotationscale_1 = nw.new_node(nodegroup_random_rotation_scale().name, - input_kwargs={'rot mean': (1.3, 0.0, 0.0), ' rot std z': 3.0, 'scale mean': 0.3, 'scale std': 0.5}) - - hair_1 = nw.new_node(nodegroup_hair().name, - input_kwargs={'scale': 1.0, 'Material': material, 'Middle': (0.0, 0.5, 1.0), 'End': (0.0, -1.9, 2.0)}) - - instanceonpoints_1 = nw.new_node(nodegroup_instance_on_points().name, - input_kwargs={'rotation base': pointonmesh_1.outputs["Rotation"], 'rotation delta': randomrotationscale_1.outputs["Vector"], 'translation': (0.0, 0.0, 0.0), 'scale': randomrotationscale_1.outputs["Value"], 'Points': pointonmesh_1.outputs["Geometry"], 'Instance': hair_1}) - - join_geometry_2 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [set_material, instanceonpoints, instanceonpoints_1]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': join_geometry_2}) \ No newline at end of file diff --git a/infinigen/assets/fruits/surfaces/durian_surface.py b/infinigen/assets/fruits/surfaces/durian_surface.py deleted file mode 100644 index b427031df..000000000 --- a/infinigen/assets/fruits/surfaces/durian_surface.py +++ /dev/null @@ -1,116 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Yiming Zuo - - -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface - -from infinigen.assets.fruits.fruit_utils import nodegroup_manhattan, nodegroup_point_on_mesh, nodegroup_surface_bump - -def shader_durian_shader(nw: NodeWrangler, peak_color, base_color): - # Code generated using version 2.4.3 of the node_transpiler - - texture_coordinate = nw.new_node(Nodes.TextureCoord) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': texture_coordinate.outputs["Object"], 'Scale': 0.8, 'Detail': 10.0, 'Roughness': 0.7}) - - separate_rgb = nw.new_node(Nodes.SeparateColor, - input_kwargs={'Color': noise_texture.outputs["Color"]}) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': separate_rgb.outputs["Green"], 1: 0.4, 2: 0.7, 3: 0.48, 4: 0.55}, - attrs={'interpolation_type': 'SMOOTHSTEP'}) - - map_range_2 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': separate_rgb.outputs["Blue"], 1: 0.4, 2: 0.7, 3: 0.6}, - attrs={'interpolation_type': 'SMOOTHSTEP'}) - - attribute = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'durian thorn coordiante'}) - - colorramp = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': attribute.outputs["Fac"]}) - colorramp.color_ramp.elements[0].position = 0.0 - colorramp.color_ramp.elements[0].color = peak_color - colorramp.color_ramp.elements[1].position = 0.2705 - colorramp.color_ramp.elements[1].color = base_color - - hue_saturation_value = nw.new_node('ShaderNodeHueSaturation', - input_kwargs={'Hue': map_range_1.outputs["Result"], 'Value': map_range_2.outputs["Result"], 'Color': colorramp.outputs["Color"]}) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': hue_saturation_value, 'Specular': 0.1205, 'Roughness': 0.5068}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': 
principled_bsdf}) - -@node_utils.to_nodegroup('nodegroup_durian_surface', singleton=False, type='GeometryNodeTree') -def nodegroup_durian_surface(nw: NodeWrangler, thorn_control_points=[(0.0, 0.0), (0.7318, 0.4344), (1.0, 1.0)], - peak_color=(0.2401, 0.1455, 0.0313, 1.0), base_color=(0.3278, 0.3005, 0.0704, 1.0)): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketFloat', 'displacement', 1.0), - ('NodeSocketFloat', 'spline parameter', 0.0), - ('NodeSocketFloatDistance', 'distance Min', 0.1), - ('NodeSocketFloat', 'noise amount', 0.3), - ('NodeSocketFloat', 'noise scale', 5.0)]) - - surfacebump = nw.new_node(nodegroup_surface_bump().name, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 'Displacement': 0.5, 'Scale': 0.5}) - - normal = nw.new_node(Nodes.InputNormal) - - pointonmesh = nw.new_node(nodegroup_point_on_mesh().name, - input_kwargs={'Mesh': surfacebump, 'spline parameter': group_input.outputs["spline parameter"], 'Distance Min': group_input.outputs["distance Min"], 'noise amount': group_input.outputs["noise amount"], 'noise scale': group_input.outputs["noise scale"]}) - - position_1 = nw.new_node(Nodes.InputPosition) - - geometry_proximity = nw.new_node(Nodes.Proximity, - input_kwargs={'Target': pointonmesh.outputs["Geometry"], 'Source Position': position_1}, - attrs={'target_element': 'POINTS'}) - - manhattan = nw.new_node(nodegroup_manhattan().name, - input_kwargs={'v1': geometry_proximity.outputs["Position"], 'v2': position_1}, - label='manhattan') - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["distance Min"], 1: 2.0}, - attrs={'operation': 'MULTIPLY'}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': manhattan, 2: multiply, 3: 1.0, 4: 0.0}) - - float_curve = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': map_range.outputs["Result"]}) - node_utils.assign_curve(float_curve.mapping.curves[0], thorn_control_points) - - scale = nw.new_node(Nodes.VectorMath, - input_kwargs={0: normal, 'Scale': float_curve}, - attrs={'operation': 'SCALE'}) - - scale_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: scale.outputs["Vector"], 'Scale': group_input.outputs["displacement"]}, - attrs={'operation': 'SCALE'}) - - set_position_1 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': surfacebump, 'Offset': scale_1.outputs["Vector"]}) - - capture_attribute = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': set_position_1, 2: map_range.outputs["Result"]}) - - set_material = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': capture_attribute.outputs["Geometry"], - 'Material': surface.shaderfunc_to_material(shader_durian_shader, peak_color, base_color)}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_material, 'distance to center': capture_attribute.outputs[2]}) \ No newline at end of file diff --git a/infinigen/assets/fruits/surfaces/pineapple_surface.py b/infinigen/assets/fruits/surfaces/pineapple_surface.py deleted file mode 100644 index 41f801139..000000000 --- a/infinigen/assets/fruits/surfaces/pineapple_surface.py +++ /dev/null @@ -1,180 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
- -# Authors: Yiming Zuo - - -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface - -from infinigen.assets.fruits.fruit_utils import nodegroup_point_on_mesh, nodegroup_random_rotation_scale, nodegroup_surface_bump, nodegroup_instance_on_points -from infinigen.assets.fruits.cross_section_lib import nodegroup_circle_cross_section -from infinigen.assets.fruits.stem_lib import nodegroup_pineapple_leaf - -@node_utils.to_nodegroup('nodegroup_pineapple_surface', singleton=False, type='GeometryNodeTree') -def nodegroup_pineapple_surface(nw: NodeWrangler, - color_bottom=(0.0823, 0.0953, 0.0097, 1.0), - color_mid=(0.552, 0.1845, 0.0222, 1.0), - color_top= (0.4508, 0.0999, 0.0003, 1.0), - color_center=(0.8388, 0.5395, 0.314, 1.0)): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketFloat', 'spline parameter', 0.0), - ('NodeSocketFloatDistance', 'point distance', 0.22), - ('NodeSocketFloat', 'cell scale', 0.2), - ('NodeSocketFloat', 'random seed', 0.0)]) - - pointonmesh = nw.new_node(nodegroup_point_on_mesh().name, - input_kwargs={'Mesh': group_input.outputs["Geometry"], 'spline parameter': group_input.outputs["spline parameter"], 'Distance Min': group_input.outputs["point distance"], 'parameter max': 0.999, 'noise amount': 0.05}) - - randomrotationscale = nw.new_node(nodegroup_random_rotation_scale().name, - input_kwargs={'random seed': group_input.outputs["random seed"], ' rot std z': 0.3, 'scale mean': group_input.outputs["cell scale"]}) - - pineapplecellbody = nw.new_node(nodegroup_pineapple_cell_body().name, - input_kwargs={'resolution': 16, 'scale diff': -0.3}) - - set_material = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': pineapplecellbody.outputs["Geometry"], - 'Material': surface.shaderfunc_to_material(shader_cell, color_bottom, color_mid, color_top, color_center)}) - - pineappleleaf = nw.new_node(nodegroup_pineapple_leaf().name, - input_kwargs={'Middle': (0.0, -0.1, 1.0), 'End': (0.0, 0.9, 2.5)}) - - value = nw.new_node(Nodes.Value) - value.outputs[0].default_value = 0.3 - - transform_2 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': pineappleleaf, 'Translation': (0.0, -0.1, 0.3), 'Rotation': (-1.0315, 0.0, 0.0), 'Scale': value}) - - set_material_3 = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': transform_2, 'Material': surface.shaderfunc_to_material(shader_needle, color_center, color_top)}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [set_material, set_material_3]}) - - surfacebump = nw.new_node(nodegroup_surface_bump().name, - input_kwargs={'Geometry': join_geometry, 'Displacement': 0.2, 'Scale': 10.0}) - - instanceonpoints = nw.new_node(nodegroup_instance_on_points().name, - input_kwargs={'rotation base': pointonmesh.outputs["Rotation"], 'rotation delta': randomrotationscale.outputs["Vector"], 'translation': (0.0, 0.0, 0.0), 'scale': randomrotationscale.outputs["Value"], 'Points': pointonmesh.outputs["Geometry"], 'Instance': surfacebump}) - - set_material_1 = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': group_input.outputs["Geometry"], - 'Material': surface.shaderfunc_to_material(shader_cell, color_bottom, color_mid, color_top, color_center)}) - 
- join_geometry_1 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [instanceonpoints, set_material_1]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': join_geometry_1, 'spline parameter': pineapplecellbody.outputs["spline parameter"]}) - - -def shader_needle(nw: NodeWrangler, color1, color2): - # Code generated using version 2.4.3 of the node_transpiler - - texture_coordinate = nw.new_node(Nodes.TextureCoord) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': texture_coordinate.outputs["Object"], 'Scale': 8.0, 'Detail': 0.0}) - - mix = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': noise_texture.outputs["Fac"], - 'Color1': color1, # (0.7758, 0.4678, 0.2346, 1.0) - 'Color2': color2}) # (0.3467, 0.0595, 0.0, 1.0) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': mix}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': principled_bsdf}) - - -def shader_cell(nw: NodeWrangler, color_bottom, color_mid, color_top, color_center): - # Code generated using version 2.4.3 of the node_transpiler - - texture_coordinate = nw.new_node(Nodes.TextureCoord) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': texture_coordinate.outputs["Object"], 'Scale': 4.6}) - - attribute = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'radius'}) - - colorramp = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': attribute.outputs["Fac"]}) - colorramp.color_ramp.elements.new(0) - colorramp.color_ramp.elements.new(0) - colorramp.color_ramp.elements[0].position = 0.0 - colorramp.color_ramp.elements[0].color = color_bottom # (0.0823, 0.0953, 0.0097, 1.0) - colorramp.color_ramp.elements[1].position = 0.67 - colorramp.color_ramp.elements[1].color = color_mid # (0.552, 0.1845, 0.0222, 1.0) - colorramp.color_ramp.elements[2].position = 0.93 - colorramp.color_ramp.elements[2].color = color_top # (0.4508, 0.0999, 0.0003, 1.0) - colorramp.color_ramp.elements[3].position = 1.0 - colorramp.color_ramp.elements[3].color = color_center # (0.8388, 0.5395, 0.314, 1.0) - - hue_saturation_value = nw.new_node('ShaderNodeHueSaturation', - input_kwargs={'Hue': 0.55, 'Color': colorramp.outputs["Color"]}) - - mix = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': noise_texture.outputs["Fac"], 'Color1': hue_saturation_value, 'Color2': colorramp.outputs["Color"]}) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': mix, 'Roughness': 0.2}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': principled_bsdf}) - -@node_utils.to_nodegroup('nodegroup_pineapple_cell_body', singleton=False, type='GeometryNodeTree') -def nodegroup_pineapple_cell_body(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketInt', 'resolution', 0), - ('NodeSocketFloat', 'scale diff', 0.0)]) - - quadratic_bezier = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Resolution': group_input.outputs["resolution"], 'Start': (0.0, 0.0, 0.0), 'Middle': (0.0, 0.0, 0.2), 'End': (0.0, 0.0, 0.4)}) - - spline_parameter = nw.new_node(Nodes.SplineParameter) - - capture_attribute = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': quadratic_bezier, 2: spline_parameter.outputs["Factor"]}) - - float_curve = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': spline_parameter.outputs["Factor"]}) - 
node_utils.assign_curve(float_curve.mapping.curves[0], [(0.0, 1.0), (0.1568, 0.875), (0.8045, 0.5313), (1.0, 0.0)]) - - set_curve_radius = nw.new_node(Nodes.SetCurveRadius, - input_kwargs={'Curve': capture_attribute.outputs["Geometry"], 'Radius': float_curve}) - - circlecrosssection = nw.new_node(nodegroup_circle_cross_section().name, - input_kwargs={'noise scale': 8.0, 'noise amount': 0.4, 'Resolution': 64, 'radius': 1.0}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': set_curve_radius, 'Profile Curve': circlecrosssection}) - - position_1 = nw.new_node(Nodes.InputPosition) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': position_1}) - - greater_than = nw.new_node(Nodes.Compare, - input_kwargs={0: separate_xyz.outputs["Y"]}) - - multiply = nw.new_node(Nodes.VectorMath, - input_kwargs={0: position_1, 1: group_input.outputs["scale diff"]}, - attrs={'operation': 'MULTIPLY'}) - - set_position_1 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': curve_to_mesh, 'Selection': greater_than, 'Offset': multiply.outputs["Vector"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_position_1, 'spline parameter': capture_attribute.outputs[2]}) \ No newline at end of file diff --git a/infinigen/assets/fruits/surfaces/starfruit_surface.py b/infinigen/assets/fruits/surfaces/starfruit_surface.py deleted file mode 100644 index 73ed9c0dc..000000000 --- a/infinigen/assets/fruits/surfaces/starfruit_surface.py +++ /dev/null @@ -1,82 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Yiming Zuo - - -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface - -from infinigen.assets.fruits.fruit_utils import nodegroup_surface_bump, nodegroup_add_dent - -def shader_starfruit_shader(nw: NodeWrangler, base_color, ridge_color): - # Code generated using version 2.4.3 of the node_transpiler - - attribute = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'star parameters'}) - - colorramp = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': attribute.outputs["Color"]}) - colorramp.color_ramp.elements.new(0) - colorramp.color_ramp.elements.new(0) - colorramp.color_ramp.elements[0].position = 0.0 - colorramp.color_ramp.elements[0].color = base_color - colorramp.color_ramp.elements[1].position = 0.9 - colorramp.color_ramp.elements[1].color = base_color - colorramp.color_ramp.elements[2].position = 0.95 - colorramp.color_ramp.elements[2].color = ridge_color - colorramp.color_ramp.elements[3].position = 1.0 - colorramp.color_ramp.elements[3].color = base_color - - translucent_bsdf = nw.new_node(Nodes.TranslucentBSDF, - input_kwargs={'Color': colorramp.outputs["Color"]}) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': colorramp.outputs["Color"], 'Specular': 0.775, 'Roughness': 0.2}) - - mix_shader = nw.new_node(Nodes.MixShader, - input_kwargs={'Fac': 0.7, 1: translucent_bsdf, 2: principled_bsdf}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': mix_shader}) - - -@node_utils.to_nodegroup('nodegroup_starfruit_surface', singleton=False, type='GeometryNodeTree') -def 
nodegroup_starfruit_surface(nw: NodeWrangler, - dent_control_points=[(0.0, 0.4219), (0.0977, 0.4469), (0.2273, 0.4844), (0.5568, 0.5125), (1.0, 0.5)], - base_color=(0.7991, 0.6038, 0.0009, 1.0), - ridge_color=(0.3712, 0.4179, 0.0006, 1.0)): - - # Code generated using version 2.4.3 of the node_transpiler - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketFloat', 'spline parameter', 0.0), - ('NodeSocketVector', 'spline tangent', (0.0, 0.0, 0.0)), - ('NodeSocketFloat', 'distance to center', 0.0), - ('NodeSocketFloat', 'dent intensity', 1.0) - ]) - - adddent = nw.new_node(nodegroup_add_dent(dent_control_points=dent_control_points).name, - input_kwargs={'Geometry': group_input.outputs["Geometry"], - 'spline parameter': group_input.outputs["spline parameter"], - 'spline tangent': group_input.outputs["spline tangent"], - 'distance to center': group_input.outputs["distance to center"], - 'intensity': group_input.outputs["dent intensity"] - }) - - surfacebump_002 = nw.new_node(nodegroup_surface_bump().name, - input_kwargs={'Geometry': adddent, 'Displacement': 0.03, 'Scale': 10.0}) - - set_material = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': surfacebump_002, - 'Material': surface.shaderfunc_to_material(shader_starfruit_shader, base_color, ridge_color)}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_material}) - - \ No newline at end of file diff --git a/infinigen/assets/fruits/surfaces/strawberry_surface.py b/infinigen/assets/fruits/surfaces/strawberry_surface.py deleted file mode 100644 index 95eee0a2c..000000000 --- a/infinigen/assets/fruits/surfaces/strawberry_surface.py +++ /dev/null @@ -1,102 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
- -# Authors: Yiming Zuo - - -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface - -from infinigen.assets.fruits.seed_lib import nodegroup_strawberry_seed -from infinigen.assets.fruits.fruit_utils import nodegroup_point_on_mesh, nodegroup_add_crater, nodegroup_surface_bump, nodegroup_random_rotation_scale, nodegroup_instance_on_points, nodegroup_add_noise_scalar - -def shader_strawberry_shader(nw: NodeWrangler, top_pos, main_color, top_color): - # Code generated using version 2.4.3 of the node_transpiler - - texture_coordinate = nw.new_node(Nodes.TextureCoord) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': texture_coordinate.outputs["Object"], 'Scale': 0.5}) - - attribute = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'strawberry seed height'}) - - colorramp_1 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': attribute.outputs["Color"]}) - colorramp_1.color_ramp.elements.new(0) - colorramp_1.color_ramp.elements[0].position = 0.0 - colorramp_1.color_ramp.elements[0].color = main_color - colorramp_1.color_ramp.elements[1].position = top_pos - colorramp_1.color_ramp.elements[1].color = main_color - colorramp_1.color_ramp.elements[2].position = 1.0 - colorramp_1.color_ramp.elements[2].color = top_color - - hue_saturation_value = nw.new_node('ShaderNodeHueSaturation', - input_kwargs={'Hue': 0.55, 'Saturation': 1.5, 'Value': 0.2, 'Fac': 0.3, 'Color': colorramp_1.outputs["Color"]}) - - mix = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': noise_texture.outputs["Fac"], 'Color1': colorramp_1.outputs["Color"], 'Color2': hue_saturation_value}) - - translucent_bsdf = nw.new_node(Nodes.TranslucentBSDF, - input_kwargs={'Color': mix}) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': mix, 'Specular': 1.0, 'Roughness': 0.15}) - - mix_shader = nw.new_node(Nodes.MixShader, - input_kwargs={'Fac': 0.8, 1: translucent_bsdf, 2: principled_bsdf}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': mix_shader}) - -@node_utils.to_nodegroup('nodegroup_strawberry_surface', singleton=False, type='GeometryNodeTree') -def nodegroup_strawberry_surface(nw: NodeWrangler, top_pos=0.9, main_color=(0.8879, 0.0097, 0.0319, 1.0), top_color=(0.8148, 0.6105, 0.1746, 1.0)): - # Code generated using version 2.4.3 of the node_transpiler - strawberryseed = nw.new_node(nodegroup_strawberry_seed().name) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketFloat', 'spline parameter', 0.0), - ('NodeSocketFloatDistance', 'Distance Min', 0.12), - ('NodeSocketFloat', 'Strength', 0.74), - ('NodeSocketFloat', 'noise random seed', 0.0)]) - - surfacebump = nw.new_node(nodegroup_surface_bump().name, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 'Displacement': 0.4, 'Scale': 0.5}) - - addnoisescalar = nw.new_node(nodegroup_add_noise_scalar().name, - input_kwargs={'noise random seed': group_input.outputs["noise random seed"], - 'value': group_input.outputs["spline parameter"], - 'noise amount': 0.2}) - - pointonmesh = nw.new_node(nodegroup_point_on_mesh().name, - input_kwargs={'Mesh': surfacebump, 'spline parameter': addnoisescalar, 'Distance Min': group_input.outputs["Distance Min"], 'parameter max': top_pos, 'noise amount': 
0.1, 'noise scale': 2.0}) - - addcrater = nw.new_node(nodegroup_add_crater().name, - input_kwargs={'Geometry': surfacebump, 'Points': pointonmesh.outputs["Geometry"], 'Strength': group_input.outputs["Strength"]}) - - surfacebump_1 = nw.new_node(nodegroup_surface_bump().name, - input_kwargs={'Geometry': addcrater, 'Displacement': 0.03, 'Scale': 20.0}) - - set_material = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': surfacebump_1, 'Material': surface.shaderfunc_to_material(shader_strawberry_shader, top_pos, main_color, top_color)}) - - randomrotationscale = nw.new_node(nodegroup_random_rotation_scale().name, - input_kwargs={'rot mean': (-1.571, 0.0, 0.0), 'scale mean': 0.08}) - - instanceonpoints = nw.new_node(nodegroup_instance_on_points().name, - input_kwargs={'rotation base': pointonmesh.outputs["Rotation"], 'rotation delta': randomrotationscale.outputs["Vector"], 'translation': (0.0, 0.3, 0.0), 'scale': randomrotationscale.outputs["Value"], 'Points': pointonmesh.outputs["Geometry"], 'Instance': strawberryseed}) - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [set_material, instanceonpoints]}) - - realize_instances = nw.new_node(Nodes.RealizeInstances, - input_kwargs={'Geometry': join_geometry_1}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': realize_instances, 'curve parameters': addnoisescalar}) \ No newline at end of file diff --git a/infinigen/assets/fruits/surfaces/surface_utils.py b/infinigen/assets/fruits/surfaces/surface_utils.py deleted file mode 100644 index 7d191d909..000000000 --- a/infinigen/assets/fruits/surfaces/surface_utils.py +++ /dev/null @@ -1,56 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
- -# Authors: Yiming Zuo - - -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface - -@node_utils.to_nodegroup('nodegroup_stripe_pattern', singleton=False, type='ShaderNodeTree') -def nodegroup_stripe_pattern(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - texture_coordinate = nw.new_node(Nodes.TextureCoord) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketColor', 'Color', (0.8, 0.8, 0.8, 1.0)), - ('NodeSocketFloat', 'attribute', 0.0), - ('NodeSocketFloat', 'voronoi scale', 50.0), - ('NodeSocketFloatFactor', 'voronoi randomness', 1.0), - ('NodeSocketFloat', 'seed', 0.0), - ('NodeSocketFloat', 'noise scale', 10.0), - ('NodeSocketFloat', 'noise amount', 1.4), - ('NodeSocketFloat', 'hue min', 0.6), - ('NodeSocketFloat', 'hue max', 1.085)]) - - add = nw.new_node(Nodes.VectorMath, - input_kwargs={0: texture_coordinate.outputs["Object"], 1: group_input.outputs["seed"]}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': add.outputs["Vector"], 'Scale': group_input.outputs["noise scale"], 'Detail': 1.0}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: noise_texture.outputs["Fac"], 1: group_input.outputs["noise amount"]}, - attrs={'operation': 'MULTIPLY'}) - - voronoi_texture = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'W': group_input.outputs["attribute"], 'Scale': group_input.outputs["voronoi scale"], 'Randomness': group_input.outputs["voronoi randomness"]}, - attrs={'voronoi_dimensions': '1D'}) - - add_1 = nw.new_node(Nodes.Math, - input_kwargs={0: multiply, 1: voronoi_texture.outputs["Distance"]}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': add_1, 3: group_input.outputs["hue min"], 4: group_input.outputs["hue max"]}) - - hue_saturation_value = nw.new_node('ShaderNodeHueSaturation', - input_kwargs={'Value': map_range.outputs["Result"], 'Color': group_input.outputs["Color"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Color': hue_saturation_value}) diff --git a/infinigen/assets/grassland/dandelion.py b/infinigen/assets/grassland/dandelion.py deleted file mode 100644 index 51a792af5..000000000 --- a/infinigen/assets/grassland/dandelion.py +++ /dev/null @@ -1,670 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
- -# Authors: Beining Han -# Acknowledgement: This file draws inspiration from https://www.youtube.com/watch?v=61Sk8j1Ml9c by BradleyAnimation - -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core import surface -from infinigen.assets.materials import simple_greenery -from infinigen.assets.materials import simple_whitish -from infinigen.assets.materials import simple_brownish -from infinigen.core.placement.factory import AssetFactory -import numpy as np -from infinigen.core.util import blender as butil -from infinigen.core.tagging import tag_object, tag_nodegroup - - -@node_utils.to_nodegroup('nodegroup_pedal_stem_head_geometry', singleton=False, type='GeometryNodeTree') -def nodegroup_pedal_stem_head_geometry(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVectorTranslation', 'Translation', (0.0, 0.0, 1.0)), - ('NodeSocketFloatDistance', 'Radius', 0.04)]) - - uv_sphere_1 = nw.new_node(Nodes.MeshUVSphere, - input_kwargs={'Segments': 64, 'Radius': group_input.outputs["Radius"]}) - - transform_1 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': uv_sphere_1, 'Translation': group_input.outputs["Translation"]}) - - set_material = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': transform_1, - 'Material': surface.shaderfunc_to_material(simple_brownish.shader_simple_brown)}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_material}) - - -@node_utils.to_nodegroup('nodegroup_pedal_stem_end_geometry', singleton=False, type='GeometryNodeTree') -def nodegroup_pedal_stem_end_geometry(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Points', None)]) - - endpoint_selection = nw.new_node('GeometryNodeCurveEndpointSelection', - input_kwargs={'End Size': 0}) - - uv_sphere = nw.new_node(Nodes.MeshUVSphere, - input_kwargs={'Segments': 64, 'Radius': 0.04}) - - vector = nw.new_node(Nodes.Vector) - vector.vector = (uniform(0.45, 0.7), uniform(0.45, 0.7), uniform(2, 3)) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': uv_sphere, 'Scale': vector}) - - cone = nw.new_node('GeometryNodeMeshCone', input_kwargs={'Radius Bottom': 0.0040, 'Depth': 0.0040}) - - normal = nw.new_node(Nodes.InputNormal) - - align_euler_to_vector_1 = nw.new_node(Nodes.AlignEulerToVector, input_kwargs={'Vector': normal}, - attrs={'axis': 'Z'}) - - instance_on_points_1 = nw.new_node(Nodes.InstanceOnPoints, - input_kwargs={'Points': transform, 'Instance': cone.outputs["Mesh"], - 'Rotation': align_euler_to_vector_1}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [instance_on_points_1, transform]}) - - set_material = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': join_geometry, - 'Material': surface.shaderfunc_to_material(simple_brownish.shader_simple_brown)}) - - geometry_to_instance = nw.new_node('GeometryNodeGeometryToInstance', - input_kwargs={'Geometry': set_material}) - - curve_tangent = nw.new_node(Nodes.CurveTangent) - - align_euler_to_vector = nw.new_node(Nodes.AlignEulerToVector, - input_kwargs={'Vector': curve_tangent}, - attrs={'axis': 'Z'}) - - instance_on_points = nw.new_node(Nodes.InstanceOnPoints, - input_kwargs={'Points': 
group_input.outputs["Points"], - 'Selection': endpoint_selection, 'Instance': geometry_to_instance, - 'Rotation': align_euler_to_vector}) - - realize_instances = nw.new_node(Nodes.RealizeInstances, - input_kwargs={'Geometry': instance_on_points}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': realize_instances}) - - -@node_utils.to_nodegroup('nodegroup_pedal_stem_branch_shape', singleton=False, type='GeometryNodeTree') -def nodegroup_pedal_stem_branch_shape(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - pedal_stem_branches_num = nw.new_node(Nodes.Integer, label='pedal_stem_branches_num') - pedal_stem_branches_num.integer = 40 - - group_input = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketFloatDistance', 'Radius', 0.0100)]) - - curve_circle_1 = nw.new_node(Nodes.CurveCircle, - input_kwargs={'Resolution': pedal_stem_branches_num, - 'Radius': group_input.outputs["Radius"]}) - - pedal_stem_branch_length = nw.new_node(Nodes.Value, label='pedal_stem_branch_length') - pedal_stem_branch_length.outputs[0].default_value = 0.5000 - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': pedal_stem_branch_length}) - - curve_line_1 = nw.new_node(Nodes.CurveLine, input_kwargs={'End': combine_xyz_1}) - - resample_curve = nw.new_node(Nodes.ResampleCurve, input_kwargs={'Curve': curve_line_1, 'Count': 40}) - - spline_parameter = nw.new_node(Nodes.SplineParameter) - - float_curve = nw.new_node(Nodes.FloatCurve, input_kwargs={'Value': spline_parameter.outputs["Factor"]}) - node_utils.assign_curve(float_curve.mapping.curves[0], - [(0.0000, 0.0000), - (0.2, 0.08 * np.random.normal(1., 0.15)), - (0.4, 0.22 * np.random.normal(1., 0.2)), - (0.6, 0.45 * np.random.normal(1., 0.2)), - (0.8, 0.7 * np.random.normal(1., 0.1)), - (1.0000, 1.0000)]) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: float_curve, 1: uniform(0.15, 0.4)}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': multiply}) - - set_position = nw.new_node(Nodes.SetPosition, input_kwargs={'Geometry': resample_curve, 'Offset': combine_xyz}) - - normal = nw.new_node(Nodes.InputNormal) - - align_euler_to_vector = nw.new_node(Nodes.AlignEulerToVector, input_kwargs={'Vector': normal}) - - instance_on_points = nw.new_node(Nodes.InstanceOnPoints, - input_kwargs={'Points': curve_circle_1.outputs["Curve"], 'Instance': set_position, - 'Rotation': align_euler_to_vector}) - - random_value_1 = nw.new_node(Nodes.RandomValue, input_kwargs={2: -0.2000, 3: 0.2000, 'Seed': 2}) - - random_value_2 = nw.new_node(Nodes.RandomValue, input_kwargs={2: -0.2000, 3: 0.2000, 'Seed': 1}) - - random_value = nw.new_node(Nodes.RandomValue, input_kwargs={2: -0.2000, 3: 0.2000}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': random_value_1.outputs[1], 'Y': random_value_2.outputs[1], - 'Z': random_value.outputs[1]}) - - rotate_instances = nw.new_node(Nodes.RotateInstances, - input_kwargs={'Instances': instance_on_points, 'Rotation': combine_xyz_2}) - - random_value_3 = nw.new_node(Nodes.RandomValue, input_kwargs={2: 0.8000}) - - scale_instances = nw.new_node(Nodes.ScaleInstances, - input_kwargs={'Instances': rotate_instances, 'Scale': random_value_3.outputs[1]}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Instances': scale_instances}, - attrs={'is_active_output': True}) - - -@node_utils.to_nodegroup('nodegroup_pedal_stem_branch_contour', singleton=False, type='GeometryNodeTree') -def 
nodegroup_pedal_stem_branch_contour(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None)]) - - realize_instances = nw.new_node(Nodes.RealizeInstances, - input_kwargs={'Geometry': group_input.outputs["Geometry"]}) - - pedal_stem_branch_rsample = nw.new_node(Nodes.Value, - label='pedal_stem_branch_rsample') - pedal_stem_branch_rsample.outputs[0].default_value = 10.0 - - resample_curve = nw.new_node(Nodes.ResampleCurve, - input_kwargs={'Curve': realize_instances, 'Count': pedal_stem_branch_rsample}) - - index = nw.new_node(Nodes.Index) - - capture_attribute = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': resample_curve, 5: index}, - attrs={'domain': 'CURVE', 'data_type': 'INT'}) - - spline_parameter = nw.new_node(Nodes.SplineParameter) - - float_curve = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': spline_parameter.outputs["Factor"]}) - - # generate pedal branch contour - dist = uniform(-0.05, -0.25) - node_utils.assign_curve(float_curve.mapping.curves[0], - [(0.0, 0.0), (0.2, 0.2 + (dist + normal(0, 0.05)) / 2.), - (0.4, 0.4 + (dist + normal(0, 0.05))), - (0.6, 0.6 + (dist + normal(0, 0.05)) / 1.2), - (0.8, 0.8 + (dist + normal(0, 0.05)) / 2.4), (1.0, 0.95 + normal(0, 0.05))]) - - random_value = nw.new_node(Nodes.RandomValue, - input_kwargs={2: 0.05, 3: 0.35, 'ID': capture_attribute.outputs[5]}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: float_curve, 1: random_value.outputs[1]}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'Z': multiply}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': capture_attribute.outputs["Geometry"], 'Offset': combine_xyz}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_position}) - - -@node_utils.to_nodegroup('nodegroup_pedal_stem_branch_geometry', singleton=False, type='GeometryNodeTree') -def nodegroup_pedal_stem_branch_geometry(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Curve', None), - ('NodeSocketVectorTranslation', 'Translation', (0.0, 0.0, 1.0))]) - - set_curve_radius_1 = nw.new_node(Nodes.SetCurveRadius, - input_kwargs={'Curve': group_input.outputs["Curve"], 'Radius': 1.0}) - - curve_circle_2 = nw.new_node(Nodes.CurveCircle, - input_kwargs={'Radius': uniform(0.001, 0.0025), 'Resolution': 4}) - - curve_to_mesh_1 = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': set_curve_radius_1, - 'Profile Curve': curve_circle_2.outputs["Curve"], 'Fill Caps': True}) - - transform_2 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': curve_to_mesh_1, - 'Translation': group_input.outputs["Translation"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': transform_2}) - - -@node_utils.to_nodegroup('nodegroup_pedal_stem_geometry', singleton=False, type='GeometryNodeTree') -def nodegroup_pedal_stem_geometry(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVectorTranslation', 'End', (0.0, 0.0, 1.0)), - ('NodeSocketVectorTranslation', 'Middle', (0.0, 0.0, 0.5)), - ('NodeSocketFloatDistance', 'Radius', 0.05)]) - - quadratic_bezier = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Start': (0.0, 0.0, 0.0), 'Middle': 
group_input.outputs["Middle"], - 'End': group_input.outputs["End"]}) - - set_curve_radius = nw.new_node(Nodes.SetCurveRadius, - input_kwargs={'Curve': quadratic_bezier, 'Radius': group_input.outputs["Radius"]}) - - curve_circle = nw.new_node(Nodes.CurveCircle, - input_kwargs={'Radius': 0.2, 'Resolution': 8}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': set_curve_radius, 'Profile Curve': curve_circle.outputs["Curve"], - 'Fill Caps': True}) - - set_material_2 = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': curve_to_mesh, - 'Material': surface.shaderfunc_to_material(simple_whitish.shader_simple_white)}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_material_2, 'Curve': quadratic_bezier}) - - -@node_utils.to_nodegroup('nodegroup_pedal_selection', singleton=False, type='GeometryNodeTree') -def nodegroup_pedal_selection(nw: NodeWrangler, params): - # Code generated using version 2.4.3 of the node_transpiler - - random_value = nw.new_node(Nodes.RandomValue, - input_kwargs={5: 1}) - - greater_than = nw.new_node(Nodes.Math, - input_kwargs={0: params["random_dropout"], 1: random_value.outputs[1]}, - attrs={'operation': 'GREATER_THAN'}) - - index_1 = nw.new_node(Nodes.Index) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'num_segments', 0.5)]) - - divide = nw.new_node(Nodes.Math, - input_kwargs={0: index_1, 1: group_input.outputs["num_segments"]}, - attrs={'operation': 'DIVIDE'}) - - less_than = nw.new_node(Nodes.Math, - input_kwargs={0: divide, 1: params["row_less_than"]}, - attrs={'operation': 'LESS_THAN'}) - - greater_than_1 = nw.new_node(Nodes.Math, - input_kwargs={0: divide, 1: params["row_great_than"]}, - attrs={'operation': 'GREATER_THAN'}) - - op_and = nw.new_node(Nodes.BooleanMath, - input_kwargs={0: less_than, 1: greater_than_1}) - - modulo = nw.new_node(Nodes.Math, - input_kwargs={0: index_1, 1: group_input.outputs["num_segments"]}, - attrs={'operation': 'MODULO'}) - - less_than_1 = nw.new_node(Nodes.Math, - input_kwargs={0: modulo, 1: params["col_less_than"]}, - attrs={'operation': 'LESS_THAN'}) - - greater_than_2 = nw.new_node(Nodes.Math, - input_kwargs={0: modulo, 1: params["col_great_than"]}, - attrs={'operation': 'GREATER_THAN'}) - - op_and_1 = nw.new_node(Nodes.BooleanMath, - input_kwargs={0: less_than_1, 1: greater_than_2}) - - nand = nw.new_node(Nodes.BooleanMath, - input_kwargs={0: op_and, 1: op_and_1}, - attrs={'operation': 'NAND'}) - - op_and_2 = nw.new_node(Nodes.BooleanMath, - input_kwargs={0: greater_than, 1: nand}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Boolean': op_and_2}) - - -@node_utils.to_nodegroup('nodegroup_stem_geometry', singleton=False, type='GeometryNodeTree') -def nodegroup_stem_geometry(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Curve', None)]) - - spline_parameter = nw.new_node(Nodes.SplineParameter) - - value = nw.new_node(Nodes.Value) - value.outputs[0].default_value = uniform(0.2, 0.4) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': spline_parameter.outputs["Factor"], 3: 0.4, 4: value}) - - set_curve_radius_2 = nw.new_node(Nodes.SetCurveRadius, - input_kwargs={'Curve': group_input.outputs["Curve"], - 'Radius': map_range.outputs["Result"]}) - - stem_radius = nw.new_node(Nodes.Value, - label='stem_radius') - stem_radius.outputs[0].default_value = uniform(0.01, 0.024) - - 
curve_circle_3 = nw.new_node(Nodes.CurveCircle, - input_kwargs={'Radius': stem_radius}) - - curve_to_mesh_2 = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': set_curve_radius_2, - 'Profile Curve': curve_circle_3.outputs["Curve"], 'Fill Caps': True}) - - set_material = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': curve_to_mesh_2, - 'Material': surface.shaderfunc_to_material(simple_greenery.shader_simple_greenery)}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Mesh': tag_nodegroup(nw, set_material, 'stem')}) - - -@node_utils.to_nodegroup('nodegroup_pedal_stem', singleton=False, type='GeometryNodeTree') -def nodegroup_pedal_stem(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - pedal_stem_top_point = nw.new_node(Nodes.Vector, - label='pedal_stem_top_point') - pedal_stem_top_point.vector = (0.0, 0.0, 1.0) - - pedal_stem_mid_point = nw.new_node(Nodes.Vector, - label='pedal_stem_mid_point') - pedal_stem_mid_point.vector = (normal(0, 0.05), normal(0, 0.05), 0.5) - - pedal_stem_radius = nw.new_node(Nodes.Value, - label='pedal_stem_radius') - pedal_stem_radius.outputs[0].default_value = uniform(0.02, 0.045) - - pedal_stem_geometry = nw.new_node(nodegroup_pedal_stem_geometry().name, - input_kwargs={'End': pedal_stem_top_point, 'Middle': pedal_stem_mid_point, - 'Radius': pedal_stem_radius}) - - pedal_stem_top_radius = nw.new_node(Nodes.Value, - label='pedal_stem_top_radius') - pedal_stem_top_radius.outputs[0].default_value = uniform(0.005, 0.008) - - pedal_stem_branch_shape = nw.new_node(nodegroup_pedal_stem_branch_shape().name, - input_kwargs={'Radius': pedal_stem_top_radius}) - - pedal_stem_branch_geometry = nw.new_node(nodegroup_pedal_stem_branch_geometry().name, - input_kwargs={'Curve': pedal_stem_branch_shape, - 'Translation': pedal_stem_top_point}) - - set_material_3 = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': pedal_stem_branch_geometry, - 'Material': surface.shaderfunc_to_material(simple_whitish.shader_simple_white)}) - - resample_curve = nw.new_node(Nodes.ResampleCurve, - input_kwargs={'Curve': pedal_stem_geometry.outputs["Curve"]}) - - pedal_stem_end_geometry = nw.new_node(nodegroup_pedal_stem_end_geometry().name, - input_kwargs={'Points': resample_curve}) - - pedal_stem_head_geometry = nw.new_node(nodegroup_pedal_stem_head_geometry().name, - input_kwargs={'Translation': pedal_stem_top_point, - 'Radius': pedal_stem_top_radius}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [pedal_stem_geometry.outputs["Geometry"], set_material_3, - pedal_stem_end_geometry, pedal_stem_head_geometry]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': join_geometry}) - - -@node_utils.to_nodegroup('nodegroup_flower_geometry', singleton=False, type='GeometryNodeTree') -def nodegroup_flower_geometry(nw: NodeWrangler, params): - # Code generated using version 2.4.3 of the node_transpiler - - num_core_segments = nw.new_node(Nodes.Integer, - label='num_core_segments', - attrs={'integer': 10}) - num_core_segments.integer = randint(8, 25) - - num_core_rings = nw.new_node(Nodes.Integer, - label='num_core_rings', - attrs={'integer': 10}) - num_core_rings.integer = randint(8, 20) - - uv_sphere_2 = nw.new_node(Nodes.MeshUVSphere, - input_kwargs={'Segments': num_core_segments, 'Rings': num_core_rings, - 'Radius': uniform(0.02, 0.05)}) - - flower_core_shape = nw.new_node(Nodes.Vector, - label='flower_core_shape') - flower_core_shape.vector = (uniform(0.8, 1.2), 
uniform(0.8, 1.2), uniform(0.5, 0.8)) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': uv_sphere_2, 'Scale': flower_core_shape}) - - selection_params = { - "random_dropout": params["random_dropout"], - "row_less_than": int(params["row_less_than"] * num_core_rings.integer), - "row_great_than": int(params["row_great_than"] * num_core_rings.integer), - "col_less_than": int(params["col_less_than"] * num_core_segments.integer), - "col_great_than": int(params["col_less_than"] * num_core_segments.integer) - } - pedal_selection = nw.new_node(nodegroup_pedal_selection(params=selection_params).name, - input_kwargs={'num_segments': num_core_segments}) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Instance', None)]) - - normal_1 = nw.new_node(Nodes.InputNormal) - - align_euler_to_vector_1 = nw.new_node(Nodes.AlignEulerToVector, - input_kwargs={'Vector': normal_1}, - attrs={'axis': 'Z'}) - - random_value_1 = nw.new_node(Nodes.RandomValue, - input_kwargs={2: 0.4, 3: 0.7}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: random_value_1.outputs[1]}, - attrs={'operation': 'MULTIPLY'}) - - instance_on_points_1 = nw.new_node(Nodes.InstanceOnPoints, - input_kwargs={'Points': transform, 'Selection': pedal_selection, - 'Instance': group_input.outputs["Instance"], - 'Rotation': align_euler_to_vector_1, 'Scale': multiply}) - - realize_instances_1 = nw.new_node(Nodes.RealizeInstances, - input_kwargs={'Geometry': instance_on_points_1}) - - set_material = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': transform, - 'Material': surface.shaderfunc_to_material(simple_whitish.shader_simple_white)}) - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [realize_instances_1, set_material]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': tag_nodegroup(nw, join_geometry_1, 'flower')}) - - -@node_utils.to_nodegroup('nodegroup_flower_on_stem', singleton=False, type='GeometryNodeTree') -def nodegroup_flower_on_stem(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Points', None), - ('NodeSocketGeometry', 'Instance', None)]) - - endpoint_selection = nw.new_node('GeometryNodeCurveEndpointSelection', - input_kwargs={'Start Size': 0}) - - curve_tangent = nw.new_node(Nodes.CurveTangent) - - align_euler_to_vector_2 = nw.new_node(Nodes.AlignEulerToVector, - input_kwargs={'Vector': curve_tangent}, - attrs={'axis': 'Z'}) - - instance_on_points_2 = nw.new_node(Nodes.InstanceOnPoints, - input_kwargs={'Points': group_input.outputs["Points"], - 'Selection': endpoint_selection, - 'Instance': group_input.outputs["Instance"], - 'Rotation': align_euler_to_vector_2}) - - realize_instances_2 = nw.new_node(Nodes.RealizeInstances, - input_kwargs={'Geometry': instance_on_points_2}) - - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Instances': realize_instances_2}) - - -def geometry_dandelion_nodes(nw: NodeWrangler, **kwargs): - # Code generated using version 2.4.3 of the node_transpiler - - quadratic_bezier_1 = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Start': (0.0, 0.0, 0.0), - 'Middle': (normal(0, 0.1), normal(0, 0.1), 0.5), - 'End': (normal(0, 0.1), normal(0, 0.1), 1.0)}) - - resample_curve = nw.new_node(Nodes.ResampleCurve, - input_kwargs={'Curve': quadratic_bezier_1}) - - pedal_stem = nw.new_node(nodegroup_pedal_stem().name) - - geometry_to_instance = 
nw.new_node('GeometryNodeGeometryToInstance', - input_kwargs={'Geometry': pedal_stem}) - - flower_geometry = nw.new_node(nodegroup_flower_geometry(kwargs).name, - input_kwargs={'Instance': geometry_to_instance}) - - geometry_to_instance_1 = nw.new_node('GeometryNodeGeometryToInstance', - input_kwargs={'Geometry': flower_geometry}) - - value_2 = nw.new_node(Nodes.Value) - value_2.outputs[0].default_value = uniform(-0.15, -0.5) - - transform_3 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': geometry_to_instance_1, 'Scale': value_2}) - - flower_on_stem = nw.new_node(nodegroup_flower_on_stem().name, - input_kwargs={'Points': resample_curve, 'Instance': transform_3}) - - stem_geometry = nw.new_node(nodegroup_stem_geometry().name, - input_kwargs={'Curve': quadratic_bezier_1}) - - join_geometry_2 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [flower_on_stem, stem_geometry]}) - - realize_instances = nw.new_node(Nodes.RealizeInstances, - input_kwargs={'Geometry': join_geometry_2}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': realize_instances}) - - -def geometry_dandelion_seed_nodes(nw: NodeWrangler, **kwargs): - # Code generated using version 2.4.3 of the node_transpiler - - pedal_stem = nw.new_node(nodegroup_pedal_stem().name) - - geometry_to_instance = nw.new_node('GeometryNodeGeometryToInstance', - input_kwargs={'Geometry': pedal_stem}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': geometry_to_instance}) - - -class DandelionFactory(AssetFactory): - def __init__(self, factory_seed, coarse=False): - super(DandelionFactory, self).__init__(factory_seed, coarse=coarse) - self.flower_mode = ['full_flower', 'no_flower', 'top_half_flower', 'top_missing_flower', 'sparse_flower'] - self.flower_mode_pb = [0.4, 0.04, 0.23, 0.13, 0.2] - - def get_mode_params(self, mode): - if mode == 'full_flower': - # generate a flower with full seeds - return { - "random_dropout": uniform(0.5, 1.0), - "row_less_than": 0.0, - "row_great_than": 0.0, - "col_less_than": 0.0, - "col_great_than": 0.0 - } - elif mode == 'no_flower': - # generate a flower with no seeds - return { - "random_dropout": 0.0, - "row_less_than": 1.0, - "row_great_than": 0.0, - "col_less_than": 1.0, - "col_great_than": 0.0 - } - elif mode == 'top_half_flower': - # generate a flower with no seeds at bottom half - return { - "random_dropout": uniform(0.6, 1.0), - "row_less_than": uniform(0.3, 0.5), - "row_great_than": 0.0, - "col_less_than": 1.0, - "col_great_than": 0.0 - } - elif mode == 'top_missing_flower': - # generate a flower with no seeds at bottom half - col = uniform(0.3, 1.0) - return { - "random_dropout": uniform(0.5, 0.9), - "row_less_than": 1.0, - "row_great_than": uniform(0.5, 0.7), - "col_less_than": col, - "col_great_than": col - uniform(0.2, 0.4) - } - elif mode == 'sparse_flower': - # generate a flower with no seeds at bottom half - return { - "random_dropout": uniform(0.3, 0.5), - "row_less_than": 0.0, - "row_great_than": 0.0, - "col_less_than": 0.0, - "col_great_than": 0.0 - } - else: - raise NotImplementedError - - def create_asset(self, **params): - bpy.ops.mesh.primitive_plane_add( - size=1, enter_editmode=False, align='WORLD', location=(0, 0, 0), scale=(1, 1, 1)) - obj = bpy.context.active_object - - mode = np.random.choice(self.flower_mode, p=self.flower_mode_pb) - params = self.get_mode_params(mode) - - surface.add_geomod(obj, geometry_dandelion_nodes, apply=True, attributes=[], input_kwargs=params) - tag_object(obj, 'dandelion') - return 
obj - - -class DandelionSeedFactory(AssetFactory): - def __init__(self, factory_seed, coarse=False): - super(DandelionSeedFactory, self).__init__(factory_seed, coarse=coarse) - - def create_asset(self, **params): - bpy.ops.mesh.primitive_plane_add( - size=1, enter_editmode=False, align='WORLD', location=(0, 0, 0), scale=(1, 1, 1)) - obj = bpy.context.active_object - - surface.add_geomod(obj, geometry_dandelion_seed_nodes, apply=True, attributes=[], input_kwargs=params) - tag_object(obj, 'seed') - return obj - - -if __name__ == '__main__': - f = DandelionSeedFactory(0) - obj = f.create_asset() \ No newline at end of file diff --git a/infinigen/assets/grassland/flower.py b/infinigen/assets/grassland/flower.py deleted file mode 100644 index 711538326..000000000 --- a/infinigen/assets/grassland/flower.py +++ /dev/null @@ -1,597 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Alexander Raistrick, Alejandro Newell - - -# Code generated using version v2.0.1 of the node_transpiler -import bpy -import mathutils -from numpy.random import uniform, normal -import numpy as np - -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core import surface - -from infinigen.core.placement.factory import AssetFactory -from infinigen.core.util import blender as butil, color -from infinigen.core.util.math import FixedSeed, dict_lerp -from infinigen.core.tagging import tag_object, tag_nodegroup - -@node_utils.to_nodegroup('nodegroup_polar_to_cart_old', singleton=True) -def nodegroup_polar_to_cart_old(nw): - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'Addend', (0.0, 0.0, 0.0)), - ('NodeSocketFloat', 'Value', 0.5), - ('NodeSocketVector', 'Vector', (0.0, 0.0, 0.0))]) - - cosine = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Value"]}, - attrs={'operation': 'COSINE'}) - - sine = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Value"]}, - attrs={'operation': 'SINE'}) - - combine_xyz_4 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'Y': cosine, 'Z': sine}) - - multiply_add = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["Vector"], 1: combine_xyz_4, 2: group_input.outputs["Addend"]}, - attrs={'operation': 'MULTIPLY_ADD'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Vector': multiply_add.outputs["Vector"]}) - -@node_utils.to_nodegroup('nodegroup_follow_curve', singleton=True) -def nodegroup_follow_curve(nw): - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketGeometry', 'Curve', None), - ('NodeSocketFloat', 'Curve Min', 0.5), - ('NodeSocketFloat', 'Curve Max', 1.0)]) - - position = nw.new_node(Nodes.InputPosition) - - capture_attribute = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 1: position}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': capture_attribute.outputs["Attribute"]}) - - attribute_statistic = nw.new_node(Nodes.AttributeStatistic, - input_kwargs={'Geometry': capture_attribute.outputs["Geometry"], 2: separate_xyz.outputs["Z"]}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': separate_xyz.outputs["Z"], 1: attribute_statistic.outputs["Min"], 2: 
attribute_statistic.outputs["Max"], 3: group_input.outputs["Curve Min"], 4: group_input.outputs["Curve Max"]}) - - curve_length = nw.new_node(Nodes.CurveLength, - input_kwargs={'Curve': group_input.outputs["Curve"]}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: map_range.outputs["Result"], 1: curve_length}, - attrs={'operation': 'MULTIPLY'}) - - sample_curve = nw.new_node(Nodes.SampleCurve, - input_kwargs={'Curves': group_input.outputs["Curve"], 'Length': multiply}, attrs={'mode': 'LENGTH'}) - - cross_product = nw.new_node(Nodes.VectorMath, - input_kwargs={0: sample_curve.outputs["Tangent"], 1: sample_curve.outputs["Normal"]}, - attrs={'operation': 'CROSS_PRODUCT'}) - - scale = nw.new_node(Nodes.VectorMath, - input_kwargs={0: cross_product.outputs["Vector"], 'Scale': separate_xyz.outputs["X"]}, - attrs={'operation': 'SCALE'}) - - scale_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: sample_curve.outputs["Normal"], 'Scale': separate_xyz.outputs["Y"]}, - attrs={'operation': 'SCALE'}) - - add = nw.new_node(Nodes.VectorMath, - input_kwargs={0: scale.outputs["Vector"], 1: scale_1.outputs["Vector"]}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': capture_attribute.outputs["Geometry"], 'Position': sample_curve.outputs["Position"], 'Offset': add.outputs["Vector"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_position}) - -@node_utils.to_nodegroup('nodegroup_norm_index', singleton=True) -def nodegroup_norm_index(nw): - index = nw.new_node(Nodes.Index) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketInt', 'Count', 0)]) - - divide = nw.new_node(Nodes.Math, - input_kwargs={0: index, 1: group_input.outputs["Count"]}, - attrs={'operation': 'DIVIDE'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'T': divide}) - -@node_utils.to_nodegroup('nodegroup_flower_petal', singleton=True) -def nodegroup_flower_petal(nw): - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketFloat', 'Length', 0.2), - ('NodeSocketFloat', 'Point', 1.0), - ('NodeSocketFloat', 'Point height', 0.5), - ('NodeSocketFloat', 'Bevel', 6.8), - ('NodeSocketFloat', 'Base width', 0.2), - ('NodeSocketFloat', 'Upper width', 0.3), - ('NodeSocketInt', 'Resolution H', 8), - ('NodeSocketInt', 'Resolution V', 4), - ('NodeSocketFloat', 'Wrinkle', 0.1), - ('NodeSocketFloat', 'Curl', 0.0)]) - - multiply_add = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Resolution H"], 1: 2.0, 2: 1.0}, - attrs={'operation': 'MULTIPLY_ADD'}) - - grid = nw.new_node(Nodes.MeshGrid, - input_kwargs={'Vertices X': group_input.outputs["Resolution V"], 'Vertices Y': multiply_add}) - - position = nw.new_node(Nodes.InputPosition) - - capture_attribute = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': grid, 1: position}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': capture_attribute.outputs["Attribute"]}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz.outputs["X"], 1: 0.05}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': multiply, 'Y': separate_xyz.outputs["Y"]}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': combine_xyz, 'Scale': 7.9, 'Detail': 0.0, 'Distortion': 0.2}, - attrs={'noise_dimensions': '2D'}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: 
noise_texture.outputs["Fac"], 1: -0.5}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: add, 1: group_input.outputs["Wrinkle"]}, - attrs={'operation': 'MULTIPLY'}) - - separate_xyz_1 = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': capture_attribute.outputs["Attribute"]}) - - add_1 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_1.outputs["X"]}) - - absolute = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_1.outputs["Y"]}, - attrs={'operation': 'ABSOLUTE'}) - - multiply_2 = nw.new_node(Nodes.Math, - input_kwargs={0: absolute, 1: 2.0}, - attrs={'operation': 'MULTIPLY'}) - - power = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_2, 1: group_input.outputs["Bevel"]}, - attrs={'operation': 'POWER'}) - - multiply_add_1 = nw.new_node(Nodes.Math, - input_kwargs={0: power, 1: -1.0, 2: 1.0}, - attrs={'operation': 'MULTIPLY_ADD'}) - - multiply_3 = nw.new_node(Nodes.Math, - input_kwargs={0: add_1, 1: multiply_add_1}, - attrs={'operation': 'MULTIPLY'}) - - multiply_add_2 = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_3, 1: group_input.outputs["Upper width"], 2: group_input.outputs["Base width"]}, - attrs={'operation': 'MULTIPLY_ADD'}) - - multiply_4 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_1.outputs["Y"], 1: multiply_add_2}, - attrs={'operation': 'MULTIPLY'}) - - power_1 = nw.new_node(Nodes.Math, - input_kwargs={0: absolute, 1: group_input.outputs["Point"]}, - attrs={'operation': 'POWER'}) - - multiply_add_3 = nw.new_node(Nodes.Math, - input_kwargs={0: power_1, 1: -1.0, 2: 1.0}, - attrs={'operation': 'MULTIPLY_ADD'}) - - multiply_5 = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_add_3, 1: group_input.outputs["Point height"]}, - attrs={'operation': 'MULTIPLY'}) - - multiply_add_4 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Point height"], 1: -1.0, 2: 1.0}, - attrs={'operation': 'MULTIPLY_ADD'}) - - add_2 = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_5, 1: multiply_add_4}) - - multiply_6 = nw.new_node(Nodes.Math, - input_kwargs={0: add_2, 1: multiply_add_1}, - attrs={'operation': 'MULTIPLY'}) - - multiply_7 = nw.new_node(Nodes.Math, - input_kwargs={0: add_1, 1: multiply_6}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': multiply_1, 'Y': multiply_4, 'Z': multiply_7}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': capture_attribute.outputs["Geometry"], 'Position': combine_xyz_1}) - - multiply_8 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Length"]}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'Y': multiply_8}) - - reroute = nw.new_node(Nodes.Reroute, - input_kwargs={'Input': group_input.outputs["Curl"]}) - - group_1 = nw.new_node(nodegroup_polar_to_cart_old().name, - input_kwargs={'Addend': combine_xyz_3, 'Value': reroute, 'Vector': multiply_8}) - - quadratic_bezier = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Resolution': 8, 'Start': (0.0, 0.0, 0.0), 'Middle': combine_xyz_3, 'End': group_1}) - - group = nw.new_node(nodegroup_follow_curve().name, - input_kwargs={'Geometry': set_position, 'Curve': quadratic_bezier, 'Curve Min': 0.0}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': tag_nodegroup(nw, group, 'petal')}) - -@node_utils.to_nodegroup('nodegroup_phyllo_points', singleton=True) -def nodegroup_phyllo_points(nw): - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketInt', 
'Count', 50), - ('NodeSocketFloat', 'Min Radius', 0.0), - ('NodeSocketFloat', 'Max Radius', 2.0), - ('NodeSocketFloat', 'Radius exp', 0.5), - ('NodeSocketFloat', 'Min angle', -0.5236), - ('NodeSocketFloat', 'Max angle', 0.7854), - ('NodeSocketFloat', 'Min z', 0.0), - ('NodeSocketFloat', 'Max z', 1.0), - ('NodeSocketFloat', 'Clamp z', 1.0), - ('NodeSocketFloat', 'Yaw offset', -1.5708)]) - - mesh_line = nw.new_node(Nodes.MeshLine, - input_kwargs={'Count': group_input.outputs["Count"]}) - - mesh_to_points = nw.new_node(Nodes.MeshToPoints, - input_kwargs={'Mesh': mesh_line}) - - position = nw.new_node(Nodes.InputPosition) - - capture_attribute = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': mesh_to_points, 1: position}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - index = nw.new_node(Nodes.Index) - - cosine = nw.new_node(Nodes.Math, - input_kwargs={0: index}, - attrs={'operation': 'COSINE'}) - - sine = nw.new_node(Nodes.Math, - input_kwargs={0: index}, - attrs={'operation': 'SINE'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': cosine, 'Y': sine}) - - divide = nw.new_node(Nodes.Math, - input_kwargs={0: index, 1: group_input.outputs["Count"]}, - attrs={'operation': 'DIVIDE'}) - - power = nw.new_node(Nodes.Math, - input_kwargs={0: divide, 1: group_input.outputs["Radius exp"]}, - attrs={'operation': 'POWER'}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': power, 3: group_input.outputs["Min Radius"], 4: group_input.outputs["Max Radius"]}) - - multiply = nw.new_node(Nodes.VectorMath, - input_kwargs={0: combine_xyz, 1: map_range.outputs["Result"]}, - attrs={'operation': 'MULTIPLY'}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': multiply.outputs["Vector"]}) - - map_range_2 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': divide, 2: group_input.outputs["Clamp z"], 3: group_input.outputs["Min z"], 4: group_input.outputs["Max z"]}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': separate_xyz.outputs["X"], 'Y': separate_xyz.outputs["Y"], 'Z': map_range_2.outputs["Result"]}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': capture_attribute.outputs["Geometry"], 'Position': combine_xyz_1}) - - map_range_3 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': divide, 3: group_input.outputs["Min angle"], 4: group_input.outputs["Max angle"]}) - - random_value = nw.new_node(Nodes.RandomValue, - input_kwargs={2: -0.1, 3: 0.1}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: index, 1: group_input.outputs["Yaw offset"]}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': map_range_3.outputs["Result"], 'Y': random_value.outputs[1], 'Z': add}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Points': set_position, 'Rotation': combine_xyz_2}) - -@node_utils.to_nodegroup('nodegroup_plant_seed', singleton=True) -def nodegroup_plant_seed(nw): - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'Dimensions', (0.0, 0.0, 0.0)), - ('NodeSocketIntUnsigned', 'U', 4), - ('NodeSocketInt', 'V', 8)]) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': group_input.outputs["Dimensions"]}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': separate_xyz.outputs["X"]}) - - multiply_add = nw.new_node(Nodes.VectorMath, - input_kwargs={0: combine_xyz, 1: (0.5, 0.5, 0.5)}, - attrs={'operation': 'MULTIPLY_ADD'}) - - quadratic_bezier_1 = nw.new_node(Nodes.QuadraticBezier, - 
input_kwargs={'Resolution': group_input.outputs["U"], 'Start': (0.0, 0.0, 0.0), 'Middle': multiply_add.outputs["Vector"], 'End': combine_xyz}) - - group = nw.new_node(nodegroup_norm_index().name, - input_kwargs={'Count': group_input.outputs["U"]}) - - float_curve = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': group}) - node_utils.assign_curve(float_curve.mapping.curves[0], [(0.0, 0.0), (0.3159, 0.4469), (1.0, 0.0156)]) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': float_curve, 4: 3.0}) - - set_curve_radius = nw.new_node(Nodes.SetCurveRadius, - input_kwargs={'Curve': quadratic_bezier_1, 'Radius': map_range.outputs["Result"]}) - - curve_circle = nw.new_node(Nodes.CurveCircle, - input_kwargs={'Resolution': group_input.outputs["V"], 'Radius': separate_xyz.outputs["Y"]}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': set_curve_radius, 'Profile Curve': curve_circle.outputs["Curve"], 'Fill Caps': True}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Mesh': tag_nodegroup(nw, curve_to_mesh, 'seed')}) - -def shader_flower_center(nw): - ambient_occlusion = nw.new_node(Nodes.AmbientOcclusion) - - colorramp = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': ambient_occlusion.outputs["Color"]}) - colorramp.color_ramp.elements.new(1) - colorramp.color_ramp.elements[0].position = 0.4841 - colorramp.color_ramp.elements[0].color = (0.0127, 0.0075, 0.0026, 1.0) - colorramp.color_ramp.elements[1].position = 0.8591 - colorramp.color_ramp.elements[1].color = (0.0848, 0.0066, 0.0007, 1.0) - colorramp.color_ramp.elements[2].position = 1.0 - colorramp.color_ramp.elements[2].color = (1.0, 0.6228, 0.1069, 1.0) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': colorramp.outputs["Color"]}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': principled_bsdf}) - -def shader_petal(nw): - - translucent_color_change = uniform(0.1, 0.6) - specular = normal(0.6, 0.1) - roughness = normal(0.4, 0.05) - translucent_amt = normal(0.3, 0.05) - - petal_color = nw.new_node(Nodes.RGB) - petal_color.outputs[0].default_value = color.color_category('petal') - - translucent_color = nw.new_node(Nodes.MixRGB, [translucent_color_change, petal_color, color.color_category('petal')]) - - translucent_bsdf = nw.new_node(Nodes.TranslucentBSDF, - input_kwargs={'Color': translucent_color}) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': petal_color, 'Specular': specular, 'Roughness': roughness }) - - mix_shader = nw.new_node(Nodes.MixShader, - input_kwargs={'Fac': translucent_amt, 1: principled_bsdf, 2: translucent_bsdf}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': mix_shader}) - -def geo_flower(nw, petal_material, center_material): - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketFloat', 'Center Rad', 0.0), - ('NodeSocketVector', 'Petal Dims', (0.0, 0.0, 0.0)), - ('NodeSocketFloat', 'Seed Size', 0.0), - ('NodeSocketFloat', 'Min Petal Angle', 0.1), - ('NodeSocketFloat', 'Max Petal Angle', 1.36), - ('NodeSocketFloat', 'Wrinkle', 0.01), - ('NodeSocketFloat', 'Curl', 13.89)]) - - uv_sphere = nw.new_node(Nodes.MeshUVSphere, - input_kwargs={'Segments': 8, 'Rings': 8, 'Radius': group_input.outputs["Center Rad"]}) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': uv_sphere, 'Scale': (1.0, 1.0, 0.05)}) - - multiply = nw.new_node(Nodes.Math, 
- input_kwargs={0: group_input.outputs["Seed Size"], 1: 1.5}, - attrs={'operation': 'MULTIPLY'}) - - distribute_points_on_faces = nw.new_node(Nodes.DistributePointsOnFaces, - input_kwargs={'Mesh': transform, 'Distance Min': multiply, 'Density Max': 50000.0}, - attrs={'distribute_method': 'POISSON'}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Seed Size"], 1: 10.0}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': multiply_1, 'Y': group_input.outputs["Seed Size"]}) - - group_3 = nw.new_node(nodegroup_plant_seed().name, - input_kwargs={'Dimensions': combine_xyz, 'U': 6, 'V': 6}) - - musgrave_texture = nw.new_node(Nodes.MusgraveTexture, - input_kwargs={'W': 13.8, 'Scale': 2.41}, - attrs={'musgrave_dimensions': '4D'}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': musgrave_texture, 3: 0.34, 4: 1.21}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': map_range.outputs["Result"], 'Y': 1.0, 'Z': 1.0}) - - instance_on_points_1 = nw.new_node(Nodes.InstanceOnPoints, - input_kwargs={'Points': distribute_points_on_faces.outputs["Points"], 'Instance': group_3, 'Rotation': (0.0, -1.5708, 0.0541), 'Scale': combine_xyz_1}) - - realize_instances = nw.new_node(Nodes.RealizeInstances, - input_kwargs={'Geometry': instance_on_points_1}) - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [realize_instances, transform]}) - - set_material_1 = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': join_geometry_1, 'Material': center_material}) - - multiply_2 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Center Rad"], 1: 6.2832}, - attrs={'operation': 'MULTIPLY'}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': group_input.outputs["Petal Dims"]}) - - divide = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_2, 1: separate_xyz.outputs["Y"]}, - attrs={'operation': 'DIVIDE'}) - - multiply_3 = nw.new_node(Nodes.Math, - input_kwargs={0: divide, 1: 1.2}, - attrs={'operation': 'MULTIPLY'}) - - reroute_3 = nw.new_node(Nodes.Reroute, - input_kwargs={'Input': group_input.outputs["Center Rad"]}) - - reroute_1 = nw.new_node(Nodes.Reroute, - input_kwargs={'Input': group_input.outputs["Min Petal Angle"]}) - - reroute = nw.new_node(Nodes.Reroute, - input_kwargs={'Input': group_input.outputs["Max Petal Angle"]}) - - group_1 = nw.new_node(nodegroup_phyllo_points().name, - input_kwargs={'Count': multiply_3, 'Min Radius': reroute_3, 'Max Radius': reroute_3, 'Radius exp': 0.0, 'Min angle': reroute_1, 'Max angle': reroute, 'Max z': 0.0}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz.outputs["Z"], 1: separate_xyz.outputs["Y"]}, - attrs={'operation': 'SUBTRACT', 'use_clamp': True}) - - reroute_2 = nw.new_node(Nodes.Reroute, - input_kwargs={'Input': group_input.outputs["Wrinkle"]}) - - reroute_4 = nw.new_node(Nodes.Reroute, - input_kwargs={'Input': group_input.outputs["Curl"]}) - - group = nw.new_node(nodegroup_flower_petal().name, - input_kwargs={'Length': separate_xyz.outputs["X"], 'Point': 0.56, 'Point height': -0.1, 'Bevel': 1.83, 'Base width': separate_xyz.outputs["Y"], 'Upper width': subtract, - 'Resolution H': 8, 'Resolution V': 16, 'Wrinkle': reroute_2, 'Curl': reroute_4}) - - instance_on_points = nw.new_node(Nodes.InstanceOnPoints, - input_kwargs={'Points': group_1.outputs["Points"], 'Instance': group, 'Rotation': group_1.outputs["Rotation"]}) - - realize_instances_1 = 
nw.new_node(Nodes.RealizeInstances, - input_kwargs={'Geometry': instance_on_points}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Scale': 3.73, 'Detail': 5.41, 'Distortion': -1.0}) - - subtract_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: noise_texture.outputs["Color"], 1: (0.5, 0.5, 0.5)}, - attrs={'operation': 'SUBTRACT'}) - - value = nw.new_node(Nodes.Value) - value.outputs[0].default_value = 0.025 - - multiply_4 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: subtract_1.outputs["Vector"], 1: value}, - attrs={'operation': 'MULTIPLY'}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': realize_instances_1, 'Offset': multiply_4.outputs["Vector"]}) - - set_material = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': set_position, 'Material': petal_material}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [set_material_1, set_material]}) - - set_shade_smooth = nw.new_node(Nodes.SetShadeSmooth, - input_kwargs={'Geometry': join_geometry, 'Shade Smooth': False}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_shade_smooth}) - -class FlowerFactory(AssetFactory): - - def __init__(self, factory_seed, rad=0.15, diversity_fac=0.25): - super(FlowerFactory, self).__init__(factory_seed=factory_seed) - - self.rad = rad - self.diversity_fac = diversity_fac - - with FixedSeed(factory_seed): - self.petal_material = surface.shaderfunc_to_material(shader_petal) - self.center_material = surface.shaderfunc_to_material(shader_flower_center) - self.species_params = self.get_flower_params(self.rad) - - @staticmethod - def get_flower_params(overall_rad=0.05): - pct_inner = uniform(0.05, 0.4) - base_width = 2 * np.pi * overall_rad * pct_inner / normal(20, 5) - top_width = overall_rad * np.clip(normal(0.7, 0.3), base_width * 1.2, 100) - - min_angle, max_angle = np.deg2rad(np.sort(uniform(-20, 100, 2))) - - return { - 'Center Rad': overall_rad * pct_inner, - 'Petal Dims': np.array([overall_rad * (1 - pct_inner), base_width, top_width], dtype=np.float32), - 'Seed Size': uniform(0.005, 0.01), - 'Min Petal Angle': min_angle, - 'Max Petal Angle': max_angle, - 'Wrinkle': uniform(0.003, 0.02), - 'Curl': np.deg2rad(normal(30, 50)) - } - - def create_asset(self, **kwargs) -> bpy.types.Object: - - vert = butil.spawn_vert('flower') - mod = surface.add_geomod(vert, geo_flower, - input_kwargs={'petal_material': self.petal_material, 'center_material': self.center_material}) - - inst_params = self.get_flower_params(self.rad * normal(1, 0.05)) - params = dict_lerp(self.species_params, inst_params, 0.25) - surface.set_geomod_inputs(mod, params) - - butil.apply_modifiers(vert, mod=mod) - - vert.rotation_euler.z = uniform(0, 360) - tag_object(vert, 'flower') - return vert \ No newline at end of file diff --git a/infinigen/assets/grassland/flowerplant.py b/infinigen/assets/grassland/flowerplant.py deleted file mode 100644 index f09757742..000000000 --- a/infinigen/assets/grassland/flowerplant.py +++ /dev/null @@ -1,613 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
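The `FlowerFactory` removed just above samples species-level parameters once (under `FixedSeed`) and then blends them with freshly sampled per-instance parameters via `dict_lerp(self.species_params, inst_params, 0.25)`. The snippet below is a minimal, hypothetical stand-in for that blend, not the real `infinigen.core.util.math.dict_lerp`; it assumes every value is a scalar or a numpy array.

```python
import numpy as np

def dict_lerp_sketch(a: dict, b: dict, t: float) -> dict:
    # t = 0 keeps the species-level values, t = 1 uses the per-instance sample.
    return {k: (1.0 - t) * np.asarray(a[k]) + t * np.asarray(b[k]) for k in a}

species  = {'Center Rad': 0.030, 'Wrinkle': 0.010}    # sampled once per factory seed
instance = {'Center Rad': 0.036, 'Wrinkle': 0.013}    # resampled for each asset
params = dict_lerp_sketch(species, instance, 0.25)    # instances stay close to their species
```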
- -# Authors: Beining Han - - -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.assets.grassland.flower import FlowerFactory -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -import numpy as np -from infinigen.core import surface -from infinigen.assets.materials import simple_greenery -from infinigen.assets.small_plants import leaf_general as Leaf -from infinigen.assets.grassland import flower as Flower -from infinigen.core.placement.factory import AssetFactory -from infinigen.core.util import blender as butil -from infinigen.core.tagging import tag_object, tag_nodegroup - - -@node_utils.to_nodegroup('nodegroup_stem_branch_leaf_s_r', singleton=False, type='GeometryNodeTree') -def nodegroup_stem_branch_leaf_s_r(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - random_value = nw.new_node(Nodes.RandomValue, - input_kwargs={2: 0.2, 3: 0.7}) - - curve_tangent_1 = nw.new_node(Nodes.CurveTangent) - - align_euler_to_vector_1 = nw.new_node(Nodes.AlignEulerToVector, - input_kwargs={'Vector': curve_tangent_1}, - attrs={'axis': 'Z'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Value': random_value.outputs[1], 'Rotation': align_euler_to_vector_1}) - - -@node_utils.to_nodegroup('nodegroup_stem_branch_leaf_selection', singleton=False, type='GeometryNodeTree') -def nodegroup_stem_branch_leaf_selection(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - spline_parameter_1 = nw.new_node(Nodes.SplineParameter) - - colorramp_1 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': spline_parameter_1.outputs["Factor"]}) - colorramp_1.color_ramp.interpolation = "CONSTANT" - colorramp_1.color_ramp.elements.new(0) - colorramp_1.color_ramp.elements[0].position = 0.0 - colorramp_1.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) - colorramp_1.color_ramp.elements[1].position = 0.20 - colorramp_1.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - colorramp_1.color_ramp.elements[2].position = 0.80 - colorramp_1.color_ramp.elements[2].color = (0.0, 0.0, 0.0, 1.0) - - integer = randint(10, 30, size=(1,))[0] - random_value_3 = nw.new_node(Nodes.RandomValue, - input_kwargs={5: integer}, - attrs={'data_type': 'INT'}) - - op_not = nw.new_node(Nodes.BooleanMath, - input_kwargs={0: random_value_3.outputs[2]}, - attrs={'operation': 'NOT'}) - - op_and = nw.new_node(Nodes.BooleanMath, - input_kwargs={0: colorramp_1.outputs["Color"], 1: op_not}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Boolean': op_and}) - - -@node_utils.to_nodegroup('nodegroup_stem_branch_leaves', singleton=False, type='GeometryNodeTree') -def nodegroup_stem_branch_leaves(nw: NodeWrangler, leaves): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Curve', None)]) - - resample_curve_3 = nw.new_node(Nodes.ResampleCurve, - input_kwargs={'Curve': group_input.outputs["Curve"], 'Count': 100}) - - stembranchleafselection = nw.new_node(nodegroup_stem_branch_leaf_selection().name) - - leaf_id = randint(0, len(leaves), size=(1,))[0] - leaf = leaves[leaf_id] - object_info_2 = nw.new_node(Nodes.ObjectInfo, input_kwargs={'Object': leaf}) - - stembranchleafsr = nw.new_node(nodegroup_stem_branch_leaf_s_r().name) - - instance_on_points_4 = nw.new_node(Nodes.InstanceOnPoints, - 
input_kwargs={'Points': resample_curve_3, 'Selection': stembranchleafselection, - 'Instance': object_info_2.outputs["Geometry"], - 'Rotation': stembranchleafsr.outputs["Rotation"], - 'Scale': stembranchleafsr.outputs["Value"]}) - - random_value_3 = nw.new_node(Nodes.RandomValue, - input_kwargs={'Max': (0.6, 0.6, 6.28), 'Seed': 30}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - rotate_instances_2 = nw.new_node(Nodes.RotateInstances, - input_kwargs={'Instances': instance_on_points_4, - 'Rotation': random_value_3.outputs["Value"]}) - - realize_instances = nw.new_node(Nodes.RealizeInstances, - input_kwargs={'Geometry': rotate_instances_2}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Instances': realize_instances}) - - -@node_utils.to_nodegroup('nodegroup_stem_branch_geometry', singleton=False, type='GeometryNodeTree') -def nodegroup_stem_branch_geometry(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Curve', None)]) - - spline_parameter = nw.new_node(Nodes.SplineParameter) - - colorramp = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': spline_parameter.outputs["Factor"]}) - colorramp.color_ramp.elements[0].position = 0.0 - colorramp.color_ramp.elements[0].color = (1.0, 1.0, 1.0, 1.0) - colorramp.color_ramp.elements[1].position = 1.0 - colorramp.color_ramp.elements[1].color = (0.4, 0.4, 0.4, 1.0) - - set_curve_radius = nw.new_node(Nodes.SetCurveRadius, - input_kwargs={'Curve': group_input.outputs["Curve"], - 'Radius': colorramp.outputs["Color"]}) - - r = uniform(0.015, 0.022, size=(1,))[0] - curve_circle = nw.new_node(Nodes.CurveCircle, - input_kwargs={'Resolution': 10, 'Radius': r}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': set_curve_radius, 'Profile Curve': curve_circle.outputs["Curve"], - 'Fill Caps': True}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Mesh': curve_to_mesh}) - - -@node_utils.to_nodegroup('nodegroup_stem_branch_rotation', singleton=False, type='GeometryNodeTree') -def nodegroup_stem_branch_rotation(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - position = nw.new_node(Nodes.InputPosition) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None)]) - - bounding_box = nw.new_node(Nodes.BoundingBox, - input_kwargs={'Geometry': group_input.outputs["Geometry"]}) - - multiply = nw.new_node(Nodes.VectorMath, - input_kwargs={0: bounding_box.outputs["Max"], 1: (0.0, 0.0, 1.0)}, - attrs={'operation': 'MULTIPLY'}) - - index = nw.new_node(Nodes.Index) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': index, 2: 20.0}) - - curvature = uniform(-0.5, 0.5, (1,))[0] - float_curve = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': map_range.outputs["Result"]}) - node_utils.assign_curve(float_curve.mapping.curves[0], - [(0.0, 0.5), (0.1, curvature / 5. 
+ 0.5), - (0.25, curvature / 2.5 + 0.5), (0.45, curvature / 1.5 + 0.5), - (0.6, curvature / 1.2 + 0.5), (1.0, curvature + 0.5)]) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: float_curve, 1: -0.5}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: add, 1: 1.0}, - attrs={'operation': 'MULTIPLY'}) - - vector_rotate = nw.new_node(Nodes.VectorRotate, - input_kwargs={'Vector': position, 'Center': multiply.outputs["Vector"], - 'Angle': multiply_1}, - attrs={'rotation_type': 'X_AXIS'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Vector': vector_rotate}) - - -@node_utils.to_nodegroup('nodegroup_stem_leaf_s_r', singleton=False, type='GeometryNodeTree') -def nodegroup_stem_leaf_s_r(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - random_value = nw.new_node(Nodes.RandomValue, - input_kwargs={2: 0.3, 3: 0.6}) - - curve_tangent_1 = nw.new_node(Nodes.CurveTangent) - - align_euler_to_vector_1 = nw.new_node(Nodes.AlignEulerToVector, - input_kwargs={'Vector': curve_tangent_1}, - attrs={'axis': 'Z'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Value': random_value.outputs[1], 'Rotation': align_euler_to_vector_1}) - - -@node_utils.to_nodegroup('nodegroup_stem_leaf_selection', singleton=False, type='GeometryNodeTree') -def nodegroup_stem_leaf_selection(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - spline_parameter_1 = nw.new_node(Nodes.SplineParameter) - - colorramp_1 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': spline_parameter_1.outputs["Factor"]}) - colorramp_1.color_ramp.interpolation = "CONSTANT" - colorramp_1.color_ramp.elements.new(0) - colorramp_1.color_ramp.elements[0].position = 0.0 - colorramp_1.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) - colorramp_1.color_ramp.elements[1].position = 0.30 - colorramp_1.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - colorramp_1.color_ramp.elements[2].position = 0.85 - colorramp_1.color_ramp.elements[2].color = (0.0, 0.0, 0.0, 1.0) - - integer = randint(5, 15, size=(1,))[0] - random_value_3 = nw.new_node(Nodes.RandomValue, - input_kwargs={5: integer}, - attrs={'data_type': 'INT'}) - - op_not = nw.new_node(Nodes.BooleanMath, - input_kwargs={0: random_value_3.outputs[2]}, - attrs={'operation': 'NOT'}) - - op_and = nw.new_node(Nodes.BooleanMath, - input_kwargs={0: colorramp_1.outputs["Color"], 1: op_not}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Boolean': op_and}) - - -@node_utils.to_nodegroup('nodegroup_stem_branch', singleton=False, type='GeometryNodeTree') -def nodegroup_stem_branch(nw: NodeWrangler, flowers, leaves): - # Code generated using version 2.4.3 of the node_transpiler - - curve_line_2 = nw.new_node(Nodes.CurveLine) - - resample_curve_4 = nw.new_node(Nodes.ResampleCurve, - input_kwargs={'Curve': curve_line_2, 'Count': 20}) - - stembranchrotation = nw.new_node(nodegroup_stem_branch_rotation().name) - - set_position_2 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': resample_curve_4, 'Position': stembranchrotation}) - - branchflowersetting = nw.new_node(nodegroup_branch_flower_setting(flowers=flowers).name) - - instance_on_points_3 = nw.new_node(Nodes.InstanceOnPoints, - input_kwargs={'Points': set_position_2, - 'Selection': branchflowersetting.outputs["Selection"], - 'Instance': branchflowersetting.outputs["Geometry"], - 'Rotation': branchflowersetting.outputs["Rotation"], - 'Scale': branchflowersetting.outputs["Value"]}) - - random_value = 
nw.new_node(Nodes.RandomValue, - input_kwargs={2: 0.4, 3: 0.7}) - - scale_instances = nw.new_node(Nodes.ScaleInstances, - input_kwargs={'Instances': instance_on_points_3, 'Scale': random_value.outputs[1]}) - - stembranchgeometry = nw.new_node(nodegroup_stem_branch_geometry().name, - input_kwargs={'Curve': set_position_2}) - - stembranchleaves = nw.new_node(nodegroup_stem_branch_leaves(leaves=leaves).name, - input_kwargs={'Curve': set_position_2}) - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [stembranchgeometry, stembranchleaves]}) - - join_geometry_1 = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': join_geometry_1, - 'Material': surface.shaderfunc_to_material(simple_greenery.shader_simple_greenery)}) - - realize_instances = nw.new_node(Nodes.RealizeInstances, - input_kwargs={'Geometry': scale_instances}) - - join_geometry_2 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [realize_instances, join_geometry_1]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': join_geometry_2}) - - -@node_utils.to_nodegroup('nodegroup_stem_branch_selection', singleton=False, type='GeometryNodeTree') -def nodegroup_stem_branch_selection(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - spline_parameter_1 = nw.new_node(Nodes.SplineParameter) - - colorramp_1 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': spline_parameter_1.outputs["Factor"]}) - colorramp_1.color_ramp.interpolation = "CONSTANT" - colorramp_1.color_ramp.elements.new(0) - colorramp_1.color_ramp.elements[0].position = 0.0 - colorramp_1.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) - colorramp_1.color_ramp.elements[1].position = 0.50 - colorramp_1.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - colorramp_1.color_ramp.elements[2].position = 0.80 - colorramp_1.color_ramp.elements[2].color = (0.0, 0.0, 0.0, 1.0) - - seed = randint(0, 10000, size=(1,))[0] - threshold = uniform(0.05, 0.1, size=(1,))[0] - random_value = nw.new_node(Nodes.RandomValue, - input_kwargs={'Min': 0.0, 'Max': 1.0, 'Seed': seed}) - less_equal = nw.new_node(Nodes.Compare, - input_kwargs={0: random_value, 1: threshold}, - attrs={'operation': 'LESS_EQUAL'}) - - op_and = nw.new_node(Nodes.BooleanMath, - input_kwargs={0: colorramp_1.outputs["Color"], 1: less_equal}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Boolean': op_and}) - - -@node_utils.to_nodegroup('nodegroup_stem_leaves', singleton=False, type='GeometryNodeTree') -def nodegroup_stem_leaves(nw: NodeWrangler, leaves): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Curve', None)]) - - stemleafselection = nw.new_node(nodegroup_stem_leaf_selection().name) - - leaf_id = randint(0, len(leaves), size=(1,))[0] - leaf = leaves[leaf_id] - object_info_2 = nw.new_node(Nodes.ObjectInfo, input_kwargs={'Object': leaf}) - - stemleafsr = nw.new_node(nodegroup_stem_leaf_s_r().name) - - instance_on_points_1 = nw.new_node(Nodes.InstanceOnPoints, - input_kwargs={'Points': group_input.outputs["Curve"], - 'Selection': stemleafselection, - 'Instance': object_info_2.outputs["Geometry"], - 'Rotation': stemleafsr.outputs["Rotation"], - 'Scale': stemleafsr.outputs["Value"]}) - - random_value_2 = nw.new_node(Nodes.RandomValue, - input_kwargs={'Max': (0.5, 0.5, 6.28), 'Seed': 30}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - rotate_instances = nw.new_node(Nodes.RotateInstances, - 
input_kwargs={'Instances': instance_on_points_1, - 'Rotation': random_value_2.outputs["Value"]}) - - realize_instances = nw.new_node(Nodes.RealizeInstances, - input_kwargs={'Geometry': rotate_instances}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Instances': realize_instances}) - - -@node_utils.to_nodegroup('nodegroup_main_flower_setting', singleton=False, type='GeometryNodeTree') -def nodegroup_main_flower_setting(nw: NodeWrangler, flowers): - # Code generated using version 2.4.3 of the node_transpiler - - flower_id = randint(0, len(flowers), size=(1,))[0] - scale = uniform(0.25, 0.45, size=(1,))[0] - flower = flowers[flower_id] - object_info_2 = nw.new_node(Nodes.ObjectInfo, input_kwargs={'Object': flower}) - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': object_info_2.outputs["Geometry"], 'Scale': (scale, scale, scale)}) - - value = nw.new_node(Nodes.Value) - value.outputs[0].default_value = 0.5 - - endpoint_selection = nw.new_node('GeometryNodeCurveEndpointSelection', - input_kwargs={'Start Size': 0}) - - curve_tangent = nw.new_node(Nodes.CurveTangent) - - align_euler_to_vector = nw.new_node(Nodes.AlignEulerToVector, - input_kwargs={'Vector': curve_tangent}, - attrs={'axis': 'Z'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': transform, 'Value': value, - 'Selection': endpoint_selection, 'Rotation': align_euler_to_vector}) - - -@node_utils.to_nodegroup('nodegroup_branch_flower_setting', singleton=False, type='GeometryNodeTree') -def nodegroup_branch_flower_setting(nw: NodeWrangler, flowers): - # Code generated using version 2.4.3 of the node_transpiler - - flower_id = randint(0, len(flowers), size=(1,))[0] - scale = uniform(0.4, 0.6, size=(1,))[0] - flower = flowers[flower_id] - object_info_2 = nw.new_node(Nodes.ObjectInfo, input_kwargs={'Object': flower}) - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': object_info_2.outputs["Geometry"], 'Scale': (scale, scale, scale)}) - - value = nw.new_node(Nodes.Value) - value.outputs[0].default_value = 0.5 - - endpoint_selection = nw.new_node('GeometryNodeCurveEndpointSelection', - input_kwargs={'Start Size': 0}) - - curve_tangent = nw.new_node(Nodes.CurveTangent) - - align_euler_to_vector = nw.new_node(Nodes.AlignEulerToVector, - input_kwargs={'Vector': curve_tangent}, - attrs={'axis': 'Z'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': transform, 'Value': value, - 'Selection': endpoint_selection, 'Rotation': align_euler_to_vector}) - - -@node_utils.to_nodegroup('nodegroup_stem_rotation', singleton=False, type='GeometryNodeTree') -def nodegroup_stem_rotation(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - position = nw.new_node(Nodes.InputPosition) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None)]) - - bounding_box = nw.new_node(Nodes.BoundingBox, - input_kwargs={'Geometry': group_input.outputs["Geometry"]}) - - multiply = nw.new_node(Nodes.VectorMath, - input_kwargs={0: bounding_box.outputs["Max"], 1: (0.0, 0.0, 1.0)}, - attrs={'operation': 'MULTIPLY'}) - - index = nw.new_node(Nodes.Index) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': index, 2: 20.0}) - - curvature = np.clip(np.abs(normal(0, 0.4, (1,))[0]), 0.0, 0.8) - float_curve = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': map_range.outputs["Result"]}) - node_utils.assign_curve(float_curve.mapping.curves[0], - [(0.0, 0.0), (0.1, curvature 
/ 5.), - (0.25, curvature / 2.5), (0.45, curvature / 1.5), - (0.6, curvature / 1.2), (1.0, curvature)]) - - multiply_2 = nw.new_node(Nodes.Math, - input_kwargs={0: float_curve, 1: 1.2}, attrs={'operation': 'MULTIPLY'}) - - vector_rotate = nw.new_node(Nodes.VectorRotate, - input_kwargs={'Vector': position, 'Center': multiply.outputs["Vector"], - 'Angle': multiply_2}, - attrs={'rotation_type': 'X_AXIS'}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Scale': 0.3}) - - add_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: (-0.5, -0.5, -0.5), 1: noise_texture.outputs["Color"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Rotation': vector_rotate, 'Noise': add_1.outputs["Vector"]}) - - -@node_utils.to_nodegroup('nodegroup_stem_geometry', singleton=False, type='GeometryNodeTree') -def nodegroup_stem_geometry(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Curve', None)]) - - spline_parameter = nw.new_node(Nodes.SplineParameter) - - colorramp = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': spline_parameter.outputs["Factor"]}) - colorramp.color_ramp.elements[0].position = 0.0 - colorramp.color_ramp.elements[0].color = (1.0, 1.0, 1.0, 1.0) - colorramp.color_ramp.elements[1].position = 1.0 - colorramp.color_ramp.elements[1].color = (0.4, 0.4, 0.4, 1.0) - - set_curve_radius = nw.new_node(Nodes.SetCurveRadius, - input_kwargs={'Curve': group_input.outputs["Curve"], - 'Radius': colorramp.outputs["Color"]}) - - rad = uniform(0.01, 0.02, size=(1,))[0] - curve_circle = nw.new_node(Nodes.CurveCircle, - input_kwargs={'Resolution': 10, 'Radius': rad}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': set_curve_radius, 'Profile Curve': curve_circle.outputs["Curve"], - 'Fill Caps': True}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Mesh': tag_nodegroup(nw, curve_to_mesh, 'stem')}) - - -def geo_flowerplant(nw: NodeWrangler, **kwargs): - # Code generated using version 2.4.3 of the node_transpiler - leaves = kwargs["leaves"] - flowers = kwargs["flowers"] - curve_line = nw.new_node(Nodes.CurveLine) - - resample_curve = nw.new_node(Nodes.ResampleCurve, - input_kwargs={'Curve': curve_line, 'Count': 20}) - - stemrotation = nw.new_node(nodegroup_stem_rotation().name, - input_kwargs={'Geometry': curve_line}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': resample_curve, 'Position': stemrotation.outputs["Rotation"], - 'Offset': stemrotation.outputs["Noise"]}) - - stemgeometry = nw.new_node(nodegroup_stem_geometry().name, - input_kwargs={'Curve': set_position}) - - mainflowersetting = nw.new_node(nodegroup_main_flower_setting(flowers=flowers).name) - - instance_on_points = nw.new_node(Nodes.InstanceOnPoints, - input_kwargs={'Points': set_position, - 'Selection': mainflowersetting.outputs["Selection"], - 'Instance': mainflowersetting.outputs["Geometry"], - 'Rotation': mainflowersetting.outputs["Rotation"], - 'Scale': mainflowersetting.outputs["Value"]}) - - resample_curve_1 = nw.new_node(Nodes.ResampleCurve, - input_kwargs={'Curve': set_position, 'Count': 150}) - - stemleaves = nw.new_node(nodegroup_stem_leaves(leaves=leaves).name, - input_kwargs={'Curve': resample_curve_1}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [stemgeometry, stemleaves]}) - - join_geometry = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': 
join_geometry, - 'Material': surface.shaderfunc_to_material(simple_greenery.shader_simple_greenery)}) - - num_versions = randint(0, 3, size=(1,))[0] - branches = [] - for version in range(num_versions): - resample_num = randint(80, 100, size=(1,))[0] - resample_curve_2 = nw.new_node(Nodes.ResampleCurve, input_kwargs={'Curve': set_position, 'Count': resample_num}) - stembranchselection = nw.new_node(nodegroup_stem_branch_selection().name) - stembranch = nw.new_node(nodegroup_stem_branch(flowers=flowers, leaves=leaves).name) - random_value_1 = nw.new_node(Nodes.RandomValue, input_kwargs={'Min': (0.4, 0.4, 0.4)}, - attrs={'data_type': 'FLOAT_VECTOR'}) - instance_on_points_2 = nw.new_node(Nodes.InstanceOnPoints, - input_kwargs={'Points': resample_curve_2, 'Selection': stembranchselection, - 'Instance': stembranch, 'Scale': (random_value_1, "Value")}) - random_value_4 = nw.new_node(Nodes.RandomValue, - input_kwargs={'Min': (0.15, 0.15, 0.0), 'Max': (0.45, 0.45, 6.28), 'Seed': 30}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - rotate_instances_1 = nw.new_node(Nodes.RotateInstances, - input_kwargs={'Instances': instance_on_points_2, - 'Rotation': (random_value_4, "Value")}) - realize_instances_1 = nw.new_node(Nodes.RealizeInstances, - input_kwargs={'Geometry': rotate_instances_1}) - branches.append(realize_instances_1) - - realize_instances = nw.new_node(Nodes.RealizeInstances, - input_kwargs={'Geometry': instance_on_points}) - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={ - 'Geometry': [join_geometry, realize_instances] + branches}) - - z_rotate = uniform(0, 6.28, size=(1,))[0] - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': join_geometry_1, 'Rotation': (0., 0., z_rotate)}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': transform}) - - -class FlowerPlantFactory(AssetFactory): - def __init__(self, factory_seed, coarse=False): - super(FlowerPlantFactory, self).__init__(factory_seed, coarse=coarse) - self.leaves_version_num = 4 - self.flowers_version_num = 1 - - def create_asset(self, **params): - bpy.ops.mesh.primitive_plane_add( - size=1, enter_editmode=False, align='WORLD', location=(0, 0, 0), scale=(1, 1, 1)) - obj = bpy.context.active_object - - # Make the Leaf and Delete It Later - leaves = [] - for _ in range(self.leaves_version_num): - lf_seed = randint(0, 1000, size=(1,))[0] - leaf_model = Leaf.LeafFactory(genome={"leaf_width": 0.35, "width_rand": 0.1}, factory_seed=lf_seed) - leaf = leaf_model.create_asset() - leaves.append(leaf) - - flowers = [] - for _ in range(self.flowers_version_num): - fw_seed = randint(0, 1000, size=(1,))[0] - rad = uniform(0.4, 0.7, size=(1,))[0] - flower_model = Flower.FlowerFactory(rad=rad, factory_seed=fw_seed) - flower = flower_model.create_asset() - flowers.append(flower) - - params["leaves"] = leaves - params["flowers"] = flowers - - mod = surface.add_geomod(obj, geo_flowerplant, apply=False, attributes=[], input_kwargs=params) - butil.delete(leaves + flowers) - with butil.SelectObjects(obj): - bpy.ops.object.material_slot_remove() - bpy.ops.object.shade_flat() - - butil.apply_modifiers(obj) - - tag_object(obj, 'flowerplant') - return obj diff --git a/infinigen/assets/leaves/leaf_broadleaf.py b/infinigen/assets/leaves/leaf_broadleaf.py deleted file mode 100644 index 3d69441a0..000000000 --- a/infinigen/assets/leaves/leaf_broadleaf.py +++ /dev/null @@ -1,764 +0,0 @@ -# Copyright (c) Princeton University. 
-# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Yiming Zuo -# Acknowledgment: This file draws inspiration from https://www.youtube.com/watch?v=pfOKB1GKJHM by Dr. Blender - -import numpy as np -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category, hsv2rgba -from infinigen.core import surface -from infinigen.assets.leaves.leaf_v2 import nodegroup_apply_wave, nodegroup_move_to_origin -from infinigen.assets.leaves.leaf_maple import nodegroup_leaf_shader - -from infinigen.core.util.math import FixedSeed -from infinigen.core.placement.factory import AssetFactory -from infinigen.core.util import blender as butil -from infinigen.core.tagging import tag_object, tag_nodegroup - - -@node_utils.to_nodegroup('nodegroup_random_mask_vein', singleton=False, type='GeometryNodeTree') -def nodegroup_random_mask_vein(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Coord', 0.0), - ('NodeSocketFloat', 'Shape', 0.5), - ('NodeSocketFloat', 'Density', 0.5), - ('NodeSocketFloat', 'Random Scale Seed', 0.5)]) - - vein = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'W': group_input.outputs["Coord"], 'Scale': group_input.outputs["Density"], 'Randomness': 0.2}, - label='Vein', - attrs={'voronoi_dimensions': '1D'}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Density"], 1: group_input.outputs["Random Scale Seed"]}, - attrs={'operation': 'MULTIPLY'}) - - vein_1 = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'W': group_input.outputs["Coord"], 'Scale': multiply}, - label='Vein', - attrs={'voronoi_dimensions': '1D'}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: vein_1.outputs["Distance"], 1: 0.35}) - - round = nw.new_node(Nodes.Math, - input_kwargs={0: add}, - attrs={'operation': 'ROUND'}) - - add_1 = nw.new_node(Nodes.Math, - input_kwargs={0: vein.outputs["Distance"], 1: round}) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': add_1, 2: 0.02, 3: 0.95, 4: 0.0}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Shape"], 1: map_range_1.outputs["Result"]}, - attrs={'operation': 'MULTIPLY'}) - - map_range_2 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': multiply_1, 1: 0.001, 2: 0.005, 3: 1.0, 4: 0.0}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Result': map_range_2.outputs["Result"]}) - -@node_utils.to_nodegroup('nodegroup_nodegroup_vein_coord_001', singleton=False, type='GeometryNodeTree') -def nodegroup_nodegroup_vein_coord_001(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'X Modulated', 0.5), - ('NodeSocketFloat', 'Y', 0.5), - ('NodeSocketFloat', 'Vein Asymmetry', 0.0), - ('NodeSocketFloat', 'Vein Angle', 2.0), - ('NodeSocketFloat', 'Leaf Shape', 0.0)]) - - sign = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["X Modulated"]}, - attrs={'operation': 'SIGN'}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Vein Asymmetry"], 1: sign}, - attrs={'operation': 'MULTIPLY'}) - - map_range = nw.new_node(Nodes.MapRange, - 
input_kwargs={'Value': group_input.outputs["Y"], 1: -1.0}) - - vein_shape = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': group_input.outputs["X Modulated"]}, - label='Vein Shape') - node_utils.assign_curve(vein_shape.mapping.curves[0], [(0.0, 0.0), (0.0182, 0.05), (0.3364, 0.2386), (0.7227, 0.75), (1.0, 1.0)]) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': vein_shape, 4: 1.9}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: map_range_1.outputs["Result"], 1: group_input.outputs["Vein Angle"]}, - attrs={'operation': 'MULTIPLY'}) - - multiply_2 = nw.new_node(Nodes.Math, - input_kwargs={0: map_range.outputs["Result"], 1: multiply_1}, - attrs={'operation': 'MULTIPLY'}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_2, 1: group_input.outputs["Y"]}, - attrs={'operation': 'SUBTRACT'}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: multiply, 1: subtract}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Vein Coord': add}) - -@node_utils.to_nodegroup('nodegroup_nodegroup_shape_with_jigsaw', singleton=False, type='GeometryNodeTree') -def nodegroup_nodegroup_shape_with_jigsaw(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Midrib Value', 1.0), - ('NodeSocketFloat', 'Vein Coord', 0.0), - ('NodeSocketFloat', 'Leaf Shape', 0.5), - ('NodeSocketFloat', 'Jigsaw Scale', 18.0), - ('NodeSocketFloat', 'Jigsaw Depth', 0.5)]) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': group_input.outputs["Midrib Value"], 3: 1.0, 4: 0.0}) - - jigsaw = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'W': group_input.outputs["Vein Coord"], 'Scale': group_input.outputs["Jigsaw Scale"]}, - label='Jigsaw', - attrs={'voronoi_dimensions': '1D'}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Jigsaw Depth"], 1: 0.05}, - attrs={'operation': 'MULTIPLY'}) - - multiply_add = nw.new_node(Nodes.Math, - input_kwargs={0: jigsaw.outputs["Distance"], 1: multiply, 2: group_input.outputs["Leaf Shape"]}, - attrs={'operation': 'MULTIPLY_ADD', 'use_clamp': True}) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': multiply_add, 1: 0.001, 2: 0.002, 3: 1.0, 4: 0.0}) - - maximum = nw.new_node(Nodes.Math, - input_kwargs={0: map_range.outputs["Result"], 1: map_range_1.outputs["Result"]}, - attrs={'operation': 'MAXIMUM'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Value': maximum}) - - -@node_utils.to_nodegroup('nodegroup_nodegroup_vein_coord', singleton=False, type='GeometryNodeTree') -def nodegroup_nodegroup_vein_coord(nw: NodeWrangler, vein_curve_control_points, vein_curve_control_handles): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'X Modulated', 0.5), - ('NodeSocketFloat', 'Y', 0.5), - ('NodeSocketFloat', 'Vein Asymmetry', 0.0), - ('NodeSocketFloat', 'Vein Angle', 2.0), - ('NodeSocketFloat', 'Leaf Shape', 0.0)]) - - sign = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["X Modulated"]}, - attrs={'operation': 'SIGN'}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Vein Asymmetry"], 1: sign}, - attrs={'operation': 'MULTIPLY'}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': group_input.outputs["Y"], 1: -1.0}) - - absolute = nw.new_node(Nodes.Math, - input_kwargs={0: 
group_input.outputs["X Modulated"]}, - attrs={'operation': 'ABSOLUTE', 'use_clamp': True}) - - divide = nw.new_node(Nodes.Math, - input_kwargs={0: absolute, 1: group_input.outputs["Leaf Shape"]}, - attrs={'operation': 'DIVIDE', 'use_clamp': True}) - - vein_shape = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': divide}, - label='Vein Shape') - node_utils.assign_curve(vein_shape.mapping.curves[0], vein_curve_control_points, vein_curve_control_handles) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': vein_shape, 4: 1.9}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: map_range_1.outputs["Result"], 1: group_input.outputs["Vein Angle"]}, - attrs={'operation': 'MULTIPLY'}) - - multiply_2 = nw.new_node(Nodes.Math, - input_kwargs={0: map_range.outputs["Result"], 1: multiply_1}, - attrs={'operation': 'MULTIPLY'}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_2, 1: group_input.outputs["Y"]}, - attrs={'operation': 'SUBTRACT'}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: multiply, 1: subtract}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Vein Coord': add}) - -@node_utils.to_nodegroup('nodegroup_nodegroup_shape', singleton=False, type='GeometryNodeTree') -def nodegroup_nodegroup_shape(nw: NodeWrangler, shape_curve_control_points=[(0.0, 0.0), (0.15, 0.2), (0.3864, 0.2625), (0.6227, 0.2), (0.7756, 0.1145), (0.8955, 0.0312), (1.0, 0.0)]): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'X Modulated', 0.0), - ('NodeSocketFloat', 'Y', 0.0)]) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': group_input.outputs["X Modulated"], 'Y': group_input.outputs["Y"]}) - - clamp = nw.new_node(Nodes.Clamp, - input_kwargs={'Value': group_input.outputs["Y"], 'Min': -0.6, 'Max': 0.6}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'Y': clamp}) - - subtract = nw.new_node(Nodes.VectorMath, - input_kwargs={0: combine_xyz, 1: combine_xyz_1}, - attrs={'operation': 'SUBTRACT'}) - - length = nw.new_node(Nodes.VectorMath, - input_kwargs={0: subtract.outputs["Vector"]}, - attrs={'operation': 'LENGTH'}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': group_input.outputs["Y"], 1: -0.6, 2: 0.6}) - - leaf_shape = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': map_range.outputs["Result"]}, - label='Leaf shape') - node_utils.assign_curve(leaf_shape.mapping.curves[0], shape_curve_control_points) - - subtract_1 = nw.new_node(Nodes.Math, - input_kwargs={0: length.outputs["Value"], 1: leaf_shape}, - attrs={'operation': 'SUBTRACT'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Leaf Shape': subtract_1, 'Value': leaf_shape}) - -@node_utils.to_nodegroup('nodegroup_nodegroup_midrib', singleton=False, type='GeometryNodeTree') -def nodegroup_nodegroup_midrib(nw: NodeWrangler, midrib_curve_control_points=[(0.0, 0.5), (0.2455, 0.5078), (0.5, 0.4938), (0.75, 0.503), (0.8773, 0.5125), (1.0, 0.5)]): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'X', 0.5), - ('NodeSocketFloat', 'Y', -0.6), - ('NodeSocketFloat', 'Midrib Length', 0.4), - ('NodeSocketFloat', 'Midrib Width', 1.0), - ('NodeSocketFloat', 'Stem Length', 0.8)]) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': group_input.outputs["Y"], 1: -0.6, 2: 0.6}) - - stem_shape = nw.new_node(Nodes.FloatCurve, - 
input_kwargs={'Value': map_range.outputs["Result"]}, - label='Stem shape') - node_utils.assign_curve(stem_shape.mapping.curves[0], midrib_curve_control_points) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': stem_shape, 3: -1.0}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: map_range_1.outputs["Result"], 1: group_input.outputs["X"]}, - attrs={'operation': 'SUBTRACT'}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Scale': 20.0}) - - map_range_5 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': noise_texture.outputs["Fac"], 3: -1.0}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: map_range_5.outputs["Result"], 1: 0.01}, - attrs={'operation': 'MULTIPLY'}) - - map_range_2 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': group_input.outputs["Y"], 1: -70.0, 2: group_input.outputs["Midrib Length"], 3: group_input.outputs["Midrib Width"], 4: 0.0}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: multiply, 1: map_range_2.outputs["Result"]}) - - absolute = nw.new_node(Nodes.Math, - input_kwargs={0: subtract}, - attrs={'operation': 'ABSOLUTE'}) - - subtract_1 = nw.new_node(Nodes.Math, - input_kwargs={0: add, 1: absolute}, - attrs={'operation': 'SUBTRACT'}) - - absolute_1 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Y"]}, - attrs={'operation': 'ABSOLUTE'}) - - map_range_3 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': absolute_1, 2: group_input.outputs["Stem Length"], 3: 1.0, 4: 0.0}) - - smooth_min = nw.new_node(Nodes.Math, - input_kwargs={0: subtract_1, 1: map_range_3.outputs["Result"], 2: 0.06}, - attrs={'operation': 'SMOOTH_MIN'}) - - divide = nw.new_node(Nodes.Math, - input_kwargs={0: 1.0, 1: smooth_min}, - attrs={'operation': 'DIVIDE', 'use_clamp': True}) - - map_range_4 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': divide, 1: 0.001, 2: 0.03, 3: 1.0, 4: 0.0}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'X Modulated': subtract, 'Midrib Value': map_range_4.outputs["Result"]}) - -@node_utils.to_nodegroup('nodegroup_nodegroup_apply_vein_midrib', singleton=False, type='GeometryNodeTree') -def nodegroup_nodegroup_apply_vein_midrib(nw: NodeWrangler, random_scale_seed=1.08): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Midrib Value', 0.5), - ('NodeSocketFloat', 'Leaf Shape', 1.0), - ('NodeSocketFloat', 'Vein Density', 6.0), - ('NodeSocketFloat', 'Vein Coord - main', 0.0), - ('NodeSocketFloat', 'Vein Coord - 1', 0.0), - ('NodeSocketFloat', 'Vein Coord - 2', 0.0)]) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': group_input.outputs["Leaf Shape"], 1: -0.3, 2: 0.05, 3: 0.015, 4: 0.0}) - - nodegroup = nw.new_node(nodegroup_random_mask_vein().name, - input_kwargs={'Coord': group_input.outputs["Vein Coord - 2"], 'Shape': map_range.outputs["Result"], 'Density': group_input.outputs["Vein Density"], 'Random Scale Seed': random_scale_seed*2.7}) - - nodegroup_1 = nw.new_node(nodegroup_random_mask_vein().name, - input_kwargs={'Coord': group_input.outputs["Vein Coord - 1"], 'Shape': map_range.outputs["Result"], 'Density': group_input.outputs["Vein Density"], 'Random Scale Seed': random_scale_seed}) - - vein = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'W': group_input.outputs["Vein Coord - main"], 'Scale': group_input.outputs["Vein Density"], 'Randomness': 0.2}, - label='Vein', - attrs={'voronoi_dimensions': '1D'}) - - position = 
nw.new_node(Nodes.InputPosition) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': position, 'Scale': 20.0}) - - map_range_3 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': noise_texture.outputs["Fac"], 3: -1.0}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: map_range_3.outputs["Result"], 1: 0.02}, - attrs={'operation': 'MULTIPLY'}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: vein.outputs["Distance"], 1: multiply}) - - map_range_4 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': add, 2: 0.03, 3: 1.0, 4: 0.0}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: map_range.outputs["Result"], 1: map_range_4.outputs["Result"]}, - attrs={'operation': 'MULTIPLY'}) - - map_range_5 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': multiply_1, 1: 0.001, 2: 0.01, 3: 1.0, 4: 0.0}) - - multiply_2 = nw.new_node(Nodes.Math, - input_kwargs={0: nodegroup_1, 1: map_range_5.outputs["Result"]}, - attrs={'operation': 'MULTIPLY'}) - - multiply_3 = nw.new_node(Nodes.Math, - input_kwargs={0: nodegroup, 1: multiply_2}, - attrs={'operation': 'MULTIPLY'}) - - multiply_4 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Midrib Value"], 1: multiply_3}, - attrs={'operation': 'MULTIPLY'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Vein Value': multiply_4}) - -@node_utils.to_nodegroup('nodegroup_nodegroup_sub_vein', singleton=False, type='GeometryNodeTree') -def nodegroup_nodegroup_sub_vein(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'X', 0.5), - ('NodeSocketFloat', 'Y', 0.0)]) - - absolute = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["X"]}, - attrs={'operation': 'ABSOLUTE'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': absolute, 'Y': group_input.outputs["Y"]}) - - voronoi_texture = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': combine_xyz, 'Scale': 30.0}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': voronoi_texture.outputs["Distance"], 2: 0.1, 4: 2.0}, - attrs={'clamp': False}) - - voronoi_texture_1 = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': combine_xyz, 'Scale': 150.0}, - attrs={'feature': 'DISTANCE_TO_EDGE'}) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': voronoi_texture_1.outputs["Distance"], 2: 0.1}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: map_range.outputs["Result"], 1: map_range_1.outputs["Result"]}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: add, 1: -1.0}, - attrs={'operation': 'MULTIPLY'}) - - map_range_3 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': map_range_1.outputs["Result"], 4: -1.0}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Value': multiply, 'Color Value': map_range_3.outputs["Result"]}) - -@node_utils.to_nodegroup('nodegroup_nodegroup_leaf_gen', singleton=False, type='GeometryNodeTree') -def nodegroup_nodegroup_leaf_gen(nw: NodeWrangler, **kwargs): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Mesh', None), - ('NodeSocketFloat', 'Displancement scale', 0.5), - ('NodeSocketFloat', 'Vein Asymmetry', 0.0), - ('NodeSocketFloat', 'Vein Density', 6.0), - ('NodeSocketFloat', 'Jigsaw Scale', 18.0), - ('NodeSocketFloat', 'Jigsaw Depth', 0.07), - ('NodeSocketFloat', 'Vein Angle', 1.0), - 
('NodeSocketFloat', 'Sub-vein Displacement', 0.5), - ('NodeSocketFloat', 'Sub-vein Scale', 50.0), - ('NodeSocketFloat', 'Wave Displacement', 0.1), - ('NodeSocketFloat', 'Midrib Length', 0.4), - ('NodeSocketFloat', 'Midrib Width', 1.0), - ('NodeSocketFloat', 'Stem Length', 0.8),]) - - position = nw.new_node(Nodes.InputPosition) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': position}) - - nodegroup_midrib = nw.new_node(nodegroup_nodegroup_midrib(midrib_curve_control_points=kwargs['midrib_curve_control_points']).name, - input_kwargs={'X': separate_xyz.outputs["X"], 'Y': separate_xyz.outputs["Y"], 'Midrib Length': group_input.outputs["Midrib Length"], - 'Midrib Width': group_input.outputs["Midrib Width"], - 'Stem Length': group_input.outputs["Stem Length"]}) - - nodegroup_shape = nw.new_node(nodegroup_nodegroup_shape(shape_curve_control_points=kwargs['shape_curve_control_points']).name, - input_kwargs={'X Modulated': nodegroup_midrib.outputs["X Modulated"], 'Y': separate_xyz.outputs["Y"]}) - - nodegroup_vein_coord = nw.new_node(nodegroup_nodegroup_vein_coord( - vein_curve_control_points=[(0.0, 0.0), (0.0182, 0.05), (0.3364, 0.2386), (0.6045, 0.4812), (0.7, 0.725), (0.8273, 0.8437), (1.0, 1.0)], - vein_curve_control_handles=['AUTO', 'AUTO', 'AUTO', 'VECTOR', 'AUTO', 'AUTO', 'AUTO']).name, - input_kwargs={'X Modulated': nodegroup_midrib.outputs["X Modulated"], 'Y': separate_xyz.outputs["Y"], - 'Vein Asymmetry': group_input.outputs["Vein Asymmetry"], 'Vein Angle': group_input.outputs["Vein Angle"], 'Leaf Shape': nodegroup_shape.outputs["Value"]}) - - nodegroup_vein_coord_002 = nw.new_node(nodegroup_nodegroup_vein_coord( - vein_curve_control_points=[(0.0, 0.0), (0.0182, 0.05), (0.3364, 0.2386), (0.8091, 0.7312), (1.0, 0.9937)], - vein_curve_control_handles=['AUTO', 'AUTO', 'AUTO', 'AUTO', 'AUTO']).name, - input_kwargs={'X Modulated': nodegroup_midrib.outputs["X Modulated"], 'Y': separate_xyz.outputs["Y"], - 'Vein Asymmetry': group_input.outputs["Vein Asymmetry"], 'Vein Angle': group_input.outputs["Vein Angle"], 'Leaf Shape': nodegroup_shape.outputs["Value"]}) - - nodegroup_vein_coord_003 = nw.new_node(nodegroup_nodegroup_vein_coord( - vein_curve_control_points=[(0.0, 0.0), (0.0182, 0.05), (0.2909, 0.2199), (0.4182, 0.3063), (0.7045, 0.3), (1.0, 0.8562)], - vein_curve_control_handles=['AUTO', 'AUTO', 'AUTO', 'VECTOR', 'AUTO', 'AUTO']).name, - input_kwargs={'X Modulated': nodegroup_midrib.outputs["X Modulated"], 'Y': separate_xyz.outputs["Y"], - 'Vein Asymmetry': group_input.outputs["Vein Asymmetry"], 'Vein Angle': group_input.outputs["Vein Angle"], 'Leaf Shape': nodegroup_shape.outputs["Value"]}) - - nodegroup_apply_vein_midrib = nw.new_node(nodegroup_nodegroup_apply_vein_midrib(random_scale_seed=kwargs['vein_mask_random_seed']).name, - input_kwargs={'Midrib Value': nodegroup_midrib.outputs["Midrib Value"], 'Leaf Shape': nodegroup_shape.outputs["Leaf Shape"], 'Vein Density': group_input.outputs["Vein Density"], 'Vein Coord - main': nodegroup_vein_coord_002, 'Vein Coord - 1': nodegroup_vein_coord, 'Vein Coord - 2': nodegroup_vein_coord_003}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Displancement scale"], 1: nodegroup_apply_vein_midrib}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'Z': multiply}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': group_input.outputs["Mesh"], 'Offset': combine_xyz}) - - nodegroup_shape_with_jigsaw = 
nw.new_node(nodegroup_nodegroup_shape_with_jigsaw().name, - input_kwargs={'Midrib Value': nodegroup_midrib.outputs["Midrib Value"], 'Vein Coord': nodegroup_vein_coord_002, 'Leaf Shape': nodegroup_shape.outputs["Leaf Shape"], 'Jigsaw Scale': group_input.outputs["Jigsaw Scale"], 'Jigsaw Depth': group_input.outputs["Jigsaw Depth"]}) - - less_than = nw.new_node(Nodes.Compare, - input_kwargs={0: nodegroup_shape_with_jigsaw, 1: 0.5}, - attrs={'operation': 'LESS_THAN'}) - - delete_geometry = nw.new_node(Nodes.DeleteGeom, - input_kwargs={'Geometry': set_position, 'Selection': less_than}) - - capture_attribute = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': delete_geometry, 2: nodegroup_apply_vein_midrib}) - - position_1 = nw.new_node(Nodes.InputPosition) - - separate_xyz_1 = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': position_1}) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': separate_xyz_1.outputs["Y"], 1: -0.6, 2: 0.6}) - - float_curve_1 = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': map_range_1.outputs["Result"]}) - node_utils.assign_curve(float_curve_1.mapping.curves[0], [(0.0, 0.0), (0.5182, 1.0), (1.0, 1.0)]) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': nodegroup_shape.outputs["Leaf Shape"], 2: -1.0}) - - float_curve = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': map_range.outputs["Result"]}) - node_utils.assign_curve(float_curve.mapping.curves[0], [(0.0045, 0.0063), (0.0409, 0.0375), (0.4182, 0.05), (1.0, 0.0)]) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: float_curve_1, 1: float_curve}, - attrs={'operation': 'MULTIPLY'}) - - multiply_2 = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_1, 1: 0.7}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'Z': multiply_2}) - - set_position_1 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': capture_attribute.outputs["Geometry"], 'Offset': combine_xyz_1}) - - nodegroup_vein_coord_001 = nw.new_node(nodegroup_nodegroup_vein_coord_001().name, - input_kwargs={'X Modulated': nodegroup_midrib.outputs["X Modulated"], 'Y': separate_xyz.outputs["Y"], 'Vein Asymmetry': group_input.outputs["Vein Asymmetry"], 'Vein Angle': group_input.outputs["Vein Angle"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Mesh': set_position_1, 'Attribute': capture_attribute.outputs[2], 'X Modulated': nodegroup_midrib.outputs["X Modulated"], 'Vein Coord': nodegroup_vein_coord_001, 'Vein Value': nodegroup_apply_vein_midrib}) - -def shader_material(nw: NodeWrangler, **kwargs): - # Code generated using version 2.4.3 of the node_transpiler - - attribute_1 = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'vein value'}) - - # rgb_3 = nw.new_node(Nodes.RGB) - # rgb_3.outputs[0].default_value = (0.9823, 0.8388, 0.117, 1.0) - - texture_coordinate = nw.new_node(Nodes.TextureCoord) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': texture_coordinate.outputs["Object"], 'Scale': 6.8, 'Detail': 10.0, 'Roughness': 0.7}) - - separate_rgb = nw.new_node(Nodes.SeparateRGB, - input_kwargs={'Image': noise_texture.outputs["Color"]}) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': separate_rgb.outputs["G"], 1: 0.4, 2: 0.7, 3: 0.48, 4: 0.52}) - - map_range_2 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': separate_rgb.outputs["B"], 1: 0.4, 2: 0.7, 3: 0.8, 4: 1.2}) - - attribute = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 
'subvein offset'}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': attribute.outputs["Color"], 2: -0.94}) - - # rgb_1 = nw.new_node(Nodes.RGB) - # rgb_1.outputs[0].default_value = (0.1878, 0.305, 0.0762, 1.0) - - # rgb = nw.new_node(Nodes.RGB) - # rgb.outputs[0].default_value = (0.0762, 0.1441, 0.0529, 1.0) - - hue_saturation_value_1 = nw.new_node('ShaderNodeHueSaturation', - input_kwargs={'Value': 2.0, 'Color': kwargs['color_base']}) - - mix = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': map_range.outputs["Result"], 'Color1': hue_saturation_value_1, 'Color2': kwargs['color_base']}) - - hue_saturation_value = nw.new_node('ShaderNodeHueSaturation', - input_kwargs={'Hue': map_range_1.outputs["Result"], 'Value': map_range_2.outputs["Result"], 'Color': mix}) - - mix_1 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': attribute_1.outputs["Color"], 'Color1': kwargs['color_vein'], 'Color2': hue_saturation_value}) - - leaf_shader = nw.new_node(nodegroup_leaf_shader().name, - input_kwargs={'Color': mix_1}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': leaf_shader}) - -def geo_leaf_broadleaf(nw: NodeWrangler, **kwargs): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None)]) - - subdivide_mesh = nw.new_node(Nodes.SubdivideMesh, - input_kwargs={'Mesh': group_input.outputs["Geometry"], 'Level': 10}) - - # subdivide_mesh_1 = nw.new_node(Nodes.SubdivideMesh, - # input_kwargs={'Mesh': subdivide_mesh}) - - position = nw.new_node(Nodes.InputPosition) - - capture_attribute = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': subdivide_mesh, 1: position}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - nodegroup_leaf_gen = nw.new_node(nodegroup_nodegroup_leaf_gen(**kwargs).name, - input_kwargs={'Mesh': capture_attribute.outputs["Geometry"], - 'Displancement scale': 0.005, - 'Vein Asymmetry': kwargs['vein_asymmetry'], # 0.3023 - 'Vein Density': kwargs['vein_density'], # 7.0 - 'Jigsaw Scale': kwargs['jigsaw_scale'], # 50 - 'Jigsaw Depth': kwargs['jigsaw_depth'], # 0.3 - 'Vein Angle': kwargs['vein_angle'], # 0.3 - 'Midrib Length': kwargs['midrib_length'], # 0.3336 - 'Midrib Width': kwargs['midrib_length'], # 0.6302, - 'Stem Length': kwargs['stem_length'], - }) - - nodegroup_sub_vein = nw.new_node(nodegroup_nodegroup_sub_vein().name, - input_kwargs={'X': nodegroup_leaf_gen.outputs["X Modulated"], 'Y': nodegroup_leaf_gen.outputs["Vein Coord"]}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: nodegroup_sub_vein.outputs["Value"], 1: 0.0002}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'Z': multiply}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': nodegroup_leaf_gen.outputs["Mesh"], 'Offset': combine_xyz}) - - capture_attribute_1 = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': set_position, 2: nodegroup_sub_vein.outputs["Color Value"]}) - - capture_attribute_2 = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': capture_attribute_1.outputs["Geometry"], 2: nodegroup_leaf_gen.outputs["Vein Value"]}) - - apply_wave = nw.new_node(nodegroup_apply_wave(y_wave_control_points=kwargs['y_wave_control_points'], x_wave_control_points=kwargs['x_wave_control_points']).name, - input_kwargs={'Geometry': capture_attribute_2.outputs["Geometry"], 'Wave Scale X': 0.2, 'Wave Scale Y': 1.0, 'X Modulated': 
nodegroup_leaf_gen.outputs["X Modulated"]}) - - move_to_origin = nw.new_node(nodegroup_move_to_origin().name, - input_kwargs={'Geometry': apply_wave}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': move_to_origin, 'Offset': nodegroup_leaf_gen.outputs["Attribute"], 'Coordinate': capture_attribute.outputs["Attribute"], 'subvein offset': capture_attribute_1.outputs[2], 'vein value': capture_attribute_2.outputs[2]}) - - - -class LeafFactoryBroadleaf(AssetFactory): - - scale = 0.5 - - def __init__(self, factory_seed, season='autumn', coarse=False): - super(LeafFactoryBroadleaf, self).__init__(factory_seed, coarse=coarse) - - with FixedSeed(factory_seed): - self.genome = self.sample_geo_genome() - - t = uniform(0.0, 1.0) - - if season=='autumn': - hsvcol_blade = [uniform(0.0, 0.20), 0.85, 0.9] - hsvcol_vein = np.copy(hsvcol_blade) - hsvcol_vein[2] = 0.7 - - elif season=='summer' or season=='spring': - hsvcol_blade = [uniform(0.28, 0.32), uniform(0.6, 0.7), 0.9] - hsvcol_vein = np.copy(hsvcol_blade) - hsvcol_blade[2] = uniform(0.1, 0.5) - hsvcol_vein[2] = uniform(0.1, 0.5) - - elif season=='winter': - hsvcol_blade = [uniform(0.0, 0.10), uniform(0.2, 0.6), uniform(0.0, 0.1)] - hsvcol_vein = [uniform(0.0, 0.10), uniform(0.2, 0.6), uniform(0.0, 0.1)] - - else: - raise NotImplementedError - - self.blade_color = hsvcol_blade - self.vein_color = hsvcol_vein - - self.color_randomness = uniform(0.05, 0.10) - - # if t < 0.5: - # self.blade_color = np.array((0.2346, 0.4735, 0.0273, 1.0)) - # else: - # self.blade_color = np.array((1.000, 0.855, 0.007, 1.0)) - - @staticmethod - def sample_geo_genome(): - leaf_width_1 = uniform(0.2, 0.4) - leaf_width_2 = uniform(0.1, leaf_width_1) - - leaf_offset_1 = uniform(0.49, 0.51) - - return { - 'midrib_length': uniform(0.0, 0.8), - 'midrib_width': uniform(0.5, 1.0), - 'stem_length': uniform(0.7, 0.9), - 'vein_asymmetry': uniform(0.0, 1.0), - 'vein_angle': uniform(0.4, 1.0), - 'vein_density': uniform(3.0, 8.0), - 'subvein_scale': uniform(10.0, 20.0), - 'jigsaw_scale': uniform(30.0, 70.0), - 'jigsaw_depth': uniform(0.0, 0.6), - 'vein_mask_random_seed': uniform(0.0, 100.0), - 'midrib_curve_control_points': [(0.0, 0.5), (0.25, leaf_offset_1), (0.75, 1.0-leaf_offset_1), (1.0, 0.5)], - 'shape_curve_control_points': [(0.0, 0.0), (uniform(0.2, 0.4), leaf_width_1), (uniform(0.6, 0.8), leaf_width_2), (1.0, 0.0)], - 'vein_curve_control_points': [(0.0, 0.0), (0.25, uniform(0.1, 0.4)), (0.75, uniform(0.6, 0.9)), (1.0, 1.0)], - } - - def create_asset(self, **params): - - bpy.ops.mesh.primitive_plane_add( - size=2, enter_editmode=False, align='WORLD', location=(0, 0, 0), scale=(1, 1, 1)) - obj = bpy.context.active_object - - # add noise to the genotype output - #hue_noise = np.random.randn() * 0 - #hsv_blade = self.hsv_blade + hue_noise - #hsv_vein = self.hsv_vein + hue_noise - - phenome = self.genome.copy() - - phenome['y_wave_control_points'] = [(0.0, 0.5), (uniform(0.25, 0.75), uniform(0.50, 0.60)), (1.0, 0.5)] - x_wave_val = np.random.uniform(0.50, 0.58) - phenome['x_wave_control_points'] = [(0.0, 0.5), (0.4, x_wave_val), (0.5, 0.5), (0.6, x_wave_val), (1.0, 0.5)] - - material_kwargs = phenome.copy() - material_kwargs['color_base'] = np.copy(self.blade_color) # (0.2346, 0.4735, 0.0273, 1.0), - material_kwargs['color_base'][0] += np.random.normal(0.0, 0.02) - material_kwargs['color_base'][1] += np.random.normal(0.0, self.color_randomness) - material_kwargs['color_base'][2] += np.random.normal(0.0, self.color_randomness) - 
material_kwargs['color_base'] = hsv2rgba(material_kwargs['color_base']) - - material_kwargs['color_vein'] = np.copy(self.vein_color) # (0.2346, 0.4735, 0.0273, 1.0), - material_kwargs['color_vein'][0] += np.random.normal(0.0, 0.02) - material_kwargs['color_vein'][1] += np.random.normal(0.0, self.color_randomness) - material_kwargs['color_vein'][2] += np.random.normal(0.0, self.color_randomness) - material_kwargs['color_vein'] = hsv2rgba(material_kwargs['color_vein']) - - surface.add_geomod(obj, geo_leaf_broadleaf, apply=False, attributes=['offset', 'coordinate', 'subvein offset', 'vein value'], input_kwargs=phenome) - surface.add_material(obj, shader_material, reuse=False, input_kwargs=material_kwargs) - - bpy.ops.object.convert(target='MESH') - - obj = bpy.context.object - obj.scale *= normal(1, 0.1) * self.scale - butil.apply_transform(obj) - tag_object(obj, 'leaf_broadleaf') - - return obj \ No newline at end of file diff --git a/infinigen/assets/leaves/leaf_ginko.py b/infinigen/assets/leaves/leaf_ginko.py deleted file mode 100644 index 7beee01a9..000000000 --- a/infinigen/assets/leaves/leaf_ginko.py +++ /dev/null @@ -1,528 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Yiming Zuo - - -import bpy -import mathutils -import numpy as np -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category, hsv2rgba -from infinigen.core import surface -from infinigen.assets.leaves.leaf_v2 import nodegroup_move_to_origin, nodegroup_apply_wave -from infinigen.assets.leaves.leaf_maple import nodegroup_leaf_shader - -from infinigen.core.util.math import FixedSeed -from infinigen.core.placement.factory import AssetFactory -from infinigen.core.util import blender as butil -from infinigen.core.tagging import tag_object, tag_nodegroup - -def deg2rad(deg): - return deg / 180.0 * np.pi - -@node_utils.to_nodegroup('nodegroup_ginko_stem', singleton=False, type='GeometryNodeTree') -def nodegroup_ginko_stem(nw: NodeWrangler, stem_curve_control_points=[(0.0, 0.4938), (0.3659, 0.4969), (0.7477, 0.4688), (1.0, 0.4969)]): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'Coordinate', (0.0, 0.0, 0.0)), - ('NodeSocketFloat', 'Length', 0.64), - ('NodeSocketFloat', 'Value', 0.005)]) - - add = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["Coordinate"], 1: (0.0, 0.03, 0.0)}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': add.outputs["Vector"]}) - - map_range_2 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': separate_xyz.outputs["Y"], 1: -1.0, 2: 0.0}) - - float_curve_1 = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': map_range_2.outputs["Result"]}) - node_utils.assign_curve(float_curve_1.mapping.curves[0], stem_curve_control_points) - - map_range_3 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': float_curve_1, 3: -1.0}) - - add_1 = nw.new_node(Nodes.Math, - input_kwargs={0: map_range_3.outputs["Result"], 1: separate_xyz.outputs["X"]}) - - absolute = nw.new_node(Nodes.Math, - input_kwargs={0: add_1}, - attrs={'operation': 'ABSOLUTE'}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': separate_xyz.outputs["Y"], 1: -1.72, 2: -0.35, 3: 0.03, 
4: 0.008}, - attrs={'interpolation_type': 'SMOOTHSTEP'}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: absolute, 1: map_range.outputs["Result"]}, - attrs={'operation': 'SUBTRACT'}) - - add_2 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz.outputs["Y"], 1: group_input.outputs["Length"]}) - - absolute_1 = nw.new_node(Nodes.Math, - input_kwargs={0: add_2}, - attrs={'operation': 'ABSOLUTE'}) - - subtract_1 = nw.new_node(Nodes.Math, - input_kwargs={0: absolute_1, 1: group_input.outputs["Length"]}, - attrs={'operation': 'SUBTRACT'}) - - smooth_max = nw.new_node(Nodes.Math, - input_kwargs={0: subtract, 1: subtract_1, 2: 0.02}, - attrs={'operation': 'SMOOTH_MAX'}) - - subtract_2 = nw.new_node(Nodes.Math, - input_kwargs={0: smooth_max, 1: group_input.outputs["Value"]}, - attrs={'operation': 'SUBTRACT'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Stem': subtract_2, 'Stem Raw': absolute}) - -@node_utils.to_nodegroup('nodegroup_ginko_vein', singleton=False, type='GeometryNodeTree') -def nodegroup_ginko_vein(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'Vector', (0.0, 0.0, 0.0)), - ('NodeSocketFloat', 'Scale Vein', 80.0), - ('NodeSocketFloat', 'Scale Wave', 5.0)]) - - subtract = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["Vector"], 1: (-0.18, 0.0, 0.0)}, - attrs={'operation': 'SUBTRACT'}) - - noise_texture_1 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': subtract.outputs["Vector"]}) - - gradient_texture_1 = nw.new_node(Nodes.GradientTexture, - input_kwargs={'Vector': subtract.outputs["Vector"]}, - attrs={'gradient_type': 'RADIAL'} - ) - - pingpong = nw.new_node(Nodes.Math, - input_kwargs={0: gradient_texture_1.outputs["Fac"]}, - attrs={'operation': 'PINGPONG'}) - - length = nw.new_node(Nodes.VectorMath, - input_kwargs={0: subtract.outputs["Vector"]}, - attrs={'operation': 'LENGTH'}) - - subtract_1 = nw.new_node(Nodes.Math, - input_kwargs={0: pingpong}, - attrs={'operation': 'SUBTRACT'}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: subtract_1, 1: -0.44}, - attrs={'operation': 'MULTIPLY'}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: length.outputs["Value"], 1: multiply}, - attrs={'operation': 'MULTIPLY'}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: pingpong, 1: multiply_1}) - - multiply_add = nw.new_node(Nodes.Math, - input_kwargs={0: noise_texture_1.outputs["Fac"], 1: 0.005, 2: add}, - attrs={'operation': 'MULTIPLY_ADD'}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': multiply_add}) - - wave_texture_1 = nw.new_node(Nodes.WaveTexture, - input_kwargs={'Vector': combine_xyz_2, 'Scale': group_input.outputs["Scale Vein"], 'Distortion': 0.6, 'Detail': 3.0, 'Detail Scale': 5.0, 'Detail Roughness': 1.0, 'Phase Offset': -4.62}) - - multiply_2 = nw.new_node(Nodes.Math, - input_kwargs={0: wave_texture_1.outputs["Color"], 1: length.outputs["Value"]}, - attrs={'operation': 'MULTIPLY'}) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': multiply_2, 1: 0.15, 2: -0.32, 4: -0.02}) - - multiply_add_1 = nw.new_node(Nodes.Math, - input_kwargs={0: noise_texture_1.outputs["Fac"], 1: 0.03, 2: add}, - attrs={'operation': 'MULTIPLY_ADD'}) - - combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': multiply_add_1}) - - wave_texture_2 = nw.new_node(Nodes.WaveTexture, - input_kwargs={'Vector': combine_xyz_3, 'Scale': 
group_input.outputs["Scale Wave"], 'Distortion': -0.42, 'Detail': 10.0, 'Detail Roughness': 1.0, 'Phase Offset': -4.62}) - - multiply_3 = nw.new_node(Nodes.Math, - input_kwargs={0: wave_texture_2.outputs["Fac"], 1: length.outputs["Value"]}, - attrs={'operation': 'MULTIPLY'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Vein': map_range_1.outputs["Result"], 'Wave': multiply_3}) - -@node_utils.to_nodegroup('nodegroup_ginko_shape', singleton=False, type='GeometryNodeTree') -def nodegroup_ginko_shape(nw: NodeWrangler, shape_curve_control_points=[(0.0, 0.0), (0.523, 0.1156), (0.5805, 0.7469), (0.7742, 0.7719), (0.9461, 0.7531), (1.0, 0.0)]): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'Coordinate', (0.0, 0.0, 0.0)), - ('NodeSocketFloat', 'Multiplier', 1.980), - ('NodeSocketFloat', 'Scale Margin', 6.6), - ]) - - multiply = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["Coordinate"], 1: (0.9, 1.0, 0.0)}, - attrs={'operation': 'MULTIPLY'}) - - length = nw.new_node(Nodes.VectorMath, - input_kwargs={0: multiply.outputs["Vector"]}, - attrs={'operation': 'LENGTH'}) - - gradient_texture = nw.new_node('ShaderNodeTexGradient', - input_kwargs={'Vector': group_input.outputs["Coordinate"]}) - - gradient_texture = nw.new_node(Nodes.GradientTexture, - input_kwargs={'Vector': group_input.outputs["Coordinate"]}, - attrs={'gradient_type': 'RADIAL'}) - - pingpong = nw.new_node(Nodes.Math, - input_kwargs={0: gradient_texture.outputs["Fac"]}, - attrs={'operation': 'PINGPONG'}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: pingpong, 1: group_input.outputs["Multiplier"]}, - attrs={'operation': 'MULTIPLY'}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'W': gradient_texture.outputs["Fac"]}, - attrs={'noise_dimensions': '1D'}) - - multiply_2 = nw.new_node(Nodes.Math, - input_kwargs={0: noise_texture.outputs["Fac"], 1: 0.3}, - attrs={'operation': 'MULTIPLY'}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_1, 1: multiply_2}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': add}) - - wave_texture = nw.new_node(Nodes.WaveTexture, - input_kwargs={'Vector': combine_xyz_1, 'Scale': group_input.outputs["Scale Margin"], 'Distortion': 5.82, 'Detail': 1.52, 'Detail Roughness': 1.0}) - - multiply_3 = nw.new_node(Nodes.Math, - input_kwargs={0: wave_texture.outputs["Fac"], 1: 0.02}, - attrs={'operation': 'MULTIPLY'}) - - float_curve = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': multiply_1}) - node_utils.assign_curve(float_curve.mapping.curves[0], shape_curve_control_points) - - add_1 = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_3, 1: float_curve}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: length.outputs["Value"], 1: add_1}, - attrs={'operation': 'SUBTRACT'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Value': subtract}) - -@node_utils.to_nodegroup('nodegroup_valid_area', singleton=False, type='GeometryNodeTree') -def nodegroup_valid_area(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Value', 0.5)]) - - sign = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Value"]}, - attrs={'operation': 'SIGN'}) - - map_range_4 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': sign, 1: -1.0, 3: 1.0, 4: 0.0}) - - group_output = 
nw.new_node(Nodes.GroupOutput, - input_kwargs={'Result': map_range_4.outputs["Result"]}) - -@node_utils.to_nodegroup('nodegroup_ginko', singleton=False, type='GeometryNodeTree') -def nodegroup_ginko(nw: NodeWrangler, stem_curve_control_points, shape_curve_control_points): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Mesh', None), - ('NodeSocketFloat', 'Vein Length', 0.64), - ('NodeSocketFloat', 'Vein Width', 0.005), - ('NodeSocketFloatAngle', 'Angle', -1.7617), - ('NodeSocketFloat', 'Displacenment', 0.5), - ('NodeSocketFloat', 'Multiplier', 1.980), - ('NodeSocketFloat', 'Scale Vein', 80.0), - ('NodeSocketFloat', 'Scale Wave', 5.0), - ('NodeSocketFloat', 'Scale Margin', 6.6), - ('NodeSocketInt', 'Level', 9), - ]) - - subdivide_mesh = nw.new_node(Nodes.SubdivideMesh, - input_kwargs={'Mesh': group_input.outputs["Mesh"], 'Level': group_input.outputs["Level"]}) - - position = nw.new_node(Nodes.InputPosition) - - vector_rotate = nw.new_node(Nodes.VectorRotate, - input_kwargs={'Vector': position, 'Angle': group_input.outputs["Angle"]}, - attrs={'rotation_type': 'Z_AXIS'}) - - ginkoshape = nw.new_node(nodegroup_ginko_shape(shape_curve_control_points=shape_curve_control_points).name, - input_kwargs={'Coordinate': vector_rotate, 'Multiplier': group_input.outputs["Multiplier"], 'Scale Margin': group_input.outputs["Scale Margin"]}) - - validarea = nw.new_node(nodegroup_valid_area().name, - input_kwargs={'Value': ginkoshape}) - - ginkovein = nw.new_node(nodegroup_ginko_vein().name, - input_kwargs={'Vector': vector_rotate, 'Scale Vein': group_input.outputs["Scale Vein"], 'Scale Wave': group_input.outputs["Scale Wave"]}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: validarea, 1: ginkovein.outputs["Vein"]}, - attrs={'operation': 'MULTIPLY'}) - - map_range_4 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': ginkoshape, 1: -1.0, 2: 0.0, 3: -5.0, 4: 0.0}, - attrs={'clamp': False}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: multiply, 1: map_range_4.outputs["Result"]}, - attrs={'operation': 'MULTIPLY', 'use_clamp': True}) - - clamp = nw.new_node(Nodes.Clamp, - input_kwargs={'Value': multiply_1, 'Max': 0.01}) - - capture_attribute_1 = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': subdivide_mesh, 2: clamp}) - - capture_attribute = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': capture_attribute_1.outputs["Geometry"], 2: ginkoshape}) - - ginkostem = nw.new_node(nodegroup_ginko_stem(stem_curve_control_points=stem_curve_control_points).name, - input_kwargs={'Coordinate': position, 'Length': group_input.outputs["Vein Length"], 'Value': group_input.outputs["Vein Width"]}) - - smooth_min = nw.new_node(Nodes.Math, - input_kwargs={0: ginkoshape, 1: ginkostem.outputs["Stem"], 2: 0.1}, - attrs={'operation': 'SMOOTH_MIN'}) - - multiply_2 = nw.new_node(Nodes.Math, - input_kwargs={0: smooth_min, 1: -1.0}, - attrs={'operation': 'MULTIPLY'}) - - stem_length = nw.new_node(Nodes.Compare, - input_kwargs={0: multiply_2, 1: 0.0}, - label='stem length', - attrs={'operation': 'LESS_THAN'}) - - delete_geometry = nw.new_node(Nodes.DeleteGeom, - input_kwargs={'Geometry': capture_attribute.outputs["Geometry"], 'Selection': stem_length}) - - validarea_1 = nw.new_node(nodegroup_valid_area().name, - input_kwargs={'Value': ginkostem.outputs["Stem"]}) - - multiply_3 = nw.new_node(Nodes.Math, - input_kwargs={0: validarea_1, 1: ginkostem.outputs["Stem Raw"]}, - 
attrs={'operation': 'MULTIPLY'}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_3, 1: clamp}) - - multiply_4 = nw.new_node(Nodes.Math, - input_kwargs={0: add, 1: group_input.outputs["Displacenment"]}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'Z': multiply_4}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': delete_geometry, 'Offset': combine_xyz}) - - validarea_2 = nw.new_node(nodegroup_valid_area().name, - input_kwargs={'Value': ginkoshape}) - - multiply_5 = nw.new_node(Nodes.Math, - input_kwargs={0: validarea_2, 1: ginkovein.outputs["Wave"]}, - attrs={'operation': 'MULTIPLY'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_position, 'Vein': capture_attribute_1.outputs[2], 'Shape': capture_attribute.outputs[2], 'Wave': multiply_5}) - -def shader_material(nw: NodeWrangler, **kwargs): - # Code generated using version 2.4.3 of the node_transpiler - - attribute = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'vein'}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': attribute.outputs["Color"], 2: 0.12, 4: 6.26}) - - attribute_1 = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'shape'}) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': attribute_1.outputs["Color"], 1: -0.74, 2: 0.01, 3: 2.0, 4: 0.0}) - - float_curve = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': map_range_1.outputs["Result"]}) - node_utils.assign_curve(float_curve.mapping.curves[0], [(0.0, 0.0), (0.3795, 0.6344), (1.0, 1.0)]) - - separate_hsv = nw.new_node('ShaderNodeSeparateHSV', - input_kwargs={'Color': kwargs['color_base']}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: separate_hsv.outputs["V"], 1: 0.2}, - attrs={'operation': 'SUBTRACT'}) - - combine_hsv = nw.new_node(Nodes.CombineHSV, - input_kwargs={'H': separate_hsv.outputs["H"], 'S': separate_hsv.outputs["S"], 'V': subtract}) - - mix_1 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': float_curve, 'Color1': kwargs['color_base'], 'Color2': combine_hsv}) - - mix = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': map_range.outputs["Result"], 'Color1': mix_1, 'Color2': kwargs['color_vein']}) - - group = nw.new_node(nodegroup_leaf_shader().name, - input_kwargs={'Color': mix}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': group}) - -def geo_leaf_ginko(nw: NodeWrangler, **kwargs): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None)]) - - nodegroup = nw.new_node(nodegroup_ginko(stem_curve_control_points=kwargs['stem_curve_control_points'], - shape_curve_control_points=kwargs['shape_curve_control_points']).name, - input_kwargs={'Mesh': group_input.outputs["Geometry"], - 'Vein Length': kwargs['vein_length'], - 'Angle': deg2rad(kwargs['angle']), - 'Multiplier': kwargs['multiplier'], - 'Scale Vein': kwargs['scale_vein'], - 'Scale Wave': kwargs['scale_wave'], - 'Scale Margin': kwargs['scale_margin'], - }) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': nodegroup.outputs["Wave"], 4: 0.04}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'Z': map_range.outputs["Result"]}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': nodegroup.outputs["Geometry"], 'Offset': combine_xyz}) - - position = nw.new_node(Nodes.InputPosition) - - separate_xyz = 
nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': position}) - - apply_wave = nw.new_node(nodegroup_apply_wave(y_wave_control_points=kwargs['y_wave_control_points'], x_wave_control_points=kwargs['x_wave_control_points']).name, - input_kwargs={'Geometry': set_position, 'Wave Scale X': 0.0, 'Wave Scale Y': 1.0, 'X Modulated': separate_xyz.outputs["X"]}) - - move_to_origin = nw.new_node(nodegroup_move_to_origin().name, - input_kwargs={'Geometry': apply_wave}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': move_to_origin, 'Vein': nodegroup.outputs["Vein"], 'Shape': nodegroup.outputs["Shape"]}) - -class LeafFactoryGinko(AssetFactory): - - scale = 0.3 - - def __init__(self, factory_seed, season='autumn', coarse=False): - super(LeafFactoryGinko, self).__init__(factory_seed, coarse=coarse) - - with FixedSeed(factory_seed): - self.genome = self.sample_geo_genome() - - t = uniform(0.0, 1.0) - - # self.blade_color = hsv2rgba([0.125 + 0.16 * factory_seed / 10, 0.95, 0.6]) - - if season=='autumn': - self.blade_color = [uniform(0.125, 0.2), 0.95, 0.6] - elif season=='summer' or season=='spring': - self.blade_color = [uniform(0.25, 0.3), 0.95, 0.6] - elif season=='winter': - self.blade_color = [uniform(0.125, 0.2), 0.95, 0.6] - else: - raise NotImplementedError - - self.color_randomness = 0.05 - - @staticmethod - def sample_geo_genome(): - return { - 'midrib_length': uniform(0.0, 0.8), - 'midrib_width': uniform(0.5, 1.0), - 'stem_length': uniform(0.7, 0.9), - 'vein_asymmetry': uniform(0.0, 1.0), - 'vein_angle': uniform(0.2, 2.0), - 'vein_density': uniform(5.0, 20.0), - 'subvein_scale': uniform(10.0, 20.0), - 'jigsaw_scale': uniform(5.0, 20.0), - 'jigsaw_depth': uniform(0.0, 2.0), - 'midrib_shape_control_points': [(0.0, 0.5), (0.25, uniform(0.48, 0.52)), (0.75, uniform(0.48, 0.52)), (1.0, 0.5)], - 'leaf_shape_control_points': [(0.0, 0.0), (uniform(0.2, 0.4), uniform(0.1, 0.4)), (uniform(0.6, 0.8), uniform(0.1, 0.4)), (1.0, 0.0)], - 'vein_shape_control_points': [(0.0, 0.0), (0.25, uniform(0.1, 0.4)), (0.75, uniform(0.6, 0.9)), (1.0, 1.0)], - } - - def create_asset(self, **params): - - bpy.ops.mesh.primitive_plane_add( - size=2, enter_editmode=False, align='WORLD', location=(0, 0, 0), scale=(1, 1, 1)) - obj = bpy.context.active_object - - # add noise to the genotype output - #hue_noise = np.random.randn() * 0 - #hsv_blade = self.hsv_blade + hue_noise - #hsv_vein = self.hsv_vein + hue_noise - - phenome = self.genome.copy() - - phenome['y_wave_control_points'] = [(0.0, 0.5), (uniform(0.25, 0.75), uniform(0.50, 0.60)), (1.0, 0.5)] - x_wave_val = np.random.uniform(0.50, 0.58) - phenome['x_wave_control_points'] = [(0.0, 0.5), (0.4, x_wave_val), (0.5, 0.5), (0.6, x_wave_val), (1.0, 0.5)] - - phenome['stem_curve_control_points'] = [(0.0, 0.5), - (uniform(0.2, 0.3), uniform(0.45, 0.55)), - (uniform(0.7, 0.8), uniform(0.45, 0.55)), - (1.0, 0.5)] - phenome['shape_curve_control_points'] = [(0.0, 0.0), (0.523, 0.1156), (0.5805, 0.7469), (0.7742, 0.7719), (0.9461, 0.7531), (1.0, 0.0)] - phenome['vein_length'] = uniform(0.4, 0.5) - phenome['angle'] = uniform(-110.0, -70.0) - phenome['multiplier'] = uniform(1.90, 1.98) - - phenome['scale_vein'] = uniform(70.0, 90.0) - phenome['scale_wave'] = uniform(4.0, 6.0) - phenome['scale_margin'] = uniform(5.5, 7.5) - - material_kwargs = phenome.copy() - material_kwargs['color_base'] = np.copy(self.blade_color) # (0.2346, 0.4735, 0.0273, 1.0), - material_kwargs['color_base'][0] += np.random.normal(0.0, 0.02) - material_kwargs['color_base'][1] += 
np.random.normal(0.0, self.color_randomness) - material_kwargs['color_base'][2] += np.random.normal(0.0, self.color_randomness) - material_kwargs['color_base'] = hsv2rgba(material_kwargs['color_base']) - - material_kwargs['color_vein'] = hsv2rgba(np.copy(self.blade_color)) - - surface.add_geomod(obj, geo_leaf_ginko, apply=False, attributes=['vein', 'shape'], input_kwargs=phenome) - surface.add_material(obj, shader_material, reuse=False, input_kwargs=material_kwargs) - - bpy.ops.object.convert(target='MESH') - - obj = bpy.context.object - obj.scale *= normal(1, 0.2) * self.scale - butil.apply_transform(obj) - tag_object(obj, 'leaf_ginko') - - return obj \ No newline at end of file diff --git a/infinigen/assets/leaves/leaf_maple.py b/infinigen/assets/leaves/leaf_maple.py deleted file mode 100644 index d28459f4d..000000000 --- a/infinigen/assets/leaves/leaf_maple.py +++ /dev/null @@ -1,798 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Yiming Zuo -# Acknowledgement: This file draws inspiration https://www.youtube.com/watch?v=X9YmJ0zGWHw by Creative Shrimp - - -import numpy as np -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category, hsv2rgba -from infinigen.core import surface -from infinigen.assets.leaves.leaf_v2 import nodegroup_apply_wave - -from infinigen.core.util.math import FixedSeed -from infinigen.core.placement.factory import AssetFactory -from infinigen.core.util import blender as butil -from infinigen.core.tagging import tag_object, tag_nodegroup - -def deg2rad(deg): - return deg / 180.0 * np.pi - -@node_utils.to_nodegroup('nodegroup_vein', singleton=False, type='GeometryNodeTree') -def nodegroup_vein(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'Vector', (0.0, 0.0, 0.0)), - ('NodeSocketFloatAngle', 'Angle', 0.0), - ('NodeSocketFloat', 'Length', 0.0), - ('NodeSocketFloat', 'Start', 0.0), - ('NodeSocketFloat', 'X Modulated', 0.0), - ('NodeSocketFloat', 'Anneal', 0.4), - ('NodeSocketFloat', 'Phase Offset', 0.0)]) - - absolute = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["X Modulated"]}, - attrs={'operation': 'ABSOLUTE'}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': group_input.outputs["Vector"]}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': absolute, 'Y': separate_xyz.outputs["Y"], 'Z': separate_xyz.outputs["Z"]}) - - vector_rotate = nw.new_node(Nodes.VectorRotate, - input_kwargs={'Vector': combine_xyz_1, 'Angle': group_input.outputs["Angle"]}, - attrs={'rotation_type': 'Z_AXIS'}) - - separate_xyz_3 = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': vector_rotate}) - - separate_xyz_1 = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': combine_xyz_1}) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': separate_xyz_1.outputs["X"], 2: 0.3}) - - float_curve = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': map_range_1.outputs["Result"]}) - node_utils.assign_curve(float_curve.mapping.curves[0], [(0.0, 0.0), (0.5932, 0.1969), (1.0, 1.0)]) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: float_curve, 1: 0.2}, - 
attrs={'operation': 'MULTIPLY'}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_3.outputs["X"], 1: multiply}) - - sign = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["X Modulated"]}, - attrs={'operation': 'SIGN'}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: sign, 1: 0.1}, - attrs={'operation': 'MULTIPLY'}) - - add_1 = nw.new_node(Nodes.Math, - input_kwargs={0: add, 1: multiply_1}) - - add_2 = nw.new_node(Nodes.Math, - input_kwargs={0: add_1, 1: group_input.outputs["Phase Offset"]}) - - voronoi_texture = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'W': add_2, 'Scale': 8.0, 'Randomness': 0.7125}, - attrs={'voronoi_dimensions': '1D'}) - - length = nw.new_node(Nodes.VectorMath, - input_kwargs={0: vector_rotate}, - attrs={'operation': 'LENGTH'}) - - multiply_2 = nw.new_node(Nodes.Math, - input_kwargs={0: 0.05, 1: length.outputs["Value"]}, - attrs={'operation': 'MULTIPLY', 'use_clamp': True}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: 0.08, 1: multiply_2}, - attrs={'operation': 'SUBTRACT', 'use_clamp': True}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': voronoi_texture.outputs["Distance"], 2: subtract, 3: 1.0, 4: 0.0}) - - absolute_1 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["X Modulated"]}, - attrs={'operation': 'ABSOLUTE'}) - - subtract_1 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_1.outputs["Y"], 1: 0.0}, - attrs={'operation': 'SUBTRACT'}) - - multiply_3 = nw.new_node(Nodes.Math, - input_kwargs={0: subtract_1, 1: group_input.outputs["Anneal"]}, - attrs={'operation': 'MULTIPLY'}) - - less_than = nw.new_node(Nodes.Math, - input_kwargs={0: absolute_1, 1: multiply_3}, - attrs={'operation': 'LESS_THAN'}) - - multiply_4 = nw.new_node(Nodes.Math, - input_kwargs={0: map_range.outputs["Result"], 1: less_than}, - attrs={'operation': 'MULTIPLY'}) - - less_than_1 = nw.new_node(Nodes.Math, - input_kwargs={0: add, 1: group_input.outputs["Start"]}, - attrs={'operation': 'LESS_THAN'}) - - multiply_5 = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_4, 1: less_than_1}, - attrs={'operation': 'MULTIPLY'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Result': multiply_5}) - -@node_utils.to_nodegroup('nodegroup_leaf_shader', singleton=False, type='ShaderNodeTree') -def nodegroup_leaf_shader(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketColor', 'Color', (0.8, 0.8, 0.8, 1.0))]) - - diffuse_bsdf = nw.new_node('ShaderNodeBsdfDiffuse', - input_kwargs={'Color': group_input.outputs["Color"]}) - - glossy_bsdf = nw.new_node('ShaderNodeBsdfGlossy', - input_kwargs={'Color': group_input.outputs["Color"], 'Roughness': 0.3}) - - mix_shader = nw.new_node(Nodes.MixShader, - input_kwargs={'Fac': 0.2, 1: diffuse_bsdf, 2: glossy_bsdf}) - - translucent_bsdf = nw.new_node(Nodes.TranslucentBSDF, - input_kwargs={'Color': group_input.outputs["Color"]}) - - mix_shader_1 = nw.new_node(Nodes.MixShader, - input_kwargs={'Fac': 0.3, 1: mix_shader, 2: translucent_bsdf}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Shader': mix_shader_1}) - -@node_utils.to_nodegroup('nodegroup_node_group_002', singleton=False, type='GeometryNodeTree') -def nodegroup_node_group_002(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - position = nw.new_node(Nodes.InputPosition) - - length = nw.new_node(Nodes.VectorMath, - input_kwargs={0: 
position}, - attrs={'operation': 'LENGTH'}) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Shape', 0.5)]) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: length.outputs["Value"], 1: group_input.outputs["Shape"]}, - attrs={'operation': 'MULTIPLY'}) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': multiply, 1: -1.0, 2: 0.0, 3: -0.1, 4: 0.1}, - attrs={'clamp': False}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Result': map_range_1.outputs["Result"]}) - -@node_utils.to_nodegroup('nodegroup_nodegroup_sub_vein', singleton=False, type='GeometryNodeTree') -def nodegroup_nodegroup_sub_vein(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'X', 0.5), - ('NodeSocketFloat', 'Y', 0.0)]) - - absolute = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["X"]}, - attrs={'operation': 'ABSOLUTE'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': absolute, 'Y': group_input.outputs["Y"]}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': combine_xyz}) - - mix = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': 0.9, 'Color1': noise_texture.outputs["Color"], 'Color2': combine_xyz}) - - voronoi_texture = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': mix, 'Scale': 30.0}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': voronoi_texture.outputs["Distance"], 2: 0.1, 4: 2.0}, - attrs={'clamp': False}) - - voronoi_texture_1 = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': mix, 'Scale': 150.0}, - attrs={'feature': 'DISTANCE_TO_EDGE'}) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': voronoi_texture_1.outputs["Distance"], 2: 0.1}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: map_range.outputs["Result"], 1: map_range_1.outputs["Result"]}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: add, 1: -1.0}, - attrs={'operation': 'MULTIPLY'}) - - map_range_3 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': map_range_1.outputs["Result"], 4: -1.0}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Value': multiply, 'Color Value': map_range_3.outputs["Result"]}) - -@node_utils.to_nodegroup('nodegroup_midrib', singleton=False, type='GeometryNodeTree') -def nodegroup_midrib(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'Vector', (0.0, 0.0, 0.0)), - ('NodeSocketFloatAngle', 'Angle', 0.8238), - ('NodeSocketFloatAngle', 'vein Angle', 0.7854), - ('NodeSocketFloat', 'vein Length', 0.2), - ('NodeSocketFloat', 'vein Start', -0.2), - ('NodeSocketFloat', 'Anneal', 0.4), - ('NodeSocketFloat', 'Phase Offset', 0.0)]) - - vector_rotate_1 = nw.new_node(Nodes.VectorRotate, - input_kwargs={'Vector': group_input.outputs["Vector"], 'Angle': group_input.outputs["Angle"]}, - attrs={'rotation_type': 'Z_AXIS'}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': vector_rotate_1}) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': separate_xyz.outputs["Y"]}) - - float_curve = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': map_range_1.outputs["Result"]}) - node_utils.assign_curve(float_curve.mapping.curves[0], [(0.0, 0.5), (0.1432, 0.5406), (0.2591, 0.5062), (0.3705, 0.5406), (0.4591, 0.425), (0.5932, 0.4562), 
(0.7432, 0.3562), (0.8727, 0.5062), (1.0, 0.5)]) - - value = nw.new_node(Nodes.Value) - value.outputs[0].default_value = 0.1 - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: float_curve, 1: value}, - attrs={'operation': 'MULTIPLY'}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz.outputs["X"], 1: multiply}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: value}, - attrs={'operation': 'MULTIPLY'}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: add, 1: multiply_1}, - attrs={'operation': 'SUBTRACT'}) - - vein = nw.new_node(nodegroup_vein().name, - input_kwargs={'Vector': vector_rotate_1, 'Angle': group_input.outputs["vein Angle"], 'Length': group_input.outputs["vein Length"], 'Start': group_input.outputs["vein Start"], 'X Modulated': subtract, 'Anneal': group_input.outputs["Anneal"], 'Phase Offset': group_input.outputs["Phase Offset"]}) - - absolute = nw.new_node(Nodes.Math, - input_kwargs={0: subtract}, - attrs={'operation': 'ABSOLUTE'}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': vector_rotate_1, 'Scale': 10.0}) - - subtract_1 = nw.new_node(Nodes.Math, - input_kwargs={0: noise_texture.outputs["Fac"]}, - attrs={'operation': 'SUBTRACT'}) - - multiply_2 = nw.new_node(Nodes.Math, - input_kwargs={0: subtract_1, 1: 0.01}, - attrs={'operation': 'MULTIPLY'}) - - add_1 = nw.new_node(Nodes.Math, - input_kwargs={0: absolute, 1: multiply_2}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': add_1, 2: 0.01, 3: 1.0, 4: 0.0}) - - greater_than = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz.outputs["Y"], 1: 0.0}, - attrs={'operation': 'GREATER_THAN'}) - - multiply_3 = nw.new_node(Nodes.Math, - input_kwargs={0: map_range.outputs["Result"], 1: greater_than}, - attrs={'operation': 'MULTIPLY'}) - - maximum = nw.new_node(Nodes.Math, - input_kwargs={0: vein, 1: multiply_3}, - attrs={'operation': 'MAXIMUM'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Result': maximum, 'Vector': vector_rotate_1}) - -@node_utils.to_nodegroup('nodegroup_valid_area', singleton=False, type='GeometryNodeTree') -def nodegroup_valid_area(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Value', 0.5)]) - - sign = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Value"]}, - attrs={'operation': 'SIGN'}) - - map_range_4 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': sign, 1: -1.0, 3: 1.0, 4: 0.0}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Result': map_range_4.outputs["Result"]}) - -@node_utils.to_nodegroup('nodegroup_maple_shape', singleton=False, type='GeometryNodeTree') -def nodegroup_maple_shape(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'Coordinate', (0.0, 0.0, 0.0)), - ('NodeSocketFloat', 'Multiplier', 1.96), - ('NodeSocketFloat', 'Noise Level', 0.02)]) - - multiply = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["Coordinate"], 1: (0.9, 1.0, 0.0)}, - attrs={'operation': 'MULTIPLY'}) - - length = nw.new_node(Nodes.VectorMath, - input_kwargs={0: multiply.outputs["Vector"]}, - attrs={'operation': 'LENGTH'}) - - gradient_texture = nw.new_node(Nodes.GradientTexture, - input_kwargs={'Vector': group_input.outputs["Coordinate"]}, - attrs={'gradient_type': 'RADIAL'}) - - pingpong = nw.new_node(Nodes.Math, - 
input_kwargs={0: gradient_texture.outputs["Fac"]}, - attrs={'operation': 'PINGPONG'}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: pingpong, 1: group_input.outputs["Multiplier"]}, - attrs={'operation': 'MULTIPLY'}) - - float_curve = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': multiply_1}) - node_utils.assign_curve(float_curve.mapping.curves[0], [(0.0, 0.0), (0.1156, 0.075), (0.2109, 0.2719), (0.2602, 0.2344), (0.3633, 0.2625), (0.4171, 0.5545), (0.4336, 0.5344), (0.4568, 0.7094), (0.4749, 0.6012), (0.4882, 0.6636), (0.5352, 0.4594), (0.5484, 0.4375), (0.5648, 0.4469), (0.6366, 0.7331), (0.6719, 0.6562), (0.7149, 0.8225), (0.768, 0.6344), (0.7928, 0.6853), (0.8156, 0.5125), (0.8297, 0.4906), (0.85, 0.5125), (0.8988, 0.747), (0.9297, 0.6937), (0.9648, 0.8937), (0.9797, 0.8656), (0.9883, 0.8938), (1.0, 1.0)], handles=['AUTO', 'AUTO', 'VECTOR', 'AUTO', 'AUTO', 'VECTOR', 'AUTO', 'VECTOR', 'AUTO', 'VECTOR', 'AUTO', 'AUTO', 'AUTO', 'VECTOR', 'AUTO', 'VECTOR', 'AUTO', 'VECTOR', 'AUTO', 'AUTO', 'AUTO', 'VECTOR', 'AUTO', 'VECTOR', 'AUTO', 'VECTOR', 'AUTO']) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: length.outputs["Value"], 1: float_curve}, - attrs={'operation': 'SUBTRACT'}) - - subtract_1 = nw.new_node(Nodes.Math, - input_kwargs={0: subtract, 1: 0.06}, - attrs={'operation': 'SUBTRACT'}) - - float_curve_1 = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': multiply_1}) - node_utils.assign_curve(float_curve_1.mapping.curves[0], [(0.0, 0.0), (0.1156, 0.075), (0.2109, 0.2719), (0.2602, 0.2344), (0.3633, 0.2625), (0.4336, 0.5344), (0.4568, 0.7094), (0.4749, 0.6012), (0.5352, 0.4594), (0.5484, 0.4375), (0.5648, 0.4469), (0.6719, 0.6562), (0.7149, 0.8225), (0.768, 0.6344), (0.8156, 0.5125), (0.8297, 0.4906), (0.85, 0.5125), (0.9297, 0.6937), (0.9883, 0.8938), (1.0, 1.0)], handles=['AUTO', 'AUTO', 'VECTOR', 'AUTO', 'AUTO', 'AUTO', 'VECTOR', 'AUTO', 'AUTO', 'AUTO', 'AUTO', 'AUTO', 'VECTOR', 'AUTO', 'AUTO', 'AUTO', 'AUTO', 'AUTO', 'VECTOR', 'AUTO']) - - subtract_2 = nw.new_node(Nodes.Math, - input_kwargs={0: length.outputs["Value"], 1: float_curve_1}, - attrs={'operation': 'SUBTRACT'}) - - subtract_3 = nw.new_node(Nodes.Math, - input_kwargs={0: subtract_2, 1: 0.06}, - attrs={'operation': 'SUBTRACT'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Shape': subtract_1, 'Displacement': subtract_3}) - -@node_utils.to_nodegroup('nodegroup_maple_stem', singleton=False, type='GeometryNodeTree') -def nodegroup_maple_stem(nw: NodeWrangler, stem_curve_control_points): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'Coordinate', (0.0, 0.0, 0.0)), - ('NodeSocketFloat', 'Length', 0.64), - ('NodeSocketFloat', 'Value', 0.005)]) - - add = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["Coordinate"], 1: (0.0, 0.08, 0.0)}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': add.outputs["Vector"]}) - - map_range_2 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': separate_xyz.outputs["Y"], 1: -1.0, 2: 0.0}) - - float_curve_1 = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': map_range_2.outputs["Result"]}) - node_utils.assign_curve(float_curve_1.mapping.curves[0], stem_curve_control_points) - - map_range_3 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': float_curve_1, 3: -1.0}) - - add_1 = nw.new_node(Nodes.Math, - input_kwargs={0: map_range_3.outputs["Result"], 1: separate_xyz.outputs["X"]}) - - 
absolute = nw.new_node(Nodes.Math, - input_kwargs={0: add_1}, - attrs={'operation': 'ABSOLUTE'}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': separate_xyz.outputs["Y"], 1: -1.72, 2: -0.35, 3: 0.03, 4: 0.008}, - attrs={'interpolation_type': 'SMOOTHSTEP'}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: absolute, 1: map_range.outputs["Result"]}, - attrs={'operation': 'SUBTRACT'}) - - add_2 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz.outputs["Y"], 1: group_input.outputs["Length"]}) - - absolute_1 = nw.new_node(Nodes.Math, - input_kwargs={0: add_2}, - attrs={'operation': 'ABSOLUTE'}) - - subtract_1 = nw.new_node(Nodes.Math, - input_kwargs={0: absolute_1, 1: group_input.outputs["Length"]}, - attrs={'operation': 'SUBTRACT'}) - - smooth_max = nw.new_node(Nodes.Math, - input_kwargs={0: subtract, 1: subtract_1, 2: 0.02}, - attrs={'operation': 'SMOOTH_MAX'}) - - subtract_2 = nw.new_node(Nodes.Math, - input_kwargs={0: smooth_max, 1: group_input.outputs["Value"]}, - attrs={'operation': 'SUBTRACT'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Stem': subtract_2, 'Stem Raw': absolute}) - -@node_utils.to_nodegroup('nodegroup_move_to_origin', singleton=False, type='GeometryNodeTree') -def nodegroup_move_to_origin(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None)]) - - position = nw.new_node(Nodes.InputPosition) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': position}) - - attribute_statistic = nw.new_node(Nodes.AttributeStatistic, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 2: separate_xyz.outputs["Y"]}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: 0.0, 1: attribute_statistic.outputs["Min"]}, - attrs={'operation': 'SUBTRACT'}) - - attribute_statistic_1 = nw.new_node(Nodes.AttributeStatistic, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 2: separate_xyz.outputs["Z"]}) - - subtract_1 = nw.new_node(Nodes.Math, - input_kwargs={0: 0.0, 1: attribute_statistic_1.outputs["Max"]}, - attrs={'operation': 'SUBTRACT'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'Y': subtract, 'Z': subtract_1}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 'Offset': combine_xyz}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_position}) - -def shader_material(nw: NodeWrangler, **kwargs): - # Code generated using version 2.4.3 of the node_transpiler - - texture_coordinate = nw.new_node(Nodes.TextureCoord) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': texture_coordinate.outputs["Object"], 'Detail': 10.0, 'Roughness': 0.7}) - - separate_rgb = nw.new_node(Nodes.SeparateRGB, - input_kwargs={'Image': noise_texture.outputs["Color"]}) - - map_range_4 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': separate_rgb.outputs["G"], 1: 0.4, 2: 0.7, 3: 0.48, 4: 0.55}, - attrs={'interpolation_type': 'SMOOTHSTEP'}) - - map_range_6 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': separate_rgb.outputs["B"], 1: 0.4, 2: 0.7, 3: 0.4}, - attrs={'interpolation_type': 'SMOOTHSTEP'}) - - attribute = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'vein'}) - - mix = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': attribute.outputs["Color"], 'Color1': kwargs['color_vein'], 'Color2': kwargs['color_base']}) - - 
hue_saturation_value = nw.new_node('ShaderNodeHueSaturation', - input_kwargs={'Hue': map_range_4.outputs["Result"], 'Value': map_range_6.outputs["Result"], 'Color': mix}) - - group = nw.new_node(nodegroup_leaf_shader().name, - input_kwargs={'Color': hue_saturation_value}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': group}) - -def geo_leaf_maple(nw: NodeWrangler, **kwargs): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None)]) - - # subdivide_mesh_1 = nw.new_node(Nodes.SubdivideMesh, - # input_kwargs={'Mesh': group_input.outputs["Geometry"]}) - - subdivide_mesh = nw.new_node(Nodes.SubdivideMesh, - input_kwargs={'Mesh': group_input.outputs["Geometry"], 'Level': 11}) - - position = nw.new_node(Nodes.InputPosition) - - maplestem = nw.new_node(nodegroup_maple_stem(stem_curve_control_points=kwargs['stem_curve_control_points']).name, - input_kwargs={'Coordinate': position, 'Length': 0.32, 'Value': 0.005}) - - vector_rotate_1 = nw.new_node(Nodes.VectorRotate, - input_kwargs={'Vector': position, 'Angle': deg2rad(kwargs['angle'])}, - attrs={'rotation_type': 'Z_AXIS'}) - - vector_rotate = nw.new_node(Nodes.VectorRotate, - input_kwargs={'Vector': vector_rotate_1, 'Angle': -1.5708}, - attrs={'rotation_type': 'Z_AXIS'}) - - mapleshape = nw.new_node(nodegroup_maple_shape().name, - input_kwargs={'Coordinate': vector_rotate, 'Multiplier': kwargs['multiplier'], 'Noise Level': 0.04}) - - smooth_min = nw.new_node(Nodes.Math, - input_kwargs={0: maplestem.outputs["Stem"], 1: mapleshape.outputs["Shape"], 2: 0.0}, - attrs={'operation': 'SMOOTH_MIN'}) - - stem_length = nw.new_node(Nodes.Compare, - input_kwargs={0: smooth_min}, - label='stem length') - - delete_geometry = nw.new_node(Nodes.DeleteGeom, - input_kwargs={'Geometry': subdivide_mesh, 'Selection': stem_length}) - - validarea = nw.new_node(nodegroup_valid_area().name, - input_kwargs={'Value': mapleshape.outputs["Shape"]}) - - midrib = nw.new_node(nodegroup_midrib().name, - input_kwargs={'Vector': vector_rotate_1, 'Angle': 1.693, 'vein Length': 0.12, 'vein Start': -0.12, 'Phase Offset': uniform(0, 100)}) - - midrib_1 = nw.new_node(nodegroup_midrib().name, - input_kwargs={'Vector': vector_rotate_1, 'Angle': -1.7279, 'vein Length': 0.12, 'vein Start': -0.12, 'Phase Offset': uniform(0, 100)}) - - maximum = nw.new_node(Nodes.Math, - input_kwargs={0: midrib.outputs["Result"], 1: midrib_1.outputs["Result"]}, - attrs={'operation': 'MAXIMUM'}) - - midrib_2 = nw.new_node(nodegroup_midrib().name, - input_kwargs={'Vector': vector_rotate_1, 'Angle': 0.8901, 'vein Length': 0.2, 'vein Start': 0.0, 'Phase Offset': uniform(0, 100)}) - - midrib_3 = nw.new_node(nodegroup_midrib().name, - input_kwargs={'Vector': vector_rotate_1, 'Angle': -0.9041, 'vein Start': 0.0, 'Phase Offset': uniform(0, 100)}) - - maximum_1 = nw.new_node(Nodes.Math, - input_kwargs={0: midrib_2.outputs["Result"], 1: midrib_3.outputs["Result"]}, - attrs={'operation': 'MAXIMUM'}) - - maximum_2 = nw.new_node(Nodes.Math, - input_kwargs={0: maximum, 1: maximum_1}, - attrs={'operation': 'MAXIMUM'}) - - midrib_4 = nw.new_node(nodegroup_midrib().name, - input_kwargs={'Vector': vector_rotate_1, 'Angle': 0.0, 'vein Length': 1.64, 'vein Start': -0.12, 'Phase Offset': uniform(0, 100)}) - - midrib_5 = nw.new_node(nodegroup_midrib().name, - input_kwargs={'Vector': vector_rotate_1, 'Angle': 3.1416, 'vein Angle': 0.761, 'vein Length': -10.56, 'vein Start': 0.02, 
'Anneal': 10.0, 'Phase Offset': uniform(0, 100)}) - - maximum_3 = nw.new_node(Nodes.Math, - input_kwargs={0: midrib_4.outputs["Result"], 1: midrib_5.outputs["Result"]}, - attrs={'operation': 'MAXIMUM'}) - - maximum_4 = nw.new_node(Nodes.Math, - input_kwargs={0: maximum_2, 1: maximum_3}, - attrs={'operation': 'MAXIMUM'}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': position}) - - nodegroup_sub_vein = nw.new_node(nodegroup_nodegroup_sub_vein().name, - input_kwargs={'X': separate_xyz.outputs["X"], 'Y': separate_xyz.outputs["Y"]}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': nodegroup_sub_vein.outputs["Color Value"], 2: -0.94, 3: 1.0, 4: 0.0}) - - maximum_5 = nw.new_node(Nodes.Math, - input_kwargs={0: maximum_4, 1: map_range.outputs["Result"]}, - attrs={'operation': 'MAXIMUM'}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: 1.0, 1: maximum_5}, - attrs={'operation': 'SUBTRACT'}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: validarea, 1: subtract}, - attrs={'operation': 'MULTIPLY'}) - - capture_attribute = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': delete_geometry, 2: multiply}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: nodegroup_sub_vein.outputs["Value"], 1: -0.03}, - attrs={'operation': 'MULTIPLY'}) - - maximum_6 = nw.new_node(Nodes.Math, - input_kwargs={0: maximum_4, 1: multiply_1}, - attrs={'operation': 'MAXIMUM'}) - - multiply_2 = nw.new_node(Nodes.Math, - input_kwargs={0: maximum_6, 1: 0.015}, - attrs={'operation': 'MULTIPLY'}) - - multiply_3 = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_2, 1: -1.0}, - attrs={'operation': 'MULTIPLY'}) - - multiply_4 = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_3, 1: validarea}, - attrs={'operation': 'MULTIPLY'}) - - validarea_1 = nw.new_node(nodegroup_valid_area().name, - input_kwargs={'Value': maplestem.outputs["Stem"]}) - - subtract_1 = nw.new_node(Nodes.Math, - input_kwargs={0: maplestem.outputs["Stem Raw"], 1: 0.01}, - attrs={'operation': 'SUBTRACT'}) - - multiply_5 = nw.new_node(Nodes.Math, - input_kwargs={0: validarea_1, 1: subtract_1}, - attrs={'operation': 'MULTIPLY'}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_4, 1: multiply_5}) - - multiply_6 = nw.new_node(Nodes.Math, - input_kwargs={0: add}, - attrs={'operation': 'MULTIPLY'}) - - nodegroup_002 = nw.new_node(nodegroup_node_group_002().name, - input_kwargs={'Shape': mapleshape.outputs["Displacement"]}) - - add_1 = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_6, 1: nodegroup_002}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'Z': add_1}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': capture_attribute.outputs["Geometry"], 'Offset': combine_xyz}) - - separate_xyz_1 = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': vector_rotate_1}) - - move_to_origin = nw.new_node(nodegroup_move_to_origin().name, - input_kwargs={'Geometry': set_position}) - - apply_wave = nw.new_node(nodegroup_apply_wave(y_wave_control_points=kwargs['y_wave_control_points'], x_wave_control_points=kwargs['x_wave_control_points']).name, - input_kwargs={'Geometry': move_to_origin, 'Wave Scale X': 0.5, 'Wave Scale Y': 1.0, 'X Modulated': separate_xyz_1.outputs["X"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': apply_wave, 'Vein': capture_attribute.outputs[2]}) - -class LeafFactoryMaple(AssetFactory): - - scale = 0.5 - - def __init__(self, factory_seed, season='autumn', coarse=False): - 
super().__init__(factory_seed, coarse=coarse) - - with FixedSeed(factory_seed): - self.genome = self.sample_geo_genome() - - t = uniform(0.0, 1.0) - - if season=='autumn': - hsvcol_blade = [uniform(0.0, 0.20), 0.85, 0.9] - hsvcol_vein = np.copy(hsvcol_blade) - hsvcol_vein[2] = 0.7 - - elif season=='summer' or season=='spring': - hsvcol_blade = [uniform(0.28, 0.32), uniform(0.6, 0.7), 0.9] - hsvcol_vein = np.copy(hsvcol_blade) - hsvcol_blade[2] = uniform(0.1, 0.5) - hsvcol_vein[2] = uniform(0.1, 0.5) - - elif season=='winter': - hsvcol_blade = [uniform(0.0, 0.10), uniform(0.2, 0.6), uniform(0.0, 0.1)] - hsvcol_vein = [uniform(0.0, 0.10), uniform(0.2, 0.6), uniform(0.0, 0.1)] - - else: - raise NotImplementedError - - self.blade_color = hsvcol_blade - self.vein_color = hsvcol_vein - - self.color_randomness = uniform(0.05, 0.10) - - # if t < 0.5: - # self.blade_color = np.array((0.2346, 0.4735, 0.0273, 1.0)) - # else: - # self.blade_color = np.array((1.000, 0.855, 0.007, 1.0)) - - @staticmethod - def sample_geo_genome(): - return { - 'midrib_length': uniform(0.0, 0.8), - 'midrib_width': uniform(0.5, 1.0), - 'stem_length': uniform(0.7, 0.9), - 'vein_asymmetry': uniform(0.0, 1.0), - 'vein_angle': uniform(0.2, 2.0), - 'vein_density': uniform(5.0, 20.0), - 'subvein_scale': uniform(10.0, 20.0), - 'jigsaw_scale': uniform(5.0, 20.0), - 'jigsaw_depth': uniform(0.0, 2.0), - 'midrib_shape_control_points': [(0.0, 0.5), (0.25, uniform(0.48, 0.52)), (0.75, uniform(0.48, 0.52)), (1.0, 0.5)], - 'leaf_shape_control_points': [(0.0, 0.0), (uniform(0.2, 0.4), uniform(0.1, 0.4)), (uniform(0.6, 0.8), uniform(0.1, 0.4)), (1.0, 0.0)], - 'vein_shape_control_points': [(0.0, 0.0), (0.25, uniform(0.1, 0.4)), (0.75, uniform(0.6, 0.9)), (1.0, 1.0)], - } - - def create_asset(self, **params): - - bpy.ops.mesh.primitive_plane_add( - size=4, enter_editmode=False, align='WORLD', location=(0, 0, 0), scale=(1, 1, 1)) - obj = bpy.context.active_object - - # add noise to the genotype output - #hue_noise = np.random.randn() * 0 - #hsv_blade = self.hsv_blade + hue_noise - #hsv_vein = self.hsv_vein + hue_noise - - phenome = self.genome.copy() - - phenome['y_wave_control_points'] = [(0.0, 0.5), (uniform(0.25, 0.75), uniform(0.50, 0.60)), (1.0, 0.5)] - x_wave_val = np.random.uniform(0.50, 0.58) - phenome['x_wave_control_points'] = [(0.0, 0.5), (0.4, x_wave_val), (0.5, 0.5), (0.6, x_wave_val), (1.0, 0.5)] - - phenome['stem_curve_control_points'] = [(0.0, 0.5), - (uniform(0.2, 0.3), uniform(0.45, 0.55)), - (uniform(0.7, 0.8), uniform(0.45, 0.55)), - (1.0, 0.5)] - phenome['shape_curve_control_points'] = [(0.0, 0.0), (0.523, 0.1156), (0.5805, 0.7469), (0.7742, 0.7719), (0.9461, 0.7531), (1.0, 0.0)] - phenome['vein_length'] = uniform(0.4, 0.5) - phenome['angle'] = uniform(-15.0, 15.0) - phenome['multiplier'] = uniform(1.92, 2.00) - - phenome['scale_vein'] = uniform(70.0, 90.0) - phenome['scale_wave'] = uniform(4.0, 6.0) - phenome['scale_margin'] = uniform(5.5, 7.5) - - material_kwargs = phenome.copy() - material_kwargs['color_base'] = np.copy(self.blade_color) # (0.2346, 0.4735, 0.0273, 1.0), - material_kwargs['color_base'][0] += np.random.normal(0.0, 0.02) - material_kwargs['color_base'][1] += np.random.normal(0.0, self.color_randomness) - material_kwargs['color_base'][2] += np.random.normal(0.0, self.color_randomness) - material_kwargs['color_base'] = hsv2rgba(material_kwargs['color_base']) - - material_kwargs['color_vein'] = np.copy(self.vein_color) # (0.2346, 0.4735, 0.0273, 1.0), - material_kwargs['color_vein'][0] += 
np.random.normal(0.0, 0.02) - material_kwargs['color_vein'][1] += np.random.normal(0.0, self.color_randomness) - material_kwargs['color_vein'][2] += np.random.normal(0.0, self.color_randomness) - material_kwargs['color_vein'] = hsv2rgba(material_kwargs['color_vein']) - - surface.add_geomod(obj, geo_leaf_maple, apply=False, attributes=['vein'], input_kwargs=phenome) - surface.add_material(obj, shader_material, reuse=False, input_kwargs=material_kwargs) - - bpy.ops.object.convert(target='MESH') - - obj = bpy.context.object - obj.scale *= normal(1, 0.1) * self.scale - butil.apply_transform(obj) - tag_object(obj, 'leaf_maple') - - return obj \ No newline at end of file diff --git a/infinigen/assets/leaves/leaf_pine.py b/infinigen/assets/leaves/leaf_pine.py deleted file mode 100644 index 24d25b8c7..000000000 --- a/infinigen/assets/leaves/leaf_pine.py +++ /dev/null @@ -1,376 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Yiming Zuo - - -from random import randint -import bpy -import mathutils -from numpy.random import uniform, normal -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface - -from infinigen.core.util.math import FixedSeed -from infinigen.core.placement.factory import AssetFactory -from infinigen.core.util import blender as butil -from infinigen.core.tagging import tag_object, tag_nodegroup - -######## code for creating pine needles ######## - -def shader_needle(nw): - # Code generated using version 2.3.1 of the node_transpiler - - velvet_bsdf = nw.new_node('ShaderNodeBsdfVelvet', - input_kwargs={'Color': (0.016, 0.2241, 0.0252, 1.0)}) - - glossy_bsdf = nw.new_node('ShaderNodeBsdfGlossy', - input_kwargs={'Color': (0.5771, 0.8, 0.5713, 1.0), 'Roughness': 0.4}) - - mix_shader = nw.new_node(Nodes.MixShader, - input_kwargs={'Fac': 0.3, 1: velvet_bsdf, 2: glossy_bsdf}) - - translucent_bsdf = nw.new_node(Nodes.TranslucentBSDF, - input_kwargs={'Color': (0.0116, 0.4409, 0.0262, 1.0)}) - - mix_shader_1 = nw.new_node(Nodes.MixShader, - input_kwargs={'Fac': 0.1, 1: mix_shader, 2: translucent_bsdf}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': mix_shader_1}) - -def geometry_needle(nw): - # Code generated using version 2.3.1 of the node_transpiler - - cone = nw.new_node('GeometryNodeMeshCone', - input_kwargs={'Vertices': 4, 'Radius Top': 0.01, 'Radius Bottom': 0.02, 'Depth': 1.0}) - - set_material = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': cone.outputs["Mesh"], 'Material': surface.shaderfunc_to_material(shader_needle)}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_material}) - -def apply_needle(obj, selection=None, **kwargs): - surface.add_geomod(obj, geometry_needle, selection=selection, attributes=[]) - -def make_needle(name='Needle'): - if bpy.context.scene.objects.get(name): - return bpy.context.scene.objects.get(name) - - else: - bpy.ops.mesh.primitive_plane_add( - size=2, enter_editmode=False, align='WORLD', location=(0, 0, 0), scale=(1, 1, 1)) - needle = bpy.context.active_object - needle.name = name - apply_needle(needle) - - bpy.ops.object.convert(target='MESH') - - return needle - -######## code for creating pine needles ######## - -######## code for creating pine twigs ######## - 
-@node_utils.to_nodegroup('nodegroup_instance_needle', singleton=True, type='GeometryNodeTree') -def nodegroup_instance_needle(nw): - # Code generated using version 2.3.2 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Curve', None), - ('NodeSocketFloatFactor', 'Needle Density', 0.9), - ('NodeSocketInt', 'Seed', 0), - ('NodeSocketGeometry', 'Instance', None), - ('NodeSocketFloat', 'X Angle Mean', 0.5), - ('NodeSocketFloat', 'X Angle Range', 0.0)]) - - spline_parameter_1 = nw.new_node('GeometryNodeSplineParameter') - - greater_than = nw.new_node(Nodes.Compare, - input_kwargs={0: spline_parameter_1.outputs["Factor"], 1: 0.1}) - - random_value_3 = nw.new_node(Nodes.RandomValue, - input_kwargs={'Probability': group_input.outputs["Needle Density"], 'Seed': group_input.outputs["Seed"]}, - attrs={'data_type': 'BOOLEAN'}) - - op_and = nw.new_node(Nodes.BooleanMath, - input_kwargs={0: greater_than, 1: random_value_3.outputs[3]}) - - curve_tangent = nw.new_node('GeometryNodeInputTangent') - - align_euler_to_vector = nw.new_node(Nodes.AlignEulerToVector, - input_kwargs={'Vector': curve_tangent}, - attrs={'axis': 'Y'} - ) - - random_value = nw.new_node(Nodes.RandomValue, - input_kwargs={2: 0.6, 'Seed': group_input.outputs["Seed"]}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': 0.8, 'Y': 0.8, 'Z': random_value.outputs[1]}) - - value_1 = nw.new_node(Nodes.Value) - value_1.outputs[0].default_value = 0.3 - - multiply = nw.new_node(Nodes.VectorMath, - input_kwargs={0: combine_xyz, 1: value_1}, - attrs={'operation': 'MULTIPLY'}) - - instance_on_points = nw.new_node(Nodes.InstanceOnPoints, - input_kwargs={'Points': group_input.outputs["Curve"], 'Selection': op_and, 'Instance': group_input.outputs["Instance"], 'Rotation': align_euler_to_vector, 'Scale': multiply.outputs["Vector"]}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["X Angle Mean"], 1: group_input.outputs["X Angle Range"]}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["X Angle Mean"], 1: group_input.outputs["X Angle Range"]}, - attrs={'operation': 'SUBTRACT'}) - - random_value_2 = nw.new_node(Nodes.RandomValue, - input_kwargs={2: add, 3: subtract, 'Seed': group_input.outputs["Seed"]}) - - radians = nw.new_node(Nodes.Math, - input_kwargs={0: random_value_2.outputs[1]}, - attrs={'operation': 'RADIANS'}) - - random_value_1 = nw.new_node(Nodes.RandomValue, - input_kwargs={3: 360.0, 'Seed': group_input.outputs["Seed"]}) - - radians_1 = nw.new_node(Nodes.Math, - input_kwargs={0: random_value_1.outputs[1]}, - attrs={'operation': 'RADIANS'}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': radians, 'Y': radians_1}) - - rotate_instances = nw.new_node('GeometryNodeRotateInstances', - input_kwargs={'Instances': instance_on_points, 'Rotation': combine_xyz_1}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Instances': rotate_instances}) - -@node_utils.to_nodegroup('nodegroup_needle5', singleton=True, type='GeometryNodeTree') -def nodegroup_needle5(nw): - # Code generated using version 2.3.2 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Curve', None), - ('NodeSocketGeometry', 'Instance', None), - ('NodeSocketFloat', 'X Angle Mean', 0.5), - ('NodeSocketFloat', 'X Angle Range', 0.0), - ('NodeSocketFloatFactor', 'Needle Density', 0.9), - ('NodeSocketInt', 'Seed', 0)]) - - instanceneedle = 
nw.new_node(nodegroup_instance_needle().name, - input_kwargs={'Curve': group_input.outputs["Curve"], 'Needle Density': group_input.outputs["Needle Density"], 'Seed': group_input.outputs["Seed"], 'Instance': group_input.outputs["Instance"], 'X Angle Mean': group_input.outputs["X Angle Mean"], 'X Angle Range': group_input.outputs["X Angle Range"]}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Seed"], 1: 1.0}) - - instanceneedle_1 = nw.new_node(nodegroup_instance_needle().name, - input_kwargs={'Curve': group_input.outputs["Curve"], 'Needle Density': group_input.outputs["Needle Density"], 'Seed': add, 'X Angle Mean': group_input.outputs["X Angle Mean"], 'X Angle Range': group_input.outputs["X Angle Range"]}) - - add_1 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Seed"], 1: 2.0}) - - instanceneedle_2 = nw.new_node(nodegroup_instance_needle().name, - input_kwargs={'Curve': group_input.outputs["Curve"], 'Needle Density': group_input.outputs["Needle Density"], 'Seed': add_1, 'Instance': group_input.outputs["Instance"], 'X Angle Mean': group_input.outputs["X Angle Mean"], 'X Angle Range': group_input.outputs["X Angle Range"]}) - - add_2 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Seed"], 1: 3.0}) - - instanceneedle_3 = nw.new_node(nodegroup_instance_needle().name, - input_kwargs={'Curve': group_input.outputs["Curve"], 'Needle Density': group_input.outputs["Needle Density"], 'Seed': add_2, 'Instance': group_input.outputs["Instance"], 'X Angle Mean': group_input.outputs["X Angle Mean"], 'X Angle Range': group_input.outputs["X Angle Range"]}) - - add_3 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Seed"], 1: 4.0}) - - instanceneedle_4 = nw.new_node(nodegroup_instance_needle().name, - input_kwargs={'Curve': group_input.outputs["Curve"], 'Needle Density': group_input.outputs["Needle Density"], 'Seed': add_3, 'Instance': group_input.outputs["Instance"], 'X Angle Mean': group_input.outputs["X Angle Mean"], 'X Angle Range': group_input.outputs["X Angle Range"]}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [instanceneedle, instanceneedle_1, instanceneedle_2, instanceneedle_3, instanceneedle_4]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Instances': join_geometry}) - -def shader_twig(nw): - # Code generated using version 2.3.2 of the node_transpiler - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': (0.08, 0.0329, 0.0414, 1.0), 'Specular': 0.0527, 'Roughness': 0.4491}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': principled_bsdf}) - -@node_utils.to_nodegroup('nodegroup_pine_twig', singleton=False, type='GeometryNodeTree') -def nodegroup_pine_twig(nw): - # Code generated using version 2.3.2 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketIntUnsigned', 'Resolution', 20), - ('NodeSocketFloat', 'Middle Y', 0.0), - ('NodeSocketFloat', 'Middle Z', 0.0), - ('NodeSocketFloatFactor', 'Needle Density', 0.9), - ('NodeSocketGeometry', 'Instance', None), - ('NodeSocketFloat', 'X Angle Mean', 0.5), - ('NodeSocketFloat', 'X Angle Range', 0.0), - ('NodeSocketInt', 'Seed', 0)]) - - divide = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Resolution"], 1: 30.0}, - attrs={'operation': 'DIVIDE'}) - - divide_1 = nw.new_node(Nodes.Math, - input_kwargs={0: divide, 1: 2.0}, - attrs={'operation': 'DIVIDE'}) - - combine_xyz = 
nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': group_input.outputs["Middle Y"], 'Y': divide_1, 'Z': group_input.outputs["Middle Z"]}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'Y': divide}) - - quadratic_bezier = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Resolution': group_input.outputs["Resolution"], 'Start': (0.0, 0.0, 0.0), 'Middle': combine_xyz, 'End': combine_xyz_1}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'W': -1.7}, - attrs={'noise_dimensions': '4D'}) - - value = nw.new_node(Nodes.Value) - value.outputs[0].default_value = 0.5 - - subtract = nw.new_node(Nodes.VectorMath, - input_kwargs={0: noise_texture.outputs["Color"], 1: value}, - attrs={'operation': 'SUBTRACT'}) - - spline_parameter = nw.new_node('GeometryNodeSplineParameter') - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: spline_parameter.outputs["Factor"], 1: 0.1}, - attrs={'operation': 'MULTIPLY'}) - - multiply_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: subtract.outputs["Vector"], 1: multiply}, - attrs={'operation': 'MULTIPLY'}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': quadratic_bezier, 'Offset': multiply_1.outputs["Vector"]}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': spline_parameter.outputs["Factor"], 3: 1.0, 4: 0.0}) - - power = nw.new_node(Nodes.Math, - input_kwargs={0: 2.0, 1: map_range.outputs["Result"]}, - attrs={'operation': 'POWER'}) - - set_curve_radius = nw.new_node(Nodes.SetCurveRadius, - input_kwargs={'Curve': set_position, 'Radius': power}) - - curve_circle = nw.new_node(Nodes.CurveCircle, - input_kwargs={'Resolution': 16, 'Radius': 0.01}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': set_curve_radius, 'Profile Curve': curve_circle.outputs["Curve"], 'Fill Caps': True}) - - set_material = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': curve_to_mesh, 'Material': surface.shaderfunc_to_material(shader_twig)}) - - needle5 = nw.new_node(nodegroup_needle5().name, - input_kwargs={'Curve': set_position, 'Instance': group_input.outputs["Instance"], 'X Angle Mean': group_input.outputs["X Angle Mean"], 'X Angle Range': group_input.outputs["X Angle Range"], 'Needle Density': group_input.outputs["Needle Density"], 'Seed': group_input.outputs["Seed"]}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [set_material, needle5]}) - - realize_instances = nw.new_node(Nodes.RealizeInstances, - input_kwargs={'Geometry': join_geometry}) - - set_shade_smooth = nw.new_node(Nodes.SetShadeSmooth, - input_kwargs={'Geometry': realize_instances, 'Shade Smooth': False}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_shade_smooth}) - -def shader_twig(nw): - # Code generated using version 2.3.2 of the node_transpiler - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': (0.08, 0.0329, 0.0414, 1.0), 'Specular': 0.0527, 'Roughness': 0.4491}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': principled_bsdf}) - -def geometry_node_pine_twig(nw, needle_name='Needle', length=30, middle_y=0.0, middle_z=0.0, seed=0, x_angle_mean=-50.0, x_angle_range=10.0): - # Code generated using version 2.3.2 of the node_transpiler - - object_info = nw.new_node(Nodes.ObjectInfo, - input_kwargs={'Object': bpy.data.objects[needle_name]}) - - pine_needle = nw.new_node(nodegroup_pine_twig().name, - input_kwargs={'Resolution': length, 'Middle Y': middle_y, 
'Middle Z': middle_z, 'Instance': object_info.outputs["Geometry"], - 'X Angle Mean': x_angle_mean, 'X Angle Range': x_angle_range, 'Seed': seed}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': pine_needle}) - -def apply_twig(obj, selection=None, **kwargs): - surface.add_geomod(obj, geometry_node_pine_twig, selection=selection, attributes=[], input_kwargs=kwargs) - surface.add_material(obj, shader_twig, selection=selection) - -def make_pine_twig(**kwargs): - bpy.ops.mesh.primitive_plane_add( - size=2, enter_editmode=False, align='WORLD', location=(0, 0, 0), scale=(1, 1, 1)) - twig = bpy.context.active_object - twig.name = "Twig" - apply_twig(twig, **kwargs) - - # bpy.ops.object.convert(target='MESH') - - return twig - -class LeafFactoryPine(AssetFactory): - - scale = 0.7 - - def __init__(self, factory_seed, season='autumn', coarse=False): - super(LeafFactoryPine, self).__init__(factory_seed, coarse=coarse) - self.needle = make_needle('Needle') - self.needle.hide_viewport = True - self.needle.hide_render = True - - def create_asset(self, **params): - - # with FixedSeed(self.factory_seed): - seed = randint(0, 1e6) - middle_y = normal(0.0, 0.1) - middle_z = normal(0.0, 0.1) - length = randint(25, 35) - x_angle_mean = uniform(-40, -60) - - obj = make_pine_twig( - needle_name='Needle', - length=length, - middle_y=middle_y, - middle_z=middle_z, - seed=seed, - x_angle_mean=x_angle_mean, - x_angle_range=10.0, - ) - - bpy.ops.object.convert(target='MESH') - - obj = bpy.context.object - obj.scale *= normal(1, 0.05) * self.scale - butil.apply_transform(obj) - butil.purge_empty_materials(obj) # TODO remove when geonodes emptymats solved - tag_object(obj, 'leaf_pine') - - return obj - - - - diff --git a/infinigen/assets/leaves/leaf_v2.py b/infinigen/assets/leaves/leaf_v2.py deleted file mode 100644 index 394a89e2c..000000000 --- a/infinigen/assets/leaves/leaf_v2.py +++ /dev/null @@ -1,1007 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
- -# Authors: Yiming Zuo - - -import colorsys -import logging - -import numpy as np -from numpy.random import uniform, normal - -import bpy - -from infinigen.core import surface -from infinigen.core.nodes import node_utils -from infinigen.core.nodes.node_wrangler import Nodes - -from infinigen.core.util.math import FixedSeed -from infinigen.core.placement.factory import AssetFactory -from infinigen.core.util import blender as butil -from infinigen.core.util.color import color_category - - -import bpy -import mathutils -from numpy.random import uniform, normal -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface -from infinigen.core.tagging import tag_object, tag_nodegroup - -@node_utils.to_nodegroup('shader_nodegroup_sub_vein', singleton=False, type='ShaderNodeTree') -def shader_nodegroup_sub_vein(nw): - # Code generated using version 2.3.2 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'X Modulated', 0.5), - ('NodeSocketFloat', 'Y', 0.0)]) - - absolute = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["X Modulated"]}, - attrs={'operation': 'ABSOLUTE', 'use_clamp': True}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': absolute, 'Y': group_input.outputs["Y"]}) - - voronoi_texture = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': combine_xyz, 'Scale': 30.0, 'Randomness': 0.754}, - attrs={'feature': 'DISTANCE_TO_EDGE'}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': voronoi_texture.outputs["Distance"], 2: 0.1, 4: 3.0}) - - voronoi_texture_1 = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': combine_xyz, 'Scale': 10.0, 'Randomness': 0.754}, - attrs={'feature': 'DISTANCE_TO_EDGE'}) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': voronoi_texture_1.outputs["Distance"], 2: 0.1, 4: 3.0}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: map_range.outputs["Result"], 1: map_range_1.outputs["Result"]}, - attrs={'operation': 'MULTIPLY'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Value': multiply}) - -@node_utils.to_nodegroup('shader_nodegroup_midrib', singleton=False, type='ShaderNodeTree') -def shader_nodegroup_midrib(nw, midrib_curve_control_points=[(0.0, 0.5), (0.2809, 0.4868), (0.7448, 0.5164), (1.0, 0.5)]): - # Code generated using version 2.3.2 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'X', 0.5), - ('NodeSocketFloat', 'Y', -0.6), - ('NodeSocketFloat', 'Midrib Length', 0.4), - ('NodeSocketFloat', 'Midrib Width', 1.0), - ('NodeSocketFloat', 'Stem Length', 0.8) - ]) - - map_range_6 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': group_input.outputs["Y"], 1: -0.6, 2: 0.6}) - - stem_shape = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': map_range_6.outputs["Result"]}, - label='Stem shape') - node_utils.assign_curve(stem_shape.mapping.curves[0], midrib_curve_control_points) - - map_range_7 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': stem_shape, 3: -1.0}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: map_range_7.outputs["Result"], 1: group_input.outputs["X"]}, - attrs={'operation': 'SUBTRACT'}) - - map_range_8 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': group_input.outputs["Y"], 1: -70.0, 2: group_input.outputs["Midrib Length"], 3: 
group_input.outputs["Midrib Width"], 4: 0.0}) - - absolute = nw.new_node(Nodes.Math, - input_kwargs={0: subtract}, - attrs={'operation': 'ABSOLUTE'}) - - subtract_1 = nw.new_node(Nodes.Math, - input_kwargs={0: map_range_8.outputs["Result"], 1: absolute}, - attrs={'operation': 'SUBTRACT'}) - - absolute_1 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Y"]}, - attrs={'operation': 'ABSOLUTE'}) - - map_range_9 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': absolute_1, 2: group_input.outputs["Stem Length"], 3: 1.0, 4: 0.0}) - - smooth_min = nw.new_node(Nodes.Math, - input_kwargs={0: subtract_1, 1: map_range_9.outputs["Result"], 2: 0.06}, - attrs={'operation': 'SMOOTH_MIN'}) - - divide = nw.new_node(Nodes.Math, - input_kwargs={0: map_range_8.outputs["Result"], 1: smooth_min}, - attrs={'operation': 'DIVIDE', 'use_clamp': True}) - - map_range_11 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': divide, 1: 0.001, 2: 0.03, 3: 1.0, 4: 0.0}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'X Modulated': subtract, 'Midrib Value': map_range_11.outputs["Result"]}) - -@node_utils.to_nodegroup('shader_nodegroup_vein_coord', singleton=False, type='ShaderNodeTree') -def shader_nodegroup_vein_coord(nw, vein_curve_control_points=[(0.0, 0.0), (0.3608, 0.2434), (0.7454, 0.4951), (1.0, 1.0)]): - # Code generated using version 2.3.2 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'X Modulated', 0.5), - ('NodeSocketFloat', 'Y', 0.5), - ('NodeSocketFloat', 'Vein Asymmetry', 0.0), - ('NodeSocketFloat', 'Vein Angle', 2.0)]) - - sign = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["X Modulated"]}, - attrs={'operation': 'SIGN'}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: sign, 1: group_input.outputs["Vein Asymmetry"]}, - attrs={'operation': 'MULTIPLY'}) - - map_range_13 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': group_input.outputs["Y"], 1: -1.0}) - - absolute = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["X Modulated"]}, - attrs={'operation': 'ABSOLUTE', 'use_clamp': True}) - - vein__shape = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': absolute}, - label='Vein Shape') - node_utils.assign_curve(vein__shape.mapping.curves[0], vein_curve_control_points) - - map_range_4 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': vein__shape, 2: 0.9, 4: 1.9}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: map_range_4.outputs["Result"], 1: group_input.outputs["Vein Angle"]}, - attrs={'operation': 'MULTIPLY'}) - - multiply_2 = nw.new_node(Nodes.Math, - input_kwargs={0: map_range_13.outputs["Result"], 1: multiply_1}, - attrs={'operation': 'MULTIPLY'}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_2, 1: group_input.outputs["Y"]}, - attrs={'operation': 'SUBTRACT'}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: multiply, 1: subtract}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Vein Coord': add}) - -@node_utils.to_nodegroup('shader_nodegroup_shape', singleton=False, type='ShaderNodeTree') -def shader_nodegroup_shape(nw, shape_curve_control_points=[(0.0, 0.0), (0.3454, 0.2336), (1.0, 0.0)]): - # Code generated using version 2.3.2 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'X Modulated', 0.0), - ('NodeSocketFloat', 'Y', 0.0)]) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': group_input.outputs["X Modulated"], 
'Y': group_input.outputs["Y"]}) - - clamp = nw.new_node('ShaderNodeClamp', - input_kwargs={'Value': group_input.outputs["Y"], 'Min': -0.6, 'Max': 0.6}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'Y': clamp}) - - subtract = nw.new_node(Nodes.VectorMath, - input_kwargs={0: combine_xyz_2, 1: combine_xyz_1}, - attrs={'operation': 'SUBTRACT'}) - - length = nw.new_node(Nodes.VectorMath, - input_kwargs={0: subtract.outputs["Vector"]}, - attrs={'operation': 'LENGTH'}) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': group_input.outputs["Y"], 1: -0.6, 2: 0.6}) - - leaf_shape = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': map_range_1.outputs["Result"]}, - label='Leaf shape') - node_utils.assign_curve(leaf_shape.mapping.curves[0], shape_curve_control_points) - - subtract_1 = nw.new_node(Nodes.Math, - input_kwargs={0: length.outputs["Value"], 1: leaf_shape}, - attrs={'operation': 'SUBTRACT'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Leaf Shape': subtract_1}) - -@node_utils.to_nodegroup('shader_nodegroup_apply_vein_midrib', singleton=False, type='ShaderNodeTree') -def shader_nodegroup_apply_vein_midrib(nw): - # Code generated using version 2.3.2 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Vein Coord', 0.0), - ('NodeSocketFloat', 'Midrib Value', 0.5), - ('NodeSocketFloat', 'Leaf Shape', 1.0), - ('NodeSocketFloat', 'Vein Density', 6.0)]) - - map_range_5 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': group_input.outputs["Leaf Shape"], 1: -0.3, 2: 0.0, 3: 0.015, 4: 0.0}) - - vein = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'W': group_input.outputs["Vein Coord"], 'Scale': group_input.outputs["Vein Density"], 'Randomness': 0.2}, - label='Vein', - attrs={'voronoi_dimensions': '1D'}) - - map_range_3 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': vein.outputs["Distance"], 1: 0.001, 2: 0.05, 3: 1.0, 4: 0.0}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: map_range_5.outputs["Result"], 1: map_range_3.outputs["Result"]}, - attrs={'operation': 'MULTIPLY'}) - - map_range_10 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': multiply, 1: 0.001, 2: 0.03, 3: 1.0, 4: 0.0}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Midrib Value"], 1: map_range_10.outputs["Result"]}, - attrs={'operation': 'MULTIPLY'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Vein Value': multiply_1}) - -@node_utils.to_nodegroup('shader_nodegroup_leaf_gen', singleton=False, type='ShaderNodeTree') -def shader_nodegroup_leaf_gen(nw, midrib_curve_control_points, vein_curve_control_points, shape_curve_control_points): - # Code generated using version 2.3.2 of the node_transpiler - input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Mesh', None), - ('NodeSocketFloat', 'Displancement scale', 0.01), - ('NodeSocketFloat', 'Vein Asymmetry', 0.8), - ('NodeSocketFloat', 'Vein Density', 10.0), - ('NodeSocketFloat', 'Jigsaw Scale', 18.0), - ('NodeSocketFloat', 'Jigsaw Depth', 1.0), - ('NodeSocketFloat', 'Vein Angle', 1.0), - ('NodeSocketFloat', 'Sub-vein Displacement', 0.5), - ('NodeSocketFloat', 'Sub-vein Scale', 20.0), - ('NodeSocketFloat', 'Wave Displacement', 0.05), - ('NodeSocketFloat', 'Midrib Length', 0.4), - ('NodeSocketFloat', 'Midrib Width', 1.0), - ('NodeSocketFloat', 'Stem Length', 0.8), - ]) - - coordinate = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'coordinate'}) - - 
separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': coordinate.outputs["Vector"]}) - - midrib = nw.new_node(shader_nodegroup_midrib(midrib_curve_control_points=midrib_curve_control_points).name, - input_kwargs={'X': separate_xyz.outputs["X"], 'Y': separate_xyz.outputs["Y"], - 'Midrib Length': input.outputs["Midrib Length"], 'Midrib Width': input.outputs["Midrib Width"], 'Stem Length': input.outputs["Stem Length"] - }) - - veincoord = nw.new_node(shader_nodegroup_vein_coord(vein_curve_control_points=vein_curve_control_points).name, - input_kwargs={'X Modulated': midrib.outputs["X Modulated"], 'Y': separate_xyz.outputs["Y"], - 'Vein Asymmetry': input.outputs["Vein Asymmetry"], 'Vein Angle': input.outputs["Vein Angle"]}) - - shape = nw.new_node(shader_nodegroup_shape(shape_curve_control_points=shape_curve_control_points).name, - input_kwargs={'X Modulated': midrib.outputs["X Modulated"], 'Y': separate_xyz.outputs["Y"]}) - - applyveinmidrib = nw.new_node(shader_nodegroup_apply_vein_midrib().name, - input_kwargs={'Vein Coord': veincoord, 'Midrib Value': midrib.outputs["Midrib Value"], - 'Leaf Shape': shape, 'Vein Density': input.outputs["Vein Density"]}) - - subvein = nw.new_node(shader_nodegroup_sub_vein().name, - input_kwargs={'X Modulated': midrib.outputs["X Modulated"], 'Y': veincoord}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Vein Value': applyveinmidrib, 'Sub Vein Value': subvein}) - - -@node_utils.to_nodegroup('nodegroup_shape_with_jigsaw', singleton=False, type='GeometryNodeTree') -def nodegroup_shape_with_jigsaw(nw): - # Code generated using version 2.3.2 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Midrib Value', 1.0), - ('NodeSocketFloat', 'Vein Coord', 0.0), - ('NodeSocketFloat', 'Leaf Shape', 0.5), - ('NodeSocketFloat', 'Jigsaw Scale', 18.0), - ('NodeSocketFloat', 'Jigsaw Depth', 0.5)]) - - map_range_12 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': group_input.outputs["Midrib Value"], 3: 1.0, 4: 0.0}) - - jigsaw = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'W': group_input.outputs["Vein Coord"], 'Scale': group_input.outputs["Jigsaw Scale"]}, - label='Jigsaw', - attrs={'voronoi_dimensions': '1D'}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Jigsaw Depth"], 1: 0.05}, - attrs={'operation': 'MULTIPLY'}) - - multiply_add = nw.new_node(Nodes.Math, - input_kwargs={0: jigsaw.outputs["Distance"], 1: multiply, 2: group_input.outputs["Leaf Shape"]}, - attrs={'operation': 'MULTIPLY_ADD', 'use_clamp': True}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': multiply_add, 1: 0.001, 2: 0.002, 3: 1.0, 4: 0.0}) - - maximum = nw.new_node(Nodes.Math, - input_kwargs={0: map_range_12.outputs["Result"], 1: map_range.outputs["Result"]}, - attrs={'operation': 'MAXIMUM'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Value': maximum}) - -@node_utils.to_nodegroup('nodegroup_shape', singleton=False, type='GeometryNodeTree') -def nodegroup_shape(nw, shape_curve_control_points=[(0.0, 0.0), (0.3454, 0.2336), (1.0, 0.0)]): - # Code generated using version 2.3.2 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'X Modulated', 0.0), - ('NodeSocketFloat', 'Y', 0.0)]) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': group_input.outputs["X Modulated"], 'Y': group_input.outputs["Y"]}) - - clamp = nw.new_node('ShaderNodeClamp', - 
input_kwargs={'Value': group_input.outputs["Y"], 'Min': -0.6, 'Max': 0.6}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'Y': clamp}) - - subtract = nw.new_node(Nodes.VectorMath, - input_kwargs={0: combine_xyz_2, 1: combine_xyz_1}, - attrs={'operation': 'SUBTRACT'}) - - length = nw.new_node(Nodes.VectorMath, - input_kwargs={0: subtract.outputs["Vector"]}, - attrs={'operation': 'LENGTH'}) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': group_input.outputs["Y"], 1: -0.6, 2: 0.6}) - - leaf_shape = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': map_range_1.outputs["Result"]}, - label='Leaf shape') - node_utils.assign_curve(leaf_shape.mapping.curves[0], shape_curve_control_points) - - subtract_1 = nw.new_node(Nodes.Math, - input_kwargs={0: length.outputs["Value"], 1: leaf_shape}, - attrs={'operation': 'SUBTRACT'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Leaf Shape': subtract_1}) - -@node_utils.to_nodegroup('nodegroup_midrib', singleton=False, type='GeometryNodeTree') -def nodegroup_midrib(nw, midrib_curve_control_points=[(0.0, 0.5), (0.2809, 0.4868), (0.7448, 0.5164), (1.0, 0.5)]): - # Code generated using version 2.3.2 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'X', 0.5), - ('NodeSocketFloat', 'Y', -0.6), - ('NodeSocketFloat', 'Midrib Length', 0.4), - ('NodeSocketFloat', 'Midrib Width', 1.0), - ('NodeSocketFloat', 'Stem Length', 0.8) - ]) - - map_range_6 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': group_input.outputs["Y"], 1: -0.6, 2: 0.6}) - - stem_shape = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': map_range_6.outputs["Result"]}, - label='Stem shape') - node_utils.assign_curve(stem_shape.mapping.curves[0], midrib_curve_control_points) - - map_range_7 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': stem_shape, 3: -1.0}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: map_range_7.outputs["Result"], 1: group_input.outputs["X"]}, - attrs={'operation': 'SUBTRACT'}) - - map_range_8 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': group_input.outputs["Y"], 1: -70.0, 2: group_input.outputs["Midrib Length"], 3: group_input.outputs["Midrib Width"], 4: 0.0}) - - absolute = nw.new_node(Nodes.Math, - input_kwargs={0: subtract}, - attrs={'operation': 'ABSOLUTE'}) - - subtract_1 = nw.new_node(Nodes.Math, - input_kwargs={0: map_range_8.outputs["Result"], 1: absolute}, - attrs={'operation': 'SUBTRACT'}) - - absolute_1 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Y"]}, - attrs={'operation': 'ABSOLUTE'}) - - map_range_9 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': absolute_1, 2: group_input.outputs["Stem Length"], 3: 1.0, 4: 0.0}) - - smooth_min = nw.new_node(Nodes.Math, - input_kwargs={0: subtract_1, 1: map_range_9.outputs["Result"], 2: 0.06}, - attrs={'operation': 'SMOOTH_MIN'}) - - divide = nw.new_node(Nodes.Math, - input_kwargs={0: map_range_8.outputs["Result"], 1: smooth_min}, - attrs={'operation': 'DIVIDE', 'use_clamp': True}) - - map_range_11 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': divide, 1: 0.001, 2: 0.03, 3: 1.0, 4: 0.0}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'X Modulated': subtract, 'Midrib Value': map_range_11.outputs["Result"]}) - -@node_utils.to_nodegroup('nodegroup_vein_coord', singleton=False, type='GeometryNodeTree') -def nodegroup_vein_coord(nw, vein_curve_control_points=[(0.0, 0.0), (0.3608, 0.2434), (0.7454, 0.4951), (1.0, 1.0)]): - # 
Code generated using version 2.3.2 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'X Modulated', 0.5), - ('NodeSocketFloat', 'Y', 0.5), - ('NodeSocketFloat', 'Vein Asymmetry', 0.0), - ('NodeSocketFloat', 'Vein Angle', 2.0)]) - - sign = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["X Modulated"]}, - attrs={'operation': 'SIGN'}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: sign, 1: group_input.outputs["Vein Asymmetry"]}, - attrs={'operation': 'MULTIPLY'}) - - map_range_13 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': group_input.outputs["Y"], 1: -1.0}) - - absolute = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["X Modulated"]}, - attrs={'operation': 'ABSOLUTE', 'use_clamp': True}) - - vein__shape = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': absolute}, - label='Vein Shape') - node_utils.assign_curve(vein__shape.mapping.curves[0], vein_curve_control_points) - - map_range_4 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': vein__shape, 2: 0.9, 4: 1.9}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: map_range_4.outputs["Result"], 1: group_input.outputs["Vein Angle"]}, - attrs={'operation': 'MULTIPLY'}) - - multiply_2 = nw.new_node(Nodes.Math, - input_kwargs={0: map_range_13.outputs["Result"], 1: multiply_1}, - attrs={'operation': 'MULTIPLY'}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_2, 1: group_input.outputs["Y"]}, - attrs={'operation': 'SUBTRACT'}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: multiply, 1: subtract}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Vein Coord': add}) - -@node_utils.to_nodegroup('nodegroup_apply_vein_midrib', singleton=False, type='GeometryNodeTree') -def nodegroup_apply_vein_midrib(nw): - # Code generated using version 2.3.2 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Vein Coord', 0.0), - ('NodeSocketFloat', 'Midrib Value', 0.5), - ('NodeSocketFloat', 'Leaf Shape', 1.0), - ('NodeSocketFloat', 'Vein Density', 6.0)]) - - map_range_5 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': group_input.outputs["Leaf Shape"], 1: -0.3, 2: 0.0, 3: 0.015, 4: 0.0}) - - vein = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'W': group_input.outputs["Vein Coord"], 'Scale': group_input.outputs["Vein Density"], 'Randomness': 0.2}, - label='Vein', - attrs={'voronoi_dimensions': '1D'}) - - map_range_3 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': vein.outputs["Distance"], 1: 0.001, 2: 0.05, 3: 1.0, 4: 0.0}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: map_range_5.outputs["Result"], 1: map_range_3.outputs["Result"]}, - attrs={'operation': 'MULTIPLY'}) - - map_range_10 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': multiply, 1: 0.001, 2: 0.01, 3: 1.0, 4: 0.0}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Midrib Value"], 1: map_range_10.outputs["Result"]}, - attrs={'operation': 'MULTIPLY'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Vein Value': multiply_1}) - -@node_utils.to_nodegroup('nodegroup_leaf_gen', singleton=False, type='GeometryNodeTree') -def nodegroup_leaf_gen(nw, midrib_curve_control_points, vein_curve_control_points, shape_curve_control_points): - # Code generated using version 2.3.2 of the node_transpiler - - geometry = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Mesh', None), - 
('NodeSocketFloat', 'Displancement scale', 0.5), - ('NodeSocketFloat', 'Vein Asymmetry', 0.0), - ('NodeSocketFloat', 'Vein Density', 6.0), - ('NodeSocketFloat', 'Jigsaw Scale', 18.0), - ('NodeSocketFloat', 'Jigsaw Depth', 0.07), - ('NodeSocketFloat', 'Vein Angle', 1.0), - ('NodeSocketFloat', 'Sub-vein Displacement', 0.5), - ('NodeSocketFloat', 'Sub-vein Scale', 50.0), - ('NodeSocketFloat', 'Wave Displacement', 0.1), - ('NodeSocketFloat', 'Midrib Length', 0.4), - ('NodeSocketFloat', 'Midrib Width', 1.0), - ('NodeSocketFloat', 'Stem Length', 0.8), - ]) - - position = nw.new_node(Nodes.InputPosition) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': position}) - - midrib = nw.new_node(nodegroup_midrib(midrib_curve_control_points=midrib_curve_control_points).name, - input_kwargs={'X': separate_xyz.outputs["X"], 'Y': separate_xyz.outputs["Y"], - 'Midrib Length': geometry.outputs["Midrib Length"], 'Midrib Width': geometry.outputs["Midrib Width"], 'Stem Length': geometry.outputs["Stem Length"] - }) - - veincoord = nw.new_node(nodegroup_vein_coord(vein_curve_control_points=vein_curve_control_points).name, - input_kwargs={'X Modulated': midrib.outputs["X Modulated"], 'Y': separate_xyz.outputs["Y"], 'Vein Asymmetry': geometry.outputs["Vein Asymmetry"], 'Vein Angle': geometry.outputs["Vein Angle"]}) - - shape = nw.new_node(nodegroup_shape(shape_curve_control_points=shape_curve_control_points).name, - input_kwargs={'X Modulated': midrib.outputs["X Modulated"], 'Y': separate_xyz.outputs["Y"]}) - - applyveinmidrib = nw.new_node(nodegroup_apply_vein_midrib().name, - input_kwargs={'Vein Coord': veincoord, 'Midrib Value': midrib.outputs["Midrib Value"], 'Leaf Shape': shape, 'Vein Density': geometry.outputs["Vein Density"]}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: geometry.outputs["Displancement scale"], 1: applyveinmidrib}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'Z': multiply}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': geometry.outputs["Mesh"], 'Offset': combine_xyz}) - - shapewithjigsaw = nw.new_node(nodegroup_shape_with_jigsaw().name, - input_kwargs={'Midrib Value': midrib.outputs["Midrib Value"], 'Vein Coord': veincoord, 'Leaf Shape': shape, 'Jigsaw Scale': geometry.outputs["Jigsaw Scale"], 'Jigsaw Depth': geometry.outputs["Jigsaw Depth"]}) - - less_than = nw.new_node(Nodes.Compare, - input_kwargs={0: shapewithjigsaw, 1: 0.5}, - attrs={'operation': 'LESS_THAN'}) - - delete_geometry = nw.new_node('GeometryNodeDeleteGeometry', - input_kwargs={'Geometry': set_position, 'Selection': less_than}) - - capture_attribute = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': delete_geometry, 2: applyveinmidrib}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Mesh': capture_attribute, 'Attribute': capture_attribute.outputs[2], 'X Modulated': midrib.outputs["X Modulated"], 'Vein Coord': veincoord}) - - -@node_utils.to_nodegroup('nodegroup_sub_vein', singleton=False, type='GeometryNodeTree') -def nodegroup_sub_vein(nw): - # Code generated using version 2.3.2 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'X', 0.5), - ('NodeSocketFloat', 'Y', 0.0)]) - - absolute = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["X"]}, - attrs={'operation': 'ABSOLUTE', 'use_clamp': True}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': absolute, 'Y': 
group_input.outputs["Y"]}) - - voronoi_texture = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': combine_xyz, 'Scale': 30.0, 'Randomness': 0.754}, - attrs={'feature': 'DISTANCE_TO_EDGE'}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': voronoi_texture.outputs["Distance"], 2: 0.1}) - - voronoi_texture_1 = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': combine_xyz, 'Scale': 10.0, 'Randomness': 0.754}, - attrs={'feature': 'DISTANCE_TO_EDGE'}) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': voronoi_texture_1.outputs["Distance"], 2: 0.1}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: map_range.outputs["Result"], 1: map_range_1.outputs["Result"]}, - attrs={'operation': 'MULTIPLY'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Value': multiply}) - - -@node_utils.to_nodegroup('nodegroup_add_noise', singleton=False, type='GeometryNodeTree') -def nodegroup_add_noise(nw): - # Code generated using version 2.3.2 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketFloat', 'Displacement', 0.05), - ('NodeSocketFloat', 'Scale', 10.0)]) - - position_1 = nw.new_node(Nodes.InputPosition) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': position_1, 'Scale': group_input.outputs["Scale"]}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: noise_texture.outputs["Fac"], 1: group_input.outputs["Displacement"]}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'Z': multiply}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 'Offset': combine_xyz}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_position}) - - -@node_utils.to_nodegroup('nodegroup_apply_wave', singleton=False, type='GeometryNodeTree') -def nodegroup_apply_wave(nw, y_wave_control_points, x_wave_control_points): - # Code generated using version 2.3.2 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketFloat', 'Wave Scale Y', 1.0), - ('NodeSocketFloat', 'Wave Scale X', 1.0), - ('NodeSocketFloat', 'X Modulated', None), - ]) - - position = nw.new_node(Nodes.InputPosition) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': position}) - - position_1 = nw.new_node(Nodes.InputPosition) - - separate_xyz_1 = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': position_1}) - - attribute_statistic = nw.new_node(Nodes.AttributeStatistic, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 2: separate_xyz_1.outputs["Y"]}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': separate_xyz.outputs["Y"], 1: attribute_statistic.outputs["Min"], 2: attribute_statistic.outputs["Max"]}) - - float_curves = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': map_range.outputs["Result"]}) - node_utils.assign_curve(float_curves.mapping.curves[0], y_wave_control_points) - - map_range_2 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': float_curves, 3: -1.0}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: map_range_2.outputs["Result"], 1: group_input.outputs["Wave Scale Y"]}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'Z': multiply}) - - set_position = nw.new_node(Nodes.SetPosition, - 
input_kwargs={'Geometry': group_input.outputs["Geometry"], 'Offset': combine_xyz}) - - attribute_statistic_1 = nw.new_node(Nodes.AttributeStatistic, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 2: group_input.outputs['X Modulated']}) - - map_range_7 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': group_input.outputs['X Modulated'], 1: attribute_statistic_1.outputs["Min"], 2: attribute_statistic_1.outputs["Max"]}) - - float_curves_2 = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': map_range_7.outputs["Result"]} - ) - node_utils.assign_curve(float_curves_2.mapping.curves[0], x_wave_control_points) - float_curves_2.mapping.curves[0].points[2].handle_type = 'VECTOR' - - map_range_4 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': float_curves_2, 3: -1.0}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: map_range_4.outputs["Result"], 1: group_input.outputs["Wave Scale X"]}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'Z': multiply_1}) - - set_position_1 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': set_position, 'Offset': combine_xyz_1}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_position_1}) - -@node_utils.to_nodegroup('nodegroup_move_to_origin', singleton=False, type='GeometryNodeTree') -def nodegroup_move_to_origin(nw): - # Code generated using version 2.3.2 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None)]) - - position = nw.new_node(Nodes.InputPosition) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': position}) - - attribute_statistic = nw.new_node(Nodes.AttributeStatistic, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 2: separate_xyz.outputs["Y"]}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: 0.0, 1: attribute_statistic.outputs["Min"]}, - attrs={'operation': 'SUBTRACT'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'Y': subtract}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 'Offset': combine_xyz}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_position}) - -@node_utils.to_nodegroup('nodegroup_blight', singleton=False, type='ShaderNodeTree') -def nodegroup_blight(nw): - # Code generated using version 2.3.2 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'Coordinate', (0.0, 0.0, 0.0)), - ('NodeSocketColor', 'Leaf Color', (0.5, 0.5, 0.5, 1.0)), - ('NodeSocketColor', 'Blight Color', (0.5, 0.3992, 0.035, 1.0)), - ('NodeSocketFloat', 'Random Seed', 18.3), - ('NodeSocketFloat', 'Offset', 0.5)]) - - musgrave_texture = nw.new_node(Nodes.MusgraveTexture, - input_kwargs={'Vector': group_input.outputs["Coordinate"], 'W': group_input.outputs["Random Seed"], 'Scale': 4.0, 'Detail': 10.0, 'Dimension': 10.0, 'Lacunarity': 5.0, 'Offset': group_input.outputs["Offset"]}, - attrs={'musgrave_dimensions': '4D', 'musgrave_type': 'HETERO_TERRAIN'}) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': musgrave_texture, 4: 0.8}) - - mix_4 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': map_range_1.outputs["Result"], 'Color1': group_input.outputs["Leaf Color"], 'Color2': group_input.outputs["Blight Color"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Color': mix_4}) - 
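# A minimal sketch (illustrative, not part of the original sources) of the
# node_utils.to_nodegroup pattern used throughout this file: the decorated
# function builds a reusable node tree with a NodeWrangler, and callers
# instantiate it via nw.new_node(<group_fn>().name, ...). The group name
# 'nodegroup_example_scale' and its sockets are assumptions made up for
# illustration.

from infinigen.core.nodes import node_utils
from infinigen.core.nodes.node_wrangler import Nodes

@node_utils.to_nodegroup('nodegroup_example_scale', singleton=False, type='ShaderNodeTree')
def nodegroup_example_scale(nw):
    group_input = nw.new_node(Nodes.GroupInput,
        expose_input=[('NodeSocketFloat', 'Value', 0.0),
            ('NodeSocketFloat', 'Scale', 1.0)])

    multiply = nw.new_node(Nodes.Math,
        input_kwargs={0: group_input.outputs["Value"], 1: group_input.outputs["Scale"]},
        attrs={'operation': 'MULTIPLY'})

    nw.new_node(Nodes.GroupOutput,
        input_kwargs={'Value': multiply})

# Typical use from another nodegroup or shader function:
#   scaled = nw.new_node(nodegroup_example_scale().name,
#                        input_kwargs={'Value': some_socket, 'Scale': 2.0})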
-@node_utils.to_nodegroup('nodegroup_dotted_blight', singleton=False, type='ShaderNodeTree') -def nodegroup_dotted_blight(nw): - # Code generated using version 2.3.2 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'Coord', (0.0, 0.0, 0.0)), - ('NodeSocketColor', 'Leaf Color', (0.5, 0.5, 0.5, 1.0)), - ('NodeSocketColor', 'Blight Color', (0.4969, 0.2831, 0.0273, 1.0))]) - - voronoi_texture = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': group_input.outputs["Coord"], 'Scale': 20.0}, - attrs={'voronoi_dimensions': '2D'}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': voronoi_texture.outputs["Distance"], 2: 0.15, 3: 1.0, 4: 0.0}) - - mix_5 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': map_range.outputs["Result"], 'Color1': group_input.outputs["Blight Color"], 'Color2': (0.0, 0.0, 0.0, 1.0)}) - - mix_3 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': map_range.outputs["Result"], 'Color1': group_input.outputs["Leaf Color"], 'Color2': mix_5}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Color': mix_3}) - - -def shader_leaf_new(nw, **kwargs): - # Code generated using version 2.3.2 of the node_transpiler - leafgen = nw.new_node(shader_nodegroup_leaf_gen(midrib_curve_control_points=kwargs['midrib_shape_control_points'], - vein_curve_control_points=kwargs['vein_shape_control_points'], - shape_curve_control_points=kwargs['leaf_shape_control_points']).name, - input_kwargs={'Displancement scale': 0.01, - 'Vein Asymmetry': kwargs['vein_asymmetry'], - 'Vein Angle': kwargs['vein_angle'], - 'Vein Density': kwargs['vein_density'], - 'Jigsaw Scale': kwargs['jigsaw_scale'], - 'Jigsaw Depth': kwargs['jigsaw_depth'], - 'Midrib Length': kwargs['midrib_length'], - 'Midrib Width': kwargs['midrib_width'], - 'Stem Length': kwargs['stem_length'] - }) - - rgb = nw.new_node(Nodes.RGB) - rgb.outputs[0].default_value = kwargs['blade_color'] - - mix = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': kwargs['vein_color_mix_factor'], 'Color1': rgb, 'Color2': (0.35, 0.35, 0.35, 1.0)}) - - mix_1 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': leafgen.outputs["Sub Vein Value"], 'Color1': mix, 'Color2': rgb}) - - mix_2 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': leafgen.outputs["Vein Value"], 'Color1': mix, 'Color2': mix_1}) - - texture_coordinate = nw.new_node(Nodes.TextureCoord) - - rgb_1 = nw.new_node(Nodes.RGB) - rgb_1.outputs[0].default_value = kwargs['blight_color'] - - group_1 = nw.new_node(nodegroup_dotted_blight().name, - input_kwargs={'Coord': texture_coordinate.outputs["Generated"], 'Leaf Color': mix_2, 'Blight Color': rgb_1}) - - mix_3 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': kwargs['dotted_blight_weight'], 'Color1': mix_2, 'Color2': group_1}) - - group_2 = nw.new_node(nodegroup_blight().name, - input_kwargs={'Coordinate': texture_coordinate.outputs["Generated"], 'Leaf Color': mix_3, 'Blight Color': rgb_1, 'Random Seed': kwargs['blight_random_seed'], 'Offset': kwargs['blight_area_factor']}) - - mix_4 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': kwargs['blight_weight'], 'Color1': mix_3, 'Color2': group_2}) - - translucent_bsdf = nw.new_node(Nodes.TranslucentBSDF, - input_kwargs={'Color': mix_4}) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': mix_4}) - - mix_shader = nw.new_node(Nodes.MixShader, - input_kwargs={'Fac': 0.7, 1: translucent_bsdf, 2: principled_bsdf}) - - material_output = nw.new_node(Nodes.MaterialOutput, - 
input_kwargs={'Surface': mix_shader}) - - - -def geo_leaf_v2(nw, **kwargs): - # Code generated using version 2.3.2 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None)]) - - subdivide_mesh = nw.new_node(Nodes.SubdivideMesh, - input_kwargs={'Mesh': group_input.outputs["Geometry"], 'Level': 10}) - - position = nw.new_node(Nodes.InputPosition) - - capture_attribute = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': subdivide_mesh, 1: position}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - leafgen = nw.new_node(nodegroup_leaf_gen(midrib_curve_control_points=kwargs['midrib_shape_control_points'], - vein_curve_control_points=kwargs['vein_shape_control_points'], - shape_curve_control_points=kwargs['leaf_shape_control_points']).name, - input_kwargs={'Mesh': capture_attribute.outputs["Geometry"], - 'Displancement scale': 0.005, - 'Vein Asymmetry': kwargs['vein_asymmetry'], - 'Vein Angle': kwargs['vein_angle'], - 'Vein Density': kwargs['vein_density'], - 'Jigsaw Scale': kwargs['jigsaw_scale'], - 'Jigsaw Depth': kwargs['jigsaw_depth'], - 'Midrib Length': kwargs['midrib_length'], - 'Midrib Width': kwargs['midrib_width'], - 'Stem Length': kwargs['stem_length'], - }) - - # addnoise = nw.new_node(nodegroup_add_noise().name, - # input_kwargs={'Geometry': leafgen.outputs["Mesh"], 'Displacement': 0.03, 'Scale': 10.0}) - - subvein = nw.new_node(nodegroup_sub_vein().name, - input_kwargs={'X': leafgen.outputs["X Modulated"], 'Y': leafgen.outputs["Vein Coord"]}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: subvein, 1: 0.001}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'Z': multiply}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': leafgen.outputs["Mesh"], 'Offset': combine_xyz}) - - logging.warning(f'Disabling set_position to avoid LeafV2 segfault') - set_position = leafgen.outputs["Mesh"] - - applywave = nw.new_node(nodegroup_apply_wave(y_wave_control_points=kwargs['y_wave_control_points'], x_wave_control_points=kwargs['x_wave_control_points']).name, - input_kwargs={'Geometry': set_position, 'Wave Scale X': 0.15, 'Wave Scale Y': 1.5, 'X Modulated': leafgen.outputs["X Modulated"]}) - - movetoorigin = nw.new_node(nodegroup_move_to_origin().name, - input_kwargs={'Geometry': applywave}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': movetoorigin, 'Attribute': leafgen.outputs["Attribute"], 'Coordinate': capture_attribute.outputs["Attribute"]}) - -class LeafFactoryV2(AssetFactory): - - scale = 0.5 - - def __init__(self, factory_seed, coarse=False): - super(LeafFactoryV2, self).__init__(factory_seed, coarse=coarse) - - with FixedSeed(factory_seed): - self.genome = self.sample_geo_genome() - - t = uniform(0.0, 1.0) - - if t < 0.8: - self.blade_color = color_category('greenery') - elif t < 0.9: - self.blade_color = color_category('yellowish') - else: - self.blade_color = color_category('red') - - self.blight_color = color_category('yellowish') - self.vein_color_mix_factor = uniform(0.2, 0.6) - - @staticmethod - def sample_geo_genome(): - return { - 'midrib_length': uniform(0.0, 0.8), - 'midrib_width': uniform(0.5, 1.0), - 'stem_length': uniform(0.7, 0.9), - 'vein_asymmetry': uniform(0.0, 1.0), - 'vein_angle': uniform(0.2, 2.0), - 'vein_density': uniform(5.0, 20.0), - 'subvein_scale': uniform(10.0, 20.0), - 'jigsaw_scale': uniform(5.0, 20.0), - 'jigsaw_depth': uniform(0.0, 2.0), - 
'midrib_shape_control_points': [(0.0, 0.5), (0.25, uniform(0.48, 0.52)), (0.75, uniform(0.48, 0.52)), (1.0, 0.5)], - 'leaf_shape_control_points': [(0.0, 0.0), (uniform(0.2, 0.4), uniform(0.1, 0.4)), (uniform(0.6, 0.8), uniform(0.1, 0.4)), (1.0, 0.0)], - 'vein_shape_control_points': [(0.0, 0.0), (0.25, uniform(0.1, 0.4)), (0.75, uniform(0.6, 0.9)), (1.0, 1.0)], - } - - def create_asset(self, **params): - - bpy.ops.mesh.primitive_plane_add( - size=2, enter_editmode=False, align='WORLD', location=(0, 0, 0), scale=(1, 1, 1)) - obj = bpy.context.active_object - - # add noise to the genotype output - #hue_noise = np.random.randn() * 0 - #hsv_blade = self.hsv_blade + hue_noise - #hsv_vein = self.hsv_vein + hue_noise - - phenome = self.genome.copy() - - phenome['y_wave_control_points'] = [(0.0, 0.5), (np.random.uniform(0.25, 0.75), np.random.uniform(0.50, 0.60)), (1.0, 0.5)] - x_wave_val = np.random.uniform(0.50, 0.58) - phenome['x_wave_control_points'] = [(0.0, 0.5), (0.4, x_wave_val), (0.5, 0.5), (0.6, x_wave_val), (1.0, 0.5)] - - material_kwargs = phenome.copy() - material_kwargs['blade_color'] = self.blade_color - material_kwargs['blade_color'][0] += np.random.normal(0.0, 0.03) - material_kwargs['blade_color'][1] += np.random.normal(0.0, 0.03) - material_kwargs['blade_color'][2] += np.random.normal(0.0, 0.03) - - material_kwargs['blight_color'] = self.blight_color - - material_kwargs['vein_color_mix_factor'] = self.vein_color_mix_factor - material_kwargs['blight_weight'] = np.random.binomial(1, 0.1) - material_kwargs['dotted_blight_weight'] = np.random.binomial(1, 0.1) - material_kwargs['blight_random_seed'] = np.random.uniform(0.0, 100.0) - material_kwargs['blight_area_factor'] = np.random.uniform(0.2, 0.8) - - # TODO: add more phenome attributes - - surface.add_geomod(obj, geo_leaf_v2, apply=False, - attributes=['offset', 'coordinate'], input_kwargs=phenome) - surface.add_material(obj, shader_leaf_new, - reuse=False, input_kwargs=material_kwargs) - - bpy.ops.object.convert(target='MESH') - - obj = bpy.context.object - obj.scale *= normal(1, 0.05) * self.scale - butil.apply_transform(obj) - tag_object(obj, 'leaf') - - return obj diff --git a/infinigen/assets/leaves/leaf_wrapped.py b/infinigen/assets/leaves/leaf_wrapped.py deleted file mode 100644 index 07c5e17d3..000000000 --- a/infinigen/assets/leaves/leaf_wrapped.py +++ /dev/null @@ -1,184 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
- -# Authors: Yiming Zuo - - -import bpy -import mathutils -import numpy as np -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface - -from infinigen.assets.leaves.leaf_maple import LeafFactoryMaple -from infinigen.assets.leaves.leaf_broadleaf import LeafFactoryBroadleaf -from infinigen.assets.leaves.leaf_ginko import LeafFactoryGinko -from infinigen.core.placement.factory import AssetFactory - -def nodegroup_nodegroup_apply_wrap(nw: NodeWrangler, **kwargs): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None)]) - - angle = nw.new_node(Nodes.Value, - label='angle') - angle.outputs[0].default_value = kwargs['angle'] - - radians = nw.new_node(Nodes.Math, - input_kwargs={0: angle}, - attrs={'operation': 'RADIANS'}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'Z': radians}) - - transform_2 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 'Rotation': combine_xyz_2}) - - position_1 = nw.new_node(Nodes.InputPosition) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': position_1}) - - rotation = nw.new_node(Nodes.Value, - label='rotation') - rotation.outputs[0].default_value = kwargs['rotation'] - - value = nw.new_node(Nodes.Value) - value.outputs[0].default_value = 1.0 - - end_radius = nw.new_node(Nodes.Value, - label='end_radius') - end_radius.outputs[0].default_value = kwargs['end_radius'] - - spiral = nw.new_node('GeometryNodeCurveSpiral', - input_kwargs={'Resolution': 1000, 'Rotations': rotation, 'Start Radius': value, 'End Radius': end_radius, 'Height': 0.0}) - - curve_length = nw.new_node(Nodes.CurveLength, - input_kwargs={'Curve': spiral}) - - position = nw.new_node(Nodes.InputPosition) - - separate_xyz_1 = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': position}) - - attribute_statistic = nw.new_node(Nodes.AttributeStatistic, - input_kwargs={'Geometry': transform_2, 2: separate_xyz_1.outputs["Y"]}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: attribute_statistic.outputs["Max"], 1: attribute_statistic.outputs["Min"]}, - attrs={'operation': 'SUBTRACT'}) - - divide = nw.new_node(Nodes.Math, - input_kwargs={0: curve_length, 1: subtract}, - attrs={'operation': 'DIVIDE'}) - - divide_1 = nw.new_node(Nodes.Math, - input_kwargs={0: value, 1: divide}, - attrs={'operation': 'DIVIDE'}) - - divide_2 = nw.new_node(Nodes.Math, - input_kwargs={0: end_radius, 1: divide}, - attrs={'operation': 'DIVIDE'}) - - spiral_1 = nw.new_node('GeometryNodeCurveSpiral', - input_kwargs={'Resolution': 1000, 'Rotations': rotation, 'Start Radius': divide_1, 'End Radius': divide_2, 'Height': 0.0}) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': spiral_1, 'Rotation': (0.0, 1.5708, 3.1416)}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Scale': 2.0}) - - subtract_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: noise_texture.outputs["Color"], 1: (0.5, 0.5, 0.5)}, - attrs={'operation': 'SUBTRACT'}) - - noise_level = nw.new_node(Nodes.Value, - label='noise_level') - noise_level.outputs[0].default_value = kwargs['noise_level'] - - multiply = nw.new_node(Nodes.VectorMath, - input_kwargs={0: subtract_1.outputs["Vector"], 1: noise_level}, - attrs={'operation': 
'MULTIPLY'}) - - set_position_2 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': transform, 'Offset': multiply.outputs["Vector"]}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': separate_xyz_1.outputs["Y"], 1: attribute_statistic.outputs["Min"], 2: attribute_statistic.outputs["Max"]}) - - sample_curve = nw.new_node(Nodes.SampleCurve, - input_kwargs={'Curve': set_position_2, 'Factor': map_range.outputs["Result"]}, - attrs={'mode': 'FACTOR'}) - - separate_xyz_2 = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': sample_curve.outputs["Position"]}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': separate_xyz.outputs["X"], 'Y': separate_xyz_2.outputs["Y"], 'Z': separate_xyz_2.outputs["Z"]}) - - normalize = nw.new_node(Nodes.VectorMath, - input_kwargs={0: sample_curve.outputs["Position"]}, - attrs={'operation': 'NORMALIZE'}) - - multiply_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: separate_xyz.outputs["Z"], 1: normalize.outputs["Vector"]}, - attrs={'operation': 'MULTIPLY'}) - - add = nw.new_node(Nodes.VectorMath, - input_kwargs={0: combine_xyz, 1: multiply_1.outputs["Vector"]}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': transform_2, 'Position': add.outputs["Vector"]}) - - subtract_2 = nw.new_node(Nodes.Math, - input_kwargs={0: 0.0, 1: radians}, - attrs={'operation': 'SUBTRACT'}) - - combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'Z': subtract_2}) - - transform_3 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': set_position, 'Rotation': combine_xyz_3}) - - combine_xyz_4 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'Z': divide_1}) - - transform_4 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': transform_3, 'Translation': combine_xyz_4}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': transform_4}) - -class LeafFactoryWrapped(AssetFactory): - - def __init__(self, factory_seed, season='autumn', coarse=False): - super().__init__(factory_seed, coarse=coarse) - self.factory_list = [ - LeafFactoryMaple(factory_seed, season=season, coarse=coarse), - LeafFactoryBroadleaf(factory_seed, season=season, coarse=coarse), - LeafFactoryGinko(factory_seed, season=season, coarse=coarse), - ] - - def create_asset(self, **params): - - fac_id = randint(len(self.factory_list)) - fac = self.factory_list[fac_id] - - wrap_params = { - 'angle': uniform(-70, 70), - 'rotation': uniform(0.2, 2.0), - 'end_radius': np.exp(uniform(-2.0, 2.0)), - 'noise_level': uniform(0.0, 0.5) - } - - obj = fac.create_asset() - surface.add_geomod(obj, nodegroup_nodegroup_apply_wrap, apply=False, input_kwargs=wrap_params) - - bpy.ops.object.convert(target='MESH') - - return obj \ No newline at end of file diff --git a/infinigen/assets/lighting/__init__.py b/infinigen/assets/lighting/__init__.py index daa84a74e..0a1166c69 100644 --- a/infinigen/assets/lighting/__init__.py +++ b/infinigen/assets/lighting/__init__.py @@ -5,7 +5,4 @@ from . 
import sky_lighting from .caustics_lamp import CausticsLampFactory -from .ceiling_lights import CeilingLightFactory -from .ceiling_classic_lamp import CeilingClassicLampFactory from .indoor_lights import PointLampFactory -from .lamp import LampFactory, DeskLampFactory, FloorLampFactory diff --git a/infinigen/assets/lighting/caustics_lamp.py b/infinigen/assets/lighting/caustics_lamp.py index fcb374a98..3a704f717 100644 --- a/infinigen/assets/lighting/caustics_lamp.py +++ b/infinigen/assets/lighting/caustics_lamp.py @@ -6,108 +6,148 @@ import bpy -from mathutils import Vector - -from numpy.random import uniform as U, normal as N, randint, uniform import numpy as np +from mathutils import Vector +from numpy.random import normal as N +from numpy.random import randint, uniform +from numpy.random import uniform as U -from infinigen.core.util.random import log_uniform -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler from infinigen.core.nodes import node_utils -from infinigen.core.placement import placement -from infinigen.core.placement.placement import placeholder_locs -from infinigen.core.util.math import FixedSeed +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler from infinigen.core.placement.factory import AssetFactory +from infinigen.core.util.math import FixedSeed +from infinigen.core.util.random import log_uniform -@node_utils.to_nodegroup('nodegroup_caustics', singleton=False, type='ShaderNodeTree') +@node_utils.to_nodegroup("nodegroup_caustics", singleton=False, type="ShaderNodeTree") def nodegroup_caustics(nw: NodeWrangler): # Code generated using version 2.4.3 of the node_transpiler - group_input = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketVector', 'Vector', (0.0, 0.0, 0.0)), - ('NodeSocketFloat', 'Prewarp', 0.15), ('NodeSocketFloat', 'Scale', 0.0), - ('NodeSocketFloat', 'Smoothness', 0.0), ('NodeSocketFloat', 'AnimSpeed', .02)]) - - w = nw.new_node(Nodes.Value, label='W') + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVector", "Vector", (0.0, 0.0, 0.0)), + ("NodeSocketFloat", "Prewarp", 0.15), + ("NodeSocketFloat", "Scale", 0.0), + ("NodeSocketFloat", "Smoothness", 0.0), + ("NodeSocketFloat", "AnimSpeed", 0.02), + ], + ) + + w = nw.new_node(Nodes.Value, label="W") w.outputs[0].default_value = 0.0 - multiply = nw.new_node(Nodes.Math, input_kwargs={1: group_input.outputs["AnimSpeed"]}, - attrs={'operation': 'MULTIPLY'}) - driver = multiply.inputs[0].driver_add('default_value').driver + multiply = nw.new_node( + Nodes.Math, + input_kwargs={1: group_input.outputs["AnimSpeed"]}, + attrs={"operation": "MULTIPLY"}, + ) + driver = multiply.inputs[0].driver_add("default_value").driver driver.expression = f"frame / {log_uniform(100, 200)}" - noise_texture = nw.new_node(Nodes.NoiseTexture, input_kwargs={ - 'Vector': group_input.outputs["Vector"], - 'W': multiply, - 'Scale': log_uniform(2, 8), - 'Roughness': N(0.5, 0.05), - 'Distortion': N(0.5, 0.02) - }, attrs={'noise_dimensions': '4D'}) - - scale = nw.new_node(Nodes.VectorMath, - input_kwargs={0: noise_texture.outputs["Color"], 'Scale': group_input.outputs["Prewarp"] - }, attrs={'operation': 'SCALE'}) - - add = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["Vector"], 1: scale.outputs["Vector"]}) - - voronoi_texture_1 = nw.new_node(Nodes.VoronoiTexture, input_kwargs={ - 'Vector': add.outputs["Vector"], - 'W': multiply, - 'Scale': group_input.outputs["Scale"], - 'Smoothness': group_input.outputs["Smoothness"] - }, 
attrs={'voronoi_dimensions': '4D', 'feature': 'SMOOTH_F1'}) - - add_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Smoothness"], 1: U(.04, .08)}) - - voronoi_texture = nw.new_node(Nodes.VoronoiTexture, input_kwargs={ - 'Vector': add.outputs["Vector"], - 'W': multiply, - 'Scale': group_input.outputs["Scale"], - 'Smoothness': add_1 - }, attrs={'voronoi_dimensions': '4D', 'feature': 'SMOOTH_F1'}) - - difference = nw.scalar_multiply(nw.math('ABSOLUTE', nw.scalar_sub(voronoi_texture, voronoi_texture_1)), - 20.0) - - noise = nw.math('ABSOLUTE', - nw.scalar_sub(nw.new_node(Nodes.NoiseTexture, input_kwargs={'Scale': uniform(2, 5)}), .5)) - noise = nw.new_node(Nodes.MapRange, [noise, 0, 1, .6, 1.]) - ramp = nw.new_node(Nodes.FloatCurve, input_kwargs={'Value': nw.scalar_multiply(difference, noise)}) - node_utils.assign_curve(ramp.mapping.curves[0], - [(0.0, 0.0), (0.19, 0.08), (0.34, 1.0), (1.0, 1.0)], - handles=['AUTO', 'AUTO', 'VECTOR', 'VECTOR']) - - - nw.new_node(Nodes.GroupOutput, input_kwargs={'Color': ramp}) + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": group_input.outputs["Vector"], + "W": multiply, + "Scale": log_uniform(2, 8), + "Roughness": N(0.5, 0.05), + "Distortion": N(0.5, 0.02), + }, + attrs={"noise_dimensions": "4D"}, + ) + + scale = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: noise_texture.outputs["Color"], + "Scale": group_input.outputs["Prewarp"], + }, + attrs={"operation": "SCALE"}, + ) + + add = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: group_input.outputs["Vector"], 1: scale.outputs["Vector"]}, + ) + + voronoi_texture_1 = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={ + "Vector": add.outputs["Vector"], + "W": multiply, + "Scale": group_input.outputs["Scale"], + "Smoothness": group_input.outputs["Smoothness"], + }, + attrs={"voronoi_dimensions": "4D", "feature": "SMOOTH_F1"}, + ) + + add_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Smoothness"], 1: U(0.04, 0.08)}, + ) + + voronoi_texture = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={ + "Vector": add.outputs["Vector"], + "W": multiply, + "Scale": group_input.outputs["Scale"], + "Smoothness": add_1, + }, + attrs={"voronoi_dimensions": "4D", "feature": "SMOOTH_F1"}, + ) + + difference = nw.scalar_multiply( + nw.math("ABSOLUTE", nw.scalar_sub(voronoi_texture, voronoi_texture_1)), 20.0 + ) + + noise = nw.math( + "ABSOLUTE", + nw.scalar_sub( + nw.new_node(Nodes.NoiseTexture, input_kwargs={"Scale": uniform(2, 5)}), 0.5 + ), + ) + noise = nw.new_node(Nodes.MapRange, [noise, 0, 1, 0.6, 1.0]) + ramp = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": nw.scalar_multiply(difference, noise)} + ) + node_utils.assign_curve( + ramp.mapping.curves[0], + [(0.0, 0.0), (0.19, 0.08), (0.34, 1.0), (1.0, 1.0)], + handles=["AUTO", "AUTO", "VECTOR", "VECTOR"], + ) + + nw.new_node(Nodes.GroupOutput, input_kwargs={"Color": ramp}) def shader_caustic_lamp(nw: NodeWrangler, params: dict): coord = nw.new_node(Nodes.TextureCoord) - caustics = nw.new_node(nodegroup_caustics().name, - input_kwargs={'Vector': coord.outputs['Normal'], **params}) - emission = nw.new_node(Nodes.Emission, input_kwargs={'Strength': caustics}) + caustics = nw.new_node( + nodegroup_caustics().name, + input_kwargs={"Vector": coord.outputs["Normal"], **params}, + ) + emission = nw.new_node(Nodes.Emission, input_kwargs={"Strength": caustics}) nw.new_node(Nodes.LightOutput, [emission]) class CausticsLampFactory(AssetFactory): - def __init__(self, factory_seed): 
super(CausticsLampFactory, self).__init__(factory_seed) with FixedSeed(factory_seed): self.params = { - 'Prewarp': U(0.1, 0.5), - 'Scale': U(30, 100), - 'Smoothness': 0.2 * N(1, 0.1), - 'AnimSpeed': 0.1 * N(1, 0.1) + "Prewarp": U(0.1, 0.5), + "Scale": U(30, 100), + "Smoothness": 0.2 * N(1, 0.1), + "AnimSpeed": 0.1 * N(1, 0.1), } def create_asset(self, **params) -> bpy.types.Object: - bpy.ops.object.light_add(type='SPOT') + bpy.ops.object.light_add(type="SPOT") lamp = bpy.context.active_object lamp.data.shadow_soft_size = 0 lamp.data.spot_blend = 1 - lamp.data.spot_size = np.pi * .4 + lamp.data.spot_size = np.pi * 0.4 lamp.rotation_euler = 0, 0, uniform(0, np.pi * 2) lamp.data.use_nodes = True @@ -117,9 +157,8 @@ def create_asset(self, **params) -> bpy.types.Object: def add_caustics(obj, zoff=200): - fac = CausticsLampFactory(randint(1e7)) loc = Vector(np.array(obj.bound_box).mean(axis=0)) + Vector((0, 0, zoff)) lamp = fac.spawn_asset(0, loc=loc) - lamp.scale = (50, 50, 50) # only affects UI + lamp.scale = (50, 50, 50) # only affects UI lamp.data.energy = U(100e6, 200e6) diff --git a/infinigen/assets/lighting/ceiling_classic_lamp.py b/infinigen/assets/lighting/ceiling_classic_lamp.py deleted file mode 100644 index 60307e7be..000000000 --- a/infinigen/assets/lighting/ceiling_classic_lamp.py +++ /dev/null @@ -1,239 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Stamatis Alexandropoulos - -import bpy -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface -from infinigen.core.placement.factory import AssetFactory -from infinigen.core.util.math import FixedSeed -from infinigen.core.util import blender as butil -from infinigen.core import tagging -from .indoor_lights import PointLampFactory -from infinigen.assets.utils.autobevel import BevelSharp -from infinigen.core.util.color import color_category - -from infinigen.assets.materials.ceiling_light_shaders import shader_lamp_bulb_nonemissive - - -def shader_lamp_material(nw: NodeWrangler): - # Code generated using version 2.6.5 of the node_transpiler - - rgb = nw.new_node(Nodes.RGB) - rgb.outputs[0].default_value = color_category('textile') - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': rgb, 'Subsurface Radius': (0.1000, 0.1000, 0.1000), 'Roughness': uniform(0.2,0.9), 'Sheen': 0.2068, 'Clearcoat Roughness': 0.1436, 'Transmission': 0.4045, 'Transmission Roughness': 0.6932, 'Emission': (0.9858, 0.9858, 0.9858, 1.0000), 'Emission Strength': 0.0000, 'Alpha': 0.8614}) - - voronoi_texture = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Scale': 104.3000, 'Randomness': 0.0000}, - attrs={'feature': 'SMOOTH_F1'}) - - displacement = nw.new_node(Nodes.Displacement, input_kwargs={'Height': voronoi_texture.outputs["Distance"], 'Scale': 0.4000}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': principled_bsdf, 'Displacement': displacement}, - attrs={'is_active_output': True}) - -def shader_inside_medal(nw: NodeWrangler): - # Code generated using version 2.6.5 of the node_transpiler - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': (0.0018, 0.0015, 0.0000, 1.0000), 'Metallic': 
1.0000, 'Roughness': 0.0682}) - - material_output = nw.new_node(Nodes.MaterialOutput, input_kwargs={'Surface': principled_bsdf}, attrs={'is_active_output': True}) - -def shader_cable(nw: NodeWrangler): - # Code generated using version 2.6.5 of the node_transpiler - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': (0.0000, 0.0000, 0.0000, 1.0000), 'Metallic': 1.0000, 'Roughness': 0.4273}) - - material_output = nw.new_node(Nodes.MaterialOutput, input_kwargs={'Surface': principled_bsdf}, attrs={'is_active_output': True}) - - -@node_utils.to_nodegroup('geometry_nodes', singleton=True, type='GeometryNodeTree') -def geometry_nodes(nw: NodeWrangler): - # Code generated using version 2.6.5 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'cable_length', 0.7000), - ('NodeSocketFloat', 'cable_radius', 0.0500), - ('NodeSocketFloat', 'height', 0.0000), - ('NodeSocketFloat', 'bottom_radius', 0.0000), - ('NodeSocketFloat', 'top_radius', 0.0000), - ('NodeSocketFloat', 'Thickness', 0.5000), - ('NodeSocketFloatDistance', 'Amount', 1.0000)]) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': group_input.outputs["cable_length"]}) - - curve_line = nw.new_node(Nodes.CurveLine, input_kwargs={'End': combine_xyz}) - - curve_circle = nw.new_node(Nodes.CurveCircle, input_kwargs={'Resolution': 87, 'Radius': group_input.outputs["cable_radius"]}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, input_kwargs={'Curve': curve_line, 'Profile Curve': curve_circle.outputs["Curve"]}) - - transform_geometry = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': curve_to_mesh, 'Scale': (1.0000, 1.0000, -1.0000)}) - - set_material = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': transform_geometry, 'Material': surface.shaderfunc_to_material(shader_cable)}) - - curve_circle_3 = nw.new_node(Nodes.CurveCircle, input_kwargs={'Radius': group_input.outputs["top_radius"]}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["height"], 1: -0.5000}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_4 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': multiply}) - - transform_geometry_4 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': curve_circle_3.outputs["Curve"], 'Translation': combine_xyz_4}) - - curve_line_3 = nw.new_node(Nodes.CurveLine, input_kwargs={'Start': (-1.0000, 0.0000, 0.0000), 'End': (1.0000, 0.0000, 0.0000)}) - - geometry_to_instance = nw.new_node('GeometryNodeGeometryToInstance', input_kwargs={'Geometry': curve_line_3}) - - reroute = nw.new_node(Nodes.Reroute, input_kwargs={'Input': group_input.outputs["Amount"]}) - - duplicate_elements = nw.new_node(Nodes.DuplicateElements, - input_kwargs={'Geometry': geometry_to_instance, 'Amount': reroute}, - attrs={'domain': 'INSTANCE'}) - - realize_instances_1 = nw.new_node(Nodes.RealizeInstances, input_kwargs={'Geometry': duplicate_elements.outputs["Geometry"]}) - - endpoint_selection_1 = nw.new_node(Nodes.EndpointSelection, input_kwargs={'Start Size': 0}) - - divide = nw.new_node(Nodes.Math, input_kwargs={0: 1.0000, 1: reroute}, attrs={'operation': 'DIVIDE'}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: duplicate_elements.outputs["Duplicate Index"], 1: divide}, - attrs={'operation': 'MULTIPLY'}) - - sample_curve = nw.new_node(Nodes.SampleCurve, - input_kwargs={'Curves': transform_geometry_4, 'Factor': multiply_1}, - attrs={'use_all_curves': True}) - - set_position = nw.new_node(Nodes.SetPosition, - 
input_kwargs={'Geometry': realize_instances_1, 'Selection': endpoint_selection_1, 'Position': sample_curve.outputs["Position"]}) - - endpoint_selection_2 = nw.new_node(Nodes.EndpointSelection, input_kwargs={'End Size': 0}) - - multiply_add = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Thickness"], 2: 0.0000}, - attrs={'operation': 'MULTIPLY_ADD'}) - - curve_circle_4 = nw.new_node(Nodes.CurveCircle, input_kwargs={'Radius': multiply_add}) - - transform_geometry_5 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': curve_circle_4.outputs["Curve"]}) - - sample_curve_1 = nw.new_node(Nodes.SampleCurve, - input_kwargs={'Curves': transform_geometry_5, 'Factor': multiply_1}, - attrs={'use_all_curves': True}) - - set_position_1 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': set_position, 'Selection': endpoint_selection_2, 'Position': sample_curve_1.outputs["Position"]}) - - join_geometry_3 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [transform_geometry_4, set_position_1, transform_geometry_5]}) - - curve_circle_5 = nw.new_node(Nodes.CurveCircle, input_kwargs={'Radius': group_input.outputs["Thickness"]}) - - curve_to_mesh_3 = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': join_geometry_3, 'Profile Curve': curve_circle_5.outputs["Curve"], 'Fill Caps': True}) - - transform_geometry_6 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': curve_to_mesh_3}) - - set_material_1 = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': transform_geometry_6, 'Material': surface.shaderfunc_to_material(shader_inside_medal)}) - - multiply_2 = nw.new_node(Nodes.Math, input_kwargs={0: -1.5000, 1: -0.1000}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': multiply_2}) - - subtract = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["height"], 1: 0.0000}, attrs={'operation': 'SUBTRACT'}) - - multiply_3 = nw.new_node(Nodes.Math, input_kwargs={1: -1.0000}, attrs={'operation': 'MULTIPLY'}) - - multiply_4 = nw.new_node(Nodes.Math, input_kwargs={0: subtract, 1: multiply_3}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': multiply_4}) - - curve_line_2 = nw.new_node(Nodes.CurveLine, input_kwargs={'Start': combine_xyz_1, 'End': combine_xyz_2}) - - spline_parameter = nw.new_node(Nodes.SplineParameter) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': spline_parameter.outputs["Factor"], 3: group_input.outputs["bottom_radius"], 4: group_input.outputs["top_radius"]}) - - set_curve_radius = nw.new_node(Nodes.SetCurveRadius, input_kwargs={'Curve': curve_line_2, 'Radius': map_range.outputs["Result"]}) - - curve_circle_2 = nw.new_node(Nodes.CurveCircle) - - curve_to_mesh_2 = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': set_curve_radius, 'Profile Curve': curve_circle_2.outputs["Curve"]}) - - flip_faces = nw.new_node(Nodes.FlipFaces, input_kwargs={'Mesh': curve_to_mesh_2}) - - extrude_mesh = nw.new_node(Nodes.ExtrudeMesh, input_kwargs={'Mesh': curve_to_mesh_2, 'Offset Scale': 0.0050, 'Individual': False}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [flip_faces, extrude_mesh.outputs["Mesh"]]}) - - transform_geometry_2 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': join_geometry}) - - set_material_2 = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': transform_geometry_2, 'Material': surface.shaderfunc_to_material(shader_lamp_material)}) - - join_geometry_1 = 
nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [set_material_1, set_material_2]}) - - ico_sphere = nw.new_node(Nodes.MeshIcoSphere, input_kwargs={'Radius': 0.0500, 'Subdivisions': 4}) - - set_material_3 = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': ico_sphere.outputs["Mesh"], 'Material': surface.shaderfunc_to_material(shader_lamp_bulb_nonemissive)}) - - join_geometry_2 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [set_material, join_geometry_1, set_material_3]}) - - transform_geometry_3 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': join_geometry_2, 'Rotation': (0.0000, 3.1416, 0.0000)}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': transform_geometry_3}, attrs={'is_active_output': True}) - - - - - -class CeilingClassicLampFactory(AssetFactory): - def __init__(self, factory_seed): - super(CeilingClassicLampFactory, self).__init__(factory_seed) - with FixedSeed(factory_seed): - self.params = { - 'cable_length': uniform(0.6, 0.710), - 'cable_radius': uniform(0.015,0.02), - 'height':uniform(0.4, 0.710), - 'top_radius':uniform(0.05, 0.2), - 'bottom_radius': uniform(0.22,0.35), - 'Thickness': uniform(0.002, 0.006), - 'Amount': randint(1, 8) - } - self.light_factory = PointLampFactory(factory_seed) - - # self.beveler = BevelSharp(mult=uniform(1, 3)) - def create_placeholder(self, **_): - obj = butil.spawn_cube() - butil.modify_mesh( - obj, - 'NODES', - node_group=geometry_nodes(), - ng_inputs=self.params, - apply=True - ) - tagging.tag_system.relabel_obj(obj) - return obj - - def create_asset(self, i, placeholder, face_size, **_): - obj = butil.deep_clone_obj(placeholder, keep_materials=True) - light = self.light_factory.spawn_asset(i) - butil.parent_to(light, obj) - return obj diff --git a/infinigen/assets/lighting/ceiling_lights.py b/infinigen/assets/lighting/ceiling_lights.py deleted file mode 100644 index ccf31fa34..000000000 --- a/infinigen/assets/lighting/ceiling_lights.py +++ /dev/null @@ -1,217 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory -# of this source tree. 
- -# Authors: -# - -# - Alexander Raistrick: add point light - -import bpy -import random -import mathutils -import numpy as np -from numpy.random import uniform as U, normal as N, randint as RI -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category, hsv2rgba -from infinigen.core import surface -from infinigen.core.util import blender as butil - -from infinigen.core.util.math import FixedSeed, clip_gaussian -from infinigen.core.placement.factory import AssetFactory - -from .indoor_lights import PointLampFactory -from infinigen.assets.utils.autobevel import BevelSharp -from infinigen.assets.material_assignments import AssetList - - -class CeilingLightFactory(AssetFactory): - def __init__(self, factory_seed, coarse=False, dimensions=[1., 1., 1.]): - super(CeilingLightFactory, self).__init__(factory_seed, coarse=coarse) - - self.dimensions = dimensions - self.ceiling_light_default_params = [{ - "Radius": 0.2, - "Thickness": 0.001, - "InnerRadius": 0.2, - "Height": 0.1, - "InnerHeight": 0.1, - "Curvature": 0.1, - }, { - "Radius": 0.18, - "Thickness": 0.05, - "InnerRadius": 0.18, - "Height": 0.1, - "InnerHeight": 0.1, - "Curvature": 0.25, - }, { - "Radius": 0.2, - "Thickness": 0.005, - "InnerRadius": 0.18, - "Height": 0.1, - "InnerHeight": 0.03, - "Curvature": 0.4, - }] - with FixedSeed(factory_seed): - self.light_factory = PointLampFactory(factory_seed) - self.params = self.sample_parameters(dimensions) - self.material_params, self.scratch, self.edge_wear = self.get_material_params() - - self.params.update(self.material_params) - self.beveler = BevelSharp(mult=U(1, 3)) - - def get_material_params(self): - material_assignments = AssetList['CeilingLightFactory']() - black_material = material_assignments['black_material'].assign_material() - white_material = material_assignments['white_material'].assign_material() - - wrapped_params = { - 'BlackMaterial': surface.shaderfunc_to_material(black_material), - 'WhiteMaterial': surface.shaderfunc_to_material(white_material), - } - scratch_prob, edge_wear_prob = material_assignments['wear_tear_prob'] - scratch, edge_wear = material_assignments['wear_tear'] - - is_scratch = np.random.uniform() < scratch_prob - is_edge_wear = np.random.uniform() < edge_wear_prob - if not is_scratch: - scratch = None - - if not is_edge_wear: - edge_wear = None - - return wrapped_params, scratch, edge_wear - - - def sample_parameters(self, dimensions, use_default=False): - if use_default: - return self.ceiling_light_default_params[RI(0, len(self.ceiling_light_default_params))] - else: - Radius = clip_gaussian(0.12, 0.04, 0.1, 0.25) - Thickness = U(0.005, 0.05) - InnerRadius = Radius * U(0.4, 0.9) - Height = 0.7 * clip_gaussian(0.09, 0.03, 0.07, 0.15) - InnerHeight = Height * U(0.5, 1.1) - Curvature = U(0.1, 0.5) - params = { - "Radius": Radius, - "Thickness": Thickness, - "InnerRadius": InnerRadius, - "Height": Height, - "InnerHeight": InnerHeight, - "Curvature": Curvature, - } - return params - - def create_placeholder(self, i, **params): - obj = butil.spawn_cube() - butil.modify_mesh(obj, 'NODES', node_group=nodegroup_ceiling_light_geometry(), ng_inputs=self.params, apply=True) - return obj - - def create_asset(self, i, placeholder, **params): - obj = butil.copy(placeholder, keep_materials=True) - self.beveler(obj) - - lamp = self.light_factory.spawn_asset(i, loc=(0,0,0), rot=(0,0,0)) - - butil.parent_to(lamp, obj, no_transform=True, no_inverse=True) - 
lamp.location.z -= 0.03 - - return obj - - def finalize_assets(self, assets): - if self.scratch: - self.scratch.apply(assets) - if self.edge_wear: - self.edge_wear.apply(assets) - - -@node_utils.to_nodegroup('nodegroup_ceiling_light_geometry', singleton=True, type='GeometryNodeTree') -def nodegroup_ceiling_light_geometry(nw: NodeWrangler): - # Code generated using version 2.6.5 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloatDistance', 'Radius', 0.2000), - ('NodeSocketFloat', 'Thickness', 0.0050), - ('NodeSocketFloat', 'InnerRadius', 0.1800), - ('NodeSocketFloat', 'Height', 0.1000), - ('NodeSocketFloat', 'InnerHeight', 0.0300), - ('NodeSocketFloat', 'Curvature', 0.4000), - ('NodeSocketMaterial', 'BlackMaterial', None), - ('NodeSocketMaterial', 'WhiteMaterial', None)]) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Height"], 1: -1.0000}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': multiply}) - - curve_line = nw.new_node(Nodes.CurveLine, input_kwargs={'End': combine_xyz}) - - curve_circle = nw.new_node(Nodes.CurveCircle, input_kwargs={'Resolution': 512, 'Radius': group_input.outputs["Radius"]}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, input_kwargs={'Curve': curve_line, 'Profile Curve': curve_circle.outputs["Curve"]}) - - extrude_mesh = nw.new_node(Nodes.ExtrudeMesh, - input_kwargs={'Mesh': curve_to_mesh, 'Offset Scale': group_input.outputs["Thickness"], 'Individual': False}) - - flip_faces = nw.new_node(Nodes.FlipFaces, input_kwargs={'Mesh': curve_to_mesh}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [extrude_mesh.outputs["Mesh"], flip_faces]}) - - set_shade_smooth = nw.new_node(Nodes.SetShadeSmooth, input_kwargs={'Geometry': join_geometry, 'Shade Smooth': False}) - - mesh_circle = nw.new_node(Nodes.MeshCircle, input_kwargs={'Radius': group_input.outputs["Radius"]}, attrs={'fill_type': 'NGON'}) - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [set_shade_smooth, mesh_circle]}) - - set_material = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': join_geometry_1, 'Material': group_input.outputs["BlackMaterial"]}) - - ico_sphere_1 = nw.new_node(Nodes.MeshIcoSphere, input_kwargs={'Radius': group_input.outputs["InnerRadius"], 'Subdivisions': 5}) - - store_named_attribute = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': ico_sphere_1.outputs["Mesh"], 'Name': 'UVMap', 3: ico_sphere_1.outputs["UV Map"]}, - attrs={'domain': 'CORNER', 'data_type': 'FLOAT_VECTOR'}) - - position_2 = nw.new_node(Nodes.InputPosition) - - separate_xyz_2 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': position_2}) - - less_than = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz_2.outputs["Z"], 1: 0.0010}, attrs={'operation': 'LESS_THAN'}) - - separate_geometry_1 = nw.new_node(Nodes.SeparateGeometry, input_kwargs={'Geometry': store_named_attribute, 'Selection': less_than}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["InnerHeight"], 1: -1.0000}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': multiply_1}) - - combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': 1.0000, 'Y': 1.0000, 'Z': group_input.outputs["Curvature"]}) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': separate_geometry_1.outputs["Selection"], 'Translation': combine_xyz_2, 
'Scale': combine_xyz_3}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': multiply_1}) - - curve_line_1 = nw.new_node(Nodes.CurveLine, input_kwargs={'Start': (0.0000, 0.0000, -0.0010), 'End': combine_xyz_1}) - - curve_circle_1 = nw.new_node(Nodes.CurveCircle, input_kwargs={'Radius': group_input.outputs["InnerRadius"]}) - - curve_to_mesh_1 = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': curve_line_1, 'Profile Curve': curve_circle_1.outputs["Curve"], 'Fill Caps': True}) - - join_geometry_2 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [transform, curve_to_mesh_1]}) - - set_material_1 = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': join_geometry_2, 'Material': group_input.outputs["WhiteMaterial"]}) - - join_geometry_3 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [set_material, set_material_1]}) - - bounding_box = nw.new_node(Nodes.BoundingBox, input_kwargs={'Geometry': join_geometry_3}) - - vector = nw.new_node(Nodes.Vector) - vector.vector = (0.0000, 0.0000, 0.0000) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': join_geometry_3, 'Bounding Box': bounding_box.outputs["Bounding Box"], 'LightPosition': vector}, - attrs={'is_active_output': True}) \ No newline at end of file diff --git a/infinigen/assets/lighting/hdri_lighting.py b/infinigen/assets/lighting/hdri_lighting.py index 89aad92b6..c769dcc20 100644 --- a/infinigen/assets/lighting/hdri_lighting.py +++ b/infinigen/assets/lighting/hdri_lighting.py @@ -10,24 +10,35 @@ import numpy as np from numpy.random import uniform -from infinigen.core.nodes import NodeWrangler, Nodes +from infinigen.core.nodes import Nodes, NodeWrangler from infinigen.core.util.random import random_general as rg HDRI_RESOURCES = f"{os.getcwd()}/resources/hdri" @gin.configurable -def hdri_lighting(nw: NodeWrangler, strength=("uniform", 0.8, 1.2), ): - suffixes = [f for f in os.listdir(HDRI_RESOURCES) if f.endswith('.exr')] +def hdri_lighting( + nw: NodeWrangler, + strength=("uniform", 0.8, 1.2), +): + suffixes = [f for f in os.listdir(HDRI_RESOURCES) if f.endswith(".exr")] suffix = np.random.choice(suffixes) - image = bpy.data.images.load(filepath=f"{HDRI_RESOURCES}/{suffix}",check_existing=True) + image = bpy.data.images.load( + filepath=f"{HDRI_RESOURCES}/{suffix}", check_existing=True + ) texture_coord = nw.new_node(Nodes.TextureCoord) - coord = nw.new_node(Nodes.Mapping, [texture_coord], input_kwargs={'Rotation': (0, 0, uniform(np.pi * 2))}) - texture = nw.new_node(Nodes.EnvironmentTexture, [coord], attrs={'image': image}) - return nw.new_node(Nodes.Background, input_kwargs={'Color': texture, 'Strength': rg(strength)}) + coord = nw.new_node( + Nodes.Mapping, + [texture_coord], + input_kwargs={"Rotation": (0, 0, uniform(np.pi * 2))}, + ) + texture = nw.new_node(Nodes.EnvironmentTexture, [coord], attrs={"image": image}) + return nw.new_node( + Nodes.Background, input_kwargs={"Color": texture, "Strength": rg(strength)} + ) def add_lighting(): nw = NodeWrangler(bpy.context.scene.world.node_tree) surface = hdri_lighting(nw) - nw.new_node(Nodes.WorldOutput, input_kwargs={'Surface': surface}) + nw.new_node(Nodes.WorldOutput, input_kwargs={"Surface": surface}) diff --git a/infinigen/assets/lighting/holdout_lighting.py b/infinigen/assets/lighting/holdout_lighting.py index b4d177edf..8eee37a2d 100644 --- a/infinigen/assets/lighting/holdout_lighting.py +++ b/infinigen/assets/lighting/holdout_lighting.py @@ -10,25 +10,36 @@ import numpy as np from numpy.random import uniform 
-from infinigen.core.nodes import NodeWrangler, Nodes +from infinigen.core.nodes import Nodes, NodeWrangler from infinigen.core.util.random import random_general as rg HOLDOUT_RESOURCES = f"{os.getcwd()}/resources/holdout" @gin.configurable -def holdout_lighting(nw: NodeWrangler, strength=("uniform", 0.8, 1.2), ): - suffixes = [f for f in os.listdir(HOLDOUT_RESOURCES) if f.endswith('.png')] +def holdout_lighting( + nw: NodeWrangler, + strength=("uniform", 0.8, 1.2), +): + suffixes = [f for f in os.listdir(HOLDOUT_RESOURCES) if f.endswith(".png")] suffix = np.random.choice(suffixes) - image = bpy.data.images.load(filepath=f"{HOLDOUT_RESOURCES}/{suffix}",check_existing=True) + image = bpy.data.images.load( + filepath=f"{HOLDOUT_RESOURCES}/{suffix}", check_existing=True + ) texture_coord = nw.new_node(Nodes.TextureCoord) - coord = nw.new_node(Nodes.Mapping, [texture_coord], input_kwargs={'Rotation': (0, 0, uniform(np.pi * 2))}) - texture = nw.new_node(Nodes.EnvironmentTexture, [coord], attrs={'image': image}) - return nw.new_node(Nodes.Background, input_kwargs={'Color': texture, 'Strength': rg(strength)}) + coord = nw.new_node( + Nodes.Mapping, + [texture_coord], + input_kwargs={"Rotation": (0, 0, uniform(np.pi * 2))}, + ) + texture = nw.new_node(Nodes.EnvironmentTexture, [coord], attrs={"image": image}) + return nw.new_node( + Nodes.Background, input_kwargs={"Color": texture, "Strength": rg(strength)} + ) def add_lighting(): nw = NodeWrangler(bpy.context.scene.world.node_tree) surface = holdout_lighting(nw) - nw.new_node(Nodes.WorldOutput, input_kwargs={'Surface': surface}) + nw.new_node(Nodes.WorldOutput, input_kwargs={"Surface": surface}) bpy.context.scene.world.cycles_visibility.camera = False diff --git a/infinigen/assets/lighting/indoor_lights.py b/infinigen/assets/lighting/indoor_lights.py index a4f02e9bc..8ada1b331 100644 --- a/infinigen/assets/lighting/indoor_lights.py +++ b/infinigen/assets/lighting/indoor_lights.py @@ -5,52 +5,47 @@ # Authors: Alexander Raistrick import bpy -from mathutils import Vector +from numpy.random import uniform as U -from numpy.random import uniform as U, normal as N, randint, uniform -import numpy as np - -from infinigen.core.util.random import log_uniform from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.placement import placement -from infinigen.core.placement.placement import placeholder_locs -from infinigen.core.util.math import FixedSeed from infinigen.core.placement.factory import AssetFactory from infinigen.core.util import blender as butil +from infinigen.core.util.math import FixedSeed from infinigen.core.util.random import clip_gaussian def shader_blackbody_temp(nw, params): - blackbody = nw.new_node(Nodes.BlackBody, input_kwargs={'Temperature': params['Temperature']}) - emission = nw.new_node(Nodes.Emission, input_kwargs={'Color': blackbody}) + blackbody = nw.new_node( + Nodes.BlackBody, input_kwargs={"Temperature": params["Temperature"]} + ) + emission = nw.new_node(Nodes.Emission, input_kwargs={"Color": blackbody}) nw.new_node(Nodes.LightOutput, [emission]) -class PointLampFactory(AssetFactory): +class PointLampFactory(AssetFactory): def __init__(self, factory_seed): super().__init__(factory_seed) with FixedSeed(factory_seed): self.params = { - 'Wattage': U(40, 100), - 'Radius': U(0.02, 0.03), - 'Temperature': clip_gaussian(4700, 700, 3500, 6500) + "Wattage": U(40, 100), + "Radius": U(0.02, 0.03), + "Temperature": clip_gaussian(4700, 700, 3500, 6500), } def 
create_placeholder(self, **_): cube = butil.spawn_cube(size=2) - cube.scale = (self.params['Radius'],) * 3 + cube.scale = (self.params["Radius"],) * 3 butil.apply_transform(cube) return cube def create_asset(self, **_) -> bpy.types.Object: - bpy.ops.object.light_add(type='POINT') + bpy.ops.object.light_add(type="POINT") lamp = bpy.context.active_object - lamp.data.energy = self.params['Wattage'] - lamp.data.shadow_soft_size = self.params['Radius'] + lamp.data.energy = self.params["Wattage"] + lamp.data.shadow_soft_size = self.params["Radius"] lamp.data.use_nodes = True nw = NodeWrangler(lamp.data.node_tree) shader_blackbody_temp(nw, params=self.params) - return lamp \ No newline at end of file + return lamp diff --git a/infinigen/assets/lighting/lamp.py b/infinigen/assets/lighting/lamp.py deleted file mode 100644 index f78caded8..000000000 --- a/infinigen/assets/lighting/lamp.py +++ /dev/null @@ -1,548 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory -# of this source tree. - -# Authors: -# - Hongyu Wen: primary author -# - Alexander Raistrick: add point light - -import bpy -import random -import mathutils -import numpy as np -from numpy.random import uniform as U, normal as N, randint as RI -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface -from infinigen.core.util import blender as butil - -from infinigen.core.util.math import FixedSeed -from infinigen.core.placement.factory import AssetFactory -from .indoor_lights import PointLampFactory -from infinigen.assets.material_assignments import AssetList - -class LampFactory(AssetFactory): - def __init__(self, factory_seed, coarse=False, dimensions=[1., 1., 1.], lamp_type="FloorLamp"): - super(LampFactory, self).__init__(factory_seed, coarse=coarse) - - self.bulb_fac = PointLampFactory(factory_seed) - self.bulb_fac.params['Temperature'] = max(self.bulb_fac.params['Temperature'] * 0.6, 2500) - self.bulb_fac.params['Wattage'] *= 0.5 - - self.dimensions = dimensions - self.lamp_type = lamp_type - self.lamp_default_params = { - "DeskLamp":{ - "StandRadius": 0.01, - "StandHeight": 0.3, - "BaseRadius": 0.07, - "BaseHeight": 0.02, - "ShadeHeight": 0.18, - "HeadTopRadius": 0.08, - "HeadBotRadius": 0.11, - "ReverseLamp": True, - "RackThickness": 0.002, - "CurvePoint1": (0.0, 0.0, 0.0), - "CurvePoint2": (0.0, 0.0, 0.2), - "CurvePoint3": (0.0, 0.0, 0.3) - }, - "FloorLamp1": { - "StandRadius": 0.01, - "StandHeight": 0.3, - "BaseRadius": 0.1, - "BaseHeight": 0.02, - "ShadeHeight": 0.2, - "HeadTopRadius": 0.1, - "HeadBotRadius": 0.12, - "ReverseLamp": False, - "RackThickness": 0.002, - "CurvePoint1": (0.0, 0.0, 1.0), - "CurvePoint2": (0.05, 0.0, 1.2), - "CurvePoint3": (0.2, 0.0, 1.0) - }, - "FloorLamp2": { - "StandRadius": 0.01, - "StandHeight": 0.3, - "BaseRadius": 0.1, - "BaseHeight": 0.02, - "ShadeHeight": 0.2, - "HeadTopRadius": 0.1, - "HeadBotRadius": 0.11, - "ReverseLamp": True, - "RackThickness": 0.002, - "CurvePoint1": (0.0, 0.0, 1.0), - "CurvePoint2": (0.0, 0.0, 1.1), - "CurvePoint3": (0.0, 0.0, 1.2) - }} - with FixedSeed(factory_seed): - self.params = self.sample_parameters(dimensions) - self.material_params, self.scratch, self.edge_wear = self.get_material_params() - - self.params.update(self.material_params) - - def get_material_params(self): - material_assignments = 
AssetList['LampFactory']() - black_material = material_assignments['black_material'].assign_material() - white_material = material_assignments['metal'].assign_material() - lampshade_material = material_assignments['lampshade'].assign_material() - - wrapped_params = { - 'BlackMaterial': surface.shaderfunc_to_material(black_material), - 'MetalMaterial': surface.shaderfunc_to_material(white_material), - 'LampshadeMaterial': surface.shaderfunc_to_material(lampshade_material) - } - scratch_prob, edge_wear_prob = material_assignments['wear_tear_prob'] - scratch, edge_wear = material_assignments['wear_tear'] - - is_scratch = np.random.uniform() < scratch_prob - is_edge_wear = np.random.uniform() < edge_wear_prob - if not is_scratch: - scratch = None - - if not is_edge_wear: - edge_wear = None - - return wrapped_params, scratch, edge_wear - - def sample_parameters(self, dimensions, use_default=False): - if use_default: - if self.lamp_type == "DeskLamp": - return self.lamp_default_params["DeskLamp"] - else: - return random.choice([self.lamp_default_params["FloorLamp1"], self.lamp_default_params["FloorLamp2"]]) - else: - stand_radius = U(0.005, 0.015) - base_radius = U(0.05, 0.15) - base_height = U(0.01, 0.03) - shade_height = U(0.18, 0.3) - head_top_radius = U(0.07, 0.15) - head_bot_radius = head_top_radius + U(0, 0.05) - rack_thickness = U(0.001, 0.003) - reverse_lamp = True - - if self.lamp_type == "DeskLamp": - height = U(0.25, 0.4) - else: - height = U(1, 1.5) - - z1 = U(base_height, height) - z2 = U(z1, height) - z3 = height - - x1, x2, x3 = 0, 0, 0 - # if self.lamp_type == "FloorLamp" and U() < 0.5: - # x2 = U(0.03, 0.1) - # x3 = U(0.2, 0.4) - # z2, z3 = z3, z2 - # reverse_lamp = False - - params = { - "StandRadius": stand_radius, - "BaseRadius": base_radius, - "BaseHeight": base_height, - "ShadeHeight": shade_height, - "HeadTopRadius": head_top_radius, - "HeadBotRadius": head_bot_radius, - "ReverseLamp": reverse_lamp, - "RackThickness": rack_thickness, - "CurvePoint1": (x1, 0.0, z1), - "CurvePoint2": (x2, 0.0, z2), - "CurvePoint3": (x3, 0.0, z3) - } - return params - - def create_asset(self, i, **params): - obj = butil.spawn_cube() - butil.modify_mesh(obj, 'NODES', node_group=nodegroup_lamp_geometry(), ng_inputs=self.params, apply=True) - - if np.random.uniform() < 0.6: - bulb = self.bulb_fac(i) - butil.parent_to(bulb, obj, no_inverse=True, no_transform=True) - bulb.location.z = obj.bound_box[-2][2] - self.params['ShadeHeight'] * 0.5 - - with butil.SelectObjects(obj): - bpy.ops.object.shade_flat() - - return obj - - def finalize_assets(self, assets): - if self.scratch: - self.scratch.apply(assets) - if self.edge_wear: - self.edge_wear.apply(assets) - -class DeskLampFactory(LampFactory): - - def __init__(self, factory_seed, coarse=False): - super().__init__(factory_seed, coarse=coarse, lamp_type='DeskLamp') - -class FloorLampFactory(LampFactory): - - def __init__(self, factory_seed, coarse=False): - super().__init__(factory_seed, coarse, lamp_type=np.random.choice(['FloorLamp1', 'FloorLamp2'])) - - - - -@node_utils.to_nodegroup('nodegroup_bulb', singleton=False, type='GeometryNodeTree') -def nodegroup_bulb(nw: NodeWrangler): - # Code generated using version 2.6.5 of the node_transpiler - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[ - ('NodeSocketMaterial', 'LampshadeMaterial', None), - ('NodeSocketMaterial', 'MetalMaterial', None)]) - - curve_line_1 = nw.new_node(Nodes.CurveLine, input_kwargs={'Start': (0.0000, 0.0000, -0.2000), 'End': (0.0000, 0.0000, 0.0000)}) - - 
curve_circle_1 = nw.new_node(Nodes.CurveCircle, input_kwargs={'Radius': 0.1500, 'Resolution': 100}) - - curve_to_mesh_1 = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': curve_line_1, 'Profile Curve': curve_circle_1.outputs["Curve"], 'Fill Caps': True}) - - spiral = nw.new_node('GeometryNodeCurveSpiral', - input_kwargs={'Rotations': 5.0000, 'Start Radius': 0.1500, 'End Radius': 0.1500, 'Height': 0.2000}) - - transform = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': spiral, 'Translation': (0.0000, 0.0000, -0.2000)}) - - curve_circle_2 = nw.new_node(Nodes.CurveCircle, input_kwargs={'Radius': 0.0150, 'Resolution': 100}) - - curve_to_mesh_2 = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': transform, 'Profile Curve': curve_circle_2.outputs["Curve"], 'Fill Caps': True}) - - curve_line_2 = nw.new_node(Nodes.CurveLine, input_kwargs={'Start': (0.0000, 0.0000, -0.2000), 'End': (0.0000, 0.0000, -0.3000)}) - - resample_curve_1 = nw.new_node(Nodes.ResampleCurve, input_kwargs={'Curve': curve_line_2, 'Count': 100}) - - spline_parameter_1 = nw.new_node(Nodes.SplineParameter) - - float_curve_1 = nw.new_node(Nodes.FloatCurve, input_kwargs={'Value': spline_parameter_1.outputs["Factor"]}) - node_utils.assign_curve(float_curve_1.mapping.curves[0], [(0.0000, 1.0000), (0.4432, 0.5500), (1.0000, 0.2750)], handles=['AUTO', 'VECTOR', 'AUTO']) - - set_curve_radius_1 = nw.new_node(Nodes.SetCurveRadius, input_kwargs={'Curve': resample_curve_1, 'Radius': float_curve_1}) - - curve_circle_3 = nw.new_node(Nodes.CurveCircle, input_kwargs={'Radius': 0.1500, 'Resolution': 100}) - - curve_to_mesh_3 = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': set_curve_radius_1, 'Profile Curve': curve_circle_3.outputs["Curve"], 'Fill Caps': True}) - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [curve_to_mesh_1, curve_to_mesh_2, curve_to_mesh_3]}) - - set_material = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': join_geometry_1, 'Material': group_input.outputs['MetalMaterial']}) - - curve_line = nw.new_node(Nodes.CurveLine) - - resample_curve = nw.new_node(Nodes.ResampleCurve, input_kwargs={'Curve': curve_line, 'Count': 100}) - - spline_parameter = nw.new_node(Nodes.SplineParameter) - - float_curve = nw.new_node(Nodes.FloatCurve, input_kwargs={'Value': spline_parameter.outputs["Factor"]}) - node_utils.assign_curve(float_curve.mapping.curves[0], [(0.0000, 0.1500), (0.0500, 0.1700), (0.1500, 0.2000), (0.5500, 0.3800), (0.8000, 0.3500), (0.9568, 0.2200), (1.0000, 0.0000)]) - - set_curve_radius = nw.new_node(Nodes.SetCurveRadius, input_kwargs={'Curve': resample_curve, 'Radius': float_curve}) - - curve_circle = nw.new_node(Nodes.CurveCircle, input_kwargs={'Resolution': 100}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': set_curve_radius, 'Profile Curve': curve_circle.outputs["Curve"]}) - - set_material_1 = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': curve_to_mesh, 'Material': group_input.outputs['LampshadeMaterial']}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [set_material, set_material_1]}) - - transform_geometry = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': join_geometry, 'Translation': (0.0000, 0.0000, 0.3000)}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': transform_geometry}, attrs={'is_active_output': True}) - - -@node_utils.to_nodegroup('nodegroup_bulb_rack', singleton=False, type='GeometryNodeTree') -def nodegroup_bulb_rack(nw: 
NodeWrangler): - # Code generated using version 2.6.5 of the node_transpiler - - amount = nw.new_node(Nodes.GroupInput, - label='amount', - expose_input=[('NodeSocketFloatDistance', 'Thickness', 0.0200), - ('NodeSocketInt', 'Amount', 3), - ('NodeSocketFloatDistance', 'InnerRadius', 1.0000), - ('NodeSocketFloatDistance', 'OuterRadius', 1.0000), - ('NodeSocketFloat', 'InnerHeight', 0.0000), - ('NodeSocketFloat', 'OuterHeight', 0.0000)]) - - curve_circle_2 = nw.new_node(Nodes.CurveCircle, input_kwargs={'Radius': amount.outputs["OuterRadius"], 'Resolution': 100}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': amount.outputs["OuterHeight"]}) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': curve_circle_2.outputs["Curve"], 'Translation': combine_xyz}) - - curve_line = nw.new_node(Nodes.CurveLine, input_kwargs={'Start': (-1.0000, 0.0000, 0.0000), 'End': (1.0000, 0.0000, 0.0000)}) - - geometry_to_instance = nw.new_node('GeometryNodeGeometryToInstance', input_kwargs={'Geometry': curve_line}) - - reroute = nw.new_node(Nodes.Reroute, input_kwargs={'Input': amount.outputs["Amount"]}) - - duplicate_elements = nw.new_node(Nodes.DuplicateElements, - input_kwargs={'Geometry': geometry_to_instance, 'Amount': reroute}, - attrs={'domain': 'INSTANCE'}) - - realize_instances = nw.new_node(Nodes.RealizeInstances, input_kwargs={'Geometry': duplicate_elements.outputs["Geometry"]}) - - endpoint_selection = nw.new_node(Nodes.EndpointSelection, input_kwargs={'Start Size': 0}) - - divide = nw.new_node(Nodes.Math, input_kwargs={0: 1.0000, 1: reroute}, attrs={'operation': 'DIVIDE'}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: duplicate_elements.outputs["Duplicate Index"], 1: divide}, - attrs={'operation': 'MULTIPLY'}) - - sample_curve = nw.new_node(Nodes.SampleCurve, - input_kwargs={'Curves': transform, 'Factor': multiply}, - attrs={'use_all_curves': True}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': realize_instances, 'Selection': endpoint_selection, 'Position': sample_curve.outputs["Position"]}) - - endpoint_selection_1 = nw.new_node(Nodes.EndpointSelection, input_kwargs={'End Size': 0}) - - multiply_add = nw.new_node(Nodes.Math, - input_kwargs={0: amount.outputs["Thickness"], 2: amount.outputs["InnerRadius"]}, - attrs={'operation': 'MULTIPLY_ADD'}) - - curve_circle = nw.new_node(Nodes.CurveCircle, input_kwargs={'Radius': multiply_add, 'Resolution': 100}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': amount.outputs["InnerHeight"]}) - - transform_1 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': curve_circle.outputs["Curve"], 'Translation': combine_xyz_1}) - - sample_curve_1 = nw.new_node(Nodes.SampleCurve, - input_kwargs={'Curves': transform_1, 'Factor': multiply}, - attrs={'use_all_curves': True}) - - set_position_1 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': set_position, 'Selection': endpoint_selection_1, 'Position': sample_curve_1.outputs["Position"]}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [transform, set_position_1, transform_1]}) - - curve_circle_1 = nw.new_node(Nodes.CurveCircle, input_kwargs={'Radius': amount.outputs["Thickness"], 'Resolution': 100}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': join_geometry, 'Profile Curve': curve_circle_1.outputs["Curve"], 'Fill Caps': True}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': curve_to_mesh}, attrs={'is_active_output': 
True}) - -@node_utils.to_nodegroup('nodegroup_reversiable_bulb', singleton=False, type='GeometryNodeTree') -def nodegroup_reversiable_bulb(nw: NodeWrangler): - # Code generated using version 2.6.5 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Scale', 0.3000), - ('NodeSocketBool', 'Reverse', False), - ('NodeSocketMaterial', 'BlackMaterial', None), - ('NodeSocketMaterial', 'LampshadeMaterial', None), - ('NodeSocketMaterial', 'MetalMaterial', None)]) - - bulb = nw.new_node(nodegroup_bulb().name, input_kwargs={'LampshadeMaterial': group_input.outputs["LampshadeMaterial"], - 'MetalMaterial': group_input.outputs["MetalMaterial"]}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': group_input.outputs["Scale"], 'Y': group_input.outputs["Scale"], 'Z': group_input.outputs["Scale"]}) - - transform = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': bulb, 'Scale': combine_xyz_1}) - - geometry_to_instance = nw.new_node('GeometryNodeGeometryToInstance', input_kwargs={'Geometry': transform}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Reverse"], 1: 3.1415}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': multiply}) - - rotate_instances = nw.new_node(Nodes.RotateInstances, input_kwargs={'Instances': geometry_to_instance, 'Rotation': combine_xyz_2}) - - multiply_add = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Reverse"], 1: 2.0000, 2: -1.0000}, - attrs={'operation': 'MULTIPLY_ADD'}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: -0.0150, 1: multiply_add}, attrs={'operation': 'MULTIPLY'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': rotate_instances, 'RackSupport': multiply_1}, - attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_lamp_head', singleton=False, type='GeometryNodeTree') -def nodegroup_lamp_head(nw: NodeWrangler): - # Code generated using version 2.6.5 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'ShadeHeight', 0.0000), - ('NodeSocketFloat', 'TopRadius', 0.3000), - ('NodeSocketFloat', 'BotRadius', 0.5000), - ('NodeSocketBool', 'ReverseBulb', True), - ('NodeSocketFloatDistance', 'RackThickness', 0.0050), - ('NodeSocketFloat', 'RackHeight', 0.5000), - ('NodeSocketMaterial', 'BlackMaterial', None), - ('NodeSocketMaterial', 'LampshadeMaterial', None), - ('NodeSocketMaterial', 'MetalMaterial', None)]) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["TopRadius"], 1: 0.8000}, - attrs={'operation': 'MULTIPLY'}) - - reversiable_bulb = nw.new_node(nodegroup_reversiable_bulb().name, - input_kwargs={'Scale': multiply, - 'BlackMaterial': group_input.outputs["BlackMaterial"], - 'LampshadeMaterial': group_input.outputs["LampshadeMaterial"], - 'MetalMaterial': group_input.outputs["MetalMaterial"]}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: 0.1500}, attrs={'operation': 'MULTIPLY'}) - - multiply_add = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["ReverseBulb"], 1: 2.0000, 2: -1.0000}, - attrs={'operation': 'MULTIPLY_ADD'}) - - multiply_2 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["RackHeight"], 1: multiply_add}, - attrs={'operation': 'MULTIPLY'}) - - bulb_rack = nw.new_node(nodegroup_bulb_rack().name, - input_kwargs={'Thickness': group_input.outputs["RackThickness"], 'InnerRadius': multiply_1, 
'OuterRadius': group_input.outputs["TopRadius"], 'InnerHeight': reversiable_bulb.outputs["RackSupport"], 'OuterHeight': multiply_2}) - - set_material = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': bulb_rack, 'Material': group_input.outputs["BlackMaterial"]}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': multiply_2}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["ShadeHeight"], 1: group_input.outputs["RackHeight"]}, - attrs={'operation': 'SUBTRACT'}) - - multiply_3 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_add, 1: -1.0000}, attrs={'operation': 'MULTIPLY'}) - - multiply_4 = nw.new_node(Nodes.Math, input_kwargs={0: subtract, 1: multiply_3}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': multiply_4}) - - curve_line = nw.new_node(Nodes.CurveLine, input_kwargs={'Start': combine_xyz_1, 'End': combine_xyz}) - - spline_parameter = nw.new_node(Nodes.SplineParameter) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': spline_parameter.outputs["Factor"], 3: group_input.outputs["TopRadius"], 4: group_input.outputs["BotRadius"]}) - - set_curve_radius = nw.new_node(Nodes.SetCurveRadius, input_kwargs={'Curve': curve_line, 'Radius': map_range.outputs["Result"]}) - - curve_circle = nw.new_node(Nodes.CurveCircle, input_kwargs={'Resolution': 100}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': set_curve_radius, 'Profile Curve': curve_circle.outputs["Curve"]}) - - flip_faces = nw.new_node(Nodes.FlipFaces, input_kwargs={'Mesh': curve_to_mesh}) - - extrude_mesh = nw.new_node(Nodes.ExtrudeMesh, input_kwargs={'Mesh': curve_to_mesh, 'Offset Scale': 0.0050, 'Individual': False}) - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [flip_faces, extrude_mesh.outputs["Mesh"]]}) - - set_material_1 = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': join_geometry_1, 'Material': group_input.outputs["LampshadeMaterial"]}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [reversiable_bulb.outputs["Geometry"], set_material, set_material_1]}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': join_geometry}, attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_lamp_geometry', singleton=False, type='GeometryNodeTree') -def nodegroup_lamp_geometry(nw: NodeWrangler): - # Code generated using version 2.6.5 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloatDistance', 'StandRadius', 0.0200), - ('NodeSocketFloatDistance', 'BaseRadius', 0.1000), - ('NodeSocketFloat', 'BaseHeight', 0.0200), - ('NodeSocketFloat', 'ShadeHeight', 0.0000), - ('NodeSocketFloat', 'HeadTopRadius', 0.3000), - ('NodeSocketFloat', 'HeadBotRadius', 0.5000), - ('NodeSocketBool', 'ReverseLamp', True), - ('NodeSocketFloatDistance', 'RackThickness', 0.0050), - ('NodeSocketVectorTranslation', 'CurvePoint1', (0.0000, 0.0000, 0.0000)), - ('NodeSocketVectorTranslation', 'CurvePoint2', (0.0000, 0.0000, 0.0000)), - ('NodeSocketVectorTranslation', 'CurvePoint3', (0.0000, 0.0000, 0.0000)), - ('NodeSocketMaterial', 'BlackMaterial', None), - ('NodeSocketMaterial', 'LampshadeMaterial', None), - ('NodeSocketMaterial', 'MetalMaterial', None)]) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': group_input.outputs["BaseHeight"]}) - - curve_line_1 = nw.new_node(Nodes.CurveLine, input_kwargs={'End': combine_xyz_1}) - - 
curve_circle_1 = nw.new_node(Nodes.CurveCircle, input_kwargs={'Radius': group_input.outputs["BaseRadius"], 'Resolution': 100}) - - curve_to_mesh_1 = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': curve_line_1, 'Profile Curve': curve_circle_1.outputs["Curve"], 'Fill Caps': True}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': group_input.outputs["BaseHeight"]}) - - bezier_segment = nw.new_node(Nodes.CurveBezierSegment, - input_kwargs={'Start': combine_xyz, 'Start Handle': group_input.outputs["CurvePoint1"], 'End Handle': group_input.outputs["CurvePoint2"], 'End': group_input.outputs["CurvePoint3"], 'Resolution': 100}) - - curve_line = nw.new_node(Nodes.CurveLine, input_kwargs={'End': combine_xyz}) - - join_geometry_2 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [bezier_segment, curve_line]}) - - curve_circle = nw.new_node(Nodes.CurveCircle, input_kwargs={'Radius': group_input.outputs["StandRadius"], 'Resolution': 100}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': join_geometry_2, 'Profile Curve': curve_circle.outputs["Curve"], 'Fill Caps': True}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [curve_to_mesh_1, curve_to_mesh]}) - - set_material = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': join_geometry, 'Material': group_input.outputs["BlackMaterial"]}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["ShadeHeight"], 1: 0.4000}, - attrs={'operation': 'MULTIPLY'}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["ShadeHeight"], 1: 0.2000}, - attrs={'operation': 'MULTIPLY'}) - - multiply_add = nw.new_node(Nodes.Math, - input_kwargs={0: multiply, 1: group_input.outputs["ReverseLamp"], 2: multiply_1}, - attrs={'operation': 'MULTIPLY_ADD'}) - - lamp_head = nw.new_node(nodegroup_lamp_head().name, - input_kwargs={'ShadeHeight': group_input.outputs["ShadeHeight"], - 'TopRadius': group_input.outputs["HeadTopRadius"], - 'BotRadius': group_input.outputs["HeadBotRadius"], - 'ReverseBulb': group_input.outputs["ReverseLamp"], - 'RackThickness': group_input.outputs["RackThickness"], - 'RackHeight': multiply_add, - 'BlackMaterial': group_input.outputs["BlackMaterial"], - 'LampshadeMaterial': group_input.outputs["LampshadeMaterial"], - 'MetalMaterial': group_input.outputs["MetalMaterial"],}) - - sample_curve = nw.new_node(Nodes.SampleCurve, - input_kwargs={'Curves': bezier_segment, 'Factor': 1.0000}, - attrs={'use_all_curves': True}) - - align_euler_to_vector = nw.new_node(Nodes.AlignEulerToVector, input_kwargs={'Vector': sample_curve.outputs["Tangent"]}, attrs={'axis': 'Z'}) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': lamp_head, 'Translation': sample_curve.outputs["Position"], 'Rotation': align_euler_to_vector}) - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [set_material, transform]}) - - bounding_box = nw.new_node(Nodes.BoundingBox, input_kwargs={'Geometry': join_geometry_1}) - - curve_line_2 = nw.new_node(Nodes.CurveLine, input_kwargs={'End': (0.0000, 0.0000, 0.1000)}) - - transform_geometry = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': curve_line_2, 'Translation': sample_curve.outputs["Position"], 'Rotation': align_euler_to_vector}) - - sample_curve_1 = nw.new_node(Nodes.SampleCurve, input_kwargs={'Curves': transform_geometry, 'Factor': 1.0000}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': join_geometry_1, 'Bounding 
Box': bounding_box.outputs["Bounding Box"], 'LightPosition': sample_curve_1.outputs["Position"]}, - attrs={'is_active_output': True}) - diff --git a/infinigen/assets/lighting/sky_lighting.py b/infinigen/assets/lighting/sky_lighting.py index 4593ef280..0af0e3212 100644 --- a/infinigen/assets/lighting/sky_lighting.py +++ b/infinigen/assets/lighting/sky_lighting.py @@ -4,15 +4,17 @@ # Authors: Alexander Raistrick, Zeyu Ma, Kaiyu Yang, Lingjie Mei -import bpy import math -import numpy as np + +import bpy import gin -from infinigen.core.util.random import random_general as rg +import numpy as np from numpy.random import uniform from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler from infinigen.core.util.math import clip_gaussian +from infinigen.core.util.random import random_general as rg + @gin.configurable def nishita_lighting( @@ -35,26 +37,49 @@ def nishita_lighting( if camera_based_rotation is None: sky_texture.sun_rotation = np.random.uniform(0, 2 * math.pi) else: - sky_texture.sun_rotation = 2 * math.pi - cam.parent.rotation_euler[2] + np.radians(camera_based_rotation) + sky_texture.sun_rotation = ( + 2 * math.pi + - cam.parent.rotation_euler[2] + + np.radians(camera_based_rotation) + ) if dynamic: - sky_texture.sun_rotation += (sky_texture.sun_elevation + np.radians(8)) / 2 * np.arctan(np.radians(rising_angle)) - sky_texture.keyframe_insert(data_path="sun_rotation", frame=bpy.context.scene.frame_end) - sky_texture.sun_rotation -= (sky_texture.sun_elevation + np.radians(8)) * np.arctan(np.radians(rising_angle)) - sky_texture.keyframe_insert(data_path="sun_rotation", frame=bpy.context.scene.frame_start) + sky_texture.sun_rotation += ( + (sky_texture.sun_elevation + np.radians(8)) + / 2 + * np.arctan(np.radians(rising_angle)) + ) + sky_texture.keyframe_insert( + data_path="sun_rotation", frame=bpy.context.scene.frame_end + ) + sky_texture.sun_rotation -= ( + sky_texture.sun_elevation + np.radians(8) + ) * np.arctan(np.radians(rising_angle)) + sky_texture.keyframe_insert( + data_path="sun_rotation", frame=bpy.context.scene.frame_start + ) - sky_texture.keyframe_insert(data_path="sun_elevation", frame=bpy.context.scene.frame_end) + sky_texture.keyframe_insert( + data_path="sun_elevation", frame=bpy.context.scene.frame_end + ) sky_texture.sun_elevation = -np.radians(8) - sky_texture.keyframe_insert(data_path="sun_elevation", frame=bpy.context.scene.frame_start) + sky_texture.keyframe_insert( + data_path="sun_elevation", frame=bpy.context.scene.frame_start + ) sky_texture.sun_elevation = -np.radians(5) - sky_texture.keyframe_insert(data_path="sun_elevation", frame=bpy.context.scene.frame_start + 10) + sky_texture.keyframe_insert( + data_path="sun_elevation", frame=bpy.context.scene.frame_start + 10 + ) sky_texture.altitude = clip_gaussian(100, 400, 0, 2000) - sky_texture.air_density =rg(air_density) + sky_texture.air_density = rg(air_density) sky_texture.dust_density = rg(dust_density) sky_texture.ozone_density = clip_gaussian(1, 1, 0.1, 10) - + strength = rg(strength) - return nw.new_node(Nodes.Background, input_kwargs={'Color': sky_texture, 'Strength': strength}) + return nw.new_node( + Nodes.Background, input_kwargs={"Color": sky_texture, "Strength": strength} + ) + def add_lighting(cam=None): nw = NodeWrangler(bpy.context.scene.world.node_tree) @@ -67,16 +92,18 @@ def add_lighting(cam=None): volume = None - nw.new_node(Nodes.WorldOutput, input_kwargs={ - 'Surface': surface, - 'Volume': volume - }) + nw.new_node(Nodes.WorldOutput, input_kwargs={"Surface": surface, "Volume": 
volume}) + @gin.configurable -def add_camera_based_lighting(energy=("log_uniform", 200, 500), spot_size=("uniform", np.pi / 6, np.pi / 4)): +def add_camera_based_lighting( + energy=("log_uniform", 200, 500), spot_size=("uniform", np.pi / 6, np.pi / 4) +): camera = bpy.context.scene.camera - bpy.ops.object.light_add(type='SPOT', location=camera.location, rotation=camera.rotation_euler) + bpy.ops.object.light_add( + type="SPOT", location=camera.location, rotation=camera.rotation_euler + ) spot = bpy.context.active_object spot.data.energy = rg(energy) spot.data.spot_size = rg(spot_size) - spot.data.spot_blend = uniform(.6, .8) + spot.data.spot_blend = uniform(0.6, 0.8) diff --git a/infinigen/assets/lighting/three_point_lighting.py b/infinigen/assets/lighting/three_point_lighting.py index e1dba2e16..3cb0026ca 100644 --- a/infinigen/assets/lighting/three_point_lighting.py +++ b/infinigen/assets/lighting/three_point_lighting.py @@ -13,16 +13,24 @@ def add_lighting(asset): dimension = asset.dimensions * asset.scale radius = np.sqrt(dimension[0] * dimension[1]) / 2 * 1.5 - locations = np.array( - [(uniform(3, 4), -uniform(3, 4), uniform(5, 6)), (uniform(3, 4), uniform(3, 4), uniform(3, 4)), - (-uniform(5, 6), uniform(-2, -3), uniform(3, 4))]) * radius + locations = ( + np.array( + [ + (uniform(3, 4), -uniform(3, 4), uniform(5, 6)), + (uniform(3, 4), uniform(3, 4), uniform(3, 4)), + (-uniform(5, 6), uniform(-2, -3), uniform(3, 4)), + ] + ) + * radius + ) energies = [1000, 1000 / uniform(5, 10), 1000 * uniform(5, 10)] for loc, energy in zip(locations, energies): - bpy.ops.object.light_add(type='SPOT') + bpy.ops.object.light_add(type="SPOT") light = bpy.context.active_object light.location = loc + asset.location + center(asset) * asset.scale - light.rotation_euler = 0, np.arctan2(np.sqrt(loc[0] ** 2 + loc[1] ** 2), loc[2]), -np.arctan2(-loc[0], - -loc[ - 1])\ - - np.pi / 2 + light.rotation_euler = ( + 0, + np.arctan2(np.sqrt(loc[0] ** 2 + loc[1] ** 2), loc[2]), + -np.arctan2(-loc[0], -loc[1]) - np.pi / 2, + ) light.data.energy = energy * radius * radius diff --git a/infinigen/assets/material_assignments.py b/infinigen/assets/material_assignments.py index a11d1bdff..629220af2 100644 --- a/infinigen/assets/material_assignments.py +++ b/infinigen/assets/material_assignments.py @@ -4,37 +4,51 @@ # Authors: Meenal Parakh -import numpy as np -from dataclasses import dataclass import functools +import numpy as np from numpy.random import uniform -from infinigen.assets.materials import (metal, plastic, text, ceramic, woods, dirt, - mirror, - wood, glass_volume, fabrics, plaster, - sofa_fabric, leather, rug, water, glass) +from infinigen.assets.color_fits import real_color_distribution +from infinigen.assets.materials import ( + beverage_fridge_shaders, + ceiling_light_shaders, + ceramic, + dirt, + dishwasher_shaders, + fabrics, + glass, + glass_volume, + lamp_shaders, + metal, + microwave_shaders, + mirror, + oven_shaders, + plaster, + plastic, + rug, + sofa_fabric, + table_marble, + text, + vase_shaders, + velvet, + water, + wood, + woods, +) +from infinigen.assets.materials.art import Art, ArtFabric, ArtRug from infinigen.assets.materials.plastics import plastic_rough from infinigen.assets.materials.plastics.plastic_rough import shader_rough_plastic -from infinigen.assets.materials import (glass_volume, plaster, wood, - sofa_fabric, leather, rug, water, glass, velvet) -from infinigen.assets.materials import (beverage_fridge_shaders, dishwasher_shaders, - ceiling_light_shaders, - vase_shaders, - 
lamp_shaders, table_marble, - fabrics, - microwave_shaders, oven_shaders) - -from infinigen.assets.materials import black_plastic -from infinigen.assets.materials.art import ArtFabric, ArtRug, Art -from infinigen.assets.materials.wear_tear import procedural_edge_wear, procedural_scratch -from infinigen.assets.color_fits import real_color_distribution +from infinigen.assets.materials.wear_tear import ( + procedural_edge_wear, + procedural_scratch, +) -DEFAULT_EDGE_WEAR_PROB = .5 -DEFAULT_SCRATCH_PROB = .5 +DEFAULT_EDGE_WEAR_PROB = 0.5 +DEFAULT_SCRATCH_PROB = 0.5 -class TextureAssignments: +class TextureAssignments: def __init__(self, materials, probabilities): self.materials = materials self.probabilities = probabilities @@ -44,211 +58,259 @@ def assign_material(self): p = p / p.sum() return np.random.choice(self.materials, p=p) + class MaterialOptions: def __init__(self, materials_list): self.materials, self.probabilities = zip(*materials_list) self.probabilities = np.array(self.probabilities) self.probabilities = self.probabilities / self.probabilities.sum() - + def assign_material(self): return np.random.choice(self.materials, p=self.probabilities) - + def get_all_metal_shaders(): - metal_shaders_list = [metal.brushed_metal.shader_brushed_metal, - metal.galvanized_metal.shader_galvanized_metal, - metal.grained_and_polished_metal.shader_grained_metal, - metal.hammered_metal.shader_hammered_metal] + metal_shaders_list = [ + metal.brushed_metal.shader_brushed_metal, + metal.galvanized_metal.shader_galvanized_metal, + metal.grained_and_polished_metal.shader_grained_metal, + metal.hammered_metal.shader_hammered_metal, + ] color = metal.sample_metal_color() - new_shaders = [functools.partial(shader, base_color=color) for shader in metal_shaders_list] + new_shaders = [ + functools.partial(shader, base_color=color) for shader in metal_shaders_list + ] for idx, ns in enumerate(new_shaders): # fix taken from: https://github.com/elastic/apm-agent-python/issues/293 ns.__name__ = metal_shaders_list[idx].__name__ - + return new_shaders - + + def plastic_furniture(): - new_shader = functools.partial(shader_rough_plastic, base_color=real_color_distribution('sofa_leather')) + new_shader = functools.partial( + shader_rough_plastic, base_color=real_color_distribution("sofa_leather") + ) new_shader.__name__ = shader_rough_plastic.__name__ return new_shader def get_all_fabric_shaders(): - return [fabrics.shader_coarse_fabric_random, fabrics.shader_fine_fabric_random, fabrics.shader_fabric, - fabrics.shader_leather, fabrics.shader_sofa_fabric] + return [ + fabrics.shader_coarse_knit_fabric, + fabrics.shader_fine_knit_fabric, + fabrics.shader_fabric, + fabrics.shader_leather, + fabrics.shader_sofa_fabric, + ] def beverage_fridge_materials(): metal_shaders = get_all_metal_shaders() return { - "surface": TextureAssignments(metal_shaders, [1.0]* len(metal_shaders)), + "surface": TextureAssignments(metal_shaders, [1.0] * len(metal_shaders)), "front": TextureAssignments([beverage_fridge_shaders.shader_glass_001], [1.0]), - "handle": TextureAssignments([beverage_fridge_shaders.shader_white_metal_001], [1.0]), - "back": TextureAssignments([beverage_fridge_shaders.shader_black_medal_001], [1.0]), + "handle": TextureAssignments( + [beverage_fridge_shaders.shader_white_metal_001], [1.0] + ), + "back": TextureAssignments( + [beverage_fridge_shaders.shader_black_medal_001], [1.0] + ), "wear_tear": [procedural_scratch, procedural_edge_wear], - "wear_tear_prob": [DEFAULT_SCRATCH_PROB, DEFAULT_EDGE_WEAR_PROB] + 
"wear_tear_prob": [DEFAULT_SCRATCH_PROB, DEFAULT_EDGE_WEAR_PROB], } + def dishwasher_materials(): metal_shaders = get_all_metal_shaders() return { - "surface": TextureAssignments(metal_shaders, [1.0]* len(metal_shaders)), + "surface": TextureAssignments(metal_shaders, [1.0] * len(metal_shaders)), "front": TextureAssignments([dishwasher_shaders.shader_glass_002], [1.0]), - "white_metal": TextureAssignments(metal_shaders, [1.0]* len(metal_shaders)), + "white_metal": TextureAssignments(metal_shaders, [1.0] * len(metal_shaders)), "top": TextureAssignments([dishwasher_shaders.shader_black_medal_002], [1.0]), - "name_material": TextureAssignments(metal_shaders, [1.0]* len(metal_shaders)), + "name_material": TextureAssignments(metal_shaders, [1.0] * len(metal_shaders)), "wear_tear": [procedural_scratch, procedural_edge_wear], - "wear_tear_prob": [DEFAULT_SCRATCH_PROB, DEFAULT_EDGE_WEAR_PROB] + "wear_tear_prob": [DEFAULT_SCRATCH_PROB, DEFAULT_EDGE_WEAR_PROB], } + def microwave_materials(): metal_shaders = get_all_metal_shaders() return { - "surface": TextureAssignments(metal_shaders, [1.0]* len(metal_shaders)), - "back": TextureAssignments(metal_shaders, [1.0]* len(metal_shaders)), - "black_glass": TextureAssignments([microwave_shaders.shader_black_glass], [1.0]), + "surface": TextureAssignments(metal_shaders, [1.0] * len(metal_shaders)), + "back": TextureAssignments(metal_shaders, [1.0] * len(metal_shaders)), + "black_glass": TextureAssignments( + [microwave_shaders.shader_black_glass], [1.0] + ), "glass": TextureAssignments([microwave_shaders.shader_glass], [1.0]), "wear_tear": [procedural_scratch, procedural_edge_wear], - "wear_tear_prob": [DEFAULT_SCRATCH_PROB, DEFAULT_EDGE_WEAR_PROB] + "wear_tear_prob": [DEFAULT_SCRATCH_PROB, DEFAULT_EDGE_WEAR_PROB], } + def oven_materials(): metal_shaders = get_all_metal_shaders() return { - "surface": TextureAssignments(metal_shaders, [1.0]* len(metal_shaders)), + "surface": TextureAssignments(metal_shaders, [1.0] * len(metal_shaders)), "back": TextureAssignments([oven_shaders.shader_black_medal], [1.0]), - "white_metal": TextureAssignments(metal_shaders, [1.0]* len(metal_shaders)), - "black_glass": TextureAssignments([oven_shaders.shader_super_black_glass], [1.0]), + "white_metal": TextureAssignments(metal_shaders, [1.0] * len(metal_shaders)), + "black_glass": TextureAssignments( + [oven_shaders.shader_super_black_glass], [1.0] + ), "glass": TextureAssignments([oven_shaders.shader_glass], [1.0]), "wear_tear": [procedural_scratch, procedural_edge_wear], - "wear_tear_prob": [DEFAULT_SCRATCH_PROB, DEFAULT_EDGE_WEAR_PROB] + "wear_tear_prob": [DEFAULT_SCRATCH_PROB, DEFAULT_EDGE_WEAR_PROB], } + def tv_materials(): return { - "surface": TextureAssignments([metal, plastic_rough], [1.0, .2]), + "surface": TextureAssignments([metal, plastic_rough], [1.0, 0.2]), "screen_surface": TextureAssignments([text.Text], [1.0]), - "support": TextureAssignments([metal, plastic_rough], [1.0, .2]), + "support": TextureAssignments([metal, plastic_rough], [1.0, 0.2]), "wear_tear": [procedural_scratch, procedural_edge_wear], - "wear_tear_prob": [DEFAULT_SCRATCH_PROB, DEFAULT_EDGE_WEAR_PROB] + "wear_tear_prob": [DEFAULT_SCRATCH_PROB, DEFAULT_EDGE_WEAR_PROB], } + def bathtub_materials(): return { "surface": TextureAssignments([ceramic], [1]), "leg": TextureAssignments([metal], [1.0]), "hole": TextureAssignments([metal], [1.0]), "wear_tear": [procedural_scratch, procedural_edge_wear], - "wear_tear_prob": [DEFAULT_SCRATCH_PROB, DEFAULT_EDGE_WEAR_PROB] + "wear_tear_prob": 
[DEFAULT_SCRATCH_PROB, DEFAULT_EDGE_WEAR_PROB], } + def bathroom_sink_materials(): return { "surface": TextureAssignments([ceramic, metal], [0.9, 0.1]), # rest inherited from bathtub_materials } + def toilet_materials(): return { "surface": TextureAssignments([ceramic, metal], [0.9, 0.1]), "hardware_surface": TextureAssignments([metal], [1.0]), "wear_tear": [procedural_scratch, procedural_edge_wear], - "wear_tear_prob": [DEFAULT_SCRATCH_PROB, DEFAULT_EDGE_WEAR_PROB] + "wear_tear_prob": [DEFAULT_SCRATCH_PROB, DEFAULT_EDGE_WEAR_PROB], } + def hardware_materials(): return { "surface": TextureAssignments([metal], [1.0]), "wear_tear": [procedural_scratch, procedural_edge_wear], - "wear_tear_prob": [DEFAULT_SCRATCH_PROB, DEFAULT_EDGE_WEAR_PROB] + "wear_tear_prob": [DEFAULT_SCRATCH_PROB, DEFAULT_EDGE_WEAR_PROB], } + def blanket_materials(): return { - "surface": TextureAssignments([ArtFabric, fabrics], - [1.0, 1.0]), + "surface": TextureAssignments([ArtFabric, fabrics.fabric_random], [1.0, 1.0]), } + def pants_materials(): return { - "surface": TextureAssignments([ArtFabric, fabrics], - [1.0, 1.0]), + "surface": TextureAssignments([ArtFabric, fabrics.fabric_random], [1.0, 1.0]), } + def towel_materials(): return { - "surface": TextureAssignments([ArtRug, rug], - [0.2, 0.8]), + "surface": TextureAssignments([ArtRug, rug], [0.2, 0.8]), } + def acquarium_materials(): return { "glass_surface": TextureAssignments([glass], [1.0]), "belt_surface": TextureAssignments([metal.galvanized_metal], [1.0]), "water_surface": TextureAssignments([water], [1.0]), "wear_tear": [procedural_scratch, procedural_edge_wear], - "wear_tear_prob": [0, DEFAULT_EDGE_WEAR_PROB] + "wear_tear_prob": [0, DEFAULT_EDGE_WEAR_PROB], } + def ceiling_light_materials(): return { - "black_material": TextureAssignments([ceiling_light_shaders.shader_black], [1.0]), - "white_material": TextureAssignments([ceiling_light_shaders.shader_lamp_bulb_nonemissive], [1.0]), + "black_material": TextureAssignments( + [ceiling_light_shaders.shader_black], [1.0] + ), + "white_material": TextureAssignments( + [ceiling_light_shaders.shader_lamp_bulb_nonemissive], [1.0] + ), "wear_tear": [procedural_scratch, procedural_edge_wear], - "wear_tear_prob": [DEFAULT_SCRATCH_PROB, DEFAULT_EDGE_WEAR_PROB] } + "wear_tear_prob": [DEFAULT_SCRATCH_PROB, DEFAULT_EDGE_WEAR_PROB], + } + def lamp_materials(): return { - 'black_material': TextureAssignments([lamp_shaders.shader_black], [1.0]), - 'metal': TextureAssignments([lamp_shaders.shader_metal], [1.0]), - 'lampshade': TextureAssignments([lamp_shaders.shader_lampshade], [1.0]), + "black_material": TextureAssignments([lamp_shaders.shader_black], [1.0]), + "metal": TextureAssignments([lamp_shaders.shader_metal], [1.0]), + "lampshade": TextureAssignments([lamp_shaders.shader_lampshade], [1.0]), "wear_tear": [procedural_scratch, procedural_edge_wear], - "wear_tear_prob": [0, 0] + "wear_tear_prob": [0, 0], } + def table_cocktail_materials(): # top materials are: choice(['marble', 'tiled_wood', 'plastic', 'glass']), # choice(['brushed_metal', 'grained_metal', 'galvanized_metal', 'wood', 'glass']), metal_shaders = get_all_metal_shaders() return { - 'top': TextureAssignments([table_marble.shader_marble, - woods.tiled_wood.shader_wood_tiled, - shader_rough_plastic, - glass_volume.shader_glass_volume], - [1.0, 1.0, 1.0, 1.0]), - 'leg': TextureAssignments([*metal_shaders, - wood.shader_wood, - glass_volume.shader_glass_volume], - [1.0] * len(metal_shaders)+ [1.0, 1.0]), + "top": TextureAssignments( + [ + 
table_marble.shader_marble, + woods.tiled_wood.shader_wood_tiled, + shader_rough_plastic, + glass_volume.shader_glass_volume, + ], + [1.0, 1.0, 1.0, 1.0], + ), + "leg": TextureAssignments( + [*metal_shaders, wood.shader_wood, glass_volume.shader_glass_volume], + [1.0] * len(metal_shaders) + [1.0, 1.0], + ), "wear_tear": [procedural_scratch, procedural_edge_wear], - 'wear_tear_prob': [DEFAULT_SCRATCH_PROB, DEFAULT_EDGE_WEAR_PROB] + "wear_tear_prob": [DEFAULT_SCRATCH_PROB, DEFAULT_EDGE_WEAR_PROB], } + def table_dining_materials(): metal_shaders = get_all_metal_shaders() fabric_shaders = get_all_fabric_shaders() probs = [1.0 / len(metal_shaders)] * len(metal_shaders) - - return { - 'top': MaterialOptions([ - (table_marble.shader_marble, 2.0), - (wood.shader_wood, 1.0), - (dishwasher_shaders.shader_glass_002, 1.0), - (oven_shaders.shader_super_black_glass, 1.0), - (woods.tiled_wood.shader_wood_tiled, 2.0), - (glass_volume.shader_glass_volume, 1.0), - *(zip(metal_shaders, probs)), - ]), - 'leg': MaterialOptions([ - (wood.shader_wood, 1.0), - (glass_volume.shader_glass_volume, 1.0), - (plastic_furniture(), 1.0), - *(zip(metal_shaders, probs)), - ]), + + return { + "top": MaterialOptions( + [ + (table_marble.shader_marble, 2.0), + (wood.shader_wood, 1.0), + (dishwasher_shaders.shader_glass_002, 1.0), + (oven_shaders.shader_super_black_glass, 1.0), + (woods.tiled_wood.shader_wood_tiled, 2.0), + (glass_volume.shader_glass_volume, 1.0), + *(zip(metal_shaders, probs)), + ] + ), + "leg": MaterialOptions( + [ + (wood.shader_wood, 1.0), + (glass_volume.shader_glass_volume, 1.0), + (plastic_furniture(), 1.0), + *(zip(metal_shaders, probs)), + ] + ), "wear_tear": [procedural_scratch, procedural_edge_wear], - 'wear_tear_prob': [DEFAULT_SCRATCH_PROB, DEFAULT_EDGE_WEAR_PROB] + "wear_tear_prob": [DEFAULT_SCRATCH_PROB, DEFAULT_EDGE_WEAR_PROB], } + def bar_chair_materials(leg_style=None): metal_shaders = get_all_metal_shaders() if leg_style == "wheeled": @@ -256,28 +318,30 @@ def bar_chair_materials(leg_style=None): else: probs = [1.0 / len(metal_shaders)] * len(metal_shaders) return { - 'seat': TextureAssignments([leather.shader_leather], - [1.0]), - 'leg': TextureAssignments([wood.shader_wood, - *metal_shaders], - [1.0] + probs), + "seat": TextureAssignments([fabrics.shader_leather], [1.0]), + "leg": TextureAssignments([wood.shader_wood, *metal_shaders], [1.0] + probs), "wear_tear": [procedural_scratch, procedural_edge_wear], - 'wear_tear_prob': [DEFAULT_SCRATCH_PROB, 0.0] + "wear_tear_prob": [DEFAULT_SCRATCH_PROB, 0.0], } + def chair_materials(): return { - 'limb': TextureAssignments( - [metal, wood, fabrics], - [2.0, 2., 2] + "limb": TextureAssignments([metal, wood, fabrics.fabric_random], [2.0, 2.0, 2]), + "surface": TextureAssignments( + [plastic_rough, wood, fabrics.fabric_random], [0.3, 0.5, 0.7] + ), + "panel": TextureAssignments( + [plastic_rough, wood, fabrics.fabric_random], [0.3, 0.5, 0.7] + ), + "arm": TextureAssignments( + [plastic, wood, fabrics.fabric_random], [0.3, 0.5, 0.7] ), - 'surface': TextureAssignments([plastic_rough, wood, fabrics], [.3, 0.5, 0.7]), - 'panel': TextureAssignments([plastic_rough, wood, fabrics], [.3, 0.5, 0.7]), - 'arm': TextureAssignments([plastic, wood, fabrics], [.3, 0.5, 0.7]), "wear_tear": [procedural_scratch, procedural_edge_wear], - 'wear_tear_prob': [DEFAULT_SCRATCH_PROB, DEFAULT_EDGE_WEAR_PROB] + "wear_tear_prob": [DEFAULT_SCRATCH_PROB, DEFAULT_EDGE_WEAR_PROB], } + def office_chair_materials(leg_style=None): metal_shaders = get_all_metal_shaders() if leg_style 
== "wheeled": @@ -285,66 +349,79 @@ def office_chair_materials(leg_style=None): else: probs = [1.0 / len(metal_shaders)] * len(metal_shaders) return { - 'top': TextureAssignments([ - leather.shader_leather, - wood.shader_wood, - shader_rough_plastic, - glass_volume.shader_glass_volume], - [1.0, 1.0, 1.0, 1.0]), - 'leg': TextureAssignments([wood.shader_wood, - *metal_shaders], - [1.0] + probs), + "top": TextureAssignments( + [ + fabrics.shader_leather, + wood.shader_wood, + shader_rough_plastic, + glass_volume.shader_glass_volume, + ], + [1.0, 1.0, 1.0, 1.0], + ), + "leg": TextureAssignments([wood.shader_wood, *metal_shaders], [1.0] + probs), "wear_tear": [procedural_scratch, procedural_edge_wear], - 'wear_tear_prob': [DEFAULT_SCRATCH_PROB, DEFAULT_EDGE_WEAR_PROB] + "wear_tear_prob": [DEFAULT_SCRATCH_PROB, DEFAULT_EDGE_WEAR_PROB], } + def bedframe_materials(): return { - 'surface': TextureAssignments([wood, plaster], - [2.0, 1.0,]), - 'limb_surface': TextureAssignments([wood, plaster], [2.0, 1.0]), + "surface": TextureAssignments( + [wood, plaster], + [ + 2.0, + 1.0, + ], + ), + "limb_surface": TextureAssignments([wood, plaster], [2.0, 1.0]), "wear_tear": [procedural_scratch, procedural_edge_wear], - 'wear_tear_prob': [DEFAULT_SCRATCH_PROB, DEFAULT_EDGE_WEAR_PROB] + "wear_tear_prob": [DEFAULT_SCRATCH_PROB, DEFAULT_EDGE_WEAR_PROB], } + def mattress_materials(): return { - 'surface': TextureAssignments([sofa_fabric], - [1.0]), + "surface": TextureAssignments([sofa_fabric], [1.0]), } + def pillow_materials(): return { - 'surface': TextureAssignments([ArtFabric, sofa_fabric], - [1.0, 1.0]), + "surface": TextureAssignments([ArtFabric, sofa_fabric], [1.0, 1.0]), } + def sofa_materials(): return { - 'sofa_fabric': MaterialOptions([ + "sofa_fabric": MaterialOptions( + [ (velvet.shader_velvet, 0.5), (sofa_fabric.shader_sofa_fabric, 0.3), - (leather.shader_leather, 0.2) - ]), + (fabrics.shader_leather, 0.2), + ] + ), } + def book_materials(): return { "surface": TextureAssignments([plaster], [1.0]), "cover_surface": TextureAssignments([text.Text], [1.0]), "wear_tear": [procedural_scratch, procedural_edge_wear], - 'wear_tear_prob': [0, 0] + "wear_tear_prob": [0, 0], } + def vase_materials(): return { - "surface": TextureAssignments([vase_shaders.shader_ceramic, - glass_volume.shader_glass_volume], - [1.0, 1.0]), + "surface": TextureAssignments( + [vase_shaders.shader_ceramic, glass_volume.shader_glass_volume], [1.0, 1.0] + ), "wear_tear": [procedural_scratch, procedural_edge_wear], - 'wear_tear_prob': [DEFAULT_SCRATCH_PROB, DEFAULT_EDGE_WEAR_PROB] + "wear_tear_prob": [DEFAULT_SCRATCH_PROB, DEFAULT_EDGE_WEAR_PROB], } + def pan_materials(): return { "surface": TextureAssignments([metal], [1.0]), @@ -352,35 +429,40 @@ def pan_materials(): # no guard as it overrides over tableware_materials } + def cup_materials(): return { - 'surface': TextureAssignments([glass, plastic], [1.0, 1.0]), + "surface": TextureAssignments([glass, plastic], [1.0, 1.0]), "wrap_surface": TextureAssignments([text.Text], [1.0]), } + def bottle_materials(): return { "surface": TextureAssignments([glass, plastic], [1.0, 1.0]), "wrap_surface": TextureAssignments([text.Text], [1.0]), "cap_surface": TextureAssignments([metal, plastic], [1.0, 1.0]), "wear_tear": [procedural_scratch, procedural_edge_wear], - 'wear_tear_prob': [DEFAULT_SCRATCH_PROB, 0.0] + "wear_tear_prob": [DEFAULT_SCRATCH_PROB, 0.0], } + def tableware_materials(fragile=False, transparent=False): if fragile: surface_materials = TextureAssignments([ceramic, glass, 
plastic], [1.0, 1, 1]) elif transparent: surface_materials = TextureAssignments([ceramic, glass], [1.0, 1]) else: - surface_materials = TextureAssignments([ceramic, glass, plastic, metal, wood], [1, 1, 1.0, 1, 1]) + surface_materials = TextureAssignments( + [ceramic, glass, plastic, metal, wood], [1, 1, 1.0, 1, 1] + ) return { "surface": surface_materials, "guard": TextureAssignments([wood, plastic], [1.0, 1.0]), "inside": TextureAssignments([ceramic, metal], [1.0, 1.0]), "wear_tear": [procedural_scratch, procedural_edge_wear], - 'wear_tear_prob': [DEFAULT_SCRATCH_PROB, DEFAULT_EDGE_WEAR_PROB] + "wear_tear_prob": [DEFAULT_SCRATCH_PROB, DEFAULT_EDGE_WEAR_PROB], } @@ -389,204 +471,223 @@ def can_materials(): "surface": TextureAssignments([metal], [1.0]), "wrap_surface": TextureAssignments([text.Text], [1.0]), "wear_tear": [procedural_scratch, procedural_edge_wear], - 'wear_tear_prob': [DEFAULT_SCRATCH_PROB, DEFAULT_EDGE_WEAR_PROB] + "wear_tear_prob": [DEFAULT_SCRATCH_PROB, DEFAULT_EDGE_WEAR_PROB], } + def jar_materials(): return { "surface": TextureAssignments([glass], [1.0]), "cap_surface": TextureAssignments([metal], [1.0]), "wear_tear": [procedural_scratch, procedural_edge_wear], - 'wear_tear_prob': [DEFAULT_SCRATCH_PROB, DEFAULT_EDGE_WEAR_PROB] + "wear_tear_prob": [DEFAULT_SCRATCH_PROB, DEFAULT_EDGE_WEAR_PROB], } + def foodbag_materials(): return { "surface": TextureAssignments([text.Text], [1.0]), } + def lid_materials(): return { "surface": TextureAssignments([ceramic, metal], [0.5, 0.5]), "rim_surface": TextureAssignments([metal], [1.0]), "handle_surface": TextureAssignments([metal, ceramic], [1.0, 1.0]), "wear_tear": [procedural_scratch, procedural_edge_wear], - 'wear_tear_prob': [DEFAULT_SCRATCH_PROB, 0.0] + "wear_tear_prob": [DEFAULT_SCRATCH_PROB, 0.0], } + + def glasslid_materials(): return { "surface": TextureAssignments([glass], [1.0]), "rim_surface": TextureAssignments([metal], [1.0]), "handle_surface": TextureAssignments([metal, ceramic], [1.0, 1.0]), "wear_tear": [procedural_scratch, procedural_edge_wear], - 'wear_tear_prob': [DEFAULT_SCRATCH_PROB, 0.0] + "wear_tear_prob": [DEFAULT_SCRATCH_PROB, 0.0], } + def plant_container_materials(): return { - "surface": TextureAssignments([ceramic, metal], [3., 1.]), - 'dirt_surface': TextureAssignments([dirt], [1.0]), + "surface": TextureAssignments([ceramic, metal], [3.0, 1.0]), + "dirt_surface": TextureAssignments([dirt], [1.0]), } + def balloon_materials(): return { - "surface": TextureAssignments([metal], - [1.0]), + "surface": TextureAssignments([metal], [1.0]), } + def range_hood_materials(): return { "surface": TextureAssignments([metal], [1.0]), "wear_tear": [procedural_scratch, procedural_edge_wear], - "wear_tear_prob": [DEFAULT_SCRATCH_PROB, DEFAULT_EDGE_WEAR_PROB] + "wear_tear_prob": [DEFAULT_SCRATCH_PROB, DEFAULT_EDGE_WEAR_PROB], } + def wall_art_materials(): return { "frame": TextureAssignments([wood, metal], [1.0, 1.0]), "surface": TextureAssignments([Art], [1.0]), "wear_tear": [procedural_scratch, procedural_edge_wear], - "wear_tear_prob": [DEFAULT_SCRATCH_PROB, DEFAULT_EDGE_WEAR_PROB] + "wear_tear_prob": [DEFAULT_SCRATCH_PROB, DEFAULT_EDGE_WEAR_PROB], } + def mirror_materials(): return { "frame": TextureAssignments([wood, metal], [1.0, 1.0]), "surface": TextureAssignments([mirror], [1.0]), "wear_tear": [procedural_scratch, procedural_edge_wear], - "wear_tear_prob": [DEFAULT_SCRATCH_PROB, DEFAULT_EDGE_WEAR_PROB] + "wear_tear_prob": [DEFAULT_SCRATCH_PROB, DEFAULT_EDGE_WEAR_PROB], } def kitchen_sink_materials(): shaders = 
get_all_metal_shaders() - sink_color = metal.sample_metal_color(metal_color='natural') - if uniform() < .5: - tap_color = metal.sample_metal_color(metal_color='plain') + sink_color = metal.sample_metal_color(metal_color="natural") + if uniform() < 0.5: + tap_color = metal.sample_metal_color(metal_color="plain") else: - tap_color = metal.sample_metal_color(metal_color='natural') - sink_shaders = [lambda nw, *args: shader(nw, *args, base_color=sink_color) for shader in shaders] - tap_shaders = [lambda nw, *args: shader(nw, *args, base_color=tap_color) for shader in shaders] + tap_color = metal.sample_metal_color(metal_color="natural") + sink_shaders = [ + lambda nw, *args: shader(nw, *args, base_color=sink_color) for shader in shaders + ] + tap_shaders = [ + lambda nw, *args: shader(nw, *args, base_color=tap_color) for shader in shaders + ] return { "sink": TextureAssignments(sink_shaders, [1.0, 1.0, 1.0, 1.0]), "tap": TextureAssignments(tap_shaders, [1.0, 1.0, 1.0, 1.0]), "wear_tear": [procedural_scratch, procedural_edge_wear], - "wear_tear_prob": [DEFAULT_SCRATCH_PROB, DEFAULT_EDGE_WEAR_PROB] + "wear_tear_prob": [DEFAULT_SCRATCH_PROB, DEFAULT_EDGE_WEAR_PROB], } def kitchen_tap_materials(): shaders = get_all_metal_shaders() - if uniform() < .5: - tap_color = metal.sample_metal_color(metal_color='plain') + if uniform() < 0.5: + tap_color = metal.sample_metal_color(metal_color="plain") else: - tap_color = metal.sample_metal_color(metal_color='natural') - tap_shaders = [lambda nw, *args: shader(nw, *args, base_color=tap_color) for shader in shaders] + tap_color = metal.sample_metal_color(metal_color="natural") + tap_shaders = [ + lambda nw, *args: shader(nw, *args, base_color=tap_color) for shader in shaders + ] return { "tap": TextureAssignments(tap_shaders, [1.0, 1.0, 1.0, 1.0]), "wear_tear": [procedural_scratch, procedural_edge_wear], - "wear_tear_prob": [DEFAULT_SCRATCH_PROB, DEFAULT_EDGE_WEAR_PROB] + "wear_tear_prob": [DEFAULT_SCRATCH_PROB, DEFAULT_EDGE_WEAR_PROB], } + def rug_materials(): return { - "surface": MaterialOptions([ - (rug, 3.0), - (ArtRug, 2.0), - (fabrics, 5.0), - ]) + "surface": MaterialOptions( + [ + (rug, 3.0), + (ArtRug, 2.0), + (fabrics.fabric_random, 5.0), + ] + ) } + AssetList = { # appliances - 'BeverageFridgeFactory': beverage_fridge_materials, # looks like dishwasher currently - 'DishwasherFactory': dishwasher_materials, - 'MicrowaveFactory': microwave_materials, - 'OvenFactory': oven_materials, # looks like dishwasher currently - 'TVFactory': tv_materials, - 'MonitorFactory': None, # inherits from TVFactory + "BeverageFridgeFactory": beverage_fridge_materials, # looks like dishwasher currently + "DishwasherFactory": dishwasher_materials, + "MicrowaveFactory": microwave_materials, + "OvenFactory": oven_materials, # looks like dishwasher currently + "TVFactory": tv_materials, + "MonitorFactory": None, # inherits from TVFactory # bathroom - 'BathtubFactory': bathtub_materials, - 'BathroomSinkFactory': bathroom_sink_materials, # inheriting from bathtub factory, so not used - 'HardwareFactory': hardware_materials, - 'ToiletFactory': toilet_materials, + "BathtubFactory": bathtub_materials, + "BathroomSinkFactory": bathroom_sink_materials, # inheriting from bathtub factory, so not used + "HardwareFactory": hardware_materials, + "ToiletFactory": toilet_materials, # clothes - 'BlanketFactory': blanket_materials, # has Art which is a class, not func, - # also "Normal Not Found" is printed when generating + "BlanketFactory": blanket_materials, # has Art which is a class, 
not func, + # also "Normal Not Found" is printed when generating ############## this point onwards, using this dictionary to get corresponding ############## material functions except for tableware base - 'PantsFactory': pants_materials, # same comment as above - 'ShirtFactory': pants_materials, # same comment as above - 'TowelFactory': towel_materials, + "PantsFactory": pants_materials, # same comment as above + "ShirtFactory": pants_materials, # same comment as above + "TowelFactory": towel_materials, # decor - 'AquariumTankFactory': acquarium_materials, + "AquariumTankFactory": acquarium_materials, # lighting - 'CausticsLampFactory': None, # the properties are not materials, so skipping - 'CeilingLightFactory': ceiling_light_materials, - 'PointLampFactory': None, # the properties are not materials, so skipping - 'LampFactory': lamp_materials, # really required bunch of changes to expose the materials + "CausticsLampFactory": None, # the properties are not materials, so skipping + "CeilingLightFactory": ceiling_light_materials, + "PointLampFactory": None, # the properties are not materials, so skipping + "LampFactory": lamp_materials, # really required bunch of changes to expose the materials # seating: chairs - 'BarChairFactory': bar_chair_materials, - 'ChairFactory': chair_materials, # an internal reassignment that overrides surface with the limb material - 'OfficeChairFactory': office_chair_materials, + "BarChairFactory": bar_chair_materials, + "ChairFactory": chair_materials, # an internal reassignment that overrides surface with the limb material + "OfficeChairFactory": office_chair_materials, # seating: sofas and beds - 'BedFactory': None, # uses the below factories, so no materials - 'BedFrameFactory': bedframe_materials, - 'MattressFactory': mattress_materials, - 'PillowFactory': pillow_materials, - 'SofaFactory': sofa_materials, + "BedFactory": None, # uses the below factories, so no materials + "BedFrameFactory": bedframe_materials, + "MattressFactory": mattress_materials, + "PillowFactory": pillow_materials, + "SofaFactory": sofa_materials, # shelves: todo - 'SimpleDeskFactory': None, - 'SimpleBookcaseFactory': None, - 'CellShelfFactory': None, - 'TVStandFactory': None, - 'TriangleShelfFactory': None, - 'LargeShelfFactory': None, - 'SingleCabinetFactory': None, - 'KitchenCabinetFactory': None, - 'KitchenSpaceFactory': None, - 'KitchenIslandFactory': None, + "SimpleDeskFactory": None, + "SimpleBookcaseFactory": None, + "CellShelfFactory": None, + "TVStandFactory": None, + "TriangleShelfFactory": None, + "LargeShelfFactory": None, + "SingleCabinetFactory": None, + "KitchenCabinetFactory": None, + "KitchenSpaceFactory": None, + "KitchenIslandFactory": None, # table decorations : they have their own materials - 'BookFactory': book_materials, - 'BookColumnFactory': None, # use BookFactory - 'BookStackFactory': None, # use BookFactory - 'VaseFactory': vase_materials, + "BookFactory": book_materials, + "BookColumnFactory": None, # use BookFactory + "BookStackFactory": None, # use BookFactory + "VaseFactory": vase_materials, # sink and tap - 'SinkFactory': kitchen_sink_materials, - 'TapFactory': kitchen_tap_materials, + "SinkFactory": kitchen_sink_materials, + "TapFactory": kitchen_tap_materials, # tables - 'TableCocktailFactory': table_cocktail_materials, - 'TableDiningFactory': table_dining_materials, - 'TableTopFactory': None, # not sure where the materials are used in it + "TableCocktailFactory": table_cocktail_materials, + "TableDiningFactory": table_dining_materials, + 
"TableTopFactory": None, # not sure where the materials are used in it # Tableware - 'TablewareFactory': tableware_materials, # only function with arguments + "TablewareFactory": tableware_materials, # only function with arguments # 'TablewareFactory': tableware_materials_default, # directly uses the following functions (not through the AssetList Dictionary) - 'SpoonFactory': None, # uses materials from tableware base - 'KnifeFactory': None, # uses materials from tableware base - 'ChopsticksFactory': None, # uses materials from tableware base - 'ForkFactory': None, # uses materials from tableware base - 'SpatulaFactory': None, # uses materials from tableware base - 'PanFactory': pan_materials, - 'PotFactory': None, # uses the same materials as PanFactory - 'CupFactory': cup_materials, - 'WineglassFactory': None, # uses materials from transparent tableware - 'PlateFactory': None, # uses materials from tableware base - 'BowlFactory': None, # uses materials from tableware base - 'FruitContainerFactory': None, # uses materials from tableware base - 'BottleFactory': bottle_materials, - 'CanFactory': can_materials, - 'JarFactory': jar_materials, - 'FoodBagFactory': foodbag_materials, - 'FoodBoxFactory': foodbag_materials, # same params as above - 'LidFactory': lid_materials, - 'GlassLidFactory': glasslid_materials, - 'PlantContainerFactory': plant_container_materials, + "SpoonFactory": None, # uses materials from tableware base + "KnifeFactory": None, # uses materials from tableware base + "ChopsticksFactory": None, # uses materials from tableware base + "ForkFactory": None, # uses materials from tableware base + "SpatulaFactory": None, # uses materials from tableware base + "PanFactory": pan_materials, + "PotFactory": None, # uses the same materials as PanFactory + "CupFactory": cup_materials, + "WineglassFactory": None, # uses materials from transparent tableware + "PlateFactory": None, # uses materials from tableware base + "BowlFactory": None, # uses materials from tableware base + "FruitContainerFactory": None, # uses materials from tableware base + "BottleFactory": bottle_materials, + "CanFactory": can_materials, + "JarFactory": jar_materials, + "FoodBagFactory": foodbag_materials, + "FoodBoxFactory": foodbag_materials, # same params as above + "LidFactory": lid_materials, + "GlassLidFactory": glasslid_materials, + "PlantContainerFactory": plant_container_materials, # wall decorations - 'BalloonFactory': balloon_materials, - 'RangeHoodFactory': range_hood_materials, # getting RangeHoodFactory not Found. - 'WallArtFactory': wall_art_materials, - 'MirrorFactory': mirror_materials, + "BalloonFactory": balloon_materials, + "RangeHoodFactory": range_hood_materials, # getting RangeHoodFactory not Found. + "WallArtFactory": wall_art_materials, + "MirrorFactory": mirror_materials, # window - 'WindowFactory': None, + "WindowFactory": None, "RugFactory": rug_materials, } diff --git a/infinigen/assets/materials/__init__.py b/infinigen/assets/materials/__init__.py index 039b38d05..e455c367b 100644 --- a/infinigen/assets/materials/__init__.py +++ b/infinigen/assets/materials/__init__.py @@ -3,21 +3,49 @@ # Authors: Hongyu Wen -from . import * -from infinigen.infinigen_gpl.surfaces import * +from infinigen.infinigen_gpl.surfaces import snow +# too verbose to import all shaders one by one +# ruff: noqa: F403 +from . 
import * +from .art import Art, ArtFabric, ArtRug, DarkArt +from .fabrics import ( + coarse_knit_fabric, + fine_knit_fabric, + general_fabric, + leather, + lined_fabric, + sofa_fabric, + velvet, +) +from .glass import shader_glass +from .metal import ( + brushed_metal, + galvanized_metal, + grained_and_polished_metal, + hammered_metal, + metal_basic, +) from .metal.brushed_metal import shader_brushed_metal from .metal.galvanized_metal import shader_galvanized_metal from .metal.grained_and_polished_metal import shader_grained_metal from .metal.hammered_metal import shader_hammered_metal from .metal.metal_basic import shader_metal -from .metal import ( - metal_basic, - galvanized_metal, - grained_and_polished_metal, - hammered_metal, - brushed_metal, +from .plastic import shader_rough_plastic, shader_translucent_plastic +from .stone_and_concrete import concrete +from .woods import ( + composite_wood_tile, + crossed_wood_tile, + hexagon_wood_tile, + non_wood_tile, + square_wood_tile, + staggered_wood_tile, + tiled_wood, + wood, + wood_old, + wood_tile, ) +from .woods.wood import shader_wood metal_shader_list = [ shader_brushed_metal, @@ -27,29 +55,8 @@ shader_metal, ] -from .plastic import shader_rough_plastic -from .plastic import shader_translucent_plastic - plastic_shader_list = [shader_rough_plastic, shader_translucent_plastic] -from .woods.wood import shader_wood - wood_shader_list = [shader_wood] -from .glass import shader_glass - glass_shader_list = [shader_glass] - -from .leather_and_fabrics import ( - leather, - general_fabric, - sofa_fabric, - fine_knit_fabric, - coarse_knit_fabric, - lined_fabric, - velvet -) -from .art import Art, DarkArt, ArtRug, ArtFabric -from .stone_and_concrete import concrete -from .woods import tiled_wood, wood, wood_old, square_wood_tile, hexagon_wood_tile, composite_wood_tile, \ - staggered_wood_tile, crossed_wood_tile, wood_tile, non_wood_tile diff --git a/infinigen/assets/materials/aluminumdisp2tut.py b/infinigen/assets/materials/aluminumdisp2tut.py index 41864345e..86653e017 100644 --- a/infinigen/assets/materials/aluminumdisp2tut.py +++ b/infinigen/assets/materials/aluminumdisp2tut.py @@ -4,136 +4,168 @@ # Authors: Mingzhe Wang # Acknowledgment: This file draws inspiration from https://www.youtube.com/watch?v=FY0lR96Mwas by Sam Bowman -import os, sys -import numpy as np -import math as ma +import os + import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.assets.materials.utils.surface_utils import clip, sample_range, sample_ratio, sample_color, geo_voronoi_noise +from infinigen.assets.materials.utils.surface_utils import ( + sample_color, + sample_range, + sample_ratio, +) +from infinigen.core import surface from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface def shader_aluminumdisp2tut(nw: NodeWrangler, rand=False, **input_kwargs): # Code generated using version 2.4.3 of the node_transpiler texture_coordinate = nw.new_node(Nodes.TextureCoord) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': texture_coordinate.outputs["Generated"]}) - - multiply = nw.new_node(Nodes.Math, + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, + input_kwargs={"Vector": texture_coordinate.outputs["Generated"]}, + ) + + multiply = nw.new_node( + Nodes.Math, input_kwargs={0: separate_xyz.outputs["X"], 1: 0.1}, - attrs={'operation': 'MULTIPLY'}) + 
attrs={"operation": "MULTIPLY"}, + ) if rand: multiply.inputs[1].default_value = sample_range(-1, 1) - multiply_1 = nw.new_node(Nodes.Math, + multiply_1 = nw.new_node( + Nodes.Math, input_kwargs={0: separate_xyz.outputs["Y"]}, - attrs={'operation': 'MULTIPLY'}) + attrs={"operation": "MULTIPLY"}, + ) if rand: multiply_1.inputs[1].default_value = sample_range(-1, 1) - add = nw.new_node(Nodes.Math, - input_kwargs={0: multiply, 1: multiply_1}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'Z': add}) - - mapping_1 = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': texture_coordinate.outputs["Object"], 'Rotation': combine_xyz}) - - mapping = nw.new_node(Nodes.VectorMath, + add = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: multiply_1}) + + combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": add}) + + mapping_1 = nw.new_node( + Nodes.Mapping, + input_kwargs={ + "Vector": texture_coordinate.outputs["Object"], + "Rotation": combine_xyz, + }, + ) + + mapping = nw.new_node( + Nodes.VectorMath, input_kwargs={0: mapping_1.outputs["Vector"], 1: (1, 75, 1)}, - attrs={'operation': 'MULTIPLY'}) + attrs={"operation": "MULTIPLY"}, + ) - #mapping = nw.new_node(Nodes.Mapping, + # mapping = nw.new_node(Nodes.Mapping, # input_kwargs={'Vector': mapping_1, 'Scale': (1.0, sample_range(50, 100) if rand else 75.0, 1.0)}) - - musgrave_texture = nw.new_node(Nodes.MusgraveTexture, - input_kwargs={'Vector': mapping, 'W': 0.7, 'Scale': 2.0, 'Detail': 10.0, 'Dimension': 1.0}, - attrs={'musgrave_dimensions': '4D'}) + + musgrave_texture = nw.new_node( + Nodes.MusgraveTexture, + input_kwargs={ + "Vector": mapping, + "W": 0.7, + "Scale": 2.0, + "Detail": 10.0, + "Dimension": 1.0, + }, + attrs={"musgrave_dimensions": "4D"}, + ) if rand: - musgrave_texture.inputs['W'].default_value = sample_range(0, 5) - musgrave_texture.inputs['Scale'].default_value = sample_ratio(2, 0.5, 2) + musgrave_texture.inputs["W"].default_value = sample_range(0, 5) + musgrave_texture.inputs["Scale"].default_value = sample_ratio(2, 0.5, 2) - colorramp_4 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': musgrave_texture}) - colorramp_4.color_ramp.elements[0].position = sample_range(0.1, 0.3) if rand else 0.1455 + colorramp_4 = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": musgrave_texture}) + colorramp_4.color_ramp.elements[0].position = ( + sample_range(0.1, 0.3) if rand else 0.1455 + ) colorramp_4.color_ramp.elements[0].color = (0.466, 0.466, 0.466, 1.0) colorramp_4.color_ramp.elements[1].position = 1.0 colorramp_4.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - - colorramp_1 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': colorramp_4.outputs["Color"]}) + + colorramp_1 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": colorramp_4.outputs["Color"]} + ) colorramp_1.color_ramp.elements.new(0) - colorramp_1.color_ramp.elements[0].position = .28 + colorramp_1.color_ramp.elements[0].position = 0.28 colorramp_1.color_ramp.elements[0].color = (0.56, 0.61, 0.61, 1.0) colorramp_1.color_ramp.elements[1].position = 0.46 colorramp_1.color_ramp.elements[1].color = (0.206, 0.24, 0.27, 1.0) colorramp_1.color_ramp.elements[2].position = 0.71 colorramp_1.color_ramp.elements[2].color = (0.92, 0.97, 0.95, 1.0) - + if rand: for e in colorramp_1.color_ramp.elements: sample_color(e.color, offset=0.02) - - colorramp = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': colorramp_4.outputs["Color"]}) + colorramp = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": colorramp_4.outputs["Color"]} + ) 
colorramp.color_ramp.elements[0].position = 0.74 colorramp.color_ramp.elements[0].color = (1.0, 1.0, 1.0, 1.0) colorramp.color_ramp.elements[1].position = 1.0 colorramp.color_ramp.elements[1].color = (0.5162, 0.5162, 0.5162, 1.0) - - colorramp_3 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': musgrave_texture}) + + colorramp_3 = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": musgrave_texture}) colorramp_3.color_ramp.elements[0].position = 0.77 colorramp_3.color_ramp.elements[0].color = (0.26, 0.26, 0.26, 1.0) colorramp_3.color_ramp.elements[1].position = 1.0 colorramp_3.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': colorramp_1.outputs["Color"], 'Metallic': colorramp.outputs["Color"], 'Roughness': colorramp_3.outputs["Color"]}, - attrs={'subsurface_method': 'BURLEY'}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': principled_bsdf}) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": colorramp_1.outputs["Color"], + "Metallic": colorramp.outputs["Color"], + "Roughness": colorramp_3.outputs["Color"], + }, + attrs={"subsurface_method": "BURLEY"}, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf} + ) + def geo_aluminumdisp2tut(nw: NodeWrangler, rand=False, **input_kwargs): # Code generated using version 2.4.3 of the node_transpiler - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None)]) - - #subdivide_level = nw.new_node(Nodes.Value, + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) + + # subdivide_level = nw.new_node(Nodes.Value, # label='SubdivideLevel') - #subdivide_level.outputs[0].default_value = 0 - - #subdivide_mesh = nw.new_node(Nodes.SubdivideMesh, + # subdivide_level.outputs[0].default_value = 0 + + # subdivide_mesh = nw.new_node(Nodes.SubdivideMesh, # input_kwargs={'Mesh': group_input.outputs["Geometry"], 'Level': subdivide_level}) - + position = nw.new_node(Nodes.InputPosition) - - scale = nw.new_node(Nodes.Value, - label='Scale') + + scale = nw.new_node(Nodes.Value, label="Scale") scale.outputs[0].default_value = 1.0 - - multiply = nw.new_node(Nodes.VectorMath, + + multiply = nw.new_node( + Nodes.VectorMath, input_kwargs={0: position, 1: scale}, - attrs={'operation': 'MULTIPLY'}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': multiply.outputs["Vector"], 'Scale': 4.0}, - attrs={'noise_dimensions': '4D'}) + attrs={"operation": "MULTIPLY"}, + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={"Vector": multiply.outputs["Vector"], "Scale": 4.0}, + attrs={"noise_dimensions": "4D"}, + ) if rand: - noise_texture.inputs['W'].default_value = sample_range(0, 5) - noise_texture.inputs['Scale'].default_value = sample_ratio(6.0, 0.75, 1.5) + noise_texture.inputs["W"].default_value = sample_range(0, 5) + noise_texture.inputs["Scale"].default_value = sample_ratio(6.0, 0.75, 1.5) - colorramp = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': noise_texture.outputs["Fac"]}) + colorramp = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": noise_texture.outputs["Fac"]} + ) colorramp.color_ramp.elements.new(0) colorramp.color_ramp.elements[0].position = 0.68 colorramp.color_ramp.elements[0].color = (1.0, 1.0, 1.0, 1.0) @@ -141,66 +173,107 @@ def geo_aluminumdisp2tut(nw: NodeWrangler, rand=False, **input_kwargs): 
colorramp.color_ramp.elements[1].color = (0.093, 0.093, 0.093, 1.0) colorramp.color_ramp.elements[2].position = 0.9 colorramp.color_ramp.elements[2].color = (0.0, 0.0, 0.0, 1.0) - - noise_texture_1 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': multiply.outputs["Vector"], 'Scale': 2.0}, - attrs={'noise_dimensions': '4D'}) - - colorramp_1 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': noise_texture_1.outputs["Fac"]}) + + noise_texture_1 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={"Vector": multiply.outputs["Vector"], "Scale": 2.0}, + attrs={"noise_dimensions": "4D"}, + ) + + colorramp_1 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": noise_texture_1.outputs["Fac"]} + ) colorramp_1.color_ramp.elements[0].position = 0.46 colorramp_1.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) colorramp_1.color_ramp.elements[1].position = 1.0 colorramp_1.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - - mix = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': colorramp.outputs["Color"], 'Color1': colorramp_1.outputs["Color"], 'Color2': (0.521, 0.521, 0.521, 1.0)}) - + + mix = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": colorramp.outputs["Color"], + "Color1": colorramp_1.outputs["Color"], + "Color2": (0.521, 0.521, 0.521, 1.0), + }, + ) + value = nw.new_node(Nodes.Value) value.outputs[0].default_value = 0.5 - - subtract = nw.new_node(Nodes.VectorMath, + + subtract = nw.new_node( + Nodes.VectorMath, input_kwargs={0: mix, 1: value}, - attrs={'operation': 'SUBTRACT'}) - + attrs={"operation": "SUBTRACT"}, + ) + normal = nw.new_node(Nodes.InputNormal) - - multiply_1 = nw.new_node(Nodes.VectorMath, + + multiply_1 = nw.new_node( + Nodes.VectorMath, input_kwargs={0: subtract.outputs["Vector"], 1: normal}, - attrs={'operation': 'MULTIPLY'}) - - offset_scale = nw.new_node(Nodes.Value, - label='OffsetScale') + attrs={"operation": "MULTIPLY"}, + ) + + offset_scale = nw.new_node(Nodes.Value, label="OffsetScale") offset_scale.outputs[0].default_value = sample_range(0.03, 0.05) if rand else 0.04 - - multiply_2 = nw.new_node(Nodes.VectorMath, + + multiply_2 = nw.new_node( + Nodes.VectorMath, input_kwargs={0: multiply_1.outputs["Vector"], 1: offset_scale}, - attrs={'operation': 'MULTIPLY'}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 'Offset': multiply_2.outputs["Vector"]}) - - capture_attribute = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': set_position, 1: mix}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': capture_attribute.outputs["Geometry"], 'Attribute': capture_attribute.outputs["Attribute"]}) + attrs={"operation": "MULTIPLY"}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + "Offset": multiply_2.outputs["Vector"], + }, + ) + + capture_attribute = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={"Geometry": set_position, 1: mix}, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": capture_attribute.outputs["Geometry"], + "Attribute": capture_attribute.outputs["Attribute"], + }, + ) + def apply(obj, geo_kwargs=None, shader_kwargs=None, **kwargs): - surface.add_geomod(obj, geo_aluminumdisp2tut, apply=False, input_kwargs=geo_kwargs, attributes=['offset']) - surface.add_material(obj, shader_aluminumdisp2tut, reuse=False, input_kwargs=shader_kwargs) + 
surface.add_geomod( + obj, + geo_aluminumdisp2tut, + apply=False, + input_kwargs=geo_kwargs, + attributes=["offset"], + ) + surface.add_material( + obj, shader_aluminumdisp2tut, reuse=False, input_kwargs=shader_kwargs + ) if __name__ == "__main__": - mat = 'aluminumdisp2tut' - if not os.path.isdir(os.path.join('outputs', mat)): - os.mkdir(os.path.join('outputs', mat)) + mat = "aluminumdisp2tut" + if not os.path.isdir(os.path.join("outputs", mat)): + os.mkdir(os.path.join("outputs", mat)) for i in range(10): - bpy.ops.wm.open_mainfile(filepath='test.blend') - apply(bpy.data.objects['SolidModel'], geo_kwargs={'rand':True, 'subdivide_mesh_level':3}, shader_kwargs={'rand': True}) - #fn = os.path.join(os.path.abspath(os.curdir), 'giraffe_geo_test.blend') - #bpy.ops.wm.save_as_mainfile(filepath=fn) - bpy.context.scene.render.filepath = os.path.join('outputs', mat, '%s_%d.jpg'%(mat, i)) - bpy.context.scene.render.image_settings.file_format='JPEG' - bpy.ops.render.render(write_still=True) \ No newline at end of file + bpy.ops.wm.open_mainfile(filepath="test.blend") + apply( + bpy.data.objects["SolidModel"], + geo_kwargs={"rand": True, "subdivide_mesh_level": 3}, + shader_kwargs={"rand": True}, + ) + # fn = os.path.join(os.path.abspath(os.curdir), 'giraffe_geo_test.blend') + # bpy.ops.wm.save_as_mainfile(filepath=fn) + bpy.context.scene.render.filepath = os.path.join( + "outputs", mat, "%s_%d.jpg" % (mat, i) + ) + bpy.context.scene.render.image_settings.file_format = "JPEG" + bpy.ops.render.render(write_still=True) diff --git a/infinigen/assets/materials/art.py b/infinigen/assets/materials/art.py index a872158bd..8c51aa481 100644 --- a/infinigen/assets/materials/art.py +++ b/infinigen/assets/materials/art.py @@ -5,13 +5,15 @@ import numpy as np from numpy.random import uniform +from infinigen.assets.materials.fabrics.fabric_random import fabric_shader_list from infinigen.core.util.math import FixedSeed from infinigen.core.util.random import log_uniform -from . import text -from ..utils.decorate import read_uv, write_uv -from ...core.nodes import NodeWrangler, Nodes from infinigen.core.util.random import random_general as rg +from ...core.nodes import Nodes, NodeWrangler +from ..utils.decorate import read_uv, write_uv +from . 
import rug, text + class Art(text.Text): def __init__(self, factory_seed): @@ -23,16 +25,15 @@ def __init__(self, factory_seed): @staticmethod def scale_uniform(min_, max_): - return (max_ - min_) * log_uniform(.1, .5) + return (max_ - min_) * log_uniform(0.1, 0.5) class DarkArt(Art): - def __init__(self, factory_seed): super().__init__(factory_seed) with FixedSeed(self.factory_seed): self.darken_scale = uniform(5, 10) - self.darken_ratio = uniform(.5, 1) + self.darken_ratio = uniform(0.5, 1) def make_shader_func(self, bbox): art_shader_func = super(DarkArt, self).make_shader_func(bbox) @@ -41,10 +42,13 @@ def shader_dark_art(nw: NodeWrangler): art_shader_func(nw) art_bsdf = nw.find(Nodes.PrincipledBSDF)[0] art_color = nw.find_from(art_bsdf.inputs[0])[0].from_socket - dark_color = nw.new_node(Nodes.NoiseTexture, input_kwargs={'Scale': self.darken_scale}).outputs[0] + dark_color = nw.new_node( + Nodes.NoiseTexture, input_kwargs={"Scale": self.darken_scale} + ).outputs[0] art_color = nw.new_node( - Nodes.MixRGB, [self.darken_ratio, art_color, dark_color], - attrs={'blend_type': 'DARKEN'} + Nodes.MixRGB, + [self.darken_ratio, art_color, dark_color], + attrs={"blend_type": "DARKEN"}, ).outputs[2] nw.connect_input(art_color, art_bsdf.inputs[0]) @@ -52,7 +56,6 @@ def shader_dark_art(nw: NodeWrangler): class ArtComposite(DarkArt): - @property def base_shader(self): raise NotImplementedError @@ -68,7 +71,10 @@ def shader_art_composite(nw: NodeWrangler, **kwargs): art_color = nw_.find_from(art_bsdf.inputs[0])[0].from_socket nw_.nodes.remove(art_bsdf) nw_.connect_input(art_color, base_bsdf.inputs[0]) - nw_.connect_input(base_bsdf.outputs[0], nw_.find(Nodes.MaterialOutput)[0].inputs['Surface']) + nw_.connect_input( + base_bsdf.outputs[0], + nw_.find(Nodes.MaterialOutput)[0].inputs["Surface"], + ) return shader_art_composite @@ -79,14 +85,12 @@ def make_sphere(self): class ArtRug(ArtComposite): @property def base_shader(self): - from . 
import rug return rug.shader_rug class ArtFabric(ArtComposite): @property def base_shader(self): - from .leather_and_fabrics import fabric_shader_list return rg(fabric_shader_list) diff --git a/infinigen/assets/materials/atmosphere_light_haze.py b/infinigen/assets/materials/atmosphere_light_haze.py index cabc7dc7d..ffa8cfbb1 100644 --- a/infinigen/assets/materials/atmosphere_light_haze.py +++ b/infinigen/assets/materials/atmosphere_light_haze.py @@ -4,30 +4,33 @@ # Authors: Alexander Raistrick, Zeyu Ma -import numpy as np -from numpy.random import uniform import gin -from infinigen.core.util.random import random_general as rg +from infinigen.core import surface from infinigen.core.nodes.node_wrangler import Nodes from infinigen.core.util import color -from infinigen.core import surface +from infinigen.core.util.random import random_general as rg type = None @gin.configurable -def shader_atmosphere(nw, enable_scatter=True, density=("uniform", 0, 0.006), anisotropy=0.5, **kwargs): +def shader_atmosphere( + nw, enable_scatter=True, density=("uniform", 0, 0.006), anisotropy=0.5, **kwargs +): nw.force_input_consistency() - principled_volume = nw.new_node(Nodes.PrincipledVolume, + principled_volume = nw.new_node( + Nodes.PrincipledVolume, input_kwargs={ - 'Color': color.color_category('fog'), - 'Density': rg(density), - 'Anisotropy': rg(anisotropy) - }) - + "Color": color.color_category("fog"), + "Density": rg(density), + "Anisotropy": rg(anisotropy), + }, + ) + return (None, principled_volume) + def apply(obj, selection=None, **kwargs): surface.add_material(obj, shader_atmosphere, selection=selection) diff --git a/infinigen/assets/materials/bark.py b/infinigen/assets/materials/bark.py index 4260a3b72..447d62c6d 100644 --- a/infinigen/assets/materials/bark.py +++ b/infinigen/assets/materials/bark.py @@ -3,134 +3,215 @@ # Authors: Mingzhe Wang -import os, sys -import numpy as np -from numpy.random import uniform as U, normal as N -import math as ma -from infinigen.assets.materials.utils.surface_utils import clip, sample_range, sample_ratio, sample_color, geo_voronoi_noise -import bpy -import mathutils -from numpy.random import uniform, normal -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler + +from numpy.random import normal as N +from numpy.random import uniform as U + +from infinigen.assets.materials.utils.surface_utils import ( + sample_color, + sample_ratio, +) from infinigen.core import surface -import random +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler def shader_bark(nw, rand=False, **input_kwargs): - texture_coordinate = nw.new_node(Nodes.TextureCoord) - - mapping = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': texture_coordinate.outputs["Object"]}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': mapping, 'Detail': 16.0, 'Roughness': 0.62}) + + mapping = nw.new_node( + Nodes.Mapping, input_kwargs={"Vector": texture_coordinate.outputs["Object"]} + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={"Vector": mapping, "Detail": 16.0, "Roughness": 0.62}, + ) if rand: - sample_max = input_kwargs['noise_scale_max'] if 'noise_scale_max' in input_kwargs else 3 - sample_min = input_kwargs['noise_scale_min'] if 'noise_scale_min' in input_kwargs else 1/sample_max - noise_texture.inputs["Scale"].default_value = sample_ratio(noise_texture.inputs["Scale"].default_value, sample_min, sample_max) + sample_max = ( + input_kwargs["noise_scale_max"] if "noise_scale_max" in input_kwargs else 3 + ) + sample_min 
= ( + input_kwargs["noise_scale_min"] + if "noise_scale_min" in input_kwargs + else 1 / sample_max + ) + noise_texture.inputs["Scale"].default_value = sample_ratio( + noise_texture.inputs["Scale"].default_value, sample_min, sample_max + ) - colorramp_1 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': noise_texture.outputs["Fac"]}) + colorramp_1 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": noise_texture.outputs["Fac"]} + ) colorramp_1.color_ramp.elements[0].position = 0.627 colorramp_1.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) colorramp_1.color_ramp.elements[1].position = 0.63 colorramp_1.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - attribute = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'offset'}) - - mix = nw.new_node(Nodes.MixRGB, - input_kwargs={'Color1': noise_texture.outputs["Fac"], 'Color2': attribute.outputs["Color"]}, - attrs={'blend_type': 'MULTIPLY'}) - - colorramp = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': mix}) + attribute = nw.new_node(Nodes.Attribute, attrs={"attribute_name": "offset"}) + + mix = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Color1": noise_texture.outputs["Fac"], + "Color2": attribute.outputs["Color"], + }, + attrs={"blend_type": "MULTIPLY"}, + ) + + colorramp = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": mix}) colorramp.color_ramp.elements[0].position = 0.0 colorramp.color_ramp.elements[0].color = (0.02, 0.0091, 0.0016, 1.0) colorramp.color_ramp.elements[1].position = 1.0 colorramp.color_ramp.elements[1].color = (0.2243, 0.1341, 0.1001, 1.0) for e in colorramp.color_ramp.elements: sample_color(e.color) - #print(e.color[0], e.color[1], e.color[2]) + # print(e.color[0], e.color[1], e.color[2]) - mix_1 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': colorramp_1.outputs["Color"], 'Color1': colorramp.outputs["Color"], 'Color2': (0.0897, 0.052, 0.0149, 1.0)}) + mix_1 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": colorramp_1.outputs["Color"], + "Color1": colorramp.outputs["Color"], + "Color2": (0.0897, 0.052, 0.0149, 1.0), + }, + ) if rand: for i in range(3): - mix_1.inputs[7].default_value[i] = (colorramp.color_ramp.elements[0].color[i] + colorramp.color_ramp.elements[1].color[i]) / 2 + mix_1.inputs[7].default_value[i] = ( + colorramp.color_ramp.elements[0].color[i] + + colorramp.color_ramp.elements[1].color[i] + ) / 2 - colorramp_2 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': noise_texture.outputs["Fac"]}) + colorramp_2 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": noise_texture.outputs["Fac"]} + ) colorramp_2.color_ramp.elements[0].position = 0.0 colorramp_2.color_ramp.elements[0].color = (0.5173, 0.5173, 0.5173, 1.0) colorramp_2.color_ramp.elements[1].position = 1.0 colorramp_2.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': mix_1, 'Roughness': colorramp_2.outputs["Color"]}, - attrs={'subsurface_method': 'BURLEY'}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': principled_bsdf}) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={"Base Color": mix_1, "Roughness": colorramp_2.outputs["Color"]}, + attrs={"subsurface_method": "BURLEY"}, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf} + ) + def geo_bark(nw: NodeWrangler): # Code generated using version 2.6.4 of the node_transpiler - group_input = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketGeometry', 
'Geometry', None)]) - + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) + position = nw.new_node(Nodes.InputPosition) - + value = nw.new_node(Nodes.Value) value.outputs[0].default_value = 5.0000 - - multiply = nw.new_node(Nodes.VectorMath, input_kwargs={0: position, 1: value}, attrs={'operation': 'MULTIPLY'}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': multiply.outputs["Vector"]}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Scale': N(10, 2), 'W': U(-10, 10)}, - attrs={'noise_dimensions': '4D'}, - ) - - subtract = nw.new_node(Nodes.Math, input_kwargs={0: noise_texture.outputs["Fac"]}, attrs={'operation': 'SUBTRACT'}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: subtract, 1: 3.0000}, attrs={'operation': 'MULTIPLY'}) - - add = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz.outputs["Y"], 1: multiply_1}) - - multiply_2 = nw.new_node(Nodes.Math, input_kwargs={0: add, 1: 1.0000}, attrs={'operation': 'MULTIPLY'}) - - fract = nw.new_node(Nodes.Math, input_kwargs={0: multiply_2}, attrs={'operation': 'FRACT'}) - - subtract_1 = nw.new_node(Nodes.Math, input_kwargs={0: fract}, attrs={'operation': 'SUBTRACT'}) - - multiply_3 = nw.new_node(Nodes.Math, input_kwargs={0: subtract_1, 1: subtract_1}, attrs={'operation': 'MULTIPLY'}) - - multiply_4 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_3, 1: 4.0000}, attrs={'operation': 'MULTIPLY'}) - + + multiply = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: position, 1: value}, + attrs={"operation": "MULTIPLY"}, + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": multiply.outputs["Vector"]} + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={"Scale": N(10, 2), "W": U(-10, 10)}, + attrs={"noise_dimensions": "4D"}, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: noise_texture.outputs["Fac"]}, + attrs={"operation": "SUBTRACT"}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract, 1: 3.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: separate_xyz.outputs["Y"], 1: multiply_1} + ) + + multiply_2 = nw.new_node( + Nodes.Math, input_kwargs={0: add, 1: 1.0000}, attrs={"operation": "MULTIPLY"} + ) + + fract = nw.new_node( + Nodes.Math, input_kwargs={0: multiply_2}, attrs={"operation": "FRACT"} + ) + + subtract_1 = nw.new_node( + Nodes.Math, input_kwargs={0: fract}, attrs={"operation": "SUBTRACT"} + ) + + multiply_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract_1, 1: subtract_1}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_4 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_3, 1: 4.0000}, + attrs={"operation": "MULTIPLY"}, + ) + normal = nw.new_node(Nodes.InputNormal) - - multiply_5 = nw.new_node(Nodes.VectorMath, input_kwargs={0: multiply_4, 1: normal}, attrs={'operation': 'MULTIPLY'}) - + + multiply_5 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: multiply_4, 1: normal}, + attrs={"operation": "MULTIPLY"}, + ) + value_1 = nw.new_node(Nodes.Value) value_1.outputs[0].default_value = 0.0100 - - multiply_6 = nw.new_node(Nodes.VectorMath, + + multiply_6 = nw.new_node( + Nodes.VectorMath, input_kwargs={0: multiply_5.outputs["Vector"], 1: value_1}, - attrs={'operation': 'MULTIPLY'}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 'Offset': multiply_6.outputs["Vector"]}) - - capture_attribute = 
nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': set_position, 1: multiply_4}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': capture_attribute.outputs["Geometry"], 'Attribute': capture_attribute.outputs["Attribute"]}, - attrs={'is_active_output': True}) + attrs={"operation": "MULTIPLY"}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + "Offset": multiply_6.outputs["Vector"], + }, + ) + + capture_attribute = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={"Geometry": set_position, 1: multiply_4}, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": capture_attribute.outputs["Geometry"], + "Attribute": capture_attribute.outputs["Attribute"], + }, + attrs={"is_active_output": True}, + ) + def apply(obj, geo_kwargs=None, shader_kwargs=None, **kwargs): - surface.add_geomod(obj, geo_bark, apply=False, input_kwargs=geo_kwargs, attributes=['offset']) - surface.add_material(obj, shader_bark, reuse=False, input_kwargs=shader_kwargs) \ No newline at end of file + surface.add_geomod( + obj, geo_bark, apply=False, input_kwargs=geo_kwargs, attributes=["offset"] + ) + surface.add_material(obj, shader_bark, reuse=False, input_kwargs=shader_kwargs) diff --git a/infinigen/assets/materials/bark_birch.py b/infinigen/assets/materials/bark_birch.py index 0712bc020..218d66288 100644 --- a/infinigen/assets/materials/bark_birch.py +++ b/infinigen/assets/materials/bark_birch.py @@ -5,207 +5,374 @@ # Acknowledgement: This file draws inspiration from https://www.youtube.com/watch?v=SAbWUs1Rnxw by Sam Bowman # Code generated using version 2.1.0 of the node_transpiler -import bpy -import mathutils -from numpy.random import uniform, normal -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils + from infinigen.core import surface +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes -from .bark_random import nodegroup_apply_geo_matv2, nodegroup_shader_canonical_coord, nodegroup_canonical_coord +from .bark_random import ( + nodegroup_apply_geo_matv2, + nodegroup_canonical_coord, + nodegroup_shader_canonical_coord, +) -@node_utils.to_nodegroup('nodegroup_birch_mat_helper', singleton=False, type='ShaderNodeTree') + +@node_utils.to_nodegroup( + "nodegroup_birch_mat_helper", singleton=False, type="ShaderNodeTree" +) def nodegroup_birch_mat_helper(nw): - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'Vector', (0.0, 0.0, 0.0))]) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': group_input.outputs["Vector"], 'Scale': 50.0, 'Detail': 10.0}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': noise_texture.outputs["Fac"], 1: 0.3, 2: 0.4, 3: 1.0, 4: 0.0}) - - musgrave_texture = nw.new_node(Nodes.MusgraveTexture, - input_kwargs={'Vector': group_input.outputs["Vector"], 'Scale': 2.0, 'Detail': 10.0, 'Dimension': 0.6, 'Lacunarity': 3.0}) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': musgrave_texture, 1: 0.3, 2: 0.5}) - - power = nw.new_node(Nodes.Math, + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketVector", "Vector", (0.0, 0.0, 0.0))] + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": group_input.outputs["Vector"], + "Scale": 
50.0, + "Detail": 10.0, + }, + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": noise_texture.outputs["Fac"], + 1: 0.3, + 2: 0.4, + 3: 1.0, + 4: 0.0, + }, + ) + + musgrave_texture = nw.new_node( + Nodes.MusgraveTexture, + input_kwargs={ + "Vector": group_input.outputs["Vector"], + "Scale": 2.0, + "Detail": 10.0, + "Dimension": 0.6, + "Lacunarity": 3.0, + }, + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": musgrave_texture, 1: 0.3, 2: 0.5} + ) + + power = nw.new_node( + Nodes.Math, input_kwargs={0: map_range_1.outputs["Result"], 1: 0.2}, - attrs={'operation': 'POWER', 'use_clamp': True}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': group_input.outputs["Vector"]}) - - multiply = nw.new_node(Nodes.Math, + attrs={"operation": "POWER", "use_clamp": True}, + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": group_input.outputs["Vector"]} + ) + + multiply = nw.new_node( + Nodes.Math, input_kwargs={0: separate_xyz.outputs["Z"], 1: 10.0}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': separate_xyz.outputs["X"], 'Y': separate_xyz.outputs["Y"], 'Z': multiply}) - - noise_texture_1 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': combine_xyz, 'Scale': 2.0}) - - separate_rgb = nw.new_node(Nodes.SeparateRGB, - input_kwargs={'Image': noise_texture_1.outputs["Color"]}) - - map_range_2 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': separate_rgb.outputs["B"], 1: 0.341, 2: 0.377}) - - map_range_3 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': separate_rgb.outputs["R"], 1: 0.341, 2: 0.377}) - - map_range_4 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': separate_rgb.outputs["G"], 1: 0.341, 2: 0.377}) - - multiply_add = nw.new_node(Nodes.Math, - input_kwargs={0: map_range_3.outputs["Result"], 1: map_range_4.outputs["Result"]}, - attrs={'operation': 'MULTIPLY_ADD'}) - - map_range_5 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': multiply_add, 2: 2.0}) - - colorramp = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': map_range_5.outputs["Result"]}) + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": separate_xyz.outputs["X"], + "Y": separate_xyz.outputs["Y"], + "Z": multiply, + }, + ) + + noise_texture_1 = nw.new_node( + Nodes.NoiseTexture, input_kwargs={"Vector": combine_xyz, "Scale": 2.0} + ) + + separate_rgb = nw.new_node( + Nodes.SeparateRGB, input_kwargs={"Image": noise_texture_1.outputs["Color"]} + ) + + map_range_2 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": separate_rgb.outputs["B"], 1: 0.341, 2: 0.377}, + ) + + map_range_3 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": separate_rgb.outputs["R"], 1: 0.341, 2: 0.377}, + ) + + map_range_4 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": separate_rgb.outputs["G"], 1: 0.341, 2: 0.377}, + ) + + multiply_add = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: map_range_3.outputs["Result"], + 1: map_range_4.outputs["Result"], + }, + attrs={"operation": "MULTIPLY_ADD"}, + ) + + map_range_5 = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": multiply_add, 2: 2.0} + ) + + colorramp = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": map_range_5.outputs["Result"]} + ) colorramp.color_ramp.elements[0].position = 0.5052 colorramp.color_ramp.elements[0].color = (0.0252, 0.0395, 0.0176, 1.0) colorramp.color_ramp.elements[1].position = 0.8015 
colorramp.color_ramp.elements[1].color = (0.3095, 0.4072, 0.3515, 1.0) - - mix = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': map_range_2.outputs["Result"], 'Color1': (0.0823, 0.1095, 0.0595, 1.0), 'Color2': colorramp.outputs["Color"]}) - - mix_1 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': power, 'Color1': mix, 'Color2': (0.0232, 0.0144, 0.0021, 1.0)}) - - mix_2 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': map_range.outputs["Result"], 'Color1': mix_1, 'Color2': (0.0437, 0.0482, 0.0222, 1.0)}) - - map_range_6 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': power, 3: 0.4}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Color': mix_2, 'Result': map_range_6.outputs["Result"]}) - -@node_utils.to_nodegroup('nodegroup_birch_geo', singleton=False, type='GeometryNodeTree') + + mix = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": map_range_2.outputs["Result"], + "Color1": (0.0823, 0.1095, 0.0595, 1.0), + "Color2": colorramp.outputs["Color"], + }, + ) + + mix_1 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": power, + "Color1": mix, + "Color2": (0.0232, 0.0144, 0.0021, 1.0), + }, + ) + + mix_2 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": map_range.outputs["Result"], + "Color1": mix_1, + "Color2": (0.0437, 0.0482, 0.0222, 1.0), + }, + ) + + map_range_6 = nw.new_node(Nodes.MapRange, input_kwargs={"Value": power, 3: 0.4}) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Color": mix_2, "Result": map_range_6.outputs["Result"]}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_birch_geo", singleton=False, type="GeometryNodeTree" +) def nodegroup_birch_geo(nw): - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'Position', (0.0, 0.0, 0.0))]) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': group_input.outputs["Position"]}) - - multiply = nw.new_node(Nodes.Math, + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[("NodeSocketVector", "Position", (0.0, 0.0, 0.0))], + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": group_input.outputs["Position"]} + ) + + multiply = nw.new_node( + Nodes.Math, input_kwargs={0: separate_xyz.outputs["Z"], 1: 10.0}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': separate_xyz.outputs["X"], 'Y': separate_xyz.outputs["Y"], 'Z': multiply}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': combine_xyz, 'Scale': 2.0}) - - separate_rgb = nw.new_node(Nodes.SeparateRGB, - input_kwargs={'Image': noise_texture.outputs["Color"]}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': separate_rgb.outputs["G"], 1: 0.341, 2: 0.377, 3: 1.0, 4: 0.0}) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': separate_rgb.outputs["R"], 1: 0.341, 2: 0.377, 3: 1.0, 4: 0.0}) - - multiply_add = nw.new_node(Nodes.Math, - input_kwargs={0: map_range.outputs["Result"], 1: 0.25, 2: map_range_1.outputs["Result"]}, - attrs={'operation': 'MULTIPLY_ADD'}) - - multiply_1 = nw.new_node(Nodes.Math, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": separate_xyz.outputs["X"], + "Y": separate_xyz.outputs["Y"], + "Z": multiply, + }, + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, input_kwargs={"Vector": combine_xyz, "Scale": 2.0} + ) + + separate_rgb = nw.new_node( + Nodes.SeparateRGB, input_kwargs={"Image": noise_texture.outputs["Color"]} + ) 
+ + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": separate_rgb.outputs["G"], + 1: 0.341, + 2: 0.377, + 3: 1.0, + 4: 0.0, + }, + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": separate_rgb.outputs["R"], + 1: 0.341, + 2: 0.377, + 3: 1.0, + 4: 0.0, + }, + ) + + multiply_add = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: map_range.outputs["Result"], + 1: 0.25, + 2: map_range_1.outputs["Result"], + }, + attrs={"operation": "MULTIPLY_ADD"}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, input_kwargs={0: multiply_add, 1: 0.01}, - attrs={'operation': 'MULTIPLY'}) - - noise_texture_1 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': group_input.outputs["Position"], 'Scale': 10.0, 'Detail': 15.0}) - - multiply_2 = nw.new_node(Nodes.Math, + attrs={"operation": "MULTIPLY"}, + ) + + noise_texture_1 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": group_input.outputs["Position"], + "Scale": 10.0, + "Detail": 15.0, + }, + ) + + multiply_2 = nw.new_node( + Nodes.Math, input_kwargs={0: noise_texture_1.outputs["Fac"], 1: 0.08}, - attrs={'operation': 'MULTIPLY'}) - - musgrave_texture = nw.new_node(Nodes.MusgraveTexture, - input_kwargs={'Vector': group_input.outputs["Position"], 'Scale': 2.0, 'Detail': 10.0, 'Dimension': 0.6, 'Lacunarity': 3.0}) - - map_range_2 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': musgrave_texture, 1: 0.3, 2: 0.5}) - - power = nw.new_node(Nodes.Math, + attrs={"operation": "MULTIPLY"}, + ) + + musgrave_texture = nw.new_node( + Nodes.MusgraveTexture, + input_kwargs={ + "Vector": group_input.outputs["Position"], + "Scale": 2.0, + "Detail": 10.0, + "Dimension": 0.6, + "Lacunarity": 3.0, + }, + ) + + map_range_2 = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": musgrave_texture, 1: 0.3, 2: 0.5} + ) + + power = nw.new_node( + Nodes.Math, input_kwargs={0: map_range_2.outputs["Result"], 1: 0.2}, - attrs={'operation': 'POWER', 'use_clamp': True}) - - multiply_3 = nw.new_node(Nodes.Math, - input_kwargs={0: power, 1: 0.03}, - attrs={'operation': 'MULTIPLY'}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_2, 1: multiply_3}) - - add_1 = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_1, 1: add}) - - multiply_4 = nw.new_node(Nodes.Math, - input_kwargs={0: add_1, 1: 5.0}, - attrs={'operation': 'MULTIPLY'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Factor': multiply_4}) + attrs={"operation": "POWER", "use_clamp": True}, + ) + + multiply_3 = nw.new_node( + Nodes.Math, input_kwargs={0: power, 1: 0.03}, attrs={"operation": "MULTIPLY"} + ) + + add = nw.new_node(Nodes.Math, input_kwargs={0: multiply_2, 1: multiply_3}) + + add_1 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_1, 1: add}) + + multiply_4 = nw.new_node( + Nodes.Math, input_kwargs={0: add_1, 1: 5.0}, attrs={"operation": "MULTIPLY"} + ) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Factor": multiply_4}) + def shader_birch_mat(nw, selection=None): - attribute = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'initial_position'}) - - group = nw.new_node(nodegroup_shader_canonical_coord().name, - input_kwargs={'Vector': attribute.outputs["Vector"]}) - - group_1 = nw.new_node(nodegroup_birch_mat_helper().name, - input_kwargs={'Vector': group}) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': group_1.outputs["Color"], 'Roughness': group_1.outputs["Result"]}) - - material_output = nw.new_node(Nodes.MaterialOutput, - 
input_kwargs={'Surface': principled_bsdf}) + attribute = nw.new_node( + Nodes.Attribute, attrs={"attribute_name": "initial_position"} + ) + + group = nw.new_node( + nodegroup_shader_canonical_coord().name, + input_kwargs={"Vector": attribute.outputs["Vector"]}, + ) + + group_1 = nw.new_node( + nodegroup_birch_mat_helper().name, input_kwargs={"Vector": group} + ) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": group_1.outputs["Color"], + "Roughness": group_1.outputs["Result"], + }, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf} + ) + def geo_bark_birch(nw, selection=None): - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None),]) + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ], + ) + + parent_loc = nw.new_node( + Nodes.NamedAttribute, + ["parent_skeleton_loc"], + attrs={"data_type": "FLOAT_VECTOR"}, + ) + skeleton_loc = nw.new_node( + Nodes.NamedAttribute, ["skeleton_loc"], attrs={"data_type": "FLOAT_VECTOR"} + ) - parent_loc = nw.new_node(Nodes.NamedAttribute, ['parent_skeleton_loc'], attrs={'data_type': 'FLOAT_VECTOR'}) - skeleton_loc = nw.new_node(Nodes.NamedAttribute, ['skeleton_loc'], attrs={'data_type': 'FLOAT_VECTOR'}) - position = nw.new_node(Nodes.InputPosition) - - capture_attribute = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 1: position}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - canonicalcoord = nw.new_node(nodegroup_canonical_coord().name, - input_kwargs={'Self Location': skeleton_loc, 'Parent Location': parent_loc}) - - birchgeo = nw.new_node(nodegroup_birch_geo().name, - input_kwargs={'Position': canonicalcoord}) - - group = nw.new_node(nodegroup_apply_geo_matv2().name, - input_kwargs={ - 'Geometry': capture_attribute.outputs["Geometry"], - 'Displacement Amount': nw.multiply(birchgeo, surface.eval_argument(nw, selection)), - 'Displacement Scale': 0.05, - 'Material': surface.shaderfunc_to_material(shader_birch_mat) - }) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': group, 'initial_position': capture_attribute.outputs["Attribute"]}) + capture_attribute = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={"Geometry": group_input.outputs["Geometry"], 1: position}, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + canonicalcoord = nw.new_node( + nodegroup_canonical_coord().name, + input_kwargs={"Self Location": skeleton_loc, "Parent Location": parent_loc}, + ) + + birchgeo = nw.new_node( + nodegroup_birch_geo().name, input_kwargs={"Position": canonicalcoord} + ) + + group = nw.new_node( + nodegroup_apply_geo_matv2().name, + input_kwargs={ + "Geometry": capture_attribute.outputs["Geometry"], + "Displacement Amount": nw.multiply( + birchgeo, surface.eval_argument(nw, selection) + ), + "Displacement Scale": 0.05, + "Material": surface.shaderfunc_to_material(shader_birch_mat), + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": group, + "initial_position": capture_attribute.outputs["Attribute"], + }, + ) def apply(obj, selection=None, **kwargs): - surface.add_geomod(obj, geo_bark_birch, selection=selection, attributes=['initial_position']) + surface.add_geomod( + obj, geo_bark_birch, selection=selection, attributes=["initial_position"] + ) surface.add_material(obj, shader_birch_mat) diff --git a/infinigen/assets/materials/bark_random.py 
b/infinigen/assets/materials/bark_random.py index 72af3d2aa..615ae157a 100644 --- a/infinigen/assets/materials/bark_random.py +++ b/infinigen/assets/materials/bark_random.py @@ -6,544 +6,1023 @@ # Code generated using version 2.1.0 of the node_transpiler from typing import Tuple -import bpy -import mathutils -from numpy.random import uniform, normal -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core import surface + import numpy as np +from infinigen.core import surface +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes from infinigen.core.util.math import FixedSeed -@node_utils.to_nodegroup('nodegroup_calc_radius', singleton=True, type='GeometryNodeTree') + +@node_utils.to_nodegroup( + "nodegroup_calc_radius", singleton=True, type="GeometryNodeTree" +) def nodegroup_calc_radius(nw): - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'Self Location', (0.0, 0.0, 0.0)), - ('NodeSocketVector', 'Parent Location', (0.0, 0.0, 0.0))]) - - subtract = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["Self Location"], 1: group_input.outputs["Parent Location"]}, - attrs={'operation': 'SUBTRACT'}) - - normalize = nw.new_node(Nodes.VectorMath, + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVector", "Self Location", (0.0, 0.0, 0.0)), + ("NodeSocketVector", "Parent Location", (0.0, 0.0, 0.0)), + ], + ) + + subtract = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: group_input.outputs["Self Location"], + 1: group_input.outputs["Parent Location"], + }, + attrs={"operation": "SUBTRACT"}, + ) + + normalize = nw.new_node( + Nodes.VectorMath, input_kwargs={0: subtract.outputs["Vector"]}, - attrs={'operation': 'NORMALIZE'}) - + attrs={"operation": "NORMALIZE"}, + ) + position = nw.new_node(Nodes.InputPosition) - - subtract_1 = nw.new_node(Nodes.VectorMath, + + subtract_1 = nw.new_node( + Nodes.VectorMath, input_kwargs={0: position, 1: group_input.outputs["Parent Location"]}, - attrs={'operation': 'SUBTRACT'}) - - dot_product = nw.new_node(Nodes.VectorMath, + attrs={"operation": "SUBTRACT"}, + ) + + dot_product = nw.new_node( + Nodes.VectorMath, input_kwargs={0: normalize.outputs["Vector"], 1: subtract_1.outputs["Vector"]}, - attrs={'operation': 'DOT_PRODUCT'}) - - multiply = nw.new_node(Nodes.VectorMath, + attrs={"operation": "DOT_PRODUCT"}, + ) + + multiply = nw.new_node( + Nodes.VectorMath, input_kwargs={0: normalize.outputs["Vector"], 1: dot_product.outputs["Value"]}, - attrs={'operation': 'MULTIPLY'}) - - add = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["Parent Location"], 1: multiply.outputs["Vector"]}) - - subtract_2 = nw.new_node(Nodes.VectorMath, + attrs={"operation": "MULTIPLY"}, + ) + + add = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: group_input.outputs["Parent Location"], + 1: multiply.outputs["Vector"], + }, + ) + + subtract_2 = nw.new_node( + Nodes.VectorMath, input_kwargs={0: add.outputs["Vector"], 1: position}, - attrs={'operation': 'SUBTRACT'}) - - length = nw.new_node(Nodes.VectorMath, + attrs={"operation": "SUBTRACT"}, + ) + + length = nw.new_node( + Nodes.VectorMath, input_kwargs={0: subtract_2.outputs["Vector"]}, - attrs={'operation': 'LENGTH'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Radius': length.outputs["Value"]}) + attrs={"operation": "LENGTH"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, 
input_kwargs={"Radius": length.outputs["Value"]} + ) -@node_utils.to_nodegroup('nodegroup_shader_canonical_coord', singleton=True, type='ShaderNodeTree') + +@node_utils.to_nodegroup( + "nodegroup_shader_canonical_coord", singleton=True, type="ShaderNodeTree" +) def nodegroup_shader_canonical_coord(nw): - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'scale', 2.0), - ('NodeSocketVector', 'Vector', (0.0, 0.0, 0.0))]) - - attribute = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'parent_skeleton_loc'}) - - attribute_1 = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'skeleton_loc'}) - - subtract = nw.new_node(Nodes.VectorMath, + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "scale", 2.0), + ("NodeSocketVector", "Vector", (0.0, 0.0, 0.0)), + ], + ) + + attribute = nw.new_node( + Nodes.Attribute, attrs={"attribute_name": "parent_skeleton_loc"} + ) + + attribute_1 = nw.new_node(Nodes.Attribute, attrs={"attribute_name": "skeleton_loc"}) + + subtract = nw.new_node( + Nodes.VectorMath, input_kwargs={0: attribute_1.outputs["Vector"], 1: attribute.outputs["Vector"]}, - attrs={'operation': 'SUBTRACT'}) - - cross_product = nw.new_node(Nodes.VectorMath, + attrs={"operation": "SUBTRACT"}, + ) + + cross_product = nw.new_node( + Nodes.VectorMath, input_kwargs={0: subtract.outputs["Vector"], 1: (0.0, 0.0, 1.0)}, - attrs={'operation': 'CROSS_PRODUCT'}) - - dot_product = nw.new_node(Nodes.VectorMath, + attrs={"operation": "CROSS_PRODUCT"}, + ) + + dot_product = nw.new_node( + Nodes.VectorMath, input_kwargs={0: subtract.outputs["Vector"], 1: (0.0, 0.0, 1.0)}, - attrs={'operation': 'DOT_PRODUCT'}) - - length = nw.new_node(Nodes.VectorMath, + attrs={"operation": "DOT_PRODUCT"}, + ) + + length = nw.new_node( + Nodes.VectorMath, input_kwargs={0: subtract.outputs["Vector"]}, - attrs={'operation': 'LENGTH'}) - - divide = nw.new_node(Nodes.Math, + attrs={"operation": "LENGTH"}, + ) + + divide = nw.new_node( + Nodes.Math, input_kwargs={0: dot_product.outputs["Value"], 1: length.outputs["Value"]}, - attrs={'operation': 'DIVIDE'}) - - arccosine = nw.new_node(Nodes.Math, - input_kwargs={0: divide}, - attrs={'operation': 'ARCCOSINE'}) - - vector_rotate = nw.new_node(Nodes.VectorRotate, - input_kwargs={'Vector': group_input.outputs["Vector"], 'Center': attribute.outputs["Vector"], 'Axis': cross_product.outputs["Vector"], 'Angle': arccosine}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': attribute.outputs["Vector"]}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': separate_xyz.outputs["X"], 'Y': separate_xyz.outputs["Y"]}) - - subtract_1 = nw.new_node(Nodes.VectorMath, + attrs={"operation": "DIVIDE"}, + ) + + arccosine = nw.new_node( + Nodes.Math, input_kwargs={0: divide}, attrs={"operation": "ARCCOSINE"} + ) + + vector_rotate = nw.new_node( + Nodes.VectorRotate, + input_kwargs={ + "Vector": group_input.outputs["Vector"], + "Center": attribute.outputs["Vector"], + "Axis": cross_product.outputs["Vector"], + "Angle": arccosine, + }, + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": attribute.outputs["Vector"]} + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": separate_xyz.outputs["X"], "Y": separate_xyz.outputs["Y"]}, + ) + + subtract_1 = nw.new_node( + Nodes.VectorMath, input_kwargs={0: vector_rotate, 1: combine_xyz}, - attrs={'operation': 'SUBTRACT'}) - - multiply = nw.new_node(Nodes.VectorMath, + attrs={"operation": 
"SUBTRACT"}, + ) + + multiply = nw.new_node( + Nodes.VectorMath, input_kwargs={0: subtract_1.outputs["Vector"], 1: group_input.outputs["scale"]}, - attrs={'operation': 'MULTIPLY'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Coordinate': multiply.outputs["Vector"]}) + attrs={"operation": "MULTIPLY"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Coordinate": multiply.outputs["Vector"]} + ) -@node_utils.to_nodegroup('nodegroup_inject_z_noise_and_scale_001', singleton=True, type='GeometryNodeTree') + +@node_utils.to_nodegroup( + "nodegroup_inject_z_noise_and_scale_001", singleton=True, type="GeometryNodeTree" +) def nodegroup_inject_z_noise_and_scale_001(nw): - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'Coordinate', (0.0, 0.0, 0.0)), - ('NodeSocketFloat', 'Noise Scale', 2.0), - ('NodeSocketFloat', 'Noise Amount', 0.5), - ('NodeSocketFloat', 'Z Multiplier', 0.5)]) - - separate_xyz_6 = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': group_input.outputs["Coordinate"]}) - - noise_texture_2 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': group_input.outputs["Coordinate"], 'Scale': group_input.outputs["Noise Scale"]}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: noise_texture_2.outputs["Fac"], 1: group_input.outputs["Noise Amount"]}, - attrs={'operation': 'MULTIPLY'}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_6.outputs["Z"], 1: multiply}) - - multiply_1 = nw.new_node(Nodes.Math, + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVector", "Coordinate", (0.0, 0.0, 0.0)), + ("NodeSocketFloat", "Noise Scale", 2.0), + ("NodeSocketFloat", "Noise Amount", 0.5), + ("NodeSocketFloat", "Z Multiplier", 0.5), + ], + ) + + separate_xyz_6 = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": group_input.outputs["Coordinate"]} + ) + + noise_texture_2 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": group_input.outputs["Coordinate"], + "Scale": group_input.outputs["Noise Scale"], + }, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: noise_texture_2.outputs["Fac"], + 1: group_input.outputs["Noise Amount"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: separate_xyz_6.outputs["Z"], 1: multiply} + ) + + multiply_1 = nw.new_node( + Nodes.Math, input_kwargs={0: add, 1: group_input.outputs["Z Multiplier"]}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz_6 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': separate_xyz_6.outputs["X"], 'Y': separate_xyz_6.outputs["Y"], 'Z': multiply_1}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Coordinate': combine_xyz_6}) - -@node_utils.to_nodegroup('nodegroup_primary_voronoi', singleton=True, type='GeometryNodeTree') + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_6 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": separate_xyz_6.outputs["X"], + "Y": separate_xyz_6.outputs["Y"], + "Z": multiply_1, + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Coordinate": combine_xyz_6} + ) + + +@node_utils.to_nodegroup( + "nodegroup_primary_voronoi", singleton=True, type="GeometryNodeTree" +) def nodegroup_primary_voronoi(nw): - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'Coordinate', (0.0, 0.0, 0.0)), - ('NodeSocketFloat', 'Texture Scale', 20.0), - ('NodeSocketFloatFactor', 'Randomness', 1.0)]) - - voronoi_texture_3 = 
nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': group_input.outputs["Coordinate"], 'Scale': group_input.outputs["Texture Scale"], 'Randomness': group_input.outputs["Randomness"]}, - attrs={'feature': 'DISTANCE_TO_EDGE'}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': voronoi_texture_3.outputs["Distance"], 2: 0.1}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Displacement': map_range.outputs["Result"]}) - -@node_utils.to_nodegroup('nodegroup_mix', singleton=True, type='GeometryNodeTree') + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVector", "Coordinate", (0.0, 0.0, 0.0)), + ("NodeSocketFloat", "Texture Scale", 20.0), + ("NodeSocketFloatFactor", "Randomness", 1.0), + ], + ) + + voronoi_texture_3 = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={ + "Vector": group_input.outputs["Coordinate"], + "Scale": group_input.outputs["Texture Scale"], + "Randomness": group_input.outputs["Randomness"], + }, + attrs={"feature": "DISTANCE_TO_EDGE"}, + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": voronoi_texture_3.outputs["Distance"], 2: 0.1}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Displacement": map_range.outputs["Result"]} + ) + + +@node_utils.to_nodegroup("nodegroup_mix", singleton=True, type="GeometryNodeTree") def nodegroup_mix(nw): - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Input 1', 0.5), - ('NodeSocketFloat', 'Input 2', 0.5), - ('NodeSocketFloat', 'Mix Weight', 0.5)]) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Input 1"], 1: group_input.outputs["Mix Weight"]}, - attrs={'operation': 'MULTIPLY'}) - - subtract = nw.new_node(Nodes.Math, + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "Input 1", 0.5), + ("NodeSocketFloat", "Input 2", 0.5), + ("NodeSocketFloat", "Mix Weight", 0.5), + ], + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["Input 1"], + 1: group_input.outputs["Mix Weight"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + subtract = nw.new_node( + Nodes.Math, input_kwargs={0: 1.0, 1: group_input.outputs["Mix Weight"]}, - attrs={'operation': 'SUBTRACT'}) - - multiply_1 = nw.new_node(Nodes.Math, + attrs={"operation": "SUBTRACT"}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["Input 2"], 1: subtract}, - attrs={'operation': 'MULTIPLY'}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: multiply, 1: multiply_1}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Value': add}) - -@node_utils.to_nodegroup('nodegroup_adjust_v', singleton=True, type='ShaderNodeTree') + attrs={"operation": "MULTIPLY"}, + ) + + add = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: multiply_1}) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Value": add}) + + +@node_utils.to_nodegroup("nodegroup_adjust_v", singleton=True, type="ShaderNodeTree") def nodegroup_adjust_v(nw): - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketColor', 'Color', (0.8, 0.8, 0.8, 1.0)), - ('NodeSocketFloat', 'V Shift', 0.5)]) - - separate_hsv = nw.new_node('ShaderNodeSeparateHSV', - input_kwargs={'Color': group_input.outputs["Color"]}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: separate_hsv.outputs["V"], 1: group_input.outputs["V Shift"]}) - - combine_hsv = nw.new_node(Nodes.CombineHSV, - input_kwargs={'H': 
separate_hsv.outputs["H"], 'S': separate_hsv.outputs["S"], 'V': add}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Color': combine_hsv}) - -@node_utils.to_nodegroup('nodegroup_inject_z_noise_and_scale', singleton=True, type='ShaderNodeTree') + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketColor", "Color", (0.8, 0.8, 0.8, 1.0)), + ("NodeSocketFloat", "V Shift", 0.5), + ], + ) + + separate_hsv = nw.new_node( + "ShaderNodeSeparateHSV", input_kwargs={"Color": group_input.outputs["Color"]} + ) + + add = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_hsv.outputs["V"], 1: group_input.outputs["V Shift"]}, + ) + + combine_hsv = nw.new_node( + Nodes.CombineHSV, + input_kwargs={ + "H": separate_hsv.outputs["H"], + "S": separate_hsv.outputs["S"], + "V": add, + }, + ) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Color": combine_hsv}) + + +@node_utils.to_nodegroup( + "nodegroup_inject_z_noise_and_scale", singleton=True, type="ShaderNodeTree" +) def nodegroup_inject_z_noise_and_scale(nw): - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'Coordinate', (0.0, 0.0, 0.0)), - ('NodeSocketFloat', 'Noise Scale', 5.0), - ('NodeSocketFloat', 'Noise Amount', 0.0), - ('NodeSocketFloat', 'Z Multiplier', 0.5)]) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': group_input.outputs["Coordinate"]}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': group_input.outputs["Coordinate"], 'Scale': group_input.outputs["Noise Scale"]}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: noise_texture.outputs["Fac"], 1: group_input.outputs["Noise Amount"]}, - attrs={'operation': 'MULTIPLY'}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz.outputs["Z"], 1: multiply}) - - multiply_1 = nw.new_node(Nodes.Math, + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVector", "Coordinate", (0.0, 0.0, 0.0)), + ("NodeSocketFloat", "Noise Scale", 5.0), + ("NodeSocketFloat", "Noise Amount", 0.0), + ("NodeSocketFloat", "Z Multiplier", 0.5), + ], + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": group_input.outputs["Coordinate"]} + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": group_input.outputs["Coordinate"], + "Scale": group_input.outputs["Noise Scale"], + }, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: noise_texture.outputs["Fac"], + 1: group_input.outputs["Noise Amount"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: separate_xyz.outputs["Z"], 1: multiply} + ) + + multiply_1 = nw.new_node( + Nodes.Math, input_kwargs={0: add, 1: group_input.outputs["Z Multiplier"]}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': separate_xyz.outputs["X"], 'Y': separate_xyz.outputs["Y"], 'Z': multiply_1}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Coordiante': combine_xyz}) - -@node_utils.to_nodegroup('nodegroup_voronoi', singleton=True, type='ShaderNodeTree') + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": separate_xyz.outputs["X"], + "Y": separate_xyz.outputs["Y"], + "Z": multiply_1, + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Coordiante": combine_xyz} + ) + + +@node_utils.to_nodegroup("nodegroup_voronoi", 
singleton=True, type="ShaderNodeTree") def nodegroup_voronoi(nw): - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'Coordinate', (0.0, 0.0, 0.0)), - ('NodeSocketFloat', 'Texture Scale', 5.0), - ('NodeSocketFloatFactor', 'Randomness', 1.0)]) - - voronoi_texture = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': group_input.outputs["Coordinate"], 'Scale': group_input.outputs["Texture Scale"], 'Randomness': group_input.outputs["Randomness"]}, - attrs={'feature': 'DISTANCE_TO_EDGE'}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': voronoi_texture.outputs["Distance"], 2: 0.5}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Displacement': map_range.outputs["Result"]}) - -@node_utils.to_nodegroup('nodegroup_canonical_coord', singleton=True, type='GeometryNodeTree') + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVector", "Coordinate", (0.0, 0.0, 0.0)), + ("NodeSocketFloat", "Texture Scale", 5.0), + ("NodeSocketFloatFactor", "Randomness", 1.0), + ], + ) + + voronoi_texture = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={ + "Vector": group_input.outputs["Coordinate"], + "Scale": group_input.outputs["Texture Scale"], + "Randomness": group_input.outputs["Randomness"], + }, + attrs={"feature": "DISTANCE_TO_EDGE"}, + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": voronoi_texture.outputs["Distance"], 2: 0.5}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Displacement": map_range.outputs["Result"]} + ) + + +@node_utils.to_nodegroup( + "nodegroup_canonical_coord", singleton=True, type="GeometryNodeTree" +) def nodegroup_canonical_coord(nw): position = nw.new_node(Nodes.InputPosition) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'Self Location', (0.0, 0.0, 0.0)), - ('NodeSocketVector', 'Parent Location', (0.0, 0.0, 0.0)), - ('NodeSocketFloat', 'scale', 2.0)]) - - subtract = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["Self Location"], 1: group_input.outputs["Parent Location"]}, - attrs={'operation': 'SUBTRACT'}) - - cross_product = nw.new_node(Nodes.VectorMath, + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVector", "Self Location", (0.0, 0.0, 0.0)), + ("NodeSocketVector", "Parent Location", (0.0, 0.0, 0.0)), + ("NodeSocketFloat", "scale", 2.0), + ], + ) + + subtract = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: group_input.outputs["Self Location"], + 1: group_input.outputs["Parent Location"], + }, + attrs={"operation": "SUBTRACT"}, + ) + + cross_product = nw.new_node( + Nodes.VectorMath, input_kwargs={0: subtract.outputs["Vector"], 1: (0.0, 0.0, 1.0)}, - attrs={'operation': 'CROSS_PRODUCT'}) - - dot_product = nw.new_node(Nodes.VectorMath, + attrs={"operation": "CROSS_PRODUCT"}, + ) + + dot_product = nw.new_node( + Nodes.VectorMath, input_kwargs={0: subtract.outputs["Vector"], 1: (0.0, 0.0, 1.0)}, - attrs={'operation': 'DOT_PRODUCT'}) - - length = nw.new_node(Nodes.VectorMath, + attrs={"operation": "DOT_PRODUCT"}, + ) + + length = nw.new_node( + Nodes.VectorMath, input_kwargs={0: subtract.outputs["Vector"]}, - attrs={'operation': 'LENGTH'}) - - divide = nw.new_node(Nodes.Math, + attrs={"operation": "LENGTH"}, + ) + + divide = nw.new_node( + Nodes.Math, input_kwargs={0: dot_product.outputs["Value"], 1: length.outputs["Value"]}, - attrs={'operation': 'DIVIDE'}) - - arccosine = nw.new_node(Nodes.Math, - input_kwargs={0: 
divide}, - attrs={'operation': 'ARCCOSINE'}) - - vector_rotate = nw.new_node(Nodes.VectorRotate, - input_kwargs={'Vector': position, 'Center': group_input.outputs["Parent Location"], 'Axis': cross_product.outputs["Vector"], 'Angle': arccosine}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': group_input.outputs["Parent Location"]}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': separate_xyz.outputs["X"], 'Y': separate_xyz.outputs["Y"]}) - - subtract_1 = nw.new_node(Nodes.VectorMath, + attrs={"operation": "DIVIDE"}, + ) + + arccosine = nw.new_node( + Nodes.Math, input_kwargs={0: divide}, attrs={"operation": "ARCCOSINE"} + ) + + vector_rotate = nw.new_node( + Nodes.VectorRotate, + input_kwargs={ + "Vector": position, + "Center": group_input.outputs["Parent Location"], + "Axis": cross_product.outputs["Vector"], + "Angle": arccosine, + }, + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, + input_kwargs={"Vector": group_input.outputs["Parent Location"]}, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": separate_xyz.outputs["X"], "Y": separate_xyz.outputs["Y"]}, + ) + + subtract_1 = nw.new_node( + Nodes.VectorMath, input_kwargs={0: vector_rotate, 1: combine_xyz}, - attrs={'operation': 'SUBTRACT'}) - - multiply = nw.new_node(Nodes.VectorMath, + attrs={"operation": "SUBTRACT"}, + ) + + multiply = nw.new_node( + Nodes.VectorMath, input_kwargs={0: subtract_1.outputs["Vector"], 1: group_input.outputs["scale"]}, - attrs={'operation': 'MULTIPLY'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Coordinate': multiply.outputs["Vector"]}) + attrs={"operation": "MULTIPLY"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Coordinate": multiply.outputs["Vector"]} + ) + -@node_utils.to_nodegroup('nodegroup_random_bark_geo', singleton=True, type='GeometryNodeTree') +@node_utils.to_nodegroup( + "nodegroup_random_bark_geo", singleton=True, type="GeometryNodeTree" +) def nodegroup_random_bark_geo(nw): - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'Position', (0.0, 0.0, 0.0)), - ('NodeSocketFloat', 'Noise Scale', 2.0), - ('NodeSocketFloat', 'Noise Amount', 1.0), - ('NodeSocketFloat', 'Texture Scale', 30.0), - ('NodeSocketFloatFactor', 'Randomness', 1.0), - ('NodeSocketFloat', 'Value', 0.05), - ('NodeSocketFloat', 'Mix Weight', 0.1), - ('NodeSocketFloat', 'Scale', 15.0), - ('NodeSocketFloat', 'Detail', 16.0), - ('NodeSocketFloat', 'Value_1', 2.0), - ('NodeSocketFloat', 'Z Multiplier', 0.5), - ('NodeSocketFloat', 'Texture Scale S', 30.0)]) - - group_3 = nw.new_node(nodegroup_primary_voronoi().name, - input_kwargs={'Coordinate': group_input.outputs["Position"], 'Texture Scale': group_input.outputs['Texture Scale S']}) - - group = nw.new_node(nodegroup_inject_z_noise_and_scale_001().name, - input_kwargs={'Coordinate': group_input.outputs["Position"], 'Noise Scale': group_input.outputs["Noise Scale"], 'Noise Amount': group_input.outputs["Noise Amount"], 'Z Multiplier': group_input.outputs["Z Multiplier"]}) - - group_2 = nw.new_node(nodegroup_primary_voronoi().name, - input_kwargs={'Coordinate': group, 'Texture Scale': group_input.outputs["Texture Scale"], 'Randomness': group_input.outputs["Randomness"]}) - - group_5 = nw.new_node(nodegroup_mix().name, - input_kwargs={'Input 1': group_3, 'Input 2': group_2, 'Mix Weight': group_input.outputs["Mix Weight"]}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': 
group_input.outputs["Position"], 'Scale': group_input.outputs["Value_1"], 'Detail': group_input.outputs["Detail"]}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: noise_texture.outputs["Fac"], 1: group_input.outputs['Noise Scale']}, - attrs={'operation': 'MULTIPLY'}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: group_5, 1: multiply}) - - multiply_1 = nw.new_node(Nodes.Math, + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVector", "Position", (0.0, 0.0, 0.0)), + ("NodeSocketFloat", "Noise Scale", 2.0), + ("NodeSocketFloat", "Noise Amount", 1.0), + ("NodeSocketFloat", "Texture Scale", 30.0), + ("NodeSocketFloatFactor", "Randomness", 1.0), + ("NodeSocketFloat", "Value", 0.05), + ("NodeSocketFloat", "Mix Weight", 0.1), + ("NodeSocketFloat", "Scale", 15.0), + ("NodeSocketFloat", "Detail", 16.0), + ("NodeSocketFloat", "Value_1", 2.0), + ("NodeSocketFloat", "Z Multiplier", 0.5), + ("NodeSocketFloat", "Texture Scale S", 30.0), + ], + ) + + group_3 = nw.new_node( + nodegroup_primary_voronoi().name, + input_kwargs={ + "Coordinate": group_input.outputs["Position"], + "Texture Scale": group_input.outputs["Texture Scale S"], + }, + ) + + group = nw.new_node( + nodegroup_inject_z_noise_and_scale_001().name, + input_kwargs={ + "Coordinate": group_input.outputs["Position"], + "Noise Scale": group_input.outputs["Noise Scale"], + "Noise Amount": group_input.outputs["Noise Amount"], + "Z Multiplier": group_input.outputs["Z Multiplier"], + }, + ) + + group_2 = nw.new_node( + nodegroup_primary_voronoi().name, + input_kwargs={ + "Coordinate": group, + "Texture Scale": group_input.outputs["Texture Scale"], + "Randomness": group_input.outputs["Randomness"], + }, + ) + + group_5 = nw.new_node( + nodegroup_mix().name, + input_kwargs={ + "Input 1": group_3, + "Input 2": group_2, + "Mix Weight": group_input.outputs["Mix Weight"], + }, + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": group_input.outputs["Position"], + "Scale": group_input.outputs["Value_1"], + "Detail": group_input.outputs["Detail"], + }, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: noise_texture.outputs["Fac"], + 1: group_input.outputs["Noise Scale"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + add = nw.new_node(Nodes.Math, input_kwargs={0: group_5, 1: multiply}) + + multiply_1 = nw.new_node( + Nodes.Math, input_kwargs={0: add, 1: group_input.outputs["Value"]}, - attrs={'operation': 'MULTIPLY'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Value': multiply_1}) + attrs={"operation": "MULTIPLY"}, + ) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Value": multiply_1}) -@node_utils.to_nodegroup('nodegroup_apply_geo_matv2', singleton=True, type='GeometryNodeTree') + +@node_utils.to_nodegroup( + "nodegroup_apply_geo_matv2", singleton=True, type="GeometryNodeTree" +) def nodegroup_apply_geo_matv2(nw): - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketFloat', 'Displacement Amount', 0.0), - ('NodeSocketFloat', 'Displacement Scale', 0.0), - ('NodeSocketMaterial', 'Material', None)]) - + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketFloat", "Displacement Amount", 0.0), + ("NodeSocketFloat", "Displacement Scale", 0.0), + ("NodeSocketMaterial", "Material", None), + ], + ) + normal = nw.new_node(Nodes.InputNormal) - - multiply = nw.new_node(Nodes.Math, - 
input_kwargs={0: group_input.outputs["Displacement Amount"], 1: group_input.outputs["Displacement Scale"]}, - attrs={'operation': 'MULTIPLY'}) - - multiply_1 = nw.new_node(Nodes.VectorMath, + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["Displacement Amount"], + 1: group_input.outputs["Displacement Scale"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_1 = nw.new_node( + Nodes.VectorMath, input_kwargs={0: normal, 1: multiply}, - attrs={'operation': 'MULTIPLY'}) - - set_position_1 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 'Offset': multiply_1.outputs["Vector"]}) - - set_material = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': set_position_1, 'Material': group_input.outputs["Material"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_material}) - -def shader_random_bark_mat(nw, base_color:Tuple, geo_params, selection=None): - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: geo_params['Noise Texture Scale'], 1: 4.0}, - attrs={'operation': 'MULTIPLY'}) - - noise_texture_1 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Scale': multiply}) - - map_range_2 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': noise_texture_1.outputs["Fac"], 1: 0.35, 2: 0.4, 3: 0.5, 4: 0.0}) - - attribute_5 = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'initial_position'}) - - group_canonical = nw.new_node(nodegroup_shader_canonical_coord().name, - input_kwargs={'Vector': attribute_5.outputs["Vector"]}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: geo_params['Noise Texture Scale'], 1: 2.0}, - attrs={'operation': 'MULTIPLY'}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': group_canonical, 'Scale': multiply_1}) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': noise_texture.outputs["Fac"], 1: 0.35, 2: 0.4, 3: 0.5, 4: 0.0}) - - group_2 = nw.new_node(nodegroup_voronoi().name, - input_kwargs={'Coordinate': group_canonical, 'Texture Scale': geo_params['Secondary Voronoi Scale']}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': group_2, 3: 0.7, 4: 0.0}) - - group = nw.new_node(nodegroup_inject_z_noise_and_scale().name, - input_kwargs={'Coordinate': group_canonical, 'Noise Scale': geo_params['Z Noise Scale'], 'Noise Amount': geo_params['Z Noise Amount'], 'Z Multiplier': geo_params['Z Multiplier']}) - - group_1 = nw.new_node(nodegroup_voronoi().name, - input_kwargs={'Coordinate': group, 'Texture Scale': geo_params['Primary Voronoi Scale'], 'Randomness': geo_params['Primary Voronoi Randomness']}) - + attrs={"operation": "MULTIPLY"}, + ) + + set_position_1 = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + "Offset": multiply_1.outputs["Vector"], + }, + ) + + set_material = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": set_position_1, + "Material": group_input.outputs["Material"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_material} + ) + + +def shader_random_bark_mat(nw, base_color: Tuple, geo_params, selection=None): + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: geo_params["Noise Texture Scale"], 1: 4.0}, + attrs={"operation": "MULTIPLY"}, + ) + + noise_texture_1 = nw.new_node(Nodes.NoiseTexture, input_kwargs={"Scale": multiply}) + + map_range_2 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": noise_texture_1.outputs["Fac"], + 
1: 0.35, + 2: 0.4, + 3: 0.5, + 4: 0.0, + }, + ) + + attribute_5 = nw.new_node( + Nodes.Attribute, attrs={"attribute_name": "initial_position"} + ) + + group_canonical = nw.new_node( + nodegroup_shader_canonical_coord().name, + input_kwargs={"Vector": attribute_5.outputs["Vector"]}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: geo_params["Noise Texture Scale"], 1: 2.0}, + attrs={"operation": "MULTIPLY"}, + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={"Vector": group_canonical, "Scale": multiply_1}, + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": noise_texture.outputs["Fac"], + 1: 0.35, + 2: 0.4, + 3: 0.5, + 4: 0.0, + }, + ) + + group_2 = nw.new_node( + nodegroup_voronoi().name, + input_kwargs={ + "Coordinate": group_canonical, + "Texture Scale": geo_params["Secondary Voronoi Scale"], + }, + ) + + map_range = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": group_2, 3: 0.7, 4: 0.0} + ) + + group = nw.new_node( + nodegroup_inject_z_noise_and_scale().name, + input_kwargs={ + "Coordinate": group_canonical, + "Noise Scale": geo_params["Z Noise Scale"], + "Noise Amount": geo_params["Z Noise Amount"], + "Z Multiplier": geo_params["Z Multiplier"], + }, + ) + + group_1 = nw.new_node( + nodegroup_voronoi().name, + input_kwargs={ + "Coordinate": group, + "Texture Scale": geo_params["Primary Voronoi Scale"], + "Randomness": geo_params["Primary Voronoi Randomness"], + }, + ) + rgb_1 = nw.new_node(Nodes.RGB) rgb_1.outputs[0].default_value = base_color # todo: this value needs to be assigned - - group_3 = nw.new_node(nodegroup_adjust_v().name, - input_kwargs={'Color': rgb_1, 'V Shift': 0.1}) - - mix_4 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': 0.1, 'Color1': (0.0, 0.0, 0.0, 1.0), 'Color2': group_3}) - - mix_3 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': group_1, 'Color1': mix_4, 'Color2': group_3}) - - mix = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': map_range.outputs["Result"], 'Color1': mix_3, 'Color2': mix_4}) - - mix_6 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': 0.9, 'Color1': (1.0, 1.0, 1.0, 1.0), 'Color2': group_3}) - - mix_1 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': map_range_1.outputs["Result"], 'Color1': mix, 'Color2': mix_6}) - - mix_5 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': 0.9, 'Color1': (0.0, 0.0, 0.0, 1.0), 'Color2': group_3}) - - mix_2 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': map_range_2.outputs["Result"], 'Color1': mix_1, 'Color2': mix_5}) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': mix_2, 'Roughness': 0.7}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': principled_bsdf}) + + group_3 = nw.new_node( + nodegroup_adjust_v().name, input_kwargs={"Color": rgb_1, "V Shift": 0.1} + ) + + mix_4 = nw.new_node( + Nodes.MixRGB, + input_kwargs={"Fac": 0.1, "Color1": (0.0, 0.0, 0.0, 1.0), "Color2": group_3}, + ) + + mix_3 = nw.new_node( + Nodes.MixRGB, input_kwargs={"Fac": group_1, "Color1": mix_4, "Color2": group_3} + ) + + mix = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": map_range.outputs["Result"], + "Color1": mix_3, + "Color2": mix_4, + }, + ) + + mix_6 = nw.new_node( + Nodes.MixRGB, + input_kwargs={"Fac": 0.9, "Color1": (1.0, 1.0, 1.0, 1.0), "Color2": group_3}, + ) + + mix_1 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": map_range_1.outputs["Result"], + "Color1": mix, + "Color2": mix_6, + }, + ) + + mix_5 = nw.new_node( + Nodes.MixRGB, + input_kwargs={"Fac": 
0.9, "Color1": (0.0, 0.0, 0.0, 1.0), "Color2": group_3}, + ) + + mix_2 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": map_range_2.outputs["Result"], + "Color1": mix_1, + "Color2": mix_5, + }, + ) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, input_kwargs={"Base Color": mix_2, "Roughness": 0.7} + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf} + ) + def geo_bark_random(nw, base_color, geo_params, selection=None): - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketFloat', 'Displacement Scale', geo_params['Displacement Scale']), - ('NodeSocketFloat', 'Z Noise Scale', geo_params['Z Noise Scale']), - ('NodeSocketFloat', 'Z Noise Amount', geo_params['Z Noise Amount']), - ('NodeSocketFloat', 'Z Multiplier', geo_params['Z Multiplier']), - ('NodeSocketFloat', 'Primary Voronoi Scale', geo_params['Primary Voronoi Scale']), - ('NodeSocketFloatFactor', 'Primary Voronoi Randomness', geo_params['Primary Voronoi Randomness']), - ('NodeSocketFloat', 'Secondary Voronoi Mix Weight', geo_params['Secondary Voronoi Mix Weight']), - ('NodeSocketFloat', 'Secondary Voronoi Scale', geo_params['Secondary Voronoi Scale']), - ('NodeSocketFloat', 'Noise Texture Scale', geo_params['Noise Texture Scale']), - ('NodeSocketFloat', 'Noise Texture Detail', geo_params['Noise Texture Detail']), - ('NodeSocketFloat', 'Noise Texture Weight', geo_params['Noise Texture Weight'])]) - - parent_loc = nw.new_node(Nodes.NamedAttribute, ['parent_skeleton_loc'], attrs={'data_type': 'FLOAT_VECTOR'}) - skeleton_loc = nw.new_node(Nodes.NamedAttribute, ['skeleton_loc'], attrs={'data_type': 'FLOAT_VECTOR'}) - + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketFloat", "Displacement Scale", geo_params["Displacement Scale"]), + ("NodeSocketFloat", "Z Noise Scale", geo_params["Z Noise Scale"]), + ("NodeSocketFloat", "Z Noise Amount", geo_params["Z Noise Amount"]), + ("NodeSocketFloat", "Z Multiplier", geo_params["Z Multiplier"]), + ( + "NodeSocketFloat", + "Primary Voronoi Scale", + geo_params["Primary Voronoi Scale"], + ), + ( + "NodeSocketFloatFactor", + "Primary Voronoi Randomness", + geo_params["Primary Voronoi Randomness"], + ), + ( + "NodeSocketFloat", + "Secondary Voronoi Mix Weight", + geo_params["Secondary Voronoi Mix Weight"], + ), + ( + "NodeSocketFloat", + "Secondary Voronoi Scale", + geo_params["Secondary Voronoi Scale"], + ), + ( + "NodeSocketFloat", + "Noise Texture Scale", + geo_params["Noise Texture Scale"], + ), + ( + "NodeSocketFloat", + "Noise Texture Detail", + geo_params["Noise Texture Detail"], + ), + ( + "NodeSocketFloat", + "Noise Texture Weight", + geo_params["Noise Texture Weight"], + ), + ], + ) + + parent_loc = nw.new_node( + Nodes.NamedAttribute, + ["parent_skeleton_loc"], + attrs={"data_type": "FLOAT_VECTOR"}, + ) + skeleton_loc = nw.new_node( + Nodes.NamedAttribute, ["skeleton_loc"], attrs={"data_type": "FLOAT_VECTOR"} + ) + position = nw.new_node(Nodes.InputPosition) - - capture_attribute = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 1: position}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - canonicalcoord = nw.new_node(nodegroup_canonical_coord().name, - input_kwargs={'Self Location': skeleton_loc, 'Parent Location': parent_loc}) - group_1 = nw.new_node(nodegroup_random_bark_geo().name, - input_kwargs={'Position': canonicalcoord, 'Noise Scale': 
group_input.outputs["Z Noise Scale"], 'Noise Amount': group_input.outputs["Z Noise Amount"], 3: group_input.outputs["Primary Voronoi Scale"], 'Randomness': group_input.outputs["Primary Voronoi Randomness"], 5: group_input.outputs["Displacement Scale"], 'Mix Weight': group_input.outputs["Secondary Voronoi Mix Weight"], 'Scale': group_input.outputs["Noise Texture Scale"], 'Detail': group_input.outputs["Noise Texture Detail"], 9: group_input.outputs["Noise Texture Weight"], 'Z Multiplier': group_input.outputs["Z Multiplier"], 11: group_input.outputs["Secondary Voronoi Scale"]}) + capture_attribute = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={"Geometry": group_input.outputs["Geometry"], 1: position}, + attrs={"data_type": "FLOAT_VECTOR"}, + ) - calc_radius = nw.new_node(nodegroup_calc_radius().name, - input_kwargs={'Self Location': skeleton_loc, 'Parent Location': parent_loc}) + canonicalcoord = nw.new_node( + nodegroup_canonical_coord().name, + input_kwargs={"Self Location": skeleton_loc, "Parent Location": parent_loc}, + ) - multiply = nw.new_node(Nodes.Math, + group_1 = nw.new_node( + nodegroup_random_bark_geo().name, + input_kwargs={ + "Position": canonicalcoord, + "Noise Scale": group_input.outputs["Z Noise Scale"], + "Noise Amount": group_input.outputs["Z Noise Amount"], + 3: group_input.outputs["Primary Voronoi Scale"], + "Randomness": group_input.outputs["Primary Voronoi Randomness"], + 5: group_input.outputs["Displacement Scale"], + "Mix Weight": group_input.outputs["Secondary Voronoi Mix Weight"], + "Scale": group_input.outputs["Noise Texture Scale"], + "Detail": group_input.outputs["Noise Texture Detail"], + 9: group_input.outputs["Noise Texture Weight"], + "Z Multiplier": group_input.outputs["Z Multiplier"], + 11: group_input.outputs["Secondary Voronoi Scale"], + }, + ) + + calc_radius = nw.new_node( + nodegroup_calc_radius().name, + input_kwargs={"Self Location": skeleton_loc, "Parent Location": parent_loc}, + ) + + multiply = nw.new_node( + Nodes.Math, input_kwargs={0: calc_radius, 1: 3.0}, - attrs={'operation': 'MULTIPLY'}) + attrs={"operation": "MULTIPLY"}, + ) - multiply_1 = nw.new_node(Nodes.Math, + multiply_1 = nw.new_node( + Nodes.Math, input_kwargs={0: multiply, 1: group_1}, - attrs={'operation': 'MULTIPLY'}) - group = nw.new_node(nodegroup_apply_geo_matv2().name, - input_kwargs={'Geometry': capture_attribute.outputs["Geometry"], - 'Displacement Amount': nw.multiply(multiply_1, surface.eval_argument(nw, selection)), - 'Displacement Scale': 0.5, 'Material': surface.shaderfunc_to_material(shader_random_bark_mat, geo_params=geo_params, base_color=base_color)}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': group, 'initial_position': capture_attribute.outputs["Attribute"]}) + attrs={"operation": "MULTIPLY"}, + ) + group = nw.new_node( + nodegroup_apply_geo_matv2().name, + input_kwargs={ + "Geometry": capture_attribute.outputs["Geometry"], + "Displacement Amount": nw.multiply( + multiply_1, surface.eval_argument(nw, selection) + ), + "Displacement Scale": 0.5, + "Material": surface.shaderfunc_to_material( + shader_random_bark_mat, geo_params=geo_params, base_color=base_color + ), + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": group, + "initial_position": capture_attribute.outputs["Attribute"], + }, + ) + # https://blender.stackexchange.com/questions/158896/how-set-hex-in-rgb-node-python def srgb_to_linearrgb(c): - if c < 0: return 0 - elif c < 0.04045: return c/12.92 - else: return 
((c+0.055)/1.055)**2.4 + if c < 0: + return 0 + elif c < 0.04045: + return c / 12.92 + else: + return ((c + 0.055) / 1.055) ** 2.4 + def hex_to_rgb(h, alpha=1): - r = (h & 0xff0000) >> 16 - g = (h & 0x00ff00) >> 8 - b = (h & 0x0000ff) - return tuple([srgb_to_linearrgb(c/0xff) for c in (r,g,b)] + [alpha]) + r = (h & 0xFF0000) >> 16 + g = (h & 0x00FF00) >> 8 + b = h & 0x0000FF + return tuple([srgb_to_linearrgb(c / 0xFF) for c in (r, g, b)] + [alpha]) + def get_random_bark_params(seed): with FixedSeed(seed): - color_factory = [0x4c2f27, 0x69432d, 0x371803, 0x7f4040, 0xcc9576, 0x9e8170, 0x3d2b1f, - 0x8d6a58, 0x8b3325, 0x79443c, 0x88540b, 0x9b5f43, 0x4e3828, 0x4e3828, - 0xc09a6b, 0x944536, 0x3f0110, 0x773c12, 0x6e4e37, 0x5c4033, 0x5c4033, - 0x3c3034, 0x96704c, 0x371b1a, 0x483b32, 0x43141a, 0x471713, 0xc3b090, - 0x6b4423, 0x674d46, 0x5d2e1a, 0x331c1f, 0x7a5640, 0xb99984, 0x71543d, - 0x8f4b28, 0x491a00, 0x836446, 0x7f461b, 0x6a3208, 0x724115, 0xa0522b, - 0x832a0c, 0x371b1a, 0xc7a373, 0x483b32, 0x635147, 0x664228, 0x5c5248] + color_factory = [ + 0x4C2F27, + 0x69432D, + 0x371803, + 0x7F4040, + 0xCC9576, + 0x9E8170, + 0x3D2B1F, + 0x8D6A58, + 0x8B3325, + 0x79443C, + 0x88540B, + 0x9B5F43, + 0x4E3828, + 0x4E3828, + 0xC09A6B, + 0x944536, + 0x3F0110, + 0x773C12, + 0x6E4E37, + 0x5C4033, + 0x5C4033, + 0x3C3034, + 0x96704C, + 0x371B1A, + 0x483B32, + 0x43141A, + 0x471713, + 0xC3B090, + 0x6B4423, + 0x674D46, + 0x5D2E1A, + 0x331C1F, + 0x7A5640, + 0xB99984, + 0x71543D, + 0x8F4B28, + 0x491A00, + 0x836446, + 0x7F461B, + 0x6A3208, + 0x724115, + 0xA0522B, + 0x832A0C, + 0x371B1A, + 0xC7A373, + 0x483B32, + 0x635147, + 0x664228, + 0x5C5248, + ] color_factory = [hex_to_rgb(c) for c in color_factory] geo_params = { - 'Displacement Scale': np.random.uniform(0.03, 0.07), - 'Z Noise Scale': np.random.uniform(1.0, 3.0), - 'Z Noise Amount': np.random.uniform(0.5, 1.5), - 'Z Multiplier': np.random.uniform(0.1, 0.3), - 'Primary Voronoi Scale': np.random.uniform(20, 40), - 'Primary Voronoi Randomness': np.random.uniform(0.6, 1.0), - 'Secondary Voronoi Mix Weight': np.random.uniform(0.05, 0.2), - 'Secondary Voronoi Scale': np.random.uniform(30, 50), - 'Noise Texture Scale': 15.0, - 'Noise Texture Detail': 16.0, - 'Noise Texture Weight': 2.0} - color_params = {'Color': color_factory[np.random.randint(len(color_factory))]} + "Displacement Scale": np.random.uniform(0.03, 0.07), + "Z Noise Scale": np.random.uniform(1.0, 3.0), + "Z Noise Amount": np.random.uniform(0.5, 1.5), + "Z Multiplier": np.random.uniform(0.1, 0.3), + "Primary Voronoi Scale": np.random.uniform(20, 40), + "Primary Voronoi Randomness": np.random.uniform(0.6, 1.0), + "Secondary Voronoi Mix Weight": np.random.uniform(0.05, 0.2), + "Secondary Voronoi Scale": np.random.uniform(30, 50), + "Noise Texture Scale": 15.0, + "Noise Texture Detail": 16.0, + "Noise Texture Weight": 2.0, + } + color_params = {"Color": color_factory[np.random.randint(len(color_factory))]} return geo_params, color_params -def apply(obj, selection=None, **kwargs): +def apply(obj, selection=None, **kwargs): geo_params, color_params = get_random_bark_params(seed=np.random.randint(1e5)) - surface.add_geomod(obj, geo_bark_random, selection=selection, - input_kwargs={'base_color': color_params['Color'], 'geo_params': geo_params}, attributes=['initial_position']) + surface.add_geomod( + obj, + geo_bark_random, + selection=selection, + input_kwargs={"base_color": color_params["Color"], "geo_params": geo_params}, + attributes=["initial_position"], + ) diff --git a/infinigen/assets/materials/basic_bsdf.py 
b/infinigen/assets/materials/basic_bsdf.py index 295d29b85..4d402ffc9 100644 --- a/infinigen/assets/materials/basic_bsdf.py +++ b/infinigen/assets/materials/basic_bsdf.py @@ -4,34 +4,35 @@ # Authors: Alexander Raistrick -import bpy -import mathutils - import numpy as np -from numpy.random import uniform, normal +from numpy.random import normal, uniform -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.util.color import hsv2rgba from infinigen.core import surface +from infinigen.core.nodes.node_wrangler import Nodes +from infinigen.core.util.color import hsv2rgba -def shader_basic_bsdf(nw): +def shader_basic_bsdf(nw): color = nw.new_node(Nodes.RGB) color.outputs[0].default_value = hsv2rgba(uniform(0.05, 0.95, 3)) - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, input_kwargs={ - 'Base Color': color, - 'Roughness': np.clip(normal(0.6, 0.3), 0.05, 0.95), - 'Metallic': uniform(0, 1) if uniform() < 0.3 else 0, - 'Subsurface': 0 if uniform() < 0.8 else uniform(0, 0.2) + "Base Color": color, + "Roughness": np.clip(normal(0.6, 0.3), 0.05, 0.95), + "Metallic": uniform(0, 1) if uniform() < 0.3 else 0, + "Subsurface": 0 if uniform() < 0.8 else uniform(0, 0.2), }, - attrs={'subsurface_method': 'BURLEY'}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': principled_bsdf}) + attrs={"subsurface_method": "BURLEY"}, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf} + ) return principled_bsdf + def apply(obj, selection=None, **kwargs): - surface.add_material(obj, shader_basic_bsdf, reuse=False) \ No newline at end of file + surface.add_material(obj, shader_basic_bsdf, reuse=False) diff --git a/infinigen/assets/materials/beak.py b/infinigen/assets/materials/beak.py index 9e260e9eb..e34febbd7 100644 --- a/infinigen/assets/materials/beak.py +++ b/infinigen/assets/materials/beak.py @@ -4,67 +4,90 @@ # Authors: Yihan Wang -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category +from numpy.random import randint, uniform + from infinigen.core import surface +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler + def orange(): r = uniform(205 / 255, 1) - g = uniform(0, 150/255) + g = uniform(0, 150 / 255) b = 0 return (r, g, b) + def white(): return (uniform(0, 0.05), uniform(0, 0.05), uniform(0, 0.05)) + def black(): return (1 - uniform(0, 0.05), 1 - uniform(0, 0.05), 1 - uniform(0, 0.05)) + def rand_color(): op = randint(0, 2) return orange(), orange() + def shader_beak(nw: NodeWrangler): # Code generated using version 2.4.3 of the node_transpiler texture_coordinate = nw.new_node(Nodes.TextureCoord) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': texture_coordinate.outputs["UV"]}) - - noise_texture_1 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Scale': 4.3 + uniform(0, 2), 'Roughness': 0.4167 + uniform(0, 0.2)}) - - multiply = nw.new_node(Nodes.Math, + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": texture_coordinate.outputs["UV"]} + ) + + noise_texture_1 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Scale": 4.3 + uniform(0, 2), + "Roughness": 0.4167 + uniform(0, 0.2), + }, + ) + + multiply = nw.new_node( + Nodes.Math, input_kwargs={0: 
noise_texture_1.outputs["Fac"], 1: 0.2}, - attrs={'operation': 'MULTIPLY'}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz.outputs["Y"], 1: multiply}) - - colorramp = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': add}) + attrs={"operation": "MULTIPLY"}, + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: separate_xyz.outputs["Y"], 1: multiply} + ) + + colorramp = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": add}) colorramp.color_ramp.interpolation = "EASE" col0, col1 = rand_color() colorramp.color_ramp.elements[0].position = 0.33 + uniform(-1, 1) * 0.2 colorramp.color_ramp.elements[0].color = (col0[0], col0[1], col0[2], 1.0) - colorramp.color_ramp.elements[1].position = 0.66 + uniform(-1, 1) * 0.2 + colorramp.color_ramp.elements[1].position = 0.66 + uniform(-1, 1) * 0.2 colorramp.color_ramp.elements[1].color = (col1[0], col1[1], col1[2], 1.0) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': colorramp.outputs["Color"], 'Roughness': 0.2434 + uniform(0, 0.1)}) - - glass_bsdf = nw.new_node('ShaderNodeBsdfGlass') - - mix_shader = nw.new_node(Nodes.MixShader, - input_kwargs={'Fac': 0.5667 + uniform(0, 0.05), 1: principled_bsdf, 2: glass_bsdf}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': mix_shader}) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": colorramp.outputs["Color"], + "Roughness": 0.2434 + uniform(0, 0.1), + }, + ) + + glass_bsdf = nw.new_node("ShaderNodeBsdfGlass") + + mix_shader = nw.new_node( + Nodes.MixShader, + input_kwargs={ + "Fac": 0.5667 + uniform(0, 0.05), + 1: principled_bsdf, + 2: glass_bsdf, + }, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": mix_shader} + ) + def apply(obj, selection=None, **kwargs): - surface.add_material(obj, shader_beak, selection=selection) \ No newline at end of file + surface.add_material(obj, shader_beak, selection=selection) diff --git a/infinigen/assets/materials/beverage_fridge_shaders.py b/infinigen/assets/materials/beverage_fridge_shaders.py index 2499733e9..f114720fa 100644 --- a/infinigen/assets/materials/beverage_fridge_shaders.py +++ b/infinigen/assets/materials/beverage_fridge_shaders.py @@ -1,25 +1,36 @@ - # Copyright (c) Princeton University. - # This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
- # Authors: Hongyu Wen +# Authors: Hongyu Wen from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler + def shader_glass_001(nw: NodeWrangler): # Code generated using version 2.6.5 of the node_transpiler - glass_bsdf = nw.new_node(Nodes.GlassBSDF, input_kwargs={'IOR': 1.5000}) - - material_output = nw.new_node(Nodes.MaterialOutput, input_kwargs={'Surface': glass_bsdf}, attrs={'is_active_output': True}) + glass_bsdf = nw.new_node(Nodes.GlassBSDF, input_kwargs={"IOR": 1.5000}) + material_output = nw.new_node( + Nodes.MaterialOutput, + input_kwargs={"Surface": glass_bsdf}, + attrs={"is_active_output": True}, + ) def shader_black_medal_001(nw: NodeWrangler): # Code generated using version 2.6.5 of the node_transpiler - anisotropic_bsdf = nw.new_node('ShaderNodeBsdfAnisotropic', input_kwargs={'Color': (0.0167, 0.0167, 0.0167, 1.0000)}) + anisotropic_bsdf = nw.new_node( + "ShaderNodeBsdfAnisotropic", + input_kwargs={"Color": (0.0167, 0.0167, 0.0167, 1.0000)}, + ) - material_output = nw.new_node(Nodes.MaterialOutput, input_kwargs={'Surface': anisotropic_bsdf}, attrs={'is_active_output': True}) + material_output = nw.new_node( + Nodes.MaterialOutput, + input_kwargs={"Surface": anisotropic_bsdf}, + attrs={"is_active_output": True}, + ) def shader_white_metal_001(nw: NodeWrangler): @@ -27,20 +38,49 @@ def shader_white_metal_001(nw: NodeWrangler): texture_coordinate = nw.new_node(Nodes.TextureCoord) - mapping = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': texture_coordinate.outputs["Object"], 'Scale': (1.0000, 1.0000, 50.0000)}) - - noise_texture_1 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': mapping, 'Scale': 20.0000, 'Detail': 20.0000, 'Distortion': 1.0000}) - - colorramp = nw.new_node(Nodes.ColorRamp, input_kwargs={'Fac': noise_texture_1.outputs["Color"]}) + mapping = nw.new_node( + Nodes.Mapping, + input_kwargs={ + "Vector": texture_coordinate.outputs["Object"], + "Scale": (1.0000, 1.0000, 50.0000), + }, + ) + + noise_texture_1 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": mapping, + "Scale": 20.0000, + "Detail": 20.0000, + "Distortion": 1.0000, + }, + ) + + colorramp = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": noise_texture_1.outputs["Color"]} + ) colorramp.color_ramp.elements[0].position = 0.2500 colorramp.color_ramp.elements[0].color = [0.5244, 0.5244, 0.5244, 1.0000] colorramp.color_ramp.elements[1].position = 1.0000 colorramp.color_ramp.elements[1].color = [0.9698, 0.9698, 0.9698, 1.0000] - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': colorramp.outputs["Color"], 'Subsurface Color': (1.0000, 1.0000, 1.0000, 1.0000), 'Metallic': 1.0000, 'Specular': 1.0000, 'Roughness': 0.1000, 'Anisotropic': 0.9182, 'Sheen': 0.0455, 'Sheen Tint': 0.4948}, - attrs={'subsurface_method': 'BURLEY'}) - - material_output = nw.new_node(Nodes.MaterialOutput, input_kwargs={'Surface': principled_bsdf}, attrs={'is_active_output': True}) + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": colorramp.outputs["Color"], + "Subsurface Color": (1.0000, 1.0000, 1.0000, 1.0000), + "Metallic": 1.0000, + "Specular": 1.0000, + "Roughness": 0.1000, + "Anisotropic": 0.9182, + "Sheen": 0.0455, + "Sheen Tint": 0.4948, + }, + attrs={"subsurface_method": "BURLEY"}, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, + input_kwargs={"Surface": principled_bsdf}, + attrs={"is_active_output": True}, + ) diff --git a/infinigen/assets/materials/bird.py b/infinigen/assets/materials/bird.py index 
151dd5532..11f66aa8d 100644 --- a/infinigen/assets/materials/bird.py +++ b/infinigen/assets/materials/bird.py @@ -4,80 +4,102 @@ # Authors: Mingzhe Wang -import os, sys -import numpy as np -import math as ma -from infinigen.assets.materials.utils.surface_utils import clip, sample_range, sample_ratio, sample_color, geo_voronoi_noise +import os +import random + import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category + +from infinigen.assets.materials.utils.surface_utils import ( + sample_color, + sample_range, +) from infinigen.core import surface -import random +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -@node_utils.to_nodegroup('nodegroup_l_inear', singleton=False, type='ShaderNodeTree') +@node_utils.to_nodegroup("nodegroup_l_inear", singleton=False, type="ShaderNodeTree") def nodegroup_l_inear(nw: NodeWrangler): # Code generated using version 2.4.3 of the node_transpiler - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'Vector', (0.0, 0.0, 0.0)), - ('NodeSocketFloat', 'CoffX', 0.5), - ('NodeSocketFloat', 'CoffZ', 0.5)]) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': group_input.outputs["Vector"]}) - - multiply = nw.new_node(Nodes.Math, + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVector", "Vector", (0.0, 0.0, 0.0)), + ("NodeSocketFloat", "CoffX", 0.5), + ("NodeSocketFloat", "CoffZ", 0.5), + ], + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": group_input.outputs["Vector"]} + ) + + multiply = nw.new_node( + Nodes.Math, input_kwargs={0: separate_xyz.outputs["X"], 1: group_input.outputs["CoffX"]}, - attrs={'operation': 'MULTIPLY'}) - - multiply_1 = nw.new_node(Nodes.Math, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, input_kwargs={0: separate_xyz.outputs["Z"], 1: group_input.outputs["CoffZ"]}, - attrs={'operation': 'MULTIPLY'}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: multiply, 1: multiply_1}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Value': add}) - -@node_utils.to_nodegroup('nodegroup_head_neck', singleton=False, type='ShaderNodeTree') -def nodegroup_head_neck(nw: NodeWrangler, rand=True, kind='duck'): + attrs={"operation": "MULTIPLY"}, + ) + + add = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: multiply_1}) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Value": add}) + + +@node_utils.to_nodegroup("nodegroup_head_neck", singleton=False, type="ShaderNodeTree") +def nodegroup_head_neck(nw: NodeWrangler, rand=True, kind="duck"): # Code generated using version 2.4.3 of the node_transpiler texture_coordinate = nw.new_node(Nodes.TextureCoord) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketColor', 'Color1', (0.046, 0.5, 0.0, 1.0)), - ('NodeSocketFloat', 'W', 6.0)]) - - noise_texture_1 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': texture_coordinate.outputs["Generated"], 'W': group_input.outputs["W"], 'Scale': 2.0}, - attrs={'noise_dimensions': '4D'}) + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketColor", "Color1", (0.046, 0.5, 0.0, 1.0)), + ("NodeSocketFloat", "W", 6.0), + ], + ) + + noise_texture_1 = nw.new_node( + Nodes.NoiseTexture, + 
input_kwargs={ + "Vector": texture_coordinate.outputs["Generated"], + "W": group_input.outputs["W"], + "Scale": 2.0, + }, + attrs={"noise_dimensions": "4D"}, + ) if rand: - noise_texture_1.inputs['W'].default_value = sample_range(-2, 2) - - subtract = nw.new_node(Nodes.VectorMath, + noise_texture_1.inputs["W"].default_value = sample_range(-2, 2) + + subtract = nw.new_node( + Nodes.VectorMath, input_kwargs={0: noise_texture_1.outputs["Fac"], 1: (0.0, 0.0, 0.0)}, - attrs={'operation': 'SUBTRACT'}) - - mix = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': 0.2, 'Color1': texture_coordinate.outputs["Generated"], 'Color2': subtract.outputs["Vector"]}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': mix}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz.outputs["Z"], 1: 0.05}) - - reroute = nw.new_node(Nodes.Reroute, - input_kwargs={'Input': add}) - - colorramp_4 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': reroute}) + attrs={"operation": "SUBTRACT"}, + ) + + mix = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": 0.2, + "Color1": texture_coordinate.outputs["Generated"], + "Color2": subtract.outputs["Vector"], + }, + ) + + separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": mix}) + + add = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz.outputs["Z"], 1: 0.05}) + + reroute = nw.new_node(Nodes.Reroute, input_kwargs={"Input": add}) + + colorramp_4 = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": reroute}) colorramp_4.color_ramp.elements.new(0) colorramp_4.color_ramp.elements.new(0) colorramp_4.color_ramp.elements[0].position = 0.83 @@ -88,316 +110,443 @@ def nodegroup_head_neck(nw: NodeWrangler, rand=True, kind='duck'): colorramp_4.color_ramp.elements[2].color = (1.0, 1.0, 1.0, 1.0) colorramp_4.color_ramp.elements[3].position = 0.835 colorramp_4.color_ramp.elements[3].color = (0.0, 0.0, 0.0, 1.0) - - colorramp_3 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': reroute}) + + colorramp_3 = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": reroute}) colorramp_3.color_ramp.elements[0].position = 0.83 colorramp_3.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) colorramp_3.color_ramp.elements[1].position = 0.84 colorramp_3.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - - attribute_2 = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'tag_head'}) - - add_1 = nw.new_node(Nodes.Math, - input_kwargs={0: colorramp_3.outputs["Color"], 1: attribute_2.outputs["Color"]}) - - colorramp_1 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': add_1}) + + attribute_2 = nw.new_node(Nodes.Attribute, attrs={"attribute_name": "tag_head"}) + + add_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: colorramp_3.outputs["Color"], 1: attribute_2.outputs["Color"]}, + ) + + colorramp_1 = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": add_1}) colorramp_1.color_ramp.elements[0].position = 0.4545 colorramp_1.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) colorramp_1.color_ramp.elements[1].position = 0.5455 colorramp_1.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'W': 1.7, 'Scale': 3.0}, - attrs={'noise_dimensions': '4D'}) - - colorramp = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': noise_texture.outputs["Fac"]}) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={"W": 1.7, "Scale": 3.0}, + attrs={"noise_dimensions": "4D"}, + ) + + colorramp = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": 
noise_texture.outputs["Fac"]} + ) colorramp.color_ramp.elements[0].position = 0.5077 colorramp.color_ramp.elements[0].color = (0.0063, 0.017, 0.005, 1.0) colorramp.color_ramp.elements[1].position = 1.0 colorramp.color_ramp.elements[1].color = (0.0018, 0.0571, 0.0, 1.0) if rand: - if kind == 'duck': + if kind == "duck": sample_color(colorramp.color_ramp.elements[0].color, keep_sum=True) for i in range(3): - colorramp.color_ramp.elements[1].color[i] = colorramp.color_ramp.elements[0].color[i]+0.005 - elif kind == 'eagle': + colorramp.color_ramp.elements[1].color[i] = ( + colorramp.color_ramp.elements[0].color[i] + 0.005 + ) + elif kind == "eagle": colorramp.color_ramp.elements[0].color = (0.265, 0.265, 0.265, 1.0) sample_color(colorramp.color_ramp.elements[0].color, offset=0.05) - mix_1 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': colorramp_1.outputs["Color"], 'Color1': group_input.outputs["Color1"], 'Color2': colorramp.outputs["Color"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Color': mix_1}) + mix_1 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": colorramp_1.outputs["Color"], + "Color1": group_input.outputs["Color1"], + "Color2": colorramp.outputs["Color"], + }, + ) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Color": mix_1}) + -def shader_bird_body(nw: NodeWrangler, rand=True, kind='duck', **input_kwargs): +def shader_bird_body(nw: NodeWrangler, rand=True, kind="duck", **input_kwargs): # Code generated using version 2.4.3 of the node_transpiler - attribute = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'tag_tail'}) - - attribute_3 = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'tag_body'}) - - attribute_5 = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'tag_leg'}) - - attribute_6 = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'tag_wing'}) - - attribute_4 = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'tag_foot'}) - - mix_3 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': attribute_4.outputs["Color"], 'Color1': (0.0, 0.0, 0.0, 1.0), 'Color2': (0.0225, 0.0055, 0.0024, 1.0)}) - - mix_8 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': attribute_6.outputs["Color"], 'Color1': mix_3, 'Color2': (0.008, 0.008, 0.008, 1.0)}) - + attribute = nw.new_node(Nodes.Attribute, attrs={"attribute_name": "tag_tail"}) + + attribute_3 = nw.new_node(Nodes.Attribute, attrs={"attribute_name": "tag_body"}) + + attribute_5 = nw.new_node(Nodes.Attribute, attrs={"attribute_name": "tag_leg"}) + + attribute_6 = nw.new_node(Nodes.Attribute, attrs={"attribute_name": "tag_wing"}) + + attribute_4 = nw.new_node(Nodes.Attribute, attrs={"attribute_name": "tag_foot"}) + + mix_3 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": attribute_4.outputs["Color"], + "Color1": (0.0, 0.0, 0.0, 1.0), + "Color2": (0.0225, 0.0055, 0.0024, 1.0), + }, + ) + + mix_8 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": attribute_6.outputs["Color"], + "Color1": mix_3, + "Color2": (0.008, 0.008, 0.008, 1.0), + }, + ) + texture_coordinate_2 = nw.new_node(Nodes.TextureCoord) - - noise_texture_2 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': texture_coordinate_2.outputs["Generated"]}) - - mix_6 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': 0.2, 'Color1': texture_coordinate_2.outputs["Generated"], 'Color2': noise_texture_2.outputs["Color"]}) - - group_2 = nw.new_node(nodegroup_l_inear().name, - input_kwargs={'Vector': mix_6, 'CoffX': 0.1, 'CoffZ': 1.0}) + + noise_texture_2 = nw.new_node( + 
Nodes.NoiseTexture, + input_kwargs={"Vector": texture_coordinate_2.outputs["Generated"]}, + ) + + mix_6 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": 0.2, + "Color1": texture_coordinate_2.outputs["Generated"], + "Color2": noise_texture_2.outputs["Color"], + }, + ) + + group_2 = nw.new_node( + nodegroup_l_inear().name, + input_kwargs={"Vector": mix_6, "CoffX": 0.1, "CoffZ": 1.0}, + ) if rand: if random.random() < 0.5: - group_2.inputs['CoffX'].default_value = sample_range(-0.1, 0.1) + group_2.inputs["CoffX"].default_value = sample_range(-0.1, 0.1) else: - group_2.inputs['CoffX'].default_value = sample_range(0.1, 0.8) + group_2.inputs["CoffX"].default_value = sample_range(0.1, 0.8) + + add = nw.new_node(Nodes.Math, input_kwargs={0: group_2, 1: 0.1}) - add = nw.new_node(Nodes.Math, - input_kwargs={0: group_2, 1: 0.1}) - - colorramp_3 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': add}) + colorramp_3 = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": add}) colorramp_3.color_ramp.elements[0].position = 0.4159 colorramp_3.color_ramp.elements[0].color = (1.0, 1.0, 1.0, 1.0) colorramp_3.color_ramp.elements[1].position = 0.6886 colorramp_3.color_ramp.elements[1].color = (0.0, 0.0, 0.0, 1.0) - - noise_texture_4 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Scale': 20.0}) - - colorramp_5 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': noise_texture_4.outputs["Fac"]}) + + noise_texture_4 = nw.new_node(Nodes.NoiseTexture, input_kwargs={"Scale": 20.0}) + + colorramp_5 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": noise_texture_4.outputs["Fac"]} + ) colorramp_5.color_ramp.elements[0].position = 0.3341 colorramp_5.color_ramp.elements[0].color = (0.0079, 0.0062, 0.0063, 1.0) colorramp_5.color_ramp.elements[1].position = 0.9932 colorramp_5.color_ramp.elements[1].color = (0.0302, 0.0264, 0.0262, 1.0) if rand: - if kind == 'duck': + if kind == "duck": for i in range(3): colorramp_5.color_ramp.elements[0].color[i] = sample_range(0, 0.2) colorramp_5.color_ramp.elements[1].color[i] = sample_range(0, 0.2) - elif kind == 'eagle': + elif kind == "eagle": for i in range(3): colorramp_5.color_ramp.elements[0].color[i] = sample_range(0, 0.01) colorramp_5.color_ramp.elements[0].position = sample_range(0.5, 0.6) colorramp_5.color_ramp.elements[1].color[i] = sample_range(0, 0.1) - - noise_texture_1 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Scale': 10.0}, - attrs={'noise_dimensions': '4D'}) + + noise_texture_1 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={"Scale": 10.0}, + attrs={"noise_dimensions": "4D"}, + ) if rand: - noise_texture_1.inputs['W'].default_value = sample_range(-2, 2) + noise_texture_1.inputs["W"].default_value = sample_range(-2, 2) x = random.random() if x < 0.3: - noise_texture_1.inputs['Scale'].default_value = 1 + noise_texture_1.inputs["Scale"].default_value = 1 if x > 0.7: - noise_texture_1.inputs['Scale'].default_value = 50 + noise_texture_1.inputs["Scale"].default_value = 50 - colorramp_1 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': noise_texture_1.outputs["Color"]}) + colorramp_1 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": noise_texture_1.outputs["Color"]} + ) colorramp_1.color_ramp.elements[0].position = 0.4614 colorramp_1.color_ramp.elements[0].color = (0.1, 0.1, 0.1, 1.0) colorramp_1.color_ramp.elements[1].position = 1.0 colorramp_1.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) if rand: - if kind == 'eagle': + if kind == "eagle": for i in range(3): colorramp_1.color_ramp.elements[0].color[i] = sample_range(0, 0.01) 
colorramp_1.color_ramp.elements[0].position = sample_range(0.5, 0.6) colorramp_1.color_ramp.elements[1].color[i] = sample_range(0, 0.1) - mix_5 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': colorramp_3.outputs["Color"], 'Color1': colorramp_5.outputs["Color"], 'Color2': colorramp_1.outputs["Color"]}) - - mix_7 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': attribute_5.outputs["Color"], 'Color1': mix_8, 'Color2': mix_5}) - + mix_5 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": colorramp_3.outputs["Color"], + "Color1": colorramp_5.outputs["Color"], + "Color2": colorramp_1.outputs["Color"], + }, + ) + + mix_7 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": attribute_5.outputs["Color"], + "Color1": mix_8, + "Color2": mix_5, + }, + ) + texture_coordinate = nw.new_node(Nodes.TextureCoord) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': texture_coordinate.outputs["Generated"]}) - - mix = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': 0.2, 'Color1': texture_coordinate.outputs["Generated"], 'Color2': noise_texture.outputs["Color"]}) - - group_1 = nw.new_node(nodegroup_l_inear().name, - input_kwargs={'Vector': mix, 'CoffX': 0.6}) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={"Vector": texture_coordinate.outputs["Generated"]}, + ) + + mix = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": 0.2, + "Color1": texture_coordinate.outputs["Generated"], + "Color2": noise_texture.outputs["Color"], + }, + ) + + group_1 = nw.new_node( + nodegroup_l_inear().name, input_kwargs={"Vector": mix, "CoffX": 0.6} + ) if rand: - group_1.inputs['CoffX'].default_value = sample_range(0, 0.08) - group_1.inputs['CoffZ'].default_value = 1.1 - group_1.inputs['CoffX'].default_value + group_1.inputs["CoffX"].default_value = sample_range(0, 0.08) + group_1.inputs["CoffZ"].default_value = ( + 1.1 - group_1.inputs["CoffX"].default_value + ) - add_1 = nw.new_node(Nodes.Math, - input_kwargs={0: group_1, 1: -0.02}) + add_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_1, 1: -0.02}) if rand: add_1.inputs[1].default_value = sample_range(-0.07, 0.03) - colorramp = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': add_1}) + colorramp = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": add_1}) colorramp.color_ramp.elements[0].position = 0.6295 colorramp.color_ramp.elements[0].color = (1.0, 1.0, 1.0, 1.0) colorramp.color_ramp.elements[1].position = 0.7068 colorramp.color_ramp.elements[1].color = (0.0, 0.0, 0.0, 1.0) - - noise_texture_3 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Scale': 20.0}, - attrs={'noise_dimensions': '4D'}) + + noise_texture_3 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={"Scale": 20.0}, + attrs={"noise_dimensions": "4D"}, + ) if rand: - noise_texture_3.inputs['W'].default_value = sample_range(-2, 2) + noise_texture_3.inputs["W"].default_value = sample_range(-2, 2) - colorramp_4 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': noise_texture_3.outputs["Fac"]}) + colorramp_4 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": noise_texture_3.outputs["Fac"]} + ) colorramp_4.color_ramp.elements[0].position = 0.4636 colorramp_4.color_ramp.elements[0].color = (0.0112, 0.0053, 0.0047, 1.0) colorramp_4.color_ramp.elements[1].position = 1.0 colorramp_4.color_ramp.elements[1].color = (0.0231, 0.0128, 0.0121, 1.0) - + if rand: - if kind == 'duck': + if kind == "duck": sample_color(colorramp_4.color_ramp.elements[0].color, keep_sum=True) sample_color(colorramp_4.color_ramp.elements[1].color, keep_sum=True) - if kind 
== 'eagle': + if kind == "eagle": for i in range(3): colorramp_4.color_ramp.elements[0].color[i] = sample_range(0, 0.01) colorramp_4.color_ramp.elements[0].position = sample_range(0.5, 0.6) colorramp_4.color_ramp.elements[1].color[i] = sample_range(0, 0.1) - mix_1 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': colorramp.outputs["Color"], 'Color1': colorramp_4.outputs["Color"], 'Color2': mix_5}) - - mix_2 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': attribute_3.outputs["Color"], 'Color1': mix_7, 'Color2': mix_1}) - - mix_4 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': attribute.outputs["Color"], 'Color1': mix_2, 'Color2': (0.0, 0.0, 0.0, 1.0)}) - - group = nw.new_node(nodegroup_head_neck(rand=rand, kind=kind).name, - input_kwargs={'Color1': mix_4, 'W': 0.5}) + mix_1 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": colorramp.outputs["Color"], + "Color1": colorramp_4.outputs["Color"], + "Color2": mix_5, + }, + ) + + mix_2 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": attribute_3.outputs["Color"], + "Color1": mix_7, + "Color2": mix_1, + }, + ) + + mix_4 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": attribute.outputs["Color"], + "Color1": mix_2, + "Color2": (0.0, 0.0, 0.0, 1.0), + }, + ) + + group = nw.new_node( + nodegroup_head_neck(rand=rand, kind=kind).name, + input_kwargs={"Color1": mix_4, "W": 0.5}, + ) if rand: - group.inputs['W'].default_value = sample_range(-2, 2) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': group, 'Subsurface IOR': 0.0, 'Specular': 0.0, 'Roughness': 1.0}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': principled_bsdf}) - - -def shader_bird_feather(nw: NodeWrangler, rand=True, kind='duck', tail=False, **input_kwargs): + group.inputs["W"].default_value = sample_range(-2, 2) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": group, + "Subsurface IOR": 0.0, + "Specular": 0.0, + "Roughness": 1.0, + }, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf} + ) + + +def shader_bird_feather( + nw: NodeWrangler, rand=True, kind="duck", tail=False, **input_kwargs +): # Code generated using version 2.4.3 of the node_transpiler - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'W': 1.6}, - attrs={'noise_dimensions': '4D'}) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, input_kwargs={"W": 1.6}, attrs={"noise_dimensions": "4D"} + ) if rand: - noise_texture.inputs['W'].default_value = sample_range(-2, 2) + noise_texture.inputs["W"].default_value = sample_range(-2, 2) - colorramp = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': noise_texture.outputs["Fac"]}) + colorramp = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": noise_texture.outputs["Fac"]} + ) colorramp.color_ramp.elements[0].position = 0.377 colorramp.color_ramp.elements[0].color = (0.02, 0.02, 0.02, 1.0) colorramp.color_ramp.elements[1].position = 1.0 colorramp.color_ramp.elements[1].color = (0.0061, 0.0058, 0.0059, 1.0) if rand: - if kind == 'duck': + if kind == "duck": x = sample_range(0.02, 0.15) for i in range(3): colorramp.color_ramp.elements[1].color[i] = x - elif kind == 'eagle': + elif kind == "eagle": if tail: colorramp.color_ramp.elements[0].color = (0.265, 0.265, 0.265, 1.0) sample_color(colorramp.color_ramp.elements[0].color, offset=0.05) colorramp.color_ramp.elements[1].color = (0.007, 0.007, 0.007, 1.0) - else: - colorramp.color_ramp.elements[0].color = (0.012861, 
0.006847, 0.004, 1.0) + else: + colorramp.color_ramp.elements[0].color = ( + 0.012861, + 0.006847, + 0.004, + 1.0, + ) sample_color(colorramp.color_ramp.elements[0].color, offset=0.003) - colorramp.color_ramp.elements[1].color = (0.154963, 0.081816, 0.042745, 1.0) + colorramp.color_ramp.elements[1].color = ( + 0.154963, + 0.081816, + 0.042745, + 1.0, + ) sample_color(colorramp.color_ramp.elements[1].color, offset=0.005) colorramp.color_ramp.elements[0].position = sample_range(0.56, 0.62) texture_coordinate = nw.new_node(Nodes.TextureCoord) - mapping = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': texture_coordinate.outputs["Object"]}) - - wave_texture = nw.new_node(Nodes.WaveTexture, - input_kwargs={'Vector': mapping, 'Scale': 5.00, 'Distortion': 10.0000, 'Detail': 10.0000, - 'Detail Roughness': 2.0000}) - - colorramp2 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': wave_texture.outputs["Color"]}) + mapping = nw.new_node( + Nodes.Mapping, input_kwargs={"Vector": texture_coordinate.outputs["Object"]} + ) + + wave_texture = nw.new_node( + Nodes.WaveTexture, + input_kwargs={ + "Vector": mapping, + "Scale": 5.00, + "Distortion": 10.0000, + "Detail": 10.0000, + "Detail Roughness": 2.0000, + }, + ) + + colorramp2 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": wave_texture.outputs["Color"]} + ) colorramp2.color_ramp.elements[0].position = 0.0955 colorramp2.color_ramp.elements[0].color = [0.0000, 0.0000, 0.0000, 1.0000] colorramp2.color_ramp.elements[1].position = 0.6364 colorramp2.color_ramp.elements[1].color = [1.0000, 1.0000, 1.0000, 1.0000] - mix = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': 0.5 if tail else 1.0, 'Color1': colorramp2.outputs["Color"], 'Color2': colorramp.outputs["Color"]}) + mix = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": 0.5 if tail else 1.0, + "Color1": colorramp2.outputs["Color"], + "Color2": colorramp.outputs["Color"], + }, + ) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={"Base Color": (mix, "Result"), "Specular": 0.0, "Roughness": 1.0}, + attrs={"subsurface_method": "BURLEY"}, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf} + ) - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': (mix, "Result"), 'Specular': 0.0, 'Roughness': 1.0}, - attrs={'subsurface_method': 'BURLEY'}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': principled_bsdf}) def shader_wave_feather(nw: NodeWrangler, **input_kwargs): # Code generated using version 2.5.1 of the node_transpiler texture_coordinate = nw.new_node(Nodes.TextureCoord) - mapping = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': texture_coordinate.outputs["Object"]}) - - wave_texture = nw.new_node(Nodes.WaveTexture, - input_kwargs={'Vector': mapping, 'Scale': 5.00, 'Distortion': 10.0000, 'Detail': 10.0000, - 'Detail Roughness': 2.0000}) - - colorramp = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': wave_texture.outputs["Color"]}) + mapping = nw.new_node( + Nodes.Mapping, input_kwargs={"Vector": texture_coordinate.outputs["Object"]} + ) + + wave_texture = nw.new_node( + Nodes.WaveTexture, + input_kwargs={ + "Vector": mapping, + "Scale": 5.00, + "Distortion": 10.0000, + "Detail": 10.0000, + "Detail Roughness": 2.0000, + }, + ) + + colorramp = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": wave_texture.outputs["Color"]} + ) colorramp.color_ramp.elements[0].position = 0.0955 colorramp.color_ramp.elements[0].color = [0.0000, 0.0000, 
0.0000, 1.0000] colorramp.color_ramp.elements[1].position = 0.6364 colorramp.color_ramp.elements[1].color = [1.0000, 1.0000, 1.0000, 1.0000] - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': colorramp.outputs["Color"]}) + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, input_kwargs={"Base Color": colorramp.outputs["Color"]} + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf} + ) - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': principled_bsdf}) def shader_bird_beak(nw: NodeWrangler, rand=True, **input_kwargs): # Code generated using version 2.4.3 of the node_transpiler - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'W': 1.1, 'Scale': 20.0, 'Roughness': 0.5142}, - attrs={'noise_dimensions': '4D'}) + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={"W": 1.1, "Scale": 20.0, "Roughness": 0.5142}, + attrs={"noise_dimensions": "4D"}, + ) if rand: - noise_texture.inputs['W'].default_value = sample_range(-2, 2) + noise_texture.inputs["W"].default_value = sample_range(-2, 2) - colorramp = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': noise_texture.outputs["Fac"]}) + colorramp = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": noise_texture.outputs["Fac"]} + ) colorramp.color_ramp.interpolation = "EASE" colorramp.color_ramp.elements[0].position = 0.3815 colorramp.color_ramp.elements[0].color = (0.2773, 0.271, 0.047, 1.0) @@ -407,23 +556,27 @@ def shader_bird_beak(nw: NodeWrangler, rand=True, **input_kwargs): sample_color(colorramp.color_ramp.elements[0].color, keep_sum=True) sample_color(colorramp.color_ramp.elements[1].color, keep_sum=True) - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': colorramp.outputs["Color"], 'Roughness': 0.3408}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': principled_bsdf}) + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={"Base Color": colorramp.outputs["Color"], "Roughness": 0.3408}, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf} + ) + def shader_bird_eyeball(nw: NodeWrangler, rand=True, **input_kwargs): # Code generated using version 2.4.3 of the node_transpiler - ''' + """ texture_coordinate = nw.new_node(Nodes.TextureCoord) - + mapping = nw.new_node(Nodes.Mapping, input_kwargs={'Vector': texture_coordinate.outputs["Generated"], 'Rotation': (0.0, 0.0, -0.5236)}) - + separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': mapping}) - + colorramp = nw.new_node(Nodes.ColorRamp, input_kwargs={'Fac': separate_xyz.outputs["X"]}) colorramp.color_ramp.interpolation = "CONSTANT" @@ -434,37 +587,49 @@ def shader_bird_eyeball(nw: NodeWrangler, rand=True, **input_kwargs): colorramp.color_ramp.elements[1].color = (0.6744, 0.0691, 0.3627, 1.0) colorramp.color_ramp.elements[2].position = 0.1909 colorramp.color_ramp.elements[2].color = (0.0, 0.0, 0.0, 1.0) - ''' - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': (0.0, 0.0, 0.0, 1.0), 'Roughness': 0.0}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': principled_bsdf}) + """ + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={"Base Color": (0.0, 0.0, 0.0, 1.0), "Roughness": 0.0}, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf} + ) + def shader_bird_claw(nw: 
NodeWrangler, rand=True, **input_kwargs): # Code generated using version 2.4.3 of the node_transpiler - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': (0.0091, 0.0091, 0.0091, 1.0), 'Specular': 0.0, 'Roughness': 0.4409}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': principled_bsdf}) + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": (0.0091, 0.0091, 0.0091, 1.0), + "Specular": 0.0, + "Roughness": 0.4409, + }, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf} + ) + def apply(objs, shader_kwargs={}, **kwargs): x = random.random() if x < 0.4: - kind = 'eagle' + kind = "eagle" else: - kind = 'duck' - shader_kwargs['kind'] = kind + kind = "duck" + shader_kwargs["kind"] = kind if not isinstance(objs, list): objs = [objs] for obj in objs: if "Tail" in obj.name: - shader_kwargs['tail'] = True + shader_kwargs["tail"] = True surface.add_material(obj, shader_bird_feather, input_kwargs=shader_kwargs) else: - shader_kwargs['tail'] = False + shader_kwargs["tail"] = False if "Body" in obj.name: surface.add_material(obj, shader_bird_body, input_kwargs=shader_kwargs) if "Feather" in obj.name and "Tail" not in obj.name: @@ -476,9 +641,10 @@ def apply(objs, shader_kwargs={}, **kwargs): if "Beak" in obj.name: surface.add_material(obj, shader_bird_beak, input_kwargs=shader_kwargs) + if __name__ == "__main__": for i in range(10): - bpy.ops.wm.open_mainfile(filepath='dev_scene_test_bird.blend') + bpy.ops.wm.open_mainfile(filepath="dev_scene_test_bird.blend") objs = [ "creature(98047, 0).parts(0, factory=NurbsBody)", "creature(98047, 0).parts(1).extra(TailFeathers, 1)", @@ -501,10 +667,9 @@ def apply(objs, shader_kwargs={}, **kwargs): ] objs = [bpy.data.objects[x] for x in objs] apply(objs) - fn_blend = os.path.join(os.path.abspath(os.curdir), 'dev_scene_eagle.blend') - fn = os.path.join(os.path.abspath(os.curdir), 'test_bird%d.jpg'%(i)) + fn_blend = os.path.join(os.path.abspath(os.curdir), "dev_scene_eagle.blend") + fn = os.path.join(os.path.abspath(os.curdir), "test_bird%d.jpg" % (i)) bpy.ops.wm.save_as_mainfile(filepath=fn_blend) bpy.context.scene.render.filepath = fn - bpy.context.scene.render.image_settings.file_format='JPEG' + bpy.context.scene.render.image_settings.file_format = "JPEG" bpy.ops.render.render(write_still=True) - \ No newline at end of file diff --git a/infinigen/assets/materials/black_plastic.py b/infinigen/assets/materials/black_plastic.py index 8a8a3a446..871239420 100644 --- a/infinigen/assets/materials/black_plastic.py +++ b/infinigen/assets/materials/black_plastic.py @@ -4,19 +4,23 @@ # Authors: Hongyu Wen from numpy.random import uniform as U + from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler from infinigen.core.util.color import hsv2rgba # used in ceiling lights and tv + def shader_black(nw: NodeWrangler): # Code generated using version 2.6.5 of the node_transpiler - color = hsv2rgba( - U(0.45, 0.55), - U(0, 0.1), - U(0, 1) + color = hsv2rgba(U(0.45, 0.55), U(0, 0.1), U(0, 1)) + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, input_kwargs={"Base Color": color} + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, + input_kwargs={"Surface": principled_bsdf}, + attrs={"is_active_output": True}, ) - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, input_kwargs={'Base Color': color}) - - material_output = nw.new_node(Nodes.MaterialOutput, input_kwargs={'Surface': principled_bsdf}, 
attrs={'is_active_output': True}) diff --git a/infinigen/assets/materials/blackbody_shader.py b/infinigen/assets/materials/blackbody_shader.py index eda65afa6..5d7734c1d 100644 --- a/infinigen/assets/materials/blackbody_shader.py +++ b/infinigen/assets/materials/blackbody_shader.py @@ -4,17 +4,13 @@ # Authors: Karhan Kayan # Acknowledgement: This file draws inspiration from https://www.youtube.com/watch?v=zyIJQHlFQs0 by PolyFjord -import bpy -import mathutils -from numpy.random import uniform, normal as N, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category +from numpy.random import normal as N + from infinigen.core import surface +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler from infinigen.core.util.random import random_color_neighbour - def blackbody_shader(nw: NodeWrangler): # Code generated using version 2.6.4 of the node_transpiler @@ -56,7 +52,9 @@ def blackbody_shader(nw: NodeWrangler): principled_volume = nw.new_node( Nodes.PrincipledVolume, input_kwargs={ - "Color": random_color_neighbour((0.3568, 0.3568, 0.3568, 1.0000),0.1,0.1,0.1), + "Color": random_color_neighbour( + (0.3568, 0.3568, 0.3568, 1.0000), 0.1, 0.1, 0.1 + ), "Density": 15.0000 + N(), "Blackbody Intensity": multiply_1, }, @@ -70,4 +68,4 @@ def blackbody_shader(nw: NodeWrangler): def apply(obj, selection=None, **kwargs): - surface.add_material(obj, blackbody_shader, selection=selection) \ No newline at end of file + surface.add_material(obj, blackbody_shader, selection=selection) diff --git a/infinigen/assets/materials/bone.py b/infinigen/assets/materials/bone.py index 45f6b35cf..d8545db0e 100644 --- a/infinigen/assets/materials/bone.py +++ b/infinigen/assets/materials/bone.py @@ -4,83 +4,146 @@ # Authors: Alexander Raistrick -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category +from numpy.random import uniform + from infinigen.core import surface +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler + def shader_bone(nw: NodeWrangler): # Code generated using version 2.4.3 of the node_transpiler texture_coordinate = nw.new_node(Nodes.TextureCoord) - - mapping = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': texture_coordinate.outputs["Object"], 'Location': (1.7 + uniform(-1, 1) * 0.05, 0.29999999999999999 + uniform(-1, 1) * 0.05, uniform(-1, 1) * 0.05)}) - - noise_texture_2 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': mapping, 'Scale': 10.800000000000001 + uniform(-1, 1) * 3, 'Detail': 15.0, 'Roughness': 0.76670000000000005}) - - voronoi_texture_1 = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': noise_texture_2.outputs["Fac"], 'Scale': 10.0}) - - colorramp_2 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': voronoi_texture_1.outputs["Color"]}) + + mapping = nw.new_node( + Nodes.Mapping, + input_kwargs={ + "Vector": texture_coordinate.outputs["Object"], + "Location": ( + 1.7 + uniform(-1, 1) * 0.05, + 0.29999999999999999 + uniform(-1, 1) * 0.05, + uniform(-1, 1) * 0.05, + ), + }, + ) + + noise_texture_2 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": mapping, + "Scale": 10.800000000000001 + uniform(-1, 1) * 3, + "Detail": 15.0, + "Roughness": 0.76670000000000005, + }, + ) + + voronoi_texture_1 = nw.new_node( + 
Nodes.VoronoiTexture, + input_kwargs={"Vector": noise_texture_2.outputs["Fac"], "Scale": 10.0}, + ) + + colorramp_2 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": voronoi_texture_1.outputs["Color"]} + ) colorramp_2.color_ramp.elements[0].position = 0.4364 + uniform(-1, 1) * 0.05 colorramp_2.color_ramp.elements[0].color = (0, 0, 0, 1.0) colorramp_2.color_ramp.elements[1].position = 0.58 + uniform(-1, 1) * 0.05 colorramp_2.color_ramp.elements[1].color = (1, 1, 1, 1.0) - - mapping_2 = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': texture_coordinate.outputs["Object"]}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': mapping_2, 'Scale': 98.900000000000006 + uniform(-0.3, 1) * 30, 'Detail': 15.0, 'Roughness': 0.76670000000000005}) - - voronoi_texture = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': noise_texture.outputs["Fac"], 'Scale': 10.0 + uniform(-1, 1) * 0.05}) - - colorramp = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': voronoi_texture.outputs["Color"]}) + + mapping_2 = nw.new_node( + Nodes.Mapping, input_kwargs={"Vector": texture_coordinate.outputs["Object"]} + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": mapping_2, + "Scale": 98.900000000000006 + uniform(-0.3, 1) * 30, + "Detail": 15.0, + "Roughness": 0.76670000000000005, + }, + ) + + voronoi_texture = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={ + "Vector": noise_texture.outputs["Fac"], + "Scale": 10.0 + uniform(-1, 1) * 0.05, + }, + ) + + colorramp = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": voronoi_texture.outputs["Color"]} + ) colorramp.color_ramp.elements[0].position = 0.3089 + uniform(-1, 1) * 0.05 colorramp.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) colorramp.color_ramp.elements[1].position = 0.673 + uniform(-1, 1) * 0.05 colorramp.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - - multiply = nw.new_node(Nodes.VectorMath, + + multiply = nw.new_node( + Nodes.VectorMath, input_kwargs={0: colorramp_2.outputs["Color"], 1: colorramp.outputs["Color"]}, - attrs={'operation': 'MULTIPLY'}) - - mapping_1 = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': texture_coordinate.outputs["UV"], 'Scale': (1.0, 1.0, 0.0)}) - - noise_texture_1 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': mapping_1, 'Scale': 6.4000000000000004 + uniform(-1, 1) * 1}) - - colorramp_1 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': noise_texture_1.outputs["Fac"]}) + attrs={"operation": "MULTIPLY"}, + ) + + mapping_1 = nw.new_node( + Nodes.Mapping, + input_kwargs={ + "Vector": texture_coordinate.outputs["UV"], + "Scale": (1.0, 1.0, 0.0), + }, + ) + + noise_texture_1 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": mapping_1, + "Scale": 6.4000000000000004 + uniform(-1, 1) * 1, + }, + ) + + colorramp_1 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": noise_texture_1.outputs["Fac"]} + ) colorramp_1.color_ramp.elements[0].position = 0.3682 + uniform(-1, 1) * 0.05 - colorramp_1.color_ramp.elements[0].color = (0.38129999999999997, 0.2384, 0.1183, 1.0) + colorramp_1.color_ramp.elements[0].color = ( + 0.38129999999999997, + 0.2384, + 0.1183, + 1.0, + ) colorramp_1.color_ramp.elements[1].position = 0.7591 + uniform(-1, 1) * 0.05 - colorramp_1.color_ramp.elements[1].color = (0.49690000000000001, 0.50290000000000001, 0.46779999999999999, 1.0) - - mix = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': multiply.outputs["Vector"], 'Color1': (0.19120000000000001, 0.045199999999999997, 0.0103, 1.0), 
'Color2': colorramp_1.outputs["Color"]}) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': mix, 'Roughness': 0.44090000000000001}) - - glass_bsdf = nw.new_node('ShaderNodeBsdfGlass') - - mix_shader = nw.new_node(Nodes.MixShader, - input_kwargs={'Fac': 0.2, 1: principled_bsdf, 2: glass_bsdf}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': mix_shader}) + colorramp_1.color_ramp.elements[1].color = ( + 0.49690000000000001, + 0.50290000000000001, + 0.46779999999999999, + 1.0, + ) + + mix = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": multiply.outputs["Vector"], + "Color1": (0.19120000000000001, 0.045199999999999997, 0.0103, 1.0), + "Color2": colorramp_1.outputs["Color"], + }, + ) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={"Base Color": mix, "Roughness": 0.44090000000000001}, + ) + + glass_bsdf = nw.new_node("ShaderNodeBsdfGlass") + + mix_shader = nw.new_node( + Nodes.MixShader, input_kwargs={"Fac": 0.2, 1: principled_bsdf, 2: glass_bsdf} + ) + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": mix_shader} + ) def apply(obj, selection=None, **kwargs): - surface.add_material(obj, shader_bone, selection=selection) \ No newline at end of file + surface.add_material(obj, shader_bone, selection=selection) diff --git a/infinigen/assets/materials/brick.py b/infinigen/assets/materials/brick.py index 20e45873c..9190309ab 100644 --- a/infinigen/assets/materials/brick.py +++ b/infinigen/assets/materials/brick.py @@ -7,53 +7,75 @@ import numpy as np from numpy.random import uniform +from infinigen.assets.materials import common from infinigen.assets.utils.object import new_plane -from infinigen.assets.utils.uv import unwrap_faces, unwrap_normal -from infinigen.core.util.color import hsv2rgba +from infinigen.assets.utils.uv import unwrap_normal from infinigen.core.nodes.node_info import Nodes from infinigen.core.nodes.node_wrangler import NodeWrangler -from infinigen.assets.materials import common -from infinigen.core.util.random import log_uniform from infinigen.core.util import blender as butil +from infinigen.core.util.color import hsv2rgba +from infinigen.core.util.random import log_uniform def shader_brick(nw: NodeWrangler, height=None, **kwargs): if height is None: - height = log_uniform(.07, .12) + height = log_uniform(0.07, 0.12) uv_map = nw.new_node(Nodes.UVMap) - front_color, back_color = [hsv2rgba(uniform(0, .05), uniform(.8, 1), log_uniform(.02, .5)) for _ in - range(2)] - mortar_color = hsv2rgba(uniform(0, .05), uniform(.2, .5), log_uniform(.02, .8)) - dark_color = hsv2rgba(uniform(0, .05), uniform(.8, 1), log_uniform(.005, .02)) - noise = nw.new_node(Nodes.NoiseTexture, [uv_map], - input_kwargs={'Scale': uniform(40, 50), 'Detail': uniform(15, 20)}) - color = nw.new_node(Nodes.BrickTexture, [uv_map, front_color, back_color, mortar_color], input_kwargs={ - 'Scale': 1, - 'Row Height': height, - 'Brick Width': height * log_uniform(1.2, 2.5), - 'Mortar Size': height * log_uniform(.04, .08), - 'Mortar Smooth': noise - }).outputs['Color'] - noise = nw.new_node(Nodes.MusgraveTexture, [uv_map], input_kwargs={'Scale': uniform(2, 5)}) - color = nw.new_node(Nodes.MixRGB, [nw.scalar_multiply(log_uniform(.5, 1.), noise), color, dark_color], - attrs={'blend_type': 'DARKEN'}) + front_color, back_color = [ + hsv2rgba(uniform(0, 0.05), uniform(0.8, 1), log_uniform(0.02, 0.5)) + for _ in range(2) + ] + mortar_color = hsv2rgba(uniform(0, 0.05), uniform(0.2, 0.5), 
log_uniform(0.02, 0.8)) + dark_color = hsv2rgba(uniform(0, 0.05), uniform(0.8, 1), log_uniform(0.005, 0.02)) + noise = nw.new_node( + Nodes.NoiseTexture, + [uv_map], + input_kwargs={"Scale": uniform(40, 50), "Detail": uniform(15, 20)}, + ) + color = nw.new_node( + Nodes.BrickTexture, + [uv_map, front_color, back_color, mortar_color], + input_kwargs={ + "Scale": 1, + "Row Height": height, + "Brick Width": height * log_uniform(1.2, 2.5), + "Mortar Size": height * log_uniform(0.04, 0.08), + "Mortar Smooth": noise, + }, + ).outputs["Color"] + noise = nw.new_node( + Nodes.MusgraveTexture, [uv_map], input_kwargs={"Scale": uniform(2, 5)} + ) + color = nw.new_node( + Nodes.MixRGB, + [nw.scalar_multiply(log_uniform(0.5, 1.0), noise), color, dark_color], + attrs={"blend_type": "DARKEN"}, + ) - roughness = nw.build_float_curve(nw.new_node(Nodes.NoiseTexture, input_kwargs={'Scale': 50}), - [(0, .5), (1, 1.)]) + roughness = nw.build_float_curve( + nw.new_node(Nodes.NoiseTexture, input_kwargs={"Scale": 50}), + [(0, 0.5), (1, 1.0)], + ) - offset = nw.scalar_add(nw.scalar_multiply(nw.scalar_sub(color, .5), uniform(.01, .04)), nw.scalar_multiply( - nw.new_node(Nodes.MusgraveTexture, [uv_map], input_kwargs={'Scale': 50}), uniform(.0, .01))) - bump = nw.new_node(Nodes.Bump, input_kwargs={'Height': offset}) - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={"Roughness": roughness, 'Base Color': color, 'Normal': bump - }) - nw.new_node(Nodes.MaterialOutput, input_kwargs={'Surface': principled_bsdf}) + offset = nw.scalar_add( + nw.scalar_multiply(nw.scalar_sub(color, 0.5), uniform(0.01, 0.04)), + nw.scalar_multiply( + nw.new_node(Nodes.MusgraveTexture, [uv_map], input_kwargs={"Scale": 50}), + uniform(0.0, 0.01), + ), + ) + bump = nw.new_node(Nodes.Bump, input_kwargs={"Height": offset}) + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={"Roughness": roughness, "Base Color": color, "Normal": bump}, + ) + nw.new_node(Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf}) def apply(obj, selection=None, height=None, **kwargs): for o in obj if isinstance(obj, Iterable) else [obj]: - unwrap_normal(o, selection, axis_='z') + unwrap_normal(o, selection, axis_="z") common.apply(obj, shader_brick, selection, height, **kwargs) diff --git a/infinigen/assets/materials/bumpy_rubber_floor.py b/infinigen/assets/materials/bumpy_rubber_floor.py index ee5fa8cdb..5ad9a1f38 100644 --- a/infinigen/assets/materials/bumpy_rubber_floor.py +++ b/infinigen/assets/materials/bumpy_rubber_floor.py @@ -4,173 +4,387 @@ # Authors: Yiming Zuo -import bpy -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category, hsv2rgba +from numpy.random import uniform + from infinigen.core import surface +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.util.color import hsv2rgba + -@node_utils.to_nodegroup('nodegroup_node_group', singleton=False, type='ShaderNodeTree') +@node_utils.to_nodegroup("nodegroup_node_group", singleton=False, type="ShaderNodeTree") def nodegroup_node_group(nw: NodeWrangler): # Code generated using version 2.6.5 of the node_transpiler texture_coordinate = nw.new_node(Nodes.TextureCoord) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketColor', 'Base Color', (0.8000, 0.8000, 0.8000, 
1.0000)), - ('NodeSocketFloat', 'Scale', 1.0000), - ('NodeSocketFloat', 'Seed', 0.0000), - ('NodeSocketFloatFactor', 'Roughness', 0.4000)]) - - mapping = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': texture_coordinate.outputs["Generated"], 'Scale': group_input.outputs["Scale"]}) - - reroute_1 = nw.new_node(Nodes.Reroute, input_kwargs={'Input': mapping}) - - reroute = nw.new_node(Nodes.Reroute, input_kwargs={'Input': group_input.outputs["Seed"]}) - - noise_texture_9 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': reroute_1, 'W': reroute, 'Scale': 18.0000, 'Detail': 3.0000, 'Roughness': 0.4500}, - attrs={'noise_dimensions': '4D'}) - - map_range_6 = nw.new_node(Nodes.MapRange, input_kwargs={'Value': noise_texture_9.outputs["Fac"], 3: 0.6000, 4: 1.4000}) - - hue_saturation_value = nw.new_node(Nodes.HueSaturationValue, - input_kwargs={'Value': map_range_6.outputs["Result"], 'Color': group_input.outputs["Base Color"]}) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': hue_saturation_value, 'Specular': 0.9, 'Roughness': group_input.outputs["Roughness"]}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': mapping, 'W': group_input.outputs["Seed"], 'Scale': 2.0000, 'Detail': 6.0000}, - attrs={'noise_dimensions': '4D'}) - - voronoi_texture = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': noise_texture.outputs["Fac"], 'Randomness': 0.0000}, - attrs={'feature': 'DISTANCE_TO_EDGE', 'voronoi_dimensions': '4D'}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': voronoi_texture.outputs["Distance"], 2: 0.0300, 3: 1.0000, 4: 0.0000}) - - noise_texture_1 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': mapping, 'W': group_input.outputs["Seed"], 'Scale': 2.5000, 'Detail': 6.0000}, - attrs={'noise_dimensions': '4D'}) - - map_range_1 = nw.new_node(Nodes.MapRange, input_kwargs={'Value': noise_texture_1.outputs["Fac"], 1: 0.5500, 2: 0.5700}) - - multiply = nw.new_node(Nodes.Math, + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketColor", "Base Color", (0.8000, 0.8000, 0.8000, 1.0000)), + ("NodeSocketFloat", "Scale", 1.0000), + ("NodeSocketFloat", "Seed", 0.0000), + ("NodeSocketFloatFactor", "Roughness", 0.4000), + ], + ) + + mapping = nw.new_node( + Nodes.Mapping, + input_kwargs={ + "Vector": texture_coordinate.outputs["Generated"], + "Scale": group_input.outputs["Scale"], + }, + ) + + reroute_1 = nw.new_node(Nodes.Reroute, input_kwargs={"Input": mapping}) + + reroute = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": group_input.outputs["Seed"]} + ) + + noise_texture_9 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": reroute_1, + "W": reroute, + "Scale": 18.0000, + "Detail": 3.0000, + "Roughness": 0.4500, + }, + attrs={"noise_dimensions": "4D"}, + ) + + map_range_6 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": noise_texture_9.outputs["Fac"], 3: 0.6000, 4: 1.4000}, + ) + + hue_saturation_value = nw.new_node( + Nodes.HueSaturationValue, + input_kwargs={ + "Value": map_range_6.outputs["Result"], + "Color": group_input.outputs["Base Color"], + }, + ) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": hue_saturation_value, + "Specular": 0.9, + "Roughness": group_input.outputs["Roughness"], + }, + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": mapping, + "W": group_input.outputs["Seed"], + "Scale": 2.0000, + "Detail": 6.0000, + }, + 
attrs={"noise_dimensions": "4D"}, + ) + + voronoi_texture = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={"Vector": noise_texture.outputs["Fac"], "Randomness": 0.0000}, + attrs={"feature": "DISTANCE_TO_EDGE", "voronoi_dimensions": "4D"}, + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": voronoi_texture.outputs["Distance"], + 2: 0.0300, + 3: 1.0000, + 4: 0.0000, + }, + ) + + noise_texture_1 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": mapping, + "W": group_input.outputs["Seed"], + "Scale": 2.5000, + "Detail": 6.0000, + }, + attrs={"noise_dimensions": "4D"}, + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": noise_texture_1.outputs["Fac"], 1: 0.5500, 2: 0.5700}, + ) + + multiply = nw.new_node( + Nodes.Math, input_kwargs={0: map_range.outputs["Result"], 1: map_range_1.outputs["Result"]}, - attrs={'operation': 'MULTIPLY'}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: -0.5000}, attrs={'operation': 'MULTIPLY'}) - - noise_texture_2 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': mapping, 'W': group_input.outputs["Seed"], 'Scale': 10.0000, 'Detail': 15.0000, 'Distortion': 0.1000}, - attrs={'noise_dimensions': '4D'}) - - map_range_2 = nw.new_node(Nodes.MapRange, input_kwargs={'Value': noise_texture_2.outputs["Fac"], 1: 0.6300, 2: 0.6800}) - - multiply_2 = nw.new_node(Nodes.Math, input_kwargs={0: map_range_2.outputs["Result"], 1: -1.0000}, attrs={'operation': 'MULTIPLY'}) - + attrs={"operation": "MULTIPLY"}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply, 1: -0.5000}, + attrs={"operation": "MULTIPLY"}, + ) + + noise_texture_2 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": mapping, + "W": group_input.outputs["Seed"], + "Scale": 10.0000, + "Detail": 15.0000, + "Distortion": 0.1000, + }, + attrs={"noise_dimensions": "4D"}, + ) + + map_range_2 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": noise_texture_2.outputs["Fac"], 1: 0.6300, 2: 0.6800}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: map_range_2.outputs["Result"], 1: -1.0000}, + attrs={"operation": "MULTIPLY"}, + ) + add = nw.new_node(Nodes.Math, input_kwargs={0: multiply_1, 1: multiply_2}) - + value = nw.new_node(Nodes.Value) value.outputs[0].default_value = 200.0000 - - noise_texture_3 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': mapping, 'W': group_input.outputs["Seed"], 'Scale': value}, - attrs={'noise_dimensions': '4D'}) - - voronoi_texture_1 = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': mapping, 'W': group_input.outputs["Seed"], 'Scale': value}, - attrs={'voronoi_dimensions': '4D'}) - - mix = nw.new_node(Nodes.Mix, - input_kwargs={0: 0.4000, 2: noise_texture_3.outputs["Fac"], 3: voronoi_texture_1.outputs["Distance"]}) - - multiply_3 = nw.new_node(Nodes.Math, input_kwargs={0: mix.outputs["Result"], 1: 0.1000}, attrs={'operation': 'MULTIPLY'}) - + + noise_texture_3 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": mapping, + "W": group_input.outputs["Seed"], + "Scale": value, + }, + attrs={"noise_dimensions": "4D"}, + ) + + voronoi_texture_1 = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={ + "Vector": mapping, + "W": group_input.outputs["Seed"], + "Scale": value, + }, + attrs={"voronoi_dimensions": "4D"}, + ) + + mix = nw.new_node( + Nodes.Mix, + input_kwargs={ + 0: 0.4000, + 2: noise_texture_3.outputs["Fac"], + 3: voronoi_texture_1.outputs["Distance"], + }, + ) + + multiply_3 = nw.new_node( + 
Nodes.Math, + input_kwargs={0: mix.outputs["Result"], 1: 0.1000}, + attrs={"operation": "MULTIPLY"}, + ) + add_1 = nw.new_node(Nodes.Math, input_kwargs={0: add, 1: multiply_3}) - - noise_texture_4 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': mapping, 'W': group_input.outputs["Seed"], 'Scale': 4.0000, 'Detail': 1.0000, 'Roughness': 0.4500}, - attrs={'noise_dimensions': '4D'}) - - subtract = nw.new_node(Nodes.Math, input_kwargs={0: noise_texture_4.outputs["Fac"]}, attrs={'operation': 'SUBTRACT'}) - - multiply_4 = nw.new_node(Nodes.Math, input_kwargs={0: subtract, 1: 3.0000}, attrs={'operation': 'MULTIPLY'}) - + + noise_texture_4 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": mapping, + "W": group_input.outputs["Seed"], + "Scale": 4.0000, + "Detail": 1.0000, + "Roughness": 0.4500, + }, + attrs={"noise_dimensions": "4D"}, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: noise_texture_4.outputs["Fac"]}, + attrs={"operation": "SUBTRACT"}, + ) + + multiply_4 = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract, 1: 3.0000}, + attrs={"operation": "MULTIPLY"}, + ) + add_2 = nw.new_node(Nodes.Math, input_kwargs={0: add_1, 1: multiply_4}) - - noise_texture_5 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': mapping, 'W': group_input.outputs["Seed"], 'Scale': 40.0000, 'Detail': 15.0000, 'Distortion': 0.1000}, - attrs={'noise_dimensions': '4D'}) - - map_range_3 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': noise_texture_5.outputs["Fac"], 1: 0.6500, 2: 0.6400, 3: 1.0000, 4: 0.0000}) - - noise_texture_7 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': mapping, 'W': group_input.outputs["Seed"], 'Scale': 12.0000, 'Detail': 6.0000}, - attrs={'noise_dimensions': '4D'}) - - map_range_4 = nw.new_node(Nodes.MapRange, input_kwargs={'Value': noise_texture_7.outputs["Fac"], 1: 0.5500, 2: 0.5700}) - - multiply_5 = nw.new_node(Nodes.Math, - input_kwargs={0: map_range_3.outputs["Result"], 1: map_range_4.outputs["Result"]}, - attrs={'operation': 'MULTIPLY'}) - - subtract_1 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_5}, attrs={'operation': 'SUBTRACT'}) - - multiply_6 = nw.new_node(Nodes.Math, input_kwargs={0: subtract_1, 1: -0.5000}, attrs={'operation': 'MULTIPLY'}) - + + noise_texture_5 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": mapping, + "W": group_input.outputs["Seed"], + "Scale": 40.0000, + "Detail": 15.0000, + "Distortion": 0.1000, + }, + attrs={"noise_dimensions": "4D"}, + ) + + map_range_3 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": noise_texture_5.outputs["Fac"], + 1: 0.6500, + 2: 0.6400, + 3: 1.0000, + 4: 0.0000, + }, + ) + + noise_texture_7 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": mapping, + "W": group_input.outputs["Seed"], + "Scale": 12.0000, + "Detail": 6.0000, + }, + attrs={"noise_dimensions": "4D"}, + ) + + map_range_4 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": noise_texture_7.outputs["Fac"], 1: 0.5500, 2: 0.5700}, + ) + + multiply_5 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: map_range_3.outputs["Result"], + 1: map_range_4.outputs["Result"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + subtract_1 = nw.new_node( + Nodes.Math, input_kwargs={0: multiply_5}, attrs={"operation": "SUBTRACT"} + ) + + multiply_6 = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract_1, 1: -0.5000}, + attrs={"operation": "MULTIPLY"}, + ) + add_3 = nw.new_node(Nodes.Math, input_kwargs={0: add_2, 1: multiply_6}) - - noise_texture_6 = 
nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': reroute_1, 'W': reroute, 'Scale': 30.0000, 'Detail': 3.0000, 'Roughness': 0.4500}, - attrs={'noise_dimensions': '4D'}) - - subtract_2 = nw.new_node(Nodes.Math, input_kwargs={0: noise_texture_6.outputs["Fac"]}, attrs={'operation': 'SUBTRACT'}) - - multiply_7 = nw.new_node(Nodes.Math, input_kwargs={0: subtract_2}, attrs={'operation': 'MULTIPLY'}) - + + noise_texture_6 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": reroute_1, + "W": reroute, + "Scale": 30.0000, + "Detail": 3.0000, + "Roughness": 0.4500, + }, + attrs={"noise_dimensions": "4D"}, + ) + + subtract_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: noise_texture_6.outputs["Fac"]}, + attrs={"operation": "SUBTRACT"}, + ) + + multiply_7 = nw.new_node( + Nodes.Math, input_kwargs={0: subtract_2}, attrs={"operation": "MULTIPLY"} + ) + add_4 = nw.new_node(Nodes.Math, input_kwargs={0: add_3, 1: multiply_7}) - - noise_texture_8 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': reroute_1, 'W': reroute, 'Scale': 20.0000, 'Detail': 3.0000, 'Roughness': 0.4500}, - attrs={'noise_dimensions': '4D'}) - - map_range_5 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': noise_texture_8.outputs["Fac"], 1: 0.5500, 2: 0.5100, 3: -0.5000, 4: 0.5000}) - - multiply_8 = nw.new_node(Nodes.Math, input_kwargs={0: map_range_5.outputs["Result"], 1: 0.0500}, attrs={'operation': 'MULTIPLY'}) - + + noise_texture_8 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": reroute_1, + "W": reroute, + "Scale": 20.0000, + "Detail": 3.0000, + "Roughness": 0.4500, + }, + attrs={"noise_dimensions": "4D"}, + ) + + map_range_5 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": noise_texture_8.outputs["Fac"], + 1: 0.5500, + 2: 0.5100, + 3: -0.5000, + 4: 0.5000, + }, + ) + + multiply_8 = nw.new_node( + Nodes.Math, + input_kwargs={0: map_range_5.outputs["Result"], 1: 0.0500}, + attrs={"operation": "MULTIPLY"}, + ) + add_5 = nw.new_node(Nodes.Math, input_kwargs={0: add_4, 1: multiply_8}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'BSDF': principled_bsdf, 'Displacement': add_5, 'tmp_viewer': principled_bsdf}, - attrs={'is_active_output': True}) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "BSDF": principled_bsdf, + "Displacement": add_5, + "tmp_viewer": principled_bsdf, + }, + attrs={"is_active_output": True}, + ) + def shader_bumpy_rubber(nw: NodeWrangler, scale=2.0, base_color=None, seed=None): # Code generated using version 2.6.5 of the node_transpiler if base_color is None: - base_color = hsv2rgba(uniform(0, 1), uniform(.2, .5), uniform(.4, .7)) + base_color = hsv2rgba(uniform(0, 1), uniform(0.2, 0.5), uniform(0.4, 0.7)) if seed is None: seed = uniform(-1000.0, 1000.0) - + roughness = uniform(0.1, 0.3) - group = nw.new_node(nodegroup_node_group().name, - input_kwargs={'Base Color': base_color, 'Scale': scale, 'Seed': seed, 'Roughness': roughness}) - - displacement = nw.new_node(Nodes.Displacement, - input_kwargs={'Height': group.outputs["Displacement"], 'Midlevel': 0.0000, 'Scale': 0.0010}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': group.outputs["tmp_viewer"], 'Displacement': displacement}, - attrs={'is_active_output': True}) + group = nw.new_node( + nodegroup_node_group().name, + input_kwargs={ + "Base Color": base_color, + "Scale": scale, + "Seed": seed, + "Roughness": roughness, + }, + ) + + displacement = nw.new_node( + Nodes.Displacement, + input_kwargs={ + "Height": 
group.outputs["Displacement"], + "Midlevel": 0.0000, + "Scale": 0.0010, + }, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, + input_kwargs={ + "Surface": group.outputs["tmp_viewer"], + "Displacement": displacement, + }, + attrs={"is_active_output": True}, + ) + def apply(obj, selection=None, **kwargs): - surface.add_material(obj, shader_bumpy_rubber, selection=selection) \ No newline at end of file + surface.add_material(obj, shader_bumpy_rubber, selection=selection) diff --git a/infinigen/assets/materials/ceiling_light_shaders.py b/infinigen/assets/materials/ceiling_light_shaders.py index d738d8230..fb15d9bf4 100644 --- a/infinigen/assets/materials/ceiling_light_shaders.py +++ b/infinigen/assets/materials/ceiling_light_shaders.py @@ -4,6 +4,7 @@ # Authors: Hongyu Wen from numpy.random import uniform as U + from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler from infinigen.core.util.color import hsv2rgba @@ -12,34 +13,59 @@ def shader_lamp_bulb_nonemissive(nw: NodeWrangler): # Code generated using version 2.6.5 of the node_transpiler light_path = nw.new_node(Nodes.LightPath) - + object_info = nw.new_node(Nodes.ObjectInfo_Shader) - - white_noise_texture = nw.new_node(Nodes.WhiteNoiseTexture, - input_kwargs={'Vector': object_info.outputs["Random"]}, - attrs={'noise_dimensions': '4D'}) - - mix = nw.new_node(Nodes.Mix, - input_kwargs={0: 0.9000, 6: white_noise_texture.outputs["Color"], 7: (0.5000, 0.4444, 0.3669, 1.0000)}, - attrs={'data_type': 'RGBA'}) - - transparent_bsdf = nw.new_node(Nodes.TransparentBSDF, input_kwargs={'Color': mix.outputs[2]}) - - translucent_bsdf = nw.new_node(Nodes.TranslucentBSDF, input_kwargs={'Color': mix.outputs[2]}) - - mix_shader = nw.new_node(Nodes.MixShader, - input_kwargs={'Fac': light_path.outputs["Is Camera Ray"], 1: transparent_bsdf, 2: translucent_bsdf}) - - material_output = nw.new_node(Nodes.MaterialOutput, input_kwargs={'Surface': mix_shader}, attrs={'is_active_output': True}) + + white_noise_texture = nw.new_node( + Nodes.WhiteNoiseTexture, + input_kwargs={"Vector": object_info.outputs["Random"]}, + attrs={"noise_dimensions": "4D"}, + ) + + mix = nw.new_node( + Nodes.Mix, + input_kwargs={ + 0: 0.9000, + 6: white_noise_texture.outputs["Color"], + 7: (0.5000, 0.4444, 0.3669, 1.0000), + }, + attrs={"data_type": "RGBA"}, + ) + + transparent_bsdf = nw.new_node( + Nodes.TransparentBSDF, input_kwargs={"Color": mix.outputs[2]} + ) + + translucent_bsdf = nw.new_node( + Nodes.TranslucentBSDF, input_kwargs={"Color": mix.outputs[2]} + ) + + mix_shader = nw.new_node( + Nodes.MixShader, + input_kwargs={ + "Fac": light_path.outputs["Is Camera Ray"], + 1: transparent_bsdf, + 2: translucent_bsdf, + }, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, + input_kwargs={"Surface": mix_shader}, + attrs={"is_active_output": True}, + ) + def shader_black(nw: NodeWrangler): # Code generated using version 2.6.5 of the node_transpiler - color = hsv2rgba( - U(0.45, 0.55), - U(0, 0.1), - U(0, 1) + color = hsv2rgba(U(0.45, 0.55), U(0, 0.1), U(0, 1)) + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, input_kwargs={"Base Color": color} + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, + input_kwargs={"Surface": principled_bsdf}, + attrs={"is_active_output": True}, ) - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, input_kwargs={'Base Color': color}) - - material_output = nw.new_node(Nodes.MaterialOutput, input_kwargs={'Surface': principled_bsdf}, attrs={'is_active_output': True}) \ No newline at end of file diff 
--git a/infinigen/assets/materials/ceramic.py b/infinigen/assets/materials/ceramic.py index 40e63c96f..488f75cc4 100644 --- a/infinigen/assets/materials/ceramic.py +++ b/infinigen/assets/materials/ceramic.py @@ -4,40 +4,58 @@ # Authors: Lingjie Mei from numpy.random import uniform -from infinigen.core.util.color import hsv2rgba from infinigen.assets.materials import common -from infinigen.core.util.random import log_uniform from infinigen.core.nodes.node_info import Nodes from infinigen.core.nodes.node_wrangler import NodeWrangler +from infinigen.core.util.color import hsv2rgba +from infinigen.core.util.random import log_uniform -def shader_ceramic(nw: NodeWrangler, clear=False, roughness_min=0, roughness_max=.8, **kwargs): - if uniform(0, 1) < .8 and not clear: - color = hsv2rgba(uniform(0, 1), uniform(.2, .4), log_uniform(.3, .6)) +def shader_ceramic( + nw: NodeWrangler, clear=False, roughness_min=0, roughness_max=0.8, **kwargs +): + if uniform(0, 1) < 0.8 and not clear: + color = hsv2rgba(uniform(0, 1), uniform(0.2, 0.4), log_uniform(0.3, 0.6)) else: - color = hsv2rgba(0, 0, log_uniform(.3, .6)) - - roughness = nw.build_float_curve(nw.musgrave(log_uniform(20, 40)), [(0, roughness_min), (1, roughness_max)]) - clearcoat_roughness = nw.build_float_curve(nw.musgrave(log_uniform(20, 40)), - [(0, roughness_min), (1, roughness_max)]) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, input_kwargs={ - "Roughness": roughness, - 'Clearcoat': 1, - 'Clearcoat Roughness': clearcoat_roughness, - 'Specular': 1, - 'Base Color': color, - 'Subsurface': uniform(.02, .05), - 'Subsurface Radius': (.02, .02, .02) - }) - - displacement = nw.new_node('ShaderNodeDisplacement', input_kwargs={ - 'Height': nw.scalar_multiply(log_uniform(.001, .005), nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Scale': log_uniform(20, 40)})), - 'Midlevel': 0.0000 - }) - - nw.new_node(Nodes.MaterialOutput, input_kwargs={'Surface': principled_bsdf, 'Displacement': displacement}) + color = hsv2rgba(0, 0, log_uniform(0.3, 0.6)) + + roughness = nw.build_float_curve( + nw.musgrave(log_uniform(20, 40)), [(0, roughness_min), (1, roughness_max)] + ) + clearcoat_roughness = nw.build_float_curve( + nw.musgrave(log_uniform(20, 40)), [(0, roughness_min), (1, roughness_max)] + ) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Roughness": roughness, + "Clearcoat": 1, + "Clearcoat Roughness": clearcoat_roughness, + "Specular": 1, + "Base Color": color, + "Subsurface": uniform(0.02, 0.05), + "Subsurface Radius": (0.02, 0.02, 0.02), + }, + ) + + displacement = nw.new_node( + "ShaderNodeDisplacement", + input_kwargs={ + "Height": nw.scalar_multiply( + log_uniform(0.001, 0.005), + nw.new_node( + Nodes.NoiseTexture, input_kwargs={"Scale": log_uniform(20, 40)} + ), + ), + "Midlevel": 0.0000, + }, + ) + + nw.new_node( + Nodes.MaterialOutput, + input_kwargs={"Surface": principled_bsdf, "Displacement": displacement}, + ) def apply(obj, selection=None, clear=False, **kwargs): diff --git a/infinigen/assets/materials/chitin.py b/infinigen/assets/materials/chitin.py index ef03fadc2..fe94cd427 100644 --- a/infinigen/assets/materials/chitin.py +++ b/infinigen/assets/materials/chitin.py @@ -4,25 +4,28 @@ # Authors: Mingzhe Wang # Acknowledgement: This file draws inspiration from https://www.youtube.com/watch?v=K45LuDJv_hk by yojigraphics -import os, sys -import numpy as np -import math as ma -from infinigen.assets.materials.utils.surface_utils import clip, sample_range, sample_ratio, sample_color, geo_voronoi_noise +import 
os + import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category, hsv2rgba +import numpy as np +from numpy.random import normal, uniform + +from infinigen.assets.materials.utils.surface_utils import ( + sample_range, +) from infinigen.core import surface +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.util.color import hsv2rgba + def shader_chitin(nw: NodeWrangler, rand=True, **input_kwargs): # Code generated using version 2.4.3 of the node_transpiler - geometry = nw.new_node('ShaderNodeNewGeometry') - - colorramp_1 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': geometry.outputs["Pointiness"]}) + geometry = nw.new_node("ShaderNodeNewGeometry") + + colorramp_1 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": geometry.outputs["Pointiness"]} + ) colorramp_1.color_ramp.elements.new(0) colorramp_1.color_ramp.elements[0].position = 0.4091 colorramp_1.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) @@ -30,180 +33,256 @@ def shader_chitin(nw: NodeWrangler, rand=True, **input_kwargs): colorramp_1.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) colorramp_1.color_ramp.elements[2].position = 0.5127 colorramp_1.color_ramp.elements[2].color = (0.0, 0.0, 0.0, 1.0) - - colorramp_10 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': colorramp_1.outputs["Color"]}) + + colorramp_10 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": colorramp_1.outputs["Color"]} + ) colorramp_10.color_ramp.elements[0].position = 0.0 colorramp_10.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) colorramp_10.color_ramp.elements[1].position = 0.2273 colorramp_10.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - - colorramp_4 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': geometry.outputs["Pointiness"]}) + + colorramp_4 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": geometry.outputs["Pointiness"]} + ) colorramp_4.color_ramp.elements[0].position = 0.4909 colorramp_4.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) colorramp_4.color_ramp.elements[1].position = 0.6773 colorramp_4.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: colorramp_10.outputs["Color"], 1: colorramp_4.outputs["Color"]}, - attrs={'operation': 'MULTIPLY'}) - - colorramp_3 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': multiply}) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: colorramp_10.outputs["Color"], + 1: colorramp_4.outputs["Color"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + colorramp_3 = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": multiply}) colorramp_3.color_ramp.interpolation = "EASE" colorramp_3.color_ramp.elements[0].position = 0.0 colorramp_3.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) colorramp_3.color_ramp.elements[1].position = 0.0864 colorramp_3.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - + texture_coordinate_1 = nw.new_node(Nodes.TextureCoord) - - separate_xyz_3 = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': texture_coordinate_1.outputs["Generated"]}) - - colorramp_6 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': separate_xyz_3.outputs["X"]}) + + separate_xyz_3 = nw.new_node( + Nodes.SeparateXYZ, + input_kwargs={"Vector": texture_coordinate_1.outputs["Generated"]}, + ) + + colorramp_6 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": 
separate_xyz_3.outputs["X"]} + ) colorramp_6.color_ramp.elements[0].position = 0.5332 colorramp_6.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) colorramp_6.color_ramp.elements[1].position = 0.5427 colorramp_6.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - - attribute_2 = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'tag_body'}) - - multiply_1 = nw.new_node(Nodes.Math, + + attribute_2 = nw.new_node(Nodes.Attribute, attrs={"attribute_name": "tag_body"}) + + multiply_1 = nw.new_node( + Nodes.Math, input_kwargs={0: colorramp_6.outputs["Color"], 1: attribute_2.outputs["Color"]}, - attrs={'operation': 'MULTIPLY'}) - - attribute_1 = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'tag_head'}) - - add = nw.new_node(Nodes.Math, + attrs={"operation": "MULTIPLY"}, + ) + + attribute_1 = nw.new_node(Nodes.Attribute, attrs={"attribute_name": "tag_head"}) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: multiply_1, 1: attribute_1.outputs["Color"]}, - attrs={'use_clamp': True}) - - colorramp_5 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': add}) + attrs={"use_clamp": True}, + ) + + colorramp_5 = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": add}) colorramp_5.color_ramp.elements[0].position = 0.0 colorramp_5.color_ramp.elements[0].color = (1.0, 1.0, 1.0, 1.0) colorramp_5.color_ramp.elements[1].position = 1.0 colorramp_5.color_ramp.elements[1].color = (0.0168, 0.0168, 0.0168, 1.0) - - multiply_2 = nw.new_node(Nodes.Math, + + multiply_2 = nw.new_node( + Nodes.Math, input_kwargs={0: colorramp_3.outputs["Color"], 1: colorramp_5.outputs["Color"]}, - attrs={'operation': 'MULTIPLY'}) - - attribute_3 = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'tag_leg'}) - - invert_1 = nw.new_node('ShaderNodeInvert', - input_kwargs={'Color': attribute_3.outputs["Color"]}) - - multiply_3 = nw.new_node(Nodes.Math, + attrs={"operation": "MULTIPLY"}, + ) + + attribute_3 = nw.new_node(Nodes.Attribute, attrs={"attribute_name": "tag_leg"}) + + invert_1 = nw.new_node( + "ShaderNodeInvert", input_kwargs={"Color": attribute_3.outputs["Color"]} + ) + + multiply_3 = nw.new_node( + Nodes.Math, input_kwargs={0: multiply_2, 1: invert_1}, - attrs={'operation': 'MULTIPLY'}) - + attrs={"operation": "MULTIPLY"}, + ) + texture_coordinate = nw.new_node(Nodes.TextureCoord) - - separate_xyz_2 = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': texture_coordinate.outputs["Object"]}) - - sign = nw.new_node(Nodes.Math, + + separate_xyz_2 = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": texture_coordinate.outputs["Object"]} + ) + + sign = nw.new_node( + Nodes.Math, input_kwargs={0: separate_xyz_2.outputs["X"]}, - attrs={'operation': 'SIGN'}) - - mapping_1 = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': texture_coordinate.outputs["Object"], 'Rotation': (0.0, 0.0, -0.7854), 'Scale': (1.0, 1.0, 0.0)}) - - separate_xyz_1 = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': mapping_1}) - - mapping = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': texture_coordinate.outputs["Object"], 'Rotation': (0.0, 0.0, 0.7854), 'Scale': (1.0, 10.0, 0.0)}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': mapping}) - - mix_3 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': sign, 'Color1': separate_xyz_1.outputs["X"], 'Color2': separate_xyz.outputs["X"]}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Scale': 10.0, 'Detail': 10.0}) - - mix = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': 0.2, 'Color1': mix_3, 'Color2': 
noise_texture.outputs["Fac"]}) - - noise_texture_1 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': mix, 'W': 1.4, 'Scale': 100.0, 'Detail': 10.0, 'Roughness': 0.0}, - attrs={'noise_dimensions': '4D'}) + attrs={"operation": "SIGN"}, + ) + + mapping_1 = nw.new_node( + Nodes.Mapping, + input_kwargs={ + "Vector": texture_coordinate.outputs["Object"], + "Rotation": (0.0, 0.0, -0.7854), + "Scale": (1.0, 1.0, 0.0), + }, + ) + + separate_xyz_1 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": mapping_1}) + + mapping = nw.new_node( + Nodes.Mapping, + input_kwargs={ + "Vector": texture_coordinate.outputs["Object"], + "Rotation": (0.0, 0.0, 0.7854), + "Scale": (1.0, 10.0, 0.0), + }, + ) + + separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": mapping}) + + mix_3 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": sign, + "Color1": separate_xyz_1.outputs["X"], + "Color2": separate_xyz.outputs["X"], + }, + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, input_kwargs={"Scale": 10.0, "Detail": 10.0} + ) + + mix = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": 0.2, + "Color1": mix_3, + "Color2": noise_texture.outputs["Fac"], + }, + ) + + noise_texture_1 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": mix, + "W": 1.4, + "Scale": 100.0, + "Detail": 10.0, + "Roughness": 0.0, + }, + attrs={"noise_dimensions": "4D"}, + ) if rand: - noise_texture_1.inputs['W'].default_value = sample_range(-2,2) - - mix_2 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': multiply_3, 'Color1': (0.0, 0.0, 0.0, 1.0), 'Color2': noise_texture_1.outputs["Fac"]}) - - colorramp_2 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': mix_2}) + noise_texture_1.inputs["W"].default_value = sample_range(-2, 2) + + mix_2 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": multiply_3, + "Color1": (0.0, 0.0, 0.0, 1.0), + "Color2": noise_texture_1.outputs["Fac"], + }, + ) + + colorramp_2 = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": mix_2}) colorramp_2.color_ramp.elements[0].position = 0.0 colorramp_2.color_ramp.elements[0].color = (0.0068, 0.0, 0.0005, 1.0) colorramp_2.color_ramp.elements[1].position = 0.9955 colorramp_2.color_ramp.elements[1].color = (0.1347, 0.0156, 0.0115, 1.0) if rand: - colorramp_2.color_ramp.elements[1].color = hsv2rgba((np.mod(normal(0.2, 0.4), 1), uniform(0, 1), uniform(0.05, 0.5))) - #for i in range(3): + colorramp_2.color_ramp.elements[1].color = hsv2rgba( + (np.mod(normal(0.2, 0.4), 1), uniform(0, 1), uniform(0.05, 0.5)) + ) + # for i in range(3): # colorramp_2.color_ramp.elements[1].color[i] /= 7 - invert = nw.new_node('ShaderNodeInvert', - input_kwargs={'Color': multiply_2}) - - colorramp_11 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': invert}) + invert = nw.new_node("ShaderNodeInvert", input_kwargs={"Color": multiply_2}) + + colorramp_11 = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": invert}) colorramp_11.color_ramp.elements[0].position = 0.3932 colorramp_11.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) colorramp_11.color_ramp.elements[1].position = 1.0 colorramp_11.color_ramp.elements[1].color = (0.5103, 0.5103, 0.5103, 1.0) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': colorramp_2.outputs["Color"], 'Metallic': 0.7, 'Roughness': colorramp_11.outputs["Color"]}, - attrs={'subsurface_method': 'BURLEY'}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': principled_bsdf}) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + 
input_kwargs={ + "Base Color": colorramp_2.outputs["Color"], + "Metallic": 0.7, + "Roughness": colorramp_11.outputs["Color"], + }, + attrs={"subsurface_method": "BURLEY"}, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf} + ) + def geometry_chitin(nw: NodeWrangler, rand=True, **input_kwargs): # Code generated using version 2.4.3 of the node_transpiler - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None)]) - + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) + normal = nw.new_node(Nodes.InputNormal) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Scale': 100.0}, - attrs={'noise_dimensions': '4D'}) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={"Scale": 100.0}, + attrs={"noise_dimensions": "4D"}, + ) if rand: - noise_texture.inputs['W'].default_value = sample_range(-2,2) + noise_texture.inputs["W"].default_value = sample_range(-2, 2) - add = nw.new_node(Nodes.Math, - input_kwargs={0: noise_texture.outputs["Fac"], 1: -0.5}) - - multiply = nw.new_node(Nodes.VectorMath, + add = nw.new_node( + Nodes.Math, input_kwargs={0: noise_texture.outputs["Fac"], 1: -0.5} + ) + + multiply = nw.new_node( + Nodes.VectorMath, input_kwargs={0: normal, 1: add}, - attrs={'operation': 'MULTIPLY'}) - + attrs={"operation": "MULTIPLY"}, + ) + value = nw.new_node(Nodes.Value) value.outputs[0].default_value = 0.001 - - multiply_1 = nw.new_node(Nodes.VectorMath, + + multiply_1 = nw.new_node( + Nodes.VectorMath, input_kwargs={0: multiply.outputs["Vector"], 1: value}, - attrs={'operation': 'MULTIPLY'}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 'Offset': multiply_1.outputs["Vector"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_position}) + attrs={"operation": "MULTIPLY"}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + "Offset": multiply_1.outputs["Vector"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_position} + ) + def apply(obj, geo_kwargs=None, shader_kwargs=None, **kwargs): surface.add_geomod(obj, geometry_chitin, input_kwargs=geo_kwargs) @@ -212,13 +291,19 @@ def apply(obj, geo_kwargs=None, shader_kwargs=None, **kwargs): if __name__ == "__main__": for i in range(1): - bpy.ops.wm.open_mainfile(filepath='dev_scene_1019.blend') - #creature(73349, 0).parts(0, factory=QuadrupedBody) + bpy.ops.wm.open_mainfile(filepath="dev_scene_1019.blend") + # creature(73349, 0).parts(0, factory=QuadrupedBody) obj = "creature(36230, 0).parts(0, factory=BeetleBody)" - #obj = "creature(73349, 0).parts(0, factory=QuadrupedBody)" - apply(bpy.data.objects[obj], geo_kwargs={'rand': True}, shader_kwargs={'rand': True}) - fn = os.path.join(os.path.abspath(os.curdir), 'dev_scene_test_beetle_attr.blend') + # obj = "creature(73349, 0).parts(0, factory=QuadrupedBody)" + apply( + bpy.data.objects[obj], + geo_kwargs={"rand": True}, + shader_kwargs={"rand": True}, + ) + fn = os.path.join( + os.path.abspath(os.curdir), "dev_scene_test_beetle_attr.blend" + ) bpy.ops.wm.save_as_mainfile(filepath=fn) - #bpy.context.scene.render.filepath = os.path.join('surfaces/surface_thumbnails', 'bone%d.jpg'%(i)) - #bpy.context.scene.render.image_settings.file_format='JPEG' - #bpy.ops.render.render(write_still=True) + # 
bpy.context.scene.render.filepath = os.path.join('surfaces/surface_thumbnails', 'bone%d.jpg'%(i)) + # bpy.context.scene.render.image_settings.file_format='JPEG' + # bpy.ops.render.render(write_still=True) diff --git a/infinigen/assets/materials/chunkyrock.py b/infinigen/assets/materials/chunkyrock.py index 6d0687273..e9d969f9f 100644 --- a/infinigen/assets/materials/chunkyrock.py +++ b/infinigen/assets/materials/chunkyrock.py @@ -8,72 +8,103 @@ import os import bpy -import mathutils -from numpy.random import uniform, normal, randint import gin -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category + +from infinigen.assets.materials.utils.surface_utils import ( + sample_color, + sample_range, + sample_ratio, +) from infinigen.core import surface -from infinigen.assets.materials.utils.surface_utils import sample_color, sample_range, sample_ratio -from infinigen.core.util.organization import SurfaceTypes +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler from infinigen.core.util.math import FixedSeed -from .mountain import geo_MOUNTAIN_general +from infinigen.core.util.organization import SurfaceTypes +from .mountain import geo_MOUNTAIN_general type = SurfaceTypes.SDFPerturb mod_name = "geo_rocks" name = "chunkyrock" + def shader_rocks(nw, rand=True, random_seed=0, **input_kwargs): nw.force_input_consistency() - position = nw.new_node('ShaderNodeNewGeometry') + position = nw.new_node("ShaderNodeNewGeometry") depth = geo_rocks(nw, random_seed=random_seed, geometry=False) - colorramp_3 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': depth}) + colorramp_3 = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": depth}) colorramp_3.color_ramp.elements[0].position = 0.0285 colorramp_3.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) colorramp_3.color_ramp.elements[1].position = 0.1347 colorramp_3.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - mapping = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': position, 'Scale': (0.2, 0.2, 0.2)}) + mapping = nw.new_node( + Nodes.Mapping, input_kwargs={"Vector": position, "Scale": (0.2, 0.2, 0.2)} + ) - noise_texture_1 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': mapping, 'Detail': 15.0}) + noise_texture_1 = nw.new_node( + Nodes.NoiseTexture, input_kwargs={"Vector": mapping, "Detail": 15.0} + ) - rock_color1 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': noise_texture_1.outputs["Fac"], 'Color1': (0.0, 0.0, 0.0, 1.0), 'Color2': (0.01, 0.024, 0.0283, 1.0)}) + rock_color1 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": noise_texture_1.outputs["Fac"], + "Color1": (0.0, 0.0, 0.0, 1.0), + "Color2": (0.01, 0.024, 0.0283, 1.0), + }, + ) if rand: sample_color(rock_color1.inputs[6].default_value) sample_color(rock_color1.inputs[7].default_value) - noise_texture_2 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': mapping, 'Detail': 15.0}) + noise_texture_2 = nw.new_node( + Nodes.NoiseTexture, input_kwargs={"Vector": mapping, "Detail": 15.0} + ) - rock_color2 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': noise_texture_2.outputs["Fac"], 'Color1': (0.0, 0.0, 0.0, 1.0), 'Color2': (0.0694, 0.1221, 0.0693, 1.0)}) + rock_color2 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": noise_texture_2.outputs["Fac"], + "Color1": (0.0, 0.0, 0.0, 1.0), + "Color2": (0.0694, 0.1221, 0.0693, 1.0), + }, + ) if rand: sample_color(rock_color2.inputs[6].default_value) 
sample_color(rock_color2.inputs[7].default_value) - mix_1 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': colorramp_3.outputs["Color"], 'Color1': rock_color1, 'Color2': rock_color2}) + mix_1 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": colorramp_3.outputs["Color"], + "Color1": rock_color1, + "Color2": rock_color2, + }, + ) - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': mix_1}) + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, input_kwargs={"Base Color": mix_1} + ) return principled_bsdf + @gin.configurable -def geo_rocks(nw: NodeWrangler, rand=True, selection=None, random_seed=0, geometry=True, **input_kwargs): +def geo_rocks( + nw: NodeWrangler, + rand=True, + selection=None, + random_seed=0, + geometry=True, + **input_kwargs, +): nw.force_input_consistency() if nw.node_group.type == "SHADER": - position = nw.new_node('ShaderNodeNewGeometry') - normal = (nw.new_node('ShaderNodeNewGeometry'), 1) + position = nw.new_node("ShaderNodeNewGeometry") + normal = (nw.new_node("ShaderNodeNewGeometry"), 1) else: position = nw.new_node(Nodes.InputPosition) normal = nw.new_node(Nodes.InputNormal) @@ -81,26 +112,42 @@ def geo_rocks(nw: NodeWrangler, rand=True, selection=None, random_seed=0, geomet with FixedSeed(random_seed): # Code generated using version 2.4.3 of the node_transpiler - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': position}) + noise_texture = nw.new_node( + Nodes.NoiseTexture, input_kwargs={"Vector": position} + ) - mix = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': 0.8, 'Color1': noise_texture.outputs["Color"], 'Color2': position}) + mix = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": 0.8, + "Color1": noise_texture.outputs["Color"], + "Color2": position, + }, + ) if rand: sample_max = 2 - sample_min = 1/2 - voronoi_texture_scale = nw.new_value(sample_ratio(1, sample_min, sample_max), "voronoi_texture_scale") + sample_min = 1 / 2 + voronoi_texture_scale = nw.new_value( + sample_ratio(1, sample_min, sample_max), "voronoi_texture_scale" + ) voronoi_texture_w = nw.new_value(sample_range(0, 5), "voronoi_texture_w") else: voronoi_texture_scale = 1.0 voronoi_texture_w = 0 - voronoi_texture = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': mix, 'Scale': voronoi_texture_scale, 'W': voronoi_texture_w}, - attrs={'feature': 'DISTANCE_TO_EDGE', 'voronoi_dimensions': '4D'}) + voronoi_texture = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={ + "Vector": mix, + "Scale": voronoi_texture_scale, + "W": voronoi_texture_w, + }, + attrs={"feature": "DISTANCE_TO_EDGE", "voronoi_dimensions": "4D"}, + ) - colorramp = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': voronoi_texture.outputs["Distance"]}, + colorramp = nw.new_node( + Nodes.ColorRamp, + input_kwargs={"Fac": voronoi_texture.outputs["Distance"]}, label="colorramp_VAR", ) colorramp.color_ramp.elements[0].position = 0.0432 @@ -108,48 +155,71 @@ def geo_rocks(nw: NodeWrangler, rand=True, selection=None, random_seed=0, geomet colorramp.color_ramp.elements[1].position = 0.3 colorramp.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) if rand: - colorramp.color_ramp.elements[0].position = sample_ratio(colorramp.color_ramp.elements[0].position, 0.5, 2) - colorramp.color_ramp.elements[1].position = sample_ratio(colorramp.color_ramp.elements[1].position, 0.5, 2) + colorramp.color_ramp.elements[0].position = sample_ratio( + colorramp.color_ramp.elements[0].position, 0.5, 2 + ) + colorramp.color_ramp.elements[1].position = sample_ratio( + 
colorramp.color_ramp.elements[1].position, 0.5, 2 + ) depth = colorramp - multiply = nw.new_node(Nodes.VectorMath, + multiply = nw.new_node( + Nodes.VectorMath, input_kwargs={0: colorramp.outputs["Color"], 1: normal}, - attrs={'operation': 'MULTIPLY'}) + attrs={"operation": "MULTIPLY"}, + ) value = nw.new_node(Nodes.Value) value.outputs[0].default_value = 0.4 - offset = nw.new_node(Nodes.VectorMath, + offset = nw.new_node( + Nodes.VectorMath, input_kwargs={0: multiply.outputs["Vector"], 1: value}, - attrs={'operation': 'MULTIPLY'}) - + attrs={"operation": "MULTIPLY"}, + ) if geometry: groupinput = nw.new_node(Nodes.GroupInput) - noise_params = {"scale": ("uniform", 10, 20), "detail": 9, "roughness": 0.6, "zscale": ("log_uniform", 0.08, 0.12)} + noise_params = { + "scale": ("uniform", 10, 20), + "detail": 9, + "roughness": 0.6, + "zscale": ("log_uniform", 0.08, 0.12), + } offset = nw.add(offset, geo_MOUNTAIN_general(nw, 3, noise_params, 0, {}, {})) if selection is not None: offset = nw.multiply(offset, surface.eval_argument(nw, selection)) - set_position = nw.new_node(Nodes.SetPosition, input_kwargs={"Geometry": groupinput, "Offset": offset}) - nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': set_position}) + set_position = nw.new_node( + Nodes.SetPosition, input_kwargs={"Geometry": groupinput, "Offset": offset} + ) + nw.new_node(Nodes.GroupOutput, input_kwargs={"Geometry": set_position}) else: return depth def apply(obj, selection=None, geo_kwargs=None, shader_kwargs=None, **kwargs): surface.add_geomod(obj, geo_rocks, selection=selection, input_kwargs=geo_kwargs) - surface.add_material(obj, shader_rocks, selection=selection, input_kwargs=shader_kwargs) + surface.add_material( + obj, shader_rocks, selection=selection, input_kwargs=shader_kwargs + ) + if __name__ == "__main__": - mat = 'rock' - if not os.path.isdir(os.path.join('outputs', mat)): - os.mkdir(os.path.join('outputs', mat)) + mat = "rock" + if not os.path.isdir(os.path.join("outputs", mat)): + os.mkdir(os.path.join("outputs", mat)) for i in range(10): - bpy.ops.wm.open_mainfile(filepath='test.blend') - apply(bpy.data.objects['SolidModel'], geo_kwargs={'rand':True}, shader_kwargs={'rand': True}) - #fn = os.path.join(os.path.abspath(os.curdir), 'giraffe_geo_test.blend') - #bpy.ops.wm.save_as_mainfile(filepath=fn) - bpy.context.scene.render.filepath = os.path.join('outputs', mat, '%s_%d.jpg'%(mat, i)) - bpy.context.scene.render.image_settings.file_format='JPEG' + bpy.ops.wm.open_mainfile(filepath="test.blend") + apply( + bpy.data.objects["SolidModel"], + geo_kwargs={"rand": True}, + shader_kwargs={"rand": True}, + ) + # fn = os.path.join(os.path.abspath(os.curdir), 'giraffe_geo_test.blend') + # bpy.ops.wm.save_as_mainfile(filepath=fn) + bpy.context.scene.render.filepath = os.path.join( + "outputs", mat, "%s_%d.jpg" % (mat, i) + ) + bpy.context.scene.render.image_settings.file_format = "JPEG" bpy.ops.render.render(write_still=True) diff --git a/infinigen/assets/materials/cobble_stone.py b/infinigen/assets/materials/cobble_stone.py index a88f37b25..8b295d698 100644 --- a/infinigen/assets/materials/cobble_stone.py +++ b/infinigen/assets/materials/cobble_stone.py @@ -5,13 +5,15 @@ # Acknowledgement: This file draws inspiration from https://www.youtube.com/watch?v=9Tq-6HReNEk by Ryan King Art -from numpy.random import uniform as U, normal as N -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +import gin +from numpy.random import normal as N +from numpy.random import uniform as U + from infinigen.core import 
surface -from infinigen.core.util.random import random_color_neighbour -from infinigen.core.util.organization import SurfaceTypes +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler from infinigen.core.util.math import FixedSeed -import gin +from infinigen.core.util.organization import SurfaceTypes +from infinigen.core.util.random import random_color_neighbour type = SurfaceTypes.SDFPerturb mod_name = "geo_cobblestone" @@ -22,136 +24,232 @@ def shader_cobblestone(nw: NodeWrangler, random_seed=0): # Code generated using version 2.4.3 of the node_transpiler, and modified nw.force_input_consistency() stone_color = geo_cobblestone(nw, random_seed=random_seed, geometry=False) - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': nw.new_node('ShaderNodeNewGeometry'), 'Scale': N(10, 1.5) / 25, 'W': U(-5, 5)}, - attrs={'noise_dimensions': '4D'}) - - colorramp_1 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': noise_texture.outputs["Fac"]}) + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": nw.new_node("ShaderNodeNewGeometry"), + "Scale": N(10, 1.5) / 25, + "W": U(-5, 5), + }, + attrs={"noise_dimensions": "4D"}, + ) + + colorramp_1 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": noise_texture.outputs["Fac"]} + ) colorramp_1.color_ramp.elements[0].position = 0.0 - colorramp_1.color_ramp.elements[0].color = random_color_neighbour((0.014, 0.013, 0.014, 1.0), 0.2, 0.1, 0.1) + colorramp_1.color_ramp.elements[0].color = random_color_neighbour( + (0.014, 0.013, 0.014, 1.0), 0.2, 0.1, 0.1 + ) colorramp_1.color_ramp.elements[1].position = 1.0 - colorramp_1.color_ramp.elements[1].color = random_color_neighbour((0.047, 0.068, 0.069, 1.0), 0.2, 0.1, 0.1) - - - mix = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': stone_color.outputs["Color"], 'Color1': (0.0, 0.0, 0.0, 1.0), 'Color2': colorramp_1.outputs["Color"]}) + colorramp_1.color_ramp.elements[1].color = random_color_neighbour( + (0.047, 0.068, 0.069, 1.0), 0.2, 0.1, 0.1 + ) + + mix = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": stone_color.outputs["Color"], + "Color1": (0.0, 0.0, 0.0, 1.0), + "Color2": colorramp_1.outputs["Color"], + }, + ) roughness_low = N(0.25, 0.05) roughness_high = N(0.75, 0.05) - colorramp = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': stone_color.outputs["Color"]}) + colorramp = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": stone_color.outputs["Color"]} + ) colorramp.color_ramp.elements[0].position = 0.0 - colorramp.color_ramp.elements[0].color = (roughness_high, roughness_high, roughness_high, 1.0) + colorramp.color_ramp.elements[0].color = ( + roughness_high, + roughness_high, + roughness_high, + 1.0, + ) colorramp.color_ramp.elements[1].position = 1.0 - colorramp.color_ramp.elements[1].color = (roughness_low, roughness_low, roughness_low, 1.0) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': mix, 'Roughness': colorramp.outputs["Color"]}) + colorramp.color_ramp.elements[1].color = ( + roughness_low, + roughness_low, + roughness_low, + 1.0, + ) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={"Base Color": mix, "Roughness": colorramp.outputs["Color"]}, + ) return principled_bsdf + @gin.configurable def geo_cobblestone(nw: NodeWrangler, selection=None, random_seed=0, geometry=True): # Code generated using version 2.4.3 of the node_transpiler, and modified nw.force_input_consistency() if nw.node_group.type == "SHADER": - position = 
nw.new_node('ShaderNodeNewGeometry') - normal = (nw.new_node('ShaderNodeNewGeometry'), 1) + position = nw.new_node("ShaderNodeNewGeometry") + normal = (nw.new_node("ShaderNodeNewGeometry"), 1) else: position = nw.new_node(Nodes.InputPosition) normal = nw.new_node(Nodes.InputNormal) with FixedSeed(random_seed): # scale of the stone, inversely proportional - sca_sto = nw.new_value(U(9, 15)/2, "sca_sto") + sca_sto = nw.new_value(U(9, 15) / 2, "sca_sto") # uniformity of the stone, inversely proportional uni_sto = nw.new_value(U(0.5, 0.9), "uni_sto") # depth of stone dep_sto = nw.new_value(U(0.02, 0.04), "dep_sto") - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None)]) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': position, 'W': nw.new_value(U(-5, 5), "W1"), 'Scale': nw.new_value(N(6.0, 0.5), "Scale1")}, - attrs={'noise_dimensions': '4D'}) - - voronoi_texture_2 = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'W': nw.new_value(U(-5, 5), "W2"), 'Scale': sca_sto, 'Randomness': uni_sto}, - attrs={'voronoi_dimensions': '4D'}) - - noise_texture_3 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': voronoi_texture_2.outputs["Position"], 'Scale': nw.new_value(N(20, 2), "Scale2")}) - - colorramp_4 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': noise_texture_3.outputs["Fac"]}) + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": position, + "W": nw.new_value(U(-5, 5), "W1"), + "Scale": nw.new_value(N(6.0, 0.5), "Scale1"), + }, + attrs={"noise_dimensions": "4D"}, + ) + + voronoi_texture_2 = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={ + "W": nw.new_value(U(-5, 5), "W2"), + "Scale": sca_sto, + "Randomness": uni_sto, + }, + attrs={"voronoi_dimensions": "4D"}, + ) + + noise_texture_3 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": voronoi_texture_2.outputs["Position"], + "Scale": nw.new_value(N(20, 2), "Scale2"), + }, + ) + + colorramp_4 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": noise_texture_3.outputs["Fac"]} + ) colorramp_4.color_ramp.interpolation = "CONSTANT" colorramp_4.color_ramp.elements[0].position = 0.1159 colorramp_4.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) colorramp_4.color_ramp.elements[1].position = 0.475 colorramp_4.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - voronoi_texture_1 = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': position, 'W': nw.new_value(U(-5, 5), "W3"), 'Scale': sca_sto, 'Randomness': uni_sto}, - attrs={'voronoi_dimensions': '4D', 'feature': 'DISTANCE_TO_EDGE'}) - - multiply = nw.new_node(Nodes.Math, + voronoi_texture_1 = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={ + "Vector": position, + "W": nw.new_value(U(-5, 5), "W3"), + "Scale": sca_sto, + "Randomness": uni_sto, + }, + attrs={"voronoi_dimensions": "4D", "feature": "DISTANCE_TO_EDGE"}, + ) + + multiply = nw.new_node( + Nodes.Math, input_kwargs={0: 1.5, 1: sca_sto}, - attrs={'operation': 'MULTIPLY'}) - - voronoi_texture_3 = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': position, 'W': nw.new_value(U(-5, 5), "W4"), 'Scale': multiply, 'Randomness': uni_sto}, - attrs={'voronoi_dimensions': '4D', 'feature': 'DISTANCE_TO_EDGE'}) - - mix_3 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': colorramp_4.outputs["Color"], 'Color1': voronoi_texture_1.outputs["Distance"], 'Color2': 
voronoi_texture_3.outputs["Distance"]}) - - mix = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': noise_texture.outputs["Fac"], 'Color1': mix_3}) - - colorramp = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': mix}, label="colorramp_VAR") + attrs={"operation": "MULTIPLY"}, + ) + + voronoi_texture_3 = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={ + "Vector": position, + "W": nw.new_value(U(-5, 5), "W4"), + "Scale": multiply, + "Randomness": uni_sto, + }, + attrs={"voronoi_dimensions": "4D", "feature": "DISTANCE_TO_EDGE"}, + ) + + mix_3 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": colorramp_4.outputs["Color"], + "Color1": voronoi_texture_1.outputs["Distance"], + "Color2": voronoi_texture_3.outputs["Distance"], + }, + ) + + mix = nw.new_node( + Nodes.MixRGB, + input_kwargs={"Fac": noise_texture.outputs["Fac"], "Color1": mix_3}, + ) + + colorramp = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": mix}, label="colorramp_VAR" + ) colorramp.color_ramp.elements[0].position = U(0.26, 0.29) colorramp.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) colorramp.color_ramp.elements[1].position = 0.377 colorramp.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - if not geometry: return colorramp + if not geometry: + return colorramp - multiply_1 = nw.new_node(Nodes.VectorMath, + multiply_1 = nw.new_node( + Nodes.VectorMath, input_kwargs={0: colorramp.outputs["Color"], 1: normal}, - attrs={'operation': 'MULTIPLY'}) + attrs={"operation": "MULTIPLY"}, + ) - multiply_2 = nw.new_node(Nodes.VectorMath, + multiply_2 = nw.new_node( + Nodes.VectorMath, input_kwargs={0: multiply_1.outputs["Vector"], 1: dep_sto}, - attrs={'operation': 'MULTIPLY'}) - - noise_texture_4 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Scale': nw.new_value(N(20, 2), "Scale3"), 'Detail': 10.0, 'Distortion': 2.0}) - - subtract = nw.new_node(Nodes.VectorMath, + attrs={"operation": "MULTIPLY"}, + ) + + noise_texture_4 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Scale": nw.new_value(N(20, 2), "Scale3"), + "Detail": 10.0, + "Distortion": 2.0, + }, + ) + + subtract = nw.new_node( + Nodes.VectorMath, input_kwargs={0: noise_texture_4.outputs["Fac"], 1: (0.5, 0.5, 0.5)}, - attrs={'operation': 'SUBTRACT'}) + attrs={"operation": "SUBTRACT"}, + ) - multiply_5 = nw.new_node(Nodes.VectorMath, + multiply_5 = nw.new_node( + Nodes.VectorMath, input_kwargs={0: subtract.outputs["Vector"], 1: normal}, - attrs={'operation': 'MULTIPLY'}) + attrs={"operation": "MULTIPLY"}, + ) value_8 = nw.new_value(U(0.01, 0.02), "value_8") - multiply_6 = nw.new_node(Nodes.VectorMath, + multiply_6 = nw.new_node( + Nodes.VectorMath, input_kwargs={0: multiply_5.outputs["Vector"], 1: value_8}, - attrs={'operation': 'MULTIPLY'}) - - set_position_1 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': group_input, 'Offset': nw.add(multiply_6, multiply_2)}) + attrs={"operation": "MULTIPLY"}, + ) + set_position_1 = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": group_input, + "Offset": nw.add(multiply_6, multiply_2), + }, + ) - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': set_position_1}) + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_position_1} + ) def apply(obj, selection=None, **kwargs): - surface.add_geomod(obj,geo_cobblestone, selection=selection) + surface.add_geomod(obj, geo_cobblestone, selection=selection) surface.add_material(obj, shader_cobblestone, selection=selection, reuse=False) diff --git a/infinigen/assets/materials/common.py 
b/infinigen/assets/materials/common.py index b906c4bcf..8622e3f89 100644 --- a/infinigen/assets/materials/common.py +++ b/infinigen/assets/materials/common.py @@ -2,16 +2,14 @@ # This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. # Authors: Lingjie Mei -import functools from collections.abc import Callable, Iterable import numpy as np from infinigen.assets.utils.decorate import read_material_index, write_material_index -from infinigen.core import surface +from infinigen.core import surface, tagging +from infinigen.core import tags as t from infinigen.core.surface import read_attr_data - -from infinigen.core import tags as t, tagging from infinigen.core.util.math import FixedSeed @@ -34,9 +32,11 @@ def apply(obj, shader_func, selection=None, *args, **kwargs): material_index = np.where(sel, index, material_index) elif isinstance(selection, str): try: - sel = read_attr_data(o, selection.lstrip('!'), 'FACE') - material_index = np.where(1 - sel if selection.startswith('!') else sel, index, material_index) - except: + sel = read_attr_data(o, selection.lstrip("!"), "FACE") + material_index = np.where( + 1 - sel if selection.startswith("!") else sel, index, material_index + ) + except KeyError: material_index = np.zeros(len(material_index), dtype=int) else: material_index = np.where(selection, index, material_index) @@ -49,7 +49,7 @@ def get_selection(obj, selection): elif isinstance(selection, t.Tag): return tagging.tagged_face_mask(obj, selection) elif isinstance(selection, str): - return read_attr_data(obj, selection.lstrip('!'), 'FACE') + return read_attr_data(obj, selection.lstrip("!"), "FACE") else: return selection @@ -59,7 +59,6 @@ def unique_surface(surface, seed=None): seed = np.random.randint(1e7) class Surface: - @classmethod def apply(cls, *args, **kwargs): with FixedSeed(seed): diff --git a/infinigen/assets/materials/cracked_ground.py b/infinigen/assets/materials/cracked_ground.py index ba81700e0..ac9848e53 100644 --- a/infinigen/assets/materials/cracked_ground.py +++ b/infinigen/assets/materials/cracked_ground.py @@ -4,43 +4,58 @@ # Authors: Ankit Goyal, Zeyu Ma # Acknowledgment: This file draws inspiration from https://www.youtube.com/watch?v=PIZ_wi3yFUM&list=PLsGl9GczcgBs6TtApKKK-L_0Nm6fovNPk&index=98 by Ryan King Art -import bpy -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface -from infinigen.core.util.organization import SurfaceTypes - import gin +from numpy.random import uniform + +from infinigen.core import surface +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler from infinigen.core.util.math import FixedSeed +from infinigen.core.util.organization import SurfaceTypes from infinigen.core.util.random import random_color_neighbour type = SurfaceTypes.SDFPerturb mod_name = "geo_cracked_ground" -name = "cracked_ground" +name = "cracked_ground" + -@node_utils.to_nodegroup('nodegroup_apply_value_to_normal', singleton=False, type='GeometryNodeTree') +@node_utils.to_nodegroup( + "nodegroup_apply_value_to_normal", singleton=False, type="GeometryNodeTree" +) def nodegroup_apply_value_to_normal(nw: NodeWrangler): # Code generated using version 2.6.4 of the node_transpiler normal = nw.new_node(Nodes.InputNormal) - - 
group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'value', 0.0000), - ('NodeSocketFloat', 'displacement', 1.0000)]) - - scale = nw.new_node(Nodes.VectorMath, - input_kwargs={0: normal, 'Scale': group_input.outputs["value"]}, - attrs={'operation': 'SCALE'}) - - scale_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: scale.outputs["Vector"], 'Scale': group_input.outputs["displacement"]}, - attrs={'operation': 'SCALE'}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Vector': scale_1.outputs["Vector"]}, attrs={'is_active_output': True}) + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "value", 0.0000), + ("NodeSocketFloat", "displacement", 1.0000), + ], + ) + + scale = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: normal, "Scale": group_input.outputs["value"]}, + attrs={"operation": "SCALE"}, + ) + + scale_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: scale.outputs["Vector"], + "Scale": group_input.outputs["displacement"], + }, + attrs={"operation": "SCALE"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Vector": scale_1.outputs["Vector"]}, + attrs={"is_active_output": True}, + ) + def shader_cracked_ground(nw: NodeWrangler, random_seed=0): # Code generated using version 2.6.4 of the node_transpiler @@ -49,37 +64,87 @@ def shader_cracked_ground(nw: NodeWrangler, random_seed=0): col_1 = random_color_neighbour((0.3005, 0.1119, 0.0284, 1.0), 0.1, 0.1, 0.1) col_2 = random_color_neighbour((0.6038, 0.4397, 0.2159, 1.0), 0.1, 0.1, 0.1) - attribute_2 = nw.new_node(Nodes.Attribute, attrs={'attribute_name': 'bump'}) - - attribute = nw.new_node(Nodes.Attribute, attrs={'attribute_name': 'crack'}) - + attribute_2 = nw.new_node(Nodes.Attribute, attrs={"attribute_name": "bump"}) + + attribute = nw.new_node(Nodes.Attribute, attrs={"attribute_name": "crack"}) + texture_coordinate = nw.new_node(Nodes.TextureCoord) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': texture_coordinate.outputs["Object"], 'Scale': 15.0000, 'Detail': 10.0000}) - - separate_color = nw.new_node(Nodes.SeparateColor, input_kwargs={'Color': noise_texture.outputs["Color"]}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': separate_color.outputs["Red"], 1: 0.4000, 2: 0.7000, 3: 0.4900, 4: 0.5100}) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': separate_color.outputs["Green"], 1: 0.4000, 2: 0.7200, 3: 0.4000, 4: 1.1000}) - - hue_saturation_value = nw.new_node('ShaderNodeHueSaturation', - input_kwargs={'Hue': map_range.outputs["Result"], 'Value': map_range_1.outputs["Result"], 'Color': col_1}) - - mix = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': attribute.outputs["Fac"], 'Color1': hue_saturation_value, 'Color2': col_crac}) - - mix_2 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': attribute_2.outputs["Fac"], 'Color1': mix, 'Color2': col_2}) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, input_kwargs={'Base Color': mix_2, 'Specular': 0.2000, 'Roughness': 0.9000}) - - material_output = nw.new_node(Nodes.MaterialOutput, input_kwargs={'Surface': principled_bsdf}, attrs={'is_active_output': True}) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": texture_coordinate.outputs["Object"], + "Scale": 15.0000, + "Detail": 10.0000, + }, + ) + + separate_color = nw.new_node( + Nodes.SeparateColor, input_kwargs={"Color": noise_texture.outputs["Color"]} + ) + + map_range = nw.new_node( + 
Nodes.MapRange, + input_kwargs={ + "Value": separate_color.outputs["Red"], + 1: 0.4000, + 2: 0.7000, + 3: 0.4900, + 4: 0.5100, + }, + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": separate_color.outputs["Green"], + 1: 0.4000, + 2: 0.7200, + 3: 0.4000, + 4: 1.1000, + }, + ) + + hue_saturation_value = nw.new_node( + "ShaderNodeHueSaturation", + input_kwargs={ + "Hue": map_range.outputs["Result"], + "Value": map_range_1.outputs["Result"], + "Color": col_1, + }, + ) + + mix = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": attribute.outputs["Fac"], + "Color1": hue_saturation_value, + "Color2": col_crac, + }, + ) + + mix_2 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": attribute_2.outputs["Fac"], + "Color1": mix, + "Color2": col_2, + }, + ) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={"Base Color": mix_2, "Specular": 0.2000, "Roughness": 0.9000}, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, + input_kwargs={"Surface": principled_bsdf}, + attrs={"is_active_output": True}, + ) return principled_bsdf + @gin.configurable def geo_cracked_ground(nw: NodeWrangler, selection=None, random_seed=0): # Code generated using version 2.6.4 of the node_transpiler @@ -98,7 +163,7 @@ def geo_cracked_ground(nw: NodeWrangler, selection=None, random_seed=0): sca_noise = nw.new_value(uniform(2, 4), "sca_mask") # percentage of area with crac, 0.5 means in half of area - crack_density = nw.new_value(uniform(0.4, 0.55), "crack_density") + crack_density = nw.new_value(uniform(0.4, 0.55), "crack_density") # width of the crack wid_crac = nw.new_value(uniform(0.01, 0.04), "wid_crac") @@ -112,85 +177,182 @@ def geo_cracked_ground(nw: NodeWrangler, selection=None, random_seed=0): # total displacement dep_landscape = nw.new_value(uniform(0.3, 0.7), "dep_landscape") + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) - group_input = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketGeometry', 'Geometry', None)]) - position = nw.new_node(Nodes.InputPosition) - + seed = nw.new_value(noise_rnd_seed, "seed") - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': position, 'W': seed, 'Scale': sca_noise, 'Detail': 15.0000, 'Roughness': 0.5375}, - attrs={'noise_dimensions': '4D'}) - - store_named_attribute = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 'Name': 'noise', 'Value': noise_texture.outputs["Fac"]}) - - noise_texture_1 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': position, 'W': seed, 'Scale': sca_crac, 'Detail': 15.0000}, - attrs={'noise_dimensions': '4D'}) - - voronoi_texture = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': noise_texture_1.outputs["Color"], 'W': seed, 'Scale': 2.3000}, - attrs={'feature': 'DISTANCE_TO_EDGE', 'voronoi_dimensions': '4D'}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': voronoi_texture.outputs["Distance"], 2: wid_crac, 3: 1.0000, 4: 0.0000}) - - noise_texture_2 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': position, 'W': seed, 'Scale': sca_mask, 'Detail': 15.0000}, - attrs={'noise_dimensions': '4D'}) - - subtract = nw.new_node(Nodes.Math, input_kwargs={0: 1.0000, 1: crack_density}, attrs={'operation': 'SUBTRACT'}) - - subtract_1 = nw.new_node(Nodes.Math, input_kwargs={0: subtract, 1: 0.0200}, attrs={'operation': 'SUBTRACT'}) - + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ 
+ "Vector": position, + "W": seed, + "Scale": sca_noise, + "Detail": 15.0000, + "Roughness": 0.5375, + }, + attrs={"noise_dimensions": "4D"}, + ) + + store_named_attribute = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + "Name": "noise", + "Value": noise_texture.outputs["Fac"], + }, + ) + + noise_texture_1 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": position, + "W": seed, + "Scale": sca_crac, + "Detail": 15.0000, + }, + attrs={"noise_dimensions": "4D"}, + ) + + voronoi_texture = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={ + "Vector": noise_texture_1.outputs["Color"], + "W": seed, + "Scale": 2.3000, + }, + attrs={"feature": "DISTANCE_TO_EDGE", "voronoi_dimensions": "4D"}, + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": voronoi_texture.outputs["Distance"], + 2: wid_crac, + 3: 1.0000, + 4: 0.0000, + }, + ) + + noise_texture_2 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": position, + "W": seed, + "Scale": sca_mask, + "Detail": 15.0000, + }, + attrs={"noise_dimensions": "4D"}, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: 1.0000, 1: crack_density}, + attrs={"operation": "SUBTRACT"}, + ) + + subtract_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract, 1: 0.0200}, + attrs={"operation": "SUBTRACT"}, + ) + add = nw.new_node(Nodes.Math, input_kwargs={0: subtract, 1: 0.0200}) - - map_range_1 = nw.new_node(Nodes.MapRange, input_kwargs={'Value': noise_texture_2.outputs["Fac"], 1: subtract_1, 2: add}) - - multiply = nw.new_node(Nodes.Math, + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": noise_texture_2.outputs["Fac"], 1: subtract_1, 2: add}, + ) + + multiply = nw.new_node( + Nodes.Math, input_kwargs={0: map_range.outputs["Result"], 1: map_range_1.outputs["Result"]}, - attrs={'operation': 'MULTIPLY'}) - - store_named_attribute_1 = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': store_named_attribute, 'Name': 'crack', 'Value': multiply}) - - voronoi_texture_1 = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': position, 'W': seed, 'Scale': sca_gra}, - attrs={'voronoi_dimensions': '4D'}) - - map_range_2 = nw.new_node(Nodes.MapRange, input_kwargs={'Value': voronoi_texture_1.outputs["Distance"], 1: 0.9000}) - - store_named_attribute_2 = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': store_named_attribute_1, 'Name': 'bump', 'Value': map_range_2.outputs["Result"]}) - - applyvaluetonormal = nw.new_node(nodegroup_apply_value_to_normal().name, - input_kwargs={'value': noise_texture.outputs["Fac"], 'displacement': 0.3000}) - - applyvaluetonormal_1 = nw.new_node(nodegroup_apply_value_to_normal().name, input_kwargs={'value': multiply, 'displacement': dep_crac}) - - add_1 = nw.new_node(Nodes.VectorMath, input_kwargs={0: applyvaluetonormal, 1: applyvaluetonormal_1}) - - applyvaluetonormal_2 = nw.new_node(nodegroup_apply_value_to_normal().name, - input_kwargs={'value': map_range_2.outputs["Result"], 'displacement': 0.0200}) - - add_2 = nw.new_node(Nodes.VectorMath, input_kwargs={0: add_1.outputs["Vector"], 1: applyvaluetonormal_2}) - - scale = nw.new_node(Nodes.VectorMath, - input_kwargs={0: add_2.outputs["Vector"], 'Scale': dep_landscape}, - attrs={'operation': 'SCALE'}) + attrs={"operation": "MULTIPLY"}, + ) + + store_named_attribute_1 = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": store_named_attribute, + "Name": "crack", + "Value": multiply, + }, 
+ ) + + voronoi_texture_1 = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={"Vector": position, "W": seed, "Scale": sca_gra}, + attrs={"voronoi_dimensions": "4D"}, + ) + + map_range_2 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": voronoi_texture_1.outputs["Distance"], 1: 0.9000}, + ) + + store_named_attribute_2 = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": store_named_attribute_1, + "Name": "bump", + "Value": map_range_2.outputs["Result"], + }, + ) + + applyvaluetonormal = nw.new_node( + nodegroup_apply_value_to_normal().name, + input_kwargs={"value": noise_texture.outputs["Fac"], "displacement": 0.3000}, + ) + + applyvaluetonormal_1 = nw.new_node( + nodegroup_apply_value_to_normal().name, + input_kwargs={"value": multiply, "displacement": dep_crac}, + ) + + add_1 = nw.new_node( + Nodes.VectorMath, input_kwargs={0: applyvaluetonormal, 1: applyvaluetonormal_1} + ) + + applyvaluetonormal_2 = nw.new_node( + nodegroup_apply_value_to_normal().name, + input_kwargs={"value": map_range_2.outputs["Result"], "displacement": 0.0200}, + ) + + add_2 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: add_1.outputs["Vector"], 1: applyvaluetonormal_2}, + ) + + scale = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: add_2.outputs["Vector"], "Scale": dep_landscape}, + attrs={"operation": "SCALE"}, + ) offset = scale if selection is not None: offset = nw.multiply(offset, surface.eval_argument(nw, selection)) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': store_named_attribute_2, 'Offset': offset}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': set_position}, attrs={'is_active_output': True}) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={"Geometry": store_named_attribute_2, "Offset": offset}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": set_position}, + attrs={"is_active_output": True}, + ) + def apply(obj, selection=None, **kwargs): # seed = randint(10000000) - surface.add_geomod(obj, geo_cracked_ground, selection=selection) #, input_kwargs={'random_seed': seed}) - surface.add_material(obj, shader_cracked_ground, selection=selection) #, input_kwargs={'random_seed': seed}) \ No newline at end of file + surface.add_geomod( + obj, geo_cracked_ground, selection=selection + ) # , input_kwargs={'random_seed': seed}) + surface.add_material( + obj, shader_cracked_ground, selection=selection + ) # , input_kwargs={'random_seed': seed}) diff --git a/infinigen/assets/materials/dirt.py b/infinigen/assets/materials/dirt.py index 452731ca7..66410fac5 100644 --- a/infinigen/assets/materials/dirt.py +++ b/infinigen/assets/materials/dirt.py @@ -8,12 +8,13 @@ import bpy import gin -from infinigen.core.nodes.node_wrangler import Nodes from numpy.random import uniform -from infinigen.core import surface + from infinigen.assets.materials.utils.surface_utils import sample_color, sample_ratio -from infinigen.core.util.organization import SurfaceTypes +from infinigen.core import surface +from infinigen.core.nodes.node_wrangler import Nodes from infinigen.core.util.math import FixedSeed +from infinigen.core.util.organization import SurfaceTypes from .mountain import geo_MOUNTAIN_general @@ -24,7 +25,9 @@ def shader_dirt(nw, random_seed=0): nw.force_input_consistency() - dirt_base_color, dirt_roughness = geo_dirt(nw, selection=None, random_seed=random_seed, geometry=False) + dirt_base_color, dirt_roughness = geo_dirt( + nw, selection=None, 
random_seed=random_seed, geometry=False + ) principled_bsdf = nw.new_node( Nodes.PrincipledBSDF, input_kwargs={ @@ -35,12 +38,13 @@ def shader_dirt(nw, random_seed=0): return principled_bsdf + @gin.configurable def geo_dirt(nw, selection=None, random_seed=0, geometry=True): nw.force_input_consistency() if nw.node_group.type == "SHADER": - position = nw.new_node('ShaderNodeNewGeometry') - normal = (nw.new_node('ShaderNodeNewGeometry'), 1) + position = nw.new_node("ShaderNodeNewGeometry") + normal = (nw.new_node("ShaderNodeNewGeometry"), 1) else: position = nw.new_node(Nodes.InputPosition) normal = nw.new_node(Nodes.InputNormal) @@ -68,23 +72,34 @@ def geo_dirt(nw, selection=None, random_seed=0, geometry=True): noise_texture_2 = nw.new_node( Nodes.NoiseTexture, - input_kwargs={"Vector": position, "Scale": 5.0 * scale, "W": nw.new_value(uniform(0, 10), "noise_texture_2_w")}, + input_kwargs={ + "Vector": position, + "Scale": 5.0 * scale, + "W": nw.new_value(uniform(0, 10), "noise_texture_2_w"), + }, attrs={"noise_dimensions": "4D"}, ) - colorramp_2 = nw.new_node(Nodes.MapRange, - input_kwargs={"Value": noise_texture_2.outputs["Fac"], 1: nw.new_value(0.445 + (2 * dens_crack) - 0.1, "colorramp_2_a"), 2: nw.new_value(0.505 + (2 * dens_crack) - 0.1, "colorramp_2_b"), 3: 0.0, 4: 1.0}, - attrs={'clamp': True} + colorramp_2 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": noise_texture_2.outputs["Fac"], + 1: nw.new_value(0.445 + (2 * dens_crack) - 0.1, "colorramp_2_a"), + 2: nw.new_value(0.505 + (2 * dens_crack) - 0.1, "colorramp_2_b"), + 3: 0.0, + 4: 1.0, + }, + attrs={"clamp": True}, ) - #nw.new_node( + # nw.new_node( # Nodes.ColorRamp, input_kwargs={"Fac": noise_texture_2.outputs["Fac"]}, # label = "color_ramp_2_VAR" - #) - #colorramp_2.color_ramp.elements[0].position = 0.445 + (2 * dens_crack) - 0.1 - #colorramp_2.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) - #colorramp_2.color_ramp.elements[1].position = 0.505 + (2 * dens_crack) - 0.1 - #colorramp_2.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) + # ) + # colorramp_2.color_ramp.elements[0].position = 0.445 + (2 * dens_crack) - 0.1 + # colorramp_2.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) + # colorramp_2.color_ramp.elements[1].position = 0.505 + (2 * dens_crack) - 0.1 + # colorramp_2.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) noise_texture_1 = nw.new_node( Nodes.NoiseTexture, @@ -99,51 +114,68 @@ def geo_dirt(nw, selection=None, random_seed=0, geometry=True): voronoi_texture = nw.new_node( Nodes.VoronoiTexture, - input_kwargs={"Vector": noise_texture_1.outputs["Color"], "Scale": nw.new_value(scal_crack * scale, "scal_crack")}, + input_kwargs={ + "Vector": noise_texture_1.outputs["Color"], + "Scale": nw.new_value(scal_crack * scale, "scal_crack"), + }, attrs={"feature": "DISTANCE_TO_EDGE"}, ) - colorramp_1 = nw.new_node(Nodes.MapRange, - input_kwargs={"Value":voronoi_texture.outputs["Distance"], 1: 0.0, 2: nw.new_value(widt_crack, "colorramp_1"), 3: 0.0, 4: 1.0}, + colorramp_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": voronoi_texture.outputs["Distance"], + 1: 0.0, + 2: nw.new_value(widt_crack, "colorramp_1"), + 3: 0.0, + 4: 1.0, + }, # label = "color_ramp_1_VAR", - attrs={'clamp': True} + attrs={"clamp": True}, ) - #nw.new_node( + # nw.new_node( # Nodes.ColorRamp, input_kwargs={"Fac": voronoi_texture.outputs["Distance"]}, # label="color_ramp_1_VAR" - #) - #colorramp_1.color_ramp.elements[0].position = 0.0 - #colorramp_1.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) - 
#colorramp_1.color_ramp.elements[1].position = widt_crack - #colorramp_1.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) + # ) + # colorramp_1.color_ramp.elements[0].position = 0.0 + # colorramp_1.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) + # colorramp_1.color_ramp.elements[1].position = widt_crack + # colorramp_1.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - mix_sub = nw.new_node(Nodes.VectorMath, + mix_sub = nw.new_node( + Nodes.VectorMath, input_kwargs={0: (1.0, 1.0, 1.0), 1: colorramp_2.outputs["Result"]}, attrs={"operation": "SUBTRACT"}, ) - mix_mul1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: mix_sub.outputs["Vector"], 1: colorramp_1.outputs["Result"]}, + mix_mul1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: mix_sub.outputs["Vector"], + 1: colorramp_1.outputs["Result"], + }, attrs={"operation": "MULTIPLY"}, ) - mix_mul2 = nw.new_node(Nodes.VectorMath, + mix_mul2 = nw.new_node( + Nodes.VectorMath, input_kwargs={0: colorramp_2.outputs["Result"], 1: (0.5, 0.5, 0.5)}, attrs={"operation": "MULTIPLY"}, ) - mix = nw.new_node(Nodes.VectorMath, + mix = nw.new_node( + Nodes.VectorMath, input_kwargs={0: mix_mul1.outputs["Vector"], 1: mix_mul2.outputs["Vector"]}, ) - #nw.new_node( + # nw.new_node( # Nodes.MixRGB, # input_kwargs={ # "Fac": colorramp_2.outputs["Color"], # "Color1": colorramp_1.outputs["Color"], # }, - #) + # ) vector_math_2 = nw.new_node( Nodes.VectorMath, @@ -169,31 +201,50 @@ def geo_dirt(nw, selection=None, random_seed=0, geometry=True): attrs={"operation": "MULTIPLY"}, ) - noise_texture_3 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': position, "W": nw.new_value(uniform(0, 10), "noise_texture_3_w"), 'Scale': sample_ratio(5, 3/4, 4/3)}, - attrs={"noise_dimensions": "4D"}) + noise_texture_3 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": position, + "W": nw.new_value(uniform(0, 10), "noise_texture_3_w"), + "Scale": sample_ratio(5, 3 / 4, 4 / 3), + }, + attrs={"noise_dimensions": "4D"}, + ) - subtract = nw.new_node(Nodes.Math, + subtract = nw.new_node( + Nodes.Math, input_kwargs={0: noise_texture_3.outputs["Fac"]}, - attrs={'operation': 'SUBTRACT'}) + attrs={"operation": "SUBTRACT"}, + ) - multiply_8 = nw.new_node(Nodes.VectorMath, + multiply_8 = nw.new_node( + Nodes.VectorMath, input_kwargs={0: subtract, 1: normal}, - attrs={'operation': 'MULTIPLY'}) + attrs={"operation": "MULTIPLY"}, + ) value_5 = nw.new_node(Nodes.Value) value_5.outputs[0].default_value = 0.05 - multiply_9 = nw.new_node(Nodes.VectorMath, + multiply_9 = nw.new_node( + Nodes.VectorMath, input_kwargs={0: multiply_8.outputs["Vector"], 1: value_5}, - attrs={'operation': 'MULTIPLY'}) + attrs={"operation": "MULTIPLY"}, + ) - noise_texture_4 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': position, 'Scale': sample_ratio(20, 3/4, 4/3), "W": nw.new_value(uniform(0, 10), "noise_texture_4_w")}, - attrs={'noise_dimensions': '4D'}) + noise_texture_4 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": position, + "Scale": sample_ratio(20, 3 / 4, 4 / 3), + "W": nw.new_value(uniform(0, 10), "noise_texture_4_w"), + }, + attrs={"noise_dimensions": "4D"}, + ) - colorramp_5 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': noise_texture_4.outputs["Fac"]}) + colorramp_5 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": noise_texture_4.outputs["Fac"]} + ) colorramp_5.color_ramp.elements.new(0) colorramp_5.color_ramp.elements.new(0) colorramp_5.color_ramp.elements[0].position = 0.0 @@ -205,20 +256,26 @@ def geo_dirt(nw, 
selection=None, random_seed=0, geometry=True): colorramp_5.color_ramp.elements[3].position = 1.0 colorramp_5.color_ramp.elements[3].color = (1.0, 1.0, 1.0, 1.0) - subtract_1 = nw.new_node(Nodes.Math, + subtract_1 = nw.new_node( + Nodes.Math, input_kwargs={0: colorramp_5.outputs["Color"]}, - attrs={'operation': 'SUBTRACT'}) + attrs={"operation": "SUBTRACT"}, + ) - multiply_10 = nw.new_node(Nodes.VectorMath, + multiply_10 = nw.new_node( + Nodes.VectorMath, input_kwargs={0: subtract_1, 1: normal}, - attrs={'operation': 'MULTIPLY'}) + attrs={"operation": "MULTIPLY"}, + ) value_6 = nw.new_node(Nodes.Value) value_6.outputs[0].default_value = 0.1 - multiply_11 = nw.new_node(Nodes.VectorMath, + multiply_11 = nw.new_node( + Nodes.VectorMath, input_kwargs={0: multiply_10.outputs["Vector"], 1: value_6}, - attrs={'operation': 'MULTIPLY'}) + attrs={"operation": "MULTIPLY"}, + ) colorramp = nw.new_node( Nodes.ColorRamp, input_kwargs={"Fac": noise_texture.outputs["Fac"]} @@ -255,21 +312,24 @@ def geo_dirt(nw, selection=None, random_seed=0, geometry=True): offset = nw.add(multiply_11, multiply_9, vector_math_8) if geometry: - noise_params = {"scale": ("uniform", 1, 5), "detail": 7, "roughness": 0.7, "zscale": ("power_uniform", -1, -0.5)} - offset = nw.add( - geo_MOUNTAIN_general(nw, 3, noise_params, 0, {}, {}), - offset - ) + noise_params = { + "scale": ("uniform", 1, 5), + "detail": 7, + "roughness": 0.7, + "zscale": ("power_uniform", -1, -0.5), + } + offset = nw.add(geo_MOUNTAIN_general(nw, 3, noise_params, 0, {}, {}), offset) groupinput = nw.new_node(Nodes.GroupInput) if selection is not None: offset = nw.multiply(offset, surface.eval_argument(nw, selection)) - set_position = nw.new_node(Nodes.SetPosition, input_kwargs={"Geometry": groupinput, "Offset": offset}) - nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': set_position}) + set_position = nw.new_node( + Nodes.SetPosition, input_kwargs={"Geometry": groupinput, "Offset": offset} + ) + nw.new_node(Nodes.GroupOutput, input_kwargs={"Geometry": set_position}) else: return dirt_base_color, dirt_roughness - def apply(obj, selection=None, **kwargs): surface.add_geomod( obj, @@ -280,13 +340,17 @@ def apply(obj, selection=None, **kwargs): if __name__ == "__main__": - mat = 'dirt' - if not os.path.isdir(os.path.join('outputs', mat)): - os.mkdir(os.path.join('outputs', mat)) + mat = "dirt" + if not os.path.isdir(os.path.join("outputs", mat)): + os.mkdir(os.path.join("outputs", mat)) for i in range(10): - bpy.ops.wm.open_mainfile(filepath='landscape_surface_dev.blend') - apply(bpy.data.objects['Plane.002']) - bpy.context.scene.render.filepath = os.path.join('outputs', mat, '%s_%d.jpg'%(mat, i)) - bpy.context.scene.render.image_settings.file_format='JPEG' + bpy.ops.wm.open_mainfile(filepath="landscape_surface_dev.blend") + apply(bpy.data.objects["Plane.002"]) + bpy.context.scene.render.filepath = os.path.join( + "outputs", mat, "%s_%d.jpg" % (mat, i) + ) + bpy.context.scene.render.image_settings.file_format = "JPEG" bpy.ops.render.render(write_still=True) - bpy.ops.wm.save_as_mainfile(filepath=os.path.join('outputs', mat, 'landscape_surface_dev_dirt.blend')) + bpy.ops.wm.save_as_mainfile( + filepath=os.path.join("outputs", mat, "landscape_surface_dev_dirt.blend") + ) diff --git a/infinigen/assets/materials/dishwasher_shaders.py b/infinigen/assets/materials/dishwasher_shaders.py index 8f2c5f7f8..0de73b6ff 100644 --- a/infinigen/assets/materials/dishwasher_shaders.py +++ b/infinigen/assets/materials/dishwasher_shaders.py @@ -5,24 +5,41 @@ from 
infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler + def default_shader(nw: NodeWrangler): principled_bsdf = nw.new_node(Nodes.PrincipledBSDF) - material_output = nw.new_node(Nodes.MaterialOutput, input_kwargs={'Surface': principled_bsdf}, attrs={'is_active_output': True}) + material_output = nw.new_node( + Nodes.MaterialOutput, + input_kwargs={"Surface": principled_bsdf}, + attrs={"is_active_output": True}, + ) + def shader_black_medal_002(nw: NodeWrangler): # Code generated using version 2.6.5 of the node_transpiler - anisotropic_bsdf = nw.new_node('ShaderNodeBsdfAnisotropic', input_kwargs={'Color': (0.0167, 0.0167, 0.0167, 1.0000)}) + anisotropic_bsdf = nw.new_node( + "ShaderNodeBsdfAnisotropic", + input_kwargs={"Color": (0.0167, 0.0167, 0.0167, 1.0000)}, + ) - material_output = nw.new_node(Nodes.MaterialOutput, input_kwargs={'Surface': anisotropic_bsdf}, attrs={'is_active_output': True}) + material_output = nw.new_node( + Nodes.MaterialOutput, + input_kwargs={"Surface": anisotropic_bsdf}, + attrs={"is_active_output": True}, + ) def shader_glass_002(nw: NodeWrangler): # Code generated using version 2.6.5 of the node_transpiler - glass_bsdf = nw.new_node(Nodes.GlassBSDF, input_kwargs={'IOR': 1.5000}) + glass_bsdf = nw.new_node(Nodes.GlassBSDF, input_kwargs={"IOR": 1.5000}) - material_output = nw.new_node(Nodes.MaterialOutput, input_kwargs={'Surface': glass_bsdf}, attrs={'is_active_output': True}) + material_output = nw.new_node( + Nodes.MaterialOutput, + input_kwargs={"Surface": glass_bsdf}, + attrs={"is_active_output": True}, + ) def shader_metal_002(nw: NodeWrangler): @@ -30,22 +47,45 @@ def shader_metal_002(nw: NodeWrangler): texture_coordinate = nw.new_node(Nodes.TextureCoord) - mapping = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': texture_coordinate.outputs["Object"], 'Scale': (1.0000, 1.0000, 80.0000)}) - - noise_texture_1 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': mapping, 'Scale': 10.0000, 'Detail': 20.0000, 'Roughness': 0.0000, 'Distortion': 1.0000}) - - colorramp = nw.new_node(Nodes.ColorRamp, input_kwargs={'Fac': noise_texture_1.outputs["Color"]}) + mapping = nw.new_node( + Nodes.Mapping, + input_kwargs={ + "Vector": texture_coordinate.outputs["Object"], + "Scale": (1.0000, 1.0000, 80.0000), + }, + ) + + noise_texture_1 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": mapping, + "Scale": 10.0000, + "Detail": 20.0000, + "Roughness": 0.0000, + "Distortion": 1.0000, + }, + ) + + colorramp = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": noise_texture_1.outputs["Color"]} + ) colorramp.color_ramp.elements[0].position = 0.0045 colorramp.color_ramp.elements[0].color = [0.2218, 0.1914, 0.2173, 1.0000] colorramp.color_ramp.elements[1].position = 0.4432 colorramp.color_ramp.elements[1].color = [0.1678, 0.1300, 0.0929, 1.0000] - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': texture_coordinate.outputs["Object"], 'Scale': 2.0000, 'Detail': 0.0000}) - - colorramp_1 = nw.new_node(Nodes.ColorRamp, input_kwargs={'Fac': noise_texture.outputs["Color"]}) + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": texture_coordinate.outputs["Object"], + "Scale": 2.0000, + "Detail": 0.0000, + }, + ) + + colorramp_1 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": noise_texture.outputs["Color"]} + ) colorramp_1.color_ramp.elements.new(0) colorramp_1.color_ramp.elements[0].position = 0.0000 colorramp_1.color_ramp.elements[0].color = [0.5000, 0.5000, 0.5000, 1.0000] @@ -54,11 +94,22 @@ 
def shader_metal_002(nw: NodeWrangler): colorramp_1.color_ramp.elements[2].position = 1.0000 colorramp_1.color_ramp.elements[2].color = [1.0000, 1.0000, 1.0000, 1.0000] - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': colorramp.outputs["Color"], 'Subsurface Color': (0.9456, 0.5597, 0.0681, 1.0000), 'Metallic': 1.0000, 'Roughness': colorramp_1.outputs["Color"]}, - attrs={'subsurface_method': 'BURLEY'}) + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": colorramp.outputs["Color"], + "Subsurface Color": (0.9456, 0.5597, 0.0681, 1.0000), + "Metallic": 1.0000, + "Roughness": colorramp_1.outputs["Color"], + }, + attrs={"subsurface_method": "BURLEY"}, + ) - material_output = nw.new_node(Nodes.MaterialOutput, input_kwargs={'Surface': principled_bsdf}, attrs={'is_active_output': True}) + material_output = nw.new_node( + Nodes.MaterialOutput, + input_kwargs={"Surface": principled_bsdf}, + attrs={"is_active_output": True}, + ) def shader_white_metal_002(nw: NodeWrangler): @@ -66,20 +117,49 @@ def shader_white_metal_002(nw: NodeWrangler): texture_coordinate = nw.new_node(Nodes.TextureCoord) - mapping = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': texture_coordinate.outputs["Object"], 'Scale': (1.0000, 1.0000, 50.0000)}) - - noise_texture_1 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': mapping, 'Scale': 20.0000, 'Detail': 20.0000, 'Distortion': 1.0000}) - - colorramp = nw.new_node(Nodes.ColorRamp, input_kwargs={'Fac': noise_texture_1.outputs["Color"]}) + mapping = nw.new_node( + Nodes.Mapping, + input_kwargs={ + "Vector": texture_coordinate.outputs["Object"], + "Scale": (1.0000, 1.0000, 50.0000), + }, + ) + + noise_texture_1 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": mapping, + "Scale": 20.0000, + "Detail": 20.0000, + "Distortion": 1.0000, + }, + ) + + colorramp = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": noise_texture_1.outputs["Color"]} + ) colorramp.color_ramp.elements[0].position = 0.2500 colorramp.color_ramp.elements[0].color = [0.5244, 0.5244, 0.5244, 1.0000] colorramp.color_ramp.elements[1].position = 1.0000 colorramp.color_ramp.elements[1].color = [0.9698, 0.9698, 0.9698, 1.0000] - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': colorramp.outputs["Color"], 'Subsurface Color': (1.0000, 1.0000, 1.0000, 1.0000), 'Metallic': 1.0000, 'Specular': 1.0000, 'Roughness': 0.1000, 'Anisotropic': 0.9182, 'Sheen': 0.0455, 'Sheen Tint': 0.4948}, - attrs={'subsurface_method': 'BURLEY'}) - - material_output = nw.new_node(Nodes.MaterialOutput, input_kwargs={'Surface': principled_bsdf}, attrs={'is_active_output': True}) + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": colorramp.outputs["Color"], + "Subsurface Color": (1.0000, 1.0000, 1.0000, 1.0000), + "Metallic": 1.0000, + "Specular": 1.0000, + "Roughness": 0.1000, + "Anisotropic": 0.9182, + "Sheen": 0.0455, + "Sheen Tint": 0.4948, + }, + attrs={"subsurface_method": "BURLEY"}, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, + input_kwargs={"Surface": principled_bsdf}, + attrs={"is_active_output": True}, + ) diff --git a/infinigen/assets/materials/eyeball.py b/infinigen/assets/materials/eyeball.py index 7bc440be8..278ceea13 100644 --- a/infinigen/assets/materials/eyeball.py +++ b/infinigen/assets/materials/eyeball.py @@ -4,92 +4,122 @@ # Authors: Alexander Raistrick -import bpy -import mathutils -from numpy.random import uniform, normal, randint 
+from numpy.random import uniform + +from infinigen.assets.materials.utils.surface_utils import sample_color +from infinigen.core import surface from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils from infinigen.core.util.color import color_category -from infinigen.core import surface -from infinigen.assets.materials.utils.surface_utils import clip, sample_range, sample_ratio, sample_color + def shader_eyeball(nw: NodeWrangler, rand=True, coord="X", **input_kwargs): # Code generated using version 2.4.3 of the node_transpiler texture_coordinate = nw.new_node(Nodes.TextureCoord) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': texture_coordinate.outputs["Generated"]}) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, + input_kwargs={"Vector": texture_coordinate.outputs["Generated"]}, + ) if coord == "Y": - math = nw.new_node(Nodes.Math, + math = nw.new_node( + Nodes.Math, input_kwargs={0: 1.0, 1: separate_xyz.outputs["Y"]}, - attrs={'operation': 'SUBTRACT'}) + attrs={"operation": "SUBTRACT"}, + ) val = math else: val = separate_xyz.outputs["X"] - colorramp_1 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': val}) + colorramp_1 = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": val}) colorramp_1.color_ramp.interpolation = "CONSTANT" colorramp_1.color_ramp.elements[0].position = 0.0045 colorramp_1.color_ramp.elements[0].color = (0.5921, 0.5921, 0.5921, 1.0) colorramp_1.color_ramp.elements[1].position = uniform(0.84, 0.88) if rand else 0.854 colorramp_1.color_ramp.elements[1].color = (0.0, 0.0, 0.0, 1.0) - colorramp = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': val}) + colorramp = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": val}) colorramp.color_ramp.interpolation = "CONSTANT" colorramp.color_ramp.elements.new(0) colorramp.color_ramp.elements[0].position = 0.6618 colorramp.color_ramp.elements[0].color = (0.2403, 0.217, 0.1528, 1.0) - colorramp.color_ramp.elements[1].position = colorramp_1.color_ramp.elements[1].position + colorramp.color_ramp.elements[1].position = colorramp_1.color_ramp.elements[ + 1 + ].position colorramp.color_ramp.elements[1].color = (0.4961, 0.8862, 0.1703, 1.0) - colorramp.color_ramp.elements[2].position = colorramp_1.color_ramp.elements[1].position+0.01 + colorramp.color_ramp.elements[2].position = ( + colorramp_1.color_ramp.elements[1].position + 0.01 + ) colorramp.color_ramp.elements[2].color = (0.0, 0.0, 0.0, 1.0) if rand: sample_color(colorramp.color_ramp.elements[1].color) - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': colorramp.outputs["Color"], 'Metallic': 0.0, 'Roughness': 0.03}) - - transparent_bsdf = nw.new_node(Nodes.TransparentBSDF, - input_kwargs={'Color': (0.757, 0.757, 0.757, 1.0)}) - - translucent_bsdf = nw.new_node(Nodes.TranslucentBSDF, - input_kwargs={'Color': (1.0, 1.0, 1.0, 1.0)}) - - mix_shader_1 = nw.new_node(Nodes.MixShader, - input_kwargs={'Fac': 0.1, 1: transparent_bsdf, 2: translucent_bsdf}) - - mix_shader = nw.new_node(Nodes.MixShader, - input_kwargs={'Fac': colorramp_1.outputs["Color"], 1: principled_bsdf, 2: mix_shader_1}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': mix_shader}) + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": colorramp.outputs["Color"], + "Metallic": 0.0, + "Roughness": 0.03, + }, + ) + + transparent_bsdf = nw.new_node( + Nodes.TransparentBSDF, input_kwargs={"Color": (0.757, 0.757, 0.757, 1.0)} + ) + + 
translucent_bsdf = nw.new_node( + Nodes.TranslucentBSDF, input_kwargs={"Color": (1.0, 1.0, 1.0, 1.0)} + ) + + mix_shader_1 = nw.new_node( + Nodes.MixShader, + input_kwargs={"Fac": 0.1, 1: transparent_bsdf, 2: translucent_bsdf}, + ) + + mix_shader = nw.new_node( + Nodes.MixShader, + input_kwargs={ + "Fac": colorramp_1.outputs["Color"], + 1: principled_bsdf, + 2: mix_shader_1, + }, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": mix_shader} + ) + def shader_eyeball_old(nw: NodeWrangler): # Code generated using version 2.4.3 of the node_transpiler texture_coordinate = nw.new_node(Nodes.TextureCoord) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': texture_coordinate.outputs["Generated"]}) - - colorramp = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': separate_xyz.outputs["X"]}) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, + input_kwargs={"Vector": texture_coordinate.outputs["Generated"]}, + ) + + colorramp = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": separate_xyz.outputs["X"]} + ) colorramp.color_ramp.interpolation = "CONSTANT" colorramp.color_ramp.elements.new(0) colorramp.color_ramp.elements[0].position = 0.8982 - colorramp.color_ramp.elements[0].color = color_category('eye_schlera') + colorramp.color_ramp.elements[0].color = color_category("eye_schlera") colorramp.color_ramp.elements[1].position = 0.9473 - colorramp.color_ramp.elements[1].color = color_category('eye_pupil') + colorramp.color_ramp.elements[1].color = color_category("eye_pupil") colorramp.color_ramp.elements[2].position = 0.9636 colorramp.color_ramp.elements[2].color = (0.0, 0.0, 0.0, 1.0) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': colorramp.outputs["Color"], 'Roughness': 0.0}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': principled_bsdf}) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={"Base Color": colorramp.outputs["Color"], "Roughness": 0.0}, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf} + ) + def apply(obj, shader_kwargs={}, **kwargs): - surface.add_material(obj, shader_eyeball, input_kwargs=shader_kwargs) \ No newline at end of file + surface.add_material(obj, shader_eyeball, input_kwargs=shader_kwargs) diff --git a/infinigen/assets/materials/fabrics.py b/infinigen/assets/materials/fabrics.py deleted file mode 100644 index feab191bb..000000000 --- a/infinigen/assets/materials/fabrics.py +++ /dev/null @@ -1,10 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Lingjie Mei -from infinigen.assets.materials import leather_and_fabrics -from .leather_and_fabrics import * - - -def apply(obj, selection=None, **kwargs): - leather_and_fabrics.apply(obj, selection=selection, **kwargs) diff --git a/infinigen/assets/materials/fabrics/__init__.py b/infinigen/assets/materials/fabrics/__init__.py new file mode 100644 index 000000000..278d1d128 --- /dev/null +++ b/infinigen/assets/materials/fabrics/__init__.py @@ -0,0 +1,12 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Lingjie Mei + +from . 
import fabric_random +from .coarse_knit_fabric import shader_coarse_knit_fabric +from .fine_knit_fabric import shader_fine_knit_fabric +from .general_fabric import shader_fabric +from .leather import shader_leather +from .lined_fabric import shader_lined_fur_base +from .sofa_fabric import shader_sofa_fabric diff --git a/infinigen/assets/materials/leather_and_fabrics/coarse_knit_fabric.py b/infinigen/assets/materials/fabrics/coarse_knit_fabric.py similarity index 95% rename from infinigen/assets/materials/leather_and_fabrics/coarse_knit_fabric.py rename to infinigen/assets/materials/fabrics/coarse_knit_fabric.py index 0ac7179b0..3f2d16501 100644 --- a/infinigen/assets/materials/leather_and_fabrics/coarse_knit_fabric.py +++ b/infinigen/assets/materials/fabrics/coarse_knit_fabric.py @@ -5,7 +5,7 @@ # Acknowledgement: This file draws inspiration from following sources: # https://www.youtube.com/watch?v=DfoMWLQ-BkM by 5 Minutes Blender -# https://www.youtube.com/watch?v=tS_U3twxKKg by PIXXO 3D +# https://www.youtube.com/watch?v=tS_U3twxKKg by PIXXO 3D # https://www.youtube.com/watch?v=OCay8AsVD84 by Antonio Palladino # https://www.youtube.com/watch?v=5dS3N90wPkc by Dr Blender # https://www.youtube.com/watch?v=12c1J6LhK4Y by blenderian @@ -14,17 +14,11 @@ # https://www.youtube.com/watch?v=umrARvXC_MI by Ryan King Art -import bpy -import mathutils -from numpy.random import uniform, normal, choice +from numpy.random import choice, uniform +from infinigen.assets.materials import common from infinigen.assets.utils.uv import unwrap_faces from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface - -from infinigen.assets.materials import common def get_texture_params(): @@ -260,11 +254,11 @@ def shader_fabric_base( ) -def shader_fabric_random(nw: NodeWrangler, **kwargs): +def shader_coarse_knit_fabric(nw: NodeWrangler, **kwargs): fabric_params = get_texture_params() return shader_fabric_base(nw, **fabric_params) def apply(obj, selection=None, **kwargs): unwrap_faces(obj, selection) - common.apply(obj, shader_fabric_random, selection, **kwargs) + common.apply(obj, shader_coarse_knit_fabric, selection, **kwargs) diff --git a/infinigen/assets/materials/fabrics/fabric_random.py b/infinigen/assets/materials/fabrics/fabric_random.py new file mode 100644 index 000000000..775199a34 --- /dev/null +++ b/infinigen/assets/materials/fabrics/fabric_random.py @@ -0,0 +1,21 @@ +from infinigen.core.util.random import random_general as rg + +from ...utils.uv import unwrap_faces +from .. 
import common +from .coarse_knit_fabric import shader_coarse_knit_fabric +from .fine_knit_fabric import shader_fine_knit_fabric +from .leather import shader_leather +from .sofa_fabric import shader_sofa_fabric + +fabric_shader_list = ( + "weighted_choice", + (1, shader_coarse_knit_fabric), + (1, shader_fine_knit_fabric), + (2, shader_leather), + (1, shader_sofa_fabric), +) + + +def apply(obj, selection=None, **kwargs): + unwrap_faces(obj, selection) + common.apply(obj, rg(fabric_shader_list), selection=selection, **kwargs) diff --git a/infinigen/assets/materials/leather_and_fabrics/fine_knit_fabric.py b/infinigen/assets/materials/fabrics/fine_knit_fabric.py similarity index 96% rename from infinigen/assets/materials/leather_and_fabrics/fine_knit_fabric.py rename to infinigen/assets/materials/fabrics/fine_knit_fabric.py index d3d8f2dcf..99c41a0fa 100644 --- a/infinigen/assets/materials/leather_and_fabrics/fine_knit_fabric.py +++ b/infinigen/assets/materials/fabrics/fine_knit_fabric.py @@ -5,7 +5,7 @@ # Acknowledgement: This file draws inspiration from following sources: # https://www.youtube.com/watch?v=DfoMWLQ-BkM by 5 Minutes Blender -# https://www.youtube.com/watch?v=tS_U3twxKKg by PIXXO 3D +# https://www.youtube.com/watch?v=tS_U3twxKKg by PIXXO 3D # https://www.youtube.com/watch?v=OCay8AsVD84 by Antonio Palladino # https://www.youtube.com/watch?v=5dS3N90wPkc by Dr Blender # https://www.youtube.com/watch?v=12c1J6LhK4Y by blenderian @@ -14,12 +14,11 @@ # https://www.youtube.com/watch?v=umrARvXC_MI by Ryan King Art -import bpy from numpy.random import uniform +from infinigen.assets.materials import common from infinigen.assets.utils.uv import unwrap_faces from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.assets.materials import common def get_texture_params(): @@ -143,7 +142,7 @@ def shader_material( ) -def shader_fabric_random(nw: NodeWrangler, **kwargs): +def shader_fine_knit_fabric(nw: NodeWrangler, **kwargs): fabric_params = get_texture_params() fabric_params["_map"] = "Object" return shader_material(nw, **fabric_params) @@ -151,4 +150,4 @@ def shader_fabric_random(nw: NodeWrangler, **kwargs): def apply(obj, selection=None, **kwargs): unwrap_faces(obj, selection) - common.apply(obj, shader_fabric_random, selection, **kwargs) + common.apply(obj, shader_fine_knit_fabric, selection, **kwargs) diff --git a/infinigen/assets/materials/fabrics/general_fabric.py b/infinigen/assets/materials/fabrics/general_fabric.py new file mode 100644 index 000000000..2a3af9593 --- /dev/null +++ b/infinigen/assets/materials/fabrics/general_fabric.py @@ -0,0 +1,244 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
+ +# Authors: Yiming Zuo +# Acknowledgement: This file draws inspiration https://www.youtube.com/watch?v=umrARvXC_MI by Ryan King Art + + +import numpy as np +from numpy.random import uniform + +from infinigen.assets.materials import common +from infinigen.assets.utils.uv import unwrap_faces +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.util.color import color_category + + +def func_fabric(nw: NodeWrangler, **kwargs): + # Code generated using version 2.6.4 of the node_transpiler + + texture_coordinate = nw.new_node(Nodes.TextureCoord) + + group_input = { + "Weave Scale": 0.0, + "Color Pattern Scale": 0.0, + "Color1": (0.7991, 0.1046, 0.1195, 1.0000), + "Color2": (1.0000, 0.5271, 0.5711, 1.0000), + } + group_input.update(kwargs) + + wave_texture_1 = nw.new_node( + Nodes.WaveTexture, + input_kwargs={ + "Vector": texture_coordinate.outputs["UV"], + "Scale": group_input["Weave Scale"], + "Distortion": 7.0000, + "Detail": 15.0000, + }, + attrs={"bands_direction": "Y"}, + ) + + value_2 = nw.new_node(Nodes.Value) + value_2.outputs[0].default_value = 0.1000 + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": wave_texture_1.outputs["Color"], 1: value_2}, + ) + + wave_texture = nw.new_node( + Nodes.WaveTexture, + input_kwargs={ + "Vector": texture_coordinate.outputs["UV"], + "Scale": group_input["Weave Scale"], + "Distortion": 7.0000, + "Detail": 15.0000, + }, + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": wave_texture.outputs["Color"], 1: value_2}, + ) + + mix = nw.new_node( + Nodes.Mix, + input_kwargs={6: map_range.outputs["Result"], 7: map_range_1.outputs["Result"]}, + attrs={"data_type": "RGBA"}, + ) + + greater_than = nw.new_node( + Nodes.Math, + input_kwargs={0: mix.outputs[2], 1: 0.1000}, + attrs={"operation": "GREATER_THAN"}, + ) + + transparent_bsdf = nw.new_node(Nodes.TransparentBSDF) + + less_than = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input["Color Pattern Scale"], 1: 0.0001}, + attrs={"operation": "LESS_THAN"}, + ) + + brick_texture_2 = nw.new_node( + Nodes.BrickTexture, + input_kwargs={ + "Vector": texture_coordinate.outputs["UV"], + "Color1": group_input["Color1"], + "Mortar": group_input["Color2"], + "Scale": group_input["Color Pattern Scale"], + "Mortar Size": 0.0000, + "Bias": -1.0000, + "Row Height": 0.5000, + }, + attrs={"offset_frequency": 1, "squash": 0.0000}, + ) + + vector_rotate = nw.new_node( + Nodes.VectorRotate, + input_kwargs={ + "Vector": texture_coordinate.outputs["UV"], + "Rotation": (0.0000, 0.0000, 1.5708), + }, + attrs={"rotation_type": "EULER_XYZ"}, + ) + + brick_texture = nw.new_node( + Nodes.BrickTexture, + input_kwargs={ + "Vector": vector_rotate, + "Color1": group_input["Color1"], + "Mortar": group_input["Color2"], + "Scale": group_input["Color Pattern Scale"], + "Mortar Size": 0.0000, + "Bias": -1.0000, + "Row Height": 0.5000, + }, + attrs={"offset_frequency": 1, "squash": 0.0000}, + ) + + mix_2 = nw.new_node( + Nodes.Mix, + input_kwargs={ + 0: 1.0000, + 6: brick_texture_2.outputs["Color"], + 7: brick_texture.outputs["Color"], + }, + attrs={"data_type": "RGBA", "blend_type": "ADD"}, + ) + + mix_4 = nw.new_node( + Nodes.Mix, + input_kwargs={0: less_than, 6: mix_2.outputs[2], 7: group_input["Color1"]}, + attrs={"data_type": "RGBA"}, + ) + + mix_3 = nw.new_node( + Nodes.Mix, + input_kwargs={ + 0: mix.outputs[2], + 6: (0.0000, 0.0000, 0.0000, 1.0000), + 7: mix_4.outputs[2], + }, + attrs={"data_type": "RGBA"}, + ) + + map_range_2 = nw.new_node( + 
Nodes.MapRange, input_kwargs={"Value": mix.outputs[2], 3: 1.0000, 4: 0.9000} + ) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": mix_3.outputs[2], + "Roughness": map_range_2.outputs["Result"], + "Sheen": 1.0000, + "Sheen Tint": 1.0000, + }, + ) + + mix_shader = nw.new_node( + Nodes.MixShader, + input_kwargs={"Fac": greater_than, 1: transparent_bsdf, 2: principled_bsdf}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input["Weave Scale"], 1: 5.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + musgrave_texture = nw.new_node( + Nodes.MusgraveTexture, input_kwargs={"Scale": multiply} + ) + + mix_1 = nw.new_node( + Nodes.Mix, + input_kwargs={6: musgrave_texture, 7: mix.outputs[2]}, + attrs={"data_type": "RGBA"}, + ) + + subtract = nw.new_node( + Nodes.Math, input_kwargs={0: mix_1.outputs[2]}, attrs={"operation": "SUBTRACT"} + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract, 1: 0.0010}, + attrs={"operation": "MULTIPLY"}, + ) + + displacement = nw.new_node( + "ShaderNodeDisplacement", + input_kwargs={"Height": multiply_1, "Midlevel": 0.0000}, + ) + + return {"Shader": mix_shader, "Displacement": displacement} + + +def shader_fabric( + nw: NodeWrangler, + weave_scale=500.0, + color_scale=None, + color_1=None, + color_2=None, + **kwargs, +): + # Code generated using version 2.6.4 of the node_transpiler + + if color_scale is None: + color_scale = np.random.choice([0.0, uniform(5.0, 20.0)]) + if color_1 is None: + color_1 = color_category("fabric") + if color_2 is None: + color_2 = color_category("white") + + group = func_fabric( + nw, + **{ + "Weave Scale": weave_scale, + "Color Pattern Scale": color_scale, + "Color1": color_1, + "Color2": color_2, + }, + ) + + displacement = nw.new_node( + "ShaderNodeDisplacement", + input_kwargs={"Height": group["Displacement"], "Midlevel": 0.0000}, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, + input_kwargs={"Surface": group["Shader"], "Displacement": displacement}, + attrs={"is_active_output": True}, + ) + + +def apply(obj, selection=None, **kwargs): + if not isinstance(obj, list): + obj = [obj] + for o in obj: + unwrap_faces(o, selection) + common.apply(obj, shader_fabric, selection, **kwargs) diff --git a/infinigen/assets/materials/fabrics/leather.py b/infinigen/assets/materials/fabrics/leather.py new file mode 100644 index 000000000..6febeb8f2 --- /dev/null +++ b/infinigen/assets/materials/fabrics/leather.py @@ -0,0 +1,193 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
+ +# Authors: Yiming Zuo +# Acknowledgement: This file draws inspiration https://www.youtube.com/watch?v=In9V4-ih16o by Ryan King Art + + +from numpy.random import uniform + +from infinigen.assets.color_fits import real_color_distribution +from infinigen.assets.materials import common +from infinigen.assets.utils.uv import unwrap_faces +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler + + +@node_utils.to_nodegroup("nodegroup_leather", singleton=False, type="ShaderNodeTree") +def nodegroup_leather(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + texture_coordinate = nw.new_node(Nodes.TextureCoord) + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "Seed", 0.0000), + ("NodeSocketFloat", "Scale", 0.0000), + ("NodeSocketColor", "Base Color", (0.0000, 0.0000, 0.0000, 1.0000)), + ], + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Scale"], 1: 10.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": texture_coordinate.outputs["Object"], + "W": group_input.outputs["Seed"], + "Scale": multiply, + "Detail": 15.0000, + "Distortion": 0.2000, + }, + attrs={"noise_dimensions": "4D"}, + ) + + color_ramp = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": noise_texture.outputs["Fac"]} + ) + color_ramp.color_ramp.elements[0].position = 0.2841 + color_ramp.color_ramp.elements[0].color = [0.0000, 0.0000, 0.0000, 1.0000] + color_ramp.color_ramp.elements[1].position = 0.9455 + color_ramp.color_ramp.elements[1].color = [1.0000, 1.0000, 1.0000, 1.0000] + + mix = nw.new_node( + Nodes.Mix, + input_kwargs={ + 0: 0.0200, + 6: texture_coordinate.outputs["Object"], + 7: noise_texture.outputs["Color"], + }, + attrs={"blend_type": "LINEAR_LIGHT", "data_type": "RGBA"}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Scale"], 1: 800.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + voronoi_texture = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={ + "Vector": mix.outputs[2], + "W": group_input.outputs["Seed"], + "Scale": multiply_1, + }, + attrs={"voronoi_dimensions": "4D", "feature": "DISTANCE_TO_EDGE"}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: voronoi_texture.outputs["Distance"], + 1: group_input.outputs["Scale"], + }, + attrs={"use_clamp": True, "operation": "MULTIPLY"}, + ) + + hue_saturation_value = nw.new_node( + "ShaderNodeHueSaturation", + input_kwargs={"Value": 0.6000, "Color": group_input.outputs["Base Color"]}, + ) + + mix_1 = nw.new_node( + Nodes.Mix, + input_kwargs={ + 0: multiply_2, + 6: group_input.outputs["Base Color"], + 7: hue_saturation_value, + }, + attrs={"data_type": "RGBA"}, + ) + + hue_saturation_value_1 = nw.new_node( + "ShaderNodeHueSaturation", + input_kwargs={"Value": 0.4000, "Color": group_input.outputs["Base Color"]}, + ) + + mix_2 = nw.new_node( + Nodes.Mix, + input_kwargs={ + 0: color_ramp.outputs["Color"], + 6: mix_1.outputs[2], + 7: hue_saturation_value_1, + }, + attrs={"data_type": "RGBA"}, + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": mix_2.outputs[2], + 3: uniform(0.3, 0.5), + 4: uniform(0.5, 0.7), + }, + ) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": mix_2.outputs[2], + "Roughness": map_range.outputs["Result"], + }, + ) + + multiply_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: 
mix_1.outputs[2], 1: -0.2000}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_4 = nw.new_node( + Nodes.Math, + input_kwargs={0: color_ramp.outputs["Color"], 1: 0.0500}, + attrs={"operation": "MULTIPLY"}, + ) + + add = nw.new_node(Nodes.Math, input_kwargs={0: multiply_3, 1: multiply_4}) + + multiply_5 = nw.new_node( + Nodes.Math, input_kwargs={0: add, 1: 0.0200}, attrs={"operation": "MULTIPLY"} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"BSDF": principled_bsdf, "Displacement": multiply_5}, + attrs={"is_active_output": True}, + ) + + +def shader_leather(nw: NodeWrangler, scale=1.0, base_color=None, seed=None, **kwargs): + # Code generated using version 2.6.4 of the node_transpiler + if seed is None: + seed = uniform(-1000.0, 1000.0) + + # if base_color is None: + # base_color = color_category('leather') + base_color = real_color_distribution("sofa_leather") + + group = nw.new_node( + nodegroup_leather().name, + input_kwargs={"Seed": seed, "Scale": scale, "Base Color": base_color}, + ) + + displacement = nw.new_node( + "ShaderNodeDisplacement", + input_kwargs={"Height": group.outputs["Displacement"], "Midlevel": 0.0000}, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, + input_kwargs={"Surface": group.outputs["BSDF"], "Displacement": displacement}, + attrs={"is_active_output": True}, + ) + + +def apply(obj, selection=None, **kwargs): + unwrap_faces(obj, selection) + common.apply(obj, shader_leather, selection=selection, **kwargs) diff --git a/infinigen/assets/materials/leather_and_fabrics/lined_fabric.py b/infinigen/assets/materials/fabrics/lined_fabric.py similarity index 99% rename from infinigen/assets/materials/leather_and_fabrics/lined_fabric.py rename to infinigen/assets/materials/fabrics/lined_fabric.py index 751d52479..0842d5a43 100644 --- a/infinigen/assets/materials/leather_and_fabrics/lined_fabric.py +++ b/infinigen/assets/materials/fabrics/lined_fabric.py @@ -4,13 +4,11 @@ # Authors: Meenal Parakh -import bpy -import mathutils from numpy.random import uniform +from infinigen.assets.materials import common from infinigen.assets.utils.uv import unwrap_faces from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.assets.materials import common def get_texture_params(): diff --git a/infinigen/assets/materials/fabrics/sofa_fabric.py b/infinigen/assets/materials/fabrics/sofa_fabric.py new file mode 100644 index 000000000..9705e85e3 --- /dev/null +++ b/infinigen/assets/materials/fabrics/sofa_fabric.py @@ -0,0 +1,66 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
+ +# Authors: Lingjie Mei +from numpy.random import uniform + +from infinigen.assets.materials import common +from infinigen.assets.utils.uv import unwrap_faces +from infinigen.core.nodes import Nodes, NodeWrangler +from infinigen.core.util.color import color_category + + +def shader_sofa_fabric(nw: NodeWrangler, scale=1, **kwargs): + # Code generated using version 2.6.4 of the node_transpiler + + attribute = nw.new_node(Nodes.Attribute, attrs={"attribute_name": "UVMap"}) + attribute = nw.new_node( + Nodes.Mapping, [attribute], input_kwargs={"Scale": [scale] * 3} + ) + + rgb = nw.new_node(Nodes.RGB) + rgb.outputs[0].default_value = color_category("fabric") + + brightness_contrast = nw.new_node( + "ShaderNodeBrightContrast", + input_kwargs={"Color": rgb, "Bright": uniform(-0.1500, -0.05)}, + ) + + brick_texture = nw.new_node( + Nodes.BrickTexture, + input_kwargs={ + "Vector": attribute.outputs["Vector"], + "Color1": rgb, + "Color2": brightness_contrast, + "Scale": 276.9800, + "Mortar Size": 0.0100, + "Mortar Smooth": 1.0000, + "Bias": 0.5000, + "Row Height": 0.1000, + }, + attrs={"offset": 0.5479, "squash_frequency": 1}, + ) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": brick_texture.outputs["Color"], + "Roughness": 0.8624, + "Sheen": 1.0000, + }, + ) + + displacement = nw.new_node( + Nodes.Displacement, input_kwargs={"Height": brick_texture.outputs["Fac"]} + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, + input_kwargs={"Surface": principled_bsdf, "Displacement": displacement}, + attrs={"is_active_output": True}, + ) + + +def apply(obj, selection=None, **kwargs): + unwrap_faces(obj, selection) + common.apply(obj, shader_sofa_fabric, selection, **kwargs) diff --git a/infinigen/assets/materials/fabrics/velvet.py b/infinigen/assets/materials/fabrics/velvet.py new file mode 100644 index 000000000..db54ed40e --- /dev/null +++ b/infinigen/assets/materials/fabrics/velvet.py @@ -0,0 +1,147 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
+ +# Authors: Stamatis Alexandropoulos +# Acknowledgement: This file draws inspiration from https://www.youtube.com/watch?v=55MMAnTYhWI by Dikko + +from numpy.random import uniform + +from infinigen.assets.materials import common +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.util.color import color_category + + +def shader_velvet(nw: NodeWrangler, **kwargs): + # Code generated using version 2.6.5 of the node_transpiler + + texture_coordinate = nw.new_node(Nodes.TextureCoord) + + reroute = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": texture_coordinate.outputs["Object"]} + ) + + mapping = nw.new_node(Nodes.Mapping, input_kwargs={"Vector": reroute}) + + voronoi_texture = nw.new_node( + Nodes.VoronoiTexture, input_kwargs={"Vector": mapping, "Scale": 1.0000} + ) + + mix_6 = nw.new_node( + Nodes.Mix, + input_kwargs={0: 0.1125, 6: voronoi_texture.outputs["Color"]}, + attrs={"data_type": "RGBA"}, + ) + + musgrave_texture = nw.new_node( + Nodes.MusgraveTexture, + input_kwargs={ + "Vector": mapping, + "Scale": 9.6000, + "Detail": 11.4000, + "Dimension": 0.1000, + "Lacunarity": 1.9000, + }, + attrs={"musgrave_type": "MULTIFRACTAL"}, + ) + + mix = nw.new_node( + Nodes.Mix, + input_kwargs={ + 0: uniform(0, 0.8), + 6: musgrave_texture, + 7: (0.6044, 0.6044, 0.6044, 1.0000), + }, + attrs={"data_type": "RGBA", "blend_type": "MULTIPLY"}, + ) + + mix_1 = nw.new_node( + Nodes.Mix, + input_kwargs={6: mix_6.outputs[2], 7: mix.outputs[2]}, + attrs={"data_type": "RGBA"}, + ) + + color_ramp = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": mix_1.outputs[2]}) + color_ramp.color_ramp.elements[0].position = 0.0000 + color_ramp.color_ramp.elements[0].color = [1.0000, 1.0000, 1.0000, 1.0000] + color_ramp.color_ramp.elements[1].position = 0.8455 + color_ramp.color_ramp.elements[1].color = [0.0000, 0.0000, 0.0000, 1.0000] + + rgb = nw.new_node(Nodes.RGB) + rgb.outputs[0].default_value = color_category("textile") + # (0.3547, 0.3018, 0.3087, 1.0000) + + brightness_contrast = nw.new_node( + "ShaderNodeBrightContrast", input_kwargs={"Color": rgb, "Bright": 0.0500} + ) + + mix_2 = nw.new_node( + Nodes.Mix, + input_kwargs={0: color_ramp.outputs["Color"], 6: brightness_contrast, 7: rgb}, + attrs={"data_type": "RGBA"}, + ) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": mix_2.outputs[2], + "Specular": 0.0000, + "Roughness": uniform(0.4, 0.9), + "Anisotropic": 0.7614, + "Anisotropic Rotation": 1.0000, + "Sheen": 16.2273, + "Sheen Tint": 1.0000, + }, + ) + + mapping_1 = nw.new_node( + Nodes.Mapping, + input_kwargs={ + "Vector": reroute, + "Rotation": (0.0000, 0.0000, 1.0157), + "Scale": (2.2000, 2.2000, 2.2000), + }, + ) + + wave_texture_1 = nw.new_node( + Nodes.WaveTexture, + input_kwargs={ + "Vector": mapping_1, + "Scale": 500.0000, + "Distortion": 4.0000, + "Detail": 6.7000, + "Detail Scale": 1.5000, + "Detail Roughness": 0.4308, + }, + attrs={"bands_direction": "DIAGONAL"}, + ) + + mix_3 = nw.new_node( + Nodes.Mix, + input_kwargs={0: 1.0000, 6: mapping_1, 7: wave_texture_1.outputs["Color"]}, + attrs={"data_type": "RGBA", "blend_type": "MULTIPLY"}, + ) + + mix_4 = nw.new_node( + Nodes.Mix, + input_kwargs={0: 1.0000, 6: color_ramp.outputs["Color"], 7: mix_3.outputs[2]}, + attrs={"data_type": "RGBA", "blend_type": "MULTIPLY"}, + ) + + displacement = nw.new_node( + Nodes.Displacement, + input_kwargs={"Height": mix_4.outputs[2], "Midlevel": 0.0000, "Scale": 0.0150}, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, + 
input_kwargs={"Surface": principled_bsdf, "Displacement": displacement}, + attrs={"is_active_output": True}, + ) + + +def apply(obj, selection=None, **kwargs): + common.apply(obj, shader_velvet, selection, **kwargs) + # surface.add_material(obj, shader_velvet, selection=selection) + + +# apply(bpy.context.active_object) diff --git a/infinigen/assets/materials/face_size_visualizer.py b/infinigen/assets/materials/face_size_visualizer.py index d8106f8ba..73aebc06b 100644 --- a/infinigen/assets/materials/face_size_visualizer.py +++ b/infinigen/assets/materials/face_size_visualizer.py @@ -4,44 +4,48 @@ # Authors: Alexander Raistrick -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category from infinigen.core import surface +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler + def shader_material(nw: NodeWrangler): # Code generated using version 2.4.3 of the node_transpiler - attribute = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'col'}) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': attribute.outputs["Color"]}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': principled_bsdf}) + attribute = nw.new_node(Nodes.Attribute, attrs={"attribute_name": "col"}) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, input_kwargs={"Base Color": attribute.outputs["Color"]} + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf} + ) + def geo_face_colors(nw: NodeWrangler): # Code generated using version 2.4.3 of the node_transpiler - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None)]) - - random_value = nw.new_node(Nodes.RandomValue, - attrs={'data_type': 'FLOAT_VECTOR'}) - - store_named_attribute = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 'Name': 'col', "Value": random_value.outputs["Value"]}, - attrs={'data_type': 'FLOAT_VECTOR', 'domain': 'FACE'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': store_named_attribute}) + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) + + random_value = nw.new_node(Nodes.RandomValue, attrs={"data_type": "FLOAT_VECTOR"}) + + store_named_attribute = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + "Name": "col", + "Value": random_value.outputs["Value"], + }, + attrs={"data_type": "FLOAT_VECTOR", "domain": "FACE"}, + ) + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": store_named_attribute} + ) def apply(obj, selection=None, **kwargs): surface.add_geomod(obj, geo_face_colors, selection=selection, attributes=[]) - surface.add_material(obj, shader_material, selection=selection) \ No newline at end of file + surface.add_material(obj, shader_material, selection=selection) diff --git a/infinigen/assets/materials/fish_eye_shader.py b/infinigen/assets/materials/fish_eye_shader.py index 31d9a6dc4..757d2f70e 100644 --- a/infinigen/assets/materials/fish_eye_shader.py +++ b/infinigen/assets/materials/fish_eye_shader.py @@ -4,181 +4,358 @@ # Authors: Mingzhe Wang # Acknowledgement: This file draws inspiration from https://www.youtube.com/watch?v=EfNzAaqKHXQ by 
PixelicaCG, https://www.youtube.com/watch?v=JcHX4AT1vtg by CGCookie and https://www.youtube.com/watch?v=E0JyyWeptSA by CGRogue -import os, sys -import numpy as np -import math as ma -from infinigen.assets.materials.utils.surface_utils import clip, sample_range, sample_ratio, sample_color, geo_voronoi_noise -import bpy -import mathutils -from numpy.random import uniform as U, normal as N, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category + +from numpy.random import uniform as U + +from infinigen.assets.materials.utils.surface_utils import ( + sample_color, +) from infinigen.core import surface +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler + -@node_utils.to_nodegroup('nodegroup_rotate2_d_002', singleton=False, type='ShaderNodeTree') +@node_utils.to_nodegroup( + "nodegroup_rotate2_d_002", singleton=False, type="ShaderNodeTree" +) def nodegroup_rotate2_d_002(nw: NodeWrangler, rand=True, **input_kwargs): # Code generated using version 2.6.3 of the node_transpiler - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Value1', 0.5000), - ('NodeSocketFloat', 'Value2', 0.5000), - ('NodeSocketFloat', 'Value3', 0.5000)]) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Value3"], 1: 0.0175}, attrs={'operation': 'MULTIPLY'}) # pretty sure Value3 is the right one here - - sine = nw.new_node(Nodes.Math, input_kwargs={0: multiply}, attrs={'operation': 'SINE'}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: sine, 1: group_input}, attrs={'operation': 'MULTIPLY'}) - - cosine = nw.new_node(Nodes.Math, input_kwargs={0: multiply}, attrs={'operation': 'COSINE'}) - - multiply_2 = nw.new_node(Nodes.Math, input_kwargs={0: group_input, 1: cosine}, attrs={'operation': 'MULTIPLY'}) - - subtract = nw.new_node(Nodes.Math, input_kwargs={0: multiply_1, 1: multiply_2}, attrs={'operation': 'SUBTRACT'}) - - multiply_3 = nw.new_node(Nodes.Math, input_kwargs={0: group_input, 1: cosine}, attrs={'operation': 'MULTIPLY'}) - - multiply_4 = nw.new_node(Nodes.Math, input_kwargs={0: group_input, 1: sine}, attrs={'operation': 'MULTIPLY'}) - + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "Value1", 0.5000), + ("NodeSocketFloat", "Value2", 0.5000), + ("NodeSocketFloat", "Value3", 0.5000), + ], + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Value3"], 1: 0.0175}, + attrs={"operation": "MULTIPLY"}, + ) # pretty sure Value3 is the right one here + + sine = nw.new_node( + Nodes.Math, input_kwargs={0: multiply}, attrs={"operation": "SINE"} + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: sine, 1: group_input}, + attrs={"operation": "MULTIPLY"}, + ) + + cosine = nw.new_node( + Nodes.Math, input_kwargs={0: multiply}, attrs={"operation": "COSINE"} + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input, 1: cosine}, + attrs={"operation": "MULTIPLY"}, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_1, 1: multiply_2}, + attrs={"operation": "SUBTRACT"}, + ) + + multiply_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input, 1: cosine}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_4 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input, 1: sine}, + attrs={"operation": "MULTIPLY"}, + ) + add = nw.new_node(Nodes.Math, 
input_kwargs={0: multiply_3, 1: multiply_4}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Value': subtract, 'Value1': add}, attrs={'is_active_output': True}) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Value": subtract, "Value1": add}, + attrs={"is_active_output": True}, + ) + def shader_eyeball_fish(nw: NodeWrangler): # Code generated using version 2.6.3 of the node_transpiler - attribute_1 = nw.new_node(Nodes.Attribute, attrs={'attribute_name': 'EyeballPosition'}) - - mapping = nw.new_node(Nodes.Mapping, input_kwargs={'Vector': attribute_1.outputs["Color"]}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, input_kwargs={'Vector': mapping, 'Scale': 50.0000}) - - mix = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': 0.0200, 'Color1': mapping, 'Color2': noise_texture.outputs["Color"]}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': mix}) - + attribute_1 = nw.new_node( + Nodes.Attribute, attrs={"attribute_name": "EyeballPosition"} + ) + + mapping = nw.new_node( + Nodes.Mapping, input_kwargs={"Vector": attribute_1.outputs["Color"]} + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, input_kwargs={"Vector": mapping, "Scale": 50.0000} + ) + + mix = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": 0.0200, + "Color1": mapping, + "Color2": noise_texture.outputs["Color"], + }, + ) + + separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": mix}) + value = nw.new_node(Nodes.Value) value.outputs[0].default_value = 0.0000 - - group = nw.new_node(nodegroup_rotate2_d_002().name, - input_kwargs={0: separate_xyz.outputs["Y"], 'Value2': separate_xyz.outputs["Z"], 2: value}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz.outputs["Z"], 1: 0.3000}, attrs={'operation': 'MULTIPLY'}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: multiply}, attrs={'operation': 'MULTIPLY'}) - - multiply_2 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz.outputs["Y"], 1: 0.8000}, attrs={'operation': 'MULTIPLY'}) - - multiply_3 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_2, 1: multiply_2}, attrs={'operation': 'MULTIPLY'}) - + + group = nw.new_node( + nodegroup_rotate2_d_002().name, + input_kwargs={ + 0: separate_xyz.outputs["Y"], + "Value2": separate_xyz.outputs["Z"], + 2: value, + }, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["Z"], 1: 0.3000}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply, 1: multiply}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["Y"], 1: 0.8000}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_2, 1: multiply_2}, + attrs={"operation": "MULTIPLY"}, + ) + add = nw.new_node(Nodes.Math, input_kwargs={0: multiply_1, 1: multiply_3}) - + add_1 = nw.new_node(Nodes.Math, input_kwargs={0: add, 1: 0.6300}) - - colorramp = nw.new_node(Nodes.ColorRamp, input_kwargs={'Fac': add_1}) + + colorramp = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": add_1}) colorramp.color_ramp.elements[0].position = 0.6400 colorramp.color_ramp.elements[0].color = [1.0000, 1.0000, 1.0000, 1.0000] colorramp.color_ramp.elements[1].position = 0.6591 colorramp.color_ramp.elements[1].color = [0.0000, 0.0000, 0.0000, 1.0000] - - mapping_1 = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': attribute_1.outputs["Color"], 'Scale': (1.0000, 
100.0000, 1.0000)}) - - mix_1 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': 0.3000, 'Color1': mapping_1, 'Color2': attribute_1.outputs["Color"]}) - - noise_texture_1 = nw.new_node(Nodes.NoiseTexture, input_kwargs={'Vector': mix_1, 'Scale': 10.0000}) - - mix_2 = nw.new_node(Nodes.MixRGB, input_kwargs={'Fac': 0.7000, 'Color1': noise_texture_1.outputs["Fac"], 'Color2': mix_1}) - - voronoi_texture = nw.new_node(Nodes.VoronoiTexture, input_kwargs={'Vector': mix_2, 'Scale': 20.0000}) - - multiply_4 = nw.new_node(Nodes.Math, - input_kwargs={0: voronoi_texture.outputs["Distance"], 1: voronoi_texture.outputs["Distance"]}, - attrs={'operation': 'MULTIPLY'}) - - mapping_2 = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': attribute_1.outputs["Color"], 'Scale': (20.0000, 1.0000, 1.0000)}) - - mix_3 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': 0.3000, 'Color1': mapping_2, 'Color2': attribute_1.outputs["Color"]}) - - noise_texture_2 = nw.new_node(Nodes.NoiseTexture, input_kwargs={'Vector': mix_3, 'Scale': 10.0000}) - - mix_4 = nw.new_node(Nodes.MixRGB, input_kwargs={'Fac': 0.7000, 'Color1': noise_texture_2.outputs["Fac"], 'Color2': mix_3}) - - voronoi_texture_1 = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': mix_4, 'W': U(-10, 10), 'Scale': 1.0000}, - attrs={'voronoi_dimensions': '4D'}) - - multiply_5 = nw.new_node(Nodes.Math, - input_kwargs={0: voronoi_texture_1.outputs["Distance"], 1: voronoi_texture_1.outputs["Distance"]}, - attrs={'operation': 'MULTIPLY'}) - - mix_5 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': 1.0000, 'Color1': multiply_4, 'Color2': multiply_5}, - attrs={'blend_type': 'OVERLAY'}) - - bright_contrast = nw.new_node('ShaderNodeBrightContrast', input_kwargs={'Color': mix_5, 'Bright': 0.6000, 'Contrast': 1.5000}) - + + mapping_1 = nw.new_node( + Nodes.Mapping, + input_kwargs={ + "Vector": attribute_1.outputs["Color"], + "Scale": (1.0000, 100.0000, 1.0000), + }, + ) + + mix_1 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": 0.3000, + "Color1": mapping_1, + "Color2": attribute_1.outputs["Color"], + }, + ) + + noise_texture_1 = nw.new_node( + Nodes.NoiseTexture, input_kwargs={"Vector": mix_1, "Scale": 10.0000} + ) + + mix_2 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": 0.7000, + "Color1": noise_texture_1.outputs["Fac"], + "Color2": mix_1, + }, + ) + + voronoi_texture = nw.new_node( + Nodes.VoronoiTexture, input_kwargs={"Vector": mix_2, "Scale": 20.0000} + ) + + multiply_4 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: voronoi_texture.outputs["Distance"], + 1: voronoi_texture.outputs["Distance"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + mapping_2 = nw.new_node( + Nodes.Mapping, + input_kwargs={ + "Vector": attribute_1.outputs["Color"], + "Scale": (20.0000, 1.0000, 1.0000), + }, + ) + + mix_3 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": 0.3000, + "Color1": mapping_2, + "Color2": attribute_1.outputs["Color"], + }, + ) + + noise_texture_2 = nw.new_node( + Nodes.NoiseTexture, input_kwargs={"Vector": mix_3, "Scale": 10.0000} + ) + + mix_4 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": 0.7000, + "Color1": noise_texture_2.outputs["Fac"], + "Color2": mix_3, + }, + ) + + voronoi_texture_1 = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={"Vector": mix_4, "W": U(-10, 10), "Scale": 1.0000}, + attrs={"voronoi_dimensions": "4D"}, + ) + + multiply_5 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: voronoi_texture_1.outputs["Distance"], + 1: voronoi_texture_1.outputs["Distance"], + }, + attrs={"operation": 
"MULTIPLY"}, + ) + + mix_5 = nw.new_node( + Nodes.MixRGB, + input_kwargs={"Fac": 1.0000, "Color1": multiply_4, "Color2": multiply_5}, + attrs={"blend_type": "OVERLAY"}, + ) + + bright_contrast = nw.new_node( + "ShaderNodeBrightContrast", + input_kwargs={"Color": mix_5, "Bright": 0.6000, "Contrast": 1.5000}, + ) + scale1 = U(0.65, 1.2) - multiply_6 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz.outputs["Z"], 1: scale1}, attrs={'operation': 'MULTIPLY'}) - - multiply_7 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_6, 1: multiply_6}, attrs={'operation': 'MULTIPLY'}) - - multiply_8 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz.outputs["Y"], 1: scale1}, attrs={'operation': 'MULTIPLY'}) - - multiply_9 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_8, 1: multiply_8}, attrs={'operation': 'MULTIPLY'}) - + multiply_6 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["Z"], 1: scale1}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_7 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_6, 1: multiply_6}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_8 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["Y"], 1: scale1}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_9 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_8, 1: multiply_8}, + attrs={"operation": "MULTIPLY"}, + ) + add_2 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_7, 1: multiply_9}) - + add_3 = nw.new_node(Nodes.Math, input_kwargs={0: add_2}) - - colorramp_1 = nw.new_node(Nodes.ColorRamp, input_kwargs={'Fac': add_3}) + + colorramp_1 = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": add_3}) colorramp_1.color_ramp.elements[0].position = 0.6159 colorramp_1.color_ramp.elements[0].color = [1.0000, 1.0000, 1.0000, 1.0000] colorramp_1.color_ramp.elements[1].position = 0.6591 colorramp_1.color_ramp.elements[1].color = [0.0000, 0.0000, 0.0000, 1.0000] - - colorramp_2 = nw.new_node(Nodes.ColorRamp, input_kwargs={'Fac': colorramp_1.outputs["Color"]}) + + colorramp_2 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": colorramp_1.outputs["Color"]} + ) colorramp_2.color_ramp.elements[0].position = 0.0295 colorramp_2.color_ramp.elements[0].color = [0.0000, 0.0000, 0.0000, 1.0000] colorramp_2.color_ramp.elements[1].position = 0.0523 colorramp_2.color_ramp.elements[1].color = [1.0000, 1.0000, 1.0000, 1.0000] - - add_4 = nw.new_node(Nodes.Math, + + add_4 = nw.new_node( + Nodes.Math, input_kwargs={0: bright_contrast, 1: colorramp_2.outputs["Color"]}, - attrs={'use_clamp': True}) - + attrs={"use_clamp": True}, + ) + scale2 = U(0.6, 0.8) - multiply_10 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz.outputs["Z"], 1: scale2}, attrs={'operation': 'MULTIPLY'}) - - multiply_11 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_10, 1: multiply_10}, attrs={'operation': 'MULTIPLY'}) - - multiply_12 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz.outputs["Y"], 1: scale2}, attrs={'operation': 'MULTIPLY'}) - - multiply_13 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_12, 1: multiply_12}, attrs={'operation': 'MULTIPLY'}) - + multiply_10 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["Z"], 1: scale2}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_11 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_10, 1: multiply_10}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_12 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["Y"], 1: scale2}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_13 
= nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_12, 1: multiply_12}, + attrs={"operation": "MULTIPLY"}, + ) + add_5 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_11, 1: multiply_13}) - + add_6 = nw.new_node(Nodes.Math, input_kwargs={0: add_5, 1: 0.1800}) - - colorramp_3 = nw.new_node(Nodes.ColorRamp, input_kwargs={'Fac': add_6}) + + colorramp_3 = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": add_6}) colorramp_3.color_ramp.elements[0].position = 0.4773 colorramp_3.color_ramp.elements[0].color = [1.0000, 1.0000, 1.0000, 1.0000] colorramp_3.color_ramp.elements[1].position = 0.6659 colorramp_3.color_ramp.elements[1].color = [0.0000, 0.0000, 0.0000, 1.0000] - - attribute_2 = nw.new_node(Nodes.Attribute, attrs={'attribute_name': 'EyeballPosition'}) - - noise_texture_3 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': attribute_2.outputs["Color"], 'W': U(-10, 10), 'Scale': 0.5000}, - attrs={'noise_dimensions': '4D'}) - - colorramp_4 = nw.new_node(Nodes.ColorRamp, input_kwargs={'Fac': noise_texture_3.outputs["Color"]}) + + attribute_2 = nw.new_node( + Nodes.Attribute, attrs={"attribute_name": "EyeballPosition"} + ) + + noise_texture_3 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": attribute_2.outputs["Color"], + "W": U(-10, 10), + "Scale": 0.5000, + }, + attrs={"noise_dimensions": "4D"}, + ) + + colorramp_4 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": noise_texture_3.outputs["Color"]} + ) colorramp_4.color_ramp.interpolation = "CARDINAL" colorramp_4.color_ramp.elements[0].position = 0.3704 colorramp_4.color_ramp.elements[0].color = [0.9570, 0.9247, 0.2801, 1.0000] @@ -187,27 +364,61 @@ def shader_eyeball_fish(nw: NodeWrangler): sample_color(colorramp_4.color_ramp.elements[0].color) sample_color(colorramp_4.color_ramp.elements[1].color) - - mix_6 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': colorramp_3.outputs["Color"], 'Color1': (0.7384, 0.5239, 0.2703, 1.0000), 'Color2': colorramp_4.outputs["Color"]}) + mix_6 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": colorramp_3.outputs["Color"], + "Color1": (0.7384, 0.5239, 0.2703, 1.0000), + "Color2": colorramp_4.outputs["Color"], + }, + ) sample_color(mix_6.inputs[6].default_value) - mix_7 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': colorramp_1.outputs["Color"], 'Color1': mix_6, 'Color2': (0.0000, 0.0000, 0.0000, 1.0000)}) - - mix_8 = nw.new_node(Nodes.MixRGB, input_kwargs={'Fac': add_4, 'Color1': (0.0000, 0.0000, 0.0000, 1.0000), 'Color2': mix_7}) - - mix_9 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': colorramp.outputs["Color"], 'Color1': mix_8, 'Color2': (0.0000, 0.0000, 0.0000, 1.0000)}) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, input_kwargs={'Base Color': mix_8, 'Specular': 0.0000, 'Roughness': 0.0000}) - - glossy_bsdf = nw.new_node('ShaderNodeBsdfGlossy') - - mix_shader_1 = nw.new_node(Nodes.MixShader, input_kwargs={'Fac': 0.0200, 1: principled_bsdf, 2: glossy_bsdf}) - - material_output = nw.new_node(Nodes.MaterialOutput, input_kwargs={'Surface': mix_shader_1}, attrs={'is_active_output': True}) + mix_7 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": colorramp_1.outputs["Color"], + "Color1": mix_6, + "Color2": (0.0000, 0.0000, 0.0000, 1.0000), + }, + ) + + mix_8 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": add_4, + "Color1": (0.0000, 0.0000, 0.0000, 1.0000), + "Color2": mix_7, + }, + ) + + mix_9 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": colorramp.outputs["Color"], + "Color1": mix_8, + "Color2": (0.0000, 0.0000, 
0.0000, 1.0000), + }, + ) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={"Base Color": mix_8, "Specular": 0.0000, "Roughness": 0.0000}, + ) + + glossy_bsdf = nw.new_node("ShaderNodeBsdfGlossy") + + mix_shader_1 = nw.new_node( + Nodes.MixShader, + input_kwargs={"Fac": 0.0200, 1: principled_bsdf, 2: glossy_bsdf}, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, + input_kwargs={"Surface": mix_shader_1}, + attrs={"is_active_output": True}, + ) def apply(obj, selection=None, **kwargs): - surface.add_material(obj, shader_eyeball_fish, selection=selection) \ No newline at end of file + surface.add_material(obj, shader_eyeball_fish, selection=selection) diff --git a/infinigen/assets/materials/fishbody.py b/infinigen/assets/materials/fishbody.py index 505253e71..f28e4f817 100644 --- a/infinigen/assets/materials/fishbody.py +++ b/infinigen/assets/materials/fishbody.py @@ -5,482 +5,760 @@ # Acknowledgment: This file draws inspiration from https://www.youtube.com/watch?v=mJVuodaPHTQ and https://www.youtube.com/watch?v=v7a4ouBLIow by Lance Phan -import os, sys, random -import numpy as np -import math as ma -from infinigen.assets.materials.utils.surface_utils import clip, sample_range, sample_ratio, sample_color, geo_voronoi_noise +import os +import random + import bpy -import mathutils -from numpy.random import uniform as U, normal as N, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category +from numpy.random import normal as N +from numpy.random import uniform as U + +from infinigen.assets.materials.utils.surface_utils import ( + sample_color, + sample_range, + sample_ratio, +) from infinigen.core import surface +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -@node_utils.to_nodegroup('nodegroup_node_grid', singleton=False, type='GeometryNodeTree') +@node_utils.to_nodegroup( + "nodegroup_node_grid", singleton=False, type="GeometryNodeTree" +) def nodegroup_node_grid(nw: NodeWrangler): # Code generated using version 2.4.3 of the node_transpiler - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Value', 0.5)]) - - multiply = nw.new_node(Nodes.Math, + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketFloat", "Value", 0.5)] + ) + + multiply = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["Value"], 1: 2.0}, - attrs={'operation': 'MULTIPLY'}) - - floor = nw.new_node(Nodes.Math, - input_kwargs={0: multiply}, - attrs={'operation': 'FLOOR'}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: floor}, - attrs={'operation': 'MULTIPLY'}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_1}) - - trunc = nw.new_node(Nodes.Math, - input_kwargs={0: add}, - attrs={'operation': 'TRUNC'}) - - trunc_1 = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_1}, - attrs={'operation': 'TRUNC'}) - - add_1 = nw.new_node(Nodes.Math, - input_kwargs={0: trunc_1}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'floor1': trunc, 'floor2': add_1}) - - -@node_utils.to_nodegroup('nodegroup_node_group', singleton=False, type='GeometryNodeTree') + attrs={"operation": "MULTIPLY"}, + ) + + floor = nw.new_node( + Nodes.Math, input_kwargs={0: multiply}, attrs={"operation": "FLOOR"} + ) + + multiply_1 = nw.new_node( + Nodes.Math, input_kwargs={0: floor}, attrs={"operation": "MULTIPLY"} + ) + + add = 
nw.new_node(Nodes.Math, input_kwargs={0: multiply_1}) + + trunc = nw.new_node(Nodes.Math, input_kwargs={0: add}, attrs={"operation": "TRUNC"}) + + trunc_1 = nw.new_node( + Nodes.Math, input_kwargs={0: multiply_1}, attrs={"operation": "TRUNC"} + ) + + add_1 = nw.new_node(Nodes.Math, input_kwargs={0: trunc_1}) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"floor1": trunc, "floor2": add_1} + ) + + +@node_utils.to_nodegroup( + "nodegroup_node_group", singleton=False, type="GeometryNodeTree" +) def nodegroup_UV(nw: NodeWrangler): # Code generated using version 2.4.3 of the node_transpiler - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'Vector', (0.0, 0.0, 0.0))]) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': group_input.outputs["Vector"]}) - - subtract = nw.new_node(Nodes.Math, + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketVector", "Vector", (0.0, 0.0, 0.0))] + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": group_input.outputs["Vector"]} + ) + + subtract = nw.new_node( + Nodes.Math, input_kwargs={0: 0.75, 1: separate_xyz.outputs["X"]}, - attrs={'operation': 'SUBTRACT'}) - - absolute = nw.new_node(Nodes.Math, - input_kwargs={0: subtract}, - attrs={'operation': 'ABSOLUTE'}) - - subtract_1 = nw.new_node(Nodes.Math, - input_kwargs={1: absolute}, - attrs={'operation': 'SUBTRACT'}) - - absolute_1 = nw.new_node(Nodes.Math, - input_kwargs={0: subtract_1}, - attrs={'operation': 'ABSOLUTE'}) - - multiply = nw.new_node(Nodes.Math, + attrs={"operation": "SUBTRACT"}, + ) + + absolute = nw.new_node( + Nodes.Math, input_kwargs={0: subtract}, attrs={"operation": "ABSOLUTE"} + ) + + subtract_1 = nw.new_node( + Nodes.Math, input_kwargs={1: absolute}, attrs={"operation": "SUBTRACT"} + ) + + absolute_1 = nw.new_node( + Nodes.Math, input_kwargs={0: subtract_1}, attrs={"operation": "ABSOLUTE"} + ) + + multiply = nw.new_node( + Nodes.Math, input_kwargs={0: absolute_1, 1: 2.0}, - attrs={'operation': 'MULTIPLY'}) - - subtract_2 = nw.new_node(Nodes.Math, + attrs={"operation": "MULTIPLY"}, + ) + + subtract_2 = nw.new_node( + Nodes.Math, input_kwargs={0: 1.0, 1: multiply}, - attrs={'operation': 'SUBTRACT', 'use_clamp': True}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': subtract_2, 'Y': separate_xyz.outputs["Y"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Vector': combine_xyz}) - -@node_utils.to_nodegroup('nodegroup_scales', singleton=False, type='GeometryNodeTree') + attrs={"operation": "SUBTRACT", "use_clamp": True}, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": subtract_2, "Y": separate_xyz.outputs["Y"]} + ) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Vector": combine_xyz}) + + +@node_utils.to_nodegroup("nodegroup_scales", singleton=False, type="GeometryNodeTree") def nodegroup_scales(nw: NodeWrangler): # Code generated using version 2.4.3 of the node_transpiler - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Mesh', None), - ('NodeSocketVector', 'Vector', (0.0, 0.0, 0.0)), - ('NodeSocketFloat', 'Scale', 40.0), - ('NodeSocketFloat', 'Xscale', 1.0), - ('NodeSocketFloat', 'Yscale', 1.0), - ('NodeSocketFloat', 'Xnoise', 0.02), - ('NodeSocketFloat', 'Ynoise', 0.02), - ('NodeSocketFloat', 'Offset', 0.0002)]) - - #subdivide_mesh = nw.new_node(Nodes.SubdivideMesh, + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + 
("NodeSocketGeometry", "Mesh", None), + ("NodeSocketVector", "Vector", (0.0, 0.0, 0.0)), + ("NodeSocketFloat", "Scale", 40.0), + ("NodeSocketFloat", "Xscale", 1.0), + ("NodeSocketFloat", "Yscale", 1.0), + ("NodeSocketFloat", "Xnoise", 0.02), + ("NodeSocketFloat", "Ynoise", 0.02), + ("NodeSocketFloat", "Offset", 0.0002), + ], + ) + + # subdivide_mesh = nw.new_node(Nodes.SubdivideMesh, # input_kwargs={'Mesh': group_input.outputs["Mesh"]}) - - separate_xyz_2 = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': group_input.outputs["Vector"]}) - - multiply = nw.new_node(Nodes.Math, + + separate_xyz_2 = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": group_input.outputs["Vector"]} + ) + + multiply = nw.new_node( + Nodes.Math, input_kwargs={0: separate_xyz_2.outputs["X"], 1: 10.0}, - attrs={'operation': 'MULTIPLY'}) - - reroute_2 = nw.new_node(Nodes.Reroute, - input_kwargs={'Input': multiply}) - - angle = nw.new_node(Nodes.Value, - label='Angle') + attrs={"operation": "MULTIPLY"}, + ) + + reroute_2 = nw.new_node(Nodes.Reroute, input_kwargs={"Input": multiply}) + + angle = nw.new_node(Nodes.Value, label="Angle") angle.outputs[0].default_value = 0.0 - - cosine = nw.new_node(Nodes.Math, - input_kwargs={0: angle}, - attrs={'operation': 'COSINE'}) - - multiply_1 = nw.new_node(Nodes.Math, + + cosine = nw.new_node( + Nodes.Math, input_kwargs={0: angle}, attrs={"operation": "COSINE"} + ) + + multiply_1 = nw.new_node( + Nodes.Math, input_kwargs={0: reroute_2, 1: cosine}, - attrs={'operation': 'MULTIPLY'}) - - sine = nw.new_node(Nodes.Math, - input_kwargs={0: angle}, - attrs={'operation': 'SINE'}) - - multiply_2 = nw.new_node(Nodes.Math, + attrs={"operation": "MULTIPLY"}, + ) + + sine = nw.new_node(Nodes.Math, input_kwargs={0: angle}, attrs={"operation": "SINE"}) + + multiply_2 = nw.new_node( + Nodes.Math, input_kwargs={0: separate_xyz_2.outputs["Y"], 1: sine}, - attrs={'operation': 'MULTIPLY'}) - - subtract = nw.new_node(Nodes.Math, + attrs={"operation": "MULTIPLY"}, + ) + + subtract = nw.new_node( + Nodes.Math, input_kwargs={0: multiply_1, 1: multiply_2}, - attrs={'operation': 'SUBTRACT'}) - - group_input_2 = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Mesh', None), - ('NodeSocketVector', 'Vector', (0.0, 0.0, 0.0)), - ('NodeSocketFloat', 'Scale', 40.0), - ('NodeSocketFloat', 'Xscale', 1.0), - ('NodeSocketFloat', 'Yscale', 1.0), - ('NodeSocketFloat', 'Xnoise', 0.02), - ('NodeSocketFloat', 'Ynoise', 0.02), - ('NodeSocketFloat', 'Offset', 0.0002)]) - - multiply_3 = nw.new_node(Nodes.Math, + attrs={"operation": "SUBTRACT"}, + ) + + group_input_2 = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Mesh", None), + ("NodeSocketVector", "Vector", (0.0, 0.0, 0.0)), + ("NodeSocketFloat", "Scale", 40.0), + ("NodeSocketFloat", "Xscale", 1.0), + ("NodeSocketFloat", "Yscale", 1.0), + ("NodeSocketFloat", "Xnoise", 0.02), + ("NodeSocketFloat", "Ynoise", 0.02), + ("NodeSocketFloat", "Offset", 0.0002), + ], + ) + + multiply_3 = nw.new_node( + Nodes.Math, input_kwargs={0: subtract, 1: group_input_2.outputs["Xscale"]}, - attrs={'operation': 'MULTIPLY'}) - - noise_texture_2 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'W': 0.8, 'Scale': 10.0}, - attrs={'noise_dimensions': '4D'}) - - multiply_4 = nw.new_node(Nodes.Math, - input_kwargs={0: noise_texture_2.outputs["Fac"], 1: group_input_2.outputs["Xnoise"]}, - attrs={'operation': 'MULTIPLY'}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_3, 1: multiply_4}) - - multiply_5 = 
nw.new_node(Nodes.Math, + attrs={"operation": "MULTIPLY"}, + ) + + noise_texture_2 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={"W": 0.8, "Scale": 10.0}, + attrs={"noise_dimensions": "4D"}, + ) + + multiply_4 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: noise_texture_2.outputs["Fac"], + 1: group_input_2.outputs["Xnoise"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + add = nw.new_node(Nodes.Math, input_kwargs={0: multiply_3, 1: multiply_4}) + + multiply_5 = nw.new_node( + Nodes.Math, input_kwargs={0: reroute_2, 1: sine}, - attrs={'operation': 'MULTIPLY'}) - - multiply_6 = nw.new_node(Nodes.Math, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_6 = nw.new_node( + Nodes.Math, input_kwargs={0: separate_xyz_2.outputs["Y"], 1: cosine}, - attrs={'operation': 'MULTIPLY'}) - - add_1 = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_5, 1: multiply_6}) - - multiply_7 = nw.new_node(Nodes.Math, + attrs={"operation": "MULTIPLY"}, + ) + + add_1 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_5, 1: multiply_6}) + + multiply_7 = nw.new_node( + Nodes.Math, input_kwargs={0: add_1, 1: group_input_2.outputs["Yscale"]}, - attrs={'operation': 'MULTIPLY'}) - - noise_texture_1 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'W': 0.8, 'Scale': 10.0}, - attrs={'noise_dimensions': '4D'}) - - multiply_8 = nw.new_node(Nodes.Math, - input_kwargs={0: noise_texture_1.outputs["Fac"], 1: group_input_2.outputs["Ynoise"]}, - attrs={'operation': 'MULTIPLY'}) - - add_2 = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_7, 1: multiply_8}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': add, 'Y': add_2}) - - group_input_1 = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Mesh', None), - ('NodeSocketVector', 'Vector', (0.0, 0.0, 0.0)), - ('NodeSocketFloat', 'Scale', 40.0), - ('NodeSocketFloat', 'Xscale', 1.0), - ('NodeSocketFloat', 'Yscale', 1.0), - ('NodeSocketFloat', 'Xnoise', 0.02), - ('NodeSocketFloat', 'Ynoise', 0.02), - ('NodeSocketFloat', 'Offset', 0.0002)]) - - multiply_8 = nw.new_node(Nodes.VectorMath, input_kwargs={0: combine_xyz_2, 1: group_input_1.outputs["Scale"]}, attrs={'operation': 'MULTIPLY'}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': multiply_8}) - - nodegrid = nw.new_node(nodegroup_node_grid().name, input_kwargs={'Value': separate_xyz.outputs["Y"]}) - - greater_than = nw.new_node(Nodes.Compare, + attrs={"operation": "MULTIPLY"}, + ) + + noise_texture_1 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={"W": 0.8, "Scale": 10.0}, + attrs={"noise_dimensions": "4D"}, + ) + + multiply_8 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: noise_texture_1.outputs["Fac"], + 1: group_input_2.outputs["Ynoise"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + add_2 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_7, 1: multiply_8}) + + combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"X": add, "Y": add_2}) + + group_input_1 = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Mesh", None), + ("NodeSocketVector", "Vector", (0.0, 0.0, 0.0)), + ("NodeSocketFloat", "Scale", 40.0), + ("NodeSocketFloat", "Xscale", 1.0), + ("NodeSocketFloat", "Yscale", 1.0), + ("NodeSocketFloat", "Xnoise", 0.02), + ("NodeSocketFloat", "Ynoise", 0.02), + ("NodeSocketFloat", "Offset", 0.0002), + ], + ) + + multiply_8 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: combine_xyz_2, 1: group_input_1.outputs["Scale"]}, + attrs={"operation": "MULTIPLY"}, + ) + + separate_xyz = 
nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": multiply_8}) + + nodegrid = nw.new_node( + nodegroup_node_grid().name, input_kwargs={"Value": separate_xyz.outputs["Y"]} + ) + + greater_than = nw.new_node( + Nodes.Compare, + input_kwargs={0: nodegrid.outputs["floor1"], 1: separate_xyz.outputs["Y"]}, + attrs={"operation": "LESS_THAN"}, + ) + + less_than = nw.new_node( + Nodes.Compare, input_kwargs={0: nodegrid.outputs["floor1"], 1: separate_xyz.outputs["Y"]}, - attrs={'operation': 'LESS_THAN'}) - - less_than = nw.new_node(Nodes.Compare, input_kwargs={0: nodegrid.outputs["floor1"], 1: separate_xyz.outputs["Y"]}) - - nodegrid_1 = nw.new_node(nodegroup_node_grid().name, input_kwargs={'Value': separate_xyz.outputs["X"]}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': nodegrid_1.outputs["floor2"], 'Y': nodegrid.outputs["floor1"]}) - - multiply_9 = nw.new_node(Nodes.VectorMath, input_kwargs={0: less_than, 1: combine_xyz}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': nodegrid_1.outputs["floor1"], 'Y': nodegrid.outputs["floor2"]}) - - multiply_10 = nw.new_node(Nodes.VectorMath, input_kwargs={0: greater_than, 1: combine_xyz_1}, attrs={'operation': 'MULTIPLY'}) - - add_3 = nw.new_node(Nodes.VectorMath, input_kwargs={0: multiply_9.outputs["Vector"], 1: multiply_10.outputs["Vector"]}) - - subtract_1 = nw.new_node(Nodes.VectorMath, input_kwargs={0: multiply_8, 1: add_3}, attrs={'operation': 'SUBTRACT'}) - - distance = nw.new_node(Nodes.VectorMath, input_kwargs={0: multiply_8, 1: add_3}, attrs={'operation': 'DISTANCE'}) - - add_4 = nw.new_node(Nodes.Math, input_kwargs={0: distance.outputs["Value"], 1: 0.0100}) - - less_than_1 = nw.new_node(Nodes.Compare, input_kwargs={0: add_4, 1: 0.5000}, attrs={'operation': 'LESS_THAN'}) - + ) + + nodegrid_1 = nw.new_node( + nodegroup_node_grid().name, input_kwargs={"Value": separate_xyz.outputs["X"]} + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": nodegrid_1.outputs["floor2"], + "Y": nodegrid.outputs["floor1"], + }, + ) + + multiply_9 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: less_than, 1: combine_xyz}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": nodegrid_1.outputs["floor1"], + "Y": nodegrid.outputs["floor2"], + }, + ) + + multiply_10 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: greater_than, 1: combine_xyz_1}, + attrs={"operation": "MULTIPLY"}, + ) + + add_3 = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: multiply_9.outputs["Vector"], + 1: multiply_10.outputs["Vector"], + }, + ) + + subtract_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: multiply_8, 1: add_3}, + attrs={"operation": "SUBTRACT"}, + ) + + distance = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: multiply_8, 1: add_3}, + attrs={"operation": "DISTANCE"}, + ) + + add_4 = nw.new_node( + Nodes.Math, input_kwargs={0: distance.outputs["Value"], 1: 0.0100} + ) + + less_than_1 = nw.new_node( + Nodes.Compare, + input_kwargs={0: add_4, 1: 0.5000}, + attrs={"operation": "LESS_THAN"}, + ) + greater_than_1 = nw.new_node(Nodes.Compare, input_kwargs={0: add_4, 1: 0.5000}) - - multiply_11 = nw.new_node(Nodes.VectorMath, input_kwargs={0: less_than, 1: combine_xyz_1}, attrs={'operation': 'MULTIPLY'}) - - multiply_12 = nw.new_node(Nodes.VectorMath, input_kwargs={0: greater_than, 1: combine_xyz}, attrs={'operation': 'MULTIPLY'}) - - add_5 = nw.new_node(Nodes.VectorMath, input_kwargs={0: 
multiply_11.outputs["Vector"], 1: multiply_12.outputs["Vector"]}) - - subtract_2 = nw.new_node(Nodes.VectorMath, input_kwargs={0: multiply_8, 1: add_5}, attrs={'operation': 'SUBTRACT'}) - - multiply_13 = nw.new_node(Nodes.VectorMath, + + multiply_11 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: less_than, 1: combine_xyz_1}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_12 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: greater_than, 1: combine_xyz}, + attrs={"operation": "MULTIPLY"}, + ) + + add_5 = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: multiply_11.outputs["Vector"], + 1: multiply_12.outputs["Vector"], + }, + ) + + subtract_2 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: multiply_8, 1: add_5}, + attrs={"operation": "SUBTRACT"}, + ) + + multiply_13 = nw.new_node( + Nodes.VectorMath, input_kwargs={0: greater_than_1, 1: subtract_2.outputs["Vector"]}, - attrs={'operation': 'MULTIPLY'}) - - _multiply_add = nw.new_node(Nodes.VectorMath, - input_kwargs={0: subtract_1.outputs["Vector"], 1: less_than_1, 2: multiply_13.outputs["Vector"]}, - attrs={'operation': 'MULTIPLY_ADD'}) - - multiply_add = nw.new_node(Nodes.VectorMath, + attrs={"operation": "MULTIPLY"}, + ) + + _multiply_add = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: subtract_1.outputs["Vector"], + 1: less_than_1, + 2: multiply_13.outputs["Vector"], + }, + attrs={"operation": "MULTIPLY_ADD"}, + ) + + multiply_add = nw.new_node( + Nodes.VectorMath, input_kwargs={0: _multiply_add, 1: (1, -1, 1)}, - attrs={'operation': 'MULTIPLY'}) - - multiply_14 = nw.new_node(Nodes.VectorMath, input_kwargs={0: greater_than_1, 1: add_5}, attrs={'operation': 'MULTIPLY'}) - - multiply_add_1 = nw.new_node(Nodes.VectorMath, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_14 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: greater_than_1, 1: add_5}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_add_1 = nw.new_node( + Nodes.VectorMath, input_kwargs={0: add_3, 1: less_than_1, 2: multiply_14.outputs["Vector"]}, - attrs={'operation': 'MULTIPLY_ADD'}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': multiply_add_1, 'W': sample_range(-10, 10), 'Scale': 33.0000}, - attrs={'noise_dimensions': '4D'}) - - subtract_3 = nw.new_node(Nodes.MapRange, - input_kwargs={0: noise_texture.outputs["Fac"], 1: 0.26, 2: 0.74, 3: -0.5, 4: 0.5}, - attrs={'clamp': True} - ) + attrs={"operation": "MULTIPLY_ADD"}, + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": multiply_add_1, + "W": sample_range(-10, 10), + "Scale": 33.0000, + }, + attrs={"noise_dimensions": "4D"}, + ) + + subtract_3 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + 0: noise_texture.outputs["Fac"], + 1: 0.26, + 2: 0.74, + 3: -0.5, + 4: 0.5, + }, + attrs={"clamp": True}, + ) + + sine_1 = nw.new_node( + Nodes.Math, input_kwargs={0: subtract_3}, attrs={"operation": "SINE"} + ) + + cosine_1 = nw.new_node( + Nodes.Math, input_kwargs={0: subtract_3}, attrs={"operation": "COSINE"} + ) + + combine_xyz_color = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": sine_1, "Y": cosine_1, "Z": 0.0000} + ) + + add_6 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: combine_xyz_color.outputs["Vector"], 1: multiply_add}, + attrs={"operation": "DOT_PRODUCT"}, + ) + + distance_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: multiply_8, 1: add_5}, + attrs={"operation": "DISTANCE"}, + ) + + add_7 = nw.new_node( + Nodes.Math, input_kwargs={0: distance_1.outputs["Value"], 1: 0.0100} + ) + + 
multiply_17 = nw.new_node( + Nodes.Math, + input_kwargs={0: greater_than_1, 1: add_7}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_add_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: add_4, 1: less_than_1, 2: multiply_17}, + attrs={"operation": "MULTIPLY_ADD"}, + ) + + multiply_18 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_add_2, 1: 2.0000}, + attrs={"operation": "MULTIPLY"}, + ) - sine_1 = nw.new_node(Nodes.Math, input_kwargs={0: subtract_3}, attrs={'operation': 'SINE'}) - - cosine_1 = nw.new_node(Nodes.Math, input_kwargs={0: subtract_3}, attrs={'operation': 'COSINE'}) - - combine_xyz_color = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': sine_1, 'Y': cosine_1, 'Z': 0.0000}) - - add_6 = nw.new_node(Nodes.VectorMath, input_kwargs={0: combine_xyz_color.outputs["Vector"], 1: multiply_add}, attrs={'operation': 'DOT_PRODUCT'}) - - distance_1 = nw.new_node(Nodes.VectorMath, input_kwargs={0: multiply_8, 1: add_5}, attrs={'operation': 'DISTANCE'}) - - add_7 = nw.new_node(Nodes.Math, input_kwargs={0: distance_1.outputs["Value"], 1: 0.0100}) - - multiply_17 = nw.new_node(Nodes.Math, input_kwargs={0: greater_than_1, 1: add_7}, attrs={'operation': 'MULTIPLY'}) - - multiply_add_2 = nw.new_node(Nodes.Math, input_kwargs={0: add_4, 1: less_than_1, 2: multiply_17}, attrs={'operation': 'MULTIPLY_ADD'}) - - multiply_18 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_add_2, 1: 2.0000}, attrs={'operation': 'MULTIPLY'}) - - multiply_19 = nw.new_node(Nodes.MapRange, + multiply_19 = nw.new_node( + Nodes.MapRange, input_kwargs={0: multiply_18, 1: 0.9156, 2: 1.0000, 3: 0.0000, 4: 0.5}, - attrs={'clamp': True} + attrs={"clamp": True}, + ) + + subtract_4 = nw.new_node( + Nodes.Math, + input_kwargs={0: add_6, 1: multiply_19}, + attrs={"operation": "SUBTRACT"}, + ) + + subtract_5 = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract_4, 1: 0.0000}, + attrs={"operation": "SUBTRACT"}, ) - subtract_4 = nw.new_node(Nodes.Math, input_kwargs={0: add_6, 1: multiply_19}, attrs={'operation': 'SUBTRACT'}) - - subtract_5 = nw.new_node(Nodes.Math, input_kwargs={0: subtract_4, 1: 0.0000}, attrs={'operation': 'SUBTRACT'}) - normal = nw.new_node(Nodes.InputNormal) - - multiply_20 = nw.new_node(Nodes.VectorMath, input_kwargs={0: subtract_5, 1: normal}, attrs={'operation': 'MULTIPLY'}) - - multiply_21 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: multiply_20.outputs["Vector"], 1: group_input.outputs["Offset"]}, - attrs={'operation': 'MULTIPLY'}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': group_input.outputs["Mesh"], 'Offset': multiply_21.outputs["Vector"]}) - - capture_attribute_1 = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': set_position, 1: multiply_add_1}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - capture_attribute_4 = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': capture_attribute_1.outputs["Geometry"], 1: multiply_19}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - separate_xyz_7 = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': capture_attribute_1.outputs["Attribute"]}) - - attribute_statistic = nw.new_node(Nodes.AttributeStatistic, - input_kwargs={'Geometry': capture_attribute_1.outputs["Geometry"], 2: separate_xyz_7.outputs["X"]}) - - subtract_8 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_7.outputs["X"], 1: attribute_statistic.outputs["Min"]}, - attrs={'operation': 'SUBTRACT'}) - - divide = nw.new_node(Nodes.Math, + + multiply_20 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: subtract_5, 1: 
normal}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_21 = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: multiply_20.outputs["Vector"], + 1: group_input.outputs["Offset"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": group_input.outputs["Mesh"], + "Offset": multiply_21.outputs["Vector"], + }, + ) + + capture_attribute_1 = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={"Geometry": set_position, 1: multiply_add_1}, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + capture_attribute_4 = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={ + "Geometry": capture_attribute_1.outputs["Geometry"], + 1: multiply_19, + }, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + separate_xyz_7 = nw.new_node( + Nodes.SeparateXYZ, + input_kwargs={"Vector": capture_attribute_1.outputs["Attribute"]}, + ) + + attribute_statistic = nw.new_node( + Nodes.AttributeStatistic, + input_kwargs={ + "Geometry": capture_attribute_1.outputs["Geometry"], + 2: separate_xyz_7.outputs["X"], + }, + ) + + subtract_8 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: separate_xyz_7.outputs["X"], + 1: attribute_statistic.outputs["Min"], + }, + attrs={"operation": "SUBTRACT"}, + ) + + divide = nw.new_node( + Nodes.Math, input_kwargs={0: subtract_8, 1: attribute_statistic.outputs["Range"]}, - attrs={'operation': 'DIVIDE'}) - - attribute_statistic_1 = nw.new_node(Nodes.AttributeStatistic, - input_kwargs={'Geometry': capture_attribute_1.outputs["Geometry"], 2: separate_xyz_7.outputs["Y"]}) - - subtract_9 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_7.outputs["Y"], 1: attribute_statistic_1.outputs["Min"]}, - attrs={'operation': 'SUBTRACT'}) - - divide_1 = nw.new_node(Nodes.Math, + attrs={"operation": "DIVIDE"}, + ) + + attribute_statistic_1 = nw.new_node( + Nodes.AttributeStatistic, + input_kwargs={ + "Geometry": capture_attribute_1.outputs["Geometry"], + 2: separate_xyz_7.outputs["Y"], + }, + ) + + subtract_9 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: separate_xyz_7.outputs["Y"], + 1: attribute_statistic_1.outputs["Min"], + }, + attrs={"operation": "SUBTRACT"}, + ) + + divide_1 = nw.new_node( + Nodes.Math, input_kwargs={0: subtract_9, 1: attribute_statistic_1.outputs["Range"]}, - attrs={'operation': 'DIVIDE'}) - - combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': divide, 'Y': divide_1}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': capture_attribute_4.outputs["Geometry"], - 'attr2': combine_xyz_3, - 'attr5': capture_attribute_4.outputs["Attribute"]}) + attrs={"operation": "DIVIDE"}, + ) + + combine_xyz_3 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": divide, "Y": divide_1} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": capture_attribute_4.outputs["Geometry"], + "attr2": combine_xyz_3, + "attr5": capture_attribute_4.outputs["Attribute"], + }, + ) + def shader_fish_body_regular(nw: NodeWrangler, rand=True, **input_kwargs): # Code generated using version 2.4.3 of the node_transpiler texture_coordinate_1 = nw.new_node(Nodes.TextureCoord) - - mapping_1 = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': texture_coordinate_1.outputs["Generated"]}) - - noise_texture_6 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': mapping_1, 'W': 0.8, 'Scale': 50.0}, - attrs={'noise_dimensions': '4D'}) + + mapping_1 = nw.new_node( + Nodes.Mapping, + input_kwargs={"Vector": texture_coordinate_1.outputs["Generated"]}, + ) + + 
noise_texture_6 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={"Vector": mapping_1, "W": 0.8, "Scale": 50.0}, + attrs={"noise_dimensions": "4D"}, + ) if rand: - noise_texture_6.inputs['W'].default_value = sample_range(-2, 2) + noise_texture_6.inputs["W"].default_value = sample_range(-2, 2) - colorramp_15 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': noise_texture_6.outputs["Fac"]}) + colorramp_15 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": noise_texture_6.outputs["Fac"]} + ) colorramp_15.color_ramp.elements[0].position = 0.3523 colorramp_15.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) colorramp_15.color_ramp.elements[1].position = 0.3727 colorramp_15.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - - attribute_3 = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'offset2'}) - - greater_than = nw.new_node(Nodes.Math, + + attribute_3 = nw.new_node(Nodes.Attribute, attrs={"attribute_name": "offset2"}) + + greater_than = nw.new_node( + Nodes.Math, input_kwargs={0: attribute_3.outputs["Vector"], 1: 0.01}, - attrs={'operation': 'GREATER_THAN'}) - + attrs={"operation": "GREATER_THAN"}, + ) + texture_coordinate_5 = nw.new_node(Nodes.TextureCoord) - - separate_xyz_2 = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': texture_coordinate_5.outputs["Normal"]}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_2.outputs["Z"], 1: 0.5}) + + separate_xyz_2 = nw.new_node( + Nodes.SeparateXYZ, + input_kwargs={"Vector": texture_coordinate_5.outputs["Normal"]}, + ) + + add = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz_2.outputs["Z"], 1: 0.5}) if rand: add.inputs[1].default_value = sample_range(0.45, 0.6) - - colorramp_14 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': add}) + + colorramp_14 = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": add}) colorramp_14.color_ramp.elements[0].position = 0.0 colorramp_14.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) colorramp_14.color_ramp.elements[1].position = 0.2341 colorramp_14.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - - attribute_5 = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'Color variations'}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': attribute_5.outputs["Vector"]}) - - multiply = nw.new_node(Nodes.Math, + + attribute_5 = nw.new_node( + Nodes.Attribute, attrs={"attribute_name": "Color variations"} + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": attribute_5.outputs["Vector"]} + ) + + multiply = nw.new_node( + Nodes.Math, input_kwargs={0: separate_xyz.outputs["Y"]}, - attrs={'operation': 'MULTIPLY'}) - - subtract = nw.new_node(Nodes.Math, + attrs={"operation": "MULTIPLY"}, + ) + + subtract = nw.new_node( + Nodes.Math, input_kwargs={0: separate_xyz.outputs["X"], 1: multiply}, - attrs={'operation': 'SUBTRACT'}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': subtract, 1: -0.2}) - - colorramp_12 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': map_range.outputs["Result"]}) + attrs={"operation": "SUBTRACT"}, + ) + + map_range = nw.new_node(Nodes.MapRange, input_kwargs={"Value": subtract, 1: -0.2}) + + colorramp_12 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": map_range.outputs["Result"]} + ) colorramp_12.color_ramp.elements[0].position = 0.0 colorramp_12.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) colorramp_12.color_ramp.elements[1].position = 0.2518 colorramp_12.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - + texture_coordinate_3 = 
nw.new_node(Nodes.TextureCoord) - - separate_xyz_1 = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': texture_coordinate_3.outputs["Generated"]}) - - invert_1 = nw.new_node(Nodes.Invert, - input_kwargs={'Color': separate_xyz_1.outputs["Z"]}) - - subtract_1 = nw.new_node(Nodes.Math, + + separate_xyz_1 = nw.new_node( + Nodes.SeparateXYZ, + input_kwargs={"Vector": texture_coordinate_3.outputs["Generated"]}, + ) + + invert_1 = nw.new_node( + Nodes.Invert, input_kwargs={"Color": separate_xyz_1.outputs["Z"]} + ) + + subtract_1 = nw.new_node( + Nodes.Math, input_kwargs={0: separate_xyz_1.outputs["X"], 1: 0.57}, - attrs={'operation': 'SUBTRACT'}) - - absolute = nw.new_node(Nodes.Math, - input_kwargs={0: subtract_1}, - attrs={'operation': 'ABSOLUTE'}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: absolute, 1: 0.4}, - attrs={'operation': 'MULTIPLY'}) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': multiply_1}) - - subtract_2 = nw.new_node(Nodes.Math, + attrs={"operation": "SUBTRACT"}, + ) + + absolute = nw.new_node( + Nodes.Math, input_kwargs={0: subtract_1}, attrs={"operation": "ABSOLUTE"} + ) + + multiply_1 = nw.new_node( + Nodes.Math, input_kwargs={0: absolute, 1: 0.4}, attrs={"operation": "MULTIPLY"} + ) + + map_range_1 = nw.new_node(Nodes.MapRange, input_kwargs={"Value": multiply_1}) + + subtract_2 = nw.new_node( + Nodes.Math, input_kwargs={0: invert_1, 1: map_range_1.outputs["Result"]}, - attrs={'operation': 'SUBTRACT'}) - - add_1 = nw.new_node(Nodes.Math, - input_kwargs={0: subtract_2, 1: 0.1}) - - colorramp_13 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': map_range.outputs["Result"]}) + attrs={"operation": "SUBTRACT"}, + ) + + add_1 = nw.new_node(Nodes.Math, input_kwargs={0: subtract_2, 1: 0.1}) + + colorramp_13 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": map_range.outputs["Result"]} + ) colorramp_13.color_ramp.elements[0].position = 0.0 colorramp_13.color_ramp.elements[0].color = (1.0, 1.0, 1.0, 1.0) colorramp_13.color_ramp.elements[1].position = 0.6727 colorramp_13.color_ramp.elements[1].color = (0.0685, 0.0685, 0.0685, 1.0) - + texture_coordinate_2 = nw.new_node(Nodes.TextureCoord) - - noise_texture_9 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': texture_coordinate_2.outputs["Generated"]}) - - mix_7 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': 0.9042, 'Color1': noise_texture_9.outputs["Color"], 'Color2': texture_coordinate_2.outputs["Generated"]}) - - wave_texture = nw.new_node(Nodes.WaveTexture, - input_kwargs={'Vector': mix_7, 'Scale': 2.5, 'Distortion': 1.3, 'Detail': 0.0, 'Detail Roughness': 0.0, 'Phase Offset': 0.2}) + + noise_texture_9 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={"Vector": texture_coordinate_2.outputs["Generated"]}, + ) + + mix_7 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": 0.9042, + "Color1": noise_texture_9.outputs["Color"], + "Color2": texture_coordinate_2.outputs["Generated"], + }, + ) + + wave_texture = nw.new_node( + Nodes.WaveTexture, + input_kwargs={ + "Vector": mix_7, + "Scale": 2.5, + "Distortion": 1.3, + "Detail": 0.0, + "Detail Roughness": 0.0, + "Phase Offset": 0.2, + }, + ) if rand: - wave_texture.inputs['Scale'].default_value = sample_ratio(2, 0.5, 2) - wave_texture.inputs['Phase Offset'].default_value = sample_range(0, 10) - wave_texture.inputs['Distortion'].default_value = sample_range(0, 3) + wave_texture.inputs["Scale"].default_value = sample_ratio(2, 0.5, 2) + wave_texture.inputs["Phase Offset"].default_value = sample_range(0, 10) + 
wave_texture.inputs["Distortion"].default_value = sample_range(0, 3) - colorramp_8 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': wave_texture.outputs["Color"]}) + colorramp_8 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": wave_texture.outputs["Color"]} + ) colorramp_8.color_ramp.elements[0].position = 0.0795 colorramp_8.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) colorramp_8.color_ramp.elements[1].position = 1.0 @@ -488,27 +766,33 @@ def shader_fish_body_regular(nw: NodeWrangler, rand=True, **input_kwargs): if rand: colorramp_8.color_ramp.elements[0].position = sample_range(0, 0.2) - add_2 = nw.new_node(Nodes.Math, - input_kwargs={0: colorramp_8.outputs["Color"], 1: -0.5}) - - divide = nw.new_node(Nodes.Math, - input_kwargs={0: add_2, 1: 2.0}, - attrs={'operation': 'DIVIDE'}) - - invert = nw.new_node(Nodes.Invert, - input_kwargs={'Color': separate_xyz_1.outputs["Z"]}) + add_2 = nw.new_node( + Nodes.Math, input_kwargs={0: colorramp_8.outputs["Color"], 1: -0.5} + ) + + divide = nw.new_node( + Nodes.Math, input_kwargs={0: add_2, 1: 2.0}, attrs={"operation": "DIVIDE"} + ) + + invert = nw.new_node( + Nodes.Invert, input_kwargs={"Color": separate_xyz_1.outputs["Z"]} + ) if rand: - invert.inputs['Fac'].default_value = sample_range(0.5, 1) - - add_3 = nw.new_node(Nodes.Math, - input_kwargs={0: divide, 1: invert}) - - mix_10 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': 1.0, 'Color1': colorramp_13.outputs["Color"], 'Color2': add_3}, - attrs={'blend_type': 'MULTIPLY'}) - - colorramp_9 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': mix_10}) + invert.inputs["Fac"].default_value = sample_range(0.5, 1) + + add_3 = nw.new_node(Nodes.Math, input_kwargs={0: divide, 1: invert}) + + mix_10 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": 1.0, + "Color1": colorramp_13.outputs["Color"], + "Color2": add_3, + }, + attrs={"blend_type": "MULTIPLY"}, + ) + + colorramp_9 = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": mix_10}) colorramp_9.color_ramp.elements.new(0) colorramp_9.color_ramp.elements[0].position = 0.0 colorramp_9.color_ramp.elements[0].color = (0.0179, 0.0119, 0.0, 1.0) @@ -520,112 +804,189 @@ def shader_fish_body_regular(nw: NodeWrangler, rand=True, **input_kwargs): sample_color(colorramp_9.color_ramp.elements[1].color) colorramp_9.color_ramp.elements[1].position = sample_range(0.1, 0.9) - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Scale': 3.0}) - - colorramp_3 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': noise_texture.outputs["Fac"]}) + noise_texture = nw.new_node(Nodes.NoiseTexture, input_kwargs={"Scale": 3.0}) + + colorramp_3 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": noise_texture.outputs["Fac"]} + ) colorramp_3.color_ramp.elements[0].position = 0.2614 colorramp_3.color_ramp.elements[0].color = (0.0059, 0.0028, 0.0002, 1.0) colorramp_3.color_ramp.elements[1].position = 0.5795 colorramp_3.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - - mix_5 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': 1.0, 'Color1': colorramp_9.outputs["Color"], 'Color2': colorramp_3.outputs["Color"]}, - attrs={'blend_type': 'MULTIPLY'}) - - mix_9 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': add_1, 'Color1': (0.021, 0.0158, 0.0026, 1.0), 'Color2': mix_5}) - - mix_8 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': colorramp_12.outputs["Color"], 'Color1': (1.0, 1.0, 1.0, 1.0), 'Color2': mix_9}) - - colorramp_4 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': noise_texture_6.outputs["Fac"]}) + + mix_5 = nw.new_node( + 
Nodes.MixRGB, + input_kwargs={ + "Fac": 1.0, + "Color1": colorramp_9.outputs["Color"], + "Color2": colorramp_3.outputs["Color"], + }, + attrs={"blend_type": "MULTIPLY"}, + ) + + mix_9 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": add_1, + "Color1": (0.021, 0.0158, 0.0026, 1.0), + "Color2": mix_5, + }, + ) + + mix_8 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": colorramp_12.outputs["Color"], + "Color1": (1.0, 1.0, 1.0, 1.0), + "Color2": mix_9, + }, + ) + + colorramp_4 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": noise_texture_6.outputs["Fac"]} + ) colorramp_4.color_ramp.elements[0].position = 0.2455 colorramp_4.color_ramp.elements[0].color = (0.0642, 0.0339, 0.006, 1.0) colorramp_4.color_ramp.elements[1].position = 0.4886 colorramp_4.color_ramp.elements[1].color = (0.1224, 0.3306, 0.261, 1.0) - - mix_6 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': 0.0, 'Color1': mix_8, 'Color2': colorramp_4.outputs["Color"]}, - attrs={'blend_type': 'ADD'}) - - mix_11 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': colorramp_14.outputs["Color"], 'Color1': (0.4072, 0.4072, 0.4072, 1.0), 'Color2': mix_5}) - - colorramp_7 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': greater_than}) + + mix_6 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": 0.0, + "Color1": mix_8, + "Color2": colorramp_4.outputs["Color"], + }, + attrs={"blend_type": "ADD"}, + ) + + mix_11 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": colorramp_14.outputs["Color"], + "Color1": (0.4072, 0.4072, 0.4072, 1.0), + "Color2": mix_5, + }, + ) + + colorramp_7 = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": greater_than}) colorramp_7.color_ramp.elements[0].position = 0.0 colorramp_7.color_ramp.elements[0].color = (1.0, 1.0, 1.0, 1.0) colorramp_7.color_ramp.elements[1].position = 0.7682 colorramp_7.color_ramp.elements[1].color = (0.0228, 0.0165, 0.0, 1.0) - - mix_4 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': greater_than, 'Color1': mix_11, 'Color2': colorramp_7.outputs["Color"]}) - - mix_12 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': colorramp_15.outputs["Color"], 'Color1': (0.0119, 0.0078, 0.0086, 1.0), 'Color2': mix_4}) + + mix_4 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": greater_than, + "Color1": mix_11, + "Color2": colorramp_7.outputs["Color"], + }, + ) + + mix_12 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": colorramp_15.outputs["Color"], + "Color1": (0.0119, 0.0078, 0.0086, 1.0), + "Color2": mix_4, + }, + ) if rand: sample_color(mix_12.inputs[6].default_value, keep_sum=True) - principled_bsdf_1 = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': mix_12, 'Subsurface Radius': (0.36, 0.46, 0.6), 'Subsurface Color': (1.0, 0.9405, 0.7747, 1.0), 'Metallic': 0.8, 'Specular': .9, 'Roughness': 0.3, 'IOR': 1.69}, - attrs={'subsurface_method': 'BURLEY'}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': principled_bsdf_1}) + principled_bsdf_1 = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": mix_12, + "Subsurface Radius": (0.36, 0.46, 0.6), + "Subsurface Color": (1.0, 0.9405, 0.7747, 1.0), + "Metallic": 0.8, + "Specular": 0.9, + "Roughness": 0.3, + "IOR": 1.69, + }, + attrs={"subsurface_method": "BURLEY"}, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf_1} + ) + def shader_fish_body_gold(nw: NodeWrangler, rand=True, **input_kwargs): # Code generated using version 2.4.3 of the node_transpiler - attribute = 
nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'Color variations'}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': attribute.outputs["Vector"]}) - - colorramp = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': separate_xyz.outputs["X"]}) + attribute = nw.new_node( + Nodes.Attribute, attrs={"attribute_name": "Color variations"} + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": attribute.outputs["Vector"]} + ) + + colorramp = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": separate_xyz.outputs["X"]} + ) colorramp.color_ramp.elements[0].position = 0.0 colorramp.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) colorramp.color_ramp.elements[1].position = 0.4209 colorramp.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - - attribute_1 = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'offset2'}) - - greater_than = nw.new_node(Nodes.Math, + + attribute_1 = nw.new_node(Nodes.Attribute, attrs={"attribute_name": "offset2"}) + + greater_than = nw.new_node( + Nodes.Math, input_kwargs={0: attribute_1.outputs["Vector"], 1: 0.01}, - attrs={'operation': 'GREATER_THAN'}) - - colorramp_1 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': separate_xyz.outputs["X"]}) + attrs={"operation": "GREATER_THAN"}, + ) + + colorramp_1 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": separate_xyz.outputs["X"]} + ) colorramp_1.color_ramp.elements[0].position = 0.0 colorramp_1.color_ramp.elements[0].color = (1.0, 1.0, 1.0, 1.0) colorramp_1.color_ramp.elements[1].position = 0.8659 colorramp_1.color_ramp.elements[1].color = (0.0685, 0.0685, 0.0685, 1.0) - + texture_coordinate = nw.new_node(Nodes.TextureCoord) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': texture_coordinate.outputs["Generated"], 'W': 1.1861}, - attrs={'noise_dimensions': '4D'}) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={"Vector": texture_coordinate.outputs["Generated"], "W": 1.1861}, + attrs={"noise_dimensions": "4D"}, + ) if rand: - noise_texture.inputs['W'].default_value = sample_range(-2, 2) + noise_texture.inputs["W"].default_value = sample_range(-2, 2) + + mix = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": 0.9042, + "Color1": noise_texture.outputs["Color"], + "Color2": texture_coordinate.outputs["Generated"], + }, + ) - mix = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': 0.9042, 'Color1': noise_texture.outputs["Color"], 'Color2': texture_coordinate.outputs["Generated"]}) - - wave_texture = nw.new_node(Nodes.WaveTexture, - input_kwargs={'Vector': mix, 'Scale': 1.0062, 'Distortion': 2.7139, 'Detail': 0.0, 'Detail Roughness': 0.0, 'Phase Offset': 8.0064}) + wave_texture = nw.new_node( + Nodes.WaveTexture, + input_kwargs={ + "Vector": mix, + "Scale": 1.0062, + "Distortion": 2.7139, + "Detail": 0.0, + "Detail Roughness": 0.0, + "Phase Offset": 8.0064, + }, + ) if rand: - wave_texture.inputs['Scale'].default_value = sample_ratio(2, 0.5, 2) - wave_texture.inputs['Phase Offset'].default_value = sample_range(0, 10) - wave_texture.inputs['Distortion'].default_value = sample_range(0, 3) + wave_texture.inputs["Scale"].default_value = sample_ratio(2, 0.5, 2) + wave_texture.inputs["Phase Offset"].default_value = sample_range(0, 10) + wave_texture.inputs["Distortion"].default_value = sample_range(0, 3) - colorramp_2 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': wave_texture.outputs["Color"]}) + colorramp_2 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": 
wave_texture.outputs["Color"]} + ) colorramp_2.color_ramp.elements[0].position = 0.1254 colorramp_2.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) colorramp_2.color_ramp.elements[1].position = 1.0 @@ -633,32 +994,40 @@ def shader_fish_body_gold(nw: NodeWrangler, rand=True, **input_kwargs): if rand: colorramp_2.color_ramp.elements[0].position = sample_range(0, 0.2) - add = nw.new_node(Nodes.Math, - input_kwargs={0: colorramp_2.outputs["Color"], 1: -0.0}) - - divide = nw.new_node(Nodes.Math, - input_kwargs={0: add, 1: 2.0}, - attrs={'operation': 'DIVIDE'}) - + add = nw.new_node( + Nodes.Math, input_kwargs={0: colorramp_2.outputs["Color"], 1: -0.0} + ) + + divide = nw.new_node( + Nodes.Math, input_kwargs={0: add, 1: 2.0}, attrs={"operation": "DIVIDE"} + ) + texture_coordinate_1 = nw.new_node(Nodes.TextureCoord) - - separate_xyz_1 = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': texture_coordinate_1.outputs["Generated"]}) - - invert = nw.new_node(Nodes.Invert, - input_kwargs={'Fac': 0.5593, 'Color': separate_xyz_1.outputs["Z"]}) + + separate_xyz_1 = nw.new_node( + Nodes.SeparateXYZ, + input_kwargs={"Vector": texture_coordinate_1.outputs["Generated"]}, + ) + + invert = nw.new_node( + Nodes.Invert, input_kwargs={"Fac": 0.5593, "Color": separate_xyz_1.outputs["Z"]} + ) if rand: - invert.inputs['Fac'].default_value = sample_range(0.5, 1) - - add_1 = nw.new_node(Nodes.Math, - input_kwargs={0: divide, 1: invert}) - - mix_1 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': 1.0, 'Color1': colorramp_1.outputs["Color"], 'Color2': add_1}, - attrs={'blend_type': 'MULTIPLY'}) - - colorramp_3 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': mix_1}) + invert.inputs["Fac"].default_value = sample_range(0.5, 1) + + add_1 = nw.new_node(Nodes.Math, input_kwargs={0: divide, 1: invert}) + + mix_1 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": 1.0, + "Color1": colorramp_1.outputs["Color"], + "Color2": add_1, + }, + attrs={"blend_type": "MULTIPLY"}, + ) + + colorramp_3 = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": mix_1}) colorramp_3.color_ramp.elements.new(0) colorramp_3.color_ramp.elements[0].position = 0.0091 colorramp_3.color_ramp.elements[0].color = (0.9254, 0.0164, 0.0, 1.0) @@ -671,125 +1040,202 @@ def shader_fish_body_gold(nw: NodeWrangler, rand=True, **input_kwargs): sample_color(e.color, offset=0.05) colorramp_3.color_ramp.elements[1].position = sample_range(0.2, 0.8) - colorramp_4 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': greater_than}) + colorramp_4 = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": greater_than}) colorramp_4.color_ramp.elements[0].position = 0.0 colorramp_4.color_ramp.elements[0].color = (1.0, 1.0, 1.0, 1.0) colorramp_4.color_ramp.elements[1].position = 0.7682 colorramp_4.color_ramp.elements[1].color = (0.0228, 0.0165, 0.0, 1.0) - - colorramp_5 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': colorramp_4.outputs["Color"]}) + + colorramp_5 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": colorramp_4.outputs["Color"]} + ) colorramp_5.color_ramp.elements[0].position = 0.0 colorramp_5.color_ramp.elements[0].color = (1.0, 0.0817, 0.0, 1.0) colorramp_5.color_ramp.elements[1].position = 1.0 colorramp_5.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - - mix_2 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': greater_than, 'Color1': colorramp_3.outputs["Color"], 'Color2': colorramp_5.outputs["Color"]}) - - noise_texture_1 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Scale': 20.0}) - - colorramp_6 = 
nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': noise_texture_1.outputs["Fac"]}) + + mix_2 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": greater_than, + "Color1": colorramp_3.outputs["Color"], + "Color2": colorramp_5.outputs["Color"], + }, + ) + + noise_texture_1 = nw.new_node(Nodes.NoiseTexture, input_kwargs={"Scale": 20.0}) + + colorramp_6 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": noise_texture_1.outputs["Fac"]} + ) colorramp_6.color_ramp.elements[0].position = 0.2614 colorramp_6.color_ramp.elements[0].color = (1.0, 0.2402, 0.0026, 1.0) colorramp_6.color_ramp.elements[1].position = 0.5795 colorramp_6.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - - mix_3 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': 0.75, 'Color1': mix_2, 'Color2': colorramp_6.outputs["Color"]}, - attrs={'blend_type': 'MULTIPLY'}) - - mix_4 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': colorramp.outputs["Color"], 'Color1': (1.0, 1.0, 1.0, 1.0), 'Color2': mix_3}) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': mix_4, 'Subsurface Radius': (0.36, 0.46, 0.6), 'Subsurface Color': (1.0, 0.9405, 0.7747, 1.0), 'Metallic': 0.5, 'Specular': 0.5273, 'Roughness': 0.1, 'IOR': 1.69}, - attrs={'subsurface_method': 'BURLEY'}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': principled_bsdf}) + + mix_3 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": 0.75, + "Color1": mix_2, + "Color2": colorramp_6.outputs["Color"], + }, + attrs={"blend_type": "MULTIPLY"}, + ) + + mix_4 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": colorramp.outputs["Color"], + "Color1": (1.0, 1.0, 1.0, 1.0), + "Color2": mix_3, + }, + ) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": mix_4, + "Subsurface Radius": (0.36, 0.46, 0.6), + "Subsurface Color": (1.0, 0.9405, 0.7747, 1.0), + "Metallic": 0.5, + "Specular": 0.5273, + "Roughness": 0.1, + "IOR": 1.69, + }, + attrs={"subsurface_method": "BURLEY"}, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf} + ) + def geometry_fish_body(nw: NodeWrangler, rand=True, **input_kwargs): # Code generated using version 2.4.3 of the node_transpiler group_input = nw.new_node(Nodes.GroupInput) - UV = nw.new_node(nodegroup_UV().name, - input_kwargs={'Vector': nw.expose_input('UVMap', attribute='UVMap', dtype='NodeSocketVector')}) + UV = nw.new_node( + nodegroup_UV().name, + input_kwargs={ + "Vector": nw.expose_input( + "UVMap", attribute="UVMap", dtype="NodeSocketVector" + ) + }, + ) - scales = nw.new_node(nodegroup_scales().name, - input_kwargs={'Mesh': group_input, 'Vector': UV, 'Scale': 6, 'Xscale': 0.3, 'Yscale': 12.0, 'Offset': 0.002}) + scales = nw.new_node( + nodegroup_scales().name, + input_kwargs={ + "Mesh": group_input, + "Vector": UV, + "Scale": 6, + "Xscale": 0.3, + "Yscale": 12.0, + "Offset": 0.002, + }, + ) if rand: - scales.inputs['Scale'].default_value = sample_ratio(6, 2/3, 3/2) - scales.inputs['Xscale'].default_value = sample_range(0.2, 0.3) - scales.inputs['Yscale'].default_value = sample_range(8, 12) - scales.inputs['Xnoise'].default_value = sample_range(0.1, 0.3) - scales.inputs['Ynoise'].default_value = sample_range(0.1, 0.3) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Scale': 50.0}, - attrs={'noise_dimensions': '4D'}) + scales.inputs["Scale"].default_value = sample_ratio(6, 2 / 3, 3 / 2) + scales.inputs["Xscale"].default_value = sample_range(0.2, 0.3) + 
scales.inputs["Yscale"].default_value = sample_range(8, 12) + scales.inputs["Xnoise"].default_value = sample_range(0.1, 0.3) + scales.inputs["Ynoise"].default_value = sample_range(0.1, 0.3) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={"Scale": 50.0}, + attrs={"noise_dimensions": "4D"}, + ) if rand: - noise_texture.inputs['W'].default_value = sample_range(-2, 2) + noise_texture.inputs["W"].default_value = sample_range(-2, 2) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: noise_texture.outputs["Fac"], 1: -0.5} + ) - add = nw.new_node(Nodes.Math, - input_kwargs={0: noise_texture.outputs["Fac"], 1: -0.5}) - normal = nw.new_node(Nodes.InputNormal) - - multiply = nw.new_node(Nodes.VectorMath, + + multiply = nw.new_node( + Nodes.VectorMath, input_kwargs={0: add, 1: normal}, - attrs={'operation': 'MULTIPLY'}) - + attrs={"operation": "MULTIPLY"}, + ) + value = nw.new_node(Nodes.Value) value.outputs[0].default_value = 0.002 - - multiply_1 = nw.new_node(Nodes.VectorMath, + + multiply_1 = nw.new_node( + Nodes.VectorMath, input_kwargs={0: multiply.outputs["Vector"], 1: value}, - attrs={'operation': 'MULTIPLY'}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': scales.outputs["Geometry"], 'Offset': multiply_1.outputs["Vector"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_position, - 'attr2': scales.outputs['attr2'], - 'attr5': scales.outputs['attr5']}) - -@node_utils.to_nodegroup('nodegroup_gradient_color', singleton=False, type='ShaderNodeTree') + attrs={"operation": "MULTIPLY"}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": scales.outputs["Geometry"], + "Offset": multiply_1.outputs["Vector"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": set_position, + "attr2": scales.outputs["attr2"], + "attr5": scales.outputs["attr5"], + }, + ) + + +@node_utils.to_nodegroup( + "nodegroup_gradient_color", singleton=False, type="ShaderNodeTree" +) def nodegroup_gradient_color(nw: NodeWrangler, **input_args): # Code generated using version 2.4.3 of the node_transpiler - attribute_5 = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'Color variations'}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': attribute_5.outputs["Vector"]}) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Value', 0.5), - #('NodeSocketColor', 'Color1', (1.0, 1.0, 1.0, 1.0)), - #('NodeSocketColor', 'Color2', (0.5268, 0.6724, 0.5186, 1.0)), - #('NodeSocketColor', 'Color3', (0.8055, 0.6284, 0.2728, 1.0)), - #('NodeSocketColor', 'Color4', (0.838, 0.5269, 0.0338, 1.0)), - #('NodeSocketColor', 'Color5', (0.0397, 0.0175, 0.0028, 1.0)), - ('NodeSocketFloat', 'Value1', -0.2)]) - - multiply = nw.new_node(Nodes.Math, + attribute_5 = nw.new_node( + Nodes.Attribute, attrs={"attribute_name": "Color variations"} + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": attribute_5.outputs["Vector"]} + ) + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "Value", 0.5), + # ('NodeSocketColor', 'Color1', (1.0, 1.0, 1.0, 1.0)), + # ('NodeSocketColor', 'Color2', (0.5268, 0.6724, 0.5186, 1.0)), + # ('NodeSocketColor', 'Color3', (0.8055, 0.6284, 0.2728, 1.0)), + # ('NodeSocketColor', 'Color4', (0.838, 0.5269, 0.0338, 1.0)), + # ('NodeSocketColor', 'Color5', (0.0397, 0.0175, 0.0028, 1.0)), + ("NodeSocketFloat", "Value1", -0.2), + ], + ) + + multiply = 
nw.new_node( + Nodes.Math, input_kwargs={0: separate_xyz.outputs["Y"], 1: group_input.outputs["Value"]}, - attrs={'operation': 'MULTIPLY'}) - - subtract = nw.new_node(Nodes.Math, + attrs={"operation": "MULTIPLY"}, + ) + + subtract = nw.new_node( + Nodes.Math, input_kwargs={0: separate_xyz.outputs["X"], 1: multiply}, - attrs={'operation': 'SUBTRACT'}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': subtract, 1: group_input.outputs["Value1"]}) + attrs={"operation": "SUBTRACT"}, + ) - colorramp_1 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': map_range.outputs["Result"]}) + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": subtract, 1: group_input.outputs["Value1"]}, + ) + + colorramp_1 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": map_range.outputs["Result"]} + ) colorramp_1.color_ramp.elements.new(0) colorramp_1.color_ramp.elements.new(0) colorramp_1.color_ramp.elements.new(0) @@ -803,221 +1249,338 @@ def nodegroup_gradient_color(nw: NodeWrangler, **input_args): colorramp_1.color_ramp.elements[3].color = input_args["Color4"] colorramp_1.color_ramp.elements[4].position = 0.7568 colorramp_1.color_ramp.elements[4].color = input_args["Color5"] - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Color': colorramp_1.outputs["Color"]}) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Color": colorramp_1.outputs["Color"]} + ) -@node_utils.to_nodegroup('nodegroup_noise_color', singleton=False, type='ShaderNodeTree') +@node_utils.to_nodegroup( + "nodegroup_noise_color", singleton=False, type="ShaderNodeTree" +) def nodegroup_noise_color(nw: NodeWrangler): # Code generated using version 2.4.3 of the node_transpiler texture_coordinate_3 = nw.new_node(Nodes.TextureCoord) - - mapping_3 = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': texture_coordinate_3.outputs["Generated"]}) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Scale', 10.0), - ('NodeSocketColor', 'Color1', (0.7379, 0.2623, 0.0648, 1.0)), - ('NodeSocketColor', 'Color2', (0.5029, 0.4287, 0.1079, 1.0))]) - - noise_texture_8 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': mapping_3, 'W': U(-5, 5), 'Scale': group_input.outputs["Scale"]}, - attrs={'noise_dimensions': '4D'}) - - colorramp_9 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': noise_texture_8.outputs["Fac"]}) + + mapping_3 = nw.new_node( + Nodes.Mapping, + input_kwargs={"Vector": texture_coordinate_3.outputs["Generated"]}, + ) + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "Scale", 10.0), + ("NodeSocketColor", "Color1", (0.7379, 0.2623, 0.0648, 1.0)), + ("NodeSocketColor", "Color2", (0.5029, 0.4287, 0.1079, 1.0)), + ], + ) + + noise_texture_8 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": mapping_3, + "W": U(-5, 5), + "Scale": group_input.outputs["Scale"], + }, + attrs={"noise_dimensions": "4D"}, + ) + + colorramp_9 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": noise_texture_8.outputs["Fac"]} + ) colorramp_9.color_ramp.elements[0].position = U(0, 0.5) colorramp_9.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) colorramp_9.color_ramp.elements[1].position = 1.0 colorramp_9.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - - mix_12 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': colorramp_9.outputs["Color"], 'Color1': group_input.outputs["Color1"], 'Color2': group_input.outputs["Color2"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - 
input_kwargs={"Color": mix_12}) + + mix_12 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": colorramp_9.outputs["Color"], + "Color1": group_input.outputs["Color1"], + "Color2": group_input.outputs["Color2"], + }, + ) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Color": mix_12}) def shader_stripe_fish(nw: NodeWrangler, rand=True, **input_kwargs): # Code generated using version 2.4.3 of the node_transpiler - attribute_6 = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'offset2'}) - - colorramp_11 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': attribute_6.outputs["Color"]}) + attribute_6 = nw.new_node(Nodes.Attribute, attrs={"attribute_name": "offset2"}) + + colorramp_11 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": attribute_6.outputs["Color"]} + ) colorramp_11.color_ramp.elements[0].position = 0.0 colorramp_11.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) colorramp_11.color_ramp.elements[1].position = 0.2909 colorramp_11.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - + texture_coordinate_2 = nw.new_node(Nodes.TextureCoord) - - mapping_4 = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': texture_coordinate_2.outputs["Generated"]}) - - noise_texture_9 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': mapping_4, 'Scale': N(10, 2)}) - - colorramp_10 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': noise_texture_9.outputs["Fac"]}) + + mapping_4 = nw.new_node( + Nodes.Mapping, + input_kwargs={"Vector": texture_coordinate_2.outputs["Generated"]}, + ) + + noise_texture_9 = nw.new_node( + Nodes.NoiseTexture, input_kwargs={"Vector": mapping_4, "Scale": N(10, 2)} + ) + + colorramp_10 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": noise_texture_9.outputs["Fac"]} + ) colorramp_10.color_ramp.elements[0].position = 0.3773 colorramp_10.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) colorramp_10.color_ramp.elements[1].position = 1.0 x = U(0.1, 0.9) colorramp_10.color_ramp.elements[1].color = (x, x, x, 1.0) - - mapping_2 = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': texture_coordinate_2.outputs["Generated"]}) - - separate_xyz_1 = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': mapping_2}) - - subtract = nw.new_node(Nodes.Math, + + mapping_2 = nw.new_node( + Nodes.Mapping, + input_kwargs={"Vector": texture_coordinate_2.outputs["Generated"]}, + ) + + separate_xyz_1 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": mapping_2}) + + subtract = nw.new_node( + Nodes.Math, input_kwargs={0: separate_xyz_1.outputs["Z"]}, - attrs={'operation': 'SUBTRACT'}) - - power = nw.new_node(Nodes.Math, - input_kwargs={0: subtract, 1: 4.0 if U()<0.7 else 3.0}, - attrs={'operation': 'POWER'}) - - multiply = nw.new_node(Nodes.Math, + attrs={"operation": "SUBTRACT"}, + ) + + power = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract, 1: 4.0 if U() < 0.7 else 3.0}, + attrs={"operation": "POWER"}, + ) + + multiply = nw.new_node( + Nodes.Math, input_kwargs={0: power, 1: U(-2, 3)}, - attrs={'operation': 'MULTIPLY'}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_1.outputs["X"], 1: multiply}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': add, 'Y': separate_xyz_1.outputs["Y"], 'Z': separate_xyz_1.outputs["Z"]}) - - reroute = nw.new_node(Nodes.Reroute, - input_kwargs={'Input': combine_xyz}) - - scale = nw.new_node(Nodes.Value, - label='scale') + attrs={"operation": "MULTIPLY"}, + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: separate_xyz_1.outputs["X"], 1: 
multiply} + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": add, + "Y": separate_xyz_1.outputs["Y"], + "Z": separate_xyz_1.outputs["Z"], + }, + ) + + reroute = nw.new_node(Nodes.Reroute, input_kwargs={"Input": combine_xyz}) + + scale = nw.new_node(Nodes.Value, label="scale") scale.outputs[0].default_value = max(N(2, 0.6), 0.8) - - distortion = nw.new_node(Nodes.Value, - label='distortion') + + distortion = nw.new_node(Nodes.Value, label="distortion") distortion.outputs[0].default_value = N(5, 1) - - detail = nw.new_node(Nodes.Value, - label='detail') + + detail = nw.new_node(Nodes.Value, label="detail") detail.outputs[0].default_value = 15.0 - - detailscale = nw.new_node(Nodes.Value, - label='detailscale') + + detailscale = nw.new_node(Nodes.Value, label="detailscale") detailscale.outputs[0].default_value = U(0.1, 0.8) - - offset = nw.new_node(Nodes.Value, - label='offset') + + offset = nw.new_node(Nodes.Value, label="offset") offset.outputs[0].default_value = N(1.8, 0.2) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: offset, 1: -1.0}, - attrs={'operation': 'MULTIPLY'}) - - wave_texture_2 = nw.new_node(Nodes.WaveTexture, - input_kwargs={'Vector': reroute, 'Scale': scale, 'Distortion': distortion, 'Detail': detail, 'Detail Scale': detailscale, 'Phase Offset': multiply_1}) - + + multiply_1 = nw.new_node( + Nodes.Math, input_kwargs={0: offset, 1: -1.0}, attrs={"operation": "MULTIPLY"} + ) + + wave_texture_2 = nw.new_node( + Nodes.WaveTexture, + input_kwargs={ + "Vector": reroute, + "Scale": scale, + "Distortion": distortion, + "Detail": detail, + "Detail Scale": detailscale, + "Phase Offset": multiply_1, + }, + ) + no_side_stripe = U() > 0.7 side_stripe_width = U(0.02, 0.1) - colorramp_8 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': wave_texture_2.outputs["Color"]}) + colorramp_8 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": wave_texture_2.outputs["Color"]} + ) colorramp_8.color_ramp.elements[0].position = 0.0 - colorramp_8.color_ramp.elements[0].color = (no_side_stripe, no_side_stripe, no_side_stripe, 1.0) + colorramp_8.color_ramp.elements[0].color = ( + no_side_stripe, + no_side_stripe, + no_side_stripe, + 1.0, + ) colorramp_8.color_ramp.elements[1].position = side_stripe_width colorramp_8.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - - wave_texture_1 = nw.new_node(Nodes.WaveTexture, - input_kwargs={'Vector': reroute, 'Scale': scale, 'Distortion': distortion, 'Detail': detail, 'Detail Scale': detailscale, 'Phase Offset': offset}) - - colorramp_6 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': wave_texture_1.outputs["Color"]}) + + wave_texture_1 = nw.new_node( + Nodes.WaveTexture, + input_kwargs={ + "Vector": reroute, + "Scale": scale, + "Distortion": distortion, + "Detail": detail, + "Detail Scale": detailscale, + "Phase Offset": offset, + }, + ) + + colorramp_6 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": wave_texture_1.outputs["Color"]} + ) colorramp_6.color_ramp.elements[0].position = 0.0 - colorramp_6.color_ramp.elements[0].color = (no_side_stripe, no_side_stripe, no_side_stripe, 1.0) + colorramp_6.color_ramp.elements[0].color = ( + no_side_stripe, + no_side_stripe, + no_side_stripe, + 1.0, + ) colorramp_6.color_ramp.elements[1].position = side_stripe_width colorramp_6.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - - wave_texture = nw.new_node(Nodes.WaveTexture, - input_kwargs={'Vector': reroute, 'Scale': scale, 'Distortion': distortion, 'Detail': detail, 'Detail Scale': detailscale}) - - 
colorramp_5 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': wave_texture.outputs["Color"]}) + + wave_texture = nw.new_node( + Nodes.WaveTexture, + input_kwargs={ + "Vector": reroute, + "Scale": scale, + "Distortion": distortion, + "Detail": detail, + "Detail Scale": detailscale, + }, + ) + + colorramp_5 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": wave_texture.outputs["Color"]} + ) colorramp_5.color_ramp.elements[0].position = U(0.2, 0.8) colorramp_5.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) colorramp_5.color_ramp.elements[1].position = 0.9955 colorramp_5.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - + if U() < 0.5: - group_1 = nw.new_node(nodegroup_noise_color().name, - input_kwargs={"Scale": N(10, 2), "Color1":(U(), U(), U(), 1.0), "Color2": (U(), U(), U(), 1.0)}) + group_1 = nw.new_node( + nodegroup_noise_color().name, + input_kwargs={ + "Scale": N(10, 2), + "Color1": (U(), U(), U(), 1.0), + "Color2": (U(), U(), U(), 1.0), + }, + ) else: - group_1 = nw.new_node(nodegroup_gradient_color( + group_1 = nw.new_node( + nodegroup_gradient_color( Color1=(U(0.8, 1), U(0.8, 1), U(0.8, 1), 1.0), Color2=(U(), U(), U(), 1.0), Color3=(U(), U(), U(), 1.0), Color4=(U(), U(), U(), 1.0), - Color5=(U(0, 0.1), U(0, 0.1), U(0, 0.1), 1.0),).name, + Color5=(U(0, 0.1), U(0, 0.1), U(0, 0.1), 1.0), + ).name, input_kwargs={ "Value": N(0.5, 0.1), "Value1": N(-0.2, 0.1), - }) + }, + ) - group = nw.new_node(nodegroup_gradient_color( + group = nw.new_node( + nodegroup_gradient_color( Color1=(U(0.8, 1), U(0.8, 1), U(0.8, 1), 1.0), Color2=(U(), U(), U(), 1.0), Color3=(U(), U(), U(), 1.0), Color4=(U(), U(), U(), 1.0), - Color5=(U(0, 0.1), U(0, 0.1), U(0, 0.1), 1.0),).name, + Color5=(U(0, 0.1), U(0, 0.1), U(0, 0.1), 1.0), + ).name, input_kwargs={ "Value": N(0.5, 0.1), "Value1": N(-0.2, 0.1), - }) - - mix_8 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': colorramp_5.outputs["Color"], 'Color1': group_1, 'Color2': group}) - + }, + ) + + mix_8 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": colorramp_5.outputs["Color"], + "Color1": group_1, + "Color2": group, + }, + ) + side_stripe_color = (U(), U(), U(), 1.0) - mix_9 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': colorramp_6.outputs["Color"], 'Color1': side_stripe_color, 'Color2': mix_8}) - - mix_10 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': colorramp_8.outputs["Color"], 'Color1': side_stripe_color, 'Color2': mix_9}) - - mix_11 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': colorramp_10.outputs["Color"], 'Color1': mix_10, 'Color2': (U(), U(), U(), 1.0)}) - - mix_7 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': colorramp_11.outputs["Color"], 'Color1': mix_11, 'Color2': (U(0, 0.5), U(0, 0.5), U(0, 0.5), 1.0)}) - - noise_texture_7 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Scale': 10.0}) - + mix_9 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": colorramp_6.outputs["Color"], + "Color1": side_stripe_color, + "Color2": mix_8, + }, + ) + + mix_10 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": colorramp_8.outputs["Color"], + "Color1": side_stripe_color, + "Color2": mix_9, + }, + ) + + mix_11 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": colorramp_10.outputs["Color"], + "Color1": mix_10, + "Color2": (U(), U(), U(), 1.0), + }, + ) + + mix_7 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": colorramp_11.outputs["Color"], + "Color1": mix_11, + "Color2": (U(0, 0.5), U(0, 0.5), U(0, 0.5), 1.0), + }, + ) + + noise_texture_7 = nw.new_node(Nodes.NoiseTexture, input_kwargs={"Scale": 
10.0}) + roughness = U(0, 0.5) - colorramp_14 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': noise_texture_7.outputs["Fac"]}) + colorramp_14 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": noise_texture_7.outputs["Fac"]} + ) colorramp_14.color_ramp.elements[0].position = 0.0 colorramp_14.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) colorramp_14.color_ramp.elements[1].position = 0.9955 colorramp_14.color_ramp.elements[1].color = (roughness, roughness, roughness, 1.0) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': mix_7, 'Metallic': max(0, N(0.2, 0.1)), 'Roughness': colorramp_14.outputs["Color"]}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': principled_bsdf}) + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": mix_7, + "Metallic": max(0, N(0.2, 0.1)), + "Roughness": colorramp_14.outputs["Color"], + }, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf} + ) -def apply(obj, geo_kwargs=None, shader_kwargs=None, **kwargs): - attributes = [ - 'Color variations', - 'offset2' - ] +def apply(obj, geo_kwargs=None, shader_kwargs=None, **kwargs): + attributes = ["Color variations", "offset2"] x = random.random() if x < 0.2: @@ -1027,16 +1590,23 @@ def apply(obj, geo_kwargs=None, shader_kwargs=None, **kwargs): else: shader = shader_fish_body_regular - surface.add_geomod(obj, geometry_fish_body, input_kwargs=geo_kwargs, attributes=attributes) + surface.add_geomod( + obj, geometry_fish_body, input_kwargs=geo_kwargs, attributes=attributes + ) surface.add_material(obj, shader, input_kwargs=shader_kwargs) + if __name__ == "__main__": for i in range(1): - bpy.ops.wm.open_mainfile(filepath='dev_scene_fish_nurb.blend') + bpy.ops.wm.open_mainfile(filepath="dev_scene_fish_nurb.blend") i = 0 for obj in bpy.data.objects: - if obj.name.find('Nurb') >= 0: - apply(obj, geo_kwargs={'rand': True}, shader_kwargs={'rand': True, 'stripefish':True}) + if obj.name.find("Nurb") >= 0: + apply( + obj, + geo_kwargs={"rand": True}, + shader_kwargs={"rand": True, "stripefish": True}, + ) i += 1 - fn = os.path.join(os.path.abspath(os.curdir), 'dev_scene_test_fish_nurb2.blend') + fn = os.path.join(os.path.abspath(os.curdir), "dev_scene_test_fish_nurb2.blend") bpy.ops.wm.save_as_mainfile(filepath=fn) diff --git a/infinigen/assets/materials/fishfin.py b/infinigen/assets/materials/fishfin.py index e019c640b..24e48951b 100644 --- a/infinigen/assets/materials/fishfin.py +++ b/infinigen/assets/materials/fishfin.py @@ -4,40 +4,40 @@ # Authors: Mingzhe Wang -import os, sys -import numpy as np -import math as ma -from infinigen.assets.materials.utils.surface_utils import clip, sample_range, sample_ratio, sample_color, geo_voronoi_noise -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface import random +from infinigen.assets.materials.utils.surface_utils import ( + sample_color, + sample_range, +) +from infinigen.core import surface +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler + + def shader_fin_regular(nw: NodeWrangler, rand=True, **input_kwargs): # Code generated using version 2.4.3 of the node_transpiler - attribute = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'Bump'}) - - colorramp_2 = 
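The `apply` function reformatted above follows the two-call convention used throughout these material modules: one geometry-nodes modifier via `surface.add_geomod` (declaring which attributes it writes) plus one shader via `surface.add_material`. A generic sketch of that convention, with the geometry function, shader, and attribute names left as parameters rather than the fish-specific values; the wrapper name is hypothetical:

```python
from infinigen.core import surface


def apply_node_material(obj, geo_fn, shader_fn, attributes, rand=True):
    # Geometry-nodes modifier: writes the listed attributes onto the mesh so the
    # shader can read them back (e.g. "Color variations" and "offset2" above).
    surface.add_geomod(obj, geo_fn, input_kwargs={"rand": rand}, attributes=attributes)
    # Material: the shader function is evaluated with the same rand flag.
    surface.add_material(obj, shader_fn, input_kwargs={"rand": rand})


# e.g. apply_node_material(
#     obj, geometry_fish_body, shader_fish_body_regular, ["Color variations", "offset2"]
# )
```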
nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': attribute.outputs["Color"]}) + attribute = nw.new_node(Nodes.Attribute, attrs={"attribute_name": "Bump"}) + + colorramp_2 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": attribute.outputs["Color"]} + ) colorramp_2.color_ramp.elements[0].position = 0.0227 colorramp_2.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) colorramp_2.color_ramp.elements[1].position = 0.1432 colorramp_2.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Scale': 20.0}, - attrs={'noise_dimensions': '4D'}) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={"Scale": 20.0}, + attrs={"noise_dimensions": "4D"}, + ) if rand: - noise_texture.inputs['W'].default_value = sample_range(-2, 2) + noise_texture.inputs["W"].default_value = sample_range(-2, 2) - colorramp_1 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': noise_texture.outputs["Fac"]}) + colorramp_1 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": noise_texture.outputs["Fac"]} + ) colorramp_1.color_ramp.elements.new(0) colorramp_1.color_ramp.elements[0].position = 0.0 colorramp_1.color_ramp.elements[0].color = (0.0288, 0.0301, 0.0266, 1.0) @@ -50,14 +50,17 @@ def shader_fin_regular(nw: NodeWrangler, rand=True, **input_kwargs): for e in colorramp_1.color_ramp.elements: e.color[i] = sample_range(0, 0.15) - noise_texture_1 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Scale': 10.0}, - attrs={'noise_dimensions': '4D'}) + noise_texture_1 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={"Scale": 10.0}, + attrs={"noise_dimensions": "4D"}, + ) if rand: - noise_texture_1.inputs['W'].default_value = sample_range(-2, 2) + noise_texture_1.inputs["W"].default_value = sample_range(-2, 2) - colorramp = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': noise_texture_1.outputs["Fac"]}) + colorramp = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": noise_texture_1.outputs["Fac"]} + ) colorramp.color_ramp.elements[0].position = 0.0045 colorramp.color_ramp.elements[0].color = (0.1512, 0.1236, 0.0977, 1.0) colorramp.color_ramp.elements[1].position = 0.5364 @@ -67,33 +70,42 @@ def shader_fin_regular(nw: NodeWrangler, rand=True, **input_kwargs): for e in colorramp_1.color_ramp.elements: e.color[i] = sample_range(0, 0.15) - mix = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': colorramp_2.outputs["Color"], 'Color1': colorramp_1.outputs["Color"], 'Color2': colorramp.outputs["Color"]}) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': mix}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': principled_bsdf}) + mix = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": colorramp_2.outputs["Color"], + "Color1": colorramp_1.outputs["Color"], + "Color2": colorramp.outputs["Color"], + }, + ) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, input_kwargs={"Base Color": mix} + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf} + ) + def shader_fin_gold(nw: NodeWrangler, rand=True, **input_kwargs): # Code generated using version 2.4.3 of the node_transpiler - attribute = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'Bump'}) - - colorramp_2 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': attribute.outputs["Vector"]}) + attribute = nw.new_node(Nodes.Attribute, attrs={"attribute_name": "Bump"}) + + colorramp_2 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": 
attribute.outputs["Vector"]} + ) colorramp_2.color_ramp.elements[0].position = 0.0 colorramp_2.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) colorramp_2.color_ramp.elements[1].position = 0.7977 colorramp_2.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - - attribute_2 = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'BumpMask'}) - - colorramp_8 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': attribute_2.outputs["Color"]}) + + attribute_2 = nw.new_node(Nodes.Attribute, attrs={"attribute_name": "BumpMask"}) + + colorramp_8 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": attribute_2.outputs["Color"]} + ) colorramp_8.color_ramp.elements[0].position = 0.0727 colorramp_8.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) colorramp_8.color_ramp.elements[1].position = 1.0 @@ -101,12 +113,13 @@ def shader_fin_gold(nw: NodeWrangler, rand=True, **input_kwargs): if rand: colorramp_8.color_ramp.elements[0].position = sample_range(0.05, 0.15) - multiply = nw.new_node(Nodes.Math, + multiply = nw.new_node( + Nodes.Math, input_kwargs={0: colorramp_2.outputs["Color"], 1: colorramp_8.outputs["Color"]}, - attrs={'operation': 'MULTIPLY'}) - - colorramp_5 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': multiply}) + attrs={"operation": "MULTIPLY"}, + ) + + colorramp_5 = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": multiply}) colorramp_5.color_ramp.elements.new(0) colorramp_5.color_ramp.elements[0].position = 0.0 colorramp_5.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) @@ -114,9 +127,10 @@ def shader_fin_gold(nw: NodeWrangler, rand=True, **input_kwargs): colorramp_5.color_ramp.elements[1].color = (0.5, 0.5, 0.5, 1.0) colorramp_5.color_ramp.elements[2].position = 0.6977 colorramp_5.color_ramp.elements[2].color = (1.0, 1.0, 1.0, 1.0) - - colorramp_7 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': attribute_2.outputs["Color"]}) + + colorramp_7 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": attribute_2.outputs["Color"]} + ) colorramp_7.color_ramp.elements.new(0) colorramp_7.color_ramp.elements[0].position = 0.0 colorramp_7.color_ramp.elements[0].color = (0.4063, 0.4063, 0.4063, 1.0) @@ -127,8 +141,9 @@ def shader_fin_gold(nw: NodeWrangler, rand=True, **input_kwargs): if rand: colorramp_7.color_ramp.elements[1].position = sample_range(0.2, 0.8) - colorramp_4 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': colorramp_7.outputs["Color"]}) + colorramp_4 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": colorramp_7.outputs["Color"]} + ) colorramp_4.color_ramp.elements.new(0) colorramp_4.color_ramp.elements[0].position = 0.0 colorramp_4.color_ramp.elements[0].color = (1.0, 0.8, 0.6, 1.0) @@ -140,30 +155,38 @@ def shader_fin_gold(nw: NodeWrangler, rand=True, **input_kwargs): sample_color(colorramp_4.color_ramp.elements[0].color, offset=0.03) sample_color(colorramp_4.color_ramp.elements[1].color, offset=0.03) - transparent_bsdf = nw.new_node(Nodes.TransparentBSDF, - input_kwargs={'Color': colorramp_4.outputs["Color"]}) - - translucent_bsdf = nw.new_node(Nodes.TranslucentBSDF, - input_kwargs={'Color': (1.0, 0.7354, 0.4708, 1.0)}) - - mix_shader_1 = nw.new_node(Nodes.MixShader, - input_kwargs={'Fac': 0.1, 1: transparent_bsdf, 2: translucent_bsdf}) - - colorramp = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': attribute.outputs["Vector"]}) + transparent_bsdf = nw.new_node( + Nodes.TransparentBSDF, input_kwargs={"Color": colorramp_4.outputs["Color"]} + ) + + translucent_bsdf = nw.new_node( + Nodes.TranslucentBSDF, input_kwargs={"Color": (1.0, 
0.7354, 0.4708, 1.0)} + ) + + mix_shader_1 = nw.new_node( + Nodes.MixShader, + input_kwargs={"Fac": 0.1, 1: transparent_bsdf, 2: translucent_bsdf}, + ) + + colorramp = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": attribute.outputs["Vector"]} + ) colorramp.color_ramp.elements[0].position = 0.0 colorramp.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) colorramp.color_ramp.elements[1].position = 0.1273 colorramp.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - - noise_texture_1 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Scale': 10.0}, - attrs={'noise_dimensions': '4D'}) + + noise_texture_1 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={"Scale": 10.0}, + attrs={"noise_dimensions": "4D"}, + ) if rand: - noise_texture_1.inputs['W'].default_value = sample_range(-2, 2) + noise_texture_1.inputs["W"].default_value = sample_range(-2, 2) - colorramp_3 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': noise_texture_1.outputs["Fac"]}) + colorramp_3 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": noise_texture_1.outputs["Fac"]} + ) colorramp_3.color_ramp.elements[0].position = 0.3568 colorramp_3.color_ramp.elements[0].color = (0.8258, 0.1192, 0.0, 1.0) colorramp_3.color_ramp.elements[1].position = 1.0 @@ -172,21 +195,36 @@ def shader_fin_gold(nw: NodeWrangler, rand=True, **input_kwargs): sample_color(colorramp_3.color_ramp.elements[0].color, offset=0.05) sample_color(colorramp_3.color_ramp.elements[1].color, offset=0.05) - mix = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': colorramp.outputs["Color"], 'Color1': (1.0, 0.5473, 0.2571, 1.0), 'Color2': colorramp_3.outputs["Color"]}) + mix = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": colorramp.outputs["Color"], + "Color1": (1.0, 0.5473, 0.2571, 1.0), + "Color2": colorramp_3.outputs["Color"], + }, + ) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, input_kwargs={"Base Color": mix, "Roughness": 1.0} + ) + + mix_shader = nw.new_node( + Nodes.MixShader, + input_kwargs={ + "Fac": colorramp_5.outputs["Color"], + 1: mix_shader_1, + 2: principled_bsdf, + }, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": mix_shader} + ) - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': mix, 'Roughness': 1.0}) - - mix_shader = nw.new_node(Nodes.MixShader, - input_kwargs={'Fac': colorramp_5.outputs["Color"], 1: mix_shader_1, 2: principled_bsdf}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': mix_shader}) def apply(obj, geo_kwargs={}, shader_kwargs={}, **kwargs): - if 'goldfish' in shader_kwargs: - if shader_kwargs['goldfish']: + if "goldfish" in shader_kwargs: + if shader_kwargs["goldfish"]: shader = shader_fin_gold else: shader = shader_fin_regular @@ -195,4 +233,4 @@ def apply(obj, geo_kwargs={}, shader_kwargs={}, **kwargs): shader = shader_fin_gold else: shader = shader_fin_regular - surface.add_material(obj, shader, input_kwargs=shader_kwargs) \ No newline at end of file + surface.add_material(obj, shader, input_kwargs=shader_kwargs) diff --git a/infinigen/assets/materials/giraffe_attr.py b/infinigen/assets/materials/giraffe_attr.py index 87aa31b5f..b1cc8d464 100644 --- a/infinigen/assets/materials/giraffe_attr.py +++ b/infinigen/assets/materials/giraffe_attr.py @@ -4,99 +4,135 @@ # Authors: Mingzhe Wang, Alex Raistrick -import os, sys -import numpy as np -import math as ma -from infinigen.assets.materials.utils.surface_utils import clip, sample_range, sample_ratio, sample_color, geo_voronoi_noise +import 
os + import bpy -import mathutils -from numpy.random import uniform as U, normal as N, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category, hsv2rgba +from numpy.random import uniform as U + +from infinigen.assets.materials.utils.surface_utils import ( + sample_range, + sample_ratio, +) +from infinigen.assets.utils.nodegroups.shader import nodegroup_color_mask from infinigen.core import surface +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.util.color import hsv2rgba -from infinigen.assets.creatures.util.nodegroups.shader import nodegroup_color_mask def shader_giraffe_attr(nw: NodeWrangler, rand=True, **input_kwargs): # Code generated using version 2.4.3 of the node_transpiler - attribute = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'local_pos'}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': attribute.outputs["Color"]}) - - mix = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': 0.9, 'Color1': noise_texture.outputs["Color"], 'Color2': attribute.outputs["Color"]}) - - mapping = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': mix}) - + attribute = nw.new_node(Nodes.Attribute, attrs={"attribute_name": "local_pos"}) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, input_kwargs={"Vector": attribute.outputs["Color"]} + ) + + mix = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": 0.9, + "Color1": noise_texture.outputs["Color"], + "Color2": attribute.outputs["Color"], + }, + ) + + mapping = nw.new_node(Nodes.Mapping, input_kwargs={"Vector": mix}) + value = nw.new_node(Nodes.Value) value.outputs[0].default_value = 10.0 if rand: - value.outputs[0].default_value = sample_ratio(value.outputs[0].default_value, 0.5, 2) - - voronoi_texture = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': mapping, 'Scale': value}, - attrs={'voronoi_dimensions': '2D'}) - - voronoi_texture_4 = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': mapping, 'Scale': value}, - attrs={'voronoi_dimensions': '2D', 'feature': 'SMOOTH_F1'}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: voronoi_texture.outputs["Distance"], 1: voronoi_texture_4.outputs["Distance"]}, - attrs={'operation': 'SUBTRACT'}) - - less_than = nw.new_node(Nodes.Math, + value.outputs[0].default_value = sample_ratio( + value.outputs[0].default_value, 0.5, 2 + ) + + voronoi_texture = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={"Vector": mapping, "Scale": value}, + attrs={"voronoi_dimensions": "2D"}, + ) + + voronoi_texture_4 = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={"Vector": mapping, "Scale": value}, + attrs={"voronoi_dimensions": "2D", "feature": "SMOOTH_F1"}, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: voronoi_texture.outputs["Distance"], + 1: voronoi_texture_4.outputs["Distance"], + }, + attrs={"operation": "SUBTRACT"}, + ) + + less_than = nw.new_node( + Nodes.Math, input_kwargs={0: subtract, 1: sample_range(0.04, 0.08) if rand else 0.07}, - attrs={'operation': 'LESS_THAN'}) - - colorramp_1 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': less_than}) + attrs={"operation": "LESS_THAN"}, + ) + + colorramp_1 = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": less_than}) colorramp_1.color_ramp.elements[0].position = 0.2545 colorramp_1.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) colorramp_1.color_ramp.elements[1].position = 0.2886 
colorramp_1.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - + group = nw.new_node(nodegroup_color_mask().name) - - colorramp = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': group}) + + colorramp = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": group}) colorramp.color_ramp.elements[0].position = 0.0 colorramp.color_ramp.elements[0].color = (0.9301, 0.5647, 0.3372, 1.0) colorramp.color_ramp.elements[1].position = 1.0 colorramp.color_ramp.elements[1].color = (0.9755, 1.0, 0.9096, 1.0) if rand: - colorramp.color_ramp.elements[0].color = hsv2rgba((U(0.02, 0.06), U(0.4, 0.8), U(0.15, 0.7))) - colorramp.color_ramp.elements[1].color = hsv2rgba((U(0.02, 0.06), U(0.4, 0.8), U(0.15, 0.7))) + colorramp.color_ramp.elements[0].color = hsv2rgba( + (U(0.02, 0.06), U(0.4, 0.8), U(0.15, 0.7)) + ) + colorramp.color_ramp.elements[1].color = hsv2rgba( + (U(0.02, 0.06), U(0.4, 0.8), U(0.15, 0.7)) + ) - mix_1 = nw.new_node(Nodes.MixRGB, + mix_1 = nw.new_node( + Nodes.MixRGB, input_kwargs={ - 'Fac': colorramp_1.outputs["Color"], - 'Color1': colorramp.outputs["Color"], - 'Color2': hsv2rgba((U(0.02, 0.06), U(0.4, 0.9), U(0.04, 0.1))) - }) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': mix_1}, - attrs={'subsurface_method': 'BURLEY'}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': principled_bsdf}) + "Fac": colorramp_1.outputs["Color"], + "Color1": colorramp.outputs["Color"], + "Color2": hsv2rgba((U(0.02, 0.06), U(0.4, 0.9), U(0.04, 0.1))), + }, + ) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={"Base Color": mix_1}, + attrs={"subsurface_method": "BURLEY"}, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf} + ) + def apply(obj, geo_kwargs=None, shader_kwargs=None, **kwargs): - surface.add_material(obj, shader_giraffe_attr, reuse=False, input_kwargs=shader_kwargs) + surface.add_material( + obj, shader_giraffe_attr, reuse=False, input_kwargs=shader_kwargs + ) + if __name__ == "__main__": for i in range(1): - bpy.ops.wm.open_mainfile(filepath='dev_scene_1019.blend') - #creature(73349, 0).parts(0, factory=QuadrupedBody) - apply(bpy.data.objects['creature(73349, 0).parts(0, factory=QuadrupedBody)'], geo_kwargs={'rand': True}, shader_kwargs={'rand': True}) - fn = os.path.join(os.path.abspath(os.curdir), 'dev_scene_test_giraffe_attr.blend') + bpy.ops.wm.open_mainfile(filepath="dev_scene_1019.blend") + # creature(73349, 0).parts(0, factory=QuadrupedBody) + apply( + bpy.data.objects["creature(73349, 0).parts(0, factory=QuadrupedBody)"], + geo_kwargs={"rand": True}, + shader_kwargs={"rand": True}, + ) + fn = os.path.join( + os.path.abspath(os.curdir), "dev_scene_test_giraffe_attr.blend" + ) bpy.ops.wm.save_as_mainfile(filepath=fn) - #bpy.context.scene.render.filepath = os.path.join('surfaces/surface_thumbnails', 'bone%d.jpg'%(i)) - #bpy.context.scene.render.image_settings.file_format='JPEG' - #bpy.ops.render.render(write_still=True) \ No newline at end of file + # bpy.context.scene.render.filepath = os.path.join('surfaces/surface_thumbnails', 'bone%d.jpg'%(i)) + # bpy.context.scene.render.image_settings.file_format='JPEG' + # bpy.ops.render.render(write_still=True) diff --git a/infinigen/assets/materials/glass.py b/infinigen/assets/materials/glass.py index be4c28cbb..8d55dd9d6 100644 --- a/infinigen/assets/materials/glass.py +++ b/infinigen/assets/materials/glass.py @@ -2,18 +2,14 @@ # This source code is licensed under the BSD 3-Clause license found in the 
LICENSE file in the root directory of this source tree. # Authors: Lingjie Mei -import colorsys from numpy.random import uniform -import bpy - -from infinigen.core.util.color import hsv2rgba from infinigen.assets.materials import common from infinigen.core.nodes.node_info import Nodes from infinigen.core.nodes.node_wrangler import NodeWrangler +from infinigen.core.util.color import hsv2rgba -from infinigen.core.util import blender as butil def shader_glass(nw: NodeWrangler, color=None, is_window=False, **kwargs): # Code generated using version 2.6.5 of the node_transpiler @@ -21,28 +17,42 @@ def shader_glass(nw: NodeWrangler, color=None, is_window=False, **kwargs): color = get_glass_color(clear=False) # TODO windows are currently planes so refract and dont unrefract. ideally we just fix the geometry - # warning: currently this IOR also accidentally just turns off reflections, the window plane is pretty much invisible. - ior = 1.5 if not is_window else 1.0 - + # warning: currently this IOR also accidentally just turns off reflections, the window plane is pretty much invisible. + ior = 1.5 if not is_window else 1.0 + light_path = nw.new_node(Nodes.LightPath) - + transparent_bsdf = nw.new_node(Nodes.TransparentBSDF) - - shader = nw.new_node(Nodes.GlassBSDF, input_kwargs={'Roughness': 0.0200, 'IOR': ior}) - + + shader = nw.new_node( + Nodes.GlassBSDF, input_kwargs={"Roughness": 0.0200, "IOR": ior} + ) + if is_window: - shader = nw.new_node(Nodes.MixShader, - input_kwargs={'Fac': light_path.outputs["Is Camera Ray"], 1: transparent_bsdf, 2: shader}) - - material_output = nw.new_node(Nodes.MaterialOutput, input_kwargs={'Surface': shader}, attrs={'is_active_output': True}) + shader = nw.new_node( + Nodes.MixShader, + input_kwargs={ + "Fac": light_path.outputs["Is Camera Ray"], + 1: transparent_bsdf, + 2: shader, + }, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, + input_kwargs={"Surface": shader}, + attrs={"is_active_output": True}, + ) + def apply(obj, selection=None, clear=False, **kwargs): color = get_glass_color(clear) common.apply(obj, shader_glass, selection, color, **kwargs) + def get_glass_color(clear): - if uniform(0, 1) < .5: + if uniform(0, 1) < 0.5: color = 1, 1, 1, 1 else: - color = hsv2rgba(uniform(0, 1), .01 if clear else uniform(.05, .25), 1) + color = hsv2rgba(uniform(0, 1), 0.01 if clear else uniform(0.05, 0.25), 1) return color diff --git a/infinigen/assets/materials/glass_volume.py b/infinigen/assets/materials/glass_volume.py index 84fdc059b..a4e998acc 100644 --- a/infinigen/assets/materials/glass_volume.py +++ b/infinigen/assets/materials/glass_volume.py @@ -5,27 +5,34 @@ from numpy.random import uniform -from infinigen.core.util.color import hsv2rgba from infinigen.assets.materials import common from infinigen.core.nodes.node_info import Nodes from infinigen.core.nodes.node_wrangler import NodeWrangler +from infinigen.core.util.color import hsv2rgba + def shader_glass_volume(nw: NodeWrangler, color=None, density=100.0, **kwargs): # Code generated using version 2.6.4 of the node_transpiler if color is None: - if uniform(0, 1) < .3: + if uniform(0, 1) < 0.3: color = 1, 1, 1, 1 else: - color = hsv2rgba(uniform(0, 1), uniform(.5, .9), uniform(.6, .9)) + color = hsv2rgba(uniform(0, 1), uniform(0.5, 0.9), uniform(0.6, 0.9)) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, input_kwargs={"Roughness": 0.0000, "Transmission": 1.0000} + ) - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, input_kwargs={'Roughness': 0.0000, 'Transmission': 1.0000}) + 
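Both glass modules above pick their tint the same way: pure white part of the time, otherwise a random hue converted with `hsv2rgba`. A small worked example of that sampling convention — it simply restates `get_glass_color` as a standalone function so it can be run and inspected in isolation:

```python
from numpy.random import uniform

from infinigen.core.util.color import hsv2rgba


def sample_glass_tint(clear: bool = False):
    # Half the time: untinted white glass.
    if uniform(0, 1) < 0.5:
        return 1, 1, 1, 1
    # Otherwise: random hue, nearly unsaturated when `clear`, full value.
    return hsv2rgba(uniform(0, 1), 0.01 if clear else uniform(0.05, 0.25), 1)
```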
volume_absorption = nw.new_node( + "ShaderNodeVolumeAbsorption", input_kwargs={"Color": color, "Density": density} + ) - volume_absorption = nw.new_node('ShaderNodeVolumeAbsorption', - input_kwargs={'Color': color, 'Density': density}) + material_output = nw.new_node( + Nodes.MaterialOutput, + input_kwargs={"Surface": principled_bsdf, "Volume": volume_absorption}, + attrs={"is_active_output": True}, + ) - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': principled_bsdf, 'Volume': volume_absorption}, - attrs={'is_active_output': True}) def apply(obj, selection=None, **kwargs): common.apply(obj, shader_glass_volume, selection, **kwargs) diff --git a/infinigen/assets/materials/grass_blade_texture.py b/infinigen/assets/materials/grass_blade_texture.py index a2d925ed2..35f07ecab 100644 --- a/infinigen/assets/materials/grass_blade_texture.py +++ b/infinigen/assets/materials/grass_blade_texture.py @@ -4,16 +4,14 @@ # Authors: Lahav Lipson, Alexander Raistrick -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface import numpy as np +from numpy.random import uniform + +from infinigen.core import surface +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -pallete1 = [ ((0.2632, 0.1493, 0.0558, 1.0), 0.0), +pallete1 = [ + ((0.2632, 0.1493, 0.0558, 1.0), 0.0), ((0.2695, 0.1585, 0.064, 1.0), 0.058), ((0.829, 0.7488, 0.449, 1.0), 0.0718), ((0.3486, 0.223, 0.1024, 1.0), 0.0773), @@ -43,9 +41,11 @@ ((0.5537, 0.381, 0.2168, 1.0), 0.9309), ((0.3119, 0.1916, 0.0883, 1.0), 0.9558), ((0.931, 0.7854, 0.5187, 1.0), 0.9613), - ((0.9281, 0.7416, 0.4916, 1.0), 1.0)] + ((0.9281, 0.7416, 0.4916, 1.0), 1.0), +] -pallete2 = [ ((0.3586, 0.3253, 0.1285, 1.0), 0.0), +pallete2 = [ + ((0.3586, 0.3253, 0.1285, 1.0), 0.0), ((0.2603, 0.2268, 0.0725, 1.0), 0.0319), ((0.2972, 0.2467, 0.0864, 1.0), 0.0957), ((0.4891, 0.4373, 0.1874, 1.0), 0.117), @@ -75,9 +75,11 @@ ((0.3915, 0.3563, 0.1493, 1.0), 0.9043), ((0.1694, 0.1873, 0.0553, 1.0), 0.9574), ((0.4335, 0.4064, 0.1848, 1.0), 0.9894), - ((0.2195, 0.1946, 0.0561, 1.0), 1.0)] + ((0.2195, 0.1946, 0.0561, 1.0), 1.0), +] -pallete3 = [ ((0.3821, 0.4798, 0.0818, 1.0), 0.0), +pallete3 = [ + ((0.3821, 0.4798, 0.0818, 1.0), 0.0), ((0.0858, 0.1301, 0.0193, 1.0), 0.0316), ((0.1099, 0.1785, 0.0071, 1.0), 0.0842), ((0.8128, 0.9357, 0.197, 1.0), 0.1053), @@ -107,9 +109,11 @@ ((0.8421, 0.9664, 0.3916, 1.0), 0.9263), ((0.192, 0.2556, 0.0265, 1.0), 0.9474), ((0.4264, 0.5593, 0.0509, 1.0), 0.9579), - ((0.1199, 0.1892, 0.004, 1.0), 1.0)] + ((0.1199, 0.1892, 0.004, 1.0), 1.0), +] -pallete4 = [ ((0.0273, 0.0802, 0.0382, 1.0), 0.0), +pallete4 = [ + ((0.0273, 0.0802, 0.0382, 1.0), 0.0), ((0.0232, 0.0742, 0.0356, 1.0), 0.0323), ((0.1095, 0.2159, 0.1221, 1.0), 0.0645), ((0.0296, 0.0865, 0.0319, 1.0), 0.0968), @@ -132,9 +136,11 @@ ((0.0295, 0.0821, 0.0318, 1.0), 0.8387), ((0.0184, 0.0643, 0.0211, 1.0), 0.9032), ((0.0298, 0.0866, 0.0336, 1.0), 0.9355), - ((0.0193, 0.0679, 0.0271, 1.0), 1.0)] + ((0.0193, 0.0679, 0.0271, 1.0), 1.0), +] -pallete5 = [ ((0.1712, 0.2776, 0.0465, 1.0), 0.0), +pallete5 = [ + ((0.1712, 0.2776, 0.0465, 1.0), 0.0), ((0.0596, 0.1252, 0.0085, 1.0), 0.0732), ((0.1746, 0.2918, 0.0561, 1.0), 0.0976), ((0.0666, 0.1413, 0.007, 1.0), 0.122), @@ -159,48 +165,80 @@ ((0.1389, 0.1985, 0.0443, 1.0), 0.878), ((0.0646, 0.1021, 
0.0119, 1.0), 0.9512), ((0.1329, 0.1812, 0.0395, 1.0), 0.9756), - ((0.1195, 0.1651, 0.0319, 1.0), 1.0)] + ((0.1195, 0.1651, 0.0319, 1.0), 1.0), +] pallettes = np.array([pallete1, pallete2, pallete3, pallete4, pallete5], dtype=object) + def shader_grass_texture_original(nw: NodeWrangler): # Code generated using version 2.4.3 of the node_transpiler texture_coordinate = nw.new_node(Nodes.TextureCoord) - - coord = nw.new_node(Nodes.VectorMath, - input_kwargs={0: texture_coordinate.outputs["UV"], 'Scale': uniform(0.02, 0.2)}, - attrs={'operation': 'SCALE'}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': coord}) - - edge_height = nw.new_node(Nodes.Math, + + coord = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: texture_coordinate.outputs["UV"], "Scale": uniform(0.02, 0.2)}, + attrs={"operation": "SCALE"}, + ) + + separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": coord}) + + edge_height = nw.new_node( + Nodes.Math, input_kwargs={0: separate_xyz.outputs["X"], 1: 6.0, 2: -10.0}, - label='edge height', - attrs={'operation': 'MULTIPLY_ADD'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': separate_xyz.outputs["Y"], 'Y': edge_height}) - - wave_texture = nw.new_node(Nodes.WaveTexture, - input_kwargs={'Vector': separate_xyz.outputs["Y"], 'Scale': 25.0, 'Distortion': 8.0, 'Detail Scale': 6.0}) - - musgrave_texture = nw.new_node(Nodes.MusgraveTexture, - input_kwargs={'Vector': combine_xyz, 'Scale': 8.0, 'Detail': 5.0, 'Dimension': 0.1, 'Lacunarity': 3.0}) - - mix_1 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': 0.2, 'Color1': wave_texture.outputs["Color"], 'Color2': musgrave_texture}) - - + label="edge height", + attrs={"operation": "MULTIPLY_ADD"}, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": separate_xyz.outputs["Y"], "Y": edge_height}, + ) + + wave_texture = nw.new_node( + Nodes.WaveTexture, + input_kwargs={ + "Vector": separate_xyz.outputs["Y"], + "Scale": 25.0, + "Distortion": 8.0, + "Detail Scale": 6.0, + }, + ) + + musgrave_texture = nw.new_node( + Nodes.MusgraveTexture, + input_kwargs={ + "Vector": combine_xyz, + "Scale": 8.0, + "Detail": 5.0, + "Dimension": 0.1, + "Lacunarity": 3.0, + }, + ) + + mix_1 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": 0.2, + "Color1": wave_texture.outputs["Color"], + "Color2": musgrave_texture, + }, + ) + object_info = nw.new_node(Nodes.ObjectInfo_Shader) - map_range_1 = nw.new_node(Nodes.MapRange, input_kwargs={0: uniform(), 3: object_info.outputs["Random"], 4: mix_1}) - colorramp = nw.new_node(Nodes.ColorRamp, input_kwargs={'Fac': map_range_1.outputs["Result"]}) - + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={0: uniform(), 3: object_info.outputs["Random"], 4: mix_1}, + ) + colorramp = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": map_range_1.outputs["Result"]} + ) + pallete = np.random.choice(pallettes) np.random.shuffle(pallete) - pallete = pallete[:np.random.randint(4, len(pallete))] - for _ in range(len(pallete)-2): + pallete = pallete[: np.random.randint(4, len(pallete))] + for _ in range(len(pallete) - 2): colorramp.color_ramp.elements.new(0) assert len(pallete) == len(colorramp.color_ramp.elements) for el, (rgba, pos) in zip(colorramp.color_ramp.elements, pallete): @@ -209,15 +247,16 @@ def shader_grass_texture_original(nw: NodeWrangler): rough1, rough2 = uniform(0.2, 0.6, 2) roughness = nw.new_node(Nodes.MapRange, [mix_1, 0, 1, rough1, rough2]) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - 
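The grass shader above drives its ColorRamp from one of the sampled palettes; since a new ColorRamp node starts with two elements, it adds `len(palette) - 2` more before copying colors and positions across. A short sketch of that step — the body of the final `zip` loop is inferred, as it falls outside the hunk shown here:

```python
def set_ramp_from_palette(colorramp, palette):
    # Grow the ramp from its default two elements to one per palette entry.
    for _ in range(len(palette) - 2):
        colorramp.color_ramp.elements.new(0)
    assert len(palette) == len(colorramp.color_ramp.elements)
    # Copy (rgba, position) pairs onto the ramp (assignment inferred from context).
    for el, (rgba, pos) in zip(colorramp.color_ramp.elements, palette):
        el.color = rgba
        el.position = pos
```

Used as, e.g., `set_ramp_from_palette(colorramp, pallete1)` with the palettes defined above.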
input_kwargs={'Base Color': colorramp.outputs["Color"], 'Roughness': roughness}) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={"Base Color": colorramp.outputs["Color"], "Roughness": roughness}, + ) translucent = nw.new_node(Nodes.TranslucentBSDF, [colorramp.outputs["Color"]]) shader = nw.new_node(Nodes.MixShader, [0.7, principled_bsdf, translucent]) - - nw.new_node(Nodes.MaterialOutput, input_kwargs={'Surface': shader}) + nw.new_node(Nodes.MaterialOutput, input_kwargs={"Surface": shader}) def apply(obj, selection=None, **kwargs): - surface.add_material(obj, shader_grass_texture_original, selection=selection) \ No newline at end of file + surface.add_material(obj, shader_grass_texture_original, selection=selection) diff --git a/infinigen/assets/materials/hardwood_floor.py b/infinigen/assets/materials/hardwood_floor.py index 34ca16bb5..716e3a458 100644 --- a/infinigen/assets/materials/hardwood_floor.py +++ b/infinigen/assets/materials/hardwood_floor.py @@ -5,25 +5,34 @@ import numpy as np from numpy.random import uniform -from . import common -from .utils.surface_utils import perturb_coordinates -from .table_materials import shader_wood from infinigen.assets.utils.object import new_plane -from ...core.nodes import NodeWrangler, Nodes + +from ...core.nodes import Nodes, NodeWrangler from ...core.util.random import log_uniform +from . import common +from .table_materials import shader_wood +from .utils.surface_utils import perturb_coordinates def shader_hardwood_floor(nw: NodeWrangler, rotation=None): - vec = nw.new_node(Nodes.Mapping, [nw.new_node(Nodes.TextureCoord).outputs["Object"]], - input_kwargs={'Rotation': rotation}) + vec = nw.new_node( + Nodes.Mapping, + [nw.new_node(Nodes.TextureCoord).outputs["Object"]], + input_kwargs={"Rotation": rotation}, + ) color, mortar = map( - nw.new_node(Nodes.BrickTexture, [vec, (0, 0, 0, 1), (1, 1, 1, 1), (0, 0, 0, uniform(.01, .02))], - input_kwargs={ - 'Scale': 1, - 'Row Height': log_uniform(.06, .15), - 'Brick Width': log_uniform(.6, 1), - 'Mortar Size': uniform(.002, .002) - }).outputs.get, ['Color', 'Fac']) + nw.new_node( + Nodes.BrickTexture, + [vec, (0, 0, 0, 1), (1, 1, 1, 1), (0, 0, 0, uniform(0.01, 0.02))], + input_kwargs={ + "Scale": 1, + "Row Height": log_uniform(0.06, 0.15), + "Brick Width": log_uniform(0.6, 1), + "Mortar Size": uniform(0.002, 0.002), + }, + ).outputs.get, + ["Color", "Fac"], + ) location = nw.combine(color, color, color) shader_wood(nw) perturb_coordinates(nw, nw.find(Nodes.TextureCoord)[1], location, 0) @@ -36,7 +45,7 @@ def shader_hardwood_floor(nw: NodeWrangler, rotation=None): def apply(obj, selection=None, rotation=None, **kwargs): if rotation is None: - rotation = (0,0,0) if uniform() < .1 else (0,0,np.pi / 2) + rotation = (0, 0, 0) if uniform() < 0.1 else (0, 0, np.pi / 2) return common.apply(obj, shader_hardwood_floor, selection, rotation, **kwargs) diff --git a/infinigen/assets/materials/horn.py b/infinigen/assets/materials/horn.py index 6a3cca9c4..ed82df8c9 100644 --- a/infinigen/assets/materials/horn.py +++ b/infinigen/assets/materials/horn.py @@ -4,81 +4,127 @@ # Authors: Yihan Wang -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category +from numpy.random import uniform + from infinigen.core import surface +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler + def 
shader_horn(nw: NodeWrangler): # Code generated using version 2.4.3 of the node_transpiler texture_coordinate = nw.new_node(Nodes.TextureCoord) - - mapping = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': texture_coordinate.outputs["Object"], 'Location': (1.7+ uniform(-1, 1) * 0.05, 0.3 + uniform(-1, 1) * 0.05, 0.0 + uniform(-1, 1) * 0.05)}) - - noise_texture_2 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': mapping, 'Scale': 10.8 + uniform(-1, 1) * 3, 'Detail': 15.0, 'Roughness': 0.7667}) - - voronoi_texture_1 = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': noise_texture_2.outputs["Fac"], 'Scale': 10.0}) - - colorramp_2 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': voronoi_texture_1.outputs["Color"]}) + + mapping = nw.new_node( + Nodes.Mapping, + input_kwargs={ + "Vector": texture_coordinate.outputs["Object"], + "Location": ( + 1.7 + uniform(-1, 1) * 0.05, + 0.3 + uniform(-1, 1) * 0.05, + 0.0 + uniform(-1, 1) * 0.05, + ), + }, + ) + + noise_texture_2 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": mapping, + "Scale": 10.8 + uniform(-1, 1) * 3, + "Detail": 15.0, + "Roughness": 0.7667, + }, + ) + + voronoi_texture_1 = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={"Vector": noise_texture_2.outputs["Fac"], "Scale": 10.0}, + ) + + colorramp_2 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": voronoi_texture_1.outputs["Color"]} + ) colorramp_2.color_ramp.elements[0].position = 0.4364 + uniform(-1, 1) * 0.05 colorramp_2.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) colorramp_2.color_ramp.elements[1].position = 0.58 + uniform(-1, 1) * 0.05 colorramp_2.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - - mapping_2 = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': texture_coordinate.outputs["Object"]}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': mapping_2, 'Scale': 98.9 + uniform(-0.3, 1) * 30, 'Detail': 15.0, 'Roughness': 0.7667}) - - voronoi_texture = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': noise_texture.outputs["Fac"], 'Scale': 10.0}) - - colorramp = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': voronoi_texture.outputs["Color"]}) + + mapping_2 = nw.new_node( + Nodes.Mapping, input_kwargs={"Vector": texture_coordinate.outputs["Object"]} + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": mapping_2, + "Scale": 98.9 + uniform(-0.3, 1) * 30, + "Detail": 15.0, + "Roughness": 0.7667, + }, + ) + + voronoi_texture = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={"Vector": noise_texture.outputs["Fac"], "Scale": 10.0}, + ) + + colorramp = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": voronoi_texture.outputs["Color"]} + ) colorramp.color_ramp.elements[0].position = 0.3089 + uniform(-1, 1) * 0.05 colorramp.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) colorramp.color_ramp.elements[1].position = 0.673 + uniform(-1, 1) * 0.05 colorramp.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - - multiply = nw.new_node(Nodes.VectorMath, + + multiply = nw.new_node( + Nodes.VectorMath, input_kwargs={0: colorramp_2.outputs["Color"], 1: colorramp.outputs["Color"]}, - attrs={'operation': 'MULTIPLY'}) - - mapping_1 = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': texture_coordinate.outputs["UV"], 'Scale': (1.0, 1.0, 0.0)}) - - noise_texture_1 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': mapping_1, 'Scale': 6.4 + uniform(-1, 1) * 1}) - - colorramp_1 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': 
noise_texture_1.outputs["Fac"]}) + attrs={"operation": "MULTIPLY"}, + ) + + mapping_1 = nw.new_node( + Nodes.Mapping, + input_kwargs={ + "Vector": texture_coordinate.outputs["UV"], + "Scale": (1.0, 1.0, 0.0), + }, + ) + + noise_texture_1 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={"Vector": mapping_1, "Scale": 6.4 + uniform(-1, 1) * 1}, + ) + + colorramp_1 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": noise_texture_1.outputs["Fac"]} + ) colorramp_1.color_ramp.elements[0].position = 0.3682 + uniform(-1, 1) * 0.05 colorramp_1.color_ramp.elements[0].color = (0.3813, 0.2384, 0.1183, 1.0) colorramp_1.color_ramp.elements[1].position = 0.7864 + uniform(-1, 1) * 0.05 colorramp_1.color_ramp.elements[1].color = (0.3916, 0.2831, 0.1683, 1.0) - - mix = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': multiply.outputs["Vector"], 'Color1': (0.1878, 0.15, 0.0976, 1.0), 'Color2': colorramp_1.outputs["Color"]}) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': mix, 'Roughness': 0.0}) - - mix_shader = nw.new_node(Nodes.MixShader, - input_kwargs={'Fac': 0.5917, 1: principled_bsdf}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': mix_shader}) + mix = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": multiply.outputs["Vector"], + "Color1": (0.1878, 0.15, 0.0976, 1.0), + "Color2": colorramp_1.outputs["Color"], + }, + ) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, input_kwargs={"Base Color": mix, "Roughness": 0.0} + ) + + mix_shader = nw.new_node( + Nodes.MixShader, input_kwargs={"Fac": 0.5917, 1: principled_bsdf} + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": mix_shader} + ) def apply(obj, selection=None, **kwargs): - surface.add_material(obj, shader_horn, selection=selection) \ No newline at end of file + surface.add_material(obj, shader_horn, selection=selection) diff --git a/infinigen/assets/materials/ice.py b/infinigen/assets/materials/ice.py index c26f2b3f3..b3479fe50 100644 --- a/infinigen/assets/materials/ice.py +++ b/infinigen/assets/materials/ice.py @@ -6,110 +6,158 @@ import gin from numpy.random import uniform -from infinigen.core.nodes.node_wrangler import Nodes from infinigen.core import surface -from infinigen.core.util.organization import SurfaceTypes +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler from infinigen.core.util.math import FixedSeed +from infinigen.core.util.organization import SurfaceTypes from infinigen.core.util.random import random_color_neighbour type = SurfaceTypes.SDFPerturb mod_name = "geo_ice" name = "ice" -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface def shader_ice(nw: NodeWrangler): geometry = nw.new_node(Nodes.NewGeometry) - + noise_value = nw.new_node(Nodes.Value) noise_value.outputs[0].default_value = 6.5000 - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': geometry.outputs["Position"], 'W': noise_value, 'Scale': 4.0000, 'Detail': 15.0000}, - attrs={'noise_dimensions': '4D'}) - - color_ramp = nw.new_node(Nodes.ColorRamp, input_kwargs={'Fac': noise_texture.outputs["Fac"]}) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": geometry.outputs["Position"], + "W": noise_value, + "Scale": 4.0000, + "Detail": 15.0000, + 
}, + attrs={"noise_dimensions": "4D"}, + ) + + color_ramp = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": noise_texture.outputs["Fac"]} + ) color_ramp.color_ramp.elements[0].position = 0.5000 color_ramp.color_ramp.elements[0].color = [0.0844, 0.0844, 0.0844, 1.0000] color_ramp.color_ramp.elements[1].position = 0.7500 color_ramp.color_ramp.elements[1].color = [1.0000, 1.0000, 1.0000, 1.0000] - + col_ice = random_color_neighbour((0.6469, 0.6947, 0.9522, 1.0000), 0.05, 0.1, 0.1) principled_bsdf = nw.new_node( Nodes.PrincipledBSDF, input_kwargs={ - 'Subsurface': 1.0000, - 'Subsurface Radius': (0.0010, 0.0010, 0.0020), - 'Subsurface Color': tuple(col_ice), - 'Roughness': color_ramp.outputs["Color"], - 'IOR': 1.3100 + "Subsurface": 1.0000, + "Subsurface Radius": (0.0010, 0.0010, 0.0020), + "Subsurface Color": tuple(col_ice), + "Roughness": color_ramp.outputs["Color"], + "IOR": 1.3100, }, ) - - material_output = nw.new_node(Nodes.MaterialOutput, input_kwargs={'Surface': principled_bsdf}, attrs={'is_active_output': True}) + + material_output = nw.new_node( + Nodes.MaterialOutput, + input_kwargs={"Surface": principled_bsdf}, + attrs={"is_active_output": True}, + ) return principled_bsdf + @gin.configurable def geo_ice(nw: NodeWrangler, random_seed=0, selection=None): # Code generated using version 2.6.4 of the node_transpiler with FixedSeed(random_seed): - group_input = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketGeometry', 'Geometry', None)]) - + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) + normal_1 = nw.new_node(Nodes.InputNormal) - + position_1 = nw.new_node(Nodes.InputPosition) - - noise_texture_2 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': position_1, 'W': nw.new_value(uniform(0, 10), "W1"), 'Scale': nw.new_value(uniform(7, 9), "Scale1"), 'Detail': 20.0000, 'Roughness': 1.0000}, - attrs={'noise_dimensions': '4D'}) - - colorramp = nw.new_node(Nodes.ColorRamp, input_kwargs={'Fac': noise_texture_2.outputs["Fac"]}) + + noise_texture_2 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": position_1, + "W": nw.new_value(uniform(0, 10), "W1"), + "Scale": nw.new_value(uniform(7, 9), "Scale1"), + "Detail": 20.0000, + "Roughness": 1.0000, + }, + attrs={"noise_dimensions": "4D"}, + ) + + colorramp = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": noise_texture_2.outputs["Fac"]} + ) colorramp.color_ramp.elements[0].position = 0.5000 colorramp.color_ramp.elements[0].color = [0.0000, 0.0000, 0.0000, 1.0000] colorramp.color_ramp.elements[1].position = 1.0000 colorramp.color_ramp.elements[1].color = [1.0000, 1.0000, 1.0000, 1.0000] - - scale = nw.new_node(Nodes.VectorMath, - input_kwargs={0: colorramp.outputs["Color"], 'Scale': 0.0300}, - attrs={'operation': 'SCALE'}) - + + scale = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: colorramp.outputs["Color"], "Scale": 0.0300}, + attrs={"operation": "SCALE"}, + ) + normal_2 = nw.new_node(Nodes.InputNormal) - + position_2 = nw.new_node(Nodes.InputPosition) - - noise_texture_3 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': position_2, 'W': nw.new_value(uniform(0, 10), "W2"), 'Scale': nw.new_value(uniform(1.3, 1.7), "Scale2"), 'Detail': 15.0000, 'Roughness': 0.7000, 'Distortion': 1.5000}, - attrs={'noise_dimensions': '4D'}) - - multiply = nw.new_node(Nodes.VectorMath, + + noise_texture_3 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": position_2, + "W": nw.new_value(uniform(0, 10), "W2"), + "Scale": 
nw.new_value(uniform(1.3, 1.7), "Scale2"), + "Detail": 15.0000, + "Roughness": 0.7000, + "Distortion": 1.5000, + }, + attrs={"noise_dimensions": "4D"}, + ) + + multiply = nw.new_node( + Nodes.VectorMath, input_kwargs={0: normal_2, 1: noise_texture_3.outputs["Fac"]}, - attrs={'operation': 'MULTIPLY'}) - - scale_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: multiply.outputs["Vector"], 'Scale': 0.0800}, - attrs={'operation': 'SCALE'}) - - multiply_add = nw.new_node(Nodes.VectorMath, - input_kwargs={0: normal_1, 1: scale.outputs["Vector"], 2: scale_1.outputs["Vector"]}, - attrs={'operation': 'MULTIPLY_ADD'}) - + attrs={"operation": "MULTIPLY"}, + ) + + scale_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: multiply.outputs["Vector"], "Scale": 0.0800}, + attrs={"operation": "SCALE"}, + ) + + multiply_add = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: normal_1, + 1: scale.outputs["Vector"], + 2: scale_1.outputs["Vector"], + }, + attrs={"operation": "MULTIPLY_ADD"}, + ) + offset = multiply_add.outputs["Vector"] if selection is not None: offset = nw.multiply(offset, surface.eval_argument(nw, selection)) - - set_position_1 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 'Offset': offset}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': set_position_1}, attrs={'is_active_output': True}) + + set_position_1 = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + "Offset": offset, + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": set_position_1}, + attrs={"is_active_output": True}, + ) def apply(obj, selection=None, **kwargs): surface.add_geomod(obj, geo_ice, selection=selection) surface.add_material(obj, shader_ice, selection=selection) - diff --git a/infinigen/assets/materials/invisible_to_camera.py b/infinigen/assets/materials/invisible_to_camera.py index 3c3e1723e..529fb849d 100644 --- a/infinigen/assets/materials/invisible_to_camera.py +++ b/infinigen/assets/materials/invisible_to_camera.py @@ -5,33 +5,43 @@ # Authors: Alexander Raistrick import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category + from infinigen.core import surface +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler + def shader_invisible(nw: NodeWrangler): # Code generated using version 2.6.5 of the node_transpiler light_path = nw.new_node(Nodes.LightPath) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, input_kwargs={'Roughness': 0.7697}) - + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, input_kwargs={"Roughness": 0.7697} + ) + transparent_bsdf = nw.new_node(Nodes.TransparentBSDF) - - mix_shader = nw.new_node(Nodes.MixShader, - input_kwargs={'Fac': light_path.outputs["Is Camera Ray"], 1: principled_bsdf, 2: transparent_bsdf}) - - material_output = nw.new_node(Nodes.MaterialOutput, input_kwargs={'Surface': mix_shader}, attrs={'is_active_output': True}) -def apply(obj, selection=None, **kwargs): + mix_shader = nw.new_node( + Nodes.MixShader, + input_kwargs={ + "Fac": light_path.outputs["Is Camera Ray"], + 1: principled_bsdf, + 2: transparent_bsdf, + }, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, + input_kwargs={"Surface": mix_shader}, + attrs={"is_active_output": True}, + ) + +def apply(obj, selection=None, 
**kwargs): if not isinstance(obj, list): obj = [obj] for o in obj: for i in range(len(o.material_slots)): - bpy.ops.object.material_slot_remove({'object': o}) - surface.add_material(obj, shader_invisible, selection=selection) \ No newline at end of file + bpy.ops.object.material_slot_remove({"object": o}) + surface.add_material(obj, shader_invisible, selection=selection) diff --git a/infinigen/assets/materials/lamp_shaders.py b/infinigen/assets/materials/lamp_shaders.py index 55f437b78..15e89506e 100644 --- a/infinigen/assets/materials/lamp_shaders.py +++ b/infinigen/assets/materials/lamp_shaders.py @@ -3,16 +3,25 @@ # Authors: Hongyu Wen -from numpy.random import uniform as U, normal as N, randint as RI +from numpy.random import uniform as U + from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler + def shader_metal(nw: NodeWrangler): # Code generated using version 2.6.5 of the node_transpiler - anisotropic_bsdf = nw.new_node('ShaderNodeBsdfAnisotropic', - input_kwargs={'Color': (0.3224, 0.3224, 0.3224, 1.0000), 'Roughness': 0.1000}) + anisotropic_bsdf = nw.new_node( + "ShaderNodeBsdfAnisotropic", + input_kwargs={"Color": (0.3224, 0.3224, 0.3224, 1.0000), "Roughness": 0.1000}, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, + input_kwargs={"Surface": anisotropic_bsdf}, + attrs={"is_active_output": True}, + ) - material_output = nw.new_node(Nodes.MaterialOutput, input_kwargs={'Surface': anisotropic_bsdf}, attrs={'is_active_output': True}) def shader_lampshade(nw: NodeWrangler): # Code generated using version 2.6.5 of the node_transpiler @@ -21,39 +30,70 @@ def shader_lampshade(nw: NodeWrangler): object_info = nw.new_node(Nodes.ObjectInfo_Shader) - white_noise_texture = nw.new_node(Nodes.WhiteNoiseTexture, - input_kwargs={'Vector': object_info.outputs["Random"]}, - attrs={'noise_dimensions': '4D'}) + white_noise_texture = nw.new_node( + Nodes.WhiteNoiseTexture, + input_kwargs={"Vector": object_info.outputs["Random"]}, + attrs={"noise_dimensions": "4D"}, + ) - mix = nw.new_node(Nodes.Mix, - input_kwargs={0: 0.9000, 6: white_noise_texture.outputs["Color"], 7: (0.5000, 0.4444, 0.3669, 1.0000)}, - attrs={'data_type': 'RGBA'}) + mix = nw.new_node( + Nodes.Mix, + input_kwargs={ + 0: 0.9000, + 6: white_noise_texture.outputs["Color"], + 7: (0.5000, 0.4444, 0.3669, 1.0000), + }, + attrs={"data_type": "RGBA"}, + ) - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, input_kwargs={ - 'Base Color': mix.outputs[2], - 'Subsurface': U(0.03, 0.08), - 'Subsurface Radius': (0.1000, 0.1000, 0.1000), - 'Subsurface IOR': 1.6029, - 'Roughness': U(0.5, 0.8), - 'IOR': 4.0000, - 'Transmission': U(0.05, 0.2), - 'Transmission Roughness': 1.0000 - } + "Base Color": mix.outputs[2], + "Subsurface": U(0.03, 0.08), + "Subsurface Radius": (0.1000, 0.1000, 0.1000), + "Subsurface IOR": 1.6029, + "Roughness": U(0.5, 0.8), + "IOR": 4.0000, + "Transmission": U(0.05, 0.2), + "Transmission Roughness": 1.0000, + }, ) - - translucent_bsdf = nw.new_node(Nodes.PrincipledBSDF, input_kwargs={'Base Color': mix.outputs[2], 'Roughness': 0.7, }) + translucent_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": mix.outputs[2], + "Roughness": 0.7, + }, + ) - mix_shader = nw.new_node(Nodes.MixShader, - input_kwargs={'Fac': light_path.outputs["Is Camera Ray"], 1: principled_bsdf, 2: translucent_bsdf}) + mix_shader = nw.new_node( + Nodes.MixShader, + input_kwargs={ + "Fac": light_path.outputs["Is Camera Ray"], + 1: principled_bsdf, + 2: 
translucent_bsdf, + }, + ) - material_output = nw.new_node(Nodes.MaterialOutput, input_kwargs={'Surface': mix_shader}, attrs={'is_active_output': True}) + material_output = nw.new_node( + Nodes.MaterialOutput, + input_kwargs={"Surface": mix_shader}, + attrs={"is_active_output": True}, + ) def shader_black(nw: NodeWrangler): # Code generated using version 2.6.5 of the node_transpiler - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, input_kwargs={'Base Color': (0.0039, 0.0039, 0.0039, 1.0000)}) + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={"Base Color": (0.0039, 0.0039, 0.0039, 1.0000)}, + ) - material_output = nw.new_node(Nodes.MaterialOutput, input_kwargs={'Surface': principled_bsdf}, attrs={'is_active_output': True}) + material_output = nw.new_node( + Nodes.MaterialOutput, + input_kwargs={"Surface": principled_bsdf}, + attrs={"is_active_output": True}, + ) diff --git a/infinigen/assets/materials/lava.py b/infinigen/assets/materials/lava.py index 7fa252cd3..f2c8782ff 100644 --- a/infinigen/assets/materials/lava.py +++ b/infinigen/assets/materials/lava.py @@ -9,19 +9,21 @@ import gin from mathutils import Vector -from infinigen.core.nodes import node_utils -from infinigen.core.nodes.node_wrangler import Nodes from numpy.random import uniform + from infinigen.core import surface -from infinigen.core.util.organization import SurfaceTypes -from infinigen.terrain.utils import drive_param +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes from infinigen.core.util.math import FixedSeed +from infinigen.core.util.organization import SurfaceTypes from infinigen.core.util.random import random_color_neighbour +from infinigen.terrain.utils import drive_param type = SurfaceTypes.BlenderDisplacement mod_name = "lava_geo" name = "lava" + def nodegroup_polynomial_base(nw): group_input = nw.new_node( Nodes.GroupInput, @@ -80,11 +82,13 @@ def nodegroup_polynomial_base(nw): group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Value": add_1}) + @node_utils.to_nodegroup("nodegroup_polynomial", singleton=False) def nodegroup_polynomial_geo(nw): nodegroup_polynomial_base(nw) -@node_utils.to_nodegroup("nodegroup_polynomial", singleton=False, type='ShaderNodeTree') + +@node_utils.to_nodegroup("nodegroup_polynomial", singleton=False, type="ShaderNodeTree") def nodegroup_polynomial_shader(nw): nodegroup_polynomial_base(nw) @@ -128,7 +132,6 @@ def lava_shader(nw): ) drive_param(voronoi_texture.inputs["W"], scale=0.003, offset=uniform(0, 10)) - colorramp_1 = nw.new_node( Nodes.ColorRamp, input_kwargs={"Fac": voronoi_texture.outputs["Distance"]} ) @@ -150,7 +153,6 @@ def lava_shader(nw): ) drive_param(voronoi_texture_1.inputs["W"], scale=0.003, offset=uniform(0, 10)) - colorramp_2 = nw.new_node( Nodes.ColorRamp, input_kwargs={"Fac": voronoi_texture_1.outputs["Distance"]} ) @@ -175,7 +177,8 @@ def lava_shader(nw): 0: ambient_occlusion_1.outputs["Color"], # determines how strong the small scale noise are # this makes the lava look turbulent - 1: 0 if uniform() < 0.2 else uniform(0.0, 0.5)}, + 1: 0 if uniform() < 0.2 else uniform(0.0, 0.5), + }, attrs={"operation": "SUBTRACT"}, ) @@ -200,12 +203,12 @@ def lava_shader(nw): colorramp.color_ramp.elements[1].position = 0.85 + amo_roc colorramp.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - invert = nw.new_node( - "ShaderNodeInvert", input_kwargs={"Color": lava_dir} - ) + invert = nw.new_node("ShaderNodeInvert", input_kwargs={"Color": lava_dir}) multiply = nw.new_node( - Nodes.Math, 
input_kwargs={0: invert, 1: max_lava_temp - min_lava_temp}, attrs={"operation": "MULTIPLY"} + Nodes.Math, + input_kwargs={0: invert, 1: max_lava_temp - min_lava_temp}, + attrs={"operation": "MULTIPLY"}, ) add_1 = nw.new_node(Nodes.Math, input_kwargs={0: min_lava_temp, 1: multiply}) @@ -214,24 +217,28 @@ def lava_shader(nw): "ShaderNodeBlackbody", input_kwargs={"Temperature": add_1} ) - noise_emission = nw.new_node(Nodes.NoiseTexture, - input_kwargs={"W": uniform(0, 10), "Scale": 0.5} + noise_emission = nw.new_node( + Nodes.NoiseTexture, input_kwargs={"W": uniform(0, 10), "Scale": 0.5} + ) + + strength_emission = nw.new_node( + Nodes.Math, input_kwargs={0: noise_emission.outputs["Fac"], 1: lava_emi} ) - - strength_emission = nw.new_node(Nodes.Math, input_kwargs={0: noise_emission.outputs["Fac"], 1: lava_emi}) emission_1 = nw.new_node( - "ShaderNodeEmission", input_kwargs={"Color": blackbody_1, "Strength": - strength_emission} + "ShaderNodeEmission", + input_kwargs={"Color": blackbody_1, "Strength": strength_emission}, ) - noise_bsdf = nw.new_node(Nodes.NoiseTexture, - input_kwargs={"W": uniform(0, 10), "Scale": 0.5, - "Detail": 10.0}, + noise_bsdf = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={"W": uniform(0, 10), "Scale": 0.5, "Detail": 10.0}, attrs={"noise_dimensions": "4D"}, ) - color_bsdf = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": noise_bsdf.outputs["Fac"]}) + color_bsdf = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": noise_bsdf.outputs["Fac"]} + ) color_bsdf.color_ramp.elements[0].position = 0.0 color_bsdf.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) @@ -252,14 +259,14 @@ def lava_shader(nw): }, ) - return mix_shader + @gin.configurable def lava_geo(nw, selection=None, random_seed=0, geometry=True): nw.force_input_consistency() if nw.node_group.type == "SHADER": - position = nw.new_node('ShaderNodeNewGeometry') + position = nw.new_node("ShaderNodeNewGeometry") # normal = (nw.new_node('ShaderNodeNewGeometry'), 1) else: position = nw.new_node(Nodes.InputPosition) @@ -271,7 +278,7 @@ def lava_geo(nw, selection=None, random_seed=0, geometry=True): wave_sca = nw.new_value(uniform(3.5, 4.5), "wave_sca") # direction of wave dir_x = uniform(-2, 2) - dir_y = nw.new_value(math.sqrt(5 - (dir_x ** 2)), "dir_y") + dir_y = nw.new_value(math.sqrt(5 - (dir_x**2)), "dir_y") dir_x = nw.new_value(dir_x, "dir_x") # print(f"{wave_sca=} {dir_x=} {dir_y=}") @@ -287,32 +294,16 @@ def lava_geo(nw, selection=None, random_seed=0, geometry=True): separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": position}) - group_3 = nw.scalar_divide( - nw.scalar_add( - separate_xyz.outputs["X"], - 200 - ), - 400 - ) + group_3 = nw.scalar_divide(nw.scalar_add(separate_xyz.outputs["X"], 200), 400) - group_4 = nw.scalar_divide( - nw.scalar_add( - separate_xyz.outputs["Y"], - 200 - ), - 400 - ) + group_4 = nw.scalar_divide(nw.scalar_add(separate_xyz.outputs["Y"], 200), 400) - group = nw.scalar_divide( - nw.scalar_add( - separate_xyz.outputs["Z"], - 0 - ), - 20 - ) + group = nw.scalar_divide(nw.scalar_add(separate_xyz.outputs["Z"], 0), 20) group_2 = nw.new_node( - nodegroup_polynomial_geo().name if nw.node_group.type != "SHADER" else nodegroup_polynomial_shader().name, + nodegroup_polynomial_geo().name + if nw.node_group.type != "SHADER" + else nodegroup_polynomial_shader().name, input_kwargs={ "X": group_3, "Y": group_4, @@ -332,13 +323,7 @@ def lava_geo(nw, selection=None, random_seed=0, geometry=True): attrs={"operation": "MULTIPLY_ADD"}, ) - group_1 = nw.scalar_divide( - 
nw.scalar_add( - multiply_add, - 0 - ), - 3 - ) + group_1 = nw.scalar_divide(nw.scalar_add(multiply_add, 0), 3) noise_texture = nw.new_node( Nodes.NoiseTexture, @@ -397,7 +382,11 @@ def lava_geo(nw, selection=None, random_seed=0, geometry=True): voronoi_texture = nw.new_node( Nodes.VoronoiTexture, - input_kwargs={"W": nw.new_value(uniform(0, 10), "voronoi_texture_w"), "Vector": position, "Scale": 1.0}, + input_kwargs={ + "W": nw.new_value(uniform(0, 10), "voronoi_texture_w"), + "Vector": position, + "Scale": 1.0, + }, attrs={"voronoi_dimensions": "4D", "feature": "SMOOTH_F1"}, ) @@ -423,14 +412,18 @@ def lava_geo(nw, selection=None, random_seed=0, geometry=True): groupinput = nw.new_node(Nodes.GroupInput) if selection is not None: offset = nw.multiply(offset, surface.eval_argument(nw, selection)) - set_position = nw.new_node(Nodes.SetPosition, input_kwargs={"Geometry": groupinput, "Offset": offset}) - nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': set_position}) + set_position = nw.new_node( + Nodes.SetPosition, input_kwargs={"Geometry": groupinput, "Offset": offset} + ) + nw.new_node(Nodes.GroupOutput, input_kwargs={"Geometry": set_position}) else: return lava_dir def apply(obj, selection=None, **kwargs): surface.add_geomod( - obj, lava_geo, selection=selection, + obj, + lava_geo, + selection=selection, ) surface.add_material(obj, lava_shader, selection=selection) diff --git a/infinigen/assets/materials/leather_and_fabrics/__init__.py b/infinigen/assets/materials/leather_and_fabrics/__init__.py deleted file mode 100644 index 751f12282..000000000 --- a/infinigen/assets/materials/leather_and_fabrics/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Lingjie Mei -from .general_fabric import shader_fabric -from .lined_fabric import shader_lined_fur_base -from .coarse_knit_fabric import shader_fabric_random as shader_coarse_fabric_random -from .fine_knit_fabric import shader_fabric_random as shader_fine_fabric_random -from .leather import shader_leather -from .sofa_fabric import shader_sofa_fabric - -from infinigen.core.util.random import random_general as rg -from .. import common -from ...utils.uv import unwrap_faces - -fabric_shader_list = 'weighted_choice', (1, shader_coarse_fabric_random), (1, shader_fine_fabric_random), \ - (2, shader_leather), (1, shader_sofa_fabric), # (1, shader_fabric), - - -def apply(obj, selection=None, **kwargs): - unwrap_faces(obj, selection) - common.apply(obj, rg(fabric_shader_list), selection=selection, **kwargs) diff --git a/infinigen/assets/materials/leather_and_fabrics/general_fabric.py b/infinigen/assets/materials/leather_and_fabrics/general_fabric.py deleted file mode 100644 index 33e1e987e..000000000 --- a/infinigen/assets/materials/leather_and_fabrics/general_fabric.py +++ /dev/null @@ -1,170 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
- -# Authors: Yiming Zuo -# Acknowledgement: This file draws inspiration https://www.youtube.com/watch?v=umrARvXC_MI by Ryan King Art - - -from infinigen.assets.materials import common - -import bpy -import bpy -import mathutils -import numpy as np -from numpy.random import uniform, normal, randint - -from infinigen.assets.utils.uv import ensure_uv, unwrap_faces -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface - - -def func_fabric(nw: NodeWrangler, **kwargs): - # Code generated using version 2.6.4 of the node_transpiler - - texture_coordinate = nw.new_node(Nodes.TextureCoord) - - group_input = { - 'Weave Scale': 0., - 'Color Pattern Scale': 0., - 'Color1': (0.7991, 0.1046, 0.1195, 1.0000), - 'Color2': (1.0000, 0.5271, 0.5711, 1.0000) - } - group_input.update(kwargs) - - wave_texture_1 = nw.new_node(Nodes.WaveTexture, input_kwargs={ - 'Vector': texture_coordinate.outputs["UV"], - 'Scale': group_input["Weave Scale"], - 'Distortion': 7.0000, - 'Detail': 15.0000 - }, attrs={'bands_direction': 'Y'}) - - value_2 = nw.new_node(Nodes.Value) - value_2.outputs[0].default_value = 0.1000 - - map_range = nw.new_node(Nodes.MapRange, input_kwargs={'Value': wave_texture_1.outputs["Color"], 1: value_2}) - - wave_texture = nw.new_node(Nodes.WaveTexture, input_kwargs={ - 'Vector': texture_coordinate.outputs["UV"], - 'Scale': group_input["Weave Scale"], - 'Distortion': 7.0000, - 'Detail': 15.0000 - }) - - map_range_1 = nw.new_node(Nodes.MapRange, input_kwargs={'Value': wave_texture.outputs["Color"], 1: value_2}) - - mix = nw.new_node(Nodes.Mix, - input_kwargs={6: map_range.outputs["Result"], 7: map_range_1.outputs["Result"]}, - attrs={'data_type': 'RGBA'}) - - greater_than = nw.new_node(Nodes.Math, input_kwargs={0: mix.outputs[2], 1: 0.1000}, - attrs={'operation': 'GREATER_THAN'}) - - transparent_bsdf = nw.new_node(Nodes.TransparentBSDF) - - less_than = nw.new_node(Nodes.Math, input_kwargs={0: group_input["Color Pattern Scale"], 1: 0.0001}, - attrs={'operation': 'LESS_THAN'}) - - brick_texture_2 = nw.new_node(Nodes.BrickTexture, input_kwargs={ - 'Vector': texture_coordinate.outputs["UV"], - 'Color1': group_input["Color1"], - 'Mortar': group_input["Color2"], - 'Scale': group_input["Color Pattern Scale"], - 'Mortar Size': 0.0000, - 'Bias': -1.0000, - 'Row Height': 0.5000 - }, attrs={'offset_frequency': 1, 'squash': 0.0000}) - - vector_rotate = nw.new_node(Nodes.VectorRotate, input_kwargs={ - 'Vector': texture_coordinate.outputs["UV"], - 'Rotation': (0.0000, 0.0000, 1.5708) - }, attrs={'rotation_type': 'EULER_XYZ'}) - - brick_texture = nw.new_node(Nodes.BrickTexture, input_kwargs={ - 'Vector': vector_rotate, - 'Color1': group_input["Color1"], - 'Mortar': group_input["Color2"], - 'Scale': group_input["Color Pattern Scale"], - 'Mortar Size': 0.0000, - 'Bias': -1.0000, - 'Row Height': 0.5000 - }, attrs={'offset_frequency': 1, 'squash': 0.0000}) - - mix_2 = nw.new_node(Nodes.Mix, input_kwargs={ - 0: 1.0000, - 6: brick_texture_2.outputs["Color"], - 7: brick_texture.outputs["Color"] - }, attrs={'data_type': 'RGBA', 'blend_type': 'ADD'}) - - mix_4 = nw.new_node(Nodes.Mix, input_kwargs={0: less_than, 6: mix_2.outputs[2], 7: group_input["Color1"]}, - attrs={'data_type': 'RGBA'}) - - mix_3 = nw.new_node(Nodes.Mix, input_kwargs={ - 0: mix.outputs[2], - 6: (0.0000, 0.0000, 0.0000, 1.0000), - 7: mix_4.outputs[2] - }, attrs={'data_type': 'RGBA'}) - - map_range_2 = 
nw.new_node(Nodes.MapRange, input_kwargs={'Value': mix.outputs[2], 3: 1.0000, 4: 0.9000}) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, input_kwargs={ - 'Base Color': mix_3.outputs[2], - 'Roughness': map_range_2.outputs["Result"], - 'Sheen': 1.0000, - 'Sheen Tint': 1.0000 - }) - - mix_shader = nw.new_node(Nodes.MixShader, - input_kwargs={'Fac': greater_than, 1: transparent_bsdf, 2: principled_bsdf}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: group_input["Weave Scale"], 1: 5.0000}, - attrs={'operation': 'MULTIPLY'}) - - musgrave_texture = nw.new_node(Nodes.MusgraveTexture, input_kwargs={'Scale': multiply}) - - mix_1 = nw.new_node(Nodes.Mix, input_kwargs={6: musgrave_texture, 7: mix.outputs[2]}, - attrs={'data_type': 'RGBA'}) - - subtract = nw.new_node(Nodes.Math, input_kwargs={0: mix_1.outputs[2]}, attrs={'operation': 'SUBTRACT'}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: subtract, 1: 0.0010}, attrs={'operation': 'MULTIPLY'}) - - displacement = nw.new_node( - 'ShaderNodeDisplacement', input_kwargs={'Height': multiply_1, 'Midlevel': 0.0000} - ) - - return {'Shader': mix_shader, 'Displacement': displacement} - - -def shader_fabric(nw: NodeWrangler, weave_scale=500.0, color_scale=None, color_1=None, color_2=None, **kwargs): - # Code generated using version 2.6.4 of the node_transpiler - - if color_scale is None: - color_scale = np.random.choice([0.0, uniform(5., 20.)]) - if color_1 is None: - color_1 = color_category('fabric') - if color_2 is None: - color_2 = color_category('white') - - group = func_fabric(nw, **{ - 'Weave Scale': weave_scale, - 'Color Pattern Scale': color_scale, - 'Color1': color_1, - 'Color2': color_2 - }) - - displacement = nw.new_node('ShaderNodeDisplacement', - input_kwargs={'Height': group["Displacement"], 'Midlevel': 0.0000}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': group["Shader"], 'Displacement': displacement - }, attrs={'is_active_output': True}) - - -def apply(obj, selection=None, **kwargs): - if not isinstance(obj, list): - obj = [obj] - for o in obj: - unwrap_faces(o, selection) - common.apply(obj, shader_fabric, selection, **kwargs) diff --git a/infinigen/assets/materials/leather_and_fabrics/leather.py b/infinigen/assets/materials/leather_and_fabrics/leather.py deleted file mode 100644 index 808f4a950..000000000 --- a/infinigen/assets/materials/leather_and_fabrics/leather.py +++ /dev/null @@ -1,110 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
- -# Authors: Yiming Zuo -# Acknowledgement: This file draws inspiration https://www.youtube.com/watch?v=In9V4-ih16o by Ryan King Art - - -import bpy -import bpy -import mathutils -from numpy.random import uniform, normal, randint -import functools - -from infinigen.assets.materials import common -from infinigen.assets.utils.uv import unwrap_faces -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category, hsv2rgba -from infinigen.core import surface -from infinigen.assets.color_fits import real_color_distribution - -@node_utils.to_nodegroup('nodegroup_leather', singleton=False, type='ShaderNodeTree') -def nodegroup_leather(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - texture_coordinate = nw.new_node(Nodes.TextureCoord) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Seed', 0.0000), - ('NodeSocketFloat', 'Scale', 0.0000), - ('NodeSocketColor', 'Base Color', (0.0000, 0.0000, 0.0000, 1.0000))]) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Scale"], 1: 10.0000}, attrs={'operation': 'MULTIPLY'}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': texture_coordinate.outputs["Object"], 'W': group_input.outputs["Seed"], 'Scale': multiply, 'Detail': 15.0000, 'Distortion': 0.2000}, - attrs={'noise_dimensions': '4D'}) - - color_ramp = nw.new_node(Nodes.ColorRamp, input_kwargs={'Fac': noise_texture.outputs["Fac"]}) - color_ramp.color_ramp.elements[0].position = 0.2841 - color_ramp.color_ramp.elements[0].color = [0.0000, 0.0000, 0.0000, 1.0000] - color_ramp.color_ramp.elements[1].position = 0.9455 - color_ramp.color_ramp.elements[1].color = [1.0000, 1.0000, 1.0000, 1.0000] - - mix = nw.new_node(Nodes.Mix, - input_kwargs={0: 0.0200, 6: texture_coordinate.outputs["Object"], 7: noise_texture.outputs["Color"]}, - attrs={'blend_type': 'LINEAR_LIGHT', 'data_type': 'RGBA'}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Scale"], 1: 800.0000}, attrs={'operation': 'MULTIPLY'}) - - voronoi_texture = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': mix.outputs[2], 'W': group_input.outputs["Seed"], 'Scale': multiply_1}, - attrs={'voronoi_dimensions': '4D', 'feature': 'DISTANCE_TO_EDGE'}) - - multiply_2 = nw.new_node(Nodes.Math, - input_kwargs={0: voronoi_texture.outputs["Distance"], 1: group_input.outputs["Scale"]}, - attrs={'use_clamp': True, 'operation': 'MULTIPLY'}) - - hue_saturation_value = nw.new_node('ShaderNodeHueSaturation', input_kwargs={'Value': 0.6000, 'Color': group_input.outputs["Base Color"]}) - - mix_1 = nw.new_node(Nodes.Mix, - input_kwargs={0: multiply_2, 6: group_input.outputs["Base Color"], 7: hue_saturation_value}, - attrs={'data_type': 'RGBA'}) - - hue_saturation_value_1 = nw.new_node('ShaderNodeHueSaturation', input_kwargs={'Value': 0.4000, 'Color': group_input.outputs["Base Color"]}) - - mix_2 = nw.new_node(Nodes.Mix, - input_kwargs={0: color_ramp.outputs["Color"], 6: mix_1.outputs[2], 7: hue_saturation_value_1}, - attrs={'data_type': 'RGBA'}) - - map_range = nw.new_node(Nodes.MapRange, input_kwargs={'Value': mix_2.outputs[2], 3: uniform(.3, .5), 4: uniform(.5, .7)}) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': mix_2.outputs[2], 'Roughness': map_range.outputs["Result"]}) - - multiply_3 = nw.new_node(Nodes.Math, input_kwargs={0: mix_1.outputs[2], 1: -0.2000}, 
attrs={'operation': 'MULTIPLY'}) - - multiply_4 = nw.new_node(Nodes.Math, input_kwargs={0: color_ramp.outputs["Color"], 1: 0.0500}, attrs={'operation': 'MULTIPLY'}) - - add = nw.new_node(Nodes.Math, input_kwargs={0: multiply_3, 1: multiply_4}) - - multiply_5 = nw.new_node(Nodes.Math, input_kwargs={0: add, 1: 0.0200}, attrs={'operation': 'MULTIPLY'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'BSDF': principled_bsdf, 'Displacement': multiply_5}, - attrs={'is_active_output': True}) - -def shader_leather(nw: NodeWrangler, scale=1.0, base_color=None, seed=None, **kwargs): - # Code generated using version 2.6.4 of the node_transpiler - if seed is None: - seed = uniform(-1000.0, 1000.0) - - # if base_color is None: - # base_color = color_category('leather') - base_color = real_color_distribution('sofa_leather') - - group = nw.new_node(nodegroup_leather().name, - input_kwargs={'Seed': seed, 'Scale': scale, 'Base Color': base_color}) - - displacement = nw.new_node('ShaderNodeDisplacement', input_kwargs={'Height': group.outputs["Displacement"], 'Midlevel': 0.0000}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': group.outputs["BSDF"], 'Displacement': displacement}, - attrs={'is_active_output': True}) - - -def apply(obj, selection=None, **kwargs): - unwrap_faces(obj, selection) - common.apply(obj, shader_leather, selection=selection, **kwargs) - diff --git a/infinigen/assets/materials/leather_and_fabrics/sofa_fabric.py b/infinigen/assets/materials/leather_and_fabrics/sofa_fabric.py deleted file mode 100644 index 03292592a..000000000 --- a/infinigen/assets/materials/leather_and_fabrics/sofa_fabric.py +++ /dev/null @@ -1,40 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
- -# Authors: Lingjie Mei -from numpy.random import uniform - -from infinigen.assets.materials import common -from infinigen.assets.utils.uv import unwrap_faces -from infinigen.core.nodes import NodeWrangler, Nodes -from infinigen.core.util.color import color_category - - -def shader_sofa_fabric(nw: NodeWrangler, scale=1, **kwargs): - # Code generated using version 2.6.4 of the node_transpiler - - attribute = nw.new_node(Nodes.Attribute, attrs={'attribute_name': 'UVMap'}) - attribute = nw.new_node(Nodes.Mapping,[attribute],input_kwargs={'Scale':[scale]*3}) - - rgb = nw.new_node(Nodes.RGB) - rgb.outputs[0].default_value = color_category('fabric') - - brightness_contrast = nw.new_node('ShaderNodeBrightContrast', input_kwargs={'Color': rgb, 'Bright': uniform(-0.1500, -0.05)}) - - brick_texture = nw.new_node(Nodes.BrickTexture, - input_kwargs={'Vector': attribute.outputs["Vector"], 'Color1': rgb, 'Color2': brightness_contrast, 'Scale': 276.9800, 'Mortar Size': 0.0100, 'Mortar Smooth': 1.0000, 'Bias': 0.5000, 'Row Height': 0.1000}, - attrs={'offset': 0.5479, 'squash_frequency': 1}) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': brick_texture.outputs["Color"], 'Roughness': 0.8624, 'Sheen': 1.0000}) - - displacement = nw.new_node(Nodes.Displacement, input_kwargs={'Height': brick_texture.outputs["Fac"]}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': principled_bsdf, 'Displacement': displacement}, - attrs={'is_active_output': True}) - -def apply(obj, selection=None, **kwargs): - unwrap_faces(obj, selection) - common.apply(obj, shader_sofa_fabric, selection, **kwargs) - diff --git a/infinigen/assets/materials/leather_and_fabrics/velvet.py b/infinigen/assets/materials/leather_and_fabrics/velvet.py deleted file mode 100644 index 3f7f8e2a4..000000000 --- a/infinigen/assets/materials/leather_and_fabrics/velvet.py +++ /dev/null @@ -1,86 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
- -# Authors: Stamatis Alexandropoulos -# Acknowledgement: This file draws inspiration from https://www.youtube.com/watch?v=55MMAnTYhWI by Dikko - -import bpy -import mathutils -from infinigen.assets.materials import common -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface - - - -def shader_velvet(nw: NodeWrangler, **kwargs): - # Code generated using version 2.6.5 of the node_transpiler - - texture_coordinate = nw.new_node(Nodes.TextureCoord) - - reroute = nw.new_node(Nodes.Reroute, input_kwargs={'Input': texture_coordinate.outputs["Object"]}) - - mapping = nw.new_node(Nodes.Mapping, input_kwargs={'Vector': reroute}) - - voronoi_texture = nw.new_node(Nodes.VoronoiTexture, input_kwargs={'Vector': mapping, 'Scale': 1.0000}) - - mix_6 = nw.new_node(Nodes.Mix, input_kwargs={0: 0.1125, 6: voronoi_texture.outputs["Color"]}, attrs={'data_type': 'RGBA'}) - - musgrave_texture = nw.new_node(Nodes.MusgraveTexture, - input_kwargs={'Vector': mapping, 'Scale': 9.6000, 'Detail': 11.4000, 'Dimension': 0.1000, 'Lacunarity': 1.9000}, - attrs={'musgrave_type': 'MULTIFRACTAL'}) - - mix = nw.new_node(Nodes.Mix, - input_kwargs={0: uniform(0,0.8), 6: musgrave_texture, 7: (0.6044, 0.6044, 0.6044, 1.0000)}, - attrs={'data_type': 'RGBA', 'blend_type': 'MULTIPLY'}) - - mix_1 = nw.new_node(Nodes.Mix, input_kwargs={6: mix_6.outputs[2], 7: mix.outputs[2]}, attrs={'data_type': 'RGBA'}) - - color_ramp = nw.new_node(Nodes.ColorRamp, input_kwargs={'Fac': mix_1.outputs[2]}) - color_ramp.color_ramp.elements[0].position = 0.0000 - color_ramp.color_ramp.elements[0].color = [1.0000, 1.0000, 1.0000, 1.0000] - color_ramp.color_ramp.elements[1].position = 0.8455 - color_ramp.color_ramp.elements[1].color = [0.0000, 0.0000, 0.0000, 1.0000] - - rgb = nw.new_node(Nodes.RGB) - rgb.outputs[0].default_value = color_category('textile') - # (0.3547, 0.3018, 0.3087, 1.0000) - - brightness_contrast = nw.new_node('ShaderNodeBrightContrast', input_kwargs={'Color': rgb, 'Bright': 0.0500}) - - mix_2 = nw.new_node(Nodes.Mix, - input_kwargs={0: color_ramp.outputs["Color"], 6: brightness_contrast, 7: rgb}, - attrs={'data_type': 'RGBA'}) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': mix_2.outputs[2], 'Specular': 0.0000, 'Roughness': uniform(0.4,0.9), 'Anisotropic': 0.7614, 'Anisotropic Rotation': 1.0000, 'Sheen': 16.2273, 'Sheen Tint': 1.0000}) - - mapping_1 = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': reroute, 'Rotation': (0.0000, 0.0000, 1.0157), 'Scale': (2.2000, 2.2000, 2.2000)}) - - wave_texture_1 = nw.new_node(Nodes.WaveTexture, - input_kwargs={'Vector': mapping_1, 'Scale': 500.0000, 'Distortion': 4.0000, 'Detail': 6.7000, 'Detail Scale': 1.5000, 'Detail Roughness': 0.4308}, - attrs={'bands_direction': 'DIAGONAL'}) - - mix_3 = nw.new_node(Nodes.Mix, - input_kwargs={0: 1.0000, 6: mapping_1, 7: wave_texture_1.outputs["Color"]}, - attrs={'data_type': 'RGBA', 'blend_type': 'MULTIPLY'}) - - mix_4 = nw.new_node(Nodes.Mix, - input_kwargs={0: 1.0000, 6: color_ramp.outputs["Color"], 7: mix_3.outputs[2]}, - attrs={'data_type': 'RGBA', 'blend_type': 'MULTIPLY'}) - - displacement = nw.new_node(Nodes.Displacement, input_kwargs={'Height': mix_4.outputs[2], 'Midlevel': 0.0000, 'Scale': 0.0150}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': principled_bsdf, 'Displacement': 
displacement}, - attrs={'is_active_output': True}) - - - -def apply(obj, selection=None, **kwargs): - common.apply(obj, shader_velvet, selection, **kwargs) - # surface.add_material(obj, shader_velvet, selection=selection) -# apply(bpy.context.active_object) \ No newline at end of file diff --git a/infinigen/assets/materials/marble_regular.py b/infinigen/assets/materials/marble_regular.py index 1da008c3c..73fe095a5 100644 --- a/infinigen/assets/materials/marble_regular.py +++ b/infinigen/assets/materials/marble_regular.py @@ -4,55 +4,88 @@ # Authors: Zeyu Ma # Acknowledgement: This file draws inspiration from https://physbam.stanford.edu/cs448x/old/Procedural_Noise(2f)Perlin_Noise.html -import bpy -import mathutils import numpy as np -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core import surface +from numpy.random import uniform +from infinigen.core import surface +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler def shader_material_001(nw: NodeWrangler): # Code generated using version 2.6.4 of the node_transpiler geometry = nw.new_node(Nodes.NewGeometry) - - mapping = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': geometry.outputs["Position"], 'Scale': (20.0000, 20.0000, 20.0000)}) - - roughness = nw.new_node(Nodes.Value, label='roughness ~ U(0.7,0.9)') + + mapping = nw.new_node( + Nodes.Mapping, + input_kwargs={ + "Vector": geometry.outputs["Position"], + "Scale": (20.0000, 20.0000, 20.0000), + }, + ) + + roughness = nw.new_node(Nodes.Value, label="roughness ~ U(0.7,0.9)") roughness.outputs[0].default_value = uniform(0.7, 0.9) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': mapping, 'Scale': 0.1000, 'Detail': 9.0000, 'Roughness': roughness, 'Distortion': 0.2000}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: noise_texture.outputs["Fac"], 1: 20.0000}, attrs={'operation': 'MULTIPLY'}) - + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": mapping, + "Scale": 0.1000, + "Detail": 9.0000, + "Roughness": roughness, + "Distortion": 0.2000, + }, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: noise_texture.outputs["Fac"], 1: 20.0000}, + attrs={"operation": "MULTIPLY"}, + ) + random_plane_angle = uniform(0, 2 * np.pi) - - dot_product = nw.new_node(Nodes.VectorMath, - input_kwargs={0: mapping, 1: (np.cos(random_plane_angle), np.sin(random_plane_angle), 0.0000)}, - attrs={'operation': 'DOT_PRODUCT'}) - - add = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: dot_product.outputs["Value"]}) - - sine = nw.new_node(Nodes.Math, input_kwargs={0: add}, attrs={'operation': 'SINE'}) - + + dot_product = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: mapping, + 1: (np.cos(random_plane_angle), np.sin(random_plane_angle), 0.0000), + }, + attrs={"operation": "DOT_PRODUCT"}, + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: multiply, 1: dot_product.outputs["Value"]} + ) + + sine = nw.new_node(Nodes.Math, input_kwargs={0: add}, attrs={"operation": "SINE"}) + add_1 = nw.new_node(Nodes.Math, input_kwargs={0: sine, 1: 1.0000}) - - darkness = nw.new_node(Nodes.Value, label='darkness ~ U(0,1)') + + darkness = nw.new_node(Nodes.Value, label="darkness ~ U(0,1)") darkness.outputs[0].default_value = uniform(0.0, 1.0) - - map_range = nw.new_node(Nodes.MapRange, input_kwargs={'Value': darkness, 3: 0.2000, 4: 0.3000}) - - power = nw.new_node(Nodes.Math, input_kwargs={0: add_1, 1: 
map_range.outputs["Result"]}, attrs={'operation': 'POWER'}) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, input_kwargs={'Base Color': power}) - - material_output = nw.new_node(Nodes.MaterialOutput, input_kwargs={'Surface': principled_bsdf}, attrs={'is_active_output': True}) + map_range = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": darkness, 3: 0.2000, 4: 0.3000} + ) + + power = nw.new_node( + Nodes.Math, + input_kwargs={0: add_1, 1: map_range.outputs["Result"]}, + attrs={"operation": "POWER"}, + ) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, input_kwargs={"Base Color": power} + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, + input_kwargs={"Surface": principled_bsdf}, + attrs={"is_active_output": True}, + ) def apply(obj, selection=None, **kwargs): - surface.add_material(obj, shader_material_001, selection=selection) \ No newline at end of file + surface.add_material(obj, shader_material_001, selection=selection) diff --git a/infinigen/assets/materials/marble_voronoi.py b/infinigen/assets/materials/marble_voronoi.py index b1119c8b3..9ebfb7f6d 100644 --- a/infinigen/assets/materials/marble_voronoi.py +++ b/infinigen/assets/materials/marble_voronoi.py @@ -4,50 +4,81 @@ # Authors: Zeyu Ma # Acknowledgement: This file draws inspiration from https://www.youtube.com/watch?v=wTzk9T06gdw by Ryan King Art -import bpy -import mathutils import numpy as np -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core import surface +from numpy.random import uniform +from infinigen.core import surface +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler def shader_material(nw: NodeWrangler): # Code generated using version 2.6.4 of the node_transpiler geometry = nw.new_node(Nodes.NewGeometry) - - mapping = nw.new_node(Nodes.Mapping, input_kwargs={'Vector': geometry.outputs["Position"]}) - - roughness = nw.new_node(Nodes.Value, label='roughness ~ U(0.5,0.7)') + + mapping = nw.new_node( + Nodes.Mapping, input_kwargs={"Vector": geometry.outputs["Position"]} + ) + + roughness = nw.new_node(Nodes.Value, label="roughness ~ U(0.5,0.7)") roughness.outputs[0].default_value = uniform(0.5, 0.7) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': mapping, 'Scale': 2.0000, 'Detail': 9.0000, 'Roughness': roughness}) - + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": mapping, + "Scale": 2.0000, + "Detail": 9.0000, + "Roughness": roughness, + }, + ) + random_plane_angle = uniform(0, 2 * np.pi) - dot_product = nw.new_node(Nodes.VectorMath, - input_kwargs={0: mapping, 1: (np.cos(random_plane_angle), np.sin(random_plane_angle), 0.0000)}, - attrs={'operation': 'DOT_PRODUCT'}) - - add = nw.new_node(Nodes.VectorMath, input_kwargs={0: noise_texture.outputs["Color"], 1: dot_product.outputs["Value"]}) - - voronoi_texture = nw.new_node(Nodes.VoronoiTexture, input_kwargs={'Vector': add.outputs["Vector"]}) - - colorramp = nw.new_node(Nodes.ColorRamp, input_kwargs={'Fac': voronoi_texture.outputs["Distance"]}) + dot_product = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: mapping, + 1: (np.cos(random_plane_angle), np.sin(random_plane_angle), 0.0000), + }, + attrs={"operation": "DOT_PRODUCT"}, + ) + + add = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: noise_texture.outputs["Color"], + 1: dot_product.outputs["Value"], + }, + ) + + voronoi_texture = nw.new_node( + Nodes.VoronoiTexture, input_kwargs={"Vector": 
add.outputs["Vector"]} + ) + + colorramp = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": voronoi_texture.outputs["Distance"]} + ) colorramp.color_ramp.elements[0].position = uniform(0.4, 0.5) colorramp.color_ramp.elements[0].color = [1.0000, 1.0000, 1.0000, 1.0000] colorramp.color_ramp.elements[1].position = 0.9600 colorramp.color_ramp.elements[1].color = [0.0000, 0.0000, 0.0000, 1.0000] - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': colorramp.outputs["Color"], 'Metallic': 0.5000, 'Roughness': 0.0000}) - - material_output = nw.new_node(Nodes.MaterialOutput, input_kwargs={'Surface': principled_bsdf}, attrs={'is_active_output': True}) + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": colorramp.outputs["Color"], + "Metallic": 0.5000, + "Roughness": 0.0000, + }, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, + input_kwargs={"Surface": principled_bsdf}, + attrs={"is_active_output": True}, + ) def apply(obj, selection=None, **kwargs): - surface.add_material(obj, shader_material, selection=selection) \ No newline at end of file + surface.add_material(obj, shader_material, selection=selection) diff --git a/infinigen/assets/materials/metal/__init__.py b/infinigen/assets/materials/metal/__init__.py index c427a1357..a00b68ae9 100644 --- a/infinigen/assets/materials/metal/__init__.py +++ b/infinigen/assets/materials/metal/__init__.py @@ -8,13 +8,18 @@ from numpy.random import uniform from infinigen.core.util.color import hsv2rgba, rgb2hsv -from infinigen.core.util.random import random_general as rg, log_uniform +from infinigen.core.util.random import log_uniform +from infinigen.core.util.random import random_general as rg + +from .. import common +from ..bark_random import hex_to_rgb from . import ( - brushed_metal, galvanized_metal, grained_and_polished_metal, hammered_metal, + brushed_metal, + galvanized_metal, + grained_and_polished_metal, + hammered_metal, metal_basic, ) -from .. 
import common -from ..bark_random import hex_to_rgb def apply(obj, selection=None, metal_color=None, **kwargs): @@ -25,32 +30,63 @@ def apply(obj, selection=None, metal_color=None, **kwargs): def get_shader(): return np.random.choice( - [brushed_metal.shader_brushed_metal, galvanized_metal.shader_galvanized_metal, + [ + brushed_metal.shader_brushed_metal, + galvanized_metal.shader_galvanized_metal, grained_and_polished_metal.shader_grained_metal, - hammered_metal.shader_hammered_metal] + hammered_metal.shader_hammered_metal, + ] ) -plain_colors = 'weighted_choice', (.5, 0xfdd017), (1, 0xc0c0c0), (1, 0x8c7853), (.5, 0xb87333), (.5, 0xb5a642), ( - 1, 0xbdbaae), (1, 0xa9acb6), (1, 0xb6afa9) -natural_colors = 'weighted_choice', (1, 0xc0c0c0), (1, 0x8c7853), (1, 0xbdbaae), (1, 0xa9acb6), (1, 0xb6afa9) +plain_colors = ( + "weighted_choice", + (0.5, 0xFDD017), + (1, 0xC0C0C0), + (1, 0x8C7853), + (0.5, 0xB87333), + (0.5, 0xB5A642), + (1, 0xBDBAAE), + (1, 0xA9ACB6), + (1, 0xB6AFA9), +) +natural_colors = ( + "weighted_choice", + (1, 0xC0C0C0), + (1, 0x8C7853), + (1, 0xBDBAAE), + (1, 0xA9ACB6), + (1, 0xB6AFA9), +) def sample_metal_color(metal_color=None, **kwargs): match metal_color: case np.ndarray(): return metal_color - case 'plain': + case "plain": h, s, v = rgb2hsv(hex_to_rgb(rg(plain_colors))[:-1]) - return hsv2rgba(h + uniform(-.1, .1), s + uniform(-.1, .1), v * log_uniform(.5, .2)) - case 'natural': + return hsv2rgba( + h + uniform(-0.1, 0.1), + s + uniform(-0.1, 0.1), + v * log_uniform(0.5, 0.2), + ) + case "natural": h, s, v = rgb2hsv(hex_to_rgb(rg(natural_colors))[:-1]) - return hsv2rgba(h + uniform(-.1, .1), s + uniform(-.1, .1), v * log_uniform(.5, .2)) - case 'bw': - return hsv2rgba(uniform(0, 1), uniform(.0, .2), log_uniform(.01, .2)) - case 'bw+natural': - return sample_metal_color('bw') if uniform() < .5 else sample_metal_color('natural') + return hsv2rgba( + h + uniform(-0.1, 0.1), + s + uniform(-0.1, 0.1), + v * log_uniform(0.5, 0.2), + ) + case "bw": + return hsv2rgba(uniform(0, 1), uniform(0.0, 0.2), log_uniform(0.01, 0.2)) + case "bw+natural": + return ( + sample_metal_color("bw") + if uniform() < 0.5 + else sample_metal_color("natural") + ) case _: - if uniform() < .2: - return sample_metal_color('natural') - return hsv2rgba(uniform(0, 1), uniform(.3, .6), log_uniform(.02, .5)) + if uniform() < 0.2: + return sample_metal_color("natural") + return hsv2rgba(uniform(0, 1), uniform(0.3, 0.6), log_uniform(0.02, 0.5)) diff --git a/infinigen/assets/materials/metal/brushed_metal.py b/infinigen/assets/materials/metal/brushed_metal.py index 18d879394..6611fb1cb 100644 --- a/infinigen/assets/materials/metal/brushed_metal.py +++ b/infinigen/assets/materials/metal/brushed_metal.py @@ -5,83 +5,165 @@ # Acknowledgement: This file draws inspiration https://www.youtube.com/watch?v=QcAMYRgR03k by blenderian -import bpy -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category +from numpy.random import uniform + from infinigen.core import surface +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -@node_utils.to_nodegroup('nodegroup_brushed_metal', singleton=False, type='ShaderNodeTree') +@node_utils.to_nodegroup( + "nodegroup_brushed_metal", singleton=False, type="ShaderNodeTree" +) def nodegroup_brushed_metal(nw: NodeWrangler): # Code generated 
using version 2.6.4 of the node_transpiler texture_coordinate = nw.new_node(Nodes.TextureCoord) - mapping_1 = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': texture_coordinate.outputs["Object"], 'Scale': (0.2000, 0.2000, 5.0000)}) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketColor', 'Base Color', (0.8000, 0.8000, 0.8000, 1.0000)), - ('NodeSocketFloat', 'Scale', 0.0000), - ('NodeSocketFloat', 'Seed', 0.0000)]) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Scale"], 1: 100.0000}, attrs={'operation': 'MULTIPLY'}) - - noise_texture_2 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': mapping_1, 'W': group_input.outputs["Seed"], 'Scale': multiply, 'Detail': 15.0000, 'Roughness': 0.4000, 'Distortion': 0.1000}, - attrs={'noise_dimensions': '4D'}) - - mapping = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': texture_coordinate.outputs["Object"], 'Scale': (1.0000, 1.0000, 20.0000)}) - - noise_texture_1 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': texture_coordinate.outputs["Object"], 'W': group_input.outputs["Seed"], 'Scale': 0.1000, 'Detail': 15.0000, 'Roughness': 0.0000}, - attrs={'noise_dimensions': '4D'}) - - mix = nw.new_node(Nodes.Mix, + mapping_1 = nw.new_node( + Nodes.Mapping, + input_kwargs={ + "Vector": texture_coordinate.outputs["Object"], + "Scale": (0.2000, 0.2000, 5.0000), + }, + ) + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketColor", "Base Color", (0.8000, 0.8000, 0.8000, 1.0000)), + ("NodeSocketFloat", "Scale", 0.0000), + ("NodeSocketFloat", "Seed", 0.0000), + ], + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Scale"], 1: 100.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + noise_texture_2 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": mapping_1, + "W": group_input.outputs["Seed"], + "Scale": multiply, + "Detail": 15.0000, + "Roughness": 0.4000, + "Distortion": 0.1000, + }, + attrs={"noise_dimensions": "4D"}, + ) + + mapping = nw.new_node( + Nodes.Mapping, + input_kwargs={ + "Vector": texture_coordinate.outputs["Object"], + "Scale": (1.0000, 1.0000, 20.0000), + }, + ) + + noise_texture_1 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": texture_coordinate.outputs["Object"], + "W": group_input.outputs["Seed"], + "Scale": 0.1000, + "Detail": 15.0000, + "Roughness": 0.0000, + }, + attrs={"noise_dimensions": "4D"}, + ) + + mix = nw.new_node( + Nodes.Mix, input_kwargs={0: 0.2000, 6: mapping, 7: noise_texture_1.outputs["Color"]}, - attrs={'data_type': 'RGBA'}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': mix.outputs[2], 'W': group_input.outputs["Seed"], 'Scale': multiply, 'Detail': 15.0000, 'Roughness': 0.6000, 'Distortion': 0.1000}, - attrs={'noise_dimensions': '4D'}) - - mix_1 = nw.new_node(Nodes.Mix, - input_kwargs={0: 1.0000, 6: noise_texture_2.outputs["Fac"], 7: noise_texture.outputs["Fac"]}, - attrs={'blend_type': 'DARKEN', 'data_type': 'RGBA'}) - - map_range = nw.new_node(Nodes.MapRange, input_kwargs={'Value': mix_1, 1: 0.4000, 2: 0.6000, 3: 0.8000, 4: 1.2000}) - - hue_saturation_value = nw.new_node('ShaderNodeHueSaturation', - input_kwargs={'Value': map_range.outputs["Result"], 'Color': group_input.outputs["Base Color"]}) - - map_range_1 = nw.new_node(Nodes.MapRange, input_kwargs={'Value': mix_1, 1: 0.4000, 2: 0.6000, 3: 0.2000, 4: 0.3000}) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': hue_saturation_value, 
'Metallic': 1.0000, 'Specular': 0.0000, 'Roughness': map_range_1.outputs["Result"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'BSDF': principled_bsdf, 'tmp_viewer': principled_bsdf}, - attrs={'is_active_output': True}) - -def shader_brushed_metal(nw: NodeWrangler, scale=1.0, base_color=None, seed=None, **kwargs): + attrs={"data_type": "RGBA"}, + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": mix.outputs[2], + "W": group_input.outputs["Seed"], + "Scale": multiply, + "Detail": 15.0000, + "Roughness": 0.6000, + "Distortion": 0.1000, + }, + attrs={"noise_dimensions": "4D"}, + ) + + mix_1 = nw.new_node( + Nodes.Mix, + input_kwargs={ + 0: 1.0000, + 6: noise_texture_2.outputs["Fac"], + 7: noise_texture.outputs["Fac"], + }, + attrs={"blend_type": "DARKEN", "data_type": "RGBA"}, + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": mix_1, 1: 0.4000, 2: 0.6000, 3: 0.8000, 4: 1.2000}, + ) + + hue_saturation_value = nw.new_node( + "ShaderNodeHueSaturation", + input_kwargs={ + "Value": map_range.outputs["Result"], + "Color": group_input.outputs["Base Color"], + }, + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": mix_1, 1: 0.4000, 2: 0.6000, 3: 0.2000, 4: 0.3000}, + ) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": hue_saturation_value, + "Metallic": 1.0000, + "Specular": 0.0000, + "Roughness": map_range_1.outputs["Result"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"BSDF": principled_bsdf, "tmp_viewer": principled_bsdf}, + attrs={"is_active_output": True}, + ) + + +def shader_brushed_metal( + nw: NodeWrangler, scale=1.0, base_color=None, seed=None, **kwargs +): # Code generated using version 2.6.4 of the node_transpiler if seed is None: seed = uniform(-1000.0, 1000.0) if base_color is None: from infinigen.assets.materials.metal import sample_metal_color + base_color = sample_metal_color(**kwargs) - group = nw.new_node(nodegroup_brushed_metal().name, - input_kwargs={'Base Color': base_color, 'Scale': scale, 'Seed': seed}) + group = nw.new_node( + nodegroup_brushed_metal().name, + input_kwargs={"Base Color": base_color, "Scale": scale, "Seed": seed}, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, + input_kwargs={"Surface": group.outputs["BSDF"]}, + attrs={"is_active_output": True}, + ) - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': group.outputs['BSDF']}, - attrs={'is_active_output': True}) def apply(obj, selection=None, **kwargs): - surface.add_material(obj, shader_brushed_metal, selection=selection, input_kwargs=kwargs) + surface.add_material( + obj, shader_brushed_metal, selection=selection, input_kwargs=kwargs + ) diff --git a/infinigen/assets/materials/metal/galvanized_metal.py b/infinigen/assets/materials/metal/galvanized_metal.py index ddf6a4040..8fdf82e7e 100644 --- a/infinigen/assets/materials/metal/galvanized_metal.py +++ b/infinigen/assets/materials/metal/galvanized_metal.py @@ -4,65 +4,119 @@ # Authors: Yiming Zuo # Acknowledgement: This file draws inspiration https://www.youtube.com/watch?v=ECl2pQ1jQm8 by Ryan King Art -import bpy -import bpy -import mathutils -from numpy.random import uniform, normal, randint +from numpy.random import uniform from infinigen.assets.materials import common -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category 
-from infinigen.core import surface +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -@node_utils.to_nodegroup('nodegroup_galvanized_metal', singleton=False, type='ShaderNodeTree') +@node_utils.to_nodegroup( + "nodegroup_galvanized_metal", singleton=False, type="ShaderNodeTree" +) def nodegroup_galvanized_metal(nw: NodeWrangler): # Code generated using version 2.6.4 of the node_transpiler - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketColor', 'Base Color', (0.8000, 0.8000, 0.8000, 1.0000)), - ('NodeSocketFloat', 'Scale', 0.0000), - ('NodeSocketFloat', 'Seed', 0.0000)]) + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketColor", "Base Color", (0.8000, 0.8000, 0.8000, 1.0000)), + ("NodeSocketFloat", "Scale", 0.0000), + ("NodeSocketFloat", "Seed", 0.0000), + ], + ) texture_coordinate = nw.new_node(Nodes.TextureCoord) - multiply = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Scale"], 1: 5.0000}, attrs={'operation': 'MULTIPLY'}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': texture_coordinate.outputs["Object"], 'W': group_input.outputs["Seed"], 'Scale': multiply, 'Detail': 15.0000, 'Roughness': 0.4000, 'Distortion': 0.2000}, - attrs={'noise_dimensions': '4D'}) - - mix = nw.new_node(Nodes.Mix, - input_kwargs={0: 0.0500, 6: texture_coordinate.outputs["Object"], 7: noise_texture.outputs["Color"]}, - attrs={'clamp_factor': False, 'data_type': 'RGBA'}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Scale"], 1: 500.0000}, attrs={'operation': 'MULTIPLY'}) - - voronoi_texture = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': mix.outputs[2], 'W': group_input.outputs["Seed"], 'Scale': multiply_1}, - attrs={'distance': 'MINKOWSKI', 'voronoi_dimensions': '4D'}) - - map_range = nw.new_node(Nodes.MapRange, input_kwargs={'Value': voronoi_texture.outputs["Color"], 3: 0.1000, 4: 0.5000}) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': group_input.outputs["Base Color"], 'Metallic': 1.0000, 'Specular': 0.0000, 'Roughness': map_range.outputs["Result"]}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'BSDF': principled_bsdf}, attrs={'is_active_output': True}) - - -def shader_galvanized_metal(nw: NodeWrangler, scale=1.0, base_color=None, seed=None, **kwargs): + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Scale"], 1: 5.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": texture_coordinate.outputs["Object"], + "W": group_input.outputs["Seed"], + "Scale": multiply, + "Detail": 15.0000, + "Roughness": 0.4000, + "Distortion": 0.2000, + }, + attrs={"noise_dimensions": "4D"}, + ) + + mix = nw.new_node( + Nodes.Mix, + input_kwargs={ + 0: 0.0500, + 6: texture_coordinate.outputs["Object"], + 7: noise_texture.outputs["Color"], + }, + attrs={"clamp_factor": False, "data_type": "RGBA"}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Scale"], 1: 500.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + voronoi_texture = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={ + "Vector": mix.outputs[2], + "W": group_input.outputs["Seed"], + "Scale": multiply_1, + }, + attrs={"distance": "MINKOWSKI", "voronoi_dimensions": "4D"}, + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": voronoi_texture.outputs["Color"], 3: 0.1000, 4: 0.5000}, + ) + + 
principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": group_input.outputs["Base Color"], + "Metallic": 1.0000, + "Specular": 0.0000, + "Roughness": map_range.outputs["Result"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"BSDF": principled_bsdf}, + attrs={"is_active_output": True}, + ) + + +def shader_galvanized_metal( + nw: NodeWrangler, scale=1.0, base_color=None, seed=None, **kwargs +): # Code generated using version 2.6.4 of the node_transpiler if seed is None: seed = uniform(-1000.0, 1000.0) if base_color is None: from infinigen.assets.materials.metal import sample_metal_color + base_color = sample_metal_color(**kwargs) - group = nw.new_node(nodegroup_galvanized_metal().name, - input_kwargs={'Base Color': base_color, 'Scale': scale, 'Seed': seed}) + group = nw.new_node( + nodegroup_galvanized_metal().name, + input_kwargs={"Base Color": base_color, "Scale": scale, "Seed": seed}, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, + input_kwargs={"Surface": group}, + attrs={"is_active_output": True}, + ) - material_output = nw.new_node(Nodes.MaterialOutput, input_kwargs={'Surface': group}, attrs={'is_active_output': True}) def apply(obj, selection=None, **kwargs): - common.apply(obj, shader_galvanized_metal, selection=selection,**kwargs) + common.apply(obj, shader_galvanized_metal, selection=selection, **kwargs) diff --git a/infinigen/assets/materials/metal/grained_and_polished_metal.py b/infinigen/assets/materials/metal/grained_and_polished_metal.py index 5e63f8219..89dfb23b2 100644 --- a/infinigen/assets/materials/metal/grained_and_polished_metal.py +++ b/infinigen/assets/materials/metal/grained_and_polished_metal.py @@ -3,55 +3,92 @@ # Authors: Yiming Zuo -import bpy -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category +from numpy.random import uniform + from infinigen.core import surface +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -@node_utils.to_nodegroup('nodegroup_grained_metal', singleton=False, type='ShaderNodeTree') +@node_utils.to_nodegroup( + "nodegroup_grained_metal", singleton=False, type="ShaderNodeTree" +) def nodegroup_grained_metal(nw: NodeWrangler): # Code generated using version 2.6.4 of the node_transpiler - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketColor', 'Base Color', (0.8000, 0.8000, 0.8000, 1.0000)), - ('NodeSocketFloat', 'Scale', 5.0000), - ('NodeSocketFloat', 'Seed', 0.0000), - ('NodeSocketFloat', 'Roughness', 0.0000)]) - - map_range = nw.new_node(Nodes.MapRange, input_kwargs={'Value': group_input.outputs["Roughness"], 3: 0.0500, 4: 0.2500}) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': group_input.outputs["Base Color"], 'Metallic': 1.0000, 'Specular': 0.0000, 'Roughness': map_range.outputs["Result"]}) + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketColor", "Base Color", (0.8000, 0.8000, 0.8000, 1.0000)), + ("NodeSocketFloat", "Scale", 5.0000), + ("NodeSocketFloat", "Seed", 0.0000), + ("NodeSocketFloat", "Roughness", 0.0000), + ], + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": group_input.outputs["Roughness"], 3: 0.0500, 4: 0.2500}, + ) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, 
+ input_kwargs={ + "Base Color": group_input.outputs["Base Color"], + "Metallic": 1.0000, + "Specular": 0.0000, + "Roughness": map_range.outputs["Result"], + }, + ) texture_coordinate = nw.new_node(Nodes.TextureCoord) - multiply = nw.new_node(Nodes.Math, + multiply = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["Scale"], 1: 2000.0000}, - attrs={'operation': 'MULTIPLY'}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': texture_coordinate.outputs["Object"], 'W': group_input.outputs["Seed"], 'Scale': multiply, 'Detail': 15.0000, 'Distortion': 2.0000}, - attrs={'noise_dimensions': '4D'}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: map_range.outputs["Result"], 1: 0.4000}, attrs={'operation': 'MULTIPLY'}) - - multiply_2 = nw.new_node(Nodes.Math, + attrs={"operation": "MULTIPLY"}, + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": texture_coordinate.outputs["Object"], + "W": group_input.outputs["Seed"], + "Scale": multiply, + "Detail": 15.0000, + "Distortion": 2.0000, + }, + attrs={"noise_dimensions": "4D"}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: map_range.outputs["Result"], 1: 0.4000}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, input_kwargs={0: noise_texture.outputs["Fac"], 1: multiply_1}, - attrs={'operation': 'MULTIPLY'}) + attrs={"operation": "MULTIPLY"}, + ) - multiply_3 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_2, 1: 0.010}, attrs={'operation': 'MULTIPLY'}) + multiply_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_2, 1: 0.010}, + attrs={"operation": "MULTIPLY"}, + ) - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'BSDF': principled_bsdf, 'Displacement': multiply_3}, - attrs={'is_active_output': True}) + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"BSDF": principled_bsdf, "Displacement": multiply_3}, + attrs={"is_active_output": True}, + ) -def shader_grained_metal(nw: NodeWrangler, scale=1.0, base_color=None, roughness=None, seed=None, **kwargs): +def shader_grained_metal( + nw: NodeWrangler, scale=1.0, base_color=None, roughness=None, seed=None, **kwargs +): # Code generated using version 2.6.4 of the node_transpiler if roughness is None: roughness = uniform(0.0, 1.0) @@ -59,21 +96,32 @@ def shader_grained_metal(nw: NodeWrangler, scale=1.0, base_color=None, roughness seed = uniform(-1000.0, 1000.0) if base_color is None: from infinigen.assets.materials.metal import sample_metal_color + base_color = sample_metal_color(**kwargs) + group = nw.new_node( + nodegroup_grained_metal().name, + input_kwargs={ + "Base Color": base_color, + "Scale": scale, + "Seed": seed, + "Roughness": roughness, + }, + ) - group = nw.new_node(nodegroup_grained_metal().name, - input_kwargs={'Base Color': base_color, - 'Scale': scale, - 'Seed': seed, - 'Roughness': roughness, - }) + displacement = nw.new_node( + "ShaderNodeDisplacement", + input_kwargs={"Height": group.outputs["Displacement"], "Midlevel": 0.0000}, + ) - displacement = nw.new_node('ShaderNodeDisplacement', input_kwargs={'Height': group.outputs["Displacement"], 'Midlevel': 0.0000}) + material_output = nw.new_node( + Nodes.MaterialOutput, + input_kwargs={"Surface": group.outputs["BSDF"], "Displacement": displacement}, + attrs={"is_active_output": True}, + ) - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': group.outputs["BSDF"], 'Displacement': displacement}, - attrs={'is_active_output': True}) def 
apply(obj, selection=None, **kwargs): - surface.add_material(obj, shader_grained_metal, selection=selection, input_kwargs=kwargs) + surface.add_material( + obj, shader_grained_metal, selection=selection, input_kwargs=kwargs + ) diff --git a/infinigen/assets/materials/metal/hammered_metal.py b/infinigen/assets/materials/metal/hammered_metal.py index d87a27f2b..251ec875f 100644 --- a/infinigen/assets/materials/metal/hammered_metal.py +++ b/infinigen/assets/materials/metal/hammered_metal.py @@ -4,78 +4,150 @@ # Authors: Yiming Zuo # Acknowledgement: This file draws inspiration https://www.youtube.com/watch?v=82smQvoh0GE by Mix CG Arts -import bpy -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category +from numpy.random import uniform + from infinigen.core import surface +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler from infinigen.core.util.random import log_uniform -@node_utils.to_nodegroup('nodegroup_hammered_metal', singleton=False, type='ShaderNodeTree') +@node_utils.to_nodegroup( + "nodegroup_hammered_metal", singleton=False, type="ShaderNodeTree" +) def nodegroup_hammered_metal(nw: NodeWrangler): # Code generated using version 2.6.4 of the node_transpiler - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketColor', 'Base Color', (0.8000, 0.8000, 0.8000, 1.0000)), - ('NodeSocketFloat', 'Scale', 0.0000), - ('NodeSocketFloat', 'Seed', 0.0000)]) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': group_input.outputs["Base Color"], 'Metallic': 1.0000, 'Specular': 0.0000, 'Roughness': 0.1000}) + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketColor", "Base Color", (0.8000, 0.8000, 0.8000, 1.0000)), + ("NodeSocketFloat", "Scale", 0.0000), + ("NodeSocketFloat", "Seed", 0.0000), + ], + ) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": group_input.outputs["Base Color"], + "Metallic": 1.0000, + "Specular": 0.0000, + "Roughness": 0.1000, + }, + ) texture_coordinate = nw.new_node(Nodes.TextureCoord) - multiply = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Scale"], 1: 20.0000}, attrs={'operation': 'MULTIPLY'}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': texture_coordinate.outputs["Object"], 'W': group_input.outputs["Seed"], 'Scale': multiply, 'Detail': 15.0000, 'Roughness': 0.4000, 'Distortion': 0.2000}, - attrs={'noise_dimensions': '4D'}) - - mix = nw.new_node(Nodes.Mix, - input_kwargs={0: 0.0100, 6: texture_coordinate.outputs["Object"], 7: noise_texture.outputs["Color"]}, - attrs={'clamp_factor': False, 'data_type': 'RGBA'}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Scale"], 1: 300.0000}, attrs={'operation': 'MULTIPLY'}) - - voronoi_texture_1 = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': mix.outputs[2], 'W': group_input.outputs["Seed"], 'Scale': multiply_1, 'Smoothness': 0.2000}, - attrs={'voronoi_dimensions': '4D', 'feature': 'SMOOTH_F1'}) - - multiply_2 = nw.new_node(Nodes.Math, - input_kwargs={0: voronoi_texture_1.outputs["Distance"], 1: group_input.outputs["Scale"]}, - attrs={'operation': 'MULTIPLY'}) - - power = nw.new_node(Nodes.Math, input_kwargs={0: multiply_2, 1: 2.5000}, attrs={'operation': 'POWER'}) - - 
multiply_3 = nw.new_node(Nodes.Math, input_kwargs={0: power, 1: log_uniform(.001,0.003)}, attrs={'operation': 'MULTIPLY'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'BSDF': principled_bsdf, 'Displacement': multiply_3, 'tmp_viewer': voronoi_texture_1.outputs["Color"]}, - attrs={'is_active_output': True}) - -def shader_hammered_metal(nw: NodeWrangler, scale=None, base_color=None, seed=None, **kwargs): + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Scale"], 1: 20.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": texture_coordinate.outputs["Object"], + "W": group_input.outputs["Seed"], + "Scale": multiply, + "Detail": 15.0000, + "Roughness": 0.4000, + "Distortion": 0.2000, + }, + attrs={"noise_dimensions": "4D"}, + ) + + mix = nw.new_node( + Nodes.Mix, + input_kwargs={ + 0: 0.0100, + 6: texture_coordinate.outputs["Object"], + 7: noise_texture.outputs["Color"], + }, + attrs={"clamp_factor": False, "data_type": "RGBA"}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Scale"], 1: 300.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + voronoi_texture_1 = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={ + "Vector": mix.outputs[2], + "W": group_input.outputs["Seed"], + "Scale": multiply_1, + "Smoothness": 0.2000, + }, + attrs={"voronoi_dimensions": "4D", "feature": "SMOOTH_F1"}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: voronoi_texture_1.outputs["Distance"], + 1: group_input.outputs["Scale"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + power = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_2, 1: 2.5000}, + attrs={"operation": "POWER"}, + ) + + multiply_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: power, 1: log_uniform(0.001, 0.003)}, + attrs={"operation": "MULTIPLY"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "BSDF": principled_bsdf, + "Displacement": multiply_3, + "tmp_viewer": voronoi_texture_1.outputs["Color"], + }, + attrs={"is_active_output": True}, + ) + + +def shader_hammered_metal( + nw: NodeWrangler, scale=None, base_color=None, seed=None, **kwargs +): # Code generated using version 2.6.4 of the node_transpiler if seed is None: seed = uniform(-1000.0, 1000.0) if base_color is None: from infinigen.assets.materials.metal import sample_metal_color + base_color = sample_metal_color(**kwargs) if scale is None: - scale = log_uniform(.8, 1.2) + scale = log_uniform(0.8, 1.2) - group = nw.new_node(nodegroup_hammered_metal().name, - input_kwargs={'Base Color': base_color, 'Scale': scale, 'Seed': seed}) + group = nw.new_node( + nodegroup_hammered_metal().name, + input_kwargs={"Base Color": base_color, "Scale": scale, "Seed": seed}, + ) - displacement = nw.new_node('ShaderNodeDisplacement', input_kwargs={'Height': group.outputs["Displacement"], 'Midlevel': 0.0000}) + displacement = nw.new_node( + "ShaderNodeDisplacement", + input_kwargs={"Height": group.outputs["Displacement"], "Midlevel": 0.0000}, + ) - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': group.outputs["BSDF"], 'Displacement': displacement}, - attrs={'is_active_output': True}) + material_output = nw.new_node( + Nodes.MaterialOutput, + input_kwargs={"Surface": group.outputs["BSDF"], "Displacement": displacement}, + attrs={"is_active_output": True}, + ) def apply(obj, selection=None, **kwargs): - surface.add_material(obj, shader_hammered_metal, 
selection=selection, input_kwargs=kwargs) + surface.add_material( + obj, shader_hammered_metal, selection=selection, input_kwargs=kwargs + ) diff --git a/infinigen/assets/materials/metal/metal_basic.py b/infinigen/assets/materials/metal/metal_basic.py index 81d5b96b6..928a99f4a 100644 --- a/infinigen/assets/materials/metal/metal_basic.py +++ b/infinigen/assets/materials/metal/metal_basic.py @@ -2,7 +2,6 @@ # This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. # Authors: Lingjie Mei -import numpy as np from numpy.random import uniform from infinigen.assets.materials import common @@ -11,23 +10,27 @@ def shader_metal(nw: NodeWrangler, color=None, **kwargs): - position = nw.new_node(Nodes.TextureCoord).outputs['Object'] + position = nw.new_node(Nodes.TextureCoord).outputs["Object"] roughness = nw.build_float_curve( - nw.new_node(Nodes.NoiseTexture, [position], input_kwargs={'Scale': uniform(10, 25)}), - [(0, uniform(0, .2)), (1, uniform(.4, .7))] + nw.new_node( + Nodes.NoiseTexture, [position], input_kwargs={"Scale": uniform(10, 25)} + ), + [(0, uniform(0, 0.2)), (1, uniform(0.4, 0.7))], ) principled_bsdf = nw.new_node( - Nodes.PrincipledBSDF, input_kwargs={ - "Metallic": 1., - 'Specular': uniform(.5, 1.), - 'Base Color': color, - 'Roughness': roughness - } + Nodes.PrincipledBSDF, + input_kwargs={ + "Metallic": 1.0, + "Specular": uniform(0.5, 1.0), + "Base Color": color, + "Roughness": roughness, + }, ) - nw.new_node(Nodes.MaterialOutput, input_kwargs={'Surface': principled_bsdf}) + nw.new_node(Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf}) def apply(obj, selection=None, **kwargs): from infinigen.assets.materials.metal import sample_metal_color + color = sample_metal_color(**kwargs) common.apply(obj, shader_metal, selection, color, **kwargs) diff --git a/infinigen/assets/materials/microwave_shaders.py b/infinigen/assets/materials/microwave_shaders.py index f08b4ff42..e37bdc0d0 100644 --- a/infinigen/assets/materials/microwave_shaders.py +++ b/infinigen/assets/materials/microwave_shaders.py @@ -5,23 +5,44 @@ from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler + def shader_black_medal(nw: NodeWrangler): # Code generated using version 2.6.5 of the node_transpiler - anisotropic_bsdf = nw.new_node('ShaderNodeBsdfAnisotropic', input_kwargs={'Color': (0.0167, 0.0167, 0.0167, 1.0000)}) + anisotropic_bsdf = nw.new_node( + "ShaderNodeBsdfAnisotropic", + input_kwargs={"Color": (0.0167, 0.0167, 0.0167, 1.0000)}, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, + input_kwargs={"Surface": anisotropic_bsdf}, + attrs={"is_active_output": True}, + ) - material_output = nw.new_node(Nodes.MaterialOutput, input_kwargs={'Surface': anisotropic_bsdf}, attrs={'is_active_output': True}) def shader_black_glass(nw: NodeWrangler): # Code generated using version 2.6.5 of the node_transpiler - glossy_bsdf = nw.new_node(Nodes.GlossyBSDF, input_kwargs={'Color': (0.0068, 0.0068, 0.0068, 1.0000), 'Roughness': 0.2000}) + glossy_bsdf = nw.new_node( + Nodes.GlossyBSDF, + input_kwargs={"Color": (0.0068, 0.0068, 0.0068, 1.0000), "Roughness": 0.2000}, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, + input_kwargs={"Surface": glossy_bsdf}, + attrs={"is_active_output": True}, + ) - material_output = nw.new_node(Nodes.MaterialOutput, input_kwargs={'Surface': glossy_bsdf}, attrs={'is_active_output': True}) def shader_glass(nw: NodeWrangler): # Code generated using version 2.6.5 of the 
node_transpiler - glass_bsdf = nw.new_node(Nodes.GlassBSDF, input_kwargs={'IOR': 1.5000}) + glass_bsdf = nw.new_node(Nodes.GlassBSDF, input_kwargs={"IOR": 1.5000}) - material_output = nw.new_node(Nodes.MaterialOutput, input_kwargs={'Surface': glass_bsdf}, attrs={'is_active_output': True}) \ No newline at end of file + material_output = nw.new_node( + Nodes.MaterialOutput, + input_kwargs={"Surface": glass_bsdf}, + attrs={"is_active_output": True}, + ) diff --git a/infinigen/assets/materials/mirror.py b/infinigen/assets/materials/mirror.py index a725fce4e..429195488 100644 --- a/infinigen/assets/materials/mirror.py +++ b/infinigen/assets/materials/mirror.py @@ -3,15 +3,19 @@ # Authors: Lingjie Mei from infinigen.assets.materials import common -from infinigen.core.nodes import NodeWrangler, Nodes +from infinigen.core.nodes import Nodes, NodeWrangler -def shader_mirror(nw: NodeWrangler,**kwargs): - glossy_bsdf = nw.new_node('ShaderNodeBsdfGlossy', - input_kwargs={'Color': (1.0, 1.0, 1.0, 1.0), 'Roughness': 0, - }) +def shader_mirror(nw: NodeWrangler, **kwargs): + glossy_bsdf = nw.new_node( + "ShaderNodeBsdfGlossy", + input_kwargs={ + "Color": (1.0, 1.0, 1.0, 1.0), + "Roughness": 0, + }, + ) - nw.new_node(Nodes.MaterialOutput, input_kwargs={'Surface': glossy_bsdf}) + nw.new_node(Nodes.MaterialOutput, input_kwargs={"Surface": glossy_bsdf}) def apply(obj, selection=None, **kwargs): diff --git a/infinigen/assets/materials/mountain.py b/infinigen/assets/materials/mountain.py index c7ad82cf0..4004c14f8 100644 --- a/infinigen/assets/materials/mountain.py +++ b/infinigen/assets/materials/mountain.py @@ -7,19 +7,19 @@ import gin import numpy as np -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler from infinigen.core import surface -from infinigen.core.util.organization import SurfaceTypes +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler from infinigen.core.util.math import FixedSeed +from infinigen.core.util.organization import SurfaceTypes from infinigen.core.util.random import clip_hsv, random_color, random_color_neighbour from infinigen.core.util.random import random_general as rg - from infinigen.terrain.land_process.snowfall import snowfall_params type = SurfaceTypes.SDFPerturb mod_name = "geo_MOUNTAIN" name = "mountain" + def geo_MOUNTAIN_general( nw: NodeWrangler, n_noise, @@ -27,7 +27,7 @@ def geo_MOUNTAIN_general( n_crack, crack_params, crack_modulation_params, - selection=None + selection=None, ): position = nw.new_node("GeometryNodeInputPosition", []) normal = nw.new_node("GeometryNodeInputNormal", []) @@ -51,18 +51,19 @@ def geo_MOUNTAIN_general( nw.new_node( Nodes.NoiseTexture, input_kwargs={ - 'Vector': nw.add(position, position_shift), - 'Scale': scale, 'Detail': detail, 'Roughness': roughness - } + "Vector": nw.add(position, position_shift), + "Scale": scale, + "Detail": detail, + "Roughness": roughness, + }, ), - 0.5 + 0.5, ), - zscale + zscale, ) noises.append(content) offset = nw.scalar_max(*noises) - if n_crack > 0: cracks = [] for i in range(n_crack): @@ -71,27 +72,46 @@ def geo_MOUNTAIN_general( detail = nw.new_node(Nodes.Value, label=f"crack_modulation_detail{i}") detail.outputs[0].default_value = rg(crack_modulation_params["detail"]) roughness = nw.new_node(Nodes.Value, label=f"crack_modulation_roughness{i}") - roughness.outputs[0].default_value = rg(crack_modulation_params["roughness"]) + roughness.outputs[0].default_value = rg( + crack_modulation_params["roughness"] + ) position_shift = nw.new_node(Nodes.Vector, 
label=f"position_shift_mask{i}") position_shift.vector = nw.get_position_translation_seed(f"mask{i}") mask = nw.new_node( Nodes.NoiseTexture, - input_kwargs={'Vector': nw.add(position, position_shift), 'Scale': scale, 'Detail': detail, 'Roughness': roughness} + input_kwargs={ + "Vector": nw.add(position, position_shift), + "Scale": scale, + "Detail": detail, + "Roughness": roughness, + }, ) - position_shift = nw.new_node(Nodes.Vector, label=f"position_shift_slope_modulation{i}") - position_shift.vector = nw.get_position_translation_seed(f"slope_modulation{i}") + position_shift = nw.new_node( + Nodes.Vector, label=f"position_shift_slope_modulation{i}" + ) + position_shift.vector = nw.get_position_translation_seed( + f"slope_modulation{i}" + ) - slope_modulation = nw.new_node(Nodes.MapRange, input_kwargs={ - "Value": nw.new_node( - Nodes.NoiseTexture, - input_kwargs={'Vector': nw.add(position, position_shift), 'Scale': scale, 'Detail': detail, 'Roughness': roughness} - ), - "From Min": 0.45, - "From Max": 0.55 - }) + slope_modulation = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": nw.add(position, position_shift), + "Scale": scale, + "Detail": detail, + "Roughness": roughness, + }, + ), + "From Min": 0.45, + "From Max": 0.55, + }, + ) scale = nw.new_node(Nodes.Value, label=f"crack_scale{i}") scale.outputs[0].default_value = rg(crack_params["scale"]) @@ -106,40 +126,47 @@ def geo_MOUNTAIN_general( mask_rampmax = nw.new_node(Nodes.Value, label=f"crack_mask_rampmax{i}") mask_rampmax.outputs[0].default_value = rg(crack_params["mask_rampmax"]) - mask_crack = nw.new_node(Nodes.MapRange, input_kwargs={ - "Value": mask, - "From Min": nw.scalar_add(mask_rampmin, 0.5), - "From Max": nw.scalar_add(mask_rampmax, 0.5) - }) + mask_crack = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": mask, + "From Min": nw.scalar_add(mask_rampmin, 0.5), + "From Max": nw.scalar_add(mask_rampmax, 0.5), + }, + ) zscale_modulation = nw.scalar_multiply( zscale_scale, nw.power( slope_base, - slope_modulation # reuse - ) + slope_modulation, # reuse + ), ) slope_modulation = nw.scalar_multiply( nw.scalar_divide(1.0, slope_scale), - nw.power( - nw.scalar_divide(1.0, slope_base), - slope_modulation - ) + nw.power(nw.scalar_divide(1.0, slope_base), slope_modulation), ) position_shift = nw.new_node(Nodes.Vector, label=f"position_shift_crack{i}") position_shift.vector = nw.get_position_translation_seed(f"crack{i}") crack = nw.scalar_multiply( - nw.new_node(Nodes.MapRange, input_kwargs={ - "Value": nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': nw.add(position, position_shift), 'Scale': scale}, - attrs={"feature": "DISTANCE_TO_EDGE"} - ), - "From Max": slope_modulation, - "To Min": -1.0, - "To Max": 0.0 - }), + nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={ + "Vector": nw.add(position, position_shift), + "Scale": scale, + }, + attrs={"feature": "DISTANCE_TO_EDGE"}, + ), + "From Max": slope_modulation, + "To Min": -1.0, + "To Max": 0.0, + }, + ), mask_crack, - zscale_modulation + zscale_modulation, ) cracks.append(crack) offset = nw.scalar_add(offset, nw.scalar_add(*cracks)) @@ -153,85 +180,103 @@ def geo_MOUNTAIN_general( def geo_MOUNTAIN( nw: NodeWrangler, n_noise=3, - noise_params={"scale": ("uniform", 1, 5), "detail": 8, "roughness": 0.7, "zscale": ("power_uniform", -1, -0.5)}, + noise_params={ + "scale": ("uniform", 1, 5), + "detail": 8, + "roughness": 0.7, + 
"zscale": ("power_uniform", -1, -0.5), + }, n_crack=8, - crack_params={"scale": ("uniform", 1, 5), "zscale_scale": 0.02, "slope_scale": 5, "slope_base": 3, "mask_rampmin": 0.0, "mask_rampmax": 0.3}, + crack_params={ + "scale": ("uniform", 1, 5), + "zscale_scale": 0.02, + "slope_scale": 5, + "slope_base": 3, + "mask_rampmin": 0.0, + "mask_rampmax": 0.3, + }, crack_modulation_params={"scale": 1, "detail": 5, "roughness": 0.5}, - selection=None + selection=None, ): nw.force_input_consistency() groupinput = nw.new_node(Nodes.GroupInput) - offset = geo_MOUNTAIN_general(nw, n_noise, noise_params, n_crack, crack_params, crack_modulation_params) - if selection is not None: offset = nw.multiply(offset, surface.eval_argument(nw, selection)) - set_position = nw.new_node(Nodes.SetPosition, input_kwargs={"Geometry": groupinput, "Offset": offset}) - nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': set_position}) + offset = geo_MOUNTAIN_general( + nw, n_noise, noise_params, n_crack, crack_params, crack_modulation_params + ) + if selection is not None: + offset = nw.multiply(offset, surface.eval_argument(nw, selection)) + set_position = nw.new_node( + Nodes.SetPosition, input_kwargs={"Geometry": groupinput, "Offset": offset} + ) + nw.new_node(Nodes.GroupOutput, input_kwargs={"Geometry": set_position}) @gin.configurable("shader") def shader_MOUNTAIN( - nw, - obj, - is_rock=False, - spherical=False, - preset_zrange=(0, 120), - color=None, - shader_roughness=1, - num_layers=16, - random_seed=0, - layered_mountain=True, - prob_arranged_layers=0.1, - hue_diff=0.1, - max_sat=0.4, - max_val=0.4, - snowy=False, - *args, - **kwargs - ): + nw, + obj, + is_rock=False, + spherical=False, + preset_zrange=(0, 120), + color=("palette", "mountain soil"), + shader_roughness=1, + num_layers=16, + random_seed=0, + layered_mountain=True, + prob_arranged_layers=0.1, + hue_diff=0.1, + max_sat=0.4, + max_val=0.4, + snowy=False, + *args, + **kwargs, +): nw.force_input_consistency() with FixedSeed(random_seed): - if np.random.uniform() > prob_arranged_layers: arranged_layers = True else: arranged_layers = False - shader_roughness = rg(shader_roughness) layered_mountain = rg(layered_mountain) if layered_mountain: - tex_coor = nw.new_node('ShaderNodeNewGeometry', []) + tex_coor = nw.new_node("ShaderNodeNewGeometry", []) if spherical: - z = nw.new_node(Nodes.VectorMath, [(tex_coor, 0)], attrs={"operation": "LENGTH"}) - z = nw.new_node('ShaderNodeMapRange', [z]) + z = nw.new_node( + Nodes.VectorMath, [(tex_coor, 0)], attrs={"operation": "LENGTH"} + ) + z = nw.new_node("ShaderNodeMapRange", [z]) else: - z = nw.new_node('ShaderNodeSeparateXYZ', [(tex_coor, 0)]) - z = nw.new_node('ShaderNodeMapRange', [(z, 2)]) + z = nw.new_node("ShaderNodeSeparateXYZ", [(tex_coor, 0)]) + z = nw.new_node("ShaderNodeMapRange", [(z, 2)]) z_noise_mag = np.random.uniform(0.1, 0.4) if preset_zrange is None: # map value from (-z_noise_mag / 2) to (1 - z_noise_mag / 2) - z_min = 0 # obj.bound_box[0][-1] + z_min = 0 # obj.bound_box[0][-1] z_max = obj.bound_box[1][-1] else: z_min, z_max = preset_zrange # z_min must be 0 to avoid sediment under water - + z.inputs[1].default_value = z_min # from min z.inputs[2].default_value = z_max # from max z.inputs[3].default_value = -1 * (z_noise_mag / 2) # to min z.inputs[4].default_value = 1 - (z_noise_mag / 2) # to max - z_noise = nw.new_node('ShaderNodeTexNoise', - input_kwargs={'Vector': (tex_coor, 0), 'Scale': 0.1, "Detail": 9}, + z_noise = nw.new_node( + "ShaderNodeTexNoise", + input_kwargs={"Vector": 
(tex_coor, 0), "Scale": 0.1, "Detail": 9}, ) # noise scale - z_noise = nw.new_node('ShaderNodeMath', [z_noise]) - z_noise.operation = 'MULTIPLY' + z_noise = nw.new_node("ShaderNodeMath", [z_noise]) + z_noise.operation = "MULTIPLY" z_noise.inputs[1].default_value = np.random.uniform(0.1, 0.3) z = nw.add2(z, z_noise) - ramp = nw.new_node('ShaderNodeValToRGB', [z]) + ramp = nw.new_node("ShaderNodeValToRGB", [z]) elements = ramp.color_ramp.elements elements.remove(elements[0]) # todo: better way to sample the initial color @@ -242,27 +287,29 @@ def shader_MOUNTAIN( cur_color = rg(color) elements[-1].color = cur_color - cur_loc = 1 for _ in range(num_layers): - if arranged_layers: - cur_loc -= (np.random.uniform() * 2 / num_layers) + cur_loc -= np.random.uniform() * 2 / num_layers cur_loc = max(0, cur_loc) else: cur_loc = np.random.uniform() element = elements.new(cur_loc) if color is None: - cur_color = random_color_neighbour(cur_color, sat_diff=None, val_diff=None, hue_diff=hue_diff) + cur_color = random_color_neighbour( + cur_color, sat_diff=None, val_diff=None, hue_diff=hue_diff + ) cur_color = clip_hsv(cur_color, max_s=max_sat, max_v=max_val) else: cur_color = rg(color) element.color = cur_color # ambient occlusion - amb_occl = nw.new_node('ShaderNodeAmbientOcclusion', []) - ramp = nw.new_node('ShaderNodeMixRGB', [amb_occl, (0.0, 0.0, 0.0, 1.0), ramp]) + amb_occl = nw.new_node("ShaderNodeAmbientOcclusion", []) + ramp = nw.new_node( + "ShaderNodeMixRGB", [amb_occl, (0.0, 0.0, 0.0, 1.0), ramp] + ) else: if color is None: @@ -271,45 +318,74 @@ def shader_MOUNTAIN( ramp = rg(color)[:3] color_ = nw.multiply( ramp, - nw.scalar_max(0.2, - nw.new_node(Nodes.Math, [ - nw.new_node(Nodes.VectorMath, [ - nw.new_node(Nodes.VectorMath, [ - (0.0, 0.0, 1.0), - (nw.new_node("ShaderNodeNewGeometry", []), 1) - ], attrs={'operation': 'DOT_PRODUCT'}) - ], attrs={'operation': 'ABSOLUTE'}), - 3 - ], attrs={'operation': 'POWER'}) - ) + nw.scalar_max( + 0.2, + nw.new_node( + Nodes.Math, + [ + nw.new_node( + Nodes.VectorMath, + [ + nw.new_node( + Nodes.VectorMath, + [ + (0.0, 0.0, 1.0), + (nw.new_node("ShaderNodeNewGeometry", []), 1), + ], + attrs={"operation": "DOT_PRODUCT"}, + ) + ], + attrs={"operation": "ABSOLUTE"}, + ), + 3, + ], + attrs={"operation": "POWER"}, + ), + ), ) if snowy: if not is_rock: normal_params = snowfall_params()["detailed_normal_params"] else: normal_params = snowfall_params()["on_rock_normal_params"] - normal = (nw.new_node('ShaderNodeNewGeometry'), 1) + normal = (nw.new_node("ShaderNodeNewGeometry"), 1) weights = [0] for normal_preference, (th0, th1) in normal_params: - disturb = nw.new_node(Nodes.NoiseTexture, input_kwargs={'Scale': 0.1, 'Detail': 9}) + disturb = nw.new_node( + Nodes.NoiseTexture, input_kwargs={"Scale": 0.1, "Detail": 9} + ) th0 = nw.scalar_add(disturb, th0 - 0.5) th1 = nw.scalar_add(disturb, th1 - 0.5) - map_range = nw.new_node(Nodes.MapRange, input_kwargs={'Value': nw.dot(normal, normal_preference), 1: th0, 2: th1}) + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": nw.dot(normal, normal_preference), + 1: th0, + 2: th1, + }, + ) weights.append(map_range) weights = nw.scalar_add(*weights) weights.use_clamp = 1 - color_ = nw.new_node('ShaderNodeMixRGB', [weights, color_, [0.904]*3 + [1]]) - + color_ = nw.new_node( + "ShaderNodeMixRGB", [weights, color_, [0.904] * 3 + [1]] + ) - bsdf_mountain = nw.new_node("ShaderNodeBsdfPrincipled", - [color_, None, None, None, None, None, None, - None, None, shader_roughness]) + bsdf_mountain = nw.new_node( + 
"ShaderNodeBsdfPrincipled", + [color_, None, None, None, None, None, None, None, None, shader_roughness], + ) return bsdf_mountain + def apply(objs, selection=None, **kwargs): if isinstance(objs, list) and len(objs) == 0: return surface.add_geomod(objs, geo_MOUNTAIN, selection=selection) - surface.add_material(objs, shader_MOUNTAIN, selection=selection, - input_kwargs={"obj": objs[0] if isinstance(objs, list) else objs, **kwargs}) + surface.add_material( + objs, + shader_MOUNTAIN, + selection=selection, + input_kwargs={"obj": objs[0] if isinstance(objs, list) else objs, **kwargs}, + ) diff --git a/infinigen/assets/materials/mud.py b/infinigen/assets/materials/mud.py index 6f8073cff..c8d424752 100644 --- a/infinigen/assets/materials/mud.py +++ b/infinigen/assets/materials/mud.py @@ -3,145 +3,229 @@ # Authors: Mingzhe Wang -import bpy -import mathutils -from numpy.random import uniform as U, normal as N, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category +import gin +from numpy.random import normal as N +from numpy.random import uniform as U + from infinigen.core import surface -from infinigen.core.util.organization import SurfaceTypes +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler from infinigen.core.util.math import FixedSeed -import gin +from infinigen.core.util.organization import SurfaceTypes type = SurfaceTypes.SDFPerturb mod_name = "geo_mud" name = "mud" + def shader_mud(nw: NodeWrangler): # Code generated using version 2.6.4 of the node_transpiler geometry_5 = nw.new_node(Nodes.NewGeometry) - - noise_texture_1_w = nw.new_node(Nodes.Value, label='noise_texture_1_w') + + noise_texture_1_w = nw.new_node(Nodes.Value, label="noise_texture_1_w") noise_texture_1_w.outputs[0].default_value = 9.6366 - - noise_texture_1 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': geometry_5.outputs["Position"], 'W': noise_texture_1_w, 'Scale': N(5, 0.5)}, - attrs={'noise_dimensions': '4D'}) - + + noise_texture_1 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": geometry_5.outputs["Position"], + "W": noise_texture_1_w, + "Scale": N(5, 0.5), + }, + attrs={"noise_dimensions": "4D"}, + ) + color1 = [0.0216, 0.0145, 0.0113, 1.0000] - color2 = [0.0424, 0.0308, 0.0142, 1.0000] + color2 = [0.0424, 0.0308, 0.0142, 1.0000] for i in range(3): color1[i] += N(0, 0.005) color2[i] += N(0, 0.005) - colorramp_3 = nw.new_node(Nodes.ColorRamp, input_kwargs={'Fac': noise_texture_1.outputs["Fac"]}) + colorramp_3 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": noise_texture_1.outputs["Fac"]} + ) colorramp_3.color_ramp.elements[0].position = 0.0000 colorramp_3.color_ramp.elements[0].color = color1 colorramp_3.color_ramp.elements[1].position = 1.0000 colorramp_3.color_ramp.elements[1].color = color2 - + geometry_1 = nw.new_node(Nodes.NewGeometry) - - musgrave_texture = nw.new_node(Nodes.MusgraveTexture, - input_kwargs={'Vector': geometry_1.outputs["Position"], 'Scale': 0.2000, 'W': U(-10, 10)}, - attrs={'musgrave_dimensions': '4D', 'musgrave_type': 'RIDGED_MULTIFRACTAL'}) - - colorramp_5 = nw.new_node(Nodes.ColorRamp, input_kwargs={'Fac': musgrave_texture}) + + musgrave_texture = nw.new_node( + Nodes.MusgraveTexture, + input_kwargs={ + "Vector": geometry_1.outputs["Position"], + "Scale": 0.2000, + "W": U(-10, 10), + }, + attrs={"musgrave_dimensions": "4D", "musgrave_type": "RIDGED_MULTIFRACTAL"}, + ) + + 
colorramp_5 = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": musgrave_texture}) colorramp_5.color_ramp.elements[0].position = 0.0000 colorramp_5.color_ramp.elements[0].color = [1.0000, 1.0000, 1.0000, 1.0000] colorramp_5.color_ramp.elements[1].position = N(0.1045, 0.01) colorramp_5.color_ramp.elements[1].color = [0.0000, 0.0000, 0.0000, 1.0000] - + x1 = U(0.85, 0.95) x2 = U(0.65, 0.75) - colorramp_6 = nw.new_node(Nodes.ColorRamp, input_kwargs={'Fac': colorramp_5.outputs["Color"]}) + colorramp_6 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": colorramp_5.outputs["Color"]} + ) colorramp_6.color_ramp.elements[0].position = 0.0000 colorramp_6.color_ramp.elements[0].color = [x1, x1, x1, 1.0000] colorramp_6.color_ramp.elements[1].position = 1.0000 colorramp_6.color_ramp.elements[1].color = [x2, x2, x2, 1.0000] - + x1 = U(0.05, 0.15) x2 = U(0.45, 0.55) - colorramp_4 = nw.new_node(Nodes.ColorRamp, input_kwargs={'Fac': noise_texture_1.outputs["Fac"]}) + colorramp_4 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": noise_texture_1.outputs["Fac"]} + ) colorramp_4.color_ramp.elements[0].position = 0.0000 colorramp_4.color_ramp.elements[0].color = [x1, x1, x1, 1.0000] colorramp_4.color_ramp.elements[1].position = 1.0000 colorramp_4.color_ramp.elements[1].color = [x2, x2, x2, 1.0000] - - mix_3 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': colorramp_5.outputs["Color"], 'Color1': (0.0000, 0.0000, 0.0000, 1.0000), 'Color2': colorramp_4.outputs["Color"]}) - - principled_bsdf_2 = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': colorramp_3.outputs["Color"], 'Specular': colorramp_6.outputs["Color"], 'Roughness': mix_3}) - - material_output = nw.new_node(Nodes.MaterialOutput, input_kwargs={'Surface': principled_bsdf_2}, attrs={'is_active_output': True}) + + mix_3 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": colorramp_5.outputs["Color"], + "Color1": (0.0000, 0.0000, 0.0000, 1.0000), + "Color2": colorramp_4.outputs["Color"], + }, + ) + + principled_bsdf_2 = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": colorramp_3.outputs["Color"], + "Specular": colorramp_6.outputs["Color"], + "Roughness": mix_3, + }, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, + input_kwargs={"Surface": principled_bsdf_2}, + attrs={"is_active_output": True}, + ) return principled_bsdf_2 + @gin.configurable def geo_mud(nw: NodeWrangler, random_seed=0, selection=None): # Code generated using version 2.6.4 of the node_transpiler with FixedSeed(random_seed): + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) - group_input = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketGeometry', 'Geometry', None)]) - position_5 = nw.new_node(Nodes.InputPosition) - - noise_texture_3 = nw.new_node(Nodes.NoiseTexture, input_kwargs={'Vector': position_5}) - - mix_2 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': nw.new_value(N(0.6, 0.1), "mix_2_fac"), 'Color1': noise_texture_3.outputs["Color"], 'Color2': position_5}) - - noise_texture_4 = nw.new_node(Nodes.NoiseTexture, input_kwargs={'Vector': mix_2, 'Scale': nw.new_value(N(50, 5), "noise_texture_4_scale")}) - - voronoi_texture_2 = nw.new_node(Nodes.VoronoiTexture, input_kwargs={'Vector': mix_2, 'Scale': nw.new_value(N(3.0000, 0.5), "voronoi_texture_2_scale")}) - - colorramp_1 = nw.new_node(Nodes.ColorRamp, input_kwargs={'Fac': voronoi_texture_2.outputs["Distance"]}) + + noise_texture_3 = nw.new_node( + Nodes.NoiseTexture, input_kwargs={"Vector": position_5} + 
) + + mix_2 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": nw.new_value(N(0.6, 0.1), "mix_2_fac"), + "Color1": noise_texture_3.outputs["Color"], + "Color2": position_5, + }, + ) + + noise_texture_4 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": mix_2, + "Scale": nw.new_value(N(50, 5), "noise_texture_4_scale"), + }, + ) + + voronoi_texture_2 = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={ + "Vector": mix_2, + "Scale": nw.new_value(N(3.0000, 0.5), "voronoi_texture_2_scale"), + }, + ) + + colorramp_1 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": voronoi_texture_2.outputs["Distance"]} + ) colorramp_1.color_ramp.elements[0].position = 0.0000 colorramp_1.color_ramp.elements[0].color = [1.0000, 1.0000, 1.0000, 1.0000] colorramp_1.color_ramp.elements[1].position = 1.0000 colorramp_1.color_ramp.elements[1].color = [0.0000, 0.0000, 0.0000, 1.0000] - - float_curve_1 = nw.new_node(Nodes.FloatCurve, - input_kwargs={ - 'Value': colorramp_1.outputs["Color"]}) + + float_curve_1 = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": colorramp_1.outputs["Color"]} + ) node_utils.assign_curve( - float_curve_1.mapping.curves[0], - [(0.0000, 0.0000), (0.3386, 0.0844), (0.8114, 0.6312), (1.0000, 0.7656)] + float_curve_1.mapping.curves[0], + [(0.0000, 0.0000), (0.3386, 0.0844), (0.8114, 0.6312), (1.0000, 0.7656)], ) # node_utils.assign_curve( - # float_curve_1.mapping.curves[0], + # float_curve_1.mapping.curves[0], # [(0.0000, 0.0000), (0.3386+N(0, 0.05), 0.0844), (0.8114+N(0, 0.05), 0.6312), (1.0000, 0.7656)] # ) - + value_6 = nw.new_node(Nodes.Value) value_6.outputs[0].default_value = N(2, 0.2) - - multiply = nw.new_node(Nodes.VectorMath, input_kwargs={0: float_curve_1, 1: value_6}, attrs={'operation': 'MULTIPLY'}) - - add = nw.new_node(Nodes.VectorMath, input_kwargs={0: noise_texture_4.outputs["Fac"], 1: multiply.outputs["Vector"]}) - + + multiply = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: float_curve_1, 1: value_6}, + attrs={"operation": "MULTIPLY"}, + ) + + add = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: noise_texture_4.outputs["Fac"], + 1: multiply.outputs["Vector"], + }, + ) + normal = nw.new_node(Nodes.InputNormal) - - multiply_1 = nw.new_node(Nodes.VectorMath, input_kwargs={0: add.outputs["Vector"], 1: normal}, attrs={'operation': 'MULTIPLY'}) - + + multiply_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: add.outputs["Vector"], 1: normal}, + attrs={"operation": "MULTIPLY"}, + ) + value_5 = nw.new_node(Nodes.Value) value_5.outputs[0].default_value = N(0.04, 0.005) - - multiply_2 = nw.new_node(Nodes.VectorMath, + + multiply_2 = nw.new_node( + Nodes.VectorMath, input_kwargs={0: multiply_1.outputs["Vector"], 1: value_5}, - attrs={'operation': 'MULTIPLY'}) - + attrs={"operation": "MULTIPLY"}, + ) + offset = multiply_2.outputs["Vector"] if selection is not None: offset = nw.multiply(offset, surface.eval_argument(nw, selection)) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 'Offset': offset}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': set_position}, attrs={'is_active_output': True}) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + "Offset": offset, + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": set_position}, + attrs={"is_active_output": True}, + ) + def apply(obj, selection=None, **kwargs): surface.add_geomod(obj, geo_mud, 
selection=selection) - surface.add_material(obj, shader_mud, selection=selection) \ No newline at end of file + surface.add_material(obj, shader_mud, selection=selection) diff --git a/infinigen/assets/materials/new_whitewater.py b/infinigen/assets/materials/new_whitewater.py index 0d3980033..6b0b2f3a4 100644 --- a/infinigen/assets/materials/new_whitewater.py +++ b/infinigen/assets/materials/new_whitewater.py @@ -3,15 +3,13 @@ # Authors: Karhan Kayan -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category +from numpy.random import normal + from infinigen.core import surface +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler from infinigen.core.util.random import random_color_neighbour + def new_whitewater(nw: NodeWrangler): # Code generated using version 2.6.3 of the node_transpiler @@ -19,7 +17,9 @@ def new_whitewater(nw: NodeWrangler): Nodes.PrincipledBSDF, input_kwargs={ "Base Color": (1.0000, 1.0000, 1.0000, 1.0000), - "Subsurface Color": random_color_neighbour((0.7147, 0.6062, 0.8000, 1.0000), 0.05, 0.05, 0.05), + "Subsurface Color": random_color_neighbour( + (0.7147, 0.6062, 0.8000, 1.0000), 0.05, 0.05, 0.05 + ), "Specular": 0.0886 + 0.01 * normal(), "Roughness": 0.1500, "Sheen Tint": 0.0000, @@ -43,4 +43,4 @@ def new_whitewater(nw: NodeWrangler): def apply(obj, selection=None, **kwargs): - surface.add_material(obj, new_whitewater, selection=selection) \ No newline at end of file + surface.add_material(obj, new_whitewater, selection=selection) diff --git a/infinigen/assets/materials/nose.py b/infinigen/assets/materials/nose.py index 797746dee..671f8b0e8 100644 --- a/infinigen/assets/materials/nose.py +++ b/infinigen/assets/materials/nose.py @@ -4,39 +4,47 @@ # Authors: Alexander Raistrick -import bpy -import mathutils -from numpy.random import uniform as U, normal as N, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category +from numpy.random import normal as N +from numpy.random import uniform as U + from infinigen.core import surface +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler + def shader_nose(nw: NodeWrangler): # Code generated using version 2.4.3 of the node_transpiler - musgrave_texture = nw.new_node(Nodes.MusgraveTexture, - input_kwargs={'Scale': U(2, 6), 'Detail': 14.699999999999999, 'Dimension': 1.5}) - - colorramp = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': musgrave_texture}) + musgrave_texture = nw.new_node( + Nodes.MusgraveTexture, + input_kwargs={"Scale": U(2, 6), "Detail": 14.699999999999999, "Dimension": 1.5}, + ) + + colorramp = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": musgrave_texture}) colorramp.color_ramp.elements[0].position = U(0.2, 0.6) colorramp.color_ramp.elements[0].color = (0.008, 0.0053, 0.0044, 1.0) colorramp.color_ramp.elements[1].position = 1.0 colorramp.color_ramp.elements[1].color = (0.7068, 0.436, 0.35, 1.0) - - musgrave_texture_1 = nw.new_node(Nodes.MusgraveTexture, - input_kwargs={'Scale': 10.0}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': musgrave_texture_1, 3: N(0.4, 0.1), 4: N(0.7, 0.15)}) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': colorramp.outputs["Color"], 'Roughness': map_range.outputs["Result"]}) - - 
material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': principled_bsdf}) + musgrave_texture_1 = nw.new_node( + Nodes.MusgraveTexture, input_kwargs={"Scale": 10.0} + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": musgrave_texture_1, 3: N(0.4, 0.1), 4: N(0.7, 0.15)}, + ) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": colorramp.outputs["Color"], + "Roughness": map_range.outputs["Result"], + }, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf} + ) def apply(obj, selection=None, **kwargs): diff --git a/infinigen/assets/materials/oven_shaders.py b/infinigen/assets/materials/oven_shaders.py index c6e3017e9..b672c2f02 100644 --- a/infinigen/assets/materials/oven_shaders.py +++ b/infinigen/assets/materials/oven_shaders.py @@ -8,21 +8,39 @@ def shader_super_black_glass(nw: NodeWrangler): # Code generated using version 2.6.5 of the node_transpiler - glossy_bsdf = nw.new_node(Nodes.GlossyBSDF, input_kwargs={'Color': (0.0095, 0.0095, 0.0095, 1.0000), 'Roughness': 0.0000}) - material_output = nw.new_node(Nodes.MaterialOutput, input_kwargs={'Surface': glossy_bsdf}, attrs={'is_active_output': True}) + glossy_bsdf = nw.new_node( + Nodes.GlossyBSDF, + input_kwargs={"Color": (0.0095, 0.0095, 0.0095, 1.0000), "Roughness": 0.0000}, + ) + material_output = nw.new_node( + Nodes.MaterialOutput, + input_kwargs={"Surface": glossy_bsdf}, + attrs={"is_active_output": True}, + ) def shader_black_medal(nw: NodeWrangler): # Code generated using version 2.6.5 of the node_transpiler - anisotropic_bsdf = nw.new_node('ShaderNodeBsdfAnisotropic', input_kwargs={'Color': (0.0167, 0.0167, 0.0167, 1.0000)}) + anisotropic_bsdf = nw.new_node( + "ShaderNodeBsdfAnisotropic", + input_kwargs={"Color": (0.0167, 0.0167, 0.0167, 1.0000)}, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, + input_kwargs={"Surface": anisotropic_bsdf}, + attrs={"is_active_output": True}, + ) - material_output = nw.new_node(Nodes.MaterialOutput, input_kwargs={'Surface': anisotropic_bsdf}, attrs={'is_active_output': True}) def shader_glass(nw: NodeWrangler): # Code generated using version 2.6.5 of the node_transpiler - glass_bsdf = nw.new_node(Nodes.GlassBSDF, input_kwargs={'IOR': 1.5000}) - - material_output = nw.new_node(Nodes.MaterialOutput, input_kwargs={'Surface': glass_bsdf}, attrs={'is_active_output': True}) + glass_bsdf = nw.new_node(Nodes.GlassBSDF, input_kwargs={"IOR": 1.5000}) + material_output = nw.new_node( + Nodes.MaterialOutput, + input_kwargs={"Surface": glass_bsdf}, + attrs={"is_active_output": True}, + ) diff --git a/infinigen/assets/materials/plaster.py b/infinigen/assets/materials/plaster.py index 918108828..589ac0f3f 100644 --- a/infinigen/assets/materials/plaster.py +++ b/infinigen/assets/materials/plaster.py @@ -4,56 +4,77 @@ # Authors: Lingjie Mei from collections.abc import Iterable -import numpy as np from numpy.random import uniform -from infinigen.assets.utils.object import new_plane +from infinigen.assets.materials import common from infinigen.assets.utils.uv import unwrap_normal -from infinigen.core.util.color import hsv2rgba from infinigen.core.nodes.node_info import Nodes +from infinigen.core.nodes.node_utils import build_color_ramp from infinigen.core.nodes.node_wrangler import NodeWrangler -from infinigen.assets.materials import common +from infinigen.core.util.color import hsv2rgba from infinigen.core.util.random import log_uniform -from 
infinigen.core.nodes.node_utils import build_color_ramp def shader_plaster(nw: NodeWrangler, plaster_colored, **kwargs): hue = uniform(0, 1) - front_value = log_uniform(.5, 1.) - back_value = front_value * uniform(.6, 1) + front_value = log_uniform(0.5, 1.0) + back_value = front_value * uniform(0.6, 1) if plaster_colored: - front_color = hsv2rgba(hue, uniform(.3, .5), front_value) - back_color = hsv2rgba(hue + uniform(-.1, .1), uniform(.3, .5), back_value) + front_color = hsv2rgba(hue, uniform(0.3, 0.5), front_value) + back_color = hsv2rgba(hue + uniform(-0.1, 0.1), uniform(0.3, 0.5), back_value) else: front_color = hsv2rgba(hue, 0, front_value) - back_color = hsv2rgba(hue + uniform(-.1, .1), 0, back_value) + back_color = hsv2rgba(hue + uniform(-0.1, 0.1), 0, back_value) uv_map = nw.new_node(Nodes.UVMap) - musgrave = nw.new_node(Nodes.MusgraveTexture, [uv_map], - input_kwargs={'Detail': log_uniform(15, 30), 'Dimension': 0}) - noise = nw.new_node(Nodes.NoiseTexture, [uv_map], - input_kwargs={'Detail': log_uniform(15, 30), 'Distortion': log_uniform(4, 8)}) - noise = build_color_ramp(nw, noise, [0, uniform(.3, .5)], [(0, 0, 0, 1), (1, 1, 1, 1)]) - difference = nw.new_node(Nodes.MixRGB, [musgrave, noise], attrs={'blend_type': 'DIFFERENCE'}) - base_color = build_color_ramp(nw, difference, [uniform(.2, .3), 1], [back_color, front_color]) - - displacement = nw.new_node(Nodes.Displacement, input_kwargs={ - 'Scale': log_uniform(.0001, .0003), - 'Height': nw.new_node(Nodes.MusgraveTexture, input_kwargs={'Scale': uniform(1e3, 2e3)}) - }) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, input_kwargs={ - 'Base Color': base_color, - 'Roughness': uniform(.7, .8), - }) - - nw.new_node(Nodes.MaterialOutput, input_kwargs={'Surface': principled_bsdf, 'Displacement': displacement}) + musgrave = nw.new_node( + Nodes.MusgraveTexture, + [uv_map], + input_kwargs={"Detail": log_uniform(15, 30), "Dimension": 0}, + ) + noise = nw.new_node( + Nodes.NoiseTexture, + [uv_map], + input_kwargs={"Detail": log_uniform(15, 30), "Distortion": log_uniform(4, 8)}, + ) + noise = build_color_ramp( + nw, noise, [0, uniform(0.3, 0.5)], [(0, 0, 0, 1), (1, 1, 1, 1)] + ) + difference = nw.new_node( + Nodes.MixRGB, [musgrave, noise], attrs={"blend_type": "DIFFERENCE"} + ) + base_color = build_color_ramp( + nw, difference, [uniform(0.2, 0.3), 1], [back_color, front_color] + ) + + displacement = nw.new_node( + Nodes.Displacement, + input_kwargs={ + "Scale": log_uniform(0.0001, 0.0003), + "Height": nw.new_node( + Nodes.MusgraveTexture, input_kwargs={"Scale": uniform(1e3, 2e3)} + ), + }, + ) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": base_color, + "Roughness": uniform(0.7, 0.8), + }, + ) + + nw.new_node( + Nodes.MaterialOutput, + input_kwargs={"Surface": principled_bsdf, "Displacement": displacement}, + ) def apply(obj, selection=None, plaster_colored=None, **kwargs): if plaster_colored is None: - plaster_colored = uniform() < .4 + plaster_colored = uniform() < 0.4 for o in obj if isinstance(obj, Iterable) else [obj]: unwrap_normal(o, selection) - common.apply(obj, shader_plaster, selection, plaster_colored=plaster_colored, **kwargs) - - + common.apply( + obj, shader_plaster, selection, plaster_colored=plaster_colored, **kwargs + ) diff --git a/infinigen/assets/materials/plastic.py b/infinigen/assets/materials/plastic.py index 1dd6912b8..d9f12540f 100644 --- a/infinigen/assets/materials/plastic.py +++ b/infinigen/assets/materials/plastic.py @@ -4,21 +4,24 @@ # Authors: Mingzhe Wang, 
Lingjie Mei -import colorsys -from infinigen.assets.materials.plastics.plastic_rough import shader_rough_plastic -from infinigen.assets.materials.plastics.plastic_translucent import shader_translucent_plastic -from infinigen.core.util.color import hsv2rgba - -from infinigen.assets.materials import common from numpy.random import uniform +from infinigen.assets.materials import common +from infinigen.assets.materials.plastics.plastic_rough import shader_rough_plastic +from infinigen.assets.materials.plastics.plastic_translucent import ( + shader_translucent_plastic, +) def apply(obj, selection=None, clear=None, **kwargs): - is_rough = kwargs.get('rough', uniform(0, 1)) - is_translucent = kwargs.get('translucent', uniform(0, 1)) + is_rough = kwargs.get("rough", uniform(0, 1)) + is_translucent = kwargs.get("translucent", uniform(0, 1)) if clear is None: - clear = uniform() < .2 - shader_func = shader_rough_plastic if is_rough > is_translucent else shader_translucent_plastic + clear = uniform() < 0.2 + shader_func = ( + shader_rough_plastic + if is_rough > is_translucent + else shader_translucent_plastic + ) common.apply(obj, shader_func, selection, clear=clear, **kwargs) diff --git a/infinigen/assets/materials/plastics/plastic_rough.py b/infinigen/assets/materials/plastics/plastic_rough.py index c2db3bd0e..f2ceacc11 100644 --- a/infinigen/assets/materials/plastics/plastic_rough.py +++ b/infinigen/assets/materials/plastics/plastic_rough.py @@ -4,56 +4,96 @@ # Authors: Mingzhe Wang, Lingjie Mei -import bpy -import bpy -import mathutils -from numpy.random import uniform, normal, randint +from numpy.random import uniform from infinigen.assets.materials import common -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.util.color import hsv2rgba from infinigen.core.util.random import log_uniform -from infinigen.core.util.color import color_category, hsv2rgba -from infinigen.core import surface -@node_utils.to_nodegroup('nodegroup_plastics', singleton=False, type='ShaderNodeTree') + +@node_utils.to_nodegroup("nodegroup_plastics", singleton=False, type="ShaderNodeTree") def nodegroup_plastics(nw: NodeWrangler): # Code generated using version 2.6.4 of the node_transpiler - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketColor', 'Base Color', (0.8000, 0.8000, 0.8000, 1.0000)), - ('NodeSocketFloat', 'Scale', 5.0000), - ('NodeSocketFloat', 'Seed', 0.0000), - ('NodeSocketFloat', 'Roughness', 0.0000)]) - - map_range = nw.new_node(Nodes.MapRange, input_kwargs={'Value': group_input.outputs["Roughness"], 3: 0.0500, 4: 0.2500}) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': group_input.outputs["Base Color"], 'Roughness': map_range.outputs["Result"]}) - + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketColor", "Base Color", (0.8000, 0.8000, 0.8000, 1.0000)), + ("NodeSocketFloat", "Scale", 5.0000), + ("NodeSocketFloat", "Seed", 0.0000), + ("NodeSocketFloat", "Roughness", 0.0000), + ], + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": group_input.outputs["Roughness"], 3: 0.0500, 4: 0.2500}, + ) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": group_input.outputs["Base Color"], + "Roughness": map_range.outputs["Result"], + }, + ) + texture_coordinate = nw.new_node(Nodes.TextureCoord) - - multiply = 
nw.new_node(Nodes.Math, + + multiply = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["Scale"], 1: 2000.0000}, - attrs={'operation': 'MULTIPLY'}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': texture_coordinate.outputs["Object"], 'W': group_input.outputs["Seed"], 'Scale': multiply, 'Detail': 15.0000, 'Distortion': 2.0000}, - attrs={'noise_dimensions': '4D'}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: map_range.outputs["Result"], 1: 0.4000}, attrs={'operation': 'MULTIPLY'}) - - multiply_2 = nw.new_node(Nodes.Math, + attrs={"operation": "MULTIPLY"}, + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": texture_coordinate.outputs["Object"], + "W": group_input.outputs["Seed"], + "Scale": multiply, + "Detail": 15.0000, + "Distortion": 2.0000, + }, + attrs={"noise_dimensions": "4D"}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: map_range.outputs["Result"], 1: 0.4000}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, input_kwargs={0: noise_texture.outputs["Fac"], 1: multiply_1}, - attrs={'operation': 'MULTIPLY'}) - - multiply_3 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_2, 1: 0.0030}, attrs={'operation': 'MULTIPLY'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'BSDF': principled_bsdf, 'Displacement': multiply_3}, - attrs={'is_active_output': True}) - -def shader_rough_plastic(nw: NodeWrangler, scale=1.0, base_color=None, roughness=None, seed=None, clear=False, **kwargs): + attrs={"operation": "MULTIPLY"}, + ) + + multiply_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_2, 1: 0.0030}, + attrs={"operation": "MULTIPLY"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"BSDF": principled_bsdf, "Displacement": multiply_3}, + attrs={"is_active_output": True}, + ) + + +def shader_rough_plastic( + nw: NodeWrangler, + scale=1.0, + base_color=None, + roughness=None, + seed=None, + clear=False, + **kwargs, +): # Code generated using version 2.6.4 of the node_transpiler if roughness is None: roughness = uniform(0.0, 1.0) @@ -61,23 +101,33 @@ def shader_rough_plastic(nw: NodeWrangler, scale=1.0, base_color=None, roughness seed = uniform(-1000.0, 1000.0) if base_color is None: if clear: - base_color = hsv2rgba(0, 0, log_uniform(.02, .8)) + base_color = hsv2rgba(0, 0, log_uniform(0.02, 0.8)) else: - base_color = hsv2rgba(uniform(0, 1), uniform(.5, .8), log_uniform(.01, .5)) - - group = nw.new_node(nodegroup_plastics().name, - input_kwargs={'Base Color': base_color, - 'Scale': scale, - 'Seed': seed, - 'Roughness': roughness, - }) - - displacement = nw.new_node('ShaderNodeDisplacement', input_kwargs={'Height': group.outputs["Displacement"], 'Midlevel': 0.0000}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': group.outputs["BSDF"], 'Displacement': displacement}, - attrs={'is_active_output': True}) + base_color = hsv2rgba( + uniform(0, 1), uniform(0.5, 0.8), log_uniform(0.01, 0.5) + ) + + group = nw.new_node( + nodegroup_plastics().name, + input_kwargs={ + "Base Color": base_color, + "Scale": scale, + "Seed": seed, + "Roughness": roughness, + }, + ) + + displacement = nw.new_node( + "ShaderNodeDisplacement", + input_kwargs={"Height": group.outputs["Displacement"], "Midlevel": 0.0000}, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, + input_kwargs={"Surface": group.outputs["BSDF"], "Displacement": displacement}, + attrs={"is_active_output": True}, + 
) def apply(obj, selection=None, **kwargs): - common.apply(obj, shader_rough_plastic, selection, **kwargs) \ No newline at end of file + common.apply(obj, shader_rough_plastic, selection, **kwargs) diff --git a/infinigen/assets/materials/plastics/plastic_translucent.py b/infinigen/assets/materials/plastics/plastic_translucent.py index d41a04264..14fb2d15f 100644 --- a/infinigen/assets/materials/plastics/plastic_translucent.py +++ b/infinigen/assets/materials/plastics/plastic_translucent.py @@ -4,41 +4,54 @@ # Authors: Mingzhe Wang, Lingjie Mei -import colorsys -from infinigen.core.util.color import hsv2rgba +from numpy.random import uniform + from infinigen.assets.materials import common -from infinigen.core.util.random import log_uniform from infinigen.assets.materials.utils.surface_utils import sample_range -from numpy.random import uniform from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.util.color import hsv2rgba +from infinigen.core.util.random import log_uniform + def shader_translucent_plastic(nw: NodeWrangler, clear=False, **input_kwargs): # Code generated using version 2.4.3 of the node_transpiler - layer_weight = nw.new_node('ShaderNodeLayerWeight', input_kwargs={'Blend': sample_range(0.2, 0.4)}) + layer_weight = nw.new_node( + "ShaderNodeLayerWeight", input_kwargs={"Blend": sample_range(0.2, 0.4)} + ) rgb = nw.new_node(Nodes.RGB) if clear: - base_color = hsv2rgba(0, 0, log_uniform(.4, .8)) + base_color = hsv2rgba(0, 0, log_uniform(0.4, 0.8)) else: - base_color = hsv2rgba(uniform(0, 1), uniform(.5, .8), log_uniform(.4, .8)) + base_color = hsv2rgba(uniform(0, 1), uniform(0.5, 0.8), log_uniform(0.4, 0.8)) rgb.outputs[0].default_value = base_color value = nw.new_node(Nodes.Value) value.outputs[0].default_value = sample_range(1.2, 1.6) - glass_bsdf = nw.new_node('ShaderNodeBsdfGlass', input_kwargs={'Color': rgb, 'Roughness': 0.2, 'IOR': value}) + glass_bsdf = nw.new_node( + "ShaderNodeBsdfGlass", + input_kwargs={"Color": rgb, "Roughness": 0.2, "IOR": value}, + ) - glossy_bsdf = nw.new_node('ShaderNodeBsdfGlossy', input_kwargs={'Roughness': 0.2}) + glossy_bsdf = nw.new_node("ShaderNodeBsdfGlossy", input_kwargs={"Roughness": 0.2}) - mix_shader = nw.new_node(Nodes.MixShader, - input_kwargs={'Fac': layer_weight.outputs["Fresnel"], 1: glass_bsdf, 2: glossy_bsdf - }) + mix_shader = nw.new_node( + Nodes.MixShader, + input_kwargs={ + "Fac": layer_weight.outputs["Fresnel"], + 1: glass_bsdf, + 2: glossy_bsdf, + }, + ) - material_output = nw.new_node(Nodes.MaterialOutput, input_kwargs={'Surface': mix_shader}) + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": mix_shader} + ) def apply(obj, selection=None, **kwargs): - common.apply(obj, shader_translucent_plastic, selection, **kwargs) \ No newline at end of file + common.apply(obj, shader_translucent_plastic, selection, **kwargs) diff --git a/infinigen/assets/materials/reptile_brown_circle_attr.py b/infinigen/assets/materials/reptile_brown_circle_attr.py index c08130923..231e74f88 100644 --- a/infinigen/assets/materials/reptile_brown_circle_attr.py +++ b/infinigen/assets/materials/reptile_brown_circle_attr.py @@ -4,90 +4,101 @@ # Authors: Mingzhe Wang -import os, sys -import numpy as np -import math as ma -from infinigen.assets.materials.utils.surface_utils import clip, sample_range, sample_ratio, sample_color, geo_voronoi_noise +import os + import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, 
NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category + +from infinigen.assets.utils.nodegroups.shader import nodegroup_color_mask from infinigen.core import surface +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.assets.creatures.util.nodegroups.shader import nodegroup_color_mask def shader_brown_circle(nw: NodeWrangler, rand=True, **input_kwargs): # Code generated using version 2.4.3 of the node_transpiler - attribute_2 = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'local_pos'}) - - reroute = nw.new_node(Nodes.Reroute, - input_kwargs={'Input': attribute_2.outputs["Color"]}) - + attribute_2 = nw.new_node(Nodes.Attribute, attrs={"attribute_name": "local_pos"}) + + reroute = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": attribute_2.outputs["Color"]} + ) + value = nw.new_node(Nodes.Value) value.outputs[0].default_value = 10.0 - - voronoi_texture = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': reroute, 'Scale': value}, - attrs={'voronoi_dimensions': '2D'}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': reroute}) - - mix = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': 0.0, 'Color1': reroute, 'Color2': noise_texture.outputs["Color"]}) - - voronoi_texture_1 = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': mix, 'Scale': value}, - attrs={'voronoi_dimensions': '2D', 'feature': 'SMOOTH_F1'}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: voronoi_texture.outputs["Distance"], 1: voronoi_texture_1.outputs["Distance"]}, - attrs={'operation': 'SUBTRACT'}) - - power = nw.new_node(Nodes.Math, - input_kwargs={0: subtract, 1: 2.0}, - attrs={'operation': 'POWER'}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: power, 1: 10000.0}, - attrs={'operation': 'MULTIPLY'}) - - less_than = nw.new_node(Nodes.Math, - input_kwargs={0: multiply, 1: 1.0}, - attrs={'operation': 'LESS_THAN'}) - - divide = nw.new_node(Nodes.Math, - input_kwargs={0: less_than, 1: 2.0}, - attrs={'operation': 'DIVIDE'}) - - colorramp_4 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': divide}) + + voronoi_texture = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={"Vector": reroute, "Scale": value}, + attrs={"voronoi_dimensions": "2D"}, + ) + + noise_texture = nw.new_node(Nodes.NoiseTexture, input_kwargs={"Vector": reroute}) + + mix = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": 0.0, + "Color1": reroute, + "Color2": noise_texture.outputs["Color"], + }, + ) + + voronoi_texture_1 = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={"Vector": mix, "Scale": value}, + attrs={"voronoi_dimensions": "2D", "feature": "SMOOTH_F1"}, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: voronoi_texture.outputs["Distance"], + 1: voronoi_texture_1.outputs["Distance"], + }, + attrs={"operation": "SUBTRACT"}, + ) + + power = nw.new_node( + Nodes.Math, input_kwargs={0: subtract, 1: 2.0}, attrs={"operation": "POWER"} + ) + + multiply = nw.new_node( + Nodes.Math, input_kwargs={0: power, 1: 10000.0}, attrs={"operation": "MULTIPLY"} + ) + + less_than = nw.new_node( + Nodes.Math, input_kwargs={0: multiply, 1: 1.0}, attrs={"operation": "LESS_THAN"} + ) + + divide = nw.new_node( + Nodes.Math, input_kwargs={0: less_than, 1: 2.0}, attrs={"operation": "DIVIDE"} + ) + + colorramp_4 = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": divide}) colorramp_4.color_ramp.elements[0].position = 0.0591 colorramp_4.color_ramp.elements[0].color = (0.0, 0.0, 
0.0, 1.0) colorramp_4.color_ramp.elements[1].position = 0.1136 colorramp_4.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - + group = nw.new_node(nodegroup_color_mask().name) - - attribute = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'value'}) - - less_than_1 = nw.new_node(Nodes.Math, + + attribute = nw.new_node(Nodes.Attribute, attrs={"attribute_name": "value"}) + + less_than_1 = nw.new_node( + Nodes.Math, input_kwargs={0: attribute.outputs["Color"], 1: 0.85}, - attrs={'operation': 'LESS_THAN'}) - - attribute_1 = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'index'}) - - noise_texture_1 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': attribute_1.outputs["Color"], 'Scale': 100.0}) - - colorramp = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': noise_texture_1.outputs["Fac"]}) + attrs={"operation": "LESS_THAN"}, + ) + + attribute_1 = nw.new_node(Nodes.Attribute, attrs={"attribute_name": "index"}) + + noise_texture_1 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={"Vector": attribute_1.outputs["Color"], "Scale": 100.0}, + ) + + colorramp = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": noise_texture_1.outputs["Fac"]} + ) colorramp.color_ramp.elements.new(0) colorramp.color_ramp.elements.new(0) colorramp.color_ramp.elements.new(0) @@ -101,12 +112,14 @@ def shader_brown_circle(nw: NodeWrangler, rand=True, **input_kwargs): colorramp.color_ramp.elements[3].color = (0.7346, 0.456, 0.2857, 1.0) colorramp.color_ramp.elements[4].position = 0.9455 colorramp.color_ramp.elements[4].color = (0.2134, 0.0921, 0.0372, 1.0) - - noise_texture_2 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': reroute, 'Scale': 500.0}) - - colorramp_1 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': noise_texture_2.outputs["Fac"]}) + + noise_texture_2 = nw.new_node( + Nodes.NoiseTexture, input_kwargs={"Vector": reroute, "Scale": 500.0} + ) + + colorramp_1 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": noise_texture_2.outputs["Fac"]} + ) colorramp_1.color_ramp.elements.new(0) colorramp_1.color_ramp.elements.new(0) colorramp_1.color_ramp.elements.new(0) @@ -120,55 +133,89 @@ def shader_brown_circle(nw: NodeWrangler, rand=True, **input_kwargs): colorramp_1.color_ramp.elements[3].color = (0.6724, 0.4179, 0.2623, 1.0) colorramp_1.color_ramp.elements[4].position = 1.0 colorramp_1.color_ramp.elements[4].color = (0.1946, 0.0844, 0.0343, 1.0) - - mix_1 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Color1': colorramp.outputs["Color"], 'Color2': colorramp_1.outputs["Color"]}, - attrs={'blend_type': 'MULTIPLY'}) - - mix_2 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': less_than_1, 'Color1': (0.4969, 0.305, 0.1746, 1.0), 'Color2': mix_1}) - - colorramp_3 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': group}) + + mix_1 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Color1": colorramp.outputs["Color"], + "Color2": colorramp_1.outputs["Color"], + }, + attrs={"blend_type": "MULTIPLY"}, + ) + + mix_2 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": less_than_1, + "Color1": (0.4969, 0.305, 0.1746, 1.0), + "Color2": mix_1, + }, + ) + + colorramp_3 = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": group}) colorramp_3.color_ramp.elements[0].position = 0.0 colorramp_3.color_ramp.elements[0].color = (0.4969, 0.305, 0.1746, 1.0) colorramp_3.color_ramp.elements[1].position = 1.0 colorramp_3.color_ramp.elements[1].color = (0.9684, 1.0, 0.6723, 1.0) - - mix_6 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': group, 'Color1': mix_2, 'Color2': 
colorramp_3.outputs["Color"]}) - - power_1 = nw.new_node(Nodes.Math, - input_kwargs={0: subtract, 1: 2.0}, - attrs={'operation': 'POWER'}) - - multiply_1 = nw.new_node(Nodes.Math, + + mix_6 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": group, + "Color1": mix_2, + "Color2": colorramp_3.outputs["Color"], + }, + ) + + power_1 = nw.new_node( + Nodes.Math, input_kwargs={0: subtract, 1: 2.0}, attrs={"operation": "POWER"} + ) + + multiply_1 = nw.new_node( + Nodes.Math, input_kwargs={0: power_1, 1: 1000000.0}, - attrs={'operation': 'MULTIPLY'}) - - less_than_2 = nw.new_node(Nodes.Math, + attrs={"operation": "MULTIPLY"}, + ) + + less_than_2 = nw.new_node( + Nodes.Math, input_kwargs={0: multiply_1, 1: 0.001}, - attrs={'operation': 'LESS_THAN'}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: less_than_2, 1: divide}, - attrs={'use_clamp': True}) - - multiply_2 = nw.new_node(Nodes.VectorMath, + attrs={"operation": "LESS_THAN"}, + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: less_than_2, 1: divide}, attrs={"use_clamp": True} + ) + + multiply_2 = nw.new_node( + Nodes.VectorMath, input_kwargs={0: less_than_2, 1: attribute_1.outputs["Color"]}, - attrs={'operation': 'MULTIPLY'}) - - noise_texture_3 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': multiply_2.outputs["Vector"], 'Scale': 100.0, 'Roughness': 0.49}) - - mix_3 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': less_than_2, 'Color1': (0.0, 0.0, 0.0, 1.0), 'Color2': noise_texture_3.outputs["Fac"]}) - - mix_4 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': less_than_2, 'Color1': add, 'Color2': mix_3}) - - colorramp_2 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': mix_4}) + attrs={"operation": "MULTIPLY"}, + ) + + noise_texture_3 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": multiply_2.outputs["Vector"], + "Scale": 100.0, + "Roughness": 0.49, + }, + ) + + mix_3 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": less_than_2, + "Color1": (0.0, 0.0, 0.0, 1.0), + "Color2": noise_texture_3.outputs["Fac"], + }, + ) + + mix_4 = nw.new_node( + Nodes.MixRGB, input_kwargs={"Fac": less_than_2, "Color1": add, "Color2": mix_3} + ) + + colorramp_2 = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": mix_4}) colorramp_2.color_ramp.elements.new(0) colorramp_2.color_ramp.elements.new(0) colorramp_2.color_ramp.elements[0].position = 0.0045 @@ -179,128 +226,219 @@ def shader_brown_circle(nw: NodeWrangler, rand=True, **input_kwargs): colorramp_2.color_ramp.elements[2].color = (0.0, 0.0, 0.0, 1.0) colorramp_2.color_ramp.elements[3].position = 0.5591 colorramp_2.color_ramp.elements[3].color = (0.4524, 0.3119, 0.1992, 1.0) - - mix_5 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': colorramp_4.outputs["Color"], 'Color1': mix_6, 'Color2': colorramp_2.outputs["Color"]}) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': mix_5}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': principled_bsdf}) + + mix_5 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": colorramp_4.outputs["Color"], + "Color1": mix_6, + "Color2": colorramp_2.outputs["Color"], + }, + ) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, input_kwargs={"Base Color": mix_5} + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf} + ) + def geometry_reptile_vor(nw: NodeWrangler, rand=True, **input_kwargs): # Code generated using version 2.4.3 of the node_transpiler - group_input = nw.new_node(Nodes.GroupInput, - 
expose_input=[('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketVector', 'value', (0.0, 0.0, 0.0))]) - + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketVector", "value", (0.0, 0.0, 0.0)), + ], + ) + normal = nw.new_node(Nodes.InputNormal) - - multiply = nw.new_node(Nodes.VectorMath, + + multiply = nw.new_node( + Nodes.VectorMath, input_kwargs={0: group_input.outputs["value"], 1: normal}, - attrs={'operation': 'MULTIPLY'}) - + attrs={"operation": "MULTIPLY"}, + ) + value = nw.new_node(Nodes.Value) value.outputs[0].default_value = 0.003 - - multiply_1 = nw.new_node(Nodes.VectorMath, + + multiply_1 = nw.new_node( + Nodes.VectorMath, input_kwargs={0: multiply.outputs["Vector"], 1: value}, - attrs={'operation': 'MULTIPLY'}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 'Offset': multiply_1.outputs["Vector"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_position}) + attrs={"operation": "MULTIPLY"}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + "Offset": multiply_1.outputs["Vector"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_position} + ) + def geometry_reptile_vor_attr(nw: NodeWrangler, rand=True, **input_kwargs): # Code generated using version 2.4.3 of the node_transpiler - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None)]) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': group_input.outputs["Geometry"]}) - + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) + + set_position = nw.new_node( + Nodes.SetPosition, input_kwargs={"Geometry": group_input.outputs["Geometry"]} + ) + position = nw.new_node(Nodes.InputPosition) - + value = nw.new_node(Nodes.Value) value.outputs[0].default_value = 1.0 - - multiply = nw.new_node(Nodes.VectorMath, + + multiply = nw.new_node( + Nodes.VectorMath, input_kwargs={0: position, 1: value}, - attrs={'operation': 'MULTIPLY'}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': multiply.outputs["Vector"], 'Scale': 6.0, 'Detail': 15.0}) - - mix = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': 0.1, 'Color1': multiply.outputs["Vector"], 'Color2': noise_texture.outputs["Fac"]}, - attrs={'blend_type': 'ADD'}) - + attrs={"operation": "MULTIPLY"}, + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": multiply.outputs["Vector"], + "Scale": 6.0, + "Detail": 15.0, + }, + ) + + mix = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": 0.1, + "Color1": multiply.outputs["Vector"], + "Color2": noise_texture.outputs["Fac"], + }, + attrs={"blend_type": "ADD"}, + ) + value_1 = nw.new_node(Nodes.Value) value_1.outputs[0].default_value = 0.0 - + value_2 = nw.new_node(Nodes.Value) value_2.outputs[0].default_value = 80.0 - - voronoi_texture = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': mix, 'W': value_1, 'Scale': value_2}, - attrs={'voronoi_dimensions': '4D', 'feature': 'DISTANCE_TO_EDGE'}) - - colorramp = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': voronoi_texture.outputs["Distance"]}) + + voronoi_texture = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={"Vector": mix, "W": value_1, "Scale": value_2}, + attrs={"voronoi_dimensions": "4D", "feature": 
"DISTANCE_TO_EDGE"}, + ) + + colorramp = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": voronoi_texture.outputs["Distance"]} + ) colorramp.color_ramp.elements[0].position = 0.02 colorramp.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) colorramp.color_ramp.elements[1].position = 0.2 colorramp.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - - noise_texture_1 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': multiply.outputs["Vector"], 'Scale': 100.0}) - - colorramp_1 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': noise_texture_1.outputs["Fac"]}) + + noise_texture_1 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={"Vector": multiply.outputs["Vector"], "Scale": 100.0}, + ) + + colorramp_1 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": noise_texture_1.outputs["Fac"]} + ) colorramp_1.color_ramp.elements[0].position = 0.1 colorramp_1.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) colorramp_1.color_ramp.elements[1].position = 0.4 colorramp_1.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - - mix_1 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': colorramp.outputs["Color"], 'Color1': colorramp.outputs["Color"], 'Color2': colorramp_1.outputs["Color"]}) - - capture_attribute = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': set_position, 1: mix_1}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - voronoi_texture_1 = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': mix, 'W': value_1, 'Scale': value_2}, - attrs={'voronoi_dimensions': '4D'}) - - capture_attribute_1 = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': capture_attribute.outputs["Geometry"], 1: voronoi_texture_1.outputs["Position"]}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': capture_attribute_1.outputs["Geometry"], 'attr1': capture_attribute.outputs["Attribute"], 'attr2': capture_attribute_1.outputs["Attribute"]}) + + mix_1 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": colorramp.outputs["Color"], + "Color1": colorramp.outputs["Color"], + "Color2": colorramp_1.outputs["Color"], + }, + ) + + capture_attribute = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={"Geometry": set_position, 1: mix_1}, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + voronoi_texture_1 = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={"Vector": mix, "W": value_1, "Scale": value_2}, + attrs={"voronoi_dimensions": "4D"}, + ) + + capture_attribute_1 = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={ + "Geometry": capture_attribute.outputs["Geometry"], + 1: voronoi_texture_1.outputs["Position"], + }, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": capture_attribute_1.outputs["Geometry"], + "attr1": capture_attribute.outputs["Attribute"], + "attr2": capture_attribute_1.outputs["Attribute"], + }, + ) + def apply(obj, geo_kwargs=None, shader_kwargs=None, **kwargs): - surface.add_geomod(obj, geometry_reptile_vor_attr, input_kwargs=geo_kwargs, attributes=['value', 'index']) - surface.add_geomod(obj, geometry_reptile_vor, input_kwargs=geo_kwargs, attributes=[]) + surface.add_geomod( + obj, + geometry_reptile_vor_attr, + input_kwargs=geo_kwargs, + attributes=["value", "index"], + ) + surface.add_geomod( + obj, geometry_reptile_vor, input_kwargs=geo_kwargs, attributes=[] + ) surface.add_material(obj, shader_brown_circle, input_kwargs=shader_kwargs) + if __name__ == "__main__": for i in range(1): - 
bpy.ops.wm.open_mainfile(filepath='dev_scene_1019.blend') - #creature(73349, 0).parts(0, factory=QuadrupedBody) + bpy.ops.wm.open_mainfile(filepath="dev_scene_1019.blend") + # creature(73349, 0).parts(0, factory=QuadrupedBody) import generated_surface_script_replile_gray as gray import generated_surface_script_replile_two_color as two_color - apply(bpy.data.objects['creature(73349, 0).parts(0, factory=QuadrupedBody)'], geo_kwargs={'rand': True}, shader_kwargs={'rand': True, 'mat_name':'brown_circle'}) - gray.apply(bpy.data.objects['creature(19946, 0).parts(0, factory=QuadrupedBody)'], geo_kwargs={'rand': True}, shader_kwargs={'rand': True, 'mat_name':'two_color'}) - two_color.apply(bpy.data.objects['creature(51668, 0).parts(0, factory=QuadrupedBody)'], geo_kwargs={'rand': True}, shader_kwargs={'rand': True, 'mat_name':'gray'}) - fn = os.path.join(os.path.abspath(os.curdir), 'dev_scene_test_brown_circle_attr.blend') + + apply( + bpy.data.objects["creature(73349, 0).parts(0, factory=QuadrupedBody)"], + geo_kwargs={"rand": True}, + shader_kwargs={"rand": True, "mat_name": "brown_circle"}, + ) + gray.apply( + bpy.data.objects["creature(19946, 0).parts(0, factory=QuadrupedBody)"], + geo_kwargs={"rand": True}, + shader_kwargs={"rand": True, "mat_name": "two_color"}, + ) + two_color.apply( + bpy.data.objects["creature(51668, 0).parts(0, factory=QuadrupedBody)"], + geo_kwargs={"rand": True}, + shader_kwargs={"rand": True, "mat_name": "gray"}, + ) + fn = os.path.join( + os.path.abspath(os.curdir), "dev_scene_test_brown_circle_attr.blend" + ) bpy.ops.wm.save_as_mainfile(filepath=fn) - #bpy.context.scene.render.filepath = os.path.join('surfaces/surface_thumbnails', 'bone%d.jpg'%(i)) - #bpy.context.scene.render.image_settings.file_format='JPEG' - #bpy.ops.render.render(write_still=True) \ No newline at end of file + # bpy.context.scene.render.filepath = os.path.join('surfaces/surface_thumbnails', 'bone%d.jpg'%(i)) + # bpy.context.scene.render.image_settings.file_format='JPEG' + # bpy.ops.render.render(write_still=True) diff --git a/infinigen/assets/materials/reptile_gray_attr.py b/infinigen/assets/materials/reptile_gray_attr.py index 867173ee0..370858bfe 100644 --- a/infinigen/assets/materials/reptile_gray_attr.py +++ b/infinigen/assets/materials/reptile_gray_attr.py @@ -4,43 +4,34 @@ # Authors: Mingzhe Wang -import os, sys -import numpy as np -import math as ma -from infinigen.assets.materials.utils.surface_utils import clip, sample_range, sample_ratio, sample_color, geo_voronoi_noise -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category +from infinigen.assets.utils.nodegroups.shader import nodegroup_color_mask from infinigen.core import surface +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.assets.creatures.util.nodegroups.shader import nodegroup_color_mask def shader_gray(nw: NodeWrangler, rand=True, **input_kwargs): # Code generated using version 2.4.3 of the node_transpiler group = nw.new_node(nodegroup_color_mask().name) - - colorramp_2 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': group}) + + colorramp_2 = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": group}) colorramp_2.color_ramp.elements[0].position = 0.0 colorramp_2.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) colorramp_2.color_ramp.elements[1].position = 1.0 
colorramp_2.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - - attribute = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'value'}) - - attribute_1 = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'index'}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': attribute_1.outputs["Color"], 'Scale': 10.0}) - - colorramp = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': noise_texture.outputs["Color"]}) + + attribute = nw.new_node(Nodes.Attribute, attrs={"attribute_name": "value"}) + + attribute_1 = nw.new_node(Nodes.Attribute, attrs={"attribute_name": "index"}) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={"Vector": attribute_1.outputs["Color"], "Scale": 10.0}, + ) + + colorramp = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": noise_texture.outputs["Color"]} + ) colorramp.color_ramp.elements.new(0) colorramp.color_ramp.elements.new(0) colorramp.color_ramp.elements[0].position = 0.1 @@ -51,132 +42,224 @@ def shader_gray(nw: NodeWrangler, rand=True, **input_kwargs): colorramp.color_ramp.elements[2].color = (0.0489, 0.0488, 0.0489, 1.0) colorramp.color_ramp.elements[3].position = 0.8227 colorramp.color_ramp.elements[3].color = (0.0282, 0.0252, 0.027, 1.0) - + texture_coordinate = nw.new_node(Nodes.TextureCoord) - - noise_texture_1 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': texture_coordinate.outputs["Generated"], 'Scale': 200.0}) - - colorramp_1 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': noise_texture_1.outputs["Fac"]}) + + noise_texture_1 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": texture_coordinate.outputs["Generated"], + "Scale": 200.0, + }, + ) + + colorramp_1 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": noise_texture_1.outputs["Fac"]} + ) colorramp_1.color_ramp.elements[0].position = 0.0 colorramp_1.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) colorramp_1.color_ramp.elements[1].position = 1.0 colorramp_1.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - - mix = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': colorramp.outputs["Color"], 'Color1': colorramp.outputs["Color"], 'Color2': colorramp_1.outputs["Color"]}) - - mix_1 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': attribute.outputs["Color"], 'Color1': (0.033, 0.033, 0.033, 1.0), 'Color2': mix}) - - mix_2 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': colorramp_2.outputs["Color"], 'Color1': mix_1, 'Color2': (1.0, 1.0, 1.0, 1.0)}) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': mix_2, 'Roughness': 1.0}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': principled_bsdf}) + + mix = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": colorramp.outputs["Color"], + "Color1": colorramp.outputs["Color"], + "Color2": colorramp_1.outputs["Color"], + }, + ) + + mix_1 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": attribute.outputs["Color"], + "Color1": (0.033, 0.033, 0.033, 1.0), + "Color2": mix, + }, + ) + + mix_2 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": colorramp_2.outputs["Color"], + "Color1": mix_1, + "Color2": (1.0, 1.0, 1.0, 1.0), + }, + ) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, input_kwargs={"Base Color": mix_2, "Roughness": 1.0} + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf} + ) + def geometry_reptile_vor(nw: NodeWrangler, rand=True, **input_kwargs): # Code generated using version 2.4.3 of the 
node_transpiler - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketVector', 'value', (0.0, 0.0, 0.0))]) - + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketVector", "value", (0.0, 0.0, 0.0)), + ], + ) + normal = nw.new_node(Nodes.InputNormal) - - multiply = nw.new_node(Nodes.VectorMath, + + multiply = nw.new_node( + Nodes.VectorMath, input_kwargs={0: group_input.outputs["value"], 1: normal}, - attrs={'operation': 'MULTIPLY'}) - + attrs={"operation": "MULTIPLY"}, + ) + value = nw.new_node(Nodes.Value) value.outputs[0].default_value = 0.003 - - multiply_1 = nw.new_node(Nodes.VectorMath, + + multiply_1 = nw.new_node( + Nodes.VectorMath, input_kwargs={0: multiply.outputs["Vector"], 1: value}, - attrs={'operation': 'MULTIPLY'}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 'Offset': multiply_1.outputs["Vector"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_position}) + attrs={"operation": "MULTIPLY"}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + "Offset": multiply_1.outputs["Vector"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_position} + ) + def geometry_reptile_vor_attr(nw: NodeWrangler, rand=True, **input_kwargs): # Code generated using version 2.4.3 of the node_transpiler - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None)]) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': group_input.outputs["Geometry"]}) - + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) + + set_position = nw.new_node( + Nodes.SetPosition, input_kwargs={"Geometry": group_input.outputs["Geometry"]} + ) + position = nw.new_node(Nodes.InputPosition) - + value = nw.new_node(Nodes.Value) value.outputs[0].default_value = 1.0 - - multiply = nw.new_node(Nodes.VectorMath, + + multiply = nw.new_node( + Nodes.VectorMath, input_kwargs={0: position, 1: value}, - attrs={'operation': 'MULTIPLY'}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': multiply.outputs["Vector"], 'Scale': 6.0, 'Detail': 15.0}) - - mix = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': 0.1, 'Color1': multiply.outputs["Vector"], 'Color2': noise_texture.outputs["Fac"]}, - attrs={'blend_type': 'ADD'}) - + attrs={"operation": "MULTIPLY"}, + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": multiply.outputs["Vector"], + "Scale": 6.0, + "Detail": 15.0, + }, + ) + + mix = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": 0.1, + "Color1": multiply.outputs["Vector"], + "Color2": noise_texture.outputs["Fac"], + }, + attrs={"blend_type": "ADD"}, + ) + value_1 = nw.new_node(Nodes.Value) value_1.outputs[0].default_value = 0.0 - + value_2 = nw.new_node(Nodes.Value) value_2.outputs[0].default_value = 80.0 - - voronoi_texture = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': mix, 'W': value_1, 'Scale': value_2}, - attrs={'voronoi_dimensions': '4D', 'feature': 'DISTANCE_TO_EDGE'}) - - colorramp = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': voronoi_texture.outputs["Distance"]}) + + voronoi_texture = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={"Vector": mix, "W": value_1, "Scale": 
value_2}, + attrs={"voronoi_dimensions": "4D", "feature": "DISTANCE_TO_EDGE"}, + ) + + colorramp = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": voronoi_texture.outputs["Distance"]} + ) colorramp.color_ramp.elements[0].position = 0.02 colorramp.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) colorramp.color_ramp.elements[1].position = 0.2 colorramp.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - - noise_texture_1 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': multiply.outputs["Vector"], 'Scale': 100.0}) - - colorramp_1 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': noise_texture_1.outputs["Fac"]}) + + noise_texture_1 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={"Vector": multiply.outputs["Vector"], "Scale": 100.0}, + ) + + colorramp_1 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": noise_texture_1.outputs["Fac"]} + ) colorramp_1.color_ramp.elements[0].position = 0.1 colorramp_1.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) colorramp_1.color_ramp.elements[1].position = 0.4 colorramp_1.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - - mix_1 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': colorramp.outputs["Color"], 'Color1': colorramp.outputs["Color"], 'Color2': colorramp_1.outputs["Color"]}) - - capture_attribute = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': set_position, 1: mix_1}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - voronoi_texture_1 = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': mix, 'W': value_1, 'Scale': value_2}, - attrs={'voronoi_dimensions': '4D'}) - - capture_attribute_1 = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': capture_attribute.outputs["Geometry"], 1: voronoi_texture_1.outputs["Position"]}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': capture_attribute_1.outputs["Geometry"], 'attr1': capture_attribute.outputs["Attribute"], 'attr2': capture_attribute_1.outputs["Attribute"]}) + + mix_1 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": colorramp.outputs["Color"], + "Color1": colorramp.outputs["Color"], + "Color2": colorramp_1.outputs["Color"], + }, + ) + + capture_attribute = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={"Geometry": set_position, 1: mix_1}, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + voronoi_texture_1 = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={"Vector": mix, "W": value_1, "Scale": value_2}, + attrs={"voronoi_dimensions": "4D"}, + ) + + capture_attribute_1 = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={ + "Geometry": capture_attribute.outputs["Geometry"], + 1: voronoi_texture_1.outputs["Position"], + }, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": capture_attribute_1.outputs["Geometry"], + "attr1": capture_attribute.outputs["Attribute"], + "attr2": capture_attribute_1.outputs["Attribute"], + }, + ) + def apply(obj, geo_kwargs=None, shader_kwargs=None, **kwargs): - surface.add_geomod(obj, geometry_reptile_vor_attr, input_kwargs=geo_kwargs, attributes=['value', 'index']) - surface.add_geomod(obj, geometry_reptile_vor, input_kwargs=geo_kwargs, attributes=[]) + surface.add_geomod( + obj, + geometry_reptile_vor_attr, + input_kwargs=geo_kwargs, + attributes=["value", "index"], + ) + surface.add_geomod( + obj, geometry_reptile_vor, input_kwargs=geo_kwargs, attributes=[] + ) surface.add_material(obj, shader_gray, input_kwargs=shader_kwargs) - diff --git 
a/infinigen/assets/materials/reptile_two_color_attr.py b/infinigen/assets/materials/reptile_two_color_attr.py index 2f6994ed1..c3331be32 100644 --- a/infinigen/assets/materials/reptile_two_color_attr.py +++ b/infinigen/assets/materials/reptile_two_color_attr.py @@ -4,50 +4,69 @@ # Authors: Mingzhe Wang -import os, sys -import numpy as np -import math as ma -from infinigen.assets.materials.utils.surface_utils import clip, sample_range, sample_ratio, sample_color, geo_voronoi_noise -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category +from infinigen.assets.utils.nodegroups.shader import nodegroup_color_mask from infinigen.core import surface +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.assets.creatures.util.nodegroups.shader import nodegroup_color_mask def shader_two_color(nw: NodeWrangler, rand=True, **input_kwargs): # Code generated using version 2.4.3 of the node_transpiler group = nw.new_node(nodegroup_color_mask().name) - + texture_coordinate = nw.new_node(Nodes.TextureCoord) - - mapping = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': texture_coordinate.outputs["Generated"], 'Rotation': (0.5236, -0.6807, 0.0)}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': mapping, 'Scale': 3.0, 'Detail': 10.0, 'Distortion': 0.5}) - - noise_texture_1 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': mapping, 'Scale': 20.0, 'Detail': 50.0, 'Distortion': 0.5}, - attrs={'noise_dimensions': '4D'}) - - mix = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': 0.5667, 'Color1': noise_texture.outputs["Fac"], 'Color2': noise_texture_1.outputs["Fac"]}, - attrs={'blend_type': 'MULTIPLY'}) - + + mapping = nw.new_node( + Nodes.Mapping, + input_kwargs={ + "Vector": texture_coordinate.outputs["Generated"], + "Rotation": (0.5236, -0.6807, 0.0), + }, + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": mapping, + "Scale": 3.0, + "Detail": 10.0, + "Distortion": 0.5, + }, + ) + + noise_texture_1 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": mapping, + "Scale": 20.0, + "Detail": 50.0, + "Distortion": 0.5, + }, + attrs={"noise_dimensions": "4D"}, + ) + + mix = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": 0.5667, + "Color1": noise_texture.outputs["Fac"], + "Color2": noise_texture_1.outputs["Fac"], + }, + attrs={"blend_type": "MULTIPLY"}, + ) + value = nw.new_node(Nodes.Value) value.outputs[0].default_value = 2.0 - - multiply = nw.new_node(Nodes.VectorMath, + + multiply = nw.new_node( + Nodes.VectorMath, input_kwargs={0: mix, 1: value}, - attrs={'operation': 'MULTIPLY'}) - - colorramp = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': multiply.outputs["Vector"]}) + attrs={"operation": "MULTIPLY"}, + ) + + colorramp = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": multiply.outputs["Vector"]} + ) colorramp.color_ramp.elements.new(0) colorramp.color_ramp.elements[0].position = 0.0 colorramp.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) @@ -55,9 +74,10 @@ def shader_two_color(nw: NodeWrangler, rand=True, **input_kwargs): colorramp.color_ramp.elements[1].color = (0.1064, 0.1064, 0.1064, 1.0) colorramp.color_ramp.elements[2].position = 1.0 colorramp.color_ramp.elements[2].color = (1.0, 1.0, 1.0, 1.0) - - colorramp_1 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': 
colorramp.outputs["Color"]}) + + colorramp_1 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": colorramp.outputs["Color"]} + ) colorramp_1.color_ramp.elements.new(0) colorramp_1.color_ramp.elements.new(0) colorramp_1.color_ramp.elements.new(0) @@ -119,123 +139,203 @@ def shader_two_color(nw: NodeWrangler, rand=True, **input_kwargs): colorramp_1.color_ramp.elements[19].color = (0.0566, 0.0405, 0.0273, 1.0) colorramp_1.color_ramp.elements[20].position = 1.0 colorramp_1.color_ramp.elements[20].color = (0.0356, 0.0247, 0.0168, 1.0) - - mix_1 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': group, 'Color1': colorramp_1.outputs["Color"], 'Color2': (1.0, 1.0, 1.0, 1.0)}) - - attribute = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'value'}) - - colorramp_2 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': attribute.outputs["Color"]}) + + mix_1 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": group, + "Color1": colorramp_1.outputs["Color"], + "Color2": (1.0, 1.0, 1.0, 1.0), + }, + ) + + attribute = nw.new_node(Nodes.Attribute, attrs={"attribute_name": "value"}) + + colorramp_2 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": attribute.outputs["Color"]} + ) colorramp_2.color_ramp.elements[0].position = 0.0 colorramp_2.color_ramp.elements[0].color = (0.2634, 0.2634, 0.2634, 1.0) colorramp_2.color_ramp.elements[1].position = 1.0 colorramp_2.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': mix_1, 'Specular': 0.0, 'Roughness': colorramp_2.outputs["Color"]}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': principled_bsdf}) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": mix_1, + "Specular": 0.0, + "Roughness": colorramp_2.outputs["Color"], + }, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf} + ) + def geometry_reptile_vor(nw: NodeWrangler, rand=True, **input_kwargs): # Code generated using version 2.4.3 of the node_transpiler - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketVector', 'value', (0.0, 0.0, 0.0))]) - + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketVector", "value", (0.0, 0.0, 0.0)), + ], + ) + normal = nw.new_node(Nodes.InputNormal) - - multiply = nw.new_node(Nodes.VectorMath, + + multiply = nw.new_node( + Nodes.VectorMath, input_kwargs={0: group_input.outputs["value"], 1: normal}, - attrs={'operation': 'MULTIPLY'}) - + attrs={"operation": "MULTIPLY"}, + ) + value = nw.new_node(Nodes.Value) value.outputs[0].default_value = 0.003 - - multiply_1 = nw.new_node(Nodes.VectorMath, + + multiply_1 = nw.new_node( + Nodes.VectorMath, input_kwargs={0: multiply.outputs["Vector"], 1: value}, - attrs={'operation': 'MULTIPLY'}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 'Offset': multiply_1.outputs["Vector"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_position}) + attrs={"operation": "MULTIPLY"}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + "Offset": multiply_1.outputs["Vector"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_position} + ) + def geometry_reptile_vor_attr(nw: 
NodeWrangler, rand=True, **input_kwargs): # Code generated using version 2.4.3 of the node_transpiler - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None)]) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': group_input.outputs["Geometry"]}) - + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) + + set_position = nw.new_node( + Nodes.SetPosition, input_kwargs={"Geometry": group_input.outputs["Geometry"]} + ) + position = nw.new_node(Nodes.InputPosition) - + value = nw.new_node(Nodes.Value) value.outputs[0].default_value = 1.0 - - multiply = nw.new_node(Nodes.VectorMath, + + multiply = nw.new_node( + Nodes.VectorMath, input_kwargs={0: position, 1: value}, - attrs={'operation': 'MULTIPLY'}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': multiply.outputs["Vector"], 'Scale': 6.0, 'Detail': 15.0}) - - mix = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': 0.1, 'Color1': multiply.outputs["Vector"], 'Color2': noise_texture.outputs["Fac"]}, - attrs={'blend_type': 'ADD'}) - + attrs={"operation": "MULTIPLY"}, + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": multiply.outputs["Vector"], + "Scale": 6.0, + "Detail": 15.0, + }, + ) + + mix = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": 0.1, + "Color1": multiply.outputs["Vector"], + "Color2": noise_texture.outputs["Fac"], + }, + attrs={"blend_type": "ADD"}, + ) + value_1 = nw.new_node(Nodes.Value) value_1.outputs[0].default_value = 0.0 - + value_2 = nw.new_node(Nodes.Value) value_2.outputs[0].default_value = 80.0 - - voronoi_texture = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': mix, 'W': value_1, 'Scale': value_2}, - attrs={'voronoi_dimensions': '4D', 'feature': 'DISTANCE_TO_EDGE'}) - - colorramp = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': voronoi_texture.outputs["Distance"]}) + + voronoi_texture = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={"Vector": mix, "W": value_1, "Scale": value_2}, + attrs={"voronoi_dimensions": "4D", "feature": "DISTANCE_TO_EDGE"}, + ) + + colorramp = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": voronoi_texture.outputs["Distance"]} + ) colorramp.color_ramp.elements[0].position = 0.02 colorramp.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) colorramp.color_ramp.elements[1].position = 0.2 colorramp.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - - noise_texture_1 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': multiply.outputs["Vector"], 'Scale': 100.0}) - - colorramp_1 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': noise_texture_1.outputs["Fac"]}) + + noise_texture_1 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={"Vector": multiply.outputs["Vector"], "Scale": 100.0}, + ) + + colorramp_1 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": noise_texture_1.outputs["Fac"]} + ) colorramp_1.color_ramp.elements[0].position = 0.1 colorramp_1.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) colorramp_1.color_ramp.elements[1].position = 0.4 colorramp_1.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - - mix_1 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': colorramp.outputs["Color"], 'Color1': colorramp.outputs["Color"], 'Color2': colorramp_1.outputs["Color"]}) - - capture_attribute = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': set_position, 1: mix_1}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - voronoi_texture_1 = 
nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': mix, 'W': value_1, 'Scale': value_2}, - attrs={'voronoi_dimensions': '4D'}) - - capture_attribute_1 = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': capture_attribute.outputs["Geometry"], 1: voronoi_texture_1.outputs["Position"]}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': capture_attribute_1.outputs["Geometry"], 'attr1': capture_attribute.outputs["Attribute"], 'attr2': capture_attribute_1.outputs["Attribute"]}) + + mix_1 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": colorramp.outputs["Color"], + "Color1": colorramp.outputs["Color"], + "Color2": colorramp_1.outputs["Color"], + }, + ) + + capture_attribute = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={"Geometry": set_position, 1: mix_1}, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + voronoi_texture_1 = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={"Vector": mix, "W": value_1, "Scale": value_2}, + attrs={"voronoi_dimensions": "4D"}, + ) + + capture_attribute_1 = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={ + "Geometry": capture_attribute.outputs["Geometry"], + 1: voronoi_texture_1.outputs["Position"], + }, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": capture_attribute_1.outputs["Geometry"], + "attr1": capture_attribute.outputs["Attribute"], + "attr2": capture_attribute_1.outputs["Attribute"], + }, + ) + def apply(obj, geo_kwargs=None, shader_kwargs=None, **kwargs): - surface.add_geomod(obj, geometry_reptile_vor_attr, input_kwargs=geo_kwargs, attributes=['value', 'index']) - surface.add_geomod(obj, geometry_reptile_vor, input_kwargs=geo_kwargs, attributes=[]) + surface.add_geomod( + obj, + geometry_reptile_vor_attr, + input_kwargs=geo_kwargs, + attributes=["value", "index"], + ) + surface.add_geomod( + obj, geometry_reptile_vor, input_kwargs=geo_kwargs, attributes=[] + ) surface.add_material(obj, shader_two_color, input_kwargs=shader_kwargs) diff --git a/infinigen/assets/materials/river_water.py b/infinigen/assets/materials/river_water.py index ef7aef798..b0429fadb 100644 --- a/infinigen/assets/materials/river_water.py +++ b/infinigen/assets/materials/river_water.py @@ -3,181 +3,324 @@ # Authors: Karhan Kayan -import bpy -import mathutils from numpy.random import normal as N -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface +from infinigen.core import surface +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler from infinigen.core.util.random import random_color_neighbour + def shader_river_water(nw: NodeWrangler): # Code generated using version 2.6.3 of the node_transpiler light_path = nw.new_node(Nodes.LightPath) - multiply = nw.new_node(Nodes.Math, input_kwargs={1: light_path.outputs["Is Camera Ray"]}, attrs={'operation': 'MULTIPLY'}) + multiply = nw.new_node( + Nodes.Math, + input_kwargs={1: light_path.outputs["Is Camera Ray"]}, + attrs={"operation": "MULTIPLY"}, + ) transparent_bsdf = nw.new_node(Nodes.TransparentBSDF) - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, input_kwargs={'Roughness': 0.0000, 'IOR': 1.3300, 'Transmission': 1.0000}) + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={"Roughness": 0.0000, "IOR": 1.3300, "Transmission": 1.0000}, + ) - mix_shader = 
nw.new_node(Nodes.MixShader, input_kwargs={'Fac': multiply, 1: transparent_bsdf, 2: principled_bsdf}) + mix_shader = nw.new_node( + Nodes.MixShader, + input_kwargs={"Fac": multiply, 1: transparent_bsdf, 2: principled_bsdf}, + ) texture_coordinate = nw.new_node(Nodes.TextureCoord) - separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': texture_coordinate.outputs["Object"]}) + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": texture_coordinate.outputs["Object"]} + ) - map_range_1 = nw.new_node(Nodes.MapRange, input_kwargs={'Value': separate_xyz.outputs["Y"], 2: 20.0000, 3: -0.4000}) + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": separate_xyz.outputs["Y"], 2: 20.0000, 3: -0.4000}, + ) - colorramp = nw.new_node(Nodes.ColorRamp, input_kwargs={'Fac': map_range_1.outputs["Result"]}) + colorramp = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": map_range_1.outputs["Result"]} + ) colorramp.color_ramp.interpolation = "B_SPLINE" colorramp.color_ramp.elements.new(0) colorramp.color_ramp.elements.new(0) colorramp.color_ramp.elements[0].position = 0.0000 - colorramp.color_ramp.elements[0].color = random_color_neighbour([0.1982, 0.1841, 0.0513, 1.0000], 0.05, 0.05, 0.05) + colorramp.color_ramp.elements[0].color = random_color_neighbour( + [0.1982, 0.1841, 0.0513, 1.0000], 0.05, 0.05, 0.05 + ) colorramp.color_ramp.elements[1].position = 0.3545 + 0.01 * N() - colorramp.color_ramp.elements[1].color = random_color_neighbour([0.1278, 0.1384, 0.0615, 1.0000], 0.05, 0.05, 0.05) + colorramp.color_ramp.elements[1].color = random_color_neighbour( + [0.1278, 0.1384, 0.0615, 1.0000], 0.05, 0.05, 0.05 + ) colorramp.color_ramp.elements[2].position = 0.6773 + 0.01 * N() - colorramp.color_ramp.elements[2].color = random_color_neighbour([0.0563, 0.0897, 0.0347, 1.0000], 0.05, 0.05, 0.05) + colorramp.color_ramp.elements[2].color = random_color_neighbour( + [0.0563, 0.0897, 0.0347, 1.0000], 0.05, 0.05, 0.05 + ) colorramp.color_ramp.elements[3].position = 1.0000 - colorramp.color_ramp.elements[3].color = random_color_neighbour([0.0256, 0.0123, 0.0000, 1.0000], 0.05, 0.05, 0.05) - - map_range_2 = nw.new_node(Nodes.MapRange, input_kwargs={'Value': separate_xyz.outputs["Y"], 2: 20.0000, 3: 1.0000, 4: 6.0000}) - - volume_scatter = nw.new_node('ShaderNodeVolumeScatter', - input_kwargs={'Color': colorramp.outputs["Color"], 'Density': map_range_2.outputs["Result"], 'Anisotropy': 0.1500}) + colorramp.color_ramp.elements[3].color = random_color_neighbour( + [0.0256, 0.0123, 0.0000, 1.0000], 0.05, 0.05, 0.05 + ) + + map_range_2 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": separate_xyz.outputs["Y"], + 2: 20.0000, + 3: 1.0000, + 4: 6.0000, + }, + ) + + volume_scatter = nw.new_node( + "ShaderNodeVolumeScatter", + input_kwargs={ + "Color": colorramp.outputs["Color"], + "Density": map_range_2.outputs["Result"], + "Anisotropy": 0.1500, + }, + ) rgb = nw.new_node(Nodes.RGB) - rgb.outputs[0].default_value = random_color_neighbour((0.0290, 0.2718, 0.6748, 1.0000), 0.05, 0.05, 0.05) + rgb.outputs[0].default_value = random_color_neighbour( + (0.0290, 0.2718, 0.6748, 1.0000), 0.05, 0.05, 0.05 + ) geometry = nw.new_node(Nodes.NewGeometry) - musgrave_texture = nw.new_node(Nodes.MusgraveTexture, input_kwargs={'Vector': geometry.outputs["Position"], 'Scale': 11.6400}) + musgrave_texture = nw.new_node( + Nodes.MusgraveTexture, + input_kwargs={"Vector": geometry.outputs["Position"], "Scale": 11.6400}, + ) + + map_range = nw.new_node( + Nodes.MapRange, 
input_kwargs={"Value": musgrave_texture, 3: 0.0784, 4: 0.2000} + ) - map_range = nw.new_node(Nodes.MapRange, input_kwargs={'Value': musgrave_texture, 3: 0.0784, 4: 0.2000}) + principled_volume = nw.new_node( + Nodes.PrincipledVolume, + input_kwargs={ + "Color": rgb, + "Density": map_range.outputs["Result"], + "Anisotropy": 0.3909, + }, + ) - principled_volume = nw.new_node(Nodes.PrincipledVolume, - input_kwargs={'Color': rgb, 'Density': map_range.outputs["Result"], 'Anisotropy': 0.3909}) + mix_shader_2 = nw.new_node( + Nodes.MixShader, input_kwargs={1: volume_scatter, 2: principled_volume} + ) - mix_shader_2 = nw.new_node(Nodes.MixShader, input_kwargs={1: volume_scatter, 2: principled_volume}) + volume_absorption = nw.new_node( + "ShaderNodeVolumeAbsorption", + input_kwargs={"Color": rgb, "Density": 5.9000 + 0.1 * N()}, + ) - volume_absorption = nw.new_node('ShaderNodeVolumeAbsorption', input_kwargs={'Color': rgb, 'Density': 5.9000+ 0.1 * N()}) + mix_shader_1 = nw.new_node( + Nodes.MixShader, input_kwargs={1: mix_shader_2, 2: volume_absorption} + ) - mix_shader_1 = nw.new_node(Nodes.MixShader, input_kwargs={1: mix_shader_2, 2: volume_absorption}) + material_output = nw.new_node( + Nodes.MaterialOutput, + input_kwargs={"Surface": mix_shader, "Volume": mix_shader_1}, + attrs={"is_active_output": True}, + ) - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': mix_shader, 'Volume': mix_shader_1}, - attrs={'is_active_output': True}) def geometry_river_water(nw: NodeWrangler): # Code generated using version 2.6.3 of the node_transpiler - group_input = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketGeometry', 'Geometry', None)]) + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) position = nw.new_node(Nodes.InputPosition) - wave = nw.new_node(Nodes.Vector, label='wave') + wave = nw.new_node(Nodes.Vector, label="wave") wave.vector = (581.0000, 380.0000, 982.0000) add = nw.new_node(Nodes.VectorMath, input_kwargs={0: position, 1: wave}) - add_1 = nw.new_node(Nodes.VectorMath, input_kwargs={0: (0.0000, 3.8168, 0.0000), 1: add.outputs["Vector"]}) + add_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: (0.0000, 3.8168, 0.0000), 1: add.outputs["Vector"]}, + ) - water_scale = nw.new_node(Nodes.Value, label='water_scale') + water_scale = nw.new_node(Nodes.Value, label="water_scale") water_scale.outputs[0].default_value = 4.8569 - water_detail = nw.new_node(Nodes.Value, label='water_detail') + water_detail = nw.new_node(Nodes.Value, label="water_detail") water_detail.outputs[0].default_value = 5.8690 - water_dimension = nw.new_node(Nodes.Value, label='water_dimension') + water_dimension = nw.new_node(Nodes.Value, label="water_dimension") water_dimension.outputs[0].default_value = 1.1885 - water_lacunarity = nw.new_node(Nodes.Value, label='water_lacunarity') + water_lacunarity = nw.new_node(Nodes.Value, label="water_lacunarity") water_lacunarity.outputs[0].default_value = 1.8505 - musgrave_texture_1 = nw.new_node(Nodes.MusgraveTexture, - input_kwargs={'Vector': add_1.outputs["Vector"], 'Scale': water_scale, 'Detail': water_detail, 'Dimension': water_dimension, 'Lacunarity': water_lacunarity}) - - water_height = nw.new_node(Nodes.Value, label='water_height') + musgrave_texture_1 = nw.new_node( + Nodes.MusgraveTexture, + input_kwargs={ + "Vector": add_1.outputs["Vector"], + "Scale": water_scale, + "Detail": water_detail, + "Dimension": water_dimension, + "Lacunarity": water_lacunarity, + }, + ) + + 
water_height = nw.new_node(Nodes.Value, label="water_height") water_height.outputs[0].default_value = 0.0011 position_1 = nw.new_node(Nodes.InputPosition) - musgrave_texture = nw.new_node(Nodes.MusgraveTexture, input_kwargs={'Vector': position_1, 'Scale': 4.8811}) + musgrave_texture = nw.new_node( + Nodes.MusgraveTexture, input_kwargs={"Vector": position_1, "Scale": 4.8811} + ) add_2 = nw.new_node(Nodes.Math, input_kwargs={1: musgrave_texture}) - multiply = nw.new_node(Nodes.Math, input_kwargs={0: water_height, 1: add_2}, attrs={'operation': 'MULTIPLY'}) + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: water_height, 1: add_2}, + attrs={"operation": "MULTIPLY"}, + ) - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: musgrave_texture_1, 1: multiply}, attrs={'operation': 'MULTIPLY'}) + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: musgrave_texture_1, 1: multiply}, + attrs={"operation": "MULTIPLY"}, + ) - ripple0 = nw.new_node(Nodes.Vector, label='ripple0') + ripple0 = nw.new_node(Nodes.Vector, label="ripple0") ripple0.vector = (130.0000, 634.0000, 140.0000) add_3 = nw.new_node(Nodes.VectorMath, input_kwargs={0: ripple0, 1: position}) - voronoi_texture_1 = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': add_3.outputs["Vector"], 'Scale': 0.1000}, - attrs={'feature': 'DISTANCE_TO_EDGE'}) - - voronoi_texture = nw.new_node(Nodes.VoronoiTexture, input_kwargs={'Vector': add_3.outputs["Vector"], 'Scale': 0.1000}) - - subtract = nw.new_node(Nodes.VectorMath, - input_kwargs={0: add_3.outputs["Vector"], 1: voronoi_texture.outputs["Position"]}, - attrs={'operation': 'SUBTRACT'}) - - wave_texture = nw.new_node(Nodes.WaveTexture, - input_kwargs={'Vector': subtract.outputs["Vector"], 'Scale': 1.0000, 'Phase Offset': -79.3357}, - attrs={'wave_type': 'RINGS', 'rings_direction': 'SPHERICAL'}) - - multiply_2 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: voronoi_texture_1.outputs["Distance"], 1: wave_texture.outputs["Color"]}, - attrs={'operation': 'MULTIPLY'}) - - ripple1 = nw.new_node(Nodes.Vector, label='ripple1') + voronoi_texture_1 = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={"Vector": add_3.outputs["Vector"], "Scale": 0.1000}, + attrs={"feature": "DISTANCE_TO_EDGE"}, + ) + + voronoi_texture = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={"Vector": add_3.outputs["Vector"], "Scale": 0.1000}, + ) + + subtract = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: add_3.outputs["Vector"], + 1: voronoi_texture.outputs["Position"], + }, + attrs={"operation": "SUBTRACT"}, + ) + + wave_texture = nw.new_node( + Nodes.WaveTexture, + input_kwargs={ + "Vector": subtract.outputs["Vector"], + "Scale": 1.0000, + "Phase Offset": -79.3357, + }, + attrs={"wave_type": "RINGS", "rings_direction": "SPHERICAL"}, + ) + + multiply_2 = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: voronoi_texture_1.outputs["Distance"], + 1: wave_texture.outputs["Color"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + ripple1 = nw.new_node(Nodes.Vector, label="ripple1") ripple1.vector = (819.0000, 938.0000, 541.0000) add_4 = nw.new_node(Nodes.VectorMath, input_kwargs={0: ripple1, 1: position}) - voronoi_texture_3 = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': add_4.outputs["Vector"], 'Scale': 0.1000}, - attrs={'feature': 'DISTANCE_TO_EDGE'}) - - voronoi_texture_2 = nw.new_node(Nodes.VoronoiTexture, input_kwargs={'Vector': add_4.outputs["Vector"], 'Scale': 0.1000}) - - subtract_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: add_4.outputs["Vector"], 1: 
voronoi_texture_2.outputs["Position"]}, - attrs={'operation': 'SUBTRACT'}) - - wave_texture_1 = nw.new_node(Nodes.WaveTexture, - input_kwargs={'Vector': subtract_1.outputs["Vector"], 'Scale': 1.0000, 'Phase Offset': -46.3218}, - attrs={'wave_type': 'RINGS', 'rings_direction': 'SPHERICAL'}) - - multiply_3 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: voronoi_texture_3.outputs["Distance"], 1: wave_texture_1.outputs["Color"]}, - attrs={'operation': 'MULTIPLY'}) - - add_5 = nw.new_node(Nodes.VectorMath, input_kwargs={0: multiply_2.outputs["Vector"], 1: multiply_3.outputs["Vector"]}) - - ripple_height = nw.new_node(Nodes.Value, label='ripple_height') + voronoi_texture_3 = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={"Vector": add_4.outputs["Vector"], "Scale": 0.1000}, + attrs={"feature": "DISTANCE_TO_EDGE"}, + ) + + voronoi_texture_2 = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={"Vector": add_4.outputs["Vector"], "Scale": 0.1000}, + ) + + subtract_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: add_4.outputs["Vector"], + 1: voronoi_texture_2.outputs["Position"], + }, + attrs={"operation": "SUBTRACT"}, + ) + + wave_texture_1 = nw.new_node( + Nodes.WaveTexture, + input_kwargs={ + "Vector": subtract_1.outputs["Vector"], + "Scale": 1.0000, + "Phase Offset": -46.3218, + }, + attrs={"wave_type": "RINGS", "rings_direction": "SPHERICAL"}, + ) + + multiply_3 = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: voronoi_texture_3.outputs["Distance"], + 1: wave_texture_1.outputs["Color"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + add_5 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: multiply_2.outputs["Vector"], 1: multiply_3.outputs["Vector"]}, + ) + + ripple_height = nw.new_node(Nodes.Value, label="ripple_height") ripple_height.outputs[0].default_value = 0.0109 - multiply_4 = nw.new_node(Nodes.Math, input_kwargs={0: add_5.outputs["Vector"], 1: ripple_height}, attrs={'operation': 'MULTIPLY'}) + multiply_4 = nw.new_node( + Nodes.Math, + input_kwargs={0: add_5.outputs["Vector"], 1: ripple_height}, + attrs={"operation": "MULTIPLY"}, + ) add_6 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_1, 1: multiply_4}) - multiply_5 = nw.new_node(Nodes.VectorMath, input_kwargs={0: add_6, 1: (0.0000, 0.0000, 1.0000)}, attrs={'operation': 'MULTIPLY'}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 'Offset': multiply_5.outputs["Vector"]}) + multiply_5 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: add_6, 1: (0.0000, 0.0000, 1.0000)}, + attrs={"operation": "MULTIPLY"}, + ) - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': set_position}, attrs={'is_active_output': True}) + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + "Offset": multiply_5.outputs["Vector"], + }, + ) + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": set_position}, + attrs={"is_active_output": True}, + ) def apply(obj, selection=None, **kwargs): surface.add_geomod(obj, geometry_river_water, selection=selection, attributes=[]) - surface.add_material(obj, shader_river_water, selection=selection) \ No newline at end of file + surface.add_material(obj, shader_river_water, selection=selection) diff --git a/infinigen/assets/materials/rug.py b/infinigen/assets/materials/rug.py index 4f03d19f2..b26f6d392 100644 --- a/infinigen/assets/materials/rug.py +++ b/infinigen/assets/materials/rug.py @@ -2,49 +2,75 @@ # This source code 
is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. # Authors: Lingjie Mei -import numpy as np from numpy.random import uniform from infinigen.assets.materials import common -from infinigen.core.nodes import NodeWrangler, Nodes +from infinigen.core.nodes import Nodes, NodeWrangler from infinigen.core.util.color import hsv2rgba from infinigen.core.util.random import log_uniform -def shader_rug(nw: NodeWrangler, strength=1., **kwargs): - coord = nw.new_node(Nodes.Mapping, [nw.new_node(Nodes.TextureCoord).outputs['Object']]) - vec = nw.new_node(Nodes.MixRGB, [uniform(.8, .9), nw.new_node(Nodes.NoiseTexture, [coord]), coord]) +def shader_rug(nw: NodeWrangler, strength=1.0, **kwargs): + coord = nw.new_node( + Nodes.Mapping, [nw.new_node(Nodes.TextureCoord).outputs["Object"]] + ) + vec = nw.new_node( + Nodes.MixRGB, + [uniform(0.8, 0.9), nw.new_node(Nodes.NoiseTexture, [coord]), coord], + ) height = 0, 0, 0, 1 base_scale = log_uniform(250, 500) - for scale, thresh in zip([1, .75, .5], [1, .5, .33]): - voronoi = nw.new_node(Nodes.VoronoiTexture, [vec], input_kwargs={'Scale': scale * base_scale}).outputs[ - 0] - height = nw.new_node(Nodes.MixRGB, [nw.math('GREATER_THAN', voronoi, thresh), voronoi, height]) + for scale, thresh in zip([1, 0.75, 0.5], [1, 0.5, 0.33]): + voronoi = nw.new_node( + Nodes.VoronoiTexture, [vec], input_kwargs={"Scale": scale * base_scale} + ).outputs[0] + height = nw.new_node( + Nodes.MixRGB, [nw.math("GREATER_THAN", voronoi, thresh), voronoi, height] + ) - displacement = nw.new_node(Nodes.Displacement, input_kwargs={ - 'Scale': strength, - 'Height': height - }) + displacement = nw.new_node( + Nodes.Displacement, input_kwargs={"Scale": strength, "Height": height} + ) base_hue = uniform(0, 1) - base_value = uniform(.2, .5) - if uniform() < .2: - base_saturation = log_uniform(.02, .05) + base_value = uniform(0.2, 0.5) + if uniform() < 0.2: + base_saturation = log_uniform(0.02, 0.05) front_color = hsv2rgba(base_hue, base_saturation, base_value) - back_color = hsv2rgba(base_hue + uniform(-.01, .01), base_saturation * uniform(.9, 1.1), - base_value * uniform(.9, 1.1)) + back_color = hsv2rgba( + base_hue + uniform(-0.01, 0.01), + base_saturation * uniform(0.9, 1.1), + base_value * uniform(0.9, 1.1), + ) else: - base_saturation = log_uniform(.2, .4) + base_saturation = log_uniform(0.2, 0.4) front_color = hsv2rgba(base_hue, base_saturation, base_value) - back_color = hsv2rgba(base_hue + uniform(-.01, .01), base_saturation * uniform(.9, 1.1), - base_value * uniform(.9, 1.1)) - color = nw.new_node(Nodes.MixRGB, [ - nw.build_float_curve(nw.musgrave(uniform(20, 50)), [(0, 1), (uniform(.3, .4), 0), (1, 0)]), front_color, - back_color]) - roughness = nw.build_float_curve(nw.musgrave(uniform(20, 50)), [(.5, .9), (1, .8)]) - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': color, 'Roughness': roughness}) - nw.new_node(Nodes.MaterialOutput, input_kwargs={'Surface': principled_bsdf, 'Displacement': displacement}) + back_color = hsv2rgba( + base_hue + uniform(-0.01, 0.01), + base_saturation * uniform(0.9, 1.1), + base_value * uniform(0.9, 1.1), + ) + color = nw.new_node( + Nodes.MixRGB, + [ + nw.build_float_curve( + nw.musgrave(uniform(20, 50)), [(0, 1), (uniform(0.3, 0.4), 0), (1, 0)] + ), + front_color, + back_color, + ], + ) + roughness = nw.build_float_curve( + nw.musgrave(uniform(20, 50)), [(0.5, 0.9), (1, 0.8)] + ) + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, input_kwargs={"Base 
Color": color, "Roughness": roughness} + ) + nw.new_node( + Nodes.MaterialOutput, + input_kwargs={"Surface": principled_bsdf, "Displacement": displacement}, + ) + def apply(obj, selection=None, **kwargs): common.apply(obj, shader_rug, selection, **kwargs) diff --git a/infinigen/assets/materials/sand.py b/infinigen/assets/materials/sand.py index 6fa798412..e5afe286f 100644 --- a/infinigen/assets/materials/sand.py +++ b/infinigen/assets/materials/sand.py @@ -8,61 +8,79 @@ import gin from mathutils import Vector -from infinigen.core.nodes.node_wrangler import Nodes from infinigen.core import surface -from infinigen.core.util.organization import SurfaceTypes +from infinigen.core.nodes.node_wrangler import Nodes from infinigen.core.util.math import FixedSeed +from infinigen.core.util.organization import SurfaceTypes from infinigen.core.util.random import random_general as rg type = SurfaceTypes.SDFPerturb mod_name = "geo_SAND" name = "sand" -@gin.configurable('shader') + +@gin.configurable("shader") def shader_SAND( - nw, - color=("palette", "desert"), - random_seed=0, - wet=False, - wet_part=("uniform", 0.2, 0.25), - *args, - **kwargs - ): + nw, + color=("palette", "desert"), + random_seed=0, + wet=False, + wet_part=("uniform", 0.2, 0.25), + *args, + **kwargs, +): nw.force_input_consistency() with FixedSeed(random_seed): position = (nw.new_node("ShaderNodeTexCoord", []), 3) - assert(color is not None) + assert color is not None if wet: - position = nw.new_node('ShaderNodeNewGeometry') - factor = nw.scalar_divide(nw.separate(position)[2], 3) # this needs to be consistent with value in coast.gin - factor = nw.scalar_add(factor, -0.5, nw.new_node(Nodes.NoiseTexture, input_kwargs={"Scale": 0.1})) + position = nw.new_node("ShaderNodeNewGeometry") + factor = nw.scalar_divide( + nw.separate(position)[2], 3 + ) # this needs to be consistent with value in coast.gin + factor = nw.scalar_add( + factor, + -0.5, + nw.new_node(Nodes.NoiseTexture, input_kwargs={"Scale": 0.1}), + ) sand_color = nw.new_node(Nodes.ColorRamp, [factor]) sand_color.color_ramp.elements[0].position = rg(wet_part) sand_color.color_ramp.elements[0].color = rg(("color_category", "wet_sand")) - sand_color.color_ramp.elements[1].position = sand_color.color_ramp.elements[0].position + 0.11 + sand_color.color_ramp.elements[1].position = ( + sand_color.color_ramp.elements[0].position + 0.11 + ) sand_color.color_ramp.elements[1].color = rg(("color_category", "dry_sand")) roughness = nw.new_node(Nodes.ColorRamp, [factor]) - roughness.color_ramp.elements[0].position = sand_color.color_ramp.elements[0].position / 2 + roughness.color_ramp.elements[0].position = ( + sand_color.color_ramp.elements[0].position / 2 + ) roughness.color_ramp.elements[0].color = (0.1, 0.1, 0.1, 0.1) - roughness.color_ramp.elements[1].position = sand_color.color_ramp.elements[1].position + roughness.color_ramp.elements[1].position = sand_color.color_ramp.elements[ + 1 + ].position roughness.color_ramp.elements[1].color = (1, 1, 1, 1) else: sand_color = tuple(rg(color)) roughness = 1.0 - bsdf_sand = nw.new_node("ShaderNodeBsdfPrincipled", input_kwargs={ - "Base Color": sand_color, - "Roughness": roughness, - }) + bsdf_sand = nw.new_node( + "ShaderNodeBsdfPrincipled", + input_kwargs={ + "Base Color": sand_color, + "Roughness": roughness, + }, + ) return bsdf_sand -@gin.configurable('geo') -def geo_SAND(nw, + +@gin.configurable("geo") +def geo_SAND( + nw, n_waves=3, wave_scale=("log_uniform", 0.2, 4), wave_distortion=4, noise_scale=125, - noise_detail=9, # tune down if 
there are numerical spikes + noise_detail=9, # tune down if there are numerical spikes noise_roughness=0.9, selection=None, ): @@ -73,67 +91,106 @@ def geo_SAND(nw, offsets = [] for i in range(n_waves): wave_scale_node = nw.new_value(rg(wave_scale), f"wave_scale_{i}") - - + position_shift0 = nw.new_node(Nodes.Vector, label=f"position_shift_0_{i}") - position_shift0.vector = nw.get_position_translation_seed(f"position_shift_0_{i}") + position_shift0.vector = nw.get_position_translation_seed( + f"position_shift_0_{i}" + ) position_shift1 = nw.new_node(Nodes.Vector, label=f"position_shift_1_{i}") - position_shift1.vector = nw.get_position_translation_seed(f"position_shift_1_{i}") + position_shift1.vector = nw.get_position_translation_seed( + f"position_shift_1_{i}" + ) position_shift2 = nw.new_node(Nodes.Vector, label=f"position_shift_2_{i}") - position_shift2.vector = nw.get_position_translation_seed(f"position_shift_2_{i}") + position_shift2.vector = nw.get_position_translation_seed( + f"position_shift_2_{i}" + ) position_shift3 = nw.new_node(Nodes.Vector, label=f"position_shift_3_{i}") - position_shift3.vector = nw.get_position_translation_seed(f"position_shift_3_{i}") + position_shift3.vector = nw.get_position_translation_seed( + f"position_shift_3_{i}" + ) - mag = nw.power(1e5, nw.scalar_sub(nw.new_node(Nodes.NoiseTexture, input_kwargs={ - "Vector": nw.add(position, position_shift3), - "Scale": 0.1, - }), 0.6)) + mag = nw.power( + 1e5, + nw.scalar_sub( + nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": nw.add(position, position_shift3), + "Scale": 0.1, + }, + ), + 0.6, + ), + ) mag.use_clamp = 1 - offsets.append(nw.multiply( - nw.add( - nw.new_node(Nodes.WaveTexture, [ - nw.add( - position, - position_shift0, - (nw.new_node(Nodes.NoiseTexture, input_kwargs={ - "Scale": nw.new_value(1, "warp_scale"), - "Detail": nw.new_value(9, "warp_detail"), - }), 1), + offsets.append( + nw.multiply( + nw.add( + nw.new_node( + Nodes.WaveTexture, + [ + nw.add( + position, + position_shift0, + ( + nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Scale": nw.new_value(1, "warp_scale"), + "Detail": nw.new_value(9, "warp_detail"), + }, + ), + 1, + ), + ), + wave_scale_node, + wave_distortion, + ], ), - wave_scale_node, - wave_distortion - ]), - nw.new_node(Nodes.WaveTexture, [ - nw.add(position, position_shift1), - nw.scalar_multiply(wave_scale_node, 0.98), - wave_distortion - ]), - nw.multiply( - nw.new_node(Nodes.NoiseTexture, [ - nw.add(position, position_shift2), - None, - noise_scale, - noise_detail, - noise_roughness - ]), - Vector([1] * 3), - ) - ), - normal, - mag, - Vector([0.01] * 3) - )) + nw.new_node( + Nodes.WaveTexture, + [ + nw.add(position, position_shift1), + nw.scalar_multiply(wave_scale_node, 0.98), + wave_distortion, + ], + ), + nw.multiply( + nw.new_node( + Nodes.NoiseTexture, + [ + nw.add(position, position_shift2), + None, + noise_scale, + noise_detail, + noise_roughness, + ], + ), + Vector([1] * 3), + ), + ), + normal, + mag, + Vector([0.01] * 3), + ) + ) offset = nw.add(*offsets) groupinput = nw.new_node(Nodes.GroupInput) if selection is not None: offset = nw.multiply(offset, surface.eval_argument(nw, selection)) - set_position = nw.new_node(Nodes.SetPosition, input_kwargs={"Geometry": groupinput, "Offset": offset}) - groupoutput = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': set_position}) - - + set_position = nw.new_node( + Nodes.SetPosition, input_kwargs={"Geometry": groupinput, "Offset": offset} + ) + groupoutput = nw.new_node( + Nodes.GroupOutput, 
input_kwargs={"Geometry": set_position} + ) def apply(objs, selection=None, **kwargs): surface.add_geomod(objs, geo_SAND, selection=selection) - surface.add_material(objs, shader_SAND, selection=selection, - input_kwargs={"obj": objs[0] if isinstance(objs, list) else objs}) \ No newline at end of file + surface.add_material( + objs, + shader_SAND, + selection=selection, + input_kwargs={"obj": objs[0] if isinstance(objs, list) else objs}, + ) diff --git a/infinigen/assets/materials/sandstone.py b/infinigen/assets/materials/sandstone.py index c9b63bdea..c5163511f 100644 --- a/infinigen/assets/materials/sandstone.py +++ b/infinigen/assets/materials/sandstone.py @@ -4,23 +4,24 @@ # Authors: Ankit Goyal, Mingzhe Wang, Zeyu Ma - # Code generated using version v2.0.0 of the node_transpiler import gin -from infinigen.core.nodes import node_utils -from infinigen.core.nodes.node_wrangler import Nodes from numpy.random import uniform + from infinigen.core import surface +from infinigen.core.nodes import NodeWrangler, node_utils +from infinigen.core.nodes.node_wrangler import Nodes from infinigen.core.util.organization import SurfaceTypes from infinigen.core.util.random import random_color_neighbour from infinigen.core.util.random import random_general as rg + from .mountain import geo_MOUNTAIN_general -from mathutils import Vector type = SurfaceTypes.SDFPerturb mod_name = "geometry_sandstone" name = "sandstone" + @node_utils.to_nodegroup("nodegroup_roughness", singleton=False) def nodegroup_roughness(nw): nw.force_input_consistency() @@ -45,10 +46,12 @@ def nodegroup_roughness(nw): attrs={"noise_dimensions": "4D"}, ) - multiply = nw.new_node( Nodes.VectorMath, - input_kwargs={0: noise_texture_1.outputs["Color"], 1: group_input.outputs["Normal"]}, + input_kwargs={ + 0: noise_texture_1.outputs["Color"], + 1: group_input.outputs["Normal"], + }, attrs={"operation": "MULTIPLY"}, ) @@ -71,10 +74,12 @@ def nodegroup_roughness(nw): attrs={"noise_dimensions": "4D"}, ) - multiply_2 = nw.new_node( Nodes.VectorMath, - input_kwargs={0: noise_texture_2.outputs["Color"], 1: group_input.outputs["Normal"]}, + input_kwargs={ + 0: noise_texture_2.outputs["Color"], + 1: group_input.outputs["Normal"], + }, attrs={"operation": "MULTIPLY"}, ) @@ -106,7 +111,9 @@ def nodegroup_roughness(nw): ) -@node_utils.to_nodegroup("nodegroup_cracked_with_mask", singleton=False, type="GeometryNodeTree") +@node_utils.to_nodegroup( + "nodegroup_cracked_with_mask", singleton=False, type="GeometryNodeTree" +) def nodegroup_cracked_with_mask(nw): nw.force_input_consistency() group_input = nw.new_node( @@ -267,7 +274,6 @@ def nodegroup_add_noise(nw): group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Value": add}) - @node_utils.to_nodegroup("nodegroup_displacement_to_offset", singleton=False) def nodegroup_displacement_to_offset(nw): nw.force_input_consistency() @@ -300,8 +306,9 @@ def nodegroup_displacement_to_offset(nw): Nodes.GroupOutput, input_kwargs={"Vector": multiply_1.outputs["Vector"]} ) + @gin.configurable -def shader(nw, color=None): +def shader(nw: NodeWrangler, color=("palette", "sandstone")): nw.force_input_consistency() per_dark_1 = uniform(-0.1, 0.1) per_dark_2 = uniform(-0.1, 0.1) @@ -321,7 +328,7 @@ def shader(nw, color=None): colorramp_0.color_ramp.elements[1].position = 1.0 colorramp_0.color_ramp.elements[1].color = (1, 1, 1, 1) - vector = nw.new_node('ShaderNodeNewGeometry', []) + vector = nw.new_node("ShaderNodeNewGeometry", []) noise_texture = nw.new_node( Nodes.NoiseTexture, @@ -390,6 +397,7 @@ def shader(nw, 
color=None): return principled_bsdf + @gin.configurable def geometry_sandstone(nw, selection=None, is_rock=False, **kwargs): nw.force_input_consistency() @@ -399,7 +407,9 @@ def geometry_sandstone(nw, selection=None, is_rock=False, **kwargs): else: roug_mag = nw.new_value(uniform(0.3, 0.5), "roug_mag") - side_step_displacement_to_offset_magnitude = nw.new_value(uniform(0.5, 1.5), "side_step_displacement_to_offset_magnitude") + side_step_displacement_to_offset_magnitude = nw.new_value( + uniform(0.5, 1.5), "side_step_displacement_to_offset_magnitude" + ) side_step_poly_aplha_x = nw.new_value(uniform(0, 2), "side_step_poly_aplha_x") side_step_poly_aplha_y = nw.new_value(uniform(0, 2), "side_step_poly_aplha_y") @@ -413,7 +423,11 @@ def geometry_sandstone(nw, selection=None, is_rock=False, **kwargs): group_3 = nw.new_node( nodegroup_roughness().name, - input_kwargs={"Noise 1 Scale": 200.0, "Noise 1 Magnitude": 0.5, 'Normal': normal}, + input_kwargs={ + "Noise 1 Scale": 200.0, + "Noise 1 Magnitude": 0.5, + "Normal": normal, + }, ) multiply = nw.new_node( @@ -551,28 +565,37 @@ def geometry_sandstone(nw, selection=None, is_rock=False, **kwargs): input_kwargs={ "Vector": position_1, "Scale": nw.new_value(1, "stripe_warp_scale"), - } + }, ), nw.new_value(0.2, "stripe_warp_mag"), - ) + ), ) offset2 = nw.add( multiply_3, nw.multiply( - nw.new_node(Nodes.WaveTexture, input_kwargs={ - "Vector": warped_position, - "Scale": nw.new_value(20, "stripe_scale"), - }, attrs={ - "bands_direction": "Z", - "wave_profile": "SAW", - }), + nw.new_node( + Nodes.WaveTexture, + input_kwargs={ + "Vector": warped_position, + "Scale": nw.new_value(20, "stripe_scale"), + }, + attrs={ + "bands_direction": "Z", + "wave_profile": "SAW", + }, + ), nw.new_value(0.005, "stripe_mag"), normal, - ) + ), ) - noise_params = {"scale": ("uniform", 10, 20), "detail": 9, "roughness": 0.6, "zscale": ("log_uniform", 0.05, 0.1)} + noise_params = { + "scale": ("uniform", 10, 20), + "detail": 9, + "roughness": 0.6, + "zscale": ("log_uniform", 0.05, 0.1), + } offset = nw.add( geo_MOUNTAIN_general(nw, 3, noise_params, 0, {}, {}), diff --git a/infinigen/assets/materials/scale.py b/infinigen/assets/materials/scale.py index fb8dfeb17..b271ebda4 100644 --- a/infinigen/assets/materials/scale.py +++ b/infinigen/assets/materials/scale.py @@ -5,34 +5,41 @@ # Acknowledgment: This file draws inspiration from https://www.youtube.com/watch?v=mJVuodaPHTQ and https://www.youtube.com/watch?v=v7a4ouBLIow by Lance Phan -import os, sys -import numpy as np -import math as ma -from infinigen.assets.materials.utils.surface_utils import clip, sample_range, sample_ratio, sample_color, geo_voronoi_noise +import os +import random + import bpy -import mathutils -from numpy.random import normal as normal_func -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils + +from infinigen.assets.materials.utils.surface_utils import ( + sample_color, + sample_range, + sample_ratio, +) from infinigen.core import surface -import random +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler + def shader_scale(nw, rand=True, **input_kwargs): - math_4 = nw.new_node(Nodes.Math, - input_kwargs={0: 1.0}, - attrs={'operation': 'SUBTRACT'}) - - attribute_2 = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'Color variations'}) - - noise_texture_3 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': attribute_2.outputs["Color"], 'W': 1.0}, - 
attrs={'noise_dimensions': '4D'}) + math_4 = nw.new_node( + Nodes.Math, input_kwargs={0: 1.0}, attrs={"operation": "SUBTRACT"} + ) + + attribute_2 = nw.new_node( + Nodes.Attribute, attrs={"attribute_name": "Color variations"} + ) + + noise_texture_3 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={"Vector": attribute_2.outputs["Color"], "W": 1.0}, + attrs={"noise_dimensions": "4D"}, + ) if rand: noise_texture_3.inputs["W"].default_value = sample_range(-5, 5) - - colorramp_2 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': noise_texture_3.outputs["Fac"]}) + + colorramp_2 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": noise_texture_3.outputs["Fac"]} + ) for i in range(3): colorramp_2.color_ramp.elements.new(0.0) colorramp_2.color_ramp.elements[0].position = 0.125 @@ -49,19 +56,21 @@ def shader_scale(nw, rand=True, **input_kwargs): for e in colorramp_2.color_ramp.elements: sample_color(e.color) - vector_math = nw.new_node(Nodes.VectorMath, + vector_math = nw.new_node( + Nodes.VectorMath, input_kwargs={0: math_4, 1: colorramp_2.outputs["Color"]}, - attrs={'operation': 'MULTIPLY'}) - - attribute_3 = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'offset2'}) - - math_5 = nw.new_node(Nodes.Math, + attrs={"operation": "MULTIPLY"}, + ) + + attribute_3 = nw.new_node(Nodes.Attribute, attrs={"attribute_name": "offset2"}) + + math_5 = nw.new_node( + Nodes.Math, input_kwargs={0: attribute_3.outputs["Vector"], 1: 0.01}, - attrs={'operation': 'GREATER_THAN'}) - - colorramp_7 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': math_5}) + attrs={"operation": "GREATER_THAN"}, + ) + + colorramp_7 = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": math_5}) colorramp_7.color_ramp.elements.new(1) colorramp_7.color_ramp.elements[0].position = 0.0 colorramp_7.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) @@ -72,285 +81,531 @@ def shader_scale(nw, rand=True, **input_kwargs): if rand: sample_color(colorramp_7.color_ramp.elements[2].color) - vector_math_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: vector_math.outputs["Vector"], 1: colorramp_7.outputs["Color"]}) - - attribute_4 = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'Color variations'}) - - uv_map_1 = nw.new_node('ShaderNodeUVMap') - uv_map_1.uv_map = 'UVMap' - - noise_texture_5 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': uv_map_1, 'Scale': 50.0}, - attrs={'noise_dimensions': '4D'}) + vector_math_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: vector_math.outputs["Vector"], + 1: colorramp_7.outputs["Color"], + }, + ) + + attribute_4 = nw.new_node( + Nodes.Attribute, attrs={"attribute_name": "Color variations"} + ) + + uv_map_1 = nw.new_node("ShaderNodeUVMap") + uv_map_1.uv_map = "UVMap" + + noise_texture_5 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={"Vector": uv_map_1, "Scale": 50.0}, + attrs={"noise_dimensions": "4D"}, + ) if rand: noise_texture_5.inputs["W"].default_value = sample_range(-5, 5) - - mix_3 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': 1.0, 'Color1': attribute_4.outputs["Color"], 'Color2': noise_texture_5.outputs["Color"]}, - attrs={'blend_type': 'ADD'}) - - noise_texture_4 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': mix_3}) - - colorramp = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': noise_texture_4.outputs["Fac"]}) + + mix_3 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": 1.0, + "Color1": attribute_4.outputs["Color"], + "Color2": noise_texture_5.outputs["Color"], + }, + attrs={"blend_type": "ADD"}, + ) + + 
noise_texture_4 = nw.new_node(Nodes.NoiseTexture, input_kwargs={"Vector": mix_3}) + + colorramp = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": noise_texture_4.outputs["Fac"]} + ) colorramp.color_ramp.elements[0].position = 0.0 colorramp.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) colorramp.color_ramp.elements[1].position = 1.0 colorramp.color_ramp.elements[1].color = (0.5078, 0.5078, 0.5078, 1.0) - - colormap = random.choice([vector_math.outputs["Vector"], vector_math_1.outputs["Vector"]]) if rand else vector_math.outputs["Vector"] - principled_bsdf_1 = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': colormap, 'Subsurface': 0.2, 'Subsurface Radius': (0.36, 0.46, 0.6), 'Subsurface Color': (1.0, 0.9405, 0.7747, 1.0), 'Metallic': 0.8, 'Roughness': colorramp.outputs["Color"], 'IOR': 1.69}, - attrs={'subsurface_method': 'BURLEY'}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': principled_bsdf_1}) + colormap = ( + random.choice([vector_math.outputs["Vector"], vector_math_1.outputs["Vector"]]) + if rand + else vector_math.outputs["Vector"] + ) + + principled_bsdf_1 = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": colormap, + "Subsurface": 0.2, + "Subsurface Radius": (0.36, 0.46, 0.6), + "Subsurface Color": (1.0, 0.9405, 0.7747, 1.0), + "Metallic": 0.8, + "Roughness": colorramp.outputs["Color"], + "IOR": 1.69, + }, + attrs={"subsurface_method": "BURLEY"}, + ) -@node_utils.to_nodegroup('nodegroup_node_grid', singleton=False, type='GeometryNodeTree') + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf_1} + ) + + +@node_utils.to_nodegroup( + "nodegroup_node_grid", singleton=False, type="GeometryNodeTree" +) def nodegroup_node_grid(nw: NodeWrangler): # Code generated using version 2.4.3 of the node_transpiler - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Value', 0.5)]) - - multiply = nw.new_node(Nodes.Math, + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketFloat", "Value", 0.5)] + ) + + multiply = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["Value"], 1: 2.0}, - attrs={'operation': 'MULTIPLY'}) - - floor = nw.new_node(Nodes.Math, - input_kwargs={0: multiply, 1: 2.0}, - attrs={'operation': 'FLOOR'}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: floor}, - attrs={'operation': 'MULTIPLY'}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_1}) - - trunc = nw.new_node(Nodes.Math, - input_kwargs={0: add}, - attrs={'operation': 'TRUNC'}) - - trunc_1 = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_1}, - attrs={'operation': 'TRUNC'}) - - add_1 = nw.new_node(Nodes.Math, - input_kwargs={0: trunc_1}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'floor1': trunc, 'floor2': add_1}) + attrs={"operation": "MULTIPLY"}, + ) + + floor = nw.new_node( + Nodes.Math, input_kwargs={0: multiply, 1: 2.0}, attrs={"operation": "FLOOR"} + ) + + multiply_1 = nw.new_node( + Nodes.Math, input_kwargs={0: floor}, attrs={"operation": "MULTIPLY"} + ) + + add = nw.new_node(Nodes.Math, input_kwargs={0: multiply_1}) + + trunc = nw.new_node(Nodes.Math, input_kwargs={0: add}, attrs={"operation": "TRUNC"}) + + trunc_1 = nw.new_node( + Nodes.Math, input_kwargs={0: multiply_1}, attrs={"operation": "TRUNC"} + ) + + add_1 = nw.new_node(Nodes.Math, input_kwargs={0: trunc_1}) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"floor1": trunc, "floor2": add_1} + ) + def 
geo_scale(nw, rand=True, **input_kwargs): # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None)]) - - separate_xyz_2 = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': nw.expose_input('UVMap', attribute='UVMap', dtype='NodeSocketVector')}) - - angle = nw.new_node(Nodes.Value, label='Angle') + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) + + separate_xyz_2 = nw.new_node( + Nodes.SeparateXYZ, + input_kwargs={ + "Vector": nw.expose_input( + "UVMap", attribute="UVMap", dtype="NodeSocketVector" + ) + }, + ) + + angle = nw.new_node(Nodes.Value, label="Angle") angle.outputs[0].default_value = 0.0000 - - cosine = nw.new_node(Nodes.Math, input_kwargs={0: angle}, attrs={'operation': 'COSINE'}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz_2.outputs["X"], 1: cosine}, attrs={'operation': 'MULTIPLY'}) - - sine = nw.new_node(Nodes.Math, input_kwargs={0: angle}, attrs={'operation': 'SINE'}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz_2.outputs["Y"], 1: sine}, attrs={'operation': 'MULTIPLY'}) - - subtract = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: multiply_1}, attrs={'operation': 'SUBTRACT'}) - - xscale = nw.new_node(Nodes.Value, label='Xscale') + + cosine = nw.new_node( + Nodes.Math, input_kwargs={0: angle}, attrs={"operation": "COSINE"} + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_2.outputs["X"], 1: cosine}, + attrs={"operation": "MULTIPLY"}, + ) + + sine = nw.new_node(Nodes.Math, input_kwargs={0: angle}, attrs={"operation": "SINE"}) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_2.outputs["Y"], 1: sine}, + attrs={"operation": "MULTIPLY"}, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply, 1: multiply_1}, + attrs={"operation": "SUBTRACT"}, + ) + + xscale = nw.new_node(Nodes.Value, label="Xscale") xscale.outputs[0].default_value = sample_range(0.7, 1.3) - - multiply_2 = nw.new_node(Nodes.Math, input_kwargs={0: subtract, 1: xscale}, attrs={'operation': 'MULTIPLY'}) - - noise_texture_2 = nw.new_node(Nodes.NoiseTexture, input_kwargs={'W': sample_range(-10, 10), 'Scale': 10.0000}, attrs={'noise_dimensions': '4D'}) - - xnoise = nw.new_node(Nodes.Value, label='Xnoise') + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract, 1: xscale}, + attrs={"operation": "MULTIPLY"}, + ) + + noise_texture_2 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={"W": sample_range(-10, 10), "Scale": 10.0000}, + attrs={"noise_dimensions": "4D"}, + ) + + xnoise = nw.new_node(Nodes.Value, label="Xnoise") xnoise.outputs[0].default_value = sample_range(0.01, 0.03) - - multiply_3 = nw.new_node(Nodes.Math, input_kwargs={0: noise_texture_2.outputs["Fac"], 1: xnoise}, attrs={'operation': 'MULTIPLY'}) - + + multiply_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: noise_texture_2.outputs["Fac"], 1: xnoise}, + attrs={"operation": "MULTIPLY"}, + ) + add = nw.new_node(Nodes.Math, input_kwargs={0: multiply_2, 1: multiply_3}) - - multiply_4 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz_2.outputs["X"], 1: sine}, attrs={'operation': 'MULTIPLY'}) - - multiply_5 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz_2.outputs["Y"], 1: cosine}, attrs={'operation': 'MULTIPLY'}) - + + multiply_4 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_2.outputs["X"], 1: sine}, + 
attrs={"operation": "MULTIPLY"}, + ) + + multiply_5 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_2.outputs["Y"], 1: cosine}, + attrs={"operation": "MULTIPLY"}, + ) + add_1 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_4, 1: multiply_5}) - - yscale = nw.new_node(Nodes.Value, label='Yscale') + + yscale = nw.new_node(Nodes.Value, label="Yscale") yscale.outputs[0].default_value = sample_range(0.7, 1.3) - - multiply_6 = nw.new_node(Nodes.Math, input_kwargs={0: add_1, 1: yscale}, attrs={'operation': 'MULTIPLY'}) - - noise_texture_1 = nw.new_node(Nodes.NoiseTexture, input_kwargs={'W': sample_range(-10, 10), 'Scale': 10.0000}, attrs={'noise_dimensions': '4D'}) - - ynoise = nw.new_node(Nodes.Value, label='Ynoise') + + multiply_6 = nw.new_node( + Nodes.Math, input_kwargs={0: add_1, 1: yscale}, attrs={"operation": "MULTIPLY"} + ) + + noise_texture_1 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={"W": sample_range(-10, 10), "Scale": 10.0000}, + attrs={"noise_dimensions": "4D"}, + ) + + ynoise = nw.new_node(Nodes.Value, label="Ynoise") ynoise.outputs[0].default_value = sample_range(0.01, 0.03) - - multiply_7 = nw.new_node(Nodes.Math, input_kwargs={0: noise_texture_1.outputs["Fac"], 1: ynoise}, attrs={'operation': 'MULTIPLY'}) - + + multiply_7 = nw.new_node( + Nodes.Math, + input_kwargs={0: noise_texture_1.outputs["Fac"], 1: ynoise}, + attrs={"operation": "MULTIPLY"}, + ) + add_2 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_6, 1: multiply_7}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': add, 'Y': add_2}) - - scale = nw.new_node(Nodes.Value, label='Scale') - scale.outputs[0].default_value = sample_ratio(25, 2/3, 3/2) - - multiply_8 = nw.new_node(Nodes.VectorMath, input_kwargs={0: combine_xyz_2, 1: scale}, attrs={'operation': 'MULTIPLY'}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': multiply_8}) - - nodegrid = nw.new_node(nodegroup_node_grid().name, input_kwargs={'Value': separate_xyz.outputs["Y"]}) - - greater_than = nw.new_node(Nodes.Compare, + + combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"X": add, "Y": add_2}) + + scale = nw.new_node(Nodes.Value, label="Scale") + scale.outputs[0].default_value = sample_ratio(25, 2 / 3, 3 / 2) + + multiply_8 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: combine_xyz_2, 1: scale}, + attrs={"operation": "MULTIPLY"}, + ) + + separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": multiply_8}) + + nodegrid = nw.new_node( + nodegroup_node_grid().name, input_kwargs={"Value": separate_xyz.outputs["Y"]} + ) + + greater_than = nw.new_node( + Nodes.Compare, input_kwargs={0: nodegrid.outputs["floor1"], 1: separate_xyz.outputs["Y"]}, - attrs={'operation': 'LESS_THAN'}) - - less_than = nw.new_node(Nodes.Compare, input_kwargs={0: nodegrid.outputs["floor1"], 1: separate_xyz.outputs["Y"]}) - - nodegrid_1 = nw.new_node(nodegroup_node_grid().name, input_kwargs={'Value': separate_xyz.outputs["X"]}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': nodegrid_1.outputs["floor2"], 'Y': nodegrid.outputs["floor1"]}) - - multiply_9 = nw.new_node(Nodes.VectorMath, input_kwargs={0: less_than, 1: combine_xyz}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': nodegrid_1.outputs["floor1"], 'Y': nodegrid.outputs["floor2"]}) - - multiply_10 = nw.new_node(Nodes.VectorMath, input_kwargs={0: greater_than, 1: combine_xyz_1}, attrs={'operation': 'MULTIPLY'}) - - add_3 = nw.new_node(Nodes.VectorMath, 
input_kwargs={0: multiply_9.outputs["Vector"], 1: multiply_10.outputs["Vector"]}) - - subtract_1 = nw.new_node(Nodes.VectorMath, input_kwargs={0: multiply_8, 1: add_3}, attrs={'operation': 'SUBTRACT'}) - - distance = nw.new_node(Nodes.VectorMath, input_kwargs={0: multiply_8, 1: add_3}, attrs={'operation': 'DISTANCE'}) - - add_4 = nw.new_node(Nodes.Math, input_kwargs={0: distance.outputs["Value"], 1: 0.0100}) - - less_than_1 = nw.new_node(Nodes.Compare, input_kwargs={0: add_4, 1: 0.5000}, attrs={'operation': 'LESS_THAN'}) - + attrs={"operation": "LESS_THAN"}, + ) + + less_than = nw.new_node( + Nodes.Compare, + input_kwargs={0: nodegrid.outputs["floor1"], 1: separate_xyz.outputs["Y"]}, + ) + + nodegrid_1 = nw.new_node( + nodegroup_node_grid().name, input_kwargs={"Value": separate_xyz.outputs["X"]} + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": nodegrid_1.outputs["floor2"], + "Y": nodegrid.outputs["floor1"], + }, + ) + + multiply_9 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: less_than, 1: combine_xyz}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": nodegrid_1.outputs["floor1"], + "Y": nodegrid.outputs["floor2"], + }, + ) + + multiply_10 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: greater_than, 1: combine_xyz_1}, + attrs={"operation": "MULTIPLY"}, + ) + + add_3 = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: multiply_9.outputs["Vector"], + 1: multiply_10.outputs["Vector"], + }, + ) + + subtract_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: multiply_8, 1: add_3}, + attrs={"operation": "SUBTRACT"}, + ) + + distance = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: multiply_8, 1: add_3}, + attrs={"operation": "DISTANCE"}, + ) + + add_4 = nw.new_node( + Nodes.Math, input_kwargs={0: distance.outputs["Value"], 1: 0.0100} + ) + + less_than_1 = nw.new_node( + Nodes.Compare, + input_kwargs={0: add_4, 1: 0.5000}, + attrs={"operation": "LESS_THAN"}, + ) + greater_than_1 = nw.new_node(Nodes.Compare, input_kwargs={0: add_4, 1: 0.5000}) - - multiply_11 = nw.new_node(Nodes.VectorMath, input_kwargs={0: less_than, 1: combine_xyz_1}, attrs={'operation': 'MULTIPLY'}) - - multiply_12 = nw.new_node(Nodes.VectorMath, input_kwargs={0: greater_than, 1: combine_xyz}, attrs={'operation': 'MULTIPLY'}) - - add_5 = nw.new_node(Nodes.VectorMath, input_kwargs={0: multiply_11.outputs["Vector"], 1: multiply_12.outputs["Vector"]}) - - subtract_2 = nw.new_node(Nodes.VectorMath, input_kwargs={0: multiply_8, 1: add_5}, attrs={'operation': 'SUBTRACT'}) - - multiply_13 = nw.new_node(Nodes.VectorMath, + + multiply_11 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: less_than, 1: combine_xyz_1}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_12 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: greater_than, 1: combine_xyz}, + attrs={"operation": "MULTIPLY"}, + ) + + add_5 = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: multiply_11.outputs["Vector"], + 1: multiply_12.outputs["Vector"], + }, + ) + + subtract_2 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: multiply_8, 1: add_5}, + attrs={"operation": "SUBTRACT"}, + ) + + multiply_13 = nw.new_node( + Nodes.VectorMath, input_kwargs={0: greater_than_1, 1: subtract_2.outputs["Vector"]}, - attrs={'operation': 'MULTIPLY'}) - - _multiply_add = nw.new_node(Nodes.VectorMath, - input_kwargs={0: subtract_1.outputs["Vector"], 1: less_than_1, 2: multiply_13.outputs["Vector"]}, - attrs={'operation': 'MULTIPLY_ADD'}) - - 
multiply_add = nw.new_node(Nodes.VectorMath, + attrs={"operation": "MULTIPLY"}, + ) + + _multiply_add = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: subtract_1.outputs["Vector"], + 1: less_than_1, + 2: multiply_13.outputs["Vector"], + }, + attrs={"operation": "MULTIPLY_ADD"}, + ) + + multiply_add = nw.new_node( + Nodes.VectorMath, input_kwargs={0: _multiply_add, 1: (1, -1, 1)}, - attrs={'operation': 'MULTIPLY'}) - - multiply_14 = nw.new_node(Nodes.VectorMath, input_kwargs={0: greater_than_1, 1: add_5}, attrs={'operation': 'MULTIPLY'}) - - multiply_add_1 = nw.new_node(Nodes.VectorMath, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_14 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: greater_than_1, 1: add_5}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_add_1 = nw.new_node( + Nodes.VectorMath, input_kwargs={0: add_3, 1: less_than_1, 2: multiply_14.outputs["Vector"]}, - attrs={'operation': 'MULTIPLY_ADD'}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': multiply_add_1, 'W': sample_range(-10, 10), 'Scale': 33.0000}, - attrs={'noise_dimensions': '4D'}) - - subtract_3 = nw.new_node(Nodes.MapRange, - input_kwargs={0: noise_texture.outputs["Fac"], 1: 0.26, 2: 0.74, 3: -0.5, 4: 0.5}, - attrs={'clamp': True} - ) - - sine_1 = nw.new_node(Nodes.Math, input_kwargs={0: subtract_3}, attrs={'operation': 'SINE'}) - - cosine_1 = nw.new_node(Nodes.Math, input_kwargs={0: subtract_3}, attrs={'operation': 'COSINE'}) - - combine_xyz_color = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': sine_1, 'Y': cosine_1, 'Z': 0.0000}) - - add_6 = nw.new_node(Nodes.VectorMath, input_kwargs={0: combine_xyz_color.outputs["Vector"], 1: multiply_add}, attrs={'operation': 'DOT_PRODUCT'}) - - distance_1 = nw.new_node(Nodes.VectorMath, input_kwargs={0: multiply_8, 1: add_5}, attrs={'operation': 'DISTANCE'}) - - add_7 = nw.new_node(Nodes.Math, input_kwargs={0: distance_1.outputs["Value"], 1: 0.0100}) - - multiply_17 = nw.new_node(Nodes.Math, input_kwargs={0: greater_than_1, 1: add_7}, attrs={'operation': 'MULTIPLY'}) - - multiply_add_2 = nw.new_node(Nodes.Math, input_kwargs={0: add_4, 1: less_than_1, 2: multiply_17}, attrs={'operation': 'MULTIPLY_ADD'}) - - multiply_18 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_add_2, 1: 2.0000}, attrs={'operation': 'MULTIPLY'}) - - multiply_19 = nw.new_node(Nodes.MapRange, + attrs={"operation": "MULTIPLY_ADD"}, + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": multiply_add_1, + "W": sample_range(-10, 10), + "Scale": 33.0000, + }, + attrs={"noise_dimensions": "4D"}, + ) + + subtract_3 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + 0: noise_texture.outputs["Fac"], + 1: 0.26, + 2: 0.74, + 3: -0.5, + 4: 0.5, + }, + attrs={"clamp": True}, + ) + + sine_1 = nw.new_node( + Nodes.Math, input_kwargs={0: subtract_3}, attrs={"operation": "SINE"} + ) + + cosine_1 = nw.new_node( + Nodes.Math, input_kwargs={0: subtract_3}, attrs={"operation": "COSINE"} + ) + + combine_xyz_color = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": sine_1, "Y": cosine_1, "Z": 0.0000} + ) + + add_6 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: combine_xyz_color.outputs["Vector"], 1: multiply_add}, + attrs={"operation": "DOT_PRODUCT"}, + ) + + distance_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: multiply_8, 1: add_5}, + attrs={"operation": "DISTANCE"}, + ) + + add_7 = nw.new_node( + Nodes.Math, input_kwargs={0: distance_1.outputs["Value"], 1: 0.0100} + ) + + multiply_17 = nw.new_node( + Nodes.Math, + 
input_kwargs={0: greater_than_1, 1: add_7}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_add_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: add_4, 1: less_than_1, 2: multiply_17}, + attrs={"operation": "MULTIPLY_ADD"}, + ) + + multiply_18 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_add_2, 1: 2.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_19 = nw.new_node( + Nodes.MapRange, input_kwargs={0: multiply_18, 1: 0.9156, 2: 1.0000, 3: 0.0000, 4: 0.5}, - attrs={'clamp': True} + attrs={"clamp": True}, + ) + + subtract_4 = nw.new_node( + Nodes.Math, + input_kwargs={0: add_6, 1: multiply_19}, + attrs={"operation": "SUBTRACT"}, ) - subtract_4 = nw.new_node(Nodes.Math, input_kwargs={0: add_6, 1: multiply_19}, attrs={'operation': 'SUBTRACT'}) + subtract_5 = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract_4, 1: 0.0000}, + attrs={"operation": "SUBTRACT"}, + ) - subtract_5 = nw.new_node(Nodes.Math, input_kwargs={0: subtract_4, 1: 0.0000}, attrs={'operation': 'SUBTRACT'}) - normal = nw.new_node(Nodes.InputNormal) - - multiply_20 = nw.new_node(Nodes.VectorMath, input_kwargs={0: subtract_5, 1: normal}, attrs={'operation': 'MULTIPLY'}) - - offset_scale = nw.new_node(Nodes.Value, label='OffsetScale') + + multiply_20 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: subtract_5, 1: normal}, + attrs={"operation": "MULTIPLY"}, + ) + + offset_scale = nw.new_node(Nodes.Value, label="OffsetScale") offset_scale.outputs[0].default_value = 0.0020 - - multiply_21 = nw.new_node(Nodes.VectorMath, + + multiply_21 = nw.new_node( + Nodes.VectorMath, input_kwargs={0: multiply_20.outputs["Vector"], 1: offset_scale}, - attrs={'operation': 'MULTIPLY'}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 'Offset': multiply_21.outputs["Vector"]}) - - capture_attribute_1 = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': set_position.outputs["Geometry"], 1: multiply_add_1}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - capture_attribute_4 = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': capture_attribute_1.outputs["Geometry"], 1: multiply_19}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - group_output = nw.new_node(Nodes.GroupOutput, + attrs={"operation": "MULTIPLY"}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + "Offset": multiply_21.outputs["Vector"], + }, + ) + + capture_attribute_1 = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={"Geometry": set_position.outputs["Geometry"], 1: multiply_add_1}, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + capture_attribute_4 = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={ + "Geometry": capture_attribute_1.outputs["Geometry"], + 1: multiply_19, + }, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={ - 'Geometry': capture_attribute_4.outputs["Geometry"], - 'attr2': capture_attribute_1.outputs["Attribute"], - 'attr5': capture_attribute_4.outputs["Attribute"] - }, - attrs={'is_active_output': True}) + "Geometry": capture_attribute_4.outputs["Geometry"], + "attr2": capture_attribute_1.outputs["Attribute"], + "attr5": capture_attribute_4.outputs["Attribute"], + }, + attrs={"is_active_output": True}, + ) + def apply(obj, geo_kwargs=None, shader_kwargs=None, **kwargs): - attributes = [ - 'Color variations', - 'offset2' - ] - surface.add_geomod(obj, geo_scale, apply=False, input_kwargs=geo_kwargs, attributes=attributes) + 
attributes = ["Color variations", "offset2"] + surface.add_geomod( + obj, geo_scale, apply=False, input_kwargs=geo_kwargs, attributes=attributes + ) surface.add_material(obj, shader_scale, reuse=False, input_kwargs=shader_kwargs) + if __name__ == "__main__": template = "scale_new2" - #outpath = os.path.join("outputs", template) - #if not os.path.isdir(outpath): + # outpath = os.path.join("outputs", template) + # if not os.path.isdir(outpath): # os.mkdir(outpath) for i in range(1): - bpy.ops.wm.open_mainfile(filepath='scale_new2.blend') - apply(bpy.data.objects['creature_16_aquatic_0_root_mesh.001'], geo_kwargs={'rand': False}, shader_kwargs={'rand': True}) - fn = os.path.join(os.path.abspath(os.curdir), 'dev_test_scale_new2.blend') + bpy.ops.wm.open_mainfile(filepath="scale_new2.blend") + apply( + bpy.data.objects["creature_16_aquatic_0_root_mesh.001"], + geo_kwargs={"rand": False}, + shader_kwargs={"rand": True}, + ) + fn = os.path.join(os.path.abspath(os.curdir), "dev_test_scale_new2.blend") bpy.ops.wm.save_as_mainfile(filepath=fn) - #bpy.context.scene.render.filepath = os.path.join(outpath, 'scale_%d.jpg'%(i)) - #bpy.context.scene.render.image_settings.file_format='JPEG' - #bpy.ops.render.render(write_still=True) \ No newline at end of file + # bpy.context.scene.render.filepath = os.path.join(outpath, 'scale_%d.jpg'%(i)) + # bpy.context.scene.render.image_settings.file_format='JPEG' + # bpy.ops.render.render(write_still=True) diff --git a/infinigen/assets/materials/shelf_shaders.py b/infinigen/assets/materials/shelf_shaders.py index ed92d64ad..dd49e3363 100644 --- a/infinigen/assets/materials/shelf_shaders.py +++ b/infinigen/assets/materials/shelf_shaders.py @@ -4,8 +4,9 @@ # Authors: Beining Han # Acknowledgement: This file draws inspiration from https://www.youtube.com/watch?v=jDEijCwz6to by Lachlan Sarv + import numpy as np -from numpy.random import uniform, normal, randint +from numpy.random import normal, uniform from infinigen.assets.materials import ( metal_shader_list, @@ -13,83 +14,95 @@ shader_rough_plastic, wood, ) -from infinigen.assets.materials.leather_and_fabrics import fabric_shader_list from infinigen.core import surface - - - from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.util.color import color_category, hsv2rgba -from infinigen.core import surface - -import json -from infinigen.core.util.math import FixedSeed, int_hash -from infinigen.core.util.random import random_general as rg +from infinigen.core.util.color import hsv2rgba def shader_shelves_white(nw: NodeWrangler, **kwargs): # Code generated using version 2.6.4 of the node_transpiler - rgb = kwargs.get('rgb', [0.9, 0.9, 0.9]) - base_color = (*rgb, 1.) - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': base_color, - 'Roughness': kwargs.get('roughness', 0.9)}) - material_output = nw.new_node(Nodes.MaterialOutput, input_kwargs={'Surface': principled_bsdf}, - attrs={'is_active_output': True}) + rgb = kwargs.get("rgb", [0.9, 0.9, 0.9]) + base_color = (*rgb, 1.0) + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": base_color, + "Roughness": kwargs.get("roughness", 0.9), + }, + ) + material_output = nw.new_node( + Nodes.MaterialOutput, + input_kwargs={"Surface": principled_bsdf}, + attrs={"is_active_output": True}, + ) def shader_shelves_white_sampler(): params = dict() v = uniform(0.7, 1.0) - base_color = [v * (1. + normal(0, 0.005)), - v * (1. + normal(0, 0.005)), - v * (1. 
+ normal(0, 0.005))] - params['rgb'] = base_color - params['roughness'] = uniform(0.7, 1.0) + base_color = [ + v * (1.0 + normal(0, 0.005)), + v * (1.0 + normal(0, 0.005)), + v * (1.0 + normal(0, 0.005)), + ] + params["rgb"] = base_color + params["roughness"] = uniform(0.7, 1.0) return params def shader_shelves_black_metallic(nw: NodeWrangler, **kwargs): # Code generated using version 2.6.4 of the node_transpiler - color = (*kwargs.get('rgb', [0., 0., 0.]), 1.) - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={ - 'Base Color': color, - 'Metallic': kwargs.get('metallic', 0.65)}) - material_output = nw.new_node(Nodes.MaterialOutput, input_kwargs={'Surface': principled_bsdf}, - attrs={'is_active_output': True}) + color = (*kwargs.get("rgb", [0.0, 0.0, 0.0]), 1.0) + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={"Base Color": color, "Metallic": kwargs.get("metallic", 0.65)}, + ) + material_output = nw.new_node( + Nodes.MaterialOutput, + input_kwargs={"Surface": principled_bsdf}, + attrs={"is_active_output": True}, + ) def shader_shelves_black_metallic_sampler(): params = dict() base_color = [uniform(0, 0.01), uniform(0, 0.01), uniform(0, 0.01)] - params['rgb'] = base_color - params['metallic'] = uniform(0.45, 0.75) + params["rgb"] = base_color + params["metallic"] = uniform(0.45, 0.75) return params def shader_shelves_white_metallic(nw: NodeWrangler, **kwargs): # Code generated using version 2.6.4 of the node_transpiler - rgb = kwargs.get('rgb', [0.9, 0.9, 0.9]) - base_color = (*rgb, 1.) - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': base_color, - 'Metallic': kwargs.get('metallic', 0.65)}) + rgb = kwargs.get("rgb", [0.9, 0.9, 0.9]) + base_color = (*rgb, 1.0) + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": base_color, + "Metallic": kwargs.get("metallic", 0.65), + }, + ) - material_output = nw.new_node(Nodes.MaterialOutput, input_kwargs={'Surface': principled_bsdf}, - attrs={'is_active_output': True}) + material_output = nw.new_node( + Nodes.MaterialOutput, + input_kwargs={"Surface": principled_bsdf}, + attrs={"is_active_output": True}, + ) def shader_shelves_white_metallic_sampler(): params = dict() v = uniform(0.7, 1.0) - base_color = [v * (1. + normal(0, 0.005)), - v * (1. + normal(0, 0.005)), - v * (1. 
+ normal(0, 0.005))] - params['rgb'] = base_color - params['metallic'] = uniform(0.45, 0.75) + base_color = [ + v * (1.0 + normal(0, 0.005)), + v * (1.0 + normal(0, 0.005)), + v * (1.0 + normal(0, 0.005)), + ] + params["rgb"] = base_color + params["metallic"] = uniform(0.45, 0.75) return params @@ -97,24 +110,40 @@ def shader_shelves_black_wood(nw: NodeWrangler, **kwargs): # Code generated using version 2.6.5 of the node_transpiler texture_coordinate_1 = nw.new_node(Nodes.TextureCoord) - wave_scale = kwargs.get('wave_scale', 2.0) - if kwargs.get('z_axis_texture', False): + wave_scale = kwargs.get("wave_scale", 2.0) + if kwargs.get("z_axis_texture", False): wave_scale = (wave_scale, wave_scale, 0.1) else: wave_scale = (wave_scale, 0.1, 0.1) - mapping_1 = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': texture_coordinate_1.outputs["Object"], - 'Scale': (0.1, 0.1, 2.0) if kwargs.get('z_axis_texture', False) else (0.1, 2.0, 2.0)}) - - noise_texture_1 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': mapping_1, 'Scale': 100.0000, 'Detail': 10.0000, - 'Distortion': 2.0000}) - - voronoi_texture = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': noise_texture_1.outputs["Fac"], 'Scale': 40.0000}) - - colorramp_1 = nw.new_node(Nodes.ColorRamp, input_kwargs={'Fac': voronoi_texture.outputs["Color"]}) + mapping_1 = nw.new_node( + Nodes.Mapping, + input_kwargs={ + "Vector": texture_coordinate_1.outputs["Object"], + "Scale": (0.1, 0.1, 2.0) + if kwargs.get("z_axis_texture", False) + else (0.1, 2.0, 2.0), + }, + ) + + noise_texture_1 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": mapping_1, + "Scale": 100.0000, + "Detail": 10.0000, + "Distortion": 2.0000, + }, + ) + + voronoi_texture = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={"Vector": noise_texture_1.outputs["Fac"], "Scale": 40.0000}, + ) + + colorramp_1 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": voronoi_texture.outputs["Color"]} + ) colorramp_1.color_ramp.elements[0].position = 0.0864 colorramp_1.color_ramp.elements[0].color = [0.0000, 0.0000, 0.0000, 1.0000] colorramp_1.color_ramp.elements[1].position = 0.1091 @@ -122,65 +151,114 @@ def shader_shelves_black_wood(nw: NodeWrangler, **kwargs): texture_coordinate = nw.new_node(Nodes.TextureCoord) - mapping = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': texture_coordinate.outputs["Object"], - 'Scale': wave_scale}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': mapping, 'Scale': 3.0000, 'Detail': 15.0000, - 'Distortion': 2.0000}) - - musgrave_texture = nw.new_node(Nodes.MusgraveTexture, - input_kwargs={'Vector': noise_texture.outputs["Fac"], 'Scale': 20.0000, - 'Detail': 3.0000}) - - mix_1 = nw.new_node(Nodes.Mix, - input_kwargs={6: musgrave_texture, 7: noise_texture.outputs["Color"]}, - attrs={'data_type': 'RGBA'}) - - colorramp_2 = nw.new_node(Nodes.ColorRamp, input_kwargs={'Fac': mix_1.outputs[2]}) + mapping = nw.new_node( + Nodes.Mapping, + input_kwargs={ + "Vector": texture_coordinate.outputs["Object"], + "Scale": wave_scale, + }, + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": mapping, + "Scale": 3.0000, + "Detail": 15.0000, + "Distortion": 2.0000, + }, + ) + + musgrave_texture = nw.new_node( + Nodes.MusgraveTexture, + input_kwargs={ + "Vector": noise_texture.outputs["Fac"], + "Scale": 20.0000, + "Detail": 3.0000, + }, + ) + + mix_1 = nw.new_node( + Nodes.Mix, + input_kwargs={6: musgrave_texture, 7: noise_texture.outputs["Color"]}, + attrs={"data_type": 
"RGBA"}, + ) + + colorramp_2 = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": mix_1.outputs[2]}) colorramp_2.color_ramp.elements[0].position = 0.0818 colorramp_2.color_ramp.elements[0].color = [0.0000, 0.0000, 0.0000, 1.0000] colorramp_2.color_ramp.elements[1].position = 0.8500 colorramp_2.color_ramp.elements[1].color = [1.0000, 1.0000, 1.0000, 1.0000] - mix_2 = nw.new_node(Nodes.Mix, - input_kwargs={0: 0.6000, 6: colorramp_1.outputs["Color"], 7: colorramp_2.outputs["Color"]}, - attrs={'data_type': 'RGBA'}) - - dark_scale = kwargs.get('dark_scale', 0.005) - gray_scale = kwargs.get('gray_scale', 0.02) - color_scale = [*kwargs.get('rgb', [0.02, 0.002, 0.002]), 1.0] - colorramp = nw.new_node(Nodes.ColorRamp, input_kwargs={'Fac': mix_2}) + mix_2 = nw.new_node( + Nodes.Mix, + input_kwargs={ + 0: 0.6000, + 6: colorramp_1.outputs["Color"], + 7: colorramp_2.outputs["Color"], + }, + attrs={"data_type": "RGBA"}, + ) + + dark_scale = kwargs.get("dark_scale", 0.005) + gray_scale = kwargs.get("gray_scale", 0.02) + color_scale = [*kwargs.get("rgb", [0.02, 0.002, 0.002]), 1.0] + colorramp = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": mix_2}) colorramp.color_ramp.elements.new(0) colorramp.color_ramp.elements[0].position = 0.15 - colorramp.color_ramp.elements[0].color = [dark_scale, dark_scale, dark_scale, 1.0000] + colorramp.color_ramp.elements[0].color = [ + dark_scale, + dark_scale, + dark_scale, + 1.0000, + ] colorramp.color_ramp.elements[1].position = 0.5 - colorramp.color_ramp.elements[1].color = [gray_scale, gray_scale, gray_scale, 1.0000] + colorramp.color_ramp.elements[1].color = [ + gray_scale, + gray_scale, + gray_scale, + 1.0000, + ] colorramp.color_ramp.elements[2].position = 1.0000 colorramp.color_ramp.elements[2].color = color_scale - mix_3 = nw.new_node(Nodes.Mix, - input_kwargs={0: 0.0040, 6: colorramp_1.outputs["Color"], 7: colorramp_2.outputs["Color"]}, - attrs={'data_type': 'RGBA'}) - - bump = nw.new_node(Nodes.Bump, input_kwargs={'Strength': 0.5000, 'Height': mix_3.outputs[2]}) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': colorramp.outputs["Color"], - 'Roughness': kwargs.get('roughness', 0.9), 'Normal': bump}) - - material_output = nw.new_node(Nodes.MaterialOutput, input_kwargs={'Surface': principled_bsdf}, - attrs={'is_active_output': True}) + mix_3 = nw.new_node( + Nodes.Mix, + input_kwargs={ + 0: 0.0040, + 6: colorramp_1.outputs["Color"], + 7: colorramp_2.outputs["Color"], + }, + attrs={"data_type": "RGBA"}, + ) + + bump = nw.new_node( + Nodes.Bump, input_kwargs={"Strength": 0.5000, "Height": mix_3.outputs[2]} + ) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": colorramp.outputs["Color"], + "Roughness": kwargs.get("roughness", 0.9), + "Normal": bump, + }, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, + input_kwargs={"Surface": principled_bsdf}, + attrs={"is_active_output": True}, + ) def shader_shelves_black_wood_sampler(): params = dict() - params['wave_scale'] = uniform(1., 3.) 
- params['dark_scale'] = uniform(0.0, 0.01) - params['gray_scale'] = uniform(0.01, 0.03) - params['rgb'] = [uniform(0.015, 0.035), uniform(0., 0.01), uniform(0.0, 0.01)] - params['roughness'] = uniform(0.75, 1.0) + params["wave_scale"] = uniform(1.0, 3.0) + params["dark_scale"] = uniform(0.0, 0.01) + params["gray_scale"] = uniform(0.01, 0.03) + params["rgb"] = [uniform(0.015, 0.035), uniform(0.0, 0.01), uniform(0.0, 0.01)] + params["roughness"] = uniform(0.75, 1.0) return params @@ -188,24 +266,40 @@ def shader_shelves_wood(nw: NodeWrangler, **kwargs): # Code generated using version 2.6.5 of the node_transpiler texture_coordinate_1 = nw.new_node(Nodes.TextureCoord) - wave_scale = kwargs.get('wave_scale', 2.0) - if kwargs.get('z_axis_texture', False): + wave_scale = kwargs.get("wave_scale", 2.0) + if kwargs.get("z_axis_texture", False): wave_scale = (wave_scale, wave_scale, 0.1) else: wave_scale = (wave_scale, 0.1, 0.1) - mapping_1 = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': texture_coordinate_1.outputs["Object"], - 'Scale': (0.1, 0.1, 2.0) if kwargs.get('z_axis_texture', False) else (0.1, 2.0, 2.0)}) - - noise_texture_1 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': mapping_1, 'Scale': 100.0000, 'Detail': 10.0000, - 'Distortion': 2.0000}) - - voronoi_texture = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': noise_texture_1.outputs["Fac"], 'Scale': 40.0000}) - - colorramp_1 = nw.new_node(Nodes.ColorRamp, input_kwargs={'Fac': voronoi_texture.outputs["Color"]}) + mapping_1 = nw.new_node( + Nodes.Mapping, + input_kwargs={ + "Vector": texture_coordinate_1.outputs["Object"], + "Scale": (0.1, 0.1, 2.0) + if kwargs.get("z_axis_texture", False) + else (0.1, 2.0, 2.0), + }, + ) + + noise_texture_1 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": mapping_1, + "Scale": 100.0000, + "Detail": 10.0000, + "Distortion": 2.0000, + }, + ) + + voronoi_texture = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={"Vector": noise_texture_1.outputs["Fac"], "Scale": 40.0000}, + ) + + colorramp_1 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": voronoi_texture.outputs["Color"]} + ) colorramp_1.color_ramp.elements[0].position = 0.0864 colorramp_1.color_ramp.elements[0].color = [0.0000, 0.0000, 0.0000, 1.0000] colorramp_1.color_ramp.elements[1].position = 0.1091 @@ -213,37 +307,60 @@ def shader_shelves_wood(nw: NodeWrangler, **kwargs): texture_coordinate = nw.new_node(Nodes.TextureCoord) - mapping = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': texture_coordinate.outputs["Object"], - 'Scale': wave_scale}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': mapping, 'Scale': 3.0000, 'Detail': 15.0000, - 'Distortion': 2.0000}) - - musgrave_texture = nw.new_node(Nodes.MusgraveTexture, - input_kwargs={'Vector': noise_texture.outputs["Fac"], 'Scale': 20.0000, - 'Detail': 3.0000}) - - mix_1 = nw.new_node(Nodes.Mix, - input_kwargs={6: musgrave_texture, 7: noise_texture.outputs["Color"]}, - attrs={'data_type': 'RGBA'}) - - colorramp_2 = nw.new_node(Nodes.ColorRamp, input_kwargs={'Fac': mix_1.outputs[2]}) + mapping = nw.new_node( + Nodes.Mapping, + input_kwargs={ + "Vector": texture_coordinate.outputs["Object"], + "Scale": wave_scale, + }, + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": mapping, + "Scale": 3.0000, + "Detail": 15.0000, + "Distortion": 2.0000, + }, + ) + + musgrave_texture = nw.new_node( + Nodes.MusgraveTexture, + input_kwargs={ + "Vector": noise_texture.outputs["Fac"], + 
"Scale": 20.0000, + "Detail": 3.0000, + }, + ) + + mix_1 = nw.new_node( + Nodes.Mix, + input_kwargs={6: musgrave_texture, 7: noise_texture.outputs["Color"]}, + attrs={"data_type": "RGBA"}, + ) + + colorramp_2 = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": mix_1.outputs[2]}) colorramp_2.color_ramp.elements[0].position = 0.0818 colorramp_2.color_ramp.elements[0].color = [0.0000, 0.0000, 0.0000, 1.0000] colorramp_2.color_ramp.elements[1].position = 0.8500 colorramp_2.color_ramp.elements[1].color = [1.0000, 1.0000, 1.0000, 1.0000] - mix_2 = nw.new_node(Nodes.Mix, - input_kwargs={0: 0.6000, 6: colorramp_1.outputs["Color"], 7: colorramp_2.outputs["Color"]}, - attrs={'data_type': 'RGBA'}) - - bright_hsv = kwargs.get('bright_hsv', [0.068, 0.665, 0.805]) - mid_hsv = kwargs.get('mid_hsv', [0.042, 0.853, 0.447]) - dark_hsv = kwargs.get('dark_hsv', [0.043, 0.882, 0.183]) - - colorramp = nw.new_node(Nodes.ColorRamp, input_kwargs={'Fac': mix_2}) + mix_2 = nw.new_node( + Nodes.Mix, + input_kwargs={ + 0: 0.6000, + 6: colorramp_1.outputs["Color"], + 7: colorramp_2.outputs["Color"], + }, + attrs={"data_type": "RGBA"}, + ) + + bright_hsv = kwargs.get("bright_hsv", [0.068, 0.665, 0.805]) + mid_hsv = kwargs.get("mid_hsv", [0.042, 0.853, 0.447]) + dark_hsv = kwargs.get("dark_hsv", [0.043, 0.882, 0.183]) + + colorramp = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": mix_2}) colorramp.color_ramp.elements.new(0) colorramp.color_ramp.elements[0].position = 0.02 colorramp.color_ramp.elements[0].color = hsv2rgba(dark_hsv) @@ -252,52 +369,88 @@ def shader_shelves_wood(nw: NodeWrangler, **kwargs): colorramp.color_ramp.elements[2].position = 0.8 colorramp.color_ramp.elements[2].color = hsv2rgba(bright_hsv) - mix_3 = nw.new_node(Nodes.Mix, - input_kwargs={0: 0.0040, 6: colorramp_1.outputs["Color"], 7: colorramp_2.outputs["Color"]}, - attrs={'data_type': 'RGBA'}) - - bump = nw.new_node(Nodes.Bump, input_kwargs={'Strength': 0.5000, 'Height': mix_3.outputs[2]}) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': colorramp.outputs["Color"], - 'Roughness': kwargs.get('roughness', 0.9), 'Normal': bump}) - - material_output = nw.new_node(Nodes.MaterialOutput, input_kwargs={'Surface': principled_bsdf}, - attrs={'is_active_output': True}) + mix_3 = nw.new_node( + Nodes.Mix, + input_kwargs={ + 0: 0.0040, + 6: colorramp_1.outputs["Color"], + 7: colorramp_2.outputs["Color"], + }, + attrs={"data_type": "RGBA"}, + ) + + bump = nw.new_node( + Nodes.Bump, input_kwargs={"Strength": 0.5000, "Height": mix_3.outputs[2]} + ) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": colorramp.outputs["Color"], + "Roughness": kwargs.get("roughness", 0.9), + "Normal": bump, + }, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, + input_kwargs={"Surface": principled_bsdf}, + attrs={"is_active_output": True}, + ) def shader_shelves_wood_sampler(): params = dict() - params['bright_hsv'] = [uniform(0.03, 0.09), uniform(0.5, 0.7), uniform(0.7, 1.0)] - params['mid_hsv'] = [uniform(0.02, 0.06), uniform(0.6, 1.0), uniform(0.3, 0.6)] - params['dark_hsv'] = [uniform(0.03, 0.05), uniform(0.6, 1.0), uniform(0.1, 0.3)] - params['wave_scale'] = uniform(1., 3.) 
- params['roughness'] = uniform(0.75, 1.0) + params["bright_hsv"] = [uniform(0.03, 0.09), uniform(0.5, 0.7), uniform(0.7, 1.0)] + params["mid_hsv"] = [uniform(0.02, 0.06), uniform(0.6, 1.0), uniform(0.3, 0.6)] + params["dark_hsv"] = [uniform(0.03, 0.05), uniform(0.6, 1.0), uniform(0.1, 0.3)] + params["wave_scale"] = uniform(1.0, 3.0) + params["roughness"] = uniform(0.75, 1.0) return params def get_shelf_material(name, **kwargs): match name: - case 'white': - shader_func = np.random.choice([shader_shelves_white, shader_rough_plastic], p=[.6, .4]) - case 'black_wood': - shader_func = np.random.choice([shader_shelves_black_wood, wood.shader_wood], p=[.6, .4]) - case 'wood': - shader_func = np.random.choice([shader_shelves_wood, wood.shader_wood], p=[.6, .4]) - - case 'glass': + case "white": + shader_func = np.random.choice( + [shader_shelves_white, shader_rough_plastic], p=[0.6, 0.4] + ) + case "black_wood": + shader_func = np.random.choice( + [shader_shelves_black_wood, wood.shader_wood], p=[0.6, 0.4] + ) + case "wood": + shader_func = np.random.choice( + [shader_shelves_wood, wood.shader_wood], p=[0.6, 0.4] + ) + + case "glass": shader_func = shader_glass case _: - shader_func = np.random.choice([shader_shelves_white, shader_rough_plastic, - shader_shelves_black_wood, wood.shader_wood, - shader_shelves_wood], p=[.3, .2, .3, .1, .1]) - r = uniform() - if name == 'metal': + shader_func = np.random.choice( + [ + shader_shelves_white, + shader_rough_plastic, + shader_shelves_black_wood, + wood.shader_wood, + shader_shelves_wood, + ], + p=[0.3, 0.2, 0.3, 0.1, 0.1], + ) + r = uniform() + if name == "metal": shader_func = np.random.choice(metal_shader_list) else: - shader_func = np.random.choice([shader_shelves_white, shader_rough_plastic, - shader_shelves_black_wood, wood.shader_wood, - shader_shelves_wood], p=[.3, .2, .3, .1, .1]) + shader_func = np.random.choice( + [ + shader_shelves_white, + shader_rough_plastic, + shader_shelves_black_wood, + wood.shader_wood, + shader_shelves_wood, + ], + p=[0.3, 0.2, 0.3, 0.1, 0.1], + ) # elif r < .3: # shader_func = rg(fabric_shader_list) return surface.shaderfunc_to_material(shader_func, **kwargs) diff --git a/infinigen/assets/materials/simple_brownish.py b/infinigen/assets/materials/simple_brownish.py index 7043a84da..b0fd32fc4 100644 --- a/infinigen/assets/materials/simple_brownish.py +++ b/infinigen/assets/materials/simple_brownish.py @@ -4,30 +4,45 @@ # Authors: Beining Han -from numpy.random import uniform as U, normal as N, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category +from numpy.random import normal as N +from numpy.random import uniform as U + from infinigen.core import surface +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler def shader_simple_brown(nw: NodeWrangler): # Code generated using version 2.4.3 of the node_transpiler def noise(): - return nw.new_node(Nodes.NoiseTexture, attrs={'noise_dimensions': '4D'}, - input_kwargs={'W': U(0, 100), 'Scale': N(60, 25), 'Detail': U(0, 10), 'Roughness': U(0, 1), - 'Distortion': U(0, 3)}) - - rough = nw.new_node(Nodes.MapRange, attrs={'interpolation_type': 'SMOOTHSTEP'}, - input_kwargs={'Value': noise(), 3: U(0.1, 0.8), 4: U(0.1, 0.8)}) + return nw.new_node( + Nodes.NoiseTexture, + attrs={"noise_dimensions": "4D"}, + input_kwargs={ + "W": U(0, 100), + "Scale": N(60, 25), + "Detail": U(0, 10), + "Roughness": U(0, 1), + "Distortion": U(0, 3), + }, + ) + + 
rough = nw.new_node( + Nodes.MapRange, + attrs={"interpolation_type": "SMOOTHSTEP"}, + input_kwargs={"Value": noise(), 3: U(0.1, 0.8), 4: U(0.1, 0.8)}, + ) v = U(0.01, 0.2) - base_color = (v, v * U(0, 0.15), v * U(0, 0.12), 1.) - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': base_color, 'Roughness': rough.outputs["Result"]}) + base_color = (v, v * U(0, 0.15), v * U(0, 0.12), 1.0) + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={"Base Color": base_color, "Roughness": rough.outputs["Result"]}, + ) - material_output = nw.new_node(Nodes.MaterialOutput, input_kwargs={'Surface': principled_bsdf}) + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf} + ) def apply(obj, selection=None, **kwargs): - surface.add_material(obj, shader_simple_brown, selection=selection) \ No newline at end of file + surface.add_material(obj, shader_simple_brown, selection=selection) diff --git a/infinigen/assets/materials/simple_greenery.py b/infinigen/assets/materials/simple_greenery.py index 10f3b9f79..18a605827 100644 --- a/infinigen/assets/materials/simple_greenery.py +++ b/infinigen/assets/materials/simple_greenery.py @@ -3,42 +3,73 @@ # Authors: Alexander Raistrick -import bpy -import mathutils -from numpy.random import uniform as U, normal as N, randint +from numpy.random import normal as N +from numpy.random import uniform as U +from infinigen.core import surface from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils from infinigen.core.util.color import color_category -from infinigen.core import surface + def shader_simple_greenery(nw: NodeWrangler): # Code generated using version 2.4.3 of the node_transpiler - + def noise(): - return nw.new_node(Nodes.NoiseTexture, attrs={'noise_dimensions': '4D'}, - input_kwargs={'W': U(0, 100), 'Scale': N(60, 25), 'Detail': U(0, 10), 'Roughness': U(0, 1), 'Distortion': U(0, 3)}) - - - fac_color = nw.new_node(Nodes.MapRange, attrs={'interpolation_type': 'SMOOTHSTEP'}, - input_kwargs={'Value': noise(), 4: U(0.1, 1)}) - color = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': fac_color.outputs["Result"], 'Color1': color_category('greenery'), 'Color2': color_category('greenery')}) - - translucent_bsdf = nw.new_node(Nodes.TranslucentBSDF, - input_kwargs={'Color': color}) - - rough = nw.new_node(Nodes.MapRange, attrs={'interpolation_type': 'SMOOTHSTEP'}, - input_kwargs={'Value': noise(), 3: U(0.1, 0.8), 4: U(0.1, 0.8)}) - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': color, 'Roughness': rough.outputs["Result"]}) - - fac_translucent = nw.new_node(Nodes.MapRange, attrs={'interpolation_type': 'SMOOTHSTEP'}, - input_kwargs={'Value': noise(), 3: U(0.6, 0.9)}) - mix_shader = nw.new_node(Nodes.MixShader, - input_kwargs={'Fac': fac_translucent.outputs["Result"], 1: translucent_bsdf, 2: principled_bsdf}) - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': mix_shader}) - + return nw.new_node( + Nodes.NoiseTexture, + attrs={"noise_dimensions": "4D"}, + input_kwargs={ + "W": U(0, 100), + "Scale": N(60, 25), + "Detail": U(0, 10), + "Roughness": U(0, 1), + "Distortion": U(0, 3), + }, + ) + + fac_color = nw.new_node( + Nodes.MapRange, + attrs={"interpolation_type": "SMOOTHSTEP"}, + input_kwargs={"Value": noise(), 4: U(0.1, 1)}, + ) + color = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": fac_color.outputs["Result"], + "Color1": color_category("greenery"), + 
"Color2": color_category("greenery"), + }, + ) + + translucent_bsdf = nw.new_node(Nodes.TranslucentBSDF, input_kwargs={"Color": color}) + + rough = nw.new_node( + Nodes.MapRange, + attrs={"interpolation_type": "SMOOTHSTEP"}, + input_kwargs={"Value": noise(), 3: U(0.1, 0.8), 4: U(0.1, 0.8)}, + ) + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={"Base Color": color, "Roughness": rough.outputs["Result"]}, + ) + + fac_translucent = nw.new_node( + Nodes.MapRange, + attrs={"interpolation_type": "SMOOTHSTEP"}, + input_kwargs={"Value": noise(), 3: U(0.6, 0.9)}, + ) + mix_shader = nw.new_node( + Nodes.MixShader, + input_kwargs={ + "Fac": fac_translucent.outputs["Result"], + 1: translucent_bsdf, + 2: principled_bsdf, + }, + ) + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": mix_shader} + ) + + def apply(obj, selection=None, **kwargs): surface.add_material(obj, shader_simple_greenery, selection=selection) diff --git a/infinigen/assets/materials/simple_whitish.py b/infinigen/assets/materials/simple_whitish.py index 3c9ec2e8f..61373a3df 100644 --- a/infinigen/assets/materials/simple_whitish.py +++ b/infinigen/assets/materials/simple_whitish.py @@ -4,31 +4,50 @@ # Authors: Beining Han -from numpy.random import uniform as U, normal as N, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category +from numpy.random import normal as N +from numpy.random import uniform as U + from infinigen.core import surface +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler def shader_simple_white(nw: NodeWrangler): # Code generated using version 2.4.3 of the node_transpiler def noise(): - return nw.new_node(Nodes.NoiseTexture, attrs={'noise_dimensions': '4D'}, - input_kwargs={'W': U(0, 100), 'Scale': N(60, 25), 'Detail': U(0, 10), 'Roughness': U(0, 1), - 'Distortion': U(0, 3)}) - - rough = nw.new_node(Nodes.MapRange, attrs={'interpolation_type': 'SMOOTHSTEP'}, - input_kwargs={'Value': noise(), 3: U(0.1, 0.8), 4: U(0.1, 0.8)}) + return nw.new_node( + Nodes.NoiseTexture, + attrs={"noise_dimensions": "4D"}, + input_kwargs={ + "W": U(0, 100), + "Scale": N(60, 25), + "Detail": U(0, 10), + "Roughness": U(0, 1), + "Distortion": U(0, 3), + }, + ) + + rough = nw.new_node( + Nodes.MapRange, + attrs={"interpolation_type": "SMOOTHSTEP"}, + input_kwargs={"Value": noise(), 3: U(0.1, 0.8), 4: U(0.1, 0.8)}, + ) v = U(0.7, 1.0) - base_color = (v * (1. + N(0, 0.05)), v * (1. + N(0, 0.05)), v * (1. + N(0, 0.05)), 1.) 
- principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': base_color, 'Roughness': rough.outputs["Result"]}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': principled_bsdf}) + base_color = ( + v * (1.0 + N(0, 0.05)), + v * (1.0 + N(0, 0.05)), + v * (1.0 + N(0, 0.05)), + 1.0, + ) + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={"Base Color": base_color, "Roughness": rough.outputs["Result"]}, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf} + ) def apply(obj, selection=None, **kwargs): - surface.add_material(obj, shader_simple_white, selection=selection) \ No newline at end of file + surface.add_material(obj, shader_simple_white, selection=selection) diff --git a/infinigen/assets/materials/slimy.py b/infinigen/assets/materials/slimy.py index a5c8926e5..d2062dc02 100644 --- a/infinigen/assets/materials/slimy.py +++ b/infinigen/assets/materials/slimy.py @@ -4,124 +4,176 @@ # Authors: Mingzhe Wang # Acknowledgement: This file draws inspiration from https://blender.stackexchange.com/questions/111219/slime-effect-material -import os, sys -import numpy as np -import math as ma -from infinigen.assets.materials.utils.surface_utils import clip, sample_range, sample_ratio, sample_color, geo_voronoi_noise -import bpy -import mathutils -from numpy.random import uniform, normal -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler + +from numpy.random import uniform + +from infinigen.assets.materials.utils.surface_utils import ( + sample_color, + sample_range, +) from infinigen.core import surface -import random +from infinigen.core.nodes.node_wrangler import Nodes + def shader_slimy(nw, rand=False, **input_kwargs): texture_coordinate = nw.new_node(Nodes.TextureCoord) - + value = nw.new_node(Nodes.Value) - value.outputs["Value"].default_value = input_kwargs['scale'] if 'scale' in input_kwargs else 0.5 - - mapping = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': texture_coordinate.outputs["Generated"], 'Scale': value}) - - musgrave_texture = nw.new_node(Nodes.MusgraveTexture, - input_kwargs={'Vector': mapping, 'Scale': 6.4}, - attrs={'musgrave_dimensions': '4D'}) + value.outputs["Value"].default_value = ( + input_kwargs["scale"] if "scale" in input_kwargs else 0.5 + ) + + mapping = nw.new_node( + Nodes.Mapping, + input_kwargs={ + "Vector": texture_coordinate.outputs["Generated"], + "Scale": value, + }, + ) + + musgrave_texture = nw.new_node( + Nodes.MusgraveTexture, + input_kwargs={"Vector": mapping, "Scale": 6.4}, + attrs={"musgrave_dimensions": "4D"}, + ) if rand: musgrave_texture.inputs["W"].default_value = sample_range(-5, 5) - colorramp_1 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': musgrave_texture}) + colorramp_1 = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": musgrave_texture}) colorramp_1.color_ramp.elements[0].position = 0.0399 colorramp_1.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) colorramp_1.color_ramp.elements[1].position = 0.2464 colorramp_1.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - - noise_texture_2 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': mapping, 'Scale': 7.6, 'Distortion': 3.0}, - attrs={'noise_dimensions': '4D'}) + + noise_texture_2 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={"Vector": mapping, "Scale": 7.6, "Distortion": 3.0}, + attrs={"noise_dimensions": "4D"}, + ) if rand: noise_texture_2.inputs["W"].default_value = sample_range(-5, 5) - - colorramp_4 = 
nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': noise_texture_2.outputs["Fac"]}) + + colorramp_4 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": noise_texture_2.outputs["Fac"]} + ) colorramp_4.color_ramp.elements[0].position = 0.3554 colorramp_4.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) colorramp_4.color_ramp.elements[1].position = 1.0 colorramp_4.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - - mix_1 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Color1': colorramp_1.outputs["Color"], 'Color2': colorramp_4.outputs["Color"]}) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': (0.6605, 0.0279, 0.0359, 1.0), 'Subsurface': 0.2, 'Subsurface Color': (0.4621, 0.0213, 0.0265, 1.0), 'Specular': 0.8591, 'Roughness': mix_1}) + + mix_1 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Color1": colorramp_1.outputs["Color"], + "Color2": colorramp_4.outputs["Color"], + }, + ) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": (0.6605, 0.0279, 0.0359, 1.0), + "Subsurface": 0.2, + "Subsurface Color": (0.4621, 0.0213, 0.0265, 1.0), + "Specular": 0.8591, + "Roughness": mix_1, + }, + ) if rand: - sample_color(principled_bsdf.inputs['Base Color'].default_value) - sample_color(principled_bsdf.inputs['Subsurface Color'].default_value) + sample_color(principled_bsdf.inputs["Base Color"].default_value) + sample_color(principled_bsdf.inputs["Subsurface Color"].default_value) - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': principled_bsdf}) + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf} + ) -def geo_slimy(nw, rand=False, **input_kwargs): +def geo_slimy(nw, rand=False, **input_kwargs): group_input = nw.new_node(Nodes.GroupInput) position = nw.new_node(Nodes.InputPosition) - + value = nw.new_node(Nodes.Value) - value.outputs["Value"].default_value = input_kwargs['scale'] if 'scale' in input_kwargs else 0.2 - - vector_math = nw.new_node(Nodes.VectorMath, + value.outputs["Value"].default_value = ( + input_kwargs["scale"] if "scale" in input_kwargs else 0.2 + ) + + vector_math = nw.new_node( + Nodes.VectorMath, input_kwargs={0: position, 1: value}, - attrs={'operation': 'MULTIPLY'}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': vector_math.outputs["Vector"], 'Distortion': 2.0}, - attrs={'noise_dimensions': '4D'}) + attrs={"operation": "MULTIPLY"}, + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={"Vector": vector_math.outputs["Vector"], "Distortion": 2.0}, + attrs={"noise_dimensions": "4D"}, + ) if rand: - noise_texture.inputs['W'].default_value = sample_range(-5, 5) - noise_texture.inputs['Scale'].default_value = sample_range(3, 7) - noise_texture.inputs['Distortion'].default_value = sample_range(1, 4) - - wave_texture = nw.new_node(Nodes.WaveTexture, - input_kwargs={'Vector': noise_texture.outputs["Fac"]}) - - colorramp = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': wave_texture.outputs["Fac"]}) + noise_texture.inputs["W"].default_value = sample_range(-5, 5) + noise_texture.inputs["Scale"].default_value = sample_range(3, 7) + noise_texture.inputs["Distortion"].default_value = sample_range(1, 4) + + wave_texture = nw.new_node( + Nodes.WaveTexture, input_kwargs={"Vector": noise_texture.outputs["Fac"]} + ) + + colorramp = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": wave_texture.outputs["Fac"]} + ) colorramp.color_ramp.elements[0].position = 0.0 
colorramp.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) colorramp.color_ramp.elements[1].position = 1.0 colorramp.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - - math = nw.new_node(Nodes.Math, + + math = nw.new_node( + Nodes.Math, input_kwargs={0: 1.0, 1: colorramp.outputs["Color"]}, - attrs={'operation': 'SUBTRACT'}) - + attrs={"operation": "SUBTRACT"}, + ) + normal = nw.new_node(Nodes.InputNormal) - - vector_math_1 = nw.new_node(Nodes.VectorMath, + + vector_math_1 = nw.new_node( + Nodes.VectorMath, input_kwargs={0: math, 1: normal}, - attrs={'operation': 'MULTIPLY'}) - + attrs={"operation": "MULTIPLY"}, + ) + value_1 = nw.new_node(Nodes.Value) value_1.outputs["Value"].default_value = uniform(0.005, 0.02) if rand else 0.015 - vector_math_2 = nw.new_node(Nodes.VectorMath, + vector_math_2 = nw.new_node( + Nodes.VectorMath, input_kwargs={0: vector_math_1.outputs["Vector"], 1: value_1}, - attrs={'operation': 'MULTIPLY'}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': group_input, 'Offset': vector_math_2.outputs["Vector"]}) - - capture_attribute = nw.new_node(Nodes.CaptureAttribute, + attrs={"operation": "MULTIPLY"}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, input_kwargs={ - 'Geometry': set_position, - 1: math}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': capture_attribute.outputs["Geometry"], 'Attribute': capture_attribute.outputs["Attribute"]}) + "Geometry": group_input, + "Offset": vector_math_2.outputs["Vector"], + }, + ) + + capture_attribute = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={"Geometry": set_position, 1: math}, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": capture_attribute.outputs["Geometry"], + "Attribute": capture_attribute.outputs["Attribute"], + }, + ) + def apply(obj, geo_kwargs=None, shader_kwargs=None, **kwargs): - surface.add_geomod(obj, geo_slimy, apply=False, input_kwargs=geo_kwargs, attributes=['offset']) + surface.add_geomod( + obj, geo_slimy, apply=False, input_kwargs=geo_kwargs, attributes=["offset"] + ) surface.add_material(obj, shader_slimy, reuse=False, input_kwargs=shader_kwargs) diff --git a/infinigen/assets/materials/smoke_material.py b/infinigen/assets/materials/smoke_material.py index 305cd0914..a87d32a2d 100644 --- a/infinigen/assets/materials/smoke_material.py +++ b/infinigen/assets/materials/smoke_material.py @@ -1,29 +1,30 @@ - # Copyright (c) Princeton University. # This source code is licensed under the BSD 3-clause license found in the LICENSE file in the root directory of this source tree. 
# Authors: Karhan Kayan -import bpy -import mathutils -from numpy.random import uniform, normal -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface import numpy as np + +from infinigen.core import surface +from infinigen.core.nodes.node_wrangler import Nodes from infinigen.core.util.random import random_color_neighbour + def smoke_material(nw): # Code generated using version 2.3.2 of the node_transpiler - principled_volume = nw.new_node(Nodes.PrincipledVolume, - input_kwargs={'Color': random_color_neighbour((0.3803, 0.3803, 0.3803, 1.0)), 'Density': np.random.uniform(1.0, 5.0)}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Volume': principled_volume}) + principled_volume = nw.new_node( + Nodes.PrincipledVolume, + input_kwargs={ + "Color": random_color_neighbour((0.3803, 0.3803, 0.3803, 1.0)), + "Density": np.random.uniform(1.0, 5.0), + }, + ) + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Volume": principled_volume} + ) def apply(obj, selection=None, **kwargs): - surface.add_material(obj, smoke_material, selection=selection) \ No newline at end of file + surface.add_material(obj, smoke_material, selection=selection) diff --git a/infinigen/assets/materials/snake_plant.py b/infinigen/assets/materials/snake_plant.py index f4dc29af0..9826215b5 100644 --- a/infinigen/assets/materials/snake_plant.py +++ b/infinigen/assets/materials/snake_plant.py @@ -4,28 +4,26 @@ # Authors: Beining Han # Acknowledgements: This file draws inspiration from https://www.youtube.com/watch?v=sHr8LjfX09c -import bpy -import bpy -import mathutils -from numpy.random import uniform as U, normal as N, randint, choice +import colorsys + +import numpy as np +from numpy.random import normal as N +from numpy.random import uniform as U + +from infinigen.core import surface from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils from infinigen.core.util.color import hsv2rgba -from infinigen.core import surface -import numpy as np -import colorsys def shader_snake_plant(nw: NodeWrangler): # Code generated using version 2.4.3 of the node_transpiler r = 2.0 * np.random.choice([0, 1], p=(0.4, 0.6)) - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: 0.001, 1: r}, - attrs={'operation': 'MULTIPLY'}) + multiply = nw.new_node( + Nodes.Math, input_kwargs={0: 0.001, 1: r}, attrs={"operation": "MULTIPLY"} + ) - colorramp_1 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': multiply}) + colorramp_1 = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": multiply}) e = U(0.34, 0.42) colorramp_1.color_ramp.elements[0].position = e colorramp_1.color_ramp.elements[0].color = (1.0, 1.0, 1.0, 1.0) @@ -34,63 +32,89 @@ def shader_snake_plant(nw: NodeWrangler): texture_coordinate_1 = nw.new_node(Nodes.TextureCoord) - mapping_1 = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': texture_coordinate_1.outputs["Object"]}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': mapping_1, 'Scale': U(0.2, 1.0), 'Roughness': 1.0}) - - multiply_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: noise_texture.outputs["Fac"], 1: (1.0, 1.0, 0.6)}, - attrs={'operation': 'MULTIPLY'}) - - add = nw.new_node(Nodes.VectorMath, - input_kwargs={0: multiply_1.outputs["Vector"], 1: mapping_1}) - - wave_texture = nw.new_node(Nodes.WaveTexture, - input_kwargs={'Vector': 
add.outputs["Vector"], 'Scale': U(1.0, 2.5), - 'Distortion': U(2.0, 4.5), - 'Detail Scale': U(2.0, 8.0), 'Detail Roughness': 2.0}, - attrs={'bands_direction': 'Z'}) + mapping_1 = nw.new_node( + Nodes.Mapping, input_kwargs={"Vector": texture_coordinate_1.outputs["Object"]} + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={"Vector": mapping_1, "Scale": U(0.2, 1.0), "Roughness": 1.0}, + ) + + multiply_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: noise_texture.outputs["Fac"], 1: (1.0, 1.0, 0.6)}, + attrs={"operation": "MULTIPLY"}, + ) + + add = nw.new_node( + Nodes.VectorMath, input_kwargs={0: multiply_1.outputs["Vector"], 1: mapping_1} + ) + + wave_texture = nw.new_node( + Nodes.WaveTexture, + input_kwargs={ + "Vector": add.outputs["Vector"], + "Scale": U(1.0, 2.5), + "Distortion": U(2.0, 4.5), + "Detail Scale": U(2.0, 8.0), + "Detail Roughness": 2.0, + }, + attrs={"bands_direction": "Z"}, + ) w = U(0.2, 0.7) - greater_than = nw.new_node(Nodes.Math, - input_kwargs={0: wave_texture.outputs["Fac"], 1: w}, - attrs={'operation': 'GREATER_THAN'}) - - mapping_2 = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': texture_coordinate_1.outputs["Object"], 'Scale': (7.0, 7.0, 0.05)}) - - noise_texture_1 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': mapping_2, 'Scale': U(20.0, 40.0)}) - - multiply_2 = nw.new_node(Nodes.Math, - input_kwargs={0: greater_than, 1: noise_texture_1.outputs["Fac"]}, - attrs={'operation': 'MULTIPLY'}) - - colorramp_8 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': multiply_2}) + greater_than = nw.new_node( + Nodes.Math, + input_kwargs={0: wave_texture.outputs["Fac"], 1: w}, + attrs={"operation": "GREATER_THAN"}, + ) + + mapping_2 = nw.new_node( + Nodes.Mapping, + input_kwargs={ + "Vector": texture_coordinate_1.outputs["Object"], + "Scale": (7.0, 7.0, 0.05), + }, + ) + + noise_texture_1 = nw.new_node( + Nodes.NoiseTexture, input_kwargs={"Vector": mapping_2, "Scale": U(20.0, 40.0)} + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: greater_than, 1: noise_texture_1.outputs["Fac"]}, + attrs={"operation": "MULTIPLY"}, + ) + + colorramp_8 = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": multiply_2}) colorramp_8.color_ramp.elements[0].position = 0.2318 colorramp_8.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) colorramp_8.color_ramp.elements[1].position = U(0.55, 0.75) colorramp_8.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) r = 0.6 + (w - 0.2) * 0.6 - colorramp_4 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': wave_texture.outputs["Fac"]}) + colorramp_4 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": wave_texture.outputs["Fac"]} + ) colorramp_4.color_ramp.elements[0].position = 0.6 + (w - 0.2) * 0.6 colorramp_4.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) colorramp_4.color_ramp.elements[1].position = np.minimum(1.0, r + U(0.02, 0.15)) colorramp_4.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - mix_1 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': U(0.8, 1.0), 'Color1': colorramp_8.outputs["Color"], - 'Color2': colorramp_4.outputs["Color"]}, - attrs={'blend_type': 'ADD'}) + mix_1 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": U(0.8, 1.0), + "Color1": colorramp_8.outputs["Color"], + "Color2": colorramp_4.outputs["Color"], + }, + attrs={"blend_type": "ADD"}, + ) c = [U(0.28, 0.36), U(0.35, 0.80), U(0.20, 0.45)] - colorramp_3 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': mix_1}) + colorramp_3 = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": mix_1}) 
colorramp_3.color_ramp.elements[0].position = 0.0 colorramp_3.color_ramp.elements[0].color = hsv2rgba(c) colorramp_3.color_ramp.elements[1].position = 1.0 @@ -99,16 +123,30 @@ def shader_snake_plant(nw: NodeWrangler): c[0] += N(0, 0.015) colorramp_3.color_ramp.elements[1].color = hsv2rgba(c) - mix = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': colorramp_1.outputs["Color"], 'Color1': (*colorsys.hsv_to_rgb( - *[U(0.16, 0.23), U(0.8, 0.95), U(0.35, 0.8)]), 1.0), - 'Color2': colorramp_3.outputs["Color"]}) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': mix, 'Roughness': U(8.0, 15.0), 'Clearcoat Roughness': 0.0}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': principled_bsdf}) + mix = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": colorramp_1.outputs["Color"], + "Color1": ( + *colorsys.hsv_to_rgb(*[U(0.16, 0.23), U(0.8, 0.95), U(0.35, 0.8)]), + 1.0, + ), + "Color2": colorramp_3.outputs["Color"], + }, + ) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": mix, + "Roughness": U(8.0, 15.0), + "Clearcoat Roughness": 0.0, + }, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf} + ) def apply(obj, selection=None, **kwargs): diff --git a/infinigen/assets/materials/snake_scale.py b/infinigen/assets/materials/snake_scale.py index 6bff30eb6..938f4d0b5 100644 --- a/infinigen/assets/materials/snake_scale.py +++ b/infinigen/assets/materials/snake_scale.py @@ -4,275 +4,476 @@ # Authors: Hongyu Wen -import bpy -import mathutils -import random -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface +from numpy.random import uniform + from infinigen.assets.materials import snake_shaders +from infinigen.core import surface +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler + -@node_utils.to_nodegroup('nodegroup_scale_shape', singleton=False, type='GeometryNodeTree') +@node_utils.to_nodegroup( + "nodegroup_scale_shape", singleton=False, type="GeometryNodeTree" +) def nodegroup_scale_shape(nw: NodeWrangler): # Code generated using version 2.4.3 of the node_transpiler - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'thickness', 0.0), - ('NodeSocketFloat', 'length', 1.0)]) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': group_input.outputs["length"]}) - - curve_line = nw.new_node(Nodes.CurveLine, - input_kwargs={'End': combine_xyz}) - - resample_curve = nw.new_node(Nodes.ResampleCurve, - input_kwargs={'Curve': curve_line}) - + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "thickness", 0.0), + ("NodeSocketFloat", "length", 1.0), + ], + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": group_input.outputs["length"]} + ) + + curve_line = nw.new_node(Nodes.CurveLine, input_kwargs={"End": combine_xyz}) + + resample_curve = nw.new_node( + Nodes.ResampleCurve, input_kwargs={"Curve": curve_line} + ) + spline_parameter = nw.new_node(Nodes.SplineParameter) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': spline_parameter.outputs["Factor"]}) - - float_curve = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': separate_xyz.outputs["X"]}) - 
node_utils.assign_curve(float_curve.mapping.curves[0], [(0.0, 0.25419999999999998), (0.10680000000000001, 0.34379999999999999), (0.39479999999999998, 0.3695), (1.0, 0.0)], handles=['AUTO_CLAMPED', 'AUTO', 'AUTO', 'AUTO']) - - multiply = nw.new_node(Nodes.Math, + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": spline_parameter.outputs["Factor"]} + ) + + float_curve = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": separate_xyz.outputs["X"]} + ) + node_utils.assign_curve( + float_curve.mapping.curves[0], + [ + (0.0, 0.25419999999999998), + (0.10680000000000001, 0.34379999999999999), + (0.39479999999999998, 0.3695), + (1.0, 0.0), + ], + handles=["AUTO_CLAMPED", "AUTO", "AUTO", "AUTO"], + ) + + multiply = nw.new_node( + Nodes.Math, input_kwargs={0: float_curve, 1: group_input.outputs["length"]}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'Y': multiply}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': resample_curve, 'Offset': combine_xyz_1}) - + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Y": multiply}) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={"Geometry": resample_curve, "Offset": combine_xyz_1}, + ) + position = nw.new_node(Nodes.InputPosition) - - separate_xyz_1 = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': position}) - - multiply_1 = nw.new_node(Nodes.Math, + + separate_xyz_1 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": position}) + + multiply_1 = nw.new_node( + Nodes.Math, input_kwargs={0: separate_xyz_1.outputs["Y"], 1: -1.0}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': separate_xyz_1.outputs["X"], 'Y': multiply_1, 'Z': separate_xyz_1.outputs["Z"]}) - - set_position_1 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': set_position, 'Position': combine_xyz_2}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [set_position_1, set_position]}) - - convex_hull = nw.new_node(Nodes.ConvexHull, - input_kwargs={'Geometry': join_geometry}) - - mesh_to_curve = nw.new_node(Nodes.MeshToCurve, - input_kwargs={'Mesh': convex_hull}) - + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_2 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": separate_xyz_1.outputs["X"], + "Y": multiply_1, + "Z": separate_xyz_1.outputs["Z"], + }, + ) + + set_position_1 = nw.new_node( + Nodes.SetPosition, + input_kwargs={"Geometry": set_position, "Position": combine_xyz_2}, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [set_position_1, set_position]} + ) + + convex_hull = nw.new_node( + Nodes.ConvexHull, input_kwargs={"Geometry": join_geometry} + ) + + mesh_to_curve = nw.new_node(Nodes.MeshToCurve, input_kwargs={"Mesh": convex_hull}) + position_1 = nw.new_node(Nodes.InputPosition) - - transfer_attribute = nw.new_node(Nodes.SampleNearestSurface, - input_kwargs={'Mesh': convex_hull, 'Value': position_1}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - separate_xyz_2 = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': (transfer_attribute, "Value")}) - - divide = nw.new_node(Nodes.Math, + + transfer_attribute = nw.new_node( + Nodes.SampleNearestSurface, + input_kwargs={"Mesh": convex_hull, "Value": position_1}, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + separate_xyz_2 = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": 
(transfer_attribute, "Value")} + ) + + divide = nw.new_node( + Nodes.Math, input_kwargs={0: separate_xyz_2.outputs["X"], 1: group_input.outputs["length"]}, - attrs={'operation': 'DIVIDE'}) - - float_curve_1 = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': divide}) - node_utils.assign_curve(float_curve_1.mapping.curves[0], [(0.0114, 0.46110000000000001), (0.51139999999999997, 0.30940000000000001), (1.0, 0.058099999999999999)]) - - set_curve_radius = nw.new_node(Nodes.SetCurveRadius, - input_kwargs={'Curve': mesh_to_curve, 'Radius': float_curve_1}) - - combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'Y': group_input.outputs["thickness"]}) - - curve_line_1 = nw.new_node(Nodes.CurveLine, - input_kwargs={'End': combine_xyz_3}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': set_curve_radius, 'Profile Curve': curve_line_1}) - - convex_hull_1 = nw.new_node(Nodes.ConvexHull, - input_kwargs={'Geometry': curve_to_mesh}) - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [join_geometry, resample_curve]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Convex Hull': convex_hull_1, 'Curve': join_geometry_1}) - -@node_utils.to_nodegroup('nodegroup_scale', singleton=False, type='GeometryNodeTree') + attrs={"operation": "DIVIDE"}, + ) + + float_curve_1 = nw.new_node(Nodes.FloatCurve, input_kwargs={"Value": divide}) + node_utils.assign_curve( + float_curve_1.mapping.curves[0], + [ + (0.0114, 0.46110000000000001), + (0.51139999999999997, 0.30940000000000001), + (1.0, 0.058099999999999999), + ], + ) + + set_curve_radius = nw.new_node( + Nodes.SetCurveRadius, + input_kwargs={"Curve": mesh_to_curve, "Radius": float_curve_1}, + ) + + combine_xyz_3 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"Y": group_input.outputs["thickness"]} + ) + + curve_line_1 = nw.new_node(Nodes.CurveLine, input_kwargs={"End": combine_xyz_3}) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={"Curve": set_curve_radius, "Profile Curve": curve_line_1}, + ) + + convex_hull_1 = nw.new_node( + Nodes.ConvexHull, input_kwargs={"Geometry": curve_to_mesh} + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [join_geometry, resample_curve]} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Convex Hull": convex_hull_1, "Curve": join_geometry_1}, + ) + + +@node_utils.to_nodegroup("nodegroup_scale", singleton=False, type="GeometryNodeTree") def nodegroup_scale(nw: NodeWrangler): # Code generated using version 2.4.3 of the node_transpiler - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloatDistance', 'Radius', 0.040000000000000001), - ('NodeSocketFloat', 'thickness', 0.10000000000000001), - ('NodeSocketVectorEuler', 'Rotation', (0.0, -0.17449999999999999, 0.0))]) - - nodegroup_nodegroup_nodegroup_scale_shape_011 = nw.new_node(nodegroup_scale_shape().name, - input_kwargs={'thickness': group_input.outputs["thickness"], 'length': group_input.outputs["Radius"]}) - + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloatDistance", "Radius", 0.040000000000000001), + ("NodeSocketFloat", "thickness", 0.10000000000000001), + ("NodeSocketVectorEuler", "Rotation", (0.0, -0.17449999999999999, 0.0)), + ], + ) + + nodegroup_nodegroup_nodegroup_scale_shape_011 = nw.new_node( + nodegroup_scale_shape().name, + input_kwargs={ + "thickness": group_input.outputs["thickness"], + "length": group_input.outputs["Radius"], + }, + ) + position 
= nw.new_node(Nodes.InputPosition) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': position}) - - float_curve = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': separate_xyz.outputs["X"]}) - node_utils.assign_curve(float_curve.mapping.curves[0], [(0.0, 0.0), (0.45229999999999998, 0.13439999999999999), (0.92949999999999999, 0.1875), (1.0, 0.0)]) - - set_curve_radius = nw.new_node(Nodes.SetCurveRadius, - input_kwargs={'Curve': nodegroup_nodegroup_nodegroup_scale_shape_011.outputs["Curve"], 'Radius': float_curve}) - - curve_circle = nw.new_node(Nodes.CurveCircle, - input_kwargs={'Resolution': 5, 'Radius': 0.02}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': set_curve_radius, 'Profile Curve': curve_circle.outputs["Curve"]}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': curve_to_mesh}) - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [join_geometry, nodegroup_nodegroup_nodegroup_scale_shape_011.outputs["Convex Hull"]]}) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': join_geometry_1, 'Rotation': group_input.outputs["Rotation"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': transform}) + + separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": position}) + + float_curve = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": separate_xyz.outputs["X"]} + ) + node_utils.assign_curve( + float_curve.mapping.curves[0], + [ + (0.0, 0.0), + (0.45229999999999998, 0.13439999999999999), + (0.92949999999999999, 0.1875), + (1.0, 0.0), + ], + ) + + set_curve_radius = nw.new_node( + Nodes.SetCurveRadius, + input_kwargs={ + "Curve": nodegroup_nodegroup_nodegroup_scale_shape_011.outputs["Curve"], + "Radius": float_curve, + }, + ) + + curve_circle = nw.new_node( + Nodes.CurveCircle, input_kwargs={"Resolution": 5, "Radius": 0.02} + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": set_curve_radius, + "Profile Curve": curve_circle.outputs["Curve"], + }, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": curve_to_mesh} + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={ + "Geometry": [ + join_geometry, + nodegroup_nodegroup_nodegroup_scale_shape_011.outputs["Convex Hull"], + ] + }, + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": join_geometry_1, + "Rotation": group_input.outputs["Rotation"], + }, + ) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Geometry": transform}) + def geometry_snake_scale(nw: NodeWrangler): # Code generated using version 2.4.3 of the node_transpiler - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None)]) - + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) + position = nw.new_node(Nodes.InputPosition) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': position}) - - greater_than = nw.new_node(Nodes.Math, + + separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": position}) + + greater_than = nw.new_node( + Nodes.Math, input_kwargs={0: separate_xyz.outputs["X"], 1: 0.0}, - attrs={'operation': 'GREATER_THAN'}) - - separate_geometry = nw.new_node(Nodes.SeparateGeometry, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 'Selection': greater_than}, - attrs={'domain': 'EDGE'}) - - 
distribute_points_on_faces = nw.new_node(Nodes.DistributePointsOnFaces, - input_kwargs={'Mesh': separate_geometry.outputs["Selection"], 'Distance Min': 0.03, 'Density Max': 10000.0}, - attrs={'distribute_method': 'POISSON'}) - - named_attribute = nw.new_node(Nodes.NamedAttribute, - input_kwargs={'Name': 'corner'}) - - named_attribute_1 = nw.new_node(Nodes.NamedAttribute, - input_kwargs={'Name': 'inside_mouth'}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: named_attribute.outputs[1], 1: named_attribute_1.outputs[1]}) - - separate_geometry_1 = nw.new_node(Nodes.SeparateGeometry, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 'Selection': add}) - - geometry_proximity = nw.new_node(Nodes.Proximity, - input_kwargs={'Target': separate_geometry_1.outputs["Selection"]}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': geometry_proximity.outputs["Distance"], 2: 0.050000000000000003}) - - greater_than_1 = nw.new_node(Nodes.Math, + attrs={"operation": "GREATER_THAN"}, + ) + + separate_geometry = nw.new_node( + Nodes.SeparateGeometry, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + "Selection": greater_than, + }, + attrs={"domain": "EDGE"}, + ) + + distribute_points_on_faces = nw.new_node( + Nodes.DistributePointsOnFaces, + input_kwargs={ + "Mesh": separate_geometry.outputs["Selection"], + "Distance Min": 0.03, + "Density Max": 10000.0, + }, + attrs={"distribute_method": "POISSON"}, + ) + + named_attribute = nw.new_node(Nodes.NamedAttribute, input_kwargs={"Name": "corner"}) + + named_attribute_1 = nw.new_node( + Nodes.NamedAttribute, input_kwargs={"Name": "inside_mouth"} + ) + + add = nw.new_node( + Nodes.Math, + input_kwargs={0: named_attribute.outputs[1], 1: named_attribute_1.outputs[1]}, + ) + + separate_geometry_1 = nw.new_node( + Nodes.SeparateGeometry, + input_kwargs={"Geometry": group_input.outputs["Geometry"], "Selection": add}, + ) + + geometry_proximity = nw.new_node( + Nodes.Proximity, + input_kwargs={"Target": separate_geometry_1.outputs["Selection"]}, + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": geometry_proximity.outputs["Distance"], + 2: 0.050000000000000003, + }, + ) + + greater_than_1 = nw.new_node( + Nodes.Math, input_kwargs={0: map_range.outputs["Result"], 1: 0.10000000000000001}, - attrs={'operation': 'GREATER_THAN'}) - - nodegroup_scale_1 = nw.new_node(nodegroup_scale().name, - input_kwargs={'Radius': 0.14999999999999999, 'thickness': 0.01, 'Rotation': (0.0, -0.017500000000000002, 0.0)}) - - set_shade_smooth = nw.new_node(Nodes.SetShadeSmooth, - input_kwargs={'Geometry': nodegroup_scale_1}) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': set_shade_smooth, 'Scale': (0.59999999999999998, 0.59999999999999998, 0.59999999999999998)}) - + attrs={"operation": "GREATER_THAN"}, + ) + + nodegroup_scale_1 = nw.new_node( + nodegroup_scale().name, + input_kwargs={ + "Radius": 0.14999999999999999, + "thickness": 0.01, + "Rotation": (0.0, -0.017500000000000002, 0.0), + }, + ) + + set_shade_smooth = nw.new_node( + Nodes.SetShadeSmooth, input_kwargs={"Geometry": nodegroup_scale_1} + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": set_shade_smooth, + "Scale": (0.59999999999999998, 0.59999999999999998, 0.59999999999999998), + }, + ) + normal = nw.new_node(Nodes.InputNormal) - - transfer_attribute = nw.new_node(Nodes.SampleNearestSurface, - input_kwargs={'Mesh': separate_geometry.outputs["Selection"], 'Value': normal}, - attrs={'data_type': 
'FLOAT_VECTOR'}) - - align_euler_to_vector = nw.new_node(Nodes.AlignEulerToVector, - input_kwargs={'Vector': (transfer_attribute, "Value")}, - attrs={'axis': 'Z'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': map_range.outputs["Result"], 'Y': map_range.outputs["Result"], 'Z': map_range.outputs["Result"]}) - - instance_on_points = nw.new_node(Nodes.InstanceOnPoints, - input_kwargs={'Points': distribute_points_on_faces.outputs["Points"], 'Selection': greater_than_1, 'Instance': transform, 'Rotation': align_euler_to_vector, 'Scale': combine_xyz}) - - distribute_points_on_faces_1 = nw.new_node(Nodes.DistributePointsOnFaces, - input_kwargs={'Mesh': separate_geometry.outputs["Inverted"], 'Distance Min': 0.02, 'Density Max': 10000.0}, - attrs={'distribute_method': 'POISSON'}) - - greater_than_2 = nw.new_node(Nodes.Math, + + transfer_attribute = nw.new_node( + Nodes.SampleNearestSurface, + input_kwargs={"Mesh": separate_geometry.outputs["Selection"], "Value": normal}, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + align_euler_to_vector = nw.new_node( + Nodes.AlignEulerToVector, + input_kwargs={"Vector": (transfer_attribute, "Value")}, + attrs={"axis": "Z"}, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": map_range.outputs["Result"], + "Y": map_range.outputs["Result"], + "Z": map_range.outputs["Result"], + }, + ) + + instance_on_points = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={ + "Points": distribute_points_on_faces.outputs["Points"], + "Selection": greater_than_1, + "Instance": transform, + "Rotation": align_euler_to_vector, + "Scale": combine_xyz, + }, + ) + + distribute_points_on_faces_1 = nw.new_node( + Nodes.DistributePointsOnFaces, + input_kwargs={ + "Mesh": separate_geometry.outputs["Inverted"], + "Distance Min": 0.02, + "Density Max": 10000.0, + }, + attrs={"distribute_method": "POISSON"}, + ) + + greater_than_2 = nw.new_node( + Nodes.Math, input_kwargs={0: map_range.outputs["Result"], 1: 0.10000000000000001}, - attrs={'operation': 'GREATER_THAN'}) - - nodegroup_scale_2 = nw.new_node(nodegroup_scale().name, - input_kwargs={'Radius': 0.070000000000000007, 'thickness': 0.0060000000000000001, 'Rotation': (0.0, -0.017500000000000002, 0.0)}) - - set_shade_smooth_1 = nw.new_node(Nodes.SetShadeSmooth, - input_kwargs={'Geometry': nodegroup_scale_2}) - + attrs={"operation": "GREATER_THAN"}, + ) + + nodegroup_scale_2 = nw.new_node( + nodegroup_scale().name, + input_kwargs={ + "Radius": 0.070000000000000007, + "thickness": 0.0060000000000000001, + "Rotation": (0.0, -0.017500000000000002, 0.0), + }, + ) + + set_shade_smooth_1 = nw.new_node( + Nodes.SetShadeSmooth, input_kwargs={"Geometry": nodegroup_scale_2} + ) + normal_1 = nw.new_node(Nodes.InputNormal) - - transfer_attribute_1 = nw.new_node(Nodes.SampleNearestSurface, - input_kwargs={'Mesh': separate_geometry.outputs["Inverted"], 'Value': normal_1}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - align_euler_to_vector_1 = nw.new_node(Nodes.AlignEulerToVector, - input_kwargs={'Vector': (transfer_attribute_1, "Value")}, - attrs={'axis': 'Z'}) - - instance_on_points_1 = nw.new_node(Nodes.InstanceOnPoints, - input_kwargs={'Points': distribute_points_on_faces_1.outputs["Points"], 'Selection': greater_than_2, 'Instance': set_shade_smooth_1, 'Rotation': align_euler_to_vector_1, 'Scale': combine_xyz}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [group_input.outputs["Geometry"], instance_on_points, instance_on_points_1]}) - - realize_instances = 
nw.new_node(Nodes.RealizeInstances, - input_kwargs={'Geometry': join_geometry}) - + + transfer_attribute_1 = nw.new_node( + Nodes.SampleNearestSurface, + input_kwargs={"Mesh": separate_geometry.outputs["Inverted"], "Value": normal_1}, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + align_euler_to_vector_1 = nw.new_node( + Nodes.AlignEulerToVector, + input_kwargs={"Vector": (transfer_attribute_1, "Value")}, + attrs={"axis": "Z"}, + ) + + instance_on_points_1 = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={ + "Points": distribute_points_on_faces_1.outputs["Points"], + "Selection": greater_than_2, + "Instance": set_shade_smooth_1, + "Rotation": align_euler_to_vector_1, + "Scale": combine_xyz, + }, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={ + "Geometry": [ + group_input.outputs["Geometry"], + instance_on_points, + instance_on_points_1, + ] + }, + ) + + realize_instances = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": join_geometry} + ) + position_1 = nw.new_node(Nodes.InputPosition) - - bounding_box = nw.new_node(Nodes.BoundingBox, input_kwargs={'Geometry': realize_instances}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Vector': position_1, 7: bounding_box.outputs["Min"], 8: bounding_box.outputs["Max"]}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - store_named_attribute = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': realize_instances, 'Name': 'Position', 2: map_range.outputs["Vector"]}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': store_named_attribute}, attrs={'is_active_output': True}) + + bounding_box = nw.new_node( + Nodes.BoundingBox, input_kwargs={"Geometry": realize_instances} + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Vector": position_1, + 7: bounding_box.outputs["Min"], + 8: bounding_box.outputs["Max"], + }, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + store_named_attribute = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": realize_instances, + "Name": "Position", + 2: map_range.outputs["Vector"], + }, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": store_named_attribute}, + attrs={"is_active_output": True}, + ) + def apply(obj, **kwargs): shader = snake_shaders.shaders.choose() rand = uniform() > 0.3 surface.add_geomod(obj, geometry_snake_scale) - surface.add_material(obj, shader, input_kwargs={'rand': rand}) \ No newline at end of file + surface.add_material(obj, shader, input_kwargs={"rand": rand}) diff --git a/infinigen/assets/materials/snake_shaders.py b/infinigen/assets/materials/snake_shaders.py index 5fe6ac9ca..6d0784fcd 100644 --- a/infinigen/assets/materials/snake_shaders.py +++ b/infinigen/assets/materials/snake_shaders.py @@ -5,46 +5,65 @@ # Acknowledgement: This file draws inspiration from https://www.youtube.com/watch?v=MP7EZCFrXek by blenderbitesize and https://www.youtube.com/watch?v=VPI9xq41nOk by Ryan King -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.assets.materials.utils.surface_utils import clip, sample_range, sample_ratio, sample_color, geo_voronoi_noise -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface import random +from infinigen.assets.materials.utils.surface_utils 
import ( + sample_color, +) +from infinigen.core import surface +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler + + def shader_black_white_snake(nw: NodeWrangler, rand=True): # Code generated using version 2.4.3 of the node_transpiler - attribute = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'Position'}) - - mapping = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': attribute}) - - wave_texture = nw.new_node(Nodes.WaveTexture, - input_kwargs={'Vector': mapping, 'Scale': 10.0, 'Distortion': 1.5, 'Detail': 5.0, 'Detail Roughness': 0.8}) - - mapping_1 = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': attribute}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': mapping_1, 'Scale': 3.0, 'Detail': 5.0}) - - mix = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': 0.8, 'Color1': noise_texture.outputs["Color"], 'Color2': mapping_1}) - - voronoi_texture = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': mix, 'Scale': 8.0}, - attrs={'feature': 'DISTANCE_TO_EDGE'}) - - mix_1 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': 0.2, 'Color1': wave_texture.outputs["Fac"], 'Color2': voronoi_texture.outputs["Distance"]}) - - colorramp = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': mix_1}) + attribute = nw.new_node(Nodes.Attribute, attrs={"attribute_name": "Position"}) + + mapping = nw.new_node(Nodes.Mapping, input_kwargs={"Vector": attribute}) + + wave_texture = nw.new_node( + Nodes.WaveTexture, + input_kwargs={ + "Vector": mapping, + "Scale": 10.0, + "Distortion": 1.5, + "Detail": 5.0, + "Detail Roughness": 0.8, + }, + ) + + mapping_1 = nw.new_node(Nodes.Mapping, input_kwargs={"Vector": attribute}) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={"Vector": mapping_1, "Scale": 3.0, "Detail": 5.0}, + ) + + mix = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": 0.8, + "Color1": noise_texture.outputs["Color"], + "Color2": mapping_1, + }, + ) + + voronoi_texture = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={"Vector": mix, "Scale": 8.0}, + attrs={"feature": "DISTANCE_TO_EDGE"}, + ) + + mix_1 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": 0.2, + "Color1": wave_texture.outputs["Fac"], + "Color2": voronoi_texture.outputs["Distance"], + }, + ) + + colorramp = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": mix_1}) colorramp.color_ramp.elements[0].position = 0.0 colorramp.color_ramp.elements[0].color = (1.0, 0.9647, 0.8308, 1.0) colorramp.color_ramp.elements[1].position = 0.0977 @@ -52,28 +71,46 @@ def shader_black_white_snake(nw: NodeWrangler, rand=True): if rand: for e in colorramp.color_ramp.elements: sample_color(e.color, offset=0.05, keep_sum=True) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': colorramp.outputs["Color"], 'Metallic': 0.6, 'Specular': 0.2, 'Roughness': 0.4}, - attrs={'subsurface_method': 'BURLEY'}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': principled_bsdf}) - + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": colorramp.outputs["Color"], + "Metallic": 0.6, + "Specular": 0.2, + "Roughness": 0.4, + }, + attrs={"subsurface_method": "BURLEY"}, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf} + ) + + def shader_brown(nw: NodeWrangler, rand=False): # Code generated using version 2.4.3 of the node_transpiler - attribute = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'Position'}) - - 
mapping_2 = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': attribute, 'Scale': (0.5, 1.0, 1.0)}) - - noise_texture_2 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': mapping_2, 'Scale': 10.0, 'Detail': 20.0, 'Roughness': 0.4, 'Distortion': 0.1}) - - colorramp_2 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': noise_texture_2.outputs["Fac"]}) + attribute = nw.new_node(Nodes.Attribute, attrs={"attribute_name": "Position"}) + + mapping_2 = nw.new_node( + Nodes.Mapping, input_kwargs={"Vector": attribute, "Scale": (0.5, 1.0, 1.0)} + ) + + noise_texture_2 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": mapping_2, + "Scale": 10.0, + "Detail": 20.0, + "Roughness": 0.4, + "Distortion": 0.1, + }, + ) + + colorramp_2 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": noise_texture_2.outputs["Fac"]} + ) colorramp_2.color_ramp.elements[0].position = 0.4045 colorramp_2.color_ramp.elements[0].color = (0.013, 0.0011, 0.0027, 1.0) colorramp_2.color_ramp.elements[1].position = 0.4568 @@ -81,43 +118,69 @@ def shader_brown(nw: NodeWrangler, rand=False): if rand: for e in colorramp_2.color_ramp.elements: sample_color(e.color, offset=0.05) - - principled_bsdf_1 = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': colorramp_2.outputs["Color"], 'Metallic': 0.4, 'Specular': 0.3, 'Roughness': 1}, - attrs={'subsurface_method': 'BURLEY'}) - - noise_texture_1 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': attribute, 'Scale': 0.4, 'Detail': 15.0}) - - mix_1 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': 0.95, 'Color1': noise_texture_1.outputs["Fac"], 'Color2': attribute}, - attrs={'blend_type': 'LINEAR_LIGHT'}) - - voronoi_texture_1 = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': mix_1, 'Scale': 4.0, 'Randomness': 3.0}) - - colorramp = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': voronoi_texture_1.outputs["Distance"]}) + + principled_bsdf_1 = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": colorramp_2.outputs["Color"], + "Metallic": 0.4, + "Specular": 0.3, + "Roughness": 1, + }, + attrs={"subsurface_method": "BURLEY"}, + ) + + noise_texture_1 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={"Vector": attribute, "Scale": 0.4, "Detail": 15.0}, + ) + + mix_1 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": 0.95, + "Color1": noise_texture_1.outputs["Fac"], + "Color2": attribute, + }, + attrs={"blend_type": "LINEAR_LIGHT"}, + ) + + voronoi_texture_1 = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={"Vector": mix_1, "Scale": 4.0, "Randomness": 3.0}, + ) + + colorramp = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": voronoi_texture_1.outputs["Distance"]} + ) colorramp.color_ramp.elements[0].position = 0.1614 colorramp.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) colorramp.color_ramp.elements[1].position = 0.3068 colorramp.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - - voronoi_texture_2 = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': mix_1, 'Scale': 10.0, 'Randomness': 3.0}) - - colorramp_3 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': voronoi_texture_2.outputs["Distance"]}) + + voronoi_texture_2 = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={"Vector": mix_1, "Scale": 10.0, "Randomness": 3.0}, + ) + + colorramp_3 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": voronoi_texture_2.outputs["Distance"]} + ) colorramp_3.color_ramp.elements[0].position = 0.1682 colorramp_3.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) 
colorramp_3.color_ramp.elements[1].position = 0.2864 colorramp_3.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - - mix_2 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': 0.0, 'Color1': colorramp.outputs["Color"], 'Color2': colorramp_3.outputs["Color"]}) - - colorramp_4 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': mix_2}) + + mix_2 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": 0.0, + "Color1": colorramp.outputs["Color"], + "Color2": colorramp_3.outputs["Color"], + }, + ) + + colorramp_4 = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": mix_2}) colorramp_4.color_ramp.elements.new(0) colorramp_4.color_ramp.elements.new(0) colorramp_4.color_ramp.elements[0].position = 0.0 @@ -131,49 +194,88 @@ def shader_brown(nw: NodeWrangler, rand=False): if rand: for e in colorramp_4.color_ramp.elements: sample_color(e.color, offset=0.2) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': colorramp_4.outputs["Color"], 'Metallic': 0.4, 'Roughness': 0.5}, - attrs={'subsurface_method': 'BURLEY'}) - - mix_shader = nw.new_node(Nodes.MixShader, - input_kwargs={'Fac': 0.3, 1: principled_bsdf_1, 2: principled_bsdf}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': mix_shader}) - + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": colorramp_4.outputs["Color"], + "Metallic": 0.4, + "Roughness": 0.5, + }, + attrs={"subsurface_method": "BURLEY"}, + ) + + mix_shader = nw.new_node( + Nodes.MixShader, + input_kwargs={"Fac": 0.3, 1: principled_bsdf_1, 2: principled_bsdf}, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": mix_shader} + ) + + def shader_golden(nw: NodeWrangler, rand=False): # Code generated using version 2.4.3 of the node_transpiler - attribute = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'Position'}) - - mapping = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': attribute, 'Scale': (0.5, 1.0, 1.0)}) - - wave_texture = nw.new_node(Nodes.WaveTexture, - input_kwargs={'Vector': mapping, 'Distortion': 1.0, 'Detail': 15.0, 'Detail Roughness': 0.8, 'Phase Offset': 2.0}) - - noise_texture_1 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': wave_texture.outputs["Color"], 'Scale': 1.5, 'Detail': 5.0, 'Roughness': 0.4}) - - mapping_1 = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': attribute}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': mapping_1, 'Scale': 3.0, 'Detail': 5.0}) - - mix = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': 0.8, 'Color1': noise_texture.outputs["Color"], 'Color2': mapping_1}) - - voronoi_texture = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': mix, 'Scale': 8.0}, - attrs={'feature': 'DISTANCE_TO_EDGE'}) - - mix_1 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': 0.1, 'Color1': noise_texture_1.outputs["Color"], 'Color2': voronoi_texture.outputs["Distance"]}) - - colorramp = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': mix_1}) + attribute = nw.new_node(Nodes.Attribute, attrs={"attribute_name": "Position"}) + + mapping = nw.new_node( + Nodes.Mapping, input_kwargs={"Vector": attribute, "Scale": (0.5, 1.0, 1.0)} + ) + + wave_texture = nw.new_node( + Nodes.WaveTexture, + input_kwargs={ + "Vector": mapping, + "Distortion": 1.0, + "Detail": 15.0, + "Detail Roughness": 0.8, + "Phase Offset": 2.0, + }, + ) + + noise_texture_1 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": wave_texture.outputs["Color"], + "Scale": 1.5, + 
"Detail": 5.0, + "Roughness": 0.4, + }, + ) + + mapping_1 = nw.new_node(Nodes.Mapping, input_kwargs={"Vector": attribute}) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={"Vector": mapping_1, "Scale": 3.0, "Detail": 5.0}, + ) + + mix = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": 0.8, + "Color1": noise_texture.outputs["Color"], + "Color2": mapping_1, + }, + ) + + voronoi_texture = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={"Vector": mix, "Scale": 8.0}, + attrs={"feature": "DISTANCE_TO_EDGE"}, + ) + + mix_1 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": 0.1, + "Color1": noise_texture_1.outputs["Color"], + "Color2": voronoi_texture.outputs["Distance"], + }, + ) + + colorramp = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": mix_1}) colorramp.color_ramp.elements[0].position = 0.4682 colorramp.color_ramp.elements[0].color = (0.017, 0.0094, 0.0033, 1.0) colorramp.color_ramp.elements[1].position = 1.0 @@ -181,89 +283,162 @@ def shader_golden(nw: NodeWrangler, rand=False): if rand: for e in colorramp.color_ramp.elements: sample_color(e.color, offset=0.05, keep_sum=True) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': colorramp.outputs["Color"], 'Metallic': 0.4, 'Specular': 0.2, 'Roughness': 0.4}, - attrs={'subsurface_method': 'BURLEY'}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': principled_bsdf}) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": colorramp.outputs["Color"], + "Metallic": 0.4, + "Specular": 0.2, + "Roughness": 0.4, + }, + attrs={"subsurface_method": "BURLEY"}, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf} + ) + def shader_green(nw: NodeWrangler, rand=True): # Code generated using version 2.4.3 of the node_transpiler - attribute = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'Position'}) - - mapping_3 = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': attribute}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': mapping_3}) - - colorramp_3 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': separate_xyz.outputs["Z"]}) + attribute = nw.new_node(Nodes.Attribute, attrs={"attribute_name": "Position"}) + + mapping_3 = nw.new_node(Nodes.Mapping, input_kwargs={"Vector": attribute}) + + separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": mapping_3}) + + colorramp_3 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": separate_xyz.outputs["Z"]} + ) colorramp_3.color_ramp.elements[0].position = 0.3864 colorramp_3.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) colorramp_3.color_ramp.elements[1].position = 0.6682 colorramp_3.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - - mapping_2 = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': attribute, 'Scale': (0.5, 1.0, 1.0)}) - - noise_texture_2 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': mapping_2, 'Scale': 10.0, 'Detail': 10.0, 'Roughness': 0.40000000000000002, 'Distortion': 0.10000000000000001}) - - colorramp_2 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': noise_texture_2.outputs["Fac"]}) + + mapping_2 = nw.new_node( + Nodes.Mapping, input_kwargs={"Vector": attribute, "Scale": (0.5, 1.0, 1.0)} + ) + + noise_texture_2 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": mapping_2, + "Scale": 10.0, + "Detail": 10.0, + "Roughness": 0.40000000000000002, + "Distortion": 0.10000000000000001, + }, + ) + + 
colorramp_2 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": noise_texture_2.outputs["Fac"]} + ) colorramp_2.color_ramp.elements.new(0) colorramp_2.color_ramp.elements[0].position = 0.2318 - colorramp_2.color_ramp.elements[0].color = (0.64449999999999996, 0.52710000000000001, 0.0011999999999999999, 1.0) + colorramp_2.color_ramp.elements[0].color = ( + 0.64449999999999996, + 0.52710000000000001, + 0.0011999999999999999, + 1.0, + ) colorramp_2.color_ramp.elements[1].position = 0.375 - colorramp_2.color_ramp.elements[1].color = (0.050299999999999997, 0.033799999999999997, 0.0071999999999999998, 1.0) + colorramp_2.color_ramp.elements[1].color = ( + 0.050299999999999997, + 0.033799999999999997, + 0.0071999999999999998, + 1.0, + ) colorramp_2.color_ramp.elements[2].position = 0.45 colorramp_2.color_ramp.elements[2].color = (0.0172, 0.040599999999999997, 0.0, 1.0) if rand: for e in colorramp_2.color_ramp.elements: sample_color(e.color, offset=0.1, keep_sum=True) - - principled_bsdf_1 = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': colorramp_2.outputs["Color"], 'Metallic': 0.40000000000000002, 'Roughness': 0.27000000000000002}, - attrs={'subsurface_method': 'BURLEY'}) - - mapping = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': attribute, 'Scale': (0.5, 1.0, 1.0)}) - - wave_texture = nw.new_node(Nodes.WaveTexture, - input_kwargs={'Vector': mapping, 'Distortion': 1.0, 'Detail': 15.0, 'Detail Roughness': 0.80000000000000004, 'Phase Offset': 2.0}) - - noise_texture_1 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': wave_texture.outputs["Color"], 'Scale': 1.3999999999999999, 'Detail': 5.0, 'Roughness': 0.40000000000000002}) - - mapping_1 = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': attribute}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': mapping_1, 'Scale': 3.0, 'Detail': 5.0}) - - mix = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': 0.80000000000000004, 'Color1': noise_texture.outputs["Color"], 'Color2': mapping_1}) - - voronoi_texture = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': mix, 'Scale': 8.0}, - attrs={'feature': 'DISTANCE_TO_EDGE'}) - - mix_1 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': 0.10000000000000001, 'Color1': noise_texture_1.outputs["Color"], 'Color2': voronoi_texture.outputs["Distance"]}) - - colorramp = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': mix_1}) + + principled_bsdf_1 = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": colorramp_2.outputs["Color"], + "Metallic": 0.40000000000000002, + "Roughness": 0.27000000000000002, + }, + attrs={"subsurface_method": "BURLEY"}, + ) + + mapping = nw.new_node( + Nodes.Mapping, input_kwargs={"Vector": attribute, "Scale": (0.5, 1.0, 1.0)} + ) + + wave_texture = nw.new_node( + Nodes.WaveTexture, + input_kwargs={ + "Vector": mapping, + "Distortion": 1.0, + "Detail": 15.0, + "Detail Roughness": 0.80000000000000004, + "Phase Offset": 2.0, + }, + ) + + noise_texture_1 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": wave_texture.outputs["Color"], + "Scale": 1.3999999999999999, + "Detail": 5.0, + "Roughness": 0.40000000000000002, + }, + ) + + mapping_1 = nw.new_node(Nodes.Mapping, input_kwargs={"Vector": attribute}) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={"Vector": mapping_1, "Scale": 3.0, "Detail": 5.0}, + ) + + mix = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": 0.80000000000000004, + "Color1": noise_texture.outputs["Color"], + "Color2": mapping_1, + }, + ) 
+ + voronoi_texture = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={"Vector": mix, "Scale": 8.0}, + attrs={"feature": "DISTANCE_TO_EDGE"}, + ) + + mix_1 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": 0.10000000000000001, + "Color1": noise_texture_1.outputs["Color"], + "Color2": voronoi_texture.outputs["Distance"], + }, + ) + + colorramp = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": mix_1}) colorramp.color_ramp.elements.new(0) colorramp.color_ramp.elements.new(0) colorramp.color_ramp.elements[0].position = 0.2818 - colorramp.color_ramp.elements[0].color = (0.76819999999999999, 0.78349999999999997, 0.76049999999999995, 1.0) + colorramp.color_ramp.elements[0].color = ( + 0.76819999999999999, + 0.78349999999999997, + 0.76049999999999995, + 1.0, + ) colorramp.color_ramp.elements[1].position = 0.4295 - colorramp.color_ramp.elements[1].color = (0.0012999999999999999, 0.0012999999999999999, 0.0012999999999999999, 1.0) + colorramp.color_ramp.elements[1].color = ( + 0.0012999999999999999, + 0.0012999999999999999, + 0.0012999999999999999, + 1.0, + ) colorramp.color_ramp.elements[2].position = 0.5068 colorramp.color_ramp.elements[2].color = (0.0095999999999999992, 0.0149, 0.0, 1.0) colorramp.color_ramp.elements[3].position = 0.6727 @@ -271,16 +446,30 @@ def shader_green(nw: NodeWrangler, rand=True): if rand: for e in colorramp.color_ramp.elements: sample_color(e.color, keep_sum=True) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': colorramp.outputs["Color"], 'Metallic': 0.40000000000000002, 'Roughness': 0.27000000000000002}, - attrs={'subsurface_method': 'BURLEY'}) - - mix_shader = nw.new_node(Nodes.MixShader, - input_kwargs={'Fac': colorramp_3.outputs["Color"], 1: principled_bsdf_1, 2: principled_bsdf}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': mix_shader}) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": colorramp.outputs["Color"], + "Metallic": 0.40000000000000002, + "Roughness": 0.27000000000000002, + }, + attrs={"subsurface_method": "BURLEY"}, + ) + + mix_shader = nw.new_node( + Nodes.MixShader, + input_kwargs={ + "Fac": colorramp_3.outputs["Color"], + 1: principled_bsdf_1, + 2: principled_bsdf, + }, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": mix_shader} + ) + def shader_shining_golden(nw: NodeWrangler, rand=True): # Code generated using version 2.4.3 of the node_transpiler @@ -288,19 +477,29 @@ def shader_shining_golden(nw: NodeWrangler, rand=True): base_color = [0.8, 0.2227, 0.0326, 1.0] if rand: base_color = sample_color(base_color, keep_sum=True) - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': base_color, 'Metallic': 0.6, 'Roughness': 0.27}, - attrs={'subsurface_method': 'BURLEY'}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': principled_bsdf}) + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={"Base Color": base_color, "Metallic": 0.6, "Roughness": 0.27}, + attrs={"subsurface_method": "BURLEY"}, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf} + ) + class shaders: def choose(): - choices = [shader_black_white_snake, shader_shining_golden, shader_golden, shader_green] + choices = [ + shader_black_white_snake, + shader_shining_golden, + shader_golden, + shader_green, + ] # choices = [shader_green] return random.choice(choices) + def apply(obj, selection=None, 
**kwargs): shader = shaders.choose() surface.add_material(obj, shader, selection=selection) diff --git a/infinigen/assets/materials/snow.py b/infinigen/assets/materials/snow.py index 2baf3160f..49d764aa6 100644 --- a/infinigen/assets/materials/snow.py +++ b/infinigen/assets/materials/snow.py @@ -2,8 +2,8 @@ # adapted from Blender Real Snow add-on https://docs.blender.org/manual/en/latest/addons/object/real_snow.html # License: GPL -from infinigen.core.nodes.node_wrangler import Nodes from infinigen.core import surface +from infinigen.core.nodes.node_wrangler import Nodes from infinigen.core.util.organization import SurfaceTypes type = SurfaceTypes.SDFPerturb @@ -13,46 +13,46 @@ def shader_snow(nw, subsurface=1.0, **kwargs): nw.force_input_consistency() - position = nw.new_node('ShaderNodeNewGeometry', []) + position = nw.new_node("ShaderNodeNewGeometry", []) combine_xyz = nw.new_node( - Nodes.CombineXYZ, - input_kwargs={'X': 0.36, 'Y': 0.46, 'Z': 0.6} + Nodes.CombineXYZ, input_kwargs={"X": 0.36, "Y": 0.46, "Z": 0.6} ) vector_math = nw.new_node( Nodes.VectorMath, input_kwargs={0: combine_xyz, 1: (0.5, 0.5, 0.5)}, - attrs={'operation': 'MULTIPLY'} + attrs={"operation": "MULTIPLY"}, ) mapping = nw.new_node( - Nodes.Mapping, - input_kwargs={'Vector': position, 'Scale': (12.0, 12.0, 12.0)} + Nodes.Mapping, input_kwargs={"Vector": position, "Scale": (12.0, 12.0, 12.0)} ) voronoi_texture = nw.new_node( Nodes.VoronoiTexture, - input_kwargs={'Vector': mapping, 'Scale': 30.0}, - attrs={'feature': 'N_SPHERE_RADIUS'} + input_kwargs={"Vector": mapping, "Scale": 30.0}, + attrs={"feature": "N_SPHERE_RADIUS"}, + ) + colorramp = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": voronoi_texture.outputs["Radius"]} ) - colorramp = nw.new_node(Nodes.ColorRamp, input_kwargs={'Fac': voronoi_texture.outputs["Radius"]}) colorramp.color_ramp.elements[0].position = 0.525 colorramp.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) colorramp.color_ramp.elements[1].position = 0.58 colorramp.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - + principled_bsdf = nw.new_node( Nodes.PrincipledBSDF, input_kwargs={ - 'Base Color': (0.904, 0.904, 0.904, 1.0), - 'Subsurface': subsurface, - 'Subsurface Radius': vector_math.outputs["Vector"], - 'Subsurface Color': (0.904, 0.904, 0.904, 1.0), - 'Specular': 0.224, - 'Roughness': 0.1, - 'Clearcoat': colorramp.outputs["Color"], - 'Clearcoat Roughness': 0.1, + "Base Color": (0.904, 0.904, 0.904, 1.0), + "Subsurface": subsurface, + "Subsurface Radius": vector_math.outputs["Vector"], + "Subsurface Color": (0.904, 0.904, 0.904, 1.0), + "Specular": 0.224, + "Roughness": 0.1, + "Clearcoat": colorramp.outputs["Color"], + "Clearcoat Roughness": 0.1, }, - attrs={'distribution': 'MULTI_GGX'} + attrs={"distribution": "MULTI_GGX"}, ) - + return principled_bsdf @@ -61,38 +61,35 @@ def geo_snowtexture(nw, selection=None, **kwargs): group_input = nw.new_node(Nodes.GroupInput) normal_dir = nw.new_node(Nodes.InputNormal) position0 = nw.new_node(Nodes.InputPosition) - position = nw.multiply(position0, [12]*3) - + position = nw.multiply(position0, [12] * 3) + noise_texture = nw.new_node( Nodes.NoiseTexture, - input_kwargs={'Vector': position, 'Scale': 12.0, 'Detail': 2} + input_kwargs={"Vector": position, "Scale": 12.0, "Detail": 2}, ) - + noise_texture_1 = nw.new_node( - Nodes.NoiseTexture, - input_kwargs={'Vector': position, 'Scale': 2.0, 'Detail': 4} + Nodes.NoiseTexture, input_kwargs={"Vector": position, "Scale": 2.0, "Detail": 4} ) colorramp_1 = nw.new_node( - Nodes.ColorRamp, - 
input_kwargs={'Fac': noise_texture_1.outputs["Fac"]} + Nodes.ColorRamp, input_kwargs={"Fac": noise_texture_1.outputs["Fac"]} ) colorramp_1.color_ramp.elements[0].position = 0.069 colorramp_1.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) colorramp_1.color_ramp.elements[1].position = 0.757 colorramp_1.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - + noise_texture_2 = nw.new_node( - Nodes.NoiseTexture, - input_kwargs={'Vector': position, 'Scale': 1.0, 'Detail': 4} + Nodes.NoiseTexture, input_kwargs={"Vector": position, "Scale": 1.0, "Detail": 4} ) - colorramp_2 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': noise_texture_2.outputs["Fac"]} + colorramp_2 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": noise_texture_2.outputs["Fac"]} ) colorramp_2.color_ramp.elements[0].position = 0.069 colorramp_2.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) colorramp_2.color_ramp.elements[1].position = 0.757 colorramp_2.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - + height = nw.scalar_add( nw.scalar_multiply(0.6, noise_texture), nw.scalar_multiply(0.4, colorramp_1), @@ -101,34 +98,33 @@ def geo_snowtexture(nw, selection=None, **kwargs): map_range = nw.new_node( Nodes.MapRange, - input_kwargs={'Value': height, 1: 0.0, 2: 2.0, 3: -0.03, 4: 0.03} + input_kwargs={"Value": height, 1: 0.0, 2: 2.0, 3: -0.03, 4: 0.03}, ) - + modulation = nw.new_node( - Nodes.NoiseTexture, - input_kwargs={'Vector': position0, 'Scale': 0.5} - ) - colorramp_3 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': modulation} + Nodes.NoiseTexture, input_kwargs={"Vector": position0, "Scale": 0.5} ) + colorramp_3 = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": modulation}) colorramp_3.color_ramp.elements[0].position = 0.25 colorramp_3.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) colorramp_3.color_ramp.elements[1].position = 0.75 colorramp_3.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - offset = nw.multiply(normal_dir, map_range, colorramp_3) - + if selection is not None: offset = nw.multiply(offset, surface.eval_argument(nw, selection)) - + set_position = nw.new_node( Nodes.SetPosition, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 'Offset': offset} + input_kwargs={"Geometry": group_input.outputs["Geometry"], "Offset": offset}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_position} ) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': set_position}) + def apply(objs, selection=None, **kwargs): surface.add_geomod(objs, geo_snowtexture, selection=selection) - surface.add_material(objs, shader_snow, selection=selection, input_kwargs=kwargs) \ No newline at end of file + surface.add_material(objs, shader_snow, selection=selection, input_kwargs=kwargs) diff --git a/infinigen/assets/materials/soil.py b/infinigen/assets/materials/soil.py index 02029a439..6a02112d5 100644 --- a/infinigen/assets/materials/soil.py +++ b/infinigen/assets/materials/soil.py @@ -5,18 +5,20 @@ import gin -from infinigen.core.nodes import node_utils -from infinigen.core.nodes.node_wrangler import Nodes from numpy.random import uniform + from infinigen.core import surface -from infinigen.core.util.organization import SurfaceTypes +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes from infinigen.core.util.math import FixedSeed +from infinigen.core.util.organization import SurfaceTypes from infinigen.core.util.random import random_color_neighbour type = SurfaceTypes.SDFPerturb mod_name = 
"geometry_soil" name = "soil" + @node_utils.to_nodegroup( "nodegroup_displacement_to_offset", singleton=False, type="GeometryNodeTree" ) @@ -55,7 +57,7 @@ def nodegroup_displacement_to_offset(nw): def nodegroup_pebble(nw): if nw.node_group.type == "SHADER": - position = nw.new_node('ShaderNodeNewGeometry') + position = nw.new_node("ShaderNodeNewGeometry") else: position = nw.new_node(Nodes.InputPosition) @@ -110,13 +112,13 @@ def nodegroup_pebble(nw): ) - @node_utils.to_nodegroup("nodegroup_pebble", singleton=False) def nodegroup_pebble_geo(nw): nw.force_input_consistency() nodegroup_pebble(nw) -@node_utils.to_nodegroup("nodegroup_pebble", singleton=False, type='ShaderNodeTree') + +@node_utils.to_nodegroup("nodegroup_pebble", singleton=False, type="ShaderNodeTree") def nodegroup_pebble_shader(nw): nw.force_input_consistency() nodegroup_pebble(nw) @@ -127,8 +129,12 @@ def shader_soil(nw, random_seed=0): big_stone = geometry_soil(nw, random_seed=random_seed, geometry=False) # Code generated using version 2.3.1 of the node_transpiler darkness = 1.5 - soil_col_1 = random_color_neighbour((0.28 / darkness, 0.11 / darkness, 0.042 / darkness, 1.0), 0.05, 0.1, 0.1) - soil_col_2 = random_color_neighbour((0.22 / darkness , 0.0906 / darkness , 0.035 / darkness, 1.0), 0.05, 0.1, 0.1) + soil_col_1 = random_color_neighbour( + (0.28 / darkness, 0.11 / darkness, 0.042 / darkness, 1.0), 0.05, 0.1, 0.1 + ) + soil_col_2 = random_color_neighbour( + (0.22 / darkness, 0.0906 / darkness, 0.035 / darkness, 1.0), 0.05, 0.1, 0.1 + ) peb_col_1 = random_color_neighbour((0.3813, 0.1714, 0.0782, 1.0), 0.1, 0.1, 0.1) peb_col_2 = random_color_neighbour((0.314, 0.1274, 0.0578, 1.0), 0.1, 0.1, 0.1) @@ -165,9 +171,7 @@ def shader_soil(nw, random_seed=0): colorramp.color_ramp.elements[1].position = 1.0 colorramp.color_ramp.elements[1].color = soil_col_2 - colorramp_3 = nw.new_node( - Nodes.ColorRamp, input_kwargs={"Fac": big_stone} - ) + colorramp_3 = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": big_stone}) colorramp_3.color_ramp.elements[0].position = 0.0 colorramp_3.color_ramp.elements[0].color = peb_col_1 colorramp_3.color_ramp.elements[1].position = 1.0 @@ -211,12 +215,13 @@ def shader_soil(nw, random_seed=0): return principled_bsdf_4 + @gin.configurable def geometry_soil(nw, selection=None, random_seed=0, geometry=True): nw.force_input_consistency() if nw.node_group.type == "SHADER": - position = nw.new_node('ShaderNodeNewGeometry') - normal = (nw.new_node('ShaderNodeNewGeometry'), 1) + position = nw.new_node("ShaderNodeNewGeometry") + normal = (nw.new_node("ShaderNodeNewGeometry"), 1) else: position = nw.new_node(Nodes.InputPosition) normal = nw.new_node(Nodes.InputNormal) @@ -225,17 +230,24 @@ def geometry_soil(nw, selection=None, random_seed=0, geometry=True): # Code generated using version 2.3.1 of the node_transpiler peb1_size = nw.new_value(uniform(2.0, 5.0), "peb1_size ~ U(2, 5)") - peb1_noise_mag = nw.new_value((1 / peb1_size.outputs[0].default_value) * uniform(1.5, 2), "peb1_noise_mag ~ U(0.1, 0.5)") + peb1_noise_mag = nw.new_value( + (1 / peb1_size.outputs[0].default_value) * uniform(1.5, 2), + "peb1_noise_mag ~ U(0.1, 0.5)", + ) group = nw.new_node( - nodegroup_pebble_geo().name if nw.node_group.type != "SHADER" else nodegroup_pebble_shader().name, + nodegroup_pebble_geo().name + if nw.node_group.type != "SHADER" + else nodegroup_pebble_shader().name, input_kwargs={"PebbleScale": peb1_size, "NoiseMag": peb1_noise_mag}, ) peb1_roundness = uniform(0.5, 1.0) peb1_amount = uniform(0.2, 0.5) - colorramp = 
nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": group}, label="colorramp_VAR") + colorramp = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": group}, label="colorramp_VAR" + ) colorramp.color_ramp.elements[0].position = 0.0 colorramp.color_ramp.elements[0].color = ( peb1_roundness, @@ -255,16 +267,23 @@ def geometry_soil(nw, selection=None, random_seed=0, geometry=True): colorramp.color_ramp.elements[2].color = (0.0, 0.0, 0.0, 1.0) peb2_size = nw.new_value(uniform(5, 9), "peb2_size ~ U(5, 9)") - peb2_noise_scale = nw.new_value((1 / peb2_size.outputs[0].default_value) * uniform(1.5, 2), "peb2_noise_scale ~ U(0.05, 0.2)") + peb2_noise_scale = nw.new_value( + (1 / peb2_size.outputs[0].default_value) * uniform(1.5, 2), + "peb2_noise_scale ~ U(0.05, 0.2)", + ) group_3 = nw.new_node( - nodegroup_pebble_geo().name if nw.node_group.type != "SHADER" else nodegroup_pebble_shader().name, + nodegroup_pebble_geo().name + if nw.node_group.type != "SHADER" + else nodegroup_pebble_shader().name, input_kwargs={"PebbleScale": peb2_size, "NoiseMag": peb2_noise_scale}, ) peb2_roundness = uniform(0.3, 0.8) peb2_amount = uniform(0.2, 0.5) - colorramp_2 = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": group_3}, label="colorramp_2_VAR") + colorramp_2 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": group_3}, label="colorramp_2_VAR" + ) colorramp_2.color_ramp.elements[0].position = 0.0 colorramp_2.color_ramp.elements[0].color = ( peb2_roundness, @@ -285,16 +304,23 @@ def geometry_soil(nw, selection=None, random_seed=0, geometry=True): add = nw.new_node( Nodes.Math, - input_kwargs={0: colorramp.outputs["Color"], 1: colorramp_2.outputs["Color"]}, + input_kwargs={ + 0: colorramp.outputs["Color"], + 1: colorramp_2.outputs["Color"], + }, ) big_stone = colorramp peb3_size = nw.new_value(uniform(12.0, 18.0), "peb3_size ~ U(12, 18)") - peb3_noise_scale = nw.new_value(uniform(0.05, 0.35), "peb3_noise_scale ~ U(0.05, 0.35)") + peb3_noise_scale = nw.new_value( + uniform(0.05, 0.35), "peb3_noise_scale ~ U(0.05, 0.35)" + ) group_2 = nw.new_node( - nodegroup_pebble_geo().name if nw.node_group.type != "SHADER" else nodegroup_pebble_shader().name, + nodegroup_pebble_geo().name + if nw.node_group.type != "SHADER" + else nodegroup_pebble_shader().name, input_kwargs={"PebbleScale": peb3_size, "NoiseMag": peb3_noise_scale}, ) @@ -316,14 +342,14 @@ def geometry_soil(nw, selection=None, random_seed=0, geometry=True): groupinput = nw.new_node(Nodes.GroupInput) if selection is not None: offset = nw.multiply(offset, surface.eval_argument(nw, selection)) - set_position = nw.new_node(Nodes.SetPosition, input_kwargs={"Geometry": groupinput, "Offset": offset}) - nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': set_position}) + set_position = nw.new_node( + Nodes.SetPosition, input_kwargs={"Geometry": groupinput, "Offset": offset} + ) + nw.new_node(Nodes.GroupOutput, input_kwargs={"Geometry": set_position}) else: return big_stone def apply(obj, selection=None, **kwargs): - surface.add_geomod( - obj, geometry_soil, selection=selection - ) + surface.add_geomod(obj, geometry_soil, selection=selection) surface.add_material(obj, shader_soil, selection=selection) diff --git a/infinigen/assets/materials/spider_plant.py b/infinigen/assets/materials/spider_plant.py index 1d920761b..b9b615ed1 100644 --- a/infinigen/assets/materials/spider_plant.py +++ b/infinigen/assets/materials/spider_plant.py @@ -3,11 +3,11 @@ # Authors: Beining Han -from numpy.random import uniform, normal , randint -from infinigen.core.nodes.node_wrangler 
import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category, hsv2rgba +from numpy.random import uniform + from infinigen.core import surface +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.util.color import hsv2rgba def shader_spider_plant(nw: NodeWrangler): @@ -16,19 +16,27 @@ def shader_spider_plant(nw: NodeWrangler): main_hsv_color = (uniform(0.18, 0.36), uniform(0.70, 0.90), uniform(0.2, 0.3)) main_color = hsv2rgba(main_hsv_color) - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': main_color, 'Subsurface IOR': 1.01, - 'Roughness': 2.0}) + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": main_color, + "Subsurface IOR": 1.01, + "Roughness": 2.0, + }, + ) - translucent_bsdf = nw.new_node(Nodes.TranslucentBSDF, - input_kwargs={'Color': main_color}) + translucent_bsdf = nw.new_node( + Nodes.TranslucentBSDF, input_kwargs={"Color": main_color} + ) - mix_shader = nw.new_node(Nodes.MixShader, - input_kwargs={1: principled_bsdf, 2: translucent_bsdf}) + mix_shader = nw.new_node( + Nodes.MixShader, input_kwargs={1: principled_bsdf, 2: translucent_bsdf} + ) - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': mix_shader}) + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": mix_shader} + ) def apply(obj, selection=None, **kwargs): - surface.add_material(obj, shader_spider_plant, selection=selection) \ No newline at end of file + surface.add_material(obj, shader_spider_plant, selection=selection) diff --git a/infinigen/assets/materials/spot_sparse_attr.py b/infinigen/assets/materials/spot_sparse_attr.py index 610f452b8..56279c54f 100644 --- a/infinigen/assets/materials/spot_sparse_attr.py +++ b/infinigen/assets/materials/spot_sparse_attr.py @@ -4,28 +4,31 @@ # Authors: Mingzhe Wang -import os, sys -import numpy as np -import math as ma -from infinigen.assets.materials.utils.surface_utils import clip, sample_range, sample_ratio, sample_color, geo_voronoi_noise +import os + import bpy -import mathutils -from numpy.random import uniform as U, normal as N, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category, hsv2rgba +import numpy as np +from numpy.random import normal as N +from numpy.random import uniform as U + +from infinigen.assets.materials.utils.surface_utils import ( + sample_range, + sample_ratio, +) +from infinigen.assets.utils.nodegroups.shader import nodegroup_color_mask from infinigen.core import surface +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.util.color import hsv2rgba -from infinigen.assets.creatures.util.nodegroups.shader import nodegroup_color_mask def shader_spots_sparse_attr(nw: NodeWrangler, rand=True, **input_kwargs): # Code generated using version 2.4.3 of the node_transpiler - attribute = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'offset'}) - - colorramp = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': attribute.outputs["Fac"]}) + attribute = nw.new_node(Nodes.Attribute, attrs={"attribute_name": "offset"}) + + colorramp = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": attribute.outputs["Fac"]} + ) colorramp.color_ramp.elements.new(0) colorramp.color_ramp.elements[0].position = 0.0 colorramp.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) @@ -33,68 
+36,99 @@ def shader_spots_sparse_attr(nw: NodeWrangler, rand=True, **input_kwargs): colorramp.color_ramp.elements[1].color = (0.0942, 0.0942, 0.0942, 1.0) colorramp.color_ramp.elements[2].position = 0.5 colorramp.color_ramp.elements[2].color = (1.0, 1.0, 1.0, 1.0) - + group = nw.new_node(nodegroup_color_mask().name) - - getcolor = lambda: hsv2rgba((U(0.02, 0.06), U(0.05, 0.9), np.abs(N(0.05, 0.1)))) - colorramp_3 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': group}) + def getcolor(): + return hsv2rgba((U(0.02, 0.06), U(0.05, 0.9), np.abs(N(0.05, 0.1)))) + + colorramp_3 = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": group}) colorramp_3.color_ramp.elements[0].position = 0.0 colorramp_3.color_ramp.elements[0].color = getcolor() colorramp_3.color_ramp.elements[1].position = 1.0 - colorramp_3.color_ramp.elements[1].color = hsv2rgba((U(0.02, 0.06), U(0.4, 0.8), U(0.15, 0.7))) + colorramp_3.color_ramp.elements[1].color = hsv2rgba( + (U(0.02, 0.06), U(0.4, 0.8), U(0.15, 0.7)) + ) if rand: colorramp_3.color_ramp.elements[0].position = sample_range(0, 0.5) colorramp_3.color_ramp.elements[0].color = getcolor() - #sample_color(colorramp_3.color_ramp.elements[1].color) + # sample_color(colorramp_3.color_ramp.elements[1].color) - mix = nw.new_node(Nodes.MixRGB, + mix = nw.new_node( + Nodes.MixRGB, input_kwargs={ - 'Fac': colorramp.outputs["Color"] if U() < 0.6 else 1, - 'Color1': (0.024, 0.0499, 0.0168, 1.0), - 'Color2': colorramp_3.outputs["Color"] - }) + "Fac": colorramp.outputs["Color"] if U() < 0.6 else 1, + "Color1": (0.024, 0.0499, 0.0168, 1.0), + "Color2": colorramp_3.outputs["Color"], + }, + ) if rand: mix.inputs[6].default_value = getcolor() - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': mix, 'Specular': 0.0, 'Roughness': colorramp.outputs["Color"]}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': principled_bsdf}) + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": mix, + "Specular": 0.0, + "Roughness": colorramp.outputs["Color"], + }, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf} + ) + def geometry_spots_sparse(nw: NodeWrangler, rand=True, **input_kwargs): # Code generated using version 2.4.3 of the node_transpiler - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None)]) - + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) + position = nw.new_node(Nodes.InputPosition) - + value = nw.new_node(Nodes.Value) value.outputs[0].default_value = U(0.1, 1) - - add = nw.new_node(Nodes.VectorMath, - input_kwargs={0: position, 1: value}, attrs={'operation': 'MULTIPLY'}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': add.outputs["Vector"]}) - - mix = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': 0.1, 'Color1': add.outputs["Vector"], 'Color2': noise_texture.outputs["Fac"]}) - - voronoi_texture = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': mix, 'Scale': sample_ratio(8, 0.5, 2) if rand else 8.0}) - - noise_texture_1 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': add.outputs["Vector"], 'Scale': 15.0, 'Roughness': 1.0}) - - mix_1 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Color1': voronoi_texture.outputs["Distance"], 'Color2': noise_texture_1.outputs["Fac"]}) - - colorramp = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': mix_1}) + + add = nw.new_node( + 
Nodes.VectorMath, + input_kwargs={0: position, 1: value}, + attrs={"operation": "MULTIPLY"}, + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, input_kwargs={"Vector": add.outputs["Vector"]} + ) + + mix = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": 0.1, + "Color1": add.outputs["Vector"], + "Color2": noise_texture.outputs["Fac"], + }, + ) + + voronoi_texture = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={"Vector": mix, "Scale": sample_ratio(8, 0.5, 2) if rand else 8.0}, + ) + + noise_texture_1 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={"Vector": add.outputs["Vector"], "Scale": 15.0, "Roughness": 1.0}, + ) + + mix_1 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Color1": voronoi_texture.outputs["Distance"], + "Color2": noise_texture_1.outputs["Fac"], + }, + ) + + colorramp = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": mix_1}) colorramp.color_ramp.elements.new(0) colorramp.color_ramp.elements[0].position = 0.0 colorramp.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) @@ -102,45 +136,75 @@ def geometry_spots_sparse(nw: NodeWrangler, rand=True, **input_kwargs): colorramp.color_ramp.elements[1].color = (0.0953, 0.0953, 0.0953, 1.0) colorramp.color_ramp.elements[2].position = 0.8091 colorramp.color_ramp.elements[2].color = (1.0, 1.0, 1.0, 1.0) - - subtract = nw.new_node(Nodes.Math, + + subtract = nw.new_node( + Nodes.Math, input_kwargs={0: 1.0, 1: colorramp.outputs["Color"]}, - attrs={'operation': 'SUBTRACT'}) - + attrs={"operation": "SUBTRACT"}, + ) + normal = nw.new_node(Nodes.InputNormal) - - multiply = nw.new_node(Nodes.VectorMath, + + multiply = nw.new_node( + Nodes.VectorMath, input_kwargs={0: subtract, 1: normal}, - attrs={'operation': 'MULTIPLY'}) - + attrs={"operation": "MULTIPLY"}, + ) + value_1 = nw.new_node(Nodes.Value) value_1.outputs[0].default_value = 0 - - multiply_1 = nw.new_node(Nodes.VectorMath, + + multiply_1 = nw.new_node( + Nodes.VectorMath, input_kwargs={0: multiply.outputs["Vector"], 1: value_1}, - attrs={'operation': 'MULTIPLY'}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 'Offset': multiply_1.outputs["Vector"]}) - - capture_attribute = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': set_position, 1: mix_1}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': capture_attribute.outputs["Geometry"], 'Attribute': capture_attribute.outputs["Attribute"]}) + attrs={"operation": "MULTIPLY"}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + "Offset": multiply_1.outputs["Vector"], + }, + ) + + capture_attribute = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={"Geometry": set_position, 1: mix_1}, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": capture_attribute.outputs["Geometry"], + "Attribute": capture_attribute.outputs["Attribute"], + }, + ) + def apply(obj, geo_kwargs=None, shader_kwargs=None, **kwargs): - surface.add_geomod(obj, geometry_spots_sparse, input_kwargs=geo_kwargs, attributes=['offset']) - surface.add_material(obj, shader_spots_sparse_attr, reuse=False, input_kwargs=shader_kwargs) + surface.add_geomod( + obj, geometry_spots_sparse, input_kwargs=geo_kwargs, attributes=["offset"] + ) + surface.add_material( + obj, shader_spots_sparse_attr, reuse=False, input_kwargs=shader_kwargs + ) + if __name__ == "__main__": for 
i in range(1): - bpy.ops.wm.open_mainfile(filepath='dev_scene_1019.blend') - #creature(73349, 0).parts(0, factory=QuadrupedBody) - apply(bpy.data.objects['creature(73349, 0).parts(0, factory=QuadrupedBody)'], geo_kwargs={'rand': True}, shader_kwargs={'rand': True}) - fn = os.path.join(os.path.abspath(os.curdir), 'dev_scene_test_spot_sparse.blend') + bpy.ops.wm.open_mainfile(filepath="dev_scene_1019.blend") + # creature(73349, 0).parts(0, factory=QuadrupedBody) + apply( + bpy.data.objects["creature(73349, 0).parts(0, factory=QuadrupedBody)"], + geo_kwargs={"rand": True}, + shader_kwargs={"rand": True}, + ) + fn = os.path.join( + os.path.abspath(os.curdir), "dev_scene_test_spot_sparse.blend" + ) bpy.ops.wm.save_as_mainfile(filepath=fn) - #bpy.context.scene.render.filepath = os.path.join('surfaces/surface_thumbnails', 'bone%d.jpg'%(i)) - #bpy.context.scene.render.image_settings.file_format='JPEG' - #bpy.ops.render.render(write_still=True) \ No newline at end of file + # bpy.context.scene.render.filepath = os.path.join('surfaces/surface_thumbnails', 'bone%d.jpg'%(i)) + # bpy.context.scene.render.image_settings.file_format='JPEG' + # bpy.ops.render.render(write_still=True) diff --git a/infinigen/assets/materials/stone.py b/infinigen/assets/materials/stone.py index 185fb12c8..3478030c9 100644 --- a/infinigen/assets/materials/stone.py +++ b/infinigen/assets/materials/stone.py @@ -5,27 +5,28 @@ # Acknowledgement: This file draws inspiration from https://www.youtube.com/watch?v=YKRK82JeBo8 by Ryan King Art -import os - -import bpy import gin -from infinigen.core.nodes.node_wrangler import Nodes -from numpy.random import uniform, normal as N -from infinigen.core import surface +from numpy.random import normal as N +from numpy.random import uniform + from infinigen.assets.materials.utils.surface_utils import sample_color, sample_ratio -from infinigen.core.util.organization import SurfaceTypes +from infinigen.core import surface +from infinigen.core.nodes.node_wrangler import Nodes from infinigen.core.util.math import FixedSeed +from infinigen.core.util.organization import SurfaceTypes from .mountain import geo_MOUNTAIN_general - type = SurfaceTypes.SDFPerturb mod_name = "geo_stone" name = "stone" + def shader_stone(nw, random_seed=0): nw.force_input_consistency() - stone_base_color, stone_roughness = geo_stone(nw, random_seed=random_seed, geometry=False) + stone_base_color, stone_roughness = geo_stone( + nw, random_seed=random_seed, geometry=False + ) principled_bsdf = nw.new_node( Nodes.PrincipledBSDF, @@ -37,12 +38,13 @@ def shader_stone(nw, random_seed=0): return principled_bsdf + @gin.configurable def geo_stone(nw, selection=None, random_seed=0, geometry=True): nw.force_input_consistency() if nw.node_group.type == "SHADER": - position = nw.new_node('ShaderNodeNewGeometry') - normal = (nw.new_node('ShaderNodeNewGeometry'), 1) + position = nw.new_node("ShaderNodeNewGeometry") + normal = (nw.new_node("ShaderNodeNewGeometry"), 1) else: position = nw.new_node(Nodes.InputPosition) normal = nw.new_node(Nodes.InputNormal) @@ -51,18 +53,22 @@ def geo_stone(nw, selection=None, random_seed=0, geometry=True): # size of low frequency bumps, higher means smaller bumps size_bumps_lf = uniform(0, 30) # height of low frequency bumps - heig_bumps_lf = nw.new_value(uniform(.08, .15), "heig_bumps_lf") + heig_bumps_lf = nw.new_value(uniform(0.08, 0.15), "heig_bumps_lf") # density of cracks, lower means cracks are present in smaller area dens_crack = uniform(0, 0.1) # scale cracks - scal_crack = uniform(5, 10)/2 + 
scal_crack = uniform(5, 10) / 2 # width of the crack widt_crack = uniform(0.08, 0.12) scale = 0.5 musgrave_texture = nw.new_node( Nodes.MusgraveTexture, - input_kwargs={"Vector": position, "Scale": nw.new_value(size_bumps_lf * scale, "size_bumps_lf"), "W": nw.new_value(uniform(0, 10), "musgrave_texture_w")}, + input_kwargs={ + "Vector": position, + "Scale": nw.new_value(size_bumps_lf * scale, "size_bumps_lf"), + "W": nw.new_value(uniform(0, 10), "musgrave_texture_w"), + }, attrs={"musgrave_dimensions": "4D"}, ) @@ -114,12 +120,18 @@ def geo_stone(nw, selection=None, random_seed=0, geometry=True): noise_texture_2 = nw.new_node( Nodes.NoiseTexture, - input_kwargs={"Vector": position, "Scale": 5.0 * scale, "W": nw.new_value(uniform(0, 10), "noise_texture_2_w")}, + input_kwargs={ + "Vector": position, + "Scale": 5.0 * scale, + "W": nw.new_value(uniform(0, 10), "noise_texture_2_w"), + }, attrs={"noise_dimensions": "4D"}, ) colorramp_2 = nw.new_node( - Nodes.ColorRamp, input_kwargs={"Fac": noise_texture_2.outputs["Fac"]}, label="colorramp_2_VAR" + Nodes.ColorRamp, + input_kwargs={"Fac": noise_texture_2.outputs["Fac"]}, + label="colorramp_2_VAR", ) colorramp_2.color_ramp.elements[0].position = 0.445 + (2 * dens_crack) - 0.1 colorramp_2.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) @@ -139,11 +151,18 @@ def geo_stone(nw, selection=None, random_seed=0, geometry=True): wave_texture = nw.new_node( Nodes.WaveTexture, - input_kwargs={"Vector": noise_texture_1.outputs["Color"], "Scale": nw.new_value(N(2, 0.5), "wave_texture_scale"), "Distortion": nw.new_value(N(6, 2), "wave_texture_distortion"), "Detail": nw.new_value(N(15, 5), "wave_texture_detail")}, + input_kwargs={ + "Vector": noise_texture_1.outputs["Color"], + "Scale": nw.new_value(N(2, 0.5), "wave_texture_scale"), + "Distortion": nw.new_value(N(6, 2), "wave_texture_distortion"), + "Detail": nw.new_value(N(15, 5), "wave_texture_detail"), + }, ) colorramp_1 = nw.new_node( - Nodes.ColorRamp, input_kwargs={"Fac": wave_texture.outputs["Fac"]}, label="colorramp_1_VAR" + Nodes.ColorRamp, + input_kwargs={"Fac": wave_texture.outputs["Fac"]}, + label="colorramp_1_VAR", ) colorramp_1.color_ramp.elements[0].position = 0.0 colorramp_1.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) @@ -190,31 +209,54 @@ def geo_stone(nw, selection=None, random_seed=0, geometry=True): attrs={"operation": "MULTIPLY"}, ) - noise_texture_3 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': position, "W": nw.new_value(uniform(0, 10), "noise_texture_3_w"), 'Scale': nw.new_value(sample_ratio(5, 3/4, 4/3), "noise_texture_3_scale")}, - attrs={"noise_dimensions": "4D"}) + noise_texture_3 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": position, + "W": nw.new_value(uniform(0, 10), "noise_texture_3_w"), + "Scale": nw.new_value( + sample_ratio(5, 3 / 4, 4 / 3), "noise_texture_3_scale" + ), + }, + attrs={"noise_dimensions": "4D"}, + ) - subtract = nw.new_node(Nodes.Math, + subtract = nw.new_node( + Nodes.Math, input_kwargs={0: noise_texture_3.outputs["Fac"]}, - attrs={'operation': 'SUBTRACT'}) + attrs={"operation": "SUBTRACT"}, + ) - multiply_8 = nw.new_node(Nodes.VectorMath, + multiply_8 = nw.new_node( + Nodes.VectorMath, input_kwargs={0: subtract, 1: normal}, - attrs={'operation': 'MULTIPLY'}) + attrs={"operation": "MULTIPLY"}, + ) value_5 = nw.new_node(Nodes.Value) value_5.outputs[0].default_value = 0.05 - multiply_9 = nw.new_node(Nodes.VectorMath, + multiply_9 = nw.new_node( + Nodes.VectorMath, input_kwargs={0: multiply_8.outputs["Vector"], 1: 
value_5}, - attrs={'operation': 'MULTIPLY'}) + attrs={"operation": "MULTIPLY"}, + ) - noise_texture_4 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': position, 'Scale': nw.new_value(sample_ratio(20, 3/4, 4/3), "noise_texture_4_scale"), "W": nw.new_value(uniform(0, 10), "noise_texture_4_w")}, - attrs={'noise_dimensions': '4D'}) + noise_texture_4 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": position, + "Scale": nw.new_value( + sample_ratio(20, 3 / 4, 4 / 3), "noise_texture_4_scale" + ), + "W": nw.new_value(uniform(0, 10), "noise_texture_4_w"), + }, + attrs={"noise_dimensions": "4D"}, + ) - colorramp_5 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': noise_texture_4.outputs["Fac"]}) + colorramp_5 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": noise_texture_4.outputs["Fac"]} + ) colorramp_5.color_ramp.elements.new(0) colorramp_5.color_ramp.elements.new(0) colorramp_5.color_ramp.elements[0].position = 0.0 @@ -226,25 +268,33 @@ def geo_stone(nw, selection=None, random_seed=0, geometry=True): colorramp_5.color_ramp.elements[3].position = 1.0 colorramp_5.color_ramp.elements[3].color = (1.0, 1.0, 1.0, 1.0) - subtract_1 = nw.new_node(Nodes.Math, + subtract_1 = nw.new_node( + Nodes.Math, input_kwargs={0: colorramp_5.outputs["Color"]}, - attrs={'operation': 'SUBTRACT'}) + attrs={"operation": "SUBTRACT"}, + ) - multiply_10 = nw.new_node(Nodes.VectorMath, + multiply_10 = nw.new_node( + Nodes.VectorMath, input_kwargs={0: subtract_1, 1: normal}, - attrs={'operation': 'MULTIPLY'}) + attrs={"operation": "MULTIPLY"}, + ) value_6 = nw.new_node(Nodes.Value) value_6.outputs[0].default_value = 0.1 - multiply_11 = nw.new_node(Nodes.VectorMath, + multiply_11 = nw.new_node( + Nodes.VectorMath, input_kwargs={0: multiply_10.outputs["Vector"], 1: value_6}, - attrs={'operation': 'MULTIPLY'}) + attrs={"operation": "MULTIPLY"}, + ) offset = nw.add(multiply_9, vector_math_8, multiply_11) colorramp = nw.new_node( - Nodes.ColorRamp, input_kwargs={"Fac": noise_texture.outputs["Fac"]}, label="colorramp_1_VAR" + Nodes.ColorRamp, + input_kwargs={"Fac": noise_texture.outputs["Fac"]}, + label="colorramp_1_VAR", ) color1 = uniform(0, 0.05) color2 = uniform(0.05, 0.1) @@ -271,28 +321,46 @@ def geo_stone(nw, selection=None, random_seed=0, geometry=True): rough_min = uniform(0.6, 0.7) rough_max = uniform(0.7, 0.8) colorramp_3 = nw.new_node( - Nodes.ColorRamp, input_kwargs={"Fac": noise_texture.outputs["Fac"]}, label="colorramp_3_VAR" + Nodes.ColorRamp, + input_kwargs={"Fac": noise_texture.outputs["Fac"]}, + label="colorramp_3_VAR", ) colorramp_3.color_ramp.elements[0].position = 0.082 - colorramp_3.color_ramp.elements[0].color = (rough_min, rough_min, rough_min, 1.0) + colorramp_3.color_ramp.elements[0].color = ( + rough_min, + rough_min, + rough_min, + 1.0, + ) colorramp_3.color_ramp.elements[1].position = 0.768 - colorramp_3.color_ramp.elements[1].color = (rough_max, rough_max, rough_max, 1.0) + colorramp_3.color_ramp.elements[1].color = ( + rough_max, + rough_max, + rough_max, + 1.0, + ) stone_roughness = colorramp_3 if geometry: groupinput = nw.new_node(Nodes.GroupInput) - noise_params = {"scale": ("uniform", 10, 20), "detail": 9, "roughness": 0.6, "zscale": ("log_uniform", 0.007, 0.013)} + noise_params = { + "scale": ("uniform", 10, 20), + "detail": 9, + "roughness": 0.6, + "zscale": ("log_uniform", 0.007, 0.013), + } offset = nw.add(offset, geo_MOUNTAIN_general(nw, 3, noise_params, 0, {}, {})) if selection is not None: offset = nw.multiply(offset, surface.eval_argument(nw, 
selection)) - set_position = nw.new_node(Nodes.SetPosition, input_kwargs={"Geometry": groupinput, "Offset": offset}) - nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': set_position}) + set_position = nw.new_node( + Nodes.SetPosition, input_kwargs={"Geometry": groupinput, "Offset": offset} + ) + nw.new_node(Nodes.GroupOutput, input_kwargs={"Geometry": set_position}) else: return stone_base_color, stone_roughness - def apply(obj, selection=None, **kwargs): surface.add_geomod( obj, @@ -300,4 +368,3 @@ def apply(obj, selection=None, **kwargs): selection=selection, ) surface.add_material(obj, shader_stone, selection=selection) - diff --git a/infinigen/assets/materials/stone_and_concrete/concrete.py b/infinigen/assets/materials/stone_and_concrete/concrete.py index a41f00ec6..387e1aa7e 100644 --- a/infinigen/assets/materials/stone_and_concrete/concrete.py +++ b/infinigen/assets/materials/stone_and_concrete/concrete.py @@ -5,213 +5,440 @@ # Acknowledgement: This file draws inspiration https://www.youtube.com/watch?v=XDqRa0ExDqs by Ryan King Art -import bpy -import bpy -import mathutils -from numpy.random import uniform, normal, randint +from numpy.random import uniform from infinigen.assets.materials import common -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler from infinigen.core.util.color import color_category -from infinigen.core import surface -@node_utils.to_nodegroup('nodegroup_crack', singleton=False, type='ShaderNodeTree') + +@node_utils.to_nodegroup("nodegroup_crack", singleton=False, type="ShaderNodeTree") def nodegroup_crack(nw: NodeWrangler): # Code generated using version 2.6.4 of the node_transpiler - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Seed', 0.0000), - ('NodeSocketFloat', 'Amount', 1.0000), - ('NodeSocketFloat', 'Scale', 0.0000), - ('NodeSocketFloatFactor', 'Snake Crack', 0.3000)]) + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "Seed", 0.0000), + ("NodeSocketFloat", "Amount", 1.0000), + ("NodeSocketFloat", "Scale", 0.0000), + ("NodeSocketFloatFactor", "Snake Crack", 0.3000), + ], + ) texture_coordinate_1 = nw.new_node(Nodes.TextureCoord) - musgrave_texture_2 = nw.new_node(Nodes.MusgraveTexture, - input_kwargs={'Vector': texture_coordinate_1.outputs["Object"], 'W': group_input.outputs["Seed"], 'Scale': group_input.outputs["Scale"], 'Detail': 15.0000, 'Dimension': 0.2000}, - attrs={'musgrave_dimensions': '4D'}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Scale"]}, attrs={'operation': 'MULTIPLY'}) - - noise_texture_2 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': texture_coordinate_1.outputs["Object"], 'W': group_input.outputs["Seed"], 'Scale': multiply, 'Detail': 15.0000}, - attrs={'noise_dimensions': '4D'}) - - voronoi_texture = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': noise_texture_2.outputs["Fac"], 'Scale': 1.2000}, - attrs={'feature': 'DISTANCE_TO_EDGE'}) - - map_range_4 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': voronoi_texture.outputs["Distance"], 2: 0.0200, 3: 2.0000, 4: 0.0000}) - - mix_7 = nw.new_node(Nodes.Mix, - input_kwargs={0: group_input.outputs["Snake Crack"], 6: musgrave_texture_2, 7: map_range_4.outputs["Result"]}, - attrs={'blend_type': 'ADD', 'data_type': 'RGBA'}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Scale"], 1: 0.6000}, 
attrs={'operation': 'MULTIPLY'}) - - musgrave_texture_3 = nw.new_node(Nodes.MusgraveTexture, - input_kwargs={'Vector': texture_coordinate_1.outputs["Object"], 'W': group_input.outputs["Seed"], 'Scale': multiply_1, 'Detail': 15.0000, 'Dimension': 1.0000}, - attrs={'musgrave_dimensions': '4D'}) - - map_range_2 = nw.new_node(Nodes.MapRange, input_kwargs={'Value': group_input.outputs["Amount"], 3: 1.0000, 4: -0.5000}) - - add = nw.new_node(Nodes.Math, input_kwargs={0: map_range_2.outputs["Result"], 1: 0.1000}) - - map_range_1 = nw.new_node(Nodes.MapRange, input_kwargs={'Value': musgrave_texture_3, 1: map_range_2.outputs["Result"], 2: add}) - - mix_4 = nw.new_node(Nodes.Mix, + musgrave_texture_2 = nw.new_node( + Nodes.MusgraveTexture, + input_kwargs={ + "Vector": texture_coordinate_1.outputs["Object"], + "W": group_input.outputs["Seed"], + "Scale": group_input.outputs["Scale"], + "Detail": 15.0000, + "Dimension": 0.2000, + }, + attrs={"musgrave_dimensions": "4D"}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Scale"]}, + attrs={"operation": "MULTIPLY"}, + ) + + noise_texture_2 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": texture_coordinate_1.outputs["Object"], + "W": group_input.outputs["Seed"], + "Scale": multiply, + "Detail": 15.0000, + }, + attrs={"noise_dimensions": "4D"}, + ) + + voronoi_texture = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={"Vector": noise_texture_2.outputs["Fac"], "Scale": 1.2000}, + attrs={"feature": "DISTANCE_TO_EDGE"}, + ) + + map_range_4 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": voronoi_texture.outputs["Distance"], + 2: 0.0200, + 3: 2.0000, + 4: 0.0000, + }, + ) + + mix_7 = nw.new_node( + Nodes.Mix, + input_kwargs={ + 0: group_input.outputs["Snake Crack"], + 6: musgrave_texture_2, + 7: map_range_4.outputs["Result"], + }, + attrs={"blend_type": "ADD", "data_type": "RGBA"}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Scale"], 1: 0.6000}, + attrs={"operation": "MULTIPLY"}, + ) + + musgrave_texture_3 = nw.new_node( + Nodes.MusgraveTexture, + input_kwargs={ + "Vector": texture_coordinate_1.outputs["Object"], + "W": group_input.outputs["Seed"], + "Scale": multiply_1, + "Detail": 15.0000, + "Dimension": 1.0000, + }, + attrs={"musgrave_dimensions": "4D"}, + ) + + map_range_2 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": group_input.outputs["Amount"], 3: 1.0000, 4: -0.5000}, + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: map_range_2.outputs["Result"], 1: 0.1000} + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": musgrave_texture_3, + 1: map_range_2.outputs["Result"], + 2: add, + }, + ) + + mix_4 = nw.new_node( + Nodes.Mix, input_kwargs={0: 1.0000, 6: mix_7.outputs[2], 7: map_range_1.outputs["Result"]}, - attrs={'blend_type': 'DARKEN', 'data_type': 'RGBA'}) - - multiply_2 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Scale"], 1: 0.3000}, attrs={'operation': 'MULTIPLY'}) - - musgrave_texture_4 = nw.new_node(Nodes.MusgraveTexture, - input_kwargs={'Vector': texture_coordinate_1.outputs["Object"], 'W': group_input.outputs["Seed"], 'Scale': multiply_2, 'Detail': 15.0000, 'Dimension': 1.0000}, - attrs={'musgrave_dimensions': '4D'}) - - add_1 = nw.new_node(Nodes.Math, input_kwargs={0: map_range_2.outputs["Result"], 1: 0.1000}) - - map_range = nw.new_node(Nodes.MapRange, input_kwargs={'Value': musgrave_texture_4, 1: map_range_2.outputs["Result"], 2: add_1}) - - mix_5 = 
nw.new_node(Nodes.Mix, + attrs={"blend_type": "DARKEN", "data_type": "RGBA"}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Scale"], 1: 0.3000}, + attrs={"operation": "MULTIPLY"}, + ) + + musgrave_texture_4 = nw.new_node( + Nodes.MusgraveTexture, + input_kwargs={ + "Vector": texture_coordinate_1.outputs["Object"], + "W": group_input.outputs["Seed"], + "Scale": multiply_2, + "Detail": 15.0000, + "Dimension": 1.0000, + }, + attrs={"musgrave_dimensions": "4D"}, + ) + + add_1 = nw.new_node( + Nodes.Math, input_kwargs={0: map_range_2.outputs["Result"], 1: 0.1000} + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": musgrave_texture_4, + 1: map_range_2.outputs["Result"], + 2: add_1, + }, + ) + + mix_5 = nw.new_node( + Nodes.Mix, input_kwargs={0: 1.0000, 6: mix_4.outputs[2], 7: map_range.outputs["Result"]}, - attrs={'blend_type': 'DARKEN', 'data_type': 'RGBA'}) + attrs={"blend_type": "DARKEN", "data_type": "RGBA"}, + ) - color_ramp = nw.new_node(Nodes.ColorRamp, input_kwargs={'Fac': mix_5.outputs[2]}) + color_ramp = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": mix_5.outputs[2]}) color_ramp.color_ramp.elements[0].position = 0.0000 color_ramp.color_ramp.elements[0].color = [0.0000, 0.0000, 0.0000, 1.0000] color_ramp.color_ramp.elements[1].position = 1.0000 color_ramp.color_ramp.elements[1].color = [1.0000, 1.0000, 1.0000, 1.0000] - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Color': color_ramp.outputs["Color"]}, attrs={'is_active_output': True}) + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Color": color_ramp.outputs["Color"]}, + attrs={"is_active_output": True}, + ) + -@node_utils.to_nodegroup('nodegroup_concrete', singleton=False, type='ShaderNodeTree') +@node_utils.to_nodegroup("nodegroup_concrete", singleton=False, type="ShaderNodeTree") def nodegroup_concrete(nw: NodeWrangler): # Code generated using version 2.6.4 of the node_transpiler - group_input_1 = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketColor', 'Base Color', (0.8000, 0.8000, 0.8000, 1.0000)), - ('NodeSocketFloat', 'Scale', 0.0000), - ('NodeSocketFloat', 'Seed', 0.0000), - ('NodeSocketFloat', 'Roughness', 0.0000), - ('NodeSocketFloat', 'Crack Amount', 0.0000), - ('NodeSocketFloat', 'Crack Scale', 0.0000), - ('NodeSocketFloatFactor', 'Snake Crack', 0.3000)]) - - multiply = nw.new_node(Nodes.Math, + group_input_1 = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketColor", "Base Color", (0.8000, 0.8000, 0.8000, 1.0000)), + ("NodeSocketFloat", "Scale", 0.0000), + ("NodeSocketFloat", "Seed", 0.0000), + ("NodeSocketFloat", "Roughness", 0.0000), + ("NodeSocketFloat", "Crack Amount", 0.0000), + ("NodeSocketFloat", "Crack Scale", 0.0000), + ("NodeSocketFloatFactor", "Snake Crack", 0.3000), + ], + ) + + multiply = nw.new_node( + Nodes.Math, input_kwargs={0: group_input_1.outputs["Scale"], 1: 10.0000}, - attrs={'operation': 'MULTIPLY'}) + attrs={"operation": "MULTIPLY"}, + ) - multiply_1 = nw.new_node(Nodes.Math, + multiply_1 = nw.new_node( + Nodes.Math, input_kwargs={0: multiply, 1: group_input_1.outputs["Crack Scale"]}, - attrs={'operation': 'MULTIPLY'}) + attrs={"operation": "MULTIPLY"}, + ) - group = nw.new_node(nodegroup_crack().name, - input_kwargs={'Seed': group_input_1.outputs["Seed"], 'Amount': group_input_1.outputs["Crack Amount"], 'Scale': multiply_1, 'Snake Crack': group_input_1.outputs["Snake Crack"]}) + group = nw.new_node( + nodegroup_crack().name, + input_kwargs={ + "Seed": 
group_input_1.outputs["Seed"], + "Amount": group_input_1.outputs["Crack Amount"], + "Scale": multiply_1, + "Snake Crack": group_input_1.outputs["Snake Crack"], + }, + ) - map_range_3 = nw.new_node(Nodes.MapRange, input_kwargs={'Value': group}) + map_range_3 = nw.new_node(Nodes.MapRange, input_kwargs={"Value": group}) texture_coordinate = nw.new_node(Nodes.TextureCoord) - multiply_2 = nw.new_node(Nodes.Math, input_kwargs={0: group_input_1.outputs["Scale"], 1: 2.0000}, attrs={'operation': 'MULTIPLY'}) - - noise_texture_1 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': texture_coordinate.outputs["Object"], 'W': group_input_1.outputs["Seed"], 'Scale': multiply_2, 'Detail': 15.0000}, - attrs={'noise_dimensions': '4D'}) - - multiply_3 = nw.new_node(Nodes.Math, input_kwargs={0: group_input_1.outputs["Scale"], 1: 5.0000}, attrs={'operation': 'MULTIPLY'}) - - musgrave_texture_1 = nw.new_node(Nodes.MusgraveTexture, - input_kwargs={'Vector': texture_coordinate.outputs["Object"], 'W': group_input_1.outputs["Seed"], 'Scale': multiply_3, 'Detail': 15.0000, 'Dimension': 1.0000, 'Lacunarity': 3.0000}, - attrs={'musgrave_dimensions': '4D'}) - - mix_2 = nw.new_node(Nodes.Mix, input_kwargs={6: musgrave_texture_1}, attrs={'data_type': 'RGBA'}) - - hue_saturation_value_1 = nw.new_node('ShaderNodeHueSaturation', - input_kwargs={'Value': 0.6000, 'Color': group_input_1.outputs["Base Color"]}) - - multiply_4 = nw.new_node(Nodes.Math, + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input_1.outputs["Scale"], 1: 2.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + noise_texture_1 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": texture_coordinate.outputs["Object"], + "W": group_input_1.outputs["Seed"], + "Scale": multiply_2, + "Detail": 15.0000, + }, + attrs={"noise_dimensions": "4D"}, + ) + + multiply_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input_1.outputs["Scale"], 1: 5.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + musgrave_texture_1 = nw.new_node( + Nodes.MusgraveTexture, + input_kwargs={ + "Vector": texture_coordinate.outputs["Object"], + "W": group_input_1.outputs["Seed"], + "Scale": multiply_3, + "Detail": 15.0000, + "Dimension": 1.0000, + "Lacunarity": 3.0000, + }, + attrs={"musgrave_dimensions": "4D"}, + ) + + mix_2 = nw.new_node( + Nodes.Mix, input_kwargs={6: musgrave_texture_1}, attrs={"data_type": "RGBA"} + ) + + hue_saturation_value_1 = nw.new_node( + "ShaderNodeHueSaturation", + input_kwargs={"Value": 0.6000, "Color": group_input_1.outputs["Base Color"]}, + ) + + multiply_4 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input_1.outputs["Scale"], 1: 20.0000}, - attrs={'operation': 'MULTIPLY'}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': texture_coordinate.outputs["Object"], 'W': group_input_1.outputs["Seed"], 'Scale': multiply_4, 'Detail': 15.0000, 'Distortion': 0.2000}, - attrs={'noise_dimensions': '4D'}) - - multiply_5 = nw.new_node(Nodes.Math, + attrs={"operation": "MULTIPLY"}, + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": texture_coordinate.outputs["Object"], + "W": group_input_1.outputs["Seed"], + "Scale": multiply_4, + "Detail": 15.0000, + "Distortion": 0.2000, + }, + attrs={"noise_dimensions": "4D"}, + ) + + multiply_5 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input_1.outputs["Scale"], 1: 20.0000}, - attrs={'operation': 'MULTIPLY'}) - - musgrave_texture = nw.new_node(Nodes.MusgraveTexture, - input_kwargs={'Vector': 
texture_coordinate.outputs["Object"], 'W': group_input_1.outputs["Seed"], 'Scale': multiply_5, 'Detail': 15.0000, 'Dimension': 0.2000}, - attrs={'musgrave_dimensions': '4D'}) - - mix = nw.new_node(Nodes.Mix, + attrs={"operation": "MULTIPLY"}, + ) + + musgrave_texture = nw.new_node( + Nodes.MusgraveTexture, + input_kwargs={ + "Vector": texture_coordinate.outputs["Object"], + "W": group_input_1.outputs["Seed"], + "Scale": multiply_5, + "Detail": 15.0000, + "Dimension": 0.2000, + }, + attrs={"musgrave_dimensions": "4D"}, + ) + + mix = nw.new_node( + Nodes.Mix, input_kwargs={6: noise_texture.outputs["Fac"], 7: musgrave_texture}, - attrs={'data_type': 'RGBA'}) - - hue_saturation_value = nw.new_node('ShaderNodeHueSaturation', - input_kwargs={'Value': 1.4000, 'Color': group_input_1.outputs["Base Color"]}) - - mix_1 = nw.new_node(Nodes.Mix, - input_kwargs={0: mix.outputs[2], 6: group_input_1.outputs["Base Color"], 7: hue_saturation_value}, - attrs={'data_type': 'RGBA'}) - - mix_3 = nw.new_node(Nodes.Mix, - input_kwargs={0: mix_2.outputs[2], 6: hue_saturation_value_1, 7: mix_1.outputs[2]}, - attrs={'data_type': 'RGBA'}) - - hue_saturation_value_2 = nw.new_node('ShaderNodeHueSaturation', - input_kwargs={'Value': noise_texture_1.outputs["Fac"], 'Fac': 0.2000, 'Color': mix_3.outputs[2]}) - - hue_saturation_value_3 = nw.new_node('ShaderNodeHueSaturation', - input_kwargs={'Value': 0.2000, 'Color': group_input_1.outputs["Base Color"]}) - - mix_6 = nw.new_node(Nodes.Mix, - input_kwargs={0: map_range_3.outputs["Result"], 6: hue_saturation_value_2, 7: hue_saturation_value_3}, - attrs={'data_type': 'RGBA'}) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': mix_6.outputs[2], 'Roughness': group_input_1.outputs["Roughness"]}) - - multiply_6 = nw.new_node(Nodes.Math, + attrs={"data_type": "RGBA"}, + ) + + hue_saturation_value = nw.new_node( + "ShaderNodeHueSaturation", + input_kwargs={"Value": 1.4000, "Color": group_input_1.outputs["Base Color"]}, + ) + + mix_1 = nw.new_node( + Nodes.Mix, + input_kwargs={ + 0: mix.outputs[2], + 6: group_input_1.outputs["Base Color"], + 7: hue_saturation_value, + }, + attrs={"data_type": "RGBA"}, + ) + + mix_3 = nw.new_node( + Nodes.Mix, + input_kwargs={ + 0: mix_2.outputs[2], + 6: hue_saturation_value_1, + 7: mix_1.outputs[2], + }, + attrs={"data_type": "RGBA"}, + ) + + hue_saturation_value_2 = nw.new_node( + "ShaderNodeHueSaturation", + input_kwargs={ + "Value": noise_texture_1.outputs["Fac"], + "Fac": 0.2000, + "Color": mix_3.outputs[2], + }, + ) + + hue_saturation_value_3 = nw.new_node( + "ShaderNodeHueSaturation", + input_kwargs={"Value": 0.2000, "Color": group_input_1.outputs["Base Color"]}, + ) + + mix_6 = nw.new_node( + Nodes.Mix, + input_kwargs={ + 0: map_range_3.outputs["Result"], + 6: hue_saturation_value_2, + 7: hue_saturation_value_3, + }, + attrs={"data_type": "RGBA"}, + ) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": mix_6.outputs[2], + "Roughness": group_input_1.outputs["Roughness"], + }, + ) + + multiply_6 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input_1.outputs["Crack Amount"], 1: 0.6000}, - attrs={'operation': 'MULTIPLY'}) - - multiply_7 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_1, 1: 5.0000}, attrs={'operation': 'MULTIPLY'}) - - group_1 = nw.new_node(nodegroup_crack().name, - input_kwargs={'Seed': group_input_1.outputs["Seed"], 'Amount': multiply_6, 'Scale': multiply_7}) - - multiply_8 = nw.new_node(Nodes.Math, + attrs={"operation": "MULTIPLY"}, + ) + + 
multiply_7 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_1, 1: 5.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + group_1 = nw.new_node( + nodegroup_crack().name, + input_kwargs={ + "Seed": group_input_1.outputs["Seed"], + "Amount": multiply_6, + "Scale": multiply_7, + }, + ) + + multiply_8 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input_1.outputs["Roughness"], 1: 1.0000}, - attrs={'operation': 'MULTIPLY'}) + attrs={"operation": "MULTIPLY"}, + ) - multiply_9 = nw.new_node(Nodes.Math, input_kwargs={0: group_1, 1: multiply_8}, attrs={'operation': 'MULTIPLY'}) + multiply_9 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_1, 1: multiply_8}, + attrs={"operation": "MULTIPLY"}, + ) - multiply_10 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_8, 1: group}, attrs={'operation': 'MULTIPLY'}) + multiply_10 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_8, 1: group}, + attrs={"operation": "MULTIPLY"}, + ) add = nw.new_node(Nodes.Math, input_kwargs={0: multiply_9, 1: multiply_10}) value = nw.new_node(Nodes.Value) value.outputs[0].default_value = 0.3000 - multiply_11 = nw.new_node(Nodes.Math, + multiply_11 = nw.new_node( + Nodes.Math, input_kwargs={0: value, 1: group_input_1.outputs["Roughness"]}, - attrs={'operation': 'MULTIPLY'}) + attrs={"operation": "MULTIPLY"}, + ) - multiply_12 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_11, 1: mix_1.outputs[2]}, attrs={'operation': 'MULTIPLY'}) + multiply_12 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_11, 1: mix_1.outputs[2]}, + attrs={"operation": "MULTIPLY"}, + ) add_1 = nw.new_node(Nodes.Math, input_kwargs={0: add, 1: multiply_12}) - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'BSDF': principled_bsdf, 'Displacement': add_1}, - attrs={'is_active_output': True}) - -def shader_concrete(nw: NodeWrangler, scale=1.0, base_color=None, seed=None, - roughness=None, crack_amount=None, crack_scale=None, snake_crack=None, **kwargs): + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"BSDF": principled_bsdf, "Displacement": add_1}, + attrs={"is_active_output": True}, + ) + + +def shader_concrete( + nw: NodeWrangler, + scale=1.0, + base_color=None, + seed=None, + roughness=None, + crack_amount=None, + crack_scale=None, + snake_crack=None, + **kwargs, +): # Code generated using version 2.6.4 of the node_transpiler if seed is None: seed = uniform(-1000.0, 1000.0) @@ -224,19 +451,36 @@ def shader_concrete(nw: NodeWrangler, scale=1.0, base_color=None, seed=None, if snake_crack is None: snake_crack = uniform(0.0, 1.0) if base_color is None: - base_color = color_category('concrete') - - group = nw.new_node(nodegroup_concrete().name, - input_kwargs={'Base Color': base_color, 'Scale': scale, 'Seed': seed, - 'Roughness': roughness, 'Crack Amount': crack_amount, - 'Crack Scale': crack_scale, 'Snake Crack': snake_crack}) - - displacement_1 = nw.new_node('ShaderNodeDisplacement', - input_kwargs={'Height': group.outputs["Displacement"], 'Midlevel': 0.0000, 'Scale': 0.0500}) + base_color = color_category("concrete") + + group = nw.new_node( + nodegroup_concrete().name, + input_kwargs={ + "Base Color": base_color, + "Scale": scale, + "Seed": seed, + "Roughness": roughness, + "Crack Amount": crack_amount, + "Crack Scale": crack_scale, + "Snake Crack": snake_crack, + }, + ) + + displacement_1 = nw.new_node( + "ShaderNodeDisplacement", + input_kwargs={ + "Height": group.outputs["Displacement"], + "Midlevel": 0.0000, + "Scale": 0.0500, + }, + ) + + material_output = nw.new_node( + 
Nodes.MaterialOutput, + input_kwargs={"Surface": group.outputs["BSDF"], "Displacement": displacement_1}, + attrs={"is_active_output": True}, + ) - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': group.outputs["BSDF"], 'Displacement': displacement_1}, - attrs={'is_active_output': True}) def apply(obj, selection=None, **kwargs): common.apply(obj, shader_concrete, selection=selection, **kwargs) diff --git a/infinigen/assets/materials/succulent.py b/infinigen/assets/materials/succulent.py index 499c70802..3191b74db 100644 --- a/infinigen/assets/materials/succulent.py +++ b/infinigen/assets/materials/succulent.py @@ -4,23 +4,21 @@ # Authors: Beining Han # Acknowledgements: This file draws inspiration from https://blenderartists.org/t/extrude-face-along-curve-with-geometry-nodes/1432653/3 -from numpy.random import uniform, normal , randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category +from numpy.random import uniform + from infinigen.core import surface +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler from infinigen.core.util.color import hsv2rgba - def shader_green_transition_succulent(nw: NodeWrangler): # Code generated using version 2.4.3 of the node_transpiler - musgrave_texture_1 = nw.new_node(Nodes.MusgraveTexture, - input_kwargs={'Scale': uniform(5.0, 20.0)}) + musgrave_texture_1 = nw.new_node( + Nodes.MusgraveTexture, input_kwargs={"Scale": uniform(5.0, 20.0)} + ) - colorramp_3 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': musgrave_texture_1}) + colorramp_3 = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": musgrave_texture_1}) colorramp_3.color_ramp.elements[0].position = 0.1182 colorramp_3.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) colorramp_3.color_ramp.elements[1].position = 0.7727 @@ -28,59 +26,87 @@ def shader_green_transition_succulent(nw: NodeWrangler): texture_coordinate = nw.new_node(Nodes.TextureCoord) - mapping = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': texture_coordinate.outputs["Generated"]}) + mapping = nw.new_node( + Nodes.Mapping, input_kwargs={"Vector": texture_coordinate.outputs["Generated"]} + ) - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': mapping}) + separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": mapping}) - less_than = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz.outputs["Z"], 1: 0.85}, - attrs={'operation': 'LESS_THAN'}) + less_than = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["Z"], 1: 0.85}, + attrs={"operation": "LESS_THAN"}, + ) - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: colorramp_3.outputs["Color"], 1: less_than}, - attrs={'operation': 'MULTIPLY'}) + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: colorramp_3.outputs["Color"], 1: less_than}, + attrs={"operation": "MULTIPLY"}, + ) - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: multiply, 1: uniform(2.0, 10.0)}, - attrs={'operation': 'MULTIPLY'}) + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply, 1: uniform(2.0, 10.0)}, + attrs={"operation": "MULTIPLY"}, + ) main_hsv_color = (uniform(0.35, 0.42), uniform(0.5, 0.93), uniform(0.20, 0.80)) main_color = hsv2rgba(main_hsv_color) diffuse_color = hsv2rgba((uniform(0.34, 0.43),) + main_hsv_color[1:]) split_point = uniform(0.82, 0.92) - colorramp_1 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': separate_xyz.outputs["Z"]}) + 
colorramp_1 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": separate_xyz.outputs["Z"]} + ) colorramp_1.color_ramp.interpolation = "B_SPLINE" colorramp_1.color_ramp.elements[0].position = split_point colorramp_1.color_ramp.elements[0].color = main_color colorramp_1.color_ramp.elements[1].position = split_point + uniform(0.01, 0.05) - colorramp_1.color_ramp.elements[1].color = (uniform(0.6, 1.0), uniform(0.0, 0.08), uniform(0.0, 0.08), 1.0) - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': colorramp_1.outputs["Color"], 'Subsurface': uniform(0.01, 0.03), - 'Subsurface Radius': (0.01, 0.1, 0.1), 'Subsurface Color': colorramp_1.outputs["Color"], - 'Subsurface IOR': 0.0, 'Specular': 0.0, 'Roughness': 2.0, 'Sheen Tint': 0.0, - 'Clearcoat Roughness': 0.0, 'IOR': 1.3, 'Emission Strength': 0.0}) - - diffuse_bsdf_1 = nw.new_node(Nodes.DiffuseBSDF, - input_kwargs={'Color': diffuse_color, 'Roughness': uniform(0.3, 0.8)}) - - mix_shader_2 = nw.new_node(Nodes.MixShader, - input_kwargs={'Fac': multiply_1, 1: principled_bsdf, 2: diffuse_bsdf_1}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': mix_shader_2}) + colorramp_1.color_ramp.elements[1].color = ( + uniform(0.6, 1.0), + uniform(0.0, 0.08), + uniform(0.0, 0.08), + 1.0, + ) + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": colorramp_1.outputs["Color"], + "Subsurface": uniform(0.01, 0.03), + "Subsurface Radius": (0.01, 0.1, 0.1), + "Subsurface Color": colorramp_1.outputs["Color"], + "Subsurface IOR": 0.0, + "Specular": 0.0, + "Roughness": 2.0, + "Sheen Tint": 0.0, + "Clearcoat Roughness": 0.0, + "IOR": 1.3, + "Emission Strength": 0.0, + }, + ) + + diffuse_bsdf_1 = nw.new_node( + Nodes.DiffuseBSDF, + input_kwargs={"Color": diffuse_color, "Roughness": uniform(0.3, 0.8)}, + ) + + mix_shader_2 = nw.new_node( + Nodes.MixShader, + input_kwargs={"Fac": multiply_1, 1: principled_bsdf, 2: diffuse_bsdf_1}, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": mix_shader_2} + ) def shader_pink_transition_succulent(nw: NodeWrangler): # Code generated using version 2.4.3 of the node_transpiler - musgrave_texture_1 = nw.new_node(Nodes.MusgraveTexture, - input_kwargs={'Scale': uniform(5.0, 20.0)}) + musgrave_texture_1 = nw.new_node( + Nodes.MusgraveTexture, input_kwargs={"Scale": uniform(5.0, 20.0)} + ) - colorramp_3 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': musgrave_texture_1}) + colorramp_3 = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": musgrave_texture_1}) colorramp_3.color_ramp.elements[0].position = 0.1182 colorramp_3.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) colorramp_3.color_ramp.elements[1].position = 0.7727 @@ -88,56 +114,79 @@ def shader_pink_transition_succulent(nw: NodeWrangler): texture_coordinate = nw.new_node(Nodes.TextureCoord) - mapping = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': texture_coordinate.outputs["Generated"]}) + mapping = nw.new_node( + Nodes.Mapping, input_kwargs={"Vector": texture_coordinate.outputs["Generated"]} + ) - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': mapping}) + separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": mapping}) - less_than = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz.outputs["Z"], 1: 0.85}, - attrs={'operation': 'LESS_THAN'}) + less_than = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["Z"], 1: 0.85}, + attrs={"operation": "LESS_THAN"}, + ) - multiply = 
nw.new_node(Nodes.Math, - input_kwargs={0: colorramp_3.outputs["Color"], 1: less_than}, - attrs={'operation': 'MULTIPLY'}) + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: colorramp_3.outputs["Color"], 1: less_than}, + attrs={"operation": "MULTIPLY"}, + ) - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: multiply, 1: uniform(2.0, 8.0)}, - attrs={'operation': 'MULTIPLY'}) + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply, 1: uniform(2.0, 8.0)}, + attrs={"operation": "MULTIPLY"}, + ) main_hsv_color = (uniform(0.93, 0.99), uniform(0.64, 0.90), uniform(0.50, 0.90)) main_color = hsv2rgba(main_hsv_color) - diffuse_color = hsv2rgba((uniform(0.93, 1.), ) + main_hsv_color[1:]) + diffuse_color = hsv2rgba((uniform(0.93, 1.0),) + main_hsv_color[1:]) split_point = uniform(0.82, 0.92) - colorramp_1 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': separate_xyz.outputs["Z"]}) + colorramp_1 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": separate_xyz.outputs["Z"]} + ) colorramp_1.color_ramp.interpolation = "B_SPLINE" colorramp_1.color_ramp.elements[0].position = split_point colorramp_1.color_ramp.elements[0].color = main_color colorramp_1.color_ramp.elements[1].position = split_point + uniform(0.01, 0.05) - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': colorramp_1.outputs["Color"], 'Subsurface': uniform(0.01, 0.05), - 'Subsurface Radius': (0.01, 0.03, 0.03), 'Subsurface Color': colorramp_1.outputs["Color"], - 'Subsurface IOR': 0.0, 'Specular': 0.0, 'Roughness': 2.0, 'Sheen Tint': 0.0, - 'Clearcoat Roughness': 0.0, 'IOR': 1.3, 'Emission Strength': 0.0}) - - diffuse_bsdf_1 = nw.new_node(Nodes.DiffuseBSDF, - input_kwargs={'Color': diffuse_color, 'Roughness': uniform(0.0, 0.5)}) - - mix_shader_2 = nw.new_node(Nodes.MixShader, - input_kwargs={'Fac': multiply_1, 1: principled_bsdf, 2: diffuse_bsdf_1}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': mix_shader_2}) + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": colorramp_1.outputs["Color"], + "Subsurface": uniform(0.01, 0.05), + "Subsurface Radius": (0.01, 0.03, 0.03), + "Subsurface Color": colorramp_1.outputs["Color"], + "Subsurface IOR": 0.0, + "Specular": 0.0, + "Roughness": 2.0, + "Sheen Tint": 0.0, + "Clearcoat Roughness": 0.0, + "IOR": 1.3, + "Emission Strength": 0.0, + }, + ) + + diffuse_bsdf_1 = nw.new_node( + Nodes.DiffuseBSDF, + input_kwargs={"Color": diffuse_color, "Roughness": uniform(0.0, 0.5)}, + ) + + mix_shader_2 = nw.new_node( + Nodes.MixShader, + input_kwargs={"Fac": multiply_1, 1: principled_bsdf, 2: diffuse_bsdf_1}, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": mix_shader_2} + ) def shader_green_succulent(nw: NodeWrangler): - musgrave_texture_1 = nw.new_node(Nodes.MusgraveTexture, - input_kwargs={'Scale': uniform(4.0, 15.0)}) + musgrave_texture_1 = nw.new_node( + Nodes.MusgraveTexture, input_kwargs={"Scale": uniform(4.0, 15.0)} + ) - colorramp_3 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': musgrave_texture_1}) + colorramp_3 = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": musgrave_texture_1}) colorramp_3.color_ramp.elements[0].position = 0.1182 colorramp_3.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) colorramp_3.color_ramp.elements[1].position = 0.7727 @@ -145,54 +194,77 @@ def shader_green_succulent(nw: NodeWrangler): texture_coordinate = nw.new_node(Nodes.TextureCoord) - mapping = nw.new_node(Nodes.Mapping, - 
input_kwargs={'Vector': texture_coordinate.outputs["Generated"]}) + mapping = nw.new_node( + Nodes.Mapping, input_kwargs={"Vector": texture_coordinate.outputs["Generated"]} + ) - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': mapping}) + separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": mapping}) - less_than = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz.outputs["Z"], 1: 1.0}, - attrs={'operation': 'LESS_THAN'}) + less_than = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["Z"], 1: 1.0}, + attrs={"operation": "LESS_THAN"}, + ) - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: colorramp_3.outputs["Color"], 1: less_than}, - attrs={'operation': 'MULTIPLY'}) + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: colorramp_3.outputs["Color"], 1: less_than}, + attrs={"operation": "MULTIPLY"}, + ) - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: multiply, 1: uniform(2.0, 8.0)}, - attrs={'operation': 'MULTIPLY'}) + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply, 1: uniform(2.0, 8.0)}, + attrs={"operation": "MULTIPLY"}, + ) main_hsv_color = (uniform(0.33, 0.39), uniform(0.5, 0.93), uniform(0.20, 0.70)) main_color = hsv2rgba(main_hsv_color) diffuse_color = hsv2rgba((uniform(0.34, 0.38),) + main_hsv_color[1:]) - colorramp_1 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': separate_xyz.outputs["Z"]}) + colorramp_1 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": separate_xyz.outputs["Z"]} + ) colorramp_1.color_ramp.interpolation = "B_SPLINE" colorramp_1.color_ramp.elements[0].position = 1.0 colorramp_1.color_ramp.elements[0].color = main_color - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': colorramp_1.outputs["Color"], 'Subsurface': uniform(0.01, 0.05), - 'Subsurface Radius': (0.1, 0.1, 0.1), 'Subsurface Color': colorramp_1.outputs["Color"], - 'Subsurface IOR': 0.0, 'Specular': 0.0, 'Roughness': 2.0, 'Sheen Tint': 0.0, - 'Clearcoat Roughness': 0.0, 'IOR': 1.3, 'Emission Strength': 0.0}) - - diffuse_bsdf_1 = nw.new_node(Nodes.DiffuseBSDF, - input_kwargs={'Color': diffuse_color, 'Roughness': uniform(0.0, 0.5)}) - - mix_shader_2 = nw.new_node(Nodes.MixShader, - input_kwargs={'Fac': multiply_1, 1: principled_bsdf, 2: diffuse_bsdf_1}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': mix_shader_2}) + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": colorramp_1.outputs["Color"], + "Subsurface": uniform(0.01, 0.05), + "Subsurface Radius": (0.1, 0.1, 0.1), + "Subsurface Color": colorramp_1.outputs["Color"], + "Subsurface IOR": 0.0, + "Specular": 0.0, + "Roughness": 2.0, + "Sheen Tint": 0.0, + "Clearcoat Roughness": 0.0, + "IOR": 1.3, + "Emission Strength": 0.0, + }, + ) + + diffuse_bsdf_1 = nw.new_node( + Nodes.DiffuseBSDF, + input_kwargs={"Color": diffuse_color, "Roughness": uniform(0.0, 0.5)}, + ) + + mix_shader_2 = nw.new_node( + Nodes.MixShader, + input_kwargs={"Fac": multiply_1, 1: principled_bsdf, 2: diffuse_bsdf_1}, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": mix_shader_2} + ) def shader_yellow_succulent(nw: NodeWrangler): - musgrave_texture_1 = nw.new_node(Nodes.MusgraveTexture, - input_kwargs={'Scale': uniform(5.0, 8.0)}) + musgrave_texture_1 = nw.new_node( + Nodes.MusgraveTexture, input_kwargs={"Scale": uniform(5.0, 8.0)} + ) - colorramp_3 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': musgrave_texture_1}) + 
colorramp_3 = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": musgrave_texture_1}) colorramp_3.color_ramp.elements[0].position = 0.1182 colorramp_3.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) colorramp_3.color_ramp.elements[1].position = 0.7727 @@ -200,65 +272,91 @@ def shader_yellow_succulent(nw: NodeWrangler): texture_coordinate = nw.new_node(Nodes.TextureCoord) - mapping = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': texture_coordinate.outputs["Generated"]}) + mapping = nw.new_node( + Nodes.Mapping, input_kwargs={"Vector": texture_coordinate.outputs["Generated"]} + ) - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': mapping}) + separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": mapping}) - less_than = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz.outputs["Z"], 1: 1.0}, - attrs={'operation': 'LESS_THAN'}) + less_than = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["Z"], 1: 1.0}, + attrs={"operation": "LESS_THAN"}, + ) - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: colorramp_3.outputs["Color"], 1: less_than}, - attrs={'operation': 'MULTIPLY'}) + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: colorramp_3.outputs["Color"], 1: less_than}, + attrs={"operation": "MULTIPLY"}, + ) - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: multiply, 1: uniform(1.0, 3.0)}, - attrs={'operation': 'MULTIPLY'}) + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply, 1: uniform(1.0, 3.0)}, + attrs={"operation": "MULTIPLY"}, + ) main_color = hsv2rgba((uniform(0.1, 0.15), uniform(0.8, 1.0), uniform(0.5, 0.7))) - colorramp_1 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': separate_xyz.outputs["Z"]}) + colorramp_1 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": separate_xyz.outputs["Z"]} + ) colorramp_1.color_ramp.interpolation = "B_SPLINE" colorramp_1.color_ramp.elements[0].position = 0.3114 colorramp_1.color_ramp.elements[0].color = main_color colorramp_1.color_ramp.elements[1].position = 0.6864 - colorramp_1.color_ramp.elements[1].color = hsv2rgba((uniform(0.0, 0.06), uniform(0.8, 1.0), uniform(0.5, 0.7))) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': colorramp_1.outputs["Color"], 'Subsurface': 0.01, - 'Subsurface Radius': (1.0, 1.0, 1.0), - 'Subsurface Color': colorramp_1.outputs["Alpha"], 'Subsurface IOR': 1.3, - 'Specular': 0.0, 'Roughness': 2.0, 'Sheen Tint': 0.0, - 'Clearcoat Roughness': 0.0, 'IOR': 1.3, 'Emission Strength': 0.0}) - - translucent_bsdf = nw.new_node(Nodes.TranslucentBSDF, - input_kwargs={'Color': main_color}) - - mix_shader_1 = nw.new_node(Nodes.MixShader, - input_kwargs={'Fac': 0.4, 1: principled_bsdf, 2: translucent_bsdf}) - - diffuse_bsdf_1 = nw.new_node(Nodes.DiffuseBSDF, - input_kwargs={'Color': main_color, - 'Roughness': uniform(0.2, 1.0)}) - - mix_shader_2 = nw.new_node(Nodes.MixShader, - input_kwargs={'Fac': multiply_1, 1: mix_shader_1, 2: diffuse_bsdf_1}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': mix_shader_2}) + colorramp_1.color_ramp.elements[1].color = hsv2rgba( + (uniform(0.0, 0.06), uniform(0.8, 1.0), uniform(0.5, 0.7)) + ) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": colorramp_1.outputs["Color"], + "Subsurface": 0.01, + "Subsurface Radius": (1.0, 1.0, 1.0), + "Subsurface Color": colorramp_1.outputs["Alpha"], + "Subsurface IOR": 1.3, + "Specular": 0.0, + "Roughness": 2.0, + "Sheen Tint": 0.0, + "Clearcoat 
Roughness": 0.0, + "IOR": 1.3, + "Emission Strength": 0.0, + }, + ) + + translucent_bsdf = nw.new_node( + Nodes.TranslucentBSDF, input_kwargs={"Color": main_color} + ) + + mix_shader_1 = nw.new_node( + Nodes.MixShader, + input_kwargs={"Fac": 0.4, 1: principled_bsdf, 2: translucent_bsdf}, + ) + + diffuse_bsdf_1 = nw.new_node( + Nodes.DiffuseBSDF, + input_kwargs={"Color": main_color, "Roughness": uniform(0.2, 1.0)}, + ) + + mix_shader_2 = nw.new_node( + Nodes.MixShader, + input_kwargs={"Fac": multiply_1, 1: mix_shader_1, 2: diffuse_bsdf_1}, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": mix_shader_2} + ) def shader_whitish_green_succulent(nw: NodeWrangler): # Code generated using version 2.4.3 of the node_transpiler - musgrave_texture_1 = nw.new_node(Nodes.MusgraveTexture, - input_kwargs={'Scale': uniform(5.0, 8.0)}) + musgrave_texture_1 = nw.new_node( + Nodes.MusgraveTexture, input_kwargs={"Scale": uniform(5.0, 8.0)} + ) - colorramp_3 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': musgrave_texture_1}) + colorramp_3 = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": musgrave_texture_1}) colorramp_3.color_ramp.elements[0].position = uniform(0.0, 0.3) colorramp_3.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) colorramp_3.color_ramp.elements[1].position = 0.5273 @@ -266,55 +364,83 @@ def shader_whitish_green_succulent(nw: NodeWrangler): texture_coordinate = nw.new_node(Nodes.TextureCoord) - mapping = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': texture_coordinate.outputs["Generated"]}) + mapping = nw.new_node( + Nodes.Mapping, input_kwargs={"Vector": texture_coordinate.outputs["Generated"]} + ) - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': mapping}) + separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": mapping}) l = uniform(0.88, 0.98) - less_than = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz.outputs["Z"], 1: l - 0.05}, - attrs={'operation': 'LESS_THAN'}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: colorramp_3.outputs["Color"], 1: less_than}, - attrs={'operation': 'MULTIPLY'}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: multiply, 1: uniform(1.0, 4.0)}, - attrs={'operation': 'MULTIPLY'}) - - main_color = hsv2rgba((uniform(0.23, 0.25), uniform(0.40, 0.60), uniform(0.18, 0.25))) - colorramp = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': separate_xyz.outputs["Z"]}) + less_than = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["Z"], 1: l - 0.05}, + attrs={"operation": "LESS_THAN"}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: colorramp_3.outputs["Color"], 1: less_than}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply, 1: uniform(1.0, 4.0)}, + attrs={"operation": "MULTIPLY"}, + ) + + main_color = hsv2rgba( + (uniform(0.23, 0.25), uniform(0.40, 0.60), uniform(0.18, 0.25)) + ) + colorramp = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": separate_xyz.outputs["Z"]} + ) colorramp.color_ramp.elements[0].position = l - uniform(0.04, 0.1) - colorramp.color_ramp.elements[0].color = hsv2rgba((uniform(0.20, 0.38), uniform(0.12, 0.25), uniform(0.50, 0.70))) + colorramp.color_ramp.elements[0].color = hsv2rgba( + (uniform(0.20, 0.38), uniform(0.12, 0.25), uniform(0.50, 0.70)) + ) colorramp.color_ramp.elements[1].position = l colorramp.color_ramp.elements[1].color = main_color - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base 
Color': colorramp.outputs["Color"], 'Subsurface': 0.01, - 'Subsurface Radius': (1.0, 1.0, 1.0), - 'Subsurface Color': colorramp.outputs["Color"], 'Subsurface IOR': 1.3, - 'Specular': 0.0, 'Roughness': 2.0, 'Sheen Tint': 0.0, - 'Clearcoat Roughness': 0.0, 'IOR': 1.3, 'Emission Strength': 0.0}) - - translucent_bsdf = nw.new_node(Nodes.TranslucentBSDF, - input_kwargs={'Color': main_color}) - - mix_shader_1 = nw.new_node(Nodes.MixShader, - input_kwargs={'Fac': 0.7, 1: principled_bsdf, 2: translucent_bsdf}) + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": colorramp.outputs["Color"], + "Subsurface": 0.01, + "Subsurface Radius": (1.0, 1.0, 1.0), + "Subsurface Color": colorramp.outputs["Color"], + "Subsurface IOR": 1.3, + "Specular": 0.0, + "Roughness": 2.0, + "Sheen Tint": 0.0, + "Clearcoat Roughness": 0.0, + "IOR": 1.3, + "Emission Strength": 0.0, + }, + ) + + translucent_bsdf = nw.new_node( + Nodes.TranslucentBSDF, input_kwargs={"Color": main_color} + ) + + mix_shader_1 = nw.new_node( + Nodes.MixShader, + input_kwargs={"Fac": 0.7, 1: principled_bsdf, 2: translucent_bsdf}, + ) diffuse = hsv2rgba((uniform(0.23, 0.25), uniform(0.40, 0.60), uniform(0.10, 0.15))) - diffuse_bsdf_1 = nw.new_node(Nodes.DiffuseBSDF, - input_kwargs={'Color': diffuse, 'Roughness': 0.5}) + diffuse_bsdf_1 = nw.new_node( + Nodes.DiffuseBSDF, input_kwargs={"Color": diffuse, "Roughness": 0.5} + ) - mix_shader_2 = nw.new_node(Nodes.MixShader, - input_kwargs={'Fac': multiply_1, 1: mix_shader_1, 2: diffuse_bsdf_1}) + mix_shader_2 = nw.new_node( + Nodes.MixShader, + input_kwargs={"Fac": multiply_1, 1: mix_shader_1, 2: diffuse_bsdf_1}, + ) - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': mix_shader_2}) + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": mix_shader_2} + ) def apply(obj, selection=None, **kwargs): - surface.add_material(obj, shader_green_transition_succulent, selection=selection) \ No newline at end of file + surface.add_material(obj, shader_green_transition_succulent, selection=selection) diff --git a/infinigen/assets/materials/table_marble.py b/infinigen/assets/materials/table_marble.py index 740f9ef75..ae39f7a2e 100644 --- a/infinigen/assets/materials/table_marble.py +++ b/infinigen/assets/materials/table_marble.py @@ -5,143 +5,275 @@ # Acknowledgement: This file draws inspiration https://www.youtube.com/watch?v=wTzk9T06gdw by Ryan King Arts -import bpy -import bpy -import mathutils -from numpy.random import uniform, normal, randint from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category, hsv2rgba -from infinigen.core import surface -def shader_marble(nw: NodeWrangler,**kwargs): + +def shader_marble(nw: NodeWrangler, **kwargs): # Code generated using version 2.6.4 of the node_transpiler texture_coordinate = nw.new_node(Nodes.TextureCoord) - scale = nw.new_node(Nodes.VectorMath, input_kwargs={0: texture_coordinate.outputs["Object"]}, attrs={'operation': 'SCALE'}) + scale = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: texture_coordinate.outputs["Object"]}, + attrs={"operation": "SCALE"}, + ) - vector_rotate = nw.new_node(Nodes.VectorRotate, - input_kwargs={'Vector': scale.outputs["Vector"]}, - attrs={'rotation_type': 'EULER_XYZ'}) + vector_rotate = nw.new_node( + Nodes.VectorRotate, + input_kwargs={"Vector": scale.outputs["Vector"]}, + attrs={"rotation_type": "EULER_XYZ"}, + ) - seed = 
nw.new_node(Nodes.Value, label='seed') + seed = nw.new_node(Nodes.Value, label="seed") seed.outputs[0].default_value = 0.0000 - scale_1 = nw.new_node(Nodes.Value, label='scale') + scale_1 = nw.new_node(Nodes.Value, label="scale") scale_1.outputs[0].default_value = 3.0000 add = nw.new_node(Nodes.Math, input_kwargs={0: scale_1, 1: 1.0000}) - noise_texture_2 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': vector_rotate, 'W': seed, 'Scale': add, 'Detail': 15.0000}, - attrs={'noise_dimensions': '4D'}) - - map_range = nw.new_node(Nodes.MapRange, input_kwargs={'Value': noise_texture_2.outputs["Fac"], 1: 0.4800, 2: 0.6000}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': vector_rotate, 'W': seed, 'Scale': scale_1, 'Detail': 15.0000}, - attrs={'noise_dimensions': '4D'}) - - noise_texture_3 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': noise_texture.outputs["Color"], 'Scale': 8.0000, 'Detail': 15.0000}) - - voronoi_texture = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': noise_texture_3.outputs["Color"], 'W': 1.6400, 'Scale': 3.0000}, - attrs={'feature': 'DISTANCE_TO_EDGE', 'voronoi_dimensions': '4D'}) - - colorramp_1 = nw.new_node(Nodes.ColorRamp, input_kwargs={'Fac': voronoi_texture.outputs["Distance"]}) + noise_texture_2 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": vector_rotate, + "W": seed, + "Scale": add, + "Detail": 15.0000, + }, + attrs={"noise_dimensions": "4D"}, + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": noise_texture_2.outputs["Fac"], 1: 0.4800, 2: 0.6000}, + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": vector_rotate, + "W": seed, + "Scale": scale_1, + "Detail": 15.0000, + }, + attrs={"noise_dimensions": "4D"}, + ) + + noise_texture_3 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": noise_texture.outputs["Color"], + "Scale": 8.0000, + "Detail": 15.0000, + }, + ) + + voronoi_texture = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={ + "Vector": noise_texture_3.outputs["Color"], + "W": 1.6400, + "Scale": 3.0000, + }, + attrs={"feature": "DISTANCE_TO_EDGE", "voronoi_dimensions": "4D"}, + ) + + colorramp_1 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": voronoi_texture.outputs["Distance"]} + ) colorramp_1.color_ramp.elements[0].position = 0.0000 colorramp_1.color_ramp.elements[0].color = [1.0000, 1.0000, 1.0000, 1.0000] colorramp_1.color_ramp.elements[1].position = 0.0300 colorramp_1.color_ramp.elements[1].color = [0.0000, 0.0000, 0.0000, 1.0000] - multiply = nw.new_node(Nodes.Math, + multiply = nw.new_node( + Nodes.Math, input_kwargs={0: map_range.outputs["Result"], 1: colorramp_1.outputs["Color"]}, - attrs={'operation': 'MULTIPLY'}) - - noise_texture_1 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': noise_texture.outputs["Color"], 'W': seed, 'Scale': 8.0000, 'Detail': 15.0000}, - attrs={'noise_dimensions': '4D'}) - - mix_1 = nw.new_node(Nodes.Mix, - input_kwargs={0: 0.8000, 6: noise_texture.outputs["Fac"], 7: noise_texture_1.outputs["Fac"]}, - attrs={'data_type': 'RGBA'}) - - colorramp = nw.new_node(Nodes.ColorRamp, input_kwargs={'Fac': mix_1.outputs[2]}) + attrs={"operation": "MULTIPLY"}, + ) + + noise_texture_1 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": noise_texture.outputs["Color"], + "W": seed, + "Scale": 8.0000, + "Detail": 15.0000, + }, + attrs={"noise_dimensions": "4D"}, + ) + + mix_1 = nw.new_node( + Nodes.Mix, + input_kwargs={ + 0: 0.8000, + 6: 
noise_texture.outputs["Fac"], + 7: noise_texture_1.outputs["Fac"], + }, + attrs={"data_type": "RGBA"}, + ) + + colorramp = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": mix_1.outputs[2]}) colorramp.color_ramp.elements[0].position = 0.3000 colorramp.color_ramp.elements[0].color = [1.0000, 1.0000, 1.0000, 1.0000] colorramp.color_ramp.elements[1].position = 0.9000 colorramp.color_ramp.elements[1].color = [0.0000, 0.0000, 0.0000, 1.0000] - mix = nw.new_node(Nodes.Mix, - input_kwargs={0: multiply, 6: colorramp.outputs["Color"], 7: (0.0376, 0.0179, 0.0033, 1.0000)}, - attrs={'data_type': 'RGBA'}) + mix = nw.new_node( + Nodes.Mix, + input_kwargs={ + 0: multiply, + 6: colorramp.outputs["Color"], + 7: (0.0376, 0.0179, 0.0033, 1.0000), + }, + attrs={"data_type": "RGBA"}, + ) + + bump = nw.new_node( + "ShaderNodeBump", input_kwargs={"Strength": 0.0200, "Height": multiply} + ) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": mix_1.outputs[2], + "Specular": 0.6000, + "Roughness": 0.1000, + "Normal": bump, + }, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, + input_kwargs={"Surface": principled_bsdf}, + attrs={"is_active_output": True}, + ) - bump = nw.new_node('ShaderNodeBump', input_kwargs={'Strength': 0.0200, 'Height': multiply}) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': mix_1.outputs[2], 'Specular': 0.6000, 'Roughness': 0.1000, 'Normal': bump}) - - material_output = nw.new_node(Nodes.MaterialOutput, input_kwargs={'Surface': principled_bsdf}, attrs={'is_active_output': True}) def shader_wood(nw: NodeWrangler, **kwargs): # Code generated using version 2.6.4 of the node_transpiler texture_coordinate = nw.new_node(Nodes.TextureCoord) - scale = nw.new_node(Nodes.VectorMath, input_kwargs={0: texture_coordinate.outputs["Object"]}, attrs={'operation': 'SCALE'}) + scale = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: texture_coordinate.outputs["Object"]}, + attrs={"operation": "SCALE"}, + ) - vector_rotate = nw.new_node(Nodes.VectorRotate, - input_kwargs={'Vector': scale.outputs["Vector"]}, - attrs={'rotation_type': 'EULER_XYZ'}) + vector_rotate = nw.new_node( + Nodes.VectorRotate, + input_kwargs={"Vector": scale.outputs["Vector"]}, + attrs={"rotation_type": "EULER_XYZ"}, + ) - mapping_2 = nw.new_node(Nodes.Mapping, input_kwargs={'Vector': vector_rotate, 'Scale': (5.0000, 100.0000, 100.0000)}) + mapping_2 = nw.new_node( + Nodes.Mapping, + input_kwargs={"Vector": vector_rotate, "Scale": (5.0000, 100.0000, 100.0000)}, + ) - seed = nw.new_node(Nodes.Value, label='seed') + seed = nw.new_node(Nodes.Value, label="seed") seed.outputs[0].default_value = 0.0000 - musgrave_texture_2 = nw.new_node(Nodes.MusgraveTexture, - input_kwargs={'Vector': mapping_2, 'W': seed, 'Scale': 10.0000, 'Detail': 15.0000, 'Dimension': 7.0000}, - attrs={'musgrave_dimensions': '4D'}) - - map_range_2 = nw.new_node(Nodes.MapRange, input_kwargs={'Value': musgrave_texture_2, 3: 1.0000, 4: -1.0000}) - - mapping_1 = nw.new_node(Nodes.Mapping, input_kwargs={'Vector': vector_rotate}) - - noise_texture_1 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': mapping_1, 'W': seed, 'Scale': 0.5000, 'Detail': 1.0000, 'Distortion': 1.1000}, - attrs={'noise_dimensions': '4D'}) - - musgrave_texture_1 = nw.new_node(Nodes.MusgraveTexture, - input_kwargs={'W': seed, 'Scale': noise_texture_1.outputs["Fac"], 'Detail': 15.0000, 'Dimension': 0.2000, 'Lacunarity': 2.4000}, - attrs={'musgrave_dimensions': '4D'}) - - map_range = 
nw.new_node(Nodes.MapRange, input_kwargs={'Value': musgrave_texture_1, 3: -1.4000, 4: 1.5000}) - - map_range_1 = nw.new_node(Nodes.MapRange, input_kwargs={'Value': map_range.outputs["Result"], 3: 1.0000, 4: 0.5000}) - - mapping = nw.new_node(Nodes.Mapping, input_kwargs={'Vector': vector_rotate, 'Scale': (0.1500, 1.0000, 0.1500)}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': mapping, 'W': seed, 'Detail': 5.0000, 'Distortion': 1.0000}, - attrs={'noise_dimensions': '4D'}) - - musgrave_texture = nw.new_node(Nodes.MusgraveTexture, - input_kwargs={'Vector': noise_texture.outputs["Fac"], 'W': seed, 'Scale': 4.0000, 'Detail': 10.0000, 'Dimension': 0.0000}, - attrs={'musgrave_dimensions': '4D'}) - - mix = nw.new_node(Nodes.Mix, + musgrave_texture_2 = nw.new_node( + Nodes.MusgraveTexture, + input_kwargs={ + "Vector": mapping_2, + "W": seed, + "Scale": 10.0000, + "Detail": 15.0000, + "Dimension": 7.0000, + }, + attrs={"musgrave_dimensions": "4D"}, + ) + + map_range_2 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": musgrave_texture_2, 3: 1.0000, 4: -1.0000}, + ) + + mapping_1 = nw.new_node(Nodes.Mapping, input_kwargs={"Vector": vector_rotate}) + + noise_texture_1 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": mapping_1, + "W": seed, + "Scale": 0.5000, + "Detail": 1.0000, + "Distortion": 1.1000, + }, + attrs={"noise_dimensions": "4D"}, + ) + + musgrave_texture_1 = nw.new_node( + Nodes.MusgraveTexture, + input_kwargs={ + "W": seed, + "Scale": noise_texture_1.outputs["Fac"], + "Detail": 15.0000, + "Dimension": 0.2000, + "Lacunarity": 2.4000, + }, + attrs={"musgrave_dimensions": "4D"}, + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": musgrave_texture_1, 3: -1.4000, 4: 1.5000}, + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": map_range.outputs["Result"], 3: 1.0000, 4: 0.5000}, + ) + + mapping = nw.new_node( + Nodes.Mapping, + input_kwargs={"Vector": vector_rotate, "Scale": (0.1500, 1.0000, 0.1500)}, + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": mapping, + "W": seed, + "Detail": 5.0000, + "Distortion": 1.0000, + }, + attrs={"noise_dimensions": "4D"}, + ) + + musgrave_texture = nw.new_node( + Nodes.MusgraveTexture, + input_kwargs={ + "Vector": noise_texture.outputs["Fac"], + "W": seed, + "Scale": 4.0000, + "Detail": 10.0000, + "Dimension": 0.0000, + }, + attrs={"musgrave_dimensions": "4D"}, + ) + + mix = nw.new_node( + Nodes.Mix, input_kwargs={6: noise_texture.outputs["Fac"], 7: musgrave_texture}, - attrs={'data_type': 'RGBA'}) + attrs={"data_type": "RGBA"}, + ) - mix_1 = nw.new_node(Nodes.Mix, + mix_1 = nw.new_node( + Nodes.Mix, input_kwargs={0: 0.9000, 6: map_range_1.outputs["Result"], 7: mix.outputs[2]}, - attrs={'data_type': 'RGBA', 'blend_type': 'MULTIPLY'}) + attrs={"data_type": "RGBA", "blend_type": "MULTIPLY"}, + ) - mix_2 = nw.new_node(Nodes.Mix, + mix_2 = nw.new_node( + Nodes.Mix, input_kwargs={0: 0.9500, 6: map_range_2.outputs["Result"], 7: mix_1.outputs[2]}, - attrs={'data_type': 'RGBA', 'blend_type': 'MULTIPLY'}) + attrs={"data_type": "RGBA", "blend_type": "MULTIPLY"}, + ) rgb = nw.new_node(Nodes.RGB) rgb.outputs[0].default_value = (0.0242, 0.0056, 0.0027, 1.0000) @@ -149,10 +281,23 @@ def shader_wood(nw: NodeWrangler, **kwargs): rgb_1 = nw.new_node(Nodes.RGB) rgb_1.outputs[0].default_value = (0.5089, 0.2122, 0.0685, 1.0000) - mix_3 = nw.new_node(Nodes.Mix, input_kwargs={0: mix_2.outputs[2], 6: rgb, 7: rgb_1}, attrs={'data_type': 'RGBA'}) 
- - bump = nw.new_node('ShaderNodeBump', input_kwargs={'Strength': 0.2000, 'Height': mix_2.outputs[2]}) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, input_kwargs={'Base Color': mix_3.outputs[2], 'Normal': bump}) - - material_output = nw.new_node(Nodes.MaterialOutput, input_kwargs={'Surface': principled_bsdf}, attrs={'is_active_output': True}) + mix_3 = nw.new_node( + Nodes.Mix, + input_kwargs={0: mix_2.outputs[2], 6: rgb, 7: rgb_1}, + attrs={"data_type": "RGBA"}, + ) + + bump = nw.new_node( + "ShaderNodeBump", input_kwargs={"Strength": 0.2000, "Height": mix_2.outputs[2]} + ) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={"Base Color": mix_3.outputs[2], "Normal": bump}, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, + input_kwargs={"Surface": principled_bsdf}, + attrs={"is_active_output": True}, + ) diff --git a/infinigen/assets/materials/table_materials.py b/infinigen/assets/materials/table_materials.py index 96c8b5756..3c00da9d6 100644 --- a/infinigen/assets/materials/table_materials.py +++ b/infinigen/assets/materials/table_materials.py @@ -4,149 +4,290 @@ # Authors: Yiming Zuo -import bpy -import bpy -import mathutils import numpy as np -from numpy.random import uniform, normal, randint +from numpy.random import uniform + from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category, hsv2rgba, rgb2hsv -from infinigen.core import surface +from infinigen.core.util.color import hsv2rgba, rgb2hsv from infinigen.core.util.random import log_uniform -def shader_marble(nw: NodeWrangler,**kwargs): +def shader_marble(nw: NodeWrangler, **kwargs): # Code generated using version 2.6.4 of the node_transpiler texture_coordinate = nw.new_node(Nodes.TextureCoord) - scale = nw.new_node(Nodes.VectorMath, input_kwargs={0: texture_coordinate.outputs["Object"]}, attrs={'operation': 'SCALE'}) + scale = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: texture_coordinate.outputs["Object"]}, + attrs={"operation": "SCALE"}, + ) - vector_rotate = nw.new_node(Nodes.VectorRotate, - input_kwargs={'Vector': scale.outputs["Vector"]}, - attrs={'rotation_type': 'EULER_XYZ'}) + vector_rotate = nw.new_node( + Nodes.VectorRotate, + input_kwargs={"Vector": scale.outputs["Vector"]}, + attrs={"rotation_type": "EULER_XYZ"}, + ) - seed = nw.new_node(Nodes.Value, label='seed') + seed = nw.new_node(Nodes.Value, label="seed") seed.outputs[0].default_value = 0.0000 - scale_1 = nw.new_node(Nodes.Value, label='scale') + scale_1 = nw.new_node(Nodes.Value, label="scale") scale_1.outputs[0].default_value = 3.0000 add = nw.new_node(Nodes.Math, input_kwargs={0: scale_1, 1: 1.0000}) - noise_texture_2 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': vector_rotate, 'W': seed, 'Scale': add, 'Detail': 15.0000}, - attrs={'noise_dimensions': '4D'}) - - map_range = nw.new_node(Nodes.MapRange, input_kwargs={'Value': noise_texture_2.outputs["Fac"], 1: 0.4800, 2: 0.6000}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': vector_rotate, 'W': seed, 'Scale': scale_1, 'Detail': 15.0000}, - attrs={'noise_dimensions': '4D'}) - - noise_texture_3 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': noise_texture.outputs["Fac"], 'Scale': 8.0000, 'Detail': 15.0000}) - - voronoi_texture = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': noise_texture_3.outputs["Fac"], 'W': 1.6400, 'Scale': 3.0000}, - attrs={'feature': 'DISTANCE_TO_EDGE', 
'voronoi_dimensions': '4D'}) - - colorramp_1 = nw.new_node(Nodes.ColorRamp, input_kwargs={'Fac': voronoi_texture.outputs["Distance"]}) + noise_texture_2 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": vector_rotate, + "W": seed, + "Scale": add, + "Detail": 15.0000, + }, + attrs={"noise_dimensions": "4D"}, + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": noise_texture_2.outputs["Fac"], 1: 0.4800, 2: 0.6000}, + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": vector_rotate, + "W": seed, + "Scale": scale_1, + "Detail": 15.0000, + }, + attrs={"noise_dimensions": "4D"}, + ) + + noise_texture_3 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": noise_texture.outputs["Fac"], + "Scale": 8.0000, + "Detail": 15.0000, + }, + ) + + voronoi_texture = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={ + "Vector": noise_texture_3.outputs["Fac"], + "W": 1.6400, + "Scale": 3.0000, + }, + attrs={"feature": "DISTANCE_TO_EDGE", "voronoi_dimensions": "4D"}, + ) + + colorramp_1 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": voronoi_texture.outputs["Distance"]} + ) colorramp_1.color_ramp.elements[0].position = 0.0000 colorramp_1.color_ramp.elements[0].color = [1.0000, 1.0000, 1.0000, 1.0000] colorramp_1.color_ramp.elements[1].position = 0.0300 colorramp_1.color_ramp.elements[1].color = [0.0000, 0.0000, 0.0000, 1.0000] - multiply = nw.new_node(Nodes.Math, + multiply = nw.new_node( + Nodes.Math, input_kwargs={0: map_range.outputs["Result"], 1: colorramp_1.outputs["Color"]}, - attrs={'operation': 'MULTIPLY'}) - - noise_texture_1 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': noise_texture.outputs["Fac"], 'W': seed, 'Scale': 8.0000, 'Detail': 15.0000}, - attrs={'noise_dimensions': '4D'}) - - mix_1 = nw.new_node(Nodes.Mix, - input_kwargs={0: 0.8000, 6: noise_texture.outputs["Fac"], 7: noise_texture_1.outputs["Fac"]}, - attrs={'data_type': 'RGBA'}) - - colorramp = nw.new_node(Nodes.ColorRamp, input_kwargs={'Fac': mix_1.outputs[2]}) + attrs={"operation": "MULTIPLY"}, + ) + + noise_texture_1 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": noise_texture.outputs["Fac"], + "W": seed, + "Scale": 8.0000, + "Detail": 15.0000, + }, + attrs={"noise_dimensions": "4D"}, + ) + + mix_1 = nw.new_node( + Nodes.Mix, + input_kwargs={ + 0: 0.8000, + 6: noise_texture.outputs["Fac"], + 7: noise_texture_1.outputs["Fac"], + }, + attrs={"data_type": "RGBA"}, + ) + + colorramp = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": mix_1.outputs[2]}) colorramp.color_ramp.elements[0].position = 0.3000 colorramp.color_ramp.elements[0].color = [1.0000, 1.0000, 1.0000, 1.0000] colorramp.color_ramp.elements[1].position = 0.9000 colorramp.color_ramp.elements[1].color = [0.0000, 0.0000, 0.0000, 1.0000] - mix = nw.new_node(Nodes.Mix, - input_kwargs={0: multiply, 6: colorramp.outputs["Color"], 7: (0.0376, 0.0179, 0.0033, 1.0000)}, - attrs={'data_type': 'RGBA'}) - - bump = nw.new_node('ShaderNodeBump', input_kwargs={'Strength': 0.0200, 'Height': multiply}) + mix = nw.new_node( + Nodes.Mix, + input_kwargs={ + 0: multiply, + 6: colorramp.outputs["Color"], + 7: (0.0376, 0.0179, 0.0033, 1.0000), + }, + attrs={"data_type": "RGBA"}, + ) + + bump = nw.new_node( + "ShaderNodeBump", input_kwargs={"Strength": 0.0200, "Height": multiply} + ) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": mix_1.outputs[2], + "Specular": 0.6000, + "Roughness": 0.1000, + "Normal": bump, + }, + ) + + 
material_output = nw.new_node( + Nodes.MaterialOutput, + input_kwargs={"Surface": principled_bsdf}, + attrs={"is_active_output": True}, + ) - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': mix_1.outputs[2], 'Specular': 0.6000, 'Roughness': 0.1000, 'Normal': bump}) - - material_output = nw.new_node(Nodes.MaterialOutput, input_kwargs={'Surface': principled_bsdf}, attrs={'is_active_output': True}) def perturb(hsv): - return np.array([hsv[0]+uniform(-.02,.02), hsv[1]+uniform(-.2,.2), hsv[2]*log_uniform(.5,2.)]) + return np.array( + [ + hsv[0] + uniform(-0.02, 0.02), + hsv[1] + uniform(-0.2, 0.2), + hsv[2] * log_uniform(0.5, 2.0), + ] + ) + def shader_wood(nw: NodeWrangler, **kwargs): # Code generated using version 2.6.4 of the node_transpiler texture_coordinate = nw.new_node(Nodes.TextureCoord) - scale = nw.new_node(Nodes.VectorMath, input_kwargs={0: texture_coordinate.outputs["Object"]}, attrs={'operation': 'SCALE'}) + scale = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: texture_coordinate.outputs["Object"]}, + attrs={"operation": "SCALE"}, + ) - vector_rotate = nw.new_node(Nodes.VectorRotate, - input_kwargs={'Vector': scale.outputs["Vector"]}, - attrs={'rotation_type': 'EULER_XYZ'}) + vector_rotate = nw.new_node( + Nodes.VectorRotate, + input_kwargs={"Vector": scale.outputs["Vector"]}, + attrs={"rotation_type": "EULER_XYZ"}, + ) - mapping_2 = nw.new_node(Nodes.Mapping, input_kwargs={'Vector': vector_rotate, 'Scale': (5.0000, 100.0000, 100.0000)}) + mapping_2 = nw.new_node( + Nodes.Mapping, + input_kwargs={"Vector": vector_rotate, "Scale": (5.0000, 100.0000, 100.0000)}, + ) - seed = nw.new_node(Nodes.Value, label='seed') + seed = nw.new_node(Nodes.Value, label="seed") seed.outputs[0].default_value = 0.0000 - musgrave_texture_2 = nw.new_node(Nodes.MusgraveTexture, - input_kwargs={'Vector': mapping_2, 'W': seed, 'Scale': 10.0000, 'Detail': 15.0000, 'Dimension': 7.0000}, - attrs={'musgrave_dimensions': '4D'}) - - map_range_2 = nw.new_node(Nodes.MapRange, input_kwargs={'Value': musgrave_texture_2, 3: 1.0000, 4: -1.0000}) - - mapping_1 = nw.new_node(Nodes.Mapping, input_kwargs={'Vector': vector_rotate}) - - noise_texture_1 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': mapping_1, 'W': seed, 'Scale': 0.5000, 'Detail': 1.0000, 'Distortion': 1.1000}, - attrs={'noise_dimensions': '4D'}) - - musgrave_texture_1 = nw.new_node(Nodes.MusgraveTexture, - input_kwargs={'W': seed, 'Scale': noise_texture_1.outputs["Fac"], 'Detail': 15.0000, 'Dimension': 0.2000, 'Lacunarity': 2.4000}, - attrs={'musgrave_dimensions': '4D'}) - - map_range = nw.new_node(Nodes.MapRange, input_kwargs={'Value': musgrave_texture_1, 3: -1.4000, 4: 1.5000}) - - map_range_1 = nw.new_node(Nodes.MapRange, input_kwargs={'Value': map_range.outputs["Result"], 3: 1.0000, 4: 0.5000}) - - mapping = nw.new_node(Nodes.Mapping, input_kwargs={'Vector': vector_rotate, 'Scale': (0.1500, 1.0000, 0.1500)}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': mapping, 'W': seed, 'Detail': 5.0000, 'Distortion': 1.0000}, - attrs={'noise_dimensions': '4D'}) - - musgrave_texture = nw.new_node(Nodes.MusgraveTexture, - input_kwargs={'Vector': noise_texture.outputs["Fac"], 'W': seed, 'Scale': 4.0000, 'Detail': 10.0000, 'Dimension': 0.0000}, - attrs={'musgrave_dimensions': '4D'}) - - mix = nw.new_node(Nodes.Mix, + musgrave_texture_2 = nw.new_node( + Nodes.MusgraveTexture, + input_kwargs={ + "Vector": mapping_2, + "W": seed, + "Scale": 10.0000, + "Detail": 15.0000, + "Dimension": 
7.0000, + }, + attrs={"musgrave_dimensions": "4D"}, + ) + + map_range_2 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": musgrave_texture_2, 3: 1.0000, 4: -1.0000}, + ) + + mapping_1 = nw.new_node(Nodes.Mapping, input_kwargs={"Vector": vector_rotate}) + + noise_texture_1 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": mapping_1, + "W": seed, + "Scale": 0.5000, + "Detail": 1.0000, + "Distortion": 1.1000, + }, + attrs={"noise_dimensions": "4D"}, + ) + + musgrave_texture_1 = nw.new_node( + Nodes.MusgraveTexture, + input_kwargs={ + "W": seed, + "Scale": noise_texture_1.outputs["Fac"], + "Detail": 15.0000, + "Dimension": 0.2000, + "Lacunarity": 2.4000, + }, + attrs={"musgrave_dimensions": "4D"}, + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": musgrave_texture_1, 3: -1.4000, 4: 1.5000}, + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": map_range.outputs["Result"], 3: 1.0000, 4: 0.5000}, + ) + + mapping = nw.new_node( + Nodes.Mapping, + input_kwargs={"Vector": vector_rotate, "Scale": (0.1500, 1.0000, 0.1500)}, + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": mapping, + "W": seed, + "Detail": 5.0000, + "Distortion": 1.0000, + }, + attrs={"noise_dimensions": "4D"}, + ) + + musgrave_texture = nw.new_node( + Nodes.MusgraveTexture, + input_kwargs={ + "Vector": noise_texture.outputs["Fac"], + "W": seed, + "Scale": 4.0000, + "Detail": 10.0000, + "Dimension": 0.0000, + }, + attrs={"musgrave_dimensions": "4D"}, + ) + + mix = nw.new_node( + Nodes.Mix, input_kwargs={6: noise_texture.outputs["Fac"], 7: musgrave_texture}, - attrs={'data_type': 'RGBA'}) + attrs={"data_type": "RGBA"}, + ) - mix_1 = nw.new_node(Nodes.Mix, + mix_1 = nw.new_node( + Nodes.Mix, input_kwargs={0: 0.9000, 6: map_range_1.outputs["Result"], 7: mix.outputs[2]}, - attrs={'data_type': 'RGBA', 'blend_type': 'MULTIPLY'}) + attrs={"data_type": "RGBA", "blend_type": "MULTIPLY"}, + ) - mix_2 = nw.new_node(Nodes.Mix, + mix_2 = nw.new_node( + Nodes.Mix, input_kwargs={0: 0.9500, 6: map_range_2.outputs["Result"], 7: mix_1.outputs[2]}, - attrs={'data_type': 'RGBA', 'blend_type': 'MULTIPLY'}) + attrs={"data_type": "RGBA", "blend_type": "MULTIPLY"}, + ) rgb = nw.new_node(Nodes.RGB) rgb.outputs[0].default_value = hsv2rgba(perturb(rgb2hsv(0.0242, 0.0056, 0.0027))) @@ -154,10 +295,23 @@ def shader_wood(nw: NodeWrangler, **kwargs): rgb_1 = nw.new_node(Nodes.RGB) rgb_1.outputs[0].default_value = hsv2rgba(perturb(rgb2hsv(0.5089, 0.2122, 0.0685))) - mix_3 = nw.new_node(Nodes.Mix, input_kwargs={0: mix_2.outputs[2], 6: rgb, 7: rgb_1}, attrs={'data_type': 'RGBA'}) - - bump = nw.new_node('ShaderNodeBump', input_kwargs={'Strength': 0.2000, 'Height': mix_2.outputs[2]}) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, input_kwargs={'Base Color': mix_3.outputs[2], 'Normal': bump}) - - material_output = nw.new_node(Nodes.MaterialOutput, input_kwargs={'Surface': principled_bsdf}, attrs={'is_active_output': True}) + mix_3 = nw.new_node( + Nodes.Mix, + input_kwargs={0: mix_2.outputs[2], 6: rgb, 7: rgb_1}, + attrs={"data_type": "RGBA"}, + ) + + bump = nw.new_node( + "ShaderNodeBump", input_kwargs={"Strength": 0.2000, "Height": mix_2.outputs[2]} + ) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={"Base Color": mix_3.outputs[2], "Normal": bump}, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, + input_kwargs={"Surface": principled_bsdf}, + attrs={"is_active_output": True}, + ) diff --git 
a/infinigen/assets/materials/text.py b/infinigen/assets/materials/text.py index fe7ee649a..4ad34cccc 100644 --- a/infinigen/assets/materials/text.py +++ b/infinigen/assets/materials/text.py @@ -1,7 +1,7 @@ # Copyright (c) Princeton University. # This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. -# Authors: +# Authors: # - Lingjie Mei: text & art generators # - Stamatis Alexandropoulos: image postprocessing effects # Acknowledgement: This file draws inspiration from https://www.youtube.com/watch?v=hpamCaVrbTk by Joey Carlino @@ -9,62 +9,78 @@ import colorsys import inspect import io -import string import logging -import colorsys +import bpy import matplotlib.font_manager import matplotlib.pyplot as plt -from matplotlib.patches import Arrow, BoxStyle, Circle, Ellipse, FancyBboxPatch, Rectangle, RegularPolygon, Wedge - -import bpy import numpy as np +from matplotlib.patches import ( + Arrow, + BoxStyle, + Circle, + Ellipse, + FancyBboxPatch, + Rectangle, + RegularPolygon, + Wedge, +) +from numpy.random import rand, uniform from PIL import Image -from numpy.random import uniform, rand +from infinigen.assets.materials import common from infinigen.assets.utils.decorate import decimate from infinigen.assets.utils.misc import generate_text from infinigen.assets.utils.object import new_plane from infinigen.assets.utils.uv import compute_uv_direction -from infinigen.assets.materials import common - from infinigen.core.nodes.node_info import Nodes from infinigen.core.nodes.node_wrangler import NodeWrangler +from infinigen.core.util import blender as butil from infinigen.core.util.math import FixedSeed, clip_gaussian from infinigen.core.util.random import log_uniform -from infinigen.core.util import blender as butil - from infinigen.core.util.random import random_general as rg logger = logging.getLogger(__name__) + class Text: font_names_all = matplotlib.font_manager.get_font_names() - default_font_name = 'DejaVu Sans' - patch_fns = 'weighted_choice', (2, Circle), (4, Rectangle), (1, Wedge), (1, RegularPolygon), (1, Ellipse), ( - 2, Arrow), (2, FancyBboxPatch) - hatches = {'/', '\\', '|', '-', '+', 'x', 'o', 'O', '.', '*'} - font_weights = ['normal', 'bold', 'heavy'] - font_styles = ['normal', 'italic', 'oblique'] + default_font_name = "DejaVu Sans" + patch_fns = ( + "weighted_choice", + (2, Circle), + (4, Rectangle), + (1, Wedge), + (1, RegularPolygon), + (1, Ellipse), + (2, Arrow), + (2, FancyBboxPatch), + ) + hatches = {"/", "\\", "|", "-", "+", "x", "o", "O", ".", "*"} + font_weights = ["normal", "bold", "heavy"] + font_styles = ["normal", "italic", "oblique"] def __init__(self, factory_seed, has_barcode=True, emission=0): self.factory_seed = factory_seed with FixedSeed(self.factory_seed): self.size = 4 self.dpi = 100 - self.colormap = self.build_sequential_colormap() if uniform() < .5 else \ - self.build_diverging_colormap() - self.white_chance = .03 - self.black_chance = .05 + self.colormap = ( + self.build_sequential_colormap() + if uniform() < 0.5 + else self.build_diverging_colormap() + ) + self.white_chance = 0.03 + self.black_chance = 0.05 self.n_patches = np.random.randint(5, 8) - self.force_horizontal = uniform() < .75 + self.force_horizontal = uniform() < 0.75 self.font_names = np.random.choice(self.font_names_all, 3) self.n_texts = np.random.randint(2, 4) - self.n_barcodes = 1 if has_barcode and uniform() < .5 else 0 - self.barcode_scale = uniform(.3, .6) + self.n_barcodes = 1 if has_barcode and uniform() < 
0.5 else 0 + self.barcode_scale = uniform(0.3, 0.6) self.barcode_length = np.random.randint(25, 40) self.barcode_aspect = log_uniform(1.5, 3) @@ -73,31 +89,48 @@ def __init__(self, factory_seed, has_barcode=True, emission=0): @staticmethod def build_diverging_colormap(): count = 20 - hue = (uniform() + np.linspace(0, .5, count)) % 1 - mid = uniform(.6, .8) + hue = (uniform() + np.linspace(0, 0.5, count)) % 1 + mid = uniform(0.6, 0.8) lightness = np.concatenate( - [np.linspace(uniform(.1, .3), mid, count // 2), np.linspace(mid, uniform(.1, .3), count // 2)] + [ + np.linspace(uniform(0.1, 0.3), mid, count // 2), + np.linspace(mid, uniform(0.1, 0.3), count // 2), + ] + ) + saturation = np.concatenate( + [np.linspace(1, 0.5, count // 2), np.linspace(0.5, 1, count // 2)] ) - saturation = np.concatenate([np.linspace(1, .5, count // 2), np.linspace(.5, 1, count // 2)]) # TODO hack saturation *= uniform(0, 1) lightness *= uniform(0.5, 1) - return np.array([colorsys.hls_to_rgb(h, l, s) for h, l, s in zip(hue, lightness, saturation)]) + return np.array( + [ + colorsys.hls_to_rgb(h, l, s) + for h, l, s in zip(hue, lightness, saturation) + ] + ) @staticmethod def build_sequential_colormap(): count = 20 - hue = (uniform() + np.linspace(0, .5, count)) % 1 - lightness = np.linspace(uniform(.0), uniform(.6, .8), count) - saturation = np.concatenate([np.linspace(1, .5, count // 2), np.linspace(.5, 1, count // 2)]) + hue = (uniform() + np.linspace(0, 0.5, count)) % 1 + lightness = np.linspace(uniform(0.0), uniform(0.6, 0.8), count) + saturation = np.concatenate( + [np.linspace(1, 0.5, count // 2), np.linspace(0.5, 1, count // 2)] + ) # TODO hack saturation *= uniform(0, 1) lightness *= uniform(0.5, 1) - return np.array([colorsys.hls_to_rgb(h, l, s) for h, l, s in zip(hue, lightness, saturation)]) + return np.array( + [ + colorsys.hls_to_rgb(h, l, s) + for h, l, s in zip(hue, lightness, saturation) + ] + ) @property def random_color(self): @@ -113,7 +146,7 @@ def random_color(self): def random_colors(self): while True: c, d = self.random_color, self.random_color - if np.abs(c - d).sum() > .2: + if np.abs(c - d).sum() > 0.2: return c, d def build_image(self, bbox): @@ -122,65 +155,93 @@ def build_image(self, bbox): ax.set_facecolor(self.random_color) locs = self.get_locs(bbox, self.n_patches + self.n_texts + self.n_barcodes) self.add_divider(bbox) - self.add_patches(locs[:self.n_patches], bbox) - self.add_texts(locs[self.n_patches:self.n_patches + self.n_texts]) - self.add_barcodes(locs[self.n_patches + self.n_texts:]) + self.add_patches(locs[: self.n_patches], bbox) + self.add_texts(locs[self.n_patches : self.n_patches + self.n_texts]) + self.add_barcodes(locs[self.n_patches + self.n_texts :]) buffer = io.BytesIO() - fig.savefig(buffer, format='png') + fig.savefig(buffer, format="png") buffer.seek(0) size = self.size * self.dpi - image = bpy.data.images.new('text_texture', width=size, height=size, alpha=True) - data = np.asarray(Image.open(buffer), dtype=np.float32)[::-1, :] / 255. 
+ image = bpy.data.images.new("text_texture", width=size, height=size, alpha=True) + data = np.asarray(Image.open(buffer), dtype=np.float32)[::-1, :] / 255.0 image.pixels.foreach_set(data.ravel()) image.pack() - plt.close('all') + plt.close("all") plt.clf() return image @staticmethod def loc_uniform(min_, max_, size=None): - ratio = .1 - return uniform(min_ + ratio * (max_ - min_), min_ + (1 - ratio) * (max_ - min_), size) + ratio = 0.1 + return uniform( + min_ + ratio * (max_ - min_), min_ + (1 - ratio) * (max_ - min_), size + ) @staticmethod def scale_uniform(min_, max_): - return (max_ - min_) * log_uniform(.2, .8) + return (max_ - min_) * log_uniform(0.2, 0.8) def get_locs(self, bbox, n): m = 8 * n - x, y = self.loc_uniform(bbox[0], bbox[1], m), self.loc_uniform(bbox[2], bbox[3], m) + x, y = ( + self.loc_uniform(bbox[0], bbox[1], m), + self.loc_uniform(bbox[2], bbox[3], m), + ) return decimate(np.stack([x, y], -1), n) def add_divider(self, rs): - if uniform() < .6: return - a = 0 if uniform() < .7 else uniform(5, 10) + if uniform() < 0.6: + return + a = 0 if uniform() < 0.7 else uniform(5, 10) x, y = self.loc_uniform(rs[0], rs[1]), self.loc_uniform(rs[2], rs[3]) if rs[0] == 0 or self.force_horizontal: - args_list = [[(0, y), 2, 2, a], [(0, y), 2, -2, -a], [(1, y), -2, -2, a], [(1, y), -2, 2, -a]] + args_list = [ + [(0, y), 2, 2, a], + [(0, y), 2, -2, -a], + [(1, y), -2, -2, a], + [(1, y), -2, 2, -a], + ] else: - args_list = [[(x, 0), -2, 2, a], [(x, 0), 2, 2, -a], [(x, 1), 2, -2, a], [(x, 1), -2, -2, -a]] + args_list = [ + [(x, 0), -2, 2, a], + [(x, 0), 2, 2, -a], + [(x, 1), 2, -2, a], + [(x, 1), -2, -2, -a], + ] args = args_list[np.random.randint(len(args_list))] - plt.gca().add_patch(Rectangle(*args[:-1], angle=args[-1], color=self.random_color)) + plt.gca().add_patch( + Rectangle(*args[:-1], angle=args[-1], color=self.random_color) + ) def add_patches(self, locs, bbox): for x, y in locs: - w, h = self.scale_uniform(bbox[0], bbox[1]), self.scale_uniform(bbox[2], bbox[3]) + w, h = ( + self.scale_uniform(bbox[0], bbox[1]), + self.scale_uniform(bbox[2], bbox[3]), + ) x_, y_ = x - w / 2, y - h / 2 r = min(w, h) / 2 fn = rg(self.patch_fns) kwargs = { - 'alpha': uniform(.5, .8) if uniform() < .2 else 1, - 'fill': uniform() < .2, - 'angle': 0 if uniform() < .8 else uniform(-30, 30), - 'orientation': uniform(0, np.pi * 2) + "alpha": uniform(0.5, 0.8) if uniform() < 0.2 else 1, + "fill": uniform() < 0.2, + "angle": 0 if uniform() < 0.8 else uniform(-30, 30), + "orientation": uniform(0, np.pi * 2), + } + kwargs = { + k: kwargs[k] + for k, v in inspect.signature(fn).parameters.items() + if k in kwargs } - kwargs = {k: kwargs[k] for k, v in inspect.signature(fn).parameters.items() if k in kwargs} face_color, edge_color = self.random_colors kwargs.update( { - 'facecolor': face_color, 'edgecolor': edge_color, - 'hatch': np.random.choice(list(self.hatches)) if uniform() < .3 else 'none', - 'linewidth': uniform(2, 5) + "facecolor": face_color, + "edgecolor": edge_color, + "hatch": np.random.choice(list(self.hatches)) + if uniform() < 0.3 + else "none", + "linewidth": uniform(2, 5), } ) match fn.__name__: @@ -190,42 +251,78 @@ def add_patches(self, locs, bbox): patch = Rectangle((x_, y_), w, h, **kwargs) case Wedge.__name__: start = uniform(0, 360) - patch = Wedge((x, y), r, start, start + uniform(0, 360), width=uniform(.2, .8) * r, **kwargs) + patch = Wedge( + (x, y), + r, + start, + start + uniform(0, 360), + width=uniform(0.2, 0.8) * r, + **kwargs, + ) case RegularPolygon.__name__: - patch = 
RegularPolygon((x, y), np.random.randint(3, 9), radius=r, **kwargs) + patch = RegularPolygon( + (x, y), np.random.randint(3, 9), radius=r, **kwargs + ) case Ellipse.__name__: patch = Ellipse((x, y), w, h, **kwargs) case Arrow.__name__: - w_, h_ = (w if uniform() < .5 else -w), (h if uniform() < .5 else -h) - patch = Arrow(x - w_ / 2, y - h_ / 2, w, h, width=log_uniform(.6, 1.5), **kwargs) + w_, h_ = ( + (w if uniform() < 0.5 else -w), + (h if uniform() < 0.5 else -h), + ) + patch = Arrow( + x - w_ / 2, + y - h_ / 2, + w, + h, + width=log_uniform(0.6, 1.5), + **kwargs, + ) case FancyBboxPatch.__name__: - pad = uniform(.2, .4) * min(w, h) - box_style = np.random.choice(list(BoxStyle.get_styles().values()))(pad=pad) + pad = uniform(0.2, 0.4) * min(w, h) + box_style = np.random.choice(list(BoxStyle.get_styles().values()))( + pad=pad + ) patch = FancyBboxPatch( - (x_, y_), w - pad, h - pad, box_style, mutation_scale=log_uniform(.6, 1.5), - mutation_aspect=log_uniform(.6, 1.5), **kwargs + (x_, y_), + w - pad, + h - pad, + box_style, + mutation_scale=log_uniform(0.6, 1.5), + mutation_aspect=log_uniform(0.6, 1.5), + **kwargs, ) case _: raise NotImplementedError try: plt.gca().add_patch(patch) except MemoryError: - logger.warning(f'Failed to add patch {fn.__name__} at {x, y} with {w, h} due to MemoryError') + logger.warning( + f"Failed to add patch {fn.__name__} at {x, y} with {w, h} due to MemoryError" + ) def add_texts(self, locs): for x, y in locs: - x = .5 + (x - .5) * .6 + x = 0.5 + (x - 0.5) * 0.6 text = generate_text() - family = np.random.permutation(self.font_names).tolist() + ['DejaVu Sans'] + family = np.random.permutation(self.font_names).tolist() + ["DejaVu Sans"] color, background_color = self.random_colors plt.figtext( - x, y, text, family=family, - size=log_uniform(.75, 1) * self.dpi * clip_gaussian(0.3, 0.2, 0.2, 0.65), - ha='center', va='center', c=color, - rotation=uniform(-10, 10), wrap=True, + x, + y, + text, + family=family, + size=log_uniform(0.75, 1) + * self.dpi + * clip_gaussian(0.3, 0.2, 0.2, 0.65), + ha="center", + va="center", + c=color, + rotation=uniform(-10, 10), + wrap=True, fontweight=np.random.choice(self.font_weights), fontstyle=np.random.choice(self.font_styles), - backgroundcolor=background_color + backgroundcolor=background_color, ) def add_barcodes(self, locs): @@ -236,42 +333,86 @@ def add_barcodes(self, locs): w = h * self.barcode_aspect ax = fig.add_axes((x - w / 2, y - h / 2, w, h)) ax.set_axis_off() - ax.imshow(code.reshape(1, -1), cmap='binary', aspect='auto', interpolation='nearest') + ax.imshow( + code.reshape(1, -1), + cmap="binary", + aspect="auto", + interpolation="nearest", + ) def make_shader_func(self, bbox): - assert bbox[1] - bbox[0] > .001 and bbox[3] - bbox[2] >.001 + assert bbox[1] - bbox[0] > 0.001 and bbox[3] - bbox[2] > 0.001 image = self.build_image(bbox) def shader_text(nw: NodeWrangler, **kwargs): uv_map = nw.new_node(Nodes.UVMap) - - reroute = nw.new_node(Nodes.Reroute, input_kwargs={'Input': uv_map}) - - voronoi_texture = nw.new_node(Nodes.VoronoiTexture, input_kwargs={'Vector': reroute, 'Scale': 60.0000}) - - voronoi_texture_1 = nw.new_node(Nodes.VoronoiTexture, input_kwargs={'Vector': reroute, 'Scale': 60.0000}) - - mix = nw.new_node(Nodes.Mix, - input_kwargs={6: voronoi_texture.outputs["Position"], 7: voronoi_texture_1.outputs["Position"]}, - attrs={'data_type': 'RGBA'}) - - musgrave_texture = nw.new_node(Nodes.MusgraveTexture, input_kwargs={'Vector': reroute, 'Detail': 5.6000, 'Dimension': 1.4000}) - - noise_texture_1 = 
nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': reroute, 'Scale': 35.4000, 'Detail': 3.3000, 'Roughness': 1.0000}) - - mix_3 = nw.new_node(Nodes.Mix, - input_kwargs={0: uniform(0.2,1.0), 6: musgrave_texture, 7: noise_texture_1.outputs["Color"]}, - attrs={'data_type': 'RGBA'}) - - mix_1 = nw.new_node(Nodes.Mix, input_kwargs={0: 0.0417, 6: mix.outputs[2], 7: mix_3.outputs[2]}, attrs={'data_type': 'RGBA'}) - + + reroute = nw.new_node(Nodes.Reroute, input_kwargs={"Input": uv_map}) + + voronoi_texture = nw.new_node( + Nodes.VoronoiTexture, input_kwargs={"Vector": reroute, "Scale": 60.0000} + ) + + voronoi_texture_1 = nw.new_node( + Nodes.VoronoiTexture, input_kwargs={"Vector": reroute, "Scale": 60.0000} + ) + + mix = nw.new_node( + Nodes.Mix, + input_kwargs={ + 6: voronoi_texture.outputs["Position"], + 7: voronoi_texture_1.outputs["Position"], + }, + attrs={"data_type": "RGBA"}, + ) + + musgrave_texture = nw.new_node( + Nodes.MusgraveTexture, + input_kwargs={"Vector": reroute, "Detail": 5.6000, "Dimension": 1.4000}, + ) + + noise_texture_1 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": reroute, + "Scale": 35.4000, + "Detail": 3.3000, + "Roughness": 1.0000, + }, + ) + + mix_3 = nw.new_node( + Nodes.Mix, + input_kwargs={ + 0: uniform(0.2, 1.0), + 6: musgrave_texture, + 7: noise_texture_1.outputs["Color"], + }, + attrs={"data_type": "RGBA"}, + ) + + mix_1 = nw.new_node( + Nodes.Mix, + input_kwargs={0: 0.0417, 6: mix.outputs[2], 7: mix_3.outputs[2]}, + attrs={"data_type": "RGBA"}, + ) + if rand() < 0.5: - mix_2 = nw.new_node(Nodes.Mix, input_kwargs={0: uniform(0, 0.4), 6: mix_1.outputs[2], 7: uv_map}, attrs={'data_type': 'RGBA'}) + mix_2 = nw.new_node( + Nodes.Mix, + input_kwargs={0: uniform(0, 0.4), 6: mix_1.outputs[2], 7: uv_map}, + attrs={"data_type": "RGBA"}, + ) else: - mix_2 = nw.new_node(Nodes.Mix, input_kwargs={0: 1.0, 6: mix_1.outputs[2], 7: uv_map}, attrs={'data_type': 'RGBA'}) + mix_2 = nw.new_node( + Nodes.Mix, + input_kwargs={0: 1.0, 6: mix_1.outputs[2], 7: uv_map}, + attrs={"data_type": "RGBA"}, + ) # mix_2 = nw.new_node(Nodes.Mix, input_kwargs={0: 0.7375, 6: uv, 7: mix_1.outputs[2]}, attrs={'data_type': 'RGBA'}) - color = nw.new_node(Nodes.ShaderImageTexture, [mix_2], attrs={'image': image}).outputs[0] + color = nw.new_node( + Nodes.ShaderImageTexture, [mix_2], attrs={"image": image} + ).outputs[0] roughness = nw.new_node(Nodes.NoiseTexture) if self.emission > 0: emission = color @@ -280,16 +421,17 @@ def shader_text(nw: NodeWrangler, **kwargs): else: emission = None principled_bsdf = nw.new_node( - Nodes.PrincipledBSDF, input_kwargs={ - 'Base Color': color, - 'Roughness': roughness, - 'Metallic': uniform(0, .5), - 'Specular': uniform(0, .2), - 'Emission': emission, - 'Emission Strength': self.emission - } + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": color, + "Roughness": roughness, + "Metallic": uniform(0, 0.5), + "Specular": uniform(0, 0.2), + "Emission": emission, + "Emission Strength": self.emission, + }, ) - nw.new_node(Nodes.MaterialOutput, input_kwargs={'Surface': principled_bsdf}) + nw.new_node(Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf}) return shader_text @@ -297,13 +439,17 @@ def apply(self, obj, selection=None, bbox=(0, 1, 0, 1), **kwargs): common.apply(obj, self.make_shader_func(bbox), selection, **kwargs) -def apply(obj, selection=None, bbox=(0, 1, 0, 1), has_barcode=True, emission=0, **kwargs): - Text(np.random.randint(1e5), has_barcode, emission).apply(obj, selection, bbox, **kwargs) +def apply( + obj, 
selection=None, bbox=(0, 1, 0, 1), has_barcode=True, emission=0, **kwargs +): + Text(np.random.randint(1e5), has_barcode, emission).apply( + obj, selection, bbox, **kwargs + ) def make_sphere(): obj = new_plane() obj.rotation_euler[0] = np.pi / 2 butil.apply_transform(obj) - compute_uv_direction(obj, 'x', 'z') + compute_uv_direction(obj, "x", "z") return obj diff --git a/infinigen/assets/materials/text_no_barcode.py b/infinigen/assets/materials/text_no_barcode.py index 51021c598..78ee51f54 100644 --- a/infinigen/assets/materials/text_no_barcode.py +++ b/infinigen/assets/materials/text_no_barcode.py @@ -5,7 +5,6 @@ import numpy as np from .text import Text -from .text import make_sphere def apply(obj, selection=None, bbox=(0, 1, 0, 1), emission=0, **kwargs): diff --git a/infinigen/assets/materials/three_color_spots.py b/infinigen/assets/materials/three_color_spots.py index eb26bb29f..2bdd1a9fe 100644 --- a/infinigen/assets/materials/three_color_spots.py +++ b/infinigen/assets/materials/three_color_spots.py @@ -4,68 +4,89 @@ # Authors: Mingzhe Wang -import os, sys -import numpy as np import math as ma -from infinigen.assets.materials.utils.surface_utils import clip, sample_range, sample_ratio, sample_color, geo_voronoi_noise -import bpy -import mathutils -from numpy.random import uniform, normal -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler + +from infinigen.assets.materials.utils.surface_utils import ( + geo_voronoi_noise, + sample_color, + sample_range, +) from infinigen.core import surface +from infinigen.core.nodes.node_wrangler import Nodes -def shader_spot(nw, rand=True, **input_kwargs): +def shader_spot(nw, rand=True, **input_kwargs): texture_coordinate = nw.new_node(Nodes.TextureCoord) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': texture_coordinate.outputs["Object"]}) - - mix = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': 0.7, 'Color1': noise_texture.outputs["Color"], 'Color2': texture_coordinate.outputs["Object"]}) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={"Vector": texture_coordinate.outputs["Object"]}, + ) + + mix = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": 0.7, + "Color1": noise_texture.outputs["Color"], + "Color2": texture_coordinate.outputs["Object"], + }, + ) if rand: mix.inputs["Factor"].default_value = sample_range(0.5, 0.9) scale = nw.new_node(Nodes.Value) - scale.outputs["Value"].default_value = input_kwargs['scale'] if 'scale' in input_kwargs else 2 - - mapping = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': mix, 'Scale': scale}) + scale.outputs["Value"].default_value = ( + input_kwargs["scale"] if "scale" in input_kwargs else 2 + ) + + mapping = nw.new_node(Nodes.Mapping, input_kwargs={"Vector": mix, "Scale": scale}) if rand: for i in range(3): - mapping.inputs['Location'].default_value[i] = sample_range(-1, 1) - mapping.inputs['Rotation'].default_value[i] = sample_range(0, 2*ma.pi) + mapping.inputs["Location"].default_value[i] = sample_range(-1, 1) + mapping.inputs["Rotation"].default_value[i] = sample_range(0, 2 * ma.pi) spot1_1 = nw.new_node(Nodes.Value) spot1_1.outputs["Value"].default_value = 7.5 - - noise_texture_1 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': mapping, 'Scale': spot1_1}) - - mix_7 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': 0.95, 'Color1': noise_texture_1.outputs["Color"], 'Color2': mapping}) - - mapping_1 = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': mix_7}) + + noise_texture_1 = nw.new_node( + 
Nodes.NoiseTexture, input_kwargs={"Vector": mapping, "Scale": spot1_1} + ) + + mix_7 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": 0.95, + "Color1": noise_texture_1.outputs["Color"], + "Color2": mapping, + }, + ) + + mapping_1 = nw.new_node(Nodes.Mapping, input_kwargs={"Vector": mix_7}) if rand: for i in range(3): - mapping_1.inputs['Scale'].default_value[i] = sample_range(0.8, 1.2) - + mapping_1.inputs["Scale"].default_value[i] = sample_range(0.8, 1.2) + spot2_size = nw.new_node(Nodes.Value) spot2_size.outputs["Value"].default_value = sample_range(1, 3) if rand else 1.5 - voronoi_texture_2 = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': mapping_1, 'Scale': spot2_size}, - attrs={'voronoi_dimensions': '4D'}) + voronoi_texture_2 = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={"Vector": mapping_1, "Scale": spot2_size}, + attrs={"voronoi_dimensions": "4D"}, + ) if rand: - voronoi_texture_2.inputs['W'].default_value = sample_range(-5, 5) - - math_4 = nw.new_node(Nodes.Math, - input_kwargs={0: voronoi_texture_2.outputs["Distance"], 1: voronoi_texture_2.outputs["Distance"]}, - attrs={'operation': 'MULTIPLY'}) - - colorramp_1 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': math_4}) + voronoi_texture_2.inputs["W"].default_value = sample_range(-5, 5) + + math_4 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: voronoi_texture_2.outputs["Distance"], + 1: voronoi_texture_2.outputs["Distance"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + colorramp_1 = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": math_4}) colorramp_1.color_ramp.elements.new(1) colorramp_1.color_ramp.elements[0].position = 0.1409 colorramp_1.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) @@ -76,54 +97,81 @@ def shader_spot(nw, rand=True, **input_kwargs): if rand: colorramp_1.color_ramp.elements[1].position = sample_range(0.18, 0.23) - math_2 = nw.new_node(Nodes.Math, + math_2 = nw.new_node( + Nodes.Math, input_kwargs={0: spot2_size, 1: 10.0}, - attrs={'operation': 'MULTIPLY'}) - - voronoi_texture_3 = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': mapping_1, 'Scale': math_2}, - attrs={'voronoi_dimensions': '4D'}) + attrs={"operation": "MULTIPLY"}, + ) + + voronoi_texture_3 = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={"Vector": mapping_1, "Scale": math_2}, + attrs={"voronoi_dimensions": "4D"}, + ) if rand: - voronoi_texture_3.inputs['W'].default_value = sample_range(-5, 5) - - math_3 = nw.new_node(Nodes.Math, - input_kwargs={0: voronoi_texture_3.outputs["Distance"], 1: voronoi_texture_3.outputs["Distance"]}, - attrs={'operation': 'MULTIPLY'}) - - mix_4 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': 0.4467, 'Color1': colorramp_1.outputs["Color"], 'Color2': math_3}, - attrs={'blend_type': 'BURN'}) - + voronoi_texture_3.inputs["W"].default_value = sample_range(-5, 5) + + math_3 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: voronoi_texture_3.outputs["Distance"], + 1: voronoi_texture_3.outputs["Distance"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + mix_4 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": 0.4467, + "Color1": colorramp_1.outputs["Color"], + "Color2": math_3, + }, + attrs={"blend_type": "BURN"}, + ) + spot2 = nw.new_node(Nodes.Value) spot2.outputs["Value"].default_value = 1 - noise_texture_2 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': mapping, 'Scale': spot2}) - - mix_3 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': 0.875, 'Color1': noise_texture_2.outputs["Color"], 'Color2': mapping}) - - mapping_2 = 
nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': mix_3}) + noise_texture_2 = nw.new_node( + Nodes.NoiseTexture, input_kwargs={"Vector": mapping, "Scale": spot2} + ) + + mix_3 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": 0.875, + "Color1": noise_texture_2.outputs["Color"], + "Color2": mapping, + }, + ) + + mapping_2 = nw.new_node(Nodes.Mapping, input_kwargs={"Vector": mix_3}) if rand: for i in range(3): - mapping_2.inputs['Scale'].default_value[i] = sample_range(0.8, 1.2) + mapping_2.inputs["Scale"].default_value[i] = sample_range(0.8, 1.2) spot1_size = nw.new_node(Nodes.Value) spot1_size.outputs["Value"].default_value = sample_range(1, 3) if rand else 1.5 - voronoi_texture = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': mapping_2, 'W': 1.0, 'Scale': spot1_size}, - attrs={'voronoi_dimensions': '4D'}) + voronoi_texture = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={"Vector": mapping_2, "W": 1.0, "Scale": spot1_size}, + attrs={"voronoi_dimensions": "4D"}, + ) if rand: - voronoi_texture.inputs['W'].default_value = sample_range(-5, 5) - - math_5 = nw.new_node(Nodes.Math, - input_kwargs={0: voronoi_texture.outputs["Distance"], 1: voronoi_texture.outputs["Distance"]}, - attrs={'operation': 'MULTIPLY'}) - - colorramp_2 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': math_5}) + voronoi_texture.inputs["W"].default_value = sample_range(-5, 5) + + math_5 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: voronoi_texture.outputs["Distance"], + 1: voronoi_texture.outputs["Distance"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + colorramp_2 = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": math_5}) colorramp_2.color_ramp.elements.new(1) colorramp_2.color_ramp.elements[0].position = 0.0 colorramp_2.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) @@ -137,55 +185,87 @@ def shader_spot(nw, rand=True, **input_kwargs): value = nw.new_node(Nodes.Value) value.outputs["Value"].default_value = sample_range(2, 8) if rand else 5 - voronoi_texture_1 = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': mapping_2, 'Scale': value}, - attrs={'voronoi_dimensions': '4D'}) + voronoi_texture_1 = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={"Vector": mapping_2, "Scale": value}, + attrs={"voronoi_dimensions": "4D"}, + ) if rand: - voronoi_texture_1.inputs['W'].default_value = sample_range(-5, 5) - - math_6 = nw.new_node(Nodes.Math, - input_kwargs={0: voronoi_texture_1.outputs["Distance"], 1: voronoi_texture_1.outputs["Distance"]}, - attrs={'operation': 'MULTIPLY'}) - - mix_1 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': 0.82, 'Color1': colorramp_2.outputs["Color"], 'Color2': math_6}, - attrs={'blend_type': 'BURN'}) - - math = nw.new_node(Nodes.Math, - input_kwargs={0: mix_4, 1: mix_1}, - attrs={'operation': 'LESS_THAN'}) - + voronoi_texture_1.inputs["W"].default_value = sample_range(-5, 5) + + math_6 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: voronoi_texture_1.outputs["Distance"], + 1: voronoi_texture_1.outputs["Distance"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + mix_1 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": 0.82, + "Color1": colorramp_2.outputs["Color"], + "Color2": math_6, + }, + attrs={"blend_type": "BURN"}, + ) + + math = nw.new_node( + Nodes.Math, input_kwargs={0: mix_4, 1: mix_1}, attrs={"operation": "LESS_THAN"} + ) + rgb = nw.new_node(Nodes.RGB) - sample_color(rgb.outputs['Color'].default_value) + sample_color(rgb.outputs["Color"].default_value) - color1 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': mix_1, 
'Color1': (1.0, 0.0223, 0.0, 1.0), 'Color2': rgb}) + color1 = nw.new_node( + Nodes.MixRGB, + input_kwargs={"Fac": mix_1, "Color1": (1.0, 0.0223, 0.0, 1.0), "Color2": rgb}, + ) sample_color(color1.inputs[6].default_value) - color2 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': mix_4, 'Color1': (0.0021, 0.0021, 0.0144, 1.0), 'Color2': rgb}) + color2 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": mix_4, + "Color1": (0.0021, 0.0021, 0.0144, 1.0), + "Color2": rgb, + }, + ) sample_color(color2.inputs[6].default_value) - mix_6 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': math, 'Color1': color1, 'Color2': color2}) - - attribute = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'offset'}) - - colorramp = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': attribute.outputs["Color"]}) + mix_6 = nw.new_node( + Nodes.MixRGB, input_kwargs={"Fac": math, "Color1": color1, "Color2": color2} + ) + + attribute = nw.new_node(Nodes.Attribute, attrs={"attribute_name": "offset"}) + + colorramp = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": attribute.outputs["Color"]} + ) colorramp.color_ramp.elements[0].position = 0.0 colorramp.color_ramp.elements[0].color = (0.3036, 0.3036, 0.3036, 1.0) colorramp.color_ramp.elements[1].position = 1.0 colorramp.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': mix_6, 'Roughness': colorramp.outputs["Color"]}, - attrs={'subsurface_method': 'BURLEY'}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': principled_bsdf}) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={"Base Color": mix_6, "Roughness": colorramp.outputs["Color"]}, + attrs={"subsurface_method": "BURLEY"}, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf} + ) + def apply(obj, geo_kwargs=None, shader_kwargs=None, **kwargs): - surface.add_geomod(obj, geo_voronoi_noise, apply=False, input_kwargs=geo_kwargs, attributes=['offset']) + surface.add_geomod( + obj, + geo_voronoi_noise, + apply=False, + input_kwargs=geo_kwargs, + attributes=["offset"], + ) surface.add_material(obj, shader_spot, reuse=False, input_kwargs=shader_kwargs) diff --git a/infinigen/assets/materials/tiger_attr.py b/infinigen/assets/materials/tiger_attr.py index c5e691bca..671221284 100644 --- a/infinigen/assets/materials/tiger_attr.py +++ b/infinigen/assets/materials/tiger_attr.py @@ -5,154 +5,235 @@ # Acknowledgment: This file draws inspiration from https://www.youtube.com/watch?v=b9lukB7cWag by Sam Bowman -import os, sys -import numpy as np -import math as ma -from infinigen.assets.materials.utils.surface_utils import clip, sample_range, sample_ratio, sample_color, geo_voronoi_noise -import bpy -import mathutils -from numpy.random import uniform as U, normal as N, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category, hsv2rgba -from infinigen.core import surface +import os -from infinigen.assets.creatures.util.nodegroups.shader import nodegroup_color_mask import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category +from numpy.random import normal, uniform +from numpy.random import normal as N +from numpy.random import 
uniform as U + +from infinigen.assets.utils.nodegroups.shader import nodegroup_color_mask from infinigen.core import surface +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.util.color import hsv2rgba -from infinigen.assets.creatures.util.nodegroups.shader import nodegroup_color_mask -@node_utils.to_nodegroup('nodegroup_tiger_fac', singleton=False, type='ShaderNodeTree') +@node_utils.to_nodegroup("nodegroup_tiger_fac", singleton=False, type="ShaderNodeTree") def nodegroup_tiger_fac(nw: NodeWrangler): # Code generated using version 2.6.4 of the node_transpiler - group_input = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketVector', 'Vector', (0.0000, 0.0000, 0.0000))]) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': group_input.outputs["Vector"]}, - attrs={'noise_dimensions': '4D'}) + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[("NodeSocketVector", "Vector", (0.0000, 0.0000, 0.0000))], + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={"Vector": group_input.outputs["Vector"]}, + attrs={"noise_dimensions": "4D"}, + ) noise_texture.inputs["W"].default_value = uniform(-10, 10) - - mix_3 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': 0.8500, 'Color1': noise_texture.outputs["Color"], 'Color2': group_input.outputs["Vector"]}) + + mix_3 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": 0.8500, + "Color1": noise_texture.outputs["Color"], + "Color2": group_input.outputs["Vector"], + }, + ) mix_3.inputs["Factor"].default_value = uniform(0.8, 0.9) - - musgrave_texture_1 = nw.new_node(Nodes.MusgraveTexture, - input_kwargs={'Vector': mix_3, 'W': 1.0000, 'Scale': 1.0000}, - attrs={'musgrave_dimensions': '4D'}) + + musgrave_texture_1 = nw.new_node( + Nodes.MusgraveTexture, + input_kwargs={"Vector": mix_3, "W": 1.0000, "Scale": 1.0000}, + attrs={"musgrave_dimensions": "4D"}, + ) musgrave_texture_1.inputs["W"].default_value = uniform(-10, 10) - + value_1 = nw.new_node(Nodes.Value) value_1.outputs[0].default_value = normal(0.1180, 0.0100) - + value_2 = nw.new_node(Nodes.Value) value_2.outputs[0].default_value = normal(0.0600, 0.0100) - + add = nw.new_node(Nodes.Math, input_kwargs={0: value_1, 1: value_2}) - - greater_than = nw.new_node(Nodes.Math, input_kwargs={0: musgrave_texture_1, 1: add}, attrs={'operation': 'GREATER_THAN'}) - - subtract = nw.new_node(Nodes.Math, input_kwargs={0: value_1, 1: value_2}, attrs={'operation': 'SUBTRACT'}) - - greater_than_1 = nw.new_node(Nodes.Math, input_kwargs={0: musgrave_texture_1, 1: subtract}, attrs={'operation': 'GREATER_THAN'}) - - less_than = nw.new_node(Nodes.Math, input_kwargs={0: greater_than, 1: greater_than_1}, attrs={'operation': 'LESS_THAN'}) - - colorramp_3 = nw.new_node(Nodes.ColorRamp, input_kwargs={'Fac': musgrave_texture_1}) + + greater_than = nw.new_node( + Nodes.Math, + input_kwargs={0: musgrave_texture_1, 1: add}, + attrs={"operation": "GREATER_THAN"}, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: value_1, 1: value_2}, + attrs={"operation": "SUBTRACT"}, + ) + + greater_than_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: musgrave_texture_1, 1: subtract}, + attrs={"operation": "GREATER_THAN"}, + ) + + less_than = nw.new_node( + Nodes.Math, + input_kwargs={0: greater_than, 1: greater_than_1}, + attrs={"operation": "LESS_THAN"}, + ) + + colorramp_3 = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": musgrave_texture_1}) colorramp_3.color_ramp.interpolation = 
"CONSTANT" colorramp_3.color_ramp.elements[0].position = 0.0000 colorramp_3.color_ramp.elements[0].color = [0.0000, 0.0000, 0.0000, 1.0000] colorramp_3.color_ramp.elements[1].position = 0.1182 colorramp_3.color_ramp.elements[1].color = [1.0000, 1.0000, 1.0000, 1.0000] - - mapping_2 = nw.new_node(Nodes.Mapping, input_kwargs={'Vector': mix_3, 'Location': (3.0000, 0.0000, 1.0000)}) - mapping_2.inputs["Location"].default_value = (uniform(0, 10), uniform(0, 10), uniform(0, 10)) - - mapping_1 = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': mix_3, 'Location': (1.0000, 5.0000, -10.0000), 'Rotation': (0.7854, 0.0000, 0.0000)}) - mapping_1.inputs["Location"].default_value = (uniform(0, 10), uniform(0, 10), uniform(0, 10)) - - mix_5 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': colorramp_3.outputs["Color"], 'Color1': mapping_2, 'Color2': mapping_1}) - - wave_texture = nw.new_node(Nodes.WaveTexture, - input_kwargs={'Vector': mix_5, 'Scale': 1.0000, 'Distortion': 10.0000, 'Detail': 0.0000, 'Phase Offset': 4.0000}) + + mapping_2 = nw.new_node( + Nodes.Mapping, + input_kwargs={"Vector": mix_3, "Location": (3.0000, 0.0000, 1.0000)}, + ) + mapping_2.inputs["Location"].default_value = ( + uniform(0, 10), + uniform(0, 10), + uniform(0, 10), + ) + + mapping_1 = nw.new_node( + Nodes.Mapping, + input_kwargs={ + "Vector": mix_3, + "Location": (1.0000, 5.0000, -10.0000), + "Rotation": (0.7854, 0.0000, 0.0000), + }, + ) + mapping_1.inputs["Location"].default_value = ( + uniform(0, 10), + uniform(0, 10), + uniform(0, 10), + ) + + mix_5 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": colorramp_3.outputs["Color"], + "Color1": mapping_2, + "Color2": mapping_1, + }, + ) + + wave_texture = nw.new_node( + Nodes.WaveTexture, + input_kwargs={ + "Vector": mix_5, + "Scale": 1.0000, + "Distortion": 10.0000, + "Detail": 0.0000, + "Phase Offset": 4.0000, + }, + ) wave_texture.inputs["Scale"].default_value = normal(1.0000, 0.1000) wave_texture.inputs["Distortion"].default_value = normal(10.0000, 0.5000) wave_texture.inputs["Phase Offset"].default_value = uniform(-20, 20) - map_range = nw.new_node(Nodes.MapRange, input_kwargs={'Value': wave_texture.outputs["Fac"], 1: 0.2000, 2: 0.4000}) - - mix_2 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': less_than, 'Color1': map_range.outputs["Result"], 'Color2': (1.0000, 1.0000, 1.0000, 1.0000)}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Color': mix_2}, attrs={'is_active_output': True}) + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": wave_texture.outputs["Fac"], 1: 0.2000, 2: 0.4000}, + ) + + mix_2 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": less_than, + "Color1": map_range.outputs["Result"], + "Color2": (1.0000, 1.0000, 1.0000, 1.0000), + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Color": mix_2}, + attrs={"is_active_output": True}, + ) def shader_tiger_attr(nw: NodeWrangler): # Code generated using version 2.4.3 of the node_transpiler - local_pos = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'local_pos'}) - + local_pos = nw.new_node(Nodes.Attribute, attrs={"attribute_name": "local_pos"}) + value = nw.new_node(Nodes.Value) value.outputs[0].default_value = U(2, 4) - - attribute = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'tag_head'}) - - multiply = nw.new_node(Nodes.Math, + + attribute = nw.new_node(Nodes.Attribute, attrs={"attribute_name": "tag_head"}) + + multiply = nw.new_node( + Nodes.Math, input_kwargs={0: attribute.outputs["Fac"], 1: N(3, 1)}, - 
attrs={'operation': 'MULTIPLY'}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: value, 1: multiply}) - - scale = nw.new_node(Nodes.VectorMath, - input_kwargs={0: local_pos.outputs["Vector"], 'Scale': add}, - attrs={'operation': 'SCALE'}) - - group_1 = nw.new_node(nodegroup_tiger_fac().name, - input_kwargs={'Vector': scale.outputs["Vector"]}) - + attrs={"operation": "MULTIPLY"}, + ) + + add = nw.new_node(Nodes.Math, input_kwargs={0: value, 1: multiply}) + + scale = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: local_pos.outputs["Vector"], "Scale": add}, + attrs={"operation": "SCALE"}, + ) + + group_1 = nw.new_node( + nodegroup_tiger_fac().name, input_kwargs={"Vector": scale.outputs["Vector"]} + ) + group = nw.new_node(nodegroup_color_mask().name) - - colorramp = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': group}) + + colorramp = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": group}) colorramp.color_ramp.elements[0].position = 0.0 - colorramp.color_ramp.elements[0].color = hsv2rgba((U(0.015, 0.06), U(0.85, 0.95), U(0.15, 0.7))) + colorramp.color_ramp.elements[0].color = hsv2rgba( + (U(0.015, 0.06), U(0.85, 0.95), U(0.15, 0.7)) + ) colorramp.color_ramp.elements[1].position = 1.0 - colorramp.color_ramp.elements[1].color = hsv2rgba((U(0.02, 0.05), U(0.3, 0.7), U(0.15, 0.7))) - - mix_5 = nw.new_node(Nodes.MixRGB, + colorramp.color_ramp.elements[1].color = hsv2rgba( + (U(0.02, 0.05), U(0.3, 0.7), U(0.15, 0.7)) + ) + + mix_5 = nw.new_node( + Nodes.MixRGB, input_kwargs={ - 'Fac': group_1, - 'Color1': (0.01, 0.01, 0.01, 1.0), - 'Color2': colorramp.outputs["Color"]}) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': mix_5, 'Specular': 0.0}, - attrs={'subsurface_method': 'BURLEY'}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': principled_bsdf}) + "Fac": group_1, + "Color1": (0.01, 0.01, 0.01, 1.0), + "Color2": colorramp.outputs["Color"], + }, + ) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={"Base Color": mix_5, "Specular": 0.0}, + attrs={"subsurface_method": "BURLEY"}, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf} + ) + def apply(obj, selection=None, **kwargs): surface.add_material(obj, shader_tiger_attr, selection=selection) + if __name__ == "__main__": for i in range(1): - bpy.ops.wm.open_mainfile(filepath='dev_scene_1019.blend') - #creature(73349, 0).parts(0, factory=QuadrupedBody) - apply(bpy.data.objects['creature(73349, 0).parts(0, factory=QuadrupedBody)'], geo_kwargs={'rand': True}, shader_kwargs={'rand': True}) - fn = os.path.join(os.path.abspath(os.curdir), 'dev_scene_test_tiger_attr.blend') + bpy.ops.wm.open_mainfile(filepath="dev_scene_1019.blend") + # creature(73349, 0).parts(0, factory=QuadrupedBody) + apply( + bpy.data.objects["creature(73349, 0).parts(0, factory=QuadrupedBody)"], + geo_kwargs={"rand": True}, + shader_kwargs={"rand": True}, + ) + fn = os.path.join(os.path.abspath(os.curdir), "dev_scene_test_tiger_attr.blend") bpy.ops.wm.save_as_mainfile(filepath=fn) - #bpy.context.scene.render.filepath = os.path.join('surfaces/surface_thumbnails', 'bone%d.jpg'%(i)) - #bpy.context.scene.render.image_settings.file_format='JPEG' - #bpy.ops.render.render(write_still=True) \ No newline at end of file + # bpy.context.scene.render.filepath = os.path.join('surfaces/surface_thumbnails', 'bone%d.jpg'%(i)) + # bpy.context.scene.render.image_settings.file_format='JPEG' + # bpy.ops.render.render(write_still=True) diff 
--git a/infinigen/assets/materials/tile.py b/infinigen/assets/materials/tile.py index a926f67d9..f6d4417d6 100644 --- a/infinigen/assets/materials/tile.py +++ b/infinigen/assets/materials/tile.py @@ -1,45 +1,46 @@ # Copyright (c) Princeton University. # This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. +from functools import partial + # Authors: Lingjie Mei from inspect import signature import numpy as np from numpy.random import uniform -from . import ceramic, common -from .utils.surface_utils import perturb_coordinates -from ..utils.object import new_cube - -from ...core.nodes import NodeWrangler, Nodes +from ...core.nodes import Nodes, NodeWrangler from ...core.util.math import FixedSeed from ...core.util.random import log_uniform - -from functools import partial +from ..utils.object import new_cube +from . import ceramic, common +from .utils.surface_utils import perturb_coordinates def mix_shader(nw, base_shader, offset, rotations, mortar, alternating, selections): n = len(selections) + 1 - seeds = np.random.randint(0, 1e7, n) if alternating else [np.random.randint(1e7)] * n + seeds = ( + np.random.randint(0, 1e7, n) if alternating else [np.random.randint(1e7)] * n + ) shaders, disps = [], [] - darken_factor = uniform(.4, 1.) + darken_factor = uniform(0.4, 1.0) for i, seed in enumerate(seeds): with FixedSeed(seed): kwargs = {} names = signature(base_shader).parameters - if 'random_seed' in names: - kwargs['random_seed'] = np.random.randint(1e7) - if 'w' in names: - kwargs['w'] = offset - if 'hscale' in names: + if "random_seed" in names: + kwargs["random_seed"] = np.random.randint(1e7) + if "w" in names: + kwargs["w"] = offset + if "hscale" in names: if i % 2 == 0: - kwargs['hscale'] = log_uniform(20, 30) - kwargs['vscale'] = .01 + kwargs["hscale"] = log_uniform(20, 30) + kwargs["vscale"] = 0.01 else: - kwargs['hscale'] = .01 - kwargs['vscale'] = log_uniform(20, 30) + kwargs["hscale"] = 0.01 + kwargs["vscale"] = log_uniform(20, 30) base_shader(nw, **kwargs) - bsdfs = nw.find('Bsdf') + bsdfs = nw.find("Bsdf") n = nw.nodes[-1] if len(bsdfs) > 0: bsdf = bsdfs[-1] @@ -48,263 +49,483 @@ def mix_shader(nw, base_shader, offset, rotations, mortar, alternating, selectio color = links[0].from_socket else: color = bsdf.inputs[0].default_value - color = nw.new_node(Nodes.MixRGB, - input_kwargs={0: darken_factor, 6: color, 7: nw.scalar_sub(1, mortar)}, - attrs={'blend_type': 'MULTIPLY', 'data_type': 'RGBA'}).outputs[2] + color = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + 0: darken_factor, + 6: color, + 7: nw.scalar_sub(1, mortar), + }, + attrs={"blend_type": "MULTIPLY", "data_type": "RGBA"}, + ).outputs[2] nw.connect_input(color, bsdf.inputs[0]) match type(n).__name__: case Nodes.GroupOutput: shaders.append(nw.find_from(n.inputs[0])[0].from_socket) disp_links = nw.find_from(n.inputs[1]) - disps.append(disp_links[0].from_socket if len(disp_links) > 0 else None) + disps.append( + disp_links[0].from_socket if len(disp_links) > 0 else None + ) nw.nodes.remove(n) - case Nodes.PrincipledBSDF | Nodes.GlassBSDF | Nodes.GlossyBSDF | Nodes.TranslucentBSDF | \ - Nodes.TransparentBSDF | Nodes.TranslucentBSDF: + case ( + Nodes.PrincipledBSDF + | Nodes.GlassBSDF + | Nodes.GlossyBSDF + | Nodes.TranslucentBSDF + | Nodes.TransparentBSDF + | Nodes.TranslucentBSDF + ): shaders.append(n.outputs[0]) disps.append(None) case _: n = nw.find(Nodes.MaterialOutput)[-1] - shaders.append(nw.find_from(n.inputs['Surface'])[0].from_socket) - 
disp_links = nw.find_from(n.inputs['Displacement']) - disps.append(disp_links[0].from_socket if len(disp_links) > 0 else None) + shaders.append(nw.find_from(n.inputs["Surface"])[0].from_socket) + disp_links = nw.find_from(n.inputs["Displacement"]) + disps.append( + disp_links[0].from_socket if len(disp_links) > 0 else None + ) shader = shaders[0] disp = disps[0] rotation = rotations[0] for sel, sh, dis, rot in zip(selections, shaders[1:], disps[1:], rotations[1:]): shader = nw.new_node(Nodes.MixShader, [sel, shader, sh]) - disp = nw.new_node(Nodes.Mix, input_kwargs={'Factor': sel, 'A': disp, 'B': dis}, - attrs={'data_type': 'VECTOR'}) - rotation = nw.new_node(Nodes.Mix, input_kwargs={'Factor': sel, 'A': rotation, 'B': rot}, - attrs={'data_type': 'FLOAT'}) + disp = nw.new_node( + Nodes.Mix, + input_kwargs={"Factor": sel, "A": disp, "B": dis}, + attrs={"data_type": "VECTOR"}, + ) + rotation = nw.new_node( + Nodes.Mix, + input_kwargs={"Factor": sel, "A": rotation, "B": rot}, + attrs={"data_type": "FLOAT"}, + ) for node in nw.find(Nodes.TextureCoord)[1:] + nw.find(Nodes.NewGeometry): perturb_coordinates(nw, node, offset, rotation) - disp = nw.add(disp, nw.new_node(Nodes.Displacement, input_kwargs={ - 'Height': nw.scalar_multiply(mortar, -uniform(.01, .02)), - 'Midlevel': 0. - })) - nw.new_node(Nodes.MaterialOutput, input_kwargs={'Surface': shader, 'Displacement': disp}) + disp = nw.add( + disp, + nw.new_node( + Nodes.Displacement, + input_kwargs={ + "Height": nw.scalar_multiply(mortar, -uniform(0.01, 0.02)), + "Midlevel": 0.0, + }, + ), + ) + nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": shader, "Displacement": disp} + ) -def shader_square_tile(nw: NodeWrangler, base_shader, vertical=False, alternating=None, scale=1, **kwargs): +def shader_square_tile( + nw: NodeWrangler, base_shader, vertical=False, alternating=None, scale=1, **kwargs +): if alternating is None: - alternating = uniform() < .75 - size = log_uniform(.2, .4) - vec = nw.new_node(Nodes.TextureCoord).outputs['Object'] - normal = nw.new_node(Nodes.ShaderNodeNormalMap).outputs['Normal'] + alternating = uniform() < 0.75 + size = log_uniform(0.2, 0.4) + vec = nw.new_node(Nodes.TextureCoord).outputs["Object"] + normal = nw.new_node(Nodes.ShaderNodeNormalMap).outputs["Normal"] if vertical: - vec = nw.combine(nw.separate(nw.vector_math('CROSS_PRODUCT', vec, normal))[-1], nw.separate(vec)[-1], 0) - rotation = np.pi / 4 if uniform() < .3 else 0 + vec = nw.combine( + nw.separate(nw.vector_math("CROSS_PRODUCT", vec, normal))[-1], + nw.separate(vec)[-1], + 0, + ) + rotation = np.pi / 4 if uniform() < 0.3 else 0 vec = nw.new_node( Nodes.Mapping, - [vec, uniform(0, 1, 3), (0, 0, np.pi / 2 * np.random.randint(4) + rotation), [scale] * 3] + [ + vec, + uniform(0, 1, 3), + (0, 0, np.pi / 2 * np.random.randint(4) + rotation), + [scale] * 3, + ], ) vec = nw.combine(*nw.separate(vec)[:2], 0) - offset, mortar = map(nw.new_node(Nodes.BrickTexture, [vec], input_kwargs={ - 'Scale': 1 / size, - 'Row Height': 1, - 'Brick Width': 1, - 'Mortar Size': uniform(.005, .01), - 'Color2': (0, 0, 0, 1) - }, attrs={'offset': .0, 'offset_frequency': 1}).outputs.get, ['Color', 'Fac']) - selections = [nw.new_node(Nodes.CheckerTexture, [vec, (0, 0, 0, 1), (1, 1, 1, 1), 1 / size]).outputs[1]] + offset, mortar = map( + nw.new_node( + Nodes.BrickTexture, + [vec], + input_kwargs={ + "Scale": 1 / size, + "Row Height": 1, + "Brick Width": 1, + "Mortar Size": uniform(0.005, 0.01), + "Color2": (0, 0, 0, 1), + }, + attrs={"offset": 0.0, "offset_frequency": 1}, + 
).outputs.get, + ["Color", "Fac"], + ) + selections = [ + nw.new_node( + Nodes.CheckerTexture, [vec, (0, 0, 0, 1), (1, 1, 1, 1), 1 / size] + ).outputs[1] + ] rotations = np.pi / 2 * np.arange(2) mix_shader(nw, base_shader, offset, rotations, mortar, alternating, selections) -def shader_rectangle_tile(nw: NodeWrangler, base_shader, vertical=False, alternating=None, scale=1, **kwargs): +def shader_rectangle_tile( + nw: NodeWrangler, base_shader, vertical=False, alternating=None, scale=1, **kwargs +): if alternating is None: - alternating = uniform() < .75 - size = log_uniform(.2, .4) - vec = nw.new_node(Nodes.TextureCoord).outputs['Object'] - normal = nw.new_node(Nodes.ShaderNodeNormalMap).outputs['Normal'] + alternating = uniform() < 0.75 + size = log_uniform(0.2, 0.4) + vec = nw.new_node(Nodes.TextureCoord).outputs["Object"] + normal = nw.new_node(Nodes.ShaderNodeNormalMap).outputs["Normal"] if vertical: - vec = nw.combine(nw.separate(nw.vector_math('CROSS_PRODUCT', vec, normal))[-1], nw.separate(vec)[-1], 0) + vec = nw.combine( + nw.separate(nw.vector_math("CROSS_PRODUCT", vec, normal))[-1], + nw.separate(vec)[-1], + 0, + ) vec = nw.new_node( Nodes.Mapping, - [vec, uniform(0, 1, 3), (0, 0, np.pi / 2 * np.random.randint(4)), [scale, scale * log_uniform(1.3, 2), scale]] + [ + vec, + uniform(0, 1, 3), + (0, 0, np.pi / 2 * np.random.randint(4)), + [scale, scale * log_uniform(1.3, 2), scale], + ], ) vec = nw.combine(*nw.separate(vec)[:2], 0) - offset, mortar = map(nw.new_node(Nodes.BrickTexture, [vec], input_kwargs={ - 'Scale': 1 / size, - 'Row Height': 1, - 'Brick Width': 1, - 'Mortar Size': uniform(.005, .01), - 'Color2': (0, 0, 0, 1) - }, attrs={'offset': .0, 'offset_frequency': 1}).outputs.get, ['Color', 'Fac']) - selections = [nw.new_node(Nodes.CheckerTexture, [vec, (0, 0, 0, 1), (1, 1, 1, 1), 1 / size]).outputs[1]] + offset, mortar = map( + nw.new_node( + Nodes.BrickTexture, + [vec], + input_kwargs={ + "Scale": 1 / size, + "Row Height": 1, + "Brick Width": 1, + "Mortar Size": uniform(0.005, 0.01), + "Color2": (0, 0, 0, 1), + }, + attrs={"offset": 0.0, "offset_frequency": 1}, + ).outputs.get, + ["Color", "Fac"], + ) + selections = [ + nw.new_node( + Nodes.CheckerTexture, [vec, (0, 0, 0, 1), (1, 1, 1, 1), 1 / size] + ).outputs[1] + ] rotations = np.pi / 2 * np.arange(2) mix_shader(nw, base_shader, offset, rotations, mortar, alternating, selections) -def shader_hexagon_tile(nw: NodeWrangler, base_shader, vertical=False, alternating=None, scale=1, **kwargs): +def shader_hexagon_tile( + nw: NodeWrangler, base_shader, vertical=False, alternating=None, scale=1, **kwargs +): if alternating is None: - alternating = uniform() < .6 - size = log_uniform(.15, .3) - vec = nw.new_node(Nodes.TextureCoord).outputs['Object'] - normal = nw.new_node(Nodes.ShaderNodeNormalMap).outputs['Normal'] + alternating = uniform() < 0.6 + size = log_uniform(0.15, 0.3) + vec = nw.new_node(Nodes.TextureCoord).outputs["Object"] + normal = nw.new_node(Nodes.ShaderNodeNormalMap).outputs["Normal"] if vertical: - vec = nw.combine(nw.separate(nw.vector_math('CROSS_PRODUCT', vec, normal))[-1], nw.separate(vec)[-1], 0) - vec = nw.new_node(Nodes.Mapping, - [vec, uniform(0, 1, 3), (0, 0, np.pi / 2 * np.random.randint(4)), [scale] * 3]) + vec = nw.combine( + nw.separate(nw.vector_math("CROSS_PRODUCT", vec, normal))[-1], + nw.separate(vec)[-1], + 0, + ) + vec = nw.new_node( + Nodes.Mapping, + [vec, uniform(0, 1, 3), (0, 0, np.pi / 2 * np.random.randint(4)), [scale] * 3], + ) qs = [] - for n in np.array([[1 / np.sqrt(3), -1 / 3, 0], 
[0, 2 / 3, 0], [-1 / np.sqrt(3), -1 / 3, 0]]) / size: - qs.append(nw.vector_math('DOT_PRODUCT', vec, n)) - qs_ = [nw.math('ROUND', q) for q in qs] - qs_diff = [nw.math('ABSOLUTE', nw.scalar_sub(q, q_)) for q, q_ in zip(qs, qs_)] + for n in ( + np.array( + [[1 / np.sqrt(3), -1 / 3, 0], [0, 2 / 3, 0], [-1 / np.sqrt(3), -1 / 3, 0]] + ) + / size + ): + qs.append(nw.vector_math("DOT_PRODUCT", vec, n)) + qs_ = [nw.math("ROUND", q) for q in qs] + qs_diff = [nw.math("ABSOLUTE", nw.scalar_sub(q, q_)) for q, q_ in zip(qs, qs_)] coords = [] for i in range(3): - coords.append(nw.new_node(Nodes.Mix, [ - nw.scalar_multiply(nw.math('GREATER_THAN', qs_diff[i], qs_diff[(i + 1) % 3]), - nw.math('GREATER_THAN', qs_diff[i], qs_diff[(i + 2) % 3])), None, qs_[i], - nw.scalar_sub(0, nw.scalar_add(qs_[(i + 1) % 3], qs_[(i + 2) % 3]))])) + coords.append( + nw.new_node( + Nodes.Mix, + [ + nw.scalar_multiply( + nw.math("GREATER_THAN", qs_diff[i], qs_diff[(i + 1) % 3]), + nw.math("GREATER_THAN", qs_diff[i], qs_diff[(i + 2) % 3]), + ), + None, + qs_[i], + nw.scalar_sub(0, nw.scalar_add(qs_[(i + 1) % 3], qs_[(i + 2) % 3])), + ], + ) + ) offset = nw.combine(coords[0], coords[1], coords[2]) i = np.random.randint(3) - fraction = nw.math('FRACT', - nw.scalar_divide(nw.scalar_add(nw.scalar_sub(coords[i], coords[(i + 1) % 3]), .5), 3)) - diffs = [nw.math('ABSOLUTE', nw.scalar_sub(q, c)) for q, c in zip(qs, coords)] - max_dist = nw.math('MAXIMUM', - nw.math('MAXIMUM', nw.scalar_add(diffs[0], diffs[1]), nw.scalar_add(diffs[1], diffs[2])), - nw.scalar_add(diffs[2], diffs[0])) - mortar = nw.math('GREATER_THAN', max_dist, 1 - uniform(.005, .01) / size / 2) + fraction = nw.math( + "FRACT", + nw.scalar_divide( + nw.scalar_add(nw.scalar_sub(coords[i], coords[(i + 1) % 3]), 0.5), 3 + ), + ) + diffs = [nw.math("ABSOLUTE", nw.scalar_sub(q, c)) for q, c in zip(qs, coords)] + max_dist = nw.math( + "MAXIMUM", + nw.math( + "MAXIMUM", + nw.scalar_add(diffs[0], diffs[1]), + nw.scalar_add(diffs[1], diffs[2]), + ), + nw.scalar_add(diffs[2], diffs[0]), + ) + mortar = nw.math("GREATER_THAN", max_dist, 1 - uniform(0.005, 0.01) / size / 2) rotations = np.pi * 2 / 3 * np.arange(3) - mix_shader(nw, base_shader, offset, rotations, mortar, alternating, - [nw.math('LESS_THAN', fraction, 2 / 3), nw.math('LESS_THAN', fraction, 1 / 3), ]) + mix_shader( + nw, + base_shader, + offset, + rotations, + mortar, + alternating, + [ + nw.math("LESS_THAN", fraction, 2 / 3), + nw.math("LESS_THAN", fraction, 1 / 3), + ], + ) -def shader_staggered_tile(nw: NodeWrangler, base_shader, vertical=False, alternating=None, scale=1, - vertical_scale=None, **kwargs): - horizontal_scale = scale * log_uniform(2., 3.5) +def shader_staggered_tile( + nw: NodeWrangler, + base_shader, + vertical=False, + alternating=None, + scale=1, + vertical_scale=None, + **kwargs, +): + horizontal_scale = scale * log_uniform(2.0, 3.5) if vertical_scale is None: vertical_scale = horizontal_scale * log_uniform(0.05, 0.2) vec = nw.new_node(Nodes.TextureCoord).outputs["Object"] - normal = nw.new_node(Nodes.ShaderNodeNormalMap).outputs['Normal'] + normal = nw.new_node(Nodes.ShaderNodeNormalMap).outputs["Normal"] if vertical: - vec = nw.combine(nw.separate(nw.vector_math('CROSS_PRODUCT', vec, normal))[-1], nw.separate(vec)[-1], 0) + vec = nw.combine( + nw.separate(nw.vector_math("CROSS_PRODUCT", vec, normal))[-1], + nw.separate(vec)[-1], + 0, + ) vec = nw.new_node(Nodes.Mapping, [vec, uniform(0, 1, 3)]) - vec = nw.add(vec, nw.combine(0, nw.scalar_divide(.5, horizontal_scale), 0)) - - offset, mortar = 
map(nw.new_node(Nodes.BrickTexture, input_kwargs={ - 'Vector': vec, - 'Color2': (0, 0, 0, 1.0000), - 'Scale': 1.0000, - 'Mortar Size': uniform(.005, .01), - 'Mortar Smooth': 1.0000, - 'Bias': -0.5000, - 'Brick Width': nw.scalar_divide(1, vertical_scale), - 'Row Height': nw.scalar_divide(1, horizontal_scale) - }, attrs={'squash_frequency': 1}).outputs.get, ['Color', 'Fac']) + vec = nw.add(vec, nw.combine(0, nw.scalar_divide(0.5, horizontal_scale), 0)) + + offset, mortar = map( + nw.new_node( + Nodes.BrickTexture, + input_kwargs={ + "Vector": vec, + "Color2": (0, 0, 0, 1.0000), + "Scale": 1.0000, + "Mortar Size": uniform(0.005, 0.01), + "Mortar Smooth": 1.0000, + "Bias": -0.5000, + "Brick Width": nw.scalar_divide(1, vertical_scale), + "Row Height": nw.scalar_divide(1, horizontal_scale), + }, + attrs={"squash_frequency": 1}, + ).outputs.get, + ["Color", "Fac"], + ) mix_shader(nw, base_shader, offset, [0], mortar, alternating, []) -def shader_crossed_tile(nw: NodeWrangler, base_shader, vertical=False, alternating=None, scale=1, n=None, - **kwargs): +def shader_crossed_tile( + nw: NodeWrangler, + base_shader, + vertical=False, + alternating=None, + scale=1, + n=None, + **kwargs, +): n = np.random.randint(4, 8) vec = nw.new_node(Nodes.TextureCoord).outputs["Object"] - normal = nw.new_node(Nodes.ShaderNodeNormalMap).outputs['Normal'] + normal = nw.new_node(Nodes.ShaderNodeNormalMap).outputs["Normal"] if vertical: - vec = nw.combine(nw.separate(nw.vector_math('CROSS_PRODUCT', vec, normal))[-1], nw.separate(vec)[-1], 0) - vec = nw.new_node(Nodes.Mapping, - [vec, uniform(0, 1, 3), (0, 0, np.pi / 2 * np.random.randint(4)), [scale] * 3]) + vec = nw.combine( + nw.separate(nw.vector_math("CROSS_PRODUCT", vec, normal))[-1], + nw.separate(vec)[-1], + 0, + ) + vec = nw.new_node( + Nodes.Mapping, + [vec, uniform(0, 1, 3), (0, 0, np.pi / 2 * np.random.randint(4)), [scale] * 3], + ) x, y, z = nw.separate(vec) - x_ = nw.scalar_sub(x, nw.scalar_divide(nw.math('FLOOR', nw.scalar_multiply(y, n)), n)) + x_ = nw.scalar_sub( + x, nw.scalar_divide(nw.math("FLOOR", nw.scalar_multiply(y, n)), n) + ) vec = nw.combine(x_, y, 0) - offset, mortar = map(nw.new_node(Nodes.BrickTexture, input_kwargs={ - 'Vector': vec, - 'Color2': (0, 0, 0, 1.0000), - 'Scale': 1.0000, - 'Mortar Size': uniform(.005, .01), - 'Brick Width': 1, - 'Row Height': 1 / n - }, attrs={'squash_frequency': 1, 'offset': 0}).outputs.get, ['Color', 'Fac']) + offset, mortar = map( + nw.new_node( + Nodes.BrickTexture, + input_kwargs={ + "Vector": vec, + "Color2": (0, 0, 0, 1.0000), + "Scale": 1.0000, + "Mortar Size": uniform(0.005, 0.01), + "Brick Width": 1, + "Row Height": 1 / n, + }, + attrs={"squash_frequency": 1, "offset": 0}, + ).outputs.get, + ["Color", "Fac"], + ) vec_ = nw.combine( - nw.scalar_sub(y, nw.scalar_divide(nw.scalar_add(nw.math('FLOOR', nw.scalar_multiply(x, n)), 1), n)), - nw.scalar_sub(0, x), 0) - - offset_, mortar_ = map(nw.new_node(Nodes.BrickTexture, input_kwargs={ - 'Vector': vec_, - 'Color2': (0, 0, 0, 1.0000), - 'Scale': 1.0000, - 'Mortar Size': uniform(.005, .01), - 'Brick Width': 1, - 'Row Height': 1 / n, - }, attrs={'squash_frequency': 1, 'offset': 0}).outputs.get, ['Color', 'Fac']) - selection = nw.math('LESS_THAN', - nw.scalar_sub(nw.scalar_divide(x_, 2), nw.math('FLOOR', nw.scalar_divide(x_, 2))), .5) - offset = nw.new_node(Nodes.Mix, input_kwargs={'Factor': selection, 'A': offset, 'B': offset_}, - attrs={'data_type': 'FLOAT'}) - mortar = nw.new_node(Nodes.Mix, input_kwargs={'Factor': selection, 'A': mortar, 'B': mortar_}, - 
attrs={'data_type': 'FLOAT'}) - - mix_shader(nw, base_shader, offset, [0, np.pi / 2], mortar, alternating, [selection]) - - -def shader_composite_tile(nw: NodeWrangler, base_shader, vertical=False, alternating=None, scale=1, **kwargs): + nw.scalar_sub( + y, + nw.scalar_divide( + nw.scalar_add(nw.math("FLOOR", nw.scalar_multiply(x, n)), 1), n + ), + ), + nw.scalar_sub(0, x), + 0, + ) + + offset_, mortar_ = map( + nw.new_node( + Nodes.BrickTexture, + input_kwargs={ + "Vector": vec_, + "Color2": (0, 0, 0, 1.0000), + "Scale": 1.0000, + "Mortar Size": uniform(0.005, 0.01), + "Brick Width": 1, + "Row Height": 1 / n, + }, + attrs={"squash_frequency": 1, "offset": 0}, + ).outputs.get, + ["Color", "Fac"], + ) + selection = nw.math( + "LESS_THAN", + nw.scalar_sub( + nw.scalar_divide(x_, 2), nw.math("FLOOR", nw.scalar_divide(x_, 2)) + ), + 0.5, + ) + offset = nw.new_node( + Nodes.Mix, + input_kwargs={"Factor": selection, "A": offset, "B": offset_}, + attrs={"data_type": "FLOAT"}, + ) + mortar = nw.new_node( + Nodes.Mix, + input_kwargs={"Factor": selection, "A": mortar, "B": mortar_}, + attrs={"data_type": "FLOAT"}, + ) + + mix_shader( + nw, base_shader, offset, [0, np.pi / 2], mortar, alternating, [selection] + ) + + +def shader_composite_tile( + nw: NodeWrangler, base_shader, vertical=False, alternating=None, scale=1, **kwargs +): if alternating is None: - alternating = uniform() < .75 - size = log_uniform(.2, .4) - vec = nw.new_node(Nodes.TextureCoord).outputs['Object'] - normal = nw.new_node(Nodes.ShaderNodeNormalMap).outputs['Normal'] + alternating = uniform() < 0.75 + size = log_uniform(0.2, 0.4) + vec = nw.new_node(Nodes.TextureCoord).outputs["Object"] + normal = nw.new_node(Nodes.ShaderNodeNormalMap).outputs["Normal"] if vertical: - vec = nw.combine(nw.separate(nw.vector_math('CROSS_PRODUCT', vec, normal))[-1], nw.separate(vec)[-1], 0) + vec = nw.combine( + nw.separate(nw.vector_math("CROSS_PRODUCT", vec, normal))[-1], + nw.separate(vec)[-1], + 0, + ) vec = nw.new_node( Nodes.Mapping, - [vec, uniform(0, 1, 3), (0, 0, np.pi / 2 * np.random.randint(8)), [scale] * 3] + [vec, uniform(0, 1, 3), (0, 0, np.pi / 2 * np.random.randint(8)), [scale] * 3], ) vec = nw.combine(*nw.separate(vec)[:2], 0) - selections = [nw.new_node(Nodes.CheckerTexture, [vec, (0, 0, 0, 1), (1, 1, 1, 1), 1 / size]).outputs[1]] + selections = [ + nw.new_node( + Nodes.CheckerTexture, [vec, (0, 0, 0, 1), (1, 1, 1, 1), 1 / size] + ).outputs[1] + ] rotations = np.pi / 2 * np.arange(2) - mortar_size = uniform(.002, .005) + mortar_size = uniform(0.002, 0.005) stride = np.random.randint(4, 7) offset_h, mortar_h = map( nw.new_node( - Nodes.BrickTexture, input_kwargs={ - 'Vector': vec, - 'Color2': (0, 0, 0, 1.0000), - 'Scale': 1.0000, - 'Mortar Size': mortar_size, - 'Mortar Smooth': 1.0000, - 'Brick Width': size / stride, - 'Row Height': 1000 - }, attrs={'squash_frequency': 1} - ).outputs.get, ['Color', 'Fac'] + Nodes.BrickTexture, + input_kwargs={ + "Vector": vec, + "Color2": (0, 0, 0, 1.0000), + "Scale": 1.0000, + "Mortar Size": mortar_size, + "Mortar Smooth": 1.0000, + "Brick Width": size / stride, + "Row Height": 1000, + }, + attrs={"squash_frequency": 1}, + ).outputs.get, + ["Color", "Fac"], ) offset_v, mortar_v = map( nw.new_node( - Nodes.BrickTexture, input_kwargs={ - 'Vector': vec, - 'Color2': (0, 0, 0, 1.0000), - 'Scale': 1.0000, - 'Mortar Size': mortar_size, - 'Mortar Smooth': 1.0000, - 'Brick Width': 1000, - 'Row Height': size / stride, - }, attrs={'squash_frequency': 1} - ).outputs.get, ['Color', 'Fac'] + Nodes.BrickTexture, 
+ input_kwargs={ + "Vector": vec, + "Color2": (0, 0, 0, 1.0000), + "Scale": 1.0000, + "Mortar Size": mortar_size, + "Mortar Smooth": 1.0000, + "Brick Width": 1000, + "Row Height": size / stride, + }, + attrs={"squash_frequency": 1}, + ).outputs.get, + ["Color", "Fac"], ) mortar = nw.new_node( - Nodes.Mix, input_kwargs={'Factor': selections[0], 'A': mortar_h, 'B': mortar_v}, - attrs={'data_type': 'FLOAT'} + Nodes.Mix, + input_kwargs={"Factor": selections[0], "A": mortar_h, "B": mortar_v}, + attrs={"data_type": "FLOAT"}, ) offset = nw.new_node( - Nodes.Mix, input_kwargs={'Factor': selections[0], 'A': offset_h, 'B': offset_v}, - attrs={'data_type': 'VECTOR'} + Nodes.Mix, + input_kwargs={"Factor": selections[0], "A": offset_h, "B": offset_v}, + attrs={"data_type": "VECTOR"}, ) mix_shader(nw, base_shader, offset, rotations, mortar, alternating, selections) def get_shader_funcs(): from . import bone, ceramic, cobble_stone, dirt, stone - from .woods.wood import shader_wood from .table_materials import shader_marble - return [(bone.shader_bone, 1), (cobble_stone.shader_cobblestone, 1), (ceramic.shader_ceramic, 4), - (dirt.shader_dirt, 1), (stone.shader_stone, 1), (shader_marble, 2), (shader_wood, 5), ] - + from .woods.wood import shader_wood -def apply(obj, selection=None, vertical=False, shader_func=None, scale=None, alternating=None, shape=None, - **kwargs): + return [ + (bone.shader_bone, 1), + (cobble_stone.shader_cobblestone, 1), + (ceramic.shader_ceramic, 4), + (dirt.shader_dirt, 1), + (stone.shader_stone, 1), + (shader_marble, 2), + (shader_wood, 5), + ] + + +def apply( + obj, + selection=None, + vertical=False, + shader_func=None, + scale=None, + alternating=None, + shape=None, + **kwargs, +): funcs, weights = zip(*get_shader_funcs()) weights = np.array(weights) / sum(weights) if shader_func is None: @@ -312,35 +533,50 @@ def apply(obj, selection=None, vertical=False, shader_func=None, scale=None, alt name = shader_func.__name__ if scale is None: - scale = log_uniform(1., 2.) 
+ scale = log_uniform(1.0, 2.0) if shader_func == ceramic.shader_ceramic: - low = uniform(.1, .3) - high = uniform(.6, .8) - shader_func = partial(ceramic.shader_ceramic, roughness_min=low, roughness_max=high) + low = uniform(0.1, 0.3) + high = uniform(0.6, 0.8) + shader_func = partial( + ceramic.shader_ceramic, roughness_min=low, roughness_max=high + ) match shape: - case 'square': + case "square": method = shader_square_tile - case 'rectangle': + case "rectangle": method = shader_rectangle_tile - case 'hexagon': + case "hexagon": method = shader_hexagon_tile - case 'staggered': + case "staggered": method = shader_staggered_tile - case 'crossed': + case "crossed": method = shader_crossed_tile - case 'composite': + case "composite": method = shader_composite_tile case _: method = np.random.choice( - [shader_hexagon_tile, shader_square_tile, shader_rectangle_tile, shader_staggered_tile, - shader_crossed_tile] + [ + shader_hexagon_tile, + shader_square_tile, + shader_rectangle_tile, + shader_staggered_tile, + shader_crossed_tile, + ] ) return common.apply( - obj, method, selection, shader_func, vertical, alternating, name=f'{name}_{method.__name__}_tile', - scale=scale, **kwargs) + obj, + method, + selection, + shader_func, + vertical, + alternating, + name=f"{name}_{method.__name__}_tile", + scale=scale, + **kwargs, + ) def make_sphere(): - return new_cube() \ No newline at end of file + return new_cube() diff --git a/infinigen/assets/materials/tongue.py b/infinigen/assets/materials/tongue.py index 8ecd0c4a8..2bb618755 100644 --- a/infinigen/assets/materials/tongue.py +++ b/infinigen/assets/materials/tongue.py @@ -4,32 +4,35 @@ # Authors: Alexander Raistrick -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category from infinigen.core import surface +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler + def shader_tongue(nw: NodeWrangler): # Code generated using version 2.4.3 of the node_transpiler - musgrave_texture = nw.new_node(Nodes.MusgraveTexture, - input_kwargs={'Scale': 37.88}) - - colorramp = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': musgrave_texture}) + musgrave_texture = nw.new_node(Nodes.MusgraveTexture, input_kwargs={"Scale": 37.88}) + + colorramp = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": musgrave_texture}) colorramp.color_ramp.elements[0].position = 0.24 colorramp.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) colorramp.color_ramp.elements[1].position = 1.0 colorramp.color_ramp.elements[1].color = (0.0979, 0.0979, 0.0979, 1.0) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': (0.8, 0.0605, 0.0437, 1.0), 'Subsurface': 0.0312, 'Subsurface Color': (0.8, 0.0, 0.2679, 1.0), 'Roughness': colorramp.outputs["Color"]}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': principled_bsdf}) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": (0.8, 0.0605, 0.0437, 1.0), + "Subsurface": 0.0312, + "Subsurface Color": (0.8, 0.0, 0.2679, 1.0), + "Roughness": colorramp.outputs["Color"], + }, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf} + ) + def apply(obj, selection=None, **kwargs): - surface.add_material(obj, shader_tongue, selection=selection) \ No newline at end of file + surface.add_material(obj, 
shader_tongue, selection=selection) diff --git a/infinigen/assets/materials/two_color_spots.py b/infinigen/assets/materials/two_color_spots.py index d282d268e..a9fe2b2c2 100644 --- a/infinigen/assets/materials/two_color_spots.py +++ b/infinigen/assets/materials/two_color_spots.py @@ -4,67 +4,94 @@ # Authors: Mingzhe Wang -import os, sys -import numpy as np -import math as ma -from infinigen.assets.materials.utils.surface_utils import clip, sample_range, sample_ratio, sample_color, geo_voronoi_noise -import bpy -import mathutils -from numpy.random import uniform, normal -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.assets.materials.utils.surface_utils import ( + sample_color, + sample_range, +) from infinigen.core import surface +from infinigen.core.nodes.node_wrangler import Nodes + def shader_two_color_spots(nw, rand=True, **input_kwargs): - attribute = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'offset'}) - - mix_2 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': attribute.outputs["Fac"], 'Color1': (1.0, 0.2397, 0.0028, 1.0), 'Color2': (0.4915, 0.4636, 0.3855, 1.0)}) + attribute = nw.new_node(Nodes.Attribute, attrs={"attribute_name": "offset"}) + + mix_2 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": attribute.outputs["Fac"], + "Color1": (1.0, 0.2397, 0.0028, 1.0), + "Color2": (0.4915, 0.4636, 0.3855, 1.0), + }, + ) if rand: sample_color(mix_2.inputs[6].default_value) sample_color(mix_2.inputs[7].default_value) - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': mix_2}, - attrs={'subsurface_method': 'BURLEY'}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': principled_bsdf}) + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={"Base Color": mix_2}, + attrs={"subsurface_method": "BURLEY"}, + ) -def geo_two_color_spots(nw, rand=True, **input_kwargs): + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf} + ) + +def geo_two_color_spots(nw, rand=True, **input_kwargs): group_input = nw.new_node(Nodes.GroupInput) position = nw.new_node(Nodes.InputPosition) - + scale = nw.new_node(Nodes.Value) - scale.outputs["Value"].default_value = input_kwargs['scale'] if 'scale' in input_kwargs else 0.2 + scale.outputs["Value"].default_value = ( + input_kwargs["scale"] if "scale" in input_kwargs else 0.2 + ) - vector_math = nw.new_node(Nodes.VectorMath, + vector_math = nw.new_node( + Nodes.VectorMath, input_kwargs={0: position, 1: scale}, - attrs={'operation': 'MULTIPLY'}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': vector_math.outputs["Vector"], 'Scale': 10.0, 'Detail': 10.0}) - - mix = nw.new_node(Nodes.MixRGB, - input_kwargs={'Color1': noise_texture.outputs["Color"], 'Color2': vector_math.outputs["Vector"]}) + attrs={"operation": "MULTIPLY"}, + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": vector_math.outputs["Vector"], + "Scale": 10.0, + "Detail": 10.0, + }, + ) + + mix = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Color1": noise_texture.outputs["Color"], + "Color2": vector_math.outputs["Vector"], + }, + ) if rand: mix.inputs["Factor"].default_value = sample_range(0.5, 0.9) - voronoi_texture = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': mix}, - attrs={'voronoi_dimensions': '4D'}) + voronoi_texture = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={"Vector": mix}, + attrs={"voronoi_dimensions": "4D"}, + ) if rand: 
voronoi_texture.inputs["W"].default_value = sample_range(-5, 5) - voronoi_texture.inputs['Scale'].default_value = sample_range(5, 20) - - math_1 = nw.new_node(Nodes.Math, - input_kwargs={0: voronoi_texture.outputs["Distance"], 1: voronoi_texture.outputs["Distance"]}, - attrs={'operation': 'MULTIPLY'}) - - colorramp = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': math_1}) + voronoi_texture.inputs["Scale"].default_value = sample_range(5, 20) + + math_1 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: voronoi_texture.outputs["Distance"], + 1: voronoi_texture.outputs["Distance"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + colorramp = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": math_1}) colorramp.color_ramp.elements.new(1) colorramp.color_ramp.elements[0].position = 0.0 colorramp.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) @@ -76,49 +103,91 @@ def geo_two_color_spots(nw, rand=True, **input_kwargs): color = sample_range(0.45, 0.7) for i in range(3): colorramp.color_ramp.elements[1].color[i] = color - - voronoi_texture_1 = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': mix, 'Scale': 5}, - attrs={'voronoi_dimensions': '4D'}) + + voronoi_texture_1 = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={"Vector": mix, "Scale": 5}, + attrs={"voronoi_dimensions": "4D"}, + ) if rand: voronoi_texture_1.inputs["W"].default_value = sample_range(-5, 5) - voronoi_texture_1.inputs['Scale'].default_value = sample_range(5, 20) - - math_2 = nw.new_node(Nodes.Math, - input_kwargs={0: voronoi_texture_1.outputs["Distance"], 1: voronoi_texture_1.outputs["Distance"]}, - attrs={'operation': 'MULTIPLY'}) - - mix_1 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': 0.82, 'Color1': colorramp.outputs["Color"], 'Color2': math_2}, - attrs={'blend_type': 'BURN'}) - - vector_math_1 = nw.new_node(Nodes.VectorMath, + voronoi_texture_1.inputs["Scale"].default_value = sample_range(5, 20) + + math_2 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: voronoi_texture_1.outputs["Distance"], + 1: voronoi_texture_1.outputs["Distance"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + mix_1 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": 0.82, + "Color1": colorramp.outputs["Color"], + "Color2": math_2, + }, + attrs={"blend_type": "BURN"}, + ) + + vector_math_1 = nw.new_node( + Nodes.VectorMath, input_kwargs={0: (1.0, 1.0, 1.0), 1: mix_1}, - attrs={'operation': 'SUBTRACT'}) - + attrs={"operation": "SUBTRACT"}, + ) + normal = nw.new_node(Nodes.InputNormal) - - vector_math_2 = nw.new_node(Nodes.VectorMath, + + vector_math_2 = nw.new_node( + Nodes.VectorMath, input_kwargs={0: vector_math_1.outputs["Vector"], 1: normal}, - attrs={'operation': 'MULTIPLY'}) - + attrs={"operation": "MULTIPLY"}, + ) + offsetscale = nw.new_node(Nodes.Value) - offsetscale.outputs["Value"].default_value = input_kwargs['offsetscale'] if 'offsetscale' in input_kwargs else 0.1 - - vector_math_3 = nw.new_node(Nodes.VectorMath, + offsetscale.outputs["Value"].default_value = ( + input_kwargs["offsetscale"] if "offsetscale" in input_kwargs else 0.1 + ) + + vector_math_3 = nw.new_node( + Nodes.VectorMath, input_kwargs={0: vector_math_2.outputs["Vector"], 1: offsetscale}, - attrs={'operation': 'MULTIPLY'}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': group_input, 'Offset': vector_math_3.outputs["Vector"]}) - - capture_attribute = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': set_position, 1: mix_1}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - group_output = 
nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': capture_attribute.outputs["Geometry"], 'Attribute': capture_attribute.outputs["Attribute"]}) + attrs={"operation": "MULTIPLY"}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": group_input, + "Offset": vector_math_3.outputs["Vector"], + }, + ) + + capture_attribute = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={"Geometry": set_position, 1: mix_1}, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": capture_attribute.outputs["Geometry"], + "Attribute": capture_attribute.outputs["Attribute"], + }, + ) + def apply(obj, geo_kwargs=None, shader_kwargs=None, **kwargs): - surface.add_geomod(obj, geo_two_color_spots, apply=False, input_kwargs=geo_kwargs, attributes=['offset']) - surface.add_material(obj, shader_two_color_spots, reuse=False, input_kwargs=shader_kwargs) + surface.add_geomod( + obj, + geo_two_color_spots, + apply=False, + input_kwargs=geo_kwargs, + attributes=["offset"], + ) + surface.add_material( + obj, shader_two_color_spots, reuse=False, input_kwargs=shader_kwargs + ) diff --git a/infinigen/assets/materials/twocolorz.py b/infinigen/assets/materials/twocolorz.py index 6e567e09b..918529a61 100644 --- a/infinigen/assets/materials/twocolorz.py +++ b/infinigen/assets/materials/twocolorz.py @@ -4,42 +4,54 @@ # Authors: Mingzhe Wang -import os, sys -import numpy as np -import math as ma -from infinigen.assets.materials.utils.surface_utils import clip, sample_range, sample_ratio, sample_color, geo_voronoi_noise -import bpy -import mathutils -from numpy.random import uniform, normal -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.assets.materials.utils.surface_utils import ( + geo_voronoi_noise, + sample_color, + sample_range, + sample_ratio, +) from infinigen.core import surface +from infinigen.core.nodes.node_wrangler import Nodes -def shader_twocolorz(nw, rand=True, **input_kwargs): +def shader_twocolorz(nw, rand=True, **input_kwargs): texture_coordinate = nw.new_node(Nodes.TextureCoord) - - mapping = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': texture_coordinate.outputs["Generated"]}) + + mapping = nw.new_node( + Nodes.Mapping, input_kwargs={"Vector": texture_coordinate.outputs["Generated"]} + ) if rand: for i in range(2): # do not change Z - mapping.inputs['Location'].default_value[i] = sample_range(-2, 2) + mapping.inputs["Location"].default_value[i] = sample_range(-2, 2) - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': mapping}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': mapping, 'Scale': 10.0, 'Detail': 3.0, 'Distortion': 0.5}) + separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": mapping}) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": mapping, + "Scale": 10.0, + "Detail": 3.0, + "Distortion": 0.5, + }, + ) if rand: - for k in ['Scale', 'Detail', 'Distortion', 'Roughness']: - noise_texture.inputs[k].default_value = sample_ratio(noise_texture.inputs[k].default_value, 1/3, 3) - - mix = nw.new_node(Nodes.MixRGB, - input_kwargs={'Color1': separate_xyz.outputs["Z"], 'Color2': noise_texture.outputs["Fac"]}, - attrs={'blend_type': 'MULTIPLY'}) - - colorramp = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': mix}) + for k in ["Scale", "Detail", "Distortion", "Roughness"]: + noise_texture.inputs[k].default_value = sample_ratio( + 
noise_texture.inputs[k].default_value, 1 / 3, 3 + ) + + mix = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Color1": separate_xyz.outputs["Z"], + "Color2": noise_texture.outputs["Fac"], + }, + attrs={"blend_type": "MULTIPLY"}, + ) + + colorramp = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": mix}) colorramp.color_ramp.elements[0].position = 0.2182 colorramp.color_ramp.elements[0].color = (0.2059, 1.0, 0.1039, 1.0) colorramp.color_ramp.elements[1].position = 0.5364 @@ -47,27 +59,43 @@ def shader_twocolorz(nw, rand=True, **input_kwargs): if rand: pos_max = [0.4, 0.8] colorramp.color_ramp.elements[0].position = sample_range(0, pos_max[0]) - _min = (pos_max[1] - colorramp.color_ramp.elements[0].position) / 3 + colorramp.color_ramp.elements[0].position + _min = ( + pos_max[1] - colorramp.color_ramp.elements[0].position + ) / 3 + colorramp.color_ramp.elements[0].position colorramp.color_ramp.elements[1].position = sample_range(_min, pos_max[1]) sample_color(colorramp.color_ramp.elements[0].color) sample_color(colorramp.color_ramp.elements[1].color) - attribute = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'offset'}) - - colorramp_1 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': attribute.outputs["Color"]}) + attribute = nw.new_node(Nodes.Attribute, attrs={"attribute_name": "offset"}) + + colorramp_1 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": attribute.outputs["Color"]} + ) colorramp_1.color_ramp.elements[0].position = 0.0 colorramp_1.color_ramp.elements[0].color = (0.2634, 0.2634, 0.2634, 1.0) colorramp_1.color_ramp.elements[1].position = 1.0 colorramp_1.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': colorramp.outputs["Color"], 'Specular': 0.0, 'Roughness': colorramp_1.outputs["Color"]}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': principled_bsdf}) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": colorramp.outputs["Color"], + "Specular": 0.0, + "Roughness": colorramp_1.outputs["Color"], + }, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf} + ) + def apply(obj, geo_kwargs=None, shader_kwargs=None, **kwargs): - surface.add_geomod(obj, geo_voronoi_noise, apply=False, input_kwargs=geo_kwargs, attributes=['offset']) + surface.add_geomod( + obj, + geo_voronoi_noise, + apply=False, + input_kwargs=geo_kwargs, + attributes=["offset"], + ) surface.add_material(obj, shader_twocolorz, reuse=False, input_kwargs=shader_kwargs) diff --git a/infinigen/assets/materials/utils/surface_utils.py b/infinigen/assets/materials/utils/surface_utils.py index d740bcc8c..2d6ff380e 100644 --- a/infinigen/assets/materials/utils/surface_utils.py +++ b/infinigen/assets/materials/utils/surface_utils.py @@ -4,70 +4,136 @@ # Authors: Mingzhe Wang, Lingjie Mei -import random import math +import random -import numpy as np +from infinigen.core.nodes import Nodes, NodeWrangler, node_utils -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import Nodes, node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface -@node_utils.to_nodegroup('nodegroup_norm_value', singleton=False, type='GeometryNodeTree') +@node_utils.to_nodegroup( + "nodegroup_norm_value", singleton=False, type="GeometryNodeTree" +) def nodegroup_norm_value(nw: NodeWrangler): # Code generated using version 2.6.3 of the 
node_transpiler - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Attribute', 0.0000), - ('NodeSocketGeometry', 'Geometry', None)]) - - attribute_statistic_1 = nw.new_node(Nodes.AttributeStatistic, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 2: group_input.outputs["Attribute"]}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Attribute"], 1: attribute_statistic_1.outputs["Min"]}, - attrs={'operation': 'SUBTRACT'}) - - subtract_1 = nw.new_node(Nodes.Math, - input_kwargs={0: attribute_statistic_1.outputs["Max"], 1: attribute_statistic_1.outputs["Min"]}, - attrs={'operation': 'SUBTRACT'}) - - divide = nw.new_node(Nodes.Math, input_kwargs={0: subtract, 1: subtract_1}, attrs={'operation': 'DIVIDE'}) - - subtract_2 = nw.new_node(Nodes.Math, input_kwargs={0: divide}, attrs={'operation': 'SUBTRACT'}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: subtract_2, 1: 2.0000}, attrs={'operation': 'MULTIPLY'}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Value': multiply}, attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_norm_vec', singleton=False, type='GeometryNodeTree') + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "Attribute", 0.0000), + ("NodeSocketGeometry", "Geometry", None), + ], + ) + + attribute_statistic_1 = nw.new_node( + Nodes.AttributeStatistic, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + 2: group_input.outputs["Attribute"], + }, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["Attribute"], + 1: attribute_statistic_1.outputs["Min"], + }, + attrs={"operation": "SUBTRACT"}, + ) + + subtract_1 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: attribute_statistic_1.outputs["Max"], + 1: attribute_statistic_1.outputs["Min"], + }, + attrs={"operation": "SUBTRACT"}, + ) + + divide = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract, 1: subtract_1}, + attrs={"operation": "DIVIDE"}, + ) + + subtract_2 = nw.new_node( + Nodes.Math, input_kwargs={0: divide}, attrs={"operation": "SUBTRACT"} + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract_2, 1: 2.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Value": multiply}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup("nodegroup_norm_vec", singleton=False, type="GeometryNodeTree") def nodegroup_norm_vec(nw: NodeWrangler): # Code generated using version 2.6.3 of the node_transpiler - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketString', 'Name', ''), - ('NodeSocketVector', 'Vector', (0.0000, 0.0000, 0.0000))]) - - separate_xyz_1 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': group_input.outputs["Vector"]}) - - normvalue = nw.new_node(nodegroup_norm_value().name, - input_kwargs={'Attribute': separate_xyz_1.outputs["X"], 'Geometry': group_input.outputs["Geometry"]}) - - normvalue_1 = nw.new_node(nodegroup_norm_value().name, - input_kwargs={'Attribute': separate_xyz_1.outputs["Y"], 'Geometry': group_input.outputs["Geometry"]}) - - normvalue_2 = nw.new_node(nodegroup_norm_value().name, - input_kwargs={'Attribute': separate_xyz_1.outputs["Z"], 'Geometry': group_input.outputs["Geometry"]}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': normvalue, 'Y': normvalue_1, 'Z': normvalue_2}) - - store_named_attribute = 
nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 'Name': group_input.outputs["Name"], 2: combine_xyz}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': store_named_attribute}, attrs={'is_active_output': True}) + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketString", "Name", ""), + ("NodeSocketVector", "Vector", (0.0000, 0.0000, 0.0000)), + ], + ) + + separate_xyz_1 = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": group_input.outputs["Vector"]} + ) + + normvalue = nw.new_node( + nodegroup_norm_value().name, + input_kwargs={ + "Attribute": separate_xyz_1.outputs["X"], + "Geometry": group_input.outputs["Geometry"], + }, + ) + + normvalue_1 = nw.new_node( + nodegroup_norm_value().name, + input_kwargs={ + "Attribute": separate_xyz_1.outputs["Y"], + "Geometry": group_input.outputs["Geometry"], + }, + ) + + normvalue_2 = nw.new_node( + nodegroup_norm_value().name, + input_kwargs={ + "Attribute": separate_xyz_1.outputs["Z"], + "Geometry": group_input.outputs["Geometry"], + }, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": normvalue, "Y": normvalue_1, "Z": normvalue_2}, + ) + + store_named_attribute = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + "Name": group_input.outputs["Name"], + 2: combine_xyz, + }, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": store_named_attribute}, + attrs={"is_active_output": True}, + ) def sample_range(x_min, x_max): @@ -97,16 +163,16 @@ def clip(x, v_min=0, v_max=1): # if offset is not 0, the color is sampled from [color-offset, color+offset] def sample_color(color, offset=0, keep_sum=False): if keep_sum: - mean = (color[0]+color[1]+color[2])/3 - offset = min(mean, 1-mean)*random.random() + mean = (color[0] + color[1] + color[2]) / 3 + offset = min(mean, 1 - mean) * random.random() idx = random.randint(0, 2) f = 1 pcg = random.random() for i in range(3): if i == idx: - color[i] = mean+offset + color[i] = mean + offset else: - color[i] = mean-offset*(f*pcg+(1-f)*(1-pcg)) + color[i] = mean - offset * (f * pcg + (1 - f) * (1 - pcg)) f = 0 return @@ -117,82 +183,140 @@ def sample_color(color, offset=0, keep_sum=False): color[i] += (random.random() - 0.5) * 2 * offset color[i] = clip(color[i]) + # generate a random voronoi offset def geo_voronoi_noise(nw, rand=False, **input_kwargs): group_input = nw.new_node(Nodes.GroupInput) - subdivide_mesh = nw.new_node('GeometryNodeSubdivideMesh', - input_kwargs={'Mesh': group_input.outputs["Geometry"], - 'Level': input_kwargs.get('subdivide_mesh_level', 0)}) + subdivide_mesh = nw.new_node( + "GeometryNodeSubdivideMesh", + input_kwargs={ + "Mesh": group_input.outputs["Geometry"], + "Level": input_kwargs.get("subdivide_mesh_level", 0), + }, + ) position = nw.new_node(Nodes.InputPosition) scale = nw.new_node(Nodes.Value) - scale.outputs["Value"].default_value = input_kwargs.get('scale', 2) - - vector_math = nw.new_node(Nodes.VectorMath, input_kwargs={0: position, 1: scale}, - attrs={'operation': 'MULTIPLY'}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': vector_math.outputs["Vector"], 'Scale': 10.0}) + scale.outputs["Value"].default_value = input_kwargs.get("scale", 2) + + vector_math = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: 
position, 1: scale}, + attrs={"operation": "MULTIPLY"}, + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={"Vector": vector_math.outputs["Vector"], "Scale": 10.0}, + ) if rand: - sample_max = input_kwargs['noise_scale_max'] if 'noise_scale_max' in input_kwargs else 3 - sample_min = input_kwargs['noise_scale_min'] if 'noise_scale_min' in input_kwargs else 1 / sample_max - noise_texture.inputs["Scale"].default_value = sample_ratio(noise_texture.inputs["Scale"].default_value, - sample_min, sample_max) - - mix = nw.new_node(Nodes.MixRGB, input_kwargs={'Fac': 0.8, 'Color1': noise_texture.outputs["Color"], - 'Color2': vector_math.outputs["Vector"]}) + sample_max = ( + input_kwargs["noise_scale_max"] if "noise_scale_max" in input_kwargs else 3 + ) + sample_min = ( + input_kwargs["noise_scale_min"] + if "noise_scale_min" in input_kwargs + else 1 / sample_max + ) + noise_texture.inputs["Scale"].default_value = sample_ratio( + noise_texture.inputs["Scale"].default_value, sample_min, sample_max + ) + + mix = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": 0.8, + "Color1": noise_texture.outputs["Color"], + "Color2": vector_math.outputs["Vector"], + }, + ) if rand: mix.inputs["Fac"].default_value = sample_range(0.7, 0.9) - voronoi_texture = nw.new_node(Nodes.VoronoiTexture, input_kwargs={'Vector': mix}, - attrs={'voronoi_dimensions': '4D'}) + voronoi_texture = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={"Vector": mix}, + attrs={"voronoi_dimensions": "4D"}, + ) if rand: - sample_max = input_kwargs['voronoi_scale_max'] if 'voronoi_scale_max' in input_kwargs else 3 - sample_min = input_kwargs[ - 'voronoi_scale_min'] if 'voronoi_scale_min' in input_kwargs else 1 / sample_max + sample_max = ( + input_kwargs["voronoi_scale_max"] + if "voronoi_scale_max" in input_kwargs + else 3 + ) + sample_min = ( + input_kwargs["voronoi_scale_min"] + if "voronoi_scale_min" in input_kwargs + else 1 / sample_max + ) voronoi_texture.inputs["Scale"].default_value = sample_ratio( - voronoi_texture.inputs["Scale"].default_value, sample_min, sample_max) - voronoi_texture.inputs['W'].default_value = sample_range(-5, 5) + voronoi_texture.inputs["Scale"].default_value, sample_min, sample_max + ) + voronoi_texture.inputs["W"].default_value = sample_range(-5, 5) - subtract = nw.new_node(Nodes.Math, + subtract = nw.new_node( + Nodes.Math, input_kwargs={0: voronoi_texture.outputs["Distance"]}, - attrs={'operation': 'SUBTRACT'}) + attrs={"operation": "SUBTRACT"}, + ) normal = nw.new_node(Nodes.InputNormal) - vector_math_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: subtract, 1: normal}, - attrs={'operation': 'MULTIPLY'}) + vector_math_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: subtract, 1: normal}, + attrs={"operation": "MULTIPLY"}, + ) offsetscale = nw.new_node(Nodes.Value) - offsetscale.outputs["Value"].default_value = input_kwargs.get('offsetscale', 0.02) - - vector_math_2 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: vector_math_1.outputs["Vector"], 1: offsetscale}, - attrs={'operation': 'MULTIPLY'}) - - set_position = nw.new_node(Nodes.SetPosition, input_kwargs={'Geometry': subdivide_mesh, - 'Offset': vector_math_2.outputs["Vector"]}) - - capture_attribute = nw.new_node(Nodes.CaptureAttribute, input_kwargs={'Geometry': set_position, - 1: voronoi_texture.outputs["Distance"]}, attrs={'data_type': 'FLOAT_VECTOR'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': capture_attribute.outputs["Geometry"], - 'Attribute': 
capture_attribute.outputs["Attribute"]}) + offsetscale.outputs["Value"].default_value = input_kwargs.get("offsetscale", 0.02) + + vector_math_2 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: vector_math_1.outputs["Vector"], 1: offsetscale}, + attrs={"operation": "MULTIPLY"}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": subdivide_mesh, + "Offset": vector_math_2.outputs["Vector"], + }, + ) + + capture_attribute = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={"Geometry": set_position, 1: voronoi_texture.outputs["Distance"]}, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": capture_attribute.outputs["Geometry"], + "Attribute": capture_attribute.outputs["Attribute"], + }, + ) def perturb_coordinates(nw, node, location, rotation): - for name in ['Generated', 'Object', "Position", "UV"]: + for name in ["Generated", "Object", "Position", "UV"]: if name in node.outputs: node_socket = node.outputs[name] to_links = nw.find_to(node_socket) if len(to_links) == 0: continue - shifted = nw.new_node(Nodes.Mapping, [node_socket], input_kwargs={'Location': location, - 'Rotation': nw.combine(0, 0, rotation)}).outputs[0] + shifted = nw.new_node( + Nodes.Mapping, + [node_socket], + input_kwargs={ + "Location": location, + "Rotation": nw.combine(0, 0, rotation), + }, + ).outputs[0] to_sockets = [tl.to_socket for tl in to_links] for to_link in to_links: nw.links.remove(to_link) diff --git a/infinigen/assets/materials/vase_shaders.py b/infinigen/assets/materials/vase_shaders.py index 674f9a553..4b61d2c97 100644 --- a/infinigen/assets/materials/vase_shaders.py +++ b/infinigen/assets/materials/vase_shaders.py @@ -4,14 +4,10 @@ # Authors: Yiming Zuo -import bpy -import bpy -import mathutils -from numpy.random import uniform, normal, randint +from numpy.random import uniform + from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category, hsv2rgba -from infinigen.core import surface +from infinigen.core.util.color import hsv2rgba def shader_ceramic(nw: NodeWrangler): @@ -20,18 +16,43 @@ def shader_ceramic(nw: NodeWrangler): rgb = nw.new_node(Nodes.RGB) rgb.outputs[0].default_value = hsv2rgba(hsv) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': rgb, 'Subsurface': 0.3, 'Subsurface Radius': (0.002, 0.002, 0.002), 'Subsurface Color': rgb, 'Subsurface IOR': 1.4700, 'Subsurface Anisotropy': 0.2000, 'Specular': 0.2000, 'Roughness': 0.0500, 'Clearcoat': 0.5000, 'Clearcoat Roughness': 0.0500, 'IOR': 1.4700}) - - material_output = nw.new_node(Nodes.MaterialOutput, input_kwargs={'Surface': principled_bsdf}, attrs={'is_active_output': True}) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": rgb, + "Subsurface": 0.3, + "Subsurface Radius": (0.002, 0.002, 0.002), + "Subsurface Color": rgb, + "Subsurface IOR": 1.4700, + "Subsurface Anisotropy": 0.2000, + "Specular": 0.2000, + "Roughness": 0.0500, + "Clearcoat": 0.5000, + "Clearcoat Roughness": 0.0500, + "IOR": 1.4700, + }, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, + input_kwargs={"Surface": principled_bsdf}, + attrs={"is_active_output": True}, + ) + def shader_glass(nw: NodeWrangler): # Code generated using version 2.6.4 of the node_transpiler hsv = (uniform(0.0, 1.0), uniform(0.0, 0.2), 1.0) - glass_bsdf = nw.new_node(Nodes.GlassBSDF, 
input_kwargs={'Color': hsv2rgba(hsv), 'Roughness': uniform(0.05, 0.2)}) - - material_output = nw.new_node(Nodes.MaterialOutput, input_kwargs={'Surface': glass_bsdf}, attrs={'is_active_output': True}) + glass_bsdf = nw.new_node( + Nodes.GlassBSDF, + input_kwargs={"Color": hsv2rgba(hsv), "Roughness": uniform(0.05, 0.2)}, + ) + material_output = nw.new_node( + Nodes.MaterialOutput, + input_kwargs={"Surface": glass_bsdf}, + attrs={"is_active_output": True}, + ) diff --git a/infinigen/assets/materials/water.py b/infinigen/assets/materials/water.py index 0fd2de9b9..4b8100aa4 100644 --- a/infinigen/assets/materials/water.py +++ b/infinigen/assets/materials/water.py @@ -5,28 +5,27 @@ # Acknowledgment: This file draws inspiration from https://www.youtube.com/watch?v=X3LlsdddMLo by Kev Binge -import os - import bpy import gin import numpy as np from mathutils import Vector -from infinigen.core.nodes.node_wrangler import Nodes -from numpy.random import normal, uniform +from numpy.random import uniform + from infinigen.core import surface -from infinigen.terrain.assets.ocean import ocean_asset, spatial_size -from infinigen.core.util.organization import SurfaceTypes -from infinigen.terrain.utils import drive_param +from infinigen.core.nodes.node_wrangler import Nodes from infinigen.core.util.math import FixedSeed +from infinigen.core.util.organization import Attributes, SurfaceTypes from infinigen.core.util.random import random_general as rg -from infinigen.core.util.organization import Attributes +from infinigen.terrain.assets.ocean import spatial_size +from infinigen.terrain.utils import drive_param type = SurfaceTypes.BlenderDisplacement mod_name = "geo_water" name = "water" info = {} -@gin.configurable('geo') + +@gin.configurable("geo") def geo_water( nw, asset_paths, @@ -43,7 +42,7 @@ def geo_water( water_lacunarity=("uniform", 1.8, 2.0), height_modulation_scale=("uniform", 1, 5), ripples_lattice=2, - selection=None + selection=None, ): nw.force_input_consistency() group_input = nw.new_node(Nodes.GroupInput) @@ -51,42 +50,59 @@ def geo_water( if asset_paths == []: with_ripples = rg(with_ripples) - water_scale_node = nw.new_node(Nodes.Value, label=f"water_scale") + water_scale_node = nw.new_node(Nodes.Value, label="water_scale") water_scale_node.outputs[0].default_value = rg(water_scale) - water_detail_node = nw.new_node(Nodes.Value, label=f"water_detail") + water_detail_node = nw.new_node(Nodes.Value, label="water_detail") water_detail_node.outputs[0].default_value = rg(water_detail) - water_height_node = nw.new_node(Nodes.Value, label=f"water_height") + water_height_node = nw.new_node(Nodes.Value, label="water_height") water_height = rg(water_height) if with_waves: water_height_node.outputs[0].default_value = water_height else: water_height_node.outputs[0].default_value = 0 - ripple_height_node = nw.new_node(Nodes.Value, label=f"ripple_height") + ripple_height_node = nw.new_node(Nodes.Value, label="ripple_height") ripple_height_node.outputs[0].default_value = 0 if with_ripples: water_height_node.outputs[0].default_value *= 0.1 ripple_height_node.outputs[0].default_value = water_height - water_height_node = nw.scalar_multiply(water_height_node, nw.scalar_add(0.5, nw.new_node("ShaderNodeTexMusgrave", input_kwargs={"Scale": rg(height_modulation_scale)}))) - water_dimension_node = nw.new_node(Nodes.Value, label=f"water_dimension") + water_height_node = nw.scalar_multiply( + water_height_node, + nw.scalar_add( + 0.5, + nw.new_node( + "ShaderNodeTexMusgrave", + input_kwargs={"Scale": 
rg(height_modulation_scale)}, + ), + ), + ) + water_dimension_node = nw.new_node(Nodes.Value, label="water_dimension") water_dimension_node.outputs[0].default_value = rg(water_dimension) - water_lacunarity_node = nw.new_node(Nodes.Value, label=f"water_lacunarity") + water_lacunarity_node = nw.new_node(Nodes.Value, label="water_lacunarity") water_lacunarity_node.outputs[0].default_value = rg(water_lacunarity) - position_shift = nw.new_node(Nodes.Vector, label="wave") - position_shift.vector = nw.get_position_translation_seed(f"wave") + position_shift.vector = nw.get_position_translation_seed("wave") animated_position = nw.add(Vector([0, 0, 0]), position0, position_shift) if waves_animation_speed is not None: - drive_param(animated_position.inputs[0], rg(waves_animation_speed), offset=uniform(0, 10), index=1) + drive_param( + animated_position.inputs[0], + rg(waves_animation_speed), + offset=uniform(0, 10), + index=1, + ) - wave0 = nw.new_node("ShaderNodeTexMusgrave", [ - animated_position, None, - water_scale_node, - water_detail_node, - water_dimension_node, - water_lacunarity_node, - ]) + wave0 = nw.new_node( + "ShaderNodeTexMusgrave", + [ + animated_position, + None, + water_scale_node, + water_detail_node, + water_dimension_node, + water_lacunarity_node, + ], + ) # normal_direction = nw.new_node("GeometryNodeInputNormal", []) # temporarily assume flat water @@ -96,16 +112,26 @@ def geo_water( position_shift = nw.new_node(Nodes.Vector, label=f"ripple{i}") position_shift.vector = nw.get_position_translation_seed(f"ripple{i}") position = nw.add(position_shift, position0) - voronoi = nw.new_node(Nodes.VoronoiTexture, input_kwargs={'Vector': position, 'Scale': 0.1, 'Randomness': 1}) + voronoi = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={"Vector": position, "Scale": 0.1, "Randomness": 1}, + ) instance_offset = nw.new_node( - Nodes.WaveTexture, [nw.sub(position, (voronoi, 2)), 1], - attrs={'wave_type': 'RINGS', 'rings_direction': 'SPHERICAL'}, + Nodes.WaveTexture, + [nw.sub(position, (voronoi, 2)), 1], + attrs={"wave_type": "RINGS", "rings_direction": "SPHERICAL"}, ) if animate_ripples: - drive_param(instance_offset.inputs["Phase Offset"], -uniform(0.2, 1), offset=uniform(0, 10)) - edgeweight = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': position, 'Scale': 0.1, 'Randomness': 1}, - attrs={'feature': 'DISTANCE_TO_EDGE'}) + drive_param( + instance_offset.inputs["Phase Offset"], + -uniform(0.2, 1), + offset=uniform(0, 10), + ) + edgeweight = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={"Vector": position, "Scale": 0.1, "Randomness": 1}, + attrs={"feature": "DISTANCE_TO_EDGE"}, + ) waves.append(nw.multiply(edgeweight, instance_offset)) offset = nw.multiply( nw.scalar_add( @@ -119,34 +145,73 @@ def geo_water( directory = asset_paths[0] / "cache" filepath = directory / "disp_0001.exr" seq = bpy.data.images.load(str(filepath)) - seq.source = 'SEQUENCE' + seq.source = "SEQUENCE" angle = np.random.uniform(0, np.pi * 2) - position_shift = nw.get_position_translation_seed(f"wave") + position_shift = nw.get_position_translation_seed("wave") position = nw.add(position0, position_shift) - position = nw.multiply(nw.new_node(Nodes.VectorRotate, input_kwargs={"Vector": position, "Angle": angle}), [1/tile_size] * 3) + position = nw.multiply( + nw.new_node( + Nodes.VectorRotate, input_kwargs={"Vector": position, "Angle": angle} + ), + [1 / tile_size] * 3, + ) sampled_disp = nw.new_node(Nodes.ImageTexture, [seq, position]) drive_param(sampled_disp.inputs["Frame"], 1, 0) - offset = 
nw.multiply(sampled_disp, Vector([tile_size / spatial_size, tile_size / spatial_size, -tile_size / spatial_size])) - offset = nw.new_node(Nodes.VectorRotate, input_kwargs={"Vector": offset, "Angle": -angle}) + offset = nw.multiply( + sampled_disp, + Vector( + [ + tile_size / spatial_size, + tile_size / spatial_size, + -tile_size / spatial_size, + ] + ), + ) + offset = nw.new_node( + Nodes.VectorRotate, input_kwargs={"Vector": offset, "Angle": -angle} + ) filepath = directory / "foam_0001.exr" seq = bpy.data.images.load(str(filepath)) - seq.source = 'SEQUENCE' + seq.source = "SEQUENCE" foam = nw.new_node(Nodes.ImageTexture, [seq, position]) drive_param(foam.inputs["Frame"], 1, 0) if coastal: X = nw.new_node(Nodes.SeparateXYZ, [position0]) - weight1 = nw.scalar_multiply(1 / np.pi, nw.scalar_sub(np.pi / 2, nw.new_node(Nodes.Math, [nw.scalar_multiply(0.1, nw.scalar_add(30, X))], attrs={'operation': 'ARCTANGENT'}))) - weight2 = nw.scalar_add(0.5, nw.scalar_multiply(1 / np.pi, nw.new_node(Nodes.Math, [nw.scalar_multiply(0.1, nw.scalar_add(60, X))], attrs={'operation': 'ARCTANGENT'}))) + weight1 = nw.scalar_multiply( + 1 / np.pi, + nw.scalar_sub( + np.pi / 2, + nw.new_node( + Nodes.Math, + [nw.scalar_multiply(0.1, nw.scalar_add(30, X))], + attrs={"operation": "ARCTANGENT"}, + ), + ), + ) + weight2 = nw.scalar_add( + 0.5, + nw.scalar_multiply( + 1 / np.pi, + nw.new_node( + Nodes.Math, + [nw.scalar_multiply(0.1, nw.scalar_add(60, X))], + attrs={"operation": "ARCTANGENT"}, + ), + ), + ) offset = nw.multiply(offset, nw.scalar_multiply(weight1, weight2)) - offset = nw.add(offset, nw.multiply(nw.new_node("ShaderNodeTexMusgrave", input_kwargs={"Scale": 1}), [0, 0, 0.03])) + offset = nw.add( + offset, + nw.multiply( + nw.new_node("ShaderNodeTexMusgrave", input_kwargs={"Scale": 1}), + [0, 0, 0.03], + ), + ) foam = nw.multiply(foam, weight2) group_input = nw.new_node( Nodes.CaptureAttribute, - input_kwargs={ - "Geometry": group_input, - "Value": foam - }, + input_kwargs={"Geometry": group_input, "Value": foam}, attrs={"data_type": "FLOAT"}, ) @@ -160,7 +225,7 @@ def geo_water( "Offset": offset, }, ) - input_kwargs = {'Geometry': set_position} + input_kwargs = {"Geometry": set_position} if asset_paths != []: input_kwargs["foam"] = (group_input, "Attribute") group_output = nw.new_node(Nodes.GroupOutput, input_kwargs=input_kwargs) @@ -171,7 +236,7 @@ def shader( nw, asset_paths, coastal, - color=("color_category", 'water'), + color=("color_category", "water"), enable_scatter=True, colored=False, emissive_foam=False, @@ -181,7 +246,7 @@ def shader( random_seed=0, ): nw.force_input_consistency() - position = nw.new_node('ShaderNodeNewGeometry', []) + position = nw.new_node("ShaderNodeNewGeometry", []) # Code generated using version 2.3.1 of the node_transpiler (partly) with FixedSeed(random_seed): color = rg(color) @@ -192,48 +257,96 @@ def shader( else: color_of_transparent_bsdf_principled_bsdf = (1, 1, 1, 1) - transparent_bsdf = nw.new_node(Nodes.TransparentBSDF, input_kwargs={"Color": color_of_transparent_bsdf_principled_bsdf}) - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, input_kwargs={ - "Base Color": color_of_transparent_bsdf_principled_bsdf, "Roughness": 0.0, "IOR": 1.33, "Transmission": 1.0 - }) + transparent_bsdf = nw.new_node( + Nodes.TransparentBSDF, + input_kwargs={"Color": color_of_transparent_bsdf_principled_bsdf}, + ) + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": color_of_transparent_bsdf_principled_bsdf, + "Roughness": 0.0, + "IOR": 1.33, + 
"Transmission": 1.0, + }, + ) if mix_surface: - surface_shader = nw.new_node(Nodes.MixShader, input_kwargs={ - 'Fac': nw.scalar_multiply(1.0, light_path.outputs["Is Camera Ray"]), - 1: transparent_bsdf, - 2: principled_bsdf - }) + surface_shader = nw.new_node( + Nodes.MixShader, + input_kwargs={ + "Fac": nw.scalar_multiply(1.0, light_path.outputs["Is Camera Ray"]), + 1: transparent_bsdf, + 2: principled_bsdf, + }, + ) else: surface_shader = principled_bsdf if asset_paths != []: if emissive_foam: - foam_bsdf = nw.new_node(Nodes.Emission, input_kwargs={'Strength': 1}) + foam_bsdf = nw.new_node(Nodes.Emission, input_kwargs={"Strength": 1}) else: foam_bsdf = nw.new_node(Nodes.DiffuseBSDF) foam = nw.new_node(Nodes.Attribute, attrs={"attribute_name": "foam"}) if coastal: - weight = nw.scalar_multiply(3, nw.scalar_sub2(1, nw.scalar_multiply(5, nw.new_node(Nodes.Attribute, attrs={"attribute_name": Attributes.BoundarySDF})))) + weight = nw.scalar_multiply( + 3, + nw.scalar_sub2( + 1, + nw.scalar_multiply( + 5, + nw.new_node( + Nodes.Attribute, + attrs={"attribute_name": Attributes.BoundarySDF}, + ), + ), + ), + ) weight.use_clamp = 1 - interior_weight = nw.scalar_multiply(1 / np.pi, nw.scalar_sub( - np.pi / 2, - nw.new_node(Nodes.Math, [nw.scalar_multiply(0.1, nw.scalar_add(30, nw.new_node(Nodes.SeparateXYZ, [position])))], attrs={'operation': 'ARCTANGENT'}) - )) + interior_weight = nw.scalar_multiply( + 1 / np.pi, + nw.scalar_sub( + np.pi / 2, + nw.new_node( + Nodes.Math, + [ + nw.scalar_multiply( + 0.1, + nw.scalar_add( + 30, nw.new_node(Nodes.SeparateXYZ, [position]) + ), + ) + ], + attrs={"operation": "ARCTANGENT"}, + ), + ), + ) weight = nw.scalar_add(weight, interior_weight) weight.use_clamp = 1 else: weight = 1 foam = nw.scalar_multiply(foam, weight) - surface_shader = nw.new_node(Nodes.MixShader, input_kwargs={'Fac': foam, 1: surface_shader, 2: foam_bsdf}) - + surface_shader = nw.new_node( + Nodes.MixShader, + input_kwargs={"Fac": foam, 1: surface_shader, 2: foam_bsdf}, + ) + rgb = nw.new_node(Nodes.RGB) rgb.outputs[0].default_value = color - principled_volume = nw.new_node(Nodes.PrincipledVolume, input_kwargs={ - 'Color': rgb, - 'Absorption Color': rgb, - 'Density': rg(volume_density) if enable_scatter else 0, - 'Anisotropy': rg(anisotropy), - }) + principled_volume = nw.new_node( + Nodes.PrincipledVolume, + input_kwargs={ + "Color": rgb, + "Absorption Color": rgb, + "Density": rg(volume_density) if enable_scatter else 0, + "Anisotropy": rg(anisotropy), + }, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, + input_kwargs={"Surface": surface_shader, "Volume": principled_volume}, + ) - material_output = nw.new_node(Nodes.MaterialOutput, input_kwargs={'Surface': surface_shader, 'Volume': principled_volume}) @gin.configurable("water") def apply(objs, is_ocean=False, coastal=0, selection=None, **kwargs): @@ -246,7 +359,13 @@ def apply(objs, is_ocean=False, coastal=0, selection=None, **kwargs): (ocean_folder / "cache/foam_0001.exr").touch() asset_paths.append(ocean_folder) input_kwargs = {"asset_paths": asset_paths, "coastal": coastal} - surface.add_geomod(objs, geo_water, selection=selection, input_kwargs=input_kwargs, attributes=["foam"] if is_ocean else None) + surface.add_geomod( + objs, + geo_water, + selection=selection, + input_kwargs=input_kwargs, + attributes=["foam"] if is_ocean else None, + ) surface.add_material(objs, shader, selection=selection, input_kwargs=input_kwargs) if is_ocean: (ocean_folder / "cache/disp_0001.exr").unlink() diff --git 
a/infinigen/assets/materials/waterfall_material.py b/infinigen/assets/materials/waterfall_material.py index 2805a708c..a500035da 100644 --- a/infinigen/assets/materials/waterfall_material.py +++ b/infinigen/assets/materials/waterfall_material.py @@ -3,13 +3,10 @@ # Authors: Karhan Kayan -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category + from infinigen.core import surface +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler + def waterfall_shader(nw: NodeWrangler): # Code generated using version 2.4.3 of the node_transpiler @@ -19,76 +16,128 @@ def waterfall_shader(nw: NodeWrangler): rgb = nw.new_node(Nodes.RGB) rgb.outputs[0].default_value = (0.6866, 0.9357, 1.0, 1.0) - transparent_bsdf = nw.new_node(Nodes.TransparentBSDF, - input_kwargs={'Color': rgb}) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': rgb, 'Roughness': 0.0, 'IOR': 1.33, 'Transmission': 1.0}) - - mix_shader = nw.new_node(Nodes.MixShader, - input_kwargs={'Fac': light_path.outputs["Is Camera Ray"], 1: transparent_bsdf, 2: principled_bsdf}) + transparent_bsdf = nw.new_node(Nodes.TransparentBSDF, input_kwargs={"Color": rgb}) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": rgb, + "Roughness": 0.0, + "IOR": 1.33, + "Transmission": 1.0, + }, + ) + + mix_shader = nw.new_node( + Nodes.MixShader, + input_kwargs={ + "Fac": light_path.outputs["Is Camera Ray"], + 1: transparent_bsdf, + 2: principled_bsdf, + }, + ) texture_coordinate = nw.new_node(Nodes.TextureCoord) - mapping = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': texture_coordinate.outputs["Object"]}) - - musgrave_texture_1 = nw.new_node(Nodes.MusgraveTexture, - input_kwargs={'Vector': mapping, 'Scale': 3.3, 'Detail': 13.0, 'Dimension': 0.3}) - - colorramp = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': musgrave_texture_1}) + mapping = nw.new_node( + Nodes.Mapping, input_kwargs={"Vector": texture_coordinate.outputs["Object"]} + ) + + musgrave_texture_1 = nw.new_node( + Nodes.MusgraveTexture, + input_kwargs={ + "Vector": mapping, + "Scale": 3.3, + "Detail": 13.0, + "Dimension": 0.3, + }, + ) + + colorramp = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": musgrave_texture_1}) colorramp.color_ramp.interpolation = "B_SPLINE" colorramp.color_ramp.elements[0].position = 0.325 colorramp.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) colorramp.color_ramp.elements[1].position = 0.6727 colorramp.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - principled_bsdf_1 = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Metallic': 0.2636, 'Specular': 1.0, 'Roughness': 0.0, 'IOR': 1.333, 'Transmission': 0.8205, 'Alpha': colorramp.outputs["Color"]}) + principled_bsdf_1 = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Metallic": 0.2636, + "Specular": 1.0, + "Roughness": 0.0, + "IOR": 1.333, + "Transmission": 0.8205, + "Alpha": colorramp.outputs["Color"], + }, + ) + + mix_shader_1 = nw.new_node( + Nodes.MixShader, input_kwargs={1: mix_shader, 2: principled_bsdf_1} + ) - mix_shader_1 = nw.new_node(Nodes.MixShader, - input_kwargs={1: mix_shader, 2: principled_bsdf_1}) + volume_scatter = nw.new_node( + "ShaderNodeVolumeScatter", input_kwargs={"Color": (0.5841, 0.7339, 0.8, 1.0)} + ) - volume_scatter = nw.new_node('ShaderNodeVolumeScatter', - input_kwargs={'Color': (0.5841, 
0.7339, 0.8, 1.0)}) + material_output = nw.new_node( + Nodes.MaterialOutput, + input_kwargs={"Surface": mix_shader_1, "Volume": volume_scatter}, + ) - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': mix_shader_1, 'Volume': volume_scatter}) def geometry_geo_water(nw: NodeWrangler): # Code generated using version 2.4.3 of the node_transpiler - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None)]) + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) position = nw.new_node(Nodes.InputPosition) - add = nw.new_node(Nodes.VectorMath, - input_kwargs={0: position, 1: (630.0, 564.0, 374.0)}) - - musgrave_texture = nw.new_node(Nodes.MusgraveTexture, - input_kwargs={'Vector': add.outputs["Vector"], 'Scale': 4.1307, 'Detail': 9.7953, 'Dimension': 1.34, 'Lacunarity': 1.8087}) - - multiply = nw.new_node(Nodes.VectorMath, + add = nw.new_node( + Nodes.VectorMath, input_kwargs={0: position, 1: (630.0, 564.0, 374.0)} + ) + + musgrave_texture = nw.new_node( + Nodes.MusgraveTexture, + input_kwargs={ + "Vector": add.outputs["Vector"], + "Scale": 4.1307, + "Detail": 9.7953, + "Dimension": 1.34, + "Lacunarity": 1.8087, + }, + ) + + multiply = nw.new_node( + Nodes.VectorMath, input_kwargs={0: musgrave_texture, 1: (0.0, 0.0, 0.0128)}, - attrs={'operation': 'MULTIPLY'}) + attrs={"operation": "MULTIPLY"}, + ) value = nw.new_node(Nodes.Value) value.outputs[0].default_value = 1.0 - multiply_1 = nw.new_node(Nodes.VectorMath, + multiply_1 = nw.new_node( + Nodes.VectorMath, input_kwargs={0: multiply.outputs["Vector"], 1: value}, - attrs={'operation': 'MULTIPLY'}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 'Offset': multiply_1.outputs["Vector"]}) + attrs={"operation": "MULTIPLY"}, + ) - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_position}) + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + "Offset": multiply_1.outputs["Vector"], + }, + ) + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_position} + ) def apply(obj, selection=None, **kwargs): surface.add_geomod(obj, geometry_geo_water, selection=selection, attributes=[]) - surface.add_material(obj, waterfall_shader, selection=selection) \ No newline at end of file + surface.add_material(obj, waterfall_shader, selection=selection) diff --git a/infinigen/assets/materials/wear_tear/procedural_edge_wear.py b/infinigen/assets/materials/wear_tear/procedural_edge_wear.py index 1b4c44854..5fdd2c068 100644 --- a/infinigen/assets/materials/wear_tear/procedural_edge_wear.py +++ b/infinigen/assets/materials/wear_tear/procedural_edge_wear.py @@ -12,41 +12,44 @@ # https://www.youtube.com/watch?v=_wEXl3LncAc by diivja -from numpy.random import uniform, choice -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler import logging from collections.abc import Iterable +from numpy.random import choice, uniform + +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler + logger = logging.getLogger(__name__) + def get_edge_wear_params(): return { - '_worn_off_opacity': uniform(0.01), - '_worn_off_radius': uniform(0.005, 0.01), - '_scratch_radius': uniform(0.01, 0.03), - '_worn_off_mask_randomness': uniform(2.5, 3.0), - '_edge_base_color_hue': uniform(0.0, 1.0), - '_edge_base_color_whiteness': uniform(0.1, 0.6), - '_scratch_mask_randomness': 
choice([uniform(0.1, 5.0), uniform(1., 10.0)]), - '_scratch_density': choice([uniform(1.5, 10.0)]), - '_scratch_opacity': uniform(0.5, 1.0), + "_worn_off_opacity": uniform(0.01), + "_worn_off_radius": uniform(0.005, 0.01), + "_scratch_radius": uniform(0.01, 0.03), + "_worn_off_mask_randomness": uniform(2.5, 3.0), + "_edge_base_color_hue": uniform(0.0, 1.0), + "_edge_base_color_whiteness": uniform(0.1, 0.6), + "_scratch_mask_randomness": choice([uniform(0.1, 5.0), uniform(1.0, 10.0)]), + "_scratch_density": choice([uniform(1.5, 10.0)]), + "_scratch_opacity": uniform(0.5, 1.0), } -def shader_edge_tear_free_node_group(nw: NodeWrangler, - original_bsdf, - original_displacement, - _worn_off_opacity=0.5, - _worn_off_radius=0.015, - _scratch_radius=0.01, - _worn_off_mask_randomness=2.0, - _edge_base_color_hue=1.0, - _edge_base_color_whiteness=0.1, - _scratch_mask_randomness=0.5, - _scratch_density=5.0, - _scratch_opacity=0.2, - ): - +def shader_edge_tear_free_node_group( + nw: NodeWrangler, + original_bsdf, + original_displacement, + _worn_off_opacity=0.5, + _worn_off_radius=0.015, + _scratch_radius=0.01, + _worn_off_mask_randomness=2.0, + _edge_base_color_hue=1.0, + _edge_base_color_whiteness=0.1, + _scratch_mask_randomness=0.5, + _scratch_density=5.0, + _scratch_opacity=0.2, +): scratch_opacity = nw.new_node(Nodes.Value) scratch_opacity.outputs[0].default_value = _scratch_opacity @@ -74,159 +77,266 @@ def shader_edge_tear_free_node_group(nw: NodeWrangler, scratch_density = nw.new_node(Nodes.Value) scratch_density.outputs[0].default_value = _scratch_density - texture_coordinate = nw.new_node(Nodes.TextureCoord) - mapping = nw.new_node(Nodes.Mapping, input_kwargs={'Vector': texture_coordinate.outputs["Object"]}) - - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': mapping, 'Scale': scratch_mask_randomness, 'Detail': 1.0000}) - - color_ramp = nw.new_node(Nodes.ColorRamp, input_kwargs={'Fac': noise_texture.outputs["Fac"]}) + mapping = nw.new_node( + Nodes.Mapping, input_kwargs={"Vector": texture_coordinate.outputs["Object"]} + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": mapping, + "Scale": scratch_mask_randomness, + "Detail": 1.0000, + }, + ) + + color_ramp = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": noise_texture.outputs["Fac"]} + ) color_ramp.color_ramp.elements[0].position = 0.4436 color_ramp.color_ramp.elements[0].color = [0.0000, 0.0000, 0.0000, 1.0000] color_ramp.color_ramp.elements[1].position = 0.5345 color_ramp.color_ramp.elements[1].color = [1.0000, 1.0000, 1.0000, 1.0000] - - bevel = nw.new_node('ShaderNodeBevel', input_kwargs={'Radius': scratch_radius}, attrs={'samples': 20}) + bevel = nw.new_node( + "ShaderNodeBevel", + input_kwargs={"Radius": scratch_radius}, + attrs={"samples": 20}, + ) geometry = nw.new_node(Nodes.NewGeometry) - subtract = nw.new_node(Nodes.VectorMath, + subtract = nw.new_node( + Nodes.VectorMath, input_kwargs={0: bevel, 1: geometry.outputs["Normal"]}, - attrs={'operation': 'SUBTRACT'}) + attrs={"operation": "SUBTRACT"}, + ) - absolute = nw.new_node(Nodes.Math, input_kwargs={0: subtract.outputs["Vector"]}, attrs={'operation': 'ABSOLUTE'}) + absolute = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract.outputs["Vector"]}, + attrs={"operation": "ABSOLUTE"}, + ) - color_ramp_1 = nw.new_node(Nodes.ColorRamp, input_kwargs={'Fac': absolute}) + color_ramp_1 = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": absolute}) color_ramp_1.color_ramp.elements[0].position = 0.0691 
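For context on the edge-wear module being reformatted here: the node group above is not used directly; it is wired in through `apply_over`, which samples `get_edge_wear_params()` when no keyword overrides are given and grafts the wear nodes onto whatever materials the object already has. A minimal usage sketch, assuming a Blender session with Infinigen installed and a hypothetical mesh `obj` that already carries a base material (otherwise `apply_over` warns and returns):

```python
import bpy

from infinigen.assets.materials.wear_tear import procedural_edge_wear

# Hypothetical target: the active object, which must already have a material.
obj = bpy.context.active_object

# Sample a random parameter set explicitly (apply_over does this itself when
# called with no keyword overrides), then graft the wear layer onto the
# object's existing material node trees.
params = procedural_edge_wear.get_edge_wear_params()
procedural_edge_wear.apply_over(obj, **params)
```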
color_ramp_1.color_ramp.elements[0].color = [0.0000, 0.0000, 0.0000, 1.0000] color_ramp_1.color_ramp.elements[1].position = 0.1564 color_ramp_1.color_ramp.elements[1].color = [1.0000, 1.0000, 1.0000, 1.0000] - multiply = nw.new_node(Nodes.Math, + multiply = nw.new_node( + Nodes.Math, input_kwargs={0: color_ramp.outputs["Color"], 1: color_ramp_1.outputs["Color"]}, - attrs={'use_clamp': True, 'operation': 'MULTIPLY'}) + attrs={"use_clamp": True, "operation": "MULTIPLY"}, + ) - multiply_1 = nw.new_node(Nodes.Math, + multiply_1 = nw.new_node( + Nodes.Math, input_kwargs={0: scratch_opacity, 1: multiply}, - attrs={'use_clamp': True, 'operation': 'MULTIPLY'}) - + attrs={"use_clamp": True, "operation": "MULTIPLY"}, + ) - bevel_1 = nw.new_node('ShaderNodeBevel', input_kwargs={'Radius': paint_worn_off_radius}, attrs={'samples': 20}) + bevel_1 = nw.new_node( + "ShaderNodeBevel", + input_kwargs={"Radius": paint_worn_off_radius}, + attrs={"samples": 20}, + ) - subtract_1 = nw.new_node(Nodes.VectorMath, + subtract_1 = nw.new_node( + Nodes.VectorMath, input_kwargs={0: bevel_1, 1: geometry.outputs["Normal"]}, - attrs={'operation': 'SUBTRACT'}) - - absolute_1 = nw.new_node(Nodes.Math, input_kwargs={0: subtract_1.outputs["Vector"]}, attrs={'operation': 'ABSOLUTE'}) - - - noise_texture_1 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': mapping, 'Scale': worn_off_mask_randomness, 'Detail': 1.0000}) - - color_ramp_2 = nw.new_node(Nodes.ColorRamp, input_kwargs={'Fac': noise_texture_1.outputs["Fac"]}) + attrs={"operation": "SUBTRACT"}, + ) + + absolute_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract_1.outputs["Vector"]}, + attrs={"operation": "ABSOLUTE"}, + ) + + noise_texture_1 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": mapping, + "Scale": worn_off_mask_randomness, + "Detail": 1.0000, + }, + ) + + color_ramp_2 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": noise_texture_1.outputs["Fac"]} + ) color_ramp_2.color_ramp.elements[0].position = 0.0764 color_ramp_2.color_ramp.elements[0].color = [1.0000, 1.0000, 1.0000, 1.0000] color_ramp_2.color_ramp.elements[1].position = 0.5709 color_ramp_2.color_ramp.elements[1].color = [0.0000, 0.0000, 0.0000, 1.0000] - multiply_2 = nw.new_node(Nodes.Math, + multiply_2 = nw.new_node( + Nodes.Math, input_kwargs={0: absolute_1, 1: color_ramp_2.outputs["Color"]}, - attrs={'use_clamp': True, 'operation': 'MULTIPLY'}) + attrs={"use_clamp": True, "operation": "MULTIPLY"}, + ) - multiply_3 = nw.new_node(Nodes.Math, + multiply_3 = nw.new_node( + Nodes.Math, input_kwargs={0: worn_off_opacity, 1: multiply_2}, - attrs={'use_clamp': True, 'operation': 'MULTIPLY'}) + attrs={"use_clamp": True, "operation": "MULTIPLY"}, + ) - color_ramp_3 = nw.new_node(Nodes.ColorRamp, input_kwargs={'Fac': multiply_3}) + color_ramp_3 = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": multiply_3}) color_ramp_3.color_ramp.elements[0].position = 0.0000 color_ramp_3.color_ramp.elements[0].color = [0.0000, 0.0000, 0.0000, 1.0000] color_ramp_3.color_ramp.elements[1].position = 0.7782 color_ramp_3.color_ramp.elements[1].color = [1.0000, 1.0000, 1.0000, 1.0000] - combine_color = nw.new_node(Nodes.CombineColor, - input_kwargs={'Red': edge_base_color_hue, 'Green': 0.7733, 'Blue': 0.0100}, - attrs={'mode': 'HSV'}) - - mix = nw.new_node(Nodes.Mix, - input_kwargs={0: edge_base_color_whiteness, 6: combine_color, 7: (0.02, 0.02, 0.02, 1.0000)}, - attrs={'clamp_result': True, 'data_type': 'RGBA', 'clamp_factor': False}) - - reroute = nw.new_node(Nodes.Reroute, 
input_kwargs={'Input': mix.outputs[2]}) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': reroute, 'Metallic': 0.3745, 'Specular': 0.0000, 'Roughness': 0.1436}) - - mix_shader = nw.new_node(Nodes.MixShader, - input_kwargs={'Fac': color_ramp_3.outputs["Color"], 1: original_bsdf, 2: principled_bsdf}) - - mapping_1 = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': texture_coordinate.outputs["Object"], 'Scale': (10.0000, 1.0000, 1.0000)}) - - - voronoi_texture = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': mapping_1, 'Scale': scratch_density}, - attrs={'feature': 'DISTANCE_TO_EDGE'}) - - mapping_2 = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': texture_coordinate.outputs["Object"], 'Scale': (1.0000, 10.0000, 1.0000)}) - - scale = nw.new_node(Nodes.VectorMath, input_kwargs={0: scratch_density, 'Scale': 2.0000}, attrs={'operation': 'SCALE'}) - - voronoi_texture_1 = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': mapping_2, 'Scale': scale.outputs["Vector"]}, - attrs={'feature': 'DISTANCE_TO_EDGE'}) - - multiply_4 = nw.new_node(Nodes.Math, - input_kwargs={0: voronoi_texture.outputs["Distance"], 1: voronoi_texture_1.outputs["Distance"]}, - attrs={'use_clamp': True, 'operation': 'MULTIPLY'}) - - color_ramp_6 = nw.new_node(Nodes.ColorRamp, input_kwargs={'Fac': multiply_4}) + combine_color = nw.new_node( + Nodes.CombineColor, + input_kwargs={"Red": edge_base_color_hue, "Green": 0.7733, "Blue": 0.0100}, + attrs={"mode": "HSV"}, + ) + + mix = nw.new_node( + Nodes.Mix, + input_kwargs={ + 0: edge_base_color_whiteness, + 6: combine_color, + 7: (0.02, 0.02, 0.02, 1.0000), + }, + attrs={"clamp_result": True, "data_type": "RGBA", "clamp_factor": False}, + ) + + reroute = nw.new_node(Nodes.Reroute, input_kwargs={"Input": mix.outputs[2]}) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": reroute, + "Metallic": 0.3745, + "Specular": 0.0000, + "Roughness": 0.1436, + }, + ) + + mix_shader = nw.new_node( + Nodes.MixShader, + input_kwargs={ + "Fac": color_ramp_3.outputs["Color"], + 1: original_bsdf, + 2: principled_bsdf, + }, + ) + + mapping_1 = nw.new_node( + Nodes.Mapping, + input_kwargs={ + "Vector": texture_coordinate.outputs["Object"], + "Scale": (10.0000, 1.0000, 1.0000), + }, + ) + + voronoi_texture = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={"Vector": mapping_1, "Scale": scratch_density}, + attrs={"feature": "DISTANCE_TO_EDGE"}, + ) + + mapping_2 = nw.new_node( + Nodes.Mapping, + input_kwargs={ + "Vector": texture_coordinate.outputs["Object"], + "Scale": (1.0000, 10.0000, 1.0000), + }, + ) + + scale = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: scratch_density, "Scale": 2.0000}, + attrs={"operation": "SCALE"}, + ) + + voronoi_texture_1 = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={"Vector": mapping_2, "Scale": scale.outputs["Vector"]}, + attrs={"feature": "DISTANCE_TO_EDGE"}, + ) + + multiply_4 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: voronoi_texture.outputs["Distance"], + 1: voronoi_texture_1.outputs["Distance"], + }, + attrs={"use_clamp": True, "operation": "MULTIPLY"}, + ) + + color_ramp_6 = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": multiply_4}) color_ramp_6.color_ramp.elements[0].position = 0.0000 color_ramp_6.color_ramp.elements[0].color = [1.0000, 1.0000, 1.0000, 1.0000] color_ramp_6.color_ramp.elements[1].position = 0.0073 color_ramp_6.color_ramp.elements[1].color = [0.0000, 0.0000, 0.0000, 1.0000] - multiply_5 = nw.new_node(Nodes.Math, - 
input_kwargs={0: color_ramp_1.outputs["Color"], 1: color_ramp_6.outputs["Color"]}, - attrs={'use_clamp': True, 'operation': 'MULTIPLY'}) - - multiply_6 = nw.new_node(Nodes.Math, + multiply_5 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: color_ramp_1.outputs["Color"], + 1: color_ramp_6.outputs["Color"], + }, + attrs={"use_clamp": True, "operation": "MULTIPLY"}, + ) + + multiply_6 = nw.new_node( + Nodes.Math, input_kwargs={0: multiply, 1: multiply_5}, - attrs={'use_clamp': True, 'operation': 'MULTIPLY'}) - - principled_bsdf_1 = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': reroute, 'Metallic': 0.3855, 'Specular': 0.0000, 'Roughness': 0.0000}) - - mix_shader_1 = nw.new_node(Nodes.MixShader, input_kwargs={'Fac': multiply_1, 1: mix_shader, 2: principled_bsdf_1}) + attrs={"use_clamp": True, "operation": "MULTIPLY"}, + ) + + principled_bsdf_1 = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": reroute, + "Metallic": 0.3855, + "Specular": 0.0000, + "Roughness": 0.0000, + }, + ) + + mix_shader_1 = nw.new_node( + Nodes.MixShader, + input_kwargs={"Fac": multiply_1, 1: mix_shader, 2: principled_bsdf_1}, + ) # add operation - scale_multiply6 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: multiply_6, 'Scale': 2.0000}, - attrs={'operation': 'SCALE'}) + scale_multiply6 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: multiply_6, "Scale": 2.0000}, + attrs={"operation": "SCALE"}, + ) if original_displacement is None: total_displacement = scale_multiply6 - else: - total_displacement = nw.new_node(Nodes.Math, + else: + total_displacement = nw.new_node( + Nodes.Math, input_kwargs={0: original_displacement, 1: scale_multiply6}, - attrs={'operation': 'ADD', 'use_clamp': True}) - + attrs={"operation": "ADD", "use_clamp": True}, + ) + return mix_shader_1, total_displacement MARKER_LABEL = "wear_tear" + def apply_over(obj, selection=None, **shader_kwargs): - # get all materials # https://blenderartists.org/t/finding-out-if-an-object-has-a-material/512570/6 materials = obj.data.materials.items() if len(materials) == 0: - logging.warning(f"No material exist for {obj.name}! Scratches can only be applied over some existing material.") + logging.warning( + f"No material exist for {obj.name}! Scratches can only be applied over some existing material." + ) return if len(shader_kwargs) == 0: @@ -234,7 +344,6 @@ def apply_over(obj, selection=None, **shader_kwargs): shader_kwargs = get_edge_wear_params() for material_name, material in materials: - # get material node tree # https://blender.stackexchange.com/questions/240278/how-to-access-shader-node-via-python-script material_node_tree = material.node_tree @@ -243,34 +352,41 @@ def apply_over(obj, selection=None, **shader_kwargs): continue nw = NodeWrangler(material_node_tree) - + result = nw.find("ShaderNodeOutputMaterial") if len(result) == 0: - logger.warning("No Material Output Node found in the object's materials! Returning") + logger.warning( + "No Material Output Node found in the object's materials! 
Returning" + ) continue - + # get nodes and links connected to specific inputs # https://blender.stackexchange.com/questions/5462/is-it-possible-to-find-the-nodes-connected-to-a-node-in-python - initial_bsdf = result[0].inputs['Surface'].links[0].from_node - displacement_links = result[0].inputs['Displacement'].links - + initial_bsdf = result[0].inputs["Surface"].links[0].from_node + displacement_links = result[0].inputs["Displacement"].links + if len(displacement_links) == 0: initial_displacement = None else: - initial_displacement = result[0].inputs['Displacement'].links[0].from_node - - final_bsdf, final_displacement = shader_edge_tear_free_node_group(nw, initial_bsdf, initial_displacement, **shader_kwargs) + initial_displacement = result[0].inputs["Displacement"].links[0].from_node + + final_bsdf, final_displacement = shader_edge_tear_free_node_group( + nw, initial_bsdf, initial_displacement, **shader_kwargs + ) # connecting nodes # https://blender.stackexchange.com/questions/101820/how-to-add-remove-links-to-existing-or-new-nodes-using-python - material_node_tree.links.new(final_bsdf.outputs[0], result[0].inputs['Surface']) - material_node_tree.links.new(final_displacement.outputs[0], result[0].inputs['Displacement']) + material_node_tree.links.new(final_bsdf.outputs[0], result[0].inputs["Surface"]) + material_node_tree.links.new( + final_displacement.outputs[0], result[0].inputs["Displacement"] + ) final_bsdf.label = MARKER_LABEL - + return + def apply(obj): if not isinstance(obj, Iterable): obj = [obj] for o in obj: - apply_over(o) \ No newline at end of file + apply_over(o) diff --git a/infinigen/assets/materials/wear_tear/procedural_scratch.py b/infinigen/assets/materials/wear_tear/procedural_scratch.py index 2a3ebc5fc..304815408 100644 --- a/infinigen/assets/materials/wear_tear/procedural_scratch.py +++ b/infinigen/assets/materials/wear_tear/procedural_scratch.py @@ -12,11 +12,12 @@ # https://www.youtube.com/watch?v=MH8iutCKtYc by ChuckCG -from numpy.random import uniform, choice -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler import logging from collections.abc import Iterable +from numpy.random import uniform + +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler from infinigen.core.util.random import log_uniform logger = logging.getLogger(__name__) @@ -24,29 +25,32 @@ def get_scratch_params(): return { - 'angle1': uniform(10., 80.0), - 'angle2': uniform(-80.0, -10.0), - 'scratch_scale': log_uniform(5,80), - 'scratch_mask_ratio': log_uniform(0.01, 0.9), - 'scratch_mask_noise': log_uniform(5, 40), - 'scratch_depth': log_uniform(.1,1.), + "angle1": uniform(10.0, 80.0), + "angle2": uniform(-80.0, -10.0), + "scratch_scale": log_uniform(5, 80), + "scratch_mask_ratio": log_uniform(0.01, 0.9), + "scratch_mask_noise": log_uniform(5, 40), + "scratch_depth": log_uniform(0.1, 1.0), } -def scratch_shader(nw: NodeWrangler, - original_bsdf, - angle1=45.0, - angle2=-20.0, - scratch_scale=20.0, - scratch_mask_ratio=0.8, - scratch_mask_noise=10.0, - scratch_depth=0.1): + +def scratch_shader( + nw: NodeWrangler, + original_bsdf, + angle1=45.0, + angle2=-20.0, + scratch_scale=20.0, + scratch_mask_ratio=0.8, + scratch_mask_noise=10.0, + scratch_depth=0.1, +): # Code generated using version 2.6.5 of the node_transpiler texture_coordinate_1 = nw.new_node(Nodes.TextureCoord) n_angle1 = nw.new_node(Nodes.Value) n_angle1.outputs[0].default_value = angle1 - + n_angle2 = nw.new_node(Nodes.Value) n_angle2.outputs[0].default_value = angle2 @@ -55,63 +59,124 @@ def 
scratch_shader(nw: NodeWrangler, n_scratch_mask_ratio = nw.new_node(Nodes.Value) n_scratch_mask_ratio.outputs[0].default_value = scratch_mask_ratio - + n_scratch_mask_noise = nw.new_node(Nodes.Value) n_scratch_mask_noise.outputs[0].default_value = scratch_mask_noise - + n_scratch_depth = nw.new_node(Nodes.Value) n_scratch_depth.outputs[0].default_value = scratch_depth - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': n_angle1}) - - mapping_1 = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': texture_coordinate_1.outputs["Object"], 'Rotation': combine_xyz, 'Scale': (25.0000, 1.0000, 1.0000)}, - attrs={'vector_type': 'TEXTURE'}) - - noise_texture_3 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': mapping_1, 'Scale': n_scratch_scale, 'Detail': 15.0000, 'Roughness': 0.0000, 'Distortion': 22.8000}) - - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': n_angle2}) - - mapping_2 = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': texture_coordinate_1.outputs["Object"], 'Rotation': combine_xyz_1, 'Scale': (25.0000, 1.0000, 1.0000)}, - attrs={'vector_type': 'TEXTURE'}) - - noise_texture_5 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': mapping_2, 'Scale': n_scratch_scale, 'Detail': 15.0000, 'Roughness': 0.0000, 'Distortion': 22.8000}) - - add = nw.new_node(Nodes.Math, input_kwargs={0: noise_texture_3.outputs["Fac"], 1: noise_texture_5.outputs["Fac"]}) - - mapping_3 = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': texture_coordinate_1.outputs["Object"], 'Rotation': (0.1588, -0.5742, 0.1920)}, - attrs={'vector_type': 'TEXTURE'}) - - noise_texture_6 = nw.new_node(Nodes.NoiseTexture, input_kwargs={'Vector': mapping_3, 'Scale': n_scratch_mask_noise, 'Detail': 1.0000}) - - color_ramp_2 = nw.new_node(Nodes.ColorRamp, input_kwargs={'Fac': noise_texture_6.outputs["Fac"]}) + + combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Y": n_angle1}) + + mapping_1 = nw.new_node( + Nodes.Mapping, + input_kwargs={ + "Vector": texture_coordinate_1.outputs["Object"], + "Rotation": combine_xyz, + "Scale": (25.0000, 1.0000, 1.0000), + }, + attrs={"vector_type": "TEXTURE"}, + ) + + noise_texture_3 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": mapping_1, + "Scale": n_scratch_scale, + "Detail": 15.0000, + "Roughness": 0.0000, + "Distortion": 22.8000, + }, + ) + + combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Y": n_angle2}) + + mapping_2 = nw.new_node( + Nodes.Mapping, + input_kwargs={ + "Vector": texture_coordinate_1.outputs["Object"], + "Rotation": combine_xyz_1, + "Scale": (25.0000, 1.0000, 1.0000), + }, + attrs={"vector_type": "TEXTURE"}, + ) + + noise_texture_5 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": mapping_2, + "Scale": n_scratch_scale, + "Detail": 15.0000, + "Roughness": 0.0000, + "Distortion": 22.8000, + }, + ) + + add = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: noise_texture_3.outputs["Fac"], + 1: noise_texture_5.outputs["Fac"], + }, + ) + + mapping_3 = nw.new_node( + Nodes.Mapping, + input_kwargs={ + "Vector": texture_coordinate_1.outputs["Object"], + "Rotation": (0.1588, -0.5742, 0.1920), + }, + attrs={"vector_type": "TEXTURE"}, + ) + + noise_texture_6 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": mapping_3, + "Scale": n_scratch_mask_noise, + "Detail": 1.0000, + }, + ) + + color_ramp_2 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": noise_texture_6.outputs["Fac"]} + ) color_ramp_2.color_ramp.elements[0].position = 0.4109 
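For context, the scratch shader above follows the same entry-point pattern as the edge-wear module: `apply_over` samples `get_scratch_params()` when no overrides are passed and links the resulting bump into the existing BSDF's Normal input only if nothing is already connected there. A usage sketch, assuming Blender with Infinigen installed and a hypothetical mesh `obj` that already has a material:

```python
import bpy

from infinigen.assets.materials.wear_tear import procedural_scratch

# Hypothetical target: the active object, which must already carry a material,
# since the scratch bump is layered over its existing BSDF.
obj = bpy.context.active_object

# Draw angles, scale, mask ratio/noise and depth from the module's ranges,
# then apply the scratches over every material on the object.
params = procedural_scratch.get_scratch_params()
procedural_scratch.apply_over(obj, **params)
```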
color_ramp_2.color_ramp.elements[0].color = [0.0000, 0.0000, 0.0000, 1.0000] color_ramp_2.color_ramp.elements[1].position = 1.0000 color_ramp_2.color_ramp.elements[1].color = [1.0000, 1.0000, 1.0000, 1.0000] - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: n_scratch_mask_ratio, 1: color_ramp_2.outputs["Color"]}, attrs={'operation': 'MULTIPLY'}) - - add_1 = nw.new_node(Nodes.Math, input_kwargs={0: add, 1: multiply}, attrs={'use_clamp': True}) - - map_range = nw.new_node(Nodes.MapRange, input_kwargs={'Value': add_1, 1: 0.7000, 2: 0.7200, 4: 0.9000}) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: n_scratch_mask_ratio, 1: color_ramp_2.outputs["Color"]}, + attrs={"operation": "MULTIPLY"}, + ) + + add_1 = nw.new_node( + Nodes.Math, input_kwargs={0: add, 1: multiply}, attrs={"use_clamp": True} + ) + + map_range = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": add_1, 1: 0.7000, 2: 0.7200, 4: 0.9000} + ) # scaled_scratch = nw.new_node(Nodes.Math, input_kwargs={0: n_scratch_depth, 1: map_range.outputs["Result"]}, attrs={'operation': 'MULTIPLY'}) - # material_output = nw.new_node(Nodes.MaterialOutput, - # input_kwargs={'Surface': original_bsdf, 'Displacement': scaled_scratch}, + # material_output = nw.new_node(Nodes.MaterialOutput, + # input_kwargs={'Surface': original_bsdf, 'Displacement': scaled_scratch}, # attrs={'is_active_output': True}) # return material_output - bump = nw.new_node(Nodes.Bump, input_kwargs={'Strength': n_scratch_depth, 'Height': map_range.outputs["Result"]}) - return {'Normal': bump} + bump = nw.new_node( + Nodes.Bump, + input_kwargs={ + "Strength": n_scratch_depth, + "Height": map_range.outputs["Result"], + }, + ) + return {"Normal": bump} + - def find_normal_input(bsdf): for i, o in enumerate(bsdf.inputs): if o.name == "Normal": @@ -119,23 +184,25 @@ def find_normal_input(bsdf): logger.debug(f"Normal not found for {bsdf}") return None + MARKER_LABEL = "scratch" + def apply_over(obj, selection=None, **shader_kwargs): - # get all materials # https://blenderartists.org/t/finding-out-if-an-object-has-a-material/512570/6 materials = obj.data.materials.items() if len(materials) == 0: - logging.warning(f"No material exist for {obj.name}! Scratches can only be applied over some existing material.") + logging.warning( + f"No material exist for {obj.name}! Scratches can only be applied over some existing material." + ) return - + if len(shader_kwargs) == 0: logging.debug("Obtaining Randomized Scratch Parameters") shader_kwargs = get_scratch_params() - + for material_name, material in materials: - # get material node tree # https://blender.stackexchange.com/questions/240278/how-to-access-shader-node-via-python-script material_node_tree = material.node_tree @@ -145,26 +212,27 @@ def apply_over(obj, selection=None, **shader_kwargs): continue nw = NodeWrangler(material_node_tree) - + result = nw.find_recursive("ShaderNodeBsdf") if len(result) == 0: logging.debug("No BSDF found in the object's materials! 
Returning") continue - + nw_bsdf, bsdf = result[-1] # final_bsdf = scratch_shader(nw_bsdf, bsdf, **shader_kwargs) - - if 'Normal' in bsdf.inputs.keys(): - if len(nw_bsdf.find_from(bsdf.inputs['Normal'])) == 0: - bump = scratch_shader(nw_bsdf, None, **shader_kwargs)['Normal'] - + + if "Normal" in bsdf.inputs.keys(): + if len(nw_bsdf.find_from(bsdf.inputs["Normal"])) == 0: + bump = scratch_shader(nw_bsdf, None, **shader_kwargs)["Normal"] + # connecting nodes: https://blender.stackexchange.com/questions/101820/how-to-add-remove-links-to-existing-or-new-nodes-using-python - nw_bsdf.links.new(bump.outputs[0], bsdf.inputs['Normal']) + nw_bsdf.links.new(bump.outputs[0], bsdf.inputs["Normal"]) nw_bsdf.label = MARKER_LABEL + def apply(obj): if not isinstance(obj, Iterable): obj = [obj] for o in obj: - apply_over(o) \ No newline at end of file + apply_over(o) diff --git a/infinigen/assets/materials/woods/composite_wood_tile.py b/infinigen/assets/materials/woods/composite_wood_tile.py index 218bc27e9..5566c6042 100644 --- a/infinigen/assets/materials/woods/composite_wood_tile.py +++ b/infinigen/assets/materials/woods/composite_wood_tile.py @@ -2,17 +2,22 @@ # This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. # Authors: Lingjie Mei -from functools import wraps -from numpy.random import uniform +from infinigen.assets.materials import tile -from infinigen.core.nodes import Nodes -from infinigen.core.util.random import log_uniform -from .tiled_wood import shader_wood_tiled -from .. import shader_wood, tile -from ..tile import shader_staggered_tile +from .wood import shader_wood -def apply(obj, selection=None, vertical=False, scale=None, alternating=None, shape=None, **kwargs): +def apply( + obj, + selection=None, + vertical=False, + scale=None, + alternating=None, + shape=None, + **kwargs, +): shader_func = shader_wood - tile.apply(obj, selection, vertical, shader_func, scale, alternating, 'composite', **kwargs) + tile.apply( + obj, selection, vertical, shader_func, scale, alternating, "composite", **kwargs + ) diff --git a/infinigen/assets/materials/woods/crossed_wood_tile.py b/infinigen/assets/materials/woods/crossed_wood_tile.py index 685915b2a..737f11307 100644 --- a/infinigen/assets/materials/woods/crossed_wood_tile.py +++ b/infinigen/assets/materials/woods/crossed_wood_tile.py @@ -1,14 +1,23 @@ # Copyright (c) Princeton University. # This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + # Authors: Lingjie Mei from .. import tile from .wood import shader_wood -from ...utils.object import new_plane -def apply(obj, selection=None, vertical=False, scale=None, alternating=None, shape=None, **kwargs): +def apply( + obj, + selection=None, + vertical=False, + scale=None, + alternating=None, + shape=None, + **kwargs, +): shader_func = shader_wood - tile.apply(obj, selection, vertical, shader_func, scale, False, 'crossed', **kwargs) + tile.apply(obj, selection, vertical, shader_func, scale, False, "crossed", **kwargs) + # def make_sphere():e diff --git a/infinigen/assets/materials/woods/hexagon_wood_tile.py b/infinigen/assets/materials/woods/hexagon_wood_tile.py index e530c0c46..ee414dd10 100644 --- a/infinigen/assets/materials/woods/hexagon_wood_tile.py +++ b/infinigen/assets/materials/woods/hexagon_wood_tile.py @@ -1,12 +1,22 @@ # Copyright (c) Princeton University. 
# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + # Authors: Lingjie Mei from .. import tile from .wood import shader_wood -from ...utils.object import new_plane -def apply(obj, selection=None, vertical=False, scale=None, alternating=None, shape=None, **kwargs): +def apply( + obj, + selection=None, + vertical=False, + scale=None, + alternating=None, + shape=None, + **kwargs, +): shader_func = shader_wood - tile.apply(obj, selection, vertical, shader_func, scale, alternating, 'hexagon', **kwargs) + tile.apply( + obj, selection, vertical, shader_func, scale, alternating, "hexagon", **kwargs + ) diff --git a/infinigen/assets/materials/woods/non_wood_tile.py b/infinigen/assets/materials/woods/non_wood_tile.py index d63aa058e..d9824728f 100644 --- a/infinigen/assets/materials/woods/non_wood_tile.py +++ b/infinigen/assets/materials/woods/non_wood_tile.py @@ -6,11 +6,18 @@ def apply( - obj, selection=None, vertical=False, shader_func=None, scale=None, alternating=None, shape=None, - **kwargs + obj, + selection=None, + vertical=False, + shader_func=None, + scale=None, + alternating=None, + shape=None, + **kwargs, ): from .. import tile from .wood import shader_wood + shader_funcs = tile.get_shader_funcs() shader_funcs = [(f, w) for f, w in shader_funcs if f != shader_wood] funcs, weights = zip(*shader_funcs) @@ -18,5 +25,7 @@ def apply( if shader_func is None: shader_func = np.random.choice(funcs, p=weights) if shape is None: - shape = np.random.choice(['square', 'hexagon', 'rectangle']) - tile.apply(obj, selection, vertical, shader_func, scale, alternating, shape, **kwargs) + shape = np.random.choice(["square", "hexagon", "rectangle"]) + tile.apply( + obj, selection, vertical, shader_func, scale, alternating, shape, **kwargs + ) diff --git a/infinigen/assets/materials/woods/square_wood_tile.py b/infinigen/assets/materials/woods/square_wood_tile.py index d86306616..768fb3be2 100644 --- a/infinigen/assets/materials/woods/square_wood_tile.py +++ b/infinigen/assets/materials/woods/square_wood_tile.py @@ -1,14 +1,25 @@ # Copyright (c) Princeton University. # This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + # Authors: Lingjie Mei from .. import tile from .wood import shader_wood -from ...utils.object import new_plane -def apply(obj, selection=None, vertical=False, scale=None, alternating=None, shape=None, **kwargs): +def apply( + obj, + selection=None, + vertical=False, + scale=None, + alternating=None, + shape=None, + **kwargs, +): shader_func = shader_wood - tile.apply(obj, selection, vertical, shader_func, scale, alternating, 'square', **kwargs) + tile.apply( + obj, selection, vertical, shader_func, scale, alternating, "square", **kwargs + ) + # def make_sphere():e diff --git a/infinigen/assets/materials/woods/staggered_wood_tile.py b/infinigen/assets/materials/woods/staggered_wood_tile.py index e28507145..4bee3aa9c 100644 --- a/infinigen/assets/materials/woods/staggered_wood_tile.py +++ b/infinigen/assets/materials/woods/staggered_wood_tile.py @@ -1,12 +1,22 @@ # Copyright (c) Princeton University. # This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + # Authors: Lingjie Mei from .. 
import tile from .wood import shader_wood -from ...utils.object import new_plane -def apply(obj, selection=None, vertical=False, scale=None, alternating=None, shape=None, **kwargs): +def apply( + obj, + selection=None, + vertical=False, + scale=None, + alternating=None, + shape=None, + **kwargs, +): shader_func = shader_wood - tile.apply(obj, selection, vertical, shader_func, scale, alternating, 'staggered', **kwargs) + tile.apply( + obj, selection, vertical, shader_func, scale, alternating, "staggered", **kwargs + ) diff --git a/infinigen/assets/materials/woods/tiled_wood.py b/infinigen/assets/materials/woods/tiled_wood.py index 3cc818923..1351be44c 100644 --- a/infinigen/assets/materials/woods/tiled_wood.py +++ b/infinigen/assets/materials/woods/tiled_wood.py @@ -5,41 +5,42 @@ # Acknowledgement: This file draws inspiration https://www.youtube.com/watch?v=rd2jhGV6tqo by Ryan King Art -import bpy -import bpy -import mathutils -import numpy as np -from numpy.random import uniform, normal, randint, choice +from numpy.random import randint, uniform from infinigen.assets.materials import common +from infinigen.assets.materials.bark_random import hex_to_rgb from infinigen.assets.materials.woods.wood import get_color -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler from infinigen.core.nodes import node_utils -from infinigen.core.util.color import rgb2hsv - +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler from infinigen.core.util.random import clip_gaussian -from infinigen.assets.materials.bark_random import get_random_bark_params, hex_to_rgb - -@node_utils.to_nodegroup('nodegroup_tiling', singleton=False, type='ShaderNodeTree') +@node_utils.to_nodegroup("nodegroup_tiling", singleton=False, type="ShaderNodeTree") def nodegroup_tiling(nw: NodeWrangler): # Code generated using version 2.6.4 of the node_transpiler texture_coordinate = nw.new_node(Nodes.TextureCoord) - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Horizontal Scale', 0.5000), - ('NodeSocketFloat', 'Vertical Scale', 0.5), - ('NodeSocketFloat', 'Seed', 0.5000)]) - - divide = nw.new_node(Nodes.Math, + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "Horizontal Scale", 0.5000), + ("NodeSocketFloat", "Vertical Scale", 0.5), + ("NodeSocketFloat", "Seed", 0.5000), + ], + ) + + divide = nw.new_node( + Nodes.Math, input_kwargs={0: 1.0000, 1: group_input.outputs["Horizontal Scale"]}, - attrs={'operation': 'DIVIDE'}) + attrs={"operation": "DIVIDE"}, + ) - multiply = nw.new_node(Nodes.Math, input_kwargs={0: divide}, attrs={'operation': 'MULTIPLY'}) + multiply = nw.new_node( + Nodes.Math, input_kwargs={0: divide}, attrs={"operation": "MULTIPLY"} + ) - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': multiply}) + combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Y": multiply}) vec = texture_coordinate.outputs["Object"] @@ -47,110 +48,243 @@ def nodegroup_tiling(nw: NodeWrangler): add = nw.new_node(Nodes.VectorMath, input_kwargs={0: vec, 1: combine_xyz}) - divide_1 = nw.new_node(Nodes.Math, + divide_1 = nw.new_node( + Nodes.Math, input_kwargs={0: 1.0000, 1: group_input.outputs["Vertical Scale"]}, - attrs={'operation': 'DIVIDE'}) - - brick_texture = nw.new_node(Nodes.BrickTexture, - input_kwargs={'Vector': add.outputs["Vector"], 'Color2': (0,0,0, 1.0000), 'Scale': 1.0000, 'Mortar Size': 0.0050, 'Mortar Smooth': 1.0000, 'Bias': -0.5000, 'Brick Width': divide_1, 'Row Height': divide}, - attrs={'squash_frequency': 1}) - - 
multiply_add = nw.new_node(Nodes.Math, - input_kwargs={0: brick_texture.outputs["Color"], 1: 1000.0000, 2: group_input.outputs["Seed"]}, - attrs={'operation': 'MULTIPLY_ADD'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Tile Color': brick_texture.outputs["Color"], 'Seed': multiply_add, 'Displacement': brick_texture.outputs["Color"]}, - attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_tiled_wood', singleton=False, type='ShaderNodeTree') + attrs={"operation": "DIVIDE"}, + ) + + brick_texture = nw.new_node( + Nodes.BrickTexture, + input_kwargs={ + "Vector": add.outputs["Vector"], + "Color2": (0, 0, 0, 1.0000), + "Scale": 1.0000, + "Mortar Size": 0.0050, + "Mortar Smooth": 1.0000, + "Bias": -0.5000, + "Brick Width": divide_1, + "Row Height": divide, + }, + attrs={"squash_frequency": 1}, + ) + + multiply_add = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: brick_texture.outputs["Color"], + 1: 1000.0000, + 2: group_input.outputs["Seed"], + }, + attrs={"operation": "MULTIPLY_ADD"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Tile Color": brick_texture.outputs["Color"], + "Seed": multiply_add, + "Displacement": brick_texture.outputs["Color"], + }, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup("nodegroup_tiled_wood", singleton=False, type="ShaderNodeTree") def nodegroup_tiled_wood(nw: NodeWrangler): # Code generated using version 2.6.4 of the node_transpiler texture_coordinate = nw.new_node(Nodes.TextureCoord) - mapping_2 = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': texture_coordinate.outputs["Object"], 'Scale': (5.0000, 100.0000, 100.0000)}) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Tile Horizontal Scale', 0.0000), - ('NodeSocketFloat', 'Tile Vertical Scale', 2.9600), - ('NodeSocketColor', 'Main Color', (0.0000, 0.0000, 0.0000, 1.0000)), - ('NodeSocketFloat', 'Seed', 0.0000)]) - - group = nw.new_node(nodegroup_tiling().name, - input_kwargs={'Horizontal Scale': group_input.outputs["Tile Horizontal Scale"], 'Vertical Scale': group_input.outputs["Tile Vertical Scale"], 'Seed': group_input.outputs["Seed"]}) - - musgrave_texture_2 = nw.new_node(Nodes.MusgraveTexture, - input_kwargs={'Vector': mapping_2, 'W': group.outputs["Seed"], 'Scale': 10.0000, 'Detail': 15.0000, 'Dimension': 7.0000}, - attrs={'musgrave_dimensions': '4D'}) - - map_range_2 = nw.new_node(Nodes.MapRange, input_kwargs={'Value': musgrave_texture_2, 3: 1.0000, 4: -1.0000}) - - mapping_1 = nw.new_node(Nodes.Mapping, input_kwargs={'Vector': texture_coordinate.outputs["Object"]}) - - noise_texture_1 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': mapping_1, 'W': group.outputs["Seed"], 'Scale': 0.5000, 'Detail': 1.0000, 'Distortion': 1.1000}, - attrs={'noise_dimensions': '4D'}) - - musgrave_texture_1 = nw.new_node(Nodes.MusgraveTexture, - input_kwargs={'W': group.outputs["Seed"], 'Scale': noise_texture_1.outputs["Fac"], 'Detail': 15.0000, 'Dimension': 0.2000, 'Lacunarity': 2.4000}, - attrs={'musgrave_dimensions': '4D'}) - - map_range = nw.new_node(Nodes.MapRange, input_kwargs={'Value': musgrave_texture_1, 3: -1.4000, 4: 1.5000}) - - map_range_1 = nw.new_node(Nodes.MapRange, input_kwargs={'Value': map_range.outputs["Result"], 3: 1.0000, 4: 0.5000}) - - mapping = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': texture_coordinate.outputs["Object"], 'Scale': (0.1500, 1.0000, 0.1500)}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': mapping, 
'W': group.outputs["Seed"], 'Detail': 5.0000, 'Distortion': 1.0000}, - attrs={'noise_dimensions': '4D'}) - - musgrave_texture = nw.new_node(Nodes.MusgraveTexture, - input_kwargs={'Vector': noise_texture.outputs["Fac"], 'W': group.outputs["Seed"], 'Scale': 4.0000, 'Detail': 10.0000, 'Dimension': 0.0000}, - attrs={'musgrave_dimensions': '4D'}) - - mix = nw.new_node(Nodes.Mix, + mapping_2 = nw.new_node( + Nodes.Mapping, + input_kwargs={ + "Vector": texture_coordinate.outputs["Object"], + "Scale": (5.0000, 100.0000, 100.0000), + }, + ) + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "Tile Horizontal Scale", 0.0000), + ("NodeSocketFloat", "Tile Vertical Scale", 2.9600), + ("NodeSocketColor", "Main Color", (0.0000, 0.0000, 0.0000, 1.0000)), + ("NodeSocketFloat", "Seed", 0.0000), + ], + ) + + group = nw.new_node( + nodegroup_tiling().name, + input_kwargs={ + "Horizontal Scale": group_input.outputs["Tile Horizontal Scale"], + "Vertical Scale": group_input.outputs["Tile Vertical Scale"], + "Seed": group_input.outputs["Seed"], + }, + ) + + musgrave_texture_2 = nw.new_node( + Nodes.MusgraveTexture, + input_kwargs={ + "Vector": mapping_2, + "W": group.outputs["Seed"], + "Scale": 10.0000, + "Detail": 15.0000, + "Dimension": 7.0000, + }, + attrs={"musgrave_dimensions": "4D"}, + ) + + map_range_2 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": musgrave_texture_2, 3: 1.0000, 4: -1.0000}, + ) + + mapping_1 = nw.new_node( + Nodes.Mapping, input_kwargs={"Vector": texture_coordinate.outputs["Object"]} + ) + + noise_texture_1 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": mapping_1, + "W": group.outputs["Seed"], + "Scale": 0.5000, + "Detail": 1.0000, + "Distortion": 1.1000, + }, + attrs={"noise_dimensions": "4D"}, + ) + + musgrave_texture_1 = nw.new_node( + Nodes.MusgraveTexture, + input_kwargs={ + "W": group.outputs["Seed"], + "Scale": noise_texture_1.outputs["Fac"], + "Detail": 15.0000, + "Dimension": 0.2000, + "Lacunarity": 2.4000, + }, + attrs={"musgrave_dimensions": "4D"}, + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": musgrave_texture_1, 3: -1.4000, 4: 1.5000}, + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": map_range.outputs["Result"], 3: 1.0000, 4: 0.5000}, + ) + + mapping = nw.new_node( + Nodes.Mapping, + input_kwargs={ + "Vector": texture_coordinate.outputs["Object"], + "Scale": (0.1500, 1.0000, 0.1500), + }, + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": mapping, + "W": group.outputs["Seed"], + "Detail": 5.0000, + "Distortion": 1.0000, + }, + attrs={"noise_dimensions": "4D"}, + ) + + musgrave_texture = nw.new_node( + Nodes.MusgraveTexture, + input_kwargs={ + "Vector": noise_texture.outputs["Fac"], + "W": group.outputs["Seed"], + "Scale": 4.0000, + "Detail": 10.0000, + "Dimension": 0.0000, + }, + attrs={"musgrave_dimensions": "4D"}, + ) + + mix = nw.new_node( + Nodes.Mix, input_kwargs={6: noise_texture.outputs["Fac"], 7: musgrave_texture}, - attrs={'data_type': 'RGBA'}) + attrs={"data_type": "RGBA"}, + ) - mix_1 = nw.new_node(Nodes.Mix, + mix_1 = nw.new_node( + Nodes.Mix, input_kwargs={0: 0.9000, 6: map_range_1.outputs["Result"], 7: mix.outputs[2]}, - attrs={'blend_type': 'MULTIPLY', 'data_type': 'RGBA'}) + attrs={"blend_type": "MULTIPLY", "data_type": "RGBA"}, + ) - mix_2 = nw.new_node(Nodes.Mix, + mix_2 = nw.new_node( + Nodes.Mix, input_kwargs={0: 0.9500, 6: map_range_2.outputs["Result"], 7: mix_1.outputs[2]}, - 
attrs={'blend_type': 'MULTIPLY', 'data_type': 'RGBA'}) - - hue_saturation_value = nw.new_node('ShaderNodeHueSaturation', - input_kwargs={'Saturation': 0.8000, 'Value': 0.2000, 'Fac': 0.0, 'Color': group_input.outputs["Main Color"]}) - - mix_3 = nw.new_node(Nodes.Mix, - input_kwargs={0: mix_2.outputs[2], 6: hue_saturation_value, 7: group_input.outputs["Main Color"]}, - attrs={'data_type': 'RGBA'}) - - mix_4 = nw.new_node(Nodes.Mix, + attrs={"blend_type": "MULTIPLY", "data_type": "RGBA"}, + ) + + hue_saturation_value = nw.new_node( + "ShaderNodeHueSaturation", + input_kwargs={ + "Saturation": 0.8000, + "Value": 0.2000, + "Fac": 0.0, + "Color": group_input.outputs["Main Color"], + }, + ) + + mix_3 = nw.new_node( + Nodes.Mix, + input_kwargs={ + 0: mix_2.outputs[2], + 6: hue_saturation_value, + 7: group_input.outputs["Main Color"], + }, + attrs={"data_type": "RGBA"}, + ) + + mix_4 = nw.new_node( + Nodes.Mix, input_kwargs={0: 1.0000, 6: mix_3.outputs[2], 7: group.outputs["Tile Color"]}, - attrs={'blend_type': 'MULTIPLY', 'data_type': 'RGBA'}) + attrs={"blend_type": "MULTIPLY", "data_type": "RGBA"}, + ) color = mix_4.outputs[2] - roughness = nw.build_float_curve(color, [(0, uniform(.3, .5)), (1, uniform(.8, 1.))]) - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, input_kwargs={'Base Color': color, 'Roughness': roughness}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: mix_2.outputs[2], 1: 0.1000}, attrs={'operation': 'MULTIPLY'}) - - add = nw.new_node(Nodes.Math, input_kwargs={0: group.outputs["Displacement"], 1: multiply}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: add, 1: 0.0100}, attrs={'operation': 'MULTIPLY'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'BSDF': principled_bsdf, 'Displacement': multiply_1}, - attrs={'is_active_output': True}) - - -def shader_wood_tiled(nw: NodeWrangler, hscale=None, vscale=None, base_color=None, seed=None, **kwargs): + roughness = nw.build_float_curve( + color, [(0, uniform(0.3, 0.5)), (1, uniform(0.8, 1.0))] + ) + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, input_kwargs={"Base Color": color, "Roughness": roughness} + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: mix_2.outputs[2], 1: 0.1000}, + attrs={"operation": "MULTIPLY"}, + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: group.outputs["Displacement"], 1: multiply} + ) + + multiply_1 = nw.new_node( + Nodes.Math, input_kwargs={0: add, 1: 0.0100}, attrs={"operation": "MULTIPLY"} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"BSDF": principled_bsdf, "Displacement": multiply_1}, + attrs={"is_active_output": True}, + ) + + +def shader_wood_tiled( + nw: NodeWrangler, hscale=None, vscale=None, base_color=None, seed=None, **kwargs +): # Code generated using version 2.6.4 of the node_transpiler if hscale is None: @@ -162,21 +296,30 @@ def shader_wood_tiled(nw: NodeWrangler, hscale=None, vscale=None, base_color=Non if base_color is None: base_color = get_color() - group = nw.new_node(nodegroup_tiled_wood().name, - input_kwargs={'Tile Horizontal Scale': hscale, - 'Tile Vertical Scale': vscale, - 'Seed': seed, - 'Main Color': base_color}) + group = nw.new_node( + nodegroup_tiled_wood().name, + input_kwargs={ + "Tile Horizontal Scale": hscale, + "Tile Vertical Scale": vscale, + "Seed": seed, + "Main Color": base_color, + }, + ) - displacement = nw.new_node('ShaderNodeDisplacement', input_kwargs={'Height': group.outputs["Displacement"], 'Midlevel': 0.0000}) + displacement = nw.new_node( + 
"ShaderNodeDisplacement", + input_kwargs={"Height": group.outputs["Displacement"], "Midlevel": 0.0000}, + ) - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': group.outputs["BSDF"], 'Displacement': displacement}, - attrs={'is_active_output': True}) + material_output = nw.new_node( + Nodes.MaterialOutput, + input_kwargs={"Surface": group.outputs["BSDF"], "Displacement": displacement}, + attrs={"is_active_output": True}, + ) def get_random_light_wood_params(): - color_fac = [0xdeb887, 0xcdaa7d, 0xfff8dc] + color_fac = [0xDEB887, 0xCDAA7D, 0xFFF8DC] color_factory = [hex_to_rgb(c) for c in color_fac] return color_factory[randint(len(color_fac))] @@ -184,5 +327,6 @@ def get_random_light_wood_params(): def apply(obj, selection=None, **kwargs): common.apply(obj, shader_wood_tiled, selection=selection, **kwargs) + # def make_sphere(): # return new_plane() diff --git a/infinigen/assets/materials/woods/wood.py b/infinigen/assets/materials/woods/wood.py index 1891ab51d..91c02bf75 100644 --- a/infinigen/assets/materials/woods/wood.py +++ b/infinigen/assets/materials/woods/wood.py @@ -8,128 +8,197 @@ from numpy.random import uniform from infinigen.assets.materials import common -from infinigen.core.nodes import NodeWrangler, Nodes +from infinigen.core.nodes import Nodes, NodeWrangler from infinigen.core.util.color import hsv2rgba, rgb2hsv from infinigen.core.util.random import log_uniform + from ...utils.object import new_cube def get_color(): from infinigen.assets.materials.bark_random import get_random_bark_params + _, color_params = get_random_bark_params(np.random.randint(1e7)) - h, s, v = rgb2hsv(color_params['Color'][:-1]) - return hsv2rgba(h + uniform(-.0, .05), s + uniform(-.3, .2), v * log_uniform(.2, 20)) + h, s, v = rgb2hsv(color_params["Color"][:-1]) + return hsv2rgba( + h + uniform(-0.0, 0.05), s + uniform(-0.3, 0.2), v * log_uniform(0.2, 20) + ) def shader_wood(nw: NodeWrangler, color=None, w=None, vertical=False, **kwargs): # Code generated using version 2.6.4 of the node_transpiler - vec = nw.new_node(Nodes.TextureCoord).outputs['Object'] + vec = nw.new_node(Nodes.TextureCoord).outputs["Object"] if vertical: vec = nw.new_node( - Nodes.Mapping, [vec], input_kwargs={'Rotation': (np.pi / 2, 0, np.pi / 2 * np.random.randint(2))} + Nodes.Mapping, + [vec], + input_kwargs={"Rotation": (np.pi / 2, 0, np.pi / 2 * np.random.randint(2))}, ) mapping_2 = nw.new_node( - Nodes.Mapping, input_kwargs={ - 'Vector': vec, - 'Scale': (5.0000, 100.0000, 100.0000) - }) + Nodes.Mapping, + input_kwargs={"Vector": vec, "Scale": (5.0000, 100.0000, 100.0000)}, + ) if color is None: color = get_color() if w is None: w = uniform(0, 1) - musgrave_texture_2 = nw.new_node(Nodes.MusgraveTexture, input_kwargs={ - 'Vector': mapping_2, - 'W': w, - 'Scale': 10.0000, - 'Detail': 15.0000, - 'Dimension': 7.0000 - }, attrs={'musgrave_dimensions': '4D'}) - - map_range_2 = nw.new_node(Nodes.MapRange, input_kwargs={'Value': musgrave_texture_2, 3: 1.0000, 4: -1.0000}) - - mapping_1 = nw.new_node(Nodes.Mapping, input_kwargs={'Vector': vec}) - - noise_texture_1 = nw.new_node(Nodes.NoiseTexture, input_kwargs={ - 'Vector': mapping_1, - 'W': w, - 'Scale': 0.5000, - 'Detail': 1.0000, - 'Distortion': 1.1000 - }, attrs={'noise_dimensions': '4D'}) - - musgrave_texture_1 = nw.new_node(Nodes.MusgraveTexture, input_kwargs={ - 'W': w, - 'Scale': noise_texture_1.outputs["Fac"], - 'Detail': 15.0000, - 'Dimension': 0.2000, - 'Lacunarity': 2.4000 - }, attrs={'musgrave_dimensions': '4D'}) - - map_range = 
nw.new_node(Nodes.MapRange, input_kwargs={'Value': musgrave_texture_1, 3: -1.4000, 4: 1.5000}) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': map_range.outputs["Result"], 3: 1.0000, 4: 0.5000}) - - mapping = nw.new_node(Nodes.Mapping, input_kwargs={ - 'Vector': vec, - 'Scale': (0.1500, 1.0000, 0.1500) - }) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': mapping, 'W': w, 'Detail': 5.0000, 'Distortion': 1.0000 - }, attrs={'noise_dimensions': '4D'}) - - musgrave_texture = nw.new_node(Nodes.MusgraveTexture, input_kwargs={ - 'Vector': noise_texture.outputs["Fac"], - 'W': w, - 'Scale': 4.0000, - 'Detail': 10.0000, - 'Dimension': 0.0000 - }, attrs={'musgrave_dimensions': '4D'}) - - mix = nw.new_node(Nodes.Mix, input_kwargs={6: noise_texture.outputs["Fac"], 7: musgrave_texture}, - attrs={'data_type': 'RGBA'}) - - mix_1 = nw.new_node(Nodes.Mix, - input_kwargs={0: 0.9000, 6: map_range_1.outputs["Result"], 7: mix.outputs[2]}, - attrs={'blend_type': 'MULTIPLY', 'data_type': 'RGBA'}) - - mix_2 = nw.new_node(Nodes.Mix, - input_kwargs={0: 0.9500, 6: map_range_2.outputs["Result"], 7: mix_1.outputs[2]}, - attrs={'blend_type': 'MULTIPLY', 'data_type': 'RGBA'}) - - hue_saturation_value = nw.new_node('ShaderNodeHueSaturation', - input_kwargs={'Saturation': 0.8000, 'Value': 0.2000, 'Color': color, - }) - - mix_3 = nw.new_node(Nodes.Mix, input_kwargs={0: mix_2.outputs[2], 6: hue_saturation_value, 7: color, - }, attrs={'data_type': 'RGBA'}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: mix_2.outputs[2], 1: log_uniform(.0002, .01)}, - attrs={'operation': 'MULTIPLY'}) - - displacement = nw.new_node('ShaderNodeDisplacement', input_kwargs={'Height': multiply, 'Midlevel': 0.0000}) + musgrave_texture_2 = nw.new_node( + Nodes.MusgraveTexture, + input_kwargs={ + "Vector": mapping_2, + "W": w, + "Scale": 10.0000, + "Detail": 15.0000, + "Dimension": 7.0000, + }, + attrs={"musgrave_dimensions": "4D"}, + ) + + map_range_2 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": musgrave_texture_2, 3: 1.0000, 4: -1.0000}, + ) + + mapping_1 = nw.new_node(Nodes.Mapping, input_kwargs={"Vector": vec}) + + noise_texture_1 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": mapping_1, + "W": w, + "Scale": 0.5000, + "Detail": 1.0000, + "Distortion": 1.1000, + }, + attrs={"noise_dimensions": "4D"}, + ) + + musgrave_texture_1 = nw.new_node( + Nodes.MusgraveTexture, + input_kwargs={ + "W": w, + "Scale": noise_texture_1.outputs["Fac"], + "Detail": 15.0000, + "Dimension": 0.2000, + "Lacunarity": 2.4000, + }, + attrs={"musgrave_dimensions": "4D"}, + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": musgrave_texture_1, 3: -1.4000, 4: 1.5000}, + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": map_range.outputs["Result"], 3: 1.0000, 4: 0.5000}, + ) + + mapping = nw.new_node( + Nodes.Mapping, input_kwargs={"Vector": vec, "Scale": (0.1500, 1.0000, 0.1500)} + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": mapping, + "W": w, + "Detail": 5.0000, + "Distortion": 1.0000, + }, + attrs={"noise_dimensions": "4D"}, + ) + + musgrave_texture = nw.new_node( + Nodes.MusgraveTexture, + input_kwargs={ + "Vector": noise_texture.outputs["Fac"], + "W": w, + "Scale": 4.0000, + "Detail": 10.0000, + "Dimension": 0.0000, + }, + attrs={"musgrave_dimensions": "4D"}, + ) + + mix = nw.new_node( + Nodes.Mix, + input_kwargs={6: noise_texture.outputs["Fac"], 7: musgrave_texture}, + attrs={"data_type": 
"RGBA"}, + ) + + mix_1 = nw.new_node( + Nodes.Mix, + input_kwargs={0: 0.9000, 6: map_range_1.outputs["Result"], 7: mix.outputs[2]}, + attrs={"blend_type": "MULTIPLY", "data_type": "RGBA"}, + ) + + mix_2 = nw.new_node( + Nodes.Mix, + input_kwargs={0: 0.9500, 6: map_range_2.outputs["Result"], 7: mix_1.outputs[2]}, + attrs={"blend_type": "MULTIPLY", "data_type": "RGBA"}, + ) + + hue_saturation_value = nw.new_node( + "ShaderNodeHueSaturation", + input_kwargs={ + "Saturation": 0.8000, + "Value": 0.2000, + "Color": color, + }, + ) + + mix_3 = nw.new_node( + Nodes.Mix, + input_kwargs={ + 0: mix_2.outputs[2], + 6: hue_saturation_value, + 7: color, + }, + attrs={"data_type": "RGBA"}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: mix_2.outputs[2], 1: log_uniform(0.0002, 0.01)}, + attrs={"operation": "MULTIPLY"}, + ) + + displacement = nw.new_node( + "ShaderNodeDisplacement", input_kwargs={"Height": multiply, "Midlevel": 0.0000} + ) color = mix_3.outputs[2] - roughness = uniform(.0, .4) + roughness = uniform(0.0, 0.4) roughness = nw.build_float_curve( - nw.new_node(Nodes.NoiseTexture, input_kwargs={'Scale': log_uniform(40, 50)}), - [(0, roughness), (1, roughness + uniform(.0, .8))]) - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, input_kwargs={ - 'Base Color': color, - 'Roughness': roughness, - 'Clearcoat': np.clip(uniform(0, 1.4), 0, 1) - }) - nw.new_node(Nodes.MaterialOutput, input_kwargs={'Surface': principled_bsdf, 'Displacement': displacement}) + nw.new_node(Nodes.NoiseTexture, input_kwargs={"Scale": log_uniform(40, 50)}), + [(0, roughness), (1, roughness + uniform(0.0, 0.8))], + ) + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": color, + "Roughness": roughness, + "Clearcoat": np.clip(uniform(0, 1.4), 0, 1), + }, + ) + nw.new_node( + Nodes.MaterialOutput, + input_kwargs={"Surface": principled_bsdf, "Displacement": displacement}, + ) def apply(obj, selection=None, **kwargs): - # TODO HACK - avoiding circular imports for now - from infinigen.assets.materials.shelf_shaders import shader_shelves_white, shader_shelves_black_wood, shader_shelves_wood + from infinigen.assets.materials.shelf_shaders import ( + shader_shelves_black_wood, + shader_shelves_white, + shader_shelves_wood, + ) r = uniform() if r < 1 / 12: @@ -142,5 +211,6 @@ def apply(obj, selection=None, **kwargs): shader = shader_wood common.apply(obj, shader, selection, **kwargs) + def make_sphere(): return new_cube() diff --git a/infinigen/assets/materials/woods/wood_old.py b/infinigen/assets/materials/woods/wood_old.py index b1c2d2e49..63ac59b60 100644 --- a/infinigen/assets/materials/woods/wood_old.py +++ b/infinigen/assets/materials/woods/wood_old.py @@ -6,11 +6,14 @@ import math as ma import numpy as np - -from infinigen.assets.materials import common -from infinigen.assets.materials.utils.surface_utils import clip, sample_range, sample_ratio, sample_color from numpy.random import uniform +from infinigen.assets.materials import common +from infinigen.assets.materials.utils.surface_utils import ( + sample_color, + sample_range, + sample_ratio, +) from infinigen.core import surface from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler from infinigen.core.util.random import log_uniform @@ -21,30 +24,50 @@ def shader_wood_old(nw: NodeWrangler, scale=1, offset=None, rotation=None, **kwa texture_coordinate_1 = nw.new_node(Nodes.TextureCoord) - rotation = uniform(0, ma.pi * 2, 3) if rotation is None else surface.eval_argument(nw, rotation) - mapping_2 = 
nw.new_node(Nodes.Mapping, input_kwargs={ - 'Vector': texture_coordinate_1.outputs["Object"], - 'Location': surface.eval_argument(nw, offset), - 'Rotation': rotation - }) - - mapping_1 = nw.new_node(Nodes.Mapping, input_kwargs={ - 'Vector': mapping_2, - 'Scale': np.array([log_uniform(2, 4), log_uniform(8, 16), log_uniform(2, 4)]) * scale, - }) - - musgrave_texture_2 = nw.new_node(Nodes.MusgraveTexture, input_kwargs={'Vector': mapping_1, 'Scale': 2.0}, - attrs={'musgrave_dimensions': '4D'}) - musgrave_texture_2.inputs['W'].default_value = sample_range(0, 5) - musgrave_texture_2.inputs['Scale'].default_value = sample_ratio(2.0, 3 / 4, 4 / 3) - - noise_texture_1 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': musgrave_texture_2, 'W': 0.7, 'Scale': 10.0}, - attrs={'noise_dimensions': '4D'}) - noise_texture_1.inputs['W'].default_value = sample_range(0, 5) - noise_texture_1.inputs['Scale'].default_value = sample_ratio(5, 0.5, 2) - - colorramp_2 = nw.new_node(Nodes.ColorRamp, input_kwargs={'Fac': noise_texture_1.outputs["Fac"]}) + rotation = ( + uniform(0, ma.pi * 2, 3) + if rotation is None + else surface.eval_argument(nw, rotation) + ) + mapping_2 = nw.new_node( + Nodes.Mapping, + input_kwargs={ + "Vector": texture_coordinate_1.outputs["Object"], + "Location": surface.eval_argument(nw, offset), + "Rotation": rotation, + }, + ) + + mapping_1 = nw.new_node( + Nodes.Mapping, + input_kwargs={ + "Vector": mapping_2, + "Scale": np.array( + [log_uniform(2, 4), log_uniform(8, 16), log_uniform(2, 4)] + ) + * scale, + }, + ) + + musgrave_texture_2 = nw.new_node( + Nodes.MusgraveTexture, + input_kwargs={"Vector": mapping_1, "Scale": 2.0}, + attrs={"musgrave_dimensions": "4D"}, + ) + musgrave_texture_2.inputs["W"].default_value = sample_range(0, 5) + musgrave_texture_2.inputs["Scale"].default_value = sample_ratio(2.0, 3 / 4, 4 / 3) + + noise_texture_1 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={"Vector": musgrave_texture_2, "W": 0.7, "Scale": 10.0}, + attrs={"noise_dimensions": "4D"}, + ) + noise_texture_1.inputs["W"].default_value = sample_range(0, 5) + noise_texture_1.inputs["Scale"].default_value = sample_ratio(5, 0.5, 2) + + colorramp_2 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": noise_texture_1.outputs["Fac"]} + ) colorramp_2.color_ramp.elements.new(0) colorramp_2.color_ramp.elements[0].position = 0.1727 colorramp_2.color_ramp.elements[0].color = (0.1567, 0.0162, 0.0017, 1.0) @@ -58,18 +81,26 @@ def shader_wood_old(nw: NodeWrangler, scale=1, offset=None, rotation=None, **kwa for e in colorramp_2.color_ramp.elements: sample_color(e.color, offset=0.04) - colorramp_4 = nw.new_node(Nodes.ColorRamp, input_kwargs={'Fac': noise_texture_1.outputs["Fac"]}) + colorramp_4 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": noise_texture_1.outputs["Fac"]} + ) colorramp_4.color_ramp.elements[0].position = 0.0 colorramp_4.color_ramp.elements[0].color = (0.4855, 0.4855, 0.4855, 1.0) colorramp_4.color_ramp.elements[1].position = 1.0 colorramp_4.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - principled_bsdf_1 = nw.new_node(Nodes.PrincipledBSDF, input_kwargs={ - 'Base Color': colorramp_2.outputs["Color"], - 'Roughness': colorramp_4.outputs["Color"] - }, attrs={'subsurface_method': 'BURLEY'}) - - material_output = nw.new_node(Nodes.MaterialOutput, input_kwargs={'Surface': principled_bsdf_1}) + principled_bsdf_1 = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": colorramp_2.outputs["Color"], + "Roughness": colorramp_4.outputs["Color"], + }, + 
attrs={"subsurface_method": "BURLEY"}, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf_1} + ) def apply(obj, selection=None, scale=1, **kwargs): diff --git a/infinigen/assets/materials/woods/wood_tile.py b/infinigen/assets/materials/woods/wood_tile.py index 07ec1d9d0..7d874fe64 100644 --- a/infinigen/assets/materials/woods/wood_tile.py +++ b/infinigen/assets/materials/woods/wood_tile.py @@ -6,8 +6,21 @@ def get_wood_tiles(): - from . import square_wood_tile, staggered_wood_tile, crossed_wood_tile, composite_wood_tile, hexagon_wood_tile - return [square_wood_tile, staggered_wood_tile, crossed_wood_tile, composite_wood_tile, hexagon_wood_tile] + from . import ( + composite_wood_tile, + crossed_wood_tile, + hexagon_wood_tile, + square_wood_tile, + staggered_wood_tile, + ) + + return [ + square_wood_tile, + staggered_wood_tile, + crossed_wood_tile, + composite_wood_tile, + hexagon_wood_tile, + ] def apply(obj, selection=None, vertical=False, scale=None, alternating=None, **kwargs): diff --git a/infinigen/assets/mollusk/__init__.py b/infinigen/assets/mollusk/__init__.py deleted file mode 100644 index c743835b2..000000000 --- a/infinigen/assets/mollusk/__init__.py +++ /dev/null @@ -1,10 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Lingjie Mei - - -from .generate import MolluskFactory, NautilusFactory, ConchFactory, AugerFactory, VoluteFactory, \ - ScallopFactory, ClamFactory, MusselFactory -from .snail import SnailBaseFactory, NautilusBaseFactory, ConchBaseFactory, AugerBaseFactory, VoluteBaseFactory -from .shell import ShellBaseFactory, ScallopBaseFactory, ClamBaseFactory, MusselBaseFactory diff --git a/infinigen/assets/mollusk/snail.py b/infinigen/assets/mollusk/snail.py deleted file mode 100644 index 8c34c48d0..000000000 --- a/infinigen/assets/mollusk/snail.py +++ /dev/null @@ -1,169 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Lingjie Mei - - -import bpy -import numpy as np -from numpy.random import uniform - -import infinigen.core.util.blender as butil -from infinigen.assets.mollusk.base import BaseMolluskFactory -from infinigen.assets.utils.object import center, mesh2obj, data2mesh, new_empty -from infinigen.core.util.random import log_uniform -from infinigen.core.nodes.node_info import Nodes -from infinigen.core.nodes.node_wrangler import NodeWrangler -from infinigen.core import surface -from infinigen.core.util.math import FixedSeed -from infinigen.core.tagging import tag_object, tag_nodegroup - -class SnailBaseFactory(BaseMolluskFactory): - freq = 256 - - def __init__(self, factory_seed, coarse=False): - super(SnailBaseFactory, self).__init__(factory_seed, coarse) - with FixedSeed(factory_seed): - self.makers = [self.volute_make, self.nautilus_make, self.snail_make, self.conch_make] - self.maker = np.random.choice(self.makers) - self.ratio = uniform(0, .3) if uniform(0, 1) < .5 else uniform(.7, 1.) 
- self.z_scale = log_uniform(.2, 1) - self.distortion = log_uniform(2, 20) - - @staticmethod - def build_cross_section(n=64, affine=1, spike=0., concave=2.2): - perturb = 1 / (5 * n) - angles = (np.arange(n) / n + uniform(-perturb, perturb, n)) * 2 * np.pi - radius = np.abs(np.cos(angles)) ** concave + np.abs(np.sin(angles)) ** concave - radius *= 1 + uniform(0, spike, n) * (uniform(0, 1, n) < .2) - vertices = np.stack( - [np.cos(angles) * radius, np.sin(angles) * radius * affine, np.zeros_like(angles)]).T - edges = np.stack([np.arange(n), np.roll(np.arange(n), -1)]).T - obj = mesh2obj(data2mesh(vertices, edges, [], 'circle')) - obj.rotation_euler = 0, 0, uniform(0, np.pi / 12) - butil.apply_transform(obj) - return obj - - def snail_make(self, lateral=.15, longitudinal=0.04, freq=28, scale=.99, loop=8, affine=1, spike=0.): - n = 40 - resolution = loop * freq - concave = uniform(1.9, 2.1) - obj = self.build_cross_section(n, affine, spike, concave) - empty = new_empty(location=(longitudinal * np.random.choice([-1, 1]), 0, 0), - rotation=(2 * np.pi / freq, 0, 0), scale=[scale] * 3) - butil.modify_mesh(obj, 'ARRAY', apply=True, use_relative_offset=False, use_constant_offset=True, - use_object_offset=True, constant_offset_displace=(0, 0, lateral), count=resolution, - offset_object=empty) - butil.delete(empty) - surface.add_geomod(obj, self.geo_shader_vector, apply=True, input_args=[n, lateral], - attributes=['vector']) - - with butil.ViewportMode(obj, 'EDIT'): - bpy.ops.mesh.select_mode(type="EDGE") - bpy.ops.mesh.select_all(action='SELECT') - bpy.ops.mesh.bridge_edge_loops() - - return obj - - @staticmethod - def geo_shader_vector(nw: NodeWrangler, n, interval): - geometry = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketGeometry', 'Geometry', None)]) - id = nw.new_node(Nodes.InputID) - angle = nw.scalar_multiply(nw.math('MODULO', id, n), 2 * np.pi / n) - height = nw.scalar_multiply(nw.math('FLOOR', nw.scalar_divide(id, n)), interval) - vector = nw.combine(nw.math('COSINE', angle), nw.math('SINE', angle), height) - nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': geometry, 'Vector': vector}) - - @staticmethod - def solve_longitude(ratio, freq, scale): - return ratio * (1 + scale ** freq) / freq - - @staticmethod - def solve_lateral(ratio, freq, scale): - return ratio / (np.sin(2 * np.pi / freq * np.arange(freq)) * scale ** np.arange(freq)).sum() - - @staticmethod - def solve_scale(shrink, freq): - return shrink ** (1 / freq) - - def conch_make(self): - scale = self.solve_scale(uniform(.7, .8), self.freq) - lateral = self.solve_lateral(uniform(.3, .4), self.freq, scale) - longitude = self.solve_longitude(uniform(.7, .8), self.freq, scale) - loop = np.random.randint(8, 10) - obj = self.snail_make(lateral, longitude, self.freq, scale, loop, affine=uniform(.8, .9), spike=.1) - tag_object(obj, 'conch') - return obj - - def auger_make(self): - scale = self.solve_scale(uniform(.7, .8), self.freq) - lateral = self.solve_lateral(uniform(.1, .15), self.freq, scale) - longitude = self.solve_longitude(uniform(.9, 1.), self.freq, scale) - loop = np.random.randint(8, 12) - obj = self.snail_make(lateral, longitude, self.freq, scale, loop, affine=uniform(.5, .6)) - tag_object(obj, 'auger') - return obj - - def volute_make(self): - scale = self.solve_scale(uniform(.5, .6), self.freq) - lateral = self.solve_lateral(uniform(.4, .5), self.freq, scale) - longitude = self.solve_longitude(uniform(.6, .7), self.freq, scale) - loop = np.random.randint(4, 5) - obj = self.snail_make(lateral, longitude, 
self.freq, scale, loop) - tag_object(obj, 'volute') - return obj - - def nautilus_make(self): - scale = self.solve_scale(uniform(.4, .5), self.freq) - lateral = self.solve_lateral(uniform(1.2, 1.4), self.freq, scale) - longitude = self.solve_longitude(uniform(.2, .3), self.freq, scale) - loop = np.random.randint(4, 5) - obj = self.snail_make(lateral, longitude, self.freq, scale, loop) - tag_object(obj, 'nautilus') - return obj - - @staticmethod - def geo_affine(nw: NodeWrangler): - geometry = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketGeometry', 'Geometry', None)]) - affine = nw.new_node(Nodes.SetPosition, input_kwargs={ - 'Geometry': geometry, - 'Offset': nw.combine( - *[nw.vector_math('DOT_PRODUCT', uniform(-.1, .1, 3), nw.new_node(Nodes.InputPosition)) for _ in - range(3)])}) - return affine - - def create_asset(self, **params): - obj = self.maker() - obj.scale = [1 / max(obj.dimensions)] * 3 - obj.rotation_euler = uniform(0, np.pi * 2, 3) - butil.apply_transform(obj) - obj.location = -center(obj) - obj.location[-1] += obj.dimensions[-1] * .4 - butil.apply_transform(obj, loc=True) - surface.add_geomod(obj, self.geo_affine, apply=True) - tag_object(obj, 'snail') - return obj - - -class VoluteBaseFactory(SnailBaseFactory): - def __init__(self, factory_seed, coarse=False): - super(VoluteBaseFactory, self).__init__(factory_seed, coarse) - self.maker = self.volute_make - - -class NautilusBaseFactory(SnailBaseFactory): - def __init__(self, factory_seed, coarse=False): - super(NautilusBaseFactory, self).__init__(factory_seed, coarse) - self.maker = self.nautilus_make - - -class ConchBaseFactory(SnailBaseFactory): - def __init__(self, factory_seed, coarse=False): - super(ConchBaseFactory, self).__init__(factory_seed, coarse) - self.maker = self.conch_make - - -class AugerBaseFactory(SnailBaseFactory): - def __init__(self, factory_seed, coarse=False): - super(AugerBaseFactory, self).__init__(factory_seed, coarse) - self.maker = self.auger_make diff --git a/infinigen/assets/monocot/growth.py b/infinigen/assets/monocot/growth.py deleted file mode 100644 index b35402ed7..000000000 --- a/infinigen/assets/monocot/growth.py +++ /dev/null @@ -1,222 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory -# of this source tree. 
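(Note on the snail geometry removed just above: the per-segment spiral parameters in `SnailBaseFactory` are derived by its `solve_*` staticmethods. Below is a standalone restatement of two of those formulas for a quick numeric sanity check — pure NumPy, the bodies simply mirror the deleted code, and none of this is part of the diff itself.)

```python
# Standalone restatement of SnailBaseFactory.solve_scale / solve_lateral from the
# deleted snail.py above, purely to sanity-check the formulas; illustrative only.
import numpy as np


def solve_scale(shrink, freq):
    # per-segment scale whose freq-th power equals the desired total shrink
    return shrink ** (1 / freq)


def solve_lateral(ratio, freq, scale):
    # per-segment lateral offset, normalised by the scale-weighted sine sum
    return ratio / (
        np.sin(2 * np.pi / freq * np.arange(freq)) * scale ** np.arange(freq)
    ).sum()


freq = 256  # SnailBaseFactory.freq
s = solve_scale(0.75, freq)
assert abs(s**freq - 0.75) < 1e-9  # total shrink over one shell is recovered
assert solve_lateral(0.35, freq, s) > 0  # offsets stay positive while the shell shrinks
```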
- -# Authors: Lingjie Mei - - -import colorsys - -from functools import reduce - -import bpy -import numpy as np -from numpy.random import uniform - -from infinigen.assets.utils.decorate import displace_vertices, geo_extension -from infinigen.assets.utils.misc import assign_material -from infinigen.core.util.color import hsv2rgba -from infinigen.core.util.random import log_uniform -from infinigen.assets.utils.nodegroup import geo_radius -from infinigen.core.placement.detail import adapt_mesh_resolution -from infinigen.core.surface import shaderfunc_to_material -from infinigen.core.util import blender as butil -from infinigen.assets.utils.object import data2mesh, join_objects, mesh2obj, new_cube, origin2leftmost -from infinigen.core.nodes.node_info import Nodes -from infinigen.core.placement.factory import AssetFactory, make_asset_collection -from infinigen.core.nodes.node_wrangler import NodeWrangler -from infinigen.core.placement.factory import AssetFactory -from infinigen.core import surface -from infinigen.core.util.blender import deep_clone_obj -from infinigen.core.util.math import FixedSeed -from infinigen.core.tagging import tag_object, tag_nodegroup -from infinigen.core.nodes.node_utils import build_color_ramp - - -class MonocotGrowthFactory(AssetFactory): - use_distance = False - - def __init__(self, factory_seed, coarse=False): - super(MonocotGrowthFactory, self).__init__(factory_seed, coarse) - with FixedSeed(factory_seed): - self.count = 128 - self.perturb = .05 - self.angle = np.pi / 6 - self.min_y_angle = 0. - self.max_y_angle = np.pi / 2 - self.leaf_prob = uniform(.8, .9) - self.leaf_range = 0, 1 - self.stem_offset = .2 - self.scale_curve = [(0, 1), (1, 1)] - self.radius = .01 - self.bend_angle = np.pi / 4 - self.twist_angle = np.pi / 6 - self.z_drag = 0. 
- self.z_scale = uniform(1., 1.2) - self.align_factor = 0 - self.align_direction = 1, 0, 0 - self.base_hue = self.build_base_hue() - self.bright_color = hsv2rgba(self.base_hue, uniform(.6, .8), log_uniform(.05, .1)) - self.dark_color = hsv2rgba((self.base_hue + uniform(-.03, .03)) % 1, uniform(.8, 1.), - log_uniform(.05, .2)) - self.material = shaderfunc_to_material(self.shader_monocot, self.dark_color, self.bright_color, - self.use_distance) - - @staticmethod - def build_base_hue(): - return uniform(.15, .35) - - @property - def is_grass(self): - return False - - def build_leaf(self, face_size): - raise NotImplemented - - @staticmethod - def decorate_leaf(obj, y_ratio=4, y_bend_angle=np.pi / 6, z_bend_angle=np.pi / 6, noise_scale=.1, - strength=.02, leftmost=True): - obj.rotation_euler[1] = -np.pi / 2 - butil.apply_transform(obj) - butil.modify_mesh(obj, 'SIMPLE_DEFORM', deform_method='BEND', angle=uniform(.5, 1) * y_bend_angle, - deform_axis='Y') - obj.rotation_euler[1] = np.pi / 2 - butil.apply_transform(obj) - butil.modify_mesh(obj, 'SIMPLE_DEFORM', deform_method='BEND', angle=uniform(-1, 1) * z_bend_angle, - deform_axis='Z') - - displace_vertices(obj, lambda x, y, z: (0, 0, y_ratio * uniform(0, 1) * y * y)) - surface.add_geomod(obj, geo_extension, apply=True) - - texture = bpy.data.textures.new(name='grasses', type='STUCCI') - texture.noise_scale = noise_scale - butil.modify_mesh(obj, 'DISPLACE', strength=strength, texture=texture) - - for direction, width in zip('XY', obj.dimensions[:2]): - texture = bpy.data.textures.new(name='grasses', type='STUCCI') - texture.noise_scale = noise_scale - butil.modify_mesh(obj, 'DISPLACE', strength=uniform(.01, .02) * width, texture=texture, - direction=direction) - if leftmost: - origin2leftmost(obj) - return obj - - def make_geo_flower(self): - def geo_flower(nw: NodeWrangler, leaves): - stem = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketGeometry', 'Geometry', None)]) - line = nw.new_node(Nodes.CurveLine, input_kwargs={'End': (0, 0, self.stem_offset)}) - points = nw.new_node(Nodes.ResampleCurve, [line, None, self.count]) - parameter = nw.new_node(Nodes.SplineParameter) - y_rotation = nw.build_float_curve(parameter, [(0, -self.min_y_angle), (1, -self.max_y_angle)]) - z_rotation = nw.new_node(Nodes.AccumulateField, - [None, nw.uniform(self.angle * .95, self.angle * 1.05)]) - rotation = nw.combine(0, y_rotation, z_rotation) - scale = nw.build_float_curve(parameter, self.scale_curve, 'AUTO') - if self.perturb: - rotation = nw.add(rotation, nw.uniform([-self.perturb] * 3, [self.perturb] * 3)) - scale = nw.add(scale, nw.uniform([-self.perturb] * 3, [self.perturb] * 3)) - if self.align_factor: - rotation = nw.new_node(Nodes.AlignEulerToVector, input_kwargs={ - 'Rotation': rotation, - 'Factor': surface.eval_argument(nw, self.align_factor), - 'Vector': self.align_direction - }, attrs={'pivot_axis': 'Z'}) - points, _, z_rotation = nw.new_node(Nodes.CaptureAttribute, [points, None, z_rotation]).outputs[:3] - leaves = nw.new_node(Nodes.CollectionInfo, [leaves, True, True]) - is_leaf = reduce(lambda *xs: nw.boolean_math('AND', *xs), [nw.bernoulli(self.leaf_prob), - nw.compare('GREATER_EQUAL', parameter, self.leaf_range[0]), - nw.compare('LESS_EQUAL', parameter, self.leaf_range[1])]) - instances = nw.new_node(Nodes.InstanceOnPoints, input_kwargs={ - 'Points': points, - 'Selection': is_leaf, - 'Instance': leaves, - 'Pick Instance': True, - 'Rotation': rotation, - 'Scale': scale - }) - geometry = nw.new_node(Nodes.RealizeInstances, [instances]) - 
geometry = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': geometry, 'Name':'z_rotation', 'Value': z_rotation}) - geometry = nw.new_node(Nodes.JoinGeometry, [[stem, geometry]]) - nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': geometry}) - - return geo_flower - - def build_instance(self, i, face_size): - obj = self.build_leaf(face_size) - origin2leftmost(obj) - obj.location[0] -= .01 - butil.apply_transform(obj, loc=True) - return obj - - def make_collection(self, face_size): - return make_asset_collection(self.build_instance, 10, 'leaves', verbose=False, face_size=face_size) - - def build_stem(self, face_size): - obj = mesh2obj(data2mesh([[0, 0, 0], [0, 0, self.stem_offset]], [[0, 1]])) - butil.modify_mesh(obj, 'SUBSURF', True, levels=9, render_levels=9) - surface.add_geomod(obj, geo_radius, apply=True, input_args=[self.radius, 16]) - adapt_mesh_resolution(obj, face_size, 'subdivide') - - texture = bpy.data.textures.new(name='grasses', type='STUCCI') - texture.noise_scale = .1 - butil.modify_mesh(obj, 'DISPLACE', strength=.01, texture=texture) - tag_object(obj, 'stem') - return obj - - def create_asset(self, **params): - obj = self.create_raw(**params) - self.decorate_monocot(obj) - tag_object(obj, 'monocot_growth') - return obj - - def create_raw(self, face_size=.01, apply=True, **params): - if self.angle != 0: - frequency = 2 * np.pi / self.angle - if .01 < frequency - int(frequency) < .05: - frequency += .05 - elif -.05 < frequency - int(frequency) < -.01: - frequency -= .05 - self.angle = 2 * np.pi / frequency - leaves = self.make_collection(face_size) - obj = self.build_stem(face_size) - surface.add_geomod(obj, self.make_geo_flower(), apply=apply, input_args=[leaves]) - if apply: - butil.delete_collection(leaves) - tag_object(obj, 'flower') - return obj - - def decorate_monocot(self, obj): - displace_vertices(obj, lambda x, y, z: (0, 0, -self.z_drag * (x * x + y * y))) - surface.add_geomod(obj, geo_extension, apply=True, input_args=[.4]) - butil.modify_mesh(obj, 'SIMPLE_DEFORM', deform_method='TWIST', - angle=uniform(-self.twist_angle, self.twist_angle), deform_axis='Z') - butil.modify_mesh(obj, 'SIMPLE_DEFORM', deform_method='BEND', angle=uniform(0, self.bend_angle)) - obj.scale = uniform(.8, 1.2), uniform(.8, 1.2), self.z_scale - obj.rotation_euler[-1] = uniform(0, np.pi * 2) - butil.apply_transform(obj) - assign_material(obj, self.material) - - @staticmethod - def shader_monocot(nw: NodeWrangler, dark_color, bright_color, use_distance): - specular = uniform(.0, .2) - clearcoat = 0 if uniform(0, 1) < .8 else uniform(.2, .5) - if use_distance: - distance = nw.new_node(Nodes.Attribute, attrs={'attribute_name': 'distance'}).outputs['Fac'] - exponent = uniform(1.8, 3.5) - ratio = nw.scalar_sub(1, nw.math('POWER', nw.scalar_sub(1, distance), exponent)) - color = nw.new_node(Nodes.MixRGB, [ratio, bright_color, dark_color]) - else: - color = build_color_ramp(nw, nw.musgrave(10), [.0, .3, .7, 1.], - [bright_color, bright_color, dark_color, dark_color], ) - noise_texture = nw.new_node(Nodes.NoiseTexture, input_kwargs={'Scale': 50}) - roughness = nw.build_float_curve(noise_texture, [(0, .5), (1, .8)]) - bsdf = nw.new_node(Nodes.PrincipledBSDF, input_kwargs={ - 'Base Color': color, - 'Roughness': roughness, - 'Specular': specular, - 'Clearcoat': clearcoat, - 'Subsurface': .01, - 'Subsurface Radius': (.01, .01, .01), - }) - return bsdf diff --git a/infinigen/assets/monocot/pinecone.py b/infinigen/assets/monocot/pinecone.py deleted file mode 100644 index 
c360da329..000000000 --- a/infinigen/assets/monocot/pinecone.py +++ /dev/null @@ -1,81 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Lingjie Mei - - -import colorsys - -import bpy -import numpy as np -from numpy.random import uniform - -import infinigen.core.util.blender as butil -from infinigen.assets.monocot.growth import MonocotGrowthFactory -from infinigen.assets.utils.object import new_circle -from infinigen.assets.utils.draw import shape_by_angles, shape_by_xs -from infinigen.core.util.color import hsv2rgba -from infinigen.core.util.random import log_uniform -from infinigen.core.nodes.node_info import Nodes -from infinigen.core.nodes.node_wrangler import NodeWrangler -from infinigen.core.placement.detail import remesh_with_attrs -from infinigen.core.surface import shaderfunc_to_material -from infinigen.core.util.math import FixedSeed -from infinigen.core.tagging import tag_object, tag_nodegroup -from infinigen.core.nodes.node_utils import build_color_ramp - -class PineconeFactory(MonocotGrowthFactory): - def __init__(self, factory_seed, coarse=False): - super().__init__(factory_seed, coarse) - with FixedSeed(factory_seed): - self.angle = 2 * np.pi / (np.random.randint(4, 8) + .5) - self.max_y_angle = uniform(.7, .8) * np.pi / 2 - self.leaf_prob = uniform(.9, .95) - self.count = int(log_uniform(64, 96)) - self.stem_offset = uniform(.2, .4) - self.perturb = 0 - self.scale_curve = [(0, .5), (.5, uniform(.6, 1.)), (1, uniform(.1, .2))] - self.bright_color = hsv2rgba(uniform(.02, .06), uniform(.8, 1.), .01) - self.dark_color = hsv2rgba(uniform(.02, .06), uniform(.8, 1.), .005) - self.material = shaderfunc_to_material(self.shader_monocot, self.dark_color, self.bright_color, - self.use_distance) - - def build_leaf(self, face_size): - obj = new_circle(vertices=128) - with butil.ViewportMode(obj, 'EDIT'): - bpy.ops.mesh.fill_grid() - angles = np.array([-1, -.8, -.5, 0, .5, .8, 1]) * self.angle / 2 - scale = uniform(.9, .95) - scales = [0, .7, scale, 1, scale, .7, 0] - displacement = [0, 0, 0, -uniform(.2, .3), 0, 0, 0] - shape_by_angles(obj, angles, scales, displacement) - - with butil.ViewportMode(obj, 'EDIT'): - bpy.ops.mesh.convex_hull() - - xs = [0, 1, 2] - displacement = [0, 0, .5] - shape_by_xs(obj, xs, displacement) - - obj.scale = [.1] * 3 - obj.rotation_euler[1] -= uniform(np.pi / 18, np.pi / 12) - butil.apply_transform(obj) - remesh_with_attrs(obj, face_size) - - texture = bpy.data.textures.new(name='pinecone', type='STUCCI') - texture.noise_scale = log_uniform(.002, .005) - butil.modify_mesh(obj, 'DISPLACE', True, strength=.001, mid_level=0, texture=texture) - - tag_object(obj, 'pinecone') - return obj - - @staticmethod - def shader_monocot(nw: NodeWrangler, dark_color, bright_color, use_distance): - specular = uniform(.2, .4) - color = build_color_ramp(nw, nw.musgrave(10), [.0, .3, .7, 1.], - [bright_color, bright_color, dark_color, dark_color], ) - noise_texture = nw.new_node(Nodes.NoiseTexture, input_kwargs={'Scale': 50}) - roughness = nw.build_float_curve(noise_texture, [(0, .5), (1, .8)]) - bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': color, 'Roughness': roughness, 'Specular': specular}) - return bsdf diff --git a/infinigen/assets/mushroom/cap.py b/infinigen/assets/mushroom/cap.py deleted file mode 100644 index 49374adcf..000000000 --- a/infinigen/assets/mushroom/cap.py +++ /dev/null @@ -1,391 +0,0 @@ -# 
Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Lingjie Mei - - -import colorsys - -import bpy -import numpy as np -from numpy.random import uniform - -from infinigen.assets.utils.decorate import displace_vertices, geo_extension, \ - subsurface2face_size -from infinigen.assets.utils.misc import assign_material -from infinigen.assets.utils.draw import spin -from infinigen.assets.utils.mesh import polygon_angles -from infinigen.core.util.color import hsv2rgba -from infinigen.core.util.random import log_uniform -from infinigen.assets.utils.object import data2mesh, join_objects, mesh2obj -from infinigen.core.nodes.node_info import Nodes -from infinigen.core.nodes.node_wrangler import NodeWrangler -from infinigen.core.placement.detail import remesh_with_attrs -from infinigen.core.placement.factory import AssetFactory -from infinigen.core import surface -from infinigen.core.util import blender as butil -from infinigen.core.util.math import FixedSeed -from infinigen.core.tagging import tag_object, tag_nodegroup -from infinigen.core.nodes.node_utils import build_color_ramp - -class MushroomCapFactory(AssetFactory): - - def __init__(self, factory_seed, base_hue, material_func, coarse=False): - super().__init__(factory_seed, coarse) - with FixedSeed(factory_seed): - self.x_scale, self.z_scale = uniform(.7, 1.4, 2) - self.cap_configs = [self.campanulate, self.conical, self.convex, self.depressed, self.flat, - self.infundiuliform, self.ovate, self.umbillicate, self.umbonate] - config_weights = np.array([2, 2, 2, 1, 2, 1, 2, 1, 1]) - cap_config = np.random.choice(self.cap_configs, p=config_weights / config_weights.sum()) - self.cap_config = {**cap_config, - 'x_anchors': [_ * self.x_scale for _ in cap_config['x_anchors']], - 'z_anchors': [_ * self.z_scale for _ in cap_config['z_anchors']] - } - - self.radius = max(self.cap_config['x_anchors']) - self.inner_radius = log_uniform(.2, .35) * self.radius - - self.gill_configs = [self.adnexed_gill, self.decurrent_gill, None] - gill_configs = np.array([1, 1, 1]) - self.gill_config = np.random.choice(self.gill_configs, p=gill_configs / gill_configs.sum()) - if not self.cap_config['has_gill']: - self.gill_config = None - - self.shader_funcs = [self.shader_cap, self.shader_noise, self.shader_voronoi, self.shader_speckle] - shader_weights = np.array([2, 1, 1, 1]) - self.shader_func = np.random.choice(self.shader_funcs, p=shader_weights / shader_weights.sum()) - - self.is_morel = uniform(0, 1) < .5 and self.shader_func == self.shader_cap - - self.base_hue = base_hue - self.material_cap = surface.shaderfunc_to_material(self.shader_func, self.base_hue) - self.material = material_func() - - @property - def campanulate(self): - x = uniform(.12, .15) - return { - 'x_anchors': [0, x, x, .08, .04, 0], - 'z_anchors': [0, 0, uniform(.03, .05), uniform(.1, .12), uniform(.16, .2), .2], - 'vector_locations': [], - 'has_gill': True - } - - @property - def conical(self): - z = uniform(.2, .3) - return { - 'x_anchors': [0, uniform(.12, .15), .01, 0], - 'z_anchors': [0, 0, z, z], - 'vector_locations': [1], - 'has_gill': True - } - - @property - def convex(self): - z = uniform(.14, .16) - return { - 'x_anchors': [0, .15, .12, .01, 0], - 'z_anchors': [0, 0, uniform(.04, .06), z, z], - 'vector_locations': [1], - 'has_gill': True - } - - @property - def depressed(self): - z = uniform(.03, .05) - return { - 'x_anchors': [0, .15, .12, 0], - 'z_anchors': 
[0, 0, uniform(.06, .08), z], - 'vector_locations': [1], - 'has_gill': True - } - - @property - def flat(self): - z = uniform(.05, .07) - return { - 'x_anchors': [0, .15, .12, 0], - 'z_anchors': [0, 0, z, z], - 'vector_locations': [1], - 'has_gill': True - } - - @property - def infundiuliform(self): - z = uniform(.08, .12) - x = uniform(.12, .15) - return { - 'x_anchors': [0, .03, x, x - .01, 0], - 'z_anchors': [0, 0, z, z + uniform(.005, .01), .02], - 'vector_locations': [], - 'has_gill': False - } - - @property - def ovate(self): - z = uniform(.2, .3) - return { - 'x_anchors': [0, uniform(.12, .15), .08, .01, 0], - 'z_anchors': [0, 0, .8 * z, z, z], - 'vector_locations': [1], - 'has_gill': True - } - - @property - def umbillicate(self): - z = uniform(.03, .05) - return { - 'x_anchors': [0, .15, .12, .02, 0], - 'z_anchors': [0, .04, uniform(.06, .08), z + .02, z], - 'vector_locations': [], - 'has_gill': False - } - - @property - def umbonate(self): - z = uniform(.05, .07) - z_ = z + uniform(.02, .04) - return { - 'x_anchors': [0, .15, .12, .06, .02, 0], - 'z_anchors': [0, 0, z - .01, z, z_, z_], - 'vector_locations': [1], - 'has_gill': True - } - - @property - def adnexed_gill(self): - return { - 'x_anchors': [self.radius, (self.radius + self.inner_radius) / 2, self.inner_radius, - self.inner_radius, self.radius], - 'z_anchors': [0, -uniform(.05, .08), -uniform(0, .02), 0, 0], - 'vector_locations': [3] - } - - @property - def decurrent_gill(self): - return { - 'x_anchors': [self.radius, (self.radius + self.inner_radius) / 2, self.inner_radius, 0, - self.radius], - 'z_anchors': [0, -uniform(.05, .08), -uniform(.08, .1), 0, 0], - 'vector_locations': [2] - } - - @staticmethod - def geo_xyz(nw: NodeWrangler): - geometry = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketGeometry', 'Geometry', None)]) - for name, component in zip('xyz', nw.separate(nw.new_node(Nodes.InputPosition))): - component = nw.math('ABSOLUTE', component) - m = nw.new_node(Nodes.AttributeStatistic, [geometry, None, component]).outputs['Max'] - geometry = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={ - 'Geometry': geometry, - 'Name': name, - 'Value': nw.scalar_divide(component, m) - }) - nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': geometry}) - - @staticmethod - def geo_morel(nw: NodeWrangler): - geometry = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketGeometry', 'Geometry', None)]) - selection = nw.compare('LESS_THAN', nw.new_node(Nodes.VoronoiTexture, input_kwargs={ - 'Scale': uniform(15, 20), - 'Randomness': uniform(.5, 1) - }, attrs={'feature': 'DISTANCE_TO_EDGE'}), .05) - geometry = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry':geometry, 'Name':'morel', 'Value': selection}) - nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': geometry}) - - def apply_cut(self, obj): - if max(self.cap_config['x_anchors']) > .1: - return - n_cuts = np.random.randint(0, 5) - angles = polygon_angles(n_cuts, np.pi / 4, np.pi * 2) - for a in angles: - width = uniform(.15, .2) * .4 - vertices = [[0, 0, .4], [.4, -width, .4], [.4, width, .4], [0, 0, -1], [.4, -width, -.01], - [.4, width, -.01]] - faces = [[0, 1, 2], [1, 0, 3, 4], [2, 1, 4, 5], [0, 2, 5, 3], [5, 4, 3]] - cutter = mesh2obj(data2mesh(vertices, [], faces)) - displace_vertices(cutter, lambda x, y, z: (0, 2 * y * y, 0)) - butil.modify_mesh(cutter, 'SUBSURF', render_levels=5, levels=5, subdivision_type='SIMPLE') - depth = self.radius * uniform(.4, .7) - cutter.location = np.cos(a) * depth, np.sin(a) * depth, 0 - 
cutter.rotation_euler = 0, 0, a + uniform(-np.pi / 4, np.pi / 4) - butil.modify_mesh(obj, 'WELD', merge_threshold=.002) - butil.modify_mesh(obj, 'BOOLEAN', object=cutter, operation='DIFFERENCE', apply=True) - butil.delete(cutter) - - def create_asset(self, face_size, **params) -> bpy.types.Object: - cap_config = self.cap_config - anchors = cap_config['x_anchors'], 0, cap_config['z_anchors'] - obj = spin(anchors, cap_config['vector_locations']) - self.apply_cut(obj) - remesh_with_attrs(obj, face_size) - surface.add_geomod(obj, self.geo_xyz, apply=True) - surface.add_geomod(obj, self.geo_morel, apply=True) - assign_material(obj, self.material_cap) - - if self.is_morel: - with butil.SelectObjects(obj): - surface.set_active(obj,'morel') - bpy.ops.geometry.attribute_convert(mode='VERTEX_GROUP') - butil.modify_mesh(obj, 'DISPLACE', vertex_group='morel', strength=.04, mid_level=.7) - - if self.gill_config is not None: - gill_config = self.gill_config - anchors = gill_config['x_anchors'], 0, gill_config['z_anchors'] - gill = spin(anchors, gill_config['vector_locations'], dupli=True, loop=True, - resolution=np.random.randint(8, 20)) - subsurface2face_size(gill, face_size) - assign_material(gill, self.material) - obj = join_objects([obj, gill]) - - texture = bpy.data.textures.new(name='cap', type=np.random.choice(['STUCCI', 'MARBLE'])) - texture.noise_scale = log_uniform(.01, .05) - butil.modify_mesh(obj, 'DISPLACE', strength=.008, texture=texture, mid_level=0) - - surface.add_geomod(obj, geo_extension, apply=True, input_args=[.1]) - butil.modify_mesh(obj, 'SIMPLE_DEFORM', deform_method='TWIST', angle=uniform(-np.pi / 4, np.pi / 4), - deform_axis='X') - r1, r2, r3, r4 = uniform(-.25, .25, 4) - displace_vertices(obj, lambda x, y, z: (np.where(x > 0, r1, r2) * x, np.where(y > 0, r3, r4) * y, 0)) - tag_object(obj, 'cap') - return obj - - @staticmethod - def shader_voronoi(nw: NodeWrangler, base_hue): - bright_color = hsv2rgba(base_hue, uniform(.4, .8), log_uniform(.05, .2)) - dark_color = *colorsys.hsv_to_rgb((base_hue + uniform(-.05, .05)) % 1, uniform(.4, .8), - log_uniform(.01, .05)), 1 - subsurface_color = *colorsys.hsv_to_rgb((base_hue + uniform(-.05, .05)) % 1, uniform(.4, .8), - log_uniform(.05, .2)), 1 - light_color = hsv2rgba(base_hue, uniform(0, .1), uniform(.2, .8)) - anchors = [.0, .3, .6, 1.] if uniform(0, 1) < .5 else [.0, .4, .7, 1.] 
- color = build_color_ramp(nw, nw.musgrave(500), anchors, - [dark_color, dark_color, bright_color, bright_color]) - - x = nw.new_node(Nodes.Attribute, attrs={'attribute_name': 'x'}).outputs['Fac'] - y = nw.new_node(Nodes.Attribute, attrs={'attribute_name': 'y'}).outputs['Fac'] - r = nw.power(nw.add(nw.power(x, 2), nw.power(y, 2)), .5) - coord = nw.scale(nw.combine(x, y, 0), nw.build_float_curve(r, [(0, 1), (uniform(.5, .7), 2), (1, 8)])) - - perturbed_position = nw.add(coord, - nw.scale(nw.new_node(Nodes.NoiseTexture, attrs={'noise_dimensions': '2D'}), - .2)) - voronoi = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Scale': uniform(2, 2.5), 'Vector': perturbed_position}, - attrs={'voronoi_dimensions': '2D', 'feature': 'DISTANCE_TO_EDGE'}) - - ratio = nw.divide(voronoi, nw.scalar_add(1, nw.scalar_multiply(5, nw.power(r, 2)))) - ratio = nw.build_float_curve(ratio, [(0, .4), (.04, 0)]) - ratio = nw.scalar_multiply(ratio, nw.new_node(Nodes.MapRange, [ - nw.new_node(Nodes.MusgraveTexture, input_kwargs={'Scale': 20}), -.2, .1, 0, 1])) - color = nw.new_node(Nodes.MixRGB, [ratio, color, light_color]) - - roughness = uniform(.2, .5) if uniform(0, 1) < .5 else uniform(.8, 1.) - specular = uniform(.2, .8) - clearcoat = uniform(.2, .5) if uniform(0, 1) < .25 else 0 - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, input_kwargs={ - 'Base Color': color, - 'Roughness': roughness, - 'Specular': specular, - 'Clearcoat': clearcoat, - 'Subsurface Color': subsurface_color, - 'Subsurface': .01, - 'Subsurface Radius': (.05, .05, .05) - }) - return principled_bsdf - - @staticmethod - def shader_speckle(nw: NodeWrangler, base_hue): - bright_color = hsv2rgba(base_hue, uniform(.4, .8), log_uniform(.05, .2)) - dark_color = *colorsys.hsv_to_rgb((base_hue + uniform(-.05, .05)) % 1, uniform(.4, .8), - log_uniform(.01, .05)), 1 - subsurface_color = *colorsys.hsv_to_rgb((base_hue + uniform(-.05, .05)) % 1, uniform(.4, .8), - log_uniform(.05, .2)), 1 - light_color = hsv2rgba(base_hue, uniform(0, .1), uniform(.2, .8)) - anchors = [.0, .3, .6, 1.] if uniform(0, 1) < .5 else [.0, .4, .7, 1.] - color = build_color_ramp(nw, nw.musgrave(500), anchors, - [dark_color, dark_color, bright_color, bright_color]) - - musgrave = nw.build_float_curve(nw.musgrave(50), [(.7, 0), (.72, 1.)]) - color = nw.new_node(Nodes.MixRGB, [musgrave, color, light_color]) - - roughness = uniform(.2, .5) if uniform(0, 1) < .5 else uniform(.8, 1.) - specular = uniform(.2, .8) - clearcoat = uniform(.2, .5) if uniform(0, 1) < .25 else 0 - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, input_kwargs={ - 'Base Color': color, - 'Roughness': roughness, - 'Specular': specular, - 'Clearcoat': clearcoat, - 'Subsurface Color': subsurface_color, - 'Subsurface': .01, - 'Subsurface Radius': (.05, .05, .05) - }) - return principled_bsdf - - @staticmethod - def shader_noise(nw: NodeWrangler, base_hue): - bright_color = hsv2rgba(base_hue, uniform(.4, .8), log_uniform(.05, .2)) - dark_color = *colorsys.hsv_to_rgb((base_hue + uniform(-.05, .05)) % 1, uniform(.4, .8), - log_uniform(.01, .05)), 1 - subsurface_color = *colorsys.hsv_to_rgb((base_hue + uniform(-.05, .05)) % 1, uniform(.4, .8), - log_uniform(.05, .2)), 1 - light_color = hsv2rgba(base_hue, uniform(0, .1), uniform(.2, .8)) - anchors = [.0, .3, .6, 1.] if uniform(0, 1) < .5 else [.0, .4, .7, 1.] 
- color = build_color_ramp(nw, nw.musgrave(500), anchors, - [dark_color, dark_color, bright_color, bright_color]) - - ratio = nw.build_float_curve(nw.musgrave(10), [(.52, 0), (.56, .2), (.6, 0.)]) - ratio = nw.scalar_multiply(ratio, nw.new_node(Nodes.MapRange, [ - nw.new_node(Nodes.MusgraveTexture, input_kwargs={'Scale': 20}), -.2, .1, 0, 1])) - color = nw.new_node(Nodes.MixRGB, [ratio, color, light_color]) - - roughness = uniform(.2, .5) if uniform(0, 1) < .5 else uniform(.8, 1.) - specular = uniform(.2, .8) - clearcoat = uniform(.2, .5) if uniform(0, 1) < .25 else 0 - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, input_kwargs={ - 'Base Color': color, - 'Roughness': roughness, - 'Specular': specular, - 'Clearcoat': clearcoat, - 'Subsurface Color': subsurface_color, - 'Subsurface': .01, - 'Subsurface Radius': (.05, .05, .05) - }) - return principled_bsdf - - @staticmethod - def shader_cap(nw: NodeWrangler, base_hue): - bright_color = hsv2rgba(base_hue, uniform(.6, .8), log_uniform(.05, .2)) - dark_color = *colorsys.hsv_to_rgb((base_hue + uniform(-.05, .05)) % 1, uniform(.4, .8), - log_uniform(.01, .05)), 1 - light_color = hsv2rgba(base_hue, uniform(0, .1), uniform(.6, .8)) - subsurface_color = *colorsys.hsv_to_rgb((base_hue + uniform(-.05, .05)) % 1, uniform(.6, .8), - log_uniform(.05, .2)), 1 - - anchors = [.0, .3, .6, 1.] if uniform(0, 1) < .5 else [.0, .4, .7, 1.] - color = build_color_ramp(nw, nw.musgrave(500), anchors, - [dark_color, dark_color, bright_color, bright_color]) - - z = nw.new_node(Nodes.Attribute, attrs={'attribute_name': 'z'}) - musgrave = nw.build_float_curve(z, [(uniform(0, .2), uniform(.95, .98)), - (uniform(.2, .4), uniform(.98, 1)), (.8, 1)]) - color = nw.new_node(Nodes.MixRGB, [musgrave, light_color, color]) - - roughness = uniform(.2, .5) if uniform(0, 1) < .5 else uniform(.8, 1.) - specular = uniform(.2, .8) - clearcoat = uniform(.2, .5) if uniform(0, 1) < .25 else 0 - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, input_kwargs={ - 'Base Color': color, - 'Roughness': roughness, - 'Specular': specular, - 'Clearcoat': clearcoat, - 'Subsurface Color': subsurface_color, - 'Subsurface': .01, - 'Subsurface Radius': (.05, .05, .05) - }) - return principled_bsdf diff --git a/infinigen/assets/mushroom/growth.py b/infinigen/assets/mushroom/growth.py deleted file mode 100644 index 4164986ec..000000000 --- a/infinigen/assets/mushroom/growth.py +++ /dev/null @@ -1,64 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
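(The mushroom cap shaders removed above all reach Blender materials through the same two calls that recur throughout this diff: `surface.shaderfunc_to_material(shader_func, *args)` to wrap a NodeWrangler shader function, and `assign_material(obj, material)` to attach the result. A minimal sketch of that pattern follows; the trivial one-node shader body and the flat color value are made up for illustration, and it assumes Blender's Python with infinigen importable.)

```python
# Minimal sketch of the shaderfunc_to_material / assign_material pattern used by the
# factories in this diff; the flat shader body is illustrative, not the PR's shaders.
from infinigen.assets.utils.misc import assign_material
from infinigen.core import surface
from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler


def shader_flat(nw: NodeWrangler, base_color):
    # extra positional args passed to shaderfunc_to_material are forwarded here
    return nw.new_node(Nodes.PrincipledBSDF, input_kwargs={"Base Color": base_color})


material = surface.shaderfunc_to_material(shader_flat, (0.8, 0.3, 0.1, 1.0))
# assign_material(obj, material)  # attach to an existing bpy mesh object
```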
- -# Authors: Lingjie Mei - - -import colorsys - -from numpy.random import uniform -from infinigen.core.nodes.node_info import Nodes -from infinigen.core.nodes.node_wrangler import NodeWrangler -from infinigen.core import surface -from infinigen.core.util.math import FixedSeed -from .cap import MushroomCapFactory -from .stem import MushroomStemFactory -from infinigen.assets.utils.object import join_objects, origin2lowest -from infinigen.core.placement.factory import AssetFactory -from infinigen.assets.utils.object import join_objects -from infinigen.core.util.random import log_uniform -from infinigen.core.nodes.node_utils import build_color_ramp - -class MushroomGrowthFactory(AssetFactory): - - def __init__(self, factory_seed, coarse=False): - super().__init__(factory_seed, coarse) - with FixedSeed(factory_seed): - self.base_hue = self.build_base_hue() - self.material_func = lambda: surface.shaderfunc_to_material(self.shader_mushroom, self.base_hue) - self.cap_factory = MushroomCapFactory(factory_seed, self.base_hue, self.material_func, coarse) - self.stem_factory = MushroomStemFactory(factory_seed, self.cap_factory.inner_radius, - self.material_func, coarse) - - @staticmethod - def build_base_hue(): - if uniform(0, 1) < .4: - return uniform(0, 1) - else: - return uniform(.02, .15) - - def create_asset(self, **params): - cap = self.cap_factory(**params) - stem = self.stem_factory(**params) - obj = join_objects([cap, stem]) - origin2lowest(obj) - return cap - - @staticmethod - def shader_mushroom(nw: NodeWrangler, base_hue): - roughness = .8 - front_color = *colorsys.hsv_to_rgb((base_hue + uniform(-.1, .1)) % 1, uniform(.1, .3), - log_uniform(.02, .5)), 1 - back_color = *colorsys.hsv_to_rgb((base_hue + uniform(-.1, .1)) % 1, uniform(.1, .3), - log_uniform(.02, .5)), 1 - - x, y, z = nw.separate(nw.new_node(Nodes.TextureCoord).outputs['Generated']) - musgrave = nw.new_node(Nodes.MapRange, [ - nw.new_node(Nodes.MusgraveTexture, [nw.combine(x, y, nw.scalar_multiply(uniform(5, 10), z))], - input_kwargs={'Scale': 200}), -1, 1, 0, 1]) - - color = build_color_ramp(nw, musgrave, [0, .3, .7, 1], - [front_color, front_color, back_color, back_color]) - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': color, 'Roughness': roughness}) - return principled_bsdf diff --git a/infinigen/assets/mushroom/stem.py b/infinigen/assets/mushroom/stem.py deleted file mode 100644 index d18cfd36f..000000000 --- a/infinigen/assets/mushroom/stem.py +++ /dev/null @@ -1,131 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
- -# Authors: Lingjie Mei - - -import bpy -import numpy as np -from numpy.random import uniform - -from infinigen.assets.utils.decorate import geo_extension, subsurface2face_size -from infinigen.assets.utils.misc import assign_material -from infinigen.assets.utils.object import join_objects -from infinigen.assets.utils.draw import spin - -from infinigen.core.util.random import log_uniform -from infinigen.core.nodes.node_info import Nodes -from infinigen.core.nodes.node_wrangler import NodeWrangler -from infinigen.core.placement.detail import remesh_with_attrs -from infinigen.core.placement.factory import AssetFactory -from infinigen.core import surface -from infinigen.core.util import blender as butil -from infinigen.core.util.math import FixedSeed -from infinigen.core.tagging import tag_object, tag_nodegroup - -class MushroomStemFactory(AssetFactory): - - def __init__(self, factory_seed, inner_radius, material_func, coarse=False): - super().__init__(factory_seed, coarse) - with FixedSeed(factory_seed): - self.web_builders = [self.build_hollow_web, self.build_solid_web, None] - web_weights = np.array([1, 1, 2]) - self.web_builder = np.random.choice(self.web_builders, p=web_weights / web_weights.sum()) - self.has_band = uniform(0, 1) < .75 - - self.material = material_func() - self.material_web = material_func() - self.inner_radius = inner_radius - - def build_solid_web(self, inner_radius): - outer_radius = inner_radius * uniform(1.5, 3.5) - z = uniform(.0, .05) - length = uniform(.15, .2) - x_anchors = inner_radius, (outer_radius + inner_radius) / 2, outer_radius - z_anchors = - z, -z - uniform(.3, .4) * length, -z - length - anchors = x_anchors, 0, z_anchors - obj = spin(anchors) - surface.add_geomod(obj, self.geo_inverse_band, apply=True, input_args=[-uniform(.008, .01)]) - tag_object(obj, 'web') - return obj - - @staticmethod - def geo_voronoi(nw: NodeWrangler): - geometry = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketGeometry', 'Geometry', None)]) - selection = nw.compare('LESS_THAN', - nw.new_node(Nodes.VoronoiTexture, input_kwargs={'Scale': uniform(15, 20)}, - attrs={'feature': 'DISTANCE_TO_EDGE'}), .06) - geometry = nw.new_node(Nodes.SeparateGeometry, [geometry, selection]) - nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': geometry}) - - def build_hollow_web(self, inner_radius): - outer_radius = inner_radius * uniform(2, 3.5) - z = uniform(.0, .05) - length = log_uniform(.2, .4) - x_anchors = inner_radius, (outer_radius + inner_radius) / 2, outer_radius - z_anchors = - z, -z - uniform(.3, .4) * length, -z - length - anchors = x_anchors, 0, z_anchors - obj = spin(anchors) - levels = 3 - butil.modify_mesh(obj, 'SUBSURF', True, render_levels=levels, levels=levels) - surface.add_geomod(obj, self.geo_voronoi, apply=True) - butil.modify_mesh(obj, 'SMOOTH', iterations=2) - tag_object(obj, 'web') - return obj - - def create_asset(self, face_size, **params) -> bpy.types.Object: - length = log_uniform(.4, .8) - x_anchors = 0, self.inner_radius, log_uniform(1, 2) * self.inner_radius, self.inner_radius * uniform(1, - 1.2), 0 - z_anchors = 0, 0, -length * uniform(.3, .7), -length, -length - anchors = x_anchors, 0, z_anchors - obj = spin(anchors, [1, 4]) - remesh_with_attrs(obj, face_size) - if self.has_band: - surface.add_geomod(obj, self.geo_band, apply=True, input_args=[length, uniform(.008, .01)]) - assign_material(obj, self.material) - - if self.web_builder is not None: - web = self.web_builder(self.inner_radius) - surface.add_geomod(web, geo_extension, apply=True) - 
subsurface2face_size(web, face_size / 2) - assign_material(obj, self.material_web) - obj = join_objects([web, obj]) - - texture = bpy.data.textures.new(name='cap', type='STUCCI') - texture.noise_scale = uniform(.005, .01) - butil.modify_mesh(obj, 'DISPLACE', strength=.008, texture=texture, mid_level=0) - - butil.modify_mesh(obj, 'SIMPLE_DEFORM', deform_method='BEND', angle=-uniform(0, np.pi / 2), - deform_axis='Y') - tag_object(obj, 'stem') - return obj - - @staticmethod - def geo_band(nw: NodeWrangler, length, scale): - geometry = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketGeometry', 'Geometry', None)]) - wave = nw.new_node(Nodes.WaveTexture, input_kwargs={ - 'Scale': log_uniform(5, 10), - 'Distortion': uniform(5, 10), - 'Detail Scale': 2, }, attrs={'bands_direction': 'Z', 'wave_profile': 'SAW'}).outputs['Fac'] - selection = nw.compare('LESS_THAN', nw.separate(nw.new_node(Nodes.InputPosition))[-1], - -uniform(.3, .7) * length) - normal = nw.vector_math('NORMALIZE', nw.add(nw.new_node(Nodes.InputNormal), (0, 0, 2))) - geometry = nw.new_node(Nodes.SetPosition, - [geometry, selection, None, nw.scale(nw.scale(wave, scale), normal)]) - nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': geometry}) - - @staticmethod - def geo_inverse_band(nw: NodeWrangler, scale): - geometry = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketGeometry', 'Geometry', None)]) - x, y, z = nw.separate(nw.new_node(Nodes.InputPosition)) - vector = nw.combine(x, y, nw.scalar_multiply(-1, z)) - wave = nw.new_node(Nodes.WaveTexture, input_kwargs={ - 'Vector': vector, - 'Scale': log_uniform(5, 10), - 'Distortion': uniform(5, 10), - 'Detail Scale': 2, }, attrs={'bands_direction': 'Z', 'wave_profile': 'SAW'}).outputs['Fac'] - normal = nw.vector_math('NORMALIZE', nw.add(nw.new_node(Nodes.InputNormal), (0, 0, 2))) - geometry = nw.new_node(Nodes.SetPosition, - [geometry, None, None, nw.scale(nw.scale(wave, scale), normal)]) - nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': geometry}) diff --git a/infinigen/assets/appliances/__init__.py b/infinigen/assets/objects/appliances/__init__.py similarity index 79% rename from infinigen/assets/appliances/__init__.py rename to infinigen/assets/objects/appliances/__init__.py index 795254199..3230c75db 100644 --- a/infinigen/assets/appliances/__init__.py +++ b/infinigen/assets/objects/appliances/__init__.py @@ -1,5 +1,5 @@ -from .oven import OvenFactory from .beverage_fridge import BeverageFridgeFactory from .dishwasher import DishwasherFactory from .microwave import MicrowaveFactory -from .tv import TVFactory, MonitorFactory +from .oven import OvenFactory +from .tv import MonitorFactory, TVFactory diff --git a/infinigen/assets/objects/appliances/beverage_fridge.py b/infinigen/assets/objects/appliances/beverage_fridge.py new file mode 100644 index 000000000..b37e2db7a --- /dev/null +++ b/infinigen/assets/objects/appliances/beverage_fridge.py @@ -0,0 +1,1541 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
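The hunks below relocate the appliance assets to `infinigen/assets/objects/appliances` and add new geometry-node based factories, starting with `BeverageFridgeFactory`. For orientation, here is a minimal, illustrative driver sketch that uses only the constructor, `create_asset`, and `finalize_assets` methods defined in this patch and the re-exported import path from the renamed `__init__.py`; the surrounding driver code is an assumption for illustration and is not itself part of the diff (it must run inside Blender's Python with infinigen installed).

```python
# Illustrative sketch only: drive one of the appliance factories added below.
from infinigen.assets.objects.appliances import BeverageFridgeFactory

factory = BeverageFridgeFactory(factory_seed=0)  # parameters are sampled once per seed
fridge = factory.create_asset()                  # geometry-nodes mesh with bevelled edges
factory.finalize_assets(fridge)                  # optionally applies scratch / edge-wear
print(fridge.name)
```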
+ +# Authors: Hongyu Wen + + +import numpy as np +from numpy.random import normal as N +from numpy.random import randint as RI +from numpy.random import uniform as U + +from infinigen.assets.material_assignments import AssetList +from infinigen.core import surface +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.util import blender as butil +from infinigen.core.util.bevelling import ( + add_bevel, + complete_bevel, + complete_no_bevel, + get_bevel_edges, +) +from infinigen.core.util.blender import delete +from infinigen.core.util.math import FixedSeed + + +class BeverageFridgeFactory(AssetFactory): + def __init__(self, factory_seed, coarse=False, dimensions=[1.0, 1.0, 1.0]): + super(BeverageFridgeFactory, self).__init__(factory_seed, coarse=coarse) + + self.dimensions = dimensions + with FixedSeed(factory_seed): + self.params = self.sample_parameters(dimensions) + self.material_params, self.scratch, self.edge_wear = ( + self.get_material_params() + ) + self.params.update(self.material_params) + + def get_material_params(self): + material_assignments = AssetList["BeverageFridgeFactory"]() + params = { + "Surface": material_assignments["surface"].assign_material(), + "Front": material_assignments["front"].assign_material(), + "Handle": material_assignments["handle"].assign_material(), + "Back": material_assignments["back"].assign_material(), + } + wrapped_params = { + k: surface.shaderfunc_to_material(v) for k, v in params.items() + } + + scratch_prob, edge_wear_prob = material_assignments["wear_tear_prob"] + scratch, edge_wear = material_assignments["wear_tear"] + + is_scratch = np.random.uniform() < scratch_prob + is_edge_wear = np.random.uniform() < edge_wear_prob + if not is_scratch: + scratch = None + + if not is_edge_wear: + edge_wear = None + + return wrapped_params, scratch, edge_wear + + @staticmethod + def sample_parameters(dimensions): + depth = 1 + N(0, 0.1) + width = 1 + N(0, 0.1) + height = 1 + N(0, 0.1) + # depth, width, height = dimensions + door_thickness = U(0.05, 0.1) * depth + door_rotation = 0 # Set to 0 for now + + rack_radius = U(0.01, 0.02) * depth + rack_h_amount = RI(2, 4) + rack_d_amount = RI(4, 6) + brand_name = "BrandName" + + params = { + "Depth": depth, + "Width": width, + "Height": height, + "DoorThickness": door_thickness, + "DoorRotation": door_rotation, + "RackRadius": rack_radius, + "RackHAmount": rack_h_amount, + "RackDAmount": rack_d_amount, + "BrandName": brand_name, + } + return params + + def create_asset(self, **params): + obj = butil.spawn_cube() + butil.modify_mesh( + obj, + "NODES", + node_group=nodegroup_beverage_fridge_geometry(preprocess=True), + ng_inputs=self.params, + apply=True, + ) + bevel_edges = get_bevel_edges(obj) + delete(obj) + obj = butil.spawn_cube() + butil.modify_mesh( + obj, + "NODES", + node_group=nodegroup_beverage_fridge_geometry(), + ng_inputs=self.params, + apply=True, + ) + obj = add_bevel(obj, bevel_edges, offset=0.01) + + return obj + + def finalize_assets(self, assets): + if self.scratch: + self.scratch.apply(assets) + if self.edge_wear: + self.edge_wear.apply(assets) + + +@node_utils.to_nodegroup( + "nodegroup_oven_rack", singleton=False, type="GeometryNodeTree" +) +def nodegroup_oven_rack(nw: NodeWrangler): + # Code generated using version 2.6.5 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloatDistance", "Width", 
2.0000), + ("NodeSocketFloatDistance", "Height", 2.0000), + ("NodeSocketFloatDistance", "Radius", 0.0200), + ("NodeSocketInt", "Amount", 5), + ], + ) + + quadrilateral = nw.new_node( + "GeometryNodeCurvePrimitiveQuadrilateral", + input_kwargs={ + "Width": group_input.outputs["Width"], + "Height": group_input.outputs["Height"], + }, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Height"], 1: -0.5000}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Y": multiply}) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Height"]}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_4 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Y": multiply_1}) + + curve_line = nw.new_node( + Nodes.CurveLine, input_kwargs={"Start": combine_xyz_3, "End": combine_xyz_4} + ) + + geometry_to_instance = nw.new_node( + "GeometryNodeGeometryToInstance", input_kwargs={"Geometry": curve_line} + ) + + duplicate_elements = nw.new_node( + Nodes.DuplicateElements, + input_kwargs={ + "Geometry": geometry_to_instance, + "Amount": group_input.outputs["Amount"], + }, + attrs={"domain": "INSTANCE"}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Width"]}, + attrs={"operation": "MULTIPLY"}, + ) + + divide = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_2, 1: group_input.outputs["Amount"]}, + attrs={"operation": "DIVIDE"}, + ) + + multiply_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: duplicate_elements.outputs["Duplicate Index"], 1: divide}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={"X": multiply_3}) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": duplicate_elements.outputs["Geometry"], + "Offset": combine_xyz, + }, + ) + + duplicate_elements_1 = nw.new_node( + Nodes.DuplicateElements, + input_kwargs={ + "Geometry": geometry_to_instance, + "Amount": group_input.outputs["Amount"], + }, + attrs={"domain": "INSTANCE"}, + ) + + multiply_4 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Width"], 1: -0.5000}, + attrs={"operation": "MULTIPLY"}, + ) + + divide_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_4, 1: group_input.outputs["Amount"]}, + attrs={"operation": "DIVIDE"}, + ) + + multiply_5 = nw.new_node( + Nodes.Math, + input_kwargs={0: duplicate_elements_1.outputs["Duplicate Index"], 1: divide_1}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"X": multiply_5}) + + set_position_1 = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": duplicate_elements_1.outputs["Geometry"], + "Offset": combine_xyz_1, + }, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [quadrilateral, set_position, set_position_1]}, + ) + + curve_circle = nw.new_node( + Nodes.CurveCircle, input_kwargs={"Radius": group_input.outputs["Radius"]} + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": join_geometry, + "Profile Curve": curve_circle.outputs["Curve"], + "Fill Caps": True, + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Mesh": curve_to_mesh}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup("nodegroup_text", singleton=False, type="GeometryNodeTree") +def nodegroup_text(nw: NodeWrangler): + # Code generated using version 2.6.5 of the node_transpiler + + group_input 
= nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVectorTranslation", "Translation", (1.5000, 0.0000, 0.0000)), + ("NodeSocketString", "String", "BrandName"), + ("NodeSocketFloatDistance", "Size", 0.0500), + ("NodeSocketFloat", "Offset Scale", 0.0020), + ], + ) + + string_to_curves = nw.new_node( + "GeometryNodeStringToCurves", + input_kwargs={ + "String": group_input.outputs["String"], + "Size": group_input.outputs["Size"], + }, + attrs={"align_y": "BOTTOM_BASELINE", "align_x": "CENTER"}, + ) + + fill_curve = nw.new_node( + Nodes.FillCurve, + input_kwargs={"Curve": string_to_curves.outputs["Curve Instances"]}, + ) + + extrude_mesh = nw.new_node( + Nodes.ExtrudeMesh, + input_kwargs={ + "Mesh": fill_curve, + "Offset Scale": group_input.outputs["Offset Scale"], + }, + ) + + transform_1 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": extrude_mesh.outputs["Mesh"], + "Translation": group_input.outputs["Translation"], + "Rotation": (1.5708, 0.0000, 1.5708), + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": transform_1}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup("nodegroup_handle", singleton=False, type="GeometryNodeTree") +def nodegroup_handle(nw: NodeWrangler): + # Code generated using version 2.6.5 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "width", 0.0000), + ("NodeSocketFloat", "length", 0.0000), + ("NodeSocketFloat", "thickness", 0.0200), + ], + ) + + cube = nw.new_node( + Nodes.MeshCube, input_kwargs={"Size": group_input.outputs["width"]} + ) + + store_named_attribute = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": cube.outputs["Mesh"], + "Name": "uv_map", + 3: cube.outputs["UV Map"], + }, + attrs={"domain": "CORNER", "data_type": "FLOAT_VECTOR"}, + ) + + cube_1 = nw.new_node( + Nodes.MeshCube, input_kwargs={"Size": group_input.outputs["width"]} + ) + + store_named_attribute_1 = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": cube_1.outputs["Mesh"], + "Name": "uv_map", + 3: cube_1.outputs["UV Map"], + }, + attrs={"domain": "CORNER", "data_type": "FLOAT_VECTOR"}, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"Y": group_input.outputs["length"]} + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": store_named_attribute_1, "Translation": combine_xyz}, + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [store_named_attribute, transform]}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["width"]}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply}) + + transform_2 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": join_geometry_1, "Translation": combine_xyz_3}, + ) + + add = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["length"], + 1: group_input.outputs["width"], + }, + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": group_input.outputs["width"], + "Y": add, + "Z": group_input.outputs["thickness"], + }, + ) + + cube_2 = nw.new_node(Nodes.MeshCube, input_kwargs={"Size": combine_xyz_1}) + + store_named_attribute_2 = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": cube_2.outputs["Mesh"], + "Name": "uv_map", + 3: cube_2.outputs["UV Map"], + }, + attrs={"domain": "CORNER", "data_type": "FLOAT_VECTOR"}, 
+ ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["length"]}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["thickness"]}, + attrs={"operation": "MULTIPLY"}, + ) + + add_1 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["width"], 1: multiply_2} + ) + + combine_xyz_2 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"Y": multiply_1, "Z": add_1} + ) + + transform_1 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": store_named_attribute_2, + "Translation": combine_xyz_2, + }, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [transform_2, transform_1]} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": join_geometry}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup("nodegroup_center", singleton=False, type="GeometryNodeTree") +def nodegroup_center(nw: NodeWrangler): + # Code generated using version 2.6.5 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketVector", "Vector", (0.0000, 0.0000, 0.0000)), + ("NodeSocketFloat", "MarginX", 0.5000), + ("NodeSocketFloat", "MarginY", 0.0000), + ("NodeSocketFloat", "MarginZ", 0.0000), + ], + ) + + bounding_box = nw.new_node( + Nodes.BoundingBox, input_kwargs={"Geometry": group_input.outputs["Geometry"]} + ) + + subtract = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: group_input.outputs["Vector"], 1: bounding_box.outputs["Min"]}, + attrs={"operation": "SUBTRACT"}, + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": subtract.outputs["Vector"]} + ) + + greater_than = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["X"], 1: group_input.outputs["MarginX"]}, + attrs={"operation": "GREATER_THAN", "use_clamp": True}, + ) + + subtract_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: bounding_box.outputs["Max"], 1: group_input.outputs["Vector"]}, + attrs={"operation": "SUBTRACT"}, + ) + + separate_xyz_1 = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": subtract_1.outputs["Vector"]} + ) + + greater_than_1 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: separate_xyz_1.outputs["X"], + 1: group_input.outputs["MarginX"], + }, + attrs={"operation": "GREATER_THAN", "use_clamp": True}, + ) + + op_and = nw.new_node( + Nodes.BooleanMath, input_kwargs={0: greater_than, 1: greater_than_1} + ) + + greater_than_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["Y"], 1: group_input.outputs["MarginY"]}, + attrs={"operation": "GREATER_THAN"}, + ) + + greater_than_3 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: separate_xyz_1.outputs["Y"], + 1: group_input.outputs["MarginY"], + }, + attrs={"operation": "GREATER_THAN", "use_clamp": True}, + ) + + op_and_1 = nw.new_node( + Nodes.BooleanMath, input_kwargs={0: greater_than_2, 1: greater_than_3} + ) + + op_and_2 = nw.new_node(Nodes.BooleanMath, input_kwargs={0: op_and, 1: op_and_1}) + + greater_than_4 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["Z"], 1: group_input.outputs["MarginZ"]}, + attrs={"operation": "GREATER_THAN", "use_clamp": True}, + ) + + greater_than_5 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: separate_xyz_1.outputs["Z"], + 1: group_input.outputs["MarginZ"], + }, + attrs={"operation": "GREATER_THAN", "use_clamp": True}, + ) + + op_and_3 = nw.new_node( + 
Nodes.BooleanMath, input_kwargs={0: greater_than_4, 1: greater_than_5} + ) + + op_and_4 = nw.new_node(Nodes.BooleanMath, input_kwargs={0: op_and_2, 1: op_and_3}) + + op_not = nw.new_node( + Nodes.BooleanMath, input_kwargs={0: op_and_4}, attrs={"operation": "NOT"} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"In": op_and_4, "Out": op_not}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup("nodegroup_cube", singleton=False, type="GeometryNodeTree") +def nodegroup_cube(nw: NodeWrangler): + # Code generated using version 2.6.5 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVectorTranslation", "Size", (0.1000, 10.0000, 4.0000)), + ("NodeSocketVector", "Pos", (0.0000, 0.0000, 0.0000)), + ("NodeSocketInt", "Resolution", 2), + ], + ) + + cube = nw.new_node( + Nodes.MeshCube, + input_kwargs={ + "Size": group_input.outputs["Size"], + "Vertices X": group_input.outputs["Resolution"], + "Vertices Y": group_input.outputs["Resolution"], + "Vertices Z": group_input.outputs["Resolution"], + }, + ) + + store_named_attribute_1 = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": cube.outputs["Mesh"], + "Name": "uv_map", + 3: cube.outputs["UV Map"], + }, + attrs={"domain": "CORNER", "data_type": "FLOAT_VECTOR"}, + ) + + store_named_attribute = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={"Geometry": store_named_attribute_1, "Name": "uv_map"}, + attrs={"domain": "CORNER", "data_type": "FLOAT_VECTOR"}, + ) + + multiply_add = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: group_input.outputs["Size"], + 1: (0.5000, 0.5000, 0.5000), + 2: group_input.outputs["Pos"], + }, + attrs={"operation": "MULTIPLY_ADD"}, + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": store_named_attribute, + "Translation": multiply_add.outputs["Vector"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": transform}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_hollow_cube", singleton=False, type="GeometryNodeTree" +) +def nodegroup_hollow_cube(nw: NodeWrangler): + # Code generated using version 2.6.5 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVectorTranslation", "Size", (0.1000, 10.0000, 4.0000)), + ("NodeSocketVector", "Pos", (0.0000, 0.0000, 0.0000)), + ("NodeSocketInt", "Resolution", 2), + ("NodeSocketFloat", "Thickness", 0.0000), + ("NodeSocketBool", "Switch1", False), + ("NodeSocketBool", "Switch2", False), + ("NodeSocketBool", "Switch3", False), + ("NodeSocketBool", "Switch4", False), + ("NodeSocketBool", "Switch5", False), + ("NodeSocketBool", "Switch6", False), + ], + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": group_input.outputs["Size"]} + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Thickness"], 1: 2.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["Y"], 1: multiply}, + attrs={"operation": "SUBTRACT"}, + ) + + subtract_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["Z"], 1: multiply}, + attrs={"operation": "SUBTRACT"}, + ) + + combine_xyz_4 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": group_input.outputs["Thickness"], + "Y": subtract, + "Z": subtract_1, + }, + ) + + cube_2 = nw.new_node( + Nodes.MeshCube, + input_kwargs={ + 
"Size": combine_xyz_4, + "Vertices X": 2, + "Vertices Y": 2, + "Vertices Z": 2, + }, + ) + + store_named_attribute_1 = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": cube_2.outputs["Mesh"], + "Name": "uv_map", + 3: cube_2.outputs["UV Map"], + }, + attrs={"domain": "CORNER", "data_type": "FLOAT_VECTOR"}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Thickness"]}, + attrs={"operation": "MULTIPLY"}, + ) + + separate_xyz_1 = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": group_input.outputs["Pos"]} + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: multiply_1, 1: separate_xyz_1.outputs["X"]} + ) + + scale = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: group_input.outputs["Size"], "Scale": 0.5000}, + attrs={"operation": "SCALE"}, + ) + + separate_xyz_2 = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": scale.outputs["Vector"]} + ) + + add_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_2.outputs["Y"], 1: separate_xyz_1.outputs["Y"]}, + ) + + subtract_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_2.outputs["Z"], 1: separate_xyz_1.outputs["Z"]}, + attrs={"operation": "SUBTRACT"}, + ) + + combine_xyz_5 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": add, "Y": add_1, "Z": subtract_2} + ) + + transform_2 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": store_named_attribute_1, + "Translation": combine_xyz_5, + }, + ) + + switch_2 = nw.new_node( + Nodes.Switch, input_kwargs={1: group_input.outputs["Switch3"], 14: transform_2} + ) + + subtract_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["Y"], 1: multiply}, + attrs={"operation": "SUBTRACT"}, + ) + + combine_xyz_2 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": separate_xyz.outputs["X"], + "Y": subtract_3, + "Z": group_input.outputs["Thickness"], + }, + ) + + cube_1 = nw.new_node( + Nodes.MeshCube, + input_kwargs={ + "Size": combine_xyz_2, + "Vertices X": 2, + "Vertices Y": 2, + "Vertices Z": 2, + }, + ) + + store_named_attribute_4 = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": cube_1.outputs["Mesh"], + "Name": "uv_map", + 3: cube_1.outputs["UV Map"], + }, + attrs={"domain": "CORNER", "data_type": "FLOAT_VECTOR"}, + ) + + add_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_2.outputs["X"], 1: separate_xyz_1.outputs["X"]}, + ) + + add_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_2.outputs["Y"], 1: separate_xyz_1.outputs["Y"]}, + ) + + subtract_4 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["Z"], 1: multiply_1}, + attrs={"operation": "SUBTRACT"}, + ) + + combine_xyz_3 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": add_2, "Y": add_3, "Z": subtract_4} + ) + + transform_1 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": store_named_attribute_4, + "Translation": combine_xyz_3, + }, + ) + + switch_1 = nw.new_node( + Nodes.Switch, input_kwargs={1: group_input.outputs["Switch2"], 14: transform_1} + ) + + subtract_5 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["Y"], 1: multiply}, + attrs={"operation": "SUBTRACT"}, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": separate_xyz.outputs["X"], + "Y": subtract_5, + "Z": group_input.outputs["Thickness"], + }, + ) + + cube = nw.new_node( + Nodes.MeshCube, + input_kwargs={ + "Size": combine_xyz, + "Vertices X": 2, + "Vertices Y": 2, + "Vertices Z": 2, + }, + ) + + 
store_named_attribute = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": cube.outputs["Mesh"], + "Name": "uv_map", + 3: cube.outputs["UV Map"], + }, + attrs={"domain": "CORNER", "data_type": "FLOAT_VECTOR"}, + ) + + add_4 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_2.outputs["X"], 1: separate_xyz_1.outputs["X"]}, + ) + + add_5 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_2.outputs["Y"], 1: separate_xyz_1.outputs["Y"]}, + ) + + add_6 = nw.new_node( + Nodes.Math, input_kwargs={0: multiply_1, 1: separate_xyz_1.outputs["Z"]} + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": add_4, "Y": add_5, "Z": add_6} + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": store_named_attribute, "Translation": combine_xyz_1}, + ) + + switch = nw.new_node( + Nodes.Switch, input_kwargs={1: group_input.outputs["Switch1"], 14: transform} + ) + + subtract_6 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["Y"], 1: multiply}, + attrs={"operation": "SUBTRACT"}, + ) + + subtract_7 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["Z"], 1: multiply}, + attrs={"operation": "SUBTRACT"}, + ) + + combine_xyz_6 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": group_input.outputs["Thickness"], + "Y": subtract_6, + "Z": subtract_7, + }, + ) + + cube_3 = nw.new_node( + Nodes.MeshCube, + input_kwargs={ + "Size": combine_xyz_6, + "Vertices X": 2, + "Vertices Y": 2, + "Vertices Z": 2, + }, + ) + + store_named_attribute_5 = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": cube_3.outputs["Mesh"], + "Name": "uv_map", + 3: cube_3.outputs["UV Map"], + }, + attrs={"domain": "CORNER", "data_type": "FLOAT_VECTOR"}, + ) + + subtract_8 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["X"], 1: multiply_1}, + attrs={"operation": "SUBTRACT"}, + ) + + add_7 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_2.outputs["Y"], 1: separate_xyz_1.outputs["Y"]}, + ) + + subtract_9 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_2.outputs["Z"], 1: separate_xyz_1.outputs["Z"]}, + attrs={"operation": "SUBTRACT"}, + ) + + combine_xyz_7 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": subtract_8, "Y": add_7, "Z": subtract_9} + ) + + transform_3 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": store_named_attribute_5, + "Translation": combine_xyz_7, + }, + ) + + switch_3 = nw.new_node( + Nodes.Switch, input_kwargs={1: group_input.outputs["Switch4"], 14: transform_3} + ) + + combine_xyz_9 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": separate_xyz.outputs["X"], + "Y": group_input.outputs["Thickness"], + "Z": separate_xyz.outputs["Z"], + }, + ) + + cube_4 = nw.new_node( + Nodes.MeshCube, + input_kwargs={ + "Size": combine_xyz_9, + "Vertices X": 2, + "Vertices Y": 2, + "Vertices Z": 2, + }, + ) + + store_named_attribute_2 = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": cube_4.outputs["Mesh"], + "Name": "uv_map", + 3: cube_4.outputs["UV Map"], + }, + attrs={"domain": "CORNER", "data_type": "FLOAT_VECTOR"}, + ) + + add_8 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_1.outputs["X"], 1: separate_xyz_2.outputs["X"]}, + ) + + add_9 = nw.new_node( + Nodes.Math, input_kwargs={0: separate_xyz_1.outputs["Y"], 1: multiply_1} + ) + + add_10 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_1.outputs["Z"], 1: separate_xyz_2.outputs["Z"]}, + ) + + combine_xyz_8 = 
nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": add_8, "Y": add_9, "Z": add_10} + ) + + transform_4 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": store_named_attribute_2, + "Translation": combine_xyz_8, + }, + ) + + switch_4 = nw.new_node( + Nodes.Switch, input_kwargs={1: group_input.outputs["Switch5"], 14: transform_4} + ) + + combine_xyz_10 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": separate_xyz.outputs["X"], + "Y": group_input.outputs["Thickness"], + "Z": separate_xyz.outputs["Z"], + }, + ) + + cube_5 = nw.new_node( + Nodes.MeshCube, + input_kwargs={ + "Size": combine_xyz_10, + "Vertices X": 2, + "Vertices Y": 2, + "Vertices Z": 2, + }, + ) + + store_named_attribute_3 = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": cube_5.outputs["Mesh"], + "Name": "uv_map", + 3: cube_5.outputs["UV Map"], + }, + attrs={"domain": "CORNER", "data_type": "FLOAT_VECTOR"}, + ) + + add_11 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_2.outputs["X"], 1: separate_xyz_1.outputs["X"]}, + ) + + subtract_10 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["Y"], 1: multiply_1}, + attrs={"operation": "SUBTRACT"}, + ) + + add_12 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_2.outputs["Z"], 1: separate_xyz_1.outputs["Z"]}, + ) + + combine_xyz_11 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": add_11, "Y": subtract_10, "Z": add_12} + ) + + transform_5 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": store_named_attribute_3, + "Translation": combine_xyz_11, + }, + ) + + switch_5 = nw.new_node( + Nodes.Switch, input_kwargs={1: group_input.outputs["Switch6"], 14: transform_5} + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={ + "Geometry": [ + switch_2.outputs[6], + switch_1.outputs[6], + switch.outputs[6], + switch_3.outputs[6], + switch_4.outputs[6], + switch_5.outputs[6], + ] + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": join_geometry}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_beverage_fridge_geometry", singleton=False, type="GeometryNodeTree" +) +def nodegroup_beverage_fridge_geometry(nw: NodeWrangler, preprocess: bool = False): + # Code generated using version 2.6.5 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "Depth", 1.0000), + ("NodeSocketFloat", "Width", 1.0000), + ("NodeSocketFloat", "Height", 1.0000), + ("NodeSocketFloat", "DoorThickness", 0.0700), + ("NodeSocketFloat", "DoorRotation", 0.0000), + ("NodeSocketFloatDistance", "RackRadius", 0.0100), + ("NodeSocketInt", "RackDAmount", 5), + ("NodeSocketInt", "RackHAmount", 2), + ("NodeSocketString", "BrandName", "BrandName"), + ("NodeSocketMaterial", "Surface", None), + ("NodeSocketMaterial", "Front", None), + ("NodeSocketMaterial", "Handle", None), + ("NodeSocketMaterial", "Back", None), + ], + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": group_input.outputs["Depth"], + "Y": group_input.outputs["Width"], + "Z": group_input.outputs["Height"], + }, + ) + + hollowcube = nw.new_node( + nodegroup_hollow_cube().name, + input_kwargs={ + "Size": combine_xyz, + "Thickness": group_input.outputs["DoorThickness"], + "Switch2": True, + "Switch4": True, + }, + ) + + set_material_1 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": hollowcube, + "Material": group_input.outputs["Surface"], + }, + ) + + subdivide_mesh = 
nw.new_node( + Nodes.SubdivideMesh, input_kwargs={"Mesh": set_material_1, "Level": 0} + ) + + # set_shade_smooth_2 = nw.new_node(Nodes.SetShadeSmooth, input_kwargs={'Geometry': subdivide_mesh}) + + body = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": subdivide_mesh}, label="Body" + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": group_input.outputs["DoorThickness"], + "Y": group_input.outputs["Width"], + "Z": group_input.outputs["Height"], + }, + ) + + combine_xyz_2 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": group_input.outputs["Depth"]} + ) + + cube = nw.new_node( + nodegroup_cube().name, + input_kwargs={"Size": combine_xyz_1, "Pos": combine_xyz_2}, + ) + + position = nw.new_node(Nodes.InputPosition) + + center = nw.new_node( + nodegroup_center().name, + input_kwargs={ + "Geometry": cube, + "Vector": position, + "MarginX": -1.0000, + "MarginY": 0.1000, + "MarginZ": 0.1500, + }, + ) + + set_material_2 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": cube, + "Selection": center.outputs["In"], + "Material": group_input.outputs["Front"], + }, + ) + + set_material_3 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": set_material_2, + "Selection": center.outputs["Out"], + "Material": group_input.outputs["Surface"], + }, + ) + + # set_shade_smooth = nw.new_node(Nodes.SetShadeSmooth, input_kwargs={'Geometry': set_material_3}) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Width"], 1: 0.0500}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Height"], 1: 0.8000}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, input_kwargs={0: multiply}, attrs={"operation": "MULTIPLY"} + ) + + handle = nw.new_node( + nodegroup_handle().name, + input_kwargs={"width": multiply, "length": multiply_1, "thickness": multiply_2}, + ) + + add = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["Depth"], + 1: group_input.outputs["DoorThickness"], + }, + ) + + multiply_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Width"], 1: 0.1000}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_4 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Height"], 1: 0.9000}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_13 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": add, "Y": multiply_3, "Z": multiply_4} + ) + + transform_1 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": handle, + "Translation": combine_xyz_13, + "Rotation": (0.0000, 1.5708, 0.0000), + }, + ) + + set_material_8 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": transform_1, + "Material": group_input.outputs["Handle"], + }, + ) + + geometry_to_instance_4 = nw.new_node( + "GeometryNodeGeometryToInstance", input_kwargs={"Geometry": set_material_8} + ) + + rotate_instances_2 = nw.new_node( + Nodes.RotateInstances, + input_kwargs={ + "Instances": geometry_to_instance_4, + "Rotation": (-1.5708, 0.0000, 0.0000), + "Pivot Point": combine_xyz_13, + }, + ) + + add_1 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["Depth"], + 1: group_input.outputs["DoorThickness"], + }, + ) + + multiply_5 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Width"]}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_12 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": add_1, "Y": multiply_5, "Z": 0.0300} + ) + + 
multiply_6 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Height"], 1: 0.0500}, + attrs={"operation": "MULTIPLY"}, + ) + + text = nw.new_node( + nodegroup_text().name, + input_kwargs={ + "Translation": combine_xyz_12, + "String": group_input.outputs["BrandName"], + "Size": multiply_6, + "Offset Scale": 0.0020, + }, + ) + + text = complete_no_bevel(nw, text, preprocess) + + set_material_9 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={"Geometry": text, "Material": group_input.outputs["Handle"]}, + ) + + rotate_instances_2 = complete_bevel(nw, rotate_instances_2, preprocess) + + join_geometry_3 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [set_material_3, rotate_instances_2, set_material_9]}, + ) + + geometry_to_instance = nw.new_node( + "GeometryNodeGeometryToInstance", input_kwargs={"Geometry": join_geometry_3} + ) + + z = nw.scalar_multiply( + group_input.outputs["DoorRotation"], 1 if not preprocess else 0 + ) + + combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": z}) + + combine_xyz_4 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": group_input.outputs["Depth"], + "Y": group_input.outputs["Width"], + }, + ) + + rotate_instances = nw.new_node( + Nodes.RotateInstances, + input_kwargs={ + "Instances": geometry_to_instance, + "Rotation": combine_xyz_3, + "Pivot Point": combine_xyz_4, + }, + ) + + door = nw.new_node( + Nodes.Reroute, + input_kwargs={"Input": nw.new_node(Nodes.RealizeInstances, [rotate_instances])}, + label="door", + ) + + multiply_7 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["DoorThickness"], 1: 2.1000}, + attrs={"operation": "MULTIPLY"}, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Depth"], 1: multiply_7}, + attrs={"operation": "SUBTRACT"}, + ) + + multiply_8 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["DoorThickness"], 1: 2.1000}, + attrs={"operation": "MULTIPLY"}, + ) + + subtract_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Width"], 1: multiply_8}, + attrs={"operation": "SUBTRACT"}, + ) + + ovenrack = nw.new_node( + nodegroup_oven_rack().name, + input_kwargs={ + "Width": subtract, + "Height": subtract_1, + "Radius": group_input.outputs["RackRadius"], + "Amount": group_input.outputs["RackDAmount"], + }, + ) + + geometry_to_instance_1 = nw.new_node( + "GeometryNodeGeometryToInstance", input_kwargs={"Geometry": ovenrack} + ) + + duplicate_elements = nw.new_node( + Nodes.DuplicateElements, + input_kwargs={ + "Geometry": geometry_to_instance_1, + "Amount": group_input.outputs["RackHAmount"], + }, + attrs={"domain": "INSTANCE"}, + ) + + multiply_9 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Depth"]}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_10 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Width"]}, + attrs={"operation": "MULTIPLY"}, + ) + + add_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: duplicate_elements.outputs["Duplicate Index"], 1: 1.0000}, + ) + + multiply_11 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["DoorThickness"], 1: 2.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + subtract_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Height"], 1: multiply_11}, + attrs={"operation": "SUBTRACT"}, + ) + + add_3 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["RackHAmount"], 1: 1.0000} + ) + + divide = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract_2, 1: 
add_3}, + attrs={"operation": "DIVIDE"}, + ) + + multiply_12 = nw.new_node( + Nodes.Math, input_kwargs={0: add_2, 1: divide}, attrs={"operation": "MULTIPLY"} + ) + + combine_xyz_5 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": multiply_9, "Y": multiply_10, "Z": multiply_12}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": duplicate_elements.outputs["Geometry"], + "Offset": combine_xyz_5, + }, + ) + + set_material = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": set_position, + "Material": group_input.outputs["Handle"], + }, + ) + + racks = nw.new_node( + Nodes.Reroute, + input_kwargs={"Input": nw.new_node(Nodes.RealizeInstances, [set_material])}, + label="racks", + ) + + add_4 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["Depth"], + 1: group_input.outputs["DoorThickness"], + }, + ) + + reroute_10 = nw.new_node(Nodes.Reroute, input_kwargs={"Input": add_4}) + + reroute_11 = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": group_input.outputs["Width"]} + ) + + reroute_8 = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": group_input.outputs["DoorThickness"]} + ) + + combine_xyz_6 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": reroute_10, "Y": reroute_11, "Z": reroute_8}, + ) + + reroute_9 = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": group_input.outputs["Height"]} + ) + + combine_xyz_7 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": reroute_9}) + + cube_1 = nw.new_node( + nodegroup_cube().name, + input_kwargs={"Size": combine_xyz_6, "Pos": combine_xyz_7}, + ) + + set_material_5 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={"Geometry": cube_1, "Material": group_input.outputs["Back"]}, + ) + + # set_shade_smooth_1 = nw.new_node(Nodes.SetShadeSmooth, input_kwargs={'Geometry': set_material_5}) + + join_geometry_2 = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": set_material_5} + ) + + heater = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": join_geometry_2}, label="heater" + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [body, door, racks, heater]} + ) + + geometry = nw.new_node(Nodes.RealizeInstances, [join_geometry]) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Geometry": geometry}) diff --git a/infinigen/assets/objects/appliances/dishwasher.py b/infinigen/assets/objects/appliances/dishwasher.py new file mode 100644 index 000000000..9f6b3de30 --- /dev/null +++ b/infinigen/assets/objects/appliances/dishwasher.py @@ -0,0 +1,1596 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
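Both `BeverageFridgeFactory` above and `DishwasherFactory` below build their mesh twice in `create_asset`: a first pass through the node group with `preprocess=True` only to record which edges should be bevelled (`get_bevel_edges`), then a second pass for the final geometry, which receives the bevel via `add_bevel`. A minimal sketch of that shared pattern is shown here, with `make_geometry` as a hypothetical placeholder for either `nodegroup_beverage_fridge_geometry` or `nodegroup_dishwasher_geometry`; the helper itself is illustrative and not part of the patch.

```python
# Hedged sketch of the two-pass bevel pattern used by the appliance factories.
from infinigen.core.util import blender as butil
from infinigen.core.util.bevelling import add_bevel, get_bevel_edges
from infinigen.core.util.blender import delete


def build_with_bevel(make_geometry, params, offset=0.01):
    # Pass 1: evaluate the node group in preprocess mode to tag bevel edges.
    probe = butil.spawn_cube()
    butil.modify_mesh(
        probe, "NODES", node_group=make_geometry(preprocess=True),
        ng_inputs=params, apply=True,
    )
    bevel_edges = get_bevel_edges(probe)
    delete(probe)

    # Pass 2: rebuild the real geometry and apply the recorded bevel.
    obj = butil.spawn_cube()
    butil.modify_mesh(
        obj, "NODES", node_group=make_geometry(),
        ng_inputs=params, apply=True,
    )
    return add_bevel(obj, bevel_edges, offset=offset)
```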
+ +# Authors: Hongyu Wen + + +import numpy as np +from numpy.random import normal as N +from numpy.random import randint as RI +from numpy.random import uniform as U + +from infinigen.assets.material_assignments import AssetList +from infinigen.core import surface +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.util import blender as butil +from infinigen.core.util.bevelling import ( + add_bevel, + complete_bevel, + complete_no_bevel, + get_bevel_edges, +) +from infinigen.core.util.blender import delete +from infinigen.core.util.math import FixedSeed + + +class DishwasherFactory(AssetFactory): + def __init__(self, factory_seed, coarse=False, dimensions=[1.0, 1.0, 1.0]): + super(DishwasherFactory, self).__init__(factory_seed, coarse=coarse) + + self.dimensions = dimensions + with FixedSeed(factory_seed): + self.params = self.sample_parameters(dimensions) + self.material_params, self.scratch, self.edge_wear = ( + self.get_material_params() + ) + self.params.update(self.material_params) + + def get_material_params(self): + material_assignments = AssetList["DishwasherFactory"]() + params = { + "Surface": material_assignments["surface"].assign_material(), + "Front": material_assignments["front"].assign_material(), + "WhiteMetal": material_assignments["white_metal"].assign_material(), + "Top": material_assignments["top"].assign_material(), + "NameMaterial": material_assignments["name_material"].assign_material(), + } + wrapped_params = { + k: surface.shaderfunc_to_material(v) for k, v in params.items() + } + + scratch_prob, edge_wear_prob = material_assignments["wear_tear_prob"] + scratch, edge_wear = material_assignments["wear_tear"] + + is_scratch = np.random.uniform() < scratch_prob + is_edge_wear = np.random.uniform() < edge_wear_prob + if not is_scratch: + scratch = None + + if not is_edge_wear: + edge_wear = None + + return wrapped_params, scratch, edge_wear + + @staticmethod + def sample_parameters(dimensions): + # depth, width, height = dimensions + depth = 1 + N(0, 0.1) + width = 1 + N(0, 0.1) + height = 1 + N(0, 0.1) + door_thickness = U(0.05, 0.1) * depth + door_rotation = 0 # Set to 0 for now + + rack_radius = U(0.01, 0.02) * depth + rack_h_amount = RI(2, 3) + brand_name = "BrandName" + + params = { + "Depth": depth, + "Width": width, + "Height": height, + "DoorThickness": door_thickness, + "DoorRotation": door_rotation, + "RackRadius": rack_radius, + "RackAmount": rack_h_amount, + "BrandName": brand_name, + } + return params + + def create_asset(self, **params): + obj = butil.spawn_cube() + butil.modify_mesh( + obj, + "NODES", + node_group=nodegroup_dishwasher_geometry(preprocess=True), + ng_inputs=self.params, + apply=True, + ) + bevel_edges = get_bevel_edges(obj) + delete(obj) + obj = butil.spawn_cube() + butil.modify_mesh( + obj, + "NODES", + node_group=nodegroup_dishwasher_geometry(), + ng_inputs=self.params, + apply=True, + ) + obj = add_bevel(obj, bevel_edges, offset=0.01) + + return obj + + def finalize_assets(self, assets): + if self.scratch: + self.scratch.apply(assets) + if self.edge_wear: + self.edge_wear.apply(assets) + + +@node_utils.to_nodegroup( + "nodegroup_dish_rack", singleton=False, type="GeometryNodeTree" +) +def nodegroup_dish_rack(nw: NodeWrangler): + # Code generated using version 2.6.5 of the node_transpiler + + quadrilateral = nw.new_node("GeometryNodeCurvePrimitiveQuadrilateral") + + curve_line = nw.new_node( + 
Nodes.CurveLine, + input_kwargs={ + "Start": (0.0000, -1.0000, 0.0000), + "End": (0.0000, 1.0000, 0.0000), + }, + ) + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloatDistance", "Depth", 2.0000), + ("NodeSocketFloatDistance", "Width", 2.0000), + ("NodeSocketFloatDistance", "Radius", 0.0200), + ("NodeSocketInt", "Amount", 5), + ("NodeSocketFloat", "Height", 0.5000), + ], + ) + + combine_xyz_4 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"Y": -1.0000, "Z": group_input.outputs["Height"]}, + ) + + curve_line_1 = nw.new_node( + Nodes.CurveLine, + input_kwargs={"Start": (0.0000, -1.0000, 0.0000), "End": combine_xyz_4}, + ) + + geometry_to_instance_1 = nw.new_node( + "GeometryNodeGeometryToInstance", input_kwargs={"Geometry": curve_line_1} + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Amount"], 1: 2.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + duplicate_elements_2 = nw.new_node( + Nodes.DuplicateElements, + input_kwargs={"Geometry": geometry_to_instance_1, "Amount": multiply}, + attrs={"domain": "INSTANCE"}, + ) + + divide = nw.new_node( + Nodes.Math, + input_kwargs={0: 1.0000, 1: group_input.outputs["Amount"]}, + attrs={"operation": "DIVIDE"}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: duplicate_elements_2.outputs["Duplicate Index"], 1: divide}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Y": multiply_1}) + + set_position_2 = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": duplicate_elements_2.outputs["Geometry"], + "Offset": combine_xyz_3, + }, + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [curve_line, set_position_2]} + ) + + geometry_to_instance = nw.new_node( + "GeometryNodeGeometryToInstance", input_kwargs={"Geometry": join_geometry_1} + ) + + duplicate_elements = nw.new_node( + Nodes.DuplicateElements, + input_kwargs={"Geometry": geometry_to_instance, "Amount": multiply}, + attrs={"domain": "INSTANCE"}, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: duplicate_elements.outputs["Duplicate Index"], + 1: group_input.outputs["Amount"], + }, + attrs={"operation": "SUBTRACT"}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract, 1: divide}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={"X": multiply_2}) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": duplicate_elements.outputs["Geometry"], + "Offset": combine_xyz, + }, + ) + + transform_1 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": set_position, "Rotation": (0.0000, 0.0000, 1.5708)}, + ) + + duplicate_elements_1 = nw.new_node( + Nodes.DuplicateElements, + input_kwargs={"Geometry": geometry_to_instance, "Amount": multiply}, + attrs={"domain": "INSTANCE"}, + ) + + subtract_1 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: duplicate_elements_1.outputs["Duplicate Index"], + 1: group_input.outputs["Amount"], + }, + attrs={"operation": "SUBTRACT"}, + ) + + multiply_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract_1, 1: divide}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"X": multiply_3}) + + set_position_1 = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": duplicate_elements_1.outputs["Geometry"], + "Offset": combine_xyz_1, + }, + ) + + quadrilateral_1 = 
nw.new_node("GeometryNodeCurvePrimitiveQuadrilateral") + + multiply_4 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Height"], 1: 0.8000}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_5 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply_4}) + + transform_2 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": quadrilateral_1, "Translation": combine_xyz_5}, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={ + "Geometry": [quadrilateral, transform_1, set_position_1, transform_2] + }, + ) + + curve_circle = nw.new_node( + Nodes.CurveCircle, input_kwargs={"Radius": group_input.outputs["Radius"]} + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": join_geometry, + "Profile Curve": curve_circle.outputs["Curve"], + "Fill Caps": True, + }, + ) + + multiply_5 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Depth"]}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_6 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Width"]}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_2 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": multiply_5, "Y": multiply_6, "Z": 0.5000} + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": curve_to_mesh, + "Rotation": (0.0000, 0.0000, 1.5708), + "Scale": combine_xyz_2, + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Mesh": transform}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup("nodegroup_text", singleton=False, type="GeometryNodeTree") +def nodegroup_text(nw: NodeWrangler): + # Code generated using version 2.6.5 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVectorTranslation", "Translation", (1.5000, 0.0000, 0.0000)), + ("NodeSocketString", "String", "BrandName"), + ("NodeSocketFloatDistance", "Size", 0.0500), + ("NodeSocketFloat", "Offset Scale", 0.0020), + ], + ) + + string_to_curves = nw.new_node( + "GeometryNodeStringToCurves", + input_kwargs={ + "String": group_input.outputs["String"], + "Size": group_input.outputs["Size"], + }, + attrs={"align_y": "BOTTOM_BASELINE", "align_x": "CENTER"}, + ) + + fill_curve = nw.new_node( + Nodes.FillCurve, + input_kwargs={"Curve": string_to_curves.outputs["Curve Instances"]}, + ) + + extrude_mesh = nw.new_node( + Nodes.ExtrudeMesh, + input_kwargs={ + "Mesh": fill_curve, + "Offset Scale": group_input.outputs["Offset Scale"], + }, + ) + + transform_1 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": extrude_mesh.outputs["Mesh"], + "Translation": group_input.outputs["Translation"], + "Rotation": (1.5708, 0.0000, 1.5708), + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": transform_1}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup("nodegroup_handle", singleton=False, type="GeometryNodeTree") +def nodegroup_handle(nw: NodeWrangler): + # Code generated using version 2.6.5 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "width", 0.0000), + ("NodeSocketFloat", "length", 0.0000), + ("NodeSocketFloat", "thickness", 0.0200), + ], + ) + + cube = nw.new_node( + Nodes.MeshCube, input_kwargs={"Size": group_input.outputs["width"]} + ) + + store_named_attribute = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": cube.outputs["Mesh"], + "Name": "uv_map", + 3: cube.outputs["UV Map"], + 
}, + attrs={"domain": "CORNER", "data_type": "FLOAT_VECTOR"}, + ) + + cube_1 = nw.new_node( + Nodes.MeshCube, input_kwargs={"Size": group_input.outputs["width"]} + ) + + store_named_attribute_1 = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": cube_1.outputs["Mesh"], + "Name": "uv_map", + 3: cube_1.outputs["UV Map"], + }, + attrs={"domain": "CORNER", "data_type": "FLOAT_VECTOR"}, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"Y": group_input.outputs["length"]} + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": store_named_attribute_1, "Translation": combine_xyz}, + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [store_named_attribute, transform]}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["width"]}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply}) + + transform_2 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": join_geometry_1, "Translation": combine_xyz_3}, + ) + + add = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["length"], + 1: group_input.outputs["width"], + }, + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": group_input.outputs["width"], + "Y": add, + "Z": group_input.outputs["thickness"], + }, + ) + + cube_2 = nw.new_node(Nodes.MeshCube, input_kwargs={"Size": combine_xyz_1}) + + store_named_attribute_2 = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": cube_2.outputs["Mesh"], + "Name": "uv_map", + 3: cube_2.outputs["UV Map"], + }, + attrs={"domain": "CORNER", "data_type": "FLOAT_VECTOR"}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["length"]}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["thickness"]}, + attrs={"operation": "MULTIPLY"}, + ) + + add_1 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["width"], 1: multiply_2} + ) + + combine_xyz_2 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"Y": multiply_1, "Z": add_1} + ) + + transform_1 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": store_named_attribute_2, + "Translation": combine_xyz_2, + }, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [transform_2, transform_1]} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": join_geometry}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup("nodegroup_center", singleton=False, type="GeometryNodeTree") +def nodegroup_center(nw: NodeWrangler): + # Code generated using version 2.6.5 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketVector", "Vector", (0.0000, 0.0000, 0.0000)), + ("NodeSocketFloat", "MarginX", 0.5000), + ("NodeSocketFloat", "MarginY", 0.0000), + ("NodeSocketFloat", "MarginZ", 0.0000), + ], + ) + + bounding_box = nw.new_node( + Nodes.BoundingBox, input_kwargs={"Geometry": group_input.outputs["Geometry"]} + ) + + subtract = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: group_input.outputs["Vector"], 1: bounding_box.outputs["Min"]}, + attrs={"operation": "SUBTRACT"}, + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": subtract.outputs["Vector"]} + ) + + greater_than = nw.new_node( + 
Nodes.Math, + input_kwargs={0: separate_xyz.outputs["X"], 1: group_input.outputs["MarginX"]}, + attrs={"operation": "GREATER_THAN", "use_clamp": True}, + ) + + subtract_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: bounding_box.outputs["Max"], 1: group_input.outputs["Vector"]}, + attrs={"operation": "SUBTRACT"}, + ) + + separate_xyz_1 = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": subtract_1.outputs["Vector"]} + ) + + greater_than_1 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: separate_xyz_1.outputs["X"], + 1: group_input.outputs["MarginX"], + }, + attrs={"operation": "GREATER_THAN", "use_clamp": True}, + ) + + op_and = nw.new_node( + Nodes.BooleanMath, input_kwargs={0: greater_than, 1: greater_than_1} + ) + + greater_than_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["Y"], 1: group_input.outputs["MarginY"]}, + attrs={"operation": "GREATER_THAN"}, + ) + + greater_than_3 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: separate_xyz_1.outputs["Y"], + 1: group_input.outputs["MarginY"], + }, + attrs={"operation": "GREATER_THAN", "use_clamp": True}, + ) + + op_and_1 = nw.new_node( + Nodes.BooleanMath, input_kwargs={0: greater_than_2, 1: greater_than_3} + ) + + op_and_2 = nw.new_node(Nodes.BooleanMath, input_kwargs={0: op_and, 1: op_and_1}) + + greater_than_4 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["Z"], 1: group_input.outputs["MarginZ"]}, + attrs={"operation": "GREATER_THAN", "use_clamp": True}, + ) + + greater_than_5 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: separate_xyz_1.outputs["Z"], + 1: group_input.outputs["MarginZ"], + }, + attrs={"operation": "GREATER_THAN", "use_clamp": True}, + ) + + op_and_3 = nw.new_node( + Nodes.BooleanMath, input_kwargs={0: greater_than_4, 1: greater_than_5} + ) + + op_and_4 = nw.new_node(Nodes.BooleanMath, input_kwargs={0: op_and_2, 1: op_and_3}) + + op_not = nw.new_node( + Nodes.BooleanMath, input_kwargs={0: op_and_4}, attrs={"operation": "NOT"} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"In": op_and_4, "Out": op_not}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup("nodegroup_cube", singleton=False, type="GeometryNodeTree") +def nodegroup_cube(nw: NodeWrangler): + # Code generated using version 2.6.5 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVectorTranslation", "Size", (0.1000, 10.0000, 4.0000)), + ("NodeSocketVector", "Pos", (0.0000, 0.0000, 0.0000)), + ("NodeSocketInt", "Resolution", 2), + ], + ) + + cube = nw.new_node( + Nodes.MeshCube, + input_kwargs={ + "Size": group_input.outputs["Size"], + "Vertices X": group_input.outputs["Resolution"], + "Vertices Y": group_input.outputs["Resolution"], + "Vertices Z": group_input.outputs["Resolution"], + }, + ) + + store_named_attribute_1 = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": cube.outputs["Mesh"], + "Name": "uv_map", + 3: cube.outputs["UV Map"], + }, + attrs={"domain": "CORNER", "data_type": "FLOAT_VECTOR"}, + ) + + store_named_attribute = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={"Geometry": store_named_attribute_1, "Name": "uv_map"}, + attrs={"domain": "CORNER", "data_type": "FLOAT_VECTOR"}, + ) + + multiply_add = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: group_input.outputs["Size"], + 1: (0.5000, 0.5000, 0.5000), + 2: group_input.outputs["Pos"], + }, + attrs={"operation": "MULTIPLY_ADD"}, + ) + + transform = nw.new_node( + Nodes.Transform, + 
input_kwargs={ + "Geometry": store_named_attribute, + "Translation": multiply_add.outputs["Vector"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": transform}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_hollow_cube", singleton=False, type="GeometryNodeTree" +) +def nodegroup_hollow_cube(nw: NodeWrangler): + # Code generated using version 2.6.5 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVectorTranslation", "Size", (0.1000, 10.0000, 4.0000)), + ("NodeSocketVector", "Pos", (0.0000, 0.0000, 0.0000)), + ("NodeSocketInt", "Resolution", 2), + ("NodeSocketFloat", "Thickness", 0.0000), + ("NodeSocketBool", "Switch1", False), + ("NodeSocketBool", "Switch2", False), + ("NodeSocketBool", "Switch3", False), + ("NodeSocketBool", "Switch4", False), + ("NodeSocketBool", "Switch5", False), + ("NodeSocketBool", "Switch6", False), + ], + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": group_input.outputs["Size"]} + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Thickness"], 1: 2.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["Y"], 1: multiply}, + attrs={"operation": "SUBTRACT"}, + ) + + subtract_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["Z"], 1: multiply}, + attrs={"operation": "SUBTRACT"}, + ) + + combine_xyz_4 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": group_input.outputs["Thickness"], + "Y": subtract, + "Z": subtract_1, + }, + ) + + cube_2 = nw.new_node( + Nodes.MeshCube, + input_kwargs={ + "Size": combine_xyz_4, + "Vertices X": group_input.outputs["Resolution"], + "Vertices Y": group_input.outputs["Resolution"], + "Vertices Z": group_input.outputs["Resolution"], + }, + ) + + store_named_attribute_1 = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": cube_2.outputs["Mesh"], + "Name": "uv_map", + 3: cube_2.outputs["UV Map"], + }, + attrs={"domain": "CORNER", "data_type": "FLOAT_VECTOR"}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Thickness"]}, + attrs={"operation": "MULTIPLY"}, + ) + + separate_xyz_1 = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": group_input.outputs["Pos"]} + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: multiply_1, 1: separate_xyz_1.outputs["X"]} + ) + + scale = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: group_input.outputs["Size"], "Scale": 0.5000}, + attrs={"operation": "SCALE"}, + ) + + separate_xyz_2 = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": scale.outputs["Vector"]} + ) + + add_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_2.outputs["Y"], 1: separate_xyz_1.outputs["Y"]}, + ) + + subtract_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_2.outputs["Z"], 1: separate_xyz_1.outputs["Z"]}, + attrs={"operation": "SUBTRACT"}, + ) + + combine_xyz_5 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": add, "Y": add_1, "Z": subtract_2} + ) + + transform_2 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": store_named_attribute_1, + "Translation": combine_xyz_5, + }, + ) + + switch_2 = nw.new_node( + Nodes.Switch, input_kwargs={1: group_input.outputs["Switch3"], 14: transform_2} + ) + + subtract_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["Y"], 1: multiply}, + 
attrs={"operation": "SUBTRACT"}, + ) + + combine_xyz_2 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": separate_xyz.outputs["X"], + "Y": subtract_3, + "Z": group_input.outputs["Thickness"], + }, + ) + + cube_1 = nw.new_node( + Nodes.MeshCube, + input_kwargs={ + "Size": combine_xyz_2, + "Vertices X": group_input.outputs["Resolution"], + "Vertices Y": group_input.outputs["Resolution"], + "Vertices Z": group_input.outputs["Resolution"], + }, + ) + + store_named_attribute_4 = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": cube_1.outputs["Mesh"], + "Name": "uv_map", + 3: cube_1.outputs["UV Map"], + }, + attrs={"domain": "CORNER", "data_type": "FLOAT_VECTOR"}, + ) + + add_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_2.outputs["X"], 1: separate_xyz_1.outputs["X"]}, + ) + + add_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_2.outputs["Y"], 1: separate_xyz_1.outputs["Y"]}, + ) + + subtract_4 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["Z"], 1: multiply_1}, + attrs={"operation": "SUBTRACT"}, + ) + + combine_xyz_3 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": add_2, "Y": add_3, "Z": subtract_4} + ) + + transform_1 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": store_named_attribute_4, + "Translation": combine_xyz_3, + }, + ) + + switch_1 = nw.new_node( + Nodes.Switch, input_kwargs={1: group_input.outputs["Switch2"], 14: transform_1} + ) + + subtract_5 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["Y"], 1: multiply}, + attrs={"operation": "SUBTRACT"}, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": separate_xyz.outputs["X"], + "Y": subtract_5, + "Z": group_input.outputs["Thickness"], + }, + ) + + cube = nw.new_node( + Nodes.MeshCube, + input_kwargs={ + "Size": combine_xyz, + "Vertices X": group_input.outputs["Resolution"], + "Vertices Y": group_input.outputs["Resolution"], + "Vertices Z": group_input.outputs["Resolution"], + }, + ) + + store_named_attribute = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": cube.outputs["Mesh"], + "Name": "uv_map", + 3: cube.outputs["UV Map"], + }, + attrs={"domain": "CORNER", "data_type": "FLOAT_VECTOR"}, + ) + + add_4 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_2.outputs["X"], 1: separate_xyz_1.outputs["X"]}, + ) + + add_5 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_2.outputs["Y"], 1: separate_xyz_1.outputs["Y"]}, + ) + + add_6 = nw.new_node( + Nodes.Math, input_kwargs={0: multiply_1, 1: separate_xyz_1.outputs["Z"]} + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": add_4, "Y": add_5, "Z": add_6} + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": store_named_attribute, "Translation": combine_xyz_1}, + ) + + switch = nw.new_node( + Nodes.Switch, input_kwargs={1: group_input.outputs["Switch1"], 14: transform} + ) + + subtract_6 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["Y"], 1: multiply}, + attrs={"operation": "SUBTRACT"}, + ) + + subtract_7 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["Z"], 1: multiply}, + attrs={"operation": "SUBTRACT"}, + ) + + combine_xyz_6 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": group_input.outputs["Thickness"], + "Y": subtract_6, + "Z": subtract_7, + }, + ) + + cube_3 = nw.new_node( + Nodes.MeshCube, + input_kwargs={ + "Size": combine_xyz_6, + "Vertices X": group_input.outputs["Resolution"], + 
"Vertices Y": group_input.outputs["Resolution"], + "Vertices Z": group_input.outputs["Resolution"], + }, + ) + + store_named_attribute_5 = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": cube_3.outputs["Mesh"], + "Name": "uv_map", + 3: cube_3.outputs["UV Map"], + }, + attrs={"domain": "CORNER", "data_type": "FLOAT_VECTOR"}, + ) + + subtract_8 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["X"], 1: multiply_1}, + attrs={"operation": "SUBTRACT"}, + ) + + add_7 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_2.outputs["Y"], 1: separate_xyz_1.outputs["Y"]}, + ) + + subtract_9 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_2.outputs["Z"], 1: separate_xyz_1.outputs["Z"]}, + attrs={"operation": "SUBTRACT"}, + ) + + combine_xyz_7 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": subtract_8, "Y": add_7, "Z": subtract_9} + ) + + transform_3 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": store_named_attribute_5, + "Translation": combine_xyz_7, + }, + ) + + switch_3 = nw.new_node( + Nodes.Switch, input_kwargs={1: group_input.outputs["Switch4"], 14: transform_3} + ) + + combine_xyz_9 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": separate_xyz.outputs["X"], + "Y": group_input.outputs["Thickness"], + "Z": separate_xyz.outputs["Z"], + }, + ) + + cube_4 = nw.new_node( + Nodes.MeshCube, + input_kwargs={ + "Size": combine_xyz_9, + "Vertices X": group_input.outputs["Resolution"], + "Vertices Y": group_input.outputs["Resolution"], + "Vertices Z": group_input.outputs["Resolution"], + }, + ) + + store_named_attribute_2 = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": cube_4.outputs["Mesh"], + "Name": "uv_map", + 3: cube_4.outputs["UV Map"], + }, + attrs={"domain": "CORNER", "data_type": "FLOAT_VECTOR"}, + ) + + add_8 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_1.outputs["X"], 1: separate_xyz_2.outputs["X"]}, + ) + + add_9 = nw.new_node( + Nodes.Math, input_kwargs={0: separate_xyz_1.outputs["Y"], 1: multiply_1} + ) + + add_10 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_1.outputs["Z"], 1: separate_xyz_2.outputs["Z"]}, + ) + + combine_xyz_8 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": add_8, "Y": add_9, "Z": add_10} + ) + + transform_4 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": store_named_attribute_2, + "Translation": combine_xyz_8, + }, + ) + + switch_4 = nw.new_node( + Nodes.Switch, input_kwargs={1: group_input.outputs["Switch5"], 14: transform_4} + ) + + combine_xyz_10 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": separate_xyz.outputs["X"], + "Y": group_input.outputs["Thickness"], + "Z": separate_xyz.outputs["Z"], + }, + ) + + cube_5 = nw.new_node( + Nodes.MeshCube, + input_kwargs={ + "Size": combine_xyz_10, + "Vertices X": group_input.outputs["Resolution"], + "Vertices Y": group_input.outputs["Resolution"], + "Vertices Z": group_input.outputs["Resolution"], + }, + ) + + store_named_attribute_3 = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": cube_5.outputs["Mesh"], + "Name": "uv_map", + 3: cube_5.outputs["UV Map"], + }, + attrs={"domain": "CORNER", "data_type": "FLOAT_VECTOR"}, + ) + + add_11 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_2.outputs["X"], 1: separate_xyz_1.outputs["X"]}, + ) + + subtract_10 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["Y"], 1: multiply_1}, + attrs={"operation": "SUBTRACT"}, + ) + + add_12 = nw.new_node( + 
Nodes.Math, + input_kwargs={0: separate_xyz_2.outputs["Z"], 1: separate_xyz_1.outputs["Z"]}, + ) + + combine_xyz_11 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": add_11, "Y": subtract_10, "Z": add_12} + ) + + transform_5 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": store_named_attribute_3, + "Translation": combine_xyz_11, + }, + ) + + switch_5 = nw.new_node( + Nodes.Switch, input_kwargs={1: group_input.outputs["Switch6"], 14: transform_5} + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={ + "Geometry": [ + switch_2.outputs[6], + switch_1.outputs[6], + switch.outputs[6], + switch_3.outputs[6], + switch_4.outputs[6], + switch_5.outputs[6], + ] + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": join_geometry}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_dishwasher_geometry", singleton=False, type="GeometryNodeTree" +) +def nodegroup_dishwasher_geometry(nw: NodeWrangler, preprocess: bool = False): + # Code generated using version 2.6.5 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "Depth", 1.0000), + ("NodeSocketFloat", "Width", 1.0000), + ("NodeSocketFloat", "Height", 1.0000), + ("NodeSocketFloat", "DoorThickness", 0.0700), + ("NodeSocketFloat", "DoorRotation", 0.0000), + ("NodeSocketFloatDistance", "RackRadius", 0.0100), + ("NodeSocketInt", "RackAmount", 2), + ("NodeSocketString", "BrandName", "BrandName"), + ("NodeSocketMaterial", "Surface", None), + ("NodeSocketMaterial", "Front", None), + ("NodeSocketMaterial", "Top", None), + ("NodeSocketMaterial", "WhiteMetal", None), + ("NodeSocketMaterial", "NameMaterial", None), + ], + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": group_input.outputs["Depth"], + "Y": group_input.outputs["Width"], + "Z": group_input.outputs["Height"], + }, + ) + + hollowcube = nw.new_node( + nodegroup_hollow_cube().name, + input_kwargs={ + "Size": combine_xyz, + "Thickness": group_input.outputs["DoorThickness"], + "Switch2": True, + "Switch4": True, + }, + ) + + set_material_1 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": hollowcube, + "Material": group_input.outputs["Surface"], + }, + ) + + subdivide_mesh = nw.new_node( + Nodes.SubdivideMesh, input_kwargs={"Mesh": set_material_1, "Level": 0} + ) + + # set_shade_smooth_2 = nw.new_node(Nodes.SetShadeSmooth, input_kwargs={'Geometry': subdivide_mesh}) + + body = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": subdivide_mesh}, label="Body" + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": group_input.outputs["DoorThickness"], + "Y": group_input.outputs["Width"], + "Z": group_input.outputs["Height"], + }, + ) + + combine_xyz_2 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": group_input.outputs["Depth"]} + ) + + cube = nw.new_node( + nodegroup_cube().name, + input_kwargs={"Size": combine_xyz_1, "Pos": combine_xyz_2}, + ) + + position = nw.new_node(Nodes.InputPosition) + + center = nw.new_node( + nodegroup_center().name, + input_kwargs={ + "Geometry": cube, + "Vector": position, + "MarginX": -1.0000, + "MarginY": 0.1000, + "MarginZ": 0.1500, + }, + ) + + set_material_2 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": cube, + "Selection": center.outputs["In"], + "Material": group_input.outputs["Front"], + }, + ) + + set_material_3 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": set_material_2, + 
"Selection": center.outputs["Out"], + "Material": group_input.outputs["Surface"], + }, + ) + + # set_shade_smooth = nw.new_node(Nodes.SetShadeSmooth, input_kwargs={'Geometry': set_material_3}) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Width"], 1: 0.0500}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Width"], 1: 0.8000}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, input_kwargs={0: multiply}, attrs={"operation": "MULTIPLY"} + ) + + handle = nw.new_node( + nodegroup_handle().name, + input_kwargs={"width": multiply, "length": multiply_1, "thickness": multiply_2}, + ) + + add = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["Depth"], + 1: group_input.outputs["DoorThickness"], + }, + ) + + multiply_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Width"], 1: 0.1000}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_4 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Height"], 1: 0.9500}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_13 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": add, "Y": multiply_3, "Z": multiply_4} + ) + + transform_1 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": handle, + "Translation": combine_xyz_13, + "Rotation": (0.0000, 1.5708, 0.0000), + }, + ) + + set_material_8 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": transform_1, + "Material": group_input.outputs["WhiteMetal"], + }, + ) + + add_1 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["Depth"], + 1: group_input.outputs["DoorThickness"], + }, + ) + + multiply_5 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Width"]}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_12 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": add_1, "Y": multiply_5, "Z": 0.0300} + ) + + multiply_6 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Height"], 1: 0.0500}, + attrs={"operation": "MULTIPLY"}, + ) + + text = nw.new_node( + nodegroup_text().name, + input_kwargs={ + "Translation": combine_xyz_12, + "String": group_input.outputs["BrandName"], + "Size": multiply_6, + }, + ) + + text = complete_no_bevel(nw, text, preprocess) + + set_material_9 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": text, + "Material": group_input.outputs["NameMaterial"], + }, + ) + + set_material_8 = complete_bevel(nw, set_material_8, preprocess) + + join_geometry_3 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [set_material_3, set_material_8, set_material_9]}, + ) + + geometry_to_instance = nw.new_node( + "GeometryNodeGeometryToInstance", input_kwargs={"Geometry": join_geometry_3} + ) + + y = nw.scalar_multiply( + group_input.outputs["DoorRotation"], 1 if not preprocess else 0 + ) + + combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Y": y}) + + combine_xyz_4 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": group_input.outputs["Depth"]} + ) + + rotate_instances = nw.new_node( + Nodes.RotateInstances, + input_kwargs={ + "Instances": geometry_to_instance, + "Rotation": combine_xyz_3, + "Pivot Point": combine_xyz_4, + }, + ) + + rotate_instances = nw.new_node(Nodes.RealizeInstances, [rotate_instances]) + + door = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": rotate_instances}, label="door" + ) + + multiply_7 = nw.new_node( + Nodes.Math, + input_kwargs={0: 
group_input.outputs["DoorThickness"], 1: 2.1000}, + attrs={"operation": "MULTIPLY"}, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Depth"], 1: multiply_7}, + attrs={"operation": "SUBTRACT"}, + ) + + multiply_8 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["DoorThickness"], 1: 2.1000}, + attrs={"operation": "MULTIPLY"}, + ) + + subtract_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Width"], 1: multiply_8}, + attrs={"operation": "SUBTRACT"}, + ) + + dishrack = nw.new_node( + nodegroup_dish_rack().name, + input_kwargs={ + "Depth": subtract_1, + "Width": subtract, + "Radius": group_input.outputs["RackRadius"], + "Amount": 4, + "Height": 0.1000, + }, + ) + + geometry_to_instance_1 = nw.new_node( + "GeometryNodeGeometryToInstance", input_kwargs={"Geometry": dishrack} + ) + + duplicate_elements = nw.new_node( + Nodes.DuplicateElements, + input_kwargs={ + "Geometry": geometry_to_instance_1, + "Amount": group_input.outputs["RackAmount"], + }, + attrs={"domain": "INSTANCE"}, + ) + + multiply_9 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Depth"]}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_10 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Width"]}, + attrs={"operation": "MULTIPLY"}, + ) + + add_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: duplicate_elements.outputs["Duplicate Index"], 1: 1.0000}, + ) + + multiply_11 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["DoorThickness"], 1: 2.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + subtract_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Height"], 1: multiply_11}, + attrs={"operation": "SUBTRACT"}, + ) + + add_3 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["RackAmount"], 1: 1.0000} + ) + + divide = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract_2, 1: add_3}, + attrs={"operation": "DIVIDE"}, + ) + + multiply_12 = nw.new_node( + Nodes.Math, input_kwargs={0: add_2, 1: divide}, attrs={"operation": "MULTIPLY"} + ) + + combine_xyz_5 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": multiply_9, "Y": multiply_10, "Z": multiply_12}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": duplicate_elements.outputs["Geometry"], + "Offset": combine_xyz_5, + }, + ) + + set_material = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": set_position, + "Material": group_input.outputs["Surface"], + }, + ) + + set_material = nw.new_node(Nodes.RealizeInstances, [set_material]) + + racks = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": set_material}, label="racks" + ) + + add_4 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["Depth"], + 1: group_input.outputs["DoorThickness"], + }, + ) + + reroute_10 = nw.new_node(Nodes.Reroute, input_kwargs={"Input": add_4}) + + reroute_11 = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": group_input.outputs["Width"]} + ) + + reroute_8 = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": group_input.outputs["DoorThickness"]} + ) + + combine_xyz_6 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": reroute_10, "Y": reroute_11, "Z": reroute_8}, + ) + + reroute_9 = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": group_input.outputs["Height"]} + ) + + combine_xyz_7 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": reroute_9}) + + cube_1 = nw.new_node( + nodegroup_cube().name, + input_kwargs={"Size": combine_xyz_6, "Pos": 
combine_xyz_7}, + ) + + set_material_5 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={"Geometry": cube_1, "Material": group_input.outputs["Top"]}, + ) + + # set_shade_smooth_1 = nw.new_node(Nodes.SetShadeSmooth, input_kwargs={'Geometry': set_material_5}) + + join_geometry_2 = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": set_material_5} + ) + + heater = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": join_geometry_2}, label="heater" + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [body, door, racks, heater]} + ) + + geometry = nw.new_node(Nodes.RealizeInstances, [join_geometry]) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Geometry": geometry}) diff --git a/infinigen/assets/objects/appliances/microwave.py b/infinigen/assets/objects/appliances/microwave.py new file mode 100644 index 000000000..c5a373d6c --- /dev/null +++ b/infinigen/assets/objects/appliances/microwave.py @@ -0,0 +1,849 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Hongyu Wen + + +import numpy as np +from numpy.random import uniform as U + +from infinigen.assets.material_assignments import AssetList +from infinigen.assets.utils.misc import generate_text +from infinigen.core import surface +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.util import blender as butil +from infinigen.core.util.bevelling import add_bevel, complete_no_bevel, get_bevel_edges +from infinigen.core.util.blender import delete +from infinigen.core.util.math import FixedSeed + + +class MicrowaveFactory(AssetFactory): + def __init__(self, factory_seed, coarse=False, dimensions=[1.0, 1.0, 1.0]): + super(MicrowaveFactory, self).__init__(factory_seed, coarse=coarse) + + self.dimensions = dimensions + with FixedSeed(factory_seed): + self.params = self.sample_parameters(dimensions) + self.material_params, self.scratch, self.edge_wear = ( + self.get_material_params() + ) + self.params.update(self.material_params) + + def get_material_params(self): + material_assignments = AssetList["MicrowaveFactory"]() + params = { + "Surface": material_assignments["surface"].assign_material(), + "Back": material_assignments["back"].assign_material(), + "BlackGlass": material_assignments["black_glass"].assign_material(), + "Glass": material_assignments["glass"].assign_material(), + } + wrapped_params = { + k: surface.shaderfunc_to_material(v) for k, v in params.items() + } + + scratch_prob, edge_wear_prob = material_assignments["wear_tear_prob"] + scratch, edge_wear = material_assignments["wear_tear"] + + is_scratch = np.random.uniform() < scratch_prob + is_edge_wear = np.random.uniform() < edge_wear_prob + if not is_scratch: + scratch = None + + if not is_edge_wear: + edge_wear = None + + return wrapped_params, scratch, edge_wear + + @staticmethod + def sample_parameters(dimensions): + depth = U(0.5, 0.7) + width = U(0.6, 1.0) + height = U(0.35, 0.45) + panel_width = U(0.2, 0.4) + margin_z = U(0.05, 0.1) + door_thickness = U(0.02, 0.04) + door_margin = U(0.03, 0.1) + door_rotation = 0 # Set to 0 for now + brand_name = generate_text() + params = { + "Depth": depth, + "Width": width, + "Height": height, + "PanelWidth": panel_width, + "MarginZ": margin_z, + "DoorThickness": door_thickness, + "DoorMargin": 
door_margin, + "DoorRotation": door_rotation, + "BrandName": brand_name, + } + return params + + def create_asset(self, **params): + obj = butil.spawn_cube() + butil.modify_mesh( + obj, + "NODES", + node_group=nodegroup_microwave_geometry(preprocess=True), + ng_inputs=self.params, + apply=True, + ) + bevel_edges = get_bevel_edges(obj) + delete(obj) + obj = butil.spawn_cube() + butil.modify_mesh( + obj, + "NODES", + node_group=nodegroup_microwave_geometry(), + ng_inputs=self.params, + apply=True, + ) + obj = add_bevel(obj, bevel_edges) + + return obj + + def finalize_assets(self, assets): + if self.scratch: + self.scratch.apply(assets) + if self.edge_wear: + self.edge_wear.apply(assets) + + +@node_utils.to_nodegroup("nodegroup_plate", singleton=False, type="GeometryNodeTree") +def nodegroup_plate(nw: NodeWrangler): + # Code generated using version 2.6.5 of the node_transpiler + + curve_circle = nw.new_node(Nodes.CurveCircle, input_kwargs={"Resolution": 128}) + + bezier_segment = nw.new_node( + Nodes.CurveBezierSegment, + input_kwargs={ + "Start Handle": (0.0000, 0.0000, 0.0000), + "End": (1.0000, 0.0000, 0.4000), + }, + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": bezier_segment, "Rotation": (1.5708, 0.0000, 0.0000)}, + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": curve_circle.outputs["Curve"], + "Profile Curve": transform, + }, + ) + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[("NodeSocketVectorXYZ", "Scale", (1.0000, 1.0000, 1.0000))], + ) + + transform_1 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": curve_to_mesh, "Scale": group_input.outputs["Scale"]}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Mesh": transform_1}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup("nodegroup_text", singleton=False, type="GeometryNodeTree") +def nodegroup_text(nw: NodeWrangler): + # Code generated using version 2.6.5 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVectorTranslation", "Translation", (1.5000, 0.0000, 0.0000)), + ("NodeSocketString", "String", "BrandName"), + ("NodeSocketFloatDistance", "Size", 0.0500), + ("NodeSocketFloat", "Offset Scale", 0.0020), + ], + ) + + string_to_curves = nw.new_node( + "GeometryNodeStringToCurves", + input_kwargs={ + "String": group_input.outputs["String"], + "Size": group_input.outputs["Size"], + }, + attrs={"align_y": "BOTTOM_BASELINE", "align_x": "CENTER"}, + ) + + fill_curve = nw.new_node( + Nodes.FillCurve, + input_kwargs={"Curve": string_to_curves.outputs["Curve Instances"]}, + ) + + extrude_mesh = nw.new_node( + Nodes.ExtrudeMesh, + input_kwargs={ + "Mesh": fill_curve, + "Offset Scale": group_input.outputs["Offset Scale"], + }, + ) + + transform_1 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": extrude_mesh.outputs["Mesh"], + "Translation": group_input.outputs["Translation"], + "Rotation": (1.5708, 0.0000, 1.5708), + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": transform_1}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup("nodegroup_center", singleton=False, type="GeometryNodeTree") +def nodegroup_center(nw: NodeWrangler): + # Code generated using version 2.6.5 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketVector", "Vector", (0.0000, 0.0000, 0.0000)), + 
("NodeSocketFloat", "MarginX", 0.5000), + ("NodeSocketFloat", "MarginY", 0.0000), + ("NodeSocketFloat", "MarginZ", 0.0000), + ], + ) + + bounding_box = nw.new_node( + Nodes.BoundingBox, input_kwargs={"Geometry": group_input.outputs["Geometry"]} + ) + + subtract = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: group_input.outputs["Vector"], 1: bounding_box.outputs["Min"]}, + attrs={"operation": "SUBTRACT"}, + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": subtract.outputs["Vector"]} + ) + + greater_than = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["X"], 1: group_input.outputs["MarginX"]}, + attrs={"operation": "GREATER_THAN", "use_clamp": True}, + ) + + subtract_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: bounding_box.outputs["Max"], 1: group_input.outputs["Vector"]}, + attrs={"operation": "SUBTRACT"}, + ) + + separate_xyz_1 = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": subtract_1.outputs["Vector"]} + ) + + greater_than_1 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: separate_xyz_1.outputs["X"], + 1: group_input.outputs["MarginX"], + }, + attrs={"operation": "GREATER_THAN", "use_clamp": True}, + ) + + op_and = nw.new_node( + Nodes.BooleanMath, input_kwargs={0: greater_than, 1: greater_than_1} + ) + + greater_than_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["Y"], 1: group_input.outputs["MarginY"]}, + attrs={"operation": "GREATER_THAN"}, + ) + + greater_than_3 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: separate_xyz_1.outputs["Y"], + 1: group_input.outputs["MarginY"], + }, + attrs={"operation": "GREATER_THAN", "use_clamp": True}, + ) + + op_and_1 = nw.new_node( + Nodes.BooleanMath, input_kwargs={0: greater_than_2, 1: greater_than_3} + ) + + op_and_2 = nw.new_node(Nodes.BooleanMath, input_kwargs={0: op_and, 1: op_and_1}) + + greater_than_4 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["Z"], 1: group_input.outputs["MarginZ"]}, + attrs={"operation": "GREATER_THAN", "use_clamp": True}, + ) + + greater_than_5 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: separate_xyz_1.outputs["Z"], + 1: group_input.outputs["MarginZ"], + }, + attrs={"operation": "GREATER_THAN", "use_clamp": True}, + ) + + op_and_3 = nw.new_node( + Nodes.BooleanMath, input_kwargs={0: greater_than_4, 1: greater_than_5} + ) + + op_and_4 = nw.new_node(Nodes.BooleanMath, input_kwargs={0: op_and_2, 1: op_and_3}) + + op_not = nw.new_node( + Nodes.BooleanMath, input_kwargs={0: op_and_4}, attrs={"operation": "NOT"} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"In": op_and_4, "Out": op_not}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup("nodegroup_cube", singleton=False, type="GeometryNodeTree") +def nodegroup_cube(nw: NodeWrangler): + # Code generated using version 2.6.5 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVectorTranslation", "Size", (0.1000, 10.0000, 4.0000)), + ("NodeSocketVector", "Pos", (0.0000, 0.0000, 0.0000)), + ("NodeSocketInt", "Resolution", 10), + ], + ) + + cube = nw.new_node( + Nodes.MeshCube, + input_kwargs={ + "Size": group_input.outputs["Size"], + "Vertices X": group_input.outputs["Resolution"], + "Vertices Y": group_input.outputs["Resolution"], + "Vertices Z": group_input.outputs["Resolution"], + }, + ) + + store_named_attribute_1 = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": cube.outputs["Mesh"], + "Name": "uv_map", + 3: 
cube.outputs["UV Map"], + }, + attrs={"domain": "CORNER", "data_type": "FLOAT_VECTOR"}, + ) + + store_named_attribute = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={"Geometry": store_named_attribute_1, "Name": "uv_map"}, + attrs={"domain": "CORNER", "data_type": "FLOAT_VECTOR"}, + ) + + multiply_add = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: group_input.outputs["Size"], + 1: (0.5000, 0.5000, 0.5000), + 2: group_input.outputs["Pos"], + }, + attrs={"operation": "MULTIPLY_ADD"}, + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": store_named_attribute, + "Translation": multiply_add.outputs["Vector"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": transform}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_microwave_geometry", singleton=False, type="GeometryNodeTree" +) +def nodegroup_microwave_geometry(nw: NodeWrangler, preprocess: bool = False): + # Code generated using version 2.6.5 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "Depth", 0.0000), + ("NodeSocketFloat", "Width", 0.0000), + ("NodeSocketFloat", "Height", 0.0000), + ("NodeSocketFloat", "PanelWidth", 0.5000), + ("NodeSocketFloat", "MarginZ", 0.0000), + ("NodeSocketFloat", "DoorThickness", 0.0000), + ("NodeSocketFloat", "DoorMargin", 0.0500), + ("NodeSocketFloat", "DoorRotation", 0.0000), + ("NodeSocketString", "BrandName", "BrandName"), + ("NodeSocketMaterial", "Surface", None), + ("NodeSocketMaterial", "Back", None), + ("NodeSocketMaterial", "BlackGlass", None), + ("NodeSocketMaterial", "Glass", None), + ], + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": group_input.outputs["Depth"], + "Y": group_input.outputs["Width"], + "Z": group_input.outputs["Height"], + }, + ) + + cube = nw.new_node(nodegroup_cube().name, input_kwargs={"Size": combine_xyz}) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["Width"], + 1: group_input.outputs["PanelWidth"], + }, + attrs={"operation": "SUBTRACT"}, + ) + + subtract_1 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["Height"], + 1: group_input.outputs["MarginZ"], + }, + attrs={"operation": "SUBTRACT"}, + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": group_input.outputs["Depth"], + "Y": subtract, + "Z": subtract_1, + }, + ) + + scale = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: group_input.outputs["MarginZ"], "Scale": 0.5000}, + attrs={"operation": "SCALE"}, + ) + + cube_1 = nw.new_node( + nodegroup_cube().name, + input_kwargs={"Size": combine_xyz_1, "Pos": scale.outputs["Vector"]}, + ) + + difference = nw.new_node( + Nodes.MeshBoolean, input_kwargs={"Mesh 1": cube, "Mesh 2": cube_1} + ) + + cube_2 = nw.new_node( + nodegroup_cube().name, + input_kwargs={ + "Size": (0.0300, 0.0300, 0.0100), + "Pos": (0.1000, 0.0000, 0.0500), + "Resolution": 2, + }, + ) + + geometry_to_instance_1 = nw.new_node( + "GeometryNodeGeometryToInstance", input_kwargs={"Geometry": cube_2} + ) + + duplicate_elements = nw.new_node( + Nodes.DuplicateElements, + input_kwargs={"Geometry": geometry_to_instance_1, "Amount": 10}, + attrs={"domain": "INSTANCE"}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: duplicate_elements.outputs["Duplicate Index"], 1: 0.0400}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_7 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"X": multiply}) + + 
set_position_1 = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": duplicate_elements.outputs["Geometry"], + "Offset": combine_xyz_7, + }, + ) + + duplicate_elements_1 = nw.new_node( + Nodes.DuplicateElements, + input_kwargs={"Geometry": set_position_1, "Amount": 7}, + attrs={"domain": "INSTANCE"}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: duplicate_elements_1.outputs["Duplicate Index"], 1: 0.0200}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_8 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply_1}) + + set_position_2 = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": duplicate_elements_1.outputs["Geometry"], + "Offset": combine_xyz_8, + }, + ) + + difference_1 = nw.new_node( + Nodes.MeshBoolean, + input_kwargs={ + "Mesh 1": difference.outputs["Mesh"], + "Mesh 2": [duplicate_elements_1.outputs["Geometry"], set_position_2], + }, + ) + + set_material_1 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": difference_1.outputs["Mesh"], + "Material": group_input.outputs["Back"], + }, + ) + + combine_xyz_2 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": group_input.outputs["DoorThickness"], + "Y": group_input.outputs["Width"], + "Z": group_input.outputs["Height"], + }, + ) + + combine_xyz_3 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": group_input.outputs["Depth"]} + ) + + cube_3 = nw.new_node( + nodegroup_cube().name, + input_kwargs={"Size": combine_xyz_2, "Pos": combine_xyz_3, "Resolution": 10}, + ) + + position = nw.new_node(Nodes.InputPosition) + + separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": position}) + + subtract_2 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["Width"], + 1: group_input.outputs["PanelWidth"], + }, + attrs={"operation": "SUBTRACT"}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["MarginZ"]}, + attrs={"operation": "MULTIPLY"}, + ) + + add = nw.new_node(Nodes.Math, input_kwargs={0: subtract_2, 1: multiply_2}) + + less_than = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["Y"], 1: add}, + attrs={"operation": "LESS_THAN"}, + ) + + separate_geometry = nw.new_node( + Nodes.SeparateGeometry, + input_kwargs={"Geometry": cube_3, "Selection": less_than}, + attrs={"domain": "FACE"}, + ) + + convex_hull = nw.new_node( + Nodes.ConvexHull, + input_kwargs={"Geometry": separate_geometry.outputs["Selection"]}, + ) + + subdivide_mesh = nw.new_node( + Nodes.SubdivideMesh, input_kwargs={"Mesh": convex_hull, "Level": 0} + ) + + position_1 = nw.new_node(Nodes.InputPosition) + + center = nw.new_node( + nodegroup_center().name, + input_kwargs={ + "Geometry": subdivide_mesh, + "Vector": position_1, + "MarginX": -1.0000, + "MarginZ": group_input.outputs["DoorMargin"], + }, + ) + + set_material_3 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": subdivide_mesh, + "Selection": center.outputs["In"], + "Material": group_input.outputs["BlackGlass"], + }, + ) + + set_material_2 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": set_material_3, + "Selection": center.outputs["Out"], + "Material": group_input.outputs["Surface"], + }, + ) + + add_1 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["Depth"], + 1: group_input.outputs["DoorThickness"], + }, + ) + + bounding_box_1 = nw.new_node( + Nodes.BoundingBox, input_kwargs={"Geometry": subdivide_mesh} + ) + + add_2 = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: 
bounding_box_1.outputs["Min"], + 1: bounding_box_1.outputs["Max"], + }, + ) + + scale_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: add_2.outputs["Vector"], "Scale": 0.5000}, + attrs={"operation": "SCALE"}, + ) + + separate_xyz_3 = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": scale_1.outputs["Vector"]} + ) + + separate_xyz_4 = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": bounding_box_1.outputs["Min"]} + ) + + add_3 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: separate_xyz_4.outputs["Z"], + 1: group_input.outputs["DoorMargin"], + }, + ) + + combine_xyz_5 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": add_1, "Y": separate_xyz_3.outputs["Y"], "Z": add_3}, + ) + + text = nw.new_node( + nodegroup_text().name, + input_kwargs={ + "Translation": combine_xyz_5, + "String": group_input.outputs["BrandName"], + "Size": 0.0300, + "Offset Scale": 0.0020, + }, + ) + + text = complete_no_bevel(nw, text, preprocess) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [set_material_2, text]} + ) + + geometry_to_instance = nw.new_node( + "GeometryNodeGeometryToInstance", input_kwargs={"Geometry": join_geometry_1} + ) + + z = nw.scalar_multiply( + group_input.outputs["DoorRotation"], 1 if not preprocess else 0 + ) + + combine_xyz_6 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": z}) + + rotate_instances = nw.new_node( + Nodes.RotateInstances, + input_kwargs={ + "Instances": geometry_to_instance, + "Rotation": combine_xyz_6, + "Pivot Point": combine_xyz_3, + }, + ) + + plate = nw.new_node( + nodegroup_plate().name, input_kwargs={"Scale": (0.1000, 0.1000, 0.1000)} + ) + + multiply_add = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: combine_xyz_1, + 1: (0.5000, 0.5000, 0.0000), + 2: scale.outputs["Vector"], + }, + attrs={"operation": "MULTIPLY_ADD"}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={"Geometry": plate, "Offset": multiply_add.outputs["Vector"]}, + ) + + set_material = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": set_position, + "Material": group_input.outputs["Glass"], + }, + ) + + convex_hull_1 = nw.new_node( + Nodes.ConvexHull, + input_kwargs={"Geometry": separate_geometry.outputs["Inverted"]}, + ) + + subdivide_mesh_1 = nw.new_node( + Nodes.SubdivideMesh, input_kwargs={"Mesh": convex_hull_1, "Level": 0} + ) + + position_2 = nw.new_node(Nodes.InputPosition) + + center_1 = nw.new_node( + nodegroup_center().name, + input_kwargs={ + "Geometry": subdivide_mesh_1, + "Vector": position_2, + "MarginX": -1.0000, + "MarginY": 0.0010, + "MarginZ": group_input.outputs["DoorMargin"], + }, + ) + + set_material_4 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": subdivide_mesh_1, + "Selection": center_1.outputs["In"], + "Material": group_input.outputs["BlackGlass"], + }, + ) + + set_material_5 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": set_material_4, + "Selection": center_1.outputs["Out"], + "Material": group_input.outputs["Surface"], + }, + ) + + add_4 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["Depth"], + 1: group_input.outputs["DoorThickness"], + }, + ) + + bounding_box = nw.new_node( + Nodes.BoundingBox, input_kwargs={"Geometry": subdivide_mesh_1} + ) + + add_5 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: bounding_box.outputs["Min"], 1: bounding_box.outputs["Max"]}, + ) + + scale_2 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: add_5.outputs["Vector"], "Scale": 0.5000}, + 
attrs={"operation": "SCALE"}, + ) + + separate_xyz_1 = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": scale_2.outputs["Vector"]} + ) + + separate_xyz_2 = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": bounding_box.outputs["Max"]} + ) + + subtract_3 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: separate_xyz_2.outputs["Z"], + 1: group_input.outputs["DoorMargin"], + }, + attrs={"operation": "SUBTRACT"}, + ) + + add_6 = nw.new_node(Nodes.Math, input_kwargs={0: subtract_3, 1: -0.1000}) + + combine_xyz_4 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": add_4, "Y": separate_xyz_1.outputs["Y"], "Z": add_6}, + ) + + text_1 = nw.new_node( + nodegroup_text().name, + input_kwargs={ + "Translation": combine_xyz_4, + "String": "12:01", + "Offset Scale": 0.0050, + }, + ) + + text_1 = complete_no_bevel(nw, text_1, preprocess) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={ + "Geometry": [ + set_material_1, + rotate_instances, + set_material, + set_material_5, + text_1, + ] + }, + ) + geometry = nw.new_node(Nodes.RealizeInstances, [join_geometry]) + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": geometry}, + attrs={"is_active_output": True}, + ) diff --git a/infinigen/assets/objects/appliances/oven.py b/infinigen/assets/objects/appliances/oven.py new file mode 100644 index 000000000..5f3179693 --- /dev/null +++ b/infinigen/assets/objects/appliances/oven.py @@ -0,0 +1,2456 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Hongyu Wen + + +import bpy +import numpy as np +from numpy.random import normal as N +from numpy.random import randint as RI +from numpy.random import uniform as U + +from infinigen.assets.material_assignments import AssetList +from infinigen.assets.utils.misc import generate_text +from infinigen.core import surface +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.util import blender as butil +from infinigen.core.util.bevelling import ( + add_bevel, + complete_bevel, + complete_no_bevel, + get_bevel_edges, +) +from infinigen.core.util.blender import delete +from infinigen.core.util.math import FixedSeed + + +class OvenFactory(AssetFactory): + def __init__(self, factory_seed, coarse=False, dimensions=[1.0, 1.0, 1.0]): + super(OvenFactory, self).__init__(factory_seed, coarse=coarse) + + self.dimensions = dimensions + with FixedSeed(factory_seed): + self.params, self.geometry_node_params = self.sample_parameters(dimensions) + self.material_params, self.scratch, self.edge_wear = ( + self.get_material_params() + ) + self.geometry_node_params.update(self.material_params) + + def get_material_params(self): + material_assignments = AssetList["OvenFactory"]() + params = { + "Surface": material_assignments["surface"].assign_material(), + "Back": material_assignments["back"].assign_material(), + "WhiteMetal": material_assignments["white_metal"].assign_material(), + "SuperBlackGlass": material_assignments["black_glass"].assign_material(), + "Glass": material_assignments["glass"].assign_material(), + } + wrapped_params = { + k: surface.shaderfunc_to_material(v) for k, v in params.items() + } + + scratch_prob, edge_wear_prob = material_assignments["wear_tear_prob"] + scratch, edge_wear = material_assignments["wear_tear"] + + is_scratch = 
np.random.uniform() < scratch_prob + is_edge_wear = np.random.uniform() < edge_wear_prob + if not is_scratch: + scratch = None + + if not is_edge_wear: + edge_wear = None + + return wrapped_params, scratch, edge_wear + + @staticmethod + def sample_parameters(dimensions): + # depth, width, height = dimensions + depth = 1 + N(0, 0.1) + width = 1 + N(0, 0.1) + height = 1 + N(0, 0.1) + door_thickness = U(0.05, 0.1) * depth + door_rotation = 0 # Set to 0 for now + + rack_radius = U(0.01, 0.02) * depth + rack_h_amount = RI(2, 4) + rack_d_amount = RI(4, 6) + + panel_height = U(0.2, 0.4) * height + panel_thickness = U(0.15, 0.25) * depth + botton_amount = RI(1, 3) * 2 + botton_radius = U(0.05, 0.1) * width + botton_thickness = U(0.02, 0.04) * depth + heat_radius_ratio = U(0.1, 0.2) + brand_name = generate_text() + + use_gas = RI(2) + n_grids = RI(2, 5) + grids = [RI(1, 4) for i in range(n_grids)] + branches = 2 * RI(2, 9) + grate_thickness = U(0.01, 0.03) + center_ratio = U(0.05, 0.15) + middle_ratio = U(0.5, 0.7) + + params = { + "UseGas": use_gas, + "Grids": grids, + "Branches": branches, + "GrateThickness": grate_thickness, + "CenterRatio": center_ratio, + "MiddleRatio": middle_ratio, + "Depth": depth, + "Width": width, + "Height": height, + "DoorThickness": door_thickness, + "DoorRotation": door_rotation, + "RackRadius": rack_radius, + "RackHAmount": rack_h_amount, + "RackDAmount": rack_d_amount, + "PanelHeight": panel_height, + "PanelThickness": panel_thickness, + "BottonAmount": botton_amount, + "BottonRadius": botton_radius, + "BottonThickness": botton_thickness, + "HeaterRadiusRatio": heat_radius_ratio, + "BrandName": brand_name, + } + geometry_node_params = { + k: params[k] + for k in params.keys() + if k + not in [ + "UseGas", + "Grids", + "Branches", + "GrateThickness", + "CenterRatio", + "MiddleRatio", + ] + } + return params, geometry_node_params + + def create_placeholder(self, **kwargs) -> bpy.types.Object: + # x, y, z = self.params["Depth"], self.params["Width"], self.params["Height"] + # box = new_bbox(-x/2 - 0.05, x/2 + self.params["DoorThickness"] + 0.1, -y/2, y/2, 0, z + 0.1) + # tagging.tag_object(box, f'{PREFIX}{t.Subpart.SupportSurface.value}', read_normal(box)[:, -1] > .5) + # box_top = new_bbox(-x/2 - 0.05, -x/2 - 0.05 + self.params["PanelThickness"], -y/2, y/2, z + 0.1, z+ 0.1 + 0.5) + # box_top.rotation_euler[1] = -0.1 + # box = butil.join_objects([box, box_top]) + obj = butil.spawn_cube() + return butil.modify_mesh( + obj, + "NODES", + node_group=nodegroup_oven_geometry( + use_gas=self.params["UseGas"], is_placeholder=True + ), + ng_inputs=self.geometry_node_params, + apply=True, + ) + + def create_asset(self, **params): + obj = butil.spawn_cube() + butil.modify_mesh( + obj, + "NODES", + node_group=nodegroup_oven_geometry( + preprocess=True, use_gas=self.params["UseGas"] + ), + ng_inputs=self.geometry_node_params, + apply=True, + ) + bevel_edges = get_bevel_edges(obj) + delete(obj) + obj = butil.spawn_cube() + butil.modify_mesh( + obj, + "NODES", + node_group=nodegroup_oven_geometry(use_gas=self.params["UseGas"]), + ng_inputs=self.geometry_node_params, + apply=True, + ) + obj = add_bevel(obj, bevel_edges, offset=0.01) + if not self.params["UseGas"]: + return obj + width, depth = ( + self.params["Width"], + self.params["Depth"] + 2 * self.params["DoorThickness"], + ) + grate_width, grate_depth = width * 0.8, depth * 0.6 + grate_thickness = self.params["GrateThickness"] + grates = gas_grates( + width, + depth, + grate_width, + grate_depth, + self.params["Height"] + 
self.params["DoorThickness"] - grate_thickness, + grate_thickness, + self.params["Grids"], + self.params["Branches"], + self.params["CenterRatio"], + self.params["MiddleRatio"], + ) + grates.data.materials.append(self.geometry_node_params["WhiteMetal"]) + obj.data.materials.append(self.geometry_node_params["Back"]) + with butil.SelectObjects(obj): + obj.active_material_index = len(obj.material_slots) - 1 + for i in range(len(obj.material_slots)): + bpy.ops.object.material_slot_move(direction="UP") + hollow = butil.spawn_cube( + size=1, + location=( + depth / 2, + width / 2, + self.params["Height"] + self.params["DoorThickness"], + ), + scale=( + grate_depth + grate_thickness, + grate_width + grate_thickness, + grate_thickness * 2, + ), + ) + with butil.SelectObjects(hollow): + bpy.ops.object.modifier_add(type="BEVEL") + bpy.context.object.modifiers["Bevel"].segments = 8 + bpy.context.object.modifiers["Bevel"].width = grate_thickness + bpy.ops.object.modifier_apply(modifier="Bevel") + with butil.SelectObjects(obj): + bpy.ops.object.modifier_add(type="BOOLEAN") + bpy.context.object.modifiers["Boolean"].object = hollow + bpy.context.object.modifiers["Boolean"].use_hole_tolerant = True + bpy.ops.object.modifier_apply(modifier="Boolean") + butil.delete(hollow) + butil.join_objects([obj, grates], check_attributes=True) + + return obj + + def finalize_assets(self, assets): + if self.scratch: + self.scratch.apply(assets) + if self.edge_wear: + self.edge_wear.apply(assets) + + +def gas_grates( + width, + depth, + grate_width, + grate_depth, + height, + thickness, + grids, + branches, + center_ratio, + middle_ratio, +): + high_height = height + thickness * 0.9 + grates = [] + for i, n in enumerate(grids): + cubes = [ + butil.spawn_cube( + size=1, + location=( + depth / 2, + grate_width / len(grids) * i + + (width - grate_width) / 2 + + thickness / 2, + height, + ), + scale=(grate_depth + thickness, thickness, thickness), + name=None, + ), + butil.spawn_cube( + size=1, + location=( + depth / 2, + grate_width / len(grids) * (i + 1) + + (width - grate_width) / 2 + - thickness / 2, + height, + ), + scale=(grate_depth + thickness, thickness, thickness), + name=None, + ), + ] + for j in range(n + 1): + cubes.append( + butil.spawn_cube( + size=1, + location=( + grate_depth / n * j + (depth - grate_depth) / 2, + grate_width / len(grids) * (i + 0.5) + + (width - grate_width) / 2, + high_height, + ), + scale=(thickness, grate_width / len(grids), thickness), + ) + ) + for j in range(n): + min_dist = min(grate_width / len(grids) / 2, grate_depth / n / 2) + line_len = max(grate_width / len(grids) / 2, grate_depth / n / 2) - min_dist + center_dist = min_dist * center_ratio + middle_dist = min_dist * middle_ratio + if grate_width / len(grids) / 2 > grate_depth / n / 2: + x_center, y_center = center_dist, line_len + center_dist + x_middle, y_middle = middle_dist, line_len + middle_dist + x_full, y_full = min_dist, line_len + min_dist + else: + x_center, y_center = center_dist + line_len, center_dist + x_middle, y_middle = middle_dist + line_len, middle_dist + x_full, y_full = min_dist + line_len, min_dist + center = ( + (grate_depth / n * (j + 0.5) + (depth - grate_depth) / 2), + grate_width / len(grids) * (i + 0.5) + (width - grate_width) / 2, + ) + for k in range(branches): + angle = 2 * np.pi / branches * k + x0, y0 = x_center * np.cos(angle), y_center * np.sin(angle) + x1, y1 = x_middle * np.cos(angle), y_middle * np.sin(angle) + location = ( + center[0] + (x0 + x1) / 2, + center[1] + (y0 + y1) / 2, + 
high_height, + ) + scale = ((x0 - x1) ** 2 + (y0 - y1) ** 2) ** 0.5, thickness, thickness + actual_angle = np.arctan2(y1 - y0, x1 - x0) + obj = butil.spawn_cube(size=1, location=location, scale=scale) + bpy.context.object.rotation_euler[2] = actual_angle + cubes.append(obj) + x0, y0 = x1, y1 + if x_full - abs(x0) < y_full - abs(y0): + x1, y1 = x_full * np.sign(x0), y0 + else: + x1, y1 = x0, y_full * np.sign(y0) + location = ( + center[0] + (x0 + x1) / 2, + center[1] + (y0 + y1) / 2, + high_height, + ) + scale = ((x0 - x1) ** 2 + (y0 - y1) ** 2) ** 0.5, thickness, thickness + actual_angle = np.arctan2(y1 - y0, x1 - x0) + obj = butil.spawn_cube(size=1, location=location, scale=scale) + bpy.context.object.rotation_euler[2] = actual_angle + cubes.append(obj) + grates.append( + butil.spawn_cylinder( + center_dist + thickness, + thickness / 2, + location=(center[0], center[1], height), + ) + ) + obj = butil.boolean(cubes) + for i in range(1, len(cubes)): + butil.delete(cubes[i]) + with butil.SelectObjects(obj): + bpy.ops.object.modifier_add(type="REMESH") + remesh_type = "VOXEL" + bpy.context.object.modifiers["Remesh"].mode = remesh_type + bpy.context.object.modifiers["Remesh"].voxel_size = 0.004 + bpy.ops.object.modifier_apply(modifier="Remesh") + bpy.ops.object.modifier_add(type="SMOOTH") + bpy.context.object.modifiers["Smooth"].iterations = 8 + bpy.context.object.modifiers["Smooth"].factor = 1 + bpy.ops.object.modifier_apply(modifier="Smooth") + grates.append(obj) + obj = butil.boolean(grates) + for i in range(1, len(grates)): + butil.delete(grates[i]) + return obj + + +@node_utils.to_nodegroup( + "nodegroup_hollow_cube", singleton=False, type="GeometryNodeTree" +) +def nodegroup_hollow_cube(nw: NodeWrangler): + # Code generated using version 2.6.5 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVectorTranslation", "Size", (0.1000, 10.0000, 4.0000)), + ("NodeSocketVector", "Pos", (0.0000, 0.0000, 0.0000)), + ("NodeSocketInt", "Resolution", 2), + ("NodeSocketFloat", "Thickness", 0.0000), + ("NodeSocketBool", "Switch1", False), + ("NodeSocketBool", "Switch2", False), + ("NodeSocketBool", "Switch3", False), + ("NodeSocketBool", "Switch4", False), + ("NodeSocketBool", "Switch5", False), + ("NodeSocketBool", "Switch6", False), + ], + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": group_input.outputs["Size"]} + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Thickness"], 1: 2.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["Y"], 1: multiply}, + attrs={"operation": "SUBTRACT"}, + ) + + subtract_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["Z"], 1: multiply}, + attrs={"operation": "SUBTRACT"}, + ) + + combine_xyz_4 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": group_input.outputs["Thickness"], + "Y": subtract, + "Z": subtract_1, + }, + ) + + cube_2 = nw.new_node( + Nodes.MeshCube, + input_kwargs={ + "Size": combine_xyz_4, + "Vertices X": group_input.outputs["Resolution"], + "Vertices Y": group_input.outputs["Resolution"], + "Vertices Z": group_input.outputs["Resolution"], + }, + ) + + store_named_attribute_1 = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": cube_2.outputs["Mesh"], + "Name": "uv_map", + 3: cube_2.outputs["UV Map"], + }, + attrs={"domain": "CORNER", "data_type": "FLOAT_VECTOR"}, + ) + + multiply_1 = nw.new_node( + 
Nodes.Math, + input_kwargs={0: group_input.outputs["Thickness"]}, + attrs={"operation": "MULTIPLY"}, + ) + + separate_xyz_1 = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": group_input.outputs["Pos"]} + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: multiply_1, 1: separate_xyz_1.outputs["X"]} + ) + + scale = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: group_input.outputs["Size"], "Scale": 0.5000}, + attrs={"operation": "SCALE"}, + ) + + separate_xyz_2 = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": scale.outputs["Vector"]} + ) + + add_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_2.outputs["Y"], 1: separate_xyz_1.outputs["Y"]}, + ) + + subtract_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_2.outputs["Z"], 1: separate_xyz_1.outputs["Z"]}, + attrs={"operation": "SUBTRACT"}, + ) + + combine_xyz_5 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": add, "Y": add_1, "Z": subtract_2} + ) + + transform_2 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": store_named_attribute_1, + "Translation": combine_xyz_5, + }, + ) + + switch_2 = nw.new_node( + Nodes.Switch, input_kwargs={1: group_input.outputs["Switch3"], 14: transform_2} + ) + + subtract_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["Y"], 1: multiply}, + attrs={"operation": "SUBTRACT"}, + ) + + combine_xyz_2 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": separate_xyz.outputs["X"], + "Y": subtract_3, + "Z": group_input.outputs["Thickness"], + }, + ) + + cube_1 = nw.new_node( + Nodes.MeshCube, + input_kwargs={ + "Size": combine_xyz_2, + "Vertices X": group_input.outputs["Resolution"], + "Vertices Y": group_input.outputs["Resolution"], + "Vertices Z": group_input.outputs["Resolution"], + }, + ) + + store_named_attribute_4 = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": cube_1.outputs["Mesh"], + "Name": "uv_map", + 3: cube_1.outputs["UV Map"], + }, + attrs={"domain": "CORNER", "data_type": "FLOAT_VECTOR"}, + ) + + add_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_2.outputs["X"], 1: separate_xyz_1.outputs["X"]}, + ) + + add_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_2.outputs["Y"], 1: separate_xyz_1.outputs["Y"]}, + ) + + subtract_4 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["Z"], 1: multiply_1}, + attrs={"operation": "SUBTRACT"}, + ) + + combine_xyz_3 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": add_2, "Y": add_3, "Z": subtract_4} + ) + + transform_1 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": store_named_attribute_4, + "Translation": combine_xyz_3, + }, + ) + + switch_1 = nw.new_node( + Nodes.Switch, input_kwargs={1: group_input.outputs["Switch2"], 14: transform_1} + ) + + subtract_5 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["Y"], 1: multiply}, + attrs={"operation": "SUBTRACT"}, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": separate_xyz.outputs["X"], + "Y": subtract_5, + "Z": group_input.outputs["Thickness"], + }, + ) + + cube = nw.new_node( + Nodes.MeshCube, + input_kwargs={ + "Size": combine_xyz, + "Vertices X": group_input.outputs["Resolution"], + "Vertices Y": group_input.outputs["Resolution"], + "Vertices Z": group_input.outputs["Resolution"], + }, + ) + + store_named_attribute = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": cube.outputs["Mesh"], + "Name": "uv_map", + 3: cube.outputs["UV Map"], + }, + 
attrs={"domain": "CORNER", "data_type": "FLOAT_VECTOR"}, + ) + + add_4 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_2.outputs["X"], 1: separate_xyz_1.outputs["X"]}, + ) + + add_5 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_2.outputs["Y"], 1: separate_xyz_1.outputs["Y"]}, + ) + + add_6 = nw.new_node( + Nodes.Math, input_kwargs={0: multiply_1, 1: separate_xyz_1.outputs["Z"]} + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": add_4, "Y": add_5, "Z": add_6} + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": store_named_attribute, "Translation": combine_xyz_1}, + ) + + switch = nw.new_node( + Nodes.Switch, input_kwargs={1: group_input.outputs["Switch1"], 14: transform} + ) + + subtract_6 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["Y"], 1: multiply}, + attrs={"operation": "SUBTRACT"}, + ) + + subtract_7 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["Z"], 1: multiply}, + attrs={"operation": "SUBTRACT"}, + ) + + combine_xyz_6 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": group_input.outputs["Thickness"], + "Y": subtract_6, + "Z": subtract_7, + }, + ) + + cube_3 = nw.new_node( + Nodes.MeshCube, + input_kwargs={ + "Size": combine_xyz_6, + "Vertices X": group_input.outputs["Resolution"], + "Vertices Y": group_input.outputs["Resolution"], + "Vertices Z": group_input.outputs["Resolution"], + }, + ) + + store_named_attribute_5 = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": cube_3.outputs["Mesh"], + "Name": "uv_map", + 3: cube_3.outputs["UV Map"], + }, + attrs={"domain": "CORNER", "data_type": "FLOAT_VECTOR"}, + ) + + subtract_8 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["X"], 1: multiply_1}, + attrs={"operation": "SUBTRACT"}, + ) + + add_7 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_2.outputs["Y"], 1: separate_xyz_1.outputs["Y"]}, + ) + + subtract_9 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_2.outputs["Z"], 1: separate_xyz_1.outputs["Z"]}, + attrs={"operation": "SUBTRACT"}, + ) + + combine_xyz_7 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": subtract_8, "Y": add_7, "Z": subtract_9} + ) + + transform_3 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": store_named_attribute_5, + "Translation": combine_xyz_7, + }, + ) + + switch_3 = nw.new_node( + Nodes.Switch, input_kwargs={1: group_input.outputs["Switch4"], 14: transform_3} + ) + + combine_xyz_9 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": separate_xyz.outputs["X"], + "Y": group_input.outputs["Thickness"], + "Z": separate_xyz.outputs["Z"], + }, + ) + + cube_4 = nw.new_node( + Nodes.MeshCube, + input_kwargs={ + "Size": combine_xyz_9, + "Vertices X": group_input.outputs["Resolution"], + "Vertices Y": group_input.outputs["Resolution"], + "Vertices Z": group_input.outputs["Resolution"], + }, + ) + + store_named_attribute_2 = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": cube_4.outputs["Mesh"], + "Name": "uv_map", + 3: cube_4.outputs["UV Map"], + }, + attrs={"domain": "CORNER", "data_type": "FLOAT_VECTOR"}, + ) + + add_8 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_1.outputs["X"], 1: separate_xyz_2.outputs["X"]}, + ) + + add_9 = nw.new_node( + Nodes.Math, input_kwargs={0: separate_xyz_1.outputs["Y"], 1: multiply_1} + ) + + add_10 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_1.outputs["Z"], 1: separate_xyz_2.outputs["Z"]}, + ) + + 
combine_xyz_8 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": add_8, "Y": add_9, "Z": add_10} + ) + + transform_4 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": store_named_attribute_2, + "Translation": combine_xyz_8, + }, + ) + + switch_4 = nw.new_node( + Nodes.Switch, input_kwargs={1: group_input.outputs["Switch5"], 14: transform_4} + ) + + combine_xyz_10 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": separate_xyz.outputs["X"], + "Y": group_input.outputs["Thickness"], + "Z": separate_xyz.outputs["Z"], + }, + ) + + cube_5 = nw.new_node( + Nodes.MeshCube, + input_kwargs={ + "Size": combine_xyz_10, + "Vertices X": group_input.outputs["Resolution"], + "Vertices Y": group_input.outputs["Resolution"], + "Vertices Z": group_input.outputs["Resolution"], + }, + ) + + store_named_attribute_3 = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": cube_5.outputs["Mesh"], + "Name": "uv_map", + 3: cube_5.outputs["UV Map"], + }, + attrs={"domain": "CORNER", "data_type": "FLOAT_VECTOR"}, + ) + + add_11 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_2.outputs["X"], 1: separate_xyz_1.outputs["X"]}, + ) + + subtract_10 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["Y"], 1: multiply_1}, + attrs={"operation": "SUBTRACT"}, + ) + + add_12 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_2.outputs["Z"], 1: separate_xyz_1.outputs["Z"]}, + ) + + combine_xyz_11 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": add_11, "Y": subtract_10, "Z": add_12} + ) + + transform_5 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": store_named_attribute_3, + "Translation": combine_xyz_11, + }, + ) + + switch_5 = nw.new_node( + Nodes.Switch, input_kwargs={1: group_input.outputs["Switch6"], 14: transform_5} + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={ + "Geometry": [ + switch_2.outputs[6], + switch_1.outputs[6], + switch.outputs[6], + switch_3.outputs[6], + switch_4.outputs[6], + switch_5.outputs[6], + ] + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": join_geometry}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup("nodegroup_o", singleton=False, type="GeometryNodeTree") +def nodegroup_o(nw: NodeWrangler): + # Code generated using version 2.6.5 of the node_transpiler + + curve_line = nw.new_node( + Nodes.CurveLine, input_kwargs={"End": (0.0000, 0.0000, 0.0020)} + ) + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketFloatDistance", "Size", 1.0000)] + ) + + curve_circle_1 = nw.new_node( + Nodes.CurveCircle, input_kwargs={"Radius": group_input.outputs["Size"]} + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": curve_line, + "Profile Curve": curve_circle_1.outputs["Curve"], + }, + ) + + extrude_mesh = nw.new_node( + Nodes.ExtrudeMesh, input_kwargs={"Mesh": curve_to_mesh, "Offset Scale": 0.0030} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Mesh": extrude_mesh.outputs["Mesh"]}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup("nodegroup_heater", singleton=False, type="GeometryNodeTree") +def nodegroup_heater(nw: NodeWrangler): + # Code generated using version 2.6.5 of the node_transpiler + + curve_line_1 = nw.new_node( + Nodes.CurveLine, input_kwargs={"End": (0.0000, 0.0000, 0.0010)} + ) + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "width", 0.5000), + 
("NodeSocketFloat", "depth", 0.0000), + ("NodeSocketFloat", "radius_ratio", 0.2000), + ("NodeSocketFloat", "arrangement_ratio", 0.5000), + ("NodeSocketShader", "SuperBlackGlass", None), + ], + ) + + minimum = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["width"], 1: group_input.outputs["depth"]}, + attrs={"operation": "MINIMUM"}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: minimum, 1: group_input.outputs["radius_ratio"]}, + label="Multiply", + attrs={"operation": "MULTIPLY"}, + ) + + curve_circle_1 = nw.new_node(Nodes.CurveCircle, input_kwargs={"Radius": multiply}) + + curve_to_mesh_1 = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": curve_line_1, + "Profile Curve": curve_circle_1.outputs["Curve"], + "Fill Caps": True, + }, + ) + + set_material = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": curve_to_mesh_1, + "Material": group_input.outputs["SuperBlackGlass"], + }, + ) + + geometry_to_instance = nw.new_node( + "GeometryNodeGeometryToInstance", input_kwargs={"Geometry": set_material} + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: minimum, 1: group_input.outputs["arrangement_ratio"]}, + label="Multiply", + attrs={"operation": "MULTIPLY"}, + ) + + divide = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["width"], 1: multiply_1}, + attrs={"operation": "DIVIDE"}, + ) + + floor = nw.new_node( + Nodes.Math, input_kwargs={0: divide}, attrs={"operation": "FLOOR"} + ) + + divide_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["depth"], 1: multiply_1}, + attrs={"operation": "DIVIDE"}, + ) + + floor_1 = nw.new_node( + Nodes.Math, input_kwargs={0: divide_1}, attrs={"operation": "FLOOR"} + ) + + multiply_2 = nw.new_node( + Nodes.Math, input_kwargs={0: floor, 1: floor_1}, attrs={"operation": "MULTIPLY"} + ) + + duplicate_elements = nw.new_node( + Nodes.DuplicateElements, + input_kwargs={"Geometry": geometry_to_instance, "Amount": multiply_2}, + attrs={"domain": "INSTANCE"}, + ) + + divide_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["depth"], 1: floor_1}, + attrs={"operation": "DIVIDE"}, + ) + + divide_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: duplicate_elements.outputs["Duplicate Index"], 1: floor}, + attrs={"operation": "DIVIDE"}, + ) + + floor_2 = nw.new_node( + Nodes.Math, input_kwargs={0: divide_3}, attrs={"operation": "FLOOR"} + ) + + multiply_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: floor_2, 1: divide_2}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_add = nw.new_node( + Nodes.Math, + input_kwargs={0: divide_2, 2: multiply_3}, + attrs={"operation": "MULTIPLY_ADD"}, + ) + + divide_4 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["width"], 1: floor}, + attrs={"operation": "DIVIDE"}, + ) + + modulo = nw.new_node( + Nodes.Math, + input_kwargs={0: duplicate_elements.outputs["Duplicate Index"], 1: floor}, + attrs={"operation": "MODULO"}, + ) + + multiply_4 = nw.new_node( + Nodes.Math, + input_kwargs={0: modulo, 1: divide_4}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_add_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: divide_4, 2: multiply_4}, + attrs={"operation": "MULTIPLY_ADD"}, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": multiply_add, "Y": multiply_add_1} + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": duplicate_elements.outputs["Geometry"], + "Offset": combine_xyz, + }, + ) + + group_output = nw.new_node( + 
Nodes.GroupOutput, + input_kwargs={"Mesh": set_position}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_oven_rack", singleton=False, type="GeometryNodeTree" +) +def nodegroup_oven_rack(nw: NodeWrangler): + # Code generated using version 2.6.5 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloatDistance", "Width", 2.0000), + ("NodeSocketFloatDistance", "Height", 2.0000), + ("NodeSocketFloatDistance", "Radius", 0.0200), + ("NodeSocketInt", "Amount", 5), + ], + ) + + quadrilateral = nw.new_node( + "GeometryNodeCurvePrimitiveQuadrilateral", + input_kwargs={ + "Width": group_input.outputs["Width"], + "Height": group_input.outputs["Height"], + }, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Height"], 1: -0.5000}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Y": multiply}) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Height"]}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_4 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Y": multiply_1}) + + curve_line = nw.new_node( + Nodes.CurveLine, input_kwargs={"Start": combine_xyz_3, "End": combine_xyz_4} + ) + + geometry_to_instance = nw.new_node( + "GeometryNodeGeometryToInstance", input_kwargs={"Geometry": curve_line} + ) + + duplicate_elements = nw.new_node( + Nodes.DuplicateElements, + input_kwargs={ + "Geometry": geometry_to_instance, + "Amount": group_input.outputs["Amount"], + }, + attrs={"domain": "INSTANCE"}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Width"]}, + attrs={"operation": "MULTIPLY"}, + ) + + divide = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_2, 1: group_input.outputs["Amount"]}, + attrs={"operation": "DIVIDE"}, + ) + + multiply_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: duplicate_elements.outputs["Duplicate Index"], 1: divide}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={"X": multiply_3}) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": duplicate_elements.outputs["Geometry"], + "Offset": combine_xyz, + }, + ) + + duplicate_elements_1 = nw.new_node( + Nodes.DuplicateElements, + input_kwargs={ + "Geometry": geometry_to_instance, + "Amount": group_input.outputs["Amount"], + }, + attrs={"domain": "INSTANCE"}, + ) + + multiply_4 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Width"], 1: -0.5000}, + attrs={"operation": "MULTIPLY"}, + ) + + divide_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_4, 1: group_input.outputs["Amount"]}, + attrs={"operation": "DIVIDE"}, + ) + + multiply_5 = nw.new_node( + Nodes.Math, + input_kwargs={0: duplicate_elements_1.outputs["Duplicate Index"], 1: divide_1}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"X": multiply_5}) + + set_position_1 = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": duplicate_elements_1.outputs["Geometry"], + "Offset": combine_xyz_1, + }, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [quadrilateral, set_position, set_position_1]}, + ) + + curve_circle = nw.new_node( + Nodes.CurveCircle, input_kwargs={"Radius": group_input.outputs["Radius"]} + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": join_geometry, + 
"Profile Curve": curve_circle.outputs["Curve"], + "Fill Caps": True, + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Mesh": curve_to_mesh}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup("nodegroup_text", singleton=False, type="GeometryNodeTree") +def nodegroup_text(nw: NodeWrangler): + # Code generated using version 2.6.5 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVectorTranslation", "Translation", (1.5000, 0.0000, 0.0000)), + ("NodeSocketString", "String", "BrandName"), + ("NodeSocketFloatDistance", "Size", 0.0500), + ("NodeSocketFloat", "Offset Scale", 0.0020), + ], + ) + + string_to_curves = nw.new_node( + "GeometryNodeStringToCurves", + input_kwargs={ + "String": group_input.outputs["String"], + "Size": group_input.outputs["Size"], + }, + attrs={"align_y": "BOTTOM_BASELINE", "align_x": "CENTER"}, + ) + + fill_curve = nw.new_node( + Nodes.FillCurve, + input_kwargs={"Curve": string_to_curves.outputs["Curve Instances"]}, + ) + + extrude_mesh = nw.new_node( + Nodes.ExtrudeMesh, + input_kwargs={ + "Mesh": fill_curve, + "Offset Scale": group_input.outputs["Offset Scale"], + }, + ) + + transform_1 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": extrude_mesh.outputs["Mesh"], + "Translation": group_input.outputs["Translation"], + "Rotation": (1.5708, 0.0000, 1.5708), + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": transform_1}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup("nodegroup_handle", singleton=False, type="GeometryNodeTree") +def nodegroup_handle(nw: NodeWrangler): + # Code generated using version 2.6.5 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "width", 0.0000), + ("NodeSocketFloat", "length", 0.0000), + ("NodeSocketFloat", "thickness", 0.0200), + ], + ) + + cube = nw.new_node( + Nodes.MeshCube, input_kwargs={"Size": group_input.outputs["width"]} + ) + + store_named_attribute = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": cube.outputs["Mesh"], + "Name": "uv_map", + 3: cube.outputs["UV Map"], + }, + attrs={"domain": "CORNER", "data_type": "FLOAT_VECTOR"}, + ) + + cube_1 = nw.new_node( + Nodes.MeshCube, input_kwargs={"Size": group_input.outputs["width"]} + ) + + store_named_attribute_1 = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": cube_1.outputs["Mesh"], + "Name": "uv_map", + 3: cube_1.outputs["UV Map"], + }, + attrs={"domain": "CORNER", "data_type": "FLOAT_VECTOR"}, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"Y": group_input.outputs["length"]} + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": store_named_attribute_1, "Translation": combine_xyz}, + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [store_named_attribute, transform]}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["width"]}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply}) + + transform_2 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": join_geometry_1, "Translation": combine_xyz_3}, + ) + + add = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["length"], + 1: group_input.outputs["width"], + }, + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + 
"X": group_input.outputs["width"], + "Y": add, + "Z": group_input.outputs["thickness"], + }, + ) + + cube_2 = nw.new_node(Nodes.MeshCube, input_kwargs={"Size": combine_xyz_1}) + + store_named_attribute_2 = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": cube_2.outputs["Mesh"], + "Name": "uv_map", + 3: cube_2.outputs["UV Map"], + }, + attrs={"domain": "CORNER", "data_type": "FLOAT_VECTOR"}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["length"]}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["thickness"]}, + attrs={"operation": "MULTIPLY"}, + ) + + add_1 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["width"], 1: multiply_2} + ) + + combine_xyz_2 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"Y": multiply_1, "Z": add_1} + ) + + transform_1 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": store_named_attribute_2, + "Translation": combine_xyz_2, + }, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [transform_2, transform_1]} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": join_geometry}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup("nodegroup_center", singleton=False, type="GeometryNodeTree") +def nodegroup_center(nw: NodeWrangler): + # Code generated using version 2.6.5 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketVector", "Vector", (0.0000, 0.0000, 0.0000)), + ("NodeSocketFloat", "MarginX", 0.5000), + ("NodeSocketFloat", "MarginY", 0.0000), + ("NodeSocketFloat", "MarginZ", 0.0000), + ], + ) + + bounding_box = nw.new_node( + Nodes.BoundingBox, input_kwargs={"Geometry": group_input.outputs["Geometry"]} + ) + + subtract = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: group_input.outputs["Vector"], 1: bounding_box.outputs["Min"]}, + attrs={"operation": "SUBTRACT"}, + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": subtract.outputs["Vector"]} + ) + + greater_than = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["X"], 1: group_input.outputs["MarginX"]}, + attrs={"operation": "GREATER_THAN", "use_clamp": True}, + ) + + subtract_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: bounding_box.outputs["Max"], 1: group_input.outputs["Vector"]}, + attrs={"operation": "SUBTRACT"}, + ) + + separate_xyz_1 = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": subtract_1.outputs["Vector"]} + ) + + greater_than_1 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: separate_xyz_1.outputs["X"], + 1: group_input.outputs["MarginX"], + }, + attrs={"operation": "GREATER_THAN", "use_clamp": True}, + ) + + op_and = nw.new_node( + Nodes.BooleanMath, input_kwargs={0: greater_than, 1: greater_than_1} + ) + + greater_than_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["Y"], 1: group_input.outputs["MarginY"]}, + attrs={"operation": "GREATER_THAN"}, + ) + + greater_than_3 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: separate_xyz_1.outputs["Y"], + 1: group_input.outputs["MarginY"], + }, + attrs={"operation": "GREATER_THAN", "use_clamp": True}, + ) + + op_and_1 = nw.new_node( + Nodes.BooleanMath, input_kwargs={0: greater_than_2, 1: greater_than_3} + ) + + op_and_2 = nw.new_node(Nodes.BooleanMath, input_kwargs={0: op_and, 1: op_and_1}) + + 
greater_than_4 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["Z"], 1: group_input.outputs["MarginZ"]}, + attrs={"operation": "GREATER_THAN", "use_clamp": True}, + ) + + greater_than_5 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: separate_xyz_1.outputs["Z"], + 1: group_input.outputs["MarginZ"], + }, + attrs={"operation": "GREATER_THAN", "use_clamp": True}, + ) + + op_and_3 = nw.new_node( + Nodes.BooleanMath, input_kwargs={0: greater_than_4, 1: greater_than_5} + ) + + op_and_4 = nw.new_node(Nodes.BooleanMath, input_kwargs={0: op_and_2, 1: op_and_3}) + + op_not = nw.new_node( + Nodes.BooleanMath, input_kwargs={0: op_and_4}, attrs={"operation": "NOT"} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"In": op_and_4, "Out": op_not}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup("nodegroup_cube", singleton=False, type="GeometryNodeTree") +def nodegroup_cube(nw: NodeWrangler): + # Code generated using version 2.6.5 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVectorTranslation", "Size", (0.1000, 10.0000, 4.0000)), + ("NodeSocketVector", "Pos", (0.0000, 0.0000, 0.0000)), + ("NodeSocketInt", "Resolution", 2), + ], + ) + + cube = nw.new_node( + Nodes.MeshCube, + input_kwargs={ + "Size": group_input.outputs["Size"], + "Vertices X": group_input.outputs["Resolution"], + "Vertices Y": group_input.outputs["Resolution"], + "Vertices Z": group_input.outputs["Resolution"], + }, + ) + + store_named_attribute_1 = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": cube.outputs["Mesh"], + "Name": "uv_map", + 3: cube.outputs["UV Map"], + }, + attrs={"domain": "CORNER", "data_type": "FLOAT_VECTOR"}, + ) + + store_named_attribute = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={"Geometry": store_named_attribute_1, "Name": "uv_map"}, + attrs={"domain": "CORNER", "data_type": "FLOAT_VECTOR"}, + ) + + multiply_add = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: group_input.outputs["Size"], + 1: (0.5000, 0.5000, 0.5000), + 2: group_input.outputs["Pos"], + }, + attrs={"operation": "MULTIPLY_ADD"}, + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": store_named_attribute, + "Translation": multiply_add.outputs["Vector"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": transform}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_oven_geometry", singleton=False, type="GeometryNodeTree" +) +def nodegroup_oven_geometry( + nw: NodeWrangler, + preprocess: bool = False, + use_gas: bool = False, + is_placeholder: bool = False, +): + # Code generated using version 2.6.5 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "Depth", 1.0000), + ("NodeSocketFloat", "Width", 1.0000), + ("NodeSocketFloat", "Height", 1.0000), + ("NodeSocketFloat", "DoorThickness", 0.0700), + ("NodeSocketFloat", "DoorRotation", 0.0000), + ("NodeSocketFloatDistance", "RackRadius", 0.0100), + ("NodeSocketInt", "RackHAmount", 2), + ("NodeSocketInt", "RackDAmount", 5), + ("NodeSocketFloat", "PanelHeight", 0.3000), + ("NodeSocketFloat", "PanelThickness", 0.2000), + ("NodeSocketInt", "BottonAmount", 4), + ("NodeSocketFloatDistance", "BottonRadius", 0.0500), + ("NodeSocketFloat", "BottonThickness", 0.0300), + ("NodeSocketFloat", "HeaterRadiusRatio", 0.1500), + ("NodeSocketString", "BrandName", "BrandName"), + 
("NodeSocketMaterial", "Glass", None), + ("NodeSocketMaterial", "Surface", None), + ("NodeSocketMaterial", "WhiteMetal", None), + ("NodeSocketMaterial", "SuperBlackGlass", None), + ("NodeSocketMaterial", "Back", None), + ("NodeSocketBool", "is_placeholder", is_placeholder), + ], + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": group_input.outputs["DoorThickness"], + "Y": group_input.outputs["Width"], + "Z": group_input.outputs["Height"], + }, + ) + + combine_xyz_2 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": group_input.outputs["Depth"]} + ) + + cube = nw.new_node( + nodegroup_cube().name, + input_kwargs={"Size": combine_xyz_1, "Pos": combine_xyz_2}, + ) + + position = nw.new_node(Nodes.InputPosition) + + center = nw.new_node( + nodegroup_center().name, + input_kwargs={ + "Geometry": cube, + "Vector": position, + "MarginX": -1.0000, + "MarginY": 0.1000, + "MarginZ": 0.1500, + }, + ) + + set_material_2 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": cube, + "Selection": center.outputs["In"], + "Material": group_input.outputs["Glass"], + }, + ) + + set_material_3 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": set_material_2, + "Selection": center.outputs["Out"], + "Material": group_input.outputs["Surface"], + }, + ) + + # set_shade_smooth = nw.new_node(Nodes.SetShadeSmooth, input_kwargs={'Geometry': set_material_3}) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Width"], 1: 0.0500}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Width"], 1: 0.8000}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, input_kwargs={0: multiply}, attrs={"operation": "MULTIPLY"} + ) + + handle = nw.new_node( + nodegroup_handle().name, + input_kwargs={"width": multiply, "length": multiply_1, "thickness": multiply_2}, + ) + + add = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["Depth"], + 1: group_input.outputs["DoorThickness"], + }, + ) + + multiply_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Width"]}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_4 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_1, 1: -0.5000}, + attrs={"operation": "MULTIPLY"}, + ) + + add_1 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_3, 1: multiply_4}) + + multiply_5 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Height"], 1: 0.9200}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_13 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": add, "Y": add_1, "Z": multiply_5} + ) + + transform_1 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": handle, + "Translation": combine_xyz_13, + "Rotation": (0.0000, 1.5708, 0.0000), + }, + ) + + set_material_8 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": transform_1, + "Material": group_input.outputs["WhiteMetal"], + }, + ) + + add_2 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["Depth"], + 1: group_input.outputs["DoorThickness"], + }, + ) + + multiply_6 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Width"]}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_12 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": add_2, "Y": multiply_6, "Z": 0.0300} + ) + + multiply_7 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Height"], 1: 0.0500}, + attrs={"operation": "MULTIPLY"}, + ) + + 
text = nw.new_node( + nodegroup_text().name, + input_kwargs={ + "Translation": combine_xyz_12, + "String": group_input.outputs["BrandName"], + "Size": multiply_7, + }, + ) + + text = complete_no_bevel(nw, text, preprocess) + + set_material_9 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={"Geometry": text, "Material": group_input.outputs["WhiteMetal"]}, + ) + + set_material_8 = complete_bevel(nw, set_material_8, preprocess) + + join_geometry_3 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [set_material_3, set_material_8, set_material_9]}, + ) + + geometry_to_instance = nw.new_node( + "GeometryNodeGeometryToInstance", input_kwargs={"Geometry": join_geometry_3} + ) + + y = nw.scalar_multiply( + group_input.outputs["DoorRotation"], 1 if not preprocess else 0 + ) + + combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Y": y}) + + combine_xyz_4 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": group_input.outputs["Depth"]} + ) + + rotate_instances = nw.new_node( + Nodes.RotateInstances, + input_kwargs={ + "Instances": geometry_to_instance, + "Rotation": combine_xyz_3, + "Pivot Point": combine_xyz_4, + }, + ) + + rotate_instances = nw.new_node(Nodes.RealizeInstances, [rotate_instances]) + + door = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": rotate_instances}, label="door" + ) + + multiply_8 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["DoorThickness"], 1: 2.1000}, + attrs={"operation": "MULTIPLY"}, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Depth"], 1: multiply_8}, + attrs={"operation": "SUBTRACT"}, + ) + + multiply_9 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["DoorThickness"], 1: 2.1000}, + attrs={"operation": "MULTIPLY"}, + ) + + subtract_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Width"], 1: multiply_9}, + attrs={"operation": "SUBTRACT"}, + ) + + ovenrack = nw.new_node( + nodegroup_oven_rack().name, + input_kwargs={ + "Width": subtract, + "Height": subtract_1, + "Radius": group_input.outputs["RackRadius"], + "Amount": group_input.outputs["RackDAmount"], + }, + ) + + geometry_to_instance_1 = nw.new_node( + "GeometryNodeGeometryToInstance", input_kwargs={"Geometry": ovenrack} + ) + + duplicate_elements = nw.new_node( + Nodes.DuplicateElements, + input_kwargs={ + "Geometry": geometry_to_instance_1, + "Amount": group_input.outputs["RackHAmount"], + }, + attrs={"domain": "INSTANCE"}, + ) + + multiply_10 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Depth"]}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_11 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Width"]}, + attrs={"operation": "MULTIPLY"}, + ) + + add_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: duplicate_elements.outputs["Duplicate Index"], 1: 1.0000}, + ) + + multiply_12 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["DoorThickness"], 1: 2.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + subtract_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Height"], 1: multiply_12}, + attrs={"operation": "SUBTRACT"}, + ) + + add_4 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["RackHAmount"], 1: 1.0000} + ) + + divide = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract_2, 1: add_4}, + attrs={"operation": "DIVIDE"}, + ) + + multiply_13 = nw.new_node( + Nodes.Math, input_kwargs={0: add_3, 1: divide}, attrs={"operation": "MULTIPLY"} + ) + + combine_xyz_5 = nw.new_node( + 
Nodes.CombineXYZ, + input_kwargs={"X": multiply_10, "Y": multiply_11, "Z": multiply_13}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": duplicate_elements.outputs["Geometry"], + "Offset": combine_xyz_5, + }, + ) + + set_material = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": set_position, + "Material": group_input.outputs["Surface"], + }, + ) + + set_material = nw.new_node(Nodes.RealizeInstances, [set_material]) + + racks = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": set_material}, label="racks" + ) + + add_5 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["Depth"], + 1: group_input.outputs["DoorThickness"], + }, + ) + + reroute_10 = nw.new_node(Nodes.Reroute, input_kwargs={"Input": add_5}) + + reroute_11 = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": group_input.outputs["Width"]} + ) + + reroute_8 = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": group_input.outputs["DoorThickness"]} + ) + + combine_xyz_6 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": reroute_10, "Y": reroute_11, "Z": reroute_8}, + ) + + reroute_9 = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": group_input.outputs["Height"]} + ) + + combine_xyz_7 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": reroute_9}) + + cube_1 = nw.new_node( + nodegroup_cube().name, + input_kwargs={"Size": combine_xyz_6, "Pos": combine_xyz_7}, + ) + + set_material_5 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={"Geometry": cube_1, "Material": group_input.outputs["Back"]}, + ) + + # set_shade_smooth_1 = nw.new_node(Nodes.SetShadeSmooth, input_kwargs={'Geometry': set_material_5}) + + subtract_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: reroute_10, 1: group_input.outputs["PanelThickness"]}, + attrs={"operation": "SUBTRACT"}, + ) + + multiply_add = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["HeaterRadiusRatio"], + 1: 2.0000, + 2: 0.1000, + }, + attrs={"operation": "MULTIPLY_ADD"}, + ) + + heater = nw.new_node( + nodegroup_heater().name, + input_kwargs={ + "width": reroute_11, + "depth": subtract_3, + "radius_ratio": group_input.outputs["HeaterRadiusRatio"], + "arrangement_ratio": multiply_add, + }, + ) + + add_6 = nw.new_node(Nodes.Math, input_kwargs={0: reroute_8, 1: reroute_9}) + + combine_xyz_15 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": group_input.outputs["PanelThickness"], "Z": add_6}, + ) + + transform_2 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": heater, "Translation": combine_xyz_15}, + ) + + transform_2 = complete_no_bevel(nw, transform_2, preprocess) + + if use_gas: + join_geometry_2 = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [set_material_5]} + ) + else: + join_geometry_2 = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [set_material_5, transform_2]} + ) + + heater_1 = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": join_geometry_2}, label="heater" + ) + + reroute_14 = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": group_input.outputs["Width"]} + ) + + combine_xyz_9 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": group_input.outputs["PanelThickness"], + "Y": reroute_14, + "Z": group_input.outputs["PanelHeight"], + }, + ) + + add_7 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["Height"], + 1: group_input.outputs["DoorThickness"], + }, + ) + + combine_xyz_8 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": add_7}) + + cube_2 = nw.new_node( + nodegroup_cube().name, + 
input_kwargs={"Size": combine_xyz_9, "Pos": combine_xyz_8}, + ) + + position_1 = nw.new_node(Nodes.InputPosition) + + center_1 = nw.new_node( + nodegroup_center().name, + input_kwargs={ + "Geometry": cube_2, + "Vector": position_1, + "MarginX": -1.0000, + "MarginY": 0.0500, + "MarginZ": 0.0500, + }, + ) + + set_material_4 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": cube_2, + "Selection": center_1.outputs["In"], + "Material": group_input.outputs["Back"], + }, + ) + + set_material_7 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": set_material_4, + "Selection": center_1.outputs["Out"], + "Material": group_input.outputs["Surface"], + }, + ) + + # set_shade_smooth_3 = nw.new_node(Nodes.SetShadeSmooth, input_kwargs={'Geometry': set_material_7}) + + reroute_13 = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": group_input.outputs["PanelThickness"]} + ) + + multiply_14 = nw.new_node( + Nodes.Math, input_kwargs={0: reroute_14}, attrs={"operation": "MULTIPLY"} + ) + + bounding_box = nw.new_node(Nodes.BoundingBox, input_kwargs={"Geometry": cube_2}) + + add_8 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: bounding_box.outputs["Min"], 1: bounding_box.outputs["Max"]}, + ) + + scale = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: add_8.outputs["Vector"], "Scale": 0.5000}, + attrs={"operation": "SCALE"}, + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": scale.outputs["Vector"]} + ) + + combine_xyz_16 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": reroute_13, + "Y": multiply_14, + "Z": separate_xyz.outputs["Z"], + }, + ) + + multiply_15 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["PanelHeight"], 1: 0.2000}, + attrs={"operation": "MULTIPLY"}, + ) + + text_1 = nw.new_node( + nodegroup_text().name, + input_kwargs={ + "Translation": combine_xyz_16, + "String": "12:01", + "Size": multiply_15, + }, + ) + + set_material_7 = complete_bevel(nw, set_material_7, preprocess) + text_1 = complete_no_bevel(nw, text_1, preprocess) + + join_geometry_5 = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [set_material_7, text_1]} + ) + + combine_xyz_21 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"Z": group_input.outputs["BottonThickness"]} + ) + + curve_line = nw.new_node(Nodes.CurveLine, input_kwargs={"End": combine_xyz_21}) + + reroute_12 = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": group_input.outputs["BottonRadius"]} + ) + + curve_circle = nw.new_node(Nodes.CurveCircle, input_kwargs={"Radius": reroute_12}) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": curve_line, + "Profile Curve": curve_circle.outputs["Curve"], + "Fill Caps": True, + }, + ) + + add_9 = nw.new_node(Nodes.Math, input_kwargs={0: reroute_12, 1: 0.0050}) + + o = nw.new_node(nodegroup_o().name, input_kwargs={"Size": add_9}) + + join_geometry_4 = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [curve_to_mesh, o]} + ) + + combine_xyz_10 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": reroute_13, "Z": separate_xyz.outputs["Z"]} + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": join_geometry_4, + "Translation": combine_xyz_10, + "Rotation": (0.0000, 1.5708, 0.0000), + }, + ) + + reroute_16 = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": separate_xyz.outputs["Z"]} + ) + + reroute_15 = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": group_input.outputs["BottonRadius"]} + ) + + multiply_16 = nw.new_node( + Nodes.Math, 
+ input_kwargs={0: group_input.outputs["PanelHeight"], 1: 0.0500}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_add_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: reroute_15, 1: 1.0000, 2: multiply_16}, + attrs={"operation": "MULTIPLY_ADD"}, + ) + + add_10 = nw.new_node(Nodes.Math, input_kwargs={0: reroute_16, 1: multiply_add_1}) + + combine_xyz_17 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": reroute_13, "Z": add_10} + ) + + multiply_17 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["BottonRadius"], 1: 0.2500}, + attrs={"operation": "MULTIPLY"}, + ) + + text_2 = nw.new_node( + nodegroup_text().name, + input_kwargs={ + "Translation": combine_xyz_17, + "String": "Off", + "Size": multiply_17, + }, + ) + + multiply_add_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: reroute_15, 1: 0.7000, 2: multiply_16}, + attrs={"operation": "MULTIPLY_ADD"}, + ) + + add_11 = nw.new_node(Nodes.Math, input_kwargs={0: reroute_16, 1: multiply_add_2}) + + combine_xyz_18 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": reroute_13, "Y": multiply_add_2, "Z": add_11}, + ) + + text_3 = nw.new_node( + nodegroup_text().name, + input_kwargs={ + "Translation": combine_xyz_18, + "String": "High", + "Size": multiply_17, + }, + ) + + multiply_18 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_16, 1: -1.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_add_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: reroute_15, 1: -0.7000, 2: multiply_18}, + attrs={"operation": "MULTIPLY_ADD"}, + ) + + combine_xyz_19 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": reroute_13, "Y": multiply_add_3, "Z": add_11}, + ) + + text_4 = nw.new_node( + nodegroup_text().name, + input_kwargs={ + "Translation": combine_xyz_19, + "String": "Low", + "Size": multiply_17, + }, + ) + + add_12 = nw.new_node( + Nodes.Math, + input_kwargs={0: reroute_13, 1: group_input.outputs["BottonThickness"]}, + ) + + combine_xyz_20 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": add_12, "Z": separate_xyz.outputs["Z"]} + ) + + multiply_19 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["BottonThickness"], 1: 0.1000}, + attrs={"operation": "MULTIPLY"}, + ) + + text_5 = nw.new_node( + nodegroup_text().name, + input_kwargs={ + "Translation": combine_xyz_20, + "String": "1", + "Size": group_input.outputs["BottonRadius"], + "Offset Scale": multiply_19, + }, + ) + + join_geometry_6 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [transform, text_2, text_3, text_4, text_5]}, + ) + + geometry_to_instance_2 = nw.new_node( + "GeometryNodeGeometryToInstance", input_kwargs={"Geometry": join_geometry_6} + ) + + add_13 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["BottonAmount"], 1: 2.0000} + ) + + reroute_6 = nw.new_node(Nodes.Reroute, input_kwargs={"Input": add_13}) + + duplicate_elements_1 = nw.new_node( + Nodes.DuplicateElements, + input_kwargs={"Geometry": geometry_to_instance_2, "Amount": reroute_6}, + attrs={"domain": "INSTANCE"}, + ) + + add_14 = nw.new_node( + Nodes.Math, + input_kwargs={0: duplicate_elements_1.outputs["Duplicate Index"], 1: 1.0000}, + ) + + add_15 = nw.new_node(Nodes.Math, input_kwargs={0: reroute_6, 1: 1.0000}) + + divide_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Width"], 1: add_15}, + attrs={"operation": "DIVIDE"}, + ) + + multiply_20 = nw.new_node( + Nodes.Math, + input_kwargs={0: add_14, 1: divide_1}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_11 = nw.new_node(Nodes.CombineXYZ, 
input_kwargs={"Y": multiply_20}) + + set_position_1 = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": duplicate_elements_1.outputs["Geometry"], + "Offset": combine_xyz_11, + }, + ) + + multiply_21 = nw.new_node( + Nodes.Math, input_kwargs={0: add_13}, attrs={"operation": "MULTIPLY"} + ) + + add_16 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_21, 1: -1.0100}) + + greater_than = nw.new_node( + Nodes.Math, + input_kwargs={0: duplicate_elements_1.outputs["Duplicate Index"], 1: add_16}, + attrs={"operation": "GREATER_THAN"}, + ) + + add_17 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_21, 1: 0.9900}) + + less_than = nw.new_node( + Nodes.Math, + input_kwargs={0: duplicate_elements_1.outputs["Duplicate Index"], 1: add_17}, + attrs={"operation": "LESS_THAN"}, + ) + + minimum = nw.new_node( + Nodes.Math, + input_kwargs={0: greater_than, 1: less_than}, + attrs={"operation": "MINIMUM"}, + ) + + delete_geometry = nw.new_node( + Nodes.DeleteGeometry, + input_kwargs={"Geometry": set_position_1, "Selection": minimum}, + attrs={"domain": "INSTANCE"}, + ) + + set_material_6 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": delete_geometry, + "Material": group_input.outputs["WhiteMetal"], + }, + ) + + botton = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": set_material_6}, label="botton" + ) + + botton = complete_no_bevel(nw, botton, preprocess) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [join_geometry_5, botton]} + ) + + geometry_to_instance_3 = nw.new_node( + "GeometryNodeGeometryToInstance", input_kwargs={"Geometry": join_geometry_1} + ) + + combine_xyz_14 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"Z": group_input.outputs["Height"]} + ) + + panel_bbox = nw.new_node( + Nodes.BoundingBox, input_kwargs={"Geometry": geometry_to_instance_3} + ) + + switch_1 = nw.new_node( + Nodes.Switch, + input_kwargs={ + "Switch": group_input.outputs["is_placeholder"], + "False": geometry_to_instance_3, + "True": panel_bbox, + }, + ) + + rotate_instances_1 = nw.new_node( + Nodes.RotateInstances, + input_kwargs={ + "Instances": switch_1, + "Rotation": (0.0000, -0.1745, 0.0000), + "Pivot Point": combine_xyz_14, + }, + ) + + rotate_instances_1 = nw.new_node(Nodes.RealizeInstances, [rotate_instances_1]) + + panel = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": rotate_instances_1}, label="panel" + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": group_input.outputs["Depth"], + "Y": group_input.outputs["Width"], + "Z": group_input.outputs["Height"], + }, + ) + + hollowcube = nw.new_node( + nodegroup_hollow_cube().name, + input_kwargs={ + "Size": combine_xyz, + "Thickness": group_input.outputs["DoorThickness"], + "Switch2": True, + "Switch4": True, + }, + ) + + set_material_1 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": hollowcube, + "Material": group_input.outputs["Surface"], + }, + ) + + subdivide_mesh = nw.new_node( + Nodes.SubdivideMesh, input_kwargs={"Mesh": set_material_1, "Level": 0} + ) + + # set_shade_smooth_2 = nw.new_node(Nodes.SetShadeSmooth, input_kwargs={'Geometry': subdivide_mesh}) + + body = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": subdivide_mesh}, label="Body" + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [door, racks, heater_1, panel, body]}, + ) + + join_geometry_2 = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [door, racks, heater_1, body]} + ) + body_bbox = nw.new_node( + 
Nodes.BoundingBox, input_kwargs={"Geometry": join_geometry_2} + ) + join_geometry_3 = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [body_bbox, panel]} + ) + + switch_2 = nw.new_node( + Nodes.Switch, + input_kwargs={ + "Switch": group_input.outputs["is_placeholder"], + "False": join_geometry, + "True": join_geometry_3, + }, + ) + geometry = nw.new_node(Nodes.RealizeInstances, [switch_2]) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Geometry": geometry}) diff --git a/infinigen/assets/objects/appliances/tv.py b/infinigen/assets/objects/appliances/tv.py new file mode 100644 index 000000000..b94d9c5a6 --- /dev/null +++ b/infinigen/assets/objects/appliances/tv.py @@ -0,0 +1,293 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory +# of this source tree. + +# Authors: +# - Lingjie Mei: primary author +# - Karhan Kayan: fix rotation + +import bmesh +import bpy +import numpy as np +from numpy.random import uniform + +from infinigen.assets.material_assignments import AssetList +from infinigen.assets.materials.text import Text +from infinigen.assets.utils.decorate import ( + mirror, + read_area, + read_co, + read_normal, + write_attribute, + write_co, +) +from infinigen.assets.utils.nodegroup import geo_radius +from infinigen.assets.utils.object import ( + data2mesh, + join_objects, + mesh2obj, + new_bbox, + new_cube, + new_plane, +) +from infinigen.assets.utils.uv import ( + compute_uv_direction, + face_corner2faces, + unwrap_faces, +) +from infinigen.core import surface +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.surface import write_attr_data +from infinigen.core.util import blender as butil +from infinigen.core.util.blender import deep_clone_obj +from infinigen.core.util.math import FixedSeed +from infinigen.core.util.random import log_uniform + + +class TVFactory(AssetFactory): + def __init__(self, factory_seed, coarse=False): + super(TVFactory, self).__init__(factory_seed, coarse) + with FixedSeed(self.factory_seed): + self.aspect_ratio = np.random.choice([9 / 16, 3 / 4]) + self.width = uniform(0.6, 2.1) + self.screen_bevel_width = uniform(0, 0.01) + self.side_margin = log_uniform(0.005, 0.01) + self.bottom_margin = uniform(0.005, 0.03) + self.depth = uniform(0.02, 0.04) + self.has_depth_extrude = uniform() < 0.4 + if self.has_depth_extrude: + self.depth_extrude = self.depth * uniform(2, 5) + else: + self.depth_extrude = self.depth * 1.5 + self.leg_type = np.random.choice(["two-legged", "single-legged"]) # 'none', + self.leg_length = uniform(0.1, 0.2) + self.leg_length_y = uniform(0.1, 0.15) + self.leg_radius = uniform(0.008, 0.015) + self.leg_width = uniform(0.5, 0.8) + self.leg_bevel_width = uniform(0.01, 0.02) + + materials = self.get_material_params() + self.surface = materials["surface"] + self.scratch = materials["scratch"] + self.edge_wear = materials["edge_wear"] + self.screen_surface = materials["screen_surface"] + self.support_surface = materials["support"] + + def get_material_params(self): + material_assignments = AssetList["TVFactory"]() + surface = material_assignments["surface"].assign_material() + scratch_prob, edge_wear_prob = material_assignments["wear_tear_prob"] + scratch, edge_wear = material_assignments["wear_tear"] + + is_scratch = np.random.uniform() < scratch_prob + is_edge_wear = np.random.uniform() < edge_wear_prob + if not is_scratch: + scratch = None + + if not is_edge_wear: + edge_wear = None + + 
args = (self.factory_seed, False) + kwargs = {"emission": 0.01 if uniform() < 0.1 else uniform(2, 3)} + screen_surface = material_assignments["screen_surface"].assign_material() + if screen_surface == Text: + screen_surface = screen_surface(*args, **kwargs) + support = material_assignments["support"].assign_material() + return { + "surface": surface, + "scratch": scratch, + "edge_wear": edge_wear, + "screen_surface": screen_surface, + "support": support, + } + + @property + def height(self): + return self.aspect_ratio * self.width + + @property + def total_width(self): + return self.width + 2 * self.side_margin + + @property + def total_height(self): + return self.height + self.side_margin + self.bottom_margin + + def create_placeholder(self, **kwargs) -> bpy.types.Object: + match self.leg_type: + case "two-legged": + max_x = ( + self.leg_length_y / 2 - (1 - self.leg_width) * self.depth_extrude + ) + case _: + max_x = self.leg_length_y / 2 - self.depth_extrude / 2 + return new_bbox( + -self.depth_extrude - self.depth, + max_x, + -self.total_width / 2, + self.total_width / 2, + -self.leg_length - self.leg_radius / 2, + self.total_height, + ) + + def create_asset(self, **params) -> bpy.types.Object: + obj = self.make_base() + self.make_screen(obj) + parts = [obj] + match self.leg_type: + case "two-legged": + legs = self.add_two_legs() + case _: + legs = self.add_single_leg() + for leg_obj in legs: + write_attribute(leg_obj, 1, "leg", "FACE", "INT") + parts.extend(legs) + obj = join_objects(parts) + obj.rotation_euler[2] = np.pi / 2 + butil.apply_transform(obj) + return obj + + def make_screen(self, obj): + cutter = new_cube() + cutter.location = 0, -1, 1 + butil.apply_transform(cutter, True) + cutter.scale = self.width / 2, 1, self.height / 2 + cutter.location = 0, 1e-3, self.bottom_margin + butil.apply_transform(cutter, True) + butil.modify_mesh(obj, "BOOLEAN", object=cutter, operation="DIFFERENCE") + butil.delete(cutter) + areas = read_area(obj) + screen = np.zeros(len(areas), int) + y = read_normal(obj)[:, 1] < 0 + screen[np.argmax(areas + 1e5 * y)] = 1 + fc2f = face_corner2faces(obj) + unwrap_faces(obj, screen) + bbox = compute_uv_direction(obj, "x", "z", screen[fc2f]) + write_attr_data(obj, "screen", screen, domain="FACE", type="INT") + self.screen_surface.apply(obj, "screen", bbox) + + def make_base(self): + obj = new_cube() + obj.location = 0, 1, 1 + butil.apply_transform(obj, True) + obj.scale = self.total_width / 2, self.depth / 2, self.total_height / 2 + butil.apply_transform(obj) + butil.modify_mesh(obj, "BEVEL", width=self.screen_bevel_width, segments=8) + if not self.has_depth_extrude: + return obj + with butil.ViewportMode(obj, "EDIT"): + bm = bmesh.from_edit_mesh(obj.data) + geom = [f for f in bm.faces if f.normal[1] > 0.5] + bmesh.ops.delete(bm, geom=geom, context="FACES_KEEP_BOUNDARY") + bmesh.update_edit_mesh(obj.data) + bpy.ops.mesh.select_mode(type="EDGE") + bpy.ops.mesh.select_all(action="SELECT") + bpy.ops.mesh.region_to_loop() + height_min, height_max = ( + self.total_height * uniform(0.1, 0.3), + self.total_height * uniform(0.5, 0.7), + ) + width = self.total_width * uniform(0.3, 0.6) + extra = new_plane() + extra.scale = width / 2, (height_max - height_min) / 2, 1 + extra.rotation_euler[0] = -np.pi / 2 + extra.location = 0, self.depth_extrude + self.depth, self.total_height / 2 + obj = join_objects([obj, extra]) + with butil.ViewportMode(obj, "EDIT"): + bpy.ops.mesh.select_mode(type="EDGE") + bpy.ops.mesh.bridge_edge_loops( + number_cuts=32, 
profile_shape_factor=-uniform(0.0, 0.4) + ) + x, y, z = read_co(obj).T + z += ( + (height_max + height_min - self.total_height) + / 2 + * np.clip(y - self.depth, 0, None) + / self.depth_extrude + ) + write_co(obj, np.stack([x, y, z], -1)) + return obj + + def add_two_legs(self): + vertices = ( + ( + -self.total_width / 2 * self.leg_width * uniform(0, 0.6), + 0, + self.total_height * uniform(0.3, 0.5), + ), + (0, 0, -self.leg_length), + (0, self.leg_length_y / 2, -self.leg_length), + (0, -self.leg_length_y / 2, -self.leg_length), + ) + edges = (0, 1), (1, 2), (1, 3) + leg = mesh2obj(data2mesh(vertices, edges)) + surface.add_geomod( + leg, geo_radius, apply=True, input_args=[self.leg_radius, 16] + ) + x, y, z = read_co(leg).T + write_co( + leg, + np.stack( + [ + x, + y, + np.maximum( + z, -self.leg_length - self.leg_radius * uniform(0.0, 0.6) + ), + ], + -1, + ), + ) + leg_ = deep_clone_obj(leg) + butil.select_none() + leg.location = ( + self.total_width / 2 * self.leg_width, + (1 - self.leg_width) * self.depth_extrude, + 0, + ) + butil.apply_transform(leg, True) + mirror(leg_) + leg_.location = ( + -self.total_width / 2 * self.leg_width, + (1 - self.leg_width) * self.depth_extrude, + 0, + ) + butil.apply_transform(leg_, True) + return [leg, leg_] + + def add_single_leg(self): + leg = new_cube() + leg.location = 0, 1, 1 + butil.apply_transform(leg, True) + leg.location = 0, self.depth_extrude / 2, -self.leg_length + leg.scale = [ + self.total_width * uniform(0.05, 0.1), + self.leg_radius, + (self.leg_length + self.total_height * uniform(0.3, 0.5)) / 2, + ] + butil.apply_transform(leg, True) + butil.modify_mesh(leg, "BEVEL", width=self.leg_bevel_width, segments=8) + base = new_cube() + base.location = 0, self.depth_extrude / 2, -self.leg_length + base.scale = [ + self.total_width * uniform(0.15, 0.3), + self.leg_length_y / 2, + self.leg_radius, + ] + butil.apply_transform(base, True) + butil.modify_mesh(base, "BEVEL", width=self.leg_bevel_width, segments=8) + return [leg, base] + + def finalize_assets(self, assets): + self.surface.apply(assets, selection="!screen", rough=True, metal_color="bw") + self.support_surface.apply( + assets, selection="leg", rough=True, metal_color="bw" + ) + + +class MonitorFactory(TVFactory): + def __init__(self, factory_seed, coarse=False): + super(MonitorFactory, self).__init__(factory_seed, coarse) + with FixedSeed(self.factory_seed): + self.width = log_uniform(0.4, 0.8) + self.leg_type = "single-legged" diff --git a/infinigen/assets/bathroom/__init__.py b/infinigen/assets/objects/bathroom/__init__.py similarity index 99% rename from infinigen/assets/bathroom/__init__.py rename to infinigen/assets/objects/bathroom/__init__.py index ab167ac6d..b2bff87e9 100644 --- a/infinigen/assets/bathroom/__init__.py +++ b/infinigen/assets/objects/bathroom/__init__.py @@ -2,7 +2,8 @@ # This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
# Authors: Lingjie Mei -from .bathtub import BathtubFactory + from .bathroom_sink import BathroomSinkFactory, StandingSinkFactory +from .bathtub import BathtubFactory from .hardware import HardwareFactory from .toilet import ToiletFactory diff --git a/infinigen/assets/bathroom/bathroom_sink.py b/infinigen/assets/objects/bathroom/bathroom_sink.py similarity index 56% rename from infinigen/assets/bathroom/bathroom_sink.py rename to infinigen/assets/objects/bathroom/bathroom_sink.py index c1474d254..9bbcfbee1 100644 --- a/infinigen/assets/bathroom/bathroom_sink.py +++ b/infinigen/assets/objects/bathroom/bathroom_sink.py @@ -1,78 +1,93 @@ # Copyright (c) Princeton University. # This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. +import bmesh + # Authors: Lingjie Mei import bpy -import bmesh import numpy as np from numpy.random import uniform -from infinigen.assets.bathroom import BathtubFactory -from infinigen.assets.table_decorations import TapFactory +from infinigen.assets.material_assignments import AssetList +from infinigen.assets.objects.bathroom.bathtub import BathtubFactory +from infinigen.assets.objects.table_decorations import TapFactory from infinigen.assets.utils.decorate import read_co, subdivide_edge_ring, subsurf -from infinigen.assets.utils.object import join_objects, new_base_cylinder, new_bbox, new_cube, origin2lowest +from infinigen.assets.utils.object import ( + join_objects, + new_base_cylinder, + new_bbox, + new_cube, +) from infinigen.core.util import blender as butil from infinigen.core.util.math import FixedSeed from infinigen.core.util.random import log_uniform -from infinigen.assets.material_assignments import AssetList class BathroomSinkFactory(BathtubFactory): - def __init__(self, factory_seed, coarse=False): super(BathroomSinkFactory, self).__init__(factory_seed, coarse) with FixedSeed(factory_seed): - self.width = uniform(.6, .9) - self.size = self.width * log_uniform(.55, .8) - self.depth = self.width * log_uniform(.2, .4) + self.width = uniform(0.6, 0.9) + self.size = self.width * log_uniform(0.55, 0.8) + self.depth = self.width * log_uniform(0.2, 0.4) self.contour_fn = self.make_box_contour - self.sink_types = np.random.choice(['undermount', 'drop-in', 'vessel']) + self.sink_types = np.random.choice(["undermount", "drop-in", "vessel"]) self.has_stand = False match self.sink_types: - case 'undermount': - self.bathtub_type = 'freestanding' - self.has_extrude = uniform() < .7 - case 'drop-in': - self.bathtub_type = 'alcove' + case "undermount": + self.bathtub_type = "freestanding" + self.has_extrude = uniform() < 0.7 + case "drop-in": + self.bathtub_type = "alcove" self.has_extrude = True case _: - self.bathtub_type = np.random.choice(['alcove', 'freestanding']) - self.has_extrude = uniform() < .7 + self.bathtub_type = np.random.choice(["alcove", "freestanding"]) + self.has_extrude = uniform() < 0.7 self.has_stand = True self.tap_factory = TapFactory(self.factory_seed) self.disp_x = [self.disp_x[0], self.disp_x[0]] - self.alcove_levels = 0 if uniform() < .5 else np.random.randint(2, 4) - self.thickness = .01 if self.has_base else uniform(.01, .03) - self.size_extrude = uniform(.2, .35) - self.tap_offset = uniform(.0, .05) - self.stand_radius = self.width / 2 * log_uniform(.15, .2) - self.stand_bottom = self.width * log_uniform(.2, .3) if uniform() < .6 else self.stand_radius - self.stand_height = uniform(.7, .9) - self.depth - self.is_stand_circular = uniform() < .5 + self.alcove_levels 
= 0 if uniform() < 0.5 else np.random.randint(2, 4) + self.thickness = 0.01 if self.has_base else uniform(0.01, 0.03) + self.size_extrude = uniform(0.2, 0.35) + self.tap_offset = uniform(0.0, 0.05) + self.stand_radius = self.width / 2 * log_uniform(0.15, 0.2) + self.stand_bottom = ( + self.width * log_uniform(0.2, 0.3) + if uniform() < 0.6 + else self.stand_radius + ) + self.stand_height = uniform(0.7, 0.9) - self.depth + self.is_stand_circular = uniform() < 0.5 self.is_hole_centered = True - material_assignments = AssetList['BathroomSinkFactory']() + material_assignments = AssetList["BathroomSinkFactory"]() self.surface = material_assignments["surface"].assign_material() def create_placeholder(self, **kwargs) -> bpy.types.Object: return new_bbox( - -(self.size_extrude + 1) * self.size, 0, 0, self.width, - -self.stand_height if self.has_stand else 0, self.depth + -(self.size_extrude + 1) * self.size, + 0, + 0, + self.width, + -self.stand_height if self.has_stand else 0, + self.depth, ) def create_asset(self, **params) -> bpy.types.Object: if self.has_base: obj = self.make_base() cutter = self.make_cutter() - butil.modify_mesh(obj, 'BOOLEAN', object=cutter, operation='DIFFERENCE') + butil.modify_mesh(obj, "BOOLEAN", object=cutter, operation="DIFFERENCE") butil.delete(cutter) else: obj = self.make_bowl() self.remove_top(obj) - butil.modify_mesh(obj, 'SOLIDIFY', thickness=self.thickness) + butil.modify_mesh(obj, "SOLIDIFY", thickness=self.thickness) subsurf(obj, self.side_levels) obj.location = np.array(obj.location) - np.min(read_co(obj), 0) butil.apply_transform(obj, True) - obj.scale = np.array([self.width, self.size, self.depth]) / np.array(obj.dimensions) + obj.scale = np.array([self.width, self.size, self.depth]) / np.array( + obj.dimensions + ) butil.apply_transform(obj, True) if self.has_extrude: self.extrude_back(obj) @@ -82,26 +97,32 @@ def create_asset(self, **params) -> bpy.types.Object: obj = join_objects([obj, hole]) obj.rotation_euler[-1] = np.pi / 2 butil.apply_transform(obj, True) - self.surface.apply(obj, clear=True, metal_color='plain') + self.surface.apply(obj, clear=True, metal_color="plain") if self.has_extrude: tap = self.tap_factory(np.random.randint(1e7)) min_x = np.min(read_co(tap)[:, 0]) - tap.location = (-1 - self.size_extrude + self.tap_offset) * self.size - min_x, self.width / 2, self.depth + tap.location = ( + (-1 - self.size_extrude + self.tap_offset) * self.size - min_x, + self.width / 2, + self.depth, + ) butil.apply_transform(tap, True) obj = join_objects([obj, tap]) return obj def extrude_back(self, obj): - with butil.ViewportMode(obj, 'EDIT'): - bpy.ops.mesh.select_mode(type='FACE') - bpy.ops.mesh.select_all(action='DESELECT') + with butil.ViewportMode(obj, "EDIT"): + bpy.ops.mesh.select_mode(type="FACE") + bpy.ops.mesh.select_all(action="DESELECT") bm = bmesh.from_edit_mesh(obj.data) for f in bm.faces: - f.select_set(f.calc_center_median()[1] > self.size / 2 and f.normal[1] > .1) + f.select_set( + f.calc_center_median()[1] > self.size / 2 and f.normal[1] > 0.1 + ) bm.select_flush(False) bmesh.update_edit_mesh(obj.data) bpy.ops.mesh.extrude_region_move( - TRANSFORM_OT_translate={'value': (0, self.size_extrude * self.size, 0)} + TRANSFORM_OT_translate={"value": (0, self.size_extrude * self.size, 0)} ) def add_stand(self, obj): @@ -113,15 +134,19 @@ def add_stand(self, obj): stand.location = self.width / 2, self.size / 2, -self.stand_height / 2 butil.apply_transform(stand, True) subdivide_edge_ring(stand, np.random.randint(3, 6)) - with 
butil.ViewportMode(stand, 'EDIT'): - bpy.ops.mesh.select_mode(type='FACE') + with butil.ViewportMode(stand, "EDIT"): + bpy.ops.mesh.select_mode(type="FACE") bm = bmesh.from_edit_mesh(stand.data) for f in bm.faces: - f.select_set(f.normal[-1] < -.1) + f.select_set(f.normal[-1] < -0.1) bm.select_flush(False) bmesh.update_edit_mesh(stand.data) bpy.ops.transform.resize( - value=(self.stand_bottom / self.stand_radius, self.stand_bottom / self.stand_radius, 1) + value=( + self.stand_bottom / self.stand_radius, + self.stand_bottom / self.stand_radius, + 1, + ) ) subsurf(stand, 2, True) subsurf(stand, 1) @@ -138,6 +163,6 @@ def finalize_assets(self, assets): class StandingSinkFactory(BathroomSinkFactory): def __init__(self, factory_seed, coarse=False): super(StandingSinkFactory, self).__init__(factory_seed, coarse) - self.bathtub_type = 'freestanding' + self.bathtub_type = "freestanding" self.has_extrude = True self.has_stand = True diff --git a/infinigen/assets/bathroom/bathtub.py b/infinigen/assets/objects/bathroom/bathtub.py similarity index 60% rename from infinigen/assets/bathroom/bathtub.py rename to infinigen/assets/objects/bathroom/bathtub.py index 1d6df5012..8a6f0528f 100644 --- a/infinigen/assets/bathroom/bathtub.py +++ b/infinigen/assets/objects/bathroom/bathtub.py @@ -1,81 +1,99 @@ # Copyright (c) Princeton University. # This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. +import bmesh + # Authors: Lingjie Mei import bpy -import bmesh import numpy as np from numpy.random import uniform +from infinigen.assets.material_assignments import AssetList +from infinigen.assets.utils.autobevel import BevelSharp from infinigen.assets.utils.decorate import ( - read_center, read_co, read_normal, subsurf, write_attribute, + read_center, + read_co, + read_normal, + subsurf, + write_attribute, write_co, ) from infinigen.assets.utils.nodegroup import geo_radius -from infinigen.assets.utils.object import join_objects, new_bbox, new_cube, new_cylinder, new_line +from infinigen.assets.utils.object import ( + join_objects, + new_bbox, + new_cube, + new_cylinder, + new_line, +) from infinigen.core import surface from infinigen.core.placement.factory import AssetFactory from infinigen.core.util import blender as butil from infinigen.core.util.blender import deep_clone_obj from infinigen.core.util.math import FixedSeed -from infinigen.assets.utils.autobevel import BevelSharp -from infinigen.assets.material_assignments import AssetList - class BathtubFactory(AssetFactory): def __init__(self, factory_seed, coarse=False): super(BathtubFactory, self).__init__(factory_seed, coarse) with FixedSeed(factory_seed): self.width = uniform(1.5, 2) - self.size = uniform(.8, 1) - self.depth = uniform(.55, .7) + self.size = uniform(0.8, 1) + self.depth = uniform(0.55, 0.7) prob = np.array([2, 2]) - self.bathtub_type = np.random.choice(['alcove', 'freestanding'], p=prob / prob.sum()) # , 'corner' - self.contour_fn = self.make_corner_contour if self.has_corner else self.make_box_contour - self.has_curve = uniform() < .5 - self.has_legs = uniform() < .5 + self.bathtub_type = np.random.choice( + ["alcove", "freestanding"], p=prob / prob.sum() + ) # , 'corner' + self.contour_fn = ( + self.make_corner_contour if self.has_corner else self.make_box_contour + ) + self.has_curve = uniform() < 0.5 + self.has_legs = uniform() < 0.5 - self.thickness = uniform(.04, .08) if self.has_base else uniform(.02, .04) - self.disp_x = uniform(0, .2, 2) - self.disp_y = 
uniform(0, .1) + self.thickness = ( + uniform(0.04, 0.08) if self.has_base else uniform(0.02, 0.04) + ) + self.disp_x = uniform(0, 0.2, 2) + self.disp_y = uniform(0, 0.1) - self.leg_height = uniform(.2, .3) * self.depth - self.leg_side = uniform(.05, .1) - self.leg_radius = uniform(.02, .03) + self.leg_height = uniform(0.2, 0.3) * self.depth + self.leg_side = uniform(0.05, 0.1) + self.leg_radius = uniform(0.02, 0.03) self.leg_y_scale = uniform() self.leg_subsurf_level = np.random.randint(3) - self.taper_factor = uniform(-.1, .1) - self.stretch_factor = uniform(-.2, .2) + self.taper_factor = uniform(-0.1, 0.1) + self.stretch_factor = uniform(-0.2, 0.2) self.alcove_levels = np.random.randint(1, 3) if self.has_base else 1 self.levels = 5 self.side_levels = 2 self.is_hole_centered = False - self.hole_radius = uniform(.015, .02) + self.hole_radius = uniform(0.015, 0.02) # /////////////////// assign materials /////////////////// - material_assignments = AssetList['BathtubFactory']() + material_assignments = AssetList["BathtubFactory"]() self.surface = material_assignments["surface"].assign_material() self.leg_surface = material_assignments["leg"].assign_material() self.hole_surface = material_assignments["hole"].assign_material() is_scratch = uniform() < material_assignments["wear_tear_prob"][0] is_edge_wear = uniform() < material_assignments["wear_tear_prob"][1] self.scratch = material_assignments["wear_tear"][0] if is_scratch else None - self.edge_wear = material_assignments["wear_tear"][1] if is_edge_wear else None + self.edge_wear = ( + material_assignments["wear_tear"][1] if is_edge_wear else None + ) # //////////////////////////////////////////////////////// self.beveler = BevelSharp(mult=5, segments=5) @property def has_base(self): - return self.bathtub_type != 'freestanding' + return self.bathtub_type != "freestanding" @property def has_corner(self): - return self.bathtub_type == 'corner' + return self.bathtub_type == "corner" def create_placeholder(self, **kwargs) -> bpy.types.Object: return new_bbox(-self.size, 0, 0, self.width, 0, self.depth) @@ -84,7 +102,7 @@ def create_asset(self, **params) -> bpy.types.Object: if self.has_base: obj = self.make_base() cutter = self.make_cutter() - butil.modify_mesh(obj, 'BOOLEAN', object=cutter, operation='DIFFERENCE') + butil.modify_mesh(obj, "BOOLEAN", object=cutter, operation="DIFFERENCE") butil.delete(cutter) else: obj = self.make_freestanding() @@ -93,7 +111,7 @@ def create_asset(self, **params) -> bpy.types.Object: parts.extend(self.make_legs(obj)) else: parts.append(self.add_base(obj)) - butil.modify_mesh(obj, 'SOLIDIFY', thickness=self.thickness) + butil.modify_mesh(obj, "SOLIDIFY", thickness=self.thickness) subsurf(obj, self.side_levels) obj = join_objects(parts) hole = self.add_hole(obj) @@ -101,8 +119,8 @@ def create_asset(self, **params) -> bpy.types.Object: obj.rotation_euler[-1] = np.pi / 2 butil.apply_transform(obj, True) - if self.bathtub_type == 'freestanding': - butil.modify_mesh(obj, 'SUBSURF', levels=1, apply=True) + if self.bathtub_type == "freestanding": + butil.modify_mesh(obj, "SUBSURF", levels=1, apply=True) else: self.beveler(obj) @@ -111,28 +129,40 @@ def create_asset(self, **params) -> bpy.types.Object: def make_freestanding(self): obj = self.make_bowl() self.remove_top(obj) - with butil.ViewportMode(obj, 'EDIT'): - bpy.ops.mesh.select_mode(type='EDGE') - bpy.ops.mesh.select_all(action='SELECT') + with butil.ViewportMode(obj, "EDIT"): + bpy.ops.mesh.select_mode(type="EDGE") + bpy.ops.mesh.select_all(action="SELECT") 
bpy.ops.mesh.region_to_loop() bpy.ops.mesh.extrude_edges_move() bpy.ops.transform.resize( - value=(1 + self.thickness * 2 / self.width, 1 + self.thickness / self.size, 1) + value=( + 1 + self.thickness * 2 / self.width, + 1 + self.thickness / self.size, + 1, + ) ) obj.location[1] -= self.size / 2 butil.apply_transform(obj, True) - butil.modify_mesh(obj, 'SIMPLE_DEFORM', deform_method='TAPER', angle=self.taper_factor) - butil.modify_mesh(obj, 'SIMPLE_DEFORM', deform_method='STRETCH', angle=self.taper_factor) - obj.location = 0, self.size / 2, -np.min(read_co(obj)[:, -1]) * uniform(.5, .7) + butil.modify_mesh( + obj, "SIMPLE_DEFORM", deform_method="TAPER", angle=self.taper_factor + ) + butil.modify_mesh( + obj, "SIMPLE_DEFORM", deform_method="STRETCH", angle=self.taper_factor + ) + obj.location = ( + 0, + self.size / 2, + -np.min(read_co(obj)[:, -1]) * uniform(0.5, 0.7), + ) butil.apply_transform(obj, True) return obj def remove_top(self, obj): butil.select_none() - with butil.ViewportMode(obj, 'EDIT'): + with butil.ViewportMode(obj, "EDIT"): bm = bmesh.from_edit_mesh(obj.data) geom = [f for f in bm.faces if f.calc_center_median()[-1] > self.depth] - bmesh.ops.delete(bm, geom=geom, context='FACES_KEEP_BOUNDARY') + bmesh.ops.delete(bm, geom=geom, context="FACES_KEEP_BOUNDARY") bmesh.update_edit_mesh(obj.data) def make_legs(self, obj): @@ -146,19 +176,26 @@ def make_legs(self, obj): i = np.argmax(metric) p = co[i] n = normal[i] - q = co[i] + self.leg_side * np.array([n[0], n[1] * self.leg_y_scale, n[2]]) + q = co[i] + self.leg_side * np.array( + [n[0], n[1] * self.leg_y_scale, n[2]] + ) r = np.array([q[0], q[1], 0]) leg = new_line(2) write_co(leg, np.stack([p, q, r])) subsurf(leg, self.leg_subsurf_level) surface.add_geomod( - leg, geo_radius, apply=True, input_args=[self.leg_radius, 32], - input_kwargs={'to_align_tilt': False} + leg, + geo_radius, + apply=True, + input_args=[self.leg_radius, 32], + input_kwargs={"to_align_tilt": False}, + ) + butil.modify_mesh( + leg, "BEVEL", width=self.leg_radius * uniform(0.3, 0.7) ) - butil.modify_mesh(leg, 'BEVEL', width=self.leg_radius * uniform(.3, .7)) leg.location[-1] = self.leg_radius butil.apply_transform(leg, True) - write_attribute(leg, 1, 'leg', 'FACE') + write_attribute(leg, 1, "leg", "FACE") legs.append(leg) return legs @@ -168,48 +205,62 @@ def add_base(self, obj): x, y, z_ = read_co(obj).T cutter.scale = 10, 10, np.min(z_) + self.leg_height butil.apply_transform(cutter, True) - butil.modify_mesh(obj, 'BOOLEAN', object=cutter, operation='INTERSECT') + butil.modify_mesh(obj, "BOOLEAN", object=cutter, operation="INTERSECT") butil.delete(cutter) - with butil.ViewportMode(obj, 'EDIT'): + with butil.ViewportMode(obj, "EDIT"): bm = bmesh.from_edit_mesh(obj.data) geom = [f for f in bm.faces if len(f.verts) > 10] - bmesh.ops.delete(bm, geom=geom, context='FACES_KEEP_BOUNDARY') + bmesh.ops.delete(bm, geom=geom, context="FACES_KEEP_BOUNDARY") bmesh.update_edit_mesh(obj.data) - bpy.ops.mesh.select_mode(type='EDGE') - bpy.ops.mesh.select_all(action='SELECT') + bpy.ops.mesh.select_mode(type="EDGE") + bpy.ops.mesh.select_all(action="SELECT") bpy.ops.mesh.region_to_loop() - bpy.ops.mesh.select_all(action='INVERT') - bpy.ops.mesh.delete(type='EDGE') - bpy.ops.mesh.select_all(action='SELECT') - bpy.ops.mesh.extrude_edges_move(TRANSFORM_OT_translate={'value': (0, 0, -self.depth)}) + bpy.ops.mesh.select_all(action="INVERT") + bpy.ops.mesh.delete(type="EDGE") + bpy.ops.mesh.select_all(action="SELECT") + bpy.ops.mesh.extrude_edges_move( + 
TRANSFORM_OT_translate={"value": (0, 0, -self.depth)} + ) x, y, z = read_co(obj).T z = np.clip(z, 0, None) write_co(obj, np.stack([x, y, z], -1)) - with butil.ViewportMode(obj, 'EDIT'): - bpy.ops.mesh.select_all(action='SELECT') + with butil.ViewportMode(obj, "EDIT"): + bpy.ops.mesh.select_all(action="SELECT") bpy.ops.mesh.normals_make_consistent(inside=False) subsurf(obj, 2) - butil.modify_mesh(obj, 'SOLIDIFY', thickness=self.thickness) + butil.modify_mesh(obj, "SOLIDIFY", thickness=self.thickness) return obj def make_box_contour(self, t, i): - return [(t + self.disp_x[0] * i, t + self.disp_y * i), + return [ + (t + self.disp_x[0] * i, t + self.disp_y * i), (self.width - t - self.disp_x[1] * i, t + self.disp_y * i), (self.width - t - self.disp_x[1] * i, self.size - t - self.disp_y * i), - (t + self.disp_x[0] * i, self.size - t - self.disp_y * i)] + (t + self.disp_x[0] * i, self.size - t - self.disp_y * i), + ] def make_corner_contour(self, t, i): - return [(t + self.disp_y * i, t + self.disp_y * i), + return [ + (t + self.disp_y * i, t + self.disp_y * i), (self.width - t - self.disp_x[1] * i, t + self.disp_y * i), - (self.width - t - self.disp_x[1] * i, self.size - (t + self.disp_y * i) / np.sqrt(2)), - (self.size - (t + self.disp_y * i) / np.sqrt(2), self.width - t - self.disp_x[0] * i), - (t + self.disp_y * i, self.width - t - self.disp_x[0] * i)] + ( + self.width - t - self.disp_x[1] * i, + self.size - (t + self.disp_y * i) / np.sqrt(2), + ), + ( + self.size - (t + self.disp_y * i) / np.sqrt(2), + self.width - t - self.disp_x[0] * i, + ), + (t + self.disp_y * i, self.width - t - self.disp_x[0] * i), + ] # noinspection PyArgumentList def make_base(self): contour = self.contour_fn(0, 0) obj = new_cylinder(vertices=len(contour)) - co = np.concatenate([np.array([[x, y, 0], [x, y, self.depth]]) for x, y in contour]) + co = np.concatenate( + [np.array([[x, y, 0], [x, y, self.depth]]) for x, y in contour] + ) write_co(obj, co) return obj @@ -223,7 +274,10 @@ def make_bowl(self): upper = self.contour_fn(0, 0) obj = new_cylinder(vertices=len(lower)) co = np.concatenate( - [np.array([[x, y, 0], [z, w, self.depth * 2]]) for (x, y), (z, w) in zip(lower[::-1], upper[::-1])] + [ + np.array([[x, y, 0], [z, w, self.depth * 2]]) + for (x, y), (z, w) in zip(lower[::-1], upper[::-1]) + ] ) write_co(obj, co) subsurf(obj, self.alcove_levels, True) @@ -241,8 +295,12 @@ def make_cutter(self): upper = self.contour_fn(self.thickness, 0) obj = new_cylinder(vertices=len(lower)) co = np.concatenate( - [np.array([[x, y, self.thickness], [z, w, self.depth * 2 - self.thickness]]) for (x, y), (z, w) in - zip(lower[::-1], upper[::-1])] + [ + np.array( + [[x, y, self.thickness], [z, w, self.depth * 2 - self.thickness]] + ) + for (x, y), (z, w) in zip(lower[::-1], upper[::-1]) + ] ) write_co(obj, co) subsurf(obj, self.alcove_levels, True) @@ -262,26 +320,26 @@ def find_hole(self, obj, x=None, y=None): def add_hole(self, obj): match self.bathtub_type: - case 'alcove': + case "alcove": location = self.find_hole(obj) - case 'freestanding': - location = self.find_hole(obj, uniform(.35, .4) * self.width) + case "freestanding": + location = self.find_hole(obj, uniform(0.35, 0.4) * self.width) case _: location = self.find_hole(obj, self.size / 2, self.size / 2) if self.is_hole_centered: location = self.find_hole(obj) obj = new_cylinder() - obj.scale = self.hole_radius, self.hole_radius, .005 + obj.scale = self.hole_radius, self.hole_radius, 0.005 obj.location = location butil.apply_transform(obj, True) - write_attribute(obj, 
1, 'hole', 'FACE') + write_attribute(obj, 1, "hole", "FACE") return obj def finalize_assets(self, assets): self.surface.apply(assets, clear=True) if self.has_legs and not self.has_base: - self.leg_surface.apply(assets, 'leg', metal_color='bw+natural') - self.hole_surface.apply(assets, 'hole', metal_color='bw+natural') + self.leg_surface.apply(assets, "leg", metal_color="bw+natural") + self.hole_surface.apply(assets, "hole", metal_color="bw+natural") if self.scratch: self.scratch.apply(assets) diff --git a/infinigen/assets/bathroom/hardware.py b/infinigen/assets/objects/bathroom/hardware.py similarity index 69% rename from infinigen/assets/bathroom/hardware.py rename to infinigen/assets/objects/bathroom/hardware.py index 411dcbbda..49d42c371 100644 --- a/infinigen/assets/bathroom/hardware.py +++ b/infinigen/assets/objects/bathroom/hardware.py @@ -6,41 +6,47 @@ import numpy as np from numpy.random import uniform +from infinigen.assets.material_assignments import AssetList from infinigen.assets.utils.decorate import subsurf from infinigen.assets.utils.object import join_objects, new_base_cylinder, new_cube from infinigen.core.placement.factory import AssetFactory -from infinigen.core.util.math import FixedSeed from infinigen.core.util import blender as butil +from infinigen.core.util.math import FixedSeed from infinigen.core.util.random import log_uniform -from infinigen.assets.material_assignments import AssetList class HardwareFactory(AssetFactory): def __init__(self, factory_seed, coarse=False): super(HardwareFactory, self).__init__(factory_seed, coarse) with FixedSeed(self.factory_seed): - self.attachment_radius = uniform(.02, .03) - self.attachment_depth = uniform(.01, .015) - self.radius = uniform(.01, .015) - self.depth = uniform(.06, .1) - self.is_circular = uniform() < .5 - self.hardware_type = np.random.choice(['hook', 'holder', 'bar', 'ring']) + self.attachment_radius = uniform(0.02, 0.03) + self.attachment_depth = uniform(0.01, 0.015) + self.radius = uniform(0.01, 0.015) + self.depth = uniform(0.06, 0.1) + self.is_circular = uniform() < 0.5 + self.hardware_type = np.random.choice(["hook", "holder", "bar", "ring"]) self.hook_length = self.attachment_radius * uniform(2, 4) - self.holder_length = uniform(.15, .25) - self.bar_length = uniform(.4, .8) + self.holder_length = uniform(0.15, 0.25) + self.bar_length = uniform(0.4, 0.8) self.extension_length = self.attachment_radius * uniform(2, 3) self.ring_radius = log_uniform(2, 6) * self.attachment_radius - material_assignments = AssetList['HardwareFactory']() - self.surface = material_assignments['surface'].assign_material() - is_scratch = uniform() < material_assignments['wear_tear_prob'][0] - is_edge_wear = uniform() < material_assignments['wear_tear_prob'][1] - self.scratch = material_assignments['wear_tear'][0] if is_scratch else None - self.edge_wear = material_assignments['wear_tear'][1] if is_edge_wear else None + material_assignments = AssetList["HardwareFactory"]() + self.surface = material_assignments["surface"].assign_material() + is_scratch = uniform() < material_assignments["wear_tear_prob"][0] + is_edge_wear = uniform() < material_assignments["wear_tear_prob"][1] + self.scratch = material_assignments["wear_tear"][0] if is_scratch else None + self.edge_wear = ( + material_assignments["wear_tear"][1] if is_edge_wear else None + ) def make_attachment(self): base = new_base_cylinder() if self.is_circular else new_cube() - base.scale = self.attachment_radius, self.attachment_radius, self.attachment_depth / 2 + base.scale = ( + 
self.attachment_radius, + self.attachment_radius, + self.attachment_depth / 2, + ) base.rotation_euler[0] = np.pi / 2 base.location[1] = -self.attachment_depth / 2 butil.apply_transform(base, True) @@ -61,7 +67,11 @@ def make_hook(self): def make_holder(self): obj = new_base_cylinder() if self.is_circular else new_cube() - obj.scale = self.radius, self.radius, (self.holder_length + self.extension_length) / 2 + obj.scale = ( + self.radius, + self.radius, + (self.holder_length + self.extension_length) / 2, + ) obj.rotation_euler[1] = np.pi / 2 obj.location[0] = (self.holder_length - self.extension_length) / 2 butil.apply_transform(obj, True) @@ -69,7 +79,11 @@ def make_holder(self): def make_bar(self): obj = new_base_cylinder() if self.is_circular else new_cube() - obj.scale = self.radius, self.radius, self.bar_length / 2 + self.extension_length + obj.scale = ( + self.radius, + self.radius, + self.bar_length / 2 + self.extension_length, + ) obj.rotation_euler[1] = np.pi / 2 obj.location[0] = self.bar_length / 2 butil.apply_transform(obj, True) @@ -77,8 +91,9 @@ def make_bar(self): def make_ring(self): bpy.ops.mesh.primitive_torus_add( - major_segments=128, major_radius=self.ring_radius, - minor_radius=self.radius * uniform(.4, .7) + major_segments=128, + major_radius=self.ring_radius, + minor_radius=self.radius * uniform(0.4, 0.7), ) obj = bpy.context.active_object obj.rotation_euler[0] = np.pi / 2 @@ -89,13 +104,13 @@ def make_ring(self): def create_asset(self, **params) -> bpy.types.Object: match self.hardware_type: - case 'hook': + case "hook": extra = self.make_hook() - case 'holder': + case "holder": extra = self.make_holder() - case 'bar': + case "bar": extra = self.make_bar() - case 'ring': + case "ring": extra = self.make_ring() case _: return self.make_attachment() @@ -103,7 +118,7 @@ def create_asset(self, **params) -> bpy.types.Object: extra.location[1] = -self.depth butil.apply_transform(extra, True) parts = [self.make_attachment(), extra] - if self.hardware_type == 'bar': + if self.hardware_type == "bar": attachment_ = self.make_attachment() attachment_.location[0] = self.bar_length butil.apply_transform(attachment_, True) @@ -114,7 +129,7 @@ def create_asset(self, **params) -> bpy.types.Object: return obj def finalize_assets(self, assets): - self.surface.apply(assets, metal_color='plain') + self.surface.apply(assets, metal_color="plain") if self.scratch: self.scratch.apply(assets) if self.edge_wear: diff --git a/infinigen/assets/objects/bathroom/toilet.py b/infinigen/assets/objects/bathroom/toilet.py new file mode 100644 index 000000000..1c1d03378 --- /dev/null +++ b/infinigen/assets/objects/bathroom/toilet.py @@ -0,0 +1,370 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
+ +# Authors: Lingjie Mei +import bpy +import numpy as np +from numpy.random import uniform + +from infinigen.assets.material_assignments import AssetList +from infinigen.assets.utils.decorate import ( + read_center, + read_co, + read_edge_center, + read_edges, + read_normal, + select_edges, + select_faces, + select_vertices, + subsurf, + write_attribute, + write_co, +) +from infinigen.assets.utils.draw import align_bezier +from infinigen.assets.utils.object import join_objects, new_bbox, new_cube, new_cylinder +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.util import blender as butil +from infinigen.core.util.blender import deep_clone_obj +from infinigen.core.util.math import FixedSeed, normalize +from infinigen.core.util.random import log_uniform + + +class ToiletFactory(AssetFactory): + def __init__(self, factory_seed, coarse=False): + super().__init__(factory_seed, coarse) + with FixedSeed(self.factory_seed): + self.size = uniform(0.4, 0.5) + self.width = self.size * uniform(0.7, 0.8) + self.height = self.size * uniform(0.8, 0.9) + self.size_mid = uniform(0.6, 0.65) + self.curve_scale = log_uniform(0.8, 1.2, 4) + self.depth = self.size * uniform(0.5, 0.6) + self.tube_scale = uniform(0.25, 0.3) + self.thickness = uniform(0.05, 0.06) + self.extrude_height = uniform(0.015, 0.02) + self.stand_depth = self.depth * uniform(0.85, 0.95) + self.stand_scale = uniform(0.7, 0.85) + self.bottom_offset = uniform(0.5, 1.5) + self.back_thickness = self.thickness * uniform(0, 0.8) + self.back_size = self.size * uniform(0.55, 0.65) + self.back_scale = uniform(0.8, 1.0) + self.seat_thickness = uniform(0.1, 0.3) * self.thickness + self.seat_size = self.thickness * uniform(1.2, 1.6) + self.has_seat_cut = uniform() < 0.1 + self.tank_width = self.width * uniform(1.0, 1.2) + self.tank_height = self.height * uniform(0.6, 1.0) + self.tank_size = self.back_size - self.seat_size - uniform(0.02, 0.03) + self.tank_cap_height = uniform(0.03, 0.04) + self.tank_cap_extrude = 0 if uniform() < 0.5 else uniform(0.005, 0.01) + self.cover_rotation = -uniform(0, np.pi / 2) + self.hardware_type = np.random.choice(["button", "handle"]) + self.hardware_cap = uniform(0.01, 0.015) + self.hardware_radius = uniform(0.015, 0.02) + self.hardware_length = uniform(0.04, 0.05) + self.hardware_on_side = uniform() < 0.5 + material_assignments = AssetList["ToiletFactory"]() + self.surface = material_assignments["surface"].assign_material() + self.hardware_surface = material_assignments[ + "hardware_surface" + ].assign_material() + + is_scratch = uniform() < material_assignments["wear_tear_prob"][0] + is_edge_wear = uniform() < material_assignments["wear_tear_prob"][1] + self.scratch = material_assignments["wear_tear"][0] if is_scratch else None + self.edge_wear = ( + material_assignments["wear_tear"][1] if is_edge_wear else None + ) + + @property + def mid_offset(self): + return (1 - self.size_mid) * self.size + + def create_placeholder(self, **kwargs) -> bpy.types.Object: + return new_bbox( + -self.mid_offset - self.back_size - self.tank_cap_extrude, + self.size_mid * self.size + self.thickness + self.thickness, + -self.width / 2 - self.thickness * 1.1, + self.width / 2 + self.thickness * 1.1, + -self.height, + max( + self.tank_height, + -np.sin(self.cover_rotation) + * (self.seat_size + self.size + self.thickness + self.thickness), + ), + ) + + def create_asset(self, **params) -> bpy.types.Object: + upper = self.build_curve() + lower = deep_clone_obj(upper) + lower.scale = [self.tube_scale] * 3 + 
lower.location = 0, self.tube_scale * self.mid_offset / 2, -self.depth + butil.apply_transform(lower, True) + bottom = deep_clone_obj(upper) + bottom.scale = [self.stand_scale] * 3 + bottom.location = ( + 0, + self.tube_scale * (1 - self.size_mid) * self.size / 2 * self.bottom_offset, + -self.height, + ) + butil.apply_transform(bottom, True) + + obj = self.make_tube(lower, upper) + seat, cover = self.make_seat(obj) + stand = self.make_stand(obj, bottom) + back = self.make_back(obj) + tank = self.make_tank() + butil.modify_mesh(obj, "BEVEL", segments=2) + match self.hardware_type: + case "button": + hardware = self.add_button() + case _: + hardware = self.add_handle() + write_attribute(hardware, 1, "hardware", "FACE") + obj = join_objects([obj, seat, cover, stand, back, tank, hardware]) + obj.rotation_euler[-1] = np.pi / 2 + butil.apply_transform(obj) + return obj + + def build_curve(self): + x_anchors = [0, self.width / 2, 0] + y_anchors = [-self.size_mid * self.size, 0, self.mid_offset] + axes = [np.array([1, 0, 0]), np.array([0, 1, 0]), np.array([1, 0, 0])] + obj = align_bezier([x_anchors, y_anchors, 0], axes, self.curve_scale) + butil.modify_mesh(obj, "MIRROR", use_axis=(True, False, False)) + return obj + + def make_tube(self, lower, upper): + obj = join_objects([upper, lower]) + with butil.ViewportMode(obj, "EDIT"): + bpy.ops.mesh.select_mode(type="EDGE") + bpy.ops.mesh.select_all(action="SELECT") + bpy.ops.mesh.bridge_edge_loops( + number_cuts=np.random.randint(12, 16), + profile_shape_factor=uniform(0.1, 0.2), + interpolation="SURFACE", + ) + butil.modify_mesh( + obj, + "SOLIDIFY", + thickness=self.thickness, + offset=1, + solidify_mode="NON_MANIFOLD", + nonmanifold_boundary_mode="FLAT", + ) + normal = read_normal(obj) + select_faces(obj, normal[:, -1] > 0.9) + with butil.ViewportMode(obj, "EDIT"): + bpy.ops.mesh.extrude_region_move( + TRANSFORM_OT_translate={ + "value": (0, 0, self.thickness + self.extrude_height) + } + ) + x, y, z = read_co(obj).T + write_co(obj, np.stack([x, y, np.clip(z, None, self.extrude_height)], -1)) + return obj + + def make_seat(self, obj): + seat = self.make_plane(obj) + cover = deep_clone_obj(seat) + butil.modify_mesh(seat, "SOLIDIFY", thickness=self.extrude_height, offset=1) + if self.has_seat_cut: + cutter = new_cube() + cutter.scale = [self.thickness] * 3 + cutter.location = 0, -self.thickness / 2 - self.size_mid * self.size, 0 + butil.apply_transform(cutter, True) + butil.select_none() + butil.modify_mesh(seat, "BOOLEAN", object=cutter, operation="DIFFERENCE") + butil.delete(cutter) + butil.modify_mesh(seat, "BEVEL", segments=2) + + x, y, _ = read_edge_center(cover).T + i = np.argmin(np.abs(x) + np.abs(y)) + selection = np.full(len(x), False) + selection[i] = True + select_edges(cover, selection) + with butil.ViewportMode(cover, "EDIT"): + bpy.ops.mesh.loop_multi_select() + bpy.ops.mesh.fill_grid() + butil.modify_mesh(cover, "SOLIDIFY", thickness=self.extrude_height, offset=1) + cover.location = [ + 0, + -self.mid_offset - self.seat_size + self.extrude_height / 2, + -self.extrude_height / 2, + ] + butil.apply_transform(cover, True) + cover.rotation_euler[0] = self.cover_rotation + cover.location = [ + 0, + self.mid_offset + self.seat_size - self.extrude_height / 2, + self.extrude_height * 1.5, + ] + butil.apply_transform(cover, True) + butil.modify_mesh(cover, "BEVEL", segments=2) + return seat, cover + + def make_plane(self, obj): + select_faces(obj, lambda x, y, z: z > self.extrude_height * 2 / 3) + with butil.ViewportMode(obj, "EDIT"): + 
bpy.ops.mesh.duplicate_move() + bpy.ops.mesh.separate(type="SELECTED") + seat = next(o for o in bpy.context.selected_objects if o != obj) + butil.select_none() + select_vertices(seat, lambda x, y, z: y > self.mid_offset + self.seat_thickness) + with butil.ViewportMode(seat, "EDIT"): + bpy.ops.mesh.extrude_edges_move( + TRANSFORM_OT_translate={ + "value": (0, self.seat_size + self.thickness * 2, 0) + } + ) + x, y, z = read_co(seat).T + write_co( + seat, + np.stack([x, np.clip(y, None, self.mid_offset + self.seat_size), z], -1), + ) + return seat + + def make_stand(self, obj, bottom): + co = read_co(obj)[read_edges(obj).reshape(-1)].reshape(-1, 2, 3) + horizontal = np.abs(normalize(co[:, 0] - co[:, 1])[:, -1]) < 0.1 + x, y, z = read_edge_center(obj).T + under_depth = z < -self.stand_depth + i = np.argmin(y - horizontal - under_depth) + selection = np.full(len(co), False) + selection[i] = True + select_edges(obj, selection) + with butil.ViewportMode(obj, "EDIT"): + bpy.ops.mesh.loop_multi_select() + bpy.ops.mesh.duplicate_move() + bpy.ops.mesh.separate(type="SELECTED") + stand = next(o for o in bpy.context.selected_objects if o != obj) + stand = join_objects([stand, bottom]) + with butil.ViewportMode(stand, "EDIT"): + bpy.ops.mesh.select_mode(type="EDGE") + bpy.ops.mesh.select_all(action="SELECT") + bpy.ops.mesh.bridge_edge_loops( + number_cuts=np.random.randint(12, 16), + profile_shape_factor=uniform(0.0, 0.15), + ) + return stand + + def make_back(self, obj): + back = read_center(obj)[:, 1] > self.mid_offset - self.back_thickness + back_facing = read_normal(obj)[:, 1] > 0.1 + butil.select_none() + select_faces(obj, back & back_facing) + with butil.ViewportMode(obj, "EDIT"): + bpy.ops.mesh.region_to_loop() + bpy.ops.mesh.duplicate_move() + bpy.ops.mesh.separate(type="SELECTED") + back = next(o for o in bpy.context.selected_objects if o != obj) + butil.modify_mesh(back, "CORRECTIVE_SMOOTH") + butil.select_none() + with butil.ViewportMode(back, "EDIT"): + bpy.ops.mesh.select_all(action="SELECT") + bpy.ops.mesh.extrude_edges_move( + TRANSFORM_OT_translate={ + "value": (0, self.back_size + self.thickness * 2, 0) + } + ) + bpy.ops.transform.resize(value=(self.back_scale, 1, 1)) + bpy.ops.mesh.edge_face_add() + back.location[1] -= 0.01 + butil.apply_transform(back, True) + x, y, z = read_co(back).T + write_co( + back, + np.stack([x, np.clip(y, None, self.mid_offset + self.back_size), z], -1), + ) + return back + + def make_tank(self): + tank = new_cube() + tank.scale = self.tank_width / 2, self.tank_size / 2, self.tank_height / 2 + tank.location = ( + 0, + self.mid_offset + self.back_size - self.tank_size / 2, + self.tank_height / 2, + ) + butil.apply_transform(tank, True) + subsurf(tank, 2, True) + butil.modify_mesh(tank, "BEVEL", segments=2) + cap = new_cube() + cap.scale = ( + self.tank_width / 2 + self.tank_cap_extrude, + self.tank_size / 2 + self.tank_cap_extrude, + self.tank_cap_height / 2, + ) + cap.location = ( + 0, + self.mid_offset + self.back_size - self.tank_size / 2, + self.tank_height, + ) + butil.apply_transform(cap, True) + butil.modify_mesh( + cap, "BEVEL", width=uniform(0, self.extrude_height), segments=4 + ) + tank = join_objects([tank, cap]) + return tank + + def add_button(self): + obj = new_cylinder() + obj.scale = ( + self.hardware_radius, + self.hardware_radius, + self.tank_cap_height / 2 + 1e-3, + ) + obj.location = ( + 0, + self.mid_offset + self.back_size - self.tank_size / 2, + self.tank_height, + ) + butil.apply_transform(obj, True) + return obj + + def 
add_handle(self): + obj = new_cylinder() + obj.scale = self.hardware_radius, self.hardware_radius, self.hardware_cap + obj.rotation_euler[0] = np.pi / 2 + butil.apply_transform(obj, True) + lever = new_cylinder() + lever.scale = ( + self.hardware_radius / 2, + self.hardware_radius / 2, + self.hardware_length, + ) + lever.rotation_euler[1] = np.pi / 2 + lever.location = [ + -self.hardware_radius * uniform(0, 0.5), + -self.hardware_cap, + -self.hardware_radius * uniform(0, 0.5), + ] + butil.apply_transform(lever, True) + obj = join_objects([obj, lever]) + if self.hardware_on_side: + obj.location = [ + -self.tank_width / 2 + self.hardware_radius + uniform(0.01, 0.02), + self.mid_offset + self.back_size - self.tank_size, + self.tank_height - self.hardware_radius - uniform(0.02, 0.03), + ] + else: + obj.location = [ + -self.tank_width / 2, + self.mid_offset + + self.back_size + - self.tank_size + + self.hardware_radius + + uniform(0.01, 0.02), + self.tank_height - self.hardware_radius - uniform(0.02, 0.03), + ] + obj.rotation_euler[-1] = -np.pi / 2 + butil.apply_transform(obj, True) + butil.modify_mesh(obj, "BEVEL", width=uniform(0.005, 0.01), segments=2) + return obj + + def finalize_assets(self, assets): + self.surface.apply(assets, clear=True, metal_color="plain") + self.hardware_surface.apply(assets, "hardware", metal_color="natural") + if self.scratch: + self.scratch.apply(assets) + if self.edge_wear: + self.edge_wear.apply(assets) diff --git a/infinigen/assets/cactus/__init__.py b/infinigen/assets/objects/cactus/__init__.py similarity index 72% rename from infinigen/assets/cactus/__init__.py rename to infinigen/assets/objects/cactus/__init__.py index 2b6e36f4b..9e97f4e30 100644 --- a/infinigen/assets/cactus/__init__.py +++ b/infinigen/assets/objects/cactus/__init__.py @@ -4,9 +4,14 @@ # Authors: Lingjie Mei # Date: April 13 2023 -from .generate import CactusFactory, ColumnarCactusFactory, GlobularCactusFactory, PrickyPearCactusFactory, \ - KalidiumCactusFactory from .columnar import ColumnarBaseCactusFactory +from .generate import ( + CactusFactory, + ColumnarCactusFactory, + GlobularCactusFactory, + KalidiumCactusFactory, + PrickyPearCactusFactory, +) from .globular import GlobularBaseCactusFactory -from .pricky_pear import PrickyPearBaseCactusFactory from .kalidium import KalidiumBaseCactusFactory +from .pricky_pear import PrickyPearBaseCactusFactory diff --git a/infinigen/assets/cactus/base.py b/infinigen/assets/objects/cactus/base.py similarity index 51% rename from infinigen/assets/cactus/base.py rename to infinigen/assets/objects/cactus/base.py index b83ec5fa0..5dad2b470 100644 --- a/infinigen/assets/cactus/base.py +++ b/infinigen/assets/objects/cactus/base.py @@ -6,22 +6,22 @@ import bpy -from infinigen.assets.cactus.spike import make_default_selections -from infinigen.assets.utils.decorate import write_attribute -from infinigen.core.nodes.node_info import Nodes -from infinigen.core.nodes.node_wrangler import NodeWrangler +from infinigen.assets.objects.cactus.spike import make_default_selections from infinigen.core.placement.factory import AssetFactory + class BaseCactusFactory(AssetFactory): - spike_distance = .025 - cap_percentage = .1 - noise_strength = .02 - base_radius = .002 + spike_distance = 0.025 + cap_percentage = 0.1 + noise_strength = 0.02 + base_radius = 0.002 density = 5e4 def __init__(self, factory_seed, coarse=False): super(BaseCactusFactory, self).__init__(factory_seed, coarse) - self.points_fn = make_default_selections(self.spike_distance, self.cap_percentage, 
self.density) + self.points_fn = make_default_selections( + self.spike_distance, self.cap_percentage, self.density + ) def create_asset(self, **params) -> bpy.types.Object: - raise NotImplemented + raise NotImplementedError() diff --git a/infinigen/assets/objects/cactus/columnar.py b/infinigen/assets/objects/cactus/columnar.py new file mode 100644 index 000000000..dc3ac8586 --- /dev/null +++ b/infinigen/assets/objects/cactus/columnar.py @@ -0,0 +1,141 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Lingjie Mei + + +import bpy +import numpy as np +from numpy.random import uniform + +from infinigen.assets.objects.cactus.base import BaseCactusFactory +from infinigen.assets.objects.trees.tree import build_radius_tree +from infinigen.assets.utils.decorate import geo_extension +from infinigen.assets.utils.nodegroup import align_tilt +from infinigen.core import surface +from infinigen.core.nodes.node_info import Nodes +from infinigen.core.nodes.node_wrangler import NodeWrangler + + +class ColumnarBaseCactusFactory(BaseCactusFactory): + spike_distance = 0.08 + + @staticmethod + def radius_fn(base_radius, size, resolution): + radius_decay = uniform(0.5, 0.8) + radius_decay_root = uniform(0.7, 0.9) + leaf_alpha = uniform(2, 3) + radius = base_radius * radius_decay * np.ones(size * resolution) + radius[:resolution] *= radius_decay_root ** ( + 1 - np.arange(resolution) / resolution + ) + radius[-resolution:] *= ( + 1 - (np.arange(resolution) / resolution) ** leaf_alpha + ) ** (1 / leaf_alpha) + return radius + + @property + def branch_config(self): + n_major = 16 + n_minor = np.random.randint(10, 14) + b_minor = np.random.randint(2, 4) + while True: + angles = uniform(0, np.pi * 2, b_minor) + s = np.sort(angles) + if (np.concatenate([s[1:], [s[0] + np.pi * 2]]) - s > np.pi / 3).all(): + break + minor_config = { + "n": b_minor, + "path_kargs": lambda idx: { + "n_pts": n_minor, + "std": 0.4, + "momentum": 0.1, + "sz": 0.2, + "pull_dir": [0, 0, 1], + "pull_init": 0.0, + "pull_factor": 4.0, + }, + "spawn_kargs": lambda idx: { + "ang_min": np.pi / 2.5, + "ang_max": np.pi / 2, + "rng": [0.2, 0.6], + "axis2": [np.cos(angles[idx]), np.sin(angles[idx]), 0], + }, + "children": [], + } + major_config = { + "n": 1, + "path_kargs": lambda idx: { + "n_pts": n_major, + "std": 0.4, + "momentum": 0.99, + "sz": 0.3, + }, + "spawn_kargs": lambda idx: {"init_vec": [0, 0, 1]}, + "children": [minor_config], + } + return major_config + + def create_asset(self, face_size=0.01, **params) -> bpy.types.Object: + resolution = 16 + base_radius = 0.25 + obj = build_radius_tree( + self.radius_fn, self.branch_config, base_radius, resolution, True + ) + surface.add_geomod( + obj, + self.geo_star, + apply=True, + input_attributes=[None, "radius"], + attributes=["selection"], + ) + surface.add_geomod( + obj, geo_extension, apply=True, input_kwargs={"musgrave_dimensions": "2D"} + ) + return obj + + @staticmethod + def geo_star(nw: NodeWrangler): + perturb = 0.1 + curve, radius = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketFloat", "Radius", None), + ], + ).outputs[:2] + star_resolution = np.random.randint(5, 8) + circle = nw.new_node(Nodes.MeshCircle, [star_resolution * 3]) + circle = nw.new_node( + Nodes.SetPosition, + [circle, None, None, nw.uniform([-perturb] * 3, [perturb] * 3)], + ) + circle = nw.new_node( + Nodes.Transform, + 
[circle], + input_kwargs={"Scale": [*uniform(0.8, 1.0, 2), 1]}, + ) + selection = nw.compare( + "EQUAL", nw.math("MODULO", nw.new_node(Nodes.Index), 2), 0 + ) + circle, _, selection = nw.new_node( + Nodes.CaptureAttribute, [circle, None, selection] + ).outputs[:3] + circle = nw.new_node( + Nodes.SetPosition, + [ + circle, + selection, + nw.scale(nw.new_node(Nodes.InputPosition), uniform(1.15, 1.25)), + ], + ) + profile_curve = nw.new_node(Nodes.MeshToCurve, [circle]) + + curve = nw.new_node(Nodes.MeshToCurve, [curve]) + curve = align_tilt(nw, curve, noise_strength=uniform(np.pi / 4, np.pi / 2)) + curve = nw.new_node(Nodes.SetCurveRadius, [curve, None, radius]) + geometry = nw.curve2mesh(curve, profile_curve) + nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": geometry, "Selection": selection}, + ) diff --git a/infinigen/assets/cactus/generate.py b/infinigen/assets/objects/cactus/generate.py similarity index 50% rename from infinigen/assets/cactus/generate.py rename to infinigen/assets/objects/cactus/generate.py index 674afb418..b61762b67 100644 --- a/infinigen/assets/cactus/generate.py +++ b/infinigen/assets/objects/cactus/generate.py @@ -4,47 +4,48 @@ # Authors: Lingjie Mei -import colorsys - import bpy import numpy as np from numpy.random import uniform import infinigen.core.util.blender as butil -from .base import BaseCactusFactory -from .globular import GlobularBaseCactusFactory -from .columnar import ColumnarBaseCactusFactory -from .pricky_pear import PrickyPearBaseCactusFactory -from .kalidium import KalidiumBaseCactusFactory - -from infinigen.assets.cactus import spike +from infinigen.assets.objects.cactus import spike from infinigen.assets.utils.misc import assign_material from infinigen.assets.utils.object import join_objects - -from infinigen.core.util.color import hsv2rgba -from infinigen.core.util.random import log_uniform -from infinigen.core.nodes.node_wrangler import NodeWrangler, Nodes +from infinigen.core import surface, tagging +from infinigen.core.nodes.node_utils import build_color_ramp +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler from infinigen.core.placement.detail import remesh_with_attrs -from infinigen.core import surface from infinigen.core.placement.factory import AssetFactory +from infinigen.core.util.color import hsv2rgba from infinigen.core.util.math import FixedSeed -from infinigen.core import tagging -from infinigen.core.nodes.node_utils import build_color_ramp +from infinigen.core.util.random import log_uniform + +from .base import BaseCactusFactory +from .columnar import ColumnarBaseCactusFactory +from .globular import GlobularBaseCactusFactory +from .kalidium import KalidiumBaseCactusFactory +from .pricky_pear import PrickyPearBaseCactusFactory -class CactusFactory(AssetFactory): +class CactusFactory(AssetFactory): def __init__(self, factory_seed, coarse=False, factory_method=None): super(CactusFactory, self).__init__(factory_seed, coarse) with FixedSeed(factory_seed): - self.factory_methods = [GlobularBaseCactusFactory, ColumnarBaseCactusFactory, - PrickyPearBaseCactusFactory]#, KalidiumBaseCactusFactory] + self.factory_methods = [ + GlobularBaseCactusFactory, + ColumnarBaseCactusFactory, + PrickyPearBaseCactusFactory, + ] # , KalidiumBaseCactusFactory] weights = np.array([1] * len(self.factory_methods)) self.weights = weights / weights.sum() if factory_method is None: with FixedSeed(self.factory_seed): - factory_method = np.random.choice(self.factory_methods, p=self.weights) + factory_method = np.random.choice( + 
self.factory_methods, p=self.weights + ) self.factory: BaseCactusFactory = factory_method(factory_seed, coarse) - base_hue = uniform(.2, .4) + base_hue = uniform(0.2, 0.4) self.material = surface.shaderfunc_to_material(self.shader_cactus, base_hue) def create_asset(self, face_size=0.01, realize=True, **params): @@ -53,59 +54,86 @@ def create_asset(self, face_size=0.01, realize=True, **params): remesh_with_attrs(obj, face_size) if self.factory.noise_strength > 0: - t = np.random.choice(['STUCCI', 'MARBLE']) - texture = bpy.data.textures.new(name='coral', type=t) - texture.noise_scale = log_uniform(.1, .15) - butil.modify_mesh(obj, 'DISPLACE', True, strength=self.factory.noise_strength, mid_level=0, - texture=texture) + t = np.random.choice(["STUCCI", "MARBLE"]) + texture = bpy.data.textures.new(name="coral", type=t) + texture.noise_scale = log_uniform(0.1, 0.15) + butil.modify_mesh( + obj, + "DISPLACE", + True, + strength=self.factory.noise_strength, + mid_level=0, + texture=texture, + ) assign_material(obj, self.material) - if face_size <= .05 and self.factory.density > 0: - t = spike.apply(obj, self.factory.points_fn, self.factory.base_radius, realize) - + if face_size <= 0.05 and self.factory.density > 0: + t = spike.apply( + obj, self.factory.points_fn, self.factory.base_radius, realize + ) + tagging.tag_object(obj, "cactus_spike") obj = join_objects([obj, t]) - tagging.tag_object(obj, 'cactus') + tagging.tag_object(obj, "cactus") return obj @staticmethod def shader_cactus(nw: NodeWrangler, base_hue): - shift = uniform(-.15, .15) - bright_color = hsv2rgba((base_hue + shift) % 1, 1., .02) - dark_color = hsv2rgba(base_hue, .8, .01) - fresnel_color = hsv2rgba((base_hue - uniform(.05, .1)) % 1, .9, uniform(.3, .5)) - specular = .25 - - fresnel = nw.scalar_multiply(nw.new_node(Nodes.Fresnel), log_uniform(.6, 1.)) - color = build_color_ramp(nw, nw.musgrave(log_uniform(10, 50)), [.0, .3, .7, 1.], - [dark_color, dark_color, bright_color, bright_color]) + shift = uniform(-0.15, 0.15) + bright_color = hsv2rgba((base_hue + shift) % 1, 1.0, 0.02) + dark_color = hsv2rgba(base_hue, 0.8, 0.01) + fresnel_color = hsv2rgba( + (base_hue - uniform(0.05, 0.1)) % 1, 0.9, uniform(0.3, 0.5) + ) + specular = 0.25 + + fresnel = nw.scalar_multiply(nw.new_node(Nodes.Fresnel), log_uniform(0.6, 1.0)) + color = build_color_ramp( + nw, + nw.musgrave(log_uniform(10, 50)), + [0.0, 0.3, 0.7, 1.0], + [dark_color, dark_color, bright_color, bright_color], + ) color = nw.new_node(Nodes.MixRGB, [fresnel, color, fresnel_color]) - noise_texture = nw.new_node(Nodes.NoiseTexture, input_kwargs={'Scale': 50}) - roughness = nw.build_float_curve(noise_texture, [(0, .5), (1, .8)]) - bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': color, 'Roughness': roughness, 'Specular': specular}) + noise_texture = nw.new_node(Nodes.NoiseTexture, input_kwargs={"Scale": 50}) + roughness = nw.build_float_curve(noise_texture, [(0, 0.5), (1, 0.8)]) + bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": color, + "Roughness": roughness, + "Specular": specular, + }, + ) return bsdf class GlobularCactusFactory(CactusFactory): def __init__(self, factory_seed, coarse=False): - super(GlobularCactusFactory, self).__init__(factory_seed, coarse, GlobularBaseCactusFactory) + super(GlobularCactusFactory, self).__init__( + factory_seed, coarse, GlobularBaseCactusFactory + ) class ColumnarCactusFactory(CactusFactory): def __init__(self, factory_seed, coarse=False): - super(ColumnarCactusFactory, 
self).__init__(factory_seed, coarse, ColumnarBaseCactusFactory) + super(ColumnarCactusFactory, self).__init__( + factory_seed, coarse, ColumnarBaseCactusFactory + ) class PrickyPearCactusFactory(CactusFactory): def __init__(self, factory_seed, coarse=False): - super(PrickyPearCactusFactory, self).__init__(factory_seed, coarse, PrickyPearBaseCactusFactory) + super(PrickyPearCactusFactory, self).__init__( + factory_seed, coarse, PrickyPearBaseCactusFactory + ) class KalidiumCactusFactory(CactusFactory): - def __init__(self, factory_seed, coarse=False): - super(KalidiumCactusFactory, self).__init__(factory_seed, coarse, KalidiumBaseCactusFactory) + super(KalidiumCactusFactory, self).__init__( + factory_seed, coarse, KalidiumBaseCactusFactory + ) diff --git a/infinigen/assets/objects/cactus/globular.py b/infinigen/assets/objects/cactus/globular.py new file mode 100644 index 000000000..5d3d116f4 --- /dev/null +++ b/infinigen/assets/objects/cactus/globular.py @@ -0,0 +1,86 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Lingjie Mei + + +import bpy +import numpy as np +from numpy.random import uniform + +from infinigen.assets.objects.cactus.base import BaseCactusFactory +from infinigen.assets.utils.decorate import geo_extension +from infinigen.assets.utils.object import new_cube +from infinigen.core import surface +from infinigen.core.nodes.node_info import Nodes +from infinigen.core.nodes.node_wrangler import NodeWrangler +from infinigen.core.util import blender as butil +from infinigen.core.util.random import log_uniform + + +class GlobularBaseCactusFactory(BaseCactusFactory): + spike_distance = 0.08 + + @staticmethod + def geo_globular(nw: NodeWrangler): + star_resolution = np.random.randint(6, 12) + resolution = 64 + frequency = uniform(-0.2, 0.2) + circle = nw.new_node(Nodes.MeshCircle, [star_resolution * 3]) + selection = nw.compare( + "EQUAL", nw.math("MODULO", nw.new_node(Nodes.Index), 2), 0 + ) + circle, _, selection = nw.new_node( + Nodes.CaptureAttribute, [circle, None, selection] + ).outputs[:3] + circle = nw.new_node( + Nodes.SetPosition, + [ + circle, + selection, + nw.scale(nw.new_node(Nodes.InputPosition), uniform(1.1, 1.2)), + ], + ) + profile_curve = nw.new_node(Nodes.MeshToCurve, [circle]) + curve = nw.new_node( + Nodes.ResampleCurve, [nw.new_node(Nodes.CurveLine), None, resolution] + ) + anchors = [ + (0, uniform(0.2, 0.4)), + (uniform(0.4, 0.6), log_uniform(0.5, 0.8)), + (uniform(0.8, 0.85), uniform(0.4, 0.6)), + (1.0, 0.05), + ] + radius = nw.scalar_multiply( + nw.build_float_curve(nw.new_node(Nodes.SplineParameter), anchors, "AUTO"), + log_uniform(0.5, 1.0), + ) + curve = nw.new_node(Nodes.SetCurveRadius, [curve, None, radius]) + curve = nw.new_node( + Nodes.SetCurveTilt, + [ + curve, + None, + nw.scalar_multiply( + nw.new_node(Nodes.SplineParameter), 2 * np.pi * frequency + ), + ], + ) + geometry = nw.curve2mesh(curve, profile_curve) + nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": geometry, "Selection": selection}, + ) + + def create_asset(self, face_size=0.01, **params) -> bpy.types.Object: + obj = new_cube() + surface.add_geomod(obj, self.geo_globular, apply=True, attributes=["selection"]) + surface.add_geomod( + obj, geo_extension, apply=True, input_kwargs={"musgrave_dimensions": "2D"} + ) + + obj.scale = uniform(0.8, 1.5, 3) + obj.rotation_euler[-1] = uniform(0, np.pi * 2) + butil.apply_transform(obj) + + return obj 
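
Editorial aside, not part of the patch above: the renames in this PR only change module paths (e.g. `infinigen.assets.cactus` → `infinigen.assets.objects.cactus`), not factory behavior or constructor signatures. A minimal sketch of driving one of the relocated factories under the new layout — assuming a working Blender/Infinigen environment and the usual `AssetFactory.spawn_asset` entry point, which this PR does not touch:

```python
# Illustrative sketch only; must run inside Infinigen's Blender environment (bpy available).
# New import path introduced by this PR:
from infinigen.assets.objects.cactus import GlobularCactusFactory
# Old path, pre-1.5.0 (removed by this PR):
# from infinigen.assets.cactus import GlobularCactusFactory

factory = GlobularCactusFactory(factory_seed=0)  # same signature as before the move
cactus = factory.spawn_asset(0)                  # returns a bpy.types.Object
```
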
diff --git a/infinigen/assets/objects/cactus/kalidium.py b/infinigen/assets/objects/cactus/kalidium.py new file mode 100644 index 000000000..7b74874a5 --- /dev/null +++ b/infinigen/assets/objects/cactus/kalidium.py @@ -0,0 +1,163 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Lingjie Mei + + +import bpy +import numpy as np +from numpy.random import uniform + +from infinigen.assets.objects.cactus.base import BaseCactusFactory +from infinigen.assets.objects.trees.tree import build_radius_tree +from infinigen.assets.utils.decorate import ( + displace_vertices, + geo_extension, + read_co, + remove_vertices, + subsurface2face_size, +) +from infinigen.assets.utils.nodegroup import geo_radius +from infinigen.assets.utils.object import new_cube, origin2lowest, separate_loose +from infinigen.assets.utils.shortest_path import geo_shortest_path +from infinigen.core import surface +from infinigen.core.nodes.node_info import Nodes +from infinigen.core.nodes.node_wrangler import NodeWrangler +from infinigen.core.placement.factory import make_asset_collection +from infinigen.core.tagging import tag_object +from infinigen.core.util import blender as butil + + +class KalidiumBaseCactusFactory(BaseCactusFactory): + cap_percentage = 0.0 + noise_strength = 0.0 + density = 0.0 + + @staticmethod + def build_twig(i): + branch_config = { + "n": 1, + "path_kargs": lambda idx: { + "n_pts": 5, + "std": 0.5, + "momentum": 0.85, + "sz": 0.01, + }, + "spawn_kargs": lambda idx: {"init_vec": (0, 0, 1)}, + } + obj = build_radius_tree(None, branch_config, 0.005) + surface.add_geomod(obj, geo_radius, apply=True, input_args=["radius"]) + return obj + + def create_asset(self, face_size=0.01, **params) -> bpy.types.Object: + resolution = 20 + obj = new_cube(location=(1, 1, 1)) + butil.modify_mesh( + obj, + "ARRAY", + count=resolution, + relative_offset_displace=(1, 0, 0), + use_merge_vertices=True, + ) + butil.modify_mesh( + obj, + "ARRAY", + count=resolution, + relative_offset_displace=(0, 1, 0), + use_merge_vertices=True, + ) + butil.modify_mesh( + obj, + "ARRAY", + count=resolution, + relative_offset_displace=(0, 0, 1), + use_merge_vertices=True, + ) + obj.scale = [1 / resolution] * 3 + obj.location = -1, -1, -0.1 + butil.apply_transform(obj, loc=True) + remove_vertices( + obj, + lambda x, y, z: (x**2 + y**2 + (z - 1) ** 2 > 1.1) + | (uniform(0, 1, len(x)) < 0.05), + ) + end_indices = np.nonzero(read_co(obj)[:, -1] < 5 / resolution)[0] + + def end_index(nw): + nw.build_index_case(np.random.choice(end_indices, 5)) + + displace_vertices( + obj, + lambda x, y, z: uniform(-0.8 / resolution, 0.8 / resolution, (3, len(x))), + ) + with butil.ViewportMode(obj, "EDIT"): + bpy.ops.mesh.quads_convert_to_tris( + quad_method="BEAUTY", ngon_method="BEAUTY" + ) + surface.add_geomod(obj, geo_extension, apply=True) + + def weight(nw): + return nw.scalar_multiply( + nw.vector_math( + "DISTANCE", *nw.new_node(Nodes.InputEdgeVertices).outputs[2:] + ), + nw.uniform(0.8, 1), + ) + + surface.add_geomod( + obj, geo_shortest_path, apply=True, input_args=[end_index, weight, 0.05] + ) + surface.add_geomod(obj, geo_radius, apply=True, input_args=[0.006]) + + twigs = make_asset_collection(self.build_twig, 5, verbose=False) + surface.add_geomod(obj, self.geo_twigs, apply=True, input_args=[twigs]) + butil.delete_collection(twigs) + obj = separate_loose(obj) + + obj.scale = uniform(0.8, 1.2, 3) + 
butil.apply_transform(obj) + subsurface2face_size(obj, face_size) + origin2lowest(obj) + tag_object(obj, "kalidium_cactus") + return obj + + @staticmethod + def geo_twigs(nw: NodeWrangler, instances): + geometry = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) + + points, _, rotation = nw.new_node( + Nodes.DistributePointsOnFaces, [geometry], input_kwargs={"Density": 2e3} + ).outputs[:3] + points = nw.new_node(Nodes.MergeByDistance, [points, None, 0.005]) + perturb = 0.4 + rotation = nw.new_node( + Nodes.AlignEulerToVector, + [ + nw.add(rotation, nw.uniform([-perturb] * 3, [perturb] * 3)), + nw.uniform(0.2, 0.5), + ], + attrs={"axis": "Z"}, + ) + instances = nw.new_node(Nodes.CollectionInfo, [instances, True, True]) + + twigs = nw.new_node( + Nodes.RealizeInstances, + [ + nw.new_node( + Nodes.InstanceOnPoints, + [ + points, + None, + instances, + True, + None, + rotation, + nw.combine(1, 1, nw.uniform(1.0, 1.5)), + ], + ) + ], + ) + geometry = nw.new_node(Nodes.JoinGeometry, [[geometry, twigs]]) + nw.new_node(Nodes.GroupOutput, input_kwargs={"Geometry": geometry}) diff --git a/infinigen/assets/cactus/pricky_pear.py b/infinigen/assets/objects/cactus/pricky_pear.py similarity index 58% rename from infinigen/assets/cactus/pricky_pear.py rename to infinigen/assets/objects/cactus/pricky_pear.py index 8a74ae34f..47f37d288 100644 --- a/infinigen/assets/cactus/pricky_pear.py +++ b/infinigen/assets/objects/cactus/pricky_pear.py @@ -8,37 +8,48 @@ import numpy as np from numpy.random import uniform -from infinigen.assets.utils.object import join_objects, new_cube +from infinigen.assets.objects.cactus.base import BaseCactusFactory from infinigen.assets.utils.decorate import geo_extension -from infinigen.core.util.random import log_uniform -from infinigen.core.surface import write_attr_data -from infinigen.core.util import blender as butil -from infinigen.assets.cactus.base import BaseCactusFactory +from infinigen.assets.utils.object import join_objects, new_cube +from infinigen.core import surface from infinigen.core.nodes.node_info import Nodes from infinigen.core.nodes.node_wrangler import NodeWrangler -from infinigen.core import surface -from infinigen.core.tagging import tag_object, tag_nodegroup +from infinigen.core.surface import write_attr_data +from infinigen.core.tagging import tag_object +from infinigen.core.util import blender as butil +from infinigen.core.util.random import log_uniform + class PrickyPearBaseCactusFactory(BaseCactusFactory): - spike_distance = .08 + spike_distance = 0.08 @staticmethod def geo_leaf(nw: NodeWrangler): resolution = 64 profile_curve = nw.new_node(Nodes.CurveCircle) - curve = nw.new_node(Nodes.ResampleCurve, [nw.new_node(Nodes.CurveLine), None, resolution]) - anchors = [(0, uniform(.15, .2)), (uniform(.4, .6), log_uniform(.4, .5)), (1., .05)] - radius = nw.scalar_multiply(nw.build_float_curve(nw.new_node(Nodes.SplineParameter), anchors, 'AUTO'), - log_uniform(.5, 1.5)) + curve = nw.new_node( + Nodes.ResampleCurve, [nw.new_node(Nodes.CurveLine), None, resolution] + ) + anchors = [ + (0, uniform(0.15, 0.2)), + (uniform(0.4, 0.6), log_uniform(0.4, 0.5)), + (1.0, 0.05), + ] + radius = nw.scalar_multiply( + nw.build_float_curve(nw.new_node(Nodes.SplineParameter), anchors, "AUTO"), + log_uniform(0.5, 1.5), + ) curve = nw.new_node(Nodes.SetCurveRadius, [curve, None, radius]) geometry = nw.curve2mesh(curve, profile_curve) - nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': geometry}) + nw.new_node(Nodes.GroupOutput, 
input_kwargs={"Geometry": geometry}) def build_leaf(self): obj = new_cube() surface.add_geomod(obj, self.geo_leaf, apply=True) - surface.add_geomod(obj, geo_extension, apply=True, input_kwargs={'musgrave_dimensions': '2D'}) - obj.scale = uniform(.8, 1.2), uniform(.2, .25), uniform(.8, 1.2) + surface.add_geomod( + obj, geo_extension, apply=True, input_kwargs={"musgrave_dimensions": "2D"} + ) + obj.scale = uniform(0.8, 1.2), uniform(0.2, 0.25), uniform(0.8, 1.2) butil.apply_transform(obj) return obj @@ -49,22 +60,26 @@ def build_leaves(self, level=0): leaves = [self.build_leaves(level - 1) for _ in range(n)] base = self.build_leaf() angles = np.random.permutation( - [-uniform(np.pi / 3, np.pi / 2), uniform(-np.pi / 16, np.pi / 16), uniform(np.pi / 3, np.pi / 2)])[ - :n] - vectors = [[np.sin(a), 0, np.cos(a) + .5] for a in angles] + [ + -uniform(np.pi / 3, np.pi / 2), + uniform(-np.pi / 16, np.pi / 16), + uniform(np.pi / 3, np.pi / 2), + ] + )[:n] + vectors = [[np.sin(a), 0, np.cos(a) + 0.5] for a in angles] locations = np.array([v.co for v in base.data.vertices]) for a, v, leaf in zip(angles, vectors, leaves): index = np.argmax(locations @ v) - leaf.location[-1] -= .15 + leaf.location[-1] -= 0.15 butil.apply_transform(leaf, loc=True) - leaf.scale = [uniform(.5, .75)] * 3 + leaf.scale = [uniform(0.5, 0.75)] * 3 leaf.location = locations[index] leaf.rotation_euler = 0, a, uniform(-np.pi / 3, np.pi / 3) obj = join_objects([base, *leaves]) return obj - def create_asset(self, face_size=.01, **params) -> bpy.types.Object: + def create_asset(self, face_size=0.01, **params) -> bpy.types.Object: obj = self.build_leaves(2) - write_attr_data(obj, 'selection', np.ones(len(obj.data.vertices))) - tag_object(obj, 'prickypear_cactus') + write_attr_data(obj, "selection", np.ones(len(obj.data.vertices))) + tag_object(obj, "prickypear_cactus") return obj diff --git a/infinigen/assets/objects/cactus/spike.py b/infinigen/assets/objects/cactus/spike.py new file mode 100644 index 000000000..7a9615cc0 --- /dev/null +++ b/infinigen/assets/objects/cactus/spike.py @@ -0,0 +1,168 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
+ +# Authors: Lingjie Mei + + +import numpy as np +from numpy.random import uniform + +import infinigen.core.util.blender as butil +from infinigen.assets.objects.trees.tree import build_radius_tree +from infinigen.assets.utils.misc import ( + assign_material, + sample_direction, + toggle_hide, + toggle_show, +) +from infinigen.assets.utils.nodegroup import geo_radius +from infinigen.core import surface +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.placement.factory import make_asset_collection +from infinigen.core.tagging import COMBINED_ATTR_NAME +from infinigen.core.util.blender import deep_clone_obj +from infinigen.core.util.color import hsv2rgba + + +def build_spikes(base_radius=0.002, **kwargs): + n_branch = 4 + n_major = 9 + branch_config = { + "n": n_branch, + "path_kargs": lambda idx: { + "n_pts": n_major, + "std": 0.5, + "momentum": 0.85, + "sz": uniform(0.005, 0.01), + }, + "spawn_kargs": lambda idx: {"init_vec": sample_direction(0.8)}, + } + + def radius_fn(base_radius, size, resolution): + return base_radius * 0.5 ** (np.arange(size * resolution) / (size * resolution)) + + obj = build_radius_tree(radius_fn, branch_config, base_radius) + surface.add_geomod(obj, geo_radius, apply=True, input_args=["radius", None, 0.001]) + return obj + + +def make_default_selections(spike_distance, cap_percentage, density): + def selection(nw: NodeWrangler, selected, geometry): + z = nw.separate(nw.new_node(Nodes.InputPosition))[-1] + z_stat = nw.new_node(Nodes.AttributeStatistic, [geometry, None, z]).outputs + percentage = nw.scalar_divide(nw.scalar_sub(z_stat["Max"], z), z_stat["Range"]) + is_cap = nw.bernoulli( + nw.build_float_curve(percentage, [(0, 1), (cap_percentage, 0.5), (1, 0)]) + ) + cap = nw.new_node(Nodes.SeparateGeometry, [geometry, is_cap]) + cap = nw.new_node(Nodes.MergeByDistance, [cap, None, spike_distance / 2]) + + points = nw.new_node( + Nodes.DistributePointsOnFaces, + input_kwargs={"Mesh": geometry, "Selection": selected, "Density": density}, + ).outputs["Points"] + points = nw.new_node(Nodes.MergeByDistance, [points, None, spike_distance]) + + all_points = nw.new_node(Nodes.JoinGeometry, [[cap, points]]) + return all_points + + return selection + + +def geo_spikes(nw: NodeWrangler, spikes, points_fn=None, realize=True): + geometry, selection = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketFloat", "Selection", None), + ], + ).outputs[:2] + + capture = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={"Geometry": geometry, "Value": nw.new_node(Nodes.InputNormal)}, + ) + + selected = nw.compare("GREATER_THAN", selection, 0.8) + spikes = nw.new_node(Nodes.CollectionInfo, [spikes, True, True]) + + rotation = nw.new_node( + Nodes.AlignEulerToVector, + input_kwargs={"Vector": (capture, "Attribute")}, + attrs={"axis": "Z"}, + ) + rotation = nw.new_node( + Nodes.RotateEuler, + input_kwargs={"Rotation": rotation, "Angle": nw.uniform(0, 2 * np.pi)}, + attrs={"type": "AXIS_ANGLE", "space": "LOCAL"}, + ) + rotation = nw.new_node( + Nodes.AlignEulerToVector, [rotation, nw.uniform(0.2, 0.5)], attrs={"axis": "Z"} + ) + rotation = nw.add(rotation, nw.uniform([-0.05] * 3, [0.05] * 3)) + + points = surface.eval_argument( + nw, points_fn, selected=selected, geometry=capture.outputs["Geometry"] + ) + spikes = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={ + "Points": points, + "Instance": spikes, + "Pick Instance": True, + "Rotation": rotation, + "Scale": nw.uniform([0.5] * 3, 
[1.0] * 3), + }, + ) + if realize: + realize_instances = nw.new_node(Nodes.RealizeInstances, [spikes]) + else: + realize_instances = spikes + + nw.new_node(Nodes.GroupOutput, input_kwargs={"Geometry": realize_instances}) + + +def shader_spikes(nw: NodeWrangler): + roughness = 0.8 + specular = 0.25 + mix_ratio = 0.9 + color = hsv2rgba(uniform(0.2, 0.4), uniform(0.1, 0.3), 0.8) + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": color, + "Roughness": roughness, + "Specular": specular, + "Subsurface": 0.1, + }, + ) + transparent_bsdf = nw.new_node(Nodes.TranslucentBSDF, [color]) + mix_rgb = nw.new_node( + Nodes.MixShader, [mix_ratio, principled_bsdf, transparent_bsdf] + ) + return mix_rgb + + +def apply(obj, points_fn, base_radius=0.002, realize=True): + spikes = deep_clone_obj(obj) + + if COMBINED_ATTR_NAME in spikes.data.attributes: + spikes.data.attributes.remove(spikes.data.attributes[COMBINED_ATTR_NAME]) + + instances = make_asset_collection( + build_spikes, 5, "spikes", verbose=False, base_radius=base_radius + ) + mat = surface.shaderfunc_to_material(shader_spikes) + toggle_show(instances) + for o in instances.objects: + assign_material(o, mat) + toggle_hide(instances) + surface.add_geomod( + spikes, + geo_spikes, + apply=realize, + input_args=[instances, points_fn, realize], + input_attributes=[None, "selection"], + ) + butil.delete_collection(instances) + return spikes diff --git a/infinigen/assets/clothes/__init__.py b/infinigen/assets/objects/clothes/__init__.py similarity index 99% rename from infinigen/assets/clothes/__init__.py rename to infinigen/assets/objects/clothes/__init__.py index a1b27f179..5a0bb9209 100644 --- a/infinigen/assets/clothes/__init__.py +++ b/infinigen/assets/objects/clothes/__init__.py @@ -3,7 +3,6 @@ # Authors: Lingjie Mei from .blanket import BlanketFactory -from .shirt import ShirtFactory from .pants import PantsFactory +from .shirt import ShirtFactory from .towel import TowelFactory - diff --git a/infinigen/assets/clothes/blanket.py b/infinigen/assets/objects/clothes/blanket.py similarity index 68% rename from infinigen/assets/clothes/blanket.py rename to infinigen/assets/objects/clothes/blanket.py index 2d5d018b7..fbd38b03d 100644 --- a/infinigen/assets/clothes/blanket.py +++ b/infinigen/assets/objects/clothes/blanket.py @@ -6,31 +6,32 @@ import numpy as np from numpy.random import uniform -from infinigen.assets.materials import art, fabrics +from infinigen.assets.material_assignments import AssetList +from infinigen.assets.materials.art import ArtFabric from infinigen.assets.utils.decorate import read_co, select_vertices, write_co from infinigen.assets.utils.object import new_grid from infinigen.assets.utils.uv import unwrap_faces from infinigen.core.placement.factory import AssetFactory -from infinigen.core.util.random import log_uniform from infinigen.core.util import blender as butil -from infinigen.assets.materials.art import ArtFabric -from infinigen.assets.material_assignments import AssetList +from infinigen.core.util.random import log_uniform + class BlanketFactory(AssetFactory): def __init__(self, factory_seed, coarse=False): super(BlanketFactory, self).__init__(factory_seed, coarse) - self.width = log_uniform(.9, 1.2) - self.size = self.width * log_uniform(.4, .7) - self.thickness = log_uniform(.004, .008) + self.width = log_uniform(0.9, 1.2) + self.size = self.width * log_uniform(0.4, 0.7) + self.thickness = log_uniform(0.004, 0.008) - materials = AssetList['BlanketFactory']() - self.surface = 
materials['surface'].assign_material() + materials = AssetList["BlanketFactory"]() + self.surface = materials["surface"].assign_material() if self.surface == ArtFabric: self.surface = self.surface(self.factory_seed) - def create_asset(self, **params) -> bpy.types.Object: - obj = new_grid(x_subdivisions=64, y_subdivisions=int(self.size / self.width * 64)) + obj = new_grid( + x_subdivisions=64, y_subdivisions=int(self.size / self.width * 64) + ) obj.scale = self.width / 2, self.size / 2, 1 butil.apply_transform(obj) unwrap_faces(obj) @@ -39,15 +40,15 @@ def create_asset(self, **params) -> bpy.types.Object: def fold(self, obj): theta = uniform(-np.pi / 6, np.pi / 6) - y_margin = self.size * (.5 - uniform(.1, .3)) + y_margin = self.size * (0.5 - uniform(0.1, 0.3)) obj.rotation_euler[-1] = theta obj.location[1] -= y_margin butil.apply_transform(obj, True) - with butil.ViewportMode(obj, 'EDIT'): - bpy.ops.mesh.select_all(action='SELECT') + with butil.ViewportMode(obj, "EDIT"): + bpy.ops.mesh.select_all(action="SELECT") bpy.ops.mesh.bisect(plane_co=(0, 0, 0), plane_no=(0, 1, 0)) x, y, z = read_co(obj).T - co = np.stack([x, np.where(y > 0, -y, y), np.where(y > 0, .05 - z, z)], -1) + co = np.stack([x, np.where(y > 0, -y, y), np.where(y > 0, 0.05 - z, z)], -1) write_co(obj, co) obj.location[1] += y_margin butil.apply_transform(obj, True) @@ -56,24 +57,29 @@ def fold(self, obj): class ComforterFactory(BlanketFactory): - def create_asset(self, **params) -> bpy.types.Object: obj = super().create_asset(**params) - butil.modify_mesh(obj, 'SOLIDIFY', thickness=.01) + butil.modify_mesh(obj, "SOLIDIFY", thickness=0.01) return obj class BoxComforterFactory(ComforterFactory): def __init__(self, factory_seed, coarse=False): super(BoxComforterFactory, self).__init__(factory_seed, coarse) - self.margin = uniform(.3, .4) + self.margin = uniform(0.3, 0.4) def create_asset(self, **params) -> bpy.types.Object: obj = super().create_asset(**params) x, y, _ = read_co(obj).T - _x = np.abs(x / self.margin - np.round(x / self.margin)) * self.margin < self.width / 64 / 2 - _y = np.abs(y / self.margin - np.round(y / self.margin)) * self.margin < self.width / 64 / 2 - with butil.ViewportMode(obj, 'EDIT'): + _x = ( + np.abs(x / self.margin - np.round(x / self.margin)) * self.margin + < self.width / 64 / 2 + ) + _y = ( + np.abs(y / self.margin - np.round(y / self.margin)) * self.margin + < self.width / 64 / 2 + ) + with butil.ViewportMode(obj, "EDIT"): select_vertices(obj, _x | _y) - bpy.ops.mesh.remove_doubles(threshold=.02) + bpy.ops.mesh.remove_doubles(threshold=0.02) return obj diff --git a/infinigen/assets/clothes/pants.py b/infinigen/assets/objects/clothes/pants.py similarity index 50% rename from infinigen/assets/clothes/pants.py rename to infinigen/assets/objects/clothes/pants.py index 848f94980..dbc1b9132 100644 --- a/infinigen/assets/clothes/pants.py +++ b/infinigen/assets/objects/clothes/pants.py @@ -6,61 +6,71 @@ import numpy as np from numpy.random import uniform -from infinigen.assets.materials import art, fabrics -from infinigen.assets.utils.decorate import distance2boundary, read_normal, remove_faces, subsurf, write_co +from infinigen.assets.material_assignments import AssetList +from infinigen.assets.materials.art import ArtFabric +from infinigen.assets.utils.decorate import ( + distance2boundary, + read_normal, + remove_faces, + subsurf, + write_co, +) from infinigen.assets.utils.draw import remesh_fill from infinigen.assets.utils.object import new_circle -from infinigen.assets.utils.uv import unwrap_faces, 
wrap_front_back, wrap_top_bottom +from infinigen.assets.utils.uv import wrap_top_bottom from infinigen.core.placement.factory import AssetFactory -from infinigen.core.util.random import log_uniform from infinigen.core.util import blender as butil -from infinigen.assets.material_assignments import AssetList -from infinigen.assets.materials.art import ArtFabric +from infinigen.core.util.random import log_uniform class PantsFactory(AssetFactory): def __init__(self, factory_seed, coarse=False): super(PantsFactory, self).__init__(factory_seed, coarse) - self.width = log_uniform(.45, .55) - self.size = self.width / 2 + uniform(0, .05) - self.type = np.random.choice(['underwear', 'shorts', 'pants']) + self.width = log_uniform(0.45, 0.55) + self.size = self.width / 2 + uniform(0, 0.05) + self.type = np.random.choice(["underwear", "shorts", "pants"]) match self.type: - case 'underwear': - self.length = self.size + uniform(-.02, .02) - case 'shorts': - self.length = self.size + uniform(.05, .1) + case "underwear": + self.length = self.size + uniform(-0.02, 0.02) + case "shorts": + self.length = self.size + uniform(0.05, 0.1) case _: - self.length = self.size + uniform(.5, .7) - self.neck_shrink = uniform(.1, .15) - self.thickness = log_uniform(.02, .03) - materials = AssetList['PantsFactory']() - self.surface = materials['surface'].assign_material() + self.length = self.size + uniform(0.5, 0.7) + self.neck_shrink = uniform(0.1, 0.15) + self.thickness = log_uniform(0.02, 0.03) + materials = AssetList["PantsFactory"]() + self.surface = materials["surface"].assign_material() if self.surface == ArtFabric: self.surface = self.surface(self.factory_seed) def create_asset(self, **params) -> bpy.types.Object: - x_anchors = 0, self.width / 2, self.width / 2 * ( - 1 + self.neck_shrink), self.width / 2 * self.neck_shrink * 2, 0 + x_anchors = ( + 0, + self.width / 2, + self.width / 2 * (1 + self.neck_shrink), + self.width / 2 * self.neck_shrink * 2, + 0, + ) y_anchors = 0, 0, -self.length, -self.length, -self.size obj = new_circle(vertices=len(x_anchors)) - with butil.ViewportMode(obj, 'EDIT'): - bpy.ops.mesh.select_all(action='SELECT') + with butil.ViewportMode(obj, "EDIT"): + bpy.ops.mesh.select_all(action="SELECT") bpy.ops.mesh.edge_face_add() write_co(obj, np.stack([x_anchors, y_anchors, np.zeros_like(x_anchors)], -1)) - butil.modify_mesh(obj, 'MIRROR', use_axis=(True, False, False)) - remesh_fill(obj, .02) + butil.modify_mesh(obj, "MIRROR", use_axis=(True, False, False)) + remesh_fill(obj, 0.02) distance2boundary(obj) - butil.modify_mesh(obj, 'SOLIDIFY', thickness=self.thickness, offset=0) + butil.modify_mesh(obj, "SOLIDIFY", thickness=self.thickness, offset=0) x_, y_, z_ = read_normal(obj).T - remove_faces(obj, (y_ < -.99) | (y_ > .99)) - with butil.ViewportMode(obj, 'EDIT'), butil.Suppress(): - bpy.ops.mesh.select_all(action='SELECT') + remove_faces(obj, (y_ < -0.99) | (y_ > 0.99)) + with butil.ViewportMode(obj, "EDIT"), butil.Suppress(): + bpy.ops.mesh.select_all(action="SELECT") bpy.ops.mesh.remove_doubles(threshold=1e-3) bpy.ops.mesh.normals_make_consistent(inside=False) - bpy.ops.mesh.select_mode(type='EDGE') + bpy.ops.mesh.select_mode(type="EDGE") bpy.ops.mesh.select_loose() - bpy.ops.mesh.delete(type='EDGE') + bpy.ops.mesh.delete(type="EDGE") wrap_top_bottom(obj, self.surface) subsurf(obj, 1) return obj diff --git a/infinigen/assets/objects/clothes/shirt.py b/infinigen/assets/objects/clothes/shirt.py new file mode 100644 index 000000000..7669e7465 --- /dev/null +++ 
b/infinigen/assets/objects/clothes/shirt.py @@ -0,0 +1,94 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Lingjie Mei +import bpy +import numpy as np +from numpy.random import uniform + +from infinigen.assets.material_assignments import AssetList +from infinigen.assets.materials.art import ArtFabric +from infinigen.assets.utils.decorate import ( + read_center, + read_normal, + remove_faces, + subsurf, + write_co, +) +from infinigen.assets.utils.draw import remesh_fill +from infinigen.assets.utils.object import new_circle +from infinigen.assets.utils.uv import wrap_front_back +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.util import blender as butil +from infinigen.core.util.random import log_uniform + + +class ShirtFactory(AssetFactory): + def __init__(self, factory_seed, coarse=False): + super(ShirtFactory, self).__init__(factory_seed, coarse) + self.width = log_uniform(0.45, 0.55) + self.size = self.width + uniform(0.25, 0.3) + self.size_neck = uniform(0.1, 0.15) * self.size + self.type = np.random.choice(["short", "long"]) + match self.type: + case "short": + self.sleeve_length = self.size / 2 + uniform(-0.35, -0.3) + case _: + self.sleeve_length = self.size / 2 + uniform(-0.05, 0.0) + self.sleeve_width = uniform(0.14, 0.18) + self.sleeve_angle = uniform(np.pi / 6, np.pi / 4) + self.thickness = log_uniform(0.02, 0.03) + materials = AssetList["ShirtFactory"]() + self.surface = materials["surface"].assign_material() + if self.surface == ArtFabric: + self.surface = self.surface(self.factory_seed) + + def create_asset(self, **params) -> bpy.types.Object: + x_anchors = ( + 0, + self.width / 2, + self.width / 2, + self.width / 2 + self.sleeve_length * np.sin(self.sleeve_angle), + self.width / 2 + + self.sleeve_length * np.sin(self.sleeve_angle) + + self.sleeve_width * np.cos(self.sleeve_angle), + self.width / 2, + self.width / 4, + 0, + ) + + y_anchors = ( + 0, + 0, + self.size - self.sleeve_width / np.sin(self.sleeve_angle), + self.size + - self.sleeve_width / np.sin(self.sleeve_angle) + - self.sleeve_length * np.cos(self.sleeve_angle), + self.size + - self.sleeve_width / np.sin(self.sleeve_angle) + - self.sleeve_length * np.cos(self.sleeve_angle) + + self.sleeve_width * np.sin(self.sleeve_angle), + self.size, + self.size + self.size_neck, + self.size + self.size_neck * uniform(0.3, 0.7), + ) + + obj = new_circle(vertices=len(x_anchors)) + with butil.ViewportMode(obj, "EDIT"): + bpy.ops.mesh.select_all(action="SELECT") + bpy.ops.mesh.edge_face_add() + bpy.ops.mesh.flip_normals() + write_co(obj, np.stack([x_anchors, y_anchors, np.zeros_like(x_anchors)], -1)) + butil.modify_mesh(obj, "MIRROR", use_axis=(True, False, False)) + remesh_fill(obj, 0.02) + butil.modify_mesh(obj, "SOLIDIFY", thickness=self.thickness) + x, y, z = read_center(obj).T + x_, y_, z_ = read_normal(obj).T + remove_faces(obj, (y_ < -0.5) | ((y_ > 0.5) & (x_ * x < 0))) + with butil.ViewportMode(obj, "EDIT"), butil.Suppress(): + bpy.ops.mesh.select_all(action="SELECT") + bpy.ops.mesh.remove_doubles(threshold=1e-3) + butil.modify_mesh(obj, "BEVEL", width=self.sleeve_width * uniform(0.1, 0.15)) + subsurf(obj, 1) + wrap_front_back(obj, self.surface) + return obj diff --git a/infinigen/assets/clothes/towel.py b/infinigen/assets/objects/clothes/towel.py similarity index 57% rename from infinigen/assets/clothes/towel.py rename to 
infinigen/assets/objects/clothes/towel.py index 964191c44..860b39bae 100644 --- a/infinigen/assets/clothes/towel.py +++ b/infinigen/assets/objects/clothes/towel.py @@ -1,115 +1,143 @@ # Copyright (c) Princeton University. # This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. +import bmesh + # Authors: Lingjie Mei import bpy -import bmesh import numpy as np from numpy.random import uniform from scipy.optimize import fsolve -from infinigen.assets.elements.rug import ArtRug -from infinigen.assets.materials import rug -from infinigen.assets.utils.decorate import geo_extension, mirror, read_co, read_edge_direction, \ - subdivide_edge_ring, subsurf, write_co +from infinigen.assets.material_assignments import AssetList +from infinigen.assets.objects.elements.rug import ArtRug +from infinigen.assets.utils.decorate import ( + geo_extension, + mirror, + read_co, + read_edge_direction, + subdivide_edge_ring, + subsurf, + write_co, +) from infinigen.assets.utils.object import center, new_plane -from infinigen.assets.utils.uv import unwrap_faces, wrap_sides +from infinigen.assets.utils.uv import wrap_sides from infinigen.core import surface from infinigen.core.placement.factory import AssetFactory -from infinigen.core.util.math import normalize -from infinigen.core.util.random import log_uniform from infinigen.core.util import blender as butil -from infinigen.assets.material_assignments import AssetList +from infinigen.core.util.random import log_uniform class TowelFactory(AssetFactory): def __init__(self, factory_seed, coarse=False): super(TowelFactory, self).__init__(factory_seed, coarse) - self.width = log_uniform(.3, .6) + self.width = log_uniform(0.3, 0.6) self.length = self.width * log_uniform(1, 1.5) - self.thickness = log_uniform(.003, .01) + self.thickness = log_uniform(0.003, 0.01) prob = np.array([2, 1]) - self.fold_type = np.random.choice(['fold', 'roll'], p=prob / prob.sum()) + self.fold_type = np.random.choice(["fold", "roll"], p=prob / prob.sum()) self.folds = np.random.randint(2, 4) - self.extra_thickness = self.thickness * uniform(.2, .3) + self.extra_thickness = self.thickness * uniform(0.2, 0.3) self.fold_count = 15 self.roll_count = 256 self.roll_total = self.compute_roll_total() - materials = AssetList['TowelFactory']() - self.surface = materials['surface'].assign_material() + materials = AssetList["TowelFactory"]() + self.surface = materials["surface"].assign_material() if self.surface == ArtRug: self.surface = self.surface(self.factory_seed) - + def fold(self, obj): x, y, z = read_co(obj).T if np.max(x) - np.min(x) > np.max(y) - np.min(y): - obj.rotation_euler[-1] = np.pi * (uniform() < .5) + obj.rotation_euler[-1] = np.pi * (uniform() < 0.5) else: - obj.rotation_euler[-1] = np.pi * (uniform() < .5) + np.pi / 2 + obj.rotation_euler[-1] = np.pi * (uniform() < 0.5) + np.pi / 2 butil.apply_transform(obj, True) obj.location = *(-center(obj))[:-1], 0 obj.location[0] += uniform(-self.thickness, self.thickness) butil.apply_transform(obj, True) n = len(obj.data.vertices) - with butil.ViewportMode(obj, 'EDIT'): + with butil.ViewportMode(obj, "EDIT"): bm = bmesh.from_edit_mesh(obj.data) bm.edges.ensure_lookup_table() selected = np.abs(read_edge_direction(obj)[:, 0]) > 1 - 1e-3 edges = [bm.edges[i] for i in np.nonzero(selected)[0]] - bmesh.ops.subdivide_edgering(bm, edges=edges, cuts=self.fold_count, smooth=2) + bmesh.ops.subdivide_edgering( + bm, edges=edges, cuts=self.fold_count, smooth=2 + ) 
bmesh.update_edit_mesh(obj.data) co = read_co(obj) - order = np.where(co[n::self.fold_count, 0] < co[n + 1::self.fold_count, 0], 1, -1) - x_ = np.linspace(-self.thickness * order, self.thickness * order, self.fold_count).T.ravel() + order = np.where( + co[n :: self.fold_count, 0] < co[n + 1 :: self.fold_count, 0], 1, -1 + ) + x_ = np.linspace( + -self.thickness * order, self.thickness * order, self.fold_count + ).T.ravel() co[n:, 0] = x_ x, y, z = co.T max_z = np.max(z) + self.extra_thickness theta = x / self.thickness * np.pi / 2 - x__ = np.where(x < -self.thickness, x, - np.where(x > self.thickness, -x, -self.thickness + (max_z - z) * np.cos(theta))) - z_ = np.where(x < -self.thickness, z, - np.where(x > self.thickness, max_z * 2 - z, max_z + (max_z - z) * np.sin(theta))) + x__ = np.where( + x < -self.thickness, + x, + np.where( + x > self.thickness, -x, -self.thickness + (max_z - z) * np.cos(theta) + ), + ) + z_ = np.where( + x < -self.thickness, + z, + np.where( + x > self.thickness, max_z * 2 - z, max_z + (max_z - z) * np.sin(theta) + ), + ) write_co(obj, np.stack([x__, y, z_], -1)) - if uniform() < .5: + if uniform() < 0.5: mirror(obj) return obj def compute_roll_total(self): c = self.length / (self.thickness + self.extra_thickness) * (4 * np.pi) - f = lambda t: t * np.sqrt(1 + t * t) + np.log(t + np.sqrt(1 + t * t)) - c + + def f(t): + return t * np.sqrt(1 + t * t) + np.log(t + np.sqrt(1 + t * t)) - c + return fsolve(f, np.zeros(1))[0] def pre_roll(self, obj): subdivide_edge_ring(obj, self.roll_count, (1, 0, 0)) x, y, z = read_co(obj).T - i = np.round((x / self.length + .5) * self.roll_count).astype(int) + i = np.round((x / self.length + 0.5) * self.roll_count).astype(int) t = np.linspace(0, self.roll_total, self.roll_count + 1)[i] - length = (t * np.sqrt(1 + t * t) + np.log(t + np.sqrt(1 + t * t))) * ( - self.thickness + self.extra_thickness) / (4 * np.pi) + length = ( + (t * np.sqrt(1 + t * t) + np.log(t + np.sqrt(1 + t * t))) + * (self.thickness + self.extra_thickness) + / (4 * np.pi) + ) write_co(obj, np.stack([length, y, z], -1)) return i def roll(self, obj, i): t = np.linspace(0, self.roll_total, self.roll_count + 1)[np.concatenate([i, i])] x, y, z = read_co(obj).T - r = (self.thickness + self.extra_thickness) / (2 * np.pi) * t + np.where(z > self.thickness / 2, - -self.thickness / 2, - self.thickness / 2) + r = (self.thickness + self.extra_thickness) / (2 * np.pi) * t + np.where( + z > self.thickness / 2, -self.thickness / 2, self.thickness / 2 + ) write_co(obj, np.stack([r * np.cos(t), y, r * np.sin((t))], -1)) def create_asset(self, **params) -> bpy.types.Object: obj = new_plane() - if self.fold_type == 'roll': + if self.fold_type == "roll": obj.scale = self.length / 2, self.width / 2, 1 else: obj.scale = self.width / 2, self.length / 2, 1 butil.apply_transform(obj, True) i = None - if self.fold_type == 'roll': + if self.fold_type == "roll": i = self.pre_roll(obj) - wrap_sides(obj, self.surface, 'z', 'x', 'y', strength=uniform(.2, .4)) - butil.modify_mesh(obj, 'SOLIDIFY', thickness=self.thickness, offset=1) - if self.fold_type == 'roll': + wrap_sides(obj, self.surface, "z", "x", "y", strength=uniform(0.2, 0.4)) + butil.modify_mesh(obj, "SOLIDIFY", thickness=self.thickness, offset=1) + if self.fold_type == "roll": self.roll(obj, i) subdivide_edge_ring(obj, 16, (0, 1, 0)) else: @@ -117,7 +145,11 @@ def create_asset(self, **params) -> bpy.types.Object: self.fold(obj) subdivide_edge_ring(obj, 16, (1, 0, 0)) subdivide_edge_ring(obj, 16, (0, 1, 0)) - butil.modify_mesh(obj, 
'BEVEL', width=self.thickness * uniform(.4, .8), segments=2) - surface.add_geomod(obj, geo_extension, apply=True, input_args=[uniform(.05, .1)]) + butil.modify_mesh( + obj, "BEVEL", width=self.thickness * uniform(0.4, 0.8), segments=2 + ) + surface.add_geomod( + obj, geo_extension, apply=True, input_args=[uniform(0.05, 0.1)] + ) subsurf(obj, 1) return obj diff --git a/infinigen/assets/objects/cloud/__init__.py b/infinigen/assets/objects/cloud/__init__.py new file mode 100644 index 000000000..8ec810324 --- /dev/null +++ b/infinigen/assets/objects/cloud/__init__.py @@ -0,0 +1,7 @@ +from .generate import ( + AltocumulusFactory, + CloudFactory, + CumulonimbusFactory, + CumulusFactory, + StratocumulusFactory, +) diff --git a/infinigen/assets/weather/cloud/base.py b/infinigen/assets/objects/cloud/base.py similarity index 100% rename from infinigen/assets/weather/cloud/base.py rename to infinigen/assets/objects/cloud/base.py diff --git a/infinigen/assets/weather/cloud/cloud.py b/infinigen/assets/objects/cloud/cloud.py similarity index 68% rename from infinigen/assets/weather/cloud/cloud.py rename to infinigen/assets/objects/cloud/cloud.py index 1383291ad..9779c1082 100644 --- a/infinigen/assets/weather/cloud/cloud.py +++ b/infinigen/assets/objects/cloud/cloud.py @@ -5,25 +5,19 @@ import bpy - import numpy as np -import mathutils - -from tqdm import trange, tqdm -from numpy.random import uniform, normal -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core import surface - -from infinigen.assets.lighting import sky_lighting -from infinigen.assets.weather.cloud.node import geometry_func, shader_material -from infinigen.assets.weather.cloud.node import scatter_func - from scipy.ndimage import distance_transform_edt from skimage import measure -from infinigen.core.util.logging import Suppress +from infinigen.assets.lighting import sky_lighting +from infinigen.assets.objects.cloud.node import ( + geometry_func, + scatter_func, + shader_material, +) +from infinigen.core import surface from infinigen.core.util import blender as butil +from infinigen.core.util.logging import Suppress def set_curves(curve, points): @@ -83,20 +77,25 @@ def get_params(self): rotate_angle = np.random.uniform(*cls.ANGLE_ROTATE_RANGE) return { - 'density': density, - 'anisotropy': anisotropy, - 'noise_scale': noise_scale, - 'noise_detail': noise_detail, - 'voronoi_scale': voronoi_scale, - 'mix_factor': mix_factor, - 'rotate_angle': rotate_angle, - 'emission_strength': emission, } + "density": density, + "anisotropy": anisotropy, + "noise_scale": noise_scale, + "noise_detail": noise_detail, + "voronoi_scale": voronoi_scale, + "mix_factor": mix_factor, + "rotate_angle": rotate_angle, + "emission_strength": emission, + } def update_geo_params(self, geo_params): return geo_params def update_shader_params(self, shader_params): - shader_params.update({'density': np.random.uniform(0.05, 0.25), }) + shader_params.update( + { + "density": np.random.uniform(0.05, 0.25), + } + ) return shader_params def get_node_params(self): @@ -104,7 +103,12 @@ def get_node_params(self): curve_func = self.get_curve_func() params = self.get_params() - params.update({'scale': scale, 'curve_func': curve_func, }) + params.update( + { + "scale": scale, + "curve_func": curve_func, + } + ) geo_params = self.update_geo_params(dict(params)) shader_params = self.update_shader_params(dict(params)) @@ -120,10 +124,19 @@ def sample_curves(self): forth_pt_x = 1.0 forth_pt_y = 
np.random.uniform(0.90, 1.00) - return [[first_pt_x, first_pt_y], [second_pt_x, second_pt_y], [third_pt_x, third_pt_y], - [forth_pt_x, forth_pt_y], ] - - def make_cloud(self, marching_cubes=False, resolution=128, selection=None, ): + return [ + [first_pt_x, first_pt_y], + [second_pt_x, second_pt_y], + [third_pt_x, third_pt_y], + [forth_pt_x, forth_pt_y], + ] + + def make_cloud( + self, + marching_cubes=False, + resolution=128, + selection=None, + ): cloud = bpy.data.objects.new(self.name, self.ref_cloud.copy()) link_object(cloud) @@ -131,14 +144,27 @@ def make_cloud(self, marching_cubes=False, resolution=128, selection=None, ): shader_params = self.shader_params points_only = marching_cubes - mat = surface.add_material(cloud, shader_material, selection=selection, input_kwargs=shader_params, ) - - geo_params['material'] = mat - surface.add_geomod(cloud, geometry_func(points_only=points_only, resolution=resolution, ), - selection=selection, input_kwargs=geo_params, apply=True, ) + mat = surface.add_material( + cloud, + shader_material, + selection=selection, + input_kwargs=shader_params, + ) + + geo_params["material"] = mat + surface.add_geomod( + cloud, + geometry_func( + points_only=points_only, + resolution=resolution, + ), + selection=selection, + input_kwargs=geo_params, + apply=True, + ) if not marching_cubes: - cloud.dimensions = geo_params['scale'] + cloud.dimensions = geo_params["scale"] return cloud name = cloud.name @@ -171,14 +197,14 @@ def make_cloud(self, marching_cubes=False, resolution=128, selection=None, ): cloud = bpy.data.objects.new(name, mesh) cloud.active_material = mat - cloud.dimensions = geo_params['scale'] + cloud.dimensions = geo_params["scale"] link_object(cloud) with Suppress(): # Set origin butil.select(cloud) - bpy.ops.object.origin_set(type='ORIGIN_GEOMETRY', center='MEDIAN') + bpy.ops.object.origin_set(type="ORIGIN_GEOMETRY", center="MEDIAN") # Fix normals bpy.context.view_layer.objects.active = cloud @@ -217,12 +243,24 @@ def sample_curves(self): forth_pt_x = 1.0 forth_pt_y = np.random.uniform(-1.0, 0.50) - return [[first_pt_x, first_pt_y], [second_pt_x, second_pt_y], [third_pt_x, third_pt_y], - [forth_pt_x, forth_pt_y], ] - - def make_cloud(self, marching_cubes=False, resolution=128, selection=None, ): - return super().make_cloud(marching_cubes=marching_cubes, resolution=resolution * 2, - selection=selection, ) + return [ + [first_pt_x, first_pt_y], + [second_pt_x, second_pt_y], + [third_pt_x, third_pt_y], + [forth_pt_x, forth_pt_y], + ] + + def make_cloud( + self, + marching_cubes=False, + resolution=128, + selection=None, + ): + return super().make_cloud( + marching_cubes=marching_cubes, + resolution=resolution * 2, + selection=selection, + ) def get_scale(self): scale_x = np.random.uniform(512.0, 1024.0) @@ -232,7 +270,11 @@ def get_scale(self): return scales def update_shader_params(self, shader_params): - shader_params.update({'density': np.random.uniform(0.1, 0.3), }) + shader_params.update( + { + "density": np.random.uniform(0.1, 0.3), + } + ) return shader_params @@ -240,7 +282,11 @@ class Stratocumulus(Cumulus): ANGLE_ROTATE_RANGE = [0.0, np.pi / 4] def update_shader_params(self, shader_params): - shader_params.update({'density': np.random.uniform(0.01, 0.10), }) + shader_params.update( + { + "density": np.random.uniform(0.01, 0.10), + } + ) return shader_params def get_scale(self): @@ -301,43 +347,64 @@ def get_params(self): densities = np.random.uniform(*cls.DENSITY_RANGE, size=cls.NUM_SUBCLOUDS) anisotropies = 
np.random.uniform(*cls.ANISOTROPY_RANGE, size=cls.NUM_SUBCLOUDS) noise_scales = np.random.uniform(*cls.NOISE_SCALE_RANGE, size=cls.NUM_SUBCLOUDS) - noise_details = np.random.uniform(*cls.NOISE_DETAIL_RANGE, size=cls.NUM_SUBCLOUDS) - voronoi_scales = np.random.uniform(*cls.VORONOI_SCALE_RANGE, size=cls.NUM_SUBCLOUDS) + noise_details = np.random.uniform( + *cls.NOISE_DETAIL_RANGE, size=cls.NUM_SUBCLOUDS + ) + voronoi_scales = np.random.uniform( + *cls.VORONOI_SCALE_RANGE, size=cls.NUM_SUBCLOUDS + ) mix_factors = np.random.uniform(*cls.MIX_FACTOR_RANGE, size=cls.NUM_SUBCLOUDS) emissions = np.random.uniform(*cls.EMISSION_RANGE, size=cls.NUM_SUBCLOUDS) - rotate_angles = np.random.uniform(*cls.ANGLE_ROTATE_RANGE, size=cls.NUM_SUBCLOUDS) + rotate_angles = np.random.uniform( + *cls.ANGLE_ROTATE_RANGE, size=cls.NUM_SUBCLOUDS + ) # Scatter Params voronoi_scale = np.random.uniform(*cls.SCATTER_VORONOI_SCALE_RANGE) vertices_x = np.random.randint(*cls.SCATTER_GRID_VERTICES_RANGE) vertices_y = np.random.randint(*cls.SCATTER_GRID_VERTICES_RANGE) - scatter_params = {'voronoi_scale': voronoi_scale, 'vertices_x': vertices_x, 'vertices_y': vertices_y, } + scatter_params = { + "voronoi_scale": voronoi_scale, + "vertices_x": vertices_x, + "vertices_y": vertices_y, + } return { - 'densities': densities, - 'anisotropies': anisotropies, - 'noise_scales': noise_scales, - 'noise_details': noise_details, - 'voronoi_scales': voronoi_scales, - 'mix_factors': mix_factors, - 'rotate_angles': rotate_angles, - 'emission_strengths': emissions, - 'scatter_params': scatter_params, } + "densities": densities, + "anisotropies": anisotropies, + "noise_scales": noise_scales, + "noise_details": noise_details, + "voronoi_scales": voronoi_scales, + "mix_factors": mix_factors, + "rotate_angles": rotate_angles, + "emission_strengths": emissions, + "scatter_params": scatter_params, + } def update_shader_params(self, shader_params): - params = zip(shader_params['anisotropies'], shader_params['noise_scales'], - shader_params['noise_details'], shader_params['voronoi_scales'], shader_params['mix_factors'], - shader_params['rotate_angles'], shader_params['emission_strengths'], ) - - shader_params = [{ - 'density': np.random.uniform(0.05, 0.25), - 'anisotropy': param[0], - 'noise_scale': param[1], - 'noise_detail': param[2], - 'voronoi_scale': param[3], - 'mix_factor': param[4], - 'rotate_angle': param[5], - 'emission_strength': param[6], } for param in params] + params = zip( + shader_params["anisotropies"], + shader_params["noise_scales"], + shader_params["noise_details"], + shader_params["voronoi_scales"], + shader_params["mix_factors"], + shader_params["rotate_angles"], + shader_params["emission_strengths"], + ) + + shader_params = [ + { + "density": np.random.uniform(0.05, 0.25), + "anisotropy": param[0], + "noise_scale": param[1], + "noise_detail": param[2], + "voronoi_scale": param[3], + "mix_factor": param[4], + "rotate_angle": param[5], + "emission_strength": param[6], + } + for param in params + ] return shader_params def get_node_params(self): @@ -347,13 +414,23 @@ def get_node_params(self): curve_funcs = [self.get_curve_func() for _ in range(cls.NUM_SUBCLOUDS)] params = self.get_params() - params.update({'scale': scale, 'curve_funcs': curve_funcs, }) + params.update( + { + "scale": scale, + "curve_funcs": curve_funcs, + } + ) geo_params = self.update_geo_params(dict(params)) shader_params = self.update_shader_params(dict(params)) return geo_params, shader_params - def make_cloud(self, marching_cubes=False, resolution=128, 
selection=None, ): + def make_cloud( + self, + marching_cubes=False, + resolution=128, + selection=None, + ): resolution = min(resolution, 64) cloud = bpy.data.objects.new(self.name, self.ref_cloud.copy()) link_object(cloud) @@ -362,15 +439,25 @@ def make_cloud(self, marching_cubes=False, resolution=128, selection=None, ): shader_params = self.shader_params points_only = apply = marching_cubes - mats = [surface.shaderfunc_to_material(shader_material, **shader_param) for shader_param in - shader_params] - - geo_params['materials'] = mats - surface.add_geomod(cloud, scatter_func(points_only=points_only, resolution=resolution, ), - selection=selection, input_kwargs=geo_params, apply=True, ) + mats = [ + surface.shaderfunc_to_material(shader_material, **shader_param) + for shader_param in shader_params + ] + + geo_params["materials"] = mats + surface.add_geomod( + cloud, + scatter_func( + points_only=points_only, + resolution=resolution, + ), + selection=selection, + input_kwargs=geo_params, + apply=True, + ) # TODO: fix this and check if scales is still needed - cloud.dimensions = geo_params['scale'] + cloud.dimensions = geo_params["scale"] return cloud @@ -429,7 +516,7 @@ def create_3d_grid(steps=64): ys = np.linspace(-1.0, 1.0, steps) zs = np.linspace(-1.0, 1.0, steps) - xs, ys, zs = np.meshgrid(xs, ys, zs, indexing='ij') + xs, ys, zs = np.meshgrid(xs, ys, zs, indexing="ij") xs = xs.reshape(-1) ys = ys.reshape(-1) zs = zs.reshape(-1) @@ -437,7 +524,7 @@ def create_3d_grid(steps=64): return np.stack((xs, ys, zs), axis=1) -def create_cube(name, steps=128, collection='Clouds'): +def create_cube(name, steps=128, collection="Clouds"): grid = create_3d_grid(steps=steps) mesh = bpy.data.meshes.new(name) @@ -450,8 +537,8 @@ def create_cube(name, steps=128, collection='Clouds'): def initialize(collection): - bpy.context.scene.render.engine = 'CYCLES' - bpy.context.scene.cycles.device = 'GPU' + bpy.context.scene.render.engine = "CYCLES" + bpy.context.scene.cycles.device = "GPU" bpy.context.scene.cycles.preview_samples = 32 bpy.context.scene.cycles.samples = 128 @@ -467,20 +554,39 @@ def initialize(collection): resolution = 256 ref_grid = create_3d_grid(steps=128) - ref_cloud = bpy.data.meshes.new('ref_cloud') + ref_cloud = bpy.data.meshes.new("ref_cloud") ref_cloud.from_pydata(ref_grid, [], []) ref_cloud.update() clouds = [] - clouds += [Cumulus(f'Cumulus_{i:03d}', ref_cloud).make_cloud(marching_cubes=False, resolution=resolution, ) - for i in range(6)] - clouds += [Cumulonimbus(f'Cumulonimbus{i:03d}', ref_cloud).make_cloud(marching_cubes=False, - resolution=resolution, ) for i in range(6)] - clouds += [Stratocumulus(f'Stratocumulus{i:03d}', ref_cloud).make_cloud(marching_cubes=False, - resolution=resolution, ) for i in range(6)] clouds += [ - Altocumulus(f'Altocumulus{i:03d}', ref_cloud).make_cloud(marching_cubes=False, resolution=resolution, ) - for i in range(7)] + Cumulus(f"Cumulus_{i:03d}", ref_cloud).make_cloud( + marching_cubes=False, + resolution=resolution, + ) + for i in range(6) + ] + clouds += [ + Cumulonimbus(f"Cumulonimbus{i:03d}", ref_cloud).make_cloud( + marching_cubes=False, + resolution=resolution, + ) + for i in range(6) + ] + clouds += [ + Stratocumulus(f"Stratocumulus{i:03d}", ref_cloud).make_cloud( + marching_cubes=False, + resolution=resolution, + ) + for i in range(6) + ] + clouds += [ + Altocumulus(f"Altocumulus{i:03d}", ref_cloud).make_cloud( + marching_cubes=False, + resolution=resolution, + ) + for i in range(7) + ] for cloud in clouds: 
bpy.data.collections[collection].objects.link(cloud) bpy.context.view_layer.update() @@ -510,7 +616,7 @@ def create_collection(name): def main(): clean() - collection_name = 'Clouds' + collection_name = "Clouds" remove_collection(collection_name) create_collection(collection_name) @@ -520,13 +626,13 @@ def main(): def single(): clean() - collection_name = 'Clouds' + collection_name = "Clouds" remove_collection(collection_name) create_collection(collection_name) - bpy.context.scene.render.engine = 'CYCLES' - bpy.context.scene.cycles.device = 'GPU' + bpy.context.scene.render.engine = "CYCLES" + bpy.context.scene.cycles.device = "GPU" bpy.context.scene.cycles.preview_samples = 32 bpy.context.scene.cycles.samples = 128 @@ -538,12 +644,17 @@ def single(): ys = ys.reshape(-1) ref_grid = create_3d_grid(steps=256) - ref_cloud = bpy.data.meshes.new('ref_cloud') + ref_cloud = bpy.data.meshes.new("ref_cloud") ref_cloud.from_pydata(ref_grid, [], []) ref_cloud.update() - clouds = [Cumulonimbus(f'Cumulonimbus_{i:03d}', ref_cloud).make_cloud(marching_cubes=True, resolution=128, ) - for i in range(1)] + clouds = [ + Cumulonimbus(f"Cumulonimbus_{i:03d}", ref_cloud).make_cloud( + marching_cubes=True, + resolution=128, + ) + for i in range(1) + ] for cloud in clouds: bpy.data.collections[collection_name].objects.link(cloud) bpy.context.view_layer.update() diff --git a/infinigen/assets/objects/cloud/generate.py b/infinigen/assets/objects/cloud/generate.py new file mode 100644 index 000000000..d565bba1d --- /dev/null +++ b/infinigen/assets/objects/cloud/generate.py @@ -0,0 +1,271 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Hei Law + + +import bpy +import gin +import numpy as np + +from infinigen.assets.utils.object import new_cube +from infinigen.core import surface +from infinigen.core.nodes.node_wrangler import Nodes +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.tagging import tag_object +from infinigen.core.util import blender as butil +from infinigen.core.util.math import FixedSeed +from infinigen.core.util.random import random_general as rg + +from .cloud import ( + Altocumulus, + Cumulonimbus, + Cumulus, + Stratocumulus, + create_3d_grid, +) + + +@gin.configurable +class CloudFactory(AssetFactory): + def __init__( + self, + factory_seed, + coarse=False, + terrain_mesh=None, + max_distance=300, + steps=128, + cloudy=("bool", 0.01), + ): + super(CloudFactory, self).__init__(factory_seed, coarse=coarse) + + self.max_distance = max_distance + + self.ref_cloud = bpy.data.meshes.new("ref_cloud") + self.ref_cloud.from_pydata(create_3d_grid(steps=steps), [], []) + self.ref_cloud.update() + + with FixedSeed(factory_seed): + self.cloudy = rg(cloudy) + + self.cloud_types = ( + [ + Cumulonimbus, + ] + if self.cloudy + else [ + Cumulus, + Stratocumulus, + Altocumulus, + ] + ) + + self.resolutions = { + Cumulonimbus: [16, 128], + Cumulus: [16, 128], + Stratocumulus: [32, 256], + Altocumulus: [16, 64], + } + scale_resolution = 4 + self.resolutions = { + k: (scale_resolution * u, scale_resolution * v) + for k, (u, v) in self.resolutions.items() + } + + self.min_distance = 256 if self.cloudy else 64 + self.dome_radius = 1024 if self.cloudy else 256 + self.dome_threshold = 32 if self.cloudy else 0 + self.density_range = [1e-5, 1e-4] if self.cloudy else [1e-4, 2e-4] + + self.max_scale = max([t.MAX_EXPECTED_SCALE for t in self.cloud_types]) + 
self.density = max([t.PLACEHOLDER_DENSITY for t in self.cloud_types]) + + def spawn_locations(self): + obj = new_cube() + surface.add_geomod( + obj, + self.geo_dome, + apply=True, + input_args=[ + self.dome_radius, + self.dome_threshold, + self.density_range, + self.min_distance, + ], + ) + + locations = np.array([obj.matrix_world @ v.co for v in obj.data.vertices]) + butil.delete(obj) + return locations + + def create_placeholder(self, **kwargs) -> bpy.types.Object: + return butil.spawn_empty("placeholder", disp_type="CUBE", s=self.max_scale) + + def create_asset(self, distance, **kwargs): + cloud_type = np.random.choice(self.cloud_types) + + resolution_min, resolution_max = self.resolutions[cloud_type] + resolution = max(1 - distance / self.max_distance, 0) + resolution = resolution * (resolution_max - resolution_min) + resolution_min + resolution = int(resolution) + + new_cloud = cloud_type("Cloud", self.ref_cloud) + new_cloud = new_cloud.make_cloud( + marching_cubes=False, + resolution=resolution, + ) + butil.apply_transform(new_cloud) + + tag_object(new_cloud, "cloud") + return new_cloud + + @staticmethod + def geo_dome( + nw, + dome_radius, + dome_threshold, + density_range, + min_distance, + ): + ico_sphere = nw.new_node( + "GeometryNodeMeshIcoSphere", + input_kwargs={ + "Radius": dome_radius, + "Subdivisions": 8, + }, + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": ico_sphere, + "Scale": (1.2, 1.4, 1.0), + }, + ) + + position = nw.new_node(Nodes.InputPosition) + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, + input_kwargs={ + "Vector": position, + }, + ) + + less_than = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: separate_xyz.outputs["Z"], + 1: dome_threshold, + }, + attrs={ + "operation": "LESS_THAN", + }, + ) + + delete_geometry = nw.new_node( + "GeometryNodeDeleteGeometry", + input_kwargs={ + "Geometry": transform, + "Selection": less_than, + }, + ) + + distribute_points_on_faces = nw.new_node( + Nodes.DistributePointsOnFaces, + input_kwargs={ + "Mesh": delete_geometry, + "Distance Min": min_distance, + "Density Max": np.random.uniform(*density_range), + "Seed": np.random.randint(1e5), + }, + attrs={ + "distribute_method": "POISSON", + }, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "Z": nw.uniform(32, np.random.randint(64, 1e5)), + }, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": distribute_points_on_faces.outputs["Points"], + "Offset": combine_xyz, + }, + ) + + verts = nw.new_node( + Nodes.PointsToVertices, + input_kwargs={ + "Points": set_position, + }, + ) + + nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": verts, + }, + ) + + +class CumulonimbusFactory(CloudFactory): + def __init__( + self, + factory_seed, + coarse=False, + max_distance=300, + steps=128, + ): + self.cloud_types = [Cumulonimbus] + super(CumulonimbusFactory, self).__init__( + factory_seed, coarse, max_distance, steps + ) + self.cloud_types = [Cumulonimbus] + + +class CumulusFactory(CloudFactory): + def __init__( + self, + factory_seed, + coarse=False, + max_distance=300, + steps=128, + ): + self.cloud_types = [Cumulus] + super(CumulusFactory, self).__init__(factory_seed, coarse, max_distance, steps) + self.cloud_types = [Cumulus] + + +class StratocumulusFactory(CloudFactory): + def __init__( + self, + factory_seed, + coarse=False, + max_distance=300, + steps=128, + ): + self.cloud_types = [Stratocumulus] + super(StratocumulusFactory, self).__init__( + factory_seed, coarse, 
max_distance, steps + ) + self.cloud_types = [Stratocumulus] + + +class AltocumulusFactory(CloudFactory): + def __init__( + self, + factory_seed, + coarse=False, + max_distance=300, + steps=128, + ): + self.cloud_types = [Altocumulus] + super(AltocumulusFactory, self).__init__( + factory_seed, coarse, max_distance, steps + ) + self.cloud_types = [Altocumulus] diff --git a/infinigen/assets/weather/cloud/node.py b/infinigen/assets/objects/cloud/node.py similarity index 71% rename from infinigen/assets/weather/cloud/node.py rename to infinigen/assets/objects/cloud/node.py index 118f3eb45..a81e62a9c 100644 --- a/infinigen/assets/weather/cloud/node.py +++ b/infinigen/assets/objects/cloud/node.py @@ -4,15 +4,17 @@ # Authors: Hei Law # Acknowledgement: This file draws inspiration from https://www.youtube.com/watch?v=lPAYX8z9i8M by CGCookie -from infinigen.core.nodes.node_wrangler import Nodes import numpy as np +from infinigen.core.nodes.node_wrangler import Nodes + + def cloud_geometry_func( points_only=False, resolution=256, ): def cloud_geometry_node( - nw, + nw, density, noise_scale, noise_detail, @@ -24,26 +26,26 @@ def cloud_geometry_node( **kwargs, ): scale = (1.5, 1.5, 2.0) - + group_input = nw.new_node(Nodes.GroupInput) - position = nw.new_node(Nodes.InputPosition) + position = nw.new_node(Nodes.InputPosition) vector_rotate = nw.new_node( Nodes.VectorRotate, input_kwargs={ - 'Vector': position, - 'Angle': rotate_angle, + "Vector": position, + "Angle": rotate_angle, }, ) noise_texture_1 = nw.new_node( Nodes.NoiseTexture, input_kwargs={ - 'Vector': vector_rotate, - 'Scale': 2.0000, + "Vector": vector_rotate, + "Scale": 2.0000, }, ) - + subtract = nw.new_node( Nodes.VectorMath, input_kwargs={ @@ -51,21 +53,21 @@ def cloud_geometry_node( 1: (0.5000, 0.5000, 0.5000), }, attrs={ - 'operation': 'SUBTRACT', + "operation": "SUBTRACT", }, ) - + scale = nw.new_node( Nodes.VectorMath, input_kwargs={ 0: subtract.outputs["Vector"], - 'Scale': 0.1000, + "Scale": 0.1000, }, attrs={ - 'operation': 'SCALE', + "operation": "SCALE", }, ) - + multiply = nw.new_node( Nodes.VectorMath, input_kwargs={ @@ -73,18 +75,18 @@ def cloud_geometry_node( 1: (1.5000, 1.5000, 2.0000), }, attrs={ - 'operation': 'MULTIPLY', + "operation": "MULTIPLY", }, ) - + vector_curves = nw.new_node( Nodes.VectorCurve, input_kwargs={ - 'Vector': multiply.outputs["Vector"], + "Vector": multiply.outputs["Vector"], }, ) curve_func(vector_curves.mapping.curves) - + add = nw.new_node( Nodes.VectorMath, input_kwargs={ @@ -92,16 +94,16 @@ def cloud_geometry_node( 1: vector_curves, }, ) - + noise_texture = nw.new_node( Nodes.NoiseTexture, input_kwargs={ - 'Vector': vector_rotate, - 'Scale': noise_scale, - 'Detail': noise_detail, + "Vector": vector_rotate, + "Scale": noise_scale, + "Detail": noise_detail, }, ) - + subtract_1 = nw.new_node( Nodes.VectorMath, input_kwargs={ @@ -109,21 +111,21 @@ def cloud_geometry_node( 1: (0.5000, 0.5000, 0.5000), }, attrs={ - 'operation': 'SUBTRACT', + "operation": "SUBTRACT", }, ) - + scale_1 = nw.new_node( Nodes.VectorMath, input_kwargs={ 0: subtract_1.outputs["Vector"], - 'Scale': 0.1000, + "Scale": 0.1000, }, attrs={ - 'operation': 'SCALE', + "operation": "SCALE", }, ) - + add_1 = nw.new_node( Nodes.VectorMath, input_kwargs={ @@ -131,33 +133,33 @@ def cloud_geometry_node( 1: scale_1.outputs["Vector"], }, ) - + voronoi_texture = nw.new_node( Nodes.VoronoiTexture, input_kwargs={ - 'Vector': add_1.outputs["Vector"], - 'Scale': voronoi_scale, + "Vector": add_1.outputs["Vector"], + "Scale": voronoi_scale, }, ) - 
+ noise_texture_2 = nw.new_node( Nodes.NoiseTexture, input_kwargs={ - 'Vector': vector_rotate, - 'Detail': 5.0000, + "Vector": vector_rotate, + "Detail": 5.0000, }, ) - + subtract_2 = nw.new_node( Nodes.Math, input_kwargs={ 0: noise_texture_2.outputs["Fac"], }, attrs={ - 'operation': 'SUBTRACT', + "operation": "SUBTRACT", }, ) - + multiply_1 = nw.new_node( Nodes.Math, input_kwargs={ @@ -165,10 +167,10 @@ def cloud_geometry_node( 1: 0.1000, }, attrs={ - 'operation': 'MULTIPLY', + "operation": "MULTIPLY", }, ) - + add_2 = nw.new_node( Nodes.Math, input_kwargs={ @@ -176,38 +178,38 @@ def cloud_geometry_node( 1: multiply_1, }, ) - + mix_1 = nw.new_node( Nodes.MixRGB, input_kwargs={ - 'Fac': mix_factor, - 'Color1': add.outputs["Vector"], - 'Color2': add_2, + "Fac": mix_factor, + "Color1": add.outputs["Vector"], + "Color2": add_2, }, attrs={ - 'blend_type': 'OVERLAY', + "blend_type": "OVERLAY", }, ) - + length = nw.new_node( Nodes.VectorMath, input_kwargs={ 0: mix_1, }, attrs={ - 'operation': 'LENGTH', + "operation": "LENGTH", }, ) - + noise_texture_3 = nw.new_node( Nodes.NoiseTexture, input_kwargs={ - 'Vector': vector_rotate, - 'Scale': 2.0000, - 'Detail': 5.0000, + "Vector": vector_rotate, + "Scale": 2.0000, + "Detail": 5.0000, }, ) - + multiply_2 = nw.new_node( Nodes.Math, input_kwargs={ @@ -215,41 +217,42 @@ def cloud_geometry_node( 1: 2.0000, }, attrs={ - 'operation': 'MULTIPLY', + "operation": "MULTIPLY", }, ) - + noise_texture_4 = nw.new_node( Nodes.NoiseTexture, input_kwargs={ - 'Vector': vector_rotate, - 'Scale': 1.5000, - 'Detail': 5.0000, + "Vector": vector_rotate, + "Scale": 1.5000, + "Detail": 5.0000, }, ) - + divide = nw.new_node( Nodes.Math, input_kwargs={ 0: noise_texture_4.outputs["Fac"], 1: 100.0000, - }, attrs={ - 'operation': 'DIVIDE', + }, + attrs={ + "operation": "DIVIDE", }, ) - + map_range = nw.new_node( Nodes.MapRange, input_kwargs={ - 'Value': length.outputs["Value"], + "Value": length.outputs["Value"], 3: multiply_2, 4: divide, }, attrs={ - 'clamp': False, + "clamp": False, }, ) - + multiply_3 = nw.new_node( Nodes.Math, input_kwargs={ @@ -257,10 +260,10 @@ def cloud_geometry_node( 1: density, }, attrs={ - 'operation': 'MULTIPLY', + "operation": "MULTIPLY", }, ) - + greater_than = nw.new_node( Nodes.Math, input_kwargs={ @@ -268,41 +271,42 @@ def cloud_geometry_node( 1: 0.01, }, attrs={ - 'operation': 'GREATER_THAN', + "operation": "GREATER_THAN", }, ) - + separate_geometry = nw.new_node( Nodes.SeparateGeometry, input_kwargs={ - 'Geometry': group_input.outputs["Geometry"], - 'Selection': greater_than, + "Geometry": group_input.outputs["Geometry"], + "Selection": greater_than, }, ) - + points_to_volume = nw.new_node( Nodes.PointsToVolume, input_kwargs={ - 'Points': separate_geometry.outputs["Selection"], - 'Radius': 0.0150, + "Points": separate_geometry.outputs["Selection"], + "Radius": 0.0150, }, ) - + volume_to_mesh = nw.new_node( Nodes.VolumeToMesh, input_kwargs={ - 'Volume': points_to_volume, + "Volume": points_to_volume, }, ) - + set_material = nw.new_node( Nodes.SetMaterial, input_kwargs={ - 'Geometry': volume_to_mesh, - 'Material': material, + "Geometry": volume_to_mesh, + "Material": material, }, ) return set_material + return cloud_geometry_node @@ -316,7 +320,7 @@ def geometry_func( ) def geometry_nodes( - nw, + nw, density, noise_scale, noise_detail, @@ -328,7 +332,7 @@ def geometry_nodes( **kwargs, ): cloud_mesh = cloud_func( - nw, + nw, density, noise_scale, noise_detail, @@ -343,9 +347,10 @@ def geometry_nodes( group_output = nw.new_node( Nodes.GroupOutput, 
input_kwargs={ - 'Geometry': cloud_mesh, + "Geometry": cloud_mesh, }, ) + return geometry_nodes @@ -361,25 +366,25 @@ def shader_material( **kwargs, ): location = (0.0, 0.0, 0.0) - scale = (0.9, 0.9, 0.9) + scale = (0.9, 0.9, 0.9) texture_coordinate = nw.new_node(Nodes.TextureCoord) - + mapping = nw.new_node( Nodes.Mapping, input_kwargs={ - 'Vector': texture_coordinate.outputs["Object"], + "Vector": texture_coordinate.outputs["Object"], }, ) - + noise_texture_3 = nw.new_node( Nodes.NoiseTexture, input_kwargs={ - 'Vector': mapping, - 'Scale': 2.0000, + "Vector": mapping, + "Scale": 2.0000, }, ) - + subtract = nw.new_node( Nodes.VectorMath, input_kwargs={ @@ -387,21 +392,21 @@ def shader_material( 1: (0.5000, 0.5000, 0.5000), }, attrs={ - 'operation': 'SUBTRACT', + "operation": "SUBTRACT", }, ) - + scale = nw.new_node( Nodes.VectorMath, input_kwargs={ 0: subtract.outputs["Vector"], - 'Scale': 0.1000, + "Scale": 0.1000, }, attrs={ - 'operation': 'SCALE', + "operation": "SCALE", }, ) - + add = nw.new_node( Nodes.VectorMath, input_kwargs={ @@ -409,15 +414,16 @@ def shader_material( 1: mapping, }, ) - + noise_texture = nw.new_node( Nodes.NoiseTexture, input_kwargs={ - 'Vector': mapping, - 'Scale': noise_scale, - 'Detail': noise_detail, - }) - + "Vector": mapping, + "Scale": noise_scale, + "Detail": noise_detail, + }, + ) + subtract_1 = nw.new_node( Nodes.VectorMath, input_kwargs={ @@ -425,54 +431,54 @@ def shader_material( 1: (0.5000, 0.5000, 0.5000), }, attrs={ - 'operation': 'SUBTRACT', + "operation": "SUBTRACT", }, ) - + scale_1 = nw.new_node( Nodes.VectorMath, input_kwargs={ 0: subtract_1.outputs["Vector"], - 'Scale': 0.1000, + "Scale": 0.1000, }, attrs={ - 'operation': 'SCALE', + "operation": "SCALE", }, ) - + add_1 = nw.new_node( Nodes.VectorMath, input_kwargs={ 1: scale_1.outputs["Vector"], }, ) - + voronoi_texture_1 = nw.new_node( Nodes.VoronoiTexture, input_kwargs={ - 'Vector': add_1.outputs["Vector"], - 'Scale': voronoi_scale, + "Vector": add_1.outputs["Vector"], + "Scale": voronoi_scale, }, ) - + noise_texture_2 = nw.new_node( Nodes.NoiseTexture, input_kwargs={ - 'Scale': mapping, - 'Detail': 5.0000, + "Scale": mapping, + "Detail": 5.0000, }, ) - + subtract_2 = nw.new_node( Nodes.Math, input_kwargs={ 0: noise_texture_2.outputs["Fac"], }, attrs={ - 'operation': 'SUBTRACT', + "operation": "SUBTRACT", }, ) - + multiply = nw.new_node( Nodes.Math, input_kwargs={ @@ -480,10 +486,10 @@ def shader_material( 1: 0.1000, }, attrs={ - 'operation': 'MULTIPLY', + "operation": "MULTIPLY", }, ) - + add_2 = nw.new_node( Nodes.Math, input_kwargs={ @@ -491,38 +497,38 @@ def shader_material( 1: multiply, }, ) - + mix_1 = nw.new_node( Nodes.MixRGB, input_kwargs={ - 'Fac': 0.3, - 'Color1': add.outputs["Vector"], - 'Color2': add_2, + "Fac": 0.3, + "Color1": add.outputs["Vector"], + "Color2": add_2, }, attrs={ - 'blend_type': 'OVERLAY', + "blend_type": "OVERLAY", }, ) - + length = nw.new_node( Nodes.VectorMath, input_kwargs={ 0: mix_1, }, attrs={ - 'operation': 'LENGTH', + "operation": "LENGTH", }, ) - + noise_texture_4 = nw.new_node( Nodes.NoiseTexture, input_kwargs={ - 'Vector': mapping, - 'Scale': 2.0000, - 'Detail': 5.0000, + "Vector": mapping, + "Scale": 2.0000, + "Detail": 5.0000, }, ) - + multiply_1 = nw.new_node( Nodes.Math, input_kwargs={ @@ -530,19 +536,19 @@ def shader_material( 1: 2.0000, }, attrs={ - 'operation': 'MULTIPLY', + "operation": "MULTIPLY", }, ) - + noise_texture_5 = nw.new_node( Nodes.NoiseTexture, input_kwargs={ - 'Vector': mapping, - 'Scale': 1.5000, - 'Detail': 5.0000, + "Vector": 
mapping, + "Scale": 1.5000, + "Detail": 5.0000, }, ) - + divide = nw.new_node( Nodes.Math, input_kwargs={ @@ -550,22 +556,22 @@ def shader_material( 1: 100.0000, }, attrs={ - 'operation': 'DIVIDE', + "operation": "DIVIDE", }, ) - + map_range_1 = nw.new_node( Nodes.MapRange, input_kwargs={ - 'Value': length.outputs["Value"], + "Value": length.outputs["Value"], 3: multiply_1, 4: divide, }, attrs={ - 'clamp': False, + "clamp": False, }, ) - + multiply_2 = nw.new_node( Nodes.Math, input_kwargs={ @@ -573,29 +579,29 @@ def shader_material( 1: density, }, attrs={ - 'operation': 'MULTIPLY', + "operation": "MULTIPLY", }, ) - + principled_volume = nw.new_node( Nodes.PrincipledVolume, input_kwargs={ - 'Color': (1.0000, 1.0000, 1.0000, 1.0000), - 'Density': multiply_2, - 'Anisotropy': anisotropy, - 'Absorption Color': (1.0000, 1.0000, 1.0000, 1.0000), - 'Temperature': 0.0, - 'Emission Strength': emission_strength, + "Color": (1.0000, 1.0000, 1.0000, 1.0000), + "Density": multiply_2, + "Anisotropy": anisotropy, + "Absorption Color": (1.0000, 1.0000, 1.0000, 1.0000), + "Temperature": 0.0, + "Emission Strength": emission_strength, }, ) - + material_output = nw.new_node( Nodes.MaterialOutput, input_kwargs={ - 'Volume': principled_volume, + "Volume": principled_volume, }, attrs={ - 'is_active_output': True, + "is_active_output": True, }, ) @@ -610,7 +616,7 @@ def scatter_func( ) def scatter_nodes( - nw, + nw, densities, noise_scales, noise_details, @@ -638,20 +644,21 @@ def scatter_nodes( nw, *param, **kwargs, - ) for param in params + ) + for param in params ] # Selection voronoi_texture_2 = nw.new_node( Nodes.VoronoiTexture, input_kwargs={ - 'Scale': scatter_params['voronoi_scale'], + "Scale": scatter_params["voronoi_scale"], }, ) map_range = nw.new_node( Nodes.MapRange, input_kwargs={ - 'Value': voronoi_texture_2.outputs["Distance"], + "Value": voronoi_texture_2.outputs["Distance"], }, ) greater_than = nw.new_node( @@ -661,7 +668,7 @@ def scatter_nodes( 1: 0.6, }, attrs={ - 'operation': 'GREATER_THAN', + "operation": "GREATER_THAN", }, ) @@ -669,55 +676,55 @@ def scatter_nodes( grid = nw.new_node( Nodes.MeshGrid, input_kwargs={ - 'Size X': 2.0, - 'Size Y': 2.0, - 'Vertices X': scatter_params['vertices_x'], - 'Vertices Y': scatter_params['vertices_y'], + "Size X": 2.0, + "Size Y": 2.0, + "Vertices X": scatter_params["vertices_x"], + "Vertices Y": scatter_params["vertices_y"], }, ) distribute_points_on_faces = nw.new_node( Nodes.DistributePointsOnFaces, input_kwargs={ - 'Mesh': grid, - 'Distance Min': 0.3, - 'Density Max': 64.0, + "Mesh": grid, + "Distance Min": 0.3, + "Density Max": 64.0, }, attrs={ - 'distribute_method': 'POISSON', + "distribute_method": "POISSON", }, ) # Convert cloud geometry to instance geometry_to_instance = nw.new_node( - 'GeometryNodeGeometryToInstance', + "GeometryNodeGeometryToInstance", input_kwargs={ - 'Geometry': cloud_meshes, + "Geometry": cloud_meshes, }, ) random_value_2 = nw.new_node( Nodes.RandomValue, attrs={ - 'data_type': 'INT', + "data_type": "INT", }, ) instance_on_points = nw.new_node( Nodes.InstanceOnPoints, input_kwargs={ - 'Points': distribute_points_on_faces, - 'Instance': geometry_to_instance, - 'Pick Instance': True, - 'Instance Index': random_value_2.outputs[2], + "Points": distribute_points_on_faces, + "Instance": geometry_to_instance, + "Pick Instance": True, + "Instance Index": random_value_2.outputs[2], }, ) random_value = nw.new_node( Nodes.RandomValue, input_kwargs={ 0: (0.5, 0.5, 0.5), - 'Seed': np.random.randint(int(1e5)), + "Seed": 
np.random.randint(int(1e5)), }, attrs={ - 'data_type': 'FLOAT_VECTOR', + "data_type": "FLOAT_VECTOR", }, ) random_value_4 = nw.new_node( @@ -734,14 +741,14 @@ def scatter_nodes( 1: random_value_4.outputs[1], }, attrs={ - 'operation': 'MULTIPLY', + "operation": "MULTIPLY", }, ) scale_instances = nw.new_node( - 'GeometryNodeScaleInstances', + "GeometryNodeScaleInstances", input_kwargs={ - 'Instances': instance_on_points, - 'Scale': multiply_4, + "Instances": instance_on_points, + "Scale": multiply_4, }, ) random_value_1 = nw.new_node( @@ -757,46 +764,47 @@ def scatter_nodes( 0: random_value_1.outputs[1], }, attrs={ - 'operation': 'RADIANS', + "operation": "RADIANS", }, ) combine_xyz = nw.new_node( Nodes.CombineXYZ, input_kwargs={ - 'Z': radians, + "Z": radians, }, ) rotate_instances = nw.new_node( - 'GeometryNodeRotateInstances', + "GeometryNodeRotateInstances", input_kwargs={ - 'Instances': scale_instances, - 'Rotation': combine_xyz, + "Instances": scale_instances, + "Rotation": combine_xyz, }, ) random_value_3 = nw.new_node(Nodes.RandomValue) combine_xyz_2 = nw.new_node( Nodes.CombineXYZ, input_kwargs={ - 'Z': random_value_3.outputs[1], + "Z": random_value_3.outputs[1], }, ) translate_instances = nw.new_node( Nodes.TranslateInstances, input_kwargs={ - 'Instances': rotate_instances, - 'Translation': combine_xyz_2, + "Instances": rotate_instances, + "Translation": combine_xyz_2, }, ) realize_instances = nw.new_node( Nodes.RealizeInstances, input_kwargs={ - 'Geometry': translate_instances, + "Geometry": translate_instances, }, ) group_output_1 = nw.new_node( Nodes.GroupOutput, input_kwargs={ - 'Geometry': realize_instances, + "Geometry": realize_instances, }, ) + return scatter_nodes diff --git a/infinigen/assets/objects/corals/__init__.py b/infinigen/assets/objects/corals/__init__.py new file mode 100644 index 000000000..47754a5d1 --- /dev/null +++ b/infinigen/assets/objects/corals/__init__.py @@ -0,0 +1,36 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
+ +# Authors: Lingjie Mei + + +from .diff_growth import ( + DiffGrowthBaseCoralFactory, + LeatherBaseCoralFactory, + TableBaseCoralFactory, +) +from .elkhorn import ElkhornBaseCoralFactory +from .fan import FanBaseCoralFactory +from .generate import ( + BrainCoralFactory, + BushCoralFactory, + CauliflowerCoralFactory, + CoralFactory, + ElkhornCoralFactory, + FanCoralFactory, + HoneycombCoralFactory, + LeatherCoralFactory, + StarCoralFactory, + TableCoralFactory, + TubeCoralFactory, + TwigCoralFactory, +) +from .laplacian import CauliflowerBaseCoralFactory +from .reaction_diffusion import ( + BrainBaseCoralFactory, + HoneycombBaseCoralFactory, + ReactionDiffusionBaseCoralFactory, +) +from .star import StarBaseCoralFactory +from .tree import BushBaseCoralFactory, TreeBaseCoralFactory, TwigBaseCoralFactory +from .tube import TubeBaseCoralFactory diff --git a/infinigen/assets/corals/base.py b/infinigen/assets/objects/corals/base.py similarity index 77% rename from infinigen/assets/corals/base.py rename to infinigen/assets/objects/corals/base.py index 3fcec97e1..b415f7a4b 100644 --- a/infinigen/assets/corals/base.py +++ b/infinigen/assets/objects/corals/base.py @@ -10,12 +10,11 @@ class BaseCoralFactory(AssetFactory): - - tentacle_distance = .05 - default_scale = [.8] * 3 - noise_strength = .02 - tentacle_prob = .5 - bump_prob = .3 + tentacle_distance = 0.05 + default_scale = [0.8] * 3 + noise_strength = 0.02 + tentacle_prob = 0.5 + bump_prob = 0.3 density = 500 def __init__(self, factory_seed, coarse=False): @@ -23,4 +22,4 @@ def __init__(self, factory_seed, coarse=False): self.points_fn = lambda nw, points: points def create_asset(self, **params) -> bpy.types.Object: - raise NotImplemented + raise NotImplementedError diff --git a/infinigen/assets/corals/diff_growth.py b/infinigen/assets/objects/corals/diff_growth.py similarity index 57% rename from infinigen/assets/corals/diff_growth.py rename to infinigen/assets/objects/corals/diff_growth.py index 036f104ec..8b985ff9f 100644 --- a/infinigen/assets/corals/diff_growth.py +++ b/infinigen/assets/objects/corals/diff_growth.py @@ -7,16 +7,20 @@ import numpy as np from numpy.random import uniform -from infinigen.assets.corals.base import BaseCoralFactory -from infinigen.assets.corals.tentacles import make_upward_points_fn, make_radius_points_fn -from infinigen.infinigen_gpl.extras.diff_growth import build_diff_growth -from infinigen.assets.utils.object import mesh2obj, data2mesh +import infinigen.core.util.blender as butil +from infinigen.assets.objects.corals.base import BaseCoralFactory +from infinigen.assets.objects.corals.tentacles import ( + make_radius_points_fn, + make_upward_points_fn, +) from infinigen.assets.utils.decorate import geo_extension, read_co from infinigen.assets.utils.mesh import polygon_angles -import infinigen.core.util.blender as butil +from infinigen.assets.utils.object import data2mesh, mesh2obj from infinigen.core import surface +from infinigen.core.tagging import tag_object from infinigen.core.util.math import FixedSeed -from infinigen.core.tagging import tag_object, tag_nodegroup +from infinigen.infinigen_gpl.extras.diff_growth import build_diff_growth + class DiffGrowthBaseCoralFactory(BaseCoralFactory): default_scale = [1] * 3 @@ -24,15 +28,15 @@ class DiffGrowthBaseCoralFactory(BaseCoralFactory): def __init__(self, factory_seed, coarse=False): super(DiffGrowthBaseCoralFactory, self).__init__(factory_seed, coarse) self.makers = [self.leather_make, self.flat_make] - self.weights = [.7, .3] + self.weights = [0.7, 0.3] 
with FixedSeed(self.factory_seed): self.maker = np.random.choice(self.makers, p=self.weights) if self.maker == self.flat_make: - self.tentacle_prob = .8 - self.points_fn = make_upward_points_fn(.05, np.pi / 3) + self.tentacle_prob = 0.8 + self.points_fn = make_upward_points_fn(0.05, np.pi / 3) else: - self.tentacle_prob = .5 - self.points_fn = make_radius_points_fn(.05, .5) + self.tentacle_prob = 0.5 + self.points_fn = make_radius_points_fn(0.05, 0.5) @staticmethod def diff_growth_make(name, n_colonies=1, **kwargs): @@ -40,7 +44,10 @@ def diff_growth_make(name, n_colonies=1, **kwargs): stride = 2 if n_colonies > 1: angles = polygon_angles(np.random.randint(2, 6)) - colony_offsets = np.stack([np.cos(angles), np.sin(angles), np.zeros_like(angles)]).T * stride + colony_offsets = ( + np.stack([np.cos(angles), np.sin(angles), np.zeros_like(angles)]).T + * stride + ) else: colony_offsets = np.zeros((1, 3)) @@ -48,58 +55,78 @@ def diff_growth_make(name, n_colonies=1, **kwargs): for i, offset in enumerate(colony_offsets): angles = polygon_angles(n_base) vertices = np.block( - [[np.cos(angles), 0], [np.sin(angles), 0], [np.zeros(n_base + 1)]]).T + np.expand_dims(offset, - 0) - faces = np.stack([np.arange(n_base), np.roll(np.arange(n_base), 1), np.full(n_base, n_base)]).T + ( - n_base + 1) * i + [[np.cos(angles), 0], [np.sin(angles), 0], [np.zeros(n_base + 1)]] + ).T + np.expand_dims(offset, 0) + faces = ( + np.stack( + [ + np.arange(n_base), + np.roll(np.arange(n_base), 1), + np.full(n_base, n_base), + ] + ).T + + (n_base + 1) * i + ) vertices_all.append(vertices) faces_all.append(faces) vertices = np.concatenate(vertices_all) faces = np.concatenate(faces_all) - obj = mesh2obj(data2mesh(vertices, [], faces, 'polygon')) + obj = mesh2obj(data2mesh(vertices, [], faces, "polygon")) - boundary = obj.vertex_groups.new(name='Boundary') + boundary = obj.vertex_groups.new(name="Boundary") boundary_vertices = set(range(len(vertices))) boundary_vertices.difference(range(n_base, len(vertices), n_base + 1)) - boundary.add(list(boundary_vertices), 1.0, 'REPLACE') + boundary.add(list(boundary_vertices), 1.0, "REPLACE") build_diff_growth(obj, boundary.index, **kwargs) obj.name = name return obj @staticmethod def leather_make(): - prob_multiple_colonies = .5 - n_colonies = np.random.randint(2, 3) if uniform() < prob_multiple_colonies else 1 - growth_vec = 0, 0, uniform(.8, 1.2) - growth_scale = 1, 1, uniform(.5, .7) - obj = DiffGrowthBaseCoralFactory.diff_growth_make('leather_coral', n_colonies, - max_polygons=1e3 * n_colonies, fac_noise=2., dt=.25, - growth_scale=growth_scale, growth_vec=growth_vec) + prob_multiple_colonies = 0.5 + n_colonies = ( + np.random.randint(2, 3) if uniform() < prob_multiple_colonies else 1 + ) + growth_vec = 0, 0, uniform(0.8, 1.2) + growth_scale = 1, 1, uniform(0.5, 0.7) + obj = DiffGrowthBaseCoralFactory.diff_growth_make( + "leather_coral", + n_colonies, + max_polygons=1e3 * n_colonies, + fac_noise=2.0, + dt=0.25, + growth_scale=growth_scale, + growth_vec=growth_vec, + ) return obj @staticmethod def flat_make(): n_colonies = 1 - obj = DiffGrowthBaseCoralFactory.diff_growth_make('flat_coral', n_colonies, - max_polygons=4e2 * n_colonies, repulsion_radius=2, - inhibit_shell=1) + obj = DiffGrowthBaseCoralFactory.diff_growth_make( + "flat_coral", + n_colonies, + max_polygons=4e2 * n_colonies, + repulsion_radius=2, + inhibit_shell=1, + ) obj.scale = 1, 1, uniform(1, 2) butil.apply_transform(obj) return obj def create_asset(self, face_size=0.01, **params): obj = self.maker() - 
butil.modify_mesh(obj, 'SMOOTH', iterations=2) + butil.modify_mesh(obj, "SMOOTH", iterations=2) levels = 2 - butil.modify_mesh(obj, 'SUBSURF', render_levels=levels, levels=levels) + butil.modify_mesh(obj, "SUBSURF", render_levels=levels, levels=levels) obj.scale = 2 * np.array(self.default_scale) / max(obj.dimensions[:2]) butil.apply_transform(obj) surface.add_geomod(obj, geo_extension, apply=True) - butil.modify_mesh(obj, 'SOLIDIFY', thickness=.01) + butil.modify_mesh(obj, "SOLIDIFY", thickness=0.01) obj.location = 0, 0, -np.amin(read_co(obj).T[:, -1]) * 0.8 butil.apply_transform(obj, loc=True) - tag_object(obj, 'diffgrowth_coral') + tag_object(obj, "diffgrowth_coral") return obj diff --git a/infinigen/assets/corals/elkhorn.py b/infinigen/assets/objects/corals/elkhorn.py similarity index 50% rename from infinigen/assets/corals/elkhorn.py rename to infinigen/assets/objects/corals/elkhorn.py index 57302fdf0..cfb06ae37 100644 --- a/infinigen/assets/corals/elkhorn.py +++ b/infinigen/assets/objects/corals/elkhorn.py @@ -4,56 +4,90 @@ # Authors: Lingjie Mei -import bpy import bmesh +import bpy import numpy as np from mathutils import kdtree from numpy.random import uniform -from infinigen.assets.corals.base import BaseCoralFactory -from infinigen.assets.corals.tentacles import make_radius_points_fn -from infinigen.assets.utils.decorate import displace_vertices, geo_extension, read_co, remove_vertices, write_co +from infinigen.assets.objects.corals.base import BaseCoralFactory +from infinigen.assets.objects.corals.tentacles import make_radius_points_fn +from infinigen.assets.utils.decorate import ( + displace_vertices, + geo_extension, + read_co, + remove_vertices, + write_co, +) from infinigen.assets.utils.draw import make_circular_interp -from infinigen.core.util.random import log_uniform from infinigen.assets.utils.object import new_circle, origin2lowest, separate_loose +from infinigen.core import surface from infinigen.core.nodes.node_info import Nodes from infinigen.core.nodes.node_wrangler import NodeWrangler -from infinigen.core import surface -from infinigen.core.util.blender import deep_clone_obj +from infinigen.core.tagging import tag_object from infinigen.core.util import blender as butil -from infinigen.core.tagging import tag_object, tag_nodegroup +from infinigen.core.util.blender import deep_clone_obj +from infinigen.core.util.random import log_uniform + class ElkhornBaseCoralFactory(BaseCoralFactory): - tentacle_prob = 0. 
- noise_strength = .005 + tentacle_prob = 0.0 + noise_strength = 0.005 def __init__(self, factory_seed, coarse=False): super(ElkhornBaseCoralFactory, self).__init__(factory_seed, coarse) - self.points_fn = make_radius_points_fn(.05, .6) + self.points_fn = make_radius_points_fn(0.05, 0.6) @staticmethod def geo_elkhorn(nw: NodeWrangler): - geometry = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketGeometry', 'Geometry', None)]) - start_index = nw.boolean_math('AND', nw.compare('GREATER_THAN', nw.vector_math('LENGTH', nw.new_node( - Nodes.InputPosition)), .7), nw.bernoulli(.005)) - end_index = nw.compare('LESS_THAN', nw.vector_math('LENGTH', nw.new_node(Nodes.InputPosition)), .02) - distance = nw.vector_math('DISTANCE', *nw.new_node(Nodes.InputEdgeVertices).outputs[2:]) + geometry = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) + start_index = nw.boolean_math( + "AND", + nw.compare( + "GREATER_THAN", + nw.vector_math("LENGTH", nw.new_node(Nodes.InputPosition)), + 0.7, + ), + nw.bernoulli(0.005), + ) + end_index = nw.compare( + "LESS_THAN", + nw.vector_math("LENGTH", nw.new_node(Nodes.InputPosition)), + 0.02, + ) + distance = nw.vector_math( + "DISTANCE", *nw.new_node(Nodes.InputEdgeVertices).outputs[2:] + ) weight = nw.scale(distance, nw.musgrave(10)) - curve = nw.new_node(Nodes.EdgePathToCurve, [geometry, start_index, - nw.new_node(Nodes.ShortestEdgePath, [end_index, weight]).outputs[0]]) - curve = nw.new_node(Nodes.SplineType, [curve], attrs={'spline_type': 'NURBS'}) - - geometry = nw.new_node(Nodes.MergeByDistance, [nw.curve2mesh(curve), None, .005]) - nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': geometry}) + curve = nw.new_node( + Nodes.EdgePathToCurve, + [ + geometry, + start_index, + nw.new_node(Nodes.ShortestEdgePath, [end_index, weight]).outputs[0], + ], + ) + curve = nw.new_node(Nodes.SplineType, [curve], attrs={"spline_type": "NURBS"}) + + geometry = nw.new_node( + Nodes.MergeByDistance, [nw.curve2mesh(curve), None, 0.005] + ) + nw.new_node(Nodes.GroupOutput, input_kwargs={"Geometry": geometry}) def create_asset(self, face_size=0.01, **params): obj = new_circle(location=(0, 0, 0), vertices=1024) - with butil.ViewportMode(obj, 'EDIT'): + with butil.ViewportMode(obj, "EDIT"): bpy.ops.mesh.fill_grid() - displace_vertices(obj, lambda x, y, z: (*uniform(-.005, .005, (2, len(x))), 0)) - with butil.ViewportMode(obj, 'EDIT'): - bpy.ops.mesh.quads_convert_to_tris(quad_method='BEAUTY', ngon_method='BEAUTY') + displace_vertices( + obj, lambda x, y, z: (*uniform(-0.005, 0.005, (2, len(x))), 0) + ) + with butil.ViewportMode(obj, "EDIT"): + bpy.ops.mesh.quads_convert_to_tris( + quad_method="BEAUTY", ngon_method="BEAUTY" + ) temp = deep_clone_obj(obj) surface.add_geomod(temp, self.geo_elkhorn, apply=True) @@ -67,14 +101,23 @@ def create_asset(self, face_size=0.01, **params): obj.rotation_euler[-1] = uniform(0, np.pi * 2) butil.apply_transform(obj) - butil.modify_mesh(obj, 'SOLIDIFY', thickness=.02) - surface.add_geomod(obj, geo_extension, apply=True, input_kwargs={'musgrave_dimensions': '2D'}) - texture = bpy.data.textures.new(name='elkhorn_coral', type='STUCCI') - texture.noise_scale = log_uniform(.1, .5) - butil.modify_mesh(obj, 'DISPLACE', True, strength=uniform(.1, .2), texture=texture, mid_level=0, - direction='Z') + butil.modify_mesh(obj, "SOLIDIFY", thickness=0.02) + surface.add_geomod( + obj, geo_extension, apply=True, input_kwargs={"musgrave_dimensions": "2D"} + ) + texture = bpy.data.textures.new(name="elkhorn_coral", 
type="STUCCI") + texture.noise_scale = log_uniform(0.1, 0.5) + butil.modify_mesh( + obj, + "DISPLACE", + True, + strength=uniform(0.1, 0.2), + texture=texture, + mid_level=0, + direction="Z", + ) origin2lowest(obj) - tag_object(obj, 'elkhorn_coral') + tag_object(obj, "elkhorn_coral") return obj @staticmethod @@ -84,17 +127,19 @@ def tree2mesh(obj, locations): kd.insert(loc, i) kd.balance() - large_radius = uniform(.08, .12) - remove_vertices(obj, lambda x, y, z: np.array( - [kd.find(v)[-1] for v in np.stack([x, y, z], -1)]) > .015 + large_radius * ( - 1 - np.sqrt(x * x + y * y))) + large_radius = uniform(0.08, 0.12) + remove_vertices( + obj, + lambda x, y, z: np.array([kd.find(v)[-1] for v in np.stack([x, y, z], -1)]) + > 0.015 + large_radius * (1 - np.sqrt(x * x + y * y)), + ) @staticmethod def build_angles(obj): - angle_radius = .2 - with butil.ViewportMode(obj, 'EDIT'): + angle_radius = 0.2 + with butil.ViewportMode(obj, "EDIT"): bm = bmesh.from_edit_mesh(obj.data) - angles = np.full(len(bm.verts), -100.) + angles = np.full(len(bm.verts), -100.0) queue = set() for v in bm.verts: x, y, z = v.co @@ -107,38 +152,40 @@ def build_angles(obj): new_queue = set() for v in queue: pairs = [] - if angles[v.index] <= -100.: + if angles[v.index] <= -100.0: for e in v.link_edges: o = e.other_vert(v) - if angles[o.index] > -100.: + if angles[o.index] > -100.0: pairs.append((e.calc_length(), angles[o.index])) angles[v.index] = min(pairs)[1] for e in v.link_edges: o = e.other_vert(v) - if angles[o.index] <= -100.: + if angles[o.index] <= -100.0: new_queue.add(o) queue = new_queue return angles @staticmethod def cluster_displace(obj, angles): - f_scale = make_circular_interp(.3, 1., 5) + f_scale = make_circular_interp(0.3, 1.0, 5) f_rotation = make_circular_interp(0, np.pi / 3, 10) - f_power = make_circular_interp(1., 1.6, 5) + f_power = make_circular_interp(1.0, 1.6, 5) x, y, z = read_co(obj).T a = np.array([angles[_] for _ in range(len(x))]) + np.pi z += f_scale(a) * (x * x + y * y) ** f_power(a) rotation = f_rotation(a) c, s = np.cos(rotation), np.sin(rotation) - co = np.stack([c * x - s * z, c * y - s * z, c * z + s * np.sqrt(x * x + y * y)], -1) + co = np.stack( + [c * x - s * z, c * y - s * z, c * z + s * np.sqrt(x * x + y * y)], -1 + ) write_co(obj, co) - with butil.ViewportMode(obj, 'EDIT'): + with butil.ViewportMode(obj, "EDIT"): bm = bmesh.from_edit_mesh(obj.data) geom = [] for e in bm.edges: - if e.calc_length() > .04: + if e.calc_length() > 0.04: geom.append(e) - bmesh.ops.delete(bm, geom=geom, context='EDGES') + bmesh.ops.delete(bm, geom=geom, context="EDGES") bmesh.update_edit_mesh(obj.data) return obj diff --git a/infinigen/assets/corals/fan.py b/infinigen/assets/objects/corals/fan.py similarity index 59% rename from infinigen/assets/corals/fan.py rename to infinigen/assets/objects/corals/fan.py index 58fe2ca76..a6122f821 100644 --- a/infinigen/assets/corals/fan.py +++ b/infinigen/assets/objects/corals/fan.py @@ -4,69 +4,89 @@ # Authors: Lingjie Mei -import bpy import bmesh +import bpy import numpy as np from numpy.random import uniform import infinigen.core.util.blender as butil -from infinigen.assets.corals.base import BaseCoralFactory -from infinigen.assets.utils.decorate import displace_vertices, geo_extension, read_co, subsurface2face_size -from infinigen.assets.utils.mesh import treeify +from infinigen.assets.objects.corals.base import BaseCoralFactory +from infinigen.assets.utils.decorate import ( + displace_vertices, + geo_extension, + read_co, + subsurface2face_size, +) from 
infinigen.assets.utils.draw import shape_by_angles +from infinigen.assets.utils.mesh import treeify from infinigen.assets.utils.nodegroup import geo_radius from infinigen.assets.utils.object import new_circle, origin2lowest from infinigen.assets.utils.shortest_path import geo_shortest_path +from infinigen.core import surface from infinigen.core.nodes.node_info import Nodes from infinigen.core.nodes.node_wrangler import NodeWrangler -from infinigen.core import surface -from infinigen.core.tagging import tag_object, tag_nodegroup +from infinigen.core.tagging import tag_object class FanBaseCoralFactory(BaseCoralFactory): - tentacle_prob = 0. - noise_strength = 0. + tentacle_prob = 0.0 + noise_strength = 0.0 @staticmethod def weight(nw: NodeWrangler): u, v = nw.new_node(Nodes.InputEdgeVertices).outputs[2:] - length = nw.vector_math('DISTANCE', u, v) - return nw.uniform(nw.scalar_multiply(length, .4), length) + length = nw.vector_math("DISTANCE", u, v) + return nw.uniform(nw.scalar_multiply(length, 0.4), length) def create_asset(self, face_size=0.01, **params): obj = new_circle(vertices=512) - with butil.ViewportMode(obj, 'EDIT'): + with butil.ViewportMode(obj, "EDIT"): bpy.ops.mesh.fill_grid() - displace_vertices(obj, lambda x, y, z: uniform(-.005, .005, (3, len(x)))) - with butil.ViewportMode(obj, 'EDIT'): - bpy.ops.mesh.quads_convert_to_tris(quad_method='BEAUTY', ngon_method='BEAUTY') - shape_by_angles(obj, np.array([-np.pi / 2, 0, np.pi / 2]), - np.array([uniform(.2, .8), 1, uniform(.2, .8)])) + displace_vertices(obj, lambda x, y, z: uniform(-0.005, 0.005, (3, len(x)))) + with butil.ViewportMode(obj, "EDIT"): + bpy.ops.mesh.quads_convert_to_tris( + quad_method="BEAUTY", ngon_method="BEAUTY" + ) + shape_by_angles( + obj, + np.array([-np.pi / 2, 0, np.pi / 2]), + np.array([uniform(0.2, 0.8), 1, uniform(0.2, 0.8)]), + ) obj.rotation_euler = np.pi / 2, -np.pi / 2, 0 butil.apply_transform(obj) end_indices = np.nonzero(read_co(obj)[:, -1] < 1e-2)[0] - end_index = lambda nw: nw.build_index_case(np.random.choice(end_indices, 5)) - texture = bpy.data.textures.new(name='fan', type='STUCCI') - texture.noise_scale = uniform(.5, 1) - butil.modify_mesh(obj, 'DISPLACE', texture=texture, strength=uniform(.5, 1.), direction='Y') + + def end_index(nw): + return nw.build_index_case(np.random.choice(end_indices, 5)) + + texture = bpy.data.textures.new(name="fan", type="STUCCI") + texture.noise_scale = uniform(0.5, 1) + butil.modify_mesh( + obj, "DISPLACE", texture=texture, strength=uniform(0.5, 1.0), direction="Y" + ) surface.add_geomod(obj, geo_extension, apply=True) - obj.scale = uniform(.6, 1.2), 1, 1 + obj.scale = uniform(0.6, 1.2), 1, 1 butil.apply_transform(obj) - surface.add_geomod(obj, geo_shortest_path, input_args=[end_index, self.weight, .05], apply=True) + surface.add_geomod( + obj, + geo_shortest_path, + input_args=[end_index, self.weight, 0.05], + apply=True, + ) obj = self.add_radius(obj) - surface.add_geomod(obj, geo_radius, apply=True, input_args=['radius', 32]) - butil.modify_mesh(obj, 'WELD', merge_threshold=.001) + surface.add_geomod(obj, geo_radius, apply=True, input_args=["radius", 32]) + butil.modify_mesh(obj, "WELD", merge_threshold=0.001) subsurface2face_size(obj, face_size) origin2lowest(obj) - tag_object(obj, 'fan_coral') + tag_object(obj, "fan_coral") return obj @staticmethod def add_radius(obj): obj = treeify(obj) counts = np.zeros(len(obj.data.vertices)) - with butil.ViewportMode(obj, 'EDIT'): + with butil.ViewportMode(obj, "EDIT"): bm = bmesh.from_edit_mesh(obj.data) queue = 
list(sorted(bm.verts, key=lambda v: v.co[-1]))[:1] visited = np.zeros(len(bm.verts)) @@ -85,10 +105,10 @@ def add_radius(obj): for e in v.link_edges: count += counts[e.other_vert(v).index] counts[v.index] = count - vg = obj.vertex_groups.new(name='radius') + vg = obj.vertex_groups.new(name="radius") thresh = uniform(100, 200) - ratio = uniform(.5, 1.5) + ratio = uniform(0.5, 1.5) for i, c in enumerate(counts): r = 1 if c < thresh else 1 + ratio * np.log(c / thresh) - vg.add([i], .008 * r, 'REPLACE') + vg.add([i], 0.008 * r, "REPLACE") return obj diff --git a/infinigen/assets/objects/corals/generate.py b/infinigen/assets/objects/corals/generate.py new file mode 100644 index 000000000..c3742a080 --- /dev/null +++ b/infinigen/assets/objects/corals/generate.py @@ -0,0 +1,256 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Lingjie Mei + + +import bpy +import numpy as np +from numpy.random import uniform + +import infinigen.core.util.blender as butil +from infinigen.assets.utils.misc import assign_material +from infinigen.assets.utils.object import join_objects +from infinigen.core import surface +from infinigen.core.nodes.node_info import Nodes +from infinigen.core.nodes.node_utils import build_color_ramp +from infinigen.core.nodes.node_wrangler import NodeWrangler +from infinigen.core.placement.detail import remesh_with_attrs +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.tagging import tag_object +from infinigen.core.util.color import hsv2rgba +from infinigen.core.util.math import FixedSeed +from infinigen.core.util.random import log_uniform + +from . import tentacles +from .base import BaseCoralFactory +from .diff_growth import ( + DiffGrowthBaseCoralFactory, + LeatherBaseCoralFactory, + TableBaseCoralFactory, +) +from .elkhorn import ElkhornBaseCoralFactory +from .fan import FanBaseCoralFactory +from .laplacian import CauliflowerBaseCoralFactory +from .reaction_diffusion import ( + BrainBaseCoralFactory, + HoneycombBaseCoralFactory, + ReactionDiffusionBaseCoralFactory, +) +from .star import StarBaseCoralFactory +from .tree import BushBaseCoralFactory, TreeBaseCoralFactory, TwigBaseCoralFactory +from .tube import TubeBaseCoralFactory + + +class CoralFactory(AssetFactory): + def __init__(self, factory_seed, coarse=False, factory_method=None): + super(CoralFactory, self).__init__(factory_seed, coarse) + with FixedSeed(factory_seed): + self.factory_methods = [ + DiffGrowthBaseCoralFactory, + ReactionDiffusionBaseCoralFactory, + TubeBaseCoralFactory, + TreeBaseCoralFactory, + CauliflowerBaseCoralFactory, + ElkhornBaseCoralFactory, + StarBaseCoralFactory, + ] + weights = np.array([0.15, 0.2, 0.15, 0.2, 0.2, 0.15, 0.2]) + self.weights = weights / weights.sum() + if factory_method is None: + factory_method = np.random.choice(self.factory_methods, p=self.weights) + self.factory: BaseCoralFactory = factory_method(factory_seed, coarse) + self.base_hue = self.build_base_hue() + self.material = surface.shaderfunc_to_material( + self.shader_coral, self.base_hue + ) + + def create_asset(self, face_size=0.01, realize=True, **params): + obj = self.factory.create_asset(**params) + obj.scale = ( + 2 + * np.array(self.factory.default_scale) + / max(obj.dimensions[:2]) + * uniform(0.8, 1.2, 3) + ) + butil.apply_transform(obj) + remesh_with_attrs(obj, face_size) + assign_material(obj, self.material) + + has_bump = uniform(0, 1) < 
self.factory.bump_prob + if self.factory.noise_strength > 0: + if has_bump: + self.apply_noise_texture(obj) + else: + self.apply_bump(obj) + + tag_object(obj, "coral") + + if uniform(0, 1) < self.factory.tentacle_prob and not has_bump: + t = tentacles.apply( + obj, + self.factory.points_fn, + self.factory.density, + realize, + self.base_hue, + ) + obj = join_objects([obj, t]) + + return obj + + def apply_noise_texture(self, obj): + t = np.random.choice(["STUCCI", "MARBLE"]) + texture = bpy.data.textures.new(name="coral", type=t) + texture.noise_scale = log_uniform(0.01, 0.02) + butil.modify_mesh( + obj, + "DISPLACE", + True, + strength=self.factory.noise_strength * uniform(0.9, 1.2), + mid_level=0, + texture=texture, + ) + + def apply_bump(self, obj): + texture = bpy.data.textures.new(name="coral", type="VORONOI") + texture.noise_scale = log_uniform(0.02, 0.03) + texture.noise_intensity = log_uniform(1.5, 2) + texture.distance_metric = "MINKOVSKY" + texture.minkovsky_exponent = uniform(1, 1.5) + butil.modify_mesh( + obj, + "DISPLACE", + True, + strength=-self.factory.noise_strength * uniform(1, 2), + mid_level=1, + texture=texture, + ) + + @staticmethod + def build_base_hue(): + if uniform(0, 1) < 0.25: + base_hue = uniform(0, 1) + else: + base_hue = uniform(-0.2, 0.3) % 1 + return base_hue + + @staticmethod + def shader_coral(nw: NodeWrangler, base_hue): + shift = uniform(0.05, 0.1) * (-1) ** np.random.randint(2) + subsurface_color = hsv2rgba(uniform(0, 1), uniform(0, 1), 1.0) + bright_color = hsv2rgba((base_hue + shift) % 1, uniform(0.7, 0.9), 0.2) + dark_color = hsv2rgba(base_hue, uniform(0.5, 0.7), 0.1) + light_color = hsv2rgba( + (base_hue + uniform(-0.2, 0.2)) % 1, uniform(0.2, 0.4), 0.4 + ) + specular = uniform(0.25, 0.5) + + color = build_color_ramp( + nw, + nw.musgrave(uniform(10, 20)), + [0.0, 0.3, 0.7, 1.0], + [dark_color, dark_color, bright_color, bright_color], + ) + color = nw.new_node( + Nodes.MixRGB, + [ + nw.build_float_curve( + nw.musgrave(uniform(10, 20)), + [(0, 1), (uniform(0.3, 0.4), 0), (1, 0)], + ), + color, + light_color, + ], + ) + + noise_texture = nw.new_node(Nodes.NoiseTexture, input_kwargs={"Scale": 50}) + roughness = nw.build_float_curve(noise_texture, [(0, 0.5), (1, 1.0)]) + subsurface_ratio = uniform(0, 0.05) if uniform(0, 1) > 0.5 else 0 + subsurface_radius = [uniform(0.05, 0.2)] * 3 + bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": color, + "Roughness": roughness, + "Specular": specular, + "Subsurface": subsurface_ratio, + "Subsurface Radius": subsurface_radius, + "Subsurface Color": subsurface_color, + }, + ) + return bsdf + + +class LeatherCoralFactory(CoralFactory): + def __init__(self, factory_seed, coarse=False): + super(LeatherCoralFactory, self).__init__( + factory_seed, coarse, LeatherBaseCoralFactory + ) + + +class TableCoralFactory(CoralFactory): + def __init__(self, factory_seed, coarse=False): + super(TableCoralFactory, self).__init__( + factory_seed, coarse, TableBaseCoralFactory + ) + + +class CauliflowerCoralFactory(CoralFactory): + def __init__(self, factory_seed, coarse=False): + super(CauliflowerCoralFactory, self).__init__( + factory_seed, coarse, CauliflowerBaseCoralFactory + ) + + +class BrainCoralFactory(CoralFactory): + def __init__(self, factory_seed, coarse=False): + super(BrainCoralFactory, self).__init__( + factory_seed, coarse, BrainBaseCoralFactory + ) + + +class HoneycombCoralFactory(CoralFactory): + def __init__(self, factory_seed, coarse=False): + super(HoneycombCoralFactory, self).__init__( + 
factory_seed, coarse, HoneycombBaseCoralFactory + ) + + +class BushCoralFactory(CoralFactory): + def __init__(self, factory_seed, coarse=False): + super(BushCoralFactory, self).__init__( + factory_seed, coarse, BushBaseCoralFactory + ) + + +class TwigCoralFactory(CoralFactory): + def __init__(self, factory_seed, coarse=False): + super(TwigCoralFactory, self).__init__( + factory_seed, coarse, TwigBaseCoralFactory + ) + + +class TubeCoralFactory(CoralFactory): + def __init__(self, factory_seed, coarse=False): + super(TubeCoralFactory, self).__init__( + factory_seed, coarse, TubeBaseCoralFactory + ) + + +class FanCoralFactory(CoralFactory): + def __init__(self, factory_seed, coarse=False): + super(FanCoralFactory, self).__init__(factory_seed, coarse, FanBaseCoralFactory) + + +class ElkhornCoralFactory(CoralFactory): + def __init__(self, factory_seed, coarse=False): + super(ElkhornCoralFactory, self).__init__( + factory_seed, coarse, ElkhornBaseCoralFactory + ) + + +class StarCoralFactory(CoralFactory): + def __init__(self, factory_seed, coarse=False): + super(StarCoralFactory, self).__init__( + factory_seed, coarse, StarBaseCoralFactory + ) diff --git a/infinigen/assets/corals/laplacian.py b/infinigen/assets/objects/corals/laplacian.py similarity index 69% rename from infinigen/assets/corals/laplacian.py rename to infinigen/assets/objects/corals/laplacian.py index dc10d769a..8e9d2c487 100644 --- a/infinigen/assets/corals/laplacian.py +++ b/infinigen/assets/objects/corals/laplacian.py @@ -4,28 +4,29 @@ # Authors: Lingjie Mei -from infinigen.assets.corals.base import BaseCoralFactory -from infinigen.assets.corals.tentacles import make_radius_points_fn +import infinigen.core.util.blender as butil +from infinigen.assets.objects.corals.base import BaseCoralFactory +from infinigen.assets.objects.corals.tentacles import make_radius_points_fn +from infinigen.assets.utils.decorate import geo_extension from infinigen.assets.utils.laplacian import build_laplacian_3d from infinigen.assets.utils.object import mesh2obj -from infinigen.assets.utils.decorate import geo_extension -import infinigen.core.util.blender as butil from infinigen.core import surface -from infinigen.core.tagging import tag_object, tag_nodegroup +from infinigen.core.tagging import tag_object + class CauliflowerBaseCoralFactory(BaseCoralFactory): tentacle_prob = 0.4 - noise_strength = .015 + noise_strength = 0.015 def __init__(self, factory_seed, coarse=False): super(CauliflowerBaseCoralFactory, self).__init__(factory_seed, coarse) - self.points_fn = make_radius_points_fn(.05, .6) + self.points_fn = make_radius_points_fn(0.05, 0.6) def create_asset(self, face_size=0.01, **params): mesh = build_laplacian_3d() obj = mesh2obj(mesh) surface.add_geomod(obj, geo_extension, apply=True) levels = 1 - butil.modify_mesh(obj, 'SUBSURF', levels=levels, render_levels=levels) - tag_object(obj, 'cauliflower_coral') + butil.modify_mesh(obj, "SUBSURF", levels=levels, render_levels=levels) + tag_object(obj, "cauliflower_coral") return obj diff --git a/infinigen/assets/corals/reaction_diffusion.py b/infinigen/assets/objects/corals/reaction_diffusion.py similarity index 64% rename from infinigen/assets/corals/reaction_diffusion.py rename to infinigen/assets/objects/corals/reaction_diffusion.py index 3acb7eb77..d1a13f5ad 100644 --- a/infinigen/assets/corals/reaction_diffusion.py +++ b/infinigen/assets/objects/corals/reaction_diffusion.py @@ -4,27 +4,31 @@ # Authors: Lingjie Mei -import bpy import numpy as np -from infinigen.assets.corals.base import 
BaseCoralFactory -from infinigen.assets.utils.object import center, mesh2obj, new_icosphere +import infinigen.core.util.blender as butil +from infinigen.assets.objects.corals.base import BaseCoralFactory from infinigen.assets.utils.decorate import geo_extension from infinigen.assets.utils.mesh import build_convex_mesh -from infinigen.assets.utils.reaction_diffusion import feed2kill, make_periodic_weight_fn, reaction_diffusion -import infinigen.core.util.blender as butil -from infinigen.core.util.math import FixedSeed +from infinigen.assets.utils.object import center, mesh2obj, new_icosphere +from infinigen.assets.utils.reaction_diffusion import ( + feed2kill, + make_periodic_weight_fn, + reaction_diffusion, +) from infinigen.core import surface -from infinigen.core.tagging import tag_object, tag_nodegroup +from infinigen.core.tagging import tag_object +from infinigen.core.util.math import FixedSeed + class ReactionDiffusionBaseCoralFactory(BaseCoralFactory): - tentacle_prob = 0. - noise_strength = .01 + tentacle_prob = 0.0 + noise_strength = 0.01 def __init__(self, factory_seed, coarse=False): super(ReactionDiffusionBaseCoralFactory, self).__init__(factory_seed, coarse) self.makers = [self.brain_make, self.honeycomb_make] - self.weights = [.5, .5] + self.weights = [0.5, 0.5] with FixedSeed(self.factory_seed): self.maker = np.random.choice(self.makers, p=self.weights) @@ -34,37 +38,47 @@ def reaction_diffusion_make(weight_fn, **kwargs): wrapped = mesh2obj(mesh) subsurf_level = 2 - butil.modify_mesh(wrapped, 'SUBSURF', levels=subsurf_level, render_levels=subsurf_level) + butil.modify_mesh( + wrapped, "SUBSURF", levels=subsurf_level, render_levels=subsurf_level + ) obj = new_icosphere(subdivisions=8, radius=3) reaction_diffusion(obj, weight_fn, **kwargs) obj.location = center(wrapped) butil.apply_transform(obj, loc=True) - butil.modify_mesh(obj, 'SHRINKWRAP', target=wrapped, wrap_method="PROJECT", use_negative_direction=True) + butil.modify_mesh( + obj, + "SHRINKWRAP", + target=wrapped, + wrap_method="PROJECT", + use_negative_direction=True, + ) obj.location[-1] = 1 butil.apply_transform(obj, loc=True) surface.add_geomod(obj, geo_extension, apply=True) - butil.modify_mesh(obj, 'DISPLACE', vertex_group='B', strength=.4, mid_level=0.) 
+ butil.modify_mesh( + obj, "DISPLACE", vertex_group="B", strength=0.4, mid_level=0.0 + ) butil.delete(wrapped) - tag_object(obj, 'reactiondiffusion_coral') + tag_object(obj, "reactiondiffusion_coral") return obj @staticmethod def brain_make(): - feed_rate = .055 + feed_rate = 0.055 kill_rate = feed2kill(feed_rate) - return ReactionDiffusionBaseCoralFactory.reaction_diffusion_make(make_periodic_weight_fn(100, 0.02), - feed_rate=feed_rate, - kill_rate=kill_rate) + return ReactionDiffusionBaseCoralFactory.reaction_diffusion_make( + make_periodic_weight_fn(100, 0.02), feed_rate=feed_rate, kill_rate=kill_rate + ) @staticmethod def honeycomb_make(): - feed_rate = .070 - kill_rate = feed2kill(feed_rate) - .001 - return ReactionDiffusionBaseCoralFactory.reaction_diffusion_make(make_periodic_weight_fn(5), - feed_rate=feed_rate, - kill_rate=kill_rate) + feed_rate = 0.070 + kill_rate = feed2kill(feed_rate) - 0.001 + return ReactionDiffusionBaseCoralFactory.reaction_diffusion_make( + make_periodic_weight_fn(5), feed_rate=feed_rate, kill_rate=kill_rate + ) def create_asset(self, face_size=0.01, **params): return self.maker() diff --git a/infinigen/assets/objects/corals/star.py b/infinigen/assets/objects/corals/star.py new file mode 100644 index 000000000..db504bb91 --- /dev/null +++ b/infinigen/assets/objects/corals/star.py @@ -0,0 +1,169 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Lingjie Mei + + +import bmesh +import bpy +import numpy as np +from mathutils import Vector +from numpy.random import uniform + +import infinigen.core.util.blender as butil +from infinigen.assets.objects.corals.base import BaseCoralFactory +from infinigen.assets.utils.decorate import displace_vertices, geo_extension +from infinigen.assets.utils.object import join_objects, new_empty, new_icosphere +from infinigen.core import surface +from infinigen.core.nodes.node_info import Nodes +from infinigen.core.nodes.node_wrangler import NodeWrangler +from infinigen.core.tagging import tag_object +from infinigen.core.util.blender import deep_clone_obj + + +class StarBaseCoralFactory(BaseCoralFactory): + tentacle_prob = 1.0 + noise_strength = 0.002 + density = 3000 + + @staticmethod + def points_fn(nw: NodeWrangler, points): + points = nw.new_node( + Nodes.SeparateGeometry, + [points, nw.new_node(Nodes.NamedAttribute, ["outermost"])], + ) + return points + + def __init__(self, factory_seed, coarse=False): + super(StarBaseCoralFactory, self).__init__(factory_seed, coarse) + self.points_fn = StarBaseCoralFactory.points_fn + + @staticmethod + def geo_dual_mesh(nw: NodeWrangler): + geometry = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) + perturb = 0.05 + geometry = nw.new_node( + Nodes.SetPosition, + [geometry, None, None, nw.uniform([-perturb] * 3, [perturb] * 3)], + ) + + geometry = nw.new_node(Nodes.DualMesh, input_kwargs={"Mesh": geometry}) + nw.new_node(Nodes.GroupOutput, input_kwargs={"Geometry": geometry}) + + @staticmethod + def geo_separate_faces(nw: NodeWrangler): + geometry = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) + selection = nw.compare( + "GREATER_THAN", nw.separate(nw.new_node(Nodes.InputPosition))[-1], 0 + ) + geometry = nw.new_node(Nodes.SeparateGeometry, [geometry, selection]) + geometry = nw.new_node(Nodes.SplitEdges, [geometry]) + scale = nw.uniform(0.9, 1.2) + geometry = 
nw.new_node(Nodes.ScaleElements, [geometry, None, scale]) + geometry = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": geometry, + "Name": "custom_normal", + "Value": nw.new_node(Nodes.InputNormal), + }, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + nw.new_node(Nodes.GroupOutput, input_kwargs={"Geometry": geometry}) + + @staticmethod + def geo_flower(nw: NodeWrangler, size, resolution, anchor): + geometry = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) + t = nw.scalar_divide( + nw.math("FLOOR", nw.scalar_divide(nw.new_node(Nodes.Index), size)), + resolution, + ) + offset = nw.build_float_curve(t, [(0, 0), anchor, (1, 0)], "AUTO") + normal = nw.new_node( + Nodes.NamedAttribute, ["custom_normal"], attrs={"data_type": "FLOAT_VECTOR"} + ) + geometry = nw.new_node( + Nodes.SetPosition, [geometry, None, None, nw.scale(offset, normal)] + ) + outer = nw.boolean_math( + "AND", nw.compare("GREATER_THAN", t, 0.4), nw.compare("LESS_THAN", t, 0.6) + ) + geometry = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={"Geometry": geometry, "Name": "outermost", "Value": outer}, + ) + nw.new_node(Nodes.GroupOutput, input_kwargs={"Geometry": geometry}) + + def create_asset(self, face_size=0.01, **params): + obj = new_icosphere(subdivisions=3) + obj.location[-1] = uniform(0.25, 0.5) + butil.apply_transform(obj, loc=True) + surface.add_geomod(obj, self.geo_dual_mesh, apply=True) + displace_vertices(obj, lambda x, y, z: (0, 0, -0.9 * np.clip(z, None, 0))) + + rings = deep_clone_obj(obj) + levels = 3 + butil.modify_mesh(obj, "SUBSURF", levels=levels, render_levels=levels) + butil.modify_mesh(rings, "SHRINKWRAP", target=obj) + + surface.add_geomod(rings, self.geo_separate_faces, apply=True) + levels = 3 + butil.modify_mesh(rings, "SUBSURF", levels=levels, render_levels=levels) + + butil.select_none() + with butil.ViewportMode(rings, "EDIT"): + bpy.ops.mesh.select_all(action="SELECT") + bpy.ops.mesh.region_to_loop() + bpy.ops.mesh.select_all(action="INVERT") + bpy.ops.mesh.delete(type="VERT") + + flowers = [] + resolution = 16 + + for ring in butil.split_object(rings): + size = len(ring.data.vertices) + center = np.mean([v.co for v in ring.data.vertices], 0) + empty = new_empty(scale=[uniform(0.3, 0.5) ** (1 / resolution)] * 3) + butil.modify_mesh( + ring, + "ARRAY", + apply=True, + use_relative_offset=False, + use_object_offset=True, + count=resolution + 1, + offset_object=empty, + ) + butil.delete(empty) + + with butil.ViewportMode(ring, "EDIT"): + bpy.ops.mesh.select_all(action="SELECT") + bpy.ops.mesh.bridge_edge_loops() + + bm = bmesh.from_edit_mesh(ring.data) + bm.verts.ensure_lookup_table() + for i in range(1, resolution + 1): + c = np.mean([v.co for v in bm.verts[i * size : (i + 1) * size]], 0) + for j in range(i * size, (i + 1) * size): + bm.verts[j].co += Vector(center - c) + bmesh.update_edit_mesh(ring.data) + + bpy.ops.mesh.select_all(action="SELECT") + bpy.ops.mesh.region_to_loop() + bpy.ops.mesh.bridge_edge_loops() + + anchor = uniform(0.4, 0.6), uniform(0.08, 0.15) + surface.add_geomod( + ring, self.geo_flower, apply=True, input_args=[size, resolution, anchor] + ) + flowers.append(ring) + + obj = join_objects([obj, *flowers]) + surface.add_geomod(obj, geo_extension, apply=True) + tag_object(obj, "star_coral") + return obj diff --git a/infinigen/assets/objects/corals/tentacles.py b/infinigen/assets/objects/corals/tentacles.py new file mode 100644 index 000000000..39b552e59 --- /dev/null +++ 
b/infinigen/assets/objects/corals/tentacles.py @@ -0,0 +1,168 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Lingjie Mei + + +import numpy as np +from numpy.random import uniform + +import infinigen.core.util.blender as butil +from infinigen.assets.objects.trees.tree import build_radius_tree +from infinigen.assets.utils.misc import assign_material, sample_direction +from infinigen.assets.utils.nodegroup import geo_radius +from infinigen.core import surface +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.placement.factory import make_asset_collection +from infinigen.core.tagging import COMBINED_ATTR_NAME +from infinigen.core.util.blender import deep_clone_obj +from infinigen.core.util.color import hsv2rgba + + +def build_tentacles(**kwargs): + n_branch = 5 + n_major = 8 + branch_config = { + "n": n_branch, + "path_kargs": lambda idx: { + "n_pts": n_major, + "std": 0.5, + "momentum": 0.5, + "sz": 0.008, + }, + "spawn_kargs": lambda idx: {"init_vec": sample_direction(0.6)}, + } + + obj = build_radius_tree(None, branch_config, uniform(0.002, 0.004)) + surface.add_geomod(obj, geo_radius, apply=True, input_args=["radius"]) + return obj + + +def make_min_distance_points_fn(min_distance): + def points_fn(nw: NodeWrangler, points): + return nw.new_node( + Nodes.MergeByDistance, + input_kwargs={"Geometry": points, "Distance": min_distance}, + ) + + return points_fn + + +def make_radius_points_fn(min_distance, radius_threshold): + def points_fn(nw: NodeWrangler, points): + radius = nw.vector_math("DISTANCE", nw.new_node(Nodes.InputPosition), [0] * 3) + points = nw.new_node( + Nodes.MergeByDistance, + input_kwargs={ + "Geometry": points, + "Selection": nw.compare("LESS_THAN", radius, radius_threshold * 1.5), + "Distance": min_distance * 2, + }, + ) + points = nw.new_node( + Nodes.MergeByDistance, + input_kwargs={"Geometry": points, "Distance": min_distance}, + ) + points = nw.new_node( + Nodes.SeparateGeometry, + [points, nw.compare("GREATER_THAN", radius, radius_threshold)], + ) + return points + + return points_fn + + +def make_upward_points_fn(min_distance, max_angle): + def points_fn(nw: NodeWrangler, points, normal): + points = nw.new_node( + Nodes.SeparateGeometry, + [points, nw.compare_direction("LESS_THAN", normal, [0, 0, 1], max_angle)], + ) + return nw.new_node( + Nodes.MergeByDistance, + input_kwargs={"Geometry": points, "Distance": min_distance}, + ) + + return points_fn + + +def geo_tentacles( + nw: NodeWrangler, tentacles, points_fn=None, density=500, realize=True +): + geometry = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) + tentacles = nw.new_node(Nodes.CollectionInfo, [tentacles, True, True]) + + points, normal, rotation = nw.new_node( + Nodes.DistributePointsOnFaces, + input_kwargs={"Mesh": geometry, "Density": density}, + ).outputs + rotation = nw.new_node( + Nodes.RotateEuler, + input_kwargs={"Rotation": rotation, "Angle": nw.uniform(0, 2 * np.pi)}, + attrs={"type": "AXIS_ANGLE", "space": "LOCAL"}, + ) + + points = surface.eval_argument(nw, points_fn, points=points, normal=normal) + tentacles = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={ + "Points": points, + "Instance": tentacles, + "Pick Instance": True, + "Rotation": rotation, + "Scale": nw.uniform([0.6] * 3, [1.0] * 3, data_type="FLOAT_VECTOR"), + }, + ) + if realize: + realize_instances = 
nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": tentacles} + ) + else: + realize_instances = tentacles + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": realize_instances} + ) + + +def shader_tentacles(nw: NodeWrangler, base_hue=0.3): + roughness = 0.8 + specular = 0.25 + color = hsv2rgba((base_hue + uniform(-0.1, 0.1)) % 1, uniform(0.4, 0.6), 0.5) + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": color, + "Roughness": roughness, + "Specular": specular, + "Subsurface": 0.01, + }, + ) + fresnel_color = hsv2rgba(uniform(0, 1), 0.6, 0.6) + fresnel_bdsf = nw.new_node(Nodes.PrincipledBSDF, [fresnel_color]) + mixed_shader = nw.new_node( + Nodes.MixShader, [nw.new_node(Nodes.Fresnel), principled_bsdf, fresnel_bdsf] + ) + return mixed_shader + + +def apply(obj, points_fn, density, realize=True, base_hue=0.3): + tentacles = deep_clone_obj(obj) + if COMBINED_ATTR_NAME in tentacles.data.attributes: + tentacles.data.attributes.remove(tentacles.data.attributes[COMBINED_ATTR_NAME]) + + instances = make_asset_collection(build_tentacles, 5, "spikes", verbose=False) + surface.add_geomod( + tentacles, + geo_tentacles, + apply=realize, + input_args=[instances, points_fn, density, realize], + ) + + butil.delete_collection(instances) + assign_material( + tentacles, surface.shaderfunc_to_material(shader_tentacles, base_hue) + ) + return tentacles diff --git a/infinigen/assets/objects/corals/tree.py b/infinigen/assets/objects/corals/tree.py new file mode 100644 index 000000000..46177a520 --- /dev/null +++ b/infinigen/assets/objects/corals/tree.py @@ -0,0 +1,203 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
+ +# Authors: Lingjie Mei + + +import math + +import bpy +import numpy as np +from numpy.random import uniform + +import infinigen.core.util.blender as butil +from infinigen.assets.objects.corals.base import BaseCoralFactory +from infinigen.assets.objects.corals.tentacles import make_radius_points_fn +from infinigen.assets.objects.trees.tree import build_radius_tree +from infinigen.assets.utils.nodegroup import geo_radius +from infinigen.core import surface +from infinigen.core.tagging import tag_object +from infinigen.core.util.math import FixedSeed + + +class TreeBaseCoralFactory(BaseCoralFactory): + default_scale = [1] * 3 + tentacle_prob = 0.8 + noise_strength = 0.01 + + def __init__(self, factory_seed, coarse=False, method=None): + super(TreeBaseCoralFactory, self).__init__(factory_seed, coarse) + self.tip = 0.4 + self.configs = { + "twig": {"radius": 0.08, "branch_config": self.twig_config}, + "bush": {"radius": 0.08, "branch_config": self.bush_config}, + } + self.weights = [0.5, 0.5] + with FixedSeed(self.factory_seed): + if method is None: + method = np.random.choice(list(self.configs.keys()), p=self.weights) + self.radius, self.branch_config = map( + self.configs[method].get, ["radius", "branch_config"] + ) + self.points_fn = make_radius_points_fn(0.05, 0.4) + + @property + def bush_config(self): + n_branch = np.random.randint(6, 8) + n_major = np.random.randint(4, 5) + n_minor = np.random.randint(4, 5) + n_detail = np.random.randint(3, 4) + span = uniform(0.4, 0.5) + detail_config = { + "n": n_minor, + "path_kargs": lambda idx: { + "n_pts": n_detail + 1, + "std": 0.4, + "momentum": 0.6, + "sz": 0.01 * (1.5 * n_detail - idx), + }, + "spawn_kargs": lambda idx: { + "rnd_idx": idx + 1, + "ang_min": np.pi / 12, + "ang_max": np.pi / 8, + "axis2": [0, 0, 1], + }, + "children": [], + } + minor_config = { + "n": n_major, + "path_kargs": lambda idx: { + "n_pts": n_minor + 1, + "std": 0.4, + "momentum": 0.4, + "sz": 0.03 * (1.2 * n_minor - idx), + }, + "spawn_kargs": lambda idx: { + "rnd_idx": idx + 1, + "ang_min": np.pi / 12, + "ang_max": np.pi / 8, + "axis2": [0, 0, 1], + }, + "children": [detail_config], + } + major_config = { + "n": n_branch, + "path_kargs": lambda idx: { + "n_pts": n_major + 1, + "std": 0.4, + "momentum": 0.4, + "sz": uniform(0.08, 0.1), + }, + "spawn_kargs": lambda idx: { + "init_vec": [ + span + * np.cos( + 2 * np.pi * idx / n_branch + uniform(-np.pi / 9, np.pi / 9) + ), + span + * np.sin( + 2 * np.pi * idx / n_branch + uniform(-np.pi / 9, np.pi / 9) + ), + math.sqrt(1 - span * span), + ] + }, + "children": [minor_config], + } + return major_config + + @property + def twig_config(self): + n_branch = np.random.randint(6, 8) + n_major = np.random.randint(4, 5) + n_minor = np.random.randint(4, 5) + n_detail = np.random.randint(3, 4) + span = uniform(0.7, 0.8) + detail_config = { + "n": n_minor, + "path_kargs": lambda idx: { + "n_pts": n_detail * 2 + 1, + "std": 0.4, + "momentum": 0.6, + "sz": 0.01 * (2.5 * n_detail - idx), + }, + "spawn_kargs": lambda idx: { + "rnd_idx": 2 * idx + 1, + "ang_min": np.pi / 8, + "ang_max": np.pi / 6, + "axis2": [0, 0, 1], + }, + "children": [], + } + minor_config = { + "n": n_major, + "path_kargs": lambda idx: { + "n_pts": n_minor * 2 + 1, + "std": 0.4, + "momentum": 0.4, + "sz": 0.03 * (2.2 * n_minor - idx), + }, + "spawn_kargs": lambda idx: { + "rnd_idx": 2 * idx + 1, + "ang_min": np.pi / 8, + "ang_max": np.pi / 6, + "axis2": [0, 0, 1], + }, + "children": [detail_config], + } + major_config = { + "n": n_branch, + "path_kargs": lambda 
idx: { + "n_pts": n_major * 2 + 1, + "std": 0.4, + "momentum": 0.4, + "sz": uniform(0.08, 0.1), + }, + "spawn_kargs": lambda idx: { + "init_vec": [ + span + * np.cos( + 2 * np.pi * idx / n_branch + uniform(-np.pi / 9, np.pi / 9) + ), + span + * np.sin( + 2 * np.pi * idx / n_branch + uniform(-np.pi / 9, np.pi / 9) + ), + math.sqrt(1 - span * span), + ] + }, + "children": [minor_config], + } + return major_config + + @staticmethod + def radius_fn(base_radius, size, resolution): + radius_decay_root = 0.85 + radius_decay_leaf = uniform(0.4, 0.6) + radius = base_radius * radius_decay_root ** ( + np.arange(size * resolution) / resolution + ) + radius[-resolution:] *= radius_decay_leaf ** ( + np.arange(resolution) / resolution + ) + return radius + + def create_asset(self, face_size=0.01, **params) -> bpy.types.Object: + resolution = 16 + obj = build_radius_tree( + self.radius_fn, self.branch_config, self.radius, resolution + ) + obj.scale = 2 * np.array(self.default_scale) / max(obj.dimensions[:2]) + butil.apply_transform(obj) + surface.add_geomod(obj, geo_radius, apply=True, input_args=["radius", 32]) + tag_object(obj, "tree_coral") + return obj + + +class TwigBaseCoralFactory(TreeBaseCoralFactory): + def __init__(self, factory_seed, coarse=False): + super().__init__(factory_seed, coarse, method="twig") + + +class BushBaseCoralFactory(TreeBaseCoralFactory): + def __init__(self, factory_seed, coarse=False): + super().__init__(factory_seed, coarse, method="bush") diff --git a/infinigen/assets/objects/corals/tube.py b/infinigen/assets/objects/corals/tube.py new file mode 100644 index 000000000..9d0bb374b --- /dev/null +++ b/infinigen/assets/objects/corals/tube.py @@ -0,0 +1,111 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
+ +# Authors: Lingjie Mei + + +import bpy +import numpy as np + +import infinigen.core.util.blender as butil +from infinigen.assets.objects.corals.base import BaseCoralFactory +from infinigen.assets.objects.corals.tentacles import make_radius_points_fn +from infinigen.assets.utils.object import new_icosphere +from infinigen.core import surface +from infinigen.core.nodes.node_info import Nodes +from infinigen.core.nodes.node_wrangler import NodeWrangler +from infinigen.core.tagging import tag_object + + +class TubeBaseCoralFactory(BaseCoralFactory): + default_scale = [0.7] * 3 + + def __init__(self, factory_seed, coarse=False): + super(TubeBaseCoralFactory, self).__init__(factory_seed, coarse) + self.points_fn = make_radius_points_fn(0.05, 0.4) + + def create_asset(self, face_size=0.01, **params) -> bpy.types.Object: + obj = new_icosphere(subdivisions=2) + obj.name = "tube_coral" + surface.add_geomod(obj, self.geo_coral_tube, apply=True) + butil.modify_mesh( + obj, "BEVEL", True, offset_type="PERCENT", width_pct=10, segments=1 + ) + butil.modify_mesh(obj, "SOLIDIFY", True, thickness=0.05) + butil.modify_mesh(obj, "SUBSURF", True, levels=2, render_levels=2) + butil.modify_mesh( + obj, + "DISPLACE", + True, + strength=0.1, + texture=bpy.data.textures.new(name="tube_coral", type="STUCCI"), + mid_level=0, + ) + tag_object(obj, "tube_coral") + return obj + + @staticmethod + def geo_coral_tube(nw: NodeWrangler): + ico_sphere_perturb = 0.2 + growth_z = 1 + short_length_range = 0.2, 0.4 + long_length_range = 0.4, 1.2 + angles = np.linspace(np.pi * 2 / 5, np.pi / 10, 6) + scales = np.linspace(1, 0.9, 6) + face_perturb = 0.4 + growth_prob = 0.75 + seed = np.random.randint(1e3) + ico_sphere = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) + perturbed_ico_sphere = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": ico_sphere, + "Offset": nw.uniform( + [-ico_sphere_perturb] * 3, [ico_sphere_perturb] * 3, seed + ), + }, + ) + mesh = nw.new_node(Nodes.DualMesh, input_kwargs={"Mesh": perturbed_ico_sphere}) + normal = nw.new_node(Nodes.InputNormal) + top = nw.boolean_math( + "AND", + nw.compare_direction("LESS_THAN", normal, (0, 0, 1), angles[0]), + nw.bernoulli(growth_prob, seed), + ) + + for i, (angle, scale) in enumerate(zip(angles, scales)): + direction = nw.vector_math( + "NORMALIZE", + nw.add( + nw.add(normal, nw.combine(0, 0, nw.uniform(0, growth_z, seed + i))), + nw.uniform([face_perturb] * 3, [-face_perturb] * 3, seed + i), + ), + ) + length = nw.switch( + nw.compare_direction("LESS_THAN", normal, (0, 0, 1), angle), + nw.uniform(*long_length_range, seed + i), + nw.uniform(*short_length_range, seed + i), + ) + mesh, top = nw.new_node( + Nodes.ExtrudeMesh, + input_kwargs={ + "Mesh": mesh, + "Selection": top, + "Offset": direction, + "Offset Scale": length, + }, + ).outputs[:2] + mesh = nw.new_node( + Nodes.ScaleElements, + input_kwargs={"Geometry": mesh, "Selection": top, "Scale": scale}, + ) + + geometry_without_top = nw.new_node( + Nodes.DeleteGeometry, + input_kwargs={"Geometry": mesh, "Selection": top}, + attrs={"domain": "FACE"}, + ) + + nw.new_node(Nodes.GroupOutput, input_kwargs={"Geometry": geometry_without_top}) diff --git a/infinigen/assets/creatures/README.md b/infinigen/assets/objects/creatures/README.md similarity index 100% rename from infinigen/assets/creatures/README.md rename to infinigen/assets/objects/creatures/README.md diff --git a/infinigen/assets/creatures/__init__.py b/infinigen/assets/objects/creatures/__init__.py 
similarity index 66% rename from infinigen/assets/creatures/__init__.py rename to infinigen/assets/objects/creatures/__init__.py index 0c02caf13..3eb64cf9d 100644 --- a/infinigen/assets/creatures/__init__.py +++ b/infinigen/assets/objects/creatures/__init__.py @@ -1,9 +1,14 @@ -from .beetle import BeetleFactory, AntSwarmFactory +from .beetle import AntSwarmFactory, BeetleFactory from .bird import BirdFactory, FlyingBirdFactory from .carnivore import CarnivoreFactory +from .crustacean import ( + CrabFactory, + CrustaceanFactory, + LobsterFactory, + SpinyLobsterFactory, +) from .fish import FishFactory, FishSchoolFactory from .herbivore import HerbivoreFactory -from .crustacean import CrustaceanFactory, CrabFactory, LobsterFactory, SpinyLobsterFactory -from .reptile import FrogFactory, LizardFactory, SnakeFactory from .insects.dragonfly import DragonflyFactory from .jellyfish import JellyfishFactory +from .reptile import FrogFactory, LizardFactory, SnakeFactory diff --git a/infinigen/assets/objects/creatures/beetle.py b/infinigen/assets/objects/creatures/beetle.py new file mode 100644 index 000000000..c48c0bf11 --- /dev/null +++ b/infinigen/assets/objects/creatures/beetle.py @@ -0,0 +1,234 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Alexander Raistrick + + +import logging + +import bpy +import gin +import numpy as np +from numpy.random import normal as N +from numpy.random import randint +from numpy.random import uniform as U + +import infinigen.assets.materials.chitin +from infinigen.assets.objects.creatures import parts +from infinigen.assets.objects.creatures.util import creature, genome, joining +from infinigen.assets.objects.creatures.util import hair as creature_hair +from infinigen.assets.objects.creatures.util.animation import ( + run_cycle as creature_animation, +) +from infinigen.assets.objects.creatures.util.boid_swarm import BoidSwarmFactory +from infinigen.assets.objects.creatures.util.creature_util import offset_center +from infinigen.assets.objects.creatures.util.genome import Joint +from infinigen.core import surface +from infinigen.core.placement.factory import AssetFactory, make_asset_collection +from infinigen.core.tagging import tag_object +from infinigen.core.util.math import FixedSeed, clip_gaussian, lerp + +logger = logging.getLogger(__name__) + + +def insect_hair_params(): + mat_roughness = U(0.7, 1) + + length = U(0.01, 0.04) + puff = U(0.7, 1) + + return { + "density": 4000, + "clump_n": 1, + "avoid_features_dist": 0.01, + "grooming": { + "Length MinMaxScale": np.array( + (length, length * N(2, 0.5), U(15, 60)), dtype=np.float32 + ), + "Puff MinMaxScale": np.array( + (puff, puff * N(3, 0.5), U(15, 60)), dtype=np.float32 + ), + "Combing": U(0, 0.2), + "Strand Random Mag": 0.0, + "Strand Perlin Mag": 0.0, + "Strand Perlin Scale": U(15, 45), + "Tuft Spread": 0.0, + "Tuft Clumping": 0.0, + "Root Radius": 0.001, + "Post Clump Noise Mag": 0, + "Hair Length Pct Min": U(0.7, 1), + }, + "material": { + "Roughness": mat_roughness, + "Radial Roughness": mat_roughness + N(0, 0.07), + "Random Roughness": 0, + "IOR": 1.55, + }, + } + + +def beetle_postprocessing(body_parts, extras, params): + main_template = surface.registry.sample_registry(params["surface_registry"]) + main_template.apply(body_parts) + + +def beetle_genome(): + fac = parts.generic_nurbs.NurbsBody( + prefix="body_insect", tags=["body", "rigid"], var=2 + ) + if U() < 0.5: 
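+        # randomly exaggerate the NURBS body-profile proportions for per-beetle variety;
+        # the last third of the proportion entries are left unchanged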
+ n = len(fac.params["proportions"]) + noise = U(1, 3, n) + noise[-n // 3 :] = 1 + fac.params["proportions"] *= noise + + body = genome.part(fac) + + body_length = fac.params["proportions"].sum() * fac.params["length"] + + leg_fac = parts.leg.InsectLeg() + n_leg_pairs = int(np.clip(body_length * clip_gaussian(3, 2, 2, 6), 2, 15)) + leg_fac.params["length_rad1_rad2"][0] /= n_leg_pairs / 1.8 + splay = U(30, 60) + for t in np.linspace(0.15, 0.6, n_leg_pairs): + for side in [-1, 1]: + leg = genome.part(leg_fac) + xrot = lerp(70, -100, t) + genome.attach( + leg, + body, + coord=(t, splay / 180, 1), + joint=Joint((xrot, 5, 90)), + side=side, + ) + + head = genome.part( + parts.generic_nurbs.NurbsHead(prefix="head_insect", tags=["head", "rigid"]) + ) + genome.attach(head, body, coord=(1, 0, 0), joint=Joint((0, -15, 0))) + + if U() < 0.7: + mandible_fac = parts.head_detail.InsectMandible() + rot = np.array((120, 20, 80)) * N(1, 0.15) + for side in [-1, 1]: + genome.attach( + genome.part(mandible_fac), + head, + coord=(0.75, 0.5, 0.1), + joint=Joint(rot), + side=side, + ) + + return genome.CreatureGenome( + parts=body, + postprocess_params=dict( + surface_registry=[(infinigen.assets.materials.chitin, 1)], + hair=insect_hair_params(), + ), + ) + + +@gin.configurable +class BeetleFactory(AssetFactory): + def __init__( + self, factory_seed=None, bvh=None, coarse=False, animation_mode=None, **kwargs + ): + super().__init__(factory_seed, coarse) + self.bvh = bvh + self.animation_mode = animation_mode + + def create_asset(self, i, hair=False, **kwargs): + genome = beetle_genome() + root, parts = creature.genome_to_creature( + genome, name=f"beetle({self.factory_seed}, {i})" + ) + tag_object(root, "beetle") + offset_center(root) + joined, extras, arma, ik_targets = joining.join_and_rig_parts( + root, + parts, + genome, + rigging=(self.animation_mode is not None), + postprocess_func=beetle_postprocessing, + **kwargs, + ) + if self.animation_mode == "walk_cycle": + creature_animation.animate_run( + root, arma, ik_targets, steps_per_sec=N(2, 0.2) + ) + if hair and U() < 0.5: + creature_hair.configure_hair( + joined, root, genome.postprocess_params["hair"] + ) + return root + + +class AntSwarmFactory(BoidSwarmFactory): + def ant_swarm_settings(self, mode=None): + boids_settings = dict( + use_flight=False, + use_land=True, + use_climb=True, + land_speed_max=U(0.5, 2), + land_acc_max=U(0.7, 1), + land_personal_space=0.05, + land_jump_speed=U(0, 0.05), + bank=0, + pitch=0, + rule_fuzzy=U(0.6, 0.9), + ) + + if mode is None: + mode = np.random.choice(["queues", "goal_swarm", "random_swarm"]) + logger.debug(f"Randomly chose ant_swarm_settings {mode=}") + + if mode == "queues": + boids_settings["rules"] = [ + dict( + type="FOLLOW_LEADER", use_line=True, queue_count=100, distance=0.0 + ), + ] + elif mode == "goal_swarm": + boids_settings["rules"] = [ + dict(type="SEPARATE"), + dict(type="GOAL", use_predict=True), + dict(type="FLOCK"), + ] + elif mode == "random_swarm": + boids_settings["rules"] = [ + dict(type="SEPARATE"), + dict(type="AVERAGE_SPEED"), + dict(type="FLOCK"), + ] + else: + raise ValueError(f"Unrecognized {mode=}") + + return dict( + particle_size=U(0.02, 0.1), + size_random=U(0.3, 0.7), + use_rotation_instance=True, + lifetime=bpy.context.scene.frame_end - bpy.context.scene.frame_start, + warmup_frames=1, + emit_duration=0, # all particles appear immediately + emit_from="VOLUME", + mass=2, + use_multiply_size_mass=True, + boids_settings=boids_settings, + ) + + def __init__(self, factory_seed, bvh, 
coarse=False): + with FixedSeed(factory_seed): + settings = self.ant_swarm_settings() + col = make_asset_collection( + BeetleFactory(factory_seed=randint(1e7), animation_mode="walk_cycle"), + n=1, + ) + super(AntSwarmFactory, self).__init__( + factory_seed, + child_col=col, + collider_col=bpy.data.collections.get("colliders"), + settings=settings, + bvh=bvh, + volume=N(0.1, 0.015), + coarse=coarse, + ) diff --git a/infinigen/assets/objects/creatures/bird.py b/infinigen/assets/objects/creatures/bird.py new file mode 100644 index 000000000..b1907ae23 --- /dev/null +++ b/infinigen/assets/objects/creatures/bird.py @@ -0,0 +1,471 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: +# - Alexander Raistrick: regular bird, hair params +# - Beining Han: adapt to create flying bird + + +import bpy +import gin +import numpy as np +from numpy.random import normal as N +from numpy.random import uniform as U + +import infinigen.assets.materials.basic_bsdf +import infinigen.assets.materials.bird +import infinigen.assets.materials.reptile_brown_circle_attr +import infinigen.assets.materials.reptile_two_color_attr +import infinigen.assets.materials.spot_sparse_attr +from infinigen.assets.materials import beak, bone, eyeball, tongue +from infinigen.assets.objects.creatures import parts +from infinigen.assets.objects.creatures.util import creature, genome, joining +from infinigen.assets.objects.creatures.util import hair as creature_hair +from infinigen.assets.objects.creatures.util.animation import idle, run_cycle +from infinigen.assets.objects.creatures.util.animation.driver_wiggle import ( + animate_wiggle_bones, +) +from infinigen.assets.objects.creatures.util.creature_util import offset_center +from infinigen.assets.objects.creatures.util.genome import Joint +from infinigen.core import surface +from infinigen.core.placement import animation_policy +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.tagging import tag_object +from infinigen.core.util import blender as butil +from infinigen.core.util.math import FixedSeed, clip_gaussian +from infinigen.core.util.random import random_general as rg + + +def bird_hair_params(flying=True): + length = U(0.01, 0.025) if flying else U(0.03, 0.06) + puff = U(0.03, 0.2) + + return { + "density": 70000, + "clump_n": 10, + "avoid_features_dist": 0.02, + "grooming": { + "Length MinMaxScale": np.array( + (length, length * N(2, 0.5), U(15, 60)), dtype=np.float32 + ), + "Puff MinMaxScale": np.array( + (puff, puff * N(1.5, 0.5), U(15, 60)), dtype=np.float32 + ), + "Combing": U(0.6, 1), + "Strand Random Mag": 0.0, + "Strand Perlin Mag": U(0, 0.003), + "Strand Perlin Scale": 30.0, + "Tuft Spread": 0.01, + "Tuft Clumping": U(0.5, 1), + "Root Radius": 0.006, + "Post Clump Noise Mag": 0.001, + "Hair Length Pct Min": U(0.5, 0.9), + }, + "material": { + "Roughness": U(0, 0.4), + "Radial Roughness": U(0.1, 0.3), + "Random Roughness": U(0, 0.2), + "IOR": 1.55, + }, + } + + +def bird_postprocessing(body_parts, extras, params): + def get_extras(k): + return [o for o in extras if k in o.name] + + main_template = surface.registry.sample_registry(params["surface_registry"]) + main_template.apply(body_parts + get_extras("BodyExtra") + get_extras("Feather")) + + tongue.apply(get_extras("Tongue")) + bone.apply(get_extras("Teeth") + get_extras("Claws")) + eyeball.apply(get_extras("Eyeball"), shader_kwargs={"coord": "X"}) + 
beak.apply(get_extras("Beak")) + + +def duck_genome(mode): + body_lrr = np.array((0.85, 0.25, 0.38)) * N(1, 0.2) * N(1, 0.2, 3) + body_fac = parts.generic_nurbs.NurbsBody( + prefix="body_bird", tags=["body", "rigid"], var=U(0.3, 1) + ) + body = genome.part(body_fac) + body_length = body_fac.params["length"][0] + + tail = genome.part(parts.wings.BirdTail()) + genome.attach( + tail, body, coord=(0.2, 1, 0.5), joint=Joint(rest=(0, 170 * N(1, 0.1), 0)) + ) + + shoulder_bounds = np.array([[-20, -20, -20], [20, 20, 20]]) + foot_fac = parts.foot.Foot( + { + "length_rad1_rad2": np.array((body_length * 0.1, 0.025, 0.04)) + * N(1, 0.1) + * N(1, 0.1, 3), + "Toe Length Rad1 Rad2": np.array((body_length * N(0.4, 0.07), 0.03, 0.02)) + * N(1, 0.1) + * N(1, 0.1, 3), + "Toe Splay": 35 * N(1, 0.2), + "Toebean Radius": 0.03 * N(1, 0.1), + "Toe Rotate": (0.0, -1.57, 0.0), + "Claw Curl Deg": 12 * N(1, 0.2), + "Claw Pct Length Rad1 Rad2": np.array((0.13, 0.64, 0.05)) + * N(1, 0.1) + * N(1, 0.1, 3), + "Thumb Pct": np.array((0.61, 1.17, 1.5)) * N(1, 0.1) * N(1, 0.1, 3), + "Toe Curl Scalar": 0.34 * N(1, 0.2), + }, + bald=True, + ) + + leg_fac = parts.leg.BirdLeg( + { + "length_rad1_rad2": ( + body_length * 0.5 * N(1, 0.05), + 0.09 * N(1, 0.1), + 0.06 * N(1, 0.1), + ) + } + ) + leg_coord = (N(0.5, 0.05), N(0.7, 0.05), N(0.95, 0.05)) + for side in [-1, 1]: + leg = genome.attach( + genome.part(foot_fac), + genome.part(leg_fac), + coord=(0.9, 0, 0), + joint=Joint(rest=(0, 0, 0)), + ) + genome.attach( + leg, + body, + coord=leg_coord, + joint=Joint(rest=(0, 90, 0), bounds=shoulder_bounds), + side=side, + ) + + extension = U(0.7, 1) if mode == "flying" else U(0.01, 0.1) + wing_len = body_length * 0.5 * clip_gaussian(1.2, 0.7, 0.5, 2.5) + wing_fac = parts.wings.BirdWing( + { + "length_rad1_rad2": np.array((wing_len, 0.1 * N(1, 0.1), 0.02 * N(1, 0.2))), + "Extension": extension, + } + ) + + wing_coord = (N(0.7, 0.02), 110 / 180 * N(1, 0.1), 0.95) + if wing_fac.params["Extension"] > 0.5: + wing_rot = (90, 0, 90) + else: + wing_rot = (90, 40, 90) + for side in [-1, 1]: + wing = genome.part(wing_fac) + genome.attach( + wing, body, coord=wing_coord, joint=Joint(rest=wing_rot), side=side + ) + + head_fac = parts.head.BirdHead() + head = genome.part(head_fac) + + beak = genome.part(parts.beak.BirdBeak()) + genome.attach(beak, head, coord=(0.75, 0, 0.5), joint=Joint(rest=(0, 0, 0))) + + eye_fac = parts.eye.MammalEye({"Radius": N(0.03, 0.005)}) + t, splay = U(0.6, 0.85), U(80, 110) / 180 + r = 0.85 + rot = np.array([0, 0, 90]) * N(1, 0.1, 3) + for side in [-1, 1]: + eye = genome.part(eye_fac) + genome.attach( + eye, + head, + coord=(t, splay, r), + joint=Joint(rest=(0, 0, 0)), + rotation_basis="normal", + side=side, + ) + + genome.attach(head, body, coord=(1, 0, 0), joint=Joint(rest=(0, 0, 0))) + + return genome.CreatureGenome( + parts=body, + postprocess_params=dict( + animation=dict(), + hair=bird_hair_params(flying=False), + surface_registry=[ + (infinigen.assets.materials.spot_sparse_attr, 4), + (infinigen.assets.materials.reptile_brown_circle_attr, 0.5), + (infinigen.assets.materials.reptile_two_color_attr, 0.5), + (infinigen.assets.materials.bird, 5), + ], + ), + ) + + +def flying_bird_genome(mode): + body_lrr = np.array((0.95, 0.13, 0.18)) * N(1.0, 0.05, size=(3,)) + body = genome.part(parts.body.BirdBody({"length_rad1_rad2": body_lrr})) + body_length = body_lrr[0] + + tail = genome.part(parts.wings.FlyingBirdTail()) + genome.attach( + tail, + body, + coord=(U(0.08, 0.15), 1, 0.5), + joint=Joint(rest=(0, 180 * N(1, 0.1), 
0)), + ) + + shoulder_bounds = np.array([[-20, -20, -20], [20, 20, 20]]) + foot_fac = parts.foot.Foot( + { + "length_rad1_rad2": np.array((body_length * 0.2, 0.01, 0.02)) + * N(1, 0.1, 3), + "Toe Length Rad1 Rad2": np.array((body_length * N(0.4, 0.02), 0.02, 0.01)) + * N(1, 0.1) + * N(1, 0.1, 3), + "Toe Splay": 8 * N(1, 0.2), + "Toe Rotate": (0.0, -N(0.55, 0.1), 0.0), + "Toebean Radius": 0.01 * N(1, 0.1), + "Claw Curl Deg": 12 * N(1, 0.2), + "Claw Pct Length Rad1 Rad2": np.array((0.13, 0.64, 0.05)) + * N(0.5, 0.05) + * N(1, 0.1, 3), + "Thumb Pct": np.array((0.4, 0.5, 0.75)) * N(1, 0.1) * N(1, 0.1, 3), + "Toe Curl Scalar": 0.34 * N(1, 0.2), + }, + bald=True, + ) + + leg_fac = parts.leg.BirdLeg( + { + "length_rad1_rad2": ( + body_length * 0.5 * N(1, 0.05), + 0.04 * N(1, 0.1), + 0.02 * N(1, 0.1), + ), + "Thigh Rad1 Rad2 Fullness": np.array((0.12, 0.04, 1.26)) * N(1, 0.1, 3), + "Shin Rad1 Rad2 Fullness": np.array((0.1, 0.04, 5.0)) * N(1, 0.1, 3), + } + ) + leg_coord = (N(0.5, 0.05), N(0.2, 0.04), N(0.8, 0.05)) + for side in [-1, 1]: + leg = genome.attach( + genome.part(foot_fac), + genome.part(leg_fac), + coord=(0.9, 0, 0), + joint=Joint(rest=(0, 0, 0)), + ) + genome.attach( + leg, + body, + coord=leg_coord, + joint=Joint(rest=(0, U(135, 175), 0), bounds=shoulder_bounds), + side=side, + ) + + extension = U(0.8, 1) + wing_len = body_length * clip_gaussian(1.0, 0.2, 0.6, 1.5) * 0.8 + wing_fac = parts.wings.FlyingBirdWing( + { + "length_rad1_rad2": np.array((wing_len, U(0.08, 0.15), 0.02 * N(1, 0.2))), + "Extension": extension, + "feather_density": U(25, 40), + } + ) + + wing_coord = (N(0.68, 0.02), 150 / 180 * N(1, 0.1), 0.8) + if wing_fac.params["Extension"] > 0.5: + wing_rot = (90, 0, 90) + else: + wing_rot = (90, 40, 90) + for side in [-1, 1]: + wing = genome.part(wing_fac) + genome.attach( + wing, body, coord=wing_coord, joint=Joint(rest=wing_rot), side=side + ) + + head_fac = parts.head.FlyingBirdHead() + head = genome.part(head_fac) + + beak = genome.part(parts.beak.FlyingBirdBeak()) + genome.attach(beak, head, coord=(0.85, 0, 0.5), joint=Joint(rest=(0, 0, 0))) + + eye_fac = parts.eye.MammalEye({"Radius": N(0.02, 0.005)}) + t, splay = U(0.7, 0.85), U(80, 110) / 180 + r = 0.85 + rot = np.array([0, 0, 90]) * N(1, 0.1, 3) + for side in [-1, 1]: + eye = genome.part(eye_fac) + genome.attach( + eye, + head, + coord=(t, splay, r), + joint=Joint(rest=(0, 0, 0)), + rotation_basis="normal", + side=side, + ) + + genome.attach( + head, + body, + coord=(U(0.84, 0.85), 0, U(1.05, 1.15)), + joint=Joint(rest=(0, N(18, 5), 0)), + ) + + return genome.CreatureGenome( + parts=body, + postprocess_params=dict( + animation=dict(), + hair=bird_hair_params(flying=True), + surface_registry=[ + # (infinigen.assets.materials.spot_sparse_attr, 4), + # (infinigen.assets.materials.reptile_brown_circle_attr, 0.5), + # (infinigen.assets.materials.reptile_two_color_attr, 0.5), + (infinigen.assets.materials.bird, 5) + ], + ), + ) + + +@gin.configurable +class BirdFactory(AssetFactory): + def __init__( + self, factory_seed=None, coarse=False, bvh=None, animation_mode=None, **kwargs + ): + super().__init__(factory_seed, coarse) + self.bvh = bvh + self.animation_mode = animation_mode + + def create_asset(self, i, placeholder, hair=True, **kwargs): + dynamic = self.animation_mode is not None + + genome = duck_genome(mode=self.animation_mode) + root, parts = creature.genome_to_creature( + genome, name=f"bird({self.factory_seed}, {i})" + ) + tag_object(root, "bird") + offset_center(root) + joined, extras, arma, ik_targets = 
joining.join_and_rig_parts( + root, + parts, + genome, + rigging=dynamic, + postprocess_func=bird_postprocessing, + **kwargs, + ) + + joined_extras = butil.join_objects(extras) + joined_extras.parent = joined + + butil.parent_to(root, placeholder, no_inverse=True) + + if hair: + creature_hair.configure_hair( + joined, root, genome.postprocess_params["hair"] + ) + if dynamic: + if self.animation_mode == "run": + run_cycle.animate_run(root, arma, ik_targets) + elif self.animation_mode == "idle": + idle.snap_iks_to_floor(ik_targets, self.bvh) + idle.idle_body_noise_drivers(ik_targets, wing_mag=U(0, 0.3)) + elif self.animation_mode == "swim": + spine = [b for b in arma.pose.bones if "Body" in b.name] + tail = [b for b in arma.pose.bones if "Tail" in b.name] + animate_wiggle_bones( + arma=arma, bones=tail, mag_deg=U(0, 30), freq=U(0.5, 2) + ) + else: + raise ValueError(f"Unrecognized mode {self.animation_mode=}") + return root + + +@gin.configurable +class FlyingBirdFactory(AssetFactory): + max_expected_radius = 1 + max_distance = 40 + + def __init__( + self, + factory_seed=None, + coarse=False, + bvh=None, + animation_mode=None, + altitude=("uniform", 15, 30), + ): + super().__init__(factory_seed, coarse) + self.animation_mode = animation_mode + self.altitude = altitude + self.bvh = bvh + with FixedSeed(factory_seed): + self.policy = animation_policy.AnimPolicyRandomForwardWalk( + forward_vec=(1, 0, 0), + speed=U(7, 15), + step_range=(5, 40), + yaw_dist=("normal", 0, 15), + ) + + def create_placeholder(self, i, loc, rot): + p = butil.spawn_cube(size=3) + p.name = f"{self}.create_placeholder({i})" + p.location = loc + p.rotation_euler = rot + + if self.bvh is None: + return p + + altitude = rg(self.altitude) + p.location.z += altitude + curve = animation_policy.policy_create_bezier_path( + p, + self.bvh, + self.policy, + retry_rotation=True, + max_full_retries=30, + fatal=True, + ) + curve.name = f"animhelper:{self}.create_placeholder({i}).path" + + # animate the placeholder to the APPROX location of the snake, so the camera can follow itcurve.location = (0, 0, 0) + run_cycle.follow_path( + p, + curve, + use_curve_follow=True, + offset=0, + duration=bpy.context.scene.frame_end - bpy.context.scene.frame_start, + ) + p.rotation_euler.z += np.pi / 2 + curve.data.twist_mode = "Z_UP" + curve.data.driver_add("eval_time").driver.expression = "frame" + + return p + + def create_asset(self, i, placeholder, hair=True, animate=False, **kwargs): + genome = flying_bird_genome(self.animation_mode) + root, parts = creature.genome_to_creature( + genome, name=f"flying_bird({self.factory_seed}, {i})" + ) + joined, extras, arma, ik_targets = joining.join_and_rig_parts( + root, + parts, + genome, + rigging=self.animation_mode is not None, + postprocess_func=bird_postprocessing, + **kwargs, + ) + + joined_extras = butil.join_objects(extras) + joined_extras.parent = joined + + if hair: + creature_hair.configure_hair( + joined, root, genome.postprocess_params["hair"] + ) + if self.animation_mode is not None: + if self.animation_mode == "idle": + idle.idle_body_noise_drivers( + ik_targets, body_mag=0.0, foot_motion_chance=1.0, head_benddown=0 + ) + else: + raise ValueError(f"Unrecognized {self.animation_mode=}") + + return root diff --git a/infinigen/assets/objects/creatures/carnivore.py b/infinigen/assets/objects/creatures/carnivore.py new file mode 100644 index 000000000..bec9f3589 --- /dev/null +++ b/infinigen/assets/objects/creatures/carnivore.py @@ -0,0 +1,316 @@ +# Copyright (c) Princeton University. 
+# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Alexander Raistrick + +import gin +import mathutils +import numpy as np +from numpy.random import normal as N +from numpy.random import uniform as U + +import infinigen.assets.materials.giraffe_attr +import infinigen.assets.materials.spot_sparse_attr +import infinigen.assets.materials.tiger_attr +from infinigen.assets.materials import bone, eyeball, nose, tongue +from infinigen.assets.objects.creatures import parts +from infinigen.assets.objects.creatures.util import cloth_sim, creature, genome, joining +from infinigen.assets.objects.creatures.util import hair as creature_hair +from infinigen.assets.objects.creatures.util.animation import idle, run_cycle +from infinigen.assets.objects.creatures.util.creature_util import offset_center +from infinigen.assets.objects.creatures.util.genome import Joint +from infinigen.core import surface +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.util import blender as butil +from infinigen.core.util.math import clip_gaussian + + +def tiger_hair_params(): + mat_roughness = U(0.4, 0.7) + + length = clip_gaussian(0.022, 0.03, 0.01, 0.1) + puff = U(0.14, 0.4) + + return { + "density": 500000, + "clump_n": np.random.randint(5, 70), + "avoid_features_dist": 0.01, + "grooming": { + "Length MinMaxScale": np.array( + (length, length * N(2, 0.5), U(15, 60)), dtype=np.float32 + ), + "Puff MinMaxScale": np.array( + (puff, puff * N(3, 0.5), U(15, 60)), dtype=np.float32 + ), + "Combing": U(0.7, 1), + "Strand Random Mag": 0.0, + "Strand Perlin Mag": U(0, 0.006), + "Strand Perlin Scale": U(15, 45), + "Tuft Spread": N(0.01, 0.002), + "Tuft Clumping": U(0.2, 0.8), + "Root Radius": 0.001, + "Post Clump Noise Mag": 0.0005 * N(1, 0.15), + "Hair Length Pct Min": U(0.5, 0.9), + }, + "material": { + "Roughness": mat_roughness, + "Radial Roughness": mat_roughness + N(0, 0.07), + "Random Roughness": 0, + "IOR": 1.55, + }, + } + + +def tiger_skin_sim_params(): + return { + "bending_stiffness_max": 450.0, + "compression_stiffness_max": 80.0, + "goal_spring": 0.8, + "pin_stiffness": 1, + "shear_stiffness": 15.0, + "shear_stiffness_max": 80.0, + "tension_stiffness_max": 80.0, + "uniform_pressure_force": 5.0, + "use_pressure": True, + } + + +def tiger_postprocessing(body_parts, extras, params): + def get_extras(k): + return [o for o in extras if k in o.name] + + main_template = surface.registry.sample_registry(params["surface_registry"]) + main_template.apply(body_parts + get_extras("BodyExtra")) + + tongue.apply(get_extras("Tongue")) + bone.apply(get_extras("Teeth") + get_extras("Claws")) + eyeball.apply(get_extras("Eyeball"), shader_kwargs={"coord": "X"}) + nose.apply(get_extras("Nose")) + + +def tiger_genome(): + body_fac = parts.generic_nurbs.NurbsBody( + prefix="body_feline", tags=["body"], var=0.7, temperature=0.2 + ) + body_fac.params["thetas"][-3] *= N(1, 0.1) + body = genome.part(body_fac) + + tail = genome.part(parts.tail.Tail()) + genome.attach(tail, body, coord=(0.07, 1, 1), joint=Joint(rest=(N(0, 10), 180, 0))) + + if U() < 0.5: + head_length_rad1_rad2 = np.array((0.36, 0.20, 0.18)) * N(1, 0.1, 3) + head_fac = parts.head.CarnivoreHead({"length_rad1_rad2": head_length_rad1_rad2}) + head = genome.part(head_fac) + + jaw_pct = np.array((1.05, 0.55, 0.5)) + jaw = genome.part( + parts.head.CarnivoreJaw( + {"length_rad1_rad2": head_length_rad1_rad2 * jaw_pct} + ) + ) + genome.attach( + jaw, + head, + 
coord=(0.2 * N(1, 0.1), 0, 0.35 * N(1, 0.1)), + joint=Joint(rest=(0, U(10, 35), 0), pose=(0, 0, 0)), + ) + + else: + head_fac = parts.generic_nurbs.NurbsHead( + prefix="head_carnivore", tags=["head"], var=0.5 + ) + head = genome.part(head_fac) + + headl = head_fac.params["length"][0] + head_length_rad1_rad2 = np.array((headl, 0.20, 0.18)) * N(1, 0.1, 3) + + jaw_pct = np.array((0.7, 0.55, 0.5)) + jaw = genome.part( + parts.head.CarnivoreJaw( + {"length_rad1_rad2": head_length_rad1_rad2 * jaw_pct} + ) + ) + genome.attach( + jaw, + head, + coord=(0.12, 0, 0.3 * N(1, 0.1)), + joint=Joint(rest=(0, U(10, 35), 0), pose=(0, 0, 0)), + ) + + eye_fac = parts.eye.MammalEye({"Radius": N(0.027, 0.009)}) + eye_t, splay = U(0.61, 0.64), U(90, 140) / 180 + r = U(0.8, 0.9) + rot = np.array([0, 0, 0]) + for side in [-1, 1]: + eye = genome.part(eye_fac) + genome.attach( + eye, + head, + coord=(eye_t, splay, r), + joint=Joint(rest=rot), + rotation_basis="normal", + side=side, + ) + + nose = genome.part(parts.head_detail.CatNose()) + genome.attach( + nose, head, coord=(U(0.9, 0.96), 1, U(0.5, 0.7)), joint=Joint(rest=(0, 20, 0)) + ) + + ear_fac = parts.head_detail.CatEar() + t, splay = N(0.33, 0.07), U(100, 150) / 180 + rot = np.array([-20, -10, -23]) + N(0, 4, 3) + for side in [-1, 1]: + ear = genome.part(ear_fac) + genome.attach( + ear, + head, + coord=(t, splay, 1), + joint=Joint(rest=rot), + rotation_basis="normal", + side=side, + ) + + neck_t = 0.7 + shoulder_bounds = np.array([[-20, -20, -20], [20, 20, 20]]) + splay = clip_gaussian(130, 7, 90, 130) / 180 + shoulder_t = clip_gaussian(0.12, 0.05, 0.08, 0.12) + params = { + "length_rad1_rad2": np.array((1.6, 0.1, 0.05)) * N(1, (0.15, 0.05, 0.05), 3) + } + + foot_fac = parts.foot.Foot() + backleg_fac = parts.leg.QuadrupedBackLeg(params=params) + for side in [-1, 1]: + back_leg = genome.attach( + genome.part(foot_fac), + genome.part(backleg_fac), + coord=(0.9, 0, 0), + joint=Joint(rest=(0, 0, 0)), + ) + genome.attach( + back_leg, + body, + coord=(shoulder_t, splay, 1.2), + joint=Joint(rest=(0, 90, 0), bounds=shoulder_bounds), + rotation_basis="global", + side=side, + ) # , smooth_rad=0.06)#, bridge_rad=0.1) + + frontleg_fac = parts.leg.QuadrupedFrontLeg(params=params) + for side in [-1, 1]: + front_leg = genome.attach( + genome.part(foot_fac), + genome.part(frontleg_fac), + coord=(0.9, 0, 0), + joint=Joint(rest=(0, 0, 0)), + ) + genome.attach( + front_leg, + body, + coord=(neck_t - shoulder_t, splay, 0.8), + joint=Joint(rest=(0, 90, 0)), + rotation_basis="global", + side=side, + ) # , smooth_rad=0.06)#, bridge_rad=0.1) + + # neck_lrr = np.array((body_lrr[0], body_lrr[-1], body_lrr[-1])) * np.array((0.45, 0.5, 0.25)) * N(1, 0.05, 3) + # neck = genome.part(parts.head.Neck({'length_rad1_rad2': neck_lrr})) + genome.attach( + head, + body, + coord=(N(0.97, 0.01), 0, 0), + joint=Joint(rest=(0, N(20, 5), 0)), + rotation_basis="global", + ) # , bridge_rad=0.1) + # genome.attach(neck, body, coord=(0.8, 0, 0.1), joint=Joint(rest=(0, -N(15, 2), 0))) + + return genome.CreatureGenome( + parts=body, + postprocess_params=dict( + hair=tiger_hair_params(), + skin=tiger_skin_sim_params(), + surface_registry=[ + (infinigen.assets.materials.tiger_attr, 3), + (infinigen.assets.materials.giraffe_attr, 0.2), + (infinigen.assets.materials.spot_sparse_attr, 2), + ], + ), + ) + + +@gin.configurable +class CarnivoreFactory(AssetFactory): + def __init__( + self, + factory_seed=None, + bvh: mathutils.bvhtree.BVHTree = None, + coarse: bool = False, + animation_mode: str = None, + hair: bool 
= True, + clothsim_skin: bool = False, + **kwargs, + ): + super().__init__(factory_seed, coarse) + self.bvh = bvh + self.animation_mode = animation_mode + self.hair = hair + self.clothsim_skin = clothsim_skin + + if self.hair and (self.animation_mode is not None or self.clothsim_skin): + raise NotImplementedError( + "Dynamic hair is not yet fully working. " + "Please disable either hair or both of animation/clothsim" + ) + + def create_placeholder(self, **kwargs): + return butil.spawn_cube(size=4) + + def create_asset(self, i, placeholder, **kwargs): + genome = tiger_genome() + root, parts = creature.genome_to_creature( + genome, name=f"carnivore({self.factory_seed}, {i})" + ) + # tag_object(root, 'carnivore') + offset_center(root) + + dynamic = self.animation_mode is not None + + joined, extras, arma, ik_targets = joining.join_and_rig_parts( + root, + parts, + genome, + rigging=dynamic, + postprocess_func=tiger_postprocessing, + **kwargs, + ) + + butil.parent_to(root, placeholder, no_inverse=True) + + if self.hair: + creature_hair.configure_hair( + joined, root, genome.postprocess_params["hair"], is_dynamic=dynamic + ) + + if dynamic: + if self.animation_mode == "run": + run_cycle.animate_run(root, arma, ik_targets) + elif self.animation_mode == "idle": + idle.snap_iks_to_floor(ik_targets, self.bvh) + idle.idle_body_noise_drivers(ik_targets) + elif self.animation_mode == "tpose": + pass + else: + raise ValueError(f"Unrecognized mode {self.animation_mode=}") + if self.clothsim_skin: + rigidity = surface.write_vertex_group( + joined, cloth_sim.local_pos_rigity_mask, apply=True + ) + cloth_sim.bake_cloth( + joined, + genome.postprocess_params["skin"], + attributes=dict(vertex_group_mass=rigidity), + ) + + return root diff --git a/infinigen/assets/objects/creatures/crustacean.py b/infinigen/assets/objects/creatures/crustacean.py new file mode 100644 index 000000000..91a21687a --- /dev/null +++ b/infinigen/assets/objects/creatures/crustacean.py @@ -0,0 +1,456 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
+ +# Authors: Lingjie Mei + + +from collections import defaultdict + +import bpy +import gin +import numpy as np +from numpy.random import uniform + +from infinigen.assets.objects.creatures.parts.crustacean.antenna import ( + LobsterAntennaFactory, + SpinyLobsterAntennaFactory, +) +from infinigen.assets.objects.creatures.parts.crustacean.body import ( + CrabBodyFactory, + LobsterBodyFactory, +) +from infinigen.assets.objects.creatures.parts.crustacean.claw import ( + CrabClawFactory, + LobsterClawFactory, +) +from infinigen.assets.objects.creatures.parts.crustacean.eye import CrustaceanEyeFactory +from infinigen.assets.objects.creatures.parts.crustacean.fin import CrustaceanFinFactory +from infinigen.assets.objects.creatures.parts.crustacean.leg import ( + CrabLegFactory, + LobsterLegFactory, +) +from infinigen.assets.objects.creatures.parts.crustacean.tail import ( + CrustaceanTailFactory, +) +from infinigen.assets.objects.creatures.util.creature import genome_to_creature +from infinigen.assets.objects.creatures.util.genome import ( + CreatureGenome, + Joint, + attach, + part, +) +from infinigen.assets.objects.creatures.util.joining import join_and_rig_parts +from infinigen.assets.utils.decorate import read_material_index, write_material_index +from infinigen.assets.utils.misc import assign_material +from infinigen.core.nodes.node_info import Nodes +from infinigen.core.nodes.node_utils import build_color_ramp +from infinigen.core.nodes.node_wrangler import NodeWrangler +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.surface import read_attr_data, shaderfunc_to_material +from infinigen.core.util import blender as butil +from infinigen.core.util.color import hsv2rgba +from infinigen.core.util.math import FixedSeed +from infinigen.core.util.random import log_uniform + +n_legs = 4 +n_limbs = 5 +n_side_fin = 2 + + +def crustacean_genome(sp): + body_fac = sp["body_fn"]() + obj = part(body_fac) + # Add legs + leg_x_length = sp["leg_x_length"](body_fac.params) + leg_x_lengths = np.sort(uniform(0.6, 1, 4))[::-1] * leg_x_length + leg_angle = sp["leg_angle"] + x_legs = sp["x_legs"] + leg_joints_x, leg_joints_y, leg_joints_z = sp["leg_joint"] + + shared_leg_params = ["bottom_flat", "bottom_cutoff"] + leg_fn = sp["leg_fn"] + leg_params = {k: v for k, v in leg_fn().params.items() if k in shared_leg_params} + leg_fac = [ + leg_fn({**leg_params, "x_length": leg_x_lengths[i]}) for i in range(n_legs) + ] + for i in range(n_legs): + for side in [1, -1]: + attach( + part(leg_fac[i]), + obj, + (x_legs[i + 1], leg_angle, 0.99), + Joint((leg_joints_x[i], leg_joints_y[i], leg_joints_z[i])), + side=side, + ) + # Add claws + claw_angle = sp["claw_angle"] + claw_fn = sp["claw_fn"] + claw_fac = claw_fn({"x_length": sp["claw_x_length"](body_fac.params)}) + + for side in [1, -1]: + attach( + part(claw_fac), + obj, + (x_legs[0] + sp["x_claw_offset"], claw_angle, 0.99), + Joint(sp["claw_joint"]), + side=side, + ) + # Add tails + tail_fac = sp["tail_fn"] + if tail_fac is not None: + shared_params = [ + "bottom_shift", + "bottom_cutoff", + "top_shift", + "top_cutoff", + "y_length", + "z_length", + ] + tail_fac = tail_fac( + { + **{k: v for k, v in body_fac.params.items() if k in shared_params}, + "x_length": sp["tail_x_length"](body_fac.params), + } + ) + tail = part(tail_fac) + attach(tail, obj, (0, 0, 0), Joint((0, 0, 180))) + fin_fn = sp["fin_fn"] + if fin_fn is not None: + fin_fn = sp["fin_fn"] + x_fins = sp["x_fins"] + fin_joints_x, fin_joints_y, fin_joints_z = sp["fin_joints"] + 
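+            # paired side fins with lengths tapering from front to back, plus one central fin attached at the tail tip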
fin_x_length = sp["fin_x_length"](body_fac.params) + fin_x_lengths = np.sort(uniform(0.6, 1, 4))[::-1] * fin_x_length + fin_fac = [ + fin_fn({"x_length": fin_x_lengths[i]}) for i in range(n_side_fin + 1) + ] + + for i in range(n_side_fin): + for side in [1, -1]: + attach( + part(fin_fac[i]), + tail, + (x_fins[i], 0.5, 0.99), + Joint((fin_joints_x[i], fin_joints_y[i], fin_joints_z[i])), + side=side, + ) + attach(part(fin_fac[-1]), tail, (0.99, 0.5, 0.9), Joint((0, 0, 0))) + + # Add eyes + x_eye = sp["x_eye"] + eye_angle = sp["eye_angle"] + eye_joint_x, eye_joint_y, eye_joint_z = sp["eye_joint"] + eye_fac = CrustaceanEyeFactory() + for side in [1, -1]: + attach( + part(eye_fac), + obj, + (x_eye, eye_angle, 0.99), + Joint((eye_joint_x, eye_joint_y, eye_joint_z)), + side=side, + ) + # Add antenna + antenna_fn = sp["antenna_fn"] + if antenna_fn is not None: + x_antenna = sp["x_antenna"] + antenna_angle = sp["antenna_angle"] + antenna_fac = antenna_fn({"x_length": sp["antenna_x_length"](body_fac.params)}) + for side in [1, -1]: + attach( + part(antenna_fac), + obj, + (x_antenna, antenna_angle, 0.99), + Joint(sp["antenna_joint"]), + side=side, + ) + + anim_params = {k: v for k, v in sp.items() if "curl" in k or "rot" in k} + anim_params["freq"] = sp["freq"] + postprocess_params = dict(material={"base_hue": sp["base_hue"]}, anim=anim_params) + return CreatureGenome(obj, postprocess_params) + + +def build_base_hue(): + if uniform(0, 1) < 0.6: + return uniform(0, 0.05) + else: + return uniform(0.4, 0.45) + + +def shader_crustacean(nw: NodeWrangler, params): + value_shift = log_uniform(2, 10) + base_hue = params["base_hue"] + bright_color = hsv2rgba( + base_hue, uniform(0.8, 1.0), log_uniform(0.02, 0.05) * value_shift + ) + dark_color = hsv2rgba( + (base_hue + uniform(-0.05, 0.05)) % 1, + uniform(0.8, 1.0), + log_uniform(0.01, 0.02) * value_shift, + ) + light_color = hsv2rgba(base_hue, uniform(0.0, 0.4), log_uniform(0.2, 1.0)) + specular = uniform(0.6, 0.8) + specular_tint = uniform(0, 1) + clearcoat = uniform(0.2, 0.8) + roughness = uniform(0.1, 0.3) + metallic = uniform(0.6, 0.8) + x, y, z = nw.separate(nw.new_node(Nodes.NewGeometry).outputs["Position"]) + color = build_color_ramp( + nw, + nw.new_node( + Nodes.MapRange, + [ + nw.new_node( + Nodes.MusgraveTexture, + [nw.combine(x, nw.math("ABSOLUTE", y), z)], + input_kwargs={"Scale": log_uniform(5, 8)}, + ), + -1, + 1, + 0, + 1, + ], + ), + [0.0, 0.3, 0.7, 1.0], + [bright_color, bright_color, dark_color, dark_color], + ) + ratio = nw.new_node(Nodes.Attribute, attrs={"attribute_name": "ratio"}).outputs[ + "Fac" + ] + color = nw.new_node(Nodes.MixRGB, [ratio, light_color, color]) + bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": color, + "Metallic": metallic, + "Roughness": roughness, + "Specular": specular, + "Specular Tint": specular_tint, + "Clearcoat": clearcoat, + }, + ) + return bsdf + + +def shader_eye(nw: NodeWrangler): + return nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={"Base Color": (0.1, 0.1, 0.1, 1), "Specular": 0}, + ) + + +def crustacean_postprocessing(body_parts, extras, params): + tag_list = ["body", "claw", "leg"] + materials = [ + shaderfunc_to_material(shader_crustacean, params["material"]) + for _, t in enumerate(tag_list) + ] + tag_list.append("eye") + materials.append(shaderfunc_to_material(shader_eye)) + assign_material(body_parts + extras, materials) + + for part_obj in body_parts: + material_indices = read_material_index(part_obj) + for i, tag_name in enumerate(tag_list): + if 
f"tag_{tag_name}" in part_obj.data.attributes.keys(): + part_obj.data.attributes.active = part_obj.data.attributes[ + f"tag_{tag_name}" + ] + with butil.SelectObjects(part_obj): + bpy.ops.geometry.attribute_convert(domain="FACE") + has_tag = read_attr_data(part_obj, f"tag_{tag_name}", "FACE") + material_indices[np.nonzero(has_tag)[0]] = i + write_material_index(part_obj, material_indices) + for extra in extras: + material_indices = read_material_index(extra) + material_indices.fill(tag_list.index("claw")) + write_material_index(extra, material_indices) + + +def animate_crustacean_move(arma, params): + groups = defaultdict(list) + for bone in arma.pose.bones.values(): + groups[(bone.bone["factory_class"], bone.bone["index"])].append(bone) + for (factory_name, part_id), bones in groups.items(): + eval(factory_name).animate_bones(arma, bones, params) + + +@gin.configurable +class CrustaceanFactory(AssetFactory): + max_expected_radius = 1 + max_distance = 40 + + def __init__(self, factory_seed, coarse=False, **_): + super().__init__(factory_seed, coarse) + with FixedSeed(factory_seed): + self.species_params = { + "lobster": self.lobster_params, + "crab": self.crab_params, + "spiny_lobster": self.spiny_lobster_params, + } + self.species = np.random.choice(list(self.species_params.keys())) + + def create_asset(self, i, animate=True, rigging=True, cloth=False, **kwargs): + genome = crustacean_genome(self.species_params[self.species]()) + root, parts = genome_to_creature( + genome, name=f"crustacean({self.factory_seed}, {i})" + ) + for p in parts: + if p.obj.name.split("=")[-1] == "CrustaceanEyeFactor": + assign_material(p.obj, shaderfunc_to_material(shader_eye)) + joined, extras, arma, ik_targets = join_and_rig_parts( + root, + parts, + genome, + postprocess_func=crustacean_postprocessing, + rigging=rigging, + min_remesh_size=0.005, + face_size=kwargs["face_size"], + roll="GLOBAL_POS_Z", + ) + if animate and arma is not None: + animate_crustacean_move(arma, genome.postprocess_params["anim"]) + else: + butil.join_objects([joined] + extras) + return root + + def crab_params(self): + base_leg_curl = uniform(-np.pi * 0.15, np.pi * 0.15) + return { + "body_fn": CrabBodyFactory, + "leg_fn": CrabLegFactory, + "claw_fn": CrabClawFactory, + "tail_fn": None, + "antenna_fn": None, + "fin_fn": None, + "leg_x_length": lambda p: p["y_length"] * log_uniform(2.0, 3.0), + "claw_x_length": lambda p: p["y_length"] * log_uniform(1.5, 1.8), + "tail_x_length": lambda p: 0, + "antenna_x_length": lambda p: 0, + "fin_x_length": lambda p: 0, + "x_legs": ( + np.linspace(uniform(0.08, 0.1), uniform(0.55, 0.6), n_limbs) + + np.arange(n_limbs) * 0.02 + )[::-1], + "leg_angle": uniform(0.42, 0.44), + "leg_joint": ( + np.sort(uniform(-5, 5, n_legs))[:: 1 if uniform(0, 1) > 0.5 else -1], + np.sort(uniform(0, 10, n_legs)), + np.sort(uniform(65, 105, n_legs) + uniform(-8, 8)) + + np.arange(n_legs) * 2, + ), + "x_claw_offset": uniform(0.08, 0.1), + "claw_angle": uniform(0.44, 0.46), + "claw_joint": (uniform(-50, -40), uniform(-20, 20), uniform(10, 20)), + "x_eye": uniform(0.92, 0.96), + "eye_angle": uniform(0.8, 0.85), + "eye_joint": (0, uniform(-60, -0), uniform(10, 70)), + "x_antenna": 0, + "antenna_angle": 0, + "antenna_joint": (0, 0, 0), + "x_fins": 0, + "fin_joints": ([0] * n_side_fin, [0] * n_side_fin, [0] * n_side_fin), + "leg_rot": (uniform(np.pi * 0.8, np.pi * 1.1), 0, 0), + "leg_curl": ( + (-np.pi * 1.1, -np.pi * 0.7), + 0, + (base_leg_curl - np.pi * 0.02, base_leg_curl + np.pi * 0.02), + ), + "claw_curl": ((-np.pi * 0.2, 
np.pi * 0.1), 0, (-np.pi * 0.1, np.pi * 0.1)), + "claw_lower_curl": ((-np.pi * 0.1, np.pi * 0.1), 0, 0), + "tail_curl": (0, 0, 0), + "antenna_curl": (0, 0, 0), + "base_hue": build_base_hue(), + "freq": 1 / log_uniform(100, 200), + } + + def lobster_params(self): + base_leg_curl = uniform(-np.pi * 0.4, np.pi * 0.4) + return { + "body_fn": LobsterBodyFactory, + "leg_fn": LobsterLegFactory, + "claw_fn": LobsterClawFactory, + "tail_fn": CrustaceanTailFactory, + "antenna_fn": LobsterAntennaFactory, + "fin_fn": CrustaceanFinFactory, + "leg_x_length": lambda p: p["x_length"] * log_uniform(0.6, 0.8), + "claw_x_length": lambda p: p["x_length"] * log_uniform(1.2, 1.5), + "tail_x_length": lambda p: p["x_length"] * log_uniform(1.2, 1.8), + "antenna_x_length": lambda p: p["x_length"] * log_uniform(1.6, 3.0), + "fin_x_length": lambda p: p["y_length"] * log_uniform(1.2, 2.5), + "x_legs": ( + np.linspace(0.05, uniform(0.2, 0.25), n_limbs) + + np.arange(n_limbs) * 0.02 + )[::-1], + "leg_angle": uniform(0.3, 0.35), + "leg_joint": ( + uniform(-5, 5, n_legs), + uniform(0, 10, n_legs), + np.sort(uniform(95, 110, n_legs) + uniform(-8, 8)), + ), + "x_claw_offset": uniform(0.08, 0.1), + "claw_angle": uniform(0.4, 0.5), + "claw_joint": (uniform(-80, -70), uniform(-10, 10), uniform(10, 20)), + "x_eye": uniform(0.8, 0.88), + "eye_angle": uniform(0.8, 0.85), + "eye_joint": (0, uniform(-60, -0), uniform(10, 70)), + "x_antenna": uniform(0.76, 0.8), + "antenna_angle": uniform(0.6, 0.7), + "antenna_joint": (uniform(70, 110), uniform(-40, -30), uniform(20, 40)), + "x_fins": np.sort(uniform(0.85, 0.95, n_side_fin)), + "fin_joints": ( + np.sort(uniform(0, 30, n_side_fin))[ + :: 1 if uniform(0, 1) < 0.5 else -1 + ], + [0] * n_side_fin, + np.sort(uniform(10, 30, n_side_fin)), + ), + "leg_rot": (uniform(np.pi * 0.8, np.pi * 1.1), 0, 0), + "leg_curl": ( + (-np.pi * 1.1, -np.pi * 0.7), + 0, + (base_leg_curl - np.pi * 0.02, base_leg_curl + np.pi * 0.02), + ), + "claw_curl": ((-np.pi * 0.1, np.pi * 0.2), 0, 0), + "claw_lower_curl": ((-np.pi * 0.1, np.pi * 0.1), 0, 0), + "tail_curl": ((-np.pi * 0.6, 0), 0, 0), + "antenna_curl": ((np.pi * 0.1, np.pi * 0.3), 0, (0, np.pi * 0.8)), + "base_hue": build_base_hue(), + "freq": 1 / log_uniform(400, 500), + } + + def spiny_lobster_params(self): + lobster_params = self.lobster_params() + leg_joint_x, leg_joint_y, leg_joint_z = lobster_params["leg_joint"] + leg_joint_z_min = np.min(leg_joint_z) + uniform(-10, -5) + return { + **lobster_params, + "antenna_fn": SpinyLobsterAntennaFactory, + "claw_fn": LobsterLegFactory, + "claw_x_length": lobster_params["leg_x_length"], + "claw_angle": lobster_params["leg_angle"], + "claw_joint": (uniform(10, 40), uniform(0, 10), leg_joint_z_min), + "x_antenna": uniform(0.7, 0.75), + "antenna_angle": uniform(0.4, 0.5), + } + + +@gin.configurable +class CrabFactory(CrustaceanFactory): + def __init__(self, factory_seed, coarse=False, **_): + super().__init__(factory_seed, coarse) + self.species = "crab" + + +@gin.configurable +class LobsterFactory(CrustaceanFactory): + def __init__(self, factory_seed, coarse=False, **_): + super().__init__(factory_seed, coarse) + self.species = "lobster" + + +@gin.configurable +class SpinyLobsterFactory(CrustaceanFactory): + def __init__(self, factory_seed, coarse=False, **_): + super().__init__(factory_seed, coarse) + self.species = "spiny_lobster" diff --git a/infinigen/assets/objects/creatures/fish.py b/infinigen/assets/objects/creatures/fish.py new file mode 100644 index 000000000..b24985ff7 --- /dev/null +++ 
b/infinigen/assets/objects/creatures/fish.py @@ -0,0 +1,414 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: +# - Alexander Raistrick: FishSchoolFactory, basic version of FishFactory, anim & simulation +# - Mingzhe Wang: Fin placement + + +from collections import defaultdict + +import bpy +import gin +import numpy as np +from numpy.random import normal as N +from numpy.random import randint +from numpy.random import uniform as U + +import infinigen.assets.materials.fishbody +import infinigen.assets.materials.scale +from infinigen.assets.materials import fish_eye_shader, fishfin +from infinigen.assets.materials.utils.surface_utils import sample_range +from infinigen.assets.objects.creatures import parts +from infinigen.assets.objects.creatures.util import cloth_sim, creature, genome, joining +from infinigen.assets.objects.creatures.util.animation.driver_wiggle import ( + animate_wiggle_bones, +) +from infinigen.assets.objects.creatures.util.boid_swarm import BoidSwarmFactory +from infinigen.assets.objects.creatures.util.creature_util import offset_center +from infinigen.assets.objects.creatures.util.genome import Joint +from infinigen.core import surface +from infinigen.core.placement.factory import AssetFactory, make_asset_collection +from infinigen.core.tagging import tag_object +from infinigen.core.util import blender as butil +from infinigen.core.util.math import FixedSeed, clip_gaussian + + +def fin_params(scale=(1, 1, 1), dorsal=False): + # scale = np.array((0.2, 1, 0.4)) * np.array((l / l_mean, 1, rad/r_mean)) * np.array(scale) + noise = np.array( + (clip_gaussian(1, 0.1, 0.8, 1.2), 1, 0.8 * clip_gaussian(1, 0.1, 0.8, 1.2)) + ) + scale *= noise + scale = scale.astype(np.float32) + if dorsal: + # if U() < 0.8: + # for dorsal fins, change the shape via RoundWeight + RoundWeight = sample_range(0.8, 1) + RoundingWeight = 1 + # else: + # RoundWeight = sample_range(0.4, 0.5) + # RoundingWeight = sample_range(0.04, 0.06) + AffineZ = sample_range(0, 0.1) + OffsetWeightZ = sample_range(0.6, 1) + OffsetWeightY = 1 + Freq = U(100, 150) + else: + RoundWeight = 1 + RoundingWeight = sample_range(0.02, 0.07) + AffineZ = sample_range(0.8, 1.2) + OffsetWeightZ = sample_range(0.05, 0.2) + OffsetWeightY = sample_range(0.2, 1) + Freq = U(60, 80) + + return { + "FinScale": scale, + "RoundWeight": RoundWeight, + "RoundingWeight": RoundingWeight, + "AffineZ": AffineZ, + "OffsetWeightZ": OffsetWeightZ, + "OffsetWeightY": OffsetWeightY, + "Freq": Freq, + } + + +def fish_postprocessing(body_parts, extras, params): + def get_extras(k): + return [o for o in extras if k in o.name] + + main_template = surface.registry.sample_registry(params["surface_registry"]) + main_template.apply(body_parts + get_extras("BodyExtra")) + + mat = body_parts[0].active_material + gold = mat is not None and "gold" in mat.name + body_parts[0].active_material.name.lower() or U() < 0.1 + fishfin.apply(get_extras("Fin"), shader_kwargs={"goldfish": gold}) + + fish_eye_shader.apply(get_extras("Eyeball")) + # eyeball.apply(get_extras('Eyeball'), shader_kwargs={"coord": "X"}) + + +def fish_fin_cloth_sim_params(): + res = dict( + compression_stiffness=1200, + tension_stiffness=1200, + shear_stiffness=1200, + bending_stiffness=3000, + tension_damping=100, + compression_damping=100, + shear_damping=100, + bending_damping=100, + air_damping=5, + mass=0.3, + ) + + for k, v in res.items(): + res[k] = 
clip_gaussian(1, 0.2, 0.2, 3) * v + + return res + + +def fish_genome(): + temp_dict = defaultdict( + lambda: 0.1, {"body_fish_eel": 0.01, "body_fish_puffer": 0.001} + ) + body = genome.part( + parts.generic_nurbs.NurbsBody( + prefix="body_fish", + tags=["body"], + var=U(0.3, 1), + temperature=temp_dict, + shoulder_ik_ts=[0.0, 0.3, 0.6, 1.0], + n_bones=15, + rig_reverse_skeleton=True, + ) + ) + + if U() < 0.9: + n_dorsal = 1 # if U() < 0.6 else randint(1, 4) + coord = (U(0.3, 0.45), 1, 0.7) + for i in range(n_dorsal): + dorsal_fin = parts.ridged_fin.FishFin( + fin_params((U(0.4, 0.6), 0.5, 0.2), dorsal=True), rig=False + ) + genome.attach( + genome.part(dorsal_fin), + body, + coord=coord, + joint=Joint(rest=(0, -100, 0)), + ) + + def rot(r): + return np.array((20, r, -205)) + N(0, 7, 3) + + if U() < 0.8: + pectoral_fin = parts.ridged_fin.FishFin(fin_params((0.1, 0.5, 0.3))) + coord = (U(0.65, 0.8), U(55, 65) / 180, 0.9) + for side in [-1, 1]: + genome.attach( + genome.part(pectoral_fin), + body, + coord=coord, + joint=Joint(rest=rot(-13)), + side=side, + ) + + if U() < 0.8: + pelvic_fin = parts.ridged_fin.FishFin(fin_params((0.08, 0.5, 0.25))) + coord = (U(0.5, 0.65), U(8, 15) / 180, 0.8) + for side in [-1, 1]: + genome.attach( + genome.part(pelvic_fin), + body, + coord=coord, + joint=Joint(rest=rot(28)), + side=side, + ) + + if U() < 0.8: + hind_fin = parts.ridged_fin.FishFin(fin_params((0.1, 0.5, 0.3))) + coord = (U(0.2, 0.3), N(36, 5) / 180, 0.9) + for side in [-1, 1]: + genome.attach( + genome.part(hind_fin), + body, + coord=coord, + joint=Joint(rest=rot(28)), + side=side, + ) + + angle = U(140, 170) + tail_fin = parts.ridged_fin.FishFin(fin_params((0.12, 0.5, 0.35)), rig=False) + for vdir in [-1, 1]: + genome.attach( + genome.part(tail_fin), + body, + coord=(0.05, 0, 0), + joint=Joint((0, -angle * vdir, 0)), + ) + + eye_fac = parts.eye.MammalEye({"Eyelids": False, "Radius": N(0.036, 0.01)}) + coord = (0.9, 0.6, 0.9) + for side in [-1, 1]: + genome.attach( + genome.part(eye_fac), + body, + coord=coord, + joint=Joint(rest=(0, 0, 0)), + side=side, + rotation_basis="normal", + ) + + if U() < 0: + jaw = genome.part( + parts.head.CarnivoreJaw({"length_rad1_rad2": (0.2, 0.1, 0.06)}) + ) + genome.attach( + jaw, + body, + coord=(0.8, 0, 0.7), + joint=Joint(rest=(0, U(-30, -80), 0)), + rotation_basis="normal", + ) + + return genome.CreatureGenome( + parts=body, + postprocess_params=dict( + cloth=fish_fin_cloth_sim_params(), + anim=fish_swim_params(), + surface_registry=[ + (infinigen.assets.materials.fishbody, 3), + # (infinigen.assets.materials.scale, 1), + ], + ), + ) + + +def fish_swim_params(): + swim_freq = 3 * clip_gaussian(1, 0.3, 0.1, 2) + swim_mag = N(20, 3) + return dict( + swim_mag=swim_mag, + swim_freq=swim_freq, + flipper_freq=3 * clip_gaussian(1, 0.5, 0.1, 3) * swim_freq, + flipper_mag=0.35 * N(1, 0.1) * swim_mag, + flipper_var=U(0, 0.2), + ) + + +def animate_fish_swim(arma, params): + spine = [b for b in arma.pose.bones if "Body" in b.name] + fin_bones = [b for b in arma.pose.bones if "extra_bone(Fin" in b.name] + + global_offset = U(0, 1000) # so swimming animations dont sync across fish + animate_wiggle_bones( + arma=arma, + bones=spine, + off=global_offset, + mag_deg=params["swim_mag"], + freq=params["swim_freq"], + wavelength=U(0.5, 2), + ) + v = params["flipper_var"] + for b in fin_bones: + animate_wiggle_bones( + arma=arma, + bones=[b], + off=global_offset + U(0, 1), + mag_deg=params["flipper_mag"] * N(1, v), + freq=params["flipper_mag"] * N(1, v), + ) + + +def 
simulate_fish_cloth(joined, extras, cloth_params, rigidity="cloth_pin_rigidity"): + for e in [joined] + extras: + assert e.type == "MESH" + if "Fin" in e.name: + assert rigidity in e.data.attributes + else: + surface.write_attribute( + joined, lambda nw: 1, data_type="FLOAT", name=rigidity, apply=True + ) + joined = butil.join_objects([joined] + extras) + + cloth_sim.bake_cloth( + joined, settings=cloth_params, attributes=dict(vertex_group_mass=rigidity) + ) + + return joined + + +@gin.configurable +class FishFactory(AssetFactory): + max_distance = 40 + + def __init__( + self, + factory_seed=None, + bvh=None, + coarse=False, + animation_mode=None, + species_variety=None, + clothsim_skin: bool = False, + **_, + ): + super().__init__(factory_seed, coarse) + self.bvh = bvh + self.animation_mode = animation_mode + self.clothsim_skin = clothsim_skin + + with FixedSeed(factory_seed): + self.species_genome = fish_genome() + self.species_variety = ( + species_variety + if species_variety is not None + else clip_gaussian(0.2, 0.1, 0.05, 0.45) + ) + + def create_asset(self, i, **kwargs): + instance_genome = genome.interp_genome( + self.species_genome, fish_genome(), self.species_variety + ) + + root, parts = creature.genome_to_creature( + instance_genome, name=f"fish({self.factory_seed}, {i})" + ) + offset_center(root, x=True, z=False) + + # Force material consistency across a whole species of fish + # TODO: Replace once Generator class is stnadardized + def seeded_fish_postprocess(*args, **kwargs): + with FixedSeed(self.factory_seed): + fish_postprocessing(*args, **kwargs) + + joined, extras, arma, ik_targets = joining.join_and_rig_parts( + root, + parts, + instance_genome, + rigging=(self.animation_mode is not None), + rig_before_subdiv=True, + postprocess_func=seeded_fish_postprocess, + adapt_mode="subdivide", + **kwargs, + ) + if self.animation_mode is not None and arma is not None: + if self.animation_mode == "idle" or self.animation_mode == "roam": + animate_fish_swim(arma, instance_genome.postprocess_params["anim"]) + else: + raise ValueError(f"Unrecognized {self.animation_mode=}") + + if self.clothsim_skin: + joined = simulate_fish_cloth( + joined, extras, instance_genome.postprocess_params["cloth"] + ) + else: + joined = butil.join_objects([joined] + extras) + joined.parent = root + + tag_object(root, "fish") + + return root + + +class FishSchoolFactory(BoidSwarmFactory): + @gin.configurable + def fish_school_params(self): + boids_settings = dict( + use_flight=True, + use_land=False, + use_climb=False, + rules=[ + dict(type="SEPARATE"), + dict(type="GOAL"), + dict(type="FLOCK"), + ], + air_speed_max=U(5, 10), + air_acc_max=U(0.7, 1), + air_personal_space=U(0.15, 2), + bank=0, # fish dont tip over / roll + pitch=0.4, # + rule_fuzzy=U(0.6, 0.9), + ) + + return dict( + particle_size=U(0.3, 1), + size_random=U(0.1, 0.7), + use_rotation_instance=True, + lifetime=bpy.context.scene.frame_end - bpy.context.scene.frame_start, + warmup_frames=1, + emit_duration=0, # all particles appear immediately + emit_from="VOLUME", + mass=2, + use_multiply_size_mass=True, + effect_gravity=0, + boids_settings=boids_settings, + ) + + def __init__(self, factory_seed, bvh=None, coarse=False): + with FixedSeed(factory_seed): + settings = self.fish_school_params() + col = make_asset_collection( + FishFactory(factory_seed=randint(1e7), animation_mode="idle"), n=3 + ) + super().__init__( + factory_seed, + child_col=col, + collider_col=bpy.data.collections.get("colliders"), + settings=settings, + bvh=bvh, + 
volume=("uniform", 3, 10), + coarse=coarse, + ) + + +if __name__ == "__main__": + import os + + for i in range(3): + factory = FishFactory(i) + root = factory.create_asset(i) + root.location[0] = i * 3 + + bpy.ops.wm.save_as_mainfile( + filepath=os.path.join(os.path.abspath(os.curdir), "dev_fish5.blend") + ) diff --git a/infinigen/assets/objects/creatures/herbivore.py b/infinigen/assets/objects/creatures/herbivore.py new file mode 100644 index 000000000..8a8db083c --- /dev/null +++ b/infinigen/assets/objects/creatures/herbivore.py @@ -0,0 +1,340 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Alexander Raistrick + + +from collections import defaultdict + +import gin +import mathutils +import numpy as np +from numpy.random import normal as N +from numpy.random import uniform as U + +import infinigen.assets.materials.giraffe_attr +import infinigen.assets.materials.reptile_brown_circle_attr +import infinigen.assets.materials.reptile_gray_attr +import infinigen.assets.materials.spot_sparse_attr +import infinigen.assets.materials.tiger_attr +from infinigen.assets.materials import bone, eyeball, horn, nose, tongue +from infinigen.assets.objects.creatures import parts +from infinigen.assets.objects.creatures.util import cloth_sim, creature, genome, joining +from infinigen.assets.objects.creatures.util import hair as creature_hair +from infinigen.assets.objects.creatures.util.animation import idle, run_cycle +from infinigen.assets.objects.creatures.util.creature_util import offset_center +from infinigen.assets.objects.creatures.util.genome import Joint +from infinigen.core import surface +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.util import blender as butil +from infinigen.core.util.math import clip_gaussian + + +def herbivore_hair(): + mat_roughness = U(0.5, 0.9) + + puff = U(0.14, 0.4) + length = clip_gaussian(0.035, 0.03, 0.01, 0.1) + + return { + "density": 500000, + "clump_n": np.random.randint(10, 300), + "avoid_features_dist": 0.06, + "grooming": { + "Length MinMaxScale": np.array( + (length, length * U(1.5, 4), U(15, 60)), dtype=np.float32 + ), + "Puff MinMaxScale": np.array( + (puff, U(0.5, 1.3), U(15, 60)), dtype=np.float32 + ), + "Combing": U(0.5, 1), + "Strand Random Mag": U(0, 0.003) if U() < 0.5 else 0, + "Strand Perlin Mag": U(0, 0.006), + "Strand Perlin Scale": U(15, 45), + "Tuft Spread": N(0.06, 0.025), + "Tuft Clumping": U(0.7, 0.95), + "Root Radius": 0.0025, + "Post Clump Noise Mag": 0.001 * N(1, 0.15), + "Hair Length Pct Min": U(0.5, 0.9), + }, + "material": { + "Roughness": mat_roughness, + "Radial Roughness": mat_roughness + N(0, 0.07), + "Random Roughness": 0, + "IOR": 1.55, + }, + } + + +def herbivore_postprocessing(body_parts, extras, params): + def get_extras(k): + return [o for o in extras if k in o.name] + + main_template = surface.registry.sample_registry(params["surface_registry"]) + main_template.apply(body_parts + get_extras("BodyExtra")) + + tongue.apply(get_extras("Tongue")) + bone.apply(get_extras("Teeth") + get_extras("Claws")) + horn.apply(get_extras("Horn")) + eyeball.apply(get_extras("Eyeball"), shader_kwargs={"coord": "X"}) + nose.apply(get_extras("Nose")) + + +def herbivore_genome(): + temp_dict = defaultdict( + lambda: 0.2, {"body_herbivore_giraffe": 0.02, "body_herbivore_llama": 0.1} + ) + body = genome.part( + parts.generic_nurbs.NurbsBody( + prefix="body_herbivore", 
tags=["body"], var=1, temperature=temp_dict + ) + ) + + neck_t = 0.67 + shoulder_bounds = np.array([[-20, -20, -20], [20, 20, 20]]) + splay = clip_gaussian(130, 7, 90, 130) / 180 + shoulder_t = clip_gaussian(0.1, 0.05, 0.05, 0.2) + params = { + "length_rad1_rad2": np.array((1.8, 0.1, 0.05)) * N(1, (0.1, 0.05, 0.05), 3) + } + + leg_rest = (0, 90, 0) # (0, 90, 0) + foot_rest = (0, -90, 0) + foot_fac = parts.hoof.HoofAnkle() + claw_fac = parts.hoof.HoofClaw() + backleg_fac = parts.leg.QuadrupedBackLeg(params=params) + frontleg_fac = parts.leg.QuadrupedFrontLeg(params=params) + + if U() < 0.15: + lenscale = U(1, 1.3) + backleg_fac.params["length_rad1_rad2"][0] *= lenscale + frontleg_fac.params["length_rad1_rad2"][0] *= lenscale + + for side in [-1, 1]: + # foot = genome.part(claw_fac) + foot = genome.attach( + genome.part(claw_fac), + genome.part(foot_fac), + coord=(0.7, -1, 0), + joint=Joint(rest=(0, 90, 0)), + rotation_basis="global", + ) + back_leg = genome.attach( + foot, + genome.part(backleg_fac), + coord=(0.95, 1, 0.2), + joint=Joint(rest=foot_rest), + rotation_basis="global", + ) + genome.attach( + back_leg, + body, + coord=(shoulder_t, splay, 1), + joint=Joint(rest=leg_rest, bounds=shoulder_bounds), + rotation_basis="global", + side=side, + ) + + for side in [-1, 1]: + # foot = genome.part(claw_fac) + foot = genome.attach( + genome.part(claw_fac), + genome.part(foot_fac), + coord=(0.7, 1, 0), + joint=Joint(rest=(0, 90, 0)), + rotation_basis="normal", + ) + front_leg = genome.attach( + foot, + genome.part(frontleg_fac), + coord=(0.95, 0, 0.5), + joint=Joint(rest=(0, -70, 0)), + ) + genome.attach( + front_leg, + body, + coord=(neck_t - shoulder_t, splay + 0 / 180, 0.9), + joint=Joint(rest=leg_rest), + rotation_basis="global", + side=side, + ) + + temp_dict = defaultdict(lambda: 0.2, {"body_herbivore_giraffe": 0.02}) + head_fac = parts.generic_nurbs.NurbsHead( + prefix="head_herbivore", tags=["head"], var=0.5, temperature=temp_dict + ) + head = genome.part(head_fac) + + eye_fac = parts.eye.MammalEye({"Radius": N(0.035, 0.01)}) + eye_t, splay = U(0.34, 0.45), U(80, 140) / 180 + r = U(0.7, 0.9) + rot = np.array([0, 0, 0]) + for side in [-1, 1]: + eye = genome.part(eye_fac) + genome.attach( + eye, + head, + coord=(eye_t, splay, r), + joint=Joint(rest=rot), + rotation_basis="normal", + side=side, + ) + + jaw = genome.part( + parts.head.CarnivoreJaw( + { + "length_rad1_rad2": (0.6 * head_fac.params["length"], 0.12, 0.08), + "Canine Length": 0, + } + ) + ) + genome.attach( + jaw, + head, + coord=(0.25 * N(1, 0.1), 0, 0.35 * N(1, 0.1)), + joint=Joint(rest=(0, 10 * N(1, 0.1), 0)), + ) + + if U() < 0.7: + nose = genome.part(parts.head_detail.CatNose()) + genome.attach(nose, head, coord=(0.95, 1, 0.45), joint=Joint(rest=(0, 20, 0))) + + t, splay = U(0.15, eye_t - 0.07), N(125, 15) / 180 + ear_fac = parts.head_detail.CatEar({}) + ear_fac.params["length_rad1_rad2"] *= N(1.2, 0.1, 3) + rot = np.array([0, -10, -23]) * N(1, 0.1, 3) + for side in [-1, 1]: + ear = genome.part(ear_fac) + genome.attach( + ear, + head, + coord=(t, splay, 1), + joint=Joint(rest=rot), + rotation_basis="normal", + side=side, + ) + + if U() < 0.7: + horn_fac = parts.horn.Horn() + horn_fac.params["length"] *= U(0.1, 2) + horn_fac.params["rad1"] *= U(0.07, 1.5) + horn_fac.params["rad2"] *= U(0.07, 1.5) + t, splay = U(0.25, t), U(splay + 20 / 180, 130 / 180) + rot = np.array([U(-40, 0), 0, N(120, 10)]) + for side in [-1, 1]: + horn = genome.part(horn_fac) + genome.attach( + horn, + head, + coord=(t, splay, 0.5), + 
joint=Joint(rest=rot), + rotation_basis="global", + side=side, + ) + elif U() < 0: + horn_fac = parts.horn.Horn() + horn_fac.params["length"] *= U(0.3, 1) + horn_fac.params["rotation_x"] = 0 + horn = genome.part(horn_fac) + genome.attach( + horn, + head, + coord=(U(0.3, 0.9), 1, 0.6), + joint=Joint(rest=(0, -90, -90)), + rotation_basis="global", + ) + + genome.attach(head, body, coord=(0.97, 0, 0.2), joint=Joint(rest=(0, 20, 0))) + + if U() < 1: + hair = herbivore_hair() + registry = [ + (infinigen.assets.materials.giraffe_attr, 1), + (infinigen.assets.materials.spot_sparse_attr, 3), + ] + else: + hair = None + registry = [ + (infinigen.assets.materials.reptile_brown_circle_attr, 1), + (infinigen.assets.materials.reptile_gray_attr, 1), + ] + + return genome.CreatureGenome( + parts=body, + postprocess_params=dict(animation=dict(), hair=hair, surface_registry=registry), + ) + + +@gin.configurable +class HerbivoreFactory(AssetFactory): + max_distance = 40 + + def __init__( + self, + factory_seed=None, + bvh: mathutils.bvhtree.BVHTree = None, + coarse: bool = False, + animation_mode: str = None, + hair: bool = True, + clothsim_skin: bool = False, + **kwargs, + ): + super().__init__(factory_seed, coarse) + self.bvh = bvh + self.animation_mode = animation_mode + self.hair = hair + self.clothsim_skin = clothsim_skin + + if self.hair and (self.animation_mode is not None or self.clothsim_skin): + raise NotImplementedError( + "Dynamic hair is not yet fully working. " + "Please disable either hair or both of animation/clothsim" + ) + + def create_placeholder(self, **kwargs): + return butil.spawn_cube(size=4) + + def create_asset(self, i, placeholder, **kwargs): + genome = herbivore_genome() + root, parts = creature.genome_to_creature( + genome, name=f"herbivore({self.factory_seed}, {i})" + ) + # tag_object(root, 'herbivore') + offset_center(root) + + dynamic = self.animation_mode is not None + + joined, extras, arma, ik_targets = joining.join_and_rig_parts( + root, + parts, + genome, + rigging=dynamic, + postprocess_func=herbivore_postprocessing, + **kwargs, + ) + + butil.parent_to(root, placeholder, no_inverse=True) + + if self.hair: + creature_hair.configure_hair( + joined, root, genome.postprocess_params["hair"] + ) + if dynamic: + if self.animation_mode == "run": + run_cycle.animate_run(root, arma, ik_targets) + elif self.animation_mode == "idle": + idle.snap_iks_to_floor(ik_targets, self.bvh) + idle.idle_body_noise_drivers(ik_targets) + else: + raise ValueError(f"Unrecognized mode {self.animation_mode=}") + if self.clothsim_skin: + rigidity = surface.write_vertex_group( + joined, cloth_sim.local_pos_rigity_mask, apply=True + ) + cloth_sim.bake_cloth( + joined, + genome.postprocess_params["skin"], + attributes=dict(vertex_group_mass=rigidity), + ) + + return root diff --git a/infinigen/assets/objects/creatures/insects/__init__.py b/infinigen/assets/objects/creatures/insects/__init__.py new file mode 100644 index 000000000..5ec870676 --- /dev/null +++ b/infinigen/assets/objects/creatures/insects/__init__.py @@ -0,0 +1 @@ +from .dragonfly import DragonflyFactory diff --git a/infinigen/assets/objects/creatures/insects/dragonfly.py b/infinigen/assets/objects/creatures/insects/dragonfly.py new file mode 100644 index 000000000..bc76a2593 --- /dev/null +++ b/infinigen/assets/objects/creatures/insects/dragonfly.py @@ -0,0 +1,531 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
+ +# Authors: Yiming Zuo + + +import bpy +import gin +import numpy as np +from numpy.random import normal as N +from numpy.random import uniform as U + +from infinigen.core import surface +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.placement import animation_policy +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.util import blender as butil +from infinigen.core.util.color import hsv2rgba +from infinigen.core.util.math import FixedSeed + +from .parts.body.dragonfly_body import nodegroup_dragonfly_body +from .parts.head.dragonfly_head import nodegroup_dragon_fly_head +from .parts.leg.dragonfly_leg import nodegroup_dragonfly_leg, nodegroup_leg_control +from .parts.tail.dragonfly_tail import nodegroup_dragonfly_tail +from .parts.wing.dragonfly_wing import nodegroup_dragonfly_wing +from .utils.geom_utils import nodegroup_symmetric_clone + + +def geometry_dragonfly(nw: NodeWrangler, **kwargs): + # Code generated using version 2.4.3 of the node_transpiler + value_head_scale = nw.new_node(Nodes.Value) + value_head_scale.outputs[0].default_value = kwargs["Head Scale"] + + dragonflyhead = nw.new_node( + nodegroup_dragon_fly_head( + base_color=kwargs["Base Color"], + eye_color=kwargs["Eye Color"], + v=kwargs["V"], + ).name + ) + + combine_xyz_8 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": kwargs["Head Roll"], "Y": kwargs["Head Pitch"], "Z": 1.5708}, + ) + + transform_8 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": dragonflyhead, + "Translation": (0.0, -0.3, 0.0), + "Rotation": combine_xyz_8, + "Scale": value_head_scale, + }, + ) + + transform_13 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": transform_8, "Scale": (1.1, 1.0, 1.0)}, + ) + + dragonflybody = nw.new_node( + nodegroup_dragonfly_body(base_color=kwargs["Body Color"], v=kwargs["V"]).name, + input_kwargs={ + "Body Length": kwargs["Body Length"], + "Random Seed": kwargs["Body Seed"], + }, + ) + + store_named_attribute = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": dragonflybody.outputs["Geometry"], + "Name": "spline parameter", + "Value": dragonflybody.outputs["spline parameter"], + }, + ) + + store_named_attribute_1 = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": store_named_attribute, + "Name": "body seed", + "Value": kwargs["Body Seed"], + }, + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": store_named_attribute_1, + "Rotation": (1.5708, 0.0, 0.0), + }, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: kwargs["Tail Length"]}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: kwargs["Tail Tip Z"], 1: -0.5}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": multiply, "Z": multiply_1} + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": kwargs["Tail Length"], "Z": kwargs["Tail Tip Z"]}, + ) + + dragonflytail = nw.new_node( + nodegroup_dragonfly_tail( + base_color=kwargs["Base Color"], + v=kwargs["V"], + ring_length=kwargs["Ring Length"], + ).name, + input_kwargs={ + "Middle": combine_xyz_1, + "End": combine_xyz, + "Segment Length": 0.38, + "Random Seed": kwargs["Tail Seed"], + "Radius": kwargs["Tail Radius"], + }, + ) + + value = nw.new_node(Nodes.Value) + value.outputs[0].default_value = 10.0 + + transform_1 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": dragonflytail, + 
"Translation": (0.0, -10.2, 0.0), + "Rotation": (0.0, 0.0, -1.5708), + "Scale": value, + }, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [transform, transform_1]} + ) + + nodegroup = nw.new_node( + nodegroup_leg_control().name, + input_kwargs={"Openness": kwargs["Leg Openness 3"]}, + ) + + dragonflyleg = nw.new_node( + nodegroup_dragonfly_leg().name, + input_kwargs={ + "Rot claw": 0.18, + "Rot Tarsus": nodegroup.outputs["Tarsus"], + "Rot Femur": nodegroup.outputs["Femur"], + }, + ) + + value_leg_scale = nw.new_node(Nodes.Value) + value_leg_scale.outputs[0].default_value = kwargs["Leg Scale"] + + transform_15 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": dragonflyleg, + "Rotation": (0.0, 0.0, -0.5236), + "Scale": value_leg_scale, + }, + ) + + combine_xyz_6 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"Y": nodegroup.outputs["Shoulder"], "Z": -0.5861}, + ) + + transform_2 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": transform_15, + "Translation": (0.38, 0.0, 0.0), + "Rotation": combine_xyz_6, + }, + ) + + symmetric_clone = nw.new_node( + nodegroup_symmetric_clone().name, + input_kwargs={"Geometry": transform_2, "Scale": (-1.0, 1.0, 1.0)}, + ) + + value_1 = nw.new_node(Nodes.Value) + value_1.outputs[0].default_value = 1.2 + + transform_3 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": symmetric_clone.outputs["Both"], + "Translation": (0.0, -4.6, -2.26), + "Scale": value_1, + }, + ) + + nodegroup_1 = nw.new_node( + nodegroup_leg_control().name, + input_kwargs={"Openness": kwargs["Leg Openness 2"]}, + ) + + dragonflyleg_1 = nw.new_node( + nodegroup_dragonfly_leg().name, + input_kwargs={ + "Rot claw": 0.18, + "Rot Tarsus": nodegroup_1.outputs["Tarsus"], + "Rot Femur": nodegroup_1.outputs["Femur"], + }, + ) + + transform_16 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": dragonflyleg_1, + "Rotation": (0.0, 0.0, -0.1745), + "Scale": value_leg_scale, + }, + ) + + combine_xyz_5 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"Y": nodegroup_1.outputs["Shoulder"], "Z": 0.174}, + ) + + transform_5 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": transform_16, + "Translation": (0.38, 0.0, 0.0), + "Rotation": combine_xyz_5, + }, + ) + + symmetric_clone_1 = nw.new_node( + nodegroup_symmetric_clone().name, + input_kwargs={"Geometry": transform_5, "Scale": (-1.0, 1.0, 1.0)}, + ) + + value_2 = nw.new_node(Nodes.Value) + value_2.outputs[0].default_value = 1.18 + + transform_4 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": symmetric_clone_1.outputs["Both"], + "Translation": (0.0, -3.62, -2.26), + "Scale": value_2, + }, + ) + + nodegroup_2 = nw.new_node( + nodegroup_leg_control().name, + input_kwargs={"Openness": kwargs["Leg Openness 1"]}, + ) + + dragonflyleg_2 = nw.new_node( + nodegroup_dragonfly_leg().name, + input_kwargs={ + "Rot claw": 1.0, + "Rot Tarsus": nodegroup_2.outputs["Tarsus"], + "Rot Femur": nodegroup_2.outputs["Femur"], + }, + ) + + transform_14 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": dragonflyleg_2, + "Rotation": (0.0, 0.0, 0.3491), + "Scale": value_leg_scale, + }, + ) + + combine_xyz_4 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"Y": nodegroup_2.outputs["Shoulder"], "Z": 0.663}, + ) + + transform_6 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": transform_14, + "Translation": (0.38, 0.0, 0.0), + "Rotation": combine_xyz_4, + }, + ) + + symmetric_clone_2 = nw.new_node( + 
nodegroup_symmetric_clone().name, + input_kwargs={"Geometry": transform_6, "Scale": (-1.0, 1.0, 1.0)}, + ) + + value_3 = nw.new_node(Nodes.Value) + value_3.outputs[0].default_value = 1.04 + + transform_7 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": symmetric_clone_2.outputs["Both"], + "Translation": (0.0, -2.66, -2.26), + "Scale": value_3, + }, + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={ + "Geometry": [join_geometry, transform_3, transform_4, transform_7] + }, + ) + + join_geometry_2 = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [transform_13, join_geometry_1]} + ) + + dragonflywing = nw.new_node(nodegroup_dragonfly_wing().name) + + scene_time = nw.new_node("GeometryNodeInputSceneTime") + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: scene_time.outputs["Seconds"], + 1: 2 * np.pi * kwargs["Flap Freq"], + }, + attrs={"operation": "MULTIPLY"}, + ) + sine = nw.new_node( + Nodes.Math, input_kwargs={0: multiply_2}, attrs={"operation": "SINE"} + ) + wing_roll = nw.new_node( + Nodes.Math, + input_kwargs={0: sine, 1: kwargs["Flap Mag"]}, + attrs={"operation": "MULTIPLY"}, + ) + + value_wing_yaw = nw.new_node(Nodes.Value) + value_wing_yaw.outputs[0].default_value = kwargs["Wing Yaw"] + + combine_xyz_2 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"Y": wing_roll, "Z": value_wing_yaw} + ) + + value_wing_scale = nw.new_node(Nodes.Value) + value_wing_scale.outputs[0].default_value = kwargs["Wing Scale"] + + transform_9 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": dragonflywing, + "Translation": (0.22, 0.0, 0.0), + "Rotation": combine_xyz_2, + "Scale": value_wing_scale, + }, + ) + + symmetric_clone_3 = nw.new_node( + nodegroup_symmetric_clone().name, + input_kwargs={"Geometry": transform_9, "Scale": (-1.0, 1.0, 1.0)}, + ) + + value_5 = nw.new_node(Nodes.Value) + value_5.outputs[0].default_value = 5.4 + + transform_10 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": symmetric_clone_3.outputs["Both"], + "Translation": (0.0, -2.4, 1.8), + "Scale": value_5, + }, + ) + + dragonflywing_1 = nw.new_node(nodegroup_dragonfly_wing().name) + + add = nw.new_node(Nodes.Math, input_kwargs={0: wing_roll, 1: 0.0524}) + + subtract = nw.new_node( + Nodes.Math, input_kwargs={1: value_wing_yaw}, attrs={"operation": "SUBTRACT"} + ) + + combine_xyz_3 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"Y": add, "Z": subtract} + ) + + transform_12 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": dragonflywing_1, + "Translation": (0.22, 0.0, 0.0), + "Rotation": combine_xyz_3, + "Scale": value_wing_scale, + }, + ) + + symmetric_clone_4 = nw.new_node( + nodegroup_symmetric_clone().name, + input_kwargs={"Geometry": transform_12, "Scale": (-1.0, 1.0, 1.0)}, + ) + + value_6 = nw.new_node(Nodes.Value) + value_6.outputs[0].default_value = 6.0 + + transform_11 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": symmetric_clone_4.outputs["Both"], + "Translation": (0.0, -4.18, 1.8), + "Scale": value_6, + }, + ) + + join_geometry_3 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [join_geometry_2, transform_10, transform_11]}, + ) + + realize_instances = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": join_geometry_3} + ) + + # TODO replace this hacky postprocess transform + result = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": realize_instances, + "Translation": (0.6, 0, 0), # position origin at ~center of dragonfly + "Rotation": (0, 0, 
-np.pi / 2), + "Scale": (kwargs["PostprocessScale"],) * 3, + }, + ) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Geometry": result}) + + +@gin.configurable +class DragonflyFactory(AssetFactory): + def __init__(self, factory_seed, coarse=False, bvh=None, **_): + super(DragonflyFactory, self).__init__(factory_seed, coarse=coarse) + self.bvh = bvh + with FixedSeed(factory_seed): + self.genome = self.sample_geo_genome() + y = U(20, 60) + self.policy = animation_policy.AnimPolicyRandomForwardWalk( + forward_vec=(1, 0, 0), + speed=U(7, 10), + step_range=(0.2, 7), + yaw_dist=("uniform", -y, y), + rot_vars=[0, 0, 0], + ) + + @staticmethod + def sample_geo_genome(): + base_color = np.array((U(0.1, 0.6), 0.9, 0.8)) + base_color[1] += N(0.0, 0.05) + base_color[2] += N(0.0, 0.05) + base_color_rgba = hsv2rgba(base_color) + + eye_color = np.copy(base_color) + eye_color[0] += N(0.0, 0.1) + eye_color[1] += N(0.0, 0.05) + eye_color[2] += N(0.0, 0.05) + eye_color_rgba = hsv2rgba(eye_color) + + body_color = np.copy(base_color) + body_color[0] += N(0.0, 0.1) + body_color[1] += N(0.0, 0.05) + body_color[2] += N(0.0, 0.05) + body_color_rgba = hsv2rgba(body_color) + + return { + "Tail Length": U(2.5, 3.5), + "Tail Tip Z": U(-0.4, 0.3), + "Tail Seed": U(-100, 100), + "Tail Radius": U(0.7, 0.9), + "Body Length": U(8.0, 10.0), + "Body Seed": U(-100, 100), + "Flap Freq": U(20, 50), + "Flap Mag": U(0.15, 0.25), + "Wing Yaw": U(0.43, 0.7), + "Wing Scale": U(0.9, 1.1), + "Leg Scale": U(0.9, 1.1), + "Leg Openness 1": U(0.0, 1.0), + "Leg Openness 2": U(0.0, 1.0), + "Leg Openness 3": U(0.0, 1.0), + "Head Scale": U(1.6, 1.8), + "Head Roll": U(-0.2, 0.2), + "Head Pitch": U(-0.6, 0.6), + "Base Color": base_color_rgba, + "Body Color": body_color_rgba, + "Eye Color": eye_color_rgba, + "V": U(0.0, 0.5), + "Ring Length": U(0.0, 0.3), + "PostprocessScale": 0.015 * N(1, 0.1), + } + + def create_placeholder(self, i, loc, rot): + p = butil.spawn_cube(size=1) + p.location = loc + p.rotation_euler = rot + + if self.bvh is not None: + p.location.z += U(0.5, 2) + animation_policy.animate_trajectory(p, self.bvh, self.policy) + + return p + + def create_asset(self, placeholder, **params): + bpy.ops.mesh.primitive_plane_add( + size=2, + enter_editmode=False, + align="WORLD", + location=(0, 0, 0), + scale=(1, 1, 1), + ) + obj = bpy.context.active_object + + phenome = self.genome.copy() + + surface.add_geomod(obj, geometry_dragonfly, apply=False, input_kwargs=phenome) + obj.parent = placeholder + + return obj diff --git a/infinigen/assets/objects/creatures/insects/parts/antenna/dragonfly_antenna.py b/infinigen/assets/objects/creatures/insects/parts/antenna/dragonfly_antenna.py new file mode 100644 index 000000000..ab56c7da9 --- /dev/null +++ b/infinigen/assets/objects/creatures/insects/parts/antenna/dragonfly_antenna.py @@ -0,0 +1,52 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
+ +# Authors: Yiming Zuo + + +from infinigen.assets.objects.creatures.insects.utils.geom_utils import ( + nodegroup_simple_tube_v2, +) +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler + + +@node_utils.to_nodegroup( + "nodegroup_dragonfly_antenna", singleton=False, type="GeometryNodeTree" +) +def nodegroup_dragonfly_antenna(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVector", "length_rad1_rad2", (1.24, 0.02, 0.01)), + ("NodeSocketVector", "angles_deg", (0.0, -63.9, 31.39)), + ("NodeSocketFloat", "Carapace Rad Pct", 1.4), + ("NodeSocketVector", "spike_length_rad1_rad2", (0.1, 0.025, 0.0)), + ], + ) + + simple_tube_v2 = nw.new_node( + nodegroup_simple_tube_v2().name, + input_kwargs={ + "length_rad1_rad2": group_input.outputs["length_rad1_rad2"], + "angles_deg": group_input.outputs["angles_deg"], + "proportions": (0.2533, 0.3333, -0.2267), + "do_bezier": False, + }, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": simple_tube_v2.outputs["Geometry"]}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": join_geometry, + "Skeleton Curve": simple_tube_v2.outputs["Skeleton Curve"], + "Endpoint": simple_tube_v2.outputs["Endpoint"], + }, + ) diff --git a/infinigen/assets/objects/creatures/insects/parts/body/dragonfly_body.py b/infinigen/assets/objects/creatures/insects/parts/body/dragonfly_body.py new file mode 100644 index 000000000..330f4df9a --- /dev/null +++ b/infinigen/assets/objects/creatures/insects/parts/body/dragonfly_body.py @@ -0,0 +1,420 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
+ +# Authors: Yiming Zuo + + +from infinigen.assets.objects.creatures.insects.parts.hair.principled_hair import ( + nodegroup_principled_hair, +) +from infinigen.assets.objects.creatures.insects.utils.geom_utils import ( + nodegroup_circle_cross_section, + nodegroup_instance_on_points, + nodegroup_random_rotation_scale, + nodegroup_surface_bump, +) +from infinigen.assets.objects.creatures.insects.utils.shader_utils import ( + nodegroup_add_noise, + nodegroup_color_noise, + shader_black_w_noise_shader, +) +from infinigen.core import surface +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler + + +def shader_dragonfly_body_shader(nw: NodeWrangler, base_color, v): + # Code generated using version 2.4.3 of the node_transpiler + + attribute = nw.new_node(Nodes.Attribute, attrs={"attribute_name": "pos"}) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": attribute.outputs["Vector"]} + ) + + absolute = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["X"]}, + attrs={"operation": "ABSOLUTE"}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["Z"], 1: 3.0}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": absolute, "Y": separate_xyz.outputs["Y"], "Z": multiply}, + ) + + attribute_1 = nw.new_node(Nodes.Attribute, attrs={"attribute_name": "body seed"}) + + musgrave_texture = nw.new_node( + Nodes.MusgraveTexture, + input_kwargs={ + "Vector": combine_xyz, + "W": attribute_1.outputs["Fac"], + "Scale": 0.5, + "Dimension": 1.0, + "Lacunarity": 1.0, + }, + attrs={"musgrave_dimensions": "4D"}, + ) + + map_range = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": musgrave_texture, 1: -0.26, 2: 0.06} + ) + + attribute_2 = nw.new_node( + Nodes.Attribute, attrs={"attribute_name": "spline parameter"} + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": attribute_2.outputs["Fac"]} + ) + + group = nw.new_node( + nodegroup_add_noise().name, + input_kwargs={ + "Vector": combine_xyz_1, + "Scale": 0.5, + "amount": (0.16, 0.26, 0.0), + "Noise Eval Position": combine_xyz, + }, + ) + + separate_xyz_1 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": group}) + + combine_xyz_2 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": separate_xyz_1.outputs["X"], + "Y": attribute_1.outputs["Fac"], + }, + ) + + voronoi_texture = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={"Vector": combine_xyz_2, "Scale": 10.0}, + attrs={"voronoi_dimensions": "2D"}, + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": voronoi_texture.outputs["Distance"], 1: 0.14, 2: 0.82}, + ) + + add = nw.new_node( + Nodes.Math, + input_kwargs={0: map_range.outputs["Result"], 1: map_range_1.outputs["Result"]}, + ) + + map_range_2 = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": add, 1: 0.7, 3: 1.0, 4: 0.0} + ) + + rgb_1 = nw.new_node(Nodes.RGB) + rgb_1.outputs[0].default_value = base_color + + group_2 = nw.new_node( + nodegroup_color_noise().name, + input_kwargs={ + "Scale": 1.34, + "Color": rgb_1, + "Value From Max": 0.7, + "Value To Min": 0.18, + }, + ) + + hue_saturation_value = nw.new_node( + "ShaderNodeHueSaturation", input_kwargs={"Value": v, "Color": rgb_1} + ) + + mix = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": map_range_2.outputs["Result"], + "Color1": group_2, + "Color2": hue_saturation_value, + }, + ) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, 
+ input_kwargs={ + "Base Color": mix, + "Metallic": 0.2182, + "Specular": 0.8318, + "Roughness": 0.1545, + }, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf} + ) + + +@node_utils.to_nodegroup( + "nodegroup_dragonfly_body", singleton=False, type="GeometryNodeTree" +) +def nodegroup_dragonfly_body( + nw: NodeWrangler, + curve_control_points=[ + (0.0, 0.15), + (0.1586, 0.4688), + (0.36, 0.66), + (0.7427, 0.4606), + (0.9977, 0.2562), + ], + base_color=(0.2789, 0.3864, 0.0319, 1.0), + v=0.3, +): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "Body Length", 10.0), + ("NodeSocketFloat", "Random Seed", 0.0), + ("NodeSocketFloat", "Hair Density", 200.0), + ], + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"Z": group_input.outputs["Body Length"]} + ) + + curve_line = nw.new_node(Nodes.CurveLine, input_kwargs={"End": combine_xyz}) + + resample_curve = nw.new_node( + Nodes.ResampleCurve, input_kwargs={"Curve": curve_line, "Count": 128} + ) + + spline_parameter = nw.new_node(Nodes.SplineParameter) + + capture_attribute = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={ + "Geometry": resample_curve, + 2: spline_parameter.outputs["Factor"], + }, + ) + + float_curve = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": spline_parameter.outputs["Factor"]} + ) + node_utils.assign_curve(float_curve.mapping.curves[0], curve_control_points) + + set_curve_radius = nw.new_node( + Nodes.SetCurveRadius, + input_kwargs={ + "Curve": capture_attribute.outputs["Geometry"], + "Radius": float_curve, + }, + ) + + circlecrosssection = nw.new_node( + nodegroup_circle_cross_section().name, + input_kwargs={ + "random seed": group_input.outputs["Random Seed"], + "noise amount": 1.26, + "radius": 4.0, + }, + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": circlecrosssection, "Rotation": (0.0, 0.0, 1.5708)}, + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": set_curve_radius, + "Profile Curve": transform, + "Fill Caps": True, + }, + ) + + normal = nw.new_node(Nodes.InputNormal) + + position_2 = nw.new_node(Nodes.InputPosition) + + multiply = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: position_2, 1: (1.0, 0.2, 0.8)}, + attrs={"operation": "MULTIPLY"}, + ) + + voronoi_texture = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={ + "Vector": multiply.outputs["Vector"], + "W": group_input.outputs["Random Seed"], + "Scale": 0.5, + }, + attrs={"voronoi_dimensions": "4D"}, + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": voronoi_texture.outputs["Distance"], 4: 0.4}, + attrs={"clamp": False}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: map_range.outputs["Result"], 1: -1.0}, + attrs={"operation": "MULTIPLY"}, + ) + + scale = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: normal, "Scale": multiply_1}, + attrs={"operation": "SCALE"}, + ) + + add = nw.new_node(Nodes.VectorMath, input_kwargs={1: scale.outputs["Vector"]}) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={"Geometry": curve_to_mesh, "Offset": add.outputs["Vector"]}, + ) + + set_material = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": set_position, + "Material": surface.shaderfunc_to_material( + shader_dragonfly_body_shader, base_color, v + ), + }, + ) + + surfacebump = nw.new_node( + nodegroup_surface_bump().name, + 
input_kwargs={ + "Geometry": set_material, + "Displacement": -0.12, + "Scale": 75.8, + "seed": group_input.outputs["Random Seed"], + }, + ) + + position = nw.new_node(Nodes.InputPosition) + + store_named_attribute = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={"Geometry": surfacebump, "Name": "pos", 2: position}, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": position}) + + greater_than = nw.new_node( + Nodes.Compare, input_kwargs={0: separate_xyz.outputs["Y"], 1: 0.5} + ) + + reroute = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": capture_attribute.outputs[2]} + ) + + less_than = nw.new_node( + Nodes.Compare, + input_kwargs={0: reroute, 1: 0.4}, + attrs={"operation": "LESS_THAN"}, + ) + + op_and = nw.new_node( + Nodes.BooleanMath, input_kwargs={0: greater_than, 1: less_than} + ) + + reroute_1 = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": group_input.outputs["Hair Density"]} + ) + + distribute_points_on_faces = nw.new_node( + Nodes.DistributePointsOnFaces, + input_kwargs={ + "Mesh": store_named_attribute, + "Selection": op_and, + "Density": reroute_1, + }, + ) + + randomrotationscale = nw.new_node( + nodegroup_random_rotation_scale().name, + input_kwargs={ + "random seed": -2.4, + "rot mean": (-1.0, 0.0, 0.0), + "rot std z": -10.2, + "scale mean": 0.03, + }, + ) + + leghair = nw.new_node( + nodegroup_principled_hair().name, input_kwargs={"Resolution": 2} + ) + + transform_3 = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": leghair, "Scale": (1.0, 1.0, 5.0)} + ) + + set_material_2 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": transform_3, + "Material": surface.shaderfunc_to_material(shader_black_w_noise_shader), + }, + ) + + instanceonpoints = nw.new_node( + nodegroup_instance_on_points().name, + input_kwargs={ + "rotation base": distribute_points_on_faces.outputs["Rotation"], + "rotation delta": randomrotationscale.outputs["Vector"], + "translation": (0.0, 0.0, 0.0), + "scale": randomrotationscale.outputs["Value"], + "Points": distribute_points_on_faces.outputs["Points"], + "Instance": set_material_2, + }, + ) + + multiply_2 = nw.new_node( + Nodes.Math, input_kwargs={0: reroute_1, 1: 0.3}, attrs={"operation": "MULTIPLY"} + ) + + distribute_points_on_faces_1 = nw.new_node( + Nodes.DistributePointsOnFaces, + input_kwargs={"Mesh": store_named_attribute, "Density": multiply_2, "Seed": 1}, + ) + + instanceonpoints_1 = nw.new_node( + nodegroup_instance_on_points().name, + input_kwargs={ + "rotation base": distribute_points_on_faces_1.outputs["Rotation"], + "rotation delta": randomrotationscale.outputs["Vector"], + "translation": (0.0, 0.0, 0.0), + "scale": randomrotationscale.outputs["Value"], + "Points": distribute_points_on_faces_1.outputs["Points"], + "Instance": set_material_2, + }, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={ + "Geometry": [store_named_attribute, instanceonpoints, instanceonpoints_1] + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": join_geometry, + "Skeleton Curve": resample_curve, + "spline parameter": reroute, + }, + ) diff --git a/infinigen/assets/objects/creatures/insects/parts/eye/dragonfly_eye.py b/infinigen/assets/objects/creatures/insects/parts/eye/dragonfly_eye.py new file mode 100644 index 000000000..7d7a18a90 --- /dev/null +++ b/infinigen/assets/objects/creatures/insects/parts/eye/dragonfly_eye.py @@ -0,0 +1,124 @@ +# Copyright (c) Princeton University. 
+# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Yiming Zuo + + +from infinigen.assets.objects.creatures.insects.utils.shader_utils import ( + nodegroup_color_noise, +) +from infinigen.core import surface +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler + + +def shader_dragonfly_eye_shader(nw: NodeWrangler, base_color, v): + # Code generated using version 2.4.3 of the node_transpiler + + musgrave_texture = nw.new_node( + Nodes.MusgraveTexture, input_kwargs={"Scale": 2.0, "Detail": 1.0} + ) + + map_range = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": musgrave_texture, 1: -1.0, 2: 0.2} + ) + + rgb = nw.new_node(Nodes.RGB) + rgb.outputs[0].default_value = base_color + + hue_saturation_value = nw.new_node( + "ShaderNodeHueSaturation", input_kwargs={"Value": v, "Color": rgb} + ) + + group_1 = nw.new_node( + nodegroup_color_noise().name, + input_kwargs={ + "Scale": 1.34, + "Color": rgb, + "Value From Max": 0.7, + "Value To Min": 0.18, + }, + ) + + mix = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": map_range.outputs["Result"], + "Color1": hue_saturation_value, + "Color2": group_1, + }, + ) + + voronoi_texture = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={"Scale": 1000.0}, + attrs={"feature": "DISTANCE_TO_EDGE"}, + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": voronoi_texture.outputs["Distance"], + 1: 0.03, + 2: 0.2, + 3: 1.0, + 4: -0.78, + }, + ) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": mix, + "Specular": map_range_1.outputs["Result"], + "Roughness": 0.0, + }, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf} + ) + + +@node_utils.to_nodegroup( + "nodegroup_dragonfly_eye", singleton=False, type="GeometryNodeTree" +) +def nodegroup_dragonfly_eye( + nw: NodeWrangler, + base_color=(0.2789, 0.3864, 0.0319, 1.0), + v=0.3, +): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketInt", "Rings", 16)] + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Rings"], 1: 2.0}, + attrs={"operation": "MULTIPLY"}, + ) + + uv_sphere = nw.new_node( + Nodes.MeshUVSphere, + input_kwargs={"Segments": multiply, "Rings": group_input.outputs["Rings"]}, + ) + + transform = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": uv_sphere, "Scale": (1.0, 1.0, 1.3)} + ) + + set_material = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": transform, + "Material": surface.shaderfunc_to_material( + shader_dragonfly_eye_shader, base_color, v + ), + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_material} + ) diff --git a/infinigen/assets/objects/creatures/insects/parts/hair/principled_hair.py b/infinigen/assets/objects/creatures/insects/parts/hair/principled_hair.py new file mode 100644 index 000000000..dfe27a4ec --- /dev/null +++ b/infinigen/assets/objects/creatures/insects/parts/hair/principled_hair.py @@ -0,0 +1,53 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
+ +# Authors: Yiming Zuo + + +from infinigen.assets.objects.creatures.insects.utils.geom_utils import ( + nodegroup_circle_cross_section, + nodegroup_shape_quadratic, +) +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler + + +@node_utils.to_nodegroup( + "nodegroup_principled_hair", singleton=False, type="GeometryNodeTree" +) +def nodegroup_principled_hair(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketIntUnsigned", "Resolution", 4)] + ) + + crosssection = nw.new_node( + nodegroup_circle_cross_section().name, + input_kwargs={"Resolution": group_input.outputs["Resolution"], "radius": 0.5}, + ) + + value = nw.new_node(Nodes.Value) + value.outputs[0].default_value = 2.0 + + transform = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": crosssection, "Scale": value} + ) + + shapequadraticleghair = nw.new_node( + nodegroup_shape_quadratic( + radius_control_points=[(0.0, 0.1125), (0.625, 0.1), (1.0, 0.0531)] + ).name, + input_kwargs={ + "Profile Curve": transform, + "noise amount tilt": 0.0, + "Resolution": 8, + "Start": (0.0, 0.0, 0.0), + "Middle": (-0.2, 0.0, 1.0), + "End": (0.0, 0.0, 2.66), + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Mesh": shapequadraticleghair.outputs["Mesh"]} + ) diff --git a/infinigen/assets/objects/creatures/insects/parts/head/dragonfly_head.py b/infinigen/assets/objects/creatures/insects/parts/head/dragonfly_head.py new file mode 100644 index 000000000..0010f1e4b --- /dev/null +++ b/infinigen/assets/objects/creatures/insects/parts/head/dragonfly_head.py @@ -0,0 +1,357 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
+ +# Authors: Yiming Zuo + + +from infinigen.assets.objects.creatures.insects.parts.antenna.dragonfly_antenna import ( + nodegroup_dragonfly_antenna, +) +from infinigen.assets.objects.creatures.insects.parts.eye.dragonfly_eye import ( + nodegroup_dragonfly_eye, +) +from infinigen.assets.objects.creatures.insects.parts.hair.principled_hair import ( + nodegroup_principled_hair, +) +from infinigen.assets.objects.creatures.insects.parts.mouth.dragonfly_mouth import ( + nodegroup_dragonfly_mouth, +) +from infinigen.assets.objects.creatures.insects.utils.geom_utils import ( + nodegroup_add_hair, + nodegroup_attach_part, + nodegroup_surface_bump, + nodegroup_symmetric_clone, +) +from infinigen.assets.objects.creatures.insects.utils.shader_utils import ( + nodegroup_color_noise, + shader_black_w_noise_shader, +) +from infinigen.core import surface +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler + + +def shader_dragonfly_head_shader(nw: NodeWrangler, base_color, v): + # Code generated using version 2.4.3 of the node_transpiler + + attribute = nw.new_node(Nodes.Attribute, attrs={"attribute_name": "pos"}) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": attribute.outputs["Vector"]} + ) + + absolute = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["Y"]}, + attrs={"operation": "ABSOLUTE"}, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": separate_xyz.outputs["X"], + "Y": absolute, + "Z": separate_xyz.outputs["Z"], + }, + ) + + musgrave_texture = nw.new_node( + Nodes.MusgraveTexture, + input_kwargs={"Vector": combine_xyz, "W": 28.0, "Scale": 2.0, "Detail": 1.0}, + attrs={"musgrave_dimensions": "4D"}, + ) + + map_range = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": musgrave_texture, 1: -0.28, 2: 0.48} + ) + + rgb = nw.new_node(Nodes.RGB) + rgb.outputs[0].default_value = base_color + + hue_saturation_value = nw.new_node( + "ShaderNodeHueSaturation", input_kwargs={"Value": v, "Color": rgb} + ) + + group = nw.new_node( + nodegroup_color_noise().name, + input_kwargs={ + "Scale": 1.34, + "Color": rgb, + "Value From Max": 0.7, + "Value To Min": 0.18, + }, + ) + + mix = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": map_range.outputs["Result"], + "Color1": hue_saturation_value, + "Color2": group, + }, + ) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={"Base Color": mix, "Specular": 0.7545, "Roughness": 0.0636}, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf} + ) + + +@node_utils.to_nodegroup( + "nodegroup_dragon_fly_head", singleton=False, type="GeometryNodeTree" +) +def nodegroup_dragon_fly_head( + nw: NodeWrangler, + base_color=(0.2789, 0.3864, 0.0319, 1.0), + eye_color=(0.2789, 0.3864, 0.0319, 1.0), + v=0.3, +): + # Code generated using version 2.4.3 of the node_transpiler + + curve_line = nw.new_node(Nodes.CurveLine, input_kwargs={"End": (1.8, 0.0, 0.0)}) + + resample_curve = nw.new_node( + Nodes.ResampleCurve, input_kwargs={"Curve": curve_line, "Count": 32} + ) + + set_position = nw.new_node( + Nodes.SetPosition, input_kwargs={"Geometry": resample_curve} + ) + + spline_parameter_1 = nw.new_node(Nodes.SplineParameter) + + capture_attribute = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={ + "Geometry": set_position, + 2: spline_parameter_1.outputs["Factor"], + }, + ) + + float_curve_1 = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": 
capture_attribute.outputs[2]} + ) + node_utils.assign_curve( + float_curve_1.mapping.curves[0], + [(0.0, 0.14), (0.3055, 0.93), (0.7018, 0.79), (0.9236, 0.455), (1.0, 0.0)], + ) + + set_curve_radius = nw.new_node( + Nodes.SetCurveRadius, + input_kwargs={ + "Curve": capture_attribute.outputs["Geometry"], + "Radius": float_curve_1, + }, + ) + + curve_circle = nw.new_node( + Nodes.CurveCircle, input_kwargs={"Resolution": 200, "Radius": 1.1} + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": set_curve_radius, + "Profile Curve": curve_circle.outputs["Curve"], + "Fill Caps": True, + }, + ) + + set_material_1 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": curve_to_mesh, + "Material": surface.shaderfunc_to_material( + shader_dragonfly_head_shader, base_color, v + ), + }, + ) + + leghair = nw.new_node( + nodegroup_principled_hair().name, input_kwargs={"Resolution": 2} + ) + + transform_3 = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": leghair, "Scale": (1.0, 1.0, 5.0)} + ) + + set_material_2 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": transform_3, + "Material": surface.shaderfunc_to_material(shader_black_w_noise_shader), + }, + ) + + addhair = nw.new_node( + nodegroup_add_hair().name, + input_kwargs={ + "Mesh": set_material_1, + "Hair": set_material_2, + "Density": 500.0, + "rot mean": (0.36, 0.0, 0.0), + "scale mean": 0.01, + }, + ) + + reroute = nw.new_node(Nodes.Reroute, input_kwargs={"Input": addhair}) + + dragonflyeye = nw.new_node( + nodegroup_dragonfly_eye(base_color=eye_color, v=0.0).name, + input_kwargs={"Rings": 128}, + ) + + value_1 = nw.new_node(Nodes.Value) + value_1.outputs[0].default_value = 0.6 + + transform_1 = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": dragonflyeye, "Scale": value_1} + ) + + attach_part = nw.new_node( + nodegroup_attach_part().name, + input_kwargs={ + "Skin Mesh": reroute, + "Skeleton Curve": set_position, + "Geometry": transform_1, + "Length Fac": 0.5625, + "Ray Rot": (1.5474, -0.3944, 1.4556), + "Rad": 0.64, + "Part Rot": (27.1, 0.0, 0.0), + }, + ) + + symmetric_clone = nw.new_node( + nodegroup_symmetric_clone().name, + input_kwargs={"Geometry": attach_part.outputs["Geometry"]}, + ) + + dragonflymouth = nw.new_node(nodegroup_dragonfly_mouth().name) + + set_material_3 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": dragonflymouth, + "Material": surface.shaderfunc_to_material( + shader_dragonfly_head_shader, base_color, v + ), + }, + ) + + addhair_1 = nw.new_node( + nodegroup_add_hair().name, + input_kwargs={ + "Mesh": set_material_3, + "Hair": set_material_2, + "Density": 5.0, + "rot mean": (-0.04, 0.0, 0.0), + "scale mean": 0.1, + }, + ) + + surfacebump = nw.new_node( + nodegroup_surface_bump().name, + input_kwargs={"Geometry": addhair_1, "Displacement": 0.05, "Scale": 5.0}, + ) + + value = nw.new_node(Nodes.Value) + value.outputs[0].default_value = 0.07 + + transform = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": surfacebump, "Scale": value} + ) + + attach_part_1 = nw.new_node( + nodegroup_attach_part().name, + input_kwargs={ + "Skin Mesh": reroute, + "Skeleton Curve": resample_curve, + "Geometry": transform, + "Length Fac": 0.9667, + "Part Rot": (0.0, 31.5, 0.0), + "Do Normal Rot": True, + }, + ) + + antenna = nw.new_node( + nodegroup_dragonfly_antenna().name, + input_kwargs={ + "length_rad1_rad2": (1.24, 0.05, 0.04), + "angles_deg": (0.0, -31.0, 0.0), + }, + ) + + surfacebump_1 = nw.new_node( + nodegroup_surface_bump().name, 
+ input_kwargs={"Geometry": antenna.outputs["Geometry"], "Scale": 5.0}, + ) + + set_material = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": surfacebump_1, + "Material": surface.shaderfunc_to_material(shader_black_w_noise_shader), + }, + ) + + value_2 = nw.new_node(Nodes.Value) + value_2.outputs[0].default_value = 0.48 + + transform_2 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": set_material, + "Translation": (-0.02, 0.0, 0.0), + "Scale": value_2, + }, + ) + + attach_part_2 = nw.new_node( + nodegroup_attach_part().name, + input_kwargs={ + "Skin Mesh": reroute, + "Skeleton Curve": resample_curve, + "Geometry": transform_2, + "Length Fac": 0.6408, + "Ray Rot": (1.9722, -1.4364, 1.5708), + "Rad": 0.9, + "Part Rot": (108.1, -49.8, 26.7), + }, + ) + + symmetric_clone_1 = nw.new_node( + nodegroup_symmetric_clone().name, + input_kwargs={"Geometry": attach_part_2.outputs["Geometry"]}, + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={ + "Geometry": [ + symmetric_clone.outputs["Both"], + reroute, + attach_part_1.outputs["Geometry"], + symmetric_clone_1.outputs["Both"], + ] + }, + ) + + position = nw.new_node(Nodes.InputPosition) + + store_named_attribute = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={"Geometry": join_geometry_1, "Name": "pos", "Value": position}, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": store_named_attribute} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": join_geometry} + ) diff --git a/infinigen/assets/objects/creatures/insects/parts/leg/dragonfly_leg.py b/infinigen/assets/objects/creatures/insects/parts/leg/dragonfly_leg.py new file mode 100644 index 000000000..bec3c08d9 --- /dev/null +++ b/infinigen/assets/objects/creatures/insects/parts/leg/dragonfly_leg.py @@ -0,0 +1,371 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
+ +# Authors: Yiming Zuo + + +from infinigen.assets.objects.creatures.insects.parts.hair.principled_hair import ( + nodegroup_principled_hair, +) +from infinigen.assets.objects.creatures.insects.utils.geom_utils import ( + nodegroup_shape_quadratic, + nodegroup_surface_bump, +) +from infinigen.assets.objects.creatures.insects.utils.shader_utils import ( + shader_black_w_noise_shader, +) +from infinigen.core import surface +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler + + +@node_utils.to_nodegroup( + "nodegroup_leg_control", singleton=False, type="GeometryNodeTree" +) +def nodegroup_leg_control(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketFloat", "Openness", 1.0)] + ) + + reroute_2 = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": group_input.outputs["Openness"]} + ) + + map_range = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": reroute_2, 3: 0.6, 4: 1.44} + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": reroute_2, 3: -0.26, 4: 0.16} + ) + + map_range_2 = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": reroute_2, 3: 1.68, 4: 1.88} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Femur": map_range.outputs["Result"], + "Tarsus": map_range_1.outputs["Result"], + "Shoulder": map_range_2.outputs["Result"], + }, + ) + + +@node_utils.to_nodegroup( + "nodegroup_dragonfly_leg", singleton=False, type="GeometryNodeTree" +) +def nodegroup_dragonfly_leg(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + legcrosssection = nw.new_node(nodegroup_leg_cross_section().name) + + shapequadraticclaw = nw.new_node( + nodegroup_shape_quadratic( + radius_control_points=[ + (0.0, 0.0031), + (0.2682, 0.1906), + (0.6364, 0.3594), + (0.8091, 0.5031), + (1.0, 0.5375), + ] + ).name, + input_kwargs={ + "Profile Curve": legcrosssection, + "noise amount tilt": 0.0, + "Resolution": 16, + "Start": (0.0, 0.0, 3.0), + "Middle": (-1.2, 0.0, 1.5), + "End": (0.2, 0.0, 0.0), + }, + ) + + value = nw.new_node(Nodes.Value) + value.outputs[0].default_value = 0.3 + + transform_2 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": shapequadraticclaw, + "Translation": (-0.38, 0.0, 1.0), + "Rotation": (0.0, 0.4318, 0.0), + "Scale": value, + }, + ) + + value_1 = nw.new_node(Nodes.Value) + value_1.outputs[0].default_value = 0.5 + + transform_3 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": shapequadraticclaw, + "Translation": (0.1, 0.0, 0.04), + "Rotation": (0.0, -0.0262, 0.0), + "Scale": value_1, + }, + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [shapequadraticclaw, transform_2, transform_3]}, + ) + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "Rot claw", 1.82), + ("NodeSocketFloat", "Rot Tarsus", 0.02), + ("NodeSocketFloat", "Rot Femur", 1.42), + ], + ) + + legpart = nw.new_node( + nodegroup_leg_part().name, + input_kwargs={ + "NextJoint": join_geometry_1, + "NextJoint Y rot": group_input.outputs["Rot claw"], + "NextJoint Scale": 0.4, + "Num Hairs": 10, + }, + ) + + legpart_1 = nw.new_node( + nodegroup_leg_part().name, + input_kwargs={ + "NextJoint": legpart, + "NextJoint Y rot": group_input.outputs["Rot Tarsus"], + "NextJoint Scale": 0.45, + "Cross Section Scale": 0.8, + }, + ) + + legpart_2 = nw.new_node( + nodegroup_leg_part().name, + 
input_kwargs={ + "NextJoint": legpart_1, + "NextJoint Y rot": group_input.outputs["Rot Femur"], + "NextJoint Scale": 0.75, + "Cross Section Scale": 1.2, + "Num Hairs": 30, + "Hair Scale Max": 0.15, + }, + ) + + surfacebump = nw.new_node( + nodegroup_surface_bump().name, + input_kwargs={"Geometry": legpart_2, "Displacement": 0.03, "Scale": 5.0}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": surfacebump} + ) + + +@node_utils.to_nodegroup("nodegroup_leg_part", singleton=False, type="GeometryNodeTree") +def nodegroup_leg_part(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + legcrosssection = nw.new_node(nodegroup_leg_cross_section().name) + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "NextJoint", None), + ("NodeSocketFloat", "NextJoint Y rot", 0.0), + ("NodeSocketFloat", "NextJoint Scale", 1.0), + ("NodeSocketFloat", "Cross Section Scale", 1.0), + ("NodeSocketInt", "Num Hairs", 15), + ("NodeSocketFloat", "Hair Scale Min", 0.18), + ("NodeSocketFloat", "Hair Scale Max", 0.22), + ], + ) + + transform_4 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": legcrosssection, + "Rotation": (0.0, 0.0, 3.1416), + "Scale": group_input.outputs["Cross Section Scale"], + }, + ) + + tarsus_end = nw.new_node(Nodes.Vector, label="tarsus end") + tarsus_end.vector = (0.2, 0.0, 6.0) + + shapequadratictarsus = nw.new_node( + nodegroup_shape_quadratic( + radius_control_points=[ + (0.0, 0.3125), + (0.0841, 0.3469), + (0.45, 0.4125), + (0.55, 0.3719), + (0.9045, 0.325), + (1.0, 0.125), + ] + ).name, + input_kwargs={ + "Profile Curve": transform_4, + "noise amount tilt": 0.0, + "Resolution": 128, + "Start": (0.0, 0.0, 0.0), + "Middle": (-0.4, 0.0, 3.0), + "End": tarsus_end, + }, + ) + + spline_parameter_1 = nw.new_node(Nodes.SplineParameter) + + capture_attribute_1 = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={ + "Geometry": shapequadratictarsus.outputs["Mesh"], + 2: spline_parameter_1.outputs["Factor"], + }, + ) + + curve_to_points_1 = nw.new_node( + Nodes.CurveToPoints, + input_kwargs={ + "Curve": capture_attribute_1.outputs["Geometry"], + "Count": group_input.outputs["Num Hairs"], + }, + ) + + greater_than = nw.new_node( + Nodes.Compare, input_kwargs={0: capture_attribute_1.outputs[2], 1: 0.9} + ) + + delete_geometry_1 = nw.new_node( + Nodes.DeleteGeometry, + input_kwargs={ + "Geometry": curve_to_points_1.outputs["Points"], + "Selection": greater_than, + }, + ) + + leghair = nw.new_node(nodegroup_principled_hair().name) + + random_value_3 = nw.new_node(Nodes.RandomValue, input_kwargs={2: 0.88}) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"Y": random_value_3.outputs[1]} + ) + + random_value_2 = nw.new_node( + Nodes.RandomValue, + input_kwargs={ + 2: group_input.outputs["Hair Scale Min"], + 3: group_input.outputs["Hair Scale Max"], + }, + ) + + instance_on_points_1 = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={ + "Points": delete_geometry_1, + "Instance": leghair, + "Rotation": combine_xyz_1, + "Scale": random_value_2.outputs[1], + }, + ) + + subtract = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: tarsus_end, 1: (0.0, 0.0, 0.05)}, + attrs={"operation": "SUBTRACT"}, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"Y": group_input.outputs["NextJoint Y rot"]} + ) + + transform_5 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": group_input.outputs["NextJoint"], + "Translation": 
subtract.outputs["Vector"], + "Rotation": combine_xyz, + "Scale": group_input.outputs["NextJoint Scale"], + }, + ) + + join_geometry_3 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [shapequadratictarsus.outputs["Mesh"], transform_5]}, + ) + + join_geometry_4 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [instance_on_points_1, join_geometry_3]}, + ) + + set_material = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": join_geometry_4, + "Material": surface.shaderfunc_to_material(shader_black_w_noise_shader), + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_material} + ) + + +@node_utils.to_nodegroup( + "nodegroup_leg_cross_section", singleton=False, type="GeometryNodeTree" +) +def nodegroup_leg_cross_section(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketIntUnsigned", "Resolution", 8)] + ) + + bezier_segment = nw.new_node( + Nodes.CurveBezierSegment, + input_kwargs={ + "Resolution": group_input.outputs["Resolution"], + "Start Handle": (-0.9, 0.7, 0.0), + "End Handle": (0.9, 0.38, 0.0), + }, + ) + + reroute = nw.new_node(Nodes.Reroute, input_kwargs={"Input": bezier_segment}) + + transform = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": reroute, "Scale": (1.0, -1.0, 1.0)} + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [transform, reroute]} + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, input_kwargs={"Curve": join_geometry} + ) + + merge_by_distance = nw.new_node( + Nodes.MergeByDistance, input_kwargs={"Geometry": curve_to_mesh} + ) + + mesh_to_curve = nw.new_node( + Nodes.MeshToCurve, input_kwargs={"Mesh": merge_by_distance} + ) + + transform_1 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": mesh_to_curve, + "Rotation": (0.0, 0.0, 1.5708), + "Scale": (0.6, 1.0, 0.6), + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": transform_1} + ) diff --git a/infinigen/assets/objects/creatures/insects/parts/mouth/dragonfly_mouth.py b/infinigen/assets/objects/creatures/insects/parts/mouth/dragonfly_mouth.py new file mode 100644 index 000000000..d7abd8c2f --- /dev/null +++ b/infinigen/assets/objects/creatures/insects/parts/mouth/dragonfly_mouth.py @@ -0,0 +1,139 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
+ +# Authors: Yiming Zuo + + +from infinigen.assets.objects.creatures.insects.utils.geom_utils import ( + nodegroup_simple_tube_v2, +) +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler + + +@node_utils.to_nodegroup( + "nodegroup_dragonfly_mouth", singleton=False, type="GeometryNodeTree" +) +def nodegroup_dragonfly_mouth(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + value = nw.new_node(Nodes.Value) + value.outputs[0].default_value = 1.5 + + simple_tube_v2 = nw.new_node( + nodegroup_simple_tube_v2().name, + input_kwargs={ + "length_rad1_rad2": (9.5, 9.36, 5.54), + "proportions": (1.0, 1.0, 1.0), + "aspect": value, + "do_bezier": False, + "fullness": 7.9, + }, + ) + + transform_1 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": simple_tube_v2.outputs["Geometry"], + "Translation": (0.0, 0.0, -9.1), + "Rotation": (0.0, 1.7645, 0.0), + "Scale": (1.0, 1.2, 1.0), + }, + ) + + simple_tube_v2_1 = nw.new_node( + nodegroup_simple_tube_v2().name, + input_kwargs={ + "length_rad1_rad2": (9.64, 5.46, 9.04), + "proportions": (1.0, 1.0, 1.0), + "aspect": value, + "do_bezier": False, + "fullness": 7.9, + }, + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": simple_tube_v2_1.outputs["Geometry"], + "Rotation": (0.0, 1.5708, 0.0), + "Scale": (1.0, 1.2, 1.0), + }, + ) + + simple_tube_v2_2 = nw.new_node( + nodegroup_simple_tube_v2().name, + input_kwargs={ + "length_rad1_rad2": (8.4, 6.16, 4.7), + "proportions": (1.0, 1.0, 1.0), + "aspect": value, + "do_bezier": False, + "fullness": 7.9, + }, + ) + + transform_2 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": simple_tube_v2_2.outputs["Geometry"], + "Translation": (-1.1, 0.0, -17.2), + "Rotation": (0.0, 2.6005, 0.0), + "Scale": (1.0, 1.2, 1.0), + }, + ) + + simple_tube_v2_3 = nw.new_node( + nodegroup_simple_tube_v2().name, + input_kwargs={ + "length_rad1_rad2": (10.1, 4.28, 6.7), + "angles_deg": (4.64, 0.0, 0.0), + "proportions": (1.0, 1.0, 1.0), + "aspect": 2.1, + "do_bezier": False, + "fullness": 7.9, + }, + ) + + transform_4 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": simple_tube_v2_3.outputs["Geometry"], + "Translation": (-6.56, 0.0, 5.34), + "Rotation": (0.0, 0.8126, 0.0), + "Scale": (1.0, 1.2, 1.0), + }, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [transform_1, transform, transform_2, transform_4]}, + ) + + transform_3 = nw.new_node(Nodes.Transform, input_kwargs={"Geometry": join_geometry}) + + normal = nw.new_node(Nodes.InputNormal) + + noise_texture = nw.new_node(Nodes.NoiseTexture, input_kwargs={"Scale": 0.5}) + + map_range = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": noise_texture.outputs["Fac"], 4: 0.3} + ) + + scale = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: normal, "Scale": map_range.outputs["Result"]}, + attrs={"operation": "SCALE"}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={"Geometry": transform_3, "Offset": scale.outputs["Vector"]}, + ) + + subdivision_surface = nw.new_node( + Nodes.SubdivisionSurface, input_kwargs={"Mesh": set_position, "Level": 2} + ) + + group_output_1 = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": subdivision_surface} + ) diff --git a/infinigen/assets/objects/creatures/insects/parts/tail/dragonfly_tail.py b/infinigen/assets/objects/creatures/insects/parts/tail/dragonfly_tail.py new file mode 100644 index 000000000..f32c8db86 --- 
/dev/null +++ b/infinigen/assets/objects/creatures/insects/parts/tail/dragonfly_tail.py @@ -0,0 +1,647 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Yiming Zuo + + +from infinigen.assets.objects.creatures.insects.utils.geom_utils import ( + nodegroup_circle_cross_section, + nodegroup_instance_on_points, + nodegroup_random_rotation_scale, + nodegroup_shape_quadratic, + nodegroup_surface_bump, +) +from infinigen.assets.objects.creatures.insects.utils.shader_utils import ( + nodegroup_add_noise, + nodegroup_color_noise, +) +from infinigen.core import surface +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler + + +def shader_dragonfly_tail_shader(nw: NodeWrangler, base_color, v, ring_length): + # Code generated using version 2.4.3 of the node_transpiler + + attribute = nw.new_node( + Nodes.Attribute, attrs={"attribute_name": "cross section parameter"} + ) + + colorramp = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": attribute.outputs["Fac"]} + ) + colorramp.color_ramp.elements.new(0) + colorramp.color_ramp.elements.new(0) + colorramp.color_ramp.elements[0].position = 0.0 + colorramp.color_ramp.elements[0].color = (1.0, 1.0, 1.0, 1.0) + colorramp.color_ramp.elements[1].position = 0.4455 + colorramp.color_ramp.elements[1].color = (0.0, 0.0, 0.0, 1.0) + colorramp.color_ramp.elements[2].position = 0.5045 + colorramp.color_ramp.elements[2].color = (0.0, 0.0, 0.0, 1.0) + colorramp.color_ramp.elements[3].position = 1.0 + colorramp.color_ramp.elements[3].color = (1.0, 1.0, 1.0, 1.0) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": colorramp.outputs["Color"], 1: 0.02, 2: 0.38}, + ) + + attribute_1 = nw.new_node( + Nodes.Attribute, attrs={"attribute_name": "spline parameter"} + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": attribute_1.outputs["Fac"], 1: 0.18, 2: 0.42}, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": map_range.outputs["Result"], + "Y": map_range_1.outputs["Result"], + }, + ) + + texture_coordinate = nw.new_node(Nodes.TextureCoord) + + group = nw.new_node( + nodegroup_add_noise().name, + input_kwargs={ + "Vector": combine_xyz, + "amount": (1.0, 1.0, 0.0), + "Noise Eval Position": texture_coordinate.outputs["Object"], + }, + ) + + separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": group}) + + add = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["X"], 1: separate_xyz.outputs["Y"]}, + ) + + voronoi_texture = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={ + "W": attribute_1.outputs["Fac"], + "Scale": 5.34, + "Randomness": 0.0, + }, + attrs={"voronoi_dimensions": "1D"}, + ) + + map_range_2 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": voronoi_texture.outputs["Distance"], + 2: 0.1, + 3: 1.0, + 4: 0.0, + }, + ) + + maximum = nw.new_node( + Nodes.Math, + input_kwargs={0: add, 1: map_range_2.outputs["Result"]}, + attrs={"operation": "MAXIMUM"}, + ) + + value = nw.new_node(Nodes.Value) + value.outputs[0].default_value = ring_length + + add_1 = nw.new_node(Nodes.Math, input_kwargs={0: value, 1: 0.05}) + + map_range_4 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": attribute_1.outputs["Fac"], 1: value, 2: add_1}, + ) + + minimum = nw.new_node( + Nodes.Math, + input_kwargs={0: maximum, 1: map_range_4.outputs["Result"]}, + attrs={"operation": 
"MINIMUM"}, + ) + + rgb = nw.new_node(Nodes.RGB) + rgb.outputs[0].default_value = base_color + + hue_saturation_value = nw.new_node( + "ShaderNodeHueSaturation", input_kwargs={"Value": v, "Color": rgb} + ) + + group_2 = nw.new_node( + nodegroup_color_noise().name, + input_kwargs={ + "Scale": 1.34, + "Color": rgb, + "Value From Max": 0.7, + "Value To Min": 0.18, + }, + ) + + mix_1 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": minimum, + "Color1": hue_saturation_value, + "Color2": group_2, + }, + ) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": mix_1, + "Metallic": 0.5, + "Specular": 0.5114, + "Roughness": 0.2568, + }, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf} + ) + + +@node_utils.to_nodegroup( + "nodegroup_dragonfly_tail", singleton=False, type="GeometryNodeTree" +) +def nodegroup_dragonfly_tail( + nw: NodeWrangler, base_color=(0.2789, 0.3864, 0.0319, 1.0), v=0.3, ring_length=0.3 +): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVectorTranslation", "Start", (0.0, 0.0, 0.0)), + ("NodeSocketVectorTranslation", "Middle", (1.84, 0.0, 0.14)), + ("NodeSocketVectorTranslation", "End", (3.14, 0.0, -0.32)), + ("NodeSocketFloatDistance", "Segment Length", 0.44), + ("NodeSocketFloat", "Segment Scale", 0.25), + ("NodeSocketFloat", "Random Seed", 3.2), + ("NodeSocketFloat", "Radius", 0.9), + ], + ) + + quadratic_bezier = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Start": group_input.outputs["Start"], + "Middle": group_input.outputs["Middle"], + "End": group_input.outputs["End"], + }, + ) + + spline_parameter = nw.new_node(Nodes.SplineParameter) + + capture_attribute = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={ + "Geometry": quadratic_bezier, + 2: spline_parameter.outputs["Factor"], + }, + ) + + curve_to_points = nw.new_node( + Nodes.CurveToPoints, + input_kwargs={ + "Curve": capture_attribute.outputs["Geometry"], + "Length": group_input.outputs["Segment Length"], + }, + attrs={"mode": "LENGTH"}, + ) + + reroute_1 = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": group_input.outputs["Segment Scale"]} + ) + + randomrotationscale = nw.new_node( + nodegroup_random_rotation_scale().name, + input_kwargs={"rot std z": 0.0, "scale mean": reroute_1, "scale std": 0.05}, + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": capture_attribute.outputs[2], 3: 1.0, 4: 0.8}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: randomrotationscale.outputs["Value"], + 1: map_range.outputs["Result"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + droplast = nw.new_node( + nodegroup_droplast().name, + input_kwargs={"Geometry": curve_to_points.outputs["Points"]}, + ) + + integer = nw.new_node(Nodes.Integer, attrs={"integer": 128}) + integer.integer = 128 + + circlecrosssection = nw.new_node( + nodegroup_circle_cross_section().name, + input_kwargs={ + "random seed": 23.4, + "noise amount": 0.9, + "Resolution": integer, + "radius": group_input.outputs["Radius"], + }, + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": circlecrosssection, "Rotation": (0.0, 0.0, 1.5708)}, + ) + + spline_parameter_1 = nw.new_node(Nodes.SplineParameter) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: spline_parameter_1.outputs["Factor"]}, + attrs={"operation": "SUBTRACT"}, + ) + + absolute = nw.new_node( + Nodes.Math, 
input_kwargs={0: subtract}, attrs={"operation": "ABSOLUTE"} + ) + + multiply_1 = nw.new_node( + Nodes.Math, input_kwargs={0: absolute, 1: 2.0}, attrs={"operation": "MULTIPLY"} + ) + + store_named_attribute_2 = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": transform, + "Name": "cross section parameter", + "Value": multiply_1, + }, + ) + + reroute = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": store_named_attribute_2} + ) + + shapequadratic_001 = nw.new_node( + nodegroup_shape_quadratic( + radius_control_points=[ + (0.0, 0.3906), + (0.1795, 0.4656), + (0.5, 0.4563), + (0.8795, 0.45), + (1.0, 0.4344), + ] + ).name, + input_kwargs={ + "Profile Curve": reroute, + "noise amount tilt": 0.0, + "Resolution": integer, + "Start": (0.0, 0.0, -1.5), + "End": (0.0, 0.0, 0.68), + }, + ) + + store_named_attribute_1 = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": shapequadratic_001.outputs["Mesh"], + "Name": "spline parameter", + "Value": shapequadratic_001.outputs["spline parameter"], + }, + ) + + value_1 = nw.new_node(Nodes.Value) + value_1.outputs[0].default_value = 0.02 + + surfacebump = nw.new_node( + nodegroup_surface_bump().name, + input_kwargs={ + "Geometry": store_named_attribute_1, + "Displacement": value_1, + "Scale": 20.0, + "seed": group_input.outputs["Random Seed"], + }, + ) + + addverticalstripes = nw.new_node( + nodegroup_add_vertical_stripes().name, + input_kwargs={ + "Geometry": surfacebump, + "Seed": group_input.outputs["Random Seed"], + }, + ) + + instanceonpoints = nw.new_node( + nodegroup_instance_on_points().name, + input_kwargs={ + "rotation base": curve_to_points.outputs["Rotation"], + "rotation delta": randomrotationscale.outputs["Vector"], + "translation": (0.0, 0.0, 0.0), + "scale": multiply, + "Points": droplast.outputs["Others"], + "Instance": addverticalstripes, + }, + ) + + shapequadratic_003 = nw.new_node( + nodegroup_shape_quadratic( + radius_control_points=[ + (0.0, 0.3312), + (0.1773, 0.4281), + (0.4318, 0.5031), + (0.5886, 0.3562), + (0.7864, 0.2687), + (1.0, 0.0), + ] + ).name, + input_kwargs={ + "Profile Curve": reroute, + "noise amount tilt": 0.0, + "Resolution": integer, + "Start": (0.26, 0.0, -1.5), + "Middle": (0.32, 0.0, 0.0), + "End": (-0.04, 0.0, 1.5), + }, + ) + + transform_1 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": shapequadratic_003.outputs["Mesh"], + "Translation": (0.0, 0.28, 0.0), + "Rotation": (0.0, 0.0, -1.5708), + }, + ) + + store_named_attribute = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": transform_1, + "Name": "spline parameter", + "Value": shapequadratic_003.outputs["spline parameter"], + }, + ) + + surfacebump_1 = nw.new_node( + nodegroup_surface_bump().name, + input_kwargs={ + "Geometry": store_named_attribute, + "Displacement": value_1, + "Scale": 20.0, + }, + ) + + addverticalstripes_1 = nw.new_node( + nodegroup_add_vertical_stripes().name, + input_kwargs={ + "Geometry": surfacebump_1, + "Seed": group_input.outputs["Random Seed"], + }, + ) + + instance_on_points = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={ + "Points": droplast.outputs["Last"], + "Instance": addverticalstripes_1, + "Rotation": curve_to_points.outputs["Rotation"], + "Scale": reroute_1, + }, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [instanceonpoints, instance_on_points]}, + ) + + set_material = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": join_geometry, + "Material": 
surface.shaderfunc_to_material( + shader_dragonfly_tail_shader, base_color, v, ring_length + ), + }, + ) + + realize_instances = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": set_material} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": realize_instances} + ) + + +@node_utils.to_nodegroup("nodegroup_droplast", singleton=False, type="GeometryNodeTree") +def nodegroup_droplast(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) + + index = nw.new_node(Nodes.Index) + + domain_size = nw.new_node( + Nodes.DomainSize, + input_kwargs={"Geometry": group_input.outputs["Geometry"]}, + attrs={"component": "POINTCLOUD"}, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: domain_size.outputs["Point Count"], 1: 1.0}, + attrs={"operation": "SUBTRACT"}, + ) + + equal = nw.new_node( + Nodes.Compare, + input_kwargs={2: index, 3: subtract}, + attrs={"data_type": "INT", "operation": "EQUAL"}, + ) + + op_not = nw.new_node( + Nodes.BooleanMath, input_kwargs={0: equal}, attrs={"operation": "NOT"} + ) + + delete_geometry_1 = nw.new_node( + Nodes.DeleteGeometry, + input_kwargs={"Geometry": group_input.outputs["Geometry"], "Selection": op_not}, + ) + + delete_geometry = nw.new_node( + Nodes.DeleteGeometry, + input_kwargs={"Geometry": group_input.outputs["Geometry"], "Selection": equal}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Last": delete_geometry_1, "Others": delete_geometry}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_add_vertical_stripes", singleton=False, type="GeometryNodeTree" +) +def nodegroup_add_vertical_stripes(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketFloat", "Scale", 5.0), + ("NodeSocketFloat", "Seed", 0.0), + ], + ) + + position = nw.new_node(Nodes.InputPosition) + + separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": position}) + + absolute = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["X"]}, + attrs={"operation": "ABSOLUTE"}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["Z"], 1: 0.05}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": absolute, "Y": separate_xyz.outputs["Y"], "Z": multiply}, + ) + + voronoi_texture = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={ + "Vector": combine_xyz, + "W": group_input.outputs["Seed"], + "Scale": group_input.outputs["Scale"], + }, + attrs={"voronoi_dimensions": "4D", "feature": "DISTANCE_TO_EDGE"}, + ) + + reroute_1 = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": voronoi_texture.outputs["Distance"]} + ) + + store_named_attribute_3 = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + "Name": "tail vertical strips", + "Value": reroute_1, + }, + ) + + normal = nw.new_node(Nodes.InputNormal) + + multiply_1 = nw.new_node( + Nodes.Math, input_kwargs={0: reroute_1, 1: 0.1}, attrs={"operation": "MULTIPLY"} + ) + + scale = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: normal, "Scale": multiply_1}, + attrs={"operation": "SCALE"}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": store_named_attribute_3, + "Offset": 
scale.outputs["Vector"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_position} + ) + + +## old version +# def shader_dragonfly_tail_shader(nw: NodeWrangler): +# # Code generated using version 2.4.3 of the node_transpiler + +# texture_coordinate = nw.new_node(Nodes.TextureCoord) + +# attribute_1 = nw.new_node(Nodes.Attribute, +# attrs={'attribute_name': 'cross section parameter'}) + +# colorramp_1 = nw.new_node(Nodes.ColorRamp, +# input_kwargs={'Fac': attribute_1.outputs["Fac"]}) +# colorramp_1.color_ramp.elements.new(0) +# colorramp_1.color_ramp.elements.new(0) +# colorramp_1.color_ramp.elements[0].position = 0.0 +# colorramp_1.color_ramp.elements[0].color = (1.0, 1.0, 1.0, 1.0) +# colorramp_1.color_ramp.elements[1].position = 0.4455 +# colorramp_1.color_ramp.elements[1].color = (0.0, 0.0, 0.0, 1.0) +# colorramp_1.color_ramp.elements[2].position = 0.5045 +# colorramp_1.color_ramp.elements[2].color = (0.0, 0.0, 0.0, 1.0) +# colorramp_1.color_ramp.elements[3].position = 1.0 +# colorramp_1.color_ramp.elements[3].color = (1.0, 1.0, 1.0, 1.0) + +# map_range_1 = nw.new_node(Nodes.MapRange, +# input_kwargs={'Value': colorramp_1.outputs["Color"], 1: 0.02, 2: 0.38}) + +# attribute = nw.new_node(Nodes.Attribute, +# attrs={'attribute_name': 'spline parameter'}) + +# map_range = nw.new_node(Nodes.MapRange, +# input_kwargs={'Value': attribute.outputs["Fac"], 1: 0.18, 2: 0.42}) + +# combine_xyz = nw.new_node(Nodes.CombineXYZ, +# input_kwargs={'X': map_range_1.outputs["Result"], 'Y': map_range.outputs["Result"]}) + +# group_2 = nw.new_node(nodegroup_add_noise().name, +# input_kwargs={'Vector': combine_xyz, 'amount': (1.0, 1.0, 0.0), 'Noise Eval Position': texture_coordinate.outputs["Object"],}) + +# separate_xyz = nw.new_node(Nodes.SeparateXYZ, +# input_kwargs={'Vector': group_2}) + +# add = nw.new_node(Nodes.Math, +# input_kwargs={0: separate_xyz.outputs["X"], 1: separate_xyz.outputs["Y"]}) + +# voronoi_texture = nw.new_node(Nodes.VoronoiTexture, +# input_kwargs={'W': attribute.outputs["Fac"], 'Scale': 5.34, 'Randomness': 0.0}, +# attrs={'voronoi_dimensions': '1D'}) + +# map_range_2 = nw.new_node(Nodes.MapRange, +# input_kwargs={'Value': voronoi_texture.outputs["Distance"], 2: 0.1, 3: 1.0, 4: 0.0}) + +# maximum = nw.new_node(Nodes.Math, +# input_kwargs={0: add, 1: map_range_2.outputs["Result"]}, +# attrs={'operation': 'MAXIMUM'}) + +# group_1 = nw.new_node(nodegroup_color_noise().name, +# input_kwargs={'Scale': 6.4, 'Color': (0.1582, 0.291, 1.0, 1.0), 'Value To Min': 0.4}) + +# attribute_2 = nw.new_node(Nodes.Attribute, +# attrs={'attribute_name': 'tail vertical strips'}) + +# map_range_3 = nw.new_node(Nodes.MapRange, +# input_kwargs={'Value': attribute_2.outputs["Fac"], 1: 0.16, 2: 0.34}) + +# mix_1 = nw.new_node(Nodes.MixRGB, +# input_kwargs={'Fac': 0.0, 'Color1': (0.0144, 0.016, 0.0152, 1.0), 'Color2': (0.544, 0.5299, 0.5841, 1.0)}) + +# mix = nw.new_node(Nodes.MixRGB, +# input_kwargs={'Fac': maximum, 'Color1': group_1, 'Color2': mix_1}) + +# principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, +# input_kwargs={'Base Color': mix, 'Metallic': 0.9, 'Specular': 0.5114, 'Roughness': 0.2568}) + +# material_output = nw.new_node(Nodes.MaterialOutput, +# input_kwargs={'Surface': principled_bsdf}) diff --git a/infinigen/assets/objects/creatures/insects/parts/wing/dragonfly_wing.py b/infinigen/assets/objects/creatures/insects/parts/wing/dragonfly_wing.py new file mode 100644 index 000000000..960182ba0 --- /dev/null +++ 
b/infinigen/assets/objects/creatures/insects/parts/wing/dragonfly_wing.py @@ -0,0 +1,427 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Yiming Zuo + + +from infinigen.assets.objects.creatures.insects.utils.shader_utils import ( + nodegroup_add_noise, +) +from infinigen.core import surface +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler + + +@node_utils.to_nodegroup( + "nodegroup_dragonfly_wing", singleton=False, type="GeometryNodeTree" +) +def nodegroup_dragonfly_wing(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + resolution = nw.new_node(Nodes.Integer, label="resolution", attrs={"integer": 32}) + resolution.integer = 32 + + pivot1 = nw.new_node(Nodes.Vector, label="pivot1") + pivot1.vector = (1.84, -0.28, 0.0) + + add = nw.new_node(Nodes.VectorMath, input_kwargs={0: pivot1}) + + reroute = nw.new_node(Nodes.Reroute, input_kwargs={"Input": add.outputs["Vector"]}) + + quadratic_bezier = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Resolution": resolution, + "Start": (0.0, 0.0, 0.0), + "Middle": (1.2, -0.16, 0.0), + "End": reroute, + }, + ) + + pivot2 = nw.new_node(Nodes.Vector, label="pivot2") + pivot2.vector = (3.98, -0.78, 0.0) + + quadratic_bezier_1 = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Resolution": resolution, + "Start": reroute, + "Middle": (3.98, -0.32, 0.0), + "End": pivot2, + }, + ) + + pivot3 = nw.new_node(Nodes.Vector, label="pivot3") + pivot3.vector = (2.54, -1.14, 0.0) + + quadratic_bezier_2 = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Resolution": resolution, + "Start": pivot2, + "Middle": (4.0, -1.1, 0.0), + "End": pivot3, + }, + ) + + pivot4 = nw.new_node(Nodes.Vector, label="pivot4") + pivot4.vector = (-0.06, -0.74, 0.0) + + quadratic_bezier_3 = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Resolution": resolution, + "Start": pivot3, + "Middle": (0.28, -1.34, 0.0), + "End": pivot4, + }, + ) + + pivot5 = nw.new_node(Nodes.Vector, label="pivot5") + pivot5.vector = (0.0, -0.14, 0.0) + + bezier_segment = nw.new_node( + Nodes.CurveBezierSegment, + input_kwargs={ + "Resolution": resolution, + "Start": pivot4, + "Start Handle": (0.16, -0.44, 0.0), + "End Handle": (-0.24, -0.34, 0.0), + "End": pivot5, + }, + ) + + resample_curve = nw.new_node( + Nodes.ResampleCurve, input_kwargs={"Curve": bezier_segment, "Count": resolution} + ) + + quadratic_bezier_4 = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Resolution": resolution, + "Start": pivot5, + "Middle": (-0.18, -0.04, 0.0), + "End": (0.0, 0.0, 0.0), + }, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={ + "Geometry": [ + quadratic_bezier, + quadratic_bezier_1, + quadratic_bezier_2, + quadratic_bezier_3, + resample_curve, + quadratic_bezier_4, + ] + }, + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, input_kwargs={"Curve": join_geometry} + ) + + merge_by_distance = nw.new_node( + Nodes.MergeByDistance, input_kwargs={"Geometry": curve_to_mesh} + ) + + mesh_to_curve = nw.new_node( + Nodes.MeshToCurve, input_kwargs={"Mesh": merge_by_distance} + ) + + fill_curve = nw.new_node(Nodes.FillCurve, input_kwargs={"Curve": mesh_to_curve}) + + subdivide_mesh = nw.new_node(Nodes.SubdivideMesh, input_kwargs={"Mesh": fill_curve}) + + curve_to_mesh_1 = nw.new_node( + Nodes.CurveToMesh, 
input_kwargs={"Curve": quadratic_bezier_2} + ) + + geometry_proximity = nw.new_node( + Nodes.Proximity, + input_kwargs={"Target": curve_to_mesh_1}, + attrs={"target_element": "EDGES"}, + ) + + store_named_attribute = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": subdivide_mesh, + "Name": "distance to edge", + "Value": geometry_proximity.outputs["Distance"], + }, + ) + + spline_parameter = nw.new_node(Nodes.SplineParameter) + + capture_attribute = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={ + "Geometry": quadratic_bezier_1, + 2: spline_parameter.outputs["Factor"], + }, + ) + + curve_to_mesh_2 = nw.new_node( + Nodes.CurveToMesh, input_kwargs={"Curve": capture_attribute.outputs["Geometry"]} + ) + + less_than = nw.new_node( + Nodes.Compare, + input_kwargs={0: capture_attribute.outputs[2], 1: 0.65}, + attrs={"operation": "LESS_THAN"}, + ) + + greater_than = nw.new_node( + Nodes.Compare, input_kwargs={0: capture_attribute.outputs[2], 1: 0.84} + ) + + op_or = nw.new_node( + Nodes.BooleanMath, + input_kwargs={0: less_than, 1: greater_than}, + attrs={"operation": "OR"}, + ) + + delete_geometry = nw.new_node( + Nodes.DeleteGeometry, + input_kwargs={"Geometry": curve_to_mesh_2, "Selection": op_or}, + ) + + geometry_proximity_1 = nw.new_node( + Nodes.Proximity, + input_kwargs={"Target": delete_geometry}, + attrs={"target_element": "EDGES"}, + ) + + store_named_attribute_2 = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": store_named_attribute, + "Name": "stripes coordinate", + "Value": geometry_proximity_1.outputs["Distance"], + }, + ) + + position = nw.new_node(Nodes.InputPosition) + + store_named_attribute_1 = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": store_named_attribute_2, + "Name": "pos", + "Value": position, + }, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + set_material = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": store_named_attribute_1, + "Material": surface.shaderfunc_to_material(shader_wing_shader), + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_material} + ) + + +def shader_wing_shader(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + attribute_2 = nw.new_node( + Nodes.Attribute, attrs={"attribute_name": "stripes coordinate"} + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": attribute_2.outputs["Fac"], 1: 0.04, 2: 0.54}, + ) + + attribute_1 = nw.new_node(Nodes.Attribute, attrs={"attribute_name": "pos"}) + + vector_rotate = nw.new_node( + Nodes.VectorRotate, + input_kwargs={"Vector": attribute_1.outputs["Vector"], "Angle": 0.1047}, + ) + + value = nw.new_node(Nodes.Value) + value.outputs[0].default_value = 0.08 + + group = nw.new_node( + nodegroup_add_noise().name, + input_kwargs={"Vector": vector_rotate, "amount": value}, + ) + + voronoi_texture_2 = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={"Vector": group, "Scale": 12.0, "Randomness": 0.7}, + attrs={"voronoi_dimensions": "2D", "feature": "DISTANCE_TO_EDGE"}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: voronoi_texture_2.outputs["Distance"], 1: 2.34}, + attrs={"operation": "MULTIPLY"}, + ) + + separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": group}) + + voronoi_texture = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={ + "W": separate_xyz.outputs["Y"], + "Scale": 14.96, + "Randomness": 0.5, + }, + attrs={"voronoi_dimensions": "1D", "feature": 
"DISTANCE_TO_EDGE"}, + ) + + value_2 = nw.new_node(Nodes.Value) + value_2.outputs[0].default_value = -0.18 + + less_than = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["Y"], 1: value_2}, + attrs={"operation": "LESS_THAN"}, + ) + + maximum = nw.new_node( + Nodes.Math, + input_kwargs={0: voronoi_texture.outputs["Distance"], 1: less_than}, + attrs={"operation": "MAXIMUM"}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, input_kwargs={0: maximum, 1: 0.56}, attrs={"operation": "MULTIPLY"} + ) + + vector_rotate_1 = nw.new_node( + Nodes.VectorRotate, + input_kwargs={"Vector": attribute_1.outputs["Vector"], "Angle": 0.2485}, + ) + + value_1 = nw.new_node(Nodes.Value) + value_1.outputs[0].default_value = 0.08 + + group_1 = nw.new_node( + nodegroup_add_noise().name, + input_kwargs={"Vector": vector_rotate_1, "amount": value_1}, + ) + + separate_xyz_1 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": group_1}) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["Y"], 1: value_2}, + attrs={"operation": "SUBTRACT"}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract, 1: -0.74}, + attrs={"operation": "MULTIPLY"}, + ) + + power = nw.new_node( + Nodes.Math, input_kwargs={0: multiply_2, 1: 2.22}, attrs={"operation": "POWER"} + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: separate_xyz_1.outputs["Y"], 1: power} + ) + + voronoi_texture_1 = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={"W": add, "Scale": 10.02}, + attrs={"voronoi_dimensions": "1D", "feature": "DISTANCE_TO_EDGE"}, + ) + + greater_than = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["Y"], 1: value_2}, + attrs={"operation": "GREATER_THAN"}, + ) + + maximum_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: voronoi_texture_1.outputs["Distance"], 1: greater_than}, + attrs={"operation": "MAXIMUM"}, + ) + + less_than_1 = nw.new_node( + Nodes.Math, input_kwargs={0: add, 1: -0.48}, attrs={"operation": "LESS_THAN"} + ) + + maximum_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: maximum_1, 1: less_than_1}, + attrs={"operation": "MAXIMUM"}, + ) + + multiply_3 = nw.new_node( + Nodes.Math, input_kwargs={0: maximum_2, 1: 3.0}, attrs={"operation": "MULTIPLY"} + ) + + minimum = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_1, 1: multiply_3}, + attrs={"operation": "MINIMUM"}, + ) + + minimum_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply, 1: minimum}, + attrs={"operation": "MINIMUM"}, + ) + + attribute = nw.new_node( + Nodes.Attribute, attrs={"attribute_name": "distance to edge"} + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": attribute.outputs["Color"], 3: 0.1, 4: 0.0}, + ) + + maximum_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: minimum_1, 1: map_range.outputs["Result"]}, + attrs={"operation": "MAXIMUM"}, + ) + + minimum_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: map_range_1.outputs["Result"], 1: maximum_3}, + attrs={"operation": "MINIMUM"}, + ) + + colorramp = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": minimum_2}) + colorramp.color_ramp.elements[0].position = 0.0 + colorramp.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) + colorramp.color_ramp.elements[1].position = 0.1136 + colorramp.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) + + reroute = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": colorramp.outputs["Color"]} + ) + + transparent_bsdf_1 = nw.new_node( + Nodes.TransparentBSDF, input_kwargs={"Color": reroute} + ) + + principled_bsdf = nw.new_node( 
+ Nodes.PrincipledBSDF, input_kwargs={"Base Color": reroute} + ) + + mix_shader = nw.new_node( + Nodes.MixShader, + input_kwargs={"Fac": 0.1, 1: transparent_bsdf_1, 2: principled_bsdf}, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": mix_shader} + ) diff --git a/infinigen/assets/objects/creatures/insects/utils/geom_utils.py b/infinigen/assets/objects/creatures/insects/utils/geom_utils.py new file mode 100644 index 000000000..5aed4b65f --- /dev/null +++ b/infinigen/assets/objects/creatures/insects/utils/geom_utils.py @@ -0,0 +1,1454 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Yiming Zuo + + +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler + +from .shader_utils import nodegroup_add_noise, nodegroup_color_noise + + +@node_utils.to_nodegroup( + "nodegroup_symmetric_clone", singleton=False, type="GeometryNodeTree" +) +def nodegroup_symmetric_clone(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketVectorXYZ", "Scale", (1.0, -1.0, 1.0)), + ], + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + "Scale": group_input.outputs["Scale"], + }, + ) + + flip_faces = nw.new_node(Nodes.FlipFaces, input_kwargs={"Mesh": transform}) + + join_geometry_2 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [group_input.outputs["Geometry"], flip_faces]}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Both": join_geometry_2, + "Orig": group_input.outputs["Geometry"], + "Inverted": flip_faces, + }, + ) + + +@node_utils.to_nodegroup("nodegroup_add_hair", singleton=False, type="GeometryNodeTree") +def nodegroup_add_hair(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Mesh", None), + ("NodeSocketGeometry", "Hair", None), + ("NodeSocketFloat", "Density", 100.0), + ("NodeSocketVector", "rot mean", (1.18, 0.0, 0.0)), + ("NodeSocketFloat", "scale mean", 0.05), + ], + ) + + distribute_points_on_faces = nw.new_node( + Nodes.DistributePointsOnFaces, + input_kwargs={ + "Mesh": group_input.outputs["Mesh"], + "Density": group_input.outputs["Density"], + }, + ) + + randomrotationscale = nw.new_node( + nodegroup_random_rotation_scale().name, + input_kwargs={ + "random seed": -2.4, + "rot mean": group_input.outputs["rot mean"], + "scale mean": group_input.outputs["scale mean"], + }, + ) + + instanceonpoints = nw.new_node( + nodegroup_instance_on_points().name, + input_kwargs={ + "rotation base": distribute_points_on_faces.outputs["Rotation"], + "rotation delta": randomrotationscale.outputs["Vector"], + "scale": randomrotationscale.outputs["Value"], + "Points": distribute_points_on_faces.outputs["Points"], + "Instance": group_input.outputs["Hair"], + }, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [instanceonpoints, group_input.outputs["Mesh"]]}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Instances": join_geometry} + ) + + +@node_utils.to_nodegroup( + "nodegroup_attach_part", singleton=False, type="GeometryNodeTree" +) +def 
nodegroup_attach_part(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Skin Mesh", None), + ("NodeSocketGeometry", "Skeleton Curve", None), + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketFloatFactor", "Length Fac", 0.0), + ("NodeSocketVectorEuler", "Ray Rot", (0.0, 0.0, 0.0)), + ("NodeSocketFloat", "Rad", 0.0), + ("NodeSocketVector", "Part Rot", (0.0, 0.0, 0.0)), + ("NodeSocketBool", "Do Normal Rot", False), + ("NodeSocketBool", "Do Tangent Rot", False), + ], + ) + + part_surface = nw.new_node( + nodegroup_part_surface().name, + input_kwargs={ + "Skeleton Curve": group_input.outputs["Skeleton Curve"], + "Skin Mesh": group_input.outputs["Skin Mesh"], + "Length Fac": group_input.outputs["Length Fac"], + "Ray Rot": group_input.outputs["Ray Rot"], + "Rad": group_input.outputs["Rad"], + }, + ) + + deg2rad = nw.new_node( + nodegroup_deg2_rad().name, input_kwargs={"Deg": group_input.outputs["Part Rot"]} + ) + + raycast_rotation = nw.new_node( + nodegroup_raycast_rotation().name, + input_kwargs={ + "Rotation": deg2rad, + "Hit Normal": part_surface.outputs["Hit Normal"], + "Curve Tangent": part_surface.outputs["Tangent"], + "Do Normal Rot": group_input.outputs["Do Normal Rot"], + "Do Tangent Rot": group_input.outputs["Do Tangent Rot"], + }, + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + "Translation": part_surface.outputs["Position"], + "Rotation": raycast_rotation, + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": transform, + "Position": part_surface.outputs["Position"], + "Rotation": raycast_rotation, + }, + ) + + +@node_utils.to_nodegroup( + "nodegroup_random_rotation_scale", singleton=False, type="GeometryNodeTree" +) +def nodegroup_random_rotation_scale(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "random seed", 0.0), + ("NodeSocketFloat", "noise scale", 10.0), + ("NodeSocketVector", "rot mean", (0.0, 0.0, 0.0)), + ("NodeSocketFloat", "rot std z", 1.0), + ("NodeSocketFloat", "scale mean", 0.35), + ("NodeSocketFloat", "scale std", 0.1), + ], + ) + + position_3 = nw.new_node(Nodes.InputPosition) + + add = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: position_3, 1: group_input.outputs["random seed"]}, + ) + + noise_texture_2 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": add.outputs["Vector"], + "Scale": group_input.outputs["noise scale"], + }, + ) + + value_2 = nw.new_node(Nodes.Value) + value_2.outputs[0].default_value = 0.5 + + subtract = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: noise_texture_2.outputs["Color"], 1: value_2}, + attrs={"operation": "SUBTRACT"}, + ) + + separate_xyz_2 = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": subtract.outputs["Vector"]} + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: separate_xyz_2.outputs["X"], + 1: group_input.outputs["rot std z"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply}) + + add_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: group_input.outputs["rot mean"], 1: combine_xyz}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: separate_xyz_2.outputs["Y"], + 1: group_input.outputs["scale std"], + }, + 
attrs={"operation": "MULTIPLY"}, + ) + + add_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_1, 1: group_input.outputs["scale mean"]}, + attrs={"use_clamp": True}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Vector": add_1.outputs["Vector"], "Value": add_2}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_instance_on_points", singleton=False, type="GeometryNodeTree" +) +def nodegroup_instance_on_points(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVectorEuler", "rotation base", (0.0, 0.0, 0.0)), + ("NodeSocketVectorEuler", "rotation delta", (-1.5708, 0.0, 0.0)), + ("NodeSocketVectorTranslation", "translation", (0.0, -0.5, 0.0)), + ("NodeSocketFloat", "scale", 0.0), + ("NodeSocketGeometry", "Points", None), + ("NodeSocketGeometry", "Instance", None), + ], + ) + + rotate_euler_1 = nw.new_node( + Nodes.RotateEuler, + input_kwargs={ + "Rotation": group_input.outputs["rotation base"], + "Rotate By": group_input.outputs["rotation delta"], + }, + attrs={"space": "LOCAL"}, + ) + + instance_on_points_1 = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={ + "Points": group_input.outputs["Points"], + "Instance": group_input.outputs["Instance"], + "Rotation": rotate_euler_1, + "Scale": group_input.outputs["scale"], + }, + ) + + translate_instances = nw.new_node( + Nodes.TranslateInstances, + input_kwargs={ + "Instances": instance_on_points_1, + "Translation": group_input.outputs["translation"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Instances": translate_instances} + ) + + +def shader_dragonfly_body_shader(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + attribute_1 = nw.new_node(Nodes.Attribute, attrs={"attribute_name": "pos"}) + + separate_xyz_1 = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": attribute_1.outputs["Vector"]} + ) + + absolute = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_1.outputs["X"]}, + attrs={"operation": "ABSOLUTE"}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_1.outputs["Z"], 1: 3.0}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": absolute, "Y": separate_xyz_1.outputs["Y"], "Z": multiply}, + ) + + attribute_2 = nw.new_node(Nodes.Attribute, attrs={"attribute_name": "body seed"}) + + musgrave_texture = nw.new_node( + Nodes.MusgraveTexture, + input_kwargs={ + "Vector": combine_xyz_1, + "W": attribute_2.outputs["Fac"], + "Scale": 0.5, + "Dimension": 1.0, + "Lacunarity": 1.0, + }, + attrs={"musgrave_dimensions": "4D"}, + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": musgrave_texture, 1: -0.26, 2: 0.06} + ) + + attribute = nw.new_node( + Nodes.Attribute, attrs={"attribute_name": "spline parameter"} + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": attribute.outputs["Fac"]} + ) + + group = nw.new_node( + nodegroup_add_noise().name, + input_kwargs={ + "Vector": combine_xyz, + "Scale": 0.5, + "amount": (0.16, 0.26, 0.0), + "Noise Eval Position": combine_xyz_1, + }, + ) + + separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": group}) + + combine_xyz_2 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": separate_xyz.outputs["X"], "Y": attribute_2.outputs["Fac"]}, + ) + + voronoi_texture = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={"Vector": 
combine_xyz_2, "Scale": 10.0}, + attrs={"voronoi_dimensions": "2D"}, + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": voronoi_texture.outputs["Distance"], 1: 0.14, 2: 0.82}, + ) + + add = nw.new_node( + Nodes.Math, + input_kwargs={0: map_range_1.outputs["Result"], 1: map_range.outputs["Result"]}, + ) + + colorramp = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": add}) + colorramp.color_ramp.elements[0].position = 0.7386 + colorramp.color_ramp.elements[0].color = (0.4397, 0.5841, 0.011, 1.0) + colorramp.color_ramp.elements[1].position = 1.0 + colorramp.color_ramp.elements[1].color = (0.008, 0.0065, 0.0116, 1.0) + + group_1 = nw.new_node( + nodegroup_color_noise().name, + input_kwargs={"Color": colorramp.outputs["Color"], "Value To Min": 0.4}, + ) + + principled_bsdf_1 = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": group_1, + "Metallic": 0.2182, + "Specular": 0.8318, + "Roughness": 0.1545, + }, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf_1} + ) + + +@node_utils.to_nodegroup( + "nodegroup_surface_bump", singleton=False, type="GeometryNodeTree" +) +def nodegroup_surface_bump(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketFloat", "Displacement", 0.02), + ("NodeSocketFloat", "Scale", 50.0), + ("NodeSocketFloat", "seed", 0.0), + ], + ) + + normal = nw.new_node(Nodes.InputNormal) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "W": group_input.outputs["seed"], + "Scale": group_input.outputs["Scale"], + }, + attrs={"noise_dimensions": "4D"}, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: noise_texture.outputs["Fac"]}, + attrs={"operation": "SUBTRACT"}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract, 1: group_input.outputs["Displacement"]}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: normal, 1: multiply}, + attrs={"operation": "MULTIPLY"}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + "Offset": multiply_1.outputs["Vector"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_position} + ) + + +@node_utils.to_nodegroup( + "nodegroup_circle_cross_section", singleton=False, type="GeometryNodeTree" +) +def nodegroup_circle_cross_section(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "random seed", 0.0), + ("NodeSocketFloat", "noise scale", 0.5), + ("NodeSocketFloat", "noise amount", 0.0), + ("NodeSocketInt", "Resolution", 256), + ("NodeSocketFloat", "radius", 1.0), + ("NodeSocketBool", "symmetric noise", False), + ], + ) + + curve_circle = nw.new_node( + Nodes.CurveCircle, + input_kwargs={"Resolution": group_input.outputs["Resolution"]}, + ) + + normal = nw.new_node(Nodes.InputNormal) + + position = nw.new_node(Nodes.InputPosition) + + separate_xyz_1 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": position}) + + absolute = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_1.outputs["Y"]}, + attrs={"operation": "ABSOLUTE"}, + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": separate_xyz_1.outputs["X"], + "Y": absolute, 
+ "Z": separate_xyz_1.outputs["Z"], + }, + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": combine_xyz_1, + "W": group_input.outputs["random seed"], + "Scale": group_input.outputs["noise scale"], + }, + attrs={"noise_dimensions": "4D"}, + ) + + subtract = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: noise_texture.outputs["Color"], 1: (0.5, 0.5, 0.5)}, + attrs={"operation": "SUBTRACT"}, + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": subtract.outputs["Vector"]} + ) + + absolute_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["Y"]}, + attrs={"operation": "ABSOLUTE"}, + ) + + scale = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: normal, "Scale": absolute_1}, + attrs={"operation": "SCALE"}, + ) + + scale_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: scale.outputs["Vector"], + "Scale": group_input.outputs["noise amount"], + }, + attrs={"operation": "SCALE"}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": curve_circle.outputs["Curve"], + "Offset": scale_1.outputs["Vector"], + }, + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": set_position, "Scale": group_input.outputs["radius"]}, + ) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Geometry": transform}) + + +@node_utils.to_nodegroup("nodegroup_deg2_rad", singleton=False, type="GeometryNodeTree") +def nodegroup_deg2_rad(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketVector", "Deg", (0.0, 0.0, 0.0))] + ) + + multiply = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: group_input.outputs["Deg"], 1: (0.0175, 0.0175, 0.0175)}, + attrs={"operation": "MULTIPLY"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Rad": multiply.outputs["Vector"]} + ) + + +@node_utils.to_nodegroup( + "nodegroup_raycast_rotation", singleton=False, type="GeometryNodeTree" +) +def nodegroup_raycast_rotation(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVectorEuler", "Rotation", (0.0, 0.0, 0.0)), + ("NodeSocketVector", "Hit Normal", (0.0, 0.0, 1.0)), + ("NodeSocketVector", "Curve Tangent", (0.0, 0.0, 1.0)), + ("NodeSocketBool", "Do Normal Rot", False), + ("NodeSocketBool", "Do Tangent Rot", False), + ], + ) + + align_euler_to_vector = nw.new_node( + Nodes.AlignEulerToVector, + input_kwargs={"Vector": group_input.outputs["Hit Normal"]}, + ) + + rotate_euler = nw.new_node( + Nodes.RotateEuler, + input_kwargs={ + "Rotation": group_input.outputs["Rotation"], + "Rotate By": align_euler_to_vector, + }, + ) + + if_normal_rot = nw.new_node( + Nodes.Switch, + input_kwargs={ + 0: group_input.outputs["Do Normal Rot"], + 8: group_input.outputs["Rotation"], + 9: rotate_euler, + }, + label="if_normal_rot", + attrs={"input_type": "VECTOR"}, + ) + + align_euler_to_vector_1 = nw.new_node( + Nodes.AlignEulerToVector, + input_kwargs={ + "Rotation": group_input.outputs["Rotation"], + "Vector": group_input.outputs["Curve Tangent"], + }, + ) + + rotate_euler_1 = nw.new_node( + Nodes.RotateEuler, + input_kwargs={ + "Rotation": align_euler_to_vector_1, + "Rotate By": group_input.outputs["Rotation"], + }, + attrs={"space": "LOCAL"}, + ) + + if_tangent_rot = nw.new_node( + Nodes.Switch, + input_kwargs={ + 0: group_input.outputs["Do Tangent Rot"], 
+ 8: if_normal_rot.outputs[3], + 9: rotate_euler_1, + }, + label="if_tangent_rot", + attrs={"input_type": "VECTOR"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Output": if_tangent_rot.outputs[3]} + ) + + +@node_utils.to_nodegroup( + "nodegroup_part_surface", singleton=False, type="GeometryNodeTree" +) +def nodegroup_part_surface(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Skeleton Curve", None), + ("NodeSocketGeometry", "Skin Mesh", None), + ("NodeSocketFloatFactor", "Length Fac", 0.0), + ("NodeSocketVectorEuler", "Ray Rot", (0.0, 0.0, 0.0)), + ("NodeSocketFloat", "Rad", 0.0), + ], + ) + + sample_curve = nw.new_node( + Nodes.SampleCurve, + input_kwargs={ + "Curve": group_input.outputs["Skeleton Curve"], + "Factor": group_input.outputs["Length Fac"], + }, + attrs={"mode": "FACTOR"}, + ) + + vector_rotate = nw.new_node( + Nodes.VectorRotate, + input_kwargs={ + "Vector": sample_curve.outputs["Tangent"], + "Rotation": group_input.outputs["Ray Rot"], + }, + attrs={"rotation_type": "EULER_XYZ"}, + ) + + raycast = nw.new_node( + Nodes.Raycast, + input_kwargs={ + "Target Geometry": group_input.outputs["Skin Mesh"], + "Source Position": sample_curve.outputs["Position"], + "Ray Direction": vector_rotate, + "Ray Length": 5.0, + }, + ) + + lerp = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Vector": group_input.outputs["Rad"], + 9: sample_curve.outputs["Position"], + 10: raycast.outputs["Hit Position"], + }, + label="lerp", + attrs={"data_type": "FLOAT_VECTOR", "clamp": False}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Position": lerp.outputs["Vector"], + "Hit Normal": raycast.outputs["Hit Normal"], + "Tangent": sample_curve.outputs["Tangent"], + "Skeleton Pos": sample_curve.outputs["Position"], + }, + ) + + +@node_utils.to_nodegroup( + "nodegroup_shape_quadratic", singleton=False, type="GeometryNodeTree" +) +def nodegroup_shape_quadratic( + nw: NodeWrangler, radius_control_points=[(0.0, 0.5), (1.0, 0.5)] +): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Profile Curve", None), + ("NodeSocketFloat", "random seed tilt", 0.5), + ("NodeSocketFloat", "noise scale tilt", 0.5), + ("NodeSocketFloat", "noise amount tilt", 5.0), + ("NodeSocketFloat", "random seed pos", 0.0), + ("NodeSocketFloat", "noise scale pos", 0.0), + ("NodeSocketFloat", "noise amount pos", 0.0), + ("NodeSocketIntUnsigned", "Resolution", 256), + ("NodeSocketVectorTranslation", "Start", (0.0, 0.15, -1.5)), + ("NodeSocketVectorTranslation", "Middle", (0.0, 0.0, 0.0)), + ("NodeSocketVectorTranslation", "End", (0.0, 0.0, 1.5)), + ], + ) + + quadratic_bezier = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Resolution": group_input.outputs["Resolution"], + "Start": group_input.outputs["Start"], + "Middle": group_input.outputs["Middle"], + "End": group_input.outputs["End"], + }, + ) + + spline_parameter_2 = nw.new_node(Nodes.SplineParameter) + + capture_attribute = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={ + "Geometry": quadratic_bezier, + 2: spline_parameter_2.outputs["Factor"], + }, + ) + + curve_tangent = nw.new_node(Nodes.CurveTangent) + + capture_attribute_1 = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={ + "Geometry": capture_attribute.outputs["Geometry"], + 1: curve_tangent, + }, + 
attrs={"data_type": "FLOAT_VECTOR"}, + ) + + position = nw.new_node(Nodes.InputPosition) + + add = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: position, 1: group_input.outputs["random seed pos"]}, + ) + + noise_texture_3 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": add.outputs["Vector"], + "Scale": group_input.outputs["noise scale pos"], + }, + ) + + value_1 = nw.new_node(Nodes.Value) + value_1.outputs[0].default_value = 0.5 + + subtract = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: noise_texture_3.outputs["Color"], 1: value_1}, + attrs={"operation": "SUBTRACT"}, + ) + + scale = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: subtract.outputs["Vector"], + "Scale": spline_parameter_2.outputs["Factor"], + }, + attrs={"operation": "SCALE"}, + ) + + scale_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: scale.outputs["Vector"], + "Scale": group_input.outputs["noise amount pos"], + }, + attrs={"operation": "SCALE"}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": capture_attribute_1.outputs["Geometry"], + "Offset": scale_1.outputs["Vector"], + }, + ) + + spline_parameter = nw.new_node(Nodes.SplineParameter) + + add_1 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: spline_parameter.outputs["Factor"], + 1: group_input.outputs["random seed tilt"], + }, + ) + + noise_texture_1 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={"W": add_1, "Scale": group_input.outputs["noise scale tilt"]}, + attrs={"noise_dimensions": "1D"}, + ) + + subtract_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: noise_texture_1.outputs["Fac"]}, + attrs={"operation": "SUBTRACT"}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract_1, 1: group_input.outputs["noise amount tilt"]}, + attrs={"operation": "MULTIPLY"}, + ) + + set_curve_tilt = nw.new_node( + Nodes.SetCurveTilt, input_kwargs={"Curve": set_position, "Tilt": multiply} + ) + + spline_parameter_1 = nw.new_node(Nodes.SplineParameter) + + float_curve = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": spline_parameter_1.outputs["Factor"]} + ) + node_utils.assign_curve(float_curve.mapping.curves[0], radius_control_points) + + set_curve_radius = nw.new_node( + Nodes.SetCurveRadius, + input_kwargs={"Curve": set_curve_tilt, "Radius": float_curve}, + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": set_curve_radius, + "Profile Curve": group_input.outputs["Profile Curve"], + "Fill Caps": True, + }, + ) + + curve_to_points = nw.new_node( + Nodes.CurveToPoints, + input_kwargs={"Curve": set_position}, + attrs={"mode": "EVALUATED"}, + ) + + geometry_proximity = nw.new_node( + Nodes.Proximity, + input_kwargs={"Target": curve_to_points.outputs["Points"]}, + attrs={"target_element": "POINTS"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Mesh": curve_to_mesh, + "spline parameter": capture_attribute.outputs[2], + "spline tangent": capture_attribute_1.outputs["Attribute"], + "radius to center": geometry_proximity.outputs["Distance"], + }, + ) + + +@node_utils.to_nodegroup( + "nodegroup_polar_to_cart", singleton=False, type="GeometryNodeTree" +) +def nodegroup_polar_to_cart(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "Angle", 0.5), + ("NodeSocketFloat", "Length", 0.0), + ("NodeSocketVector", "Origin", (0.0, 0.0, 0.0)), + ], + ) + + cosine = nw.new_node( + Nodes.Math, + 
input_kwargs={0: group_input.outputs["Angle"]}, + attrs={"operation": "COSINE"}, + ) + + sine = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Angle"]}, + attrs={"operation": "SINE"}, + ) + + construct_unit_vector = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": cosine, "Z": sine}, + label="Construct Unit Vector", + ) + + offset_polar = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: group_input.outputs["Length"], + 1: construct_unit_vector, + 2: group_input.outputs["Origin"], + }, + label="Offset Polar", + attrs={"operation": "MULTIPLY_ADD"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Vector": offset_polar.outputs["Vector"]} + ) + + +@node_utils.to_nodegroup("nodegroup_switch4", singleton=False, type="GeometryNodeTree") +def nodegroup_switch4(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketInt", "Arg", 0), + ("NodeSocketVector", "Arg == 0", (0.0, 0.0, 0.0)), + ("NodeSocketVector", "Arg == 1", (0.0, 0.0, 0.0)), + ("NodeSocketVector", "Arg == 2", (0.0, 0.0, 0.0)), + ("NodeSocketVector", "Arg == 3", (0.0, 0.0, 0.0)), + ], + ) + + greater_equal = nw.new_node( + Nodes.Compare, + input_kwargs={2: group_input.outputs["Arg"], 3: 2}, + attrs={"data_type": "INT", "operation": "GREATER_EQUAL"}, + ) + + greater_equal_1 = nw.new_node( + Nodes.Compare, + input_kwargs={2: group_input.outputs["Arg"], 3: 1}, + attrs={"data_type": "INT", "operation": "GREATER_EQUAL"}, + ) + + switch_1 = nw.new_node( + Nodes.Switch, + input_kwargs={ + 0: greater_equal_1, + 8: group_input.outputs["Arg == 0"], + 9: group_input.outputs["Arg == 1"], + }, + attrs={"input_type": "VECTOR"}, + ) + + greater_equal_2 = nw.new_node( + Nodes.Compare, + input_kwargs={2: group_input.outputs["Arg"], 3: 3}, + attrs={"data_type": "INT", "operation": "GREATER_EQUAL"}, + ) + + switch_2 = nw.new_node( + Nodes.Switch, + input_kwargs={ + 0: greater_equal_2, + 8: group_input.outputs["Arg == 2"], + 9: group_input.outputs["Arg == 3"], + }, + attrs={"input_type": "VECTOR"}, + ) + + switch = nw.new_node( + Nodes.Switch, + input_kwargs={0: greater_equal, 8: switch_1.outputs[3], 9: switch_2.outputs[3]}, + attrs={"input_type": "VECTOR"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Output": switch.outputs[3]} + ) + + +@node_utils.to_nodegroup( + "nodegroup_smooth_taper", singleton=False, type="GeometryNodeTree" +) +def nodegroup_smooth_taper(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + spline_parameter = nw.new_node(Nodes.SplineParameter) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: spline_parameter.outputs["Factor"], 1: 3.1416}, + attrs={"operation": "MULTIPLY"}, + ) + + sine = nw.new_node( + Nodes.Math, input_kwargs={0: multiply}, attrs={"operation": "SINE"} + ) + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "start_rad", 0.29), + ("NodeSocketFloat", "end_rad", 0.0), + ("NodeSocketFloat", "fullness", 2.5), + ], + ) + + divide = nw.new_node( + Nodes.Math, + input_kwargs={0: 1.0, 1: group_input.outputs["fullness"]}, + attrs={"operation": "DIVIDE"}, + ) + + power = nw.new_node( + Nodes.Math, input_kwargs={0: sine, 1: divide}, attrs={"operation": "POWER"} + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": spline_parameter.outputs["Factor"], + 3: group_input.outputs["start_rad"], + 4: group_input.outputs["end_rad"], + }, + 
attrs={"clamp": False}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: power, 1: map_range.outputs["Result"]}, + attrs={"operation": "MULTIPLY"}, + ) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Value": multiply_1}) + + +@node_utils.to_nodegroup( + "nodegroup_aspect_to_dim", singleton=False, type="GeometryNodeTree" +) +def nodegroup_aspect_to_dim(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketFloat", "Aspect Ratio", 1.0)] + ) + + greater_than = nw.new_node( + Nodes.Compare, input_kwargs={0: group_input.outputs["Aspect Ratio"], 1: 1.0} + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": group_input.outputs["Aspect Ratio"], "Y": 1.0}, + ) + + divide = nw.new_node( + Nodes.Math, + input_kwargs={0: 1.0, 1: group_input.outputs["Aspect Ratio"]}, + attrs={"operation": "DIVIDE"}, + ) + + combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"X": 1.0, "Y": divide}) + + switch = nw.new_node( + Nodes.Switch, + input_kwargs={0: greater_than, 8: combine_xyz_1, 9: combine_xyz_2}, + attrs={"input_type": "VECTOR"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"XY Scale": switch.outputs[3]} + ) + + +@node_utils.to_nodegroup( + "nodegroup_warped_circle_curve", singleton=False, type="GeometryNodeTree" +) +def nodegroup_warped_circle_curve(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVector", "Position", (0.0, 0.0, 0.0)), + ("NodeSocketInt", "Vertices", 32), + ], + ) + + mesh_circle = nw.new_node( + Nodes.MeshCircle, input_kwargs={"Vertices": group_input.outputs["Vertices"]} + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": mesh_circle, + "Position": group_input.outputs["Position"], + }, + ) + + mesh_to_curve = nw.new_node(Nodes.MeshToCurve, input_kwargs={"Mesh": set_position}) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Curve": mesh_to_curve}) + + +@node_utils.to_nodegroup( + "nodegroup_vector_sum", singleton=False, type="GeometryNodeTree" +) +def nodegroup_vector_sum(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketVector", "Vector", (0.0, 0.0, 0.0))] + ) + + separate_xyz_1 = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": group_input.outputs["Vector"]} + ) + + add = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_1.outputs["X"], 1: separate_xyz_1.outputs["Y"]}, + ) + + add_1 = nw.new_node( + Nodes.Math, input_kwargs={0: add, 1: separate_xyz_1.outputs["Z"]} + ) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Sum": add_1}) + + +@node_utils.to_nodegroup( + "nodegroup_polar_bezier", singleton=False, type="GeometryNodeTree" +) +def nodegroup_polar_bezier(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketIntUnsigned", "Resolution", 32), + ("NodeSocketVector", "Origin", (0.0, 0.0, 0.0)), + ("NodeSocketVector", "angles_deg", (0.0, 0.0, 0.0)), + ("NodeSocketVector", "Seg Lengths", (0.3, 0.3, 0.3)), + ("NodeSocketBool", "Do Bezier", True), + ], + ) + + mesh_line = nw.new_node(Nodes.MeshLine, input_kwargs={"Count": 4}) + + index = nw.new_node(Nodes.Index) + + deg2_rad = nw.new_node( 
+ Nodes.VectorMath, + input_kwargs={0: group_input.outputs["angles_deg"], "Scale": 0.0175}, + label="Deg2Rad", + attrs={"operation": "SCALE"}, + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": deg2_rad.outputs["Vector"]} + ) + + reroute = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": separate_xyz.outputs["X"]} + ) + + separate_xyz_1 = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": group_input.outputs["Seg Lengths"]} + ) + + polartocart = nw.new_node( + nodegroup_polar_to_cart().name, + input_kwargs={ + "Angle": reroute, + "Length": separate_xyz_1.outputs["X"], + "Origin": group_input.outputs["Origin"], + }, + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: reroute, 1: separate_xyz.outputs["Y"]} + ) + + polartocart_1 = nw.new_node( + nodegroup_polar_to_cart().name, + input_kwargs={ + "Angle": add, + "Length": separate_xyz_1.outputs["Y"], + "Origin": polartocart, + }, + ) + + add_1 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz.outputs["Z"], 1: add}) + + polartocart_2 = nw.new_node( + nodegroup_polar_to_cart().name, + input_kwargs={ + "Angle": add_1, + "Length": separate_xyz_1.outputs["Z"], + "Origin": polartocart_1, + }, + ) + + switch4 = nw.new_node( + nodegroup_switch4().name, + input_kwargs={ + "Arg": index, + "Arg == 0": group_input.outputs["Origin"], + "Arg == 1": polartocart, + "Arg == 2": polartocart_1, + "Arg == 3": polartocart_2, + }, + ) + + set_position = nw.new_node( + Nodes.SetPosition, input_kwargs={"Geometry": mesh_line, "Position": switch4} + ) + + mesh_to_curve = nw.new_node(Nodes.MeshToCurve, input_kwargs={"Mesh": set_position}) + + subdivide_curve_1 = nw.new_node( + Nodes.SubdivideCurve, + input_kwargs={ + "Curve": mesh_to_curve, + "Cuts": group_input.outputs["Resolution"], + }, + ) + + integer = nw.new_node(Nodes.Integer, attrs={"integer": 2}) + integer.integer = 2 + + bezier_segment = nw.new_node( + Nodes.CurveBezierSegment, + input_kwargs={ + "Resolution": integer, + "Start": group_input.outputs["Origin"], + "Start Handle": polartocart, + "End Handle": polartocart_1, + "End": polartocart_2, + }, + ) + + divide = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Resolution"], 1: integer}, + attrs={"operation": "DIVIDE"}, + ) + + subdivide_curve = nw.new_node( + Nodes.SubdivideCurve, input_kwargs={"Curve": bezier_segment, "Cuts": divide} + ) + + switch = nw.new_node( + Nodes.Switch, + input_kwargs={ + 1: group_input.outputs["Do Bezier"], + 14: subdivide_curve_1, + 15: subdivide_curve, + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Curve": switch.outputs[6], "Endpoint": polartocart_2}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_profile_part", singleton=False, type="GeometryNodeTree" +) +def nodegroup_profile_part(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Skeleton Curve", None), + ("NodeSocketGeometry", "Profile Curve", None), + ("NodeSocketFloatDistance", "Radius Func", 1.0), + ], + ) + + set_curve_radius = nw.new_node( + Nodes.SetCurveRadius, + input_kwargs={ + "Curve": group_input.outputs["Skeleton Curve"], + "Radius": group_input.outputs["Radius Func"], + }, + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": set_curve_radius, + "Profile Curve": group_input.outputs["Profile Curve"], + "Fill Caps": True, + }, + ) + + set_shade_smooth = nw.new_node( + Nodes.SetShadeSmooth, + 
input_kwargs={"Geometry": curve_to_mesh, "Shade Smooth": False}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_shade_smooth} + ) + + +@node_utils.to_nodegroup( + "nodegroup_simple_tube_v2", singleton=False, type="GeometryNodeTree" +) +def nodegroup_simple_tube_v2(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVector", "length_rad1_rad2", (1.0, 0.5, 0.3)), + ("NodeSocketVector", "angles_deg", (0.0, 0.0, 0.0)), + ("NodeSocketVector", "proportions", (0.3333, 0.3333, 0.3333)), + ("NodeSocketFloat", "aspect", 1.0), + ("NodeSocketBool", "do_bezier", True), + ("NodeSocketFloat", "fullness", 4.0), + ("NodeSocketVector", "Origin", (0.0, 0.0, 0.0)), + ], + ) + + vector_sum = nw.new_node( + nodegroup_vector_sum().name, + input_kwargs={"Vector": group_input.outputs["proportions"]}, + ) + + divide = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: group_input.outputs["proportions"], 1: vector_sum}, + attrs={"operation": "DIVIDE"}, + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, + input_kwargs={"Vector": group_input.outputs["length_rad1_rad2"]}, + ) + + scale = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: divide.outputs["Vector"], "Scale": separate_xyz.outputs["X"]}, + attrs={"operation": "SCALE"}, + ) + + polarbezier = nw.new_node( + nodegroup_polar_bezier().name, + input_kwargs={ + "Resolution": 25, + "Origin": group_input.outputs["Origin"], + "angles_deg": group_input.outputs["angles_deg"], + "Seg Lengths": scale.outputs["Vector"], + "Do Bezier": group_input.outputs["do_bezier"], + }, + ) + + aspect_to_dim = nw.new_node( + nodegroup_aspect_to_dim().name, + input_kwargs={"Aspect Ratio": group_input.outputs["aspect"]}, + ) + + position = nw.new_node(Nodes.InputPosition) + + multiply = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: aspect_to_dim, 1: position}, + attrs={"operation": "MULTIPLY"}, + ) + + warped_circle_curve = nw.new_node( + nodegroup_warped_circle_curve().name, + input_kwargs={"Position": multiply.outputs["Vector"], "Vertices": 40}, + ) + + smoothtaper = nw.new_node( + nodegroup_smooth_taper().name, + input_kwargs={ + "start_rad": separate_xyz.outputs["Y"], + "end_rad": separate_xyz.outputs["Z"], + "fullness": group_input.outputs["fullness"], + }, + ) + + profilepart = nw.new_node( + nodegroup_profile_part().name, + input_kwargs={ + "Skeleton Curve": polarbezier.outputs["Curve"], + "Profile Curve": warped_circle_curve, + "Radius Func": smoothtaper, + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": profilepart, + "Skeleton Curve": polarbezier.outputs["Curve"], + "Endpoint": polarbezier.outputs["Endpoint"], + }, + ) diff --git a/infinigen/assets/objects/creatures/insects/utils/shader_utils.py b/infinigen/assets/objects/creatures/insects/utils/shader_utils.py new file mode 100644 index 000000000..5731e032d --- /dev/null +++ b/infinigen/assets/objects/creatures/insects/utils/shader_utils.py @@ -0,0 +1,156 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
+ +# Authors: Yiming Zuo + + +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler + + +def shader_black_w_noise_shader(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group = nw.new_node( + nodegroup_color_noise().name, + input_kwargs={"Scale": 10.0, "Color": (0.0779, 0.0839, 0.0809, 1.0)}, + ) + + principled_bsdf_1 = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": group, + "Metallic": 0.9, + "Specular": 0.5114, + "Roughness": 0.2568, + }, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf_1} + ) + + +@node_utils.to_nodegroup("nodegroup_add_noise", singleton=False, type="ShaderNodeTree") +def nodegroup_add_noise(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVector", "Vector", (0.0, 0.0, 0.0)), + ("NodeSocketFloat", "Scale", 10.0), + ("NodeSocketVector", "amount", (0.1, 0.26, 0.0)), + ("NodeSocketFloat", "seed", 0.0), + ("NodeSocketVector", "Noise Eval Position", None), + ], + ) + + noise_texture_1 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": group_input.outputs["Noise Eval Position"], + "W": group_input.outputs["seed"], + "Scale": group_input.outputs["Scale"], + }, + attrs={"noise_dimensions": "4D"}, + ) + + subtract = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: noise_texture_1.outputs["Color"], 1: (0.5, 0.5, 0.5)}, + attrs={"operation": "SUBTRACT"}, + ) + + multiply = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: subtract.outputs["Vector"], 1: group_input.outputs["amount"]}, + attrs={"operation": "MULTIPLY"}, + ) + + add = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: multiply.outputs["Vector"], 1: group_input.outputs["Vector"]}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Vector": add.outputs["Vector"]} + ) + + +@node_utils.to_nodegroup( + "nodegroup_color_noise", singleton=False, type="ShaderNodeTree" +) +def nodegroup_color_noise(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + texture_coordinate = nw.new_node(Nodes.TextureCoord) + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "Scale", 0.8), + ("NodeSocketColor", "Color", (0.0147, 0.0156, 0.0152, 1.0)), + ("NodeSocketFloat", "Hue From Min", 0.4), + ("NodeSocketFloat", "Hue From Max", 0.7), + ("NodeSocketFloat", "Hue To Min", 0.48), + ("NodeSocketFloat", "Hue To Max", 0.55), + ("NodeSocketFloat", "Value From Min", 0.4), + ("NodeSocketFloat", "Value From Max", 0.78), + ("NodeSocketFloat", "Value To Min", -0.56), + ("NodeSocketFloat", "Value To Max", 1.0), + ], + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": texture_coordinate.outputs["Object"], + "Scale": group_input.outputs["Scale"], + "Detail": 10.0, + "Roughness": 0.7, + }, + ) + + separate_rgb = nw.new_node( + Nodes.SeparateColor, + input_kwargs={"Color": noise_texture.outputs["Color"]}, + attrs={"mode": "HSV"}, + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": separate_rgb.outputs["Green"], + 1: group_input.outputs["Hue From Min"], + 2: group_input.outputs["Hue From Max"], + 3: group_input.outputs["Hue To Min"], + 4: group_input.outputs["Hue To Max"], + }, + attrs={"interpolation_type": "SMOOTHSTEP"}, + ) + + map_range_2 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": 
separate_rgb.outputs["Blue"], + 1: group_input.outputs["Value From Min"], + 2: group_input.outputs["Value From Max"], + 3: group_input.outputs["Value To Min"], + 4: group_input.outputs["Value To Max"], + }, + attrs={"interpolation_type": "SMOOTHSTEP"}, + ) + + hue_saturation_value = nw.new_node( + "ShaderNodeHueSaturation", + input_kwargs={ + "Hue": map_range_1.outputs["Result"], + "Value": map_range_2.outputs["Result"], + "Color": group_input.outputs["Color"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Color": hue_saturation_value} + ) diff --git a/infinigen/assets/objects/creatures/jellyfish.py b/infinigen/assets/objects/creatures/jellyfish.py new file mode 100644 index 000000000..58b395bbc --- /dev/null +++ b/infinigen/assets/objects/creatures/jellyfish.py @@ -0,0 +1,476 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Lingjie Mei + + +import bpy +import numpy as np +from mathutils import Vector +from numpy.random import uniform +from scipy.interpolate import interp1d + +import infinigen.core.util.blender as butil +from infinigen.assets.objects.creatures.util.animation.driver_repeated import ( + repeated_driver, +) +from infinigen.assets.utils.decorate import ( + geo_extension, + read_co, + remove_vertices, + subsurface2face_size, + write_attribute, + write_co, +) +from infinigen.assets.utils.mesh import polygon_angles +from infinigen.assets.utils.misc import assign_material +from infinigen.assets.utils.nodegroup import geo_base_selection +from infinigen.assets.utils.object import ( + join_objects, + new_circle, + new_empty, + new_icosphere, +) +from infinigen.core import surface +from infinigen.core.nodes.node_info import Nodes +from infinigen.core.nodes.node_wrangler import NodeWrangler +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.surface import shaderfunc_to_material, write_attr_data +from infinigen.core.tagging import tag_object +from infinigen.core.util.blender import deep_clone_obj +from infinigen.core.util.color import hsv2rgba +from infinigen.core.util.math import FixedSeed +from infinigen.core.util.random import log_uniform + + +class JellyfishFactory(AssetFactory): + def __init__(self, factory_seed, coarse=False): + super().__init__(factory_seed, coarse) + with FixedSeed(factory_seed): + self.base_hue = np.random.normal(0.57, 0.15) + self.outside_material = ( + self.make_transparent() if uniform(0, 1) < 0.8 else self.make_dotted() + ) + self.inside_material = ( + self.make_transparent() if uniform(0, 1) < 0.8 else self.make_opaque() + ) + self.tentacle_material = self.make_transparent() + self.arm_mat_transparent = self.make_transparent() + self.arm_mat_opaque = self.make_opaque() + self.arm_mat_solid = self.make_solid() + + self.has_arm = uniform(0, 1) < 0.5 + arm_radius = uniform(0, 0.3) + self.arm_radius_range = arm_radius, arm_radius + uniform(0.1, 0.4) + self.arm_height_range = -uniform(0.4, 0.5), -uniform(0, 0.2) + self.arm_min_distance = uniform(0.06, 0.08) + self.arm_size = uniform(0.03, 0.06) + self.arm_length = log_uniform(2, 5) + self.arm_bend_angle = uniform(0, np.pi / 60) + self.arm_displace_range = uniform(0, 0.4), uniform(0.4, 0.8) + + self.tentacle_min_distance = uniform(0.04, 0.06) + self.tentacle_size = uniform(0.005, 0.01) + self.tentacle_length = log_uniform(1.5, 2.5) + self.tentacle_bend_angle = uniform(0, np.pi / 12) + + self.cap_thickness = 
uniform(0.05, 0.6) + self.cap_inner_radius = uniform(0.6, 0.8) + self.cap_z_scale = log_uniform(0.4, 1.5) + self.cap_dent = uniform(0.15, 0.3) if uniform(0, 1) < 0.5 else 0 + + self.length_scale = log_uniform(0.25, 2.0) + self.anim_freq = 1 / log_uniform(25, 100) + self.move_freq = 1 / log_uniform(500, 1000) + + def create_asset(self, face_size, **params): + obj, radius = self.build_cap(face_size) + + assign_material(obj, [self.outside_material, self.inside_material]) + for axis in "XY": + butil.modify_mesh( + obj, + "SIMPLE_DEFORM", + deform_method="TWIST", + angle=uniform(-np.pi / 3, np.pi / 3), + deform_axis=axis, + ) + for axis in "XY": + butil.modify_mesh( + obj, + "SIMPLE_DEFORM", + deform_method="BEND", + angle=uniform(-np.pi / 3, np.pi / 3), + deform_axis=axis, + ) + + def selection(nw: NodeWrangler): + x, y, z = nw.separate(nw.new_node(Nodes.InputPosition)) + r = nw.math( + "POWER", nw.add(nw.math("POWER", x, 2), nw.math("POWER", y, 2)), 0.5 + ) + center = nw.boolean_math( + "AND", + nw.compare("GREATER_THAN", r, self.arm_radius_range[0] * radius), + nw.compare("LESS_THAN", r, self.arm_radius_range[1] * radius), + ) + down = nw.compare( + "LESS_THAN", nw.separate(nw.new_node(Nodes.InputNormal))[-1], 0 + ) + inside = nw.new_node(Nodes.NamedAttribute, ["inside"]) + return nw.boolean_math("AND", nw.boolean_math("AND", center, down), inside) + + if self.has_arm: + long_arms = self.place_tentacles( + obj, + selection, + self.arm_min_distance, + self.arm_size, + self.arm_length, + self.arm_bend_angle, + displace=True, + ) + for a in long_arms: + assign_material( + a, + np.random.choice( + [ + self.arm_mat_opaque, + self.arm_mat_transparent, + self.arm_mat_solid, + ] + ), + ) + else: + long_arms = [] + + tentacles = self.place_tentacles( + obj, + "boundary", + self.tentacle_min_distance, + self.tentacle_size, + self.tentacle_length, + self.tentacle_bend_angle, + ) + assign_material(tentacles, self.tentacle_material) + + obj = join_objects([obj] + long_arms + tentacles) + head_z = np.amax(read_co(obj)[:, -1]) + tail_z = -np.amin(read_co(obj)[:, -1]) + self.animate_expansion(obj, head_z, tail_z) + self.animate_movement(obj) + tag_object(obj, "jellyfish") + + return obj + + def animate_movement(self, obj): + offset = uniform(0, 1) + seed = np.random.randint(1e5) + driver_x, driver_y, driver_z = [_.driver for _ in obj.driver_add("location")] + driver_x.expression = repeated_driver( + uniform(-0.2, 0.2), uniform(-0.2, 0.2), self.move_freq, offset, seed + ) + driver_y.expression = repeated_driver( + uniform(-0.2, 0.2), uniform(-0.2, 0.2), self.move_freq, offset, seed + ) + driver_z.expression = repeated_driver( + uniform(-1.5, -0.5), uniform(0.5, 1.5), self.move_freq, offset, seed + ) + driver_rot = obj.driver_add("rotation_euler")[-1].driver + twist_range = uniform(0, np.pi / 60) + driver_rot.expression = repeated_driver( + -twist_range, twist_range, self.move_freq, offset, seed + ) + + obj, mod = butil.modify_mesh( + obj, + "SIMPLE_DEFORM", + False, + deform_method="TWIST", + deform_axis="Z", + return_mod=True, + ) + twist_driver = mod.driver_add("angle").driver + twist_driver.expression = repeated_driver( + -np.pi / 30, np.pi / 30, self.move_freq, offset, seed + ) + + def animate_expansion(self, obj, head_z, tail_z): + obj.shape_key_add(name="Base") + offset = uniform(0, 1) + seed = np.random.randint(1e5) + self.animate_radius(obj, offset, seed, head_z, tail_z) + self.animate_height(obj, offset, seed, head_z, tail_z) + self.animate_arms(obj, tail_z) + + def animate_height(self, obj, offset, 
seed, head_z, tail_z): + x, y, z = read_co(obj).T + obj.active_shape_key_index = 0 + key_block_z = obj.shape_key_add(name="Height") + z_anchors = -tail_z, 0, head_z + z_disp = 1, 1, uniform(0.6, 0.8) + z_curve = interp1d(z_anchors, z_disp, fill_value="extrapolate") + co = np.stack([x, y, z_curve(z) * z], -1) + key_block_z.data.foreach_set("co", co.reshape(-1)) + dr = key_block_z.driver_add("value").driver + dr.expression = repeated_driver( + 0, 1, self.anim_freq, offset + uniform(0.05, 0.15), seed + ) + + def animate_radius(self, obj, offset, seed, head_z, tail_z): + obj.active_shape_key_index = 0 + x, y, z = read_co(obj).T + key_block_r = obj.shape_key_add(name="Radius") + z_anchors = -tail_z, -head_z * 2, -head_z, 0, head_z + r_scale = uniform(0.7, 0.9), uniform(0.85, 0.95), 1, uniform(1.2, 1.4), 1 + r_curve = interp1d(z_anchors, r_scale, "quadratic", fill_value="extrapolate") + co = np.stack([r_curve(z) * x, r_curve(z) * y, z], -1) + key_block_r.data.foreach_set("co", co.reshape(-1)) + dr = key_block_r.driver_add("value").driver + dr.expression = repeated_driver(0, 1, self.anim_freq, offset, seed) + + def animate_arms(self, obj, tail_z): + def geo_musgrave_texture(nw: NodeWrangler, axis): + geometry = nw.new_node( + Nodes.GroupInput, + expose_input=[("NodeSocketGeometry", "Geometry", None)], + ) + z = nw.separate(nw.new_node(Nodes.InputPosition))[-1] + musgrave = nw.new_node( + Nodes.MusgraveTexture, + input_kwargs={"Scale": uniform(1, 2)}, + attrs={"musgrave_dimensions": "2D"}, + ) + offset = nw.scalar_multiply( + log_uniform(0.1, 0.4), + nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + axis: nw.scalar_divide(nw.scalar_multiply(musgrave, z), -tail_z) + }, + ), + ) + geometry = nw.new_node( + Nodes.SetPosition, + [ + geometry, + nw.boolean_math("NOT", nw.new_node(Nodes.NamedAttribute, ["pin"])), + None, + offset, + ], + ) + nw.new_node(Nodes.GroupOutput, input_kwargs={"Geometry": geometry}) + + for i, axis in enumerate("XY"): + obj.active_shape_key_index = 0 + key_block_r = obj.shape_key_add(name=f"Arm_{i}") + temp = deep_clone_obj(obj) + temp.shape_key_clear() + surface.add_geomod( + temp, geo_musgrave_texture, apply=True, input_args=[axis] + ) + key_block_r.data.foreach_set("co", read_co(temp).reshape(-1)) + butil.delete(temp) + dr = key_block_r.driver_add("value").driver + dr.expression = repeated_driver(0, 1, self.anim_freq) + + def place_tentacles( + self, obj, selection, min_distance, size, length, bend_angle, displace=False + ): + temp = butil.spawn_vert("temp") + surface.add_geomod( + temp, + geo_base_selection, + apply=True, + input_args=[obj, selection, min_distance], + ) + locations = read_co(temp) + if displace: + locations[:, -1] -= uniform(*self.arm_displace_range, len(locations)) + butil.delete(temp) + n = min(10, len(locations)) + arms = [self.build_arm(size, length, bend_angle) for _ in range(n)] + arms += [ + deep_clone_obj(np.random.choice(arms)) for _ in range(len(locations) - n) + ] + for arm, loc in zip(arms, locations): + arm.rotation_euler[-1] = ( + np.arctan2(loc[1], loc[0]) + uniform(-np.pi / 6, np.pi / 6) + np.pi + ) + arm.location = loc + return arms + + def build_cap(self, face_size): + obj = new_icosphere(subdivisions=6) + write_attribute(obj, lambda nw, position: 0, "material_index", "FACE") + + d = np.sqrt(1 - self.cap_inner_radius**2) + 1 - self.cap_thickness + r = (d * d + self.cap_inner_radius**2) / (2 * d) + + cutter = new_icosphere(subdivisions=6, radius=r) + write_attribute(cutter, lambda nw, position: 1, "material_index", "FACE") + 
cutter.location[-1] = 1 - self.cap_thickness - r + butil.modify_mesh(obj, "BOOLEAN", object=cutter, operation="DIFFERENCE") + co = read_co(obj) + outside = np.abs(np.linalg.norm(co, axis=-1) - 1) < 1e-6 + co[:, -1] -= cutter.location[-1] + inside = np.abs(np.linalg.norm(co, axis=-1) - r) < 1e-6 + write_attr_data(obj, "inside", inside.astype(float)) + write_attr_data(obj, "boundary", ((~inside) & (~outside)).astype(float)) + butil.delete(cutter) + + if self.cap_dent > 0: + self.apply_cap_dent(obj) + + surface.add_geomod( + obj, + geo_extension, + apply=True, + input_args=[log_uniform(0.2, 0.4), log_uniform(0.5, 1.0), "2D"], + ) + obj.scale *= Vector(uniform(0.4, 0.6, 3)) + obj.scale[-1] *= self.cap_z_scale + radius = self.cap_inner_radius * min(obj.scale[:2]) + butil.apply_transform(obj) + subsurface2face_size(obj, face_size) + + obj.vertex_groups.new(name="pin") + tag_object(obj, "cap") + return obj, radius + + def apply_cap_dent(self, obj): + n_dent = np.random.randint(6, 12) + angles = polygon_angles(n_dent) + angles = np.concatenate([angles, angles + 2 * np.pi]) + dent = uniform(1 - self.cap_dent, 1, n_dent) + margin = uniform(np.pi * 0.02, np.pi * 0.05, n_dent) + x, y, z = read_co(obj).T + a = np.arctan2(y, x) + np.pi * 1.5 + difference = np.abs(a[:, np.newaxis] - angles[np.newaxis, :]) + index = np.argmin(difference, 1) % n_dent + dent_ = np.take(dent, index) + margin_ = np.take(margin, index) + s = np.exp( + np.log(dent_) / margin_ * np.clip(margin_ - np.min(difference, 1), 0, None) + ) + co = np.stack([s * x, s * y, z]).T + write_co(obj, co) + + def build_arm(self, radius, length, bend_angle): + obj = new_circle(vertices=16) + obj.scale = radius, radius * uniform(0, 1), 1 + butil.apply_transform(obj) + remove_vertices(obj, lambda x, y, z: y * (-1) ** np.random.randint(2) > 0) + steps = 256 + + empty = new_empty(location=(0, 0, 1), rotation=(0, -uniform(0, np.pi / 24), 0)) + butil.modify_mesh( + obj, + "SCREW", + angle=log_uniform(0.5, 3) * np.pi * (-1) ** int(uniform(0, 1)), + screw_offset=-length * self.length_scale * uniform(0.5, 1.0), + object=empty, + steps=steps, + render_steps=steps, + ) + butil.delete(empty) + butil.modify_mesh( + obj, + "SIMPLE_DEFORM", + deform_method="TAPER", + factor=uniform(0.5, 1.0), + deform_axis="Z", + ) + texture = bpy.data.textures.new(name="arm", type="MARBLE") + texture.noise_scale = log_uniform(0.1, 0.2) + butil.modify_mesh( + obj, + "DISPLACE", + texture=texture, + strength=uniform(0.01, 0.02), + direction="Y", + ) + texture = bpy.data.textures.new(name="arm", type="MARBLE") + texture.noise_scale = log_uniform(0.1, 2.0) + butil.modify_mesh( + obj, + "DISPLACE", + texture=texture, + strength=log_uniform(0.1, 0.2), + direction="X", + ) + butil.modify_mesh( + obj, + "SIMPLE_DEFORM", + deform_method="BEND", + angle=bend_angle * log_uniform(0.5, 1.5), + deform_axis="Y", + ) + co = read_co(obj) + x, y, z = co.T + center = np.mean(co[z > -0.01], 0) + obj.location[0] -= center[0] + obj.location[1] -= center[1] + butil.apply_transform(obj, loc=True) + tag_object(obj, "arm") + return obj + + @staticmethod + def shader_jellyfish(nw: NodeWrangler, base_hue, saturation, transparency): + layerweight = nw.build_float_curve( + nw.new_node(Nodes.LayerWeight, input_kwargs={"Blend": 0.3}), + [(0, 0), (0.4, 0), (uniform(0.6, 0.9), 1), (1, 1)], + ) + emission_color = hsv2rgba(base_hue, uniform(0.4, 0.6), 1) + transparent_color = hsv2rgba((base_hue + uniform(-0.1, 0.1)) % 1, saturation, 1) + emission = nw.new_node(Nodes.Emission, [emission_color]) + glossy = 
nw.new_node( + Nodes.GlossyBSDF, + input_kwargs={"Color": transparent_color, "Roughness": uniform(0.8, 1)}, + ) + transparent = nw.new_node(Nodes.TransparentBSDF, [transparent_color]) + mix_shader = nw.new_node(Nodes.MixShader, [0.5, glossy, transparent]) + mix_shader = nw.new_node(Nodes.MixShader, [layerweight, emission, mix_shader]) + transparent = nw.new_node(Nodes.TransparentBSDF, [transparent_color]) + transparency = surface.eval_argument(nw, transparency) + mix_shader = nw.new_node( + Nodes.MixShader, [transparency, mix_shader, transparent] + ) + return mix_shader + + def make_transparent(self): + hue = (self.base_hue + uniform(-0.1, 0.1)) % 1 + return shaderfunc_to_material( + self.shader_jellyfish, hue, uniform(0.1, 0.3), uniform(0.88, 0.92) + ) + + def make_opaque(self): + hue = (self.base_hue + uniform(-0.1, 0.1)) % 1 + return shaderfunc_to_material( + self.shader_jellyfish, hue, uniform(0.3, 0.6), uniform(0.75, 0.8) + ) + + def make_solid(self): + hue = (self.base_hue + uniform(-0.1, 0.1)) % 1 + return shaderfunc_to_material( + self.shader_jellyfish, hue, uniform(0.5, 0.8), uniform(0.4, 0.5) + ) + + def make_dotted(self): + def transparency(nw: NodeWrangler): + return nw.build_float_curve( + nw.new_node( + Nodes.NoiseTexture, input_kwargs={"Scale": uniform(20, 50)} + ), + [ + (0, uniform(0.92, 0.96)), + (0.62, uniform(0.92, 0.96)), + (0.65, uniform(0.5, 0.6)), + (1, uniform(0.5, 0.6)), + ], + ) + + hue = (self.base_hue + uniform(-0.1, 0.1)) % 1 + return shaderfunc_to_material( + self.shader_jellyfish, hue, uniform(0.5, 0.8), transparency + ) diff --git a/infinigen/assets/objects/creatures/parts/__init__.py b/infinigen/assets/objects/creatures/parts/__init__.py new file mode 100644 index 000000000..78d37747e --- /dev/null +++ b/infinigen/assets/objects/creatures/parts/__init__.py @@ -0,0 +1,18 @@ +from . import ( + beak, + body, + chameleon, + eye, + fin_old, + foot, + generic_nurbs, + head, + head_detail, + hoof, + horn, + leg, + reptile_detail, + ridged_fin, + tail, + wings, +) diff --git a/infinigen/assets/objects/creatures/parts/beak.py b/infinigen/assets/objects/creatures/parts/beak.py new file mode 100644 index 000000000..bd65ad3ec --- /dev/null +++ b/infinigen/assets/objects/creatures/parts/beak.py @@ -0,0 +1,414 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
+ +# Authors: Hongyu Wen + + +from math import cos, exp, pi, sin + +import numpy as np + +from infinigen.assets.objects.creatures.util import part_util +from infinigen.assets.objects.creatures.util.creature import Part, PartFactory +from infinigen.assets.utils.geometry import nurbs as nurbs_util +from infinigen.core.tagging import tag_object +from infinigen.core.util import blender as butil + + +def square(x): + return x * x + + +class Beak: + def __init__(self, **kwargs): + self.__dict__.update(kwargs) + self.hook_x = lambda x, theta: self.hook( + self.hook_scale_x, + self.hook_a, + self.hook_b, + self.hook_pos_x, + self.hook_thickness_x, + x, + theta, + ) + self.hook_z = lambda x, theta: self.hook( + self.hook_scale_z, + self.hook_a, + self.hook_b, + self.hook_pos_z, + self.hook_thickness_z, + x, + theta, + ) + + self.crown_z = lambda x, theta: self.crown( + self.crown_scale_z, self.crown_a, self.crown_b, self.crown_pos_z, x, theta + ) + self.bump_z = lambda x, theta: self.bump( + self.bump_scale_z, x, self.bump_l, self.bump_r + ) * max(sin(theta), 0) + + def cx(self, x): + return x + + def cy(self, x): + return 1 - exp(self.cy_a * (x - 1)) + + def cz(self, x): + return 1 - (x**self.cz_a) + + def sigmoid(self, x): + return 1 / (1 + exp(-x)) + + def exp(self, a, b, x): + return a * exp(b * x) + + def hook(self, scale, a, b, p, t, x, theta): + return scale * self.exp(a, b, x - p - (1 - x) * t * sin(theta)) + + def bump(self, scale, x, lower, upper): + if x < lower or x > upper: + return 0 + x = (x - lower) / (upper - lower) * pi + return scale * sin(x) + + def crown(self, scale, a, b, p, x, theta): + return scale * self.exp(a, b, p - x) * max(sin(theta), 0) + + def dx(self, x, theta): + hook = self.hook_x(x, theta) + sharp = self.sharpness * max(x - 0.95, 0) + return hook + sharp + + def dy(self, x): + return 0 + + def dz(self, x, theta): + hook = self.hook_z(x, theta) + crown = self.crown_z(x, theta) + bump = self.bump_z(x, theta) + return hook + crown + bump + + def generate(self): + self.n = int(self.n) + self.m = int(self.m) + ctrls = np.zeros((self.n, self.m, 3)) + for i in range(self.n): + for j in range(self.m): + p = i / (self.n - 1) + theta = 2 * pi * j / (self.m) + ctrls[i][j][0] = self.sx * self.cx(p) + self.dx(p, theta) + ctrls[i][j][1] = self.sy * self.cy(p) * self.r * cos(theta) + self.dy(p) + ctrls[i][j][2] = self.reverse * ( + self.sz * self.cz(p) * self.r * max(sin(theta), 0) + + self.dz(p, theta) + ) + + method = "blender" if False else "geomdl" + return nurbs_util.nurbs(ctrls, method, face_size=0.02) + + +class BirdBeak(PartFactory): + param_templates = {} + tags = ["head_detail", "rigid"] + unit_scale = (0.5, 0.5, 0.5) + + def sample_params(self, select=None, var=1): + weights = part_util.random_convex_coord( + self.param_templates.keys(), select=select + ) + params = part_util.rdict_comb(self.param_templates, weights) + # params = np.random.choice(list(self.param_templates.values())) + + def N(m, v): + return np.random.normal(m, v * var) + + def U(l, r): + return np.random.uniform(l, r) + + # add additional noise to params + for key in params["upper"]: + if key in params["range"]: + l, r = params["range"][key] + noise = N(0, 0.05 * (r - l)) + params["upper"][key] += noise + params["lower"][key] += noise + params["upper"][key] = max(min(params["upper"][key], r), l) + params["lower"][key] = max(min(params["lower"][key], r), l) + params["lower"]["sx"] = min( + params["lower"]["sx"], + params["upper"]["sx"] + * (params["upper"]["hook_pos_x"] - 
params["upper"]["hook_thickness_x"] / 2), + ) + + return params + + def rescale(self, params, scale): + params["sx"] *= scale + params["sy"] *= scale + params["sz"] *= scale + return params + + def make_part(self, params): + obj = butil.spawn_vert("beak_parent_temp") + upper = Beak(**params["upper"]).generate() + upper.parent = obj + upper.name = "BeakUpper" + + lower = Beak(**params["lower"]).generate() + lower.parent = obj + lower.name = "BeakLower" + + upper.scale = self.unit_scale + lower.scale = self.unit_scale + butil.apply_transform([upper, lower], scale=True) + + part = Part(skeleton=np.zeros((1, 3)), obj=obj, joints={}, iks={}) + tag_object(part.obj, "bird_beak") + + return part + + +class FlyingBirdBeak(BirdBeak): + def sample_params(self, select="normal", var=1): + return super().sample_params(select=select) + + def make_part(self, params): + obj = butil.spawn_vert("beak_parent_temp") + params["upper"] = self.rescale(params["upper"], 0.4) + params["lower"] = self.rescale(params["lower"], 0.4) + upper = Beak(**params["upper"]).generate() + upper.parent = obj + upper.name = "BeakUpper" + + lower = Beak(**params["lower"]).generate() + lower.parent = obj + lower.name = "BeakLower" + + upper.scale = self.unit_scale + lower.scale = self.unit_scale + butil.apply_transform([upper, lower], scale=True) + + return Part(skeleton=np.zeros((1, 3)), obj=obj, joints={}, iks={}) + + +default_beak = { + "n": 20, + "m": 20, + "r": 1.0, + "sx": 1.0, + "sy": 1.0, + "sz": 1.0, + "cy_a": 1.0, + "cz_a": 2.0, + "reverse": 1, + "hook_a": 0.1, + "hook_b": 5.0, + "hook_scale_x": 0.0, + "hook_pos_x": 0.0, + "hook_thickness_x": 0.0, + "hook_scale_z": 0.0, + "hook_pos_z": 0.0, + "hook_thickness_z": 0.0, + "crown_scale_z": 0.0, + "crown_a": 0.5, + "crown_b": 0.5, + "crown_pos_z": 0.5, + "bump_scale_z": 0.0, + "bump_l": 0.5, + "bump_r": 0.5, + "sharpness": 0.0, +} + +scales = { + "r": [0.3, 1], + "sx": [0.2, 1], + "sy": [0.2, 1], + "sz": [0.2, 1], + "cy_a": [1, 10], + "cz_a": [1, 5], + "hook_a": [0.1, 0.8], + "hook_b": [1, 5], + "hook_scale_x": [-0.5, 0.5], + "hook_pos_x": [0.5, 1], + "hook_thickness_x": [0, 0.5], + "hook_scale_z": [-0.5, 0.5], + "hook_pos_z": [0.5, 1], + "hook_thickness_z": [0, 0.5], + "crown_scale_z": [0, 0.3], + "crown_a": [0.1, 0.8], + "crown_b": [0, 2], + "crown_pos_z": [0, 0.5], + "bump_scale_z": [0, 0.03], + "bump_l": [0, 0.4], + "bump_r": [0.6, 1], + "sharpness": [-0.5, 0.5], +} +for k, v in scales.items(): + scales[k] = np.array(v) + +eagle_upper = default_beak | { + "r": 0.4, + "sx": 0.8, + "sy": 0.4, + "sz": 1.0, + "hook_a": 0.1, + "hook_b": 5.0, + "hook_scale_x": -1.0, + "hook_pos_x": 0.72, + "hook_thickness_x": 0.35, + "hook_scale_z": -0.8, + "hook_pos_z": 0.7, + "hook_thickness_z": 0.0, +} + +eagle_lower = default_beak | { + "r": 0.4, + "sx": 0.4, + "sy": 0.4, + "sz": 0.2, + "reverse": -1, + "hook_a": 0.1, + "hook_b": 5.0, + "hook_scale_x": 0.0, + "hook_pos_x": 0.72, + "hook_thickness_x": 0.35, + "hook_scale_z": 0.1, + "hook_pos_z": 0.6, + "hook_thickness_z": -0.2, +} + +normal_upper = default_beak | { + "r": 0.4, + "sx": 0.7, + "sy": 0.3, + "sz": 0.5, + "hook_a": 0.1, + "hook_b": 2.0, + "hook_scale_x": 0.0, + "hook_pos_x": 0.72, + "hook_thickness_x": 0.35, + "hook_scale_z": -0.8, + "hook_pos_z": 0.7, + "hook_thickness_z": 0.0, +} + +normal_lower = default_beak | { + "r": 0.4, + "sx": 0.7, + "sy": 0.3, + "sz": 0.3, + "reverse": -1, + "hook_a": 0.1, + "hook_b": 2.0, + "hook_scale_x": 0.0, + "hook_pos_x": 0.72, + "hook_thickness_x": 0.35, + "hook_scale_z": 0.8, + "hook_pos_z": 0.7, 
+ "hook_thickness_z": 0.0, +} + +duck_upper = default_beak | { + "n": 50, + "r": 0.4, + "sx": 1.0, + "sy": 0.4, + "sz": 0.5, + "cy_a": 10.0, + "hook_a": 0.1, + "hook_b": 2.0, + "hook_scale_x": -1.5, + "hook_pos_x": 0.9, + "hook_thickness_x": 0.0, + "hook_scale_z": 0.4, + "hook_pos_z": 0.6, + "hook_thickness_z": 0.2, + "crown_scale_z": 0.3, + "crown_a": 0.1, + "crown_b": 5.0, + "crown_pos_z": 0.3, + "bump_scale_z": 0.02, + "bump_l": 0.4, + "bump_r": 1.0, + "sharpness": -0.5, +} + +duck_lower = default_beak | { + "n": 50, + "r": 0.4, + "sx": 0.97, + "sy": 0.4, + "sz": 0.1, + "cy_a": 10.0, + "reverse": -1, + "hook_a": 0.1, + "hook_b": 2.0, + "hook_scale_x": -1.5, + "hook_pos_x": 0.9, + "hook_thickness_x": 0.0, + "hook_scale_z": -0.4, + "hook_pos_z": 0.6, + "hook_thickness_z": 0.0, + "crown_scale_z": 0.1, + "crown_a": 0.1, + "crown_b": 5.0, + "crown_pos_z": 0.3, + "bump_scale_z": 0.03, + "bump_l": 0.3, + "bump_r": 1.0, + "sharpness": -0.5, +} + +short_upper = default_beak | { + "r": 0.4, + "sx": 0.25, + "sy": 0.3, + "sz": 0.3, + "hook_a": 0.1, + "hook_b": 2.0, + "hook_scale_x": -0.5, + "hook_pos_x": 0.8, + "hook_thickness_x": 0.35, + "hook_scale_z": -0.15, + "hook_pos_z": 0.7, + "hook_thickness_z": 0.0, +} +short_lower = default_beak | { + "r": 0.4, + "sx": 0.25, + "sy": 0.3, + "sz": 0.3, + "cy_a": 1.0, + "cz_a": 1.1, + "reverse": -1, + "hook_a": 0.1, + "hook_b": 2.0, + "hook_scale_x": -0.5, + "hook_pos_x": 0.8, + "hook_thickness_x": 0.35, + "hook_scale_z": 0.15, + "hook_pos_z": 0.7, + "hook_thickness_z": 0.0, +} + +BirdBeak.param_templates["normal"] = { + "upper": normal_upper, + "lower": normal_lower, + "range": scales, +} +BirdBeak.param_templates["duck"] = { + "upper": duck_upper, + "lower": duck_lower, + "range": scales, +} +BirdBeak.param_templates["eagle"] = { + "upper": eagle_upper, + "lower": eagle_lower, + "range": scales, +} +BirdBeak.param_templates["short"] = { + "upper": short_upper, + "lower": short_lower, + "range": scales, +} diff --git a/infinigen/assets/objects/creatures/parts/body.py b/infinigen/assets/objects/creatures/parts/body.py new file mode 100644 index 000000000..14838d9bb --- /dev/null +++ b/infinigen/assets/objects/creatures/parts/body.py @@ -0,0 +1,428 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
+ +# Authors: Alexander Raistrick + + +import numpy as np +from numpy.random import normal as N + +from infinigen.assets.objects.creatures.util import part_util +from infinigen.assets.objects.creatures.util.creature import PartFactory +from infinigen.assets.objects.creatures.util.genome import IKParams, Joint +from infinigen.assets.utils.nodegroups.attach import nodegroup_surface_muscle +from infinigen.assets.utils.nodegroups.curve import nodegroup_simple_tube_v2 +from infinigen.assets.utils.nodegroups.geometry import ( + nodegroup_symmetric_clone, +) +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.tagging import tag_object + + +@node_utils.to_nodegroup( + "nodegroup_quadruped_body", singleton=False, type="GeometryNodeTree" +) +def nodegroup_quadruped_body(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input_1 = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVector", "length_rad1_rad2", (0.0, 0.0, 0.0)), + ("NodeSocketVector", "Pct Ribcage", (0.0, 0.0, 0.0)), + ("NodeSocketVector", "Pct Backpart", (0.0, 0.0, 0.0)), + ("NodeSocketVector", "Spine StartRad, EndRad, Fullness", (0.05, 0.05, 3.0)), + ("NodeSocketVector", "Belly StartRad, EndRad, Fullness", (0.07, 0.15, 2.5)), + ( + "NodeSocketVector", + "Belly ProfileHeight, StartTilt, EndTilt", + (0.5, 114.0, 114.0), + ), + ( + "NodeSocketVector", + "TopFlank StartRad, EndRad, Fullness", + (0.2, 0.28, 2.5), + ), + ( + "NodeSocketVector", + "TopFlank ProfileHeight, StartTilt, EndTilt", + (0.6, 72.0, 8.0), + ), + ( + "NodeSocketVector", + "BackFlank StartRad, EndRad, Fullness", + (0.15, 0.15, 2.5), + ), + ( + "NodeSocketVector", + "BackFlank ProfileHeight, StartTilt, EndTilt", + (0.6, 53.0, 53.0), + ), + ( + "NodeSocketVector", + "BottomFlank StartRad, EndRad, Fullness", + (0.14, 0.27, 2.5), + ), + ( + "NodeSocketVector", + "BottomFlank0 ProfileHeight, StartTilt, EndTilt", + (0.6, -29.0, 48.0), + ), + ( + "NodeSocketVector", + "BottomFlank1 ProfileHeight, StartTilt, EndTilt", + (0.5, -44.0, -17.4), + ), + ("NodeSocketFloat", "aspect", 1.0), + ], + ) + + multiply = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: group_input_1.outputs["length_rad1_rad2"], + 1: group_input_1.outputs["Pct Ribcage"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + simple_tube_v2 = nw.new_node( + nodegroup_simple_tube_v2().name, + input_kwargs={ + "length_rad1_rad2": multiply.outputs["Vector"], + "angles_deg": (0.0, -1.0, 4.0), + "proportions": (0.3333, 0.45, 0.3), + "aspect": group_input_1.outputs["aspect"], + "fullness": 3.0, + "Origin": (0.48, 0.0, -0.07), + }, + ) + + multiply_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: group_input_1.outputs["length_rad1_rad2"], + 1: group_input_1.outputs["Pct Backpart"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + vector = nw.new_node(Nodes.Vector) + vector.vector = (-0.01, 0.0, 0.02) + + simple_tube_v2_1 = nw.new_node( + nodegroup_simple_tube_v2().name, + input_kwargs={ + "length_rad1_rad2": multiply_1.outputs["Vector"], + "angles_deg": (0.94, -3.94, 11.66), + "proportions": (0.3, 0.6, 0.2), + "aspect": group_input_1.outputs["aspect"], + "fullness": 7.0, + "Origin": vector, + }, + ) + + union = nw.new_node( + Nodes.MeshBoolean, + input_kwargs={ + "Mesh 2": [ + simple_tube_v2.outputs["Geometry"], + simple_tube_v2_1.outputs["Geometry"], + ] + }, + attrs={"operation": "UNION"}, + ) + + quadratic_bezier = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + 
"Start": vector, + "Middle": simple_tube_v2_1.outputs["Endpoint"], + "End": simple_tube_v2.outputs["Endpoint"], + }, + ) + + bottom_flank_0 = nw.new_node( + nodegroup_surface_muscle().name, + input_kwargs={ + "Skin Mesh": union, + "Skeleton Curve": quadratic_bezier, + "Coord 0": (0.16, 0.91, 0.66), + "Coord 1": (0.38, 0.37, 1.0), + "Coord 2": (0.67, -0.42, 0.6), + "StartRad, EndRad, Fullness": group_input_1.outputs[ + "BottomFlank StartRad, EndRad, Fullness" + ], + "ProfileHeight, StartTilt, EndTilt": group_input_1.outputs[ + "BottomFlank0 ProfileHeight, StartTilt, EndTilt" + ], + }, + label="Bottom Flank 0", + ) + + top_flank = nw.new_node( + nodegroup_surface_muscle().name, + input_kwargs={ + "Skin Mesh": union, + "Skeleton Curve": quadratic_bezier, + "Coord 0": (0.25, 4.91, 0.5), + "Coord 1": (0.65, -0.35, 1.0), + "Coord 2": (0.88, 0.47, 0.7), + "StartRad, EndRad, Fullness": group_input_1.outputs[ + "TopFlank StartRad, EndRad, Fullness" + ], + "ProfileHeight, StartTilt, EndTilt": group_input_1.outputs[ + "TopFlank ProfileHeight, StartTilt, EndTilt" + ], + }, + label="Top Flank", + ) + + bottom_flank_1 = nw.new_node( + nodegroup_surface_muscle().name, + input_kwargs={ + "Skin Mesh": union, + "Skeleton Curve": quadratic_bezier, + "Coord 0": (0.36, 1.03, 0.95), + "Coord 1": (0.6, 0.85, 1.0), + "Coord 2": (0.9, -0.01, 0.71), + "StartRad, EndRad, Fullness": group_input_1.outputs[ + "BottomFlank StartRad, EndRad, Fullness" + ], + "ProfileHeight, StartTilt, EndTilt": group_input_1.outputs[ + "BottomFlank1 ProfileHeight, StartTilt, EndTilt" + ], + }, + label="Bottom Flank 1", + ) + + back_flank = nw.new_node( + nodegroup_surface_muscle().name, + input_kwargs={ + "Skin Mesh": union, + "Skeleton Curve": quadratic_bezier, + "Coord 0": (0.02, -0.9, 0.53), + "Coord 1": (0.2, -0.85, 0.85), + "Coord 2": (0.61, -0.99, 0.7), + "StartRad, EndRad, Fullness": group_input_1.outputs[ + "BackFlank StartRad, EndRad, Fullness" + ], + "ProfileHeight, StartTilt, EndTilt": group_input_1.outputs[ + "BackFlank ProfileHeight, StartTilt, EndTilt" + ], + }, + label="Back Flank", + ) + + belly = nw.new_node( + nodegroup_surface_muscle().name, + input_kwargs={ + "Skin Mesh": union, + "Skeleton Curve": quadratic_bezier, + "Coord 0": (0.24, 1.52, 0.7), + "Coord 1": (0.48, 1.24, 1.42), + "Coord 2": (0.92, 1.41, 0.97), + "StartRad, EndRad, Fullness": group_input_1.outputs[ + "Belly StartRad, EndRad, Fullness" + ], + "ProfileHeight, StartTilt, EndTilt": group_input_1.outputs[ + "Belly ProfileHeight, StartTilt, EndTilt" + ], + }, + label="Belly", + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={ + "Geometry": [bottom_flank_0, top_flank, bottom_flank_1, back_flank, belly] + }, + ) + + symmetric_clone = nw.new_node( + nodegroup_symmetric_clone().name, input_kwargs={"Geometry": join_geometry_1} + ) + + spine = nw.new_node( + nodegroup_surface_muscle().name, + input_kwargs={ + "Skin Mesh": union, + "Skeleton Curve": quadratic_bezier, + "Coord 0": (0.05, -1.5708, 1.0), + "Coord 1": (0.5, -1.5708, 1.2), + "Coord 2": (0.95, -1.5708, 1.0), + "StartRad, EndRad, Fullness": group_input_1.outputs[ + "Spine StartRad, EndRad, Fullness" + ], + "ProfileHeight, StartTilt, EndTilt": (1.0, 0.0, 0.0), + }, + label="Spine", + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [union, symmetric_clone.outputs["Both"], spine]}, + ) + + reroute = nw.new_node(Nodes.Reroute, input_kwargs={"Input": quadratic_bezier}) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + 
"Geometry": join_geometry, + "Skeleton Curve": reroute, + "Base Mesh": union, + }, + ) + + +class QuadrupedBody(PartFactory): + tags = ["body", "head"] + + def sample_params(self): + return { + "length_rad1_rad2": np.array((1.7, 0.65, 0.65)) * N(1, 0.15, 3), + "Pct Ribcage": (0.76, 0.56, 0.56) * N(1, 0.1, 3), + "Pct Backpart": (0.64, 0.25, 0.4) * N(1, 0.1, 3), + "Spine StartRad, EndRad, Fullness": np.array((0.05, 0.05, 3.0)) + * N(1, 0.1, 3), + "Belly StartRad, EndRad, Fullness": np.array((0.07, 0.15, 2.5)) + * N(1, 0.1, 3), + "Belly ProfileHeight, StartTilt, EndTilt": (0.5, 114.0, 114.0), + "TopFlank StartRad, EndRad, Fullness": (0.2, 0.28, 2.5), + "TopFlank ProfileHeight, StartTilt, EndTilt": (0.6, 72.0, 8.0), + "BackFlank StartRad, EndRad, Fullness": (0.15, 0.15, 2.5), + "BackFlank ProfileHeight, StartTilt, EndTilt": (0.6, 53.0, 53.0), + "BottomFlank StartRad, EndRad, Fullness": (0.14, 0.27, 2.5), + "BottomFlank0 ProfileHeight, StartTilt, EndTilt": (0.6, -29.0, 48.0), + "BottomFlank1 ProfileHeight, StartTilt, EndTilt": (0.5, -44.0, -17.4), + "aspect": N(1, 0.1), + } + + def make_part(self, params): + part = part_util.nodegroup_to_part(nodegroup_quadruped_body, params) + part.joints = { + i: Joint(rest=(0, 0, 0), bounds=np.array([[-30, 0, -30], [30, 0, 30]])) + for i in np.linspace(0, 1, 4, endpoint=True) + } + part.iks = { + 0.0: IKParams(name="hip", mode="pin", target_size=0.3), + 1.0: IKParams(name="shoulder", rotation_weight=0.1, target_size=0.4), + } + tag_object(part.obj, "quadruped_body") + return part + + +@node_utils.to_nodegroup( + "nodegroup_fish_body", singleton=False, type="GeometryNodeTree" +) +def nodegroup_fish_body(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVector", "length_rad1_rad2", (0.89, 0.2, 0.29)), + ("NodeSocketVector", "angles_deg", (7.0, 0.51, -9.02)), + ("NodeSocketFloat", "aspect", 0.56), + ("NodeSocketFloat", "fullness", 3.43), + ], + ) + + simple_tube_v2 = nw.new_node( + nodegroup_simple_tube_v2().name, + input_kwargs={ + "length_rad1_rad2": group_input.outputs["length_rad1_rad2"], + "angles_deg": group_input.outputs["angles_deg"], + "aspect": group_input.outputs["aspect"], + "fullness": group_input.outputs["fullness"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": simple_tube_v2.outputs["Geometry"], + "Skeleton Curve": simple_tube_v2.outputs["Skeleton Curve"], + "Endpoint": simple_tube_v2.outputs["Endpoint"], + }, + ) + + +class FishBody(PartFactory): + tags = ["body"] + + def sample_params(self): + return {} + + def make_part(self, params): + part = part_util.nodegroup_to_part(nodegroup_fish_body, params) + part.joints = { + i: Joint(rest=(0, 0, 0), bounds=np.array([[-30, 0, -30], [30, 0, 30]])) + for i in np.linspace(0, 1, 4, endpoint=True) + } + part.iks = { + 0.0: IKParams(name="hip", mode="pin", target_size=0.3), + 1.0: IKParams(name="shoulder", rotation_weight=0.1, target_size=0.4), + } + tag_object(part.obj, "fish_body") + return part + + +@node_utils.to_nodegroup( + "nodegroup_bird_body", singleton=False, type="GeometryNodeTree" +) +def nodegroup_bird_body(nw: NodeWrangler): + # Code generated using version 2.5.1 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVector", "length_rad1_rad2", (1.0000, 0.5000, 0.3000)), + ("NodeSocketFloat", "aspect", 1.0000), + ("NodeSocketFloat", "fullness", 2.0000), + ], + ) + + simple_tube_v2 = 
nw.new_node( + nodegroup_simple_tube_v2().name, + input_kwargs={ + "length_rad1_rad2": group_input.outputs["length_rad1_rad2"], + "proportions": (0.1000, 0.1000, 0.1000), + "aspect": group_input.outputs["aspect"], + "fullness": group_input.outputs["fullness"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": simple_tube_v2.outputs["Geometry"], + "Skeleton Curve": simple_tube_v2.outputs["Skeleton Curve"], + }, + ) + + +class BirdBody(PartFactory): + tags = ["body"] + + def sample_params(self): + return { + "length_rad1_rad2": np.array((0.95, 0.15, 0.2)) * N(1.0, 0.05, size=(3,)), + "aspect": N(1.2, 0.02), + "fullness": N(2, 0.1), + } + + def make_part(self, params): + part = part_util.nodegroup_to_part(nodegroup_bird_body, params) + part.joints = { + i: Joint(rest=(0, 0, 0), bounds=np.array([[-30, 0, -30], [30, 0, 30]])) + for i in np.linspace(0, 1, 4, endpoint=True) + } + part.iks = { + 0.0: IKParams(name="hip", mode="pin", target_size=0.3), + 1.0: IKParams(name="shoulder", rotation_weight=0.1, target_size=0.4), + } + tag_object(part.obj, "bird_body") + return part diff --git a/infinigen/assets/objects/creatures/parts/chameleon.py b/infinigen/assets/objects/creatures/parts/chameleon.py new file mode 100644 index 000000000..7ed41fbb0 --- /dev/null +++ b/infinigen/assets/objects/creatures/parts/chameleon.py @@ -0,0 +1,3708 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Hongyu Wen +# Acknowledgement: This file draws inspiration from https://www.youtube.com/watch?v=LJD3nvFXCLE by Redjam9 + + +from infinigen.assets.objects.creatures.util.creature import PartFactory +from infinigen.assets.objects.creatures.util.part_util import nodegroup_to_part +from infinigen.core import surface +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler + + +@node_utils.to_nodegroup( + "nodegroup_chameleon_toe", singleton=False, type="GeometryNodeTree" +) +def nodegroup_chameleon_toe(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + spiral = nw.new_node( + "GeometryNodeCurveSpiral", + input_kwargs={ + "Rotations": 0.1000, + "Start Radius": 0.1000, + "End Radius": 0.3000, + "Height": 0.0000, + }, + ) + + spline_parameter = nw.new_node(Nodes.SplineParameter) + + float_curve = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": spline_parameter.outputs["Factor"]} + ) + node_utils.assign_curve( + float_curve.mapping.curves[0], [(0.0000, 1.0000), (1.0000, 0.0000)] + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: float_curve, 1: 0.4000}, + attrs={"operation": "MULTIPLY"}, + ) + + set_curve_radius = nw.new_node( + Nodes.SetCurveRadius, input_kwargs={"Curve": spiral, "Radius": multiply} + ) + + spline_parameter_1 = nw.new_node(Nodes.SplineParameter) + + capture_attribute = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={ + "Geometry": set_curve_radius, + 2: spline_parameter_1.outputs["Factor"], + }, + ) + + curve_circle = nw.new_node(Nodes.CurveCircle, input_kwargs={"Radius": 0.1000}) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": capture_attribute.outputs["Geometry"], + "Profile Curve": curve_circle.outputs["Curve"], + }, + ) + + store_named_attribute = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": curve_to_mesh, + "Name": "Ridge", + "Value": 
capture_attribute.outputs[2], + }, + attrs={"data_type": "FLOAT", "domain": "POINT"}, + ) + + sample_curve = nw.new_node( + Nodes.SampleCurve, + input_kwargs={"Curve": set_curve_radius}, + attrs={"mode": "FACTOR"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": store_named_attribute, + "Position": sample_curve.outputs["Position"], + }, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_floor_ceil", singleton=False, type="GeometryNodeTree" +) +def nodegroup_floor_ceil(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketFloat", "Value", 0.0000)] + ) + + float_to_integer = nw.new_node( + Nodes.FloatToInt, + input_kwargs={"Float": group_input.outputs["Value"]}, + attrs={"rounding_mode": "FLOOR"}, + ) + + float_to_integer_1 = nw.new_node( + Nodes.FloatToInt, + input_kwargs={"Float": group_input.outputs["Value"]}, + attrs={"rounding_mode": "CEILING"}, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Value"], 1: float_to_integer}, + attrs={"operation": "SUBTRACT"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Floor": float_to_integer, + "Ceil": float_to_integer_1, + "Remainder": subtract, + }, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_clamp_or_wrap", singleton=False, type="GeometryNodeTree" +) +def nodegroup_clamp_or_wrap(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketInt", "Value", 0), + ("NodeSocketFloat", "Max", 0.5000), + ("NodeSocketBool", "Use Wrap", False), + ], + ) + + clamp = nw.new_node( + Nodes.Clamp, + input_kwargs={ + "Value": group_input.outputs["Value"], + "Max": group_input.outputs["Max"], + }, + ) + + wrap = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["Value"], + 1: group_input.outputs["Max"], + 2: 0.0000, + }, + attrs={"operation": "WRAP"}, + ) + + switch = nw.new_node( + Nodes.Switch, + input_kwargs={0: group_input.outputs["Use Wrap"], 4: clamp, 5: wrap}, + attrs={"input_type": "INT"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Output": switch.outputs[1]}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_chameleon_claw_shape", singleton=False, type="GeometryNodeTree" +) +def nodegroup_chameleon_claw_shape(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + quadratic_bezier = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Resolution": 32, + "Start": (0.0000, 0.0000, 0.0000), + "Middle": (0.5000, 0.5000, 0.0000), + "End": (0.7000, 0.3000, 0.0000), + }, + ) + + simpletube = nw.new_node( + nodegroup_simple_tube().name, + input_kwargs={ + "Curve": quadratic_bezier, + "RadStartEnd": (0.2000, 0.2000, 1.0000), + }, + ) + + quadratic_bezier_1 = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Resolution": 10, + "Start": (0.9500, 0.2500, 0.0000), + "Middle": (1.0000, 0.5000, 0.0000), + "End": (0.9500, 0.7500, 0.0000), + }, + ) + + curveparametercurve = nw.new_node( + nodegroup_curve_parameter_curve().name, + input_kwargs={ + "Surface": simpletube.outputs["Mesh"], + "UVCurve": quadratic_bezier_1, + "CtrlptsU": 32, + "CtrlptsW": 32, + }, + ) + + curvesculpt = nw.new_node( + nodegroup_curve_sculpt().name, + input_kwargs={ + "Target": 
simpletube.outputs["Mesh"], + "Curve": curveparametercurve, + "Base Radius": 0.1000, + "Base Factor": 0.0200, + "Attr": True, + }, + ) + + chameleon_toe = nw.new_node(nodegroup_chameleon_toe().name) + + sample_curve = nw.new_node( + Nodes.SampleCurve, + input_kwargs={"Curve": simpletube.outputs["Curve"], "Factor": 1.0000}, + attrs={"mode": "FACTOR"}, + ) + + add = nw.new_node( + Nodes.VectorMath, input_kwargs={0: sample_curve.outputs["Position"]} + ) + + subtract = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: add.outputs["Vector"], 1: chameleon_toe.outputs["Position"]}, + attrs={"operation": "SUBTRACT"}, + ) + + transform_1 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": chameleon_toe.outputs["Geometry"], + "Translation": subtract.outputs["Vector"], + "Rotation": (0.1745, -0.1745, 0.8727), + }, + ) + + chameleon_toe_1 = nw.new_node(nodegroup_chameleon_toe().name) + + add_1 = nw.new_node( + Nodes.VectorMath, input_kwargs={0: sample_curve.outputs["Position"]} + ) + + subtract_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: add_1.outputs["Vector"], + 1: chameleon_toe_1.outputs["Position"], + }, + attrs={"operation": "SUBTRACT"}, + ) + + transform_2 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": chameleon_toe_1.outputs["Geometry"], + "Translation": subtract_1.outputs["Vector"], + "Rotation": (0.0000, 0.1745, 0.8727), + }, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={ + "Geometry": [curvesculpt.outputs["Geometry"], transform_1, transform_2] + }, + ) + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVectorEuler", "Rotation", (0.0000, 1.0472, 0.0000)), + ("NodeSocketVectorXYZ", "Scale", (0.2000, 0.2000, 0.4000)), + ], + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": join_geometry, + "Rotation": group_input.outputs["Rotation"], + "Scale": group_input.outputs["Scale"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": transform}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_u_v_param_to_vert_idxs", singleton=False, type="GeometryNodeTree" +) +def nodegroup_u_v_param_to_vert_idxs(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "Value", 0.5000), + ("NodeSocketInt", "Size", 0), + ("NodeSocketBool", "Cyclic", False), + ], + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Value"], 1: group_input.outputs["Size"]}, + attrs={"operation": "MULTIPLY"}, + ) + + floorceil = nw.new_node( + nodegroup_floor_ceil().name, input_kwargs={"Value": multiply} + ) + + clamporwrap = nw.new_node( + nodegroup_clamp_or_wrap().name, + input_kwargs={ + "Value": floorceil.outputs["Floor"], + "Max": group_input.outputs["Size"], + "Use Wrap": group_input.outputs["Cyclic"], + }, + ) + + clamporwrap_1 = nw.new_node( + nodegroup_clamp_or_wrap().name, + input_kwargs={ + "Value": floorceil.outputs["Ceil"], + "Max": group_input.outputs["Size"], + "Use Wrap": group_input.outputs["Cyclic"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Floor": clamporwrap, + "Ceil": clamporwrap_1, + "Remainder": floorceil.outputs["Remainder"], + }, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_chameleon_foot_shape", singleton=False, type="GeometryNodeTree" +) +def nodegroup_chameleon_foot_shape(nw: 
NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + chameleon_claw_shape = nw.new_node( + nodegroup_chameleon_claw_shape().name, + input_kwargs={"Rotation": (0.0000, 0.0000, 0.0000)}, + ) + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVectorEuler", "ouRotation", (0.0000, 1.0472, 0.0000)), + ("NodeSocketVectorEuler", "inRotation", (0.0000, 2.0944, 3.1416)), + ("NodeSocketVectorXYZ", "ouScale", (1.0000, 1.0000, 1.0000)), + ("NodeSocketVectorXYZ", "inScale", (1.0000, 1.0000, 1.0000)), + ], + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": chameleon_claw_shape, + "Rotation": group_input.outputs["ouRotation"], + "Scale": group_input.outputs["ouScale"], + }, + ) + + chameleon_claw_shape_1 = nw.new_node( + nodegroup_chameleon_claw_shape().name, + input_kwargs={"Rotation": (0.0000, 0.0000, 0.0000)}, + ) + + transform_1 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": chameleon_claw_shape_1, + "Rotation": group_input.outputs["inRotation"], + "Scale": group_input.outputs["inScale"], + }, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [transform, transform_1]} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": join_geometry}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_bilinear_interp_index_transfer", singleton=False, type="GeometryNodeTree" +) +def nodegroup_bilinear_interp_index_transfer(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Source", None), + ("NodeSocketFloat", "U", 0.5000), + ("NodeSocketFloat", "V", 0.5000), + ("NodeSocketVector", "Attribute", (0.0000, 0.0000, 0.0000)), + ("NodeSocketInt", "SizeU", 0), + ("NodeSocketInt", "SizeV", 0), + ("NodeSocketBool", "CyclicU", False), + ("NodeSocketBool", "CyclicV", False), + ], + ) + + uvparamtovertidxs = nw.new_node( + nodegroup_u_v_param_to_vert_idxs().name, + input_kwargs={ + "Value": group_input.outputs["V"], + "Size": group_input.outputs["SizeV"], + "Cyclic": group_input.outputs["CyclicV"], + }, + ) + + uvparamtovertidxs_1 = nw.new_node( + nodegroup_u_v_param_to_vert_idxs().name, + input_kwargs={ + "Value": group_input.outputs["U"], + "Size": group_input.outputs["SizeU"], + "Cyclic": group_input.outputs["CyclicU"], + }, + ) + + floor_floor = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: uvparamtovertidxs_1.outputs["Floor"], + 1: group_input.outputs["SizeV"], + 2: uvparamtovertidxs.outputs["Floor"], + }, + label="FloorFloor", + attrs={"operation": "MULTIPLY_ADD"}, + ) + + transfer_attribute = nw.new_node( + Nodes.SampleIndex, + input_kwargs={ + "Geometry": group_input, + "Value": group_input.outputs["Attribute"], + "Index": floor_floor, + }, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + ceil_floor = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: uvparamtovertidxs_1.outputs["Ceil"], + 1: group_input.outputs["SizeV"], + 2: uvparamtovertidxs.outputs["Floor"], + }, + label="CeilFloor", + attrs={"operation": "MULTIPLY_ADD"}, + ) + + transfer_attribute_1 = nw.new_node( + Nodes.SampleIndex, + input_kwargs={ + "Geometry": group_input, + "Value": group_input.outputs["Attribute"], + "Index": ceil_floor, + }, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Vector": uvparamtovertidxs_1.outputs["Remainder"], + 9: (transfer_attribute, 
"Value"), + 10: (transfer_attribute_1, "Value"), + }, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + floor_ceil = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: uvparamtovertidxs_1.outputs["Floor"], + 1: group_input.outputs["SizeV"], + 2: uvparamtovertidxs.outputs["Ceil"], + }, + label="FloorCeil", + attrs={"operation": "MULTIPLY_ADD"}, + ) + + transfer_attribute_2 = nw.new_node( + Nodes.SampleIndex, + input_kwargs={ + "Geometry": group_input, + "Value": group_input.outputs["Attribute"], + "Index": floor_ceil, + }, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + ceil_ceil = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: uvparamtovertidxs_1.outputs["Ceil"], + 1: group_input.outputs["SizeV"], + 2: uvparamtovertidxs.outputs["Ceil"], + }, + label="CeilCeil", + attrs={"operation": "MULTIPLY_ADD"}, + ) + + transfer_attribute_3 = nw.new_node( + Nodes.SampleIndex, + input_kwargs={ + "Geometry": group_input, + "Value": group_input.outputs["Attribute"], + "Index": ceil_ceil, + }, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Vector": uvparamtovertidxs_1.outputs["Remainder"], + 9: (transfer_attribute_2, "Value"), + 10: (transfer_attribute_3, "Value"), + }, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + map_range_2 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Vector": uvparamtovertidxs.outputs["Remainder"], + 9: map_range.outputs["Vector"], + 10: map_range_1.outputs["Vector"], + }, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Vector": map_range_2.outputs["Vector"]}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_polar_to_cart", singleton=False, type="GeometryNodeTree" +) +def nodegroup_polar_to_cart(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "Angle", 0.5000), + ("NodeSocketFloat", "Length", 0.0000), + ("NodeSocketVector", "Origin", (0.0000, 0.0000, 0.0000)), + ], + ) + + cosine = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Angle"]}, + attrs={"operation": "COSINE"}, + ) + + sine = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Angle"]}, + attrs={"operation": "SINE"}, + ) + + construct_unit_vector = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": cosine, "Z": sine}, + label="Construct Unit Vector", + ) + + offset_polar = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: group_input.outputs["Length"], + 1: construct_unit_vector, + 2: group_input.outputs["Origin"], + }, + label="Offset Polar", + attrs={"operation": "MULTIPLY_ADD"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Vector": offset_polar.outputs["Vector"]}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup("nodegroup_switch4", singleton=False, type="GeometryNodeTree") +def nodegroup_switch4(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketInt", "Arg", 0), + ("NodeSocketVector", "Arg == 0", (0.0000, 0.0000, 0.0000)), + ("NodeSocketVector", "Arg == 1", (0.0000, 0.0000, 0.0000)), + ("NodeSocketVector", "Arg == 2", (0.0000, 0.0000, 0.0000)), + ("NodeSocketVector", "Arg == 3", (0.0000, 0.0000, 0.0000)), + ], + ) + + greater_equal = nw.new_node( + Nodes.Compare, + input_kwargs={2: group_input.outputs["Arg"], 3: 2}, + 
attrs={"data_type": "INT", "operation": "GREATER_EQUAL"}, + ) + + greater_equal_1 = nw.new_node( + Nodes.Compare, + input_kwargs={2: group_input.outputs["Arg"], 3: 1}, + attrs={"data_type": "INT", "operation": "GREATER_EQUAL"}, + ) + + switch_1 = nw.new_node( + Nodes.Switch, + input_kwargs={ + 0: greater_equal_1, + 8: group_input.outputs["Arg == 0"], + 9: group_input.outputs["Arg == 1"], + }, + attrs={"input_type": "VECTOR"}, + ) + + greater_equal_2 = nw.new_node( + Nodes.Compare, + input_kwargs={2: group_input.outputs["Arg"], 3: 3}, + attrs={"data_type": "INT", "operation": "GREATER_EQUAL"}, + ) + + switch_2 = nw.new_node( + Nodes.Switch, + input_kwargs={ + 0: greater_equal_2, + 8: group_input.outputs["Arg == 2"], + 9: group_input.outputs["Arg == 3"], + }, + attrs={"input_type": "VECTOR"}, + ) + + switch = nw.new_node( + Nodes.Switch, + input_kwargs={0: greater_equal, 8: switch_1.outputs[3], 9: switch_2.outputs[3]}, + attrs={"input_type": "VECTOR"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Output": switch.outputs[3]}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_symmetric_clone", singleton=False, type="GeometryNodeTree" +) +def nodegroup_symmetric_clone(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketVectorXYZ", "Scale", (1.0000, -1.0000, 1.0000)), + ], + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + "Scale": group_input.outputs["Scale"], + }, + ) + + flip_faces = nw.new_node(Nodes.FlipFaces, input_kwargs={"Mesh": transform}) + + join_geometry_2 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [group_input.outputs["Geometry"], flip_faces]}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Both": join_geometry_2, + "Orig": group_input.outputs["Geometry"], + "Inverted": flip_faces, + }, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_scale_bump", singleton=False, type="GeometryNodeTree" +) +def nodegroup_scale_bump(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketFloat", "Density", 50.0000), + ("NodeSocketFloat", "Depth", 0.0050), + ("NodeSocketFloat", "Bump", 0.0100), + ("NodeSocketInt", "Level", 2), + ("NodeSocketBool", "Selection", True), + ], + ) + + subdivide_mesh = nw.new_node( + Nodes.SubdivideMesh, + input_kwargs={ + "Mesh": group_input.outputs["Geometry"], + "Level": group_input.outputs["Level"], + }, + ) + + normal = nw.new_node(Nodes.InputNormal) + + position = nw.new_node(Nodes.InputPosition) + + noise_texture_1 = nw.new_node(Nodes.NoiseTexture, input_kwargs={"Vector": position}) + + scale = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: noise_texture_1.outputs["Color"], "Scale": 0.2000}, + attrs={"operation": "SCALE"}, + ) + + add = nw.new_node( + Nodes.VectorMath, input_kwargs={0: scale.outputs["Vector"], 1: position} + ) + + voronoi_texture_1 = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={ + "Vector": add.outputs["Vector"], + "Scale": group_input.outputs["Density"], + "Randomness": 0.5000, + }, + attrs={"feature": "DISTANCE_TO_EDGE"}, + ) + + colorramp_1 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": 
voronoi_texture_1.outputs["Distance"]} + ) + colorramp_1.color_ramp.elements[0].position = 0.0000 + colorramp_1.color_ramp.elements[0].color = [0.0000, 0.0000, 0.0000, 1.0000] + colorramp_1.color_ramp.elements[1].position = 0.9909 + colorramp_1.color_ramp.elements[1].color = [1.0000, 1.0000, 1.0000, 1.0000] + + scale_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: colorramp_1.outputs["Color"], + "Scale": group_input.outputs["Bump"], + }, + attrs={"operation": "SCALE"}, + ) + + scale_2 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: normal, "Scale": scale_1.outputs["Vector"]}, + attrs={"operation": "SCALE"}, + ) + + colorramp = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": voronoi_texture_1.outputs["Distance"]} + ) + colorramp.color_ramp.elements[0].position = 0.0000 + colorramp.color_ramp.elements[0].color = [0.0000, 0.0000, 0.0000, 1.0000] + colorramp.color_ramp.elements[1].position = 0.0591 + colorramp.color_ramp.elements[1].color = [1.0000, 1.0000, 1.0000, 1.0000] + + scale_3 = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: colorramp.outputs["Color"], + "Scale": group_input.outputs["Depth"], + }, + attrs={"operation": "SCALE"}, + ) + + scale_4 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: normal, "Scale": scale_3.outputs["Vector"]}, + attrs={"operation": "SCALE"}, + ) + + add_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: scale_2.outputs["Vector"], 1: scale_4.outputs["Vector"]}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": subdivide_mesh, + "Selection": group_input.outputs["Selection"], + "Offset": add_1.outputs["Vector"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": set_position}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_chameleon_leg_raw_shape", singleton=False, type="GeometryNodeTree" +) +def nodegroup_chameleon_leg_raw_shape(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "thigh_length", 0.6000), + ("NodeSocketFloat", "calf_length", 0.5000), + ("NodeSocketFloat", "thigh_body_rotation", 0.5000), + ("NodeSocketFloat", "calf_body_rotation", 0.5000), + ("NodeSocketFloat", "thigh_calf_rotation", 20.0000), + ("NodeSocketFloat", "toe_toe_rotation", 20.0000), + ("NodeSocketVectorXYZ", "thigh_scale", (1.0000, 0.6500, 1.0000)), + ("NodeSocketVectorXYZ", "calf_scale", (1.0000, 0.6500, 1.0000)), + ("NodeSocketVectorXYZ", "ouScale", (1.0000, 1.0000, 1.0000)), + ("NodeSocketVectorXYZ", "inScale", (1.0000, 1.0000, 1.0000)), + ], + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["thigh_length"]}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"X": multiply}) + + combine_xyz_2 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": group_input.outputs["thigh_length"]} + ) + + quadratic_bezier = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Resolution": 64, + "Start": (0.0000, 0.0000, 0.0000), + "Middle": combine_xyz_3, + "End": combine_xyz_2, + }, + ) + + simpletube = nw.new_node( + nodegroup_simple_tube().name, + input_kwargs={ + "Curve": quadratic_bezier, + "RadStartEnd": (0.1500, 0.2000, 0.9000), + "Resolution": 64, + }, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["thigh_calf_rotation"], 1: -1.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + add = 
nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["thigh_body_rotation"], 1: 180.0000}, + ) + + combine_xyz_7 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"Y": multiply_1, "Z": add} + ) + + scale = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: combine_xyz_7, "Scale": 0.0174}, + attrs={"operation": "SCALE"}, + ) + + transform_geometry = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": simpletube.outputs["Mesh"], + "Rotation": scale.outputs["Vector"], + "Scale": group_input.outputs["thigh_scale"], + }, + ) + + round_bump = nw.new_node( + nodegroup_round_bump().name, + input_kwargs={ + "Geometry": transform_geometry, + "Distance": 0.0070, + "Offset Scale": 0.0020, + }, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["calf_length"]}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_4 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"X": multiply_2}) + + combine_xyz_5 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": group_input.outputs["calf_length"]} + ) + + quadratic_bezier_1 = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Resolution": 64, + "Start": (0.0000, 0.0000, 0.0000), + "Middle": combine_xyz_4, + "End": combine_xyz_5, + }, + ) + + simpletube_1 = nw.new_node( + nodegroup_simple_tube().name, + input_kwargs={ + "Curve": quadratic_bezier_1, + "RadStartEnd": (0.1500, 0.1000, 0.9000), + "Resolution": 64, + }, + ) + + add_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["calf_body_rotation"], 1: 180.0000}, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"Y": group_input.outputs["thigh_calf_rotation"], "Z": add_1}, + ) + + scale_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: combine_xyz, "Scale": 0.0174}, + attrs={"operation": "SCALE"}, + ) + + transform_geometry_1 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": simpletube_1.outputs["Mesh"], + "Rotation": scale_1.outputs["Vector"], + "Scale": group_input.outputs["calf_scale"], + }, + ) + + round_bump_1 = nw.new_node( + nodegroup_round_bump().name, + input_kwargs={ + "Geometry": transform_geometry_1, + "Distance": 0.0070, + "Offset Scale": 0.0020, + }, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: 180.0000, 1: group_input.outputs["thigh_calf_rotation"]}, + attrs={"operation": "SUBTRACT"}, + ) + + multiply_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["toe_toe_rotation"], 1: -1.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"Y": subtract, "Z": multiply_3} + ) + + scale_2 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: combine_xyz_1, "Scale": 0.0174}, + attrs={"operation": "SCALE"}, + ) + + add_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["toe_toe_rotation"], 1: 180.0000}, + ) + + combine_xyz_6 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"Y": group_input.outputs["thigh_calf_rotation"], "Z": add_2}, + ) + + scale_3 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: combine_xyz_6, "Scale": 0.0174}, + attrs={"operation": "SCALE"}, + ) + + chameleon_foot_shape = nw.new_node( + nodegroup_chameleon_foot_shape().name, + input_kwargs={ + "ouRotation": scale_2.outputs["Vector"], + "inRotation": scale_3.outputs["Vector"], + "ouScale": group_input.outputs["ouScale"], + "inScale": group_input.outputs["inScale"], + }, + ) + + transform_geometry_2 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": simpletube_1.outputs["Curve"], + 
"Rotation": scale_1.outputs["Vector"], + "Scale": (1.0000, 0.6500, 1.0000), + }, + ) + + sample_curve = nw.new_node( + Nodes.SampleCurve, + input_kwargs={"Curve": transform_geometry_2, "Factor": 0.8500}, + attrs={"mode": "FACTOR"}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": chameleon_foot_shape, + "Offset": sample_curve.outputs["Position"], + }, + ) + + round_bump_2 = nw.new_node( + nodegroup_round_bump().name, + input_kwargs={ + "Geometry": set_position, + "Distance": 0.0050, + "Offset Scale": 0.0020, + }, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [round_bump, round_bump_1, round_bump_2]}, + ) + + transform_geometry_3 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": simpletube.outputs["Curve"], + "Rotation": scale.outputs["Vector"], + "Scale": group_input.outputs["thigh_scale"], + }, + ) + + sample_curve_1 = nw.new_node( + Nodes.SampleCurve, + input_kwargs={"Curve": transform_geometry_3, "Factor": 1.0000}, + attrs={"mode": "FACTOR"}, + ) + + scale_4 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: sample_curve_1.outputs["Position"], "Scale": -1.0000}, + attrs={"operation": "SCALE"}, + ) + + set_position_1 = nw.new_node( + Nodes.SetPosition, + input_kwargs={"Geometry": join_geometry, "Offset": scale_4.outputs["Vector"]}, + ) + + subdivision_surface = nw.new_node( + Nodes.SubdivisionSurface, input_kwargs={"Mesh": set_position_1} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Mesh": subdivision_surface}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_chameleon_tail_shape", singleton=False, type="GeometryNodeTree" +) +def nodegroup_chameleon_tail_shape(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + quadratic_bezier = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Resolution": 64, + "Middle": (0.0000, 0.2000, 0.0000), + "End": (2.0000, -0.5000, 0.0000), + }, + ) + + simpletube = nw.new_node( + nodegroup_simple_tube().name, + input_kwargs={ + "Curve": quadratic_bezier, + "RadStartEnd": (0.4000, 0.0000, 0.9000), + "Resolution": 64, + }, + ) + + quadratic_bezier_1 = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Resolution": 64, + "Start": (0.2000, 0.0000, 0.0000), + "Middle": (0.6000, 0.0000, 0.0100), + "End": (0.8000, 0.0000, 0.0200), + }, + ) + + curveparametercurve = nw.new_node( + nodegroup_curve_parameter_curve().name, + input_kwargs={ + "Surface": simpletube.outputs["Mesh"], + "UVCurve": quadratic_bezier_1, + "CtrlptsU": 64, + "CtrlptsW": 64, + }, + ) + + curvesculpt = nw.new_node( + nodegroup_curve_sculpt().name, + input_kwargs={ + "Target": simpletube.outputs["Mesh"], + "Curve": curveparametercurve, + "Base Radius": 0.0200, + "SymmY": False, + "Attr": True, + }, + ) + + subdivision_surface = nw.new_node( + Nodes.SubdivisionSurface, + input_kwargs={"Mesh": curvesculpt.outputs["Geometry"], "Level": 2}, + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": subdivision_surface, + "Translation": (1.0000, 0.0000, 0.1000), + "Rotation": (-1.5708, 0.0000, 0.0000), + }, + ) + + transform_1 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": quadratic_bezier, + "Translation": (1.0000, 0.0000, 0.0000), + "Rotation": (-1.5708, 0.0000, 0.0000), + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Mesh": transform, "Curve": transform_1}, + attrs={"is_active_output": True}, + ) + + 
+@node_utils.to_nodegroup( + "nodegroup_back_bump1", singleton=False, type="GeometryNodeTree" +) +def nodegroup_back_bump1(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Surface", None)] + ) + + quadratic_bezier_1 = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Resolution": 25, + "Start": (0.0000, 0.7500, 0.1000), + "Middle": (0.6000, 0.7500, 0.0000), + "End": (1.0000, 0.7500, 0.1000), + }, + ) + + curveparametercurve = nw.new_node( + nodegroup_curve_parameter_curve().name, + input_kwargs={ + "Surface": group_input.outputs["Surface"], + "UVCurve": quadratic_bezier_1, + "CtrlptsU": 64, + "CtrlptsW": 64, + }, + ) + + curvesculpt = nw.new_node( + nodegroup_curve_sculpt().name, + input_kwargs={ + "Target": group_input.outputs["Surface"], + "Curve": curveparametercurve, + "Base Radius": 0.3000, + "Base Factor": 0.0300, + "Name": "", + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": curvesculpt.outputs["Geometry"]}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_back_bump2", singleton=False, type="GeometryNodeTree" +) +def nodegroup_back_bump2(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Surface", None)] + ) + + quadratic_bezier_1 = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Resolution": 64, + "Start": (0.1000, 0.7500, 0.1000), + "Middle": (0.4000, 0.7500, 0.0000), + "End": (0.9000, 0.7500, 0.1000), + }, + ) + + curveparametercurve = nw.new_node( + nodegroup_curve_parameter_curve().name, + input_kwargs={ + "Surface": group_input.outputs["Surface"], + "UVCurve": quadratic_bezier_1, + "CtrlptsU": 64, + "CtrlptsW": 64, + }, + ) + + curvesculpt = nw.new_node( + nodegroup_curve_sculpt().name, + input_kwargs={ + "Target": group_input.outputs["Surface"], + "Curve": curveparametercurve, + "Base Radius": 0.1500, + "Base Factor": 0.1000, + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": curvesculpt.outputs["Geometry"]}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_back_bump3", singleton=False, type="GeometryNodeTree" +) +def nodegroup_back_bump3(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Surface", None)] + ) + + quadratic_bezier_1 = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Resolution": 25, + "Start": (0.1500, 0.7500, 0.0600), + "Middle": (0.6000, 0.7500, 0.0000), + "End": (0.9000, 0.7500, 0.0600), + }, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": quadratic_bezier_1} + ) + + curveparametercurve = nw.new_node( + nodegroup_curve_parameter_curve().name, + input_kwargs={ + "Surface": group_input.outputs["Surface"], + "UVCurve": join_geometry, + "CtrlptsU": 64, + "CtrlptsW": 64, + }, + ) + + curvesculpt = nw.new_node( + nodegroup_curve_sculpt().name, + input_kwargs={ + "Target": group_input.outputs["Surface"], + "Curve": curveparametercurve, + "Base Radius": 0.1000, + "Attr": True, + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": curvesculpt.outputs["Geometry"]}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_belly_sunken1", 
singleton=False, type="GeometryNodeTree" +) +def nodegroup_belly_sunken1(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Surface", None)] + ) + + quadratic_bezier_1 = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Resolution": 30, + "Start": (0.0000, 0.2500, 0.0000), + "Middle": (0.6000, 0.2500, 0.0000), + "End": (1.0000, 0.2500, 0.0000), + }, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": quadratic_bezier_1} + ) + + curveparametercurve = nw.new_node( + nodegroup_curve_parameter_curve().name, + input_kwargs={ + "Surface": group_input.outputs["Surface"], + "UVCurve": join_geometry, + "CtrlptsU": 64, + "CtrlptsW": 64, + }, + ) + + curvesculpt = nw.new_node( + nodegroup_curve_sculpt().name, + input_kwargs={ + "Target": group_input.outputs["Surface"], + "Curve": curveparametercurve, + "Base Radius": 0.0300, + "Base Factor": 0.0200, + "Name": "", + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": curvesculpt.outputs["Geometry"]}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_shouder_sunken", singleton=False, type="GeometryNodeTree" +) +def nodegroup_shouder_sunken(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Surface", None)] + ) + + quadratic_bezier_1 = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Resolution": 25, + "Start": (0.1500, 0.2500, 0.1000), + "Middle": (0.2000, 0.2500, 0.0000), + "End": (0.3000, 0.2500, 0.1000), + }, + ) + + curveparametercurve = nw.new_node( + nodegroup_curve_parameter_curve().name, + input_kwargs={ + "Surface": group_input.outputs["Surface"], + "UVCurve": quadratic_bezier_1, + "CtrlptsU": 64, + "CtrlptsW": 64, + }, + ) + + curvesculpt = nw.new_node( + nodegroup_curve_sculpt().name, + input_kwargs={ + "Target": group_input.outputs["Surface"], + "Curve": curveparametercurve, + "Base Radius": 0.2000, + "Base Factor": -0.0300, + "SymmY": False, + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": curvesculpt.outputs["Geometry"]}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_neck_bump", singleton=False, type="GeometryNodeTree" +) +def nodegroup_neck_bump(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Surface", None)] + ) + + quadratic_bezier_1 = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Resolution": 25, + "Start": (0.0000, 0.2500, 0.0000), + "Middle": (0.0500, 0.2500, 0.0000), + "End": (0.0700, 0.2500, 0.1000), + }, + ) + + curveparametercurve = nw.new_node( + nodegroup_curve_parameter_curve().name, + input_kwargs={ + "Surface": group_input.outputs["Surface"], + "UVCurve": quadratic_bezier_1, + "CtrlptsU": 64, + "CtrlptsW": 64, + }, + ) + + curvesculpt = nw.new_node( + nodegroup_curve_sculpt().name, + input_kwargs={ + "Target": group_input.outputs["Surface"], + "Curve": curveparametercurve, + "Base Radius": 0.2000, + "SymmY": False, + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": curvesculpt.outputs["Geometry"]}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_neck_bump2", singleton=False, 
type="GeometryNodeTree" +) +def nodegroup_neck_bump2(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Surface", None)] + ) + + quadratic_bezier_1 = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Resolution": 25, + "Start": (0.0000, 0.2500, 0.0000), + "Middle": (0.0250, 0.2500, 0.1000), + "End": (0.0500, 0.2500, 0.2000), + }, + ) + + curveparametercurve = nw.new_node( + nodegroup_curve_parameter_curve().name, + input_kwargs={ + "Surface": group_input.outputs["Surface"], + "UVCurve": quadratic_bezier_1, + "CtrlptsU": 64, + "CtrlptsW": 64, + }, + ) + + curvesculpt = nw.new_node( + nodegroup_curve_sculpt().name, + input_kwargs={ + "Target": group_input.outputs["Surface"], + "Curve": curveparametercurve, + "Base Radius": 0.2000, + "SymmY": False, + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": curvesculpt.outputs["Geometry"]}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_curve_parameter_curve", singleton=False, type="GeometryNodeTree" +) +def nodegroup_curve_parameter_curve(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Surface", None), + ("NodeSocketGeometry", "UVCurve", None), + ("NodeSocketInt", "CtrlptsU", 0), + ("NodeSocketInt", "CtrlptsW", 0), + ], + ) + + normal = nw.new_node(Nodes.InputNormal) + + position = nw.new_node(Nodes.InputPosition) + + separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": position}) + + position_1 = nw.new_node(Nodes.InputPosition) + + bilinearinterpindextransfer = nw.new_node( + nodegroup_bilinear_interp_index_transfer().name, + input_kwargs={ + "Source": group_input.outputs["Surface"], + "U": separate_xyz.outputs["X"], + "V": separate_xyz.outputs["Y"], + "Attribute": position_1, + "SizeU": group_input.outputs["CtrlptsU"], + "SizeV": group_input.outputs["CtrlptsW"], + "CyclicV": True, + }, + ) + + transfer_attribute = nw.new_node( + Nodes.SampleNearestSurface, + input_kwargs={ + "Mesh": group_input.outputs["Surface"], + "Value": normal, + "Sample Position": bilinearinterpindextransfer, + }, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + multiply_add = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: (transfer_attribute, "Value"), + 1: separate_xyz.outputs["Z"], + 2: bilinearinterpindextransfer, + }, + attrs={"operation": "MULTIPLY_ADD"}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": group_input.outputs["UVCurve"], + "Position": multiply_add.outputs["Vector"], + }, + ) + + normal_1 = nw.new_node(Nodes.InputNormal) + + dot_product = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: normal, 1: normal_1}, + attrs={"operation": "DOT_PRODUCT"}, + ) + + arcsine = nw.new_node( + Nodes.Math, + input_kwargs={0: dot_product.outputs["Value"]}, + attrs={"operation": "ARCSINE"}, + ) + + set_curve_tilt = nw.new_node( + Nodes.SetCurveTilt, input_kwargs={"Curve": set_position, "Tilt": arcsine} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": set_curve_tilt}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_polar_bezier", singleton=False, type="GeometryNodeTree" +) +def nodegroup_polar_bezier(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + 
Nodes.GroupInput, + expose_input=[ + ("NodeSocketIntUnsigned", "Resolution", 32), + ("NodeSocketVector", "Origin", (0.0000, 0.0000, 0.0000)), + ("NodeSocketVector", "angles_deg", (0.0000, 0.0000, 0.0000)), + ("NodeSocketVector", "Seg Lengths", (0.3000, 0.3000, 0.3000)), + ("NodeSocketBool", "Do Bezier", True), + ], + ) + + mesh_line = nw.new_node(Nodes.MeshLine, input_kwargs={"Count": 4}) + + index = nw.new_node(Nodes.Index) + + deg2_rad = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: group_input.outputs["angles_deg"], "Scale": 0.0175}, + label="Deg2Rad", + attrs={"operation": "SCALE"}, + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": deg2_rad.outputs["Vector"]} + ) + + separate_xyz_1 = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": group_input.outputs["Seg Lengths"]} + ) + + polartocart = nw.new_node( + nodegroup_polar_to_cart().name, + input_kwargs={ + "Angle": separate_xyz, + "Length": separate_xyz_1.outputs["X"], + "Origin": group_input.outputs["Origin"], + }, + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: separate_xyz, 1: separate_xyz.outputs["Y"]} + ) + + polartocart_1 = nw.new_node( + nodegroup_polar_to_cart().name, + input_kwargs={ + "Angle": add, + "Length": separate_xyz_1.outputs["Y"], + "Origin": polartocart, + }, + ) + + add_1 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz.outputs["Z"], 1: add}) + + polartocart_2 = nw.new_node( + nodegroup_polar_to_cart().name, + input_kwargs={ + "Angle": add_1, + "Length": separate_xyz_1.outputs["Z"], + "Origin": polartocart_1, + }, + ) + + switch4 = nw.new_node( + nodegroup_switch4().name, + input_kwargs={ + "Arg": index, + "Arg == 0": group_input.outputs["Origin"], + "Arg == 1": polartocart, + "Arg == 2": polartocart_1, + "Arg == 3": polartocart_2, + }, + ) + + set_position = nw.new_node( + Nodes.SetPosition, input_kwargs={"Geometry": mesh_line, "Position": switch4} + ) + + mesh_to_curve = nw.new_node(Nodes.MeshToCurve, input_kwargs={"Mesh": set_position}) + + subdivide_curve_1 = nw.new_node( + Nodes.SubdivideCurve, + input_kwargs={ + "Curve": mesh_to_curve, + "Cuts": group_input.outputs["Resolution"], + }, + ) + + integer = nw.new_node(Nodes.Integer) + integer.integer = 2 + + bezier_segment = nw.new_node( + Nodes.CurveBezierSegment, + input_kwargs={ + "Resolution": integer, + "Start": group_input.outputs["Origin"], + "Start Handle": polartocart, + "End Handle": polartocart_1, + "End": polartocart_2, + }, + ) + + divide = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Resolution"], 1: integer}, + attrs={"operation": "DIVIDE"}, + ) + + subdivide_curve = nw.new_node( + Nodes.SubdivideCurve, input_kwargs={"Curve": bezier_segment, "Cuts": divide} + ) + + switch = nw.new_node( + Nodes.Switch, + input_kwargs={ + 1: group_input.outputs["Do Bezier"], + 14: subdivide_curve_1, + 15: subdivide_curve, + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Curve": switch.outputs[6], "Endpoint": polartocart_2}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_simple_tube", singleton=False, type="GeometryNodeTree" +) +def nodegroup_simple_tube(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Curve", None), + ("NodeSocketVector", "RadStartEnd", (0.0500, 0.0500, 1.0000)), + ("NodeSocketInt", "Resolution", 32), + ], + ) + + spline_parameter = nw.new_node(Nodes.SplineParameter) + + subtract 
= nw.new_node( + Nodes.Math, + input_kwargs={0: 1.0000, 1: spline_parameter.outputs["Factor"]}, + attrs={"operation": "SUBTRACT"}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract, 1: spline_parameter.outputs["Factor"]}, + attrs={"operation": "MULTIPLY"}, + ) + + sqrt = nw.new_node( + Nodes.Math, input_kwargs={0: multiply}, attrs={"operation": "SQRT"} + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": group_input.outputs["RadStartEnd"]} + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": spline_parameter.outputs["Factor"], + 3: separate_xyz.outputs["X"], + 4: separate_xyz.outputs["Y"], + }, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: sqrt, 1: map_range.outputs["Result"]}, + attrs={"operation": "MULTIPLY"}, + ) + + set_curve_radius = nw.new_node( + Nodes.SetCurveRadius, + input_kwargs={"Curve": group_input.outputs["Curve"], "Radius": multiply_1}, + ) + + curve_circle = nw.new_node( + Nodes.CurveCircle, + input_kwargs={"Resolution": group_input.outputs["Resolution"]}, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": 1.0000, "Y": separate_xyz.outputs["Z"]} + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": curve_circle.outputs["Curve"], "Scale": combine_xyz}, + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={"Curve": set_curve_radius, "Profile Curve": transform}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Mesh": curve_to_mesh, "Curve": set_curve_radius}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_curve_sculpt", singleton=False, type="GeometryNodeTree" +) +def nodegroup_curve_sculpt(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input_1 = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Target", None), + ("NodeSocketGeometry", "Curve", None), + ("NodeSocketFloat", "Base Radius", 0.0500), + ("NodeSocketFloat", "Base Factor", 0.0500), + ("NodeSocketBool", "SymmY", True), + ("NodeSocketGeometry", "StrokeRadFacModifier", None), + ("NodeSocketBool", "Switch", True), + ("NodeSocketBool", "Attr", False), + ("NodeSocketString", "Name", "Ridge"), + ], + ) + + normal = nw.new_node(Nodes.InputNormal) + + symmetric_clone = nw.new_node( + nodegroup_symmetric_clone().name, + input_kwargs={"Geometry": group_input_1.outputs["Curve"]}, + ) + + switch = nw.new_node( + Nodes.Switch, + input_kwargs={ + 1: group_input_1.outputs["SymmY"], + 14: group_input_1.outputs["Curve"], + 15: symmetric_clone.outputs["Both"], + }, + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, input_kwargs={"Curve": switch.outputs[6]} + ) + + geometry_proximity = nw.new_node( + Nodes.Proximity, + input_kwargs={"Target": curve_to_mesh}, + attrs={"target_element": "POINTS"}, + ) + + curve_to_mesh_1 = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={"Curve": group_input_1.outputs["StrokeRadFacModifier"]}, + ) + + position = nw.new_node(Nodes.InputPosition) + + index = nw.new_node(Nodes.Index) + + transfer_attribute = nw.new_node( + Nodes.SampleIndex, + input_kwargs={"Geometry": curve_to_mesh_1, "Value": position, "Index": index}, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": (transfer_attribute, "Value")} + ) + + add = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input_1.outputs["Base Radius"], + 1: separate_xyz.outputs["X"], + }, + 
) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": geometry_proximity.outputs["Distance"], 2: add}, + ) + + float_curve_1 = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": map_range.outputs["Result"]} + ) + node_utils.assign_curve( + float_curve_1.mapping.curves[0], + [(0.0000, 1.0000), (0.4364, 0.9212), (0.6182, 0.0787), (1.0000, 0.0000)], + handles=["VECTOR", "AUTO", "AUTO", "VECTOR"], + ) + + float_curve = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": map_range.outputs["Result"]} + ) + node_utils.assign_curve( + float_curve.mapping.curves[0], + [(0.0000, 1.0000), (0.2500, 0.9588), (0.7455, 0.0475), (1.0000, 0.0000)], + handles=["VECTOR", "AUTO", "AUTO", "VECTOR"], + ) + + switch_2 = nw.new_node( + Nodes.Switch, + input_kwargs={ + 0: group_input_1.outputs["Switch"], + 2: float_curve_1, + 3: float_curve, + }, + attrs={"input_type": "FLOAT"}, + ) + + add_1 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input_1.outputs["Base Factor"], + 1: separate_xyz.outputs["Y"], + }, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: switch_2.outputs["Output"], 1: add_1}, + attrs={"operation": "MULTIPLY"}, + ) + + scale = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: normal, "Scale": multiply}, + attrs={"operation": "SCALE"}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": group_input_1.outputs["Target"], + "Offset": scale.outputs["Vector"], + }, + ) + + named_attribute = nw.new_node( + Nodes.NamedAttribute, input_kwargs={"Name": group_input_1.outputs["Name"]} + ) + + maximum = nw.new_node( + Nodes.Math, + input_kwargs={0: named_attribute.outputs[1], 1: switch_2.outputs["Output"]}, + attrs={"use_clamp": True, "operation": "MAXIMUM"}, + ) + + store_named_attribute = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": set_position, + "Name": group_input_1.outputs["Name"], + "Value": maximum, + }, + attrs={"data_type": "FLOAT", "domain": "POINT"}, + ) + + switch_3 = nw.new_node( + Nodes.Switch, + input_kwargs={ + 1: group_input_1.outputs["Attr"], + 14: set_position, + 15: store_named_attribute, + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": switch_3.outputs[6], + "Result": switch_2.outputs["Output"], + }, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_chameleon_eye", singleton=False, type="GeometryNodeTree" +) +def nodegroup_chameleon_eye(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + polarbezier = nw.new_node( + nodegroup_polar_bezier().name, + input_kwargs={ + "Resolution": 1024, + "angles_deg": (0.0000, 0.0000, 10.0000), + "Seg Lengths": (0.1500, 0.1500, 0.1500), + }, + ) + + simpletube = nw.new_node( + nodegroup_simple_tube().name, + input_kwargs={ + "Curve": polarbezier.outputs["Curve"], + "RadStartEnd": (0.4000, 0.4000, 1.0000), + "Resolution": 1024, + }, + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": simpletube.outputs["Mesh"], + "Scale": (4.0000, 4.5000, 4.5000), + }, + ) + + quadratic_bezier_25 = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Resolution": 256, + "Start": (0.9900, 0.0000, 0.0000), + "Middle": (0.9900, 0.5000, 0.0000), + "End": (0.9900, 1.0000, 0.0000), + }, + ) + + curveparametercurve = nw.new_node( + nodegroup_curve_parameter_curve().name, + input_kwargs={ + "Surface": transform, + "UVCurve": quadratic_bezier_25, + "CtrlptsU": 1024, + "CtrlptsW": 1024, + }, + ) + + curvesculpt = 
nw.new_node( + nodegroup_curve_sculpt().name, + input_kwargs={ + "Target": transform, + "Curve": curveparametercurve, + "Base Factor": 0.1000, + }, + ) + + quadratic_bezier_26 = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Resolution": 200, + "Start": (1.0000, 0.0000, 0.0000), + "Middle": (1.0000, 0.5000, 0.0000), + "End": (1.0000, 1.0000, 0.0000), + }, + ) + + curveparametercurve_1 = nw.new_node( + nodegroup_curve_parameter_curve().name, + input_kwargs={ + "Surface": curvesculpt.outputs["Geometry"], + "UVCurve": quadratic_bezier_26, + "CtrlptsU": 1024, + "CtrlptsW": 1024, + }, + ) + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketFloat", "pupil_radius", 0.2200)] + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["pupil_radius"], 1: 0.0300} + ) + + curvesculpt_1 = nw.new_node( + nodegroup_curve_sculpt().name, + input_kwargs={ + "Target": curvesculpt.outputs["Geometry"], + "Curve": curveparametercurve_1, + "Base Radius": add, + "Base Factor": 0.0000, + "Switch": False, + "Attr": True, + }, + ) + + quadratic_bezier_27 = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Resolution": 256, + "Start": (1.0000, 0.0000, 0.0000), + "Middle": (1.0000, 0.5000, 0.0000), + "End": (1.0000, 1.0000, 0.0000), + }, + ) + + curveparametercurve_2 = nw.new_node( + nodegroup_curve_parameter_curve().name, + input_kwargs={ + "Surface": curvesculpt_1.outputs["Geometry"], + "UVCurve": quadratic_bezier_27, + "CtrlptsU": 1024, + "CtrlptsW": 1024, + }, + ) + + curvesculpt_2 = nw.new_node( + nodegroup_curve_sculpt().name, + input_kwargs={ + "Target": curvesculpt_1.outputs["Geometry"], + "Curve": curveparametercurve_2, + "Base Radius": group_input.outputs["pupil_radius"], + "Base Factor": 0.0000, + "Switch": False, + "Attr": True, + "Name": "Pupil", + }, + ) + + op_or = nw.new_node( + Nodes.BooleanMath, + input_kwargs={ + 0: curvesculpt_1.outputs["Result"], + 1: curvesculpt_2.outputs["Result"], + }, + attrs={"operation": "OR"}, + ) + + op_not = nw.new_node( + Nodes.BooleanMath, input_kwargs={0: op_or}, attrs={"operation": "NOT"} + ) + + scale_bump = nw.new_node( + nodegroup_scale_bump().name, + input_kwargs={ + "Geometry": curvesculpt_2.outputs["Geometry"], + "Density": 20.0000, + "Depth": 0.1000, + "Bump": 0.0200, + "Level": 0, + "Selection": op_not, + }, + ) + + normal = nw.new_node(Nodes.InputNormal) + + position = nw.new_node(Nodes.InputPosition) + + separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": position}) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "W": separate_xyz.outputs["X"], + "Scale": 12.0000, + "Detail": 10.0000, + "Roughness": 0.0000, + }, + attrs={"noise_dimensions": "1D"}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: noise_texture.outputs["Fac"], 1: 0.0300}, + attrs={"use_clamp": True, "operation": "MULTIPLY"}, + ) + + scale = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: normal, "Scale": multiply}, + attrs={"operation": "SCALE"}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={"Geometry": scale_bump, "Offset": scale.outputs["Vector"]}, + ) + + set_material = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": set_position, + "Material": surface.shaderfunc_to_material(shader_chameleon_eye), + }, + ) + + transform_1 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": set_material, "Scale": (0.0500, 0.0600, 0.0600)}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": 
transform_1}, + attrs={"is_active_output": True}, + ) + + +def shader_chameleon(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + attribute_1 = nw.new_node( + Nodes.Attribute, attrs={"attribute_name": "Ridge", "attribute_type": "GEOMETRY"} + ) + + # map_range = nw.new_node(Nodes.MapRange, input_kwargs={'Value': attribute_1.outputs["Fac"], 2: 0.0010}) + + colorramp_2 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": attribute_1.outputs["Fac"]} + ) + colorramp_2.color_ramp.elements[0].position = 0.0091 + colorramp_2.color_ramp.elements[0].color = [0.0000, 0.0000, 0.0000, 1.0000] + colorramp_2.color_ramp.elements[1].position = 0.9841 + colorramp_2.color_ramp.elements[1].color = [1.0000, 1.0000, 1.0000, 1.0000] + + texture_coordinate = nw.new_node(Nodes.TextureCoord) + + noise_texture_1 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": texture_coordinate.outputs["Generated"], + "Scale": 10.0000, + "Distortion": 2.0000, + }, + ) + + colorramp_3 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": noise_texture_1.outputs["Fac"]} + ) + colorramp_3.color_ramp.elements.new(0) + colorramp_3.color_ramp.elements[0].position = 0.2773 + colorramp_3.color_ramp.elements[0].color = [0.0660, 0.1203, 0.0151, 1.0000] + colorramp_3.color_ramp.elements[1].position = 0.6386 + colorramp_3.color_ramp.elements[1].color = [0.0405, 0.0397, 0.0064, 1.0000] + colorramp_3.color_ramp.elements[2].position = 1.0000 + colorramp_3.color_ramp.elements[2].color = [0.0069, 0.0278, 0.0000, 1.0000] + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={"Vector": texture_coordinate.outputs["Generated"], "W": 1.0000}, + attrs={"noise_dimensions": "4D"}, + ) + + colorramp_1 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": noise_texture.outputs["Fac"]} + ) + colorramp_1.color_ramp.elements.new(0) + colorramp_1.color_ramp.elements[0].position = 0.2818 + colorramp_1.color_ramp.elements[0].color = [0.3390, 0.1458, 0.0277, 1.0000] + colorramp_1.color_ramp.elements[1].position = 0.5795 + colorramp_1.color_ramp.elements[1].color = [0.1295, 0.0542, 0.0220, 1.0000] + colorramp_1.color_ramp.elements[2].position = 1.0000 + colorramp_1.color_ramp.elements[2].color = [0.2549, 0.1495, 0.0318, 1.0000] + + mix_1 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": colorramp_2.outputs["Color"], + "Color1": colorramp_3.outputs["Color"], + "Color2": colorramp_1.outputs["Color"], + }, + ) + + separate_color = nw.new_node(Nodes.SeparateColor, input_kwargs={"Color": mix_1}) + + texture_coordinate_1 = nw.new_node(Nodes.TextureCoord) + + noise_texture_2 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": texture_coordinate_1.outputs["Normal"], + "Scale": 20.0000, + "Detail": 200.0000, + "Roughness": 0.0000, + }, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: separate_color.outputs["Red"], + 1: noise_texture_2.outputs["Fac"], + }, + attrs={"use_clamp": True, "operation": "MULTIPLY"}, + ) + + combine_color = nw.new_node( + "ShaderNodeCombineColor", + input_kwargs={ + "Red": multiply, + "Green": separate_color.outputs["Green"], + "Blue": separate_color.outputs["Blue"], + }, + ) + + principled_bsdf_1 = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": combine_color, + "Specular": 0.3000, + "Roughness": 0.6000, + }, + ) + + material_output_1 = nw.new_node( + Nodes.MaterialOutput, + input_kwargs={"Surface": principled_bsdf_1}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + 
"nodegroup_chameleon_leg_shape", singleton=False, type="GeometryNodeTree" +) +def nodegroup_chameleon_leg_shape(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "body_length", 0.5000), + ("NodeSocketFloat", "body_position", 0.1000), + ("NodeSocketFloat", "body_thickness", 0.0500), + ("NodeSocketFloat", "body_height", -0.1000), + ("NodeSocketVectorEuler", "Rotation", (0.0000, -0.6981, 0.0000)), + ("NodeSocketFloat", "thigh_length", 0.6000), + ("NodeSocketFloat", "calf_length", 0.5000), + ("NodeSocketFloat", "thigh_body_rotation", 25.0000), + ("NodeSocketFloat", "calf_body_rotation", 15.0000), + ("NodeSocketFloat", "thigh_calf_rotation", 20.0000), + ("NodeSocketFloat", "toe_toe_rotation", 20.0000), + ("NodeSocketVectorXYZ", "thigh_scale", (1.0000, 0.6500, 1.0000)), + ("NodeSocketVectorXYZ", "calf_scale", (1.0000, 0.6500, 1.0000)), + ("NodeSocketVectorXYZ", "ouScale", (1.0000, 1.0000, 1.0000)), + ("NodeSocketVectorXYZ", "inScale", (0.6000, 1.0000, 1.0000)), + ], + ) + + chameleon_leg_raw_shape = nw.new_node( + nodegroup_chameleon_leg_raw_shape().name, + input_kwargs={ + "thigh_length": group_input.outputs["thigh_length"], + "calf_length": group_input.outputs["calf_length"], + "thigh_body_rotation": group_input.outputs["thigh_body_rotation"], + "calf_body_rotation": group_input.outputs["calf_body_rotation"], + "thigh_calf_rotation": group_input.outputs["thigh_calf_rotation"], + "toe_toe_rotation": group_input.outputs["toe_toe_rotation"], + "thigh_scale": group_input.outputs["thigh_scale"], + "calf_scale": group_input.outputs["calf_scale"], + "ouScale": group_input.outputs["ouScale"], + "inScale": group_input.outputs["inScale"], + }, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["body_length"], + 1: group_input.outputs["body_position"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": multiply, + "Y": group_input.outputs["body_thickness"], + "Z": group_input.outputs["body_height"], + }, + ) + + transform_2 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": chameleon_leg_raw_shape, + "Translation": combine_xyz, + "Rotation": group_input.outputs["Rotation"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": transform_2}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_chameleon_tail", singleton=False, type="GeometryNodeTree" +) +def nodegroup_chameleon_tail(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVector", "RadStartEnd", (0.4000, 0.2500, 0.9000)), + ("NodeSocketFloat", "body_length", 0.5000), + ("NodeSocketFloat", "body_position", 0.5000), + ], + ) + + chameleon_tail_shape = nw.new_node(nodegroup_chameleon_tail_shape().name) + + sample_curve = nw.new_node( + Nodes.SampleCurve, + input_kwargs={"Curve": chameleon_tail_shape.outputs["Curve"]}, + attrs={"mode": "FACTOR"}, + ) + + scale = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: sample_curve.outputs["Position"], "Scale": -1.0000}, + attrs={"operation": "SCALE"}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": chameleon_tail_shape.outputs["Mesh"], + "Offset": scale.outputs["Vector"], + }, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: 
group_input.outputs["body_length"], + 1: group_input.outputs["body_position"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": multiply, "Z": 0.1000} + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": set_position, + "Translation": combine_xyz, + "Rotation": (0.0000, 0.1745, 0.3491), + "Scale": (1.0000, 0.8000, 1.0000), + }, + ) + + subdivide_mesh = nw.new_node( + Nodes.SubdivideMesh, input_kwargs={"Mesh": transform, "Level": 2} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": subdivide_mesh}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_chameleon_body_shape", singleton=False, type="GeometryNodeTree" +) +def nodegroup_chameleon_body_shape(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVectorXYZ", "Scale", (0.9000, 0.7000, 0.8000)), + ("NodeSocketFloat", "length", 1.4000), + ], + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["length"]}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": multiply, "Y": 0.1000} + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": group_input.outputs["length"], "Y": 0.3000} + ) + + quadratic_bezier = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Resolution": 64, + "Start": (0.0000, 0.0000, 0.0000), + "Middle": combine_xyz_1, + "End": combine_xyz, + }, + ) + + simpletube = nw.new_node( + nodegroup_simple_tube().name, + input_kwargs={ + "Curve": quadratic_bezier, + "RadStartEnd": (0.6000, 0.6000, 1.0000), + "Resolution": 64, + }, + ) + + transform_geometry = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": simpletube.outputs["Mesh"], + "Scale": group_input.outputs["Scale"], + }, + ) + + back_bump1 = nw.new_node( + nodegroup_back_bump1().name, input_kwargs={"Surface": transform_geometry} + ) + + back_bump2 = nw.new_node( + nodegroup_back_bump2().name, input_kwargs={"Surface": back_bump1} + ) + + back_bump3 = nw.new_node( + nodegroup_back_bump3().name, input_kwargs={"Surface": back_bump2} + ) + + belly_sunken1 = nw.new_node( + nodegroup_belly_sunken1().name, input_kwargs={"Surface": back_bump3} + ) + + shouder_sunken = nw.new_node( + nodegroup_shouder_sunken().name, input_kwargs={"Surface": belly_sunken1} + ) + + neck_bump = nw.new_node( + nodegroup_neck_bump().name, input_kwargs={"Surface": shouder_sunken} + ) + + neck_bump2 = nw.new_node( + nodegroup_neck_bump2().name, input_kwargs={"Surface": neck_bump} + ) + + subdivision_surface = nw.new_node( + Nodes.SubdivisionSurface, input_kwargs={"Mesh": neck_bump2} + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": subdivision_surface} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Mesh": join_geometry}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_chameleon_head_shape", singleton=False, type="GeometryNodeTree" +) +def nodegroup_chameleon_head_shape(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + polarbezier = nw.new_node( + nodegroup_polar_bezier().name, + input_kwargs={ + "Resolution": 64, + "angles_deg": (0.0000, 0.0000, -5.0000), + "Seg Lengths": (0.1000, 0.2400, 0.1000), + }, + ) + + simpletube = nw.new_node( + nodegroup_simple_tube().name, + 
input_kwargs={ + "Curve": polarbezier.outputs["Curve"], + "RadStartEnd": (0.4000, 0.1800, 0.7800), + "Resolution": 64, + }, + ) + + group_input_2 = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "Crown", 0.2000), + ("NodeSocketFloat", "EyeBrow", 0.0200), + ("NodeSocketVectorXYZ", "Scale", (1.0000, 1.0000, 1.0000)), + ], + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": simpletube.outputs["Mesh"], + "Scale": group_input_2.outputs["Scale"], + }, + ) + + quadratic_bezier_17 = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Resolution": 32, + "Start": (0.2000, 0.2500, 0.1000), + "Middle": (0.6000, 0.2500, 0.0000), + "End": (0.7900, 0.2500, 0.0000), + }, + ) + + curveparametercurve = nw.new_node( + nodegroup_curve_parameter_curve().name, + input_kwargs={ + "Surface": transform, + "UVCurve": quadratic_bezier_17, + "CtrlptsU": 64, + "CtrlptsW": 64, + }, + ) + + curvesculpt = nw.new_node( + nodegroup_curve_sculpt().name, + input_kwargs={ + "Target": transform, + "Curve": curveparametercurve, + "Base Radius": 0.1500, + "Base Factor": 0.0200, + "SymmY": False, + }, + ) + + quadratic_bezier_22 = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Resolution": 32, + "Start": (0.7500, 0.7500, 0.1000), + "Middle": (0.7200, 0.7500, 0.0000), + "End": (0.7000, 0.7500, 0.0000), + }, + ) + + curveparametercurve_1 = nw.new_node( + nodegroup_curve_parameter_curve().name, + input_kwargs={ + "Surface": curvesculpt.outputs["Geometry"], + "UVCurve": quadratic_bezier_22, + "CtrlptsU": 64, + "CtrlptsW": 64, + }, + ) + + curvesculpt_1 = nw.new_node( + nodegroup_curve_sculpt().name, + input_kwargs={ + "Target": curvesculpt.outputs["Geometry"], + "Curve": curveparametercurve_1, + "Base Radius": 0.1700, + "Base Factor": 0.0300, + "SymmY": False, + }, + ) + + quadratic_bezier_26 = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Resolution": 32, + "Start": (0.8000, 0.6800, 0.0300), + "Middle": (0.6500, 0.6800, 0.0000), + "End": (0.5000, 0.6000, 0.0500), + }, + ) + + curveparametercurve_2 = nw.new_node( + nodegroup_curve_parameter_curve().name, + input_kwargs={ + "Surface": curvesculpt_1.outputs["Geometry"], + "UVCurve": quadratic_bezier_26, + "CtrlptsU": 64, + "CtrlptsW": 64, + }, + ) + + curvesculpt_2 = nw.new_node( + nodegroup_curve_sculpt().name, + input_kwargs={ + "Target": curvesculpt_1.outputs["Geometry"], + "Curve": curveparametercurve_2, + "Base Factor": 0.0300, + }, + ) + + quadratic_bezier_1 = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Resolution": 32, + "Start": (0.7000, 0.5500, 0.0300), + "Middle": (0.7000, 0.5500, 0.0300), + "End": (0.7500, 0.5700, -0.0200), + }, + ) + + curveparametercurve_3 = nw.new_node( + nodegroup_curve_parameter_curve().name, + input_kwargs={ + "Surface": curvesculpt_2.outputs["Geometry"], + "UVCurve": quadratic_bezier_1, + "CtrlptsU": 64, + "CtrlptsW": 64, + }, + ) + + curvesculpt_3 = nw.new_node( + nodegroup_curve_sculpt().name, + input_kwargs={ + "Target": curvesculpt_2.outputs["Geometry"], + "Curve": curveparametercurve_3, + "Base Radius": 0.1000, + "Base Factor": -0.0200, + }, + ) + + quadratic_bezier_3 = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Resolution": 32, + "Start": (0.7000, 0.5800, 0.0100), + "Middle": (0.7500, 0.5800, 0.0100), + "End": (0.7700, 0.5300, 0.0100), + }, + ) + + curveparametercurve_4 = nw.new_node( + nodegroup_curve_parameter_curve().name, + input_kwargs={ + "Surface": curvesculpt_3.outputs["Geometry"], + "UVCurve": quadratic_bezier_3, + 
"CtrlptsU": 64, + "CtrlptsW": 64, + }, + ) + + curvesculpt_4 = nw.new_node( + nodegroup_curve_sculpt().name, + input_kwargs={ + "Target": curvesculpt_3.outputs["Geometry"], + "Curve": curveparametercurve_4, + "Base Radius": 0.0400, + "Base Factor": -0.0100, + }, + ) + + quadratic_bezier_4 = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Resolution": 32, + "Start": (0.3000, 0.2500, 0.0000), + "Middle": (0.4000, 0.2500, 0.0000), + "End": (0.7000, 0.2500, 0.0000), + }, + ) + + curveparametercurve_5 = nw.new_node( + nodegroup_curve_parameter_curve().name, + input_kwargs={ + "Surface": curvesculpt_4.outputs["Geometry"], + "UVCurve": quadratic_bezier_4, + "CtrlptsU": 64, + "CtrlptsW": 64, + }, + ) + + curvesculpt_5 = nw.new_node( + nodegroup_curve_sculpt().name, + input_kwargs={ + "Target": curvesculpt_4.outputs["Geometry"], + "Curve": curveparametercurve_5, + "Base Radius": 0.2000, + "Base Factor": 0.0100, + "SymmY": False, + }, + ) + + quadratic_bezier_9 = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Resolution": 32, + "Start": (0.3000, 0.2500, 0.0000), + "Middle": (0.4000, 0.2500, 0.0000), + "End": (0.5000, 0.2500, 0.0000), + }, + ) + + curveparametercurve_6 = nw.new_node( + nodegroup_curve_parameter_curve().name, + input_kwargs={ + "Surface": curvesculpt_5.outputs["Geometry"], + "UVCurve": quadratic_bezier_9, + "CtrlptsU": 64, + "CtrlptsW": 64, + }, + ) + + curvesculpt_6 = nw.new_node( + nodegroup_curve_sculpt().name, + input_kwargs={ + "Target": curvesculpt_5.outputs["Geometry"], + "Curve": curveparametercurve_6, + "Base Radius": 0.2000, + "Base Factor": 0.0100, + "SymmY": False, + }, + ) + + quadratic_bezier_5 = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Resolution": 40, + "Start": (0.5000, 0.6000, 0.0000), + "Middle": (0.7000, 0.7000, 0.0000), + "End": (1.0000, 0.6500, 0.0100), + }, + ) + + quadratic_bezier_6 = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Start": (0.5000, 0.6000, 0.0000), + "Middle": (0.3000, 0.5500, 0.0000), + "End": (0.2000, 0.7000, 0.0200), + }, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [quadratic_bezier_5, quadratic_bezier_6]}, + ) + + curveparametercurve_7 = nw.new_node( + nodegroup_curve_parameter_curve().name, + input_kwargs={ + "Surface": curvesculpt_6.outputs["Geometry"], + "UVCurve": join_geometry, + "CtrlptsU": 64, + "CtrlptsW": 64, + }, + ) + + curvesculpt_7 = nw.new_node( + nodegroup_curve_sculpt().name, + input_kwargs={ + "Target": curvesculpt_6.outputs["Geometry"], + "Curve": curveparametercurve_7, + "Base Radius": 0.0150, + "Base Factor": group_input_2.outputs["EyeBrow"], + }, + ) + + quadratic_bezier_7 = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Resolution": 64, + "Start": (0.6400, 0.7600, 0.0200), + "Middle": (0.4400, 0.8800, 0.0000), + "End": (0.5100, 0.9200, 0.0000), + }, + ) + + curveparametercurve_8 = nw.new_node( + nodegroup_curve_parameter_curve().name, + input_kwargs={ + "Surface": curvesculpt_7.outputs["Geometry"], + "UVCurve": quadratic_bezier_7, + "CtrlptsU": 64, + "CtrlptsW": 64, + }, + ) + + curvesculpt_8 = nw.new_node( + nodegroup_curve_sculpt().name, + input_kwargs={ + "Target": curvesculpt_7.outputs["Geometry"], + "Curve": curveparametercurve_8, + "Base Radius": 0.1100, + "Base Factor": -0.0100, + }, + ) + + quadratic_bezier_8 = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Resolution": 64, + "Start": (0.6500, 0.7500, 0.0200), + "Middle": (0.3000, 0.7500, 0.0100), + "End": (0.1000, 0.7500, 0.0000), + }, + ) + + 
quadratic_bezier_12 = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Resolution": 12, + "Start": (0.1500, 0.6000, 0.0200), + "Middle": (0.2000, 0.7000, 0.0100), + "End": (0.1000, 0.7500, 0.0050), + }, + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [quadratic_bezier_8, quadratic_bezier_12]}, + ) + + curveparametercurve_9 = nw.new_node( + nodegroup_curve_parameter_curve().name, + input_kwargs={ + "Surface": curvesculpt_8.outputs["Geometry"], + "UVCurve": join_geometry_1, + "CtrlptsU": 64, + "CtrlptsW": 64, + }, + ) + + curvesculpt_9 = nw.new_node( + nodegroup_curve_sculpt().name, + input_kwargs={ + "Target": curvesculpt_8.outputs["Geometry"], + "Curve": curveparametercurve_9, + "Base Radius": 0.0300, + "Base Factor": group_input_2.outputs["Crown"], + }, + ) + + quadratic_bezier_18 = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Resolution": 200, + "Start": (0.9000, 0.2500, 0.0500), + "Middle": (0.8000, 0.2500, 0.0000), + "End": (0.6000, 0.2500, 0.0400), + }, + ) + + curveparametercurve_10 = nw.new_node( + nodegroup_curve_parameter_curve().name, + input_kwargs={ + "Surface": curvesculpt_9.outputs["Geometry"], + "UVCurve": quadratic_bezier_18, + "CtrlptsU": 64, + "CtrlptsW": 64, + }, + ) + + curvesculpt_10 = nw.new_node( + nodegroup_curve_sculpt().name, + input_kwargs={ + "Target": curvesculpt_9.outputs["Geometry"], + "Curve": curveparametercurve_10, + "Base Radius": 0.1000, + "Base Factor": 0.0200, + }, + ) + + quadratic_bezier_16 = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Resolution": 32, + "Start": (0.7000, 0.3500, 0.0500), + "Middle": (0.6000, 0.4000, 0.0000), + "End": (0.4000, 0.3500, 0.0400), + }, + ) + + curveparametercurve_11 = nw.new_node( + nodegroup_curve_parameter_curve().name, + input_kwargs={ + "Surface": curvesculpt_10.outputs["Geometry"], + "UVCurve": quadratic_bezier_16, + "CtrlptsU": 64, + "CtrlptsW": 64, + }, + ) + + curvesculpt_11 = nw.new_node( + nodegroup_curve_sculpt().name, + input_kwargs={ + "Target": curvesculpt_10.outputs["Geometry"], + "Curve": curveparametercurve_11, + "Base Radius": 0.1500, + "Base Factor": 0.0200, + }, + ) + + quadratic_bezier_15 = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Resolution": 20, + "Start": (0.9000, 0.2500, 0.0100), + "Middle": (0.6000, 0.2500, 0.0000), + "End": (0.2000, 0.2500, 0.0000), + }, + ) + + curveparametercurve_12 = nw.new_node( + nodegroup_curve_parameter_curve().name, + input_kwargs={ + "Surface": curvesculpt_11.outputs["Geometry"], + "UVCurve": quadratic_bezier_15, + "CtrlptsU": 64, + "CtrlptsW": 64, + }, + ) + + curvesculpt_12 = nw.new_node( + nodegroup_curve_sculpt().name, + input_kwargs={ + "Target": curvesculpt_11.outputs["Geometry"], + "Curve": curveparametercurve_12, + "Base Radius": 0.0200, + "Base Factor": 0.0300, + "SymmY": False, + }, + ) + + quadratic_bezier_19 = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Resolution": 32, + "Start": (1.0000, 0.4000, 0.0100), + "Middle": (0.5000, 0.4500, 0.0000), + "End": (0.4500, 0.4000, 0.0100), + }, + ) + + curveparametercurve_13 = nw.new_node( + nodegroup_curve_parameter_curve().name, + input_kwargs={ + "Surface": curvesculpt_12.outputs["Geometry"], + "UVCurve": quadratic_bezier_19, + "CtrlptsU": 64, + "CtrlptsW": 64, + }, + ) + + curvesculpt_13 = nw.new_node( + nodegroup_curve_sculpt().name, + input_kwargs={ + "Target": curvesculpt_12.outputs["Geometry"], + "Curve": curveparametercurve_13, + "Base Radius": 0.0200, + "Base Factor": 0.0100, + "Switch": False, + }, + ) 
+ + quadratic_bezier_14 = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Resolution": 64, + "Start": (0.8000, 0.7500, 0.0000), + "Middle": (0.5000, 0.7500, 0.0000), + "End": (0.1000, 0.7500, 0.0000), + }, + ) + + quadratic_bezier_13 = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Resolution": 12, + "Start": (0.1500, 0.6000, 0.0000), + "Middle": (0.2000, 0.7000, 0.0000), + "End": (0.1000, 0.7500, 0.0000), + }, + ) + + join_geometry_2 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [quadratic_bezier_14, quadratic_bezier_13]}, + ) + + curveparametercurve_14 = nw.new_node( + nodegroup_curve_parameter_curve().name, + input_kwargs={ + "Surface": curvesculpt_13.outputs["Geometry"], + "UVCurve": join_geometry_2, + "CtrlptsU": 64, + "CtrlptsW": 64, + }, + ) + + curvesculpt_14 = nw.new_node( + nodegroup_curve_sculpt().name, + input_kwargs={ + "Target": curvesculpt_13.outputs["Geometry"], + "Curve": curveparametercurve_14, + "Base Radius": 0.0300, + "Base Factor": 0.0000, + "Attr": True, + }, + ) + + quadratic_bezier_23 = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Resolution": 40, + "Start": (0.6000, 0.6000, 0.0000), + "Middle": (0.9000, 0.7300, 0.0000), + "End": (1.0000, 0.6500, 0.0000), + }, + ) + + quadratic_bezier_24 = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Start": (0.6000, 0.6000, 0.0000), + "Middle": (0.5000, 0.5500, 0.0000), + "End": (0.2000, 0.6200, 0.0000), + }, + ) + + join_geometry_3 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [quadratic_bezier_23, quadratic_bezier_24]}, + ) + + curveparametercurve_15 = nw.new_node( + nodegroup_curve_parameter_curve().name, + input_kwargs={ + "Surface": curvesculpt_14.outputs["Geometry"], + "UVCurve": join_geometry_3, + "CtrlptsU": 64, + "CtrlptsW": 64, + }, + ) + + curvesculpt_15 = nw.new_node( + nodegroup_curve_sculpt().name, + input_kwargs={ + "Target": curvesculpt_14.outputs["Geometry"], + "Curve": curveparametercurve_15, + "Base Radius": 0.0200, + "Base Factor": 0.0000, + "Attr": True, + }, + ) + + quadratic_bezier_25 = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Resolution": 64, + "Start": (1.0000, 0.4000, 0.0000), + "Middle": (0.7000, 0.4500, 0.0000), + "End": (0.4500, 0.4000, 0.0000), + }, + ) + + curveparametercurve_16 = nw.new_node( + nodegroup_curve_parameter_curve().name, + input_kwargs={ + "Surface": curvesculpt_15.outputs["Geometry"], + "UVCurve": quadratic_bezier_25, + "CtrlptsU": 64, + "CtrlptsW": 64, + }, + ) + + curvesculpt_16 = nw.new_node( + nodegroup_curve_sculpt().name, + input_kwargs={ + "Target": curvesculpt_15.outputs["Geometry"], + "Curve": curveparametercurve_16, + "Base Radius": 0.0150, + "Base Factor": 0.0000, + "Switch": False, + "Attr": True, + }, + ) + + merge_by_distance_1 = nw.new_node( + Nodes.MergeByDistance, + input_kwargs={ + "Geometry": curvesculpt_16.outputs["Geometry"], + "Distance": 0.0000, + }, + ) + + subdivision_surface = nw.new_node( + Nodes.SubdivisionSurface, input_kwargs={"Mesh": merge_by_distance_1, "Level": 3} + ) + + set_position = nw.new_node( + Nodes.SetPosition, input_kwargs={"Geometry": subdivision_surface} + ) + + set_shade_smooth = nw.new_node( + Nodes.SetShadeSmooth, + input_kwargs={"Geometry": set_position, "Shade Smooth": False}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": set_shade_smooth}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_round_bump", singleton=False, type="GeometryNodeTree" +) +def 
nodegroup_round_bump(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketFloatDistance", "Distance", 0.0200), + ("NodeSocketFloat", "Offset Scale", 0.0100), + ("NodeSocketInt", "Level", 1), + ], + ) + + subdivide_mesh = nw.new_node( + Nodes.SubdivideMesh, + input_kwargs={ + "Mesh": group_input.outputs["Geometry"], + "Level": group_input.outputs["Level"], + }, + ) + + merge_by_distance = nw.new_node( + Nodes.MergeByDistance, + input_kwargs={ + "Geometry": subdivide_mesh, + "Distance": group_input.outputs["Distance"], + }, + ) + # merge_by_distance = nw.new_node(Nodes.MergeByDistance, + # input_kwargs={'Geometry': subdivide_mesh, 'Distance': 2}) + + dual_mesh = nw.new_node(Nodes.DualMesh, input_kwargs={"Mesh": merge_by_distance}) + + split_edges = nw.new_node(Nodes.SplitEdges, input_kwargs={"Mesh": dual_mesh}) + + scale_elements = nw.new_node( + Nodes.ScaleElements, input_kwargs={"Geometry": split_edges, "Scale": 0.9000} + ) + + extrude_mesh = nw.new_node( + Nodes.ExtrudeMesh, + input_kwargs={ + "Mesh": scale_elements, + "Offset Scale": group_input.outputs["Offset Scale"], + "Individual": False, + }, + ) + + subdivision_surface_1 = nw.new_node( + Nodes.SubdivisionSurface, input_kwargs={"Mesh": extrude_mesh.outputs["Mesh"]} + ) + + set_shade_smooth = nw.new_node( + Nodes.SetShadeSmooth, input_kwargs={"Geometry": subdivision_surface_1} + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [set_shade_smooth, group_input.outputs["Geometry"]]}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": join_geometry}, + attrs={"is_active_output": True}, + ) + + +def shader_chameleon_eye(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + attribute_2 = nw.new_node(Nodes.Attribute, attrs={"attribute_name": "Pupil"}) + + colorramp_4 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": attribute_2.outputs["Fac"]} + ) + colorramp_4.color_ramp.elements[0].position = 0.0091 + colorramp_4.color_ramp.elements[0].color = [0.0000, 0.0000, 0.0000, 1.0000] + colorramp_4.color_ramp.elements[1].position = 0.9841 + colorramp_4.color_ramp.elements[1].color = [1.0000, 1.0000, 1.0000, 1.0000] + + attribute_1 = nw.new_node(Nodes.Attribute, attrs={"attribute_name": "Ridge"}) + + colorramp_2 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": attribute_1.outputs["Fac"]} + ) + colorramp_2.color_ramp.elements[0].position = 0.0091 + colorramp_2.color_ramp.elements[0].color = [0.0000, 0.0000, 0.0000, 1.0000] + colorramp_2.color_ramp.elements[1].position = 0.9841 + colorramp_2.color_ramp.elements[1].color = [1.0000, 1.0000, 1.0000, 1.0000] + + texture_coordinate = nw.new_node(Nodes.TextureCoord) + + voronoi_texture = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={ + "Vector": texture_coordinate.outputs["Generated"], + "Scale": 300.0000, + "Smoothness": 0.0000, + }, + attrs={"feature": "SMOOTH_F1"}, + ) + + mapping_1 = nw.new_node( + Nodes.Mapping, input_kwargs={"Vector": voronoi_texture.outputs["Distance"]} + ) + + colorramp = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": mapping_1}) + colorramp.color_ramp.interpolation = "CONSTANT" + colorramp.color_ramp.elements[0].position = 0.0000 + colorramp.color_ramp.elements[0].color = [1.0000, 1.0000, 1.0000, 1.0000] + colorramp.color_ramp.elements[1].position = 0.3159 + colorramp.color_ramp.elements[1].color = [0.0000, 
0.0000, 0.0000, 1.0000] + + mapping = nw.new_node( + Nodes.Mapping, + input_kwargs={ + "Vector": texture_coordinate.outputs["Generated"], + "Location": (1.0000, 0.0000, 0.0000), + }, + ) + + noise_texture_1 = nw.new_node( + Nodes.NoiseTexture, input_kwargs={"Vector": mapping, "Scale": 3.0000} + ) + + colorramp_3 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": noise_texture_1.outputs["Fac"]} + ) + colorramp_3.color_ramp.elements.new(0) + colorramp_3.color_ramp.elements[0].position = 0.2773 + colorramp_3.color_ramp.elements[0].color = [0.0353, 0.0942, 0.0136, 1.0000] + colorramp_3.color_ramp.elements[1].position = 0.6000 + colorramp_3.color_ramp.elements[1].color = [0.0580, 0.0276, 0.0020, 1.0000] + colorramp_3.color_ramp.elements[2].position = 0.6386 + colorramp_3.color_ramp.elements[2].color = [0.0405, 0.0397, 0.0064, 1.0000] + + mix = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": colorramp.outputs["Color"], + "Color1": colorramp_3.outputs["Color"], + "Color2": (0.1421, 0.1015, 0.0241, 1.0000), + }, + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={"Vector": mapping, "W": 1.0000}, + attrs={"noise_dimensions": "4D"}, + ) + + colorramp_1 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": noise_texture.outputs["Fac"]} + ) + colorramp_1.color_ramp.elements[0].position = 0.0000 + colorramp_1.color_ramp.elements[0].color = [0.6990, 0.5484, 0.1189, 1.0000] + colorramp_1.color_ramp.elements[1].position = 1.0000 + colorramp_1.color_ramp.elements[1].color = [0.2549, 0.1495, 0.0318, 1.0000] + + mix_1 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": colorramp_2.outputs["Color"], + "Color1": mix, + "Color2": colorramp_1.outputs["Color"], + }, + ) + + mix_2 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": colorramp_4.outputs["Color"], + "Color1": mix_1, + "Color2": (0.0082, 0.0082, 0.0082, 1.0000), + }, + ) + + separate_color = nw.new_node(Nodes.SeparateColor, input_kwargs={"Color": mix_2}) + + texture_coordinate_1 = nw.new_node(Nodes.TextureCoord) + + noise_texture_2 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": texture_coordinate_1.outputs["Normal"], + "Scale": 20.0000, + "Detail": 200.0000, + "Roughness": 0.0000, + }, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: separate_color.outputs["Red"], + 1: noise_texture_2.outputs["Fac"], + }, + attrs={"use_clamp": True, "operation": "MULTIPLY"}, + ) + + combine_color = nw.new_node( + "ShaderNodeCombineColor", + input_kwargs={ + "Red": multiply, + "Green": separate_color.outputs["Green"], + "Blue": separate_color.outputs["Blue"], + }, + ) + + principled_bsdf_1 = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": combine_color, + "Specular": 0.3000, + "Roughness": 0.6000, + }, + ) + + material_output_1 = nw.new_node( + Nodes.MaterialOutput, + input_kwargs={"Surface": principled_bsdf_1}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_chameleon", singleton=False, type="GeometryNodeTree" +) +def nodegroup_chameleon(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "body_length", 1.4000), + ("NodeSocketFloat", "head_crown", 0.2000), + ("NodeSocketFloat", "head_eyebrow", 0.0200), + ("NodeSocketVectorXYZ", "head_scale", (1.0000, 1.0000, 1.0000)), + ("NodeSocketVectorEuler", "left_eye_rotation", (0.0000, 0.0000, -1.5)), + ("NodeSocketVectorEuler", "right_eye_rotation", (0.0000, 0.0000, 1.5)), 
+ ("NodeSocketFloat", "pupil_radius", 0.2200), + ("NodeSocketFloat", "front_leg_position", 0.0800), + ("NodeSocketFloat", "back_leg_position", 0.8500), + ], + ) + + chameleon_head_shape = nw.new_node( + nodegroup_chameleon_head_shape().name, + input_kwargs={ + "Crown": group_input.outputs["head_crown"], + "EyeBrow": group_input.outputs["head_eyebrow"], + "Scale": group_input.outputs["head_scale"], + }, + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": chameleon_head_shape, + "Translation": (0.1000, 0.0000, 0.0000), + "Rotation": (0.0000, 0.0000, 3.1416), + }, + ) + + round_bump = nw.new_node( + nodegroup_round_bump().name, + input_kwargs={ + "Geometry": transform, + "Distance": 0.0080, + "Offset Scale": 0.0030, + }, + ) + + chameleon_body_shape = nw.new_node( + nodegroup_chameleon_body_shape().name, + input_kwargs={"length": group_input.outputs["body_length"]}, + ) + + round_bump_1 = nw.new_node( + nodegroup_round_bump().name, + input_kwargs={ + "Geometry": chameleon_body_shape.outputs["Mesh"], + "Distance": 0.0080, + "Offset Scale": 0.0030, + }, + ) + + chameleon_tail = nw.new_node( + nodegroup_chameleon_tail().name, + input_kwargs={ + "body_length": group_input.outputs["body_length"], + "body_position": 0.4500, + }, + ) + + round_bump_2 = nw.new_node( + nodegroup_round_bump().name, + input_kwargs={ + "Geometry": chameleon_tail.outputs["Geometry"], + "Distance": 0.0080, + "Offset Scale": 0.0030, + }, + ) + + chameleon_leg_shape = nw.new_node( + nodegroup_chameleon_leg_shape().name, + input_kwargs={ + "body_length": group_input.outputs["body_length"], + "body_position": group_input.outputs["back_leg_position"], + "body_thickness": 0.2500, + "Rotation": (0.0000, -1.0472, 3.1416), + "thigh_length": 0.4000, + "thigh_body_rotation": -35.0000, + "calf_body_rotation": -30.0000, + "thigh_calf_rotation": 10.0000, + "ouScale": (0.6000, 1.0000, 1.0000), + "inScale": (1.0000, 1.0000, 1.0000), + }, + ) + + chameleon_leg_shape_1 = nw.new_node( + nodegroup_chameleon_leg_shape().name, + input_kwargs={ + "body_length": group_input.outputs["body_length"], + "body_position": group_input.outputs["back_leg_position"], + "body_thickness": 0.1500, + "Rotation": (0.0000, -1.0472, 3.1416), + "thigh_length": 0.4000, + "thigh_body_rotation": 50.0000, + "calf_body_rotation": 5.0000, + "thigh_calf_rotation": 5.0000, + }, + ) + + chameleon_leg_shape_2 = nw.new_node( + nodegroup_chameleon_leg_shape().name, + input_kwargs={ + "body_length": group_input.outputs["body_length"], + "body_position": group_input.outputs["front_leg_position"], + "body_thickness": 0.0800, + "thigh_body_rotation": 35.0000, + "thigh_calf_rotation": 15.0000, + }, + ) + + chameleon_leg_shape_3 = nw.new_node( + nodegroup_chameleon_leg_shape().name, + input_kwargs={ + "body_length": group_input.outputs["body_length"], + "body_position": group_input.outputs["front_leg_position"], + "body_thickness": -0.0300, + "thigh_body_rotation": -25.0000, + "calf_body_rotation": -15.0000, + "thigh_calf_rotation": 15.0000, + "ouScale": (0.6000, 1.0000, 1.0000), + "inScale": (1.0000, 1.0000, 1.0000), + }, + ) + + join_geometry_2 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={ + "Geometry": [ + chameleon_leg_shape, + chameleon_leg_shape_1, + chameleon_leg_shape_2, + chameleon_leg_shape_3, + ] + }, + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={ + "Geometry": [round_bump, round_bump_1, round_bump_2, join_geometry_2] + }, + ) + + set_material = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + 
"Geometry": join_geometry_1, + "Material": surface.shaderfunc_to_material(shader_chameleon), + }, + ) + + chameleon_eye = nw.new_node( + nodegroup_chameleon_eye().name, + input_kwargs={"pupil_radius": group_input.outputs["pupil_radius"]}, + ) + + transform_2 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": chameleon_eye, + "Translation": (-0.2000, -0.0300, 0.0200), + "Rotation": group_input.outputs["left_eye_rotation"], + }, + ) + + chameleon_eye_1 = nw.new_node( + nodegroup_chameleon_eye().name, + input_kwargs={"pupil_radius": group_input.outputs["pupil_radius"]}, + ) + + transform_1 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": chameleon_eye_1, + "Translation": (-0.2000, 0.0300, 0.0200), + "Rotation": group_input.outputs["right_eye_rotation"], + }, + ) + + join_geometry_3 = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [transform_2, transform_1]} + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [set_material, join_geometry_3]} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": join_geometry}, + attrs={"is_active_output": True}, + ) + + +class Chameleon(PartFactory): + param_templates = {} + tags = [] + + def sample_params(self, select=None, var=1): + return {} + + def make_part(self, params): + part = nodegroup_to_part(nodegroup_chameleon, params) + + return part diff --git a/infinigen/assets/creatures/parts/crustacean/__init__.py b/infinigen/assets/objects/creatures/parts/crustacean/__init__.py similarity index 100% rename from infinigen/assets/creatures/parts/crustacean/__init__.py rename to infinigen/assets/objects/creatures/parts/crustacean/__init__.py diff --git a/infinigen/assets/objects/creatures/parts/crustacean/antenna.py b/infinigen/assets/objects/creatures/parts/crustacean/antenna.py new file mode 100644 index 000000000..800ee98bf --- /dev/null +++ b/infinigen/assets/objects/creatures/parts/crustacean/antenna.py @@ -0,0 +1,70 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
+ +# Authors: Lingjie Mei + + +import numpy as np +from numpy.random import uniform + +from infinigen.assets.objects.creatures.parts.crustacean.leg import CrabLegFactory +from infinigen.assets.objects.creatures.util.animation.driver_repeated import ( + bend_bones_lerp, +) +from infinigen.assets.objects.creatures.util.creature import Part +from infinigen.assets.objects.creatures.util.genome import Joint +from infinigen.assets.utils.decorate import displace_vertices +from infinigen.assets.utils.object import join_objects +from infinigen.core.util.random import log_uniform + + +class LobsterAntennaFactory(CrabLegFactory): + tag = ["claw"] + + def make_part(self, params) -> Part: + x_length, z_length = params["x_length"], params["z_length"] + segments, x_cuts = self.make_segments(params) + displace_vertices( + segments[-1], + lambda x, y, z: ( + 0, + 0, + params["antenna_bend"] + * (x / x_length - x_cuts[-2]) ** 2 + * params["z_length"], + ), + ) + obj = join_objects(segments) + + skeleton = np.zeros((2, 3)) + skeleton[1, 0] = x_length + joints = {x: Joint(rest=(0, 0, 0)) for x in x_cuts[1:]} + return Part(skeleton, obj, joints=joints) + + @staticmethod + def animate_bones(arma, bones, params): + bend_bones_lerp(arma, bones, params["antenna_curl"], params["freq"]) + + def sample_params(self): + y_length = uniform(0.01, 0.015) + z_length = y_length * log_uniform(1, 1.2) + x_mid_first = uniform(0.1, 0.15) + x_mid_second = uniform(0.25, 0.3) + antenna_bend = uniform(2, 5) + return { + **super().sample_params(), + "y_length": y_length, + "z_length": z_length, + "x_mid_first": x_mid_first, + "x_mid_second": x_mid_second, + "antenna_bend": antenna_bend, + } + + +class SpinyLobsterAntennaFactory(LobsterAntennaFactory): + tag = ["claw"] + + def sample_params(self): + y_length = uniform(0.05, 0.08) + z_length = y_length * log_uniform(1, 1.2) + return {**super().sample_params(), "y_length": y_length, "z_length": z_length} diff --git a/infinigen/assets/objects/creatures/parts/crustacean/body.py b/infinigen/assets/objects/creatures/parts/crustacean/body.py new file mode 100644 index 000000000..7f27977d7 --- /dev/null +++ b/infinigen/assets/objects/creatures/parts/crustacean/body.py @@ -0,0 +1,447 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory +# of this source tree. 
+ +# Authors: Lingjie Mei + + +import numpy as np +from numpy.random import uniform +from scipy.interpolate import interp1d + +from infinigen.assets.objects.creatures.parts.utils.draw import geo_symmetric_texture +from infinigen.assets.objects.creatures.util.creature import Part, PartFactory +from infinigen.assets.objects.creatures.util.genome import Joint +from infinigen.assets.utils.decorate import ( + displace_vertices, + distance2boundary, + read_co, +) +from infinigen.assets.utils.draw import leaf, spin +from infinigen.assets.utils.object import join_objects, new_line +from infinigen.core import surface +from infinigen.core.nodes.node_info import Nodes +from infinigen.core.nodes.node_wrangler import NodeWrangler +from infinigen.core.placement.placement import placeholder_locs +from infinigen.core.surface import read_attr_data, write_attr_data +from infinigen.core.util import blender as butil +from infinigen.core.util.random import log_uniform + + +class CrabBodyFactory(PartFactory): + tags = ["body"] + min_spike_distance = 0.1 + min_spike_radius = 0.02 + + def make_part(self, params) -> Part: + x_length, x_tip, bend_height = map( + params.get, ["x_length", "x_tip", "bend_height"] + ) + upper = self.make_surface(params) + lower = butil.deep_clone_obj(upper) + self.make_surface_side(upper, params, "upper") + self.make_surface_side(lower, params, "lower") + self.add_spikes(upper, params) + self.add_mouth(lower, params) + obj = join_objects([upper, lower]) + + x, y, z = read_co(obj).T + write_attr_data( + obj, "ratio", np.where(z > np.min(z) * params["color_cutoff"], 1, 0) + ) + butil.modify_mesh(obj, "WELD", merge_threshold=0.001) + + height_scale = interp1d( + [0, -x_tip + 0.01, -x_tip - 0.01, -1], + [0, bend_height, bend_height, 0], + "quadratic", + fill_value="extrapolate", + ) + displace_vertices(obj, lambda x, y, z: (0, 0, height_scale(x / x_length))) + self.add_head(obj, params) + + line = new_line(x_length) + line.location[0] -= x_length + butil.apply_transform(line, loc=True) + + line.rotation_euler[1] = np.pi / 2 + butil.apply_transform(line) + butil.modify_mesh( + line, + "SIMPLE_DEFORM", + deform_method="BEND", + angle=-params["bend_angle"], + deform_axis="Y", + ) + line.rotation_euler[1] = -np.pi / 2 + butil.apply_transform(line) + skeleton = read_co(line) + butil.delete(line) + + obj.rotation_euler[1] = np.pi / 2 + butil.apply_transform(obj) + butil.modify_mesh( + obj, + "SIMPLE_DEFORM", + deform_method="BEND", + angle=-params["bend_angle"], + deform_axis="Y", + ) + obj.rotation_euler[1] = -np.pi / 2 + butil.apply_transform(obj) + joints = { + i: Joint((0, 0, 0), bounds=np.array([[0, 0, 0], [0, 0, 0]])) + for i in np.linspace(0, 1, 5, endpoint=True) + } + return Part(skeleton, obj, joints=joints) + + def add_head(self, obj, params): + def offset(nw: NodeWrangler, vector): + head = nw.scalar_add( + 1, + nw.scalar_divide( + nw.separate(nw.new_node(Nodes.InputPosition))[0], params["x_length"] + ), + ) + texture = nw.new_node( + Nodes.MusgraveTexture, + [vector], + input_kwargs={"Scale": params["noise_scale"]}, + ) + return nw.combine( + nw.scalar_multiply( + head, nw.scalar_multiply(texture, params["noise_strength"]) + ), + 0, + 0, + ) + + surface.add_geomod(obj, geo_symmetric_texture, input_args=[offset], apply=True) + + @staticmethod + def make_surface(params): + x_length, y_length, x_tip, y_tail = map( + params.get, ["x_length", "y_length", "x_tip", "y_tail"] + ) + x_anchors = ( + np.array( + [0, 0, -x_tip / 2, -x_tip, -x_tip, -x_tip, -(x_tip + 1) / 2, -1, -1] + ) + * 
x_length + ) + y_anchors = ( + np.array( + [ + 0, + 0.1, + params["front_midpoint"], + 1, + 1, + 1, + params["back_midpoint"], + y_tail, + 0, + ] + ) + * y_length + ) + tip_size = params["tip_size"] + if params["has_sharp_tip"]: + front_angle = params["front_angle"] + back_angle = params["back_angle"] + x_anchors[3] += tip_size * np.sin(front_angle) * x_length + x_anchors[5] -= tip_size * np.sin(back_angle) * x_length + y_anchors[3] += tip_size * (1 - np.cos(front_angle)) * x_length + y_anchors[4] += tip_size * x_length + y_anchors[5] += tip_size * (1 - np.cos(back_angle)) * x_length + vector_locations = [4] + else: + x_anchors[3] += 0.05 * x_tip * x_length + x_anchors[5] -= 0.05 * (1 - x_tip) * x_length + vector_locations = [] + obj = leaf(x_anchors, y_anchors, vector_locations) + butil.modify_mesh(obj, "SUBSURF", levels=1, render_levels=1) + distance2boundary(obj) + return obj + + def make_surface_side(self, obj, params, prefix="upper"): + distance = read_attr_data(obj, "distance") + height_scale = interp1d( + [0, 0.5, 1], [0, params[f"{prefix}_alpha"], 1], "quadratic" + ) + displace_vertices( + obj, + lambda x, y, z: ( + 0, + 0, + (1 if prefix == "upper" else -1) + * height_scale(distance) + * params[f"{prefix}_z"], + ), + ) + displace_vertices(obj, lambda x, y, z: (params[f"{prefix}_shift"] * z, 0, 0)) + + def offset(nw, vector, distance): + return nw.combine( + 0, + 0, + nw.scalar_multiply( + distance, + nw.scalar_multiply( + nw.new_node( + Nodes.MusgraveTexture, + [vector], + input_kwargs={"Scale": params["noise_scale"]}, + ), + params["noise_strength"], + ), + ), + ) + + surface.add_geomod(obj, geo_symmetric_texture, input_args=[offset], apply=True) + return obj + + def add_spikes(self, obj, params): + def selection(nw: NodeWrangler): + x, y, z = nw.separate(nw.new_node(Nodes.InputPosition)) + return nw.boolean_math( + "AND", + nw.compare("GREATER_THAN", y, 0), + nw.compare("GREATER_THAN", z, 0.02), + ) + + locations = placeholder_locs( + obj, params["spike_density"], selection, self.min_spike_distance, 0 + ) + locations_ = locations.copy() + locations_[:, 1] = -locations_[:, 1] + locations = np.concatenate([locations, locations_], 0) + if len(locations) == 0: + return + x, y, z = read_co(obj).T + dist = np.amin( + np.linalg.norm( + read_co(obj)[np.newaxis] - locations[:, np.newaxis], axis=-1 + ), + 0, + ) + extrude = params["spike_height"] * np.clip( + 1 - dist / self.min_spike_radius, 0, None + ) + d = np.stack( + [ + x + params["spike_center"] * params["x_length"], + y, + z + params["spike_depth"], + ], + -1, + ) + d = d / np.linalg.norm(d, axis=-1, keepdims=True) + displace_vertices(obj, lambda x, y, z: (d * extrude[:, np.newaxis]).T) + + def add_mouth(self, obj, params): + def selection(nw: NodeWrangler): + x, y, z = nw.separate(nw.new_node(Nodes.InputPosition)) + z_length = params["lower_z"] if "lower_z" in params else params["z_length"] + z_range = nw.boolean_math( + "AND", + nw.compare("GREATER_THAN", z, -params["mouth_z"] * z_length), + nw.compare("LESS_THAN", z, 0), + ) + x_range = nw.compare( + "GREATER_THAN", x, -params["mouth_x"] * params["x_length"] + ) + return nw.boolean_math("AND", z_range, x_range) + + def offset(nw: NodeWrangler, vector, distance): + wave_texture = nw.new_node( + Nodes.WaveTexture, + [vector], + input_kwargs={ + "Scale": params["mouth_noise_scale"], + "Distortion": 20, + "Detail": 0, + }, + ) + ratio = nw.scalar_multiply( + distance, + nw.build_float_curve( + distance, [(0, 0), (0.001, 0), (0.005, 1), (1, 1)] + ), + ) + return nw.scale( + 
nw.scalar_multiply( + ratio, + nw.scalar_multiply(wave_texture, params["mouth_noise_strength"]), + ), + nw.new_node(Nodes.InputNormal), + ) + + surface.add_geomod( + obj, geo_symmetric_texture, input_args=[offset, selection], apply=True + ) + + def sample_params(self): + x_length = uniform(0.8, 1.2) + y_length = x_length * uniform(0.5, 0.7) + x_tip = uniform(0.3, 0.6) + y_tail = uniform(0.1, 0.3) + has_sharp_tip = uniform(0, 1) < 0.4 + front_midpoint = uniform(0.7, 0.9) + back_midpoint = uniform(0.7, 0.9) + front_angle = uniform(np.pi / 12, np.pi / 8) + back_angle = uniform(np.pi / 6, np.pi / 4) + tip_size = uniform(0.05, 0.15) + upper_z = x_length * uniform(0.15, 0.3) + upper_alpha = uniform(0.8, 0.9) + upper_shift = uniform(-0.6, -0.4) + noise_strength = uniform(0.02, 0.03) + noise_scale = uniform(8, 15) + lower_alpha = uniform(0.96, 0.98) + lower_z = x_length * uniform(0.3, 0.4) + lower_shift = uniform(0.1, 0.2) + spike_height = uniform(0.05, 0.2) if uniform(0, 1) < 0.5 else 0 + spike_depth = log_uniform(0.4, 2) + spike_center = uniform(0.3, 0.7) + spike_density = log_uniform(100, 500) + mouth_z = uniform(0.5, 0.8) + mouth_x = uniform(0.1, 0.15) + mouth_noise_scale = uniform(10, 15) + mouth_noise_strength = uniform(0.1, 0.2) + bend_angle = uniform(0, np.pi / 3) + bend_height = uniform(0.08, 0.12) + color_cutoff = uniform(0, 0.5) + return { + "x_length": x_length, + "y_length": y_length, + "x_tip": x_tip, + "y_tail": y_tail, + "has_sharp_tip": has_sharp_tip, + "front_midpoint": front_midpoint, + "back_midpoint": back_midpoint, + "front_angle": front_angle, + "back_angle": back_angle, + "tip_size": tip_size, + "upper_z": upper_z, + "upper_alpha": upper_alpha, + "upper_shift": upper_shift, + "noise_strength": noise_strength, + "noise_scale": noise_scale, + "lower_z": lower_z, + "lower_alpha": lower_alpha, + "lower_shift": lower_shift, + "spike_height": spike_height, + "spike_depth": spike_depth, + "spike_density": spike_density, + "spike_center": spike_center, + "mouth_z": mouth_z, + "mouth_x": mouth_x, + "mouth_noise_scale": mouth_noise_scale, + "mouth_noise_strength": mouth_noise_strength, + "bend_angle": bend_angle, + "bend_height": bend_height, + "color_cutoff": color_cutoff, + } + + +class LobsterBodyFactory(CrabBodyFactory): + tags = ["body"] + min_spike_distance = 0.08 + min_spike_radius = 0.01 + + def make_part(self, params) -> Part: + x_length, y_length, z_length = map( + params.get, ["x_length", "y_length", "z_length"] + ) + x_anchors = np.array([0, 0, 1 / 3, 2 / 3, 1, 1]) * x_length + y_anchors = ( + np.array( + [0, 1, params["midpoint_second"], params["midpoint_first"], 0.01, 0] + ) + * y_length + ) + obj = spin([x_anchors, y_anchors, 0], [1, 4], axis=(1, 0, 0)) + self.add_mouth(obj, params) + + height_fn = interp1d( + [0, 1 / 2, 1], + [0, params["z_shift_midpoint"] / 2, params["z_shift"]], + fill_value="extrapolate", + ) + displace_vertices( + obj, lambda x, y, z: (0, 0, height_fn(x / x_length) * y_length) + ) + + z = read_co(obj).T[-1] + write_attr_data( + obj, "ratio", 1 + np.where(z > 0, 0, uniform(1, 1.5) * z / y_length) + ) + displace_vertices( + obj, + lambda x, y, z: ( + 0, + 0, + -np.clip(z + y_length * params["bottom_cutoff"], None, 0) + * (1 - params["bottom_shift"]), + ), + ) + + obj.scale[-1] = z_length / y_length + butil.apply_transform(obj) + + def offset(nw, vector): + return nw.scale( + nw.scalar_multiply( + nw.new_node( + Nodes.MusgraveTexture, + [vector], + input_kwargs={"Scale": params["noise_scale"]}, + ), + params["noise_strength"], + ), + 
nw.new_node(Nodes.InputNormal), + ) + + surface.add_geomod(obj, geo_symmetric_texture, input_args=[offset], apply=True) + + n_segments = 4 + co = read_co(obj) + skeleton = np.zeros((n_segments, 3)) + skeleton[:, 0] = np.linspace(0, x_length, n_segments) + head_z = co[np.argmax(co[:, 0])][-1] + skeleton[:, -1] = np.linspace(0, head_z, n_segments) + return Part(skeleton, obj) + + def sample_params(self): + x_length = uniform(0.6, 0.8) + y_length = uniform(0.15, 0.2) + z_length = y_length * uniform(1, 1.2) + midpoint_first = uniform(0.65, 0.75) + midpoint_second = uniform(0.95, 1.05) + z_shift = uniform(0.4, 0.6) + z_shift_midpoint = uniform(0.2, 0.3) + noise_strength = uniform(0.02, 0.04) + noise_scale = uniform(5, 8) + bottom_shift = uniform(0.3, 0.5) + bottom_cutoff = uniform(0.2, 0.3) + mouth_z = uniform(0.5, 0.8) + mouth_x = uniform(0.1, 0.15) - 1 + mouth_noise_scale = uniform(10, 15) + mouth_noise_strength = uniform(0.2, 0.3) + return { + "x_length": x_length, + "y_length": y_length, + "z_length": z_length, + "midpoint_first": midpoint_first, + "midpoint_second": midpoint_second, + "z_shift": z_shift, + "z_shift_midpoint": z_shift_midpoint, + "noise_strength": noise_strength, + "noise_scale": noise_scale, + "bottom_shift": bottom_shift, + "bottom_cutoff": bottom_cutoff, + "mouth_z": mouth_z, + "mouth_x": mouth_x, + "mouth_noise_scale": mouth_noise_scale, + "mouth_noise_strength": mouth_noise_strength, + } diff --git a/infinigen/assets/objects/creatures/parts/crustacean/claw.py b/infinigen/assets/objects/creatures/parts/crustacean/claw.py new file mode 100644 index 000000000..9d760ec59 --- /dev/null +++ b/infinigen/assets/objects/creatures/parts/crustacean/claw.py @@ -0,0 +1,269 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
+ +# Authors: Lingjie Mei + + +import numpy as np +from numpy.random import uniform +from scipy.interpolate import interp1d + +from infinigen.assets.objects.creatures.parts.crustacean.leg import CrabLegFactory +from infinigen.assets.objects.creatures.parts.utils.draw import decorate_segment +from infinigen.assets.objects.creatures.util.animation.driver_repeated import ( + bend_bones_lerp, +) +from infinigen.assets.objects.creatures.util.creature import Part +from infinigen.assets.objects.creatures.util.genome import Joint +from infinigen.assets.utils.decorate import displace_vertices, read_co, remove_vertices +from infinigen.assets.utils.draw import spin +from infinigen.assets.utils.nodegroup import geo_base_selection +from infinigen.assets.utils.object import join_objects +from infinigen.core import surface +from infinigen.core.nodes.node_info import Nodes +from infinigen.core.nodes.node_wrangler import NodeWrangler +from infinigen.core.surface import write_attr_data +from infinigen.core.util import blender as butil +from infinigen.core.util.random import log_uniform + + +class CrabClawFactory(CrabLegFactory): + tags = ["claw"] + min_spike_radius = 0.01 + + def make_part(self, params) -> Part: + x_length = params["x_length"] + segments, x_cuts = self.make_segments(params) + butil.delete(segments[-1]) + claw, lower = self.make_claw(params) + segments[-1] = claw + obj = join_objects(segments) + lower.parent = obj + + skeleton = np.zeros((2, 3)) + skeleton[1, 0] = x_length + joints = {x: Joint(rest=(0, 0, 0)) for x in x_cuts[1:]} + return Part(skeleton, obj, joints=joints, settings={"rig_extras": True}) + + def make_claw(self, params): + x_length, y_length, z_length, x_mid, y_mid = map( + params.get, + ["x_length", "y_length", "z_length", "x_mid_second", "y_mid_second"], + ) + xs = x_mid, (x_mid + 1) / 2, (x_mid + 3) / 4, 1 + ys = ( + y_mid, + y_mid * params["claw_y_first"], + y_mid * params["claw_y_second"], + 0.01, + ) + obj = spin( + [ + np.array([xs[0], *xs, xs[-1]]) * x_length, + np.array([0, *ys, 0]) * y_length, + 0.0, + ], + [1, len(xs)], + axis=(1, 0, 0), + ) + + bottom_cutoff = params["bottom_cutoff"] + claw_x_depth = params["claw_x_depth"] + displace_vertices( + obj, + lambda x, y, z: ( + 0, + 0, + -np.clip( + z + + y_length * bottom_cutoff + + y_length + * (y_mid - bottom_cutoff) + * (x / x_length - x_mid) + / claw_x_depth, + None, + 0, + ) + * (1 - params["bottom_shift"]), + ), + ) + width_scale = interp1d( + [ + x_mid, + x_mid + claw_x_depth, + x_mid + + claw_x_depth + + params["claw_x_turn"] * (1 - x_mid - claw_x_depth), + 1, + ], + [0, 0, params["claw_z_width"], 0], + "cubic", + fill_value="extrapolate", + ) + displace_vertices( + obj, + lambda x, y, z: ( + 0, + 0, + np.where( + x > (x_mid + claw_x_depth) * x_length, + width_scale(x / x_length) * y_mid * y_length, + 0, + ), + ), + ) + displace_vertices( + obj, + lambda x, y, z: ( + 0, + 0, + np.where( + z > 0, + np.clip(params["top_cutoff"] * y_length - np.abs(y), 0, None) + * params["top_shift"], + 0, + ), + ), + ) + z = read_co(obj).T[-1] + write_attr_data( + obj, + "ratio", + 1 + np.where(z > 0, 0, uniform(0.5, 1.0) * z / params["y_length"]), + ) + + def selection(nw: NodeWrangler): + x, y, z = nw.separate(nw.new_node(Nodes.InputPosition)) + lower = nw.compare( + "LESS_THAN", nw.separate(nw.new_node(Nodes.InputNormal))[-1], 0 + ) + x_range = nw.boolean_math( + "AND", + nw.compare("GREATER_THAN", x, (x_mid + claw_x_depth * 1.5) * x_length), + nw.compare("LESS_THAN", x, x_length * 0.98), + ) + center = nw.compare( + 
"LESS_THAN", nw.math("ABSOLUTE", y), params["y_length"] * 0.5 + ) + return nw.boolean_math( + "AND", nw.boolean_math("AND", lower, x_range), center + ) + + temp = butil.spawn_vert("temp") + surface.add_geomod( + temp, + geo_base_selection, + apply=True, + input_args=[obj, selection, params["claw_spike_distance"]], + ) + locations = read_co(temp) + np.random.shuffle(locations) + locations = locations[:100] + butil.delete(temp) + if len(locations) > 0: + dist = np.amin( + np.linalg.norm( + read_co(obj)[np.newaxis] - locations[:, np.newaxis], axis=-1 + ), + 0, + ) + extrude = params["claw_spike_strength"] * np.clip( + 1 - dist / self.min_spike_radius, 0, None + ) + displace_vertices(obj, lambda x, y, z: (0, 0, -extrude)) + + decorate_segment(obj, params, x_mid, 1) + obj.scale[-1] = z_length / y_length + butil.apply_transform(obj) + + lower_scale = params["lower_scale"] + lower = butil.deep_clone_obj(obj) + remove_vertices(lower, lambda x, y, z: x < (x_mid + claw_x_depth) * x_length) + lower.location[0] = -(x_mid + claw_x_depth) * x_length + butil.apply_transform(lower, loc=True) + lower.scale = lower_scale, lower_scale, -lower_scale * params["lower_z_scale"] + lower.rotation_euler[1] = uniform(np.pi / 12, np.pi / 4) + butil.apply_transform(lower) + lower.location[0] = (x_mid + claw_x_depth) * x_length + lower.location[-1] = params["lower_z_offset"] * z_length + butil.apply_transform(lower, loc=True) + butil.modify_mesh(lower, "WELD", merge_threshold=0.001) + return obj, lower + + @staticmethod + def animate_bones(arma, bones, params): + main_bones = [b for b in bones if "extra" not in b.name] + bend_bones_lerp( + arma, main_bones, params["claw_curl"], params["freq"], symmetric=False + ) + extra_bones = [b for b in bones if "extra" in b.name] + bend_bones_lerp( + arma, + extra_bones, + params["claw_lower_curl"], + params["freq"], + symmetric=False, + ) + + def sample_params(self): + params = super().sample_params() + z_length = params["y_length"] * uniform(1, 1.2) + x_mid_first = uniform(0.2, 0.25) + x_mid_second = uniform(0.4, 0.6) + y_mid_first = uniform(1.5, 2.0) + y_mid_second = y_mid_first * log_uniform(1.0, 1.5) + y_expand = uniform(1.4, 1.5) + noise_strength = uniform(0.01, 0.02) + top_shift = uniform(0.6, 0.8) + claw_y_first = uniform(0.6, 1.5) + claw_y_second = claw_y_first * uniform(0.4, 0.6) + claw_x_depth = (1 - x_mid_second) * uniform(0.3, 0.5) + claw_x_turn = uniform(0.2, 0.4) + claw_z_width = uniform(0.2, 0.3) + claw_spike_strength = uniform(0.02, 0.03) + claw_spike_distance = uniform(0.03, 0.06) + lower_z_scale = uniform(0.4, 0.6) + lower_scale = uniform(0.75, 0.9) + lower_z_offset = uniform(-0.5, 0.5) + return { + **params, + "z_length": z_length, + "x_mid_first": x_mid_first, + "x_mid_second": x_mid_second, + "y_mid_first": y_mid_first, + "y_mid_second": y_mid_second, + "y_expand": y_expand, + "noise_strength": noise_strength, + "top_shift": top_shift, + "claw_y_first": claw_y_first, + "claw_y_second": claw_y_second, + "claw_x_depth": claw_x_depth, + "claw_x_turn": claw_x_turn, + "claw_z_width": claw_z_width, + "claw_spike_distance": claw_spike_distance, + "claw_spike_strength": claw_spike_strength, + "lower_z_scale": lower_z_scale, + "lower_scale": lower_scale, + "lower_z_offset": lower_z_offset, + } + + +class LobsterClawFactory(CrabClawFactory): + def sample_params(self): + y_expand = uniform(1.4, 1.5) + y_mid_first = uniform(1.5, 2.0) + y_mid_second = y_mid_first * log_uniform(1.2, 1.6) + claw_y_first = uniform(1.2, 1.5) + claw_y_second = claw_y_first * uniform(0.7, 0.8) 
+ noise_strength = uniform(0.01, 0.02) + claw_spike_strength = uniform(0.01, 0.02) + return { + **super().sample_params(), + "y_expand": y_expand, + "y_mid_first": y_mid_first, + "y_mid_second": y_mid_second, + "claw_y_first": claw_y_first, + "claw_y_second": claw_y_second, + "noise_strength": noise_strength, + "claw_spike_strength": claw_spike_strength, + } diff --git a/infinigen/assets/creatures/parts/crustacean/eye.py b/infinigen/assets/objects/creatures/parts/crustacean/eye.py similarity index 62% rename from infinigen/assets/creatures/parts/crustacean/eye.py rename to infinigen/assets/objects/creatures/parts/crustacean/eye.py index c07eac775..b8a231422 100644 --- a/infinigen/assets/creatures/parts/crustacean/eye.py +++ b/infinigen/assets/objects/creatures/parts/crustacean/eye.py @@ -8,24 +8,26 @@ import numpy as np from numpy.random import uniform -from infinigen.assets.creatures.util.creature import Part, PartFactory +from infinigen.assets.objects.creatures.util.creature import Part, PartFactory from infinigen.assets.utils.object import join_objects, new_icosphere, origin2leftmost from infinigen.core.placement.detail import remesh_with_attrs class CrustaceanEyeFactory(PartFactory): - tags = ['eye'] - min_spike_distance = .05 - min_spike_radius = .02 + tags = ["eye"] + min_spike_distance = 0.05 + min_spike_radius = 0.02 def make_part(self, params) -> Part: - length = params['length'] - sphere = new_icosphere(radius=params['radius']) - bpy.ops.mesh.primitive_cylinder_add(radius=.01, depth=length, location=(-length / 2, 0, 0)) + length = params["length"] + sphere = new_icosphere(radius=params["radius"]) + bpy.ops.mesh.primitive_cylinder_add( + radius=0.01, depth=length, location=(-length / 2, 0, 0) + ) cylinder = bpy.context.active_object cylinder.rotation_euler[1] = np.pi / 2 obj = join_objects([sphere, cylinder]) - remesh_with_attrs(obj, .005) + remesh_with_attrs(obj, 0.005) origin2leftmost(obj) skeleton = np.zeros((2, 3)) @@ -33,6 +35,6 @@ def make_part(self, params) -> Part: return Part(skeleton, obj) def sample_params(self): - radius = uniform(.015, .02) + radius = uniform(0.015, 0.02) length = radius * uniform(1, 1.5) - return {'radius': radius, 'length': length} + return {"radius": radius, "length": length} diff --git a/infinigen/assets/creatures/parts/crustacean/fin.py b/infinigen/assets/objects/creatures/parts/crustacean/fin.py similarity index 54% rename from infinigen/assets/creatures/parts/crustacean/fin.py rename to infinigen/assets/objects/creatures/parts/crustacean/fin.py index f4bb48e7c..1bee6e541 100644 --- a/infinigen/assets/creatures/parts/crustacean/fin.py +++ b/infinigen/assets/objects/creatures/parts/crustacean/fin.py @@ -7,29 +7,36 @@ import numpy as np from numpy.random import uniform -from infinigen.assets.creatures.util.creature import Part, PartFactory +from infinigen.assets.objects.creatures.util.creature import Part, PartFactory from infinigen.assets.utils.draw import leaf from infinigen.core.surface import write_attr_data from infinigen.core.util import blender as butil class CrustaceanFinFactory(PartFactory): - tags = ['body'] + tags = ["body"] def make_part(self, params) -> Part: - x_length, y_length, x_tip, y_mid = map(params.get, ['x_length', 'y_length', 'x_tip', 'y_mid']) + x_length, y_length, x_tip, y_mid = map( + params.get, ["x_length", "y_length", "x_tip", "y_mid"] + ) x_anchors = 0, x_tip / 2, x_tip, 1 y_anchors = 0, y_mid, 1, 0 obj = leaf(np.array(x_anchors) * x_length, np.array(y_anchors) * y_length) - butil.modify_mesh(obj, 'SOLIDIFY', 
thickness=.01, offset=0.) - write_attr_data(obj, 'ratio', np.ones(len(obj.data.vertices))) + butil.modify_mesh(obj, "SOLIDIFY", thickness=0.01, offset=0.0) + write_attr_data(obj, "ratio", np.ones(len(obj.data.vertices))) skeleton = np.zeros((2, 3)) skeleton[1, 0] = x_length return Part(skeleton, obj) def sample_params(self): - x_length = uniform(.15, .3) - y_length = x_length * uniform(.3, .4) - x_tip = uniform(.7, .8) - y_mid = uniform(.6, .8) - return {'x_length': x_length, 'y_length': y_length, 'x_tip': x_tip, 'y_mid': y_mid} + x_length = uniform(0.15, 0.3) + y_length = x_length * uniform(0.3, 0.4) + x_tip = uniform(0.7, 0.8) + y_mid = uniform(0.6, 0.8) + return { + "x_length": x_length, + "y_length": y_length, + "x_tip": x_tip, + "y_mid": y_mid, + } diff --git a/infinigen/assets/objects/creatures/parts/crustacean/leg.py b/infinigen/assets/objects/creatures/parts/crustacean/leg.py new file mode 100644 index 000000000..b98c1fcfc --- /dev/null +++ b/infinigen/assets/objects/creatures/parts/crustacean/leg.py @@ -0,0 +1,102 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Lingjie Mei + + +import numpy as np +from numpy.random import uniform + +from infinigen.assets.objects.creatures.parts.utils.draw import make_segments +from infinigen.assets.objects.creatures.util.animation.driver_repeated import ( + bend_bones_lerp, +) +from infinigen.assets.objects.creatures.util.creature import Part, PartFactory +from infinigen.assets.objects.creatures.util.genome import Joint +from infinigen.assets.utils.decorate import read_co +from infinigen.assets.utils.object import join_objects +from infinigen.core.surface import write_attr_data +from infinigen.core.util.random import log_uniform + + +class CrabLegFactory(PartFactory): + tags = ["leg"] + + def make_part(self, params) -> Part: + x_length = params["x_length"] + segments, x_cuts = self.make_segments(params) + obj = join_objects(segments) + + skeleton = np.zeros((2, 3)) + skeleton[1, 0] = x_length + joints = {x: Joint(rest=(0, 0, 0)) for x in x_cuts[1:-1]} + return Part(skeleton, obj, joints=joints) + + def make_segments(self, params): + x_cuts = [0, params["x_mid_first"], params["x_mid_second"], 1] + y_cuts = [1, params["y_mid_first"], params["y_mid_second"], 0.01] + + def x_anchors(u, v): + return u, u + 0.01, (u + v) / 2, v - 0.01, v + + def y_anchors(u, v): + return u * 0.9, u, (u + v) / 2 * params["y_expand"], v, v * 0.9 + + segments = make_segments(x_cuts, y_cuts, x_anchors, y_anchors, params) + for obj in segments: + z = read_co(obj).T[-1] + write_attr_data( + obj, + "ratio", + 1 + np.where(z > 0, 0, uniform(0.8, 1.5) * z / params["y_length"]), + ) + return segments, x_cuts + + def sample_params(self): + x_length = uniform(0.8, 1.2) + y_length = uniform(0.025, 0.035) + z_length = y_length * uniform(1.0, 1.5) + x_mid_first = uniform(0.3, 0.4) + x_mid_second = uniform(0.6, 0.7) + y_mid_first = uniform(0.7, 1.0) + y_mid_second = y_mid_first / 2 * uniform(1.1, 1.3) + y_expand = uniform(1.1, 1.3) + noise_strength = uniform(0.005, 0.01) + noise_scale = log_uniform(5, 10) + bottom_shift = uniform(0.3, 0.5) + bottom_cutoff = uniform(0.2, 0.5) + top_shift = uniform(0.2, 0.4) + top_cutoff = uniform(0.6, 0.8) + return { + "x_length": x_length, + "y_length": y_length, + "z_length": z_length, + "x_mid_first": x_mid_first, + "x_mid_second": x_mid_second, + "y_mid_first": y_mid_first, + "y_mid_second": y_mid_second, + 
"y_expand": y_expand, + "noise_strength": noise_strength, + "noise_scale": noise_scale, + "bottom_shift": bottom_shift, + "bottom_cutoff": bottom_cutoff, + "top_shift": top_shift, + "top_cutoff": top_cutoff, + } + + @staticmethod + def animate_bones(arma, bones, params): + bend_bones_lerp( + arma, bones, params["leg_curl"], params["freq"], rot=params["leg_rot"] + ) + + +class LobsterLegFactory(CrabLegFactory): + def sample_params(self): + y_length = uniform(0.01, 0.015) + z_length = y_length * log_uniform(1, 1.2) + return { + **super(LobsterLegFactory, self).sample_params(), + "y_length": y_length, + "z_length": z_length, + } diff --git a/infinigen/assets/objects/creatures/parts/crustacean/tail.py b/infinigen/assets/objects/creatures/parts/crustacean/tail.py new file mode 100644 index 000000000..62e948cad --- /dev/null +++ b/infinigen/assets/objects/creatures/parts/crustacean/tail.py @@ -0,0 +1,106 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Lingjie Mei + + +import numpy as np +from numpy.random import uniform +from scipy.interpolate import interp1d + +from infinigen.assets.objects.creatures.parts.utils.draw import make_segments +from infinigen.assets.objects.creatures.util.animation.driver_repeated import ( + bend_bones_lerp, +) +from infinigen.assets.objects.creatures.util.creature import Part, PartFactory +from infinigen.assets.objects.creatures.util.genome import Joint +from infinigen.assets.utils.decorate import read_co +from infinigen.assets.utils.object import join_objects +from infinigen.core.surface import write_attr_data +from infinigen.core.util.random import log_uniform + + +class CrustaceanTailFactory(PartFactory): + tags = ["body"] + + def make_part(self, params) -> Part: + x_length = params["x_length"] + segments, x_cuts = self.make_segments(params) + obj = join_objects(segments) + + skeleton = np.zeros((2, 3)) + skeleton[1, 0] = x_length + joints = {x: Joint(rest=(0, 0, 0)) for x in x_cuts[1:]} + return Part(skeleton, obj, joints=joints) + + def make_segments(self, params): + n = params["n_segments"] + decay = np.exp(np.log(params["x_decay"]) / n) + x_cuts = np.cumsum(decay ** np.arange(n)) + x_cuts = [0, *x_cuts / x_cuts[-1]] + y_cuts_scale = interp1d( + [0, 1 / 3, 2 / 3, 1], + [ + 1 / params["shell_ratio"], + params["y_midpoint_first"], + params["y_midpoint_second"], + 0.1, + ], + fill_value="extrapolate", + ) + y_cuts = y_cuts_scale(x_cuts) + + def x_anchors(u, v): + return u, (u + v) / 2, v + + def y_anchors(u, v): + return u, np.sqrt(u * v), v * params["shell_ratio"] + + segments = make_segments(x_cuts, y_cuts, x_anchors, y_anchors, params) + height = uniform(0.5, 1.0) + for obj in segments: + z = read_co(obj).T[-1] + write_attr_data( + obj, "ratio", 1 + np.where(z > 0, 0, height * z / params["y_length"]) + ) + return segments, x_cuts + + def sample_params(self): + x_length = uniform(1.0, 1.5) + y_length = uniform(0.15, 0.2) + z_length = y_length * uniform(1, 1.2) + y_expand = uniform(1.1, 1.3) + y_midpoint_first = uniform(0.85, 0.95) + y_midpoint_second = uniform(0.7, 0.8) + noise_strength = uniform(0.01, 0.02) + noise_scale = log_uniform(10, 20) + bottom_shift = uniform(0.3, 0.5) + bottom_cutoff = uniform(0.2, 0.5) + top_shift = 0 + top_cutoff = 1 + n_segments = np.random.randint(6, 10) + x_decay = log_uniform(0.2, 0.3) + shell_ratio = uniform(1.05, 1.08) + fin_x_length = uniform(0.5, 0.8) + return { + "x_length": x_length, + 
"y_length": y_length, + "z_length": z_length, + "y_expand": y_expand, + "noise_strength": noise_strength, + "noise_scale": noise_scale, + "bottom_shift": bottom_shift, + "bottom_cutoff": bottom_cutoff, + "top_shift": top_shift, + "top_cutoff": top_cutoff, + "n_segments": n_segments, + "x_decay": x_decay, + "shell_ratio": shell_ratio, + "y_midpoint_first": y_midpoint_first, + "y_midpoint_second": y_midpoint_second, + "fin_x_length": fin_x_length, + } + + @staticmethod + def animate_bones(arma, bones, params): + bend_bones_lerp(arma, bones, params["tail_curl"], params["freq"]) diff --git a/infinigen/assets/objects/creatures/parts/eye.py b/infinigen/assets/objects/creatures/parts/eye.py new file mode 100644 index 000000000..48f8b27bf --- /dev/null +++ b/infinigen/assets/objects/creatures/parts/eye.py @@ -0,0 +1,303 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Alexander Raistrick + + +from numpy.random import normal as N + +from infinigen.assets.materials.utils.surface_utils import nodegroup_norm_vec +from infinigen.assets.objects.creatures.util import part_util +from infinigen.assets.objects.creatures.util.creature import PartFactory +from infinigen.assets.utils.nodegroups.curve import ( + nodegroup_profile_part, + nodegroup_simple_tube, + nodegroup_smooth_taper, + nodegroup_warped_circle_curve, +) +from infinigen.assets.utils.nodegroups.math import nodegroup_aspect_to_dim +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.tagging import tag_nodegroup, tag_object + + +@node_utils.to_nodegroup("nodegroup_eyelid", singleton=True, type="GeometryNodeTree") +def nodegroup_eyelid(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "Eyeball Radius", 1.0), + ("NodeSocketFloat", "Aspect Ratio", 0.34999999999999998), + ("NodeSocketFloat", "fullness", 2.0), + ("NodeSocketVector", "TearDuctCoord", (0.0, -1.5, -0.20000000000000001)), + ("NodeSocketVector", "PeakCoord", (1.2, -0.20000000000000001, 2.0)), + ("NodeSocketVector", "EyelidEndCoord", (0.0, 1.5, 0.29999999999999999)), + ("NodeSocketFloat", "StartRadPct", 0.5), + ("NodeSocketFloat", "EndRadPct", 0.5), + ("NodeSocketFloatAngle", "Tilt", -0.34910000000000002), + ], + ) + + scale = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: group_input.outputs["TearDuctCoord"], + "Scale": group_input.outputs["Eyeball Radius"], + }, + attrs={"operation": "SCALE"}, + ) + + scale_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: group_input.outputs["PeakCoord"], + "Scale": group_input.outputs["Eyeball Radius"], + }, + attrs={"operation": "SCALE"}, + ) + + scale_2 = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: group_input.outputs["EyelidEndCoord"], + "Scale": group_input.outputs["Eyeball Radius"], + }, + attrs={"operation": "SCALE"}, + ) + + quadratic_bezier = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Start": scale.outputs["Vector"], + "Middle": scale_1.outputs["Vector"], + "End": scale_2.outputs["Vector"], + }, + ) + + set_curve_tilt = nw.new_node( + Nodes.SetCurveTilt, + input_kwargs={"Curve": quadratic_bezier, "Tilt": group_input.outputs["Tilt"]}, + ) + + position = nw.new_node(Nodes.InputPosition) + + aspect_to_dim = nw.new_node( + nodegroup_aspect_to_dim().name, + 
input_kwargs={"Aspect Ratio": group_input.outputs["Aspect Ratio"]}, + ) + + multiply = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: position, 1: aspect_to_dim}, + attrs={"operation": "MULTIPLY"}, + ) + + warped_circle_curve = nw.new_node( + nodegroup_warped_circle_curve().name, + input_kwargs={"Position": multiply.outputs["Vector"]}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["Eyeball Radius"], + 1: group_input.outputs["StartRadPct"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["Eyeball Radius"], + 1: group_input.outputs["EndRadPct"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + smoothtaper = nw.new_node( + nodegroup_smooth_taper().name, + input_kwargs={ + "start_rad": multiply_1, + "end_rad": multiply_2, + "fullness": group_input.outputs["fullness"], + }, + ) + + profilepart = nw.new_node( + nodegroup_profile_part().name, + input_kwargs={ + "Skeleton Curve": set_curve_tilt, + "Profile Curve": warped_circle_curve, + "Radius Func": smoothtaper, + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": tag_nodegroup(nw, profilepart, "eyelid")}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_mammal_eye", singleton=True, type="GeometryNodeTree" +) +def nodegroup_mammal_eye(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloatDistance", "Radius", 0.050000000000000003), + ("NodeSocketFloat", "Eyelid Thickness Ratio", 0.34999999999999998), + ("NodeSocketFloat", "Eyelid Fullness", 2.0), + ("NodeSocketBool", "Eyelids", True), + ], + ) + + eyelid = nw.new_node( + nodegroup_eyelid().name, + input_kwargs={ + "Eyeball Radius": group_input.outputs["Radius"], + "Aspect Ratio": group_input.outputs["Eyelid Thickness Ratio"], + "fullness": group_input.outputs["Eyelid Fullness"], + "TearDuctCoord": (0.0, -1.2, -0.20000000000000001), + "PeakCoord": (1.2, 0.40000000000000002, -1.7), + "EyelidEndCoord": (0.0, 1.2, 0.31), + "Tilt": 0.69810000000000005, + }, + ) + + eyelid_1 = nw.new_node( + nodegroup_eyelid().name, + input_kwargs={ + "Eyeball Radius": group_input.outputs["Radius"], + "Aspect Ratio": group_input.outputs["Eyelid Thickness Ratio"], + "fullness": group_input.outputs["Eyelid Fullness"], + "PeakCoord": (1.2, -0.20000000000000001, 1.8), + }, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [eyelid, eyelid_1]} + ) + + switch = nw.new_node( + Nodes.Switch, + input_kwargs={1: group_input.outputs["Eyelids"], 15: join_geometry}, + ) + + uv_sphere = nw.new_node( + Nodes.MeshUVSphere, input_kwargs={"Radius": group_input.outputs["Radius"]} + ) + + scale = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: (0.10000000000000001, 0.0, 0.0), + "Scale": group_input.outputs["Radius"], + }, + attrs={"operation": "SCALE"}, + ) + + transform_1 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": uv_sphere, + "Translation": scale.outputs["Vector"], + "Rotation": (0.0, 1.5708, 0.0), + "Scale": (1.0, 1.0, 0.69999999999999996), + }, + ) + + scale_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: (-1.7, 0.0, 0.0), "Scale": group_input.outputs["Radius"]}, + attrs={"operation": "SCALE"}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Radius"], 1: 6.0}, + attrs={"operation": "MULTIPLY"}, + ) + + scale_2 = nw.new_node( + Nodes.VectorMath, + 
input_kwargs={ + 0: (0.33000000000000002, 0.33000000000000002, 0.33000000000000002), + "Scale": multiply, + }, + attrs={"operation": "SCALE"}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Radius"], 1: 3.0}, + attrs={"operation": "MULTIPLY"}, + ) + + simple_tube = nw.new_node( + nodegroup_simple_tube().name, + input_kwargs={ + "Origin": scale_1.outputs["Vector"], + "Angles Deg": (0.0, 0.0, 0.0), + "Seg Lengths": scale_2.outputs["Vector"], + "Start Radius": group_input.outputs["Radius"], + "End Radius": multiply_1, + "Fullness": 0.29999999999999999, + "Do Bezier": False, + "Aspect Ratio": 1.1000000000000001, + }, + ) + + transform_2 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": simple_tube.outputs["Geometry"], + "Rotation": (0.0, 0.0, 0.34910000000000002), + }, + ) + + eyeball = nw.new_node( + Nodes.SubdivisionSurface, input_kwargs={"Mesh": transform_1, "Level": 2} + ) + + position_2 = nw.new_node(Nodes.InputPosition) + + normvec = nw.new_node( + nodegroup_norm_vec().name, + input_kwargs={ + "Geometry": eyeball, + "Name": "EyeballPosition", + "Vector": position_2, + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": None, + "BodyExtra_Lid": switch.outputs[6], + "Eyeballl": normvec, + "ParentCutter": transform_2, + }, + ) + + +class MammalEye(PartFactory): + tags = ["head_detail", "eye_socket"] + + def sample_params(self): + return { + "Radius": 0.03 * N(1, 0.1), + "Eyelid Thickness Ratio": 0.35 * N(1, 0.05), + "Eyelid Fullness": 2.0 * N(1, 0.1), + } + + def make_part(self, params): + part = part_util.nodegroup_to_part(nodegroup_mammal_eye, params) + tag_object(part.obj, "mammal_eye") + return part diff --git a/infinigen/assets/objects/creatures/parts/eye_new.py b/infinigen/assets/objects/creatures/parts/eye_new.py new file mode 100644 index 000000000..9df9dbe6a --- /dev/null +++ b/infinigen/assets/objects/creatures/parts/eye_new.py @@ -0,0 +1,4477 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
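+# Geometry node groups and shaders for procedurally generated creature eyes: eyeball, cornea, eyelid and eye-socket parts.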
+ +# Authors: Mingzhe Wang +# Acknowledgement: This file draws inspiration from https://www.youtube.com/watch?v=EfNzAaqKHXQ by PixelicaCG, https://www.youtube.com/watch?v=JcHX4AT1vtg by CGCookie and https://www.youtube.com/watch?v=E0JyyWeptSA by CGRogue + + +import os + +import bpy +from numpy.random import normal as N +from numpy.random import uniform as U + +from infinigen.assets.objects.creatures.util import part_util +from infinigen.assets.objects.creatures.util.creature import PartFactory +from infinigen.core import surface +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler + + +@node_utils.to_nodegroup("nodegroup_circle", singleton=False, type="GeometryNodeTree") +def nodegroup_circle(nw: NodeWrangler): + # Code generated using version 2.6.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "R", 0.5000), + ("NodeSocketInt", "Resolution", 512), + ], + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input, 1: -1.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_4 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"X": multiply}) + + combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": group_input}) + + combine_xyz_5 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"X": group_input}) + + curve_circle = nw.new_node( + Nodes.CurveCircle, + input_kwargs={ + "Resolution": group_input.outputs["Resolution"], + "Point 1": combine_xyz_4, + "Point 2": combine_xyz_3, + "Point 3": combine_xyz_5, + "Radius": 2.0000, + }, + attrs={"mode": "POINTS"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Curve": curve_circle.outputs["Curve"]}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup("nodegroup_eyeball", singleton=False, type="GeometryNodeTree") +def nodegroup_eyeball(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "Value", 1.0), + ("NodeSocketInt", "Resolution", 32), + ], + ) + + uv_sphere = nw.new_node( + Nodes.MeshUVSphere, + input_kwargs={ + "Segments": group_input.outputs["Resolution"], + "Rings": group_input.outputs["Resolution"], + }, + ) + + position_1 = nw.new_node(Nodes.InputPosition) + + separate_xyz_1 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": position_1}) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Value"], 1: group_input.outputs["Value"]}, + attrs={"operation": "MULTIPLY"}, + ) + + subtract = nw.new_node( + Nodes.Math, input_kwargs={0: 1.0, 1: multiply}, attrs={"operation": "SUBTRACT"} + ) + + sqrt = nw.new_node( + Nodes.Math, input_kwargs={0: subtract}, attrs={"operation": "SQRT"} + ) + + multiply_1 = nw.new_node( + Nodes.Math, input_kwargs={0: sqrt, 1: 1.02}, attrs={"operation": "MULTIPLY"} + ) + + subtract_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_1.outputs["Y"], 1: multiply_1}, + attrs={"operation": "SUBTRACT", "use_clamp": True}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract_1, 1: 0.5}, + attrs={"operation": "MULTIPLY"}, + ) + + subtract_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_2, 1: subtract_1}, + attrs={"operation": "SUBTRACT"}, + ) + + combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Y": subtract_2}) + + set_position_1 = nw.new_node( + Nodes.SetPosition, input_kwargs={"Geometry": uv_sphere, "Offset": combine_xyz_1} + ) + 
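+    # Mark vertices whose Y extends past the iris boundary (the region pulled inward above) and store the mask as the "Iris" attribute for the shader.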
+ greater_than = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract_1, 1: 0.0}, + attrs={"operation": "GREATER_THAN"}, + ) + + store_named_attribute = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={"Geometry": set_position_1, "Name": "Iris", 3: greater_than}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": store_named_attribute} + ) + + +@node_utils.to_nodegroup("nodegroup_cornea", singleton=False, type="GeometryNodeTree") +def nodegroup_cornea(nw: NodeWrangler): + # Code generated using version 2.6.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "ScaleX", 0.5000), + ("NodeSocketFloat", "Height", 2.0000), + ("NodeSocketFloatFactor", "ScaleZ", 0.0000), + ("NodeSocketFloat", "Y", 20.0000), + ("NodeSocketInt", "Resolution", 128), + ], + ) + + uv_sphere_1 = nw.new_node( + Nodes.MeshUVSphere, + input_kwargs={ + "Segments": group_input.outputs["Resolution"], + "Rings": group_input.outputs["Resolution"], + }, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: 3.0000, 1: group_input.outputs["Height"]}, + attrs={"operation": "SUBTRACT"}, + ) + + divide = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["ScaleX"], 1: subtract}, + attrs={"operation": "DIVIDE"}, + ) + + combine_color = nw.new_node( + "FunctionNodeCombineColor", + input_kwargs={ + "Red": group_input.outputs["ScaleX"], + "Green": divide, + "Blue": group_input.outputs["ScaleZ"], + }, + ) + + transform = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": uv_sphere_1, "Scale": combine_color} + ) + + position_2 = nw.new_node(Nodes.InputPosition) + + separate_xyz_2 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": position_2}) + + greater_than = nw.new_node( + Nodes.Compare, input_kwargs={0: separate_xyz_2.outputs["Y"]} + ) + + separate_geometry = nw.new_node( + Nodes.SeparateGeometry, + input_kwargs={"Geometry": transform, "Selection": greater_than}, + ) + + noise_texture = nw.new_node(Nodes.NoiseTexture) + + subtract_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: noise_texture.outputs["Fac"]}, + attrs={"operation": "SUBTRACT"}, + ) + + normal = nw.new_node(Nodes.InputNormal) + + multiply = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: subtract_1, 1: normal}, + attrs={"operation": "MULTIPLY"}, + ) + + value = nw.new_node(Nodes.Value) + value.outputs[0].default_value = 0.0200 + + multiply_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: multiply.outputs["Vector"], 1: value}, + attrs={"operation": "MULTIPLY"}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": separate_geometry.outputs["Selection"], + "Offset": multiply_1.outputs["Vector"], + }, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["ScaleX"], + 1: group_input.outputs["ScaleX"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + subtract_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: 1.0000, 1: multiply_2}, + attrs={"operation": "SUBTRACT"}, + ) + + sqrt = nw.new_node( + Nodes.Math, input_kwargs={0: subtract_2}, attrs={"operation": "SQRT"} + ) + + multiply_3 = nw.new_node( + Nodes.Math, input_kwargs={0: sqrt, 1: 0.9500}, attrs={"operation": "MULTIPLY"} + ) + + combine_color_1 = nw.new_node( + "FunctionNodeCombineColor", input_kwargs={"Green": multiply_3} + ) + + multiply_4 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Y"], 1: -1.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz = 
nw.new_node(Nodes.CombineXYZ, input_kwargs={"Y": multiply_4}) + + transform_1 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": set_position, + "Translation": combine_color_1, + "Rotation": combine_xyz, + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": transform_1}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_eyelid_radius", singleton=False, type="GeometryNodeTree" +) +def nodegroup_eyelid_radius(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + curve_line = nw.new_node(Nodes.CurveLine, input_kwargs={"End": (0.0, 0.8, 0.0)}) + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "OuterControl", 0.3), + ("NodeSocketFloat", "InnerControl1", 5.4), + ("NodeSocketFloat", "InnerControl2", 0.3), + ("NodeSocketInt", "Resolution", 32), + ], + ) + + resample_curve = nw.new_node( + Nodes.ResampleCurve, + input_kwargs={"Curve": curve_line, "Count": group_input.outputs["Resolution"]}, + ) + + position = nw.new_node(Nodes.InputPosition) + + separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": position}) + + capture_attribute = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={"Geometry": resample_curve, 2: separate_xyz.outputs["Y"]}, + ) + + reroute = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": separate_xyz.outputs["Y"]} + ) + + subtract = nw.new_node( + Nodes.Math, input_kwargs={0: reroute, 1: 0.4}, attrs={"operation": "SUBTRACT"} + ) + + power = nw.new_node( + Nodes.Math, input_kwargs={0: subtract, 1: 2.0}, attrs={"operation": "POWER"} + ) + + multiply = nw.new_node( + Nodes.Math, input_kwargs={0: power, 1: -0.7}, attrs={"operation": "MULTIPLY"} + ) + + greater_than = nw.new_node( + Nodes.Math, + input_kwargs={0: reroute, 1: group_input.outputs["InnerControl2"]}, + attrs={"operation": "GREATER_THAN"}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply, 1: greater_than}, + attrs={"operation": "MULTIPLY"}, + ) + + subtract_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: 1.0, 1: greater_than}, + attrs={"operation": "SUBTRACT"}, + ) + + reroute_3 = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": group_input.outputs["OuterControl"]} + ) + + subtract_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: reroute_3, 1: reroute}, + attrs={"operation": "SUBTRACT"}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract_1, 1: subtract_2}, + attrs={"operation": "MULTIPLY"}, + ) + + power_1 = nw.new_node( + Nodes.Math, input_kwargs={0: multiply_2, 1: 2.0}, attrs={"operation": "POWER"} + ) + + multiply_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: power_1, 1: group_input.outputs["InnerControl1"]}, + attrs={"operation": "MULTIPLY"}, + ) + + add = nw.new_node(Nodes.Math, input_kwargs={0: multiply_1, 1: multiply_3}) + + subtract_3 = nw.new_node( + Nodes.Math, input_kwargs={0: add, 1: 0.0}, attrs={"operation": "SUBTRACT"} + ) + + reroute_1 = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": group_input.outputs["OuterControl"]} + ) + + subtract_4 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_3, 1: reroute_1}, + attrs={"operation": "SUBTRACT"}, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": subtract_3, "Y": subtract_4} + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": capture_attribute.outputs["Geometry"], + "Offset": combine_xyz, + }, + ) + + transform = nw.new_node( + Nodes.Transform, + 
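+        # Scale the finished radius profile up by 1.5x; it is later swept along the eyelid circle (CurveToMesh) as the lid's profile.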
input_kwargs={"Geometry": set_position, "Scale": (1.5, 1.5, 1.5)}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": transform, "Attribute": capture_attribute.outputs[2]}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_eyelid_circle", singleton=False, type="GeometryNodeTree" +) +def nodegroup_eyelid_circle(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "ShapeW", 0.0), + ("NodeSocketFloat", "ShapeH", 0.0), + ("NodeSocketInt", "Resolution", 32), + ], + ) + + reroute_3 = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": group_input.outputs["ShapeW"]} + ) + + circle = nw.new_node( + nodegroup_circle().name, + input_kwargs={"R": reroute_3, "Resolution": group_input.outputs["Resolution"]}, + ) + + spline_parameter = nw.new_node(Nodes.SplineParameter) + + capture_attribute = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={"Geometry": circle, 2: spline_parameter.outputs["Factor"]}, + ) + + position_1 = nw.new_node(Nodes.InputPosition) + + capture_attribute_1 = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={"Geometry": capture_attribute.outputs["Geometry"], 1: position_1}, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + position = nw.new_node(Nodes.InputPosition) + + separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": position}) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["X"], 1: -0.5}, + attrs={"operation": "SUBTRACT"}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract, 1: subtract}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply, 1: -0.02}, + attrs={"operation": "MULTIPLY"}, + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: separate_xyz.outputs["Y"], 1: multiply_1} + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["ShapeH"], + 1: group_input.outputs["ShapeW"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + reroute_1 = nw.new_node(Nodes.Reroute, input_kwargs={"Input": multiply_2}) + + multiply_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: reroute_1, 1: reroute_1}, + attrs={"operation": "MULTIPLY"}, + ) + + greater_than = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["X"], 1: 0.0}, + attrs={"operation": "GREATER_THAN"}, + ) + + multiply_4 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["X"], 1: 1.0}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_5 = nw.new_node( + Nodes.Math, + input_kwargs={0: greater_than, 1: multiply_4}, + attrs={"operation": "MULTIPLY"}, + ) + + less_than = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["X"], 1: 0.0}, + attrs={"operation": "LESS_THAN"}, + ) + + multiply_6 = nw.new_node( + Nodes.Math, + input_kwargs={0: less_than, 1: separate_xyz.outputs["X"]}, + attrs={"operation": "MULTIPLY"}, + ) + + add_1 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_5, 1: multiply_6}) + + reroute = nw.new_node(Nodes.Reroute, input_kwargs={"Input": add_1}) + + multiply_7 = nw.new_node( + Nodes.Math, + input_kwargs={0: reroute, 1: reroute}, + attrs={"operation": "MULTIPLY"}, + ) + + subtract_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_3, 1: multiply_7}, + attrs={"operation": "SUBTRACT"}, + ) + + sqrt = nw.new_node( + Nodes.Math, input_kwargs={0: subtract_1}, attrs={"operation": "SQRT"} + ) + + multiply_8 = nw.new_node( + Nodes.Math, + 
input_kwargs={0: reroute_3, 1: reroute_3}, + attrs={"operation": "MULTIPLY"}, + ) + + subtract_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_3, 1: multiply_8}, + attrs={"operation": "SUBTRACT"}, + ) + + sqrt_1 = nw.new_node( + Nodes.Math, input_kwargs={0: subtract_2}, attrs={"operation": "SQRT"} + ) + + reroute_2 = nw.new_node(Nodes.Reroute, input_kwargs={"Input": sqrt_1}) + + subtract_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: sqrt, 1: reroute_2}, + attrs={"operation": "SUBTRACT"}, + ) + + sign = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["Z"], 1: 0.0}, + attrs={"operation": "SIGN"}, + ) + + multiply_9 = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract_3, 1: sign}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": separate_xyz.outputs["X"], "Y": add, "Z": multiply_9}, + ) + + set_position_1 = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": capture_attribute_1.outputs["Geometry"], + "Position": combine_xyz_1, + }, + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={"W": 50.0, "Scale": 0.5}, + attrs={"noise_dimensions": "4D"}, + ) + + subtract_4 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: noise_texture.outputs["Color"], 1: (0.5, 0.5, 0.5)}, + attrs={"operation": "SUBTRACT"}, + ) + + value = nw.new_node(Nodes.Value) + value.outputs[0].default_value = 0.5 + + multiply_10 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: subtract_4.outputs["Vector"], 1: value}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"Z": multiply_10.outputs["Vector"]} + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={"Geometry": set_position_1, "Offset": combine_xyz}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": set_position, + "Attribute": capture_attribute.outputs[2], + "Attribute1": capture_attribute_1.outputs["Attribute"], + }, + ) + + +@node_utils.to_nodegroup("nodegroup_eye_ball", singleton=False, type="GeometryNodeTree") +def nodegroup_eye_ball(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "CorneaScaleX", 0.52), + ("NodeSocketFloat", "Height", 1.2), + ("NodeSocketFloatFactor", "CorneaScaleZ", 0.8), + ("NodeSocketFloat", "Y", 20.0), + ("NodeSocketInt", "EyeballResolution", 32), + ("NodeSocketInt", "CorneaResolution", 128), + ], + ) + + cornea_008 = nw.new_node( + nodegroup_cornea().name, + input_kwargs={ + "ScaleX": group_input.outputs["CorneaScaleX"], + "Height": group_input.outputs["Height"], + "ScaleZ": group_input.outputs["CorneaScaleZ"], + "Y": group_input.outputs["Y"], + "Resolution": group_input.outputs["CorneaResolution"], + }, + ) + + store_named_attribute = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={"Geometry": cornea_008, "Name": "tag_cornea", 5: True}, + attrs={"data_type": "BOOLEAN"}, + ) + + eyeball_009 = nw.new_node( + nodegroup_eyeball().name, + input_kwargs={ + "Value": group_input.outputs["CorneaScaleX"], + "Resolution": group_input.outputs["EyeballResolution"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Cornea": store_named_attribute, "Eyeball": eyeball_009}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_raycast_append", singleton=False, type="GeometryNodeTree" +) +def nodegroup_raycast_append(nw: NodeWrangler): + # Code generated 
using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketGeometry", "Target Geometry", None), + ("NodeSocketVector", "Ray Direction", (-1.0, 0.0, 0.0)), + ("NodeSocketFloat", "Default Offset", -0.005), + ], + ) + + raycast = nw.new_node( + Nodes.Raycast, + input_kwargs={ + "Target Geometry": group_input.outputs["Target Geometry"], + "Ray Direction": group_input.outputs["Ray Direction"], + "Ray Length": 0.1, + }, + ) + + less_than = nw.new_node( + Nodes.Math, + input_kwargs={0: raycast.outputs["Hit Distance"], 1: 0.07}, + attrs={"operation": "LESS_THAN"}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: raycast.outputs["Hit Distance"], 1: less_than}, + attrs={"operation": "MULTIPLY"}, + ) + + named_attribute = nw.new_node( + Nodes.NamedAttribute, + input_kwargs={"Name": "pos"}, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + distance = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: named_attribute.outputs["Attribute"]}, + attrs={"operation": "DISTANCE"}, + ) + + value_1 = nw.new_node(Nodes.Value) + value_1.outputs[0].default_value = 1.2 + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: distance.outputs["Value"], 1: value_1}, + attrs={"operation": "SUBTRACT", "use_clamp": True}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract, 1: 1.5}, + attrs={"operation": "MULTIPLY", "use_clamp": True}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply, 1: multiply_1}, + attrs={"operation": "MULTIPLY"}, + ) + + subtract_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: 1.0, 1: multiply_1}, + attrs={"operation": "SUBTRACT", "use_clamp": True}, + ) + + multiply_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract_1, 1: group_input.outputs["Default Offset"]}, + attrs={"operation": "MULTIPLY"}, + ) + + add = nw.new_node(Nodes.Math, input_kwargs={0: multiply_2, 1: multiply_3}) + + length = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: group_input.outputs["Ray Direction"]}, + attrs={"operation": "LENGTH"}, + ) + + divide = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: group_input.outputs["Ray Direction"], + 1: length.outputs["Value"], + }, + attrs={"operation": "DIVIDE"}, + ) + + multiply_4 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: add, 1: divide.outputs["Vector"]}, + attrs={"operation": "MULTIPLY"}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + "Offset": multiply_4.outputs["Vector"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_position} + ) + + +@node_utils.to_nodegroup( + "nodegroup_vector_sum", singleton=False, type="GeometryNodeTree" +) +def nodegroup_vector_sum(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketVector", "Vector", (0.0, 0.0, 0.0))] + ) + + separate_xyz_1 = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": group_input.outputs["Vector"]} + ) + + add = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_1.outputs["X"], 1: separate_xyz_1.outputs["Y"]}, + ) + + add_1 = nw.new_node( + Nodes.Math, input_kwargs={0: add, 1: separate_xyz_1.outputs["Z"]} + ) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Sum": add_1}) + + +def shader_material(nw: NodeWrangler): + # Code generated using version 2.4.3 of the 
node_transpiler + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, input_kwargs={"Base Color": (0.8, 0.0, 0.6028, 1.0)} + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf} + ) + + +@node_utils.to_nodegroup( + "nodegroup_part_surface_simple", singleton=False, type="GeometryNodeTree" +) +def nodegroup_part_surface_simple(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Skeleton Curve", None), + ("NodeSocketGeometry", "Skin Mesh", None), + ("NodeSocketVector", "Length, Yaw, Rad", (0.0, 0.0, 0.0)), + ], + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, + input_kwargs={"Vector": group_input.outputs["Length, Yaw, Rad"]}, + ) + + clamp_1 = nw.new_node( + Nodes.Clamp, input_kwargs={"Value": separate_xyz.outputs["X"]} + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": 1.5708, "Y": separate_xyz.outputs["Y"], "Z": 1.5708}, + ) + + part_surface = nw.new_node( + nodegroup_part_surface().name, + input_kwargs={ + "Skeleton Curve": group_input.outputs["Skeleton Curve"], + "Skin Mesh": group_input.outputs["Skin Mesh"], + "Length Fac": clamp_1, + "Ray Rot": combine_xyz, + "Rad": separate_xyz.outputs["Z"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Position": part_surface.outputs["Position"], + "Hit Normal": part_surface.outputs["Hit Normal"], + "Tangent": part_surface.outputs["Tangent"], + }, + ) + + +@node_utils.to_nodegroup( + "nodegroup_aspect_to_dim", singleton=False, type="GeometryNodeTree" +) +def nodegroup_aspect_to_dim(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketFloat", "Aspect Ratio", 1.0)] + ) + + greater_than = nw.new_node( + Nodes.Compare, input_kwargs={0: group_input.outputs["Aspect Ratio"], 1: 1.0} + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": group_input.outputs["Aspect Ratio"], "Y": 1.0}, + ) + + divide = nw.new_node( + Nodes.Math, + input_kwargs={0: 1.0, 1: group_input.outputs["Aspect Ratio"]}, + attrs={"operation": "DIVIDE"}, + ) + + combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"X": 1.0, "Y": divide}) + + switch = nw.new_node( + Nodes.Switch, + input_kwargs={0: greater_than, 8: combine_xyz_1, 9: combine_xyz_2}, + attrs={"input_type": "VECTOR"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"XY Scale": switch.outputs[3]} + ) + + +@node_utils.to_nodegroup( + "nodegroup_polar_to_cart", singleton=False, type="GeometryNodeTree" +) +def nodegroup_polar_to_cart(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "Angle", 0.5), + ("NodeSocketFloat", "Length", 0.0), + ("NodeSocketVector", "Origin", (0.0, 0.0, 0.0)), + ], + ) + + cosine = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Angle"]}, + attrs={"operation": "COSINE"}, + ) + + sine = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Angle"]}, + attrs={"operation": "SINE"}, + ) + + construct_unit_vector = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": cosine, "Z": sine}, + label="Construct Unit Vector", + ) + + offset_polar = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: group_input.outputs["Length"], + 1: construct_unit_vector, + 2: 
group_input.outputs["Origin"], + }, + label="Offset Polar", + attrs={"operation": "MULTIPLY_ADD"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Vector": offset_polar.outputs["Vector"]} + ) + + +@node_utils.to_nodegroup("nodegroup_switch4", singleton=False, type="GeometryNodeTree") +def nodegroup_switch4(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketInt", "Arg", 0), + ("NodeSocketVector", "Arg == 0", (0.0, 0.0, 0.0)), + ("NodeSocketVector", "Arg == 1", (0.0, 0.0, 0.0)), + ("NodeSocketVector", "Arg == 2", (0.0, 0.0, 0.0)), + ("NodeSocketVector", "Arg == 3", (0.0, 0.0, 0.0)), + ], + ) + + greater_equal = nw.new_node( + Nodes.Compare, + input_kwargs={2: group_input.outputs["Arg"], 3: 2}, + attrs={"data_type": "INT", "operation": "GREATER_EQUAL"}, + ) + + greater_equal_1 = nw.new_node( + Nodes.Compare, + input_kwargs={2: group_input.outputs["Arg"], 3: 1}, + attrs={"data_type": "INT", "operation": "GREATER_EQUAL"}, + ) + + switch_1 = nw.new_node( + Nodes.Switch, + input_kwargs={ + 0: greater_equal_1, + 8: group_input.outputs["Arg == 0"], + 9: group_input.outputs["Arg == 1"], + }, + attrs={"input_type": "VECTOR"}, + ) + + greater_equal_2 = nw.new_node( + Nodes.Compare, + input_kwargs={2: group_input.outputs["Arg"], 3: 3}, + attrs={"data_type": "INT", "operation": "GREATER_EQUAL"}, + ) + + switch_2 = nw.new_node( + Nodes.Switch, + input_kwargs={ + 0: greater_equal_2, + 8: group_input.outputs["Arg == 2"], + 9: group_input.outputs["Arg == 3"], + }, + attrs={"input_type": "VECTOR"}, + ) + + switch = nw.new_node( + Nodes.Switch, + input_kwargs={0: greater_equal, 8: switch_1.outputs[3], 9: switch_2.outputs[3]}, + attrs={"input_type": "VECTOR"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Output": switch.outputs[3]} + ) + + +def shader_eyeball_fish(nw: NodeWrangler, rand=True, **input_kwargs): + # Code generated using version 2.6.3 of the node_transpiler + + attribute_2 = nw.new_node(Nodes.Attribute, attrs={"attribute_name": "tag_cornea"}) + + attribute_1 = nw.new_node( + Nodes.Attribute, attrs={"attribute_name": "EyeballPosition"} + ) + + mapping = nw.new_node( + Nodes.Mapping, + input_kwargs={"Vector": attribute_1, "Scale": (1.2000, 1.0000, 0.4000)}, + ) + + noise_texture_2 = nw.new_node( + Nodes.NoiseTexture, input_kwargs={"Vector": mapping, "Scale": 50.0000} + ) + + mix_3 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": 0.0200, + "Color1": mapping, + "Color2": noise_texture_2.outputs["Color"], + }, + ) + + separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": mix_3}) + + value = nw.new_node(Nodes.Value) + value.outputs[0].default_value = 0.0000 + + group = nw.new_node( + nodegroup_rotate2_d().name, + input_kwargs={ + 0: separate_xyz.outputs["X"], + 1: separate_xyz.outputs["Z"], + 2: value, + }, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group.outputs[1], 1: 0.3000}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply, 1: multiply}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: group.outputs["Value"], 1: 0.8000}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_2, 1: multiply_2}, + attrs={"operation": "MULTIPLY"}, + ) + + add = nw.new_node(Nodes.Math, input_kwargs={0: multiply_1, 1: multiply_3}) + + add_1 = 
nw.new_node(Nodes.Math, input_kwargs={0: add, 1: 0.6300}) + + colorramp = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": add_1}) + colorramp.color_ramp.elements[0].position = 0.6400 + colorramp.color_ramp.elements[0].color = [1.0000, 1.0000, 1.0000, 1.0000] + colorramp.color_ramp.elements[1].position = 0.6591 + colorramp.color_ramp.elements[1].color = [0.0000, 0.0000, 0.0000, 1.0000] + + mapping_1 = nw.new_node( + Nodes.Mapping, + input_kwargs={"Vector": attribute_1, "Scale": (1.0000, 100.0000, 1.0000)}, + ) + + mix_4 = nw.new_node( + Nodes.MixRGB, + input_kwargs={"Fac": 0.3000, "Color1": mapping_1, "Color2": attribute_1}, + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, input_kwargs={"Vector": mix_4, "Scale": 10.0000} + ) + + mix = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": 0.7000, + "Color1": noise_texture.outputs["Fac"], + "Color2": mix_4, + }, + ) + + voronoi_texture = nw.new_node( + Nodes.VoronoiTexture, input_kwargs={"Vector": mix, "Scale": 20.0000} + ) + + multiply_4 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: voronoi_texture.outputs["Distance"], + 1: voronoi_texture.outputs["Distance"], + 2: 0.0000, + }, + attrs={"operation": "MULTIPLY"}, + ) + + mapping_2 = nw.new_node( + Nodes.Mapping, + input_kwargs={"Vector": attribute_1, "Scale": (1.0000, 20.0000, 1.0000)}, + ) + + mix_8 = nw.new_node( + Nodes.MixRGB, + input_kwargs={"Fac": 0.3000, "Color1": mapping_2, "Color2": attribute_1}, + ) + + noise_texture_3 = nw.new_node( + Nodes.NoiseTexture, input_kwargs={"Vector": mix_8, "Scale": 10.0000} + ) + + mix_1 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": 0.7000, + "Color1": noise_texture_3.outputs["Fac"], + "Color2": mix_8, + }, + ) + + voronoi_texture_1 = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={"Vector": mix_1, "W": 4.5000, "Scale": 1.0000}, + attrs={"voronoi_dimensions": "4D"}, + ) + + multiply_5 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: voronoi_texture_1.outputs["Distance"], + 1: voronoi_texture_1.outputs["Distance"], + 2: 0.0000, + }, + attrs={"operation": "MULTIPLY"}, + ) + + mix_9 = nw.new_node( + Nodes.MixRGB, + input_kwargs={"Fac": 1.0000, "Color1": multiply_4, "Color2": multiply_5}, + attrs={"blend_type": "OVERLAY"}, + ) + + bright_contrast = nw.new_node( + "ShaderNodeBrightContrast", + input_kwargs={"Color": mix_9, "Bright": 0.6000, "Contrast": 1.5000}, + ) + + multiply_6 = nw.new_node( + Nodes.Math, + input_kwargs={0: group.outputs[1], 1: 0.6000}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_7 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_6, 1: multiply_6}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_8 = nw.new_node( + Nodes.Math, + input_kwargs={0: group.outputs["Value"], 1: 0.6000}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_9 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_8, 1: multiply_8}, + attrs={"operation": "MULTIPLY"}, + ) + + add_2 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_7, 1: multiply_9}) + + add_3 = nw.new_node(Nodes.Math, input_kwargs={0: add_2}) + + colorramp_1 = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": add_3}) + colorramp_1.color_ramp.elements[0].position = 0.6159 + colorramp_1.color_ramp.elements[0].color = [1.0000, 1.0000, 1.0000, 1.0000] + colorramp_1.color_ramp.elements[1].position = 0.6591 + colorramp_1.color_ramp.elements[1].color = [0.0000, 0.0000, 0.0000, 1.0000] + + colorramp_5 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": colorramp_1.outputs["Color"]} + ) + colorramp_5.color_ramp.elements[0].position = 0.0295 + 
colorramp_5.color_ramp.elements[0].color = [0.0000, 0.0000, 0.0000, 1.0000] + colorramp_5.color_ramp.elements[1].position = 0.0523 + colorramp_5.color_ramp.elements[1].color = [1.0000, 1.0000, 1.0000, 1.0000] + + add_4 = nw.new_node( + Nodes.Math, + input_kwargs={0: bright_contrast, 1: colorramp_5.outputs["Color"]}, + attrs={"use_clamp": True}, + ) + + multiply_10 = nw.new_node( + Nodes.Math, input_kwargs={0: group.outputs[1]}, attrs={"operation": "MULTIPLY"} + ) + + multiply_11 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_10, 1: multiply_10}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_12 = nw.new_node( + Nodes.Math, + input_kwargs={0: group.outputs["Value"], 1: 0.7000}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_13 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_12, 1: multiply_12}, + attrs={"operation": "MULTIPLY"}, + ) + + add_5 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_11, 1: multiply_13}) + + add_6 = nw.new_node(Nodes.Math, input_kwargs={0: add_5, 1: 0.1800}) + + colorramp_2 = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": add_6}) + colorramp_2.color_ramp.elements[0].position = 0.4773 + colorramp_2.color_ramp.elements[0].color = [1.0000, 1.0000, 1.0000, 1.0000] + colorramp_2.color_ramp.elements[1].position = 0.6659 + colorramp_2.color_ramp.elements[1].color = [0.0000, 0.0000, 0.0000, 1.0000] + + noise_texture_1 = nw.new_node( + Nodes.NoiseTexture, input_kwargs={"W": 1.0000}, attrs={"noise_dimensions": "4D"} + ) + + colorramp_4 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": noise_texture_1.outputs["Color"]} + ) + colorramp_4.color_ramp.interpolation = "CARDINAL" + colorramp_4.color_ramp.elements[0].position = 0.2886 + colorramp_4.color_ramp.elements[0].color = [1.0000, 0.5767, 0.0000, 1.0000] + colorramp_4.color_ramp.elements[1].position = 0.5455 + colorramp_4.color_ramp.elements[1].color = [1.0000, 0.0000, 0.0112, 1.0000] + + mix_7 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": colorramp_2.outputs["Color"], + "Color1": (0.7384, 0.5239, 0.2703, 1.0000), + "Color2": colorramp_4.outputs["Color"], + }, + ) + + mix_6 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": colorramp_1.outputs["Color"], + "Color1": mix_7, + "Color2": (0.0000, 0.0000, 0.0000, 1.0000), + }, + ) + + mix_5 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": add_4, + "Color1": (0.0000, 0.0000, 0.0000, 1.0000), + "Color2": mix_6, + }, + ) + + mix_2 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": colorramp.outputs["Color"], + "Color1": mix_5, + "Color2": (0.0000, 0.0000, 0.0000, 1.0000), + }, + ) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={"Base Color": mix_2, "Specular": 0.0000, "Roughness": 0.0000}, + ) + + principled_bsdf_1 = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Specular": 1.0000, + "Roughness": 0.0000, + "IOR": 1.3500, + "Transmission": 1.0000, + }, + ) + + transparent_bsdf = nw.new_node(Nodes.TransparentBSDF) + + mix_shader_1 = nw.new_node( + Nodes.MixShader, + input_kwargs={"Fac": 0.1577, 1: principled_bsdf_1, 2: transparent_bsdf}, + ) + + mix_shader = nw.new_node( + Nodes.MixShader, + input_kwargs={ + "Fac": attribute_2.outputs["Color"], + 1: principled_bsdf, + 2: mix_shader_1, + }, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, + input_kwargs={"Surface": mix_shader}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_eyeball_eyelid_inner", singleton=False, type="GeometryNodeTree" +) +def nodegroup_eyeball_eyelid_inner(nw: 
NodeWrangler): + # Code generated using version 2.6.3 of the node_transpiler + + group_input_2 = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "EyeRot", 0.5000), + ("NodeSocketVector", "EyelidCircleShape(W, H)", (0.0000, 0.0000, 0.0000)), + ( + "NodeSocketVector", + "EyelidRadiusShape(Out, In1, In2)", + (0.0000, 0.0000, 0.0000), + ), + ( + "NodeSocketVector", + "EyelidResolution(Circle, Radius)", + (0.0000, 0.0000, 0.0000), + ), + ( + "NodeSocketVector", + "CorneaScale(W, H, Thickness)", + (0.0000, 0.0000, 0.0000), + ), + ( + "NodeSocketVector", + "EyeballResolution(White, Cornea)", + (0.0000, 0.0000, 0.0000), + ), + ("NodeSocketVectorXYZ", "Scale", (1.0000, 1.0000, 1.0000)), + ], + ) + + separate_xyz_6 = nw.new_node( + Nodes.SeparateXYZ, + input_kwargs={"Vector": group_input_2.outputs["CorneaScale(W, H, Thickness)"]}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input_2.outputs["EyeRot"], 1: 0.0175}, + attrs={"operation": "MULTIPLY"}, + ) + + separate_xyz_7 = nw.new_node( + Nodes.SeparateXYZ, + input_kwargs={ + "Vector": group_input_2.outputs["EyeballResolution(White, Cornea)"] + }, + ) + + eyeball = nw.new_node( + nodegroup_eye_ball().name, + input_kwargs={ + "CorneaScaleX": separate_xyz_6.outputs["X"], + "Height": separate_xyz_6.outputs["Y"], + "CorneaScaleZ": separate_xyz_6.outputs["Z"], + "Y": multiply, + "EyeballResolution": separate_xyz_7.outputs["X"], + "CorneaResolution": separate_xyz_7.outputs["Y"], + }, + ) + + join_geometry_2 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={ + "Geometry": [eyeball.outputs["Cornea"], eyeball.outputs["Eyeball"]] + }, + ) + + set_material_1 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": join_geometry_2, + "Material": surface.shaderfunc_to_material(shader_eyeball_tiger), + }, + ) + + value_5 = nw.new_node(Nodes.Value) + value_5.outputs[0].default_value = 1.5000 + + transform_2 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": set_material_1, + "Translation": (0.0000, -1.3500, -0.0500), + "Scale": value_5, + }, + ) + + position_2 = nw.new_node(Nodes.InputPosition) + + store_named_attribute = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": transform_2, + "Name": "EyeballPosition", + 2: position_2, + }, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + join_geometry_3 = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": store_named_attribute} + ) + + separate_xyz_3 = nw.new_node( + Nodes.SeparateXYZ, + input_kwargs={"Vector": group_input_2.outputs["EyelidCircleShape(W, H)"]}, + ) + + separate_xyz_5 = nw.new_node( + Nodes.SeparateXYZ, + input_kwargs={ + "Vector": group_input_2.outputs["EyelidResolution(Circle, Radius)"] + }, + ) + + eyelidcircle = nw.new_node( + nodegroup_eyelid_circle().name, + input_kwargs={ + "ShapeW": separate_xyz_3.outputs["X"], + "ShapeH": separate_xyz_3.outputs["Y"], + "Resolution": separate_xyz_5.outputs["X"], + }, + ) + + value_1 = nw.new_node(Nodes.Value) + value_1.outputs[0].default_value = 0.6000 + + transform_1 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": eyelidcircle.outputs["Geometry"], "Scale": value_1}, + ) + + separate_xyz_4 = nw.new_node( + Nodes.SeparateXYZ, + input_kwargs={ + "Vector": group_input_2.outputs["EyelidRadiusShape(Out, In1, In2)"] + }, + ) + + eyelidradis = nw.new_node( + nodegroup_eyelid_radius().name, + input_kwargs={ + "OuterControl": separate_xyz_4.outputs["X"], + "InnerControl1": separate_xyz_4.outputs["Y"], + "InnerControl2": separate_xyz_4.outputs["Z"], + 
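+            # Radius-profile resolution is the Y component of the packed EyelidResolution(Circle, Radius) input.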
"Resolution": separate_xyz_5.outputs["Y"], + }, + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": transform_1, + "Profile Curve": eyelidradis.outputs["Geometry"], + }, + ) + + noise_texture = nw.new_node(Nodes.NoiseTexture, input_kwargs={"Scale": 0.7000}) + + subtract = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: noise_texture.outputs["Fac"], 1: (0.5000, 0.5000, 0.5000)}, + attrs={"operation": "SUBTRACT"}, + ) + + normal = nw.new_node(Nodes.InputNormal) + + multiply_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: subtract.outputs["Vector"], 1: normal}, + attrs={"operation": "MULTIPLY"}, + ) + + value_2 = nw.new_node(Nodes.Value) + value_2.outputs[0].default_value = 0.1000 + + multiply_2 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: multiply_1.outputs["Vector"], 1: value_2}, + attrs={"operation": "MULTIPLY"}, + ) + + set_position_1 = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": curve_to_mesh, + "Offset": multiply_2.outputs["Vector"], + }, + ) + + separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": eyelidcircle}) + + less_than = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["Z"], 1: 0.0000}, + attrs={"operation": "LESS_THAN"}, + ) + + absolute = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["X"], 1: 0.0000}, + attrs={"operation": "ABSOLUTE"}, + ) + + subtract_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: -0.0000, 1: absolute}, + attrs={"operation": "SUBTRACT", "use_clamp": True}, + ) + + multiply_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: less_than, 1: subtract_1}, + attrs={"operation": "MULTIPLY"}, + ) + + greater_than = nw.new_node( + Nodes.Math, + input_kwargs={0: eyelidradis, 1: 0.6000}, + attrs={"operation": "GREATER_THAN"}, + ) + + multiply_4 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_3, 1: greater_than}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_5 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_4, 1: -1.2000}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Y": multiply_5}) + + set_position_2 = nw.new_node( + Nodes.SetPosition, + input_kwargs={"Geometry": set_position_1, "Offset": combine_xyz_2}, + ) + + transform_3 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": set_position_2, + "Scale": group_input_2.outputs["Scale"], + }, + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": transform_3} + ) + + position = nw.new_node(Nodes.InputPosition) + + separate_xyz_2 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": position}) + + cosine = nw.new_node( + Nodes.Math, input_kwargs={0: multiply}, attrs={"operation": "COSINE"} + ) + + multiply_6 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_2.outputs["X"], 1: cosine}, + attrs={"operation": "MULTIPLY"}, + ) + + sine = nw.new_node( + Nodes.Math, input_kwargs={0: multiply}, attrs={"operation": "SINE"} + ) + + multiply_7 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_2.outputs["Z"], 1: sine}, + attrs={"operation": "MULTIPLY"}, + ) + + subtract_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_6, 1: multiply_7}, + attrs={"operation": "SUBTRACT"}, + ) + + multiply_8 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_2.outputs["Z"], 1: cosine}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_9 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_2.outputs["X"], 1: sine}, + attrs={"operation": 
"MULTIPLY"}, + ) + + add = nw.new_node(Nodes.Math, input_kwargs={0: multiply_8, 1: multiply_9}) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": subtract_2, "Y": separate_xyz_2.outputs["Y"], "Z": add}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={"Geometry": join_geometry_1, "Position": combine_xyz}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Eyeball": join_geometry_3, "Eyelid": set_position}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_append_eye", singleton=False, type="GeometryNodeTree" +) +def nodegroup_append_eye(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Target Geometry", None), + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketVector", "Translation", (0.0, 0.0, 0.0)), + ("NodeSocketFloat", "Scale", 0.0), + ("NodeSocketVectorEuler", "Rotation", (0.1745, 0.0, -1.3963)), + ("NodeSocketVector", "Ray Direction", (-1.0, 0.0, 0.0)), + ("NodeSocketFloat", "Default Offset", -0.002), + ], + ) + + position = nw.new_node(Nodes.InputPosition) + + store_named_attribute = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + "Name": "pos", + 2: position, + }, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": store_named_attribute, + "Translation": group_input.outputs["Translation"], + "Rotation": group_input.outputs["Rotation"], + "Scale": group_input.outputs["Scale"], + }, + ) + + raycastappend = nw.new_node( + nodegroup_raycast_append().name, + input_kwargs={ + "Geometry": transform, + "Target Geometry": group_input.outputs["Target Geometry"], + "Ray Direction": group_input.outputs["Ray Direction"], + "Default Offset": group_input.outputs["Default Offset"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": raycastappend} + ) + + +@node_utils.to_nodegroup( + "nodegroup_eye_sockets", singleton=False, type="GeometryNodeTree" +) +def nodegroup_eye_sockets(nw: NodeWrangler): + # Code generated using version 2.6.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Skin Mesh", None), + ("NodeSocketGeometry", "Skeleton Curve", None), + ("NodeSocketGeometry", "Base Mesh", None), + ("NodeSocketVector", "Length/Yaw/Rad", (0.5000, 0.0000, 1.0000)), + ("NodeSocketVector", "Part Rot", (0.0000, 0.0000, 53.7000)), + ("NodeSocketVectorXYZ", "Scale", (2.0000, 2.0000, 2.0000)), + ], + ) + + eyehole_cutter = nw.new_node( + nodegroup_simple_tube().name, + input_kwargs={ + "Origin": (-0.1000, 0.0000, 0.0000), + "Angles Deg": (0.0000, 0.0000, 0.0000), + "Seg Lengths": (0.0500, 0.0500, 0.0900), + "Start Radius": 0.0200, + "Fullness": 0.3000, + }, + label="Eyehole Cutter", + ) + + part_surface_simple = nw.new_node( + nodegroup_part_surface_simple().name, + input_kwargs={ + "Skeleton Curve": group_input.outputs["Skeleton Curve"], + "Skin Mesh": group_input.outputs["Base Mesh"], + "Length, Yaw, Rad": group_input.outputs["Length/Yaw/Rad"], + }, + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": eyehole_cutter.outputs["Geometry"], + "Translation": part_surface_simple.outputs["Position"], + "Rotation": group_input.outputs["Part Rot"], + "Scale": group_input.outputs["Scale"], + }, + ) + + symmetric_clone = 
nw.new_node( + nodegroup_symmetric_clone().name, input_kwargs={"Geometry": transform} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": group_input.outputs["Skin Mesh"], + "Mesh": symmetric_clone.outputs["Both"], + "Position": part_surface_simple.outputs["Position"], + }, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_simple_tube_v2", singleton=False, type="GeometryNodeTree" +) +def nodegroup_simple_tube_v2(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVector", "length_rad1_rad2", (1.0, 0.5, 0.3)), + ("NodeSocketVector", "angles_deg", (0.0, 0.0, 0.0)), + ("NodeSocketVector", "proportions", (0.3333, 0.3333, 0.3333)), + ("NodeSocketFloat", "aspect", 1.0), + ("NodeSocketBool", "do_bezier", True), + ("NodeSocketFloat", "fullness", 4.0), + ("NodeSocketVector", "Origin", (0.0, 0.0, 0.0)), + ], + ) + + vector_sum = nw.new_node( + nodegroup_vector_sum().name, + input_kwargs={"Vector": group_input.outputs["proportions"]}, + ) + + divide = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: group_input.outputs["proportions"], 1: vector_sum}, + attrs={"operation": "DIVIDE"}, + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, + input_kwargs={"Vector": group_input.outputs["length_rad1_rad2"]}, + ) + + scale = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: divide.outputs["Vector"], "Scale": separate_xyz.outputs["X"]}, + attrs={"operation": "SCALE"}, + ) + + polarbezier = nw.new_node( + nodegroup_polar_bezier().name, + input_kwargs={ + "Resolution": 25, + "Origin": group_input.outputs["Origin"], + "angles_deg": group_input.outputs["angles_deg"], + "Seg Lengths": scale.outputs["Vector"], + "Do Bezier": group_input.outputs["do_bezier"], + }, + ) + + aspect_to_dim = nw.new_node( + nodegroup_aspect_to_dim().name, + input_kwargs={"Aspect Ratio": group_input.outputs["aspect"]}, + ) + + position = nw.new_node(Nodes.InputPosition) + + multiply = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: aspect_to_dim, 1: position}, + attrs={"operation": "MULTIPLY"}, + ) + + warped_circle_curve = nw.new_node( + nodegroup_warped_circle_curve().name, + input_kwargs={"Position": multiply.outputs["Vector"], "Vertices": 40}, + ) + + smoothtaper = nw.new_node( + nodegroup_smooth_taper().name, + input_kwargs={ + "start_rad": separate_xyz.outputs["Y"], + "end_rad": separate_xyz.outputs["Z"], + "fullness": group_input.outputs["fullness"], + }, + ) + + profilepart = nw.new_node( + nodegroup_profile_part().name, + input_kwargs={ + "Skeleton Curve": polarbezier.outputs["Curve"], + "Profile Curve": warped_circle_curve, + "Radius Func": smoothtaper, + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": profilepart, + "Skeleton Curve": polarbezier.outputs["Curve"], + "Endpoint": polarbezier.outputs["Endpoint"], + }, + ) + + +@node_utils.to_nodegroup( + "nodegroup_surface_muscle", singleton=False, type="GeometryNodeTree" +) +def nodegroup_surface_muscle(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Skin Mesh", None), + ("NodeSocketGeometry", "Skeleton Curve", None), + ("NodeSocketVector", "Coord 0", (0.4, 0.0, 1.0)), + ("NodeSocketVector", "Coord 1", (0.5, 0.0, 1.0)), + ("NodeSocketVector", "Coord 2", (0.6, 0.0, 1.0)), + ("NodeSocketVector", "StartRad, EndRad, Fullness", 
(0.0, 0.0, 0.0)), + ("NodeSocketVector", "ProfileHeight, StartTilt, EndTilt", (0.0, 0.0, 0.0)), + ("NodeSocketBool", "Debug Points", False), + ], + ) + + cube = nw.new_node(Nodes.MeshCube, input_kwargs={"Size": (0.03, 0.03, 0.03)}) + + part_surface_simple = nw.new_node( + nodegroup_part_surface_simple().name, + input_kwargs={ + "Skeleton Curve": group_input.outputs["Skeleton Curve"], + "Skin Mesh": group_input.outputs["Skin Mesh"], + "Length, Yaw, Rad": group_input.outputs["Coord 0"], + }, + ) + + transform_2 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": cube, + "Translation": part_surface_simple.outputs["Position"], + }, + ) + + part_surface_simple_1 = nw.new_node( + nodegroup_part_surface_simple().name, + input_kwargs={ + "Skeleton Curve": group_input.outputs["Skeleton Curve"], + "Skin Mesh": group_input.outputs["Skin Mesh"], + "Length, Yaw, Rad": group_input.outputs["Coord 1"], + }, + ) + + transform_1 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": cube, + "Translation": part_surface_simple_1.outputs["Position"], + }, + ) + + part_surface_simple_2 = nw.new_node( + nodegroup_part_surface_simple().name, + input_kwargs={ + "Skeleton Curve": group_input.outputs["Skeleton Curve"], + "Skin Mesh": group_input.outputs["Skin Mesh"], + "Length, Yaw, Rad": group_input.outputs["Coord 2"], + }, + ) + + transform_3 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": cube, + "Translation": part_surface_simple_2.outputs["Position"], + }, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [transform_2, transform_1, transform_3]}, + ) + + switch = nw.new_node( + Nodes.Switch, + input_kwargs={1: group_input.outputs["Debug Points"], 15: join_geometry}, + ) + + set_material = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": switch.outputs[6], + "Material": surface.shaderfunc_to_material(shader_material), + }, + ) + + u_resolution = nw.new_node( + Nodes.Integer, label="U Resolution", attrs={"integer": 16} + ) + u_resolution.integer = 16 + + quadratic_bezier = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Resolution": u_resolution, + "Start": part_surface_simple.outputs["Position"], + "Middle": part_surface_simple_1.outputs["Position"], + "End": part_surface_simple_2.outputs["Position"], + }, + ) + + spline_parameter = nw.new_node(Nodes.SplineParameter) + + separate_xyz_1 = nw.new_node( + Nodes.SeparateXYZ, + input_kwargs={ + "Vector": group_input.outputs["ProfileHeight, StartTilt, EndTilt"] + }, + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": spline_parameter.outputs["Factor"], + 3: separate_xyz_1.outputs["Y"], + 4: separate_xyz_1.outputs["Z"], + }, + ) + + deg2rad = nw.new_node( + nodegroup_deg2_rad().name, input_kwargs={"Deg": map_range_1.outputs["Result"]} + ) + + set_curve_tilt = nw.new_node( + Nodes.SetCurveTilt, input_kwargs={"Curve": quadratic_bezier, "Tilt": deg2rad} + ) + + position = nw.new_node(Nodes.InputPosition) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": separate_xyz_1.outputs["X"], "Y": 1.0, "Z": 1.0}, + ) + + multiply = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: position, 1: combine_xyz}, + attrs={"operation": "MULTIPLY"}, + ) + + v_resolution = nw.new_node( + Nodes.Integer, label="V resolution", attrs={"integer": 10} + ) + v_resolution.integer = 10 + + warped_circle_curve = nw.new_node( + nodegroup_warped_circle_curve().name, + input_kwargs={"Position": multiply.outputs["Vector"], "Vertices": v_resolution}, + 
) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, + input_kwargs={"Vector": group_input.outputs["StartRad, EndRad, Fullness"]}, + ) + + smoothtaper = nw.new_node( + nodegroup_smooth_taper().name, + input_kwargs={ + "start_rad": separate_xyz.outputs["X"], + "end_rad": separate_xyz.outputs["Y"], + "fullness": separate_xyz.outputs["Z"], + }, + ) + + profilepart = nw.new_node( + nodegroup_profile_part().name, + input_kwargs={ + "Skeleton Curve": set_curve_tilt, + "Profile Curve": warped_circle_curve, + "Radius Func": smoothtaper, + }, + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [set_material, profilepart]} + ) + + switch_1 = nw.new_node(Nodes.Switch, input_kwargs={1: True, 15: join_geometry_1}) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": switch_1.outputs[6]} + ) + + +@node_utils.to_nodegroup( + "nodegroup_simple_tube", singleton=False, type="GeometryNodeTree" +) +def nodegroup_simple_tube(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVector", "Origin", (0.0, 0.0, 0.0)), + ("NodeSocketVector", "Angles Deg", (30.0, -1.5, 11.0)), + ("NodeSocketVector", "Seg Lengths", (0.02, 0.02, 0.02)), + ("NodeSocketFloat", "Start Radius", 0.06), + ("NodeSocketFloat", "End Radius", 0.03), + ("NodeSocketFloat", "Fullness", 8.17), + ("NodeSocketBool", "Do Bezier", True), + ("NodeSocketFloat", "Aspect Ratio", 1.0), + ], + ) + + polarbezier = nw.new_node( + nodegroup_polar_bezier().name, + input_kwargs={ + "Resolution": 25, + "Origin": group_input.outputs["Origin"], + "angles_deg": group_input.outputs["Angles Deg"], + "Seg Lengths": group_input.outputs["Seg Lengths"], + "Do Bezier": group_input.outputs["Do Bezier"], + }, + ) + + aspect_to_dim = nw.new_node( + nodegroup_aspect_to_dim().name, + input_kwargs={"Aspect Ratio": group_input.outputs["Aspect Ratio"]}, + ) + + position = nw.new_node(Nodes.InputPosition) + + multiply = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: aspect_to_dim, 1: position}, + attrs={"operation": "MULTIPLY"}, + ) + + warped_circle_curve = nw.new_node( + nodegroup_warped_circle_curve().name, + input_kwargs={"Position": multiply.outputs["Vector"], "Vertices": 40}, + ) + + smoothtaper = nw.new_node( + nodegroup_smooth_taper().name, + input_kwargs={ + "start_rad": group_input.outputs["Start Radius"], + "end_rad": group_input.outputs["End Radius"], + "fullness": group_input.outputs["Fullness"], + }, + ) + + profilepart = nw.new_node( + nodegroup_profile_part().name, + input_kwargs={ + "Skeleton Curve": polarbezier.outputs["Curve"], + "Profile Curve": warped_circle_curve, + "Radius Func": smoothtaper, + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": profilepart, + "Skeleton Curve": polarbezier.outputs["Curve"], + "Endpoint": polarbezier.outputs["Endpoint"], + }, + ) + + +@node_utils.to_nodegroup( + "nodegroup_smooth_taper", singleton=False, type="GeometryNodeTree" +) +def nodegroup_smooth_taper(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + spline_parameter = nw.new_node(Nodes.SplineParameter) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: spline_parameter.outputs["Factor"], 1: 3.1416}, + attrs={"operation": "MULTIPLY"}, + ) + + sine = nw.new_node( + Nodes.Math, input_kwargs={0: multiply}, attrs={"operation": "SINE"} + ) + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + 
("NodeSocketFloat", "start_rad", 0.29), + ("NodeSocketFloat", "end_rad", 0.0), + ("NodeSocketFloat", "fullness", 2.5), + ], + ) + + divide = nw.new_node( + Nodes.Math, + input_kwargs={0: 1.0, 1: group_input.outputs["fullness"]}, + attrs={"operation": "DIVIDE"}, + ) + + power = nw.new_node( + Nodes.Math, input_kwargs={0: sine, 1: divide}, attrs={"operation": "POWER"} + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": spline_parameter.outputs["Factor"], + 3: group_input.outputs["start_rad"], + 4: group_input.outputs["end_rad"], + }, + attrs={"clamp": False}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: power, 1: map_range.outputs["Result"]}, + attrs={"operation": "MULTIPLY"}, + ) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Value": multiply_1}) + + +@node_utils.to_nodegroup( + "nodegroup_warped_circle_curve", singleton=False, type="GeometryNodeTree" +) +def nodegroup_warped_circle_curve(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVector", "Position", (0.0, 0.0, 0.0)), + ("NodeSocketInt", "Vertices", 32), + ], + ) + + mesh_circle = nw.new_node( + Nodes.MeshCircle, input_kwargs={"Vertices": group_input.outputs["Vertices"]} + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": mesh_circle, + "Position": group_input.outputs["Position"], + }, + ) + + mesh_to_curve = nw.new_node(Nodes.MeshToCurve, input_kwargs={"Mesh": set_position}) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Curve": mesh_to_curve}) + + +@node_utils.to_nodegroup( + "nodegroup_profile_part", singleton=False, type="GeometryNodeTree" +) +def nodegroup_profile_part(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Skeleton Curve", None), + ("NodeSocketGeometry", "Profile Curve", None), + ("NodeSocketFloatDistance", "Radius Func", 1.0), + ], + ) + + set_curve_radius = nw.new_node( + Nodes.SetCurveRadius, + input_kwargs={ + "Curve": group_input.outputs["Skeleton Curve"], + "Radius": group_input.outputs["Radius Func"], + }, + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": set_curve_radius, + "Profile Curve": group_input.outputs["Profile Curve"], + "Fill Caps": True, + }, + ) + + set_shade_smooth = nw.new_node( + Nodes.SetShadeSmooth, + input_kwargs={"Geometry": curve_to_mesh, "Shade Smooth": False}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_shade_smooth} + ) + + +@node_utils.to_nodegroup( + "nodegroup_polar_bezier", singleton=False, type="GeometryNodeTree" +) +def nodegroup_polar_bezier(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketIntUnsigned", "Resolution", 32), + ("NodeSocketVector", "Origin", (0.0, 0.0, 0.0)), + ("NodeSocketVector", "angles_deg", (0.0, 0.0, 0.0)), + ("NodeSocketVector", "Seg Lengths", (0.3, 0.3, 0.3)), + ("NodeSocketBool", "Do Bezier", True), + ], + ) + + mesh_line = nw.new_node(Nodes.MeshLine, input_kwargs={"Count": 4}) + + index = nw.new_node(Nodes.Index) + + deg2_rad = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: group_input.outputs["angles_deg"], "Scale": 0.0175}, + label="Deg2Rad", + attrs={"operation": "SCALE"}, + ) + + separate_xyz = 
nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": deg2_rad.outputs["Vector"]} + ) + + reroute = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": separate_xyz.outputs["X"]} + ) + + separate_xyz_1 = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": group_input.outputs["Seg Lengths"]} + ) + + polartocart = nw.new_node( + nodegroup_polar_to_cart().name, + input_kwargs={ + "Angle": reroute, + "Length": separate_xyz_1.outputs["X"], + "Origin": group_input.outputs["Origin"], + }, + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: reroute, 1: separate_xyz.outputs["Y"]} + ) + + polartocart_1 = nw.new_node( + nodegroup_polar_to_cart().name, + input_kwargs={ + "Angle": add, + "Length": separate_xyz_1.outputs["Y"], + "Origin": polartocart, + }, + ) + + add_1 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz.outputs["Z"], 1: add}) + + polartocart_2 = nw.new_node( + nodegroup_polar_to_cart().name, + input_kwargs={ + "Angle": add_1, + "Length": separate_xyz_1.outputs["Z"], + "Origin": polartocart_1, + }, + ) + + switch4 = nw.new_node( + nodegroup_switch4().name, + input_kwargs={ + "Arg": index, + "Arg == 0": group_input.outputs["Origin"], + "Arg == 1": polartocart, + "Arg == 2": polartocart_1, + "Arg == 3": polartocart_2, + }, + ) + + set_position = nw.new_node( + Nodes.SetPosition, input_kwargs={"Geometry": mesh_line, "Position": switch4} + ) + + mesh_to_curve = nw.new_node(Nodes.MeshToCurve, input_kwargs={"Mesh": set_position}) + + subdivide_curve_1 = nw.new_node( + Nodes.SubdivideCurve, + input_kwargs={ + "Curve": mesh_to_curve, + "Cuts": group_input.outputs["Resolution"], + }, + ) + + integer = nw.new_node(Nodes.Integer, attrs={"integer": 2}) + integer.integer = 2 + + bezier_segment = nw.new_node( + Nodes.CurveBezierSegment, + input_kwargs={ + "Resolution": integer, + "Start": group_input.outputs["Origin"], + "Start Handle": polartocart, + "End Handle": polartocart_1, + "End": polartocart_2, + }, + ) + + divide = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Resolution"], 1: integer}, + attrs={"operation": "DIVIDE"}, + ) + + subdivide_curve = nw.new_node( + Nodes.SubdivideCurve, input_kwargs={"Curve": bezier_segment, "Cuts": divide} + ) + + switch = nw.new_node( + Nodes.Switch, + input_kwargs={ + 1: group_input.outputs["Do Bezier"], + 14: subdivide_curve_1, + 15: subdivide_curve, + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Curve": switch.outputs[6], "Endpoint": polartocart_2}, + ) + + +@node_utils.to_nodegroup("nodegroup_solidify", singleton=False, type="GeometryNodeTree") +def nodegroup_solidify(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Mesh", None), + ("NodeSocketFloatDistance", "Distance", 0.0), + ], + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Distance"]}, + attrs={"operation": "MULTIPLY"}, + ) + + extrude_mesh = nw.new_node( + Nodes.ExtrudeMesh, + input_kwargs={ + "Mesh": group_input.outputs["Mesh"], + "Offset Scale": multiply, + "Individual": False, + }, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Distance"], 1: -0.5}, + attrs={"operation": "MULTIPLY"}, + ) + + extrude_mesh_1 = nw.new_node( + Nodes.ExtrudeMesh, + input_kwargs={ + "Mesh": group_input.outputs["Mesh"], + "Offset Scale": multiply_1, + "Individual": False, + }, + ) + + flip_faces = nw.new_node( + Nodes.FlipFaces, 
input_kwargs={"Mesh": extrude_mesh_1.outputs["Mesh"]} + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [extrude_mesh.outputs["Mesh"], flip_faces]}, + ) + + merge_by_distance = nw.new_node( + Nodes.MergeByDistance, input_kwargs={"Geometry": join_geometry, "Distance": 0.0} + ) + + set_shade_smooth = nw.new_node( + Nodes.SetShadeSmooth, + input_kwargs={"Geometry": merge_by_distance, "Shade Smooth": False}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_shade_smooth} + ) + + +@node_utils.to_nodegroup("nodegroup_taper", singleton=False, type="GeometryNodeTree") +def nodegroup_taper(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketVector", "Start", (1.0, 0.63, 0.72)), + ("NodeSocketVector", "End", (1.0, 1.0, 1.0)), + ], + ) + + position = nw.new_node(Nodes.InputPosition) + + separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": position}) + + attribute_statistic = nw.new_node( + Nodes.AttributeStatistic, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + 2: separate_xyz.outputs["X"], + }, + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Vector": separate_xyz.outputs["X"], + 7: attribute_statistic.outputs["Min"], + 8: attribute_statistic.outputs["Max"], + 9: group_input.outputs["Start"], + 10: group_input.outputs["End"], + }, + attrs={"data_type": "FLOAT_VECTOR", "clamp": False}, + ) + + multiply = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: position, 1: map_range.outputs["Vector"]}, + attrs={"operation": "MULTIPLY"}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + "Position": multiply.outputs["Vector"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_position} + ) + + +@node_utils.to_nodegroup( + "nodegroup_raycast_rotation", singleton=False, type="GeometryNodeTree" +) +def nodegroup_raycast_rotation(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVectorEuler", "Rotation", (0.0, 0.0, 0.0)), + ("NodeSocketVector", "Hit Normal", (0.0, 0.0, 1.0)), + ("NodeSocketVector", "Curve Tangent", (0.0, 0.0, 1.0)), + ("NodeSocketBool", "Do Normal Rot", False), + ("NodeSocketBool", "Do Tangent Rot", False), + ], + ) + + align_euler_to_vector = nw.new_node( + Nodes.AlignEulerToVector, + input_kwargs={"Vector": group_input.outputs["Hit Normal"]}, + ) + + rotate_euler = nw.new_node( + Nodes.RotateEuler, + input_kwargs={ + "Rotation": group_input.outputs["Rotation"], + "Rotate By": align_euler_to_vector, + }, + ) + + if_normal_rot = nw.new_node( + Nodes.Switch, + input_kwargs={ + 0: group_input.outputs["Do Normal Rot"], + 8: group_input.outputs["Rotation"], + 9: rotate_euler, + }, + label="if_normal_rot", + attrs={"input_type": "VECTOR"}, + ) + + align_euler_to_vector_1 = nw.new_node( + Nodes.AlignEulerToVector, + input_kwargs={ + "Rotation": group_input.outputs["Rotation"], + "Vector": group_input.outputs["Curve Tangent"], + }, + ) + + rotate_euler_1 = nw.new_node( + Nodes.RotateEuler, + input_kwargs={ + "Rotation": align_euler_to_vector_1, + "Rotate By": group_input.outputs["Rotation"], + }, + attrs={"space": "LOCAL"}, + ) + + if_tangent_rot = nw.new_node( + Nodes.Switch, + input_kwargs={ + 
0: group_input.outputs["Do Tangent Rot"], + 8: if_normal_rot.outputs[3], + 9: rotate_euler_1, + }, + label="if_tangent_rot", + attrs={"input_type": "VECTOR"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Output": if_tangent_rot.outputs[3]} + ) + + +@node_utils.to_nodegroup( + "nodegroup_part_surface", singleton=False, type="GeometryNodeTree" +) +def nodegroup_part_surface(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Skeleton Curve", None), + ("NodeSocketGeometry", "Skin Mesh", None), + ("NodeSocketFloatFactor", "Length Fac", 0.0), + ("NodeSocketVectorEuler", "Ray Rot", (0.0, 0.0, 0.0)), + ("NodeSocketFloat", "Rad", 0.0), + ], + ) + + sample_curve = nw.new_node( + Nodes.SampleCurve, + input_kwargs={ + "Curve": group_input.outputs["Skeleton Curve"], + "Factor": group_input.outputs["Length Fac"], + }, + attrs={"mode": "FACTOR"}, + ) + + vector_rotate = nw.new_node( + Nodes.VectorRotate, + input_kwargs={ + "Vector": sample_curve.outputs["Tangent"], + "Rotation": group_input.outputs["Ray Rot"], + }, + attrs={"rotation_type": "EULER_XYZ"}, + ) + + raycast = nw.new_node( + Nodes.Raycast, + input_kwargs={ + "Target Geometry": group_input.outputs["Skin Mesh"], + "Source Position": sample_curve.outputs["Position"], + "Ray Direction": vector_rotate, + "Ray Length": 5.0, + }, + ) + + lerp = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Vector": group_input.outputs["Rad"], + 9: sample_curve.outputs["Position"], + 10: raycast.outputs["Hit Position"], + }, + label="lerp", + attrs={"data_type": "FLOAT_VECTOR", "clamp": False}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Position": lerp.outputs["Vector"], + "Hit Normal": raycast.outputs["Hit Normal"], + "Tangent": sample_curve.outputs["Tangent"], + "Skeleton Pos": sample_curve.outputs["Position"], + }, + ) + + +@node_utils.to_nodegroup( + "nodegroup_eyeball_eyelid", singleton=False, type="GeometryNodeTree" +) +def nodegroup_eyeball_eyelid(nw: NodeWrangler): + # Code generated using version 2.6.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Skin Mesh", None), + ("NodeSocketGeometry", "Base Mesh", None), + ("NodeSocketGeometry", "Skeleton Curve", None), + ("NodeSocketVector", "Length/Yaw/Rad", (0.5000, 0.0000, 1.0000)), + ("NodeSocketGeometry", "Target Geometry", None), + ("NodeSocketFloat", "EyeRot", -23.0000), + ("NodeSocketVector", "EyelidCircleShape(W, H)", (2.0000, 1.4500, 0.0000)), + ( + "NodeSocketVector", + "EyelidRadiusShape(Out, In1, In2)", + (0.4000, 5.3000, 0.4000), + ), + ( + "NodeSocketVector", + "EyelidResolution(Circle, Radius)", + (32.0000, 32.0000, 0.0000), + ), + ( + "NodeSocketVector", + "CorneaScale(W, H, Thickness)", + (0.8000, 0.8000, 0.5500), + ), + ( + "NodeSocketVector", + "EyeballResolution(White, Cornea)", + (32.0000, 128.0000, 0.0000), + ), + ("NodeSocketVector", "OffsetPreAppending", (0.0120, 0.0000, 0.0000)), + ("NodeSocketFloat", "Scale", 1.0), + ("NodeSocketVectorEuler", "Rotation", (0.1745, 0.0000, -1.3963)), + ("NodeSocketVector", "RayDirection", (-1.0000, 0.0000, 0.0000)), + ("NodeSocketFloat", "DefaultAppendDistance", -0.0020), + ("NodeSocketVector", "EyeSocketRot", (0.0000, 0.0000, 0.0000)), + ("NodeSocketVectorXYZ", "EyelidScale", (1.0000, 1.0000, 1.0000)), + ], + ) + + eyesockets = nw.new_node( + nodegroup_eye_sockets().name, + input_kwargs={ + "Skin Mesh": 
group_input.outputs["Skin Mesh"], + "Skeleton Curve": group_input.outputs["Skeleton Curve"], + "Base Mesh": group_input.outputs["Base Mesh"], + "Length/Yaw/Rad": group_input.outputs["Length/Yaw/Rad"], + "Part Rot": group_input.outputs["EyeSocketRot"], + "Scale": group_input.outputs["Scale"], + }, + ) + + # transform = nw.new_node(Nodes.Transform, + # input_kwargs={'Geometry': eyesockets.outputs["Mesh"], 'Scale': group_input.outputs["Scale"]}) + + tigereyeinner = nw.new_node( + nodegroup_eyeball_eyelid_inner().name, + input_kwargs={ + "EyeRot": group_input.outputs["EyeRot"], + "EyelidCircleShape(W, H)": group_input.outputs["EyelidCircleShape(W, H)"], + "EyelidRadiusShape(Out, In1, In2)": group_input.outputs[ + "EyelidRadiusShape(Out, In1, In2)" + ], + "EyelidResolution(Circle, Radius)": group_input.outputs[ + "EyelidResolution(Circle, Radius)" + ], + "CorneaScale(W, H, Thickness)": group_input.outputs[ + "CorneaScale(W, H, Thickness)" + ], + "EyeballResolution(White, Cornea)": group_input.outputs[ + "EyeballResolution(White, Cornea)" + ], + "Scale": group_input.outputs["EyelidScale"], + }, + ) + + add = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: eyesockets.outputs["Position"], + 1: group_input.outputs["OffsetPreAppending"], + }, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Scale"], 1: 0.0170}, + attrs={"operation": "MULTIPLY"}, + ) + + appendeye = nw.new_node( + nodegroup_append_eye().name, + input_kwargs={ + "Target Geometry": group_input.outputs["Target Geometry"], + "Geometry": tigereyeinner.outputs["Eyeball"], + "Translation": add, + "Scale": multiply, + "Rotation": group_input.outputs["Rotation"], + "Ray Direction": group_input.outputs["RayDirection"], + "Default Offset": group_input.outputs["DefaultAppendDistance"], + }, + ) + + appendeye_1 = nw.new_node( + nodegroup_append_eye().name, + input_kwargs={ + "Target Geometry": group_input.outputs["Target Geometry"], + "Geometry": tigereyeinner.outputs["Eyelid"], + "Translation": add, + "Scale": multiply, + "Rotation": group_input.outputs["Rotation"], + "Ray Direction": group_input.outputs["RayDirection"], + "Default Offset": group_input.outputs["DefaultAppendDistance"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": None, + "ParentCutter": eyesockets.outputs["Mesh"], + "Eyeballl": appendeye, + "BodyExtra_Lid": appendeye_1, + }, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_carnivore__face_structure", singleton=False, type="GeometryNodeTree" +) +def nodegroup_carnivore__face_structure(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVector", "Skull Length Width1 Width2", (0.0, 0.0, 0.0)), + ("NodeSocketVector", "Snout Length Width1 Width2", (0.0, 0.0, 0.0)), + ("NodeSocketFloat", "Snout Y Scale", 0.62), + ("NodeSocketVectorXYZ", "Nose Bridge Scale", (1.0, 0.35, 0.9)), + ("NodeSocketVector", "Jaw Muscle Middle Coord", (0.24, 0.41, 1.3)), + ("NodeSocketVector", "Jaw StartRad, EndRad, Fullness", (0.06, 0.11, 1.5)), + ( + "NodeSocketVector", + "Jaw ProfileHeight, StartTilt, EndTilt", + (0.8, 33.1, 0.0), + ), + ("NodeSocketVector", "Lip Muscle Middle Coord", (0.95, 0.0, 1.5)), + ("NodeSocketVector", "Lip StartRad, EndRad, Fullness", (0.05, 0.09, 1.48)), + ( + "NodeSocketVector", + "Lip ProfileHeight, StartTilt, EndTilt", + (0.8, 0.0, -17.2), + ), + ("NodeSocketVector", "Forehead Muscle Middle 
Coord", (0.7, -1.32, 1.31)), + ( + "NodeSocketVector", + "Forehead StartRad, EndRad, Fullness", + (0.06, 0.05, 2.5), + ), + ( + "NodeSocketVector", + "Forehead ProfileHeight, StartTilt, EndTilt", + (0.3, 60.6, 66.0), + ), + ("NodeSocketFloat", "aspect", 1.0), + ], + ) + + vector = nw.new_node(Nodes.Vector) + vector.vector = (-0.07, 0.0, 0.05) + + simple_tube_v2 = nw.new_node( + nodegroup_simple_tube_v2().name, + input_kwargs={ + "length_rad1_rad2": group_input.outputs["Skull Length Width1 Width2"], + "angles_deg": (-5.67, 0.0, 0.0), + "aspect": group_input.outputs["aspect"], + "fullness": 3.63, + "Origin": vector, + }, + ) + + snout_origin = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: simple_tube_v2.outputs["Endpoint"], 1: (-0.1, 0.0, 0.0)}, + label="Snout Origin", + ) + + split_length_width1_width2 = nw.new_node( + Nodes.SeparateXYZ, + input_kwargs={"Vector": group_input.outputs["Snout Length Width1 Width2"]}, + label="Split Length / Width1 / Width2", + ) + + snout_seg_lengths = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: (0.33, 0.33, 0.33), + "Scale": split_length_width1_width2.outputs["X"], + }, + label="Snout Seg Lengths", + attrs={"operation": "SCALE"}, + ) + + bridge = nw.new_node( + nodegroup_simple_tube().name, + input_kwargs={ + "Origin": snout_origin.outputs["Vector"], + "Angles Deg": (-4.0, -4.5, -5.61), + "Seg Lengths": snout_seg_lengths.outputs["Vector"], + "Start Radius": 0.17, + "End Radius": 0.1, + "Fullness": 5.44, + }, + label="Bridge", + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": bridge.outputs["Geometry"], + "Translation": (0.0, 0.0, 0.03), + "Scale": group_input.outputs["Nose Bridge Scale"], + }, + ) + + snout = nw.new_node( + nodegroup_simple_tube().name, + input_kwargs={ + "Origin": snout_origin.outputs["Vector"], + "Angles Deg": (-3.0, -4.5, -5.61), + "Seg Lengths": snout_seg_lengths.outputs["Vector"], + "Start Radius": split_length_width1_width2.outputs["Y"], + "End Radius": split_length_width1_width2.outputs["Z"], + "Fullness": 2.0, + }, + label="Snout", + ) + + transform_1 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": snout.outputs["Geometry"], + "Translation": (0.0, 0.0, 0.03), + "Scale": (1.0, 0.7, 0.7), + }, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": 1.0, "Y": group_input.outputs["Snout Y Scale"], "Z": 1.0}, + ) + + transform_2 = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": transform_1, "Scale": combine_xyz} + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [transform, transform_2]} + ) + + union = nw.new_node( + Nodes.MeshBoolean, + input_kwargs={ + "Mesh 2": [join_geometry, simple_tube_v2.outputs["Geometry"]], + "Self Intersection": True, + }, + attrs={"operation": "UNION"}, + ) + + curve_line_1 = nw.new_node( + Nodes.CurveLine, + input_kwargs={"Start": vector, "End": snout.outputs["Endpoint"]}, + ) + + jaw_muscle = nw.new_node( + nodegroup_surface_muscle().name, + input_kwargs={ + "Skin Mesh": union.outputs["Mesh"], + "Skeleton Curve": curve_line_1, + "Coord 0": (0.19, -0.41, 0.78), + "Coord 1": group_input.outputs["Jaw Muscle Middle Coord"], + "Coord 2": (0.67, 1.26, 0.52), + "StartRad, EndRad, Fullness": group_input.outputs[ + "Jaw StartRad, EndRad, Fullness" + ], + "ProfileHeight, StartTilt, EndTilt": group_input.outputs[ + "Jaw ProfileHeight, StartTilt, EndTilt" + ], + }, + label="Jaw Muscle", + ) + + lip = nw.new_node( + nodegroup_surface_muscle().name, + input_kwargs={ + "Skin Mesh": 
union.outputs["Mesh"], + "Skeleton Curve": curve_line_1, + "Coord 0": (0.51, -0.13, 0.02), + "Coord 1": group_input.outputs["Lip Muscle Middle Coord"], + "Coord 2": (0.99, 10.57, 0.1), + "StartRad, EndRad, Fullness": group_input.outputs[ + "Lip StartRad, EndRad, Fullness" + ], + "ProfileHeight, StartTilt, EndTilt": group_input.outputs[ + "Lip ProfileHeight, StartTilt, EndTilt" + ], + }, + label="Lip", + ) + + forehead = nw.new_node( + nodegroup_surface_muscle().name, + input_kwargs={ + "Skin Mesh": simple_tube_v2.outputs["Geometry"], + "Skeleton Curve": simple_tube_v2.outputs["Skeleton Curve"], + "Coord 0": (0.31, -1.06, 0.97), + "Coord 1": group_input.outputs["Forehead Muscle Middle Coord"], + "Coord 2": (0.95, -1.52, 0.9), + "StartRad, EndRad, Fullness": group_input.outputs[ + "Forehead StartRad, EndRad, Fullness" + ], + "ProfileHeight, StartTilt, EndTilt": group_input.outputs[ + "Forehead ProfileHeight, StartTilt, EndTilt" + ], + }, + label="Forehead", + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [jaw_muscle, lip, forehead]} + ) + + symmetric_clone = nw.new_node( + nodegroup_symmetric_clone().name, input_kwargs={"Geometry": join_geometry_1} + ) + + scale = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: (0.33, 0.33, 0.33)}, + attrs={"operation": "SCALE"}, + ) + + jaw_cutter = nw.new_node( + nodegroup_simple_tube().name, + input_kwargs={ + "Origin": (0.0, 0.0, 0.09), + "Angles Deg": (0.0, 0.0, 0.0), + "Seg Lengths": scale.outputs["Vector"], + "Start Radius": 0.13, + }, + label="Jaw Cutter", + ) + + attach_part = nw.new_node( + nodegroup_attach_part().name, + input_kwargs={ + "Skin Mesh": union.outputs["Mesh"], + "Skeleton Curve": curve_line_1, + "Geometry": jaw_cutter.outputs["Geometry"], + "Length Fac": 0.2, + "Ray Rot": (0.0, 1.5708, 0.0), + "Rad": 1.25, + "Part Rot": (0.0, -8.5, 0.0), + "Do Tangent Rot": True, + }, + ) + + difference = nw.new_node( + Nodes.MeshBoolean, + input_kwargs={ + "Mesh 1": union.outputs["Mesh"], + "Mesh 2": attach_part.outputs["Geometry"], + "Self Intersection": True, + }, + ) + + join_geometry_2 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={ + "Geometry": [symmetric_clone.outputs["Both"], difference.outputs["Mesh"]] + }, + ) + + subdivide_curve = nw.new_node( + Nodes.SubdivideCurve, input_kwargs={"Curve": curve_line_1, "Cuts": 10} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": join_geometry_2, + "Skeleton Curve": subdivide_curve, + "Base Mesh": union.outputs["Mesh"], + "Cranium Skeleton": simple_tube_v2.outputs["Skeleton Curve"], + }, + ) + + +@node_utils.to_nodegroup("nodegroup_rotate2_d", singleton=False, type="ShaderNodeTree") +def nodegroup_rotate2_d(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "Value", 0.5), + ("NodeSocketFloat", "Value", 0.0175), + ("NodeSocketFloat", "Value2", 0.5), + ], + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs[2], 1: 0.0175}, + attrs={"operation": "MULTIPLY"}, + ) + + reroute_3 = nw.new_node(Nodes.Reroute, input_kwargs={"Input": multiply}) + + sine = nw.new_node( + Nodes.Math, input_kwargs={0: reroute_3}, attrs={"operation": "SINE"} + ) + + reroute_5 = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": group_input.outputs[1]} + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: sine, 1: reroute_5}, + attrs={"operation": "MULTIPLY"}, + ) + + reroute_4 = 
nw.new_node( + Nodes.Reroute, input_kwargs={"Input": group_input.outputs["Value"]} + ) + + cosine = nw.new_node( + Nodes.Math, input_kwargs={0: reroute_3}, attrs={"operation": "COSINE"} + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: reroute_4, 1: cosine}, + attrs={"operation": "MULTIPLY"}, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_1, 1: multiply_2}, + attrs={"operation": "SUBTRACT"}, + ) + + multiply_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: reroute_5, 1: cosine}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_4 = nw.new_node( + Nodes.Math, + input_kwargs={0: reroute_4, 1: sine}, + attrs={"operation": "MULTIPLY"}, + ) + + add = nw.new_node(Nodes.Math, input_kwargs={0: multiply_3, 1: multiply_4}) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Value": subtract, "Value1": add} + ) + + +@node_utils.to_nodegroup( + "nodegroup_carnivore_jaw", singleton=False, type="GeometryNodeTree" +) +def nodegroup_carnivore_jaw(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVector", "length_rad1_rad2", (0.0, 0.0, 0.0)), + ("NodeSocketFloatFactor", "Width Shaping", 0.6764), + ("NodeSocketFloat", "Canine Length", 0.05), + ("NodeSocketFloat", "Incisor Size", 0.01), + ("NodeSocketFloat", "Tooth Crookedness", 0.0), + ("NodeSocketFloatFactor", "Tongue Shaping", 1.0), + ("NodeSocketFloat", "Tongue X Scale", 0.9), + ], + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, + input_kwargs={"Vector": group_input.outputs["length_rad1_rad2"]}, + ) + + scale = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: (0.33, 0.33, 0.33), "Scale": separate_xyz.outputs["X"]}, + attrs={"operation": "SCALE"}, + ) + + polarbezier = nw.new_node( + nodegroup_polar_bezier().name, + input_kwargs={ + "angles_deg": (0.0, 0.0, 13.0), + "Seg Lengths": scale.outputs["Vector"], + }, + ) + + position = nw.new_node(Nodes.InputPosition) + + vector_curves = nw.new_node(Nodes.VectorCurve, input_kwargs={"Vector": position}) + node_utils.assign_curve( + vector_curves.mapping.curves[0], + [(-1.0, -1.0), (0.0036, 0.0), (0.2436, 0.21), (1.0, 1.0)], + ) + node_utils.assign_curve( + vector_curves.mapping.curves[1], + [ + (-1.0, 0.12), + (-0.7745, 0.06), + (-0.6509, -0.44), + (-0.3673, -0.4), + (-0.0545, -0.01), + (0.1055, 0.02), + (0.5273, 0.5), + (0.7964, 0.64), + (1.0, 1.0), + ], + handles=[ + "AUTO", + "AUTO", + "AUTO", + "AUTO_CLAMPED", + "AUTO", + "AUTO", + "VECTOR", + "AUTO", + "AUTO", + ], + ) + node_utils.assign_curve(vector_curves.mapping.curves[2], [(-1.0, -1.0), (1.0, 1.0)]) + + warped_circle_curve = nw.new_node( + nodegroup_warped_circle_curve().name, input_kwargs={"Position": vector_curves} + ) + + spline_parameter = nw.new_node(Nodes.SplineParameter) + + float_curve = nw.new_node( + Nodes.FloatCurve, + input_kwargs={ + "Factor": group_input.outputs["Width Shaping"], + "Value": spline_parameter.outputs["Factor"], + }, + ) + node_utils.assign_curve( + float_curve.mapping.curves[0], + [(0.0, 0.955), (0.4255, 0.785), (0.6545, 0.535), (0.9491, 0.75), (1.0, 0.595)], + handles=["AUTO", "AUTO", "AUTO", "AUTO_CLAMPED", "AUTO"], + ) + + smoothtaper = nw.new_node( + nodegroup_smooth_taper().name, + input_kwargs={ + "start_rad": separate_xyz.outputs["Y"], + "end_rad": separate_xyz.outputs["Z"], + "fullness": 2.6, + }, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: float_curve, 1: smoothtaper}, + attrs={"operation": "MULTIPLY"}, + ) + + 
profilepart = nw.new_node( + nodegroup_profile_part().name, + input_kwargs={ + "Skeleton Curve": polarbezier.outputs["Curve"], + "Profile Curve": warped_circle_curve, + "Radius Func": multiply, + }, + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": profilepart, "Scale": (1.0, 1.7, 1.0)}, + ) + + scale_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: (0.33, 0.33, 0.33), + "Scale": group_input.outputs["Canine Length"], + }, + attrs={"operation": "SCALE"}, + ) + + canine_tooth = nw.new_node( + nodegroup_simple_tube().name, + input_kwargs={ + "Seg Lengths": scale_1.outputs["Vector"], + "Start Radius": 0.015, + "End Radius": 0.003, + }, + label="Canine Tooth", + ) + + attach_part = nw.new_node( + nodegroup_attach_part().name, + input_kwargs={ + "Skin Mesh": transform, + "Skeleton Curve": polarbezier.outputs["Curve"], + "Geometry": canine_tooth.outputs["Geometry"], + "Length Fac": 0.9, + "Ray Rot": (1.5708, 0.1204, 1.5708), + "Rad": 1.0, + "Part Rot": (-17.6, -53.49, 0.0), + }, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": attach_part.outputs["Geometry"]} + ) + + symmetric_clone = nw.new_node( + nodegroup_symmetric_clone().name, input_kwargs={"Geometry": join_geometry} + ) + + add = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: attach_part.outputs["Position"], 1: (0.015, -0.05, 0.0)}, + ) + + multiply_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: add.outputs["Vector"], 1: (1.0, -1.0, 1.0)}, + attrs={"operation": "MULTIPLY"}, + ) + + add_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: add.outputs["Vector"], 1: multiply_1.outputs["Vector"]}, + ) + + multiply_add = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: add_1.outputs["Vector"], + 1: (0.5, 0.5, 0.5), + 2: (-0.02, 0.0, 0.0), + "Scale": 0.5, + }, + attrs={"operation": "MULTIPLY_ADD"}, + ) + + quadratic_bezier = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Resolution": 6, + "Start": add.outputs["Vector"], + "Middle": multiply_add.outputs["Vector"], + "End": multiply_1.outputs["Vector"], + }, + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, input_kwargs={"Curve": quadratic_bezier} + ) + + transform_1 = nw.new_node(Nodes.Transform, input_kwargs={"Geometry": curve_to_mesh}) + + scale_2 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: (3.0, 1.0, 0.6), "Scale": group_input.outputs["Incisor Size"]}, + attrs={"operation": "SCALE"}, + ) + + cube = nw.new_node(Nodes.MeshCube, input_kwargs={"Size": scale_2.outputs["Vector"]}) + + subdivision_surface = nw.new_node( + Nodes.SubdivisionSurface, input_kwargs={"Mesh": cube, "Level": 3} + ) + + transform_2 = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": subdivision_surface} + ) + + instance_on_points = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={ + "Points": transform_1, + "Instance": transform_2, + "Rotation": (0.0, -1.5708, 0.0), + }, + ) + + subtract = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: (2.0, 2.0, 2.0), 1: group_input.outputs["Tooth Crookedness"]}, + attrs={"operation": "SUBTRACT"}, + ) + + random_value = nw.new_node( + Nodes.RandomValue, + input_kwargs={ + 0: subtract.outputs["Vector"], + 1: group_input.outputs["Tooth Crookedness"], + }, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + scale_instances = nw.new_node( + Nodes.ScaleInstances, + input_kwargs={ + "Instances": instance_on_points, + "Scale": random_value.outputs["Value"], + }, + ) + + scale_3 = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: (-3.0, -3.0, -3.0), + 
"Scale": group_input.outputs["Tooth Crookedness"], + }, + attrs={"operation": "SCALE"}, + ) + + scale_4 = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: (3.0, 3.0, 3.0), + "Scale": group_input.outputs["Tooth Crookedness"], + }, + attrs={"operation": "SCALE"}, + ) + + random_value_1 = nw.new_node( + Nodes.RandomValue, + input_kwargs={0: scale_3.outputs["Vector"], 1: scale_4.outputs["Vector"]}, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + deg2rad = nw.new_node( + nodegroup_deg2_rad().name, input_kwargs={"Deg": random_value_1.outputs["Value"]} + ) + + rotate_instances = nw.new_node( + Nodes.RotateInstances, + input_kwargs={"Instances": scale_instances, "Rotation": deg2rad}, + ) + + realize_instances = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": rotate_instances} + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [symmetric_clone.outputs["Both"], realize_instances]}, + ) + + resample_curve = nw.new_node( + Nodes.ResampleCurve, input_kwargs={"Curve": polarbezier.outputs["Curve"]} + ) + + spline_parameter_1 = nw.new_node(Nodes.SplineParameter) + + float_curve_1 = nw.new_node( + Nodes.FloatCurve, + input_kwargs={ + "Factor": group_input.outputs["Tongue Shaping"], + "Value": spline_parameter_1.outputs["Factor"], + }, + ) + node_utils.assign_curve( + float_curve_1.mapping.curves[0], + [(0.0, 1.0), (0.6982, 0.55), (0.9745, 0.35), (1.0, 0.175)], + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={3: separate_xyz.outputs["Y"], 4: separate_xyz.outputs["Z"]}, + attrs={"clamp": False}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: float_curve_1, 1: map_range.outputs["Result"]}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_2, 1: 1.0}, + attrs={"operation": "MULTIPLY"}, + ) + + set_curve_radius = nw.new_node( + Nodes.SetCurveRadius, + input_kwargs={"Curve": resample_curve, "Radius": multiply_3}, + ) + + quadratic_bezier_1 = nw.new_node( + Nodes.QuadraticBezier, input_kwargs={"Resolution": 3, "Middle": (0.0, 0.7, 0.0)} + ) + + curve_to_mesh_1 = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": set_curve_radius, + "Profile Curve": quadratic_bezier_1, + "Fill Caps": True, + }, + ) + + solidify = nw.new_node( + nodegroup_solidify().name, + input_kwargs={"Mesh": curve_to_mesh_1, "Distance": 0.02}, + ) + + set_shade_smooth = nw.new_node( + Nodes.SetShadeSmooth, input_kwargs={"Geometry": solidify, "Shade Smooth": False} + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": group_input.outputs["Tongue X Scale"], "Y": 1.0, "Z": 1.0}, + ) + + transform_3 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": set_shade_smooth, + "Rotation": (0.0, -0.0159, 0.0), + "Scale": combine_xyz, + }, + ) + + subdivision_surface_1 = nw.new_node( + Nodes.SubdivisionSurface, input_kwargs={"Mesh": transform_3, "Level": 2} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": transform, + "Skeleton Curve": polarbezier.outputs["Curve"], + "Teeth": join_geometry_1, + "Tongue": subdivision_surface_1, + }, + ) + + +@node_utils.to_nodegroup("nodegroup_deg2_rad", singleton=False, type="GeometryNodeTree") +def nodegroup_deg2_rad(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketVector", "Deg", (0.0, 0.0, 0.0))] + ) + + multiply = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: 
group_input.outputs["Deg"], 1: (0.0175, 0.0175, 0.0175)}, + attrs={"operation": "MULTIPLY"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Rad": multiply.outputs["Vector"]} + ) + + +@node_utils.to_nodegroup("nodegroup_cat_ear", singleton=False, type="GeometryNodeTree") +def nodegroup_cat_ear(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVector", "length_rad1_rad2", (0.0, 0.0, 0.0)), + ("NodeSocketFloat", "Depth", 0.0), + ("NodeSocketFloatDistance", "Thickness", 0.0), + ("NodeSocketFloatDistance", "Curl Deg", 0.0), + ], + ) + + multiply = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: group_input.outputs["Curl Deg"], 1: (-1.0, 1.0, 1.0)}, + attrs={"operation": "MULTIPLY"}, + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, + input_kwargs={"Vector": group_input.outputs["length_rad1_rad2"]}, + ) + + divide = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["X"], 1: 3.0}, + attrs={"operation": "DIVIDE"}, + ) + + polarbezier = nw.new_node( + nodegroup_polar_bezier().name, + input_kwargs={ + "Origin": (-0.07, 0.0, 0.0), + "angles_deg": multiply.outputs["Vector"], + "Seg Lengths": divide, + }, + ) + + spline_parameter = nw.new_node(Nodes.SplineParameter) + + float_curve = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": spline_parameter.outputs["Factor"]} + ) + node_utils.assign_curve( + float_curve.mapping.curves[0], + [(0.0, 0.0), (0.3236, 0.98), (0.7462, 0.63), (1.0, 0.0)], + ) + + set_curve_radius = nw.new_node( + Nodes.SetCurveRadius, + input_kwargs={"Curve": polarbezier.outputs["Curve"], "Radius": float_curve}, + ) + + set_curve_tilt = nw.new_node( + Nodes.SetCurveTilt, input_kwargs={"Curve": set_curve_radius} + ) + + multiply_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: separate_xyz.outputs["Y"], 1: (-0.5, 0.0, 0.0)}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_2 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: group_input.outputs["Depth"], 1: (0.0, -1.0, 0.0)}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_3 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: separate_xyz.outputs["Y"], 1: (0.5, 0.0, 0.0)}, + attrs={"operation": "MULTIPLY"}, + ) + + quadratic_bezier = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Start": multiply_1.outputs["Vector"], + "Middle": multiply_2.outputs["Vector"], + "End": multiply_3.outputs["Vector"], + }, + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={"Curve": set_curve_tilt, "Profile Curve": quadratic_bezier}, + ) + + solidify = nw.new_node( + nodegroup_solidify().name, + input_kwargs={ + "Mesh": curve_to_mesh, + "Distance": group_input.outputs["Thickness"], + }, + ) + + merge_by_distance = nw.new_node( + Nodes.MergeByDistance, input_kwargs={"Geometry": solidify, "Distance": 0.005} + ) + + subdivision_surface = nw.new_node( + Nodes.SubdivisionSurface, input_kwargs={"Mesh": merge_by_distance} + ) + + set_shade_smooth = nw.new_node( + Nodes.SetShadeSmooth, + input_kwargs={"Geometry": subdivision_surface, "Shade Smooth": False}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Mesh": set_shade_smooth} + ) + + +@node_utils.to_nodegroup( + "nodegroup_symmetric_clone", singleton=False, type="GeometryNodeTree" +) +def nodegroup_symmetric_clone(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + 
("NodeSocketGeometry", "Geometry", None), + ("NodeSocketVectorXYZ", "Scale", (1.0, -1.0, 1.0)), + ], + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + "Scale": group_input.outputs["Scale"], + }, + ) + + flip_faces = nw.new_node(Nodes.FlipFaces, input_kwargs={"Mesh": transform}) + + join_geometry_2 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [group_input.outputs["Geometry"], flip_faces]}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Both": join_geometry_2, + "Orig": group_input.outputs["Geometry"], + "Inverted": flip_faces, + }, + ) + + +@node_utils.to_nodegroup("nodegroup_cat_nose", singleton=False, type="GeometryNodeTree") +def nodegroup_cat_nose(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloatDistance", "Nose Radius", 0.06), + ("NodeSocketFloatDistance", "Nostril Size", 0.025), + ("NodeSocketFloatFactor", "Crease", 0.008), + ("NodeSocketVectorXYZ", "Scale", (1.2, 1.0, 1.0)), + ], + ) + + cube = nw.new_node( + Nodes.MeshCube, input_kwargs={"Size": group_input.outputs["Nose Radius"]} + ) + + subdivision_surface = nw.new_node( + Nodes.SubdivisionSurface, input_kwargs={"Mesh": cube, "Level": 3} + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": subdivision_surface, + "Scale": group_input.outputs["Scale"], + }, + ) + + uv_sphere = nw.new_node( + Nodes.MeshUVSphere, input_kwargs={"Radius": group_input.outputs["Nostril Size"]} + ) + + transform_1 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": uv_sphere, + "Translation": (0.04, 0.025, 0.015), + "Rotation": (0.5643, 0.0, 0.0), + "Scale": (1.0, 0.87, 0.31), + }, + ) + + symmetric_clone = nw.new_node( + nodegroup_symmetric_clone().name, input_kwargs={"Geometry": transform_1} + ) + + difference = nw.new_node( + Nodes.MeshBoolean, + input_kwargs={ + "Mesh 1": transform, + "Mesh 2": symmetric_clone.outputs["Both"], + "Self Intersection": True, + }, + ) + + taper = nw.new_node( + nodegroup_taper().name, input_kwargs={"Geometry": difference.outputs["Mesh"]} + ) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Geometry": taper}) + + +@node_utils.to_nodegroup( + "nodegroup_attach_part", singleton=False, type="GeometryNodeTree" +) +def nodegroup_attach_part(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Skin Mesh", None), + ("NodeSocketGeometry", "Skeleton Curve", None), + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketFloatFactor", "Length Fac", 0.0), + ("NodeSocketVectorEuler", "Ray Rot", (0.0, 0.0, 0.0)), + ("NodeSocketFloat", "Rad", 0.0), + ("NodeSocketVector", "Part Rot", (0.0, 0.0, 0.0)), + ("NodeSocketBool", "Do Normal Rot", False), + ("NodeSocketBool", "Do Tangent Rot", False), + ], + ) + + part_surface = nw.new_node( + nodegroup_part_surface().name, + input_kwargs={ + "Skeleton Curve": group_input.outputs["Skeleton Curve"], + "Skin Mesh": group_input.outputs["Skin Mesh"], + "Length Fac": group_input.outputs["Length Fac"], + "Ray Rot": group_input.outputs["Ray Rot"], + "Rad": group_input.outputs["Rad"], + }, + ) + + deg2rad = nw.new_node( + nodegroup_deg2_rad().name, input_kwargs={"Deg": group_input.outputs["Part Rot"]} + ) + + raycast_rotation = nw.new_node( + nodegroup_raycast_rotation().name, + input_kwargs={ 
+ "Rotation": deg2rad, + "Hit Normal": part_surface.outputs["Hit Normal"], + "Curve Tangent": part_surface.outputs["Tangent"], + "Do Normal Rot": group_input.outputs["Do Normal Rot"], + "Do Tangent Rot": group_input.outputs["Do Tangent Rot"], + }, + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + "Translation": part_surface.outputs["Position"], + "Rotation": raycast_rotation, + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": transform, + "Position": part_surface.outputs["Position"], + "Rotation": raycast_rotation, + }, + ) + + +@node_utils.to_nodegroup( + "nodegroup_carnivore_head", singleton=False, type="GeometryNodeTree" +) +def nodegroup_carnivore_head(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVector", "length_rad1_rad2", (0.5, 0.0, 0.0)), + ("NodeSocketVector", "snout_length_rad1_rad2", (0.0, 0.0, 0.0)), + ("NodeSocketFloat", "Snout Y Scale", 0.62), + ("NodeSocketVector", "eye_coord", (0.96, -0.95, 0.79)), + ("NodeSocketVectorXYZ", "Nose Bridge Scale", (1.0, 0.35, 0.9)), + ("NodeSocketVector", "Jaw Muscle Middle Coord", (0.24, 0.41, 1.3)), + ("NodeSocketVector", "Jaw StartRad, EndRad, Fullness", (0.06, 0.11, 1.5)), + ( + "NodeSocketVector", + "Jaw ProfileHeight, StartTilt, EndTilt", + (0.8, 33.1, 0.0), + ), + ("NodeSocketVector", "Lip Muscle Middle Coord", (0.95, 0.0, 1.5)), + ("NodeSocketVector", "Lip StartRad, EndRad, Fullness", (0.05, 0.09, 1.48)), + ( + "NodeSocketVector", + "Lip ProfileHeight, StartTilt, EndTilt", + (0.8, 0.0, -17.2), + ), + ("NodeSocketVector", "Forehead Muscle Middle Coord", (0.7, -1.32, 1.31)), + ( + "NodeSocketVector", + "Forehead StartRad, EndRad, Fullness", + (0.06, 0.05, 2.5), + ), + ( + "NodeSocketVector", + "Forehead ProfileHeight, StartTilt, EndTilt", + (0.3, 60.6, 66.0), + ), + ("NodeSocketFloat", "aspect", 1.0), + ], + ) + + carnivore_face_structure = nw.new_node( + nodegroup_carnivore__face_structure().name, + input_kwargs={ + "Skull Length Width1 Width2": group_input.outputs["length_rad1_rad2"], + "Snout Length Width1 Width2": group_input.outputs["snout_length_rad1_rad2"], + "Snout Y Scale": group_input.outputs["Snout Y Scale"], + "Nose Bridge Scale": group_input.outputs["Nose Bridge Scale"], + "Jaw Muscle Middle Coord": group_input.outputs["Jaw Muscle Middle Coord"], + "Jaw StartRad, EndRad, Fullness": group_input.outputs[ + "Jaw StartRad, EndRad, Fullness" + ], + "Jaw ProfileHeight, StartTilt, EndTilt": group_input.outputs[ + "Jaw ProfileHeight, StartTilt, EndTilt" + ], + "Lip Muscle Middle Coord": group_input.outputs["Lip Muscle Middle Coord"], + "Lip StartRad, EndRad, Fullness": group_input.outputs[ + "Lip StartRad, EndRad, Fullness" + ], + "Lip ProfileHeight, StartTilt, EndTilt": group_input.outputs[ + "Lip ProfileHeight, StartTilt, EndTilt" + ], + "Forehead Muscle Middle Coord": group_input.outputs[ + "Forehead Muscle Middle Coord" + ], + "Forehead StartRad, EndRad, Fullness": group_input.outputs[ + "Forehead StartRad, EndRad, Fullness" + ], + "Forehead ProfileHeight, StartTilt, EndTilt": group_input.outputs[ + "Forehead ProfileHeight, StartTilt, EndTilt" + ], + "aspect": group_input.outputs["aspect"], + }, + ) + + tigereye = nw.new_node( + nodegroup_eyeball_eyelid().name, + input_kwargs={ + "Skin Mesh": carnivore_face_structure.outputs["Geometry"], + "Base Mesh": carnivore_face_structure.outputs["Base Mesh"], + 
"Skeleton Curve": carnivore_face_structure.outputs["Cranium Skeleton"], + "Length/Yaw/Rad": group_input.outputs["eye_coord"], + "Target Geometry": carnivore_face_structure.outputs["Geometry"], + "EyelidCircleShape(W, H)": (2.0, 1.35, 0.0), + "CorneaScale(W, H, Thickness)": (0.8, 0.8, 0.7), + "DefaultAppendDistance": 0.002, + "EyelidScale": (1.1, 1.6, 1.6), + "Scale": 1.0, + }, + ) + + difference = nw.new_node( + Nodes.MeshBoolean, + input_kwargs={ + "Mesh 1": carnivore_face_structure.outputs["Geometry"], + "Mesh 2": tigereye.outputs["ParentCutter"], + "Self Intersection": True, + }, + ) + + symmetric_clone = nw.new_node( + nodegroup_symmetric_clone().name, + input_kwargs={"Geometry": tigereye.outputs["Eyeballl"]}, + ) + + symmetric_clone_1 = nw.new_node( + nodegroup_symmetric_clone().name, + input_kwargs={"Geometry": tigereye.outputs["BodyExtra_Lid"]}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": difference.outputs["Mesh"], + "Skeleton Curve": carnivore_face_structure.outputs["Skeleton Curve"], + "Base Mesh": carnivore_face_structure.outputs["Base Mesh"], + "LeftEye": symmetric_clone.outputs["Orig"], + "RightEye": symmetric_clone.outputs["Inverted"], + "Eyelid": symmetric_clone_1.outputs["Both"], + }, + ) + + +def shader_eyeball_tiger(nw: NodeWrangler, **input_kwargs): + # Code generated using version 2.4.3 of the node_transpiler + + attribute_2 = nw.new_node(Nodes.Attribute, attrs={"attribute_name": "tag_cornea"}) + + attribute_1 = nw.new_node( + Nodes.Attribute, attrs={"attribute_name": "EyeballPosition"} + ) + + reroute_8 = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": attribute_1.outputs["Color"]} + ) + + reroute = nw.new_node(Nodes.Reroute, input_kwargs={"Input": reroute_8}) + + noise_texture_2 = nw.new_node( + Nodes.NoiseTexture, input_kwargs={"Vector": reroute, "Scale": 50.0} + ) + + mix_3 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": 0.02, + "Color1": reroute, + "Color2": noise_texture_2.outputs["Color"], + }, + ) + + separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": mix_3}) + + value = nw.new_node(Nodes.Value) + value.outputs[0].default_value = 30.0 + + group = nw.new_node( + nodegroup_rotate2_d().name, + input_kwargs={ + 0: separate_xyz.outputs["X"], + 1: separate_xyz.outputs["Z"], + 2: value, + }, + ) + + w_offset = U(0, 0.2) + iris_scale = U(0.4, 0.8) + scale2 = iris_scale * 1.7 + N(0, 0.05) + scale3 = iris_scale * 1.75 + N(0, 0.05) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group.outputs[1], 1: iris_scale}, + attrs={"operation": "MULTIPLY"}, + ) + + reroute_2 = nw.new_node(Nodes.Reroute, input_kwargs={"Input": multiply}) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: reroute_2, 1: reroute_2}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: group.outputs["Value"], 1: iris_scale + w_offset}, + attrs={"operation": "MULTIPLY"}, + ) + + reroute_1 = nw.new_node(Nodes.Reroute, input_kwargs={"Input": multiply_2}) + + multiply_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: reroute_1, 1: reroute_1}, + attrs={"operation": "MULTIPLY"}, + ) + + add = nw.new_node(Nodes.Math, input_kwargs={0: multiply_1, 1: multiply_3}) + + add_1 = nw.new_node(Nodes.Math, input_kwargs={0: add, 1: 0.63}) + + colorramp = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": add_1}) + colorramp.color_ramp.elements[0].position = 0.64 + colorramp.color_ramp.elements[0].color = (1.0, 1.0, 1.0, 1.0) + colorramp.color_ramp.elements[1].position = 
0.6591 + colorramp.color_ramp.elements[1].color = (0.0, 0.0, 0.0, 1.0) + + mapping_1 = nw.new_node( + Nodes.Mapping, + input_kwargs={"Vector": reroute_8, "Scale": (1.0, U(1, 200), 1.0)}, + attrs={"vector_type": "NORMAL"}, + ) + + mix_4 = nw.new_node( + Nodes.MixRGB, + input_kwargs={"Fac": U(0.2, 0.4), "Color1": mapping_1, "Color2": reroute_8}, + ) + + reroute_3 = nw.new_node(Nodes.Reroute, input_kwargs={"Input": mix_4}) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, input_kwargs={"Vector": reroute_3, "Scale": 10.0} + ) + + mix = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": U(0.5, 0.9), + "Color1": noise_texture.outputs["Fac"], + "Color2": reroute_3, + }, + ) + + voronoi_texture = nw.new_node( + Nodes.VoronoiTexture, input_kwargs={"Vector": mix, "Scale": 10.0 + N(0, 2)} + ) + + multiply_4 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: voronoi_texture.outputs["Distance"], + 1: voronoi_texture.outputs["Distance"], + 2: 0.0, + }, + attrs={"operation": "MULTIPLY"}, + ) + + bright_contrast = nw.new_node( + "ShaderNodeBrightContrast", + input_kwargs={"Color": multiply_4, "Bright": 0.6, "Contrast": U(0.8, 1.2)}, + ) + + multiply_5 = nw.new_node( + Nodes.Math, + input_kwargs={0: group.outputs[1], 1: scale3}, + attrs={"operation": "MULTIPLY"}, + ) + + reroute_6 = nw.new_node(Nodes.Reroute, input_kwargs={"Input": multiply_5}) + + multiply_6 = nw.new_node( + Nodes.Math, + input_kwargs={0: reroute_6, 1: reroute_6}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_7 = nw.new_node( + Nodes.Math, + input_kwargs={0: group.outputs["Value"], 1: scale3 + w_offset}, + attrs={"operation": "MULTIPLY"}, + ) + + reroute_7 = nw.new_node(Nodes.Reroute, input_kwargs={"Input": multiply_7}) + + multiply_8 = nw.new_node( + Nodes.Math, + input_kwargs={0: reroute_7, 1: reroute_7}, + attrs={"operation": "MULTIPLY"}, + ) + + add_2 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_6, 1: multiply_8}) + + add_3 = nw.new_node(Nodes.Math, input_kwargs={0: add_2, 1: 0.18}) + + colorramp_3 = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": add_3}) + colorramp_3.color_ramp.elements[0].position = 0.5955 + colorramp_3.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) + colorramp_3.color_ramp.elements[1].position = 1.0 + colorramp_3.color_ramp.elements[1].color = (0.7896, 0.7896, 0.7896, 1.0) + + add_4 = nw.new_node( + Nodes.Math, input_kwargs={0: bright_contrast, 1: colorramp_3.outputs["Color"]} + ) + + multiply_9 = nw.new_node( + Nodes.Math, + input_kwargs={0: group.outputs[1], 1: scale2}, + attrs={"operation": "MULTIPLY"}, + ) + + reroute_4 = nw.new_node(Nodes.Reroute, input_kwargs={"Input": multiply_9}) + + multiply_10 = nw.new_node( + Nodes.Math, + input_kwargs={0: reroute_4, 1: reroute_4}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_11 = nw.new_node( + Nodes.Math, + input_kwargs={0: group.outputs["Value"], 1: scale2 + w_offset}, + attrs={"operation": "MULTIPLY"}, + ) + + reroute_5 = nw.new_node(Nodes.Reroute, input_kwargs={"Input": multiply_11}) + + multiply_12 = nw.new_node( + Nodes.Math, + input_kwargs={0: reroute_5, 1: reroute_5}, + attrs={"operation": "MULTIPLY"}, + ) + + add_5 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_10, 1: multiply_12}) + + add_6 = nw.new_node(Nodes.Math, input_kwargs={0: add_5}) + + colorramp_1 = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": add_6}) + colorramp_1.color_ramp.elements[0].position = 0.6159 + colorramp_1.color_ramp.elements[0].color = (1.0, 1.0, 1.0, 1.0) + colorramp_1.color_ramp.elements[1].position = 0.6591 + 
colorramp_1.color_ramp.elements[1].color = (0.0, 0.0, 0.0, 1.0) + + colorramp_2 = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": add_3}) + colorramp_2.color_ramp.elements[0].position = 0.4773 + colorramp_2.color_ramp.elements[0].color = (1.0, 1.0, 1.0, 1.0) + colorramp_2.color_ramp.elements[1].position = 0.6659 + colorramp_2.color_ramp.elements[1].color = (0.0, 0.0, 0.0, 1.0) + + mix_7 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": colorramp_2.outputs["Color"], + "Color1": (U(0.5, 0.9), U(0.3, 0.8), U(0.3, 0.7), 1.0), + "Color2": (U(0.2, 0.6), U(0.15, 0.6), U(0.1, 0.4), 1.0), + }, + ) + + mix_6 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": colorramp_1.outputs["Color"], + "Color1": mix_7, + "Color2": (U(0.1, 0.55), U(0.1, 0.55), U(0.1, 0.55), 1.0), + }, + ) + + color1 = max(0, N(0.125, 0.05)) + mix_5 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": add_4, + "Color1": (color1, U(0, color1), U(0, color1), 1.0), + "Color2": mix_6, + }, + ) + + mix_2 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": colorramp.outputs["Color"], + "Color1": mix_5, + "Color2": (0.0, 0.0, 0.0, 1.0), + }, + ) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={"Base Color": mix_2, "Specular": 0.0, "Roughness": 0.0}, + ) + + principled_bsdf_1 = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Specular": 1.0, + "Roughness": 0.0, + "IOR": 1.35, + "Transmission": 1.0, + }, + ) + + transparent_bsdf = nw.new_node(Nodes.TransparentBSDF) + + mix_shader = nw.new_node( + Nodes.MixShader, + input_kwargs={"Fac": 0.1577, 1: principled_bsdf_1, 2: transparent_bsdf}, + ) + + mix_shader_1 = nw.new_node( + Nodes.MixShader, + input_kwargs={ + "Fac": attribute_2.outputs["Color"], + 1: principled_bsdf, + 2: mix_shader, + }, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": mix_shader_1} + ) + + +def geometry_tiger_head(nw: NodeWrangler, input_kwargs={}): + # Code generated using version 2.4.3 of the node_transpiler + + carnivorehead = nw.new_node( + nodegroup_carnivore_head().name, + input_kwargs={ + "length_rad1_rad2": (0.36, 0.2, 0.18), + "snout_length_rad1_rad2": (0.25, 0.15, 0.15), + "eye_coord": (0.96, -0.85, 0.79), + "Lip Muscle Middle Coord": (0.95, -0.45, 2.03), + }, + ) + + nose_radius = nw.new_node( + nodegroup_cat_nose().name, + input_kwargs={"Nose Radius": 0.11, "Nostril Size": 0.03, "Crease": 0.237}, + label="NoseRadius ~ N(0.1, 0.02)", + ) + + attach_nose = nw.new_node( + nodegroup_attach_part().name, + input_kwargs={ + "Skin Mesh": carnivorehead.outputs["Base Mesh"], + "Skeleton Curve": carnivorehead.outputs["Skeleton Curve"], + "Geometry": nose_radius, + "Length Fac": 0.9017, + "Ray Rot": (0.0, -1.3277, 0.0), + "Rad": 0.56, + "Part Rot": (0.0, 26.86, 0.0), + "Do Normal Rot": True, + "Do Tangent Rot": True, + }, + label="Attach Nose", + ) + + cat_ear = nw.new_node( + nodegroup_cat_ear().name, + input_kwargs={ + "length_rad1_rad2": (0.2, 0.1, 0.0), + "Depth": 0.06, + "Thickness": 0.01, + "Curl Deg": 49.0, + }, + ) + + deg2rad = nw.new_node( + nodegroup_deg2_rad().name, input_kwargs={"Deg": (90.0, -44.0, 90.0)} + ) + + attach_ear = nw.new_node( + nodegroup_attach_part().name, + input_kwargs={ + "Skin Mesh": carnivorehead.outputs["Base Mesh"], + "Skeleton Curve": carnivorehead.outputs["Skeleton Curve"], + "Geometry": cat_ear, + "Length Fac": 0.3328, + "Ray Rot": deg2rad, + "Rad": 1.0, + "Part Rot": (-43.3, -9.5, -29.6), + "Do Normal Rot": True, + }, + label="Attach Ear", + ) + + symmetric_clone = nw.new_node( + 
nodegroup_symmetric_clone().name, + input_kwargs={"Geometry": attach_ear.outputs["Geometry"]}, + ) + + carnivore_jaw = nw.new_node( + nodegroup_carnivore_jaw().name, + input_kwargs={ + "length_rad1_rad2": (0.4, 0.12, 0.08), + "Width Shaping": 1.0, + "Tooth Crookedness": 1.2, + }, + ) + + join_geometry_3 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={ + "Geometry": [ + carnivore_jaw.outputs["Geometry"], + carnivore_jaw.outputs["Teeth"], + carnivore_jaw.outputs["Tongue"], + ] + }, + ) + + attach_jaw = nw.new_node( + nodegroup_attach_part().name, + input_kwargs={ + "Skin Mesh": carnivorehead.outputs["Base Mesh"], + "Skeleton Curve": carnivorehead.outputs["Skeleton Curve"], + "Geometry": join_geometry_3, + "Length Fac": 0.2, + "Ray Rot": (0.0, 1.5708, 0.0), + "Rad": 0.36, + "Part Rot": (0.0, 21.1, 0.0), + "Do Normal Rot": True, + "Do Tangent Rot": True, + }, + label="Attach Jaw", + ) + + join_geometry_2 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={ + "Geometry": [ + carnivorehead.outputs["Geometry"], + attach_nose.outputs["Geometry"], + carnivorehead.outputs["LeftEye"], + symmetric_clone.outputs["Both"], + attach_jaw.outputs["Geometry"], + carnivorehead.outputs["RightEye"], + carnivorehead.outputs["Eyelid"], + ] + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": join_geometry_2} + ) + + +class Eye(PartFactory): + tags = ["head_detail", "eye_socket"] + + def sample_params(self): + return { + "Skin Mesh": None, + "Base Mesh": None, + "Skeleton Curve": None, + "Length/Yaw/Rad": (0.5, 0.0, 1.0), + "Target Geometry": None, + "EyeRot": -U(15, 35), + "EyelidCircleShape(W, H)": (2.0, U(1.3, 1.5), 0.0), + "EyelidRadiusShape(Out, In1, In2)": (0.4, 5.3, 0.4), + "EyelidResolution(Circle, Radius)": (32.0, 32.0, 0.0), + "CorneaScale(W, H, Thickness)": (0.8, 0.8, 0.55), + "EyeballResolution(White, Cornea)": (32.0, 128.0, 0.0), + "OffsetPreAppending": (0.012, 0.0, 0.0), + "Scale": (0.9, 1.1), + "Rotation": (0.1745, 0.0, -1.3963), + "RayDirection": (-1.0, 0.0, 0.0), + "DefaultAppendDistance": -0.002, + } + + def sample_params_fish(self): + return { + "Skin Mesh": None, + "Base Mesh": None, + "Skeleton Curve": None, + "Length/Yaw/Rad": (0.8800, -0.6000, 1.0000), + "Target Geometry": None, + "EyeRot": 0.0000, + "EyelidCircleShape(W, H)": (2.0000, 1.0000, 0.0000), + "EyelidRadiusShape(Out, In1, In2)": (0.4000, 5.3000, 0.3000), + "CorneaScale(W, H, Thickness)": (0.8000, 0.8000, 0.8500), + "OffsetPreAppending": (0.0000, 0.0100, 0.0000), + "Scale": 1.5000, + "Rotation": (0.0873, 0.0000, -0.2618), + "RayDirection": (-0.3000, -0.8000, 0.0000), + "DefaultAppendDistance": 0.0050, + "EyeSocketRot": (0.0000, 0.0000, 80.0000), + } + + def make_part(self, params): + part = part_util.nodegroup_to_part(nodegroup_eyeball_eyelid, params) + return part + + +def apply(obj, geo_kwargs=None, shader_kwargs=None, **kwargs): + surface.add_geomod(obj, geometry_tiger_head, apply=False, input_kwargs=geo_kwargs) + + +if __name__ == "__main__": + mat = "tigereye" + for i in range(1): + bpy.ops.wm.open_mainfile(filepath="test.blend") + apply(bpy.data.objects["SolidModel"], geo_kwargs={}, shader_kwargs={}) + fn = os.path.join(os.path.abspath(os.curdir), "tigereye_test.blend") + bpy.ops.wm.save_as_mainfile(filepath=fn) + # bpy.context.scene.render.filepath = os.path.join('surfaces/surface_thumbnails', '%s_%d.jpg'%(mat, i)) + # bpy.context.scene.render.image_settings.file_format='JPEG' + # bpy.ops.render.render(write_still=True) diff --git a/infinigen/assets/objects/creatures/parts/fin_old.py 
b/infinigen/assets/objects/creatures/parts/fin_old.py new file mode 100644 index 000000000..55ae0016d --- /dev/null +++ b/infinigen/assets/objects/creatures/parts/fin_old.py @@ -0,0 +1,180 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Alexander Raistrick + + +import numpy as np + +from infinigen.assets.objects.creatures.util.creature import PartFactory +from infinigen.assets.objects.creatures.util.genome import IKParams, Joint +from infinigen.assets.objects.creatures.util.part_util import nodegroup_to_part +from infinigen.assets.utils.nodegroups.attach import nodegroup_attach_part +from infinigen.assets.utils.nodegroups.curve import nodegroup_simple_tube_v2 +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.tagging import tag_object + + +@node_utils.to_nodegroup("nodegroup_fish_fin", singleton=False, type="GeometryNodeTree") +def nodegroup_fish_fin(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVector", "length_rad1_rad2", (0.34, 0.07, 0.04)), + ("NodeSocketVector", "angles_deg", (0.0, 0.0, 0.0)), + ("NodeSocketVector", "proportions", (0.3333, 0.3333, 0.3333)), + ("NodeSocketFloat", "aspect", 2.65), + ("NodeSocketFloat", "fullness", 4.0), + ], + ) + + simple_tube_v2 = nw.new_node( + nodegroup_simple_tube_v2().name, + input_kwargs={ + "length_rad1_rad2": group_input.outputs["length_rad1_rad2"], + "angles_deg": group_input.outputs["angles_deg"], + "proportions": group_input.outputs["proportions"], + "aspect": group_input.outputs["aspect"], + "fullness": group_input.outputs["fullness"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": simple_tube_v2.outputs["Geometry"], + "Skeleton Curve": simple_tube_v2.outputs["Skeleton Curve"], + "Endpoint": simple_tube_v2.outputs["Endpoint"], + }, + ) + + +class FishFin(PartFactory): + tags = ["limb", "fin"] + + def sample_params(self): + return { + "length_rad1_rad2": (0.34, 0.07, 0.04), + "angles_deg": (0.0, 0.0, 0.0), + "proportions": (0.3333, 0.3333, 0.3333), + "aspect": 2.65, + "fullness": 4.0, + } + + def make_part(self, params): + part = nodegroup_to_part(nodegroup_fish_fin, params) + part.joints = { + 0: Joint( + rest=(0, 0, 0), bounds=np.array([[-35, 0, -70], [35, 0, 70]]) + ), # shoulder + 0.6: Joint( + rest=(0, 0, 0), bounds=np.array([[-35, 0, -70], [35, 0, 70]]) + ), # elbow + } + tag_object(part.obj, "fish_fin") + return part + + +@node_utils.to_nodegroup( + "nodegroup_fish_tail", singleton=False, type="GeometryNodeTree" +) +def nodegroup_fish_tail(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVector", "length_rad1_rad2", (0.5, 0.18, 0.04)), + ("NodeSocketVector", "angles_deg", (0.0, -4.6, 0.0)), + ("NodeSocketFloat", "aspect", 0.46), + ("NodeSocketFloat", "fullness", 4.0), + ], + ) + + simple_tube_v2 = nw.new_node( + nodegroup_simple_tube_v2().name, + input_kwargs={ + "length_rad1_rad2": group_input.outputs["length_rad1_rad2"], + "angles_deg": group_input.outputs["angles_deg"], + "aspect": group_input.outputs["aspect"], + "fullness": group_input.outputs["fullness"], + "Origin": (-0.07, 0.0, 0.0), + }, + ) + + fishfin = nw.new_node( + 
nodegroup_fish_fin().name, + input_kwargs={"length_rad1_rad2": (0.34, 0.07, 0.11), "aspect": 4.7}, + ) + + attach_part = nw.new_node( + nodegroup_attach_part().name, + input_kwargs={ + "Skin Mesh": simple_tube_v2.outputs["Geometry"], + "Skeleton Curve": simple_tube_v2.outputs["Skeleton Curve"], + "Geometry": fishfin.outputs["Geometry"], + "Length Fac": 0.775, + "Part Rot": (90.0, -20.7, 0.0), + }, + ) + + fishfin_1 = nw.new_node( + nodegroup_fish_fin().name, + input_kwargs={"length_rad1_rad2": (0.34, 0.07, 0.11), "aspect": 4.7}, + ) + + attach_part_1 = nw.new_node( + nodegroup_attach_part().name, + input_kwargs={ + "Skin Mesh": simple_tube_v2.outputs["Geometry"], + "Skeleton Curve": simple_tube_v2.outputs["Skeleton Curve"], + "Geometry": fishfin_1.outputs["Geometry"], + "Length Fac": 0.775, + "Part Rot": (90.0, 18.64, 0.0), + }, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={ + "Geometry": [ + attach_part.outputs["Geometry"], + simple_tube_v2.outputs["Geometry"], + attach_part_1.outputs["Geometry"], + ] + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": join_geometry, + "Skeleton Curve": simple_tube_v2.outputs["Skeleton Curve"], + "Endpoint": simple_tube_v2.outputs["Endpoint"], + }, + ) + + +class FishTail(PartFactory): + tags = ["tail"] + + def sample_params(self): + return { + "length_rad1_rad2": (0.5, 0.18, 0.04), + "angles_deg": (0.0, -4.6, 0.0), + "aspect": 0.46, + "fullness": 4.0, + } + + def make_part(self, params): + part = nodegroup_to_part(nodegroup_fish_tail, params) + part.joints = { + t: Joint(rest=(0, 0, 0), bounds=np.array([[-35, 0, -70], [35, 0, 70]])) + for t in np.linspace(0, 0.7, 4) + } + part.iks = {1.0: IKParams("tail", rotation_weight=0, chain_parts=1)} + tag_object(part.obj, "fish_tail") + return part diff --git a/infinigen/assets/objects/creatures/parts/foot.py b/infinigen/assets/objects/creatures/parts/foot.py new file mode 100644 index 000000000..990d3edb8 --- /dev/null +++ b/infinigen/assets/objects/creatures/parts/foot.py @@ -0,0 +1,538 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
+ +# Authors: Alexander Raistrick + + +import numpy as np +from numpy.random import normal as N + +from infinigen.assets.objects.creatures.util.creature import PartFactory +from infinigen.assets.objects.creatures.util.genome import IKParams +from infinigen.assets.objects.creatures.util.part_util import nodegroup_to_part +from infinigen.assets.utils.nodegroups.attach import ( + nodegroup_attach_part, + nodegroup_surface_muscle, +) +from infinigen.assets.utils.nodegroups.curve import ( + nodegroup_simple_tube, + nodegroup_simple_tube_v2, +) +from infinigen.assets.utils.nodegroups.math import nodegroup_deg2_rad +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.tagging import tag_object + + +@node_utils.to_nodegroup( + "nodegroup_tiger_toe", singleton=False, type="GeometryNodeTree" +) +def nodegroup_tiger_toe(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVector", "length_rad1_rad2", (0.18, 0.045, 0.024)), + ("NodeSocketFloatDistance", "Toebean Radius", 0.03), + ("NodeSocketFloat", "Claw Curl Deg", 30.0), + ("NodeSocketVector", "Claw Pct Length Rad1 Rad2", (0.0, 0.0, 0.0)), + ("NodeSocketFloat", "Toe Curl Scalar", 1.0), + ], + ) + + scale = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: (-50.0, 25.0, 35.0), + "Scale": group_input.outputs["Toe Curl Scalar"], + }, + attrs={"operation": "SCALE"}, + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, + input_kwargs={"Vector": group_input.outputs["length_rad1_rad2"]}, + ) + + scale_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: separate_xyz.outputs["X"], "Scale": 0.18}, + attrs={"operation": "SCALE"}, + ) + + toe = nw.new_node( + nodegroup_simple_tube().name, + input_kwargs={ + "Origin": (-0.05, 0.0, 0.0), + "Angles Deg": scale.outputs["Vector"], + "Seg Lengths": scale_1.outputs["Vector"], + "Start Radius": separate_xyz.outputs["Y"], + "End Radius": separate_xyz.outputs["Z"], + }, + label="Toe", + ) + + uv_sphere = nw.new_node( + Nodes.MeshUVSphere, + input_kwargs={ + "Segments": 16, + "Rings": 8, + "Radius": group_input.outputs["Toebean Radius"], + }, + ) + + transform_1 = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": uv_sphere, "Scale": (1.5, 1.0, 0.6)} + ) + + attach_part = nw.new_node( + nodegroup_attach_part().name, + input_kwargs={ + "Skin Mesh": toe.outputs["Geometry"], + "Skeleton Curve": toe.outputs["Skeleton Curve"], + "Geometry": transform_1, + "Length Fac": 0.5037, + "Ray Rot": (0.0, 1.5708, 0.0), + "Rad": 0.9, + }, + ) + + transform = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": uv_sphere, "Scale": (1.0, 0.7, 0.6)} + ) + + attach_part_1 = nw.new_node( + nodegroup_attach_part().name, + input_kwargs={ + "Skin Mesh": toe.outputs["Geometry"], + "Skeleton Curve": toe.outputs["Skeleton Curve"], + "Geometry": transform, + "Length Fac": 0.8, + "Ray Rot": (0.0, 1.5708, 0.0), + "Rad": 0.7, + }, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": separate_xyz.outputs["Z"], + "Y": separate_xyz.outputs["Z"], + "Z": 3.0, + }, + ) + + toe_top = nw.new_node( + nodegroup_surface_muscle().name, + input_kwargs={ + "Skin Mesh": toe.outputs["Geometry"], + "Skeleton Curve": toe.outputs["Skeleton Curve"], + "Coord 0": (0.56, -1.5708, 0.3), + "Coord 1": (0.7, -1.5708, 1.0), + "Coord 2": (0.95, -1.5708, 0.0), + "StartRad, EndRad, Fullness": combine_xyz, + "ProfileHeight, StartTilt, EndTilt": 
(0.9, 0.0, 0.0), + }, + label="Toe Top", + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={ + "Geometry": [ + toe.outputs["Geometry"], + attach_part.outputs["Geometry"], + attach_part_1.outputs["Geometry"], + toe_top, + ] + }, + ) + + scale_2 = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: (1.0, -2.0, -1.0), + "Scale": group_input.outputs["Claw Curl Deg"], + }, + attrs={"operation": "SCALE"}, + ) + + multiply = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: group_input.outputs["length_rad1_rad2"], + 1: group_input.outputs["Claw Pct Length Rad1 Rad2"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + separate_xyz_1 = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": multiply.outputs["Vector"]} + ) + + scale_3 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: (0.33, 0.33, 0.33), "Scale": separate_xyz_1.outputs["X"]}, + attrs={"operation": "SCALE"}, + ) + + claw = nw.new_node( + nodegroup_simple_tube().name, + input_kwargs={ + "Origin": (-0.007, 0.0, 0.0), + "Angles Deg": scale_2.outputs["Vector"], + "Seg Lengths": scale_3.outputs["Vector"], + "Start Radius": separate_xyz_1.outputs["Y"], + "End Radius": separate_xyz_1.outputs["Z"], + }, + label="Claw", + ) + + attach_part_2 = nw.new_node( + nodegroup_attach_part().name, + input_kwargs={ + "Skin Mesh": toe.outputs["Geometry"], + "Skeleton Curve": toe.outputs["Skeleton Curve"], + "Geometry": claw.outputs["Geometry"], + "Length Fac": 0.85, + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": join_geometry_1, + "Skeleton Curve": toe.outputs["Skeleton Curve"], + "Claw": attach_part_2.outputs["Geometry"], + }, + ) + + +@node_utils.to_nodegroup("nodegroup_foot", singleton=False, type="GeometryNodeTree") +def nodegroup_foot(nw: NodeWrangler): + # Code generated using version 2.5.1 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketInt", "Num Toes", 3), + ("NodeSocketVector", "length_rad1_rad2", (0.2700, 0.0400, 0.0900)), + ("NodeSocketVector", "Toe Rotate", (0.0000, -1.57, 0.0000)), + ("NodeSocketVector", "Toe Length Rad1 Rad2", (0.3000, 0.0450, 0.0250)), + ("NodeSocketFloat", "Toe Splay", 0.0000), + ("NodeSocketFloatDistance", "Toebean Radius", 0.0300), + ("NodeSocketFloat", "Claw Curl Deg", 30.0000), + ("NodeSocketVector", "Claw Pct Length Rad1 Rad2", (0.3000, 0.5000, 0.0000)), + ("NodeSocketVector", "Thumb Pct", (1.0000, 1.0000, 1.0000)), + ("NodeSocketFloat", "Toe Curl Scalar", 1.0000), + ], + ) + + simple_tube_v2 = nw.new_node( + nodegroup_simple_tube_v2().name, + input_kwargs={ + "length_rad1_rad2": group_input.outputs["length_rad1_rad2"], + "angles_deg": (10.0000, 8.0000, -25.0000), + }, + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, + input_kwargs={"Vector": group_input.outputs["length_rad1_rad2"]}, + ) + + multiply_add = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: separate_xyz.outputs["Z"], + 1: (0.0000, -0.4500, 0.1000), + 2: (-0.0700, 0.0000, 0.0000), + }, + attrs={"operation": "MULTIPLY_ADD"}, + ) + + add = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: simple_tube_v2.outputs["Endpoint"], + 1: multiply_add.outputs["Vector"], + }, + ) + + multiply_add_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: separate_xyz.outputs["Z"], + 1: (0.0000, 0.4500, 0.1000), + 2: (-0.0700, 0.0000, 0.0000), + }, + attrs={"operation": "MULTIPLY_ADD"}, + ) + + add_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: simple_tube_v2.outputs["Endpoint"], + 1: 
multiply_add_1.outputs["Vector"], + }, + ) + + mesh_line = nw.new_node( + Nodes.MeshLine, + input_kwargs={ + "Count": group_input.outputs["Num Toes"], + "Start Location": add.outputs["Vector"], + "Offset": add_1.outputs["Vector"], + }, + attrs={"mode": "END_POINTS"}, + ) + + tigertoe = nw.new_node( + nodegroup_tiger_toe().name, + input_kwargs={ + "length_rad1_rad2": group_input.outputs["Toe Length Rad1 Rad2"], + "Toebean Radius": group_input.outputs["Toebean Radius"], + "Claw Curl Deg": group_input.outputs["Claw Curl Deg"], + "Claw Pct Length Rad1 Rad2": group_input.outputs[ + "Claw Pct Length Rad1 Rad2" + ], + "Toe Curl Scalar": group_input.outputs["Toe Curl Scalar"], + }, + ) + + instance_on_points = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={"Points": mesh_line, "Instance": tigertoe.outputs["Geometry"]}, + ) + + rotate_instances_1 = nw.new_node( + Nodes.RotateInstances, + input_kwargs={ + "Instances": instance_on_points, + "Rotation": group_input.outputs["Toe Rotate"], + }, + ) + + index = nw.new_node(Nodes.Index) + + add_2 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["Num Toes"], 1: -1.0000} + ) + + divide = nw.new_node( + Nodes.Math, input_kwargs={0: index, 1: add_2}, attrs={"operation": "DIVIDE"} + ) + + scale = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: (0.0000, 0.0000, -1.0000), + "Scale": group_input.outputs["Toe Splay"], + }, + attrs={"operation": "SCALE"}, + ) + + scale_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: (0.0000, 0.0000, 1.0000), + "Scale": group_input.outputs["Toe Splay"], + }, + attrs={"operation": "SCALE"}, + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Vector": divide, + 9: scale.outputs["Vector"], + 10: scale_1.outputs["Vector"], + }, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + deg2rad = nw.new_node( + nodegroup_deg2_rad().name, input_kwargs={"Deg": map_range.outputs["Vector"]} + ) + + rotate_instances = nw.new_node( + Nodes.RotateInstances, + input_kwargs={"Instances": rotate_instances_1, "Rotation": deg2rad}, + ) + + realize_instances = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": rotate_instances} + ) + + uv_sphere = nw.new_node( + Nodes.MeshUVSphere, input_kwargs={"Segments": 16, "Rings": 8, "Radius": 0.01500} + ) + + add_3 = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: simple_tube_v2.outputs["Endpoint"], + 1: (-0.0200, 0.0000, 0.0000), + }, + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": uv_sphere, + "Translation": add_3.outputs["Vector"], + "Scale": (0.7000, 1.0000, 1.0000), + }, + ) + + reroute = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": simple_tube_v2.outputs["Geometry"]} + ) + + reroute_1 = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": simple_tube_v2.outputs["Skeleton Curve"]} + ) + + multiply = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: group_input.outputs["Toe Length Rad1 Rad2"], + 1: group_input.outputs["Thumb Pct"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + tigertoe_1 = nw.new_node( + nodegroup_tiger_toe().name, + input_kwargs={ + "length_rad1_rad2": multiply.outputs["Vector"], + "Toebean Radius": group_input.outputs["Toebean Radius"], + "Claw Curl Deg": group_input.outputs["Claw Curl Deg"], + "Claw Pct Length Rad1 Rad2": group_input.outputs[ + "Claw Pct Length Rad1 Rad2" + ], + "Toe Curl Scalar": group_input.outputs["Toe Curl Scalar"], + }, + ) + + value_2 = nw.new_node(Nodes.Value) + value_2.outputs[0].default_value = 0.3000 + + vector_1 = nw.new_node(Nodes.Vector) 
+ vector_1.vector = (90.0000, 90.0000, 90.0000) + + value_1 = nw.new_node(Nodes.Value) + value_1.outputs[0].default_value = 0.8000 + + vector = nw.new_node(Nodes.Vector) + vector.vector = (90.0000, 1.4300, -55.6800) + + attach_part = nw.new_node( + nodegroup_attach_part().name, + input_kwargs={ + "Skin Mesh": reroute, + "Skeleton Curve": reroute_1, + "Geometry": tigertoe_1.outputs["Geometry"], + "Length Fac": value_2, + "Ray Rot": vector_1, + "Rad": value_1, + "Part Rot": vector, + "Do Tangent Rot": True, + }, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={ + "Geometry": [ + realize_instances, + transform, + attach_part.outputs["Geometry"], + simple_tube_v2.outputs["Geometry"], + ] + }, + ) + + instance_on_points_1 = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={"Points": mesh_line, "Instance": tigertoe.outputs["Claw"]}, + ) + + rotate_instances_2 = nw.new_node( + Nodes.RotateInstances, + input_kwargs={ + "Instances": instance_on_points_1, + "Rotation": group_input.outputs["Toe Rotate"], + }, + ) + + rotate_instances_3 = nw.new_node( + Nodes.RotateInstances, + input_kwargs={"Instances": rotate_instances_2, "Rotation": deg2rad}, + ) + + realize_instances_1 = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": rotate_instances_3} + ) + + attach_part_1 = nw.new_node( + nodegroup_attach_part().name, + input_kwargs={ + "Skin Mesh": reroute, + "Skeleton Curve": reroute_1, + "Geometry": tigertoe_1.outputs["Claw"], + "Length Fac": value_2, + "Ray Rot": vector_1, + "Rad": value_1, + "Part Rot": vector, + "Do Tangent Rot": True, + }, + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={ + "Geometry": [realize_instances_1, attach_part_1.outputs["Geometry"]] + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": join_geometry, + "Skeleton Curve": simple_tube_v2.outputs["Skeleton Curve"], + "Base Mesh": simple_tube_v2.outputs["Geometry"], + "Claws": join_geometry_1, + }, + ) + + +class Foot(PartFactory): + def __init__(self, params=None, bald=False): + super().__init__(params) + self.tags = ["foot"] + if bald: + self.tags.append("bald") + + def sample_params(self): + return { + "length_rad1_rad2": np.array((0.27, 0.04, 0.09)) + * N(1, (0.2, 0.05, 0.05), 3), + "Num Toes": max(int(N(4, 1)), 2), + "Toe Length Rad1 Rad2": np.array((0.3, 0.045, 0.025)) * N(1, 0.1, 3), + "Toe Rotate": (0.0, -N(0.7, 0.15), 0.0), + "Toe Splay": 20.0 * N(1, 0.2), + "Toebean Radius": 0.03 * N(1, 0.2), + "Claw Curl Deg": 30 * N(1, 0.4), + "Claw Pct Length Rad1 Rad2": np.array((0.3, 0.5, 0.0)) * N(1, 0.1, 3), + } + + def make_part(self, params): + part = nodegroup_to_part(nodegroup_foot, params, split_extras=True) + part.iks = { + 1.0: IKParams("foot", rotation_weight=0.1, chain_parts=2, chain_length=-1) + } + part.settings["rig_extras"] = True + tag_object(part.obj, "foot") + return part diff --git a/infinigen/assets/objects/creatures/parts/generic_nurbs.py b/infinigen/assets/objects/creatures/parts/generic_nurbs.py new file mode 100644 index 000000000..ecb5bbe61 --- /dev/null +++ b/infinigen/assets/objects/creatures/parts/generic_nurbs.py @@ -0,0 +1,184 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
+ +# Authors: Alexander Raistrick + + +from pathlib import Path + +import bpy +import numpy as np + +from infinigen.assets.objects.creatures.util import part_util +from infinigen.assets.objects.creatures.util.creature import PartFactory +from infinigen.assets.objects.creatures.util.genome import IKParams, Joint +from infinigen.assets.utils.geometry import lofting +from infinigen.core.tagging import tag_object +from infinigen.core.util import blender as butil +from infinigen.core.util.logging import Suppress + +NURBS_BASE_PATH = Path(__file__).parent / "nurbs_data" +NURBS_KEYS = [p.stem for p in NURBS_BASE_PATH.iterdir()] + + +def load_nurbs(name: str): + return np.load(NURBS_BASE_PATH / (name + ".npy"))[..., :3] + + +def decompose_nurbs_handles(handles): + skeleton, ts, profiles = lofting.factorize_nurbs_handles(handles) + + rads = np.linalg.norm(profiles, axis=2, keepdims=True).mean(axis=1, keepdims=True) + rads = np.clip(rads, 1e-3, 1e5) + profiles_norm = profiles / rads + + skeleton_root = skeleton[[0]] + dirs = np.diff(skeleton, axis=0) + + lens = np.linalg.norm(dirs, axis=-1) + length = lens.sum() + proportions = lens / length + + thetas = np.arctan2(dirs[:, 2], dirs[:, 0]) + thetas = np.rad2deg(thetas) + skeleton_yoffs = dirs[:, 1] / lens + + return { + "ts": ts, + "rads": rads, + "skeleton_root": skeleton_root, + "skeleton_yoffs": skeleton_yoffs, + "length": length, + "proportions": proportions, + "thetas": thetas, + "profiles_norm": profiles_norm, + } + + +def recompose_nurbs_handles(params): + lens = params["length"] * params["proportions"] + thetas = np.deg2rad(params["thetas"]) + skeleton_offs = np.stack( + [lens * np.cos(thetas), lens * params["skeleton_yoffs"], lens * np.sin(thetas)], + axis=-1, + ) + skeleton = np.concatenate([params["skeleton_root"], skeleton_offs], axis=0) + skeleton = np.cumsum(skeleton, axis=0) + + handles = lofting.compute_profile_verts( + skeleton, + params["ts"], + params["profiles_norm"] * params["rads"], + profile_as_points=True, + ) + + return handles + + +class NurbsPart(PartFactory): + def __init__( + self, params=None, prefix=None, tags=None, temperature=0.3, var=1, exps=None + ): + self.prefix = prefix + self.tags = tags or [] + self.temperature = temperature + self.var = var + self.exps = exps + super(NurbsPart, self).__init__(params) + + def sample_params(self, select=None): + if self.prefix is None: + # for compatibility with interp which will not init prefix but does not need sample_params + return {} # TODO hacky, replace + + def N(u, v, d=1): + return np.random.normal(u, np.array(v) * self.var, d) + + target_keys = [ + k for k in NURBS_KEYS if self.prefix is None or k.startswith(self.prefix) + ] + weights = part_util.random_convex_coord( + target_keys, select=select, temp=self.temperature + ) + if self.exps is not None: + for k, exp in self.exps.items(): + weights[k] = weights[k] ** exp + + handles = sum(w * load_nurbs(k) for k, w in weights.items()) + decomp = decompose_nurbs_handles(handles) + + sz = N(1, 0.1) + decomp["length"] *= sz * N(1, 0.1) + decomp["rads"] *= sz * N(1, 0.1) * N(1, 0.15, decomp["rads"].shape) + decomp["proportions"] *= N(1, 0.15) + + ang_noise = N(0, 7, decomp["thetas"].shape) + ang_noise -= ang_noise.mean() + decomp["thetas"] += ang_noise + + n, m, d = decomp["profiles_norm"].shape + profile_noise = N(1, 0.07, (1, m, 1)) * N(1, 0.15, (n, m, 1)) + profile_noise[:, : m // 2 - 1] = profile_noise[:, m // 2 : -1][ + :, ::-1 + ] # symmetrize noise + decomp["profiles_norm"] *= ( + profile_noise # profiles are 0-centered 
so multiplication is sensible + ) + + return decomp + + def make_part(self, params): + handles = recompose_nurbs_handles(params) + part = part_util.nurbs_to_part(handles) + with butil.ViewportMode(part.obj, mode="EDIT"), Suppress(): + bpy.ops.mesh.select_all() + bpy.ops.mesh.remove_doubles() + bpy.ops.mesh.normals_make_consistent(inside=False) + return part + + +class NurbsBody(NurbsPart): + def __init__( + self, + *args, + shoulder_ik_ts=[0.0, 0.6], + n_bones=8, + rig_reverse_skeleton=False, + **kwargs, + ): + super().__init__(*args, **kwargs) + self.shoulder_ik_ts = shoulder_ik_ts + self.n_bones = n_bones + self.rig_reverse_skeleton = rig_reverse_skeleton + + def make_part(self, params): + part = super().make_part(params) + part.joints = { + i: Joint((0, 0, 0), bounds=np.array([[-30, -30, -30], [30, 30, 30]])) + for i in np.linspace(0, 1, self.n_bones, endpoint=True) + } + part.iks = { + t: IKParams( + name=f"body_{i}", + mode="pin" if i == 0 else "iksolve", + rotation_weight=0, + target_size=0.3, + ) + for i, t in enumerate(self.shoulder_ik_ts) + } + part.settings["rig_reverse_skeleton"] = self.rig_reverse_skeleton + tag_object(part.obj, "body") + return part + + +class NurbsHead(NurbsPart): + def make_part(self, params): + part = super().make_part(params) + part.iks = { + 1.0: IKParams( + name="head", rotation_weight=0.1, target_size=0.4, chain_length=1 + ) + } + part.settings["rig_extras"] = True + tag_object(part.obj, "head") + return part diff --git a/infinigen/assets/objects/creatures/parts/head.py b/infinigen/assets/objects/creatures/parts/head.py new file mode 100644 index 000000000..791c739ad --- /dev/null +++ b/infinigen/assets/objects/creatures/parts/head.py @@ -0,0 +1,1125 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
+ +# Authors: Alexander Raistrick + + +import numpy as np +from numpy.random import normal as N + +from infinigen.assets.objects.creatures.parts.eye import nodegroup_mammal_eye +from infinigen.assets.objects.creatures.util import part_util +from infinigen.assets.objects.creatures.util.creature import PartFactory +from infinigen.assets.objects.creatures.util.genome import IKParams, Joint +from infinigen.assets.utils.nodegroups.attach import ( + nodegroup_attach_part, + nodegroup_profile_part, + nodegroup_smooth_taper, + nodegroup_surface_muscle, +) +from infinigen.assets.utils.nodegroups.curve import ( + nodegroup_polar_bezier, + nodegroup_simple_tube, + nodegroup_simple_tube_v2, + nodegroup_warped_circle_curve, +) +from infinigen.assets.utils.nodegroups.geometry import ( + nodegroup_solidify, + nodegroup_symmetric_clone, +) +from infinigen.assets.utils.nodegroups.math import nodegroup_deg2_rad +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.tagging import tag_object +from infinigen.core.util.math import clip_gaussian + + +@node_utils.to_nodegroup( + "nodegroup_carnivore_jaw", singleton=True, type="GeometryNodeTree" +) +def nodegroup_carnivore_jaw(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVector", "length_rad1_rad2", (0.0, 0.0, 0.0)), + ("NodeSocketFloatFactor", "Width Shaping", 0.6764), + ("NodeSocketFloat", "Canine Length", 0.050000000000000003), + ("NodeSocketFloat", "Incisor Size", 0.01), + ("NodeSocketFloat", "Tooth Crookedness", 0.0), + ("NodeSocketFloatFactor", "Tongue Shaping", 1.0), + ("NodeSocketFloat", "Tongue X Scale", 0.90000000000000002), + ], + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, + input_kwargs={"Vector": group_input.outputs["length_rad1_rad2"]}, + ) + + scale = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: (0.33000000000000002, 0.33000000000000002, 0.33000000000000002), + "Scale": separate_xyz.outputs["X"], + }, + attrs={"operation": "SCALE"}, + ) + + polarbezier = nw.new_node( + nodegroup_polar_bezier().name, + input_kwargs={ + "angles_deg": (0.0, 0.0, 13.0), + "Seg Lengths": scale.outputs["Vector"], + }, + ) + + position = nw.new_node(Nodes.InputPosition) + + vector_curves = nw.new_node(Nodes.VectorCurve, input_kwargs={"Vector": position}) + node_utils.assign_curve( + vector_curves.mapping.curves[0], + [ + (-1.0, -1.0), + (0.0035999999999999999, 0.0), + (0.24360000000000001, 0.20999999999999999), + (1.0, 1.0), + ], + ) + node_utils.assign_curve( + vector_curves.mapping.curves[1], + [ + (-1.0, 0.12), + (-0.77449999999999997, 0.059999999999999998), + (-0.65090000000000003, -0.44), + (-0.36730000000000002, -0.40000000000000002), + (-0.0545, -0.01), + (0.1055, 0.02), + (0.52729999999999999, 0.5), + (0.7964, 0.64000000000000001), + (1.0, 1.0), + ], + handles=[ + "AUTO", + "AUTO", + "AUTO", + "AUTO_CLAMPED", + "AUTO", + "AUTO", + "VECTOR", + "AUTO", + "AUTO", + ], + ) + node_utils.assign_curve(vector_curves.mapping.curves[2], [(-1.0, -1.0), (1.0, 1.0)]) + + warped_circle_curve = nw.new_node( + nodegroup_warped_circle_curve().name, input_kwargs={"Position": vector_curves} + ) + + spline_parameter = nw.new_node(Nodes.SplineParameter) + + float_curve = nw.new_node( + Nodes.FloatCurve, + input_kwargs={ + "Factor": group_input.outputs["Width Shaping"], + "Value": spline_parameter.outputs["Factor"], + }, + ) + node_utils.assign_curve( + 
float_curve.mapping.curves[0], + [ + (0.0, 0.95499999999999996), + (0.42549999999999999, 0.78500000000000003), + (0.65449999999999997, 0.53500000000000003), + (0.94910000000000005, 0.75), + (1.0, 0.59499999999999997), + ], + handles=["AUTO", "AUTO", "AUTO", "AUTO_CLAMPED", "AUTO"], + ) + + smoothtaper = nw.new_node( + nodegroup_smooth_taper().name, + input_kwargs={ + "start_rad": separate_xyz.outputs["Y"], + "end_rad": separate_xyz.outputs["Z"], + "fullness": 2.6000000000000001, + }, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: float_curve, 1: smoothtaper}, + attrs={"operation": "MULTIPLY"}, + ) + + profilepart = nw.new_node( + nodegroup_profile_part().name, + input_kwargs={ + "Skeleton Curve": polarbezier.outputs["Curve"], + "Profile Curve": warped_circle_curve, + "Radius Func": multiply, + }, + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": profilepart, "Scale": (1.0, 1.7, 1.0)}, + ) + + greater_than = nw.new_node( + Nodes.Compare, input_kwargs={0: group_input.outputs["Canine Length"]} + ) + + scale_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: (0.33000000000000002, 0.33000000000000002, 0.33000000000000002), + "Scale": group_input.outputs["Canine Length"], + }, + attrs={"operation": "SCALE"}, + ) + + canine_tooth = nw.new_node( + nodegroup_simple_tube().name, + input_kwargs={ + "Seg Lengths": scale_1.outputs["Vector"], + "Start Radius": 0.014999999999999999, + "End Radius": 0.0030000000000000001, + }, + label="Canine Tooth", + ) + + attach_part = nw.new_node( + nodegroup_attach_part().name, + input_kwargs={ + "Skin Mesh": transform, + "Skeleton Curve": polarbezier.outputs["Curve"], + "Geometry": canine_tooth.outputs["Geometry"], + "Length Fac": 0.90000000000000002, + "Ray Rot": (1.5708, 0.12039999999999999, 1.5708), + "Rad": 1.0, + "Part Rot": (-17.600000000000001, -53.490000000000002, 0.0), + }, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": attach_part.outputs["Geometry"]} + ) + + symmetric_clone = nw.new_node( + nodegroup_symmetric_clone().name, input_kwargs={"Geometry": join_geometry} + ) + + switch_1 = nw.new_node( + Nodes.Switch, + input_kwargs={1: greater_than, 15: symmetric_clone.outputs["Both"]}, + ) + + greater_than_1 = nw.new_node( + Nodes.Compare, input_kwargs={0: group_input.outputs["Incisor Size"]} + ) + + add = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: attach_part.outputs["Position"], + 1: (0.014999999999999999, -0.050000000000000003, 0.0), + }, + ) + + multiply_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: add.outputs["Vector"], 1: (1.0, -1.0, 1.0)}, + attrs={"operation": "MULTIPLY"}, + ) + + add_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: add.outputs["Vector"], 1: multiply_1.outputs["Vector"]}, + ) + + multiply_add = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: add_1.outputs["Vector"], + 1: (0.5, 0.5, 0.5), + 2: (-0.02, 0.0, 0.0), + }, + attrs={"operation": "MULTIPLY_ADD"}, + ) + + quadratic_bezier = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Resolution": 6, + "Start": add.outputs["Vector"], + "Middle": multiply_add.outputs["Vector"], + "End": multiply_1.outputs["Vector"], + }, + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, input_kwargs={"Curve": quadratic_bezier} + ) + + transform_1 = nw.new_node(Nodes.Transform, input_kwargs={"Geometry": curve_to_mesh}) + + scale_2 = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: (3.0, 1.0, 0.59999999999999998), + "Scale": group_input.outputs["Incisor 
Size"], + }, + attrs={"operation": "SCALE"}, + ) + + cube = nw.new_node(Nodes.MeshCube, input_kwargs={"Size": scale_2.outputs["Vector"]}) + + subdivision_surface = nw.new_node( + Nodes.SubdivisionSurface, input_kwargs={"Mesh": cube, "Level": 3} + ) + + transform_2 = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": subdivision_surface} + ) + + instance_on_points = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={ + "Points": transform_1, + "Instance": transform_2, + "Rotation": (0.0, -1.5708, 0.0), + }, + ) + + subtract = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: (2.0, 2.0, 2.0), 1: group_input.outputs["Tooth Crookedness"]}, + attrs={"operation": "SUBTRACT"}, + ) + + random_value = nw.new_node( + Nodes.RandomValue, + input_kwargs={ + 0: subtract.outputs["Vector"], + 1: group_input.outputs["Tooth Crookedness"], + }, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + scale_instances = nw.new_node( + Nodes.ScaleInstances, + input_kwargs={ + "Instances": instance_on_points, + "Scale": random_value.outputs["Value"], + }, + ) + + scale_3 = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: (-3.0, -3.0, -3.0), + "Scale": group_input.outputs["Tooth Crookedness"], + }, + attrs={"operation": "SCALE"}, + ) + + scale_4 = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: (3.0, 3.0, 3.0), + "Scale": group_input.outputs["Tooth Crookedness"], + }, + attrs={"operation": "SCALE"}, + ) + + random_value_1 = nw.new_node( + Nodes.RandomValue, + input_kwargs={0: scale_3.outputs["Vector"], 1: scale_4.outputs["Vector"]}, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + deg2rad = nw.new_node( + nodegroup_deg2_rad().name, input_kwargs={"Deg": random_value_1.outputs["Value"]} + ) + + rotate_instances = nw.new_node( + Nodes.RotateInstances, + input_kwargs={"Instances": scale_instances, "Rotation": deg2rad}, + ) + + realize_instances = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": rotate_instances} + ) + + switch = nw.new_node( + Nodes.Switch, input_kwargs={1: greater_than_1, 15: realize_instances} + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [switch_1.outputs[6], switch.outputs[6]]}, + ) + + resample_curve = nw.new_node( + Nodes.ResampleCurve, input_kwargs={"Curve": polarbezier.outputs["Curve"]} + ) + + spline_parameter_1 = nw.new_node(Nodes.SplineParameter) + + float_curve_1 = nw.new_node( + Nodes.FloatCurve, + input_kwargs={ + "Factor": group_input.outputs["Tongue Shaping"], + "Value": spline_parameter_1.outputs["Factor"], + }, + ) + node_utils.assign_curve( + float_curve_1.mapping.curves[0], + [ + (0.0, 1.0), + (0.69820000000000004, 0.55000000000000004), + (0.97450000000000003, 0.34999999999999998), + (1.0, 0.17499999999999999), + ], + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={3: separate_xyz.outputs["Y"], 4: separate_xyz.outputs["Z"]}, + attrs={"clamp": False}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: float_curve_1, 1: map_range.outputs["Result"]}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_2, 1: 1.0}, + attrs={"operation": "MULTIPLY"}, + ) + + set_curve_radius = nw.new_node( + Nodes.SetCurveRadius, + input_kwargs={"Curve": resample_curve, "Radius": multiply_3}, + ) + + quadratic_bezier_1 = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={"Resolution": 3, "Middle": (0.0, 0.69999999999999996, 0.0)}, + ) + + curve_to_mesh_1 = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": set_curve_radius, + 
"Profile Curve": quadratic_bezier_1, + "Fill Caps": True, + }, + ) + + solidify = nw.new_node( + nodegroup_solidify().name, + input_kwargs={"Mesh": curve_to_mesh_1, "Distance": 0.02}, + ) + + set_shade_smooth = nw.new_node( + Nodes.SetShadeSmooth, input_kwargs={"Geometry": solidify, "Shade Smooth": False} + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": group_input.outputs["Tongue X Scale"], "Y": 1.0, "Z": 1.0}, + ) + + transform_3 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": set_shade_smooth, + "Rotation": (0.0, -0.015900000000000001, 0.0), + "Scale": combine_xyz, + }, + ) + + subdivision_surface_1 = nw.new_node( + Nodes.SubdivisionSurface, input_kwargs={"Mesh": transform_3, "Level": 2} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": transform, + "Skeleton Curve": polarbezier.outputs["Curve"], + "Teeth": join_geometry_1, + "Tongue": subdivision_surface_1, + }, + ) + + +@node_utils.to_nodegroup( + "nodegroup_carnivore_head", singleton=False, type="GeometryNodeTree" +) +def nodegroup_carnivore_head(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVector", "length_rad1_rad2", (0.0, 0.0, 0.0)), + ("NodeSocketVector", "snout_length_rad1_rad2", (0.0, 0.0, 0.0)), + ("NodeSocketFloat", "snout_y_scale", 0.62), + ("NodeSocketVectorXYZ", "Nose Bridge Scale", (1.0, 0.35, 0.9)), + ("NodeSocketVector", "Jaw Muscle Middle Coord", (0.24, 0.41, 1.3)), + ("NodeSocketVector", "Jaw StartRad, EndRad, Fullness", (0.06, 0.11, 1.5)), + ( + "NodeSocketVector", + "Jaw ProfileHeight, StartTilt, EndTilt", + (0.8, 33.1, 0.0), + ), + ("NodeSocketVector", "Lip Muscle Middle Coord", (0.95, 0.0, 1.5)), + ("NodeSocketVector", "Lip StartRad, EndRad, Fullness", (0.05, 0.09, 1.48)), + ( + "NodeSocketVector", + "Lip ProfileHeight, StartTilt, EndTilt", + (0.8, 0.0, -17.2), + ), + ("NodeSocketVector", "Forehead Muscle Middle Coord", (0.7, -1.32, 1.31)), + ( + "NodeSocketVector", + "Forehead StartRad, EndRad, Fullness", + (0.06, 0.05, 2.5), + ), + ( + "NodeSocketVector", + "Forehead ProfileHeight, StartTilt, EndTilt", + (0.3, 60.6, 66.0), + ), + ("NodeSocketFloat", "aspect", 1.0), + ("NodeSocketFloatDistance", "EyeRad", 0.03), + ("NodeSocketVector", "EyeOffset", (-0.2, 0.5, 0.2)), + ], + ) + + vector = nw.new_node(Nodes.Vector) + vector.vector = (-0.07, 0.0, 0.05) + + simple_tube_v2 = nw.new_node( + nodegroup_simple_tube_v2().name, + input_kwargs={ + "length_rad1_rad2": group_input.outputs["length_rad1_rad2"], + "angles_deg": (-5.67, 0.0, 0.0), + "aspect": group_input.outputs["aspect"], + "fullness": 3.63, + "Origin": vector, + }, + ) + + snout_origin = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: simple_tube_v2.outputs["Endpoint"], 1: (-0.1, 0.0, 0.0)}, + label="Snout Origin", + ) + + split_length_width1_width2 = nw.new_node( + Nodes.SeparateXYZ, + input_kwargs={"Vector": group_input.outputs["snout_length_rad1_rad2"]}, + label="Split Length / Width1 / Width2", + ) + + snout_seg_lengths = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: (0.33, 0.33, 0.33), + "Scale": split_length_width1_width2.outputs["X"], + }, + label="Snout Seg Lengths", + attrs={"operation": "SCALE"}, + ) + + bridge = nw.new_node( + nodegroup_simple_tube().name, + input_kwargs={ + "Origin": snout_origin.outputs["Vector"], + "Angles Deg": (-4.0, -4.5, -5.61), + "Seg Lengths": snout_seg_lengths.outputs["Vector"], + "Start Radius": 0.17, + "End 
Radius": 0.1, + "Fullness": 5.44, + }, + label="Bridge", + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": bridge.outputs["Geometry"], + "Translation": (0.0, 0.0, 0.03), + "Scale": group_input.outputs["Nose Bridge Scale"], + }, + ) + + snout = nw.new_node( + nodegroup_simple_tube().name, + input_kwargs={ + "Origin": snout_origin.outputs["Vector"], + "Angles Deg": (-3.0, -4.5, -5.61), + "Seg Lengths": snout_seg_lengths.outputs["Vector"], + "Start Radius": split_length_width1_width2.outputs["Y"], + "End Radius": split_length_width1_width2.outputs["Z"], + "Fullness": 2.0, + }, + label="Snout", + ) + + transform_1 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": snout.outputs["Geometry"], + "Translation": (0.0, 0.0, 0.03), + "Scale": (1.0, 0.7, 0.7), + }, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": 1.0, "Y": group_input.outputs["snout_y_scale"], "Z": 1.0}, + ) + + transform_2 = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": transform_1, "Scale": combine_xyz} + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [transform, transform_2]} + ) + + union = nw.new_node( + Nodes.MeshBoolean, + input_kwargs={ + "Mesh 2": [join_geometry, simple_tube_v2.outputs["Geometry"]], + "Self Intersection": True, + }, + attrs={"operation": "UNION"}, + ) + + curve_line_1 = nw.new_node( + Nodes.CurveLine, + input_kwargs={"Start": vector, "End": snout.outputs["Endpoint"]}, + ) + + scale = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: (0.33, 0.33, 0.33)}, + attrs={"operation": "SCALE"}, + ) + + jaw_cutter = nw.new_node( + nodegroup_simple_tube().name, + input_kwargs={ + "Origin": (0.0, 0.0, 0.09), + "Angles Deg": (0.0, 0.0, 0.0), + "Seg Lengths": scale.outputs["Vector"], + "Start Radius": 0.13, + }, + label="Jaw Cutter", + ) + + attach_part = nw.new_node( + nodegroup_attach_part().name, + input_kwargs={ + "Skin Mesh": union.outputs["Mesh"], + "Skeleton Curve": curve_line_1, + "Geometry": jaw_cutter.outputs["Geometry"], + "Length Fac": 0.2, + "Ray Rot": (0.0, 1.5708, 0.0), + "Rad": 1.25, + "Part Rot": (0.0, -8.5, 0.0), + "Do Tangent Rot": True, + }, + ) + + mammaleye = nw.new_node( + nodegroup_mammal_eye().name, + input_kwargs={"Radius": group_input.outputs["EyeRad"]}, + ) + + reroute_4 = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": group_input.outputs["length_rad1_rad2"]} + ) + + separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": reroute_4}) + + reroute_3 = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": simple_tube_v2.outputs["Endpoint"]} + ) + + multiply_add = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: group_input.outputs["EyeOffset"], + 1: separate_xyz.outputs["Z"], + 2: reroute_3, + }, + attrs={"operation": "MULTIPLY_ADD"}, + ) + + transform_4 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": mammaleye.outputs["ParentCutter"], + "Translation": multiply_add.outputs["Vector"], + }, + ) + + symmetric_clone = nw.new_node( + nodegroup_symmetric_clone().name, input_kwargs={"Geometry": transform_4} + ) + + difference = nw.new_node( + Nodes.MeshBoolean, + input_kwargs={ + "Mesh 1": union.outputs["Mesh"], + "Mesh 2": [ + attach_part.outputs["Geometry"], + symmetric_clone.outputs["Both"], + ], + "Self Intersection": True, + }, + ) + + jaw_muscle = nw.new_node( + nodegroup_surface_muscle().name, + input_kwargs={ + "Skin Mesh": union.outputs["Mesh"], + "Skeleton Curve": curve_line_1, + "Coord 0": (0.19, -0.41, 0.78), + "Coord 1": 
group_input.outputs["Jaw Muscle Middle Coord"], + "Coord 2": (0.67, 1.26, 0.52), + "StartRad, EndRad, Fullness": group_input.outputs[ + "Jaw StartRad, EndRad, Fullness" + ], + "ProfileHeight, StartTilt, EndTilt": group_input.outputs[ + "Jaw ProfileHeight, StartTilt, EndTilt" + ], + }, + label="Jaw Muscle", + ) + + lip = nw.new_node( + nodegroup_surface_muscle().name, + input_kwargs={ + "Skin Mesh": union.outputs["Mesh"], + "Skeleton Curve": curve_line_1, + "Coord 0": (0.51, -0.13, 0.02), + "Coord 1": group_input.outputs["Lip Muscle Middle Coord"], + "Coord 2": (0.99, 10.57, 0.1), + "StartRad, EndRad, Fullness": group_input.outputs[ + "Lip StartRad, EndRad, Fullness" + ], + "ProfileHeight, StartTilt, EndTilt": group_input.outputs[ + "Lip ProfileHeight, StartTilt, EndTilt" + ], + }, + label="Lip", + ) + + forehead = nw.new_node( + nodegroup_surface_muscle().name, + input_kwargs={ + "Skin Mesh": simple_tube_v2.outputs["Geometry"], + "Skeleton Curve": simple_tube_v2.outputs["Skeleton Curve"], + "Coord 0": (0.31, -1.06, 0.97), + "Coord 1": group_input.outputs["Forehead Muscle Middle Coord"], + "Coord 2": (0.95, -1.52, 0.9), + "StartRad, EndRad, Fullness": group_input.outputs[ + "Forehead StartRad, EndRad, Fullness" + ], + "ProfileHeight, StartTilt, EndTilt": group_input.outputs[ + "Forehead ProfileHeight, StartTilt, EndTilt" + ], + }, + label="Forehead", + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [jaw_muscle, lip, forehead]} + ) + + symmetric_clone_1 = nw.new_node( + nodegroup_symmetric_clone().name, input_kwargs={"Geometry": join_geometry_1} + ) + + join_geometry_2 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={ + "Geometry": [difference.outputs["Mesh"], symmetric_clone_1.outputs["Both"]] + }, + ) + + subdivide_curve = nw.new_node( + Nodes.SubdivideCurve, input_kwargs={"Curve": curve_line_1, "Cuts": 10} + ) + + transform_3 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": mammaleye.outputs["Eyeballl"], + "Translation": multiply_add.outputs["Vector"], + }, + ) + + symmetric_clone_2 = nw.new_node( + nodegroup_symmetric_clone().name, input_kwargs={"Geometry": transform_3} + ) + + transform_5 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": mammaleye.outputs["BodyExtra_Lid"], + "Translation": multiply_add.outputs["Vector"], + }, + ) + + symmetric_clone_3 = nw.new_node( + nodegroup_symmetric_clone().name, input_kwargs={"Geometry": transform_5} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": join_geometry_2, + "Skeleton Curve": subdivide_curve, + "Base Mesh": union.outputs["Mesh"], + "Eyeball_Left": symmetric_clone_2.outputs["Orig"], + "Eyeball_Right": symmetric_clone_2.outputs["Inverted"], + "BodyExtra_Lid": symmetric_clone_3.outputs["Both"], + }, + ) + + +@node_utils.to_nodegroup("nodegroup_neck", singleton=True, type="GeometryNodeTree") +def nodegroup_neck(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVector", "length_rad1_rad2", (1.0, 0.5, 0.3)), + ("NodeSocketVector", "angles_deg", (0.0, 3.2, -18.11)), + ( + "NodeSocketVector", + "Muscle StartRad, EndRad, Fullness", + (0.17, 0.17, 2.5), + ), + ("NodeSocketVector", "ProfileHeight, StartTilt, EndTilt", (0.5, 0.0, 66.0)), + ("NodeSocketFloat", "fullness", 5.0), + ("NodeSocketFloat", "aspect", 1.0), + ], + ) + + simple_tube_v2 = nw.new_node( + nodegroup_simple_tube_v2().name, + input_kwargs={ + "length_rad1_rad2": 
group_input.outputs["length_rad1_rad2"], + "angles_deg": group_input.outputs["angles_deg"], + "aspect": group_input.outputs["aspect"], + "fullness": group_input.outputs["fullness"], + }, + ) + + rear_top = nw.new_node( + nodegroup_surface_muscle().name, + input_kwargs={ + "Skin Mesh": simple_tube_v2.outputs["Geometry"], + "Skeleton Curve": simple_tube_v2.outputs["Skeleton Curve"], + "Coord 0": (0.1, 0.0, 0.9), + "Coord 1": (0.48, -0.77, 1.0), + "Coord 2": (0.87, -1.5708, 0.8), + "StartRad, EndRad, Fullness": group_input.outputs[ + "Muscle StartRad, EndRad, Fullness" + ], + "ProfileHeight, StartTilt, EndTilt": group_input.outputs[ + "ProfileHeight, StartTilt, EndTilt" + ], + }, + label="Rear Top", + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": rear_top} + ) + + symmetric_clone = nw.new_node( + nodegroup_symmetric_clone().name, input_kwargs={"Geometry": join_geometry_1} + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={ + "Geometry": [ + symmetric_clone.outputs["Both"], + simple_tube_v2.outputs["Geometry"], + ] + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": join_geometry, + "Skeleton Curve": simple_tube_v2.outputs["Skeleton Curve"], + "Base Mesh": simple_tube_v2.outputs["Geometry"], + }, + ) + + +class Neck(PartFactory): + tags = ["neck"] + + def sample_params(self): + return { + "length_rad1_rad2": np.array((0.65, 0.35, 0.16)) * N(1, (0.2, 0, 0), 3), + "angles_deg": np.array((0.0, 3.2, -18.11)) * N(1, 0.2, 3), + "Muscle StartRad, EndRad, Fullness": (0.17, 0.17, 2.5), + "ProfileHeight, StartTilt, EndTilt": (0.5, 0.0, 66.0), + "fullness": 5.0, + "aspect": 1.0 * N(1, 0.05), + } + + def make_part(self, params): + part = part_util.nodegroup_to_part(nodegroup_neck, params) + part.joints = { + i: Joint(rest=(0, 0, 0), bounds=np.array([[-30, 0, -30], [30, 0, 30]])) + for i in np.linspace(0, 1, 4, endpoint=True) + } + tag_object(part.obj, "neck") + return part + + +class CarnivoreHead(PartFactory): + tags = ["head"] + + def sample_params(self): + params = { + "length_rad1_rad2": np.array((0.36, 0.20, 0.18)) * N(1, 0.2, 3), + "snout_length_rad1_rad2": np.array((0.22, 0.15, 0.15)) * N(1, 0.2, 3), + "aspect": N(1, 0.2), + } + + muscle_params = { + "Nose Bridge Scale": (1.0, 0.35, 0.9), + "Jaw Muscle Middle Coord": (0.24, 0.41, 1.3), + "Jaw StartRad, EndRad, Fullness": (0.06, 0.11, 1.5), + "Jaw ProfileHeight, StartTilt, EndTilt": (0.8, 33.1, 0.0), + "Lip Muscle Middle Coord": (0.95, 0.0, 1.5), + "Lip StartRad, EndRad, Fullness": (0.05, 0.09, 1.48), + "Lip ProfileHeight, StartTilt, EndTilt": (0.8, 0.0, -17.2), + "Forehead Muscle Middle Coord": (0.7, -1.32, 1.31), + "Forehead StartRad, EndRad, Fullness": (0.06, 0.05, 2.5), + "Forehead ProfileHeight, StartTilt, EndTilt": (0.3, 60.6, 66.0), + } + + for k, v in muscle_params.items(): + v = np.array(v) + v *= N(1, 0.05, len(v)) + params[k] = v + + params.update(muscle_params) + params["EyeRad"] = 0.023 * N(1, 0.3) + params["EyeOffset"] = np.array((-0.25, 0.45, 0.3)) + N(0, (0, 0.02, 0.03)) + + return params + + def make_part(self, params): + part = part_util.nodegroup_to_part(nodegroup_carnivore_head, params) + part.iks = {1.0: IKParams("head", rotation_weight=0.1, chain_length=1)} + part.settings["rig_extras"] = True + tag_object(part.obj, "carnivore_head") + return part + + +class CarnivoreJaw(PartFactory): + tags = ["head", "jaw"] + + def sample_params(self): + return { + "length_rad1_rad2": np.array((0.4, 0.12, 0.08)) * N(1, 0.1, 3), + "Width 
Shaping": 1.0 * clip_gaussian(1, 0.1, 0.5, 1), + "Canine Length": 0.05 * N(1, 0.2), + "Incisor Size": 0.01 * N(1, 0.2), + "Tooth Crookedness": 1.2 * N(1, 0.3), + "Tongue Shaping": 1 * clip_gaussian(1, 0.1, 0.5, 1), + "Tongue X Scale": 0.9 * clip_gaussian(1, 0.1, 0.5, 1), + } + + def make_part(self, params): + part = part_util.nodegroup_to_part(nodegroup_carnivore_jaw, params) + tag_object(part.obj, "carnivore_jaw") + return part + + +@node_utils.to_nodegroup( + "nodegroup_flying_bird_head", singleton=True, type="GeometryNodeTree" +) +def nodegroup_flying_bird_head(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ( + "NodeSocketVector", + "length_rad1_rad2", + (0.34999999999999998, 0.11, 0.17000000000000001), + ), + ("NodeSocketVector", "angles_deg", (0.0, -24.0, -20.0)), + ("NodeSocketVector", "eye_coord", (0.5, 0.0, 1.0)), + ("NodeSocketFloatDistance", "Radius", 0.040000000000000001), + ], + ) + + simple_tube_v2 = nw.new_node( + nodegroup_simple_tube_v2().name, + input_kwargs={ + "length_rad1_rad2": group_input.outputs["length_rad1_rad2"], + "angles_deg": group_input.outputs["angles_deg"], + "aspect": N(0.9, 0.05), + "fullness": 0.9, + "Origin": (-0.13, 0.0, 0.1), + }, + ) + + simple_tube_v2_1 = nw.new_node( + nodegroup_simple_tube_v2().name, + input_kwargs={ + "length_rad1_rad2": group_input.outputs["length_rad1_rad2"], + "angles_deg": group_input.outputs["angles_deg"], + "aspect": 1.1899999999999999, + "fullness": 2.25, + "Origin": (-0.13, 0.0, 0.1 - 0.040000000000000001), + }, + ) + + union = nw.new_node( + Nodes.MeshBoolean, + input_kwargs={ + "Mesh 2": [ + simple_tube_v2.outputs["Geometry"], + simple_tube_v2_1.outputs["Geometry"], + ] + }, + attrs={"operation": "UNION"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": simple_tube_v2.outputs["Geometry"], + "Skeleton Curve": simple_tube_v2.outputs["Skeleton Curve"], + }, + ) + + +@node_utils.to_nodegroup("nodegroup_bird_head", singleton=True, type="GeometryNodeTree") +def nodegroup_bird_head(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ( + "NodeSocketVector", + "length_rad1_rad2", + (0.34999999999999998, 0.11, 0.17000000000000001), + ), + ("NodeSocketVector", "angles_deg", (0.0, -24.0, -20.0)), + ("NodeSocketVector", "eye_coord", (0.5, 0.0, 1.0)), + ("NodeSocketFloatDistance", "Radius", 0.040000000000000001), + ], + ) + + simple_tube_v2 = nw.new_node( + nodegroup_simple_tube_v2().name, + input_kwargs={ + "length_rad1_rad2": group_input.outputs["length_rad1_rad2"], + "angles_deg": group_input.outputs["angles_deg"], + "aspect": 0.85999999999999999, + "fullness": 1.7, + "Origin": (-0.13, 0.0, 0.1), + }, + ) + + simple_tube_v2_1 = nw.new_node( + nodegroup_simple_tube_v2().name, + input_kwargs={ + "length_rad1_rad2": group_input.outputs["length_rad1_rad2"], + "angles_deg": group_input.outputs["angles_deg"], + "aspect": 1.1899999999999999, + "fullness": 2.25, + "Origin": (-0.13, 0.0, 0.1 - 0.040000000000000001), + }, + ) + + union = nw.new_node( + Nodes.MeshBoolean, + input_kwargs={ + "Mesh 2": [ + simple_tube_v2.outputs["Geometry"], + simple_tube_v2_1.outputs["Geometry"], + ] + }, + attrs={"operation": "UNION"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": union.outputs["Mesh"], + "Skeleton Curve": simple_tube_v2.outputs["Skeleton Curve"], + }, + ) + + 
+class BirdHead(PartFactory): + tags = ["head"] + + def sample_params(self): + return { + "length_rad1_rad2": np.array((0.35, 0.11, 0.13)) + * N(1, 0.05) + * N(1, 0.1, 3), + "angles_deg": N(0, 5, 3), + "eye_coord": np.array((0.65, -0.32, 0.95)) * N(1, (0.1, 0.2, 0), 3), + "Radius": 0.025 * N(1, 0.05), + } + + def make_part(self, params): + part = part_util.nodegroup_to_part(nodegroup_bird_head, params) + part.iks = {1.0: IKParams("head", rotation_weight=0.1, chain_parts=2)} + part.settings["rig_extras"] = True + tag_object(part.obj, "bird_head") + return part + + +class FlyingBirdHead(PartFactory): + tags = ["head"] + + def sample_params(self): + return { + "length_rad1_rad2": np.array((0.3, 0.04, 0.12)) * N(1, 0.05, size=(3,)), + "angles_deg": N(0, 0.1, 3), + "eye_coord": np.array((0.65, -0.32, 0.95)) * N(1, (0.1, 0.2, 0), 3), + "Radius": 0.03 * N(1, 0.05), + } + + def make_part(self, params): + part = part_util.nodegroup_to_part(nodegroup_flying_bird_head, params) + part.iks = {1.0: IKParams("head", rotation_weight=0.1, chain_parts=2)} + part.settings["rig_extras"] = True + tag_object(part.obj, "bird_head") + return part diff --git a/infinigen/assets/objects/creatures/parts/head_detail.py b/infinigen/assets/objects/creatures/parts/head_detail.py new file mode 100644 index 000000000..1382d9282 --- /dev/null +++ b/infinigen/assets/objects/creatures/parts/head_detail.py @@ -0,0 +1,299 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Alexander Raistrick + + +import numpy as np +from numpy.random import normal as N +from numpy.random import uniform as U + +from infinigen.assets.objects.creatures.util.creature import PartFactory +from infinigen.assets.objects.creatures.util.genome import Joint +from infinigen.assets.objects.creatures.util.part_util import nodegroup_to_part +from infinigen.assets.utils.nodegroups.curve import ( + nodegroup_polar_bezier, + nodegroup_simple_tube_v2, +) +from infinigen.assets.utils.nodegroups.geometry import ( + nodegroup_solidify, + nodegroup_symmetric_clone, + nodegroup_taper, +) +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.tagging import tag_object +from infinigen.core.util import blender as butil +from infinigen.core.util.math import clip_gaussian + + +@node_utils.to_nodegroup("nodegroup_cat_ear", singleton=False, type="GeometryNodeTree") +def nodegroup_cat_ear(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVector", "length_rad1_rad2", (0.0, 0.0, 0.0)), + ("NodeSocketFloat", "Depth", 0.0), + ("NodeSocketFloatDistance", "Thickness", 0.0), + ("NodeSocketFloatDistance", "Curl Deg", 0.0), + ], + ) + + multiply = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: group_input.outputs["Curl Deg"], 1: (-1.0, 1.0, 1.0)}, + attrs={"operation": "MULTIPLY"}, + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, + input_kwargs={"Vector": group_input.outputs["length_rad1_rad2"]}, + ) + + divide = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["X"], 1: 3.0}, + attrs={"operation": "DIVIDE"}, + ) + + polarbezier = nw.new_node( + nodegroup_polar_bezier().name, + input_kwargs={ + "Origin": (-0.07, 0.0, 0.0), + "angles_deg": multiply.outputs["Vector"], + "Seg Lengths": divide, + }, + ) + + spline_parameter = 
nw.new_node(Nodes.SplineParameter) + + float_curve = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": spline_parameter.outputs["Factor"]} + ) + node_utils.assign_curve( + float_curve.mapping.curves[0], + [(0.0, 0.0), (0.3236, 0.98), (0.7462, 0.63), (1.0, 0.0)], + ) + + set_curve_radius = nw.new_node( + Nodes.SetCurveRadius, + input_kwargs={"Curve": polarbezier.outputs["Curve"], "Radius": float_curve}, + ) + + set_curve_tilt = nw.new_node( + Nodes.SetCurveTilt, input_kwargs={"Curve": set_curve_radius} + ) + + multiply_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: separate_xyz.outputs["Y"], 1: (-0.5, 0.0, 0.0)}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_2 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: group_input.outputs["Depth"], 1: (0.0, -1.0, 0.0)}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_3 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: separate_xyz.outputs["Y"], 1: (0.5, 0.0, 0.0)}, + attrs={"operation": "MULTIPLY"}, + ) + + quadratic_bezier = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Start": multiply_1.outputs["Vector"], + "Middle": multiply_2.outputs["Vector"], + "End": multiply_3.outputs["Vector"], + }, + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={"Curve": set_curve_tilt, "Profile Curve": quadratic_bezier}, + ) + + solidify = nw.new_node( + nodegroup_solidify().name, + input_kwargs={ + "Mesh": curve_to_mesh, + "Distance": group_input.outputs["Thickness"], + }, + ) + + merge_by_distance = nw.new_node( + Nodes.MergeByDistance, input_kwargs={"Geometry": solidify, "Distance": 0.005} + ) + + subdivision_surface = nw.new_node( + Nodes.SubdivisionSurface, input_kwargs={"Mesh": merge_by_distance} + ) + + set_shade_smooth = nw.new_node( + Nodes.SetShadeSmooth, + input_kwargs={"Geometry": subdivision_surface, "Shade Smooth": False}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_shade_smooth} + ) + + +class CatEar(PartFactory): + tags = ["head_detail"] + + def sample_params(self): + size = clip_gaussian(1, 0.1, 0.2, 5) + return { + "length_rad1_rad2": np.array((0.25, 0.1, 0.0)) * N(1, (0.1, 0.05, 0.05)), + "Depth": 0.06 * N(1, 0.1), + "Thickness": 0.01, + "Curl Deg": 49.0 * N(1, 0.2), + } + + def make_part(self, params): + part = nodegroup_to_part(nodegroup_cat_ear, params) + tag_object(part.obj, "cat_ear") + return part + + +@node_utils.to_nodegroup("nodegroup_cat_nose", singleton=False, type="GeometryNodeTree") +def nodegroup_cat_nose(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloatDistance", "Nose Radius", 0.06), + ("NodeSocketFloatDistance", "Nostril Size", 0.025), + ("NodeSocketFloatFactor", "Crease", 0.008), + ("NodeSocketVectorXYZ", "Scale", (1.2, 1.0, 1.0)), + ], + ) + + cube = nw.new_node( + Nodes.MeshCube, input_kwargs={"Size": group_input.outputs["Nose Radius"]} + ) + + subdivision_surface = nw.new_node( + Nodes.SubdivisionSurface, + input_kwargs={ + "Mesh": cube, + "Level": 4, + "Edge Crease": group_input.outputs["Crease"], + }, + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": subdivision_surface, + "Scale": group_input.outputs["Scale"], + }, + ) + + uv_sphere = nw.new_node( + Nodes.MeshUVSphere, input_kwargs={"Radius": group_input.outputs["Nostril Size"]} + ) + + transform_1 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": uv_sphere, + "Translation": (0.04, 0.025, 0.015), + 
"Rotation": (0.5643, 0.0, 0.0), + "Scale": (1.0, 0.87, 0.31), + }, + ) + + symmetric_clone = nw.new_node( + nodegroup_symmetric_clone().name, input_kwargs={"Geometry": transform_1} + ) + + difference = nw.new_node( + Nodes.MeshBoolean, + input_kwargs={ + "Mesh 1": transform, + "Mesh 2": symmetric_clone.outputs["Both"], + "Self Intersection": True, + }, + ) + + taper = nw.new_node(nodegroup_taper().name, input_kwargs={"Geometry": difference}) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Geometry": taper}) + + +class CatNose(PartFactory): + tags = ["head_detail"] + + def sample_params(self): + size_mult = N(0.7, 0.05) + return { + "Nose Radius": 0.11 * size_mult, + "Nostril Size": 0.03 * size_mult * N(1, 0.1), + "Crease": 0.237 * N(1, 0.1), + } + + def make_part(self, params): + part = nodegroup_to_part(nodegroup_cat_nose, params) + nose = part.obj + nose.name = "Nose" + part.obj = butil.spawn_vert("nose_parent") + nose.parent = part.obj + tag_object(part.obj, "cat_nose") + return part + + +@node_utils.to_nodegroup("nodegroup_mandible", singleton=False, type="GeometryNodeTree") +def nodegroup_mandible(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVector", "length_rad1_rad2", (1.1, 0.1, 0.02)), + ("NodeSocketVector", "angles_deg", (-4.4, 58.22, 77.96)), + ("NodeSocketFloat", "aspect", 0.52), + ], + ) + + simple_tube_v2 = nw.new_node( + nodegroup_simple_tube_v2().name, + input_kwargs={ + "length_rad1_rad2": group_input.outputs["length_rad1_rad2"], + "angles_deg": group_input.outputs["angles_deg"], + "aspect": group_input.outputs["aspect"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": simple_tube_v2.outputs["Geometry"], + "Skeleton Curve": simple_tube_v2.outputs["Skeleton Curve"], + "Endpoint": simple_tube_v2.outputs["Endpoint"], + }, + ) + + +class InsectMandible(PartFactory): + tags = ["head_detail", "rigid", "bald"] + + def sample_params(self): + return { + "length_rad1_rad2": (1.1 * U(0.2, 1), 0.1 * N(1, 0.2), 0.02 * N(1, 0.1)), + "angles_deg": np.array((-4.4, 58.22, 77.96)) * N(1, 0.2, 3), + "aspect": U(0.3, 1), + } + + def make_part(self, params): + part = nodegroup_to_part(nodegroup_mandible, params) + part.joints = {0.4: Joint(rest=(0, 0, 0), bounds=np.zeros((2, 3)))} + tag_object(part.obj, "insect_mandible") + return part diff --git a/infinigen/assets/objects/creatures/parts/hoof.py b/infinigen/assets/objects/creatures/parts/hoof.py new file mode 100644 index 000000000..70a67c696 --- /dev/null +++ b/infinigen/assets/objects/creatures/parts/hoof.py @@ -0,0 +1,272 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
+ +# Authors: Hongyu Wen + + +from math import cos, pi, sin + +import bmesh +import bpy +import mathutils +import numpy as np +from numpy.random import normal as N + +from infinigen.assets.objects.creatures.util.creature import Part, PartFactory +from infinigen.assets.objects.creatures.util.genome import IKParams +from infinigen.assets.objects.creatures.util.part_util import nodegroup_to_part +from infinigen.assets.utils.geometry import nurbs as nurbs_util +from infinigen.assets.utils.nodegroups.attach import nodegroup_surface_muscle +from infinigen.assets.utils.nodegroups.curve import nodegroup_simple_tube_v2 +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.tagging import tag_object +from infinigen.core.util import blender as butil + + +def square(x): + return x * x + + +def tri(x): + return x**3 + + +class Hoof: + def __init__(self, **kwargs): + self.__dict__.update(kwargs) + + def scale(self, p): + return 1 - 0.2 * p + + def transform(self, p): + return -0.6 * p + + def down(self, p, theta): + return 0.4 * p * cos(theta) + + def get_shape(self): + points = [] + r = self.r + + def N(m, v): + return np.random.normal(m, v) + + for i in range(self.m): + theta = 2 * pi * i / (self.m) + nx = N(0, 0.01) + ny = N(0, 0.01) + if i >= self.m - r or i <= r: + points.append((-0.2 * cos(theta) + nx, 0.05 * sin(theta) + ny)) + elif i >= self.m - 2 * r or i <= 2 * r: + points.append((cos(theta) + nx, 0.2 * sin(theta) + ny)) + # elif i >= self.m - 4 * r or i <= 4 * r: + # points.append((cos(theta) + nx, 0.6 * sin(theta) + ny)) + else: + points.append((cos(theta) + nx, sin(theta) + ny)) + return points + + def make_face(self, obj): + bm = bmesh.new() + for v in obj.data.vertices: + x, y, z = obj.matrix_world @ v.co + if z == 0: + bm.verts.new((x, y, z)) + bm.faces.new(bm.verts) + bm.normal_update() + bm.from_mesh(obj.data) + butil.delete(obj) + + me = bpy.data.meshes.new("face") + bm.to_mesh(me) + # add bmesh to scene + ob = bpy.data.objects.new("face", me) + bpy.context.scene.collection.objects.link(ob) + return ob + + def generate(self): + self.n = int(self.n) + self.m = int(self.m) + + points = self.get_shape() + ctrls = np.zeros((self.n, self.m, 3)) + for i in range(self.n): + for j in range(self.m): + p = i / (self.n - 1) + theta = 2 * pi * j / (self.m) + ctrls[i][j][0] = self.scale(p) * points[j][0] + self.transform(p) + ctrls[i][j][1] = self.scale(p) * points[j][1] # + self.transform(p) + ctrls[i][j][2] = p + self.down(p, theta) + ctrls[i][j][0] *= self.sx + ctrls[i][j][1] *= self.sy + ctrls[i][j][2] *= self.sz + + method = "blender" if False else "geomdl" + + obj = nurbs_util.nurbs(ctrls, method, face_size=0.01) + obj = self.make_face(obj) + + top_pos = mathutils.Vector(ctrls[-1].mean(axis=0)) + with butil.CursorLocation(top_pos), butil.SelectObjects(obj): + bpy.ops.object.origin_set(type="ORIGIN_CURSOR") + obj.location = (0, 0, 0) + + obj.rotation_euler.y -= np.pi / 2 + butil.apply_transform(obj, rot=True) + tag_object(obj, "hoof") + + return obj + + +class HoofClaw(PartFactory): + param_templates = {} + tags = ["head_detail", "rigid"] + + def sample_params(self, select=None, var=1): + params = { + "n": 20, + "m": 20, + "sx": 0.1 * N(1, 0.05), + "sy": 0.1 * N(1, 0.05), + "sz": 0.08 * N(1, 0.05), + "r": 0.5 + N(0, 1), + } + return params + + def make_part(self, params): + obj = butil.spawn_vert("hoofclaw_parent_temp") + + hoof = Hoof(**params).generate() + hoof.parent = obj + hoof.name = "HoofClaw" + + part = 
Part(skeleton=np.zeros((1, 3)), obj=obj, joints={}, iks={}) + tag_object(part.obj, "hoof_claw") + return part + + +@node_utils.to_nodegroup("nodegroup_hoof", singleton=False, type="GeometryNodeTree") +def nodegroup_hoof(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ( + "NodeSocketVector", + "length_rad1_rad2", + (1.4299999999999999, 0.10000000000000001, 0.10000000000000001), + ), + ("NodeSocketVector", "angles_deg", (-20.0, 16.0, 9.1999999999999993)), + ("NodeSocketFloat", "aspect", 1.0), + ("NodeSocketVector", "Upper Rad1 Rad2 Fullness", (0.22, 0.0, 0.0)), + ("NodeSocketVector", "Lower Rad1 Rad2 Fullness", (0.0, 0.0, 0.0)), + ( + "NodeSocketVector", + "Height, Tilt1, Tilt2", + (0.73999999999999999, 0.0, 0.0), + ), + ], + ) + + simple_tube_v2_001 = nw.new_node( + nodegroup_simple_tube_v2().name, + input_kwargs={ + "length_rad1_rad2": group_input.outputs["length_rad1_rad2"], + "angles_deg": group_input.outputs["angles_deg"], + "aspect": group_input.outputs["aspect"], + "fullness": 2.5, + }, + ) + + shoulder = nw.new_node( + nodegroup_surface_muscle().name, + input_kwargs={ + "Skin Mesh": simple_tube_v2_001.outputs["Geometry"], + "Skeleton Curve": simple_tube_v2_001.outputs["Skeleton Curve"], + "Coord 0": (0.0, 0.0, 0.0), + "Coord 1": (0.20000000000000001, 0.0, 0.0), + "Coord 2": (0.55000000000000004, 0.0, 0.0), + "StartRad, EndRad, Fullness": group_input.outputs[ + "Lower Rad1 Rad2 Fullness" + ], + "ProfileHeight, StartTilt, EndTilt": group_input.outputs[ + "Height, Tilt1, Tilt2" + ], + }, + label="Shoulder", + ) + + shoulder_1 = nw.new_node( + nodegroup_surface_muscle().name, + input_kwargs={ + "Skin Mesh": simple_tube_v2_001.outputs["Geometry"], + "Skeleton Curve": simple_tube_v2_001.outputs["Skeleton Curve"], + "Coord 0": (1.0, 0.0, 0.0), + "Coord 1": (0.20000000000000001, 0.0, 0.0), + "Coord 2": (0.80000000000000004, 0.0, 0.0), + "StartRad, EndRad, Fullness": group_input.outputs[ + "Upper Rad1 Rad2 Fullness" + ], + "ProfileHeight, StartTilt, EndTilt": group_input.outputs[ + "Height, Tilt1, Tilt2" + ], + }, + label="Shoulder", + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={ + "Geometry": [shoulder, simple_tube_v2_001.outputs["Geometry"], shoulder_1] + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": join_geometry, + "Skeleton Curve": simple_tube_v2_001.outputs["Skeleton Curve"], + }, + ) + + +class HoofAnkle(PartFactory): + tags = ["foot_detail", "rigid"] + ankle_scale = (0.8, 0.8, 0.8) + + def sample_params(self, var=1): + ankle = { + "length_rad1_rad2": ( + 0.45 * N(1, 0.05), + 0.07 * N(1, 0.05), + 0.1 * N(1, 0.05), + ), + "angles_deg": (-90.0 + N(0, 5), 40.0 + N(0, 5), N(0, 5)), + "aspect": 1.0, + "Upper Rad1 Rad2 Fullness": (0.2, 0.0, 4), + "Lower Rad1 Rad2 Fullness": (0.15, 0.0, 4), + "Height, Tilt1, Tilt2": (1, 0.0, 0.0), + } + return ankle + + def make_part(self, params): + obj = butil.spawn_vert("hoof_parent_temp") + + part = nodegroup_to_part(nodegroup_hoof, params) + ankle = part.obj + with butil.SelectObjects(ankle): + bpy.ops.object.shade_flat() + butil.modify_mesh(ankle, "SUBSURF", apply=True, levels=2) + ankle.parent = obj + ankle.name = "HoofAnkle" + + ankle.scale = self.ankle_scale + butil.apply_transform(ankle, scale=True) + tag_object(part.obj, "hoof_ankle") + + part.iks = { + 1.0: IKParams("foot", rotation_weight=0.1, chain_parts=2, chain_length=-1) + } + + return part diff --git 
a/infinigen/assets/objects/creatures/parts/horn.py b/infinigen/assets/objects/creatures/parts/horn.py new file mode 100644 index 000000000..6f55adadd --- /dev/null +++ b/infinigen/assets/objects/creatures/parts/horn.py @@ -0,0 +1,364 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Hongyu Wen +# Acknowledgement: This file draws inspiration from https://www.youtube.com/watch?v=5BXvwqVyCQw by Artisans of Vaul + + +import bpy +import numpy as np + +from infinigen.assets.objects.creatures.util import part_util +from infinigen.assets.objects.creatures.util.creature import PartFactory +from infinigen.assets.objects.creatures.util.part_util import nodegroup_to_part +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.tagging import tag_object +from infinigen.core.util import blender as butil + + +@node_utils.to_nodegroup("nodegroup_noise", singleton=False, type="GeometryNodeTree") +def nodegroup_noise(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketFloat", "Scale", 0.05), + ("NodeSocketFloat", "W", 0.0), + ], + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={"W": group_input.outputs["W"], "Roughness": 0.0}, + attrs={"noise_dimensions": "4D"}, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: noise_texture.outputs["Color"]}, + attrs={"operation": "SUBTRACT"}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract, 1: group_input.outputs["Scale"]}, + attrs={"operation": "MULTIPLY"}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={"Geometry": group_input.outputs["Geometry"], "Offset": multiply}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_position} + ) + + +@node_utils.to_nodegroup("nodegroup_ridge", singleton=False, type="GeometryNodeTree") +def nodegroup_ridge(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "thickness", 4.0), + ("NodeSocketFloat", "depth_of_ridge", 0.2), + ("NodeSocketInt", "number_of_ridge", 150), + ("NodeSocketGeometry", "geometry", None), + ], + ) + + resample_curve = nw.new_node( + Nodes.ResampleCurve, + input_kwargs={ + "Curve": group_input.outputs["geometry"], + "Count": group_input.outputs["number_of_ridge"], + }, + ) + + spline_parameter = nw.new_node(Nodes.SplineParameter) + + float_curve = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": spline_parameter.outputs["Factor"]} + ) + node_utils.assign_curve( + float_curve.mapping.curves[0], + [ + (0.0, 1.0), + (0.2, 0.9), + (0.3705, 0.7406), + (0.55, 0.5938), + (0.6886, 0.4188), + (0.85, 0.1844), + (1.0, 0.0), + ], + ) + + modulo = nw.new_node( + Nodes.Math, + input_kwargs={0: spline_parameter.outputs["Index"], 1: 5.0}, + attrs={"operation": "MODULO"}, + ) + + power = nw.new_node( + Nodes.Math, input_kwargs={0: -1.0, 1: modulo}, attrs={"operation": "POWER"} + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["depth_of_ridge"], 1: power}, + attrs={"operation": "MULTIPLY"}, + ) + + add = nw.new_node(Nodes.Math, input_kwargs={0: 1.0, 1: multiply}) + + multiply_1 = 
nw.new_node( + Nodes.Math, + input_kwargs={0: float_curve, 1: add}, + attrs={"operation": "MULTIPLY"}, + ) + + noise_texture = nw.new_node(Nodes.NoiseTexture) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: noise_texture.outputs["Color"]}, + attrs={"operation": "SUBTRACT"}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract, 1: group_input.outputs["depth_of_ridge"]}, + attrs={"operation": "MULTIPLY"}, + ) + + add_1 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_1, 1: multiply_2}) + + multiply_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: add_1, 1: group_input.outputs["thickness"]}, + attrs={"operation": "MULTIPLY"}, + ) + + set_curve_radius = nw.new_node( + Nodes.SetCurveRadius, + input_kwargs={"Curve": resample_curve, "Radius": multiply_3}, + ) + + noise = nw.new_node( + nodegroup_noise().name, + input_kwargs={"Geometry": set_curve_radius, "Scale": 0.02}, + label="Noise", + ) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Geometry": noise}) + + +@node_utils.to_nodegroup("nodegroup_horn", singleton=False, type="GeometryNodeTree") +def nodegroup_horn(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "length", 0.0), + ("NodeSocketFloat", "rad1", 0.0), + ("NodeSocketFloat", "rad2", 0.0), + ("NodeSocketFloat", "thickness", 4.0), + ("NodeSocketFloat", "density_of_ridge", 0.0), + ("NodeSocketFloat", "depth_of_ridge", 0.2), + ("NodeSocketFloatDistance", "height", 2.5), + ("NodeSocketFloat", "rotation_x", 0), + ], + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["length"], + 1: group_input.outputs["density_of_ridge"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + add = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["rad1"], 1: group_input.outputs["rad2"]}, + ) + + # divide = nw.new_node(Nodes.Math, + # input_kwargs={0: add, 1: 2.0}, + # attrs={'operation': 'DIVIDE'}) + + divide_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["length"], 1: add}, + attrs={"operation": "DIVIDE"}, + ) + + divide_2 = nw.new_node( + Nodes.Math, input_kwargs={0: divide_1, 1: 3.1415}, attrs={"operation": "DIVIDE"} + ) + + spiral = nw.new_node( + "GeometryNodeCurveSpiral", + input_kwargs={ + "Resolution": 150, + "Rotations": divide_2, + "Start Radius": group_input.outputs["rad1"], + "End Radius": group_input.outputs["rad2"], + "Height": group_input.outputs["height"], + }, + ) + + ridge = nw.new_node( + nodegroup_ridge().name, + input_kwargs={ + "thickness": group_input.outputs["thickness"], + "depth_of_ridge": group_input.outputs["depth_of_ridge"], + "number_of_ridge": multiply, + "geometry": spiral, + }, + ) + + curve_circle_2 = nw.new_node( + Nodes.CurveCircle, input_kwargs={"Resolution": 10, "Radius": 0.5} + ) + + noise = nw.new_node( + nodegroup_noise().name, + input_kwargs={"Geometry": curve_circle_2.outputs["Curve"], "Scale": 0.2}, + label="Noise", + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, input_kwargs={"Curve": ridge, "Profile Curve": noise} + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["rad1"], 1: -1.0}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={"X": multiply_1}) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={"Geometry": curve_to_mesh, "Offset": combine_xyz}, + ) + + transform_1 = nw.new_node( + Nodes.Transform, + 
input_kwargs={"Geometry": set_position, "Rotation": (-0.8, 0.0, 2.6)}, + ) + + combine_xyz_2 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": group_input.outputs["rotation_x"]} + ) + + transform_2 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": transform_1, "Rotation": combine_xyz_2}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": transform_2} + ) + + +class Horn(PartFactory): + param_templates = {} + tags = ["head_detail", "rigid"] + + def sample_params(self, select=None, var=1): + def N(m, v): + return np.random.normal(m, v * var) + + def U(l, r): + return np.random.uniform(l, r) + + weights = part_util.random_convex_coord( + self.param_templates.keys(), select=select + ) + params = part_util.rdict_comb(self.param_templates, weights) + + for key in params["horn"]: + if key in params["range"]: + l, r = params["range"][key] + noise = N(0, 0.02 * (r - l)) + params["horn"][key] += noise + return params["horn"] + + def make_part(self, params): + part = nodegroup_to_part(nodegroup_horn, params) + horn = part.obj + + # postprocess horn + with butil.SelectObjects(horn): + bpy.ops.object.shade_flat() + horn.name = "Horn" + butil.modify_mesh(horn, "SUBSURF", apply=True, levels=2) + + # swap the horn to be an extra so it doesnt get remeshed etc + part.obj = butil.spawn_vert("horn_parent") + horn.parent = part.obj + tag_object(part.obj, "horn") + + return part + + +goat_horn = { + "length": 0.5, + "rad1": 0.18, + "rad2": 0.3, + "thickness": 0.15, + "density_of_ridge": 250, + "depth_of_ridge": 0.02, + "height": 0.1, + "rotation_x": 0, +} + +gazelle_horn = { + "length": 0.4, + "rad1": 0.7, + "rad2": 0.5, + "thickness": 0.1, + "density_of_ridge": 150, + "depth_of_ridge": 0.1, + "height": 0.1, + "rotation_x": 0, +} + +bull_horn = { + "length": 0.1, + "rad1": 0.5, + "rad2": 0.1, + "thickness": 0.1, + "density_of_ridge": 150, + "depth_of_ridge": 0.01, + "height": -0.1, + "rotation_x": -1, +} + +scales = { + "length": [0.1, 0.6], + "rad1": [0.1, 1], + "rad2": [0.1, 1], + "thickness": [0.05, 0.3], + "density_of_ridge": [100, 300], + "depth_of_ridge": [0.01, 0.1], + "height": [-0.3, 0.3], + "rotation_x": [-1, 1], +} + +for k, v in scales.items(): + scales[k] = np.array(v) + +Horn.param_templates["bull"] = {"horn": bull_horn, "range": scales} +Horn.param_templates["gazelle"] = {"horn": gazelle_horn, "range": scales} +Horn.param_templates["goat"] = {"horn": goat_horn, "range": scales} diff --git a/infinigen/assets/objects/creatures/parts/leg.py b/infinigen/assets/objects/creatures/parts/leg.py new file mode 100644 index 000000000..714010bde --- /dev/null +++ b/infinigen/assets/objects/creatures/parts/leg.py @@ -0,0 +1,566 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
+ +# Authors: Alexander Raistrick + + +import numpy as np +from numpy.random import normal as N +from numpy.random import uniform as U + +from infinigen.assets.objects.creatures.util.creature import PartFactory +from infinigen.assets.objects.creatures.util.genome import IKParams, Joint +from infinigen.assets.objects.creatures.util.part_util import nodegroup_to_part +from infinigen.assets.utils.nodegroups.attach import nodegroup_surface_muscle +from infinigen.assets.utils.nodegroups.curve import nodegroup_simple_tube_v2 +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.tagging import tag_object +from infinigen.core.util.math import clip_gaussian + + +@node_utils.to_nodegroup( + "nodegroup_quadruped_back_leg", singleton=False, type="GeometryNodeTree" +) +def nodegroup_quadruped_back_leg(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVector", "length_rad1_rad2", (1.8, 0.1, 0.05)), + ("NodeSocketVector", "angles_deg", (30.0, -100.0, 81.0)), + ("NodeSocketVector", "Thigh Rad1 Rad2 Fullness", (0.0, 0.0, 0.0)), + ("NodeSocketVector", "Calf Rad1 Rad2 Fullness", (0.0, 0.0, 0.0)), + ("NodeSocketVector", "Thigh Height Tilt1 Tilt2", (0.6, 0.0, 0.0)), + ("NodeSocketVector", "Calf Height Tilt1 Tilt2", (0.8, 0.0, 0.0)), + ("NodeSocketFloat", "fullness", 50.0), + ("NodeSocketFloat", "aspect", 1.0), + ], + ) + + simple_tube_v2 = nw.new_node( + nodegroup_simple_tube_v2().name, + input_kwargs={ + "length_rad1_rad2": group_input.outputs["length_rad1_rad2"], + "angles_deg": group_input.outputs["angles_deg"], + "aspect": group_input.outputs["aspect"], + "fullness": group_input.outputs["fullness"], + "Origin": (-0.05, 0.0, 0.0), + }, + ) + + thigh = nw.new_node( + nodegroup_surface_muscle().name, + input_kwargs={ + "Skin Mesh": simple_tube_v2.outputs["Geometry"], + "Skeleton Curve": simple_tube_v2.outputs["Skeleton Curve"], + "Coord 0": (0.02, 3.1416, 3.0), + "Coord 1": (0.1, -0.14, 1.47), + "Coord 2": (0.73, 4.71, 1.13), + "StartRad, EndRad, Fullness": group_input.outputs[ + "Thigh Rad1 Rad2 Fullness" + ], + "ProfileHeight, StartTilt, EndTilt": group_input.outputs[ + "Thigh Height Tilt1 Tilt2" + ], + }, + label="Thigh", + ) + + calf = nw.new_node( + nodegroup_surface_muscle().name, + input_kwargs={ + "Skin Mesh": simple_tube_v2.outputs["Geometry"], + "Skeleton Curve": simple_tube_v2.outputs["Skeleton Curve"], + "Coord 0": (0.51, 18.91, 0.4), + "Coord 1": (0.69, 0.26, 0.0), + "Coord 2": (0.94, 1.5708, 1.13), + "StartRad, EndRad, Fullness": group_input.outputs[ + "Calf Rad1 Rad2 Fullness" + ], + "ProfileHeight, StartTilt, EndTilt": group_input.outputs[ + "Calf Height Tilt1 Tilt2" + ], + }, + label="Calf", + ) + + thigh_2 = nw.new_node( + nodegroup_surface_muscle().name, + input_kwargs={ + "Skin Mesh": simple_tube_v2.outputs["Geometry"], + "Skeleton Curve": simple_tube_v2.outputs["Skeleton Curve"], + "Coord 0": (0.04, 3.1416, 0.0), + "Coord 1": (0.01, 3.46, -0.05), + "Coord 2": (0.73, 4.71, 0.9), + "StartRad, EndRad, Fullness": group_input.outputs[ + "Thigh Rad1 Rad2 Fullness" + ], + "ProfileHeight, StartTilt, EndTilt": group_input.outputs[ + "Thigh Height Tilt1 Tilt2" + ], + }, + label="Thigh 2", + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [thigh, calf, thigh_2]} + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [join_geometry, 
simple_tube_v2.outputs["Geometry"]]}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": join_geometry_1, + "Skeleton Curve": simple_tube_v2.outputs["Skeleton Curve"], + }, + ) + + +class QuadrupedBackLeg(PartFactory): + tags = ["leg"] + + def sample_params(self): + return { + "length_rad1_rad2": np.array((1.8, 0.1, 0.05)) * N(1, (0.2, 0, 0), 3), + "angles_deg": np.array((40.0, -120.0, 100)), + "fullness": 50.0, + "aspect": 1.0, + "Thigh Rad1 Rad2 Fullness": np.array( + (0.33, 0.15, 2.5), + ) + * N(1, 0.1, 3), + "Calf Rad1 Rad2 Fullness": np.array( + (0.17, 0.07, 2.5), + ) + * N(1, 0.1, 3), + "Thigh Height Tilt1 Tilt2": np.array( + (0.6, 0.0, 0.0), + ) + + N(0, [0.05, 2, 10]), + "Calf Height Tilt1 Tilt2": np.array((0.8, 0.0, 0.0)) + N(0, [0.05, 10, 10]), + } + + def make_part(self, params): + part = nodegroup_to_part(nodegroup_quadruped_back_leg, params) + part.joints = { + 0: Joint( + rest=(0, 0, 0), bounds=np.array([[-35, 0, -70], [35, 0, 70]]) + ), # shoulder + 0.5: Joint( + rest=(0, 0, 0), bounds=np.array([[-35, 0, -70], [35, 0, 70]]) + ), # elbow + } + tag_object(part.obj, "quadruped_back_leg") + return part + + +@node_utils.to_nodegroup( + "nodegroup_quadruped_front_leg", singleton=False, type="GeometryNodeTree" +) +def nodegroup_quadruped_front_leg(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVector", "length_rad1_rad2", (1.43, 0.1, 0.1)), + ("NodeSocketVector", "angles_deg", (-20.0, 16.0, 9.2)), + ("NodeSocketFloat", "aspect", 1.0), + ("NodeSocketVector", "Shoulder Rad1 Rad2 Fullness", (0.22, 0.0, 0.0)), + ("NodeSocketVector", "Calf Rad1 Rad2 Fullness", (0.0, 0.0, 0.0)), + ("NodeSocketVector", "Elbow Rad1 Rad2 Fullness", (0.0, 0.0, 0.0)), + ("NodeSocketVector", "Shoulder Height, Tilt1, Tilt2", (0.74, 0.0, 0.0)), + ("NodeSocketVector", "Elbow Height, Tilt1, Tilt2", (0.9, 0.0, 0.0)), + ("NodeSocketVector", "Calf Height, Tilt1, Tilt2", (0.74, 0.0, 0.0)), + ], + ) + + simple_tube_v2 = nw.new_node( + nodegroup_simple_tube_v2().name, + input_kwargs={ + "length_rad1_rad2": group_input.outputs["length_rad1_rad2"], + "angles_deg": group_input.outputs["angles_deg"], + "aspect": group_input.outputs["aspect"], + "fullness": 2.5, + "Origin": (-0.15, 0.0, 0.09), + }, + ) + + shoulder = nw.new_node( + nodegroup_surface_muscle().name, + input_kwargs={ + "Skin Mesh": simple_tube_v2.outputs["Geometry"], + "Skeleton Curve": simple_tube_v2.outputs["Skeleton Curve"], + "Coord 0": (0.0, 0.0, 0.0), + "Coord 1": (0.2, 0.0, 0.0), + "Coord 2": (0.55, 0.0, 0.0), + "StartRad, EndRad, Fullness": group_input.outputs[ + "Shoulder Rad1 Rad2 Fullness" + ], + "ProfileHeight, StartTilt, EndTilt": group_input.outputs[ + "Shoulder Height, Tilt1, Tilt2" + ], + }, + label="Shoulder", + ) + + elbow_2 = nw.new_node( + nodegroup_surface_muscle().name, + input_kwargs={ + "Skin Mesh": simple_tube_v2.outputs["Geometry"], + "Skeleton Curve": simple_tube_v2.outputs["Skeleton Curve"], + "Coord 0": (0.53, 1.5708, 1.69), + "Coord 1": (0.57, 0.0, 0.0), + "Coord 2": (0.95, 0.0, 0.0), + "StartRad, EndRad, Fullness": group_input.outputs[ + "Elbow Rad1 Rad2 Fullness" + ], + "ProfileHeight, StartTilt, EndTilt": group_input.outputs[ + "Elbow Height, Tilt1, Tilt2" + ], + }, + label="Elbow 2", + ) + + elbow_1 = nw.new_node( + nodegroup_surface_muscle().name, + input_kwargs={ + "Skin Mesh": simple_tube_v2.outputs["Geometry"], + "Skeleton Curve": simple_tube_v2.outputs["Skeleton Curve"], + 
"Coord 0": (0.22, 1.5708, 1.0), + "Coord 1": (0.4, 0.0, 0.0), + "Coord 2": (0.57, 1.571, 1.7), + "StartRad, EndRad, Fullness": group_input.outputs[ + "Elbow Rad1 Rad2 Fullness" + ], + "ProfileHeight, StartTilt, EndTilt": group_input.outputs[ + "Elbow Height, Tilt1, Tilt2" + ], + }, + label="Elbow 1", + ) + + forearm = nw.new_node( + nodegroup_surface_muscle().name, + input_kwargs={ + "Skin Mesh": simple_tube_v2.outputs["Geometry"], + "Skeleton Curve": simple_tube_v2.outputs["Skeleton Curve"], + "Coord 0": (0.41, -1.7008, 0.6), + "Coord 1": (0.57, 0.0, 0.8), + "Coord 2": (0.95, 0.0, 0.0), + "StartRad, EndRad, Fullness": group_input.outputs[ + "Calf Rad1 Rad2 Fullness" + ], + "ProfileHeight, StartTilt, EndTilt": group_input.outputs[ + "Calf Height, Tilt1, Tilt2" + ], + }, + label="Forearm", + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={ + "Geometry": [ + shoulder, + elbow_2, + elbow_1, + forearm, + simple_tube_v2.outputs["Geometry"], + ] + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": join_geometry, + "Skeleton Curve": simple_tube_v2.outputs["Skeleton Curve"], + }, + ) + + +class QuadrupedFrontLeg(PartFactory): + tags = ["leg"] + + def sample_params(self): + return { + "length_rad1_rad2": np.array((1.43, 0.1, 0.1)) * N(1, (0.2, 0, 0), 3), + "angles_deg": np.array((-40.0, 120.0, -100)), + "aspect": 1.0, + "Shoulder Rad1 Rad2 Fullness": np.array((0.22, 0.22, 2.5)) * N(1, 0.1, 3), + "Calf Rad1 Rad2 Fullness": np.array((0.08, 0.08, 2.5)) * N(1, 0.1, 3), + "Elbow Rad1 Rad2 Fullness": np.array((0.12, 0.1, 2.5) * N(1, 0.1, 3)), + "Shoulder Height, Tilt1, Tilt2": np.array((0.74, 0.0, 0.0)) + + N(0, [0.05, 10, 10]), + "Elbow Height, Tilt1, Tilt2": np.array((0.9, 0.0, 0.0)) + + N(0, [0.05, 10, 10]), + "Calf Height, Tilt1, Tilt2": np.array((0.74, 0.0, 0.0)) + + N(0, [0.05, 10, 10]), + } + + def make_part(self, params): + part = nodegroup_to_part(nodegroup_quadruped_front_leg, params) + part.joints = { + 0: Joint( + rest=(0, 0, 0), bounds=np.array([[-35, 0, -70], [35, 0, 70]]) + ), # shoulder + 0.6: Joint( + rest=(0, 0, 0), bounds=np.array([[-35, 0, -70], [35, 0, 70]]) + ), # elbow + } + tag_object(part.obj, "quadruped_front_leg") + return part + + +@node_utils.to_nodegroup("nodegroup_bird_leg", singleton=False, type="GeometryNodeTree") +def nodegroup_bird_leg(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVector", "length_rad1_rad2", (1.0, 0.09, 0.06)), + ("NodeSocketVector", "angles_deg", (-70.0, 90.0, -2.0)), + ("NodeSocketFloat", "aspect", 1.0), + ("NodeSocketFloat", "fullness", 8.0), + ("NodeSocketVector", "Thigh Rad1 Rad2 Fullness", (0.18, 0.1, 1.26)), + ("NodeSocketVector", "Shin Rad1 Rad2 Fullness", (0.07, 0.06, 5.0)), + ], + ) + + simple_tube_v2 = nw.new_node( + nodegroup_simple_tube_v2().name, + input_kwargs={ + "length_rad1_rad2": group_input.outputs["length_rad1_rad2"], + "angles_deg": group_input.outputs["angles_deg"], + "aspect": group_input.outputs["aspect"], + "fullness": group_input.outputs["fullness"], + }, + ) + + surface_muscle = nw.new_node( + nodegroup_surface_muscle().name, + input_kwargs={ + "Skin Mesh": simple_tube_v2.outputs["Geometry"], + "Skeleton Curve": simple_tube_v2.outputs["Skeleton Curve"], + "Coord 0": (0.0, 0.0, 0.0), + "Coord 1": (0.2, 0.0, 0.0), + "Coord 2": (0.4, 1.5708, 1.0), + "StartRad, EndRad, Fullness": group_input.outputs[ + "Thigh Rad1 Rad2 Fullness" + ], + 
"ProfileHeight, StartTilt, EndTilt": (0.72, -21.05, 0.0), + }, + ) + + surface_muscle_1 = nw.new_node( + nodegroup_surface_muscle().name, + input_kwargs={ + "Skin Mesh": simple_tube_v2.outputs["Geometry"], + "Skeleton Curve": simple_tube_v2.outputs["Skeleton Curve"], + "Coord 0": (0.32, 0.0, 0.0), + "Coord 1": (0.5, 1.5708, 0.0), + "Coord 2": (0.74, 1.32, 0.29), + "StartRad, EndRad, Fullness": group_input.outputs[ + "Shin Rad1 Rad2 Fullness" + ], + "ProfileHeight, StartTilt, EndTilt": (0.72, -21.05, 0.0), + }, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={ + "Geometry": [ + surface_muscle, + surface_muscle_1, + simple_tube_v2.outputs["Geometry"], + ] + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": join_geometry, + "Skeleton Curve": simple_tube_v2.outputs["Skeleton Curve"], + }, + ) + + +class BirdLeg(PartFactory): + tags = ["leg"] + + def sample_params(self): + return { + "length_rad1_rad2": np.array((1, 0.09, 0.06)) + * np.array((clip_gaussian(1, 0.3, 0.2, 1.5), *N(1, 0.1, 2))), + "angles_deg": np.array((-70.0, 90.0, -2.0)), + "aspect": N(1, 0.05), + "fullness": 8.0 * N(1, 0.1), + "Thigh Rad1 Rad2 Fullness": np.array((0.18, 0.1, 1.26)) * N(1, 0.1, 3), + "Shin Rad1 Rad2 Fullness": np.array((0.07, 0.06, 5.0)) * N(1, 0.1, 3), + } + + def make_part(self, params): + part = nodegroup_to_part(nodegroup_bird_leg, params) + part.joints = { + 0: Joint( + rest=(0, 0, 0), bounds=np.array([[-35, 0, -70], [35, 0, 70]]) + ), # shoulder + 0.5: Joint( + rest=(0, 0, 0), bounds=np.array([[-35, 0, -70], [35, 0, 70]]) + ), # elbow + } + part.iks = {} + tag_object(part.obj, "bird_leg") + return part + + +@node_utils.to_nodegroup( + "nodegroup_insect_leg", singleton=False, type="GeometryNodeTree" +) +def nodegroup_insect_leg(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVector", "length_rad1_rad2", (1.24, 0.02, 0.01)), + ("NodeSocketVector", "angles_deg", (0.0, -63.9, 31.39)), + ("NodeSocketFloat", "Carapace Rad Pct", 1.4), + ("NodeSocketVector", "spike_length_rad1_rad2", (0.1, 0.025, 0.0)), + ], + ) + + simple_tube_v2 = nw.new_node( + nodegroup_simple_tube_v2().name, + input_kwargs={ + "length_rad1_rad2": group_input.outputs["length_rad1_rad2"], + "angles_deg": group_input.outputs["angles_deg"], + "proportions": (0.2533, 0.3333, 0.1333), + "do_bezier": False, + }, + ) + + scale = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: group_input.outputs["length_rad1_rad2"], + "Scale": group_input.outputs["Carapace Rad Pct"], + }, + attrs={"operation": "SCALE"}, + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": scale.outputs["Vector"]} + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": separate_xyz.outputs["Y"], + "Y": separate_xyz.outputs["Y"], + "Z": 30.0, + }, + ) + + surface_muscle = nw.new_node( + nodegroup_surface_muscle().name, + input_kwargs={ + "Skin Mesh": simple_tube_v2.outputs["Geometry"], + "Skeleton Curve": simple_tube_v2.outputs["Skeleton Curve"], + "Coord 0": (0.0, 0.0, 0.0), + "Coord 1": (0.01, 0.0, 0.0), + "Coord 2": (0.35, 0.0, 0.0), + "StartRad, EndRad, Fullness": combine_xyz, + "ProfileHeight, StartTilt, EndTilt": (0.73, 0.0, 0.0), + }, + ) + + trim_curve = nw.new_node( + Nodes.TrimCurve, + input_kwargs={ + "Curve": simple_tube_v2.outputs["Skeleton Curve"], + "Start": 0.4892, + "End": 0.725, + }, + ) + + resample_curve = nw.new_node( + 
Nodes.ResampleCurve, input_kwargs={"Curve": trim_curve, "Count": 4} + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, input_kwargs={"Curve": resample_curve} + ) + + simple_tube_v2_1 = nw.new_node( + nodegroup_simple_tube_v2().name, + input_kwargs={ + "length_rad1_rad2": group_input.outputs["spike_length_rad1_rad2"], + "angles_deg": (0.0, -40.0, 0.0), + }, + ) + + instance_on_points = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={ + "Points": curve_to_mesh, + "Instance": simple_tube_v2_1.outputs["Geometry"], + "Rotation": (0.0, 0.1239, 0.0), + }, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={ + "Geometry": [ + simple_tube_v2.outputs["Geometry"], + surface_muscle, + instance_on_points, + ] + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": join_geometry, + "Skeleton Curve": simple_tube_v2.outputs["Skeleton Curve"], + "Endpoint": simple_tube_v2.outputs["Endpoint"], + }, + ) + + +class InsectLeg(PartFactory): + tags = ["leg", "rigid"] + + def sample_params(self): + return { + "length_rad1_rad2": np.array((1, 0.02, 0.01)) * N(1, 0.25, 3), + "angles_deg": np.array((0.0, -63.9, 31.39)) + N(0, 10, 3), + "Carapace Rad Pct": 1.4 * U(0.5, 2), + "spike_length_rad1_rad2": np.array((0.2, 0.025, 0.0)) + * N(1, (0.2, 0.1, 0.1), 3), + } + + def make_part(self, params): + part = nodegroup_to_part(nodegroup_insect_leg, params) + part.joints = { + 0: Joint( + rest=(0, 0, 0), bounds=np.array([[-35, 0, -70], [35, 0, 70]]) + ), # shoulder + 0.3: Joint(rest=(0, 0, 0), bounds=np.array([[-35, 0, -70], [35, 0, 70]])), + 0.7: Joint(rest=(0, 0, 0), bounds=np.array([[-35, 0, -70], [35, 0, 70]])), + } + part.iks = {1.0: IKParams("foot", rotation_weight=0.1, chain_parts=1)} + tag_object(part.obj, "insect_leg") + return part diff --git a/infinigen/assets/creatures/parts/nurbs_data/body_bird_duck.npy b/infinigen/assets/objects/creatures/parts/nurbs_data/body_bird_duck.npy similarity index 100% rename from infinigen/assets/creatures/parts/nurbs_data/body_bird_duck.npy rename to infinigen/assets/objects/creatures/parts/nurbs_data/body_bird_duck.npy diff --git a/infinigen/assets/creatures/parts/nurbs_data/body_bird_gull.npy b/infinigen/assets/objects/creatures/parts/nurbs_data/body_bird_gull.npy similarity index 100% rename from infinigen/assets/creatures/parts/nurbs_data/body_bird_gull.npy rename to infinigen/assets/objects/creatures/parts/nurbs_data/body_bird_gull.npy diff --git a/infinigen/assets/creatures/parts/nurbs_data/body_bird_robin.npy b/infinigen/assets/objects/creatures/parts/nurbs_data/body_bird_robin.npy similarity index 100% rename from infinigen/assets/creatures/parts/nurbs_data/body_bird_robin.npy rename to infinigen/assets/objects/creatures/parts/nurbs_data/body_bird_robin.npy diff --git a/infinigen/assets/creatures/parts/nurbs_data/body_feline_cheetah.npy b/infinigen/assets/objects/creatures/parts/nurbs_data/body_feline_cheetah.npy similarity index 100% rename from infinigen/assets/creatures/parts/nurbs_data/body_feline_cheetah.npy rename to infinigen/assets/objects/creatures/parts/nurbs_data/body_feline_cheetah.npy diff --git a/infinigen/assets/creatures/parts/nurbs_data/body_feline_housecat.npy b/infinigen/assets/objects/creatures/parts/nurbs_data/body_feline_housecat.npy similarity index 100% rename from infinigen/assets/creatures/parts/nurbs_data/body_feline_housecat.npy rename to infinigen/assets/objects/creatures/parts/nurbs_data/body_feline_housecat.npy diff --git 
a/infinigen/assets/creatures/parts/nurbs_data/body_feline_tiger.npy b/infinigen/assets/objects/creatures/parts/nurbs_data/body_feline_tiger.npy similarity index 100% rename from infinigen/assets/creatures/parts/nurbs_data/body_feline_tiger.npy rename to infinigen/assets/objects/creatures/parts/nurbs_data/body_feline_tiger.npy diff --git a/infinigen/assets/creatures/parts/nurbs_data/body_feline_tiger_2.npy b/infinigen/assets/objects/creatures/parts/nurbs_data/body_feline_tiger_2.npy similarity index 100% rename from infinigen/assets/creatures/parts/nurbs_data/body_feline_tiger_2.npy rename to infinigen/assets/objects/creatures/parts/nurbs_data/body_feline_tiger_2.npy diff --git a/infinigen/assets/creatures/parts/nurbs_data/body_feline_wolf.npy b/infinigen/assets/objects/creatures/parts/nurbs_data/body_feline_wolf.npy similarity index 100% rename from infinigen/assets/creatures/parts/nurbs_data/body_feline_wolf.npy rename to infinigen/assets/objects/creatures/parts/nurbs_data/body_feline_wolf.npy diff --git a/infinigen/assets/creatures/parts/nurbs_data/body_fish_bluefish.npy b/infinigen/assets/objects/creatures/parts/nurbs_data/body_fish_bluefish.npy similarity index 100% rename from infinigen/assets/creatures/parts/nurbs_data/body_fish_bluefish.npy rename to infinigen/assets/objects/creatures/parts/nurbs_data/body_fish_bluefish.npy diff --git a/infinigen/assets/creatures/parts/nurbs_data/body_fish_crappie.npy b/infinigen/assets/objects/creatures/parts/nurbs_data/body_fish_crappie.npy similarity index 100% rename from infinigen/assets/creatures/parts/nurbs_data/body_fish_crappie.npy rename to infinigen/assets/objects/creatures/parts/nurbs_data/body_fish_crappie.npy diff --git a/infinigen/assets/creatures/parts/nurbs_data/body_fish_eel.npy b/infinigen/assets/objects/creatures/parts/nurbs_data/body_fish_eel.npy similarity index 100% rename from infinigen/assets/creatures/parts/nurbs_data/body_fish_eel.npy rename to infinigen/assets/objects/creatures/parts/nurbs_data/body_fish_eel.npy diff --git a/infinigen/assets/creatures/parts/nurbs_data/body_fish_pickerel.npy b/infinigen/assets/objects/creatures/parts/nurbs_data/body_fish_pickerel.npy similarity index 100% rename from infinigen/assets/creatures/parts/nurbs_data/body_fish_pickerel.npy rename to infinigen/assets/objects/creatures/parts/nurbs_data/body_fish_pickerel.npy diff --git a/infinigen/assets/creatures/parts/nurbs_data/body_fish_pufferfish.npy b/infinigen/assets/objects/creatures/parts/nurbs_data/body_fish_pufferfish.npy similarity index 100% rename from infinigen/assets/creatures/parts/nurbs_data/body_fish_pufferfish.npy rename to infinigen/assets/objects/creatures/parts/nurbs_data/body_fish_pufferfish.npy diff --git a/infinigen/assets/creatures/parts/nurbs_data/body_fish_spadefish.npy b/infinigen/assets/objects/creatures/parts/nurbs_data/body_fish_spadefish.npy similarity index 100% rename from infinigen/assets/creatures/parts/nurbs_data/body_fish_spadefish.npy rename to infinigen/assets/objects/creatures/parts/nurbs_data/body_fish_spadefish.npy diff --git a/infinigen/assets/creatures/parts/nurbs_data/body_herbivore_cow.npy b/infinigen/assets/objects/creatures/parts/nurbs_data/body_herbivore_cow.npy similarity index 100% rename from infinigen/assets/creatures/parts/nurbs_data/body_herbivore_cow.npy rename to infinigen/assets/objects/creatures/parts/nurbs_data/body_herbivore_cow.npy diff --git a/infinigen/assets/creatures/parts/nurbs_data/body_herbivore_giraffe.npy 
b/infinigen/assets/objects/creatures/parts/nurbs_data/body_herbivore_giraffe.npy similarity index 100% rename from infinigen/assets/creatures/parts/nurbs_data/body_herbivore_giraffe.npy rename to infinigen/assets/objects/creatures/parts/nurbs_data/body_herbivore_giraffe.npy diff --git a/infinigen/assets/creatures/parts/nurbs_data/body_herbivore_goat.npy b/infinigen/assets/objects/creatures/parts/nurbs_data/body_herbivore_goat.npy similarity index 100% rename from infinigen/assets/creatures/parts/nurbs_data/body_herbivore_goat.npy rename to infinigen/assets/objects/creatures/parts/nurbs_data/body_herbivore_goat.npy diff --git a/infinigen/assets/creatures/parts/nurbs_data/body_herbivore_llama.npy b/infinigen/assets/objects/creatures/parts/nurbs_data/body_herbivore_llama.npy similarity index 100% rename from infinigen/assets/creatures/parts/nurbs_data/body_herbivore_llama.npy rename to infinigen/assets/objects/creatures/parts/nurbs_data/body_herbivore_llama.npy diff --git a/infinigen/assets/creatures/parts/nurbs_data/body_insect_bee.npy b/infinigen/assets/objects/creatures/parts/nurbs_data/body_insect_bee.npy similarity index 100% rename from infinigen/assets/creatures/parts/nurbs_data/body_insect_bee.npy rename to infinigen/assets/objects/creatures/parts/nurbs_data/body_insect_bee.npy diff --git a/infinigen/assets/creatures/parts/nurbs_data/body_insect_beetle.npy b/infinigen/assets/objects/creatures/parts/nurbs_data/body_insect_beetle.npy similarity index 100% rename from infinigen/assets/creatures/parts/nurbs_data/body_insect_beetle.npy rename to infinigen/assets/objects/creatures/parts/nurbs_data/body_insect_beetle.npy diff --git a/infinigen/assets/creatures/parts/nurbs_data/body_insect_tarantula.npy b/infinigen/assets/objects/creatures/parts/nurbs_data/body_insect_tarantula.npy similarity index 100% rename from infinigen/assets/creatures/parts/nurbs_data/body_insect_tarantula.npy rename to infinigen/assets/objects/creatures/parts/nurbs_data/body_insect_tarantula.npy diff --git a/infinigen/assets/creatures/parts/nurbs_data/body_llama.npy b/infinigen/assets/objects/creatures/parts/nurbs_data/body_llama.npy similarity index 100% rename from infinigen/assets/creatures/parts/nurbs_data/body_llama.npy rename to infinigen/assets/objects/creatures/parts/nurbs_data/body_llama.npy diff --git a/infinigen/assets/creatures/parts/nurbs_data/head_carnivore_tiger.npy b/infinigen/assets/objects/creatures/parts/nurbs_data/head_carnivore_tiger.npy similarity index 100% rename from infinigen/assets/creatures/parts/nurbs_data/head_carnivore_tiger.npy rename to infinigen/assets/objects/creatures/parts/nurbs_data/head_carnivore_tiger.npy diff --git a/infinigen/assets/creatures/parts/nurbs_data/head_carnivore_wolf.npy b/infinigen/assets/objects/creatures/parts/nurbs_data/head_carnivore_wolf.npy similarity index 100% rename from infinigen/assets/creatures/parts/nurbs_data/head_carnivore_wolf.npy rename to infinigen/assets/objects/creatures/parts/nurbs_data/head_carnivore_wolf.npy diff --git a/infinigen/assets/creatures/parts/nurbs_data/head_herbivore_cow.npy b/infinigen/assets/objects/creatures/parts/nurbs_data/head_herbivore_cow.npy similarity index 100% rename from infinigen/assets/creatures/parts/nurbs_data/head_herbivore_cow.npy rename to infinigen/assets/objects/creatures/parts/nurbs_data/head_herbivore_cow.npy diff --git a/infinigen/assets/creatures/parts/nurbs_data/head_herbivore_giraffe.npy b/infinigen/assets/objects/creatures/parts/nurbs_data/head_herbivore_giraffe.npy similarity index 100% rename from 
infinigen/assets/creatures/parts/nurbs_data/head_herbivore_giraffe.npy rename to infinigen/assets/objects/creatures/parts/nurbs_data/head_herbivore_giraffe.npy diff --git a/infinigen/assets/creatures/parts/nurbs_data/head_herbivore_goat.npy b/infinigen/assets/objects/creatures/parts/nurbs_data/head_herbivore_goat.npy similarity index 100% rename from infinigen/assets/creatures/parts/nurbs_data/head_herbivore_goat.npy rename to infinigen/assets/objects/creatures/parts/nurbs_data/head_herbivore_goat.npy diff --git a/infinigen/assets/creatures/parts/nurbs_data/head_herbivore_llama.npy b/infinigen/assets/objects/creatures/parts/nurbs_data/head_herbivore_llama.npy similarity index 100% rename from infinigen/assets/creatures/parts/nurbs_data/head_herbivore_llama.npy rename to infinigen/assets/objects/creatures/parts/nurbs_data/head_herbivore_llama.npy diff --git a/infinigen/assets/creatures/parts/nurbs_data/head_insect_beetle.npy b/infinigen/assets/objects/creatures/parts/nurbs_data/head_insect_beetle.npy similarity index 100% rename from infinigen/assets/creatures/parts/nurbs_data/head_insect_beetle.npy rename to infinigen/assets/objects/creatures/parts/nurbs_data/head_insect_beetle.npy diff --git a/infinigen/assets/creatures/parts/nurbs_data/head_insect_wasp.npy b/infinigen/assets/objects/creatures/parts/nurbs_data/head_insect_wasp.npy similarity index 100% rename from infinigen/assets/creatures/parts/nurbs_data/head_insect_wasp.npy rename to infinigen/assets/objects/creatures/parts/nurbs_data/head_insect_wasp.npy diff --git a/infinigen/assets/creatures/parts/reptile_detail.py b/infinigen/assets/objects/creatures/parts/reptile_detail.py similarity index 65% rename from infinigen/assets/creatures/parts/reptile_detail.py rename to infinigen/assets/objects/creatures/parts/reptile_detail.py index 136ded6c0..9e9c2f8e4 100644 --- a/infinigen/assets/creatures/parts/reptile_detail.py +++ b/infinigen/assets/objects/creatures/parts/reptile_detail.py @@ -5,47 +5,58 @@ import logging -import bpy -import numpy as np -from math import sin, cos, pi, exp, sqrt +from math import cos, exp, pi, sin, sqrt -from infinigen.assets.creatures.util.creature import PartFactory, Part -from infinigen.assets.creatures.util.genome import Joint, IKParams -from infinigen.assets.creatures.util import part_util -from infinigen.core.util import blender as butil +import numpy as np from scipy.interpolate import interp1d -from infinigen.assets.creatures.util.geometry import nurbs as nurbs_util +from infinigen.assets.objects.creatures.util import part_util +from infinigen.assets.objects.creatures.util.creature import PartFactory +from infinigen.assets.objects.creatures.util.genome import IKParams, Joint +from infinigen.assets.utils.geometry import nurbs as nurbs_util from infinigen.core import surface logger = logging.getLogger(__name__) + def square(x): return x * x + + def sigmoid(x): return 1 / (1 + exp(-x)) + + def interpolate(coords): x = [c[0] for c in coords] y = [c[1] for c in coords] - f = interp1d(x, y, kind='cubic') + f = interp1d(x, y, kind="cubic") return f -def lr_scale(l, r, p): - p = p * (r - l) + l + + +def lr_scale(lower, upper, p): + p = p * (upper - lower) + lower return p -def lrlr_scale(l, r, L, R, p): - p = (p - L) / (R - L) - p = p * (r - l) + l + + +def lrlr_scale(lower, upper, from_lower, from_upper, p): + p = (p - from_lower) / (from_upper - from_lower) + p = p * (upper - lower) + lower return p -def sunk(l, r, gr, p): - if p < l or p > r: + + +def sunk(lower, upper, gr, p): + if p < lower or p > upper: 
return 1 - - p = (p - l) / (r - l) + + p = (p - lower) / (upper - lower) return lrlr_scale(gr, 1, 0, 0.5, abs(p - 0.5)) + def dist(x, y): return sqrt(square(x[0] - y[0]) + square(x[1] - y[1])) + def corner_vertices(obj): THRESHOLD = 0.3 mesh = obj.data @@ -53,6 +64,7 @@ def corner_vertices(obj): value = np.zeros(n) nor = {} + def add(u, v): if u not in nor: nor[u] = [] @@ -67,7 +79,7 @@ def add(u, v): if u.index not in nor: value[u.index] = 1.0 COUNT += 1 - continue + continue normals = nor[u.index] mx_cross = 0 @@ -81,11 +93,13 @@ def add(u, v): break if mx_cross > THRESHOLD: break - + if mx_cross > THRESHOLD: COUNT += 1 value[u.index] = 1.0 return value + + def dorsal_vertices(obj): mesh = obj.data n = len(obj.data.vertices) @@ -99,6 +113,8 @@ def dorsal_vertices(obj): if u.co[0] > 0: value[u.index] = 0 return value + + def ventral_vertices(obj, bodycheck=False): mesh = obj.data n = len(obj.data.vertices) @@ -114,7 +130,8 @@ def ventral_vertices(obj, bodycheck=False): value[u.index] = 0 return value -class nurbs_ReptileTail(): + +class nurbs_ReptileTail: def __init__(self, **kwargs): self.__dict__.update(kwargs) self.n = 20 @@ -123,7 +140,7 @@ def __init__(self, **kwargs): def local_scale(self, p): def foo(p): return 2 * p * (1 - p) - + sunken = self.sunken sunken *= sunk(-0.1, 0.1, 0.65, p) sunken *= sunk(self.wrist - 0.075, self.wrist + 0.075, 0.7, p) @@ -136,11 +153,10 @@ def foo(p): value = foo(p) * sunken return value - def get_ctrls(self): self.n = int(self.n) self.m = int(self.m) - ctrls = np.zeros((self.n, self.m, 3)) + ctrls = np.zeros((self.n, self.m, 3)) for i in range(self.n): for j in range(self.m): p = i / (self.n - 1) @@ -148,18 +164,20 @@ def get_ctrls(self): ctrls[i][j][0] = p ctrls[i][j][1] = self.local_scale(p) * cos(theta) ctrls[i][j][2] = self.local_scale(p) * sin(theta) - + ctrls[i][j][0] *= self.scale_x ctrls[i][j][1] *= self.scale_y ctrls[i][j][2] *= self.scale_z - if (self.breast > 0.5): - if (0.10 < p and p < 0.25): + if self.breast > 0.5: + if 0.10 < p and p < 0.25: if abs(sin(theta)) < 0.5: - ctrls[i][j][1] *= 40 * (0.1 - abs(0.15 - p)) * (1 - abs(sin(theta))) + ctrls[i][j][1] *= ( + 40 * (0.1 - abs(0.15 - p)) * (1 - abs(sin(theta))) + ) else: ctrls[i][j][2] *= 0.8 - if (p > self.wrist): + if p > self.wrist: ctrls[i][j][1] *= self.tail_modification ctrls[i][j][2] *= self.tail_modification @@ -171,47 +189,54 @@ def get_ctrls(self): ctrls[i][j][2] -= float_ground - ground return ctrls - + def generate(self): ctrls = self.get_ctrls() - method = 'blender' if False else 'geomdl' + method = "blender" if False else "geomdl" return nurbs_util.nurbs(ctrls, method, face_size=0.1) -class nurbs_ReptileUpperHead(): + +class nurbs_ReptileUpperHead: def __init__(self, **kwargs): self.__dict__.update(kwargs) self.n = 50 self.m = 50 self.local_scale_y = self.init_local_scale_y() self.local_scale_z = self.init_local_scale_z() - + def local_offset_x(self, p): return -self.blunt_head * max(p - 0.9, 0) - + def local_offset_z(self, p, theta): a = sin(theta) - if (a > 0.7): + if a > 0.7: return -(a - 0.7) * 0.7 return 0 - + def init_local_scale_y(self): - lrp = lambda p: lr_scale(-1/4, 1, p) + def lrp(p): + return lr_scale(-1 / 4, 1, p) + return lambda p, theta: sqrt(1 - abs(lrp(p)) ** 1) - + def init_local_scale_z(self): def f1(p, theta): - lrp = lambda p: lr_scale(-1/3, 1, p) + def lrp(p): + return lr_scale(-1 / 3, 1, p) + return sqrt(1 - lrp(p) ** 2) + def f2(p, theta): - return (1 - p ** 2) / 10 - + return (1 - p**2) / 10 + def foo(p, theta): if 0 <= theta and theta <= pi: 
return 0.9 * f1(p, theta) + 0.1 * f2(p, theta) else: return 0.1 * f1(p, theta) + 0.9 * f2(p, theta) + return foo - + def update(self, set): xs, ys, k = set for x in xs: @@ -228,26 +253,28 @@ def bump(self, pos, degree, boundx, boundy): ry = round(self.m * boundy[1]) for i in range(cx - lx, cx + rx + 1): for j in range(cy - ly, cy + ry + 1): - self.ctrls[i][j][2] *= 1 + max(0, degree * lrlr_scale(0, 1, 5 * 1.4, 0, dist((cx, cy), (i, j)))) - + self.ctrls[i][j][2] *= 1 + max( + 0, degree * lrlr_scale(0, 1, 5 * 1.4, 0, dist((cx, cy), (i, j))) + ) def get_ctrls(self): self.n = int(self.n) self.m = int(self.m) - ctrls = np.zeros((self.n, self.m, 3)) + ctrls = np.zeros((self.n, self.m, 3)) for i in range(self.n): for j in range(self.m): p = i / (self.n - 1) theta = 2 * pi * j / (self.m) ctrls[i][j][0] = p + self.local_offset_x(p) ctrls[i][j][1] = self.local_scale_y(p, theta) * cos(theta) - ctrls[i][j][2] = self.local_scale_z(p, theta) * (sin(theta) + self.local_offset_z(p, theta)) - + ctrls[i][j][2] = self.local_scale_z(p, theta) * ( + sin(theta) + self.local_offset_z(p, theta) + ) + ctrls[i][j][0] *= self.scale_x ctrls[i][j][1] *= self.scale_y ctrls[i][j][2] *= self.scale_z - - + self.ctrls = ctrls # snakes @@ -288,14 +315,16 @@ def get_ctrls(self): # ] for set in settings: self.update(set) - + for i in range(self.n): for j in range(self.m): p = i / (self.n - 1) theta = 2 * pi * j / (self.m) - if (p >= self.up_head_position): - self.ctrls[i][j][2] += self.up_head_degree * (p - self.up_head_position) - if (sin(theta) < -0.6): + if p >= self.up_head_position: + self.ctrls[i][j][2] += self.up_head_degree * ( + p - self.up_head_position + ) + if sin(theta) < -0.6: self.ctrls[i][j][2] += 0.3 * (-sin(theta) - 0.6) # self.bump((0.8, 0.2), 0.2, (0.15, 0.15), (0.15, 0.02)) for i in range(self.n): @@ -303,46 +332,52 @@ def get_ctrls(self): self.ctrls[i][j][0] += self.offset_x self.ctrls[i][j][1] += self.offset_y self.ctrls[i][j][2] += self.offset_z - + return self.ctrls - + def generate(self): ctrls = self.get_ctrls() - method = 'blender' if False else 'geomdl' + method = "blender" if False else "geomdl" obj = nurbs_util.nurbs(ctrls, method, face_size=0.05) - surface.new_attr_data(obj, 'corner', 'FLOAT', 'POINT', corner_vertices(obj)) - surface.new_attr_data(obj, 'inside_mouth', 'FLOAT', 'POINT', ventral_vertices(obj)) + surface.new_attr_data(obj, "corner", "FLOAT", "POINT", corner_vertices(obj)) + surface.new_attr_data( + obj, "inside_mouth", "FLOAT", "POINT", ventral_vertices(obj) + ) return obj -class nurbs_ReptileLowerHead(): + +class nurbs_ReptileLowerHead: def __init__(self, **kwargs): self.__dict__.update(kwargs) self.n = 50 self.m = 50 self.local_scale_y = self.init_local_scale_y() self.local_scale_z = self.init_local_scale_z() - + def local_offset_x(self, p): return -0.7 * max(p - 0.9, 0) - + def local_offset_z(self, p, theta): a = sin(theta) - if (a > 0.7): - return -(a - 0.7) ** 2 * 0.7 + if a > 0.7: + return -((a - 0.7) ** 2) * 0.7 return 0 - + def init_local_scale_y(self): - lrp = lambda p: lr_scale(-1/4, 1, p) + def lrp(p): + return lr_scale(-1 / 4, 1, p) + return lambda p, theta: sqrt(1 - abs(lrp(p)) ** 1) - + def init_local_scale_z(self): def foo(p, theta): if 0 <= theta and theta <= pi: - return sqrt(1 - p ** 2) + return sqrt(1 - p**2) else: - return (1 - p ** 2) / 10 + return (1 - p**2) / 10 + return foo - + def update(self, set): xs, ys, k = set for x in xs: @@ -353,22 +388,24 @@ def update(self, set): def get_ctrls(self): self.n = int(self.n) self.m = int(self.m) - ctrls = 
np.zeros((self.n, self.m, 3)) + ctrls = np.zeros((self.n, self.m, 3)) for i in range(self.n): for j in range(self.m): p = i / (self.n - 1) theta = 2 * pi * j / (self.m) ctrls[i][j][0] = p + self.local_offset_x(p) ctrls[i][j][1] = self.local_scale_y(p, theta) * cos(theta) - ctrls[i][j][2] = self.local_scale_z(p, theta) * (sin(theta) + self.local_offset_z(p, theta)) - + ctrls[i][j][2] = self.local_scale_z(p, theta) * ( + sin(theta) + self.local_offset_z(p, theta) + ) + ctrls[i][j][0] *= self.scale_x ctrls[i][j][1] *= self.scale_y ctrls[i][j][2] *= -self.scale_z - - if (p > 0.6): + + if p > 0.6: ctrls[i][j][2] += 0.4 * ((p - 0.6) ** 2) - + self.ctrls = ctrls for i in range(self.n): for j in range(self.m): @@ -376,50 +413,59 @@ def get_ctrls(self): self.ctrls[i][j][1] += self.offset_y self.ctrls[i][j][2] += self.offset_z return self.ctrls - + def generate(self): ctrls = self.get_ctrls() - method = 'blender' if False else 'geomdl' + method = "blender" if False else "geomdl" obj = nurbs_util.nurbs(ctrls, method, face_size=0.01) - surface.new_attr_data(obj, 'corner', 'FLOAT', 'POINT', corner_vertices(obj)) - surface.new_attr_data(obj, 'inside_mouth', 'FLOAT', 'POINT', ventral_vertices(obj)) + surface.new_attr_data(obj, "corner", "FLOAT", "POINT", corner_vertices(obj)) + surface.new_attr_data( + obj, "inside_mouth", "FLOAT", "POINT", ventral_vertices(obj) + ) return obj -class nurbs_ReptileHead(): + +class nurbs_ReptileHead: def __init__(self, **kwargs): self.__dict__.update(kwargs) self.n = 50 self.m = 50 self.local_scale_y = self.init_local_scale_y() self.local_scale_z = self.init_local_scale_z() - + def local_offset_x(self, p): return -self.blunt_head * max(p - 0.9, 0) - + def local_offset_z(self, p, theta): a = sin(theta) - if (a > 0.7): + if a > 0.7: return -(a - 0.7) * 0.7 return 0 - + def init_local_scale_y(self): - lrp = lambda p: lr_scale(-1/2, 1, p) + def lrp(p): + return lr_scale(-1 / 2, 1, p) + return lambda p, theta: sqrt(1 - abs(lrp(p)) ** 1) - + def init_local_scale_z(self): def f1(p, theta): - lrp = lambda p: lr_scale(-1/3, 1, p) + def lrp(p): + return lr_scale(-1 / 3, 1, p) + return sqrt(1 - lrp(p) ** 2) + def f2(p, theta): - return (1 - p ** 2) / 10 - + return (1 - p**2) / 10 + def foo(p, theta): if 0 <= theta and theta <= pi: return 0.9 * f1(p, theta) + 0.1 * f2(p, theta) else: return 0.8 * f1(p, theta) + 0.2 * f2(p, theta) + return foo - + def update(self, set): xs, ys, k = set for x in xs: @@ -429,38 +475,39 @@ def update(self, set): def update_all(self, scale, offset=(0, 0, 0)): l, r = scale - + n, m, _ = self.ctrls.shape for i in range(n): for j in range(m): - self.ctrls[i][j][0] *= (r - l) + self.ctrls[i][j][0] *= r - l self.ctrls[i][j][1] *= abs(r - l) self.ctrls[i][j][2] *= abs(r - l) - + self.ctrls[i][j][0] += offset[0] self.ctrls[i][j][1] += offset[1] self.ctrls[i][j][2] += offset[2] - if (l > r): + if l > r: self.ctrls = np.flip(self.ctrls, axis=0) return self.ctrls def get_ctrls(self): self.n = int(self.n) self.m = int(self.m) - ctrls = np.zeros((self.n, self.m, 3)) + ctrls = np.zeros((self.n, self.m, 3)) for i in range(self.n): for j in range(self.m): p = i / (self.n - 1) theta = 2 * pi * j / (self.m) ctrls[i][j][0] = p + self.local_offset_x(p) ctrls[i][j][1] = self.local_scale_y(p, theta) * cos(theta) - ctrls[i][j][2] = self.local_scale_z(p, theta) * (sin(theta) + self.local_offset_z(p, theta)) - + ctrls[i][j][2] = self.local_scale_z(p, theta) * ( + sin(theta) + self.local_offset_z(p, theta) + ) + ctrls[i][j][0] *= self.scale_x ctrls[i][j][1] *= self.scale_y 
ctrls[i][j][2] *= self.scale_z - - + self.ctrls = ctrls settings = [ ([42], [4, 21], (0.7, 1)), @@ -476,20 +523,23 @@ def get_ctrls(self): for j in range(self.m): p = i / (self.n - 1) theta = 2 * pi * j / (self.m) - if (p >= self.up_head_position): - self.ctrls[i][j][2] += self.up_head_degree * (p - self.up_head_position) - if (sin(theta) < -0.6): + if p >= self.up_head_position: + self.ctrls[i][j][2] += self.up_head_degree * ( + p - self.up_head_position + ) + if sin(theta) < -0.6: self.ctrls[i][j][2] += 0.3 * (-sin(theta) - 0.6) - + # self.update_all((1, 0), (0, 0, -0.05)) return self.ctrls - + def generate(self): ctrls = self.get_ctrls() - method = 'blender' if False else 'geomdl' + method = "blender" if False else "geomdl" return nurbs_util.nurbs(ctrls, method, face_size=0.05) -class nurbs_LizardFrontLeg(): + +class nurbs_LizardFrontLeg: def __init__(self, **kwargs): self.__dict__.update(kwargs) self.n = 50 @@ -502,31 +552,34 @@ def __init__(self, **kwargs): self.scale_y *= 0.4 self.scale_z *= 0.4 - def init_local_offset_x(self): return lambda p, theta: 0 alpha = pi * 0.45 thred = 0.9 + def foo(p, theta): offset = 0 - if (p <= thred): + if p <= thred: offset += -(1 - cos(alpha)) * (p - thred) offset -= -(1 - cos(alpha)) * (0 - thred) return offset + return foo - + def init_local_offset_y(self): return lambda p, theta: 0 alpha = -pi * 0.45 thred = 0.9 + def foo(p, theta): offset = 0 - if (p <= thred): + if p <= thred: offset += sin(alpha) * (p - thred) # offset -= sin(alpha) * (0 - thred) return offset + return foo - + def init_local_scale_y(self): th0 = 0.03 th1 = 0.1 @@ -539,24 +592,26 @@ def init_local_scale_y(self): bar1 = 0.3 - (th0 - th1) * sin(lrlr_scale(0, pi, th0, th1, th1 - th0)) bar2 = 0.3 - cr2 * th1 bar3 = cr2 * th2 + bar2 - cr3 * th2 - + def foo(p, theta): p = 1 - p - if (p < th0): + if p < th0: return cos(lrlr_scale(0, pi / 2, 0, th0, th0 - p)) - elif (p < th1): + elif p < th1: return (th0 - th1) * sin(lrlr_scale(0, pi, th0, th1, p - th0)) + bar1 - elif (p < th2): + elif p < th2: return cr2 * p + bar2 else: return cr3 * p + bar3 + return foo - + def init_local_scale_z(self): def foo(p, theta): return 0.2 + 0.8 * (1 - p) + return foo - + def update(self, set): xs, ys, k = set for x in xs: @@ -567,28 +622,31 @@ def update(self, set): def get_ctrls(self): self.n = int(self.n) self.m = int(self.m) - ctrls = np.zeros((self.n, self.m, 3)) + ctrls = np.zeros((self.n, self.m, 3)) for i in range(self.n): for j in range(self.m): p = i / (self.n - 1) theta = 2 * pi * j / (self.m) ctrls[i][j][0] = p + self.local_offset_x(p, theta) - ctrls[i][j][1] = self.local_scale_y(p, theta) * (cos(theta) + self.local_offset_y(p, theta)) + ctrls[i][j][1] = self.local_scale_y(p, theta) * ( + cos(theta) + self.local_offset_y(p, theta) + ) ctrls[i][j][2] = self.local_scale_z(p, theta) * sin(theta) - + ctrls[i][j][0] *= self.scale_x ctrls[i][j][1] *= self.scale_y ctrls[i][j][2] *= self.scale_z - self.ctrls = ctrls + self.ctrls = ctrls return self.ctrls - + def generate(self): ctrls = self.get_ctrls() - method = 'blender' if False else 'geomdl' + method = "blender" if False else "geomdl" return nurbs_util.nurbs(ctrls, method, face_size=0.015) -class nurbs_LizardBackLeg(): + +class nurbs_LizardBackLeg: def __init__(self, **kwargs): self.__dict__.update(kwargs) self.n = 50 @@ -600,31 +658,35 @@ def __init__(self, **kwargs): self.scale_x *= 0.6 self.scale_y *= 0.4 self.scale_z *= 0.4 - + def init_local_offset_x(self): return lambda p, theta: 0 alpha = -pi * 0.45 thred = 0.9 + def foo(p, theta): offset = 0 
- if (p <= thred): + if p <= thred: offset += -(1 - cos(alpha)) * (p - thred) offset -= -(1 - cos(alpha)) * (0 - thred) return offset + return foo - + def init_local_offset_y(self): return lambda p, theta: 0 alpha = pi * 0.45 thred = 0.9 + def foo(p, theta): offset = 0 - if (p <= thred): + if p <= thred: offset += sin(alpha) * (p - thred) # offset -= sin(alpha) * (0 - thred) return offset + return foo - + def init_local_scale_y(self): th0 = 0.03 th1 = 0.1 @@ -637,24 +699,26 @@ def init_local_scale_y(self): bar1 = 0.3 - (th0 - th1) * sin(lrlr_scale(0, pi, th0, th1, th1 - th0)) bar2 = 0.3 - cr2 * th1 bar3 = cr2 * th2 + bar2 - cr3 * th2 - + def foo(p, theta): p = 1 - p - if (p < th0): + if p < th0: return cos(lrlr_scale(0, pi / 2, 0, th0, th0 - p)) - elif (p < th1): + elif p < th1: return (th0 - th1) * sin(lrlr_scale(0, pi, th0, th1, p - th0)) + bar1 - elif (p < th2): + elif p < th2: return cr2 * p + bar2 else: return cr3 * p + bar3 + return foo - + def init_local_scale_z(self): def foo(p, theta): return 0.2 + 0.8 * (1 - p) + return foo - + def update(self, set): xs, ys, k = set for x in xs: @@ -665,28 +729,31 @@ def update(self, set): def get_ctrls(self): self.n = int(self.n) self.m = int(self.m) - ctrls = np.zeros((self.n, self.m, 3)) + ctrls = np.zeros((self.n, self.m, 3)) for i in range(self.n): for j in range(self.m): p = i / (self.n - 1) theta = 2 * pi * j / (self.m) ctrls[i][j][0] = p + self.local_offset_x(p, theta) - ctrls[i][j][1] = self.local_scale_y(p, theta) * (cos(theta) + self.local_offset_y(p, theta)) + ctrls[i][j][1] = self.local_scale_y(p, theta) * ( + cos(theta) + self.local_offset_y(p, theta) + ) ctrls[i][j][2] = self.local_scale_z(p, theta) * sin(theta) - + ctrls[i][j][0] *= self.scale_x ctrls[i][j][1] *= self.scale_y ctrls[i][j][2] *= self.scale_z - self.ctrls = ctrls + self.ctrls = ctrls return self.ctrls - + def generate(self): ctrls = self.get_ctrls() - method = 'blender' if False else 'geomdl' + method = "blender" if False else "geomdl" return nurbs_util.nurbs(ctrls, method, face_size=0.015) -class nurbs_LizardToe(): + +class nurbs_LizardToe: def __init__(self, **kwargs): self.__dict__.update(kwargs) self.n = 50 @@ -701,28 +768,30 @@ def __init__(self, **kwargs): self.scale_x *= 0.6 self.scale_y *= 0.4 self.scale_z *= 0.4 - + def init_local_offset_x(self): def foo(p, theta): - if (p < 0.98): + if p < 0.98: return 0 else: return (p - 0.9) * 3 + return foo - + def init_local_offset_y(self): return lambda p, theta: 0 def init_local_scale(self): def foo(p, theta): - if (p < 0.4): + if p < 0.4: return 1 - elif (p < 0.9): + elif p < 0.9: return 1 + 0.5 * sin(lrlr_scale(0, pi, 0, 0.5, p - 0.4)) else: return cos(lrlr_scale(0, pi / 2, 0, 0.1, p - 0.9)) + return foo - + def update(self, set): xs, ys, k = set for x in xs: @@ -733,75 +802,78 @@ def update(self, set): def get_ctrls(self): self.n = int(self.n) self.m = int(self.m) - ctrls = np.zeros((self.n, self.m, 3)) + ctrls = np.zeros((self.n, self.m, 3)) for i in range(self.n): for j in range(self.m): p = i / (self.n - 1) theta = 2 * pi * j / (self.m) ctrls[i][j][0] = p + self.local_offset_x(p, theta) - ctrls[i][j][1] = self.local_scale_y(p, theta) * (cos(theta) + self.local_offset_y(p, theta)) + ctrls[i][j][1] = self.local_scale_y(p, theta) * ( + cos(theta) + self.local_offset_y(p, theta) + ) ctrls[i][j][2] = self.local_scale_z(p, theta) * sin(theta) - + ctrls[i][j][0] *= self.scale_x ctrls[i][j][1] *= self.scale_y ctrls[i][j][2] *= self.scale_z - self.ctrls = ctrls + self.ctrls = ctrls return self.ctrls - + def 
generate(self): ctrls = self.get_ctrls() - method = 'blender' if False else 'geomdl' + method = "blender" if False else "geomdl" return nurbs_util.nurbs(ctrls, method, face_size=0.005) -class nurbs_ReptileBody(): + +class nurbs_ReptileBody: def __init__(self, head, tail, **kwargs): self.__dict__.update(kwargs) self.param_head = head self.param_tail = tail self.m = 50 - + def position(self, x): return np.array([x, 0, sigmoid((x - 0.5) * 4)]) - + def update(self, ctrls, scale, offset=(0, 0, 0)): l, r = scale - + n, m, _ = ctrls.shape for i in range(n): for j in range(m): - ctrls[i][j][0] *= (r - l) + ctrls[i][j][0] *= r - l ctrls[i][j][1] *= abs(r - l) ctrls[i][j][2] *= abs(r - l) - + ctrls[i][j][0] += offset[0] ctrls[i][j][1] += offset[1] ctrls[i][j][2] += offset[2] - if (l > r): + if l > r: ctrls = np.flip(ctrls, axis=0) return ctrls - + def merge(self, c1, c2): nc1 = np.copy(c1) nc2 = np.copy(c2) nc2[0] = 0.5 * c2[0] + 0.5 * c1[-1] - nc1[-1] = nc2[0] + nc1[-1] = nc2[0] for i in range(2, 6): nc1[-i] = 0.1 * ((i + 4) * c1[-i] + (6 - i) * c2[0]) for i in range(1, 5): nc2[i] = 0.1 * ((i + 5) * c2[i] + (5 - i) * c1[-1]) return np.concatenate((nc1, nc2), axis=0) - + def get_ctrls(self): if self.open_mouth: head = nurbs_ReptileUpperHead(**self.param_head) else: head = nurbs_ReptileHead(**self.param_head) tail = nurbs_ReptileTail(**self.param_tail) - + head_ctrls = head.get_ctrls() tail_ctrls = tail.get_ctrls() - + if self.open_mouth: head_ctrls = self.update(head_ctrls, (1, 0), (0, 0, -0.05)) else: @@ -809,18 +881,19 @@ def get_ctrls(self): tail_ctrls = self.update(tail_ctrls, (0, 1)) self.ctrls = self.merge(head_ctrls, tail_ctrls) return self.ctrls - + def generate(self): ctrls = self.get_ctrls() - method = 'blender' if False else 'geomdl' + method = "blender" if False else "geomdl" return nurbs_util.nurbs(ctrls, method, face_size=0.5) - + + class ReptileHeadBody(PartFactory): param_templates = {} - tags = ['body'] + tags = ["body"] unit_scale = (0.5, 0.5, 0.5) - def __init__(self, params=None, type='lizard'): + def __init__(self, params=None, type="lizard"): self.type = type super().__init__(params) @@ -828,25 +901,28 @@ def sample_params(self, select=None, var=1): params = self.param_templates[self.type] # weights = part_util.random_convex_coord(param_templates.keys(), select=select) # params = part_util.rdict_comb(param_templates, weights) - - N = lambda m, v: np.random.normal(m, v * var) - U = lambda l, r: np.random.uniform(l, r) - for key in params['tail']: - l, r = params['trange'][key] + def N(m, v): + return np.random.normal(m, v * var) + + def U(l, r): + return np.random.uniform(l, r) + + for key in params["tail"]: + l, r = params["trange"][key] noise = N(0, 0.1 * (r - l)) - params['tail'][key] += noise - for key in params['head']: - l, r = params['hrange'][key] + params["tail"][key] += noise + for key in params["head"]: + l, r = params["hrange"][key] noise = N(0, 0.1 * (r - l)) - params['head'][key] += noise - + params["head"][key] += noise + return params - + def rescale(self, params, scale): - params['sx'] *= scale - params['sy'] *= scale - params['sz'] *= scale + params["sx"] *= scale + params["sy"] *= scale + params["sz"] *= scale return params def make_part(self, params): @@ -854,40 +930,59 @@ def make_part(self, params): part = part_util.nurbs_to_part(handles) part.skeleton = handles.mean(axis=1) part.joints = { - 0.1: Joint(rest=(0,0,0), bounds=np.array([[-35, 0, -70], [35, 0, 70]])), # head - 0.73: Joint(rest=(0,0,0), bounds=np.array([[-35, 0, -70], [35, 0, 70]])), # neck - 
0.80: Joint(rest=(0,0,0), bounds=np.array([[-35, 0, -70], [35, 0, 70]])), # waist - 0.85: Joint(rest=(0,0,0), bounds=np.array([[-30, 0, -30], [30, 0, 30]])), - 0.88: Joint(rest=(0,0,0), bounds=np.array([[-30, 0, -30], [30, 0, 30]])), - 0.92: Joint(rest=(0,0,0), bounds=np.array([[-30, 0, -30], [30, 0, 30]])), - 0.95: Joint(rest=(0,0,0), bounds=np.array([[-30, 0, -30], [30, 0, 30]])), - 0.98: Joint(rest=(0,0,0), bounds=np.array([[-30, 0, -30], [30, 0, 30]])), - 1.0: Joint(rest=(0,0,0), bounds=np.array([[-30, 0, -30], [30, 0, 30]])), + 0.1: Joint( + rest=(0, 0, 0), bounds=np.array([[-35, 0, -70], [35, 0, 70]]) + ), # head + 0.73: Joint( + rest=(0, 0, 0), bounds=np.array([[-35, 0, -70], [35, 0, 70]]) + ), # neck + 0.80: Joint( + rest=(0, 0, 0), bounds=np.array([[-35, 0, -70], [35, 0, 70]]) + ), # waist + 0.85: Joint(rest=(0, 0, 0), bounds=np.array([[-30, 0, -30], [30, 0, 30]])), + 0.88: Joint(rest=(0, 0, 0), bounds=np.array([[-30, 0, -30], [30, 0, 30]])), + 0.92: Joint(rest=(0, 0, 0), bounds=np.array([[-30, 0, -30], [30, 0, 30]])), + 0.95: Joint(rest=(0, 0, 0), bounds=np.array([[-30, 0, -30], [30, 0, 30]])), + 0.98: Joint(rest=(0, 0, 0), bounds=np.array([[-30, 0, -30], [30, 0, 30]])), + 1.0: Joint(rest=(0, 0, 0), bounds=np.array([[-30, 0, -30], [30, 0, 30]])), } - if self.type == 'snake': + if self.type == "snake": part.iks = { - 0.1: IKParams('snake_head', rotation_weight=0.1, chain_parts=2), - 0.73: IKParams(name='snake_shoulder', rotation_weight=0.1, target_size=0.4), - 0.85: IKParams(name='snake_hip', target_size=0.3), - 1.0: IKParams(name='snake_tail', chain_parts=1) - } + 0.1: IKParams("snake_head", rotation_weight=0.1, chain_parts=2), + 0.73: IKParams( + name="snake_shoulder", rotation_weight=0.1, target_size=0.4 + ), + 0.85: IKParams(name="snake_hip", target_size=0.3), + 1.0: IKParams(name="snake_tail", chain_parts=1), + } else: part.iks = { - 0.1: IKParams('head', rotation_weight=0.1, chain_parts=2), - 0.73: IKParams(name='shoulder', rotation_weight=0.1, target_size=0.4), - 0.85: IKParams(name='hip', target_size=0.3), - 1.0: IKParams(name='reptile_tail', chain_parts=1) - } - surface.new_attr_data(part.obj, 'corner', 'FLOAT', 'POINT', corner_vertices(part.obj)) - surface.new_attr_data(part.obj, 'inside_mouth', 'FLOAT', 'POINT', ventral_vertices(part.obj, bodycheck=True)) + 0.1: IKParams("head", rotation_weight=0.1, chain_parts=2), + 0.73: IKParams(name="shoulder", rotation_weight=0.1, target_size=0.4), + 0.85: IKParams(name="hip", target_size=0.3), + 1.0: IKParams(name="reptile_tail", chain_parts=1), + } + surface.new_attr_data( + part.obj, "corner", "FLOAT", "POINT", corner_vertices(part.obj) + ) + surface.new_attr_data( + part.obj, + "inside_mouth", + "FLOAT", + "POINT", + ventral_vertices(part.obj, bodycheck=True), + ) return part + class ReptileBody(PartFactory): param_templates = {} - tags = ['body'] + tags = ["body"] unit_scale = (0.5, 0.5, 0.5) - def __init__(self, params=None, type='lizard', n_bones=None, shoulder_ik_ts=None, mod=None): + def __init__( + self, params=None, type="lizard", n_bones=None, shoulder_ik_ts=None, mod=None + ): self.type = type self.n_bones = n_bones self.shoulder_ik_ts = shoulder_ik_ts @@ -897,48 +992,64 @@ def __init__(self, params=None, type='lizard', n_bones=None, shoulder_ik_ts=None def sample_params(self, select=None, var=1): params = self.param_templates[self.type] - N = lambda m, v: np.random.normal(m, v * var) - U = lambda l, r: np.random.uniform(l, r) + def N(m, v): + return np.random.normal(m, v * var) + + def U(l, r): + return 
np.random.uniform(l, r) - for key in params['tail']: - l, r = params['trange'][key] + for key in params["tail"]: + l, r = params["trange"][key] noise = N(0, 0.1 * (r - l)) - params['tail'][key] += noise - - return params['tail'] - + params["tail"][key] += noise + + return params["tail"] + def rescale(self, handles): if self.mod is None: return handles - - handles[:,:,0] *= self.mod[0] - handles[:,:,1] *= self.mod[1] - handles[:,:,2] *= self.mod[2] + + handles[:, :, 0] *= self.mod[0] + handles[:, :, 1] *= self.mod[1] + handles[:, :, 2] *= self.mod[2] return handles def make_part(self, params): logger.debug(params) handles = nurbs_ReptileTail(**params).get_ctrls() handles = self.rescale(handles) - + part = part_util.nurbs_to_part(handles) part.skeleton = handles.mean(axis=1) part.joints = { - i: Joint((0,0,0), bounds=np.array([[-30, -30, -30], [30, 30, 30]])) + i: Joint((0, 0, 0), bounds=np.array([[-30, -30, -30], [30, 30, 30]])) for i in np.linspace(0, 1, self.n_bones, endpoint=True) } part.iks = { - t: IKParams(name=f'body_{i+1}', mode='pin' if i == 0 else 'iksolve', - rotation_weight=0, target_size=0.3) + t: IKParams( + name=f"body_{i+1}", + mode="pin" if i == 0 else "iksolve", + rotation_weight=0, + target_size=0.3, + ) for i, t in enumerate(self.shoulder_ik_ts) } - surface.new_attr_data(part.obj, 'corner', 'FLOAT', 'POINT', corner_vertices(part.obj)) - surface.new_attr_data(part.obj, 'inside_mouth', 'FLOAT', 'POINT', ventral_vertices(part.obj, bodycheck=True)) + surface.new_attr_data( + part.obj, "corner", "FLOAT", "POINT", corner_vertices(part.obj) + ) + surface.new_attr_data( + part.obj, + "inside_mouth", + "FLOAT", + "POINT", + ventral_vertices(part.obj, bodycheck=True), + ) return part + class ReptileUpperHead(PartFactory): param_templates = {} - tags = ['jaw'] + tags = ["jaw"] def __init__(self, params=None, mod=None): self.mod = mod @@ -949,40 +1060,49 @@ def sample_params(self, select=None, var=1): # weights = part_util.random_convex_coord(self.param_templates.keys(), select=select) # params = part_util.rdict_comb(self.param_templates, weights) # params = np.random.choice(list(self.param_templates.values())) - - N = lambda m, v: np.random.normal(m, v * var) - U = lambda l, r: np.random.uniform(l, r) - for key in params['head']: - if key in params['range']: - l, r = params['range'][key] + + def N(m, v): + return np.random.normal(m, v * var) + + def U(l, r): + return np.random.uniform(l, r) + + for key in params["head"]: + if key in params["range"]: + l, r = params["range"][key] noise = N(0, 0.1 * (r - l)) - params['head'][key] += noise - return params['head'] - + params["head"][key] += noise + return params["head"] + def rescale(self, handles): if self.mod is None: return handles - - handles[:,:,0] *= self.mod[0] - handles[:,:,1] *= self.mod[1] - handles[:,:,2] *= self.mod[2] + + handles[:, :, 0] *= self.mod[0] + handles[:, :, 1] *= self.mod[1] + handles[:, :, 2] *= self.mod[2] return handles def make_part(self, params): logger.debug(params) handles = nurbs_ReptileUpperHead(**params).get_ctrls() handles = self.rescale(handles) - + part = part_util.nurbs_to_part(handles, 0.01) part.skeleton = handles.mean(axis=1) # part.iks = {1.0: IKParams('body_0', rotation_weight=0.1, chain_parts=1)} - surface.new_attr_data(part.obj, 'corner', 'FLOAT', 'POINT', corner_vertices(part.obj)) - surface.new_attr_data(part.obj, 'inside_mouth', 'FLOAT', 'POINT', ventral_vertices(part.obj)) + surface.new_attr_data( + part.obj, "corner", "FLOAT", "POINT", corner_vertices(part.obj) + ) + 
surface.new_attr_data( + part.obj, "inside_mouth", "FLOAT", "POINT", ventral_vertices(part.obj) + ) return part + class ReptileLowerHead(PartFactory): param_templates = {} - tags = ['jaw'] + tags = ["jaw"] def __init__(self, params=None, mod=None): self.mod = mod @@ -993,42 +1113,50 @@ def sample_params(self, select=None, var=1): # weights = part_util.random_convex_coord(self.param_templates.keys(), select=select) # params = part_util.rdict_comb(self.param_templates, weights) # params = np.random.choice(list(self.param_templates.values())) - - N = lambda m, v: np.random.normal(m, v * var) - U = lambda l, r: np.random.uniform(l, r) - for key in params['head']: - if key in params['range']: - l, r = params['range'][key] + + def N(m, v): + return np.random.normal(m, v * var) + + def U(l, r): + return np.random.uniform(l, r) + + for key in params["head"]: + if key in params["range"]: + l, r = params["range"][key] noise = N(0, 0.1 * (r - l)) - params['head'][key] += noise - return params['head'] - + params["head"][key] += noise + return params["head"] + def rescale(self, handles): if self.mod is None: return handles - - handles[:,:,0] *= self.mod[0] - handles[:,:,1] *= self.mod[1] - handles[:,:,2] *= self.mod[2] + + handles[:, :, 0] *= self.mod[0] + handles[:, :, 1] *= self.mod[1] + handles[:, :, 2] *= self.mod[2] return handles def make_part(self, params): handles = nurbs_ReptileLowerHead(**params).get_ctrls() handles = self.rescale(handles) - + part = part_util.nurbs_to_part(handles, 0.015) part.skeleton = handles.mean(axis=1) # part.iks = {1.0: IKParams('body_0', rotation_weight=0.1, chain_parts=1)} - surface.new_attr_data(part.obj, 'corner', 'FLOAT', 'POINT', corner_vertices(part.obj)) - surface.new_attr_data(part.obj, 'inside_mouth', 'FLOAT', 'POINT', ventral_vertices(part.obj)) + surface.new_attr_data( + part.obj, "corner", "FLOAT", "POINT", corner_vertices(part.obj) + ) + surface.new_attr_data( + part.obj, "inside_mouth", "FLOAT", "POINT", ventral_vertices(part.obj) + ) return part + class LizardFrontLeg(PartFactory): param_templates = {} - tags = ['leg'] - + tags = ["leg"] - def __init__(self, params=None, type='lizard'): + def __init__(self, params=None, type="lizard"): self.type = type super().__init__(params) @@ -1037,21 +1165,24 @@ def sample_params(self, select=None, var=1): # weights = part_util.random_convex_coord(self.param_templates.keys(), select=select) # params = part_util.rdict_comb(self.param_templates, weights) # params = np.random.choice(list(self.param_templates.values())) - - N = lambda m, v: np.random.normal(m, v * var) - U = lambda l, r: np.random.uniform(l, r) - for key in params['leg']: - l, r = params['range'][key] + def N(m, v): + return np.random.normal(m, v * var) + + def U(l, r): + return np.random.uniform(l, r) + + for key in params["leg"]: + l, r = params["range"][key] noise = N(0, 0.1 * (r - l)) - params['leg'][key] += noise + params["leg"][key] += noise + + return params["leg"] - return params['leg'] - def rescale(self, params, scale): - params['sx'] *= scale - params['sy'] *= scale - params['sz'] *= scale + params["sx"] *= scale + params["sy"] *= scale + params["sz"] *= scale return params def make_part(self, params): @@ -1059,10 +1190,16 @@ def make_part(self, params): part = part_util.nurbs_to_part(handles, 0.015) part.skeleton = handles.mean(axis=1) part.joints = { - 0: Joint(rest=(0,0,0), bounds=np.array([[-30, 0, -30], [100, 0, 100]])), # shoulder - 0.4: Joint(rest=(0,0,0), bounds=np.array([[-35, 0, -70], [35, 0, 70]])), # knee - 0.9: 
Joint(rest=(0,0,0), bounds=np.array([[-35, 0, -70], [35, 0, 70]])), # ankle - } + 0: Joint( + rest=(0, 0, 0), bounds=np.array([[-30, 0, -30], [100, 0, 100]]) + ), # shoulder + 0.4: Joint( + rest=(0, 0, 0), bounds=np.array([[-35, 0, -70], [35, 0, 70]]) + ), # knee + 0.9: Joint( + rest=(0, 0, 0), bounds=np.array([[-35, 0, -70], [35, 0, 70]]) + ), # ankle + } # part.obj.scale = (0.5, 0.5, 0.5) # butil.apply_transform(part.obj, scale=True) # part.iks = { @@ -1071,11 +1208,12 @@ def make_part(self, params): # 1.0: IKParams('foot', rotation_weight=0.1, chain_parts=1)} return part + class LizardBackLeg(PartFactory): param_templates = {} - tags = ['leg'] + tags = ["leg"] - def __init__(self, params=None, type='lizard'): + def __init__(self, params=None, type="lizard"): self.type = type super().__init__(params) @@ -1084,21 +1222,24 @@ def sample_params(self, select=None, var=1): # weights = part_util.random_convex_coord(self.param_templates.keys(), select=select) # params = part_util.rdict_comb(self.param_templates, weights) # params = np.random.choice(list(self.param_templates.values())) - - N = lambda m, v: np.random.normal(m, v * var) - U = lambda l, r: np.random.uniform(l, r) - for key in params['leg']: - l, r = params['range'][key] + def N(m, v): + return np.random.normal(m, v * var) + + def U(l, r): + return np.random.uniform(l, r) + + for key in params["leg"]: + l, r = params["range"][key] noise = N(0, 0.1 * (r - l)) - params['leg'][key] += noise + params["leg"][key] += noise - return params['leg'] + return params["leg"] def rescale(self, params, scale): - params['sx'] *= scale - params['sy'] *= scale - params['sz'] *= scale + params["sx"] *= scale + params["sy"] *= scale + params["sz"] *= scale return params def make_part(self, params): @@ -1106,33 +1247,46 @@ def make_part(self, params): part = part_util.nurbs_to_part(handles, 0.015) part.skeleton = handles.mean(axis=1) part.joints = { - 0: Joint(rest=(0,0,0), bounds=np.array([[-30, 0, -30], [100, 0, 100]])), # shoulder - 0.4: Joint(rest=(0,0,0), bounds=np.array([[-35, 0, -70], [35, 0, 70]])), # knee - 0.9: Joint(rest=(0,0,0), bounds=np.array([[-35, 0, -70], [35, 0, 70]])), # ankle - } + 0: Joint( + rest=(0, 0, 0), bounds=np.array([[-30, 0, -30], [100, 0, 100]]) + ), # shoulder + 0.4: Joint( + rest=(0, 0, 0), bounds=np.array([[-35, 0, -70], [35, 0, 70]]) + ), # knee + 0.9: Joint( + rest=(0, 0, 0), bounds=np.array([[-35, 0, -70], [35, 0, 70]]) + ), # ankle + } # part.iks = { # 0.1: IKParams('knee', rotation_weight=0.1, chain_parts=1), # # 0.9: IKParams('ankle', rotation_weight=0.1, chain_parts=1), # 1.0: IKParams('foot', rotation_weight=0.1, chain_parts=1)} return part + class LizardToe(PartFactory): param_templates = {} - tags = ['foot_detail'] + tags = ["foot_detail"] def sample_params(self, select=None, var=1): - weights = part_util.random_convex_coord(self.param_templates.keys(), select=select) + weights = part_util.random_convex_coord( + self.param_templates.keys(), select=select + ) params = part_util.rdict_comb(self.param_templates, weights) # params = np.random.choice(list(self.param_templates.values())) - - N = lambda m, v: np.random.normal(m, v * var) - U = lambda l, r: np.random.uniform(l, r) + + def N(m, v): + return np.random.normal(m, v * var) + + def U(l, r): + return np.random.uniform(l, r) + return params - + def rescale(self, params, scale): - params['sx'] *= scale - params['sy'] *= scale - params['sz'] *= scale + params["sx"] *= scale + params["sy"] *= scale + params["sz"] *= scale return params def make_part(self, 
params): @@ -1142,60 +1296,60 @@ def make_part(self, params): # butil.apply_transform(part.obj, scale=True) part.joints = { - 0: Joint(rest=(0,0,0), bounds=np.array([[-35, 0, -70], [35, 0, 70]])), - } + 0: Joint(rest=(0, 0, 0), bounds=np.array([[-35, 0, -70], [35, 0, 70]])), + } return part lizard_tail = { - 'scale_x': 7, - 'scale_y': 0.65, - 'scale_z': 0.6, - 'sunken': 1, - 'sunken_limit': 0.9, - 'breast': 0, - 'body_curve': 0.75, - 'wrist': 0.32, - 'tail_modification': 0.8 + "scale_x": 7, + "scale_y": 0.65, + "scale_z": 0.6, + "sunken": 1, + "sunken_limit": 0.9, + "breast": 0, + "body_curve": 0.75, + "wrist": 0.32, + "tail_modification": 0.8, } lizard_tail_range = { - 'scale_x': [4, 10], - 'scale_y': [0.5, 1.5], - 'scale_z': [0.8, 1.5], - 'sunken': [0.8, 1.2], - 'sunken_limit': [0.5, 0.7], - 'breast': [0, 0], - 'body_curve': [0.7, 0.8], - 'wrist': [0.50, 0.55], - 'tail_modification': [0.3, 0.7] + "scale_x": [4, 10], + "scale_y": [0.5, 1.5], + "scale_z": [0.8, 1.5], + "sunken": [0.8, 1.2], + "sunken_limit": [0.5, 0.7], + "breast": [0, 0], + "body_curve": [0.7, 0.8], + "wrist": [0.50, 0.55], + "tail_modification": [0.3, 0.7], } for k, v in lizard_tail_range.items(): lizard_tail_range[k] = np.array(v) dinosaur_tail = { - 'scale_x': 30, - 'scale_y': 3, - 'scale_z': 3, - 'sunken': 1, - 'sunken_limit': 1, - 'breast': 0, - 'body_curve': 0.8, - 'wrist': 0.4, - 'tail_modification': 0.4 + "scale_x": 30, + "scale_y": 3, + "scale_z": 3, + "sunken": 1, + "sunken_limit": 1, + "breast": 0, + "body_curve": 0.8, + "wrist": 0.4, + "tail_modification": 0.4, } dinosaur_tail_range = { - 'scale_x': [4, 40], - 'scale_y': [5, 5], - 'scale_z': [5, 5], - 'sunken': [0.8, 1.2], - 'sunken_limit': [0.5, 0.7], - 'breast': [0, 0], - 'body_curve': [0.75, 0.75], - 'wrist': [0.50, 0.55], - 'tail_modification': [0.3, 0.7] + "scale_x": [4, 40], + "scale_y": [5, 5], + "scale_z": [5, 5], + "sunken": [0.8, 1.2], + "sunken_limit": [0.5, 0.7], + "breast": [0, 0], + "body_curve": [0.75, 0.75], + "wrist": [0.50, 0.55], + "tail_modification": [0.3, 0.7], } for k, v in dinosaur_tail_range.items(): @@ -1203,52 +1357,52 @@ def make_part(self, params): snake_tail = { - 'scale_x': 30, - 'scale_y': 0.65, - 'scale_z': 0.57, - 'sunken': 1, - 'sunken_limit': 1, - 'breast': 0.5, - 'body_curve': 0.75, - 'wrist': 0.52, - 'tail_modification': 1, + "scale_x": 30, + "scale_y": 0.65, + "scale_z": 0.57, + "sunken": 1, + "sunken_limit": 1, + "breast": 0.5, + "body_curve": 0.75, + "wrist": 0.52, + "tail_modification": 1, } snake_tail_range = { - 'scale_x': [20, 40], - 'scale_y': [0, 0], - 'scale_z': [0, 0], - 'sunken': [1, 1], - 'sunken_limit': [1, 1], - 'breast': [0, 1], - 'body_curve': [0.7, 0.8], - 'wrist': [0.50, 0.55], - 'tail_modification': [1, 1], + "scale_x": [20, 40], + "scale_y": [0, 0], + "scale_z": [0, 0], + "sunken": [1, 1], + "sunken_limit": [1, 1], + "breast": [0, 1], + "body_curve": [0.7, 0.8], + "wrist": [0.50, 0.55], + "tail_modification": [1, 1], } for k, v in snake_tail_range.items(): snake_tail_range[k] = np.array(v) frog_tail = { - 'scale_x': 2, - 'scale_y': 0.8, - 'scale_z': 0.8, - 'sunken': 1, - 'sunken_limit': 1, - 'breast': 0, - 'body_curve': 0.75, - 'wrist': 0.95 + "scale_x": 2, + "scale_y": 0.8, + "scale_z": 0.8, + "sunken": 1, + "sunken_limit": 1, + "breast": 0, + "body_curve": 0.75, + "wrist": 0.95, } frog_tail_range = { - 'scale_x': [1, 3], - 'scale_y': [0.5, 1.5], - 'scale_z': [0.8, 1.5], - 'sunken': [0.8, 1.2], - 'sunken_limit': [0.5, 0.7], - 'breast': [0, 0], - 'body_curve': [0.7, 0.8], - 'wrist': [0.9, 0.98] + 
"scale_x": [1, 3], + "scale_y": [0.5, 1.5], + "scale_z": [0.8, 1.5], + "sunken": [0.8, 1.2], + "sunken_limit": [0.5, 0.7], + "breast": [0, 0], + "body_curve": [0.7, 0.8], + "wrist": [0.9, 0.98], } for k, v in frog_tail_range.items(): @@ -1256,141 +1410,141 @@ def make_part(self, params): lizard_upper_head = { - 'scale_x': 0.8, - 'scale_y': 0.3, - 'scale_z': 0.4, - 'blunt_head': 0.3, - 'up_head_position': 0.4, - 'up_head_degree': 0.2, - 'offset_x': 0, - 'offset_y': 0, - 'offset_z': 0, + "scale_x": 0.8, + "scale_y": 0.3, + "scale_z": 0.4, + "blunt_head": 0.3, + "up_head_position": 0.4, + "up_head_degree": 0.2, + "offset_x": 0, + "offset_y": 0, + "offset_z": 0, } lizard_upper_head_range = { - 'scale_x': [0.5, 1], - 'scale_y': [0, 0], - 'scale_z': [0, 0], - 'blunt_head': [0, 0.8], - 'up_head_position': [0.2, 0.5], - 'up_head_degree': [0.1, 0.3] + "scale_x": [0.5, 1], + "scale_y": [0, 0], + "scale_z": [0, 0], + "blunt_head": [0, 0.8], + "up_head_position": [0.2, 0.5], + "up_head_degree": [0.1, 0.3], } for k, v in lizard_upper_head_range.items(): lizard_upper_head_range[k] = np.array(v) lizard_lower_head = { - 'scale_x': 0.8, - 'scale_y': 0.3, - 'scale_z': 0.15, - 'blunt_head': 0.3, - 'up_head_position': 0.4, - 'up_head_degree': 0.2, - 'offset_x': 0, - 'offset_y': 0, - 'offset_z': 0, + "scale_x": 0.8, + "scale_y": 0.3, + "scale_z": 0.15, + "blunt_head": 0.3, + "up_head_position": 0.4, + "up_head_degree": 0.2, + "offset_x": 0, + "offset_y": 0, + "offset_z": 0, } lizard_lower_head_range = { - 'scale_x': [0.5, 1], - 'scale_y': [0, 0], - 'scale_z': [0, 0], - 'blunt_head': [0, 0.8], - 'up_head_position': [0.2, 0.5], - 'up_head_degree': [0.1, 0.3] + "scale_x": [0.5, 1], + "scale_y": [0, 0], + "scale_z": [0, 0], + "blunt_head": [0, 0.8], + "up_head_position": [0.2, 0.5], + "up_head_degree": [0.1, 0.3], } for k, v in lizard_lower_head_range.items(): lizard_lower_head_range[k] = np.array(v) ReptileHeadBody.param_templates = { - 'lizard': { - 'head': lizard_upper_head, - 'hrange': lizard_upper_head_range, - 'tail': lizard_tail, - 'trange': lizard_tail_range, - }, - 'snake': { - 'head': lizard_upper_head, - 'hrange': lizard_upper_head_range, - 'tail': snake_tail, - 'trange': snake_tail_range - }, - 'dinosaur': { - 'head': lizard_upper_head, - 'hrange': lizard_upper_head_range, - 'tail': dinosaur_tail, - 'trange': dinosaur_tail_range - }, - 'frog': { - 'head': lizard_upper_head, - 'hrange': lizard_upper_head_range, - 'tail': frog_tail, - 'trange': frog_tail_range - } + "lizard": { + "head": lizard_upper_head, + "hrange": lizard_upper_head_range, + "tail": lizard_tail, + "trange": lizard_tail_range, + }, + "snake": { + "head": lizard_upper_head, + "hrange": lizard_upper_head_range, + "tail": snake_tail, + "trange": snake_tail_range, + }, + "dinosaur": { + "head": lizard_upper_head, + "hrange": lizard_upper_head_range, + "tail": dinosaur_tail, + "trange": dinosaur_tail_range, + }, + "frog": { + "head": lizard_upper_head, + "hrange": lizard_upper_head_range, + "tail": frog_tail, + "trange": frog_tail_range, + }, } ReptileBody.param_templates = { - 'lizard': { - 'head': lizard_upper_head, - 'hrange': lizard_upper_head_range, - 'tail': lizard_tail, - 'trange': lizard_tail_range, - }, - 'snake': { - 'head': lizard_upper_head, - 'hrange': lizard_upper_head_range, - 'tail': snake_tail, - 'trange': snake_tail_range - }, - 'dinosaur': { - 'head': lizard_upper_head, - 'hrange': lizard_upper_head_range, - 'tail': dinosaur_tail, - 'trange': dinosaur_tail_range - }, - 'frog': { - 'head': lizard_upper_head, - 'hrange': 
lizard_upper_head_range, - 'tail': frog_tail, - 'trange': frog_tail_range - } + "lizard": { + "head": lizard_upper_head, + "hrange": lizard_upper_head_range, + "tail": lizard_tail, + "trange": lizard_tail_range, + }, + "snake": { + "head": lizard_upper_head, + "hrange": lizard_upper_head_range, + "tail": snake_tail, + "trange": snake_tail_range, + }, + "dinosaur": { + "head": lizard_upper_head, + "hrange": lizard_upper_head_range, + "tail": dinosaur_tail, + "trange": dinosaur_tail_range, + }, + "frog": { + "head": lizard_upper_head, + "hrange": lizard_upper_head_range, + "tail": frog_tail, + "trange": frog_tail_range, + }, } ReptileLowerHead.param_templates = { - 'head': lizard_lower_head, - 'range': lizard_lower_head_range + "head": lizard_lower_head, + "range": lizard_lower_head_range, } ReptileUpperHead.param_templates = { - 'head': lizard_upper_head, - 'range': lizard_upper_head_range + "head": lizard_upper_head, + "range": lizard_upper_head_range, } LizardFrontLeg.param_templates = { # 'lizard': {'leg': {}, 'range': {}}, - 'lizard': { - 'leg': { - 'scale_x': 1, - 'scale_y': 0.2, - 'scale_z': 0.2, - }, - 'range': { - 'scale_x': [0.5, 1], - 'scale_y': [0.15, 0.3], - 'scale_z': [0.05, 0.2], - } + "lizard": { + "leg": { + "scale_x": 1, + "scale_y": 0.2, + "scale_z": 0.2, + }, + "range": { + "scale_x": [0.5, 1], + "scale_y": [0.15, 0.3], + "scale_z": [0.05, 0.2], + }, } } LizardBackLeg.param_templates = { # 'lizard': {'leg': {}, 'range': {}}, - 'lizard': { - 'leg': { - 'scale_x': 1.5, - 'scale_y': 0.2, - 'scale_z': 0.2, - }, - 'range': { - 'scale_x': [1.5, 3], - 'scale_y': [0.15, 0.3], - 'scale_z': [0.05, 0.2], - } + "lizard": { + "leg": { + "scale_x": 1.5, + "scale_y": 0.2, + "scale_z": 0.2, + }, + "range": { + "scale_x": [1.5, 3], + "scale_y": [0.15, 0.3], + "scale_z": [0.05, 0.2], + }, } } -LizardToe.param_templates['lizard'] = {'toe': {}, 'range': {}} \ No newline at end of file +LizardToe.param_templates["lizard"] = {"toe": {}, "range": {}} diff --git a/infinigen/assets/objects/creatures/parts/ridged_fin.py b/infinigen/assets/objects/creatures/parts/ridged_fin.py new file mode 100644 index 000000000..a57b619bc --- /dev/null +++ b/infinigen/assets/objects/creatures/parts/ridged_fin.py @@ -0,0 +1,669 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
+ +# Authors: Mingzhe Wang + + +import random + +import bpy +import numpy as np + +from infinigen.assets.materials.utils.surface_utils import sample_range +from infinigen.assets.objects.creatures.util.creature import Part, PartFactory +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.tagging import tag_object +from infinigen.core.util import blender as butil + + +@node_utils.to_nodegroup( + "nodegroup_mix2_values", singleton=True, type="GeometryNodeTree" +) +def nodegroup_mix2_values(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "Ratio", 0.5), + ("NodeSocketFloat", "Value1", 0.5), + ("NodeSocketFloat", "Value2", 0.5), + ], + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["Value1"], + 1: group_input.outputs["Ratio"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: 1.0, 1: group_input.outputs["Ratio"]}, + attrs={"operation": "SUBTRACT"}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract, 1: group_input.outputs["Value2"]}, + attrs={"operation": "MULTIPLY"}, + ) + + add = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: multiply_1}) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Value": add}) + + +@node_utils.to_nodegroup("nodegroup_fish_fin", singleton=False, type="GeometryNodeTree") +def nodegroup_fish_fin(nw: NodeWrangler): + # Code generated using version 2.5.1 of the node_transpiler + + grid = nw.new_node( + Nodes.MeshGrid, input_kwargs={"Vertices X": 100, "Vertices Y": 100} + ) + + transform_3 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": grid, "Rotation": (1.5708, 0.0000, 0.0000)}, + ) + + position_3 = nw.new_node(Nodes.InputPosition) + + sep_z = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": position_3}, label="sep_z" + ) + + z_stats = nw.new_node( + Nodes.AttributeStatistic, + input_kwargs={"Geometry": transform_3, 2: sep_z.outputs["Z"]}, + label="z_stats", + ) + + norm_z = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": sep_z.outputs["Z"], + 1: z_stats.outputs["Min"], + 2: z_stats.outputs["Max"], + }, + label="norm_z", + ) + + remap_z = nw.new_node( + Nodes.FloatCurve, + input_kwargs={"Value": norm_z.outputs["Result"]}, + label="remap_z", + ) + node_utils.assign_curve( + remap_z.mapping.curves[0], + [(0.1727, 0.9875), (0.5182, 0.2438), (1.0000, 0.0063)], + ) + + capture_z_rigidity = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={"Geometry": transform_3, 2: remap_z}, + label="capture_z_rigidity", + ) + + position = nw.new_node(Nodes.InputPosition) + + separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": position}) + + greater_than = nw.new_node( + Nodes.Compare, input_kwargs={0: separate_xyz.outputs["Y"]} + ) + + op_and = nw.new_node(Nodes.BooleanMath, input_kwargs={1: greater_than}) + + delete_geometry = nw.new_node( + Nodes.DeleteGeometry, + input_kwargs={ + "Geometry": capture_z_rigidity.outputs["Geometry"], + "Selection": op_and, + }, + ) + + capture_attribute = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={"Geometry": delete_geometry, 1: position}, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + position_1 = nw.new_node(Nodes.InputPosition) + + add = nw.new_node( + Nodes.VectorMath, input_kwargs={0: position_1, 1: (0.5000, 0.0000, 0.5000)} + ) + + separate_xyz_1 
= nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": add.outputs["Vector"]} + ) + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVectorXYZ", "FinScale", (1.0000, 1.0000, 0.5000)), + ("NodeSocketFloat", "RoundWeight", 1.0000), + ("NodeSocketFloat", "Freq", 69.1150), + ("NodeSocketFloat", "OffsetWeightZ", 1.0000), + ("NodeSocketVector", "PatternRotation", (4.0000, 0.0000, 2.0000)), + ("NodeSocketFloat", "OffsetWeightY", 1.0000), + ("NodeSocketFloat", "RoundingWeight", 0.0000), + ("NodeSocketFloat", "AffineX", 0.0000), + ("NodeSocketFloat", "AffineZ", 0.0000), + ("NodeSocketFloat", "Value", 0.5000), + ("NodeSocketFloat", "NoiseWeight", 0.0000), + ("NodeSocketFloat", "BumpX", 0.0000), + ("NodeSocketFloat", "BumpZ", 0.0000), + ("NodeSocketFloat", "NoiseRatioZ", 1.0000), + ("NodeSocketFloat", "NoiseRatioX", 1.0000), + ], + ) + + add_1 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: separate_xyz_1.outputs["Z"], + 1: group_input.outputs["NoiseWeight"], + }, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: add_1, 1: separate_xyz_1.outputs["X"]}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply, 1: group_input.outputs["AffineZ"]}, + attrs={"operation": "MULTIPLY"}, + ) + + separate_xyz_2 = nw.new_node( + Nodes.SeparateXYZ, + input_kwargs={"Vector": capture_attribute.outputs["Attribute"]}, + ) + + add_2 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz_2.outputs["X"]}) + + float_curve = nw.new_node(Nodes.FloatCurve, input_kwargs={"Value": add_2}) + node_utils.assign_curve( + float_curve.mapping.curves[0], + [ + (0.0068, 0.0000), + (0.0455, 0.3812), + (0.1091, 0.5419), + (0.1955, 0.6437), + (0.3205, 0.7300), + (0.4955, 0.7719), + (0.7545, 0.7350), + (0.8705, 0.6562), + (1.0000, 0.4413), + ], + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: float_curve, 1: 0.7000}, + attrs={"operation": "SUBTRACT"}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract, 1: group_input.outputs["RoundWeight"]}, + attrs={"operation": "MULTIPLY"}, + ) + + add_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_1.outputs["X"], 1: group_input.outputs["Value"]}, + ) + + multiply_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: add_3, 1: separate_xyz_1.outputs["Z"]}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_4 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_3, 1: group_input.outputs["AffineX"]}, + attrs={"operation": "MULTIPLY"}, + ) + + add_4 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_2, 1: multiply_4}) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": multiply_1, "Z": add_4} + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": capture_attribute.outputs["Geometry"], + "Offset": combine_xyz, + }, + ) + + noise_texture = nw.new_node(Nodes.NoiseTexture, input_kwargs={"Scale": 3.0000}) + + subtract_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: noise_texture.outputs["Fac"]}, + attrs={"operation": "SUBTRACT"}, + ) + + nodegroup_mix2_values_no_gc = nw.new_node( + nodegroup_mix2_values().name, + input_kwargs={ + "Ratio": group_input.outputs["NoiseRatioX"], + "Value1": separate_xyz_2.outputs["X"], + "Value2": subtract_1, + }, + ) + + add_5 = nw.new_node( + Nodes.Math, input_kwargs={0: nodegroup_mix2_values_no_gc, 1: 10.0000} + ) + + separate_xyz_3 = nw.new_node( + Nodes.SeparateXYZ, + input_kwargs={"Vector": group_input.outputs["PatternRotation"]}, + ) + + multiply_5 = 
nw.new_node( + Nodes.Math, input_kwargs={0: add_5, 1: 0.1000}, attrs={"operation": "MULTIPLY"} + ) + + subtract_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_3.outputs["X"], 1: multiply_5}, + attrs={"operation": "SUBTRACT"}, + ) + + add_6 = nw.new_node(Nodes.Math, input_kwargs={0: add_5, 1: subtract_2}) + + power = nw.new_node( + Nodes.Math, input_kwargs={0: add_6, 1: 2.0000}, attrs={"operation": "POWER"} + ) + + nodegroup_mix2_values_no_gc_1 = nw.new_node( + nodegroup_mix2_values().name, + input_kwargs={ + "Ratio": group_input.outputs["NoiseRatioZ"], + "Value1": separate_xyz_2.outputs["Z"], + "Value2": subtract_1, + }, + ) + + add_7 = nw.new_node( + Nodes.Math, input_kwargs={0: nodegroup_mix2_values_no_gc_1, 1: 1.0000} + ) + + multiply_6 = nw.new_node( + Nodes.Math, input_kwargs={0: add_7, 1: 0.1000}, attrs={"operation": "MULTIPLY"} + ) + + subtract_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_3.outputs["Z"], 1: multiply_6}, + attrs={"operation": "SUBTRACT"}, + ) + + add_8 = nw.new_node(Nodes.Math, input_kwargs={0: add_7, 1: subtract_3}) + + multiply_7 = nw.new_node( + Nodes.Math, input_kwargs={0: add_8}, attrs={"operation": "MULTIPLY"} + ) + + power_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_7, 1: 2.0000}, + attrs={"operation": "POWER"}, + ) + + add_9 = nw.new_node(Nodes.Math, input_kwargs={0: power, 1: power_1}) + + sqrt = nw.new_node(Nodes.Math, input_kwargs={0: add_9}, attrs={"operation": "SQRT"}) + + multiply_8 = nw.new_node( + Nodes.Math, + input_kwargs={0: sqrt, 1: group_input.outputs["Freq"]}, + attrs={"operation": "MULTIPLY"}, + ) + + sine = nw.new_node( + Nodes.Math, input_kwargs={0: multiply_8}, attrs={"operation": "SINE"} + ) + + power_2 = nw.new_node( + Nodes.Math, input_kwargs={0: sine, 1: 2.1000}, attrs={"operation": "POWER"} + ) + + capture_attribute_1 = nw.new_node( + Nodes.CaptureAttribute, input_kwargs={"Geometry": set_position, 2: power_2} + ) + + multiply_9 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_2.outputs["X"], 1: group_input.outputs["BumpX"]}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_10 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_2.outputs["Z"], 1: group_input.outputs["BumpZ"]}, + attrs={"operation": "MULTIPLY"}, + ) + + add_10 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_9, 1: multiply_10}) + + subtract_4 = nw.new_node( + Nodes.Math, input_kwargs={1: add_10}, attrs={"operation": "SUBTRACT"} + ) + + capture_attribute_2 = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={ + "Geometry": capture_attribute_1.outputs["Geometry"], + 2: subtract_4, + }, + ) + + noise_texture_1 = nw.new_node(Nodes.NoiseTexture, input_kwargs={"Scale": 100.0000}) + + subtract_5 = nw.new_node( + Nodes.Math, + input_kwargs={0: noise_texture_1.outputs["Fac"]}, + attrs={"operation": "SUBTRACT"}, + ) + + normal = nw.new_node(Nodes.InputNormal) + + multiply_11 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: subtract_5, 1: normal}, + attrs={"operation": "MULTIPLY"}, + ) + + value = nw.new_node(Nodes.Value) + value.outputs[0].default_value = 0.0010 + + multiply_12 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: multiply_11.outputs["Vector"], 1: value}, + attrs={"operation": "MULTIPLY"}, + ) + + set_position_1 = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": capture_attribute_2.outputs["Geometry"], + "Offset": multiply_12.outputs["Vector"], + }, + ) + + subtract_6 = nw.new_node( + Nodes.Math, + input_kwargs={1: separate_xyz_2.outputs["X"]}, + attrs={"operation": 
"SUBTRACT"}, + ) + + multiply_13 = nw.new_node( + Nodes.Math, + input_kwargs={0: sine, 1: subtract_6}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_14 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["OffsetWeightZ"], 1: -0.0200}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_15 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_13, 1: multiply_14}, + attrs={"operation": "MULTIPLY"}, + ) + + sign = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_2.outputs["Y"]}, + attrs={"operation": "SIGN"}, + ) + + multiply_16 = nw.new_node( + Nodes.Math, input_kwargs={0: power_2, 1: sign}, attrs={"operation": "MULTIPLY"} + ) + + multiply_17 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["OffsetWeightY"], 1: 0.0060}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_18 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_17, 1: subtract_4}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_19 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_16, 1: multiply_18}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_20 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["OffsetWeightZ"], 1: 0.0300}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_21 = nw.new_node( + Nodes.Math, + input_kwargs={0: sine, 1: multiply_20}, + attrs={"operation": "MULTIPLY"}, + ) + + add_11 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_21, 1: 0.0000}) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": multiply_15, "Y": multiply_19, "Z": add_11} + ) + + set_position_2 = nw.new_node( + Nodes.SetPosition, + input_kwargs={"Geometry": set_position_1, "Offset": combine_xyz_1}, + ) + + noise_texture_2 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={"W": -0.6000, "Scale": 0.8000}, + attrs={"noise_dimensions": "4D"}, + ) + + subtract_7 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: noise_texture_2.outputs["Color"], 1: (0.5000, 0.5000, 0.5000)}, + attrs={"operation": "SUBTRACT"}, + ) + + multiply_22 = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: subtract_7.outputs["Vector"], + 1: group_input.outputs["NoiseWeight"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + set_position_3 = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": set_position_2, + "Offset": multiply_22.outputs["Vector"], + }, + ) + + position_2 = nw.new_node(Nodes.InputPosition) + + separate_xyz_4 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": position_2}) + + subtract_8 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_4.outputs["Z"]}, + attrs={"operation": "SUBTRACT"}, + ) + + absolute = nw.new_node( + Nodes.Math, input_kwargs={0: subtract_8}, attrs={"operation": "ABSOLUTE"} + ) + + power_3 = nw.new_node( + Nodes.Math, input_kwargs={0: absolute, 1: 1.0000}, attrs={"operation": "POWER"} + ) + + multiply_23 = nw.new_node( + Nodes.Math, + input_kwargs={0: power_3, 1: 0.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply_23}) + + set_position_4 = nw.new_node( + Nodes.SetPosition, + input_kwargs={"Geometry": set_position_3, "Offset": combine_xyz_2}, + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": set_position_4, + "Translation": (0.0000, 0.0000, 0.4000), + }, + ) + + transform_1 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": transform, + "Rotation": (0.0000, 0.0000, -1.5708), + "Scale": group_input.outputs["FinScale"], + }, + ) + + transform_2 = nw.new_node( + 
Nodes.Transform, + input_kwargs={"Geometry": transform_1, "Rotation": (1.5708, 0.0000, 1.5708)}, + ) + + multiply_24 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: capture_attribute_1.outputs[2], + 1: capture_z_rigidity.outputs[2], + }, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_25 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_24, 1: 1.600}, + attrs={"operation": "MULTIPLY"}, + ) + + add_final_rigidity = nw.new_node( + Nodes.Math, + input_kwargs={0: capture_z_rigidity.outputs[2], 1: multiply_25}, + label="add_final_rigidity", + attrs={"use_clamp": True}, + ) + + store_cloth_pin = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": transform_2, + "Name": "cloth_pin_rigidity", + "Value": add_final_rigidity, + }, + label="store_cloth_pin", + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": store_cloth_pin, + "Bump": capture_attribute_1.outputs[2], + "BumpMask": capture_attribute_2.outputs[2], + }, + ) + + +class FishFin(PartFactory): + tags = ["limb", "fin"] + + def __init__(self, *args, rig=True, **kwargs): + super().__init__(*args, **kwargs) + self.rig = rig + + def sample_params(self): + params = { + "FinScale": np.array((1.0, 1.0, 0.5), dtype=np.float32), + "RoundWeight": sample_range(0, 1), + "Freq": sample_range(50, 100), + "OffsetWeightZ": sample_range(0.1, 0.5), + "PatternRotation": np.array( + (4.0 if random.random() < 0.5 else -4, 0.0, 2.0), dtype=np.float32 + ), + "OffsetWeightY": sample_range(0.5, 1), + "RoundingWeight": sample_range(0.02, 0.07), + "AffineX": sample_range(0, 0.3), + "AffineZ": sample_range(0, 1), + "Value": 0.5, + "NoiseWeight": 0.0, + "BumpX": 0.0, + "BumpZ": 1.0, + "NoiseRatioZ": 1.0, + "NoiseRatioX": sample_range(0.9, 0.95), + } + return params + + def make_part(self, params): + part = Part( + skeleton=np.zeros((2, 3), dtype=float), obj=butil.spawn_vert("fin_parent") + ) + + fin = butil.spawn_vert("Fin") + fin.parent = part.obj + + _, mod = butil.modify_mesh( + fin, "NODES", apply=False, return_mod=True, node_group=nodegroup_fish_fin() + ) + butil.set_geomod_inputs(mod, params) + + id1 = mod.node_group.outputs["Bump"].identifier + mod[f"{id1}_attribute_name"] = "Bump" + id2 = mod.node_group.outputs["BumpMask"].identifier + mod[f"{id2}_attribute_name"] = "BumpMask" + + butil.apply_modifiers(fin, mod) + + part.settings["rig_extras"] = self.rig + tag_object(part.obj, "fish_fin") + return part + + +if __name__ == "__main__": + fin = FishFin() + import os + + fn = os.path.join(os.path.abspath(os.curdir), "dev_scene_test_fin.blend") + bpy.ops.wm.save_as_mainfile(filepath=fn) diff --git a/infinigen/assets/objects/creatures/parts/tail.py b/infinigen/assets/objects/creatures/parts/tail.py new file mode 100644 index 000000000..ca442ed31 --- /dev/null +++ b/infinigen/assets/objects/creatures/parts/tail.py @@ -0,0 +1,68 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
+ +# Authors: Alexander Raistrick + + +import numpy as np +from numpy.random import normal as N + +from infinigen.assets.objects.creatures.util.creature import PartFactory +from infinigen.assets.objects.creatures.util.genome import IKParams, Joint +from infinigen.assets.objects.creatures.util.part_util import nodegroup_to_part +from infinigen.assets.utils.nodegroups.curve import nodegroup_simple_tube_v2 +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.tagging import tag_object + + +@node_utils.to_nodegroup("nodegroup_tail", singleton=False, type="GeometryNodeTree") +def nodegroup_tail(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVector", "length_rad1_rad2", (1.49, 0.05, 0.02)), + ("NodeSocketVector", "angles_deg", (31.39, 65.81, -106.93)), + ("NodeSocketFloat", "aspect", 1.0), + ], + ) + + simple_tube_v2 = nw.new_node( + nodegroup_simple_tube_v2().name, + input_kwargs={ + "length_rad1_rad2": group_input.outputs["length_rad1_rad2"], + "angles_deg": group_input.outputs["angles_deg"], + "aspect": group_input.outputs["aspect"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": simple_tube_v2.outputs["Geometry"], + "Skeleton Curve": simple_tube_v2.outputs["Skeleton Curve"], + }, + ) + + +class Tail(PartFactory): + tags = ["tail"] + + def sample_params(self): + return { + "length_rad1_rad2": (N(0.5, 0.1), 0.05, 0.02), + "angles_deg": np.array((31.39, 65.81, -106.93)) * N(1, 0.1), + "aspect": N(1, 0.05), + } + + def make_part(self, params): + part = nodegroup_to_part(nodegroup_tail, params) + part.joints = { + i: Joint(rest=(0, 0, 0), bounds=np.array([[-30, 0, -30], [30, 0, 30]])) + for i in np.linspace(0, 1, 6) + } + part.iks = {1.0: IKParams(name="tail", chain_parts=1)} + tag_object(part.obj, "tail") + return part diff --git a/infinigen/assets/objects/creatures/parts/utils/draw.py b/infinigen/assets/objects/creatures/parts/utils/draw.py new file mode 100644 index 000000000..e3f933949 --- /dev/null +++ b/infinigen/assets/objects/creatures/parts/utils/draw.py @@ -0,0 +1,104 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
+ +# Authors: Lingjie Mei + + +import numpy as np + +from infinigen.assets.utils.decorate import displace_vertices +from infinigen.assets.utils.draw import spin +from infinigen.core import surface +from infinigen.core.nodes.node_info import Nodes +from infinigen.core.nodes.node_wrangler import NodeWrangler +from infinigen.core.util import blender as butil + + +def make_segments(x_cuts, y_cuts, x_anchors, y_anchors, params): + x_length, y_length, z_length = map(params.get, ["x_length", "y_length", "z_length"]) + segments = [] + for i in range(len(x_cuts) - 1): + x_start, x_end = x_cuts[i], x_cuts[i + 1] + y_start, y_end = y_cuts[i], y_cuts[i + 1] + xs = x_anchors(x_start, x_end) + ys = y_anchors(y_start, y_end) + obj = spin( + [ + np.array([xs[0], *xs, xs[-1]]) * x_length, + np.array([0, *ys, 0]) * y_length, + 0.0, + ], + [1, len(xs)], + axis=(1, 0, 0), + ) + + y_base = y_length * y_start + displace_vertices( + obj, + lambda x, y, z: ( + 0, + 0, + -np.clip(z + y_base * params["bottom_cutoff"], None, 0) + * (1 - params["bottom_shift"]), + ), + ) + displace_vertices( + obj, + lambda x, y, z: ( + 0, + 0, + np.where( + z > 0, + np.clip(params["top_cutoff"] * y_base - np.abs(y), 0, None) + * params["top_shift"], + 0, + ), + ), + ) + + decorate_segment(obj, params, x_start, x_end) + obj.scale[-1] = params["z_length"] / y_length + butil.apply_transform(obj) + segments.append(obj) + return segments + + +def decorate_segment(obj, params, x_start, x_end): + def offset(nw: NodeWrangler, vector): + noise_texture = nw.new_node( + Nodes.NoiseTexture, [vector], input_kwargs={"Scale": params["noise_scale"]} + ) + x = nw.separate(nw.new_node(Nodes.InputPosition))[0] + ratio = nw.build_float_curve( + nw.scalar_divide(x, params["x_length"]), + [(x_start, 1), (x_end - 0.01, 1), (x_end, 0), (x_end + 0.01, 0)], + ) + return nw.scale( + nw.scalar_multiply( + ratio, nw.scalar_multiply(noise_texture, params["noise_strength"]) + ), + nw.new_node(Nodes.InputNormal), + ) + + surface.add_geomod(obj, geo_symmetric_texture, input_args=[offset], apply=True) + butil.modify_mesh(obj, "WELD", merge_threshold=0.001) + + +def geo_symmetric_texture(nw: NodeWrangler, offset, selection=None): + geometry = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) + pos = nw.new_node(Nodes.InputPosition) + x, y, z = nw.separate(pos) + vector = nw.combine(x, nw.math("ABSOLUTE", y), z) + distance = nw.new_node(Nodes.NamedAttribute, ["distance"]) + geometry = nw.new_node( + Nodes.SetPosition, + [ + geometry, + surface.eval_argument(nw, selection), + None, + surface.eval_argument(nw, offset, vector=vector, distance=distance), + ], + ) + nw.new_node(Nodes.GroupOutput, input_kwargs={"Geometry": geometry}) diff --git a/infinigen/assets/objects/creatures/parts/wings.py b/infinigen/assets/objects/creatures/parts/wings.py new file mode 100644 index 000000000..2cfa03c6f --- /dev/null +++ b/infinigen/assets/objects/creatures/parts/wings.py @@ -0,0 +1,1144 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
+ +# Authors: +# - Alexander Raistrick: base version +# - Beining Han: flying variant + + +import numpy as np +from numpy.random import normal as N +from numpy.random import uniform as U + +from infinigen.assets.objects.creatures.util.creature import PartFactory +from infinigen.assets.objects.creatures.util.genome import IKParams, Joint +from infinigen.assets.objects.creatures.util.part_util import nodegroup_to_part +from infinigen.assets.utils.nodegroups.curve import ( + nodegroup_simple_tube, + nodegroup_simple_tube_v2, +) +from infinigen.assets.utils.nodegroups.geometry import ( + nodegroup_symmetric_clone, +) +from infinigen.assets.utils.nodegroups.math import nodegroup_deg2_rad +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.tagging import tag_nodegroup, tag_object +from infinigen.core.util.math import clip_gaussian + + +@node_utils.to_nodegroup("nodegroup_feather", singleton=False, type="GeometryNodeTree") +def nodegroup_feather(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[("NodeSocketVector", "Length Rad1 Rad2", (0.5, 0.1, 0.1))], + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, + input_kwargs={"Vector": group_input.outputs["Length Rad1 Rad2"]}, + ) + + scale = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: (1.0, 0.0, 0.0), "Scale": separate_xyz.outputs["X"]}, + attrs={"operation": "SCALE"}, + ) + + curve_line = nw.new_node( + Nodes.CurveLine, input_kwargs={"End": scale.outputs["Vector"]} + ) + + subdivide_curve = nw.new_node( + Nodes.SubdivideCurve, input_kwargs={"Curve": curve_line, "Cuts": 30} + ) + + spline_parameter = nw.new_node(Nodes.SplineParameter) + + float_curve = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": spline_parameter.outputs["Factor"]} + ) + node_utils.assign_curve( + float_curve.mapping.curves[0], + [(0.0, 0.0), (0.2327, 0.985), (0.8909, 0.6), (1.0, 0.0)], + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": spline_parameter.outputs["Factor"], + 3: separate_xyz.outputs["Y"], + 4: separate_xyz.outputs["Z"], + }, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: float_curve, 1: map_range.outputs["Result"]}, + attrs={"operation": "MULTIPLY"}, + ) + + set_curve_radius = nw.new_node( + Nodes.SetCurveRadius, + input_kwargs={"Curve": subdivide_curve, "Radius": multiply}, + ) + + curve_line_1 = nw.new_node( + Nodes.CurveLine, + input_kwargs={"Start": (0.0, -1.0, 0.0), "End": (0.0, 1.0, 0.0)}, + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={"Curve": set_curve_radius, "Profile Curve": curve_line_1}, + ) + + subdivide_curve_1 = nw.new_node( + Nodes.SubdivideCurve, input_kwargs={"Curve": curve_line, "Cuts": 4} + ) + + trim_curve = nw.new_node( + Nodes.TrimCurve, input_kwargs={"Curve": subdivide_curve_1, "End": 0.8742} + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": spline_parameter.outputs["Factor"], 3: 0.15, 4: 0.05}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: map_range_1.outputs["Result"], 1: separate_xyz.outputs["Y"]}, + attrs={"operation": "MULTIPLY"}, + ) + + set_curve_radius_1 = nw.new_node( + Nodes.SetCurveRadius, input_kwargs={"Curve": trim_curve, "Radius": multiply_1} + ) + + curve_circle = nw.new_node(Nodes.CurveCircle, input_kwargs={"Resolution": 6}) + + curve_to_mesh_1 = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": 
set_curve_radius_1, + "Profile Curve": curve_circle.outputs["Curve"], + }, + ) + + # join_geometry = nw.new_node(Nodes.JoinGeometry, + # input_kwargs={'Geometry': [curve_to_mesh, curve_to_mesh_1]}) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Mesh": tag_nodegroup(nw, curve_to_mesh, "feather")}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_bird_tail", singleton=False, type="GeometryNodeTree" +) +def nodegroup_bird_tail(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + simple_tube = nw.new_node( + nodegroup_simple_tube().name, + input_kwargs={ + "Angles Deg": (0.0, 0.0, 0.0), + "Seg Lengths": (0.11, 0.11, 0.11), + "Start Radius": 0.07, + "End Radius": 0.02, + "Fullness": 3.0, + }, + ) + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVector", "Feather Length Rad1 Rad2", (0.5, 0.08, 0.1)), + ("NodeSocketVector", "Feather Rot Extent", (136.51, -11.8, 34.0)), + ("NodeSocketVector", "Feather Rot Rand Bounds", (5.0, 5.0, 5.0)), + ("NodeSocketIntUnsigned", "N Feathers", 16), + ], + ) + + quadratic_bezier = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Resolution": group_input.outputs["N Feathers"], + "Start": (0.0, 0.0, -0.1), + "Middle": (0.0, 0.15, -0.05), + "End": (0.0, 0.15, 0.11), + }, + ) + + feather = nw.new_node( + nodegroup_feather().name, + input_kwargs={ + "Length Rad1 Rad2": group_input.outputs["Feather Length Rad1 Rad2"] + }, + ) + + index = nw.new_node(Nodes.Index) + + divide = nw.new_node( + Nodes.Math, + input_kwargs={0: index, 1: group_input.outputs["N Feathers"]}, + attrs={"operation": "DIVIDE"}, + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Vector": divide, + 9: (-90.0, -14.88, 4.01), + 10: group_input.outputs["Feather Rot Extent"], + }, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + scale = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: group_input.outputs["Feather Rot Rand Bounds"], "Scale": -1.0}, + attrs={"operation": "SCALE"}, + ) + + random_value = nw.new_node( + Nodes.RandomValue, + input_kwargs={ + 0: scale.outputs["Vector"], + 1: group_input.outputs["Feather Rot Rand Bounds"], + }, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + add = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: map_range.outputs["Vector"], 1: random_value.outputs["Value"]}, + ) + + deg2rad = nw.new_node( + nodegroup_deg2_rad().name, input_kwargs={"Deg": add.outputs["Vector"]} + ) + + instance_on_points = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={ + "Points": quadratic_bezier, + "Instance": feather, + "Rotation": deg2rad, + }, + ) + + realize_instances = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": instance_on_points} + ) + + symmetric_clone = nw.new_node( + nodegroup_symmetric_clone().name, input_kwargs={"Geometry": realize_instances} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": simple_tube.outputs["Geometry"], + "Skeleton Curve": simple_tube.outputs["Skeleton Curve"], + "TailFeathers": symmetric_clone.outputs["Both"], + }, + ) + + +@node_utils.to_nodegroup( + "nodegroup_bird_wing", singleton=False, type="GeometryNodeTree" +) +def nodegroup_bird_wing(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVector", "length_rad1_rad2", (1.0, 0.26, 0.0)), + ("NodeSocketFloat", "feather_density", 18.7), + ("NodeSocketFloat", "aspect", 1.0), + ("NodeSocketFloat", "fullness", 4.0), + 
("NodeSocketFloatFactor", "Wing Shape Sculpting", 1.0), + ("NodeSocketVector", "Feather length_rad1_rad2", (0.6, 0.04, 0.04)), + ("NodeSocketFloat", "Extension", 1.68), + ], + ) + + map_range_3 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Vector": group_input.outputs["Extension"], + 9: (-83.46, 154.85, -155.38), + 10: (-15.04, 60.5, -41.1), + }, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + simple_tube_v2 = nw.new_node( + nodegroup_simple_tube_v2().name, + input_kwargs={ + "length_rad1_rad2": group_input.outputs["length_rad1_rad2"], + "angles_deg": map_range_3.outputs["Vector"], + "proportions": (0.2, 0.27, 0.3), + "aspect": group_input.outputs["aspect"], + "do_bezier": False, + "fullness": group_input.outputs["fullness"], + }, + ) + + curve_length = nw.new_node( + Nodes.CurveLength, + input_kwargs={"Curve": simple_tube_v2.outputs["Skeleton Curve"]}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: curve_length, 1: group_input.outputs["feather_density"]}, + attrs={"operation": "MULTIPLY"}, + ) + + resample_curve = nw.new_node( + Nodes.ResampleCurve, + input_kwargs={ + "Curve": simple_tube_v2.outputs["Skeleton Curve"], + "Count": multiply, + }, + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, input_kwargs={"Curve": resample_curve} + ) + + reroute = nw.new_node(Nodes.Reroute, input_kwargs={"Input": curve_to_mesh}) + + feather = nw.new_node( + nodegroup_feather().name, + input_kwargs={ + "Length Rad1 Rad2": group_input.outputs["Feather length_rad1_rad2"] + }, + ) + + index = nw.new_node(Nodes.Index) + + attribute_statistic = nw.new_node( + Nodes.AttributeStatistic, input_kwargs={"Geometry": curve_to_mesh, 2: index} + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": index, + 1: attribute_statistic.outputs["Min"], + 2: attribute_statistic.outputs["Max"], + }, + ) + + transfer_attribute_index = nw.new_node( + Nodes.SampleNearest, + input_kwargs={ + "Geometry": curve_to_mesh, + "Sample Position": map_range_1.outputs["Result"], + }, + ) + + transfer_attribute = nw.new_node( + Nodes.SampleIndex, + input_kwargs={"Geometry": curve_to_mesh, "Index": transfer_attribute_index}, + ) + + float_curve = nw.new_node( + Nodes.FloatCurve, + input_kwargs={ + "Factor": group_input.outputs["Wing Shape Sculpting"], + "Value": (transfer_attribute, "Value"), + }, + ) + node_utils.assign_curve( + float_curve.mapping.curves[0], + [(0.0, 0.0), (0.5164, 0.245), (0.7564, 0.625), (1.0, 1.0)], + ) + + map_range_2 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": group_input.outputs["Extension"], 3: 115.65, 4: 0.0}, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"Y": map_range_2.outputs["Result"]} + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={"Vector": float_curve, 9: (0.0, 80.0, 0.0), 10: combine_xyz}, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + add = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: map_range.outputs["Vector"], 1: (-5.0, 0.0, -1.0)}, + ) + + deg2rad = nw.new_node( + nodegroup_deg2_rad().name, input_kwargs={"Deg": add.outputs["Vector"]} + ) + + vector_curves = nw.new_node( + Nodes.VectorCurve, + input_kwargs={ + "Fac": group_input.outputs["Wing Shape Sculpting"], + "Vector": transfer_attribute, + }, + ) + node_utils.assign_curve( + vector_curves.mapping.curves[0], + [ + (-1.0, -0.0), + (0.0036, 0.0), + (0.0473, 0.6), + (0.3527, 0.54), + (0.6, 0.9), + (0.8836, 0.92), + (1.0, 0.58), + ], + handles=["AUTO", "VECTOR", "AUTO", "AUTO", "VECTOR", "AUTO", "AUTO"], + ) + 
node_utils.assign_curve(vector_curves.mapping.curves[1], [(-1.0, 1.0), (1.0, 1.0)]) + node_utils.assign_curve(vector_curves.mapping.curves[2], [(-1.0, 1.0), (1.0, 1.0)]) + + instance_on_points = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={ + "Points": reroute, + "Instance": feather, + "Rotation": deg2rad, + "Scale": vector_curves, + }, + ) + + add_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: map_range.outputs["Vector"], 1: (-5.0, 0.0, 0.0)}, + ) + + deg2rad_1 = nw.new_node( + nodegroup_deg2_rad().name, input_kwargs={"Deg": add_1.outputs["Vector"]} + ) + + multiply_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: vector_curves, 1: (0.75, 1.0, 1.0)}, + attrs={"operation": "MULTIPLY"}, + ) + + instance_on_points_1 = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={ + "Points": reroute, + "Instance": feather, + "Rotation": deg2rad_1, + "Scale": multiply_1.outputs["Vector"], + }, + ) + + add_2 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: map_range.outputs["Vector"], 1: (-10.3, 0.0, 1.0)}, + ) + + deg2rad_2 = nw.new_node( + nodegroup_deg2_rad().name, input_kwargs={"Deg": add_2.outputs["Vector"]} + ) + + multiply_2 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: vector_curves, 1: (0.45, 1.0, 1.0)}, + attrs={"operation": "MULTIPLY"}, + ) + + instance_on_points_2 = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={ + "Points": reroute, + "Instance": feather, + "Rotation": deg2rad_2, + "Scale": multiply_2.outputs["Vector"], + }, + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={ + "Geometry": [instance_on_points, instance_on_points_1, instance_on_points_2] + }, + ) + + realize_instances = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": join_geometry_1} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": simple_tube_v2.outputs["Geometry"], + "Skeleton Curve": simple_tube_v2.outputs["Skeleton Curve"], + "Feathers": realize_instances, + }, + ) + + +class BirdTail(PartFactory): + tags = ["tail", "wing"] + + def sample_params(self): + return { + "Feather Length Rad1 Rad2": np.array((0.4, 0.06, 0.04)) + * N(1, 0.1) + * N(1, 0.1, 3), + "Feather Rot Extent": np.array((25, -10, -16)) * N(1, 0.1, 3), + "Feather Rot Rand Bounds": np.array((5.0, 5.0, 5.0)) + * N(1, 0.1) + * N(1, 0.05, 3), + "N Feathers": int(N(16, 3)), + } + + def make_part(self, params): + part = nodegroup_to_part(nodegroup_bird_tail, params) + return part + + +class BirdWing(PartFactory): + tags = ["limb", "wing"] + + def sample_params(self): + return { + "length_rad1_rad2": np.array((clip_gaussian(1.2, 0.7, 0.4, 2), 0.1, 0.02)), + "feather_density": 30, + "aspect": N(0.4, 0.05), + "fullness": N(4, 0.1), + "Wing Shape Sculpting": U(0.6, 1), + "Feather length_rad1_rad2": np.array((0.7 * N(1, 0.2), 0.04, 0.04)), + "Extension": U(0, 0.05) if U() < 0.8 else U(0.7, 1), + } + + def make_part(self, params): + # split extras is essential to make automatic rigging work. 
We will join them back together later + part = nodegroup_to_part(nodegroup_bird_wing, params, split_extras=True) + part.joints = { + 0: Joint( + rest=(0, 0, 0), bounds=np.array([[-35, 0, -70], [35, 0, 70]]) + ), # shoulder + 0.27: Joint(rest=(0, 0, 0), bounds=np.array([[-35, 0, -70], [35, 0, 70]])), + 0.65: Joint( + rest=(0, 0, 0), bounds=np.array([[-35, 0, -70], [35, 0, 70]]) + ), # elbow + } + part.iks = {1.0: IKParams(name="wingtip", chain_parts=1)} + tag_object(part.obj, "bird_wing") + part.settings["parent_extras_rigid"] = True + return part + + +@node_utils.to_nodegroup( + "nodegroup_flying_feather", singleton=False, type="GeometryNodeTree" +) +def nodegroup_flying_feather(nw: NodeWrangler): + # Code generated using version 2.5.1 of the node_transpiler + + vector = nw.new_node(Nodes.Vector) + vector.vector = (0.5000, 0.0500, 0.0000) + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVector", "Length Rad1 Rad2", (0.5000, 0.1000, 0.1000)) + ], + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, + input_kwargs={"Vector": group_input.outputs["Length Rad1 Rad2"]}, + ) + + scale = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: vector, "Scale": separate_xyz.outputs["X"]}, + attrs={"operation": "SCALE"}, + ) + + scale_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: (1.0000, 0.0000, 0.0000), "Scale": separate_xyz.outputs["X"]}, + attrs={"operation": "SCALE"}, + ) + + quadratic_bezier = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Resolution": 32, + "Start": (0.0000, 0.0000, 0.0000), + "Middle": scale.outputs["Vector"], + "End": scale_1.outputs["Vector"], + }, + ) + + set_position = nw.new_node( + Nodes.SetPosition, input_kwargs={"Geometry": quadratic_bezier} + ) + + subdivide_curve_1 = nw.new_node( + Nodes.SubdivideCurve, input_kwargs={"Curve": set_position, "Cuts": 4} + ) + + trim_curve = nw.new_node( + Nodes.TrimCurve, input_kwargs={"Curve": subdivide_curve_1, "End": 0.8742} + ) + + spline_parameter = nw.new_node(Nodes.SplineParameter) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": spline_parameter.outputs["Factor"], + 3: 0.1500, + 4: 0.0100, + }, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: map_range_1.outputs["Result"], 1: separate_xyz.outputs["Y"]}, + attrs={"operation": "MULTIPLY"}, + ) + + set_curve_radius_1 = nw.new_node( + Nodes.SetCurveRadius, input_kwargs={"Curve": trim_curve, "Radius": multiply} + ) + + curve_circle = nw.new_node(Nodes.CurveCircle, input_kwargs={"Resolution": 6}) + + curve_to_mesh_1 = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": set_curve_radius_1, + "Profile Curve": curve_circle.outputs["Curve"], + }, + ) + + subdivide_curve = nw.new_node( + Nodes.SubdivideCurve, input_kwargs={"Curve": set_position, "Cuts": 30} + ) + + float_curve = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": spline_parameter.outputs["Factor"]} + ) + node_utils.assign_curve( + float_curve.mapping.curves[0], + [(0.0000, 0.0000), (0.3373, 0.8188), (0.7182, 0.7375), (1.0000, 0.0000)], + ) + + white_noise_texture = nw.new_node(Nodes.WhiteNoiseTexture) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: white_noise_texture.outputs["Value"], 1: 0.1000}, + attrs={"operation": "MULTIPLY"}, + ) + + add = nw.new_node(Nodes.Math, input_kwargs={0: float_curve, 1: multiply_1}) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": spline_parameter.outputs["Factor"], + 3: separate_xyz.outputs["Y"], + 4: separate_xyz.outputs["Z"], + }, + ) + 
+ multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: add, 1: map_range.outputs["Result"]}, + attrs={"operation": "MULTIPLY"}, + ) + + set_curve_radius = nw.new_node( + Nodes.SetCurveRadius, + input_kwargs={"Curve": subdivide_curve, "Radius": multiply_2}, + ) + + curve_line_1 = nw.new_node( + Nodes.CurveLine, + input_kwargs={ + "Start": (0.0000, -1.0000, 0.1000), + "End": (0.0000, 1.0000, 0.0000), + }, + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": set_curve_radius, + "Profile Curve": curve_line_1, + "Fill Caps": True, + }, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [curve_to_mesh_1, curve_to_mesh]} + ) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Mesh": join_geometry}) + + +@node_utils.to_nodegroup( + "nodegroup_flying_bird_tail", singleton=False, type="GeometryNodeTree" +) +def nodegroup_flying_bird_tail(nw: NodeWrangler): + # Code generated using version 2.5.1 of the node_transpiler + + simple_tube = nw.new_node( + nodegroup_simple_tube().name, + input_kwargs={ + "Angles Deg": (0.0000, 0.0000, 0.0000), + "Seg Lengths": (0.00, 0.00, 0.00), + "Start Radius": 0.000, + "End Radius": 0.000, + "Fullness": 3.0000, + }, + ) + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVector", "Feather Length Rad1 Rad2", (0.5000, 0.0800, 0.1000)), + ("NodeSocketVector", "Feather Rot Extent", (136.5100, -11.8000, 34.0000)), + ("NodeSocketVector", "Feather Rot Rand Bounds", (5.0000, 5.0000, 5.0000)), + ("NodeSocketIntUnsigned", "N Feathers", 16), + ], + ) + + quadratic_bezier = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Resolution": group_input.outputs["N Feathers"], + "Start": (0.0000, 0.0000, 0.0000), + "Middle": (0.0000, 0.0500, 0.0000), + "End": (-0.0500, 0.1000, 0.0300), + }, + ) + + feather = nw.new_node( + nodegroup_flying_feather().name, + input_kwargs={ + "Length Rad1 Rad2": group_input.outputs["Feather Length Rad1 Rad2"] + }, + ) + + curve_tangent = nw.new_node(Nodes.CurveTangent) + + align_euler_to_vector = nw.new_node( + Nodes.AlignEulerToVector, + input_kwargs={"Vector": curve_tangent}, + attrs={"axis": "Y"}, + ) + + instance_on_points = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={ + "Points": quadratic_bezier, + "Instance": feather, + "Rotation": align_euler_to_vector, + }, + ) + + rotate_instances = nw.new_node( + Nodes.RotateInstances, + input_kwargs={ + "Instances": instance_on_points, + "Rotation": (1.5708, 0.0000, 0.0000), + }, + ) + + random_value_1 = nw.new_node( + Nodes.RandomValue, input_kwargs={2: -0.1000, 3: 0.1000} + ) + + random_value_2 = nw.new_node( + Nodes.RandomValue, input_kwargs={2: -0.1000, 3: 0.1000, "Seed": 1} + ) + + random_value_3 = nw.new_node( + Nodes.RandomValue, input_kwargs={2: -0.1000, 3: 0.1000} + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": random_value_1.outputs[1], + "Y": random_value_2.outputs[1], + "Z": random_value_3.outputs[1], + }, + ) + + rotate_instances_1 = nw.new_node( + Nodes.RotateInstances, + input_kwargs={"Instances": rotate_instances, "Rotation": combine_xyz}, + ) + + index_1 = nw.new_node(Nodes.Index) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": index_1, 2: group_input.outputs["N Feathers"]}, + ) + + float_curve = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": map_range_1.outputs["Result"]} + ) + + if U(0, 1) < 0.5: + control_points = [0.2, 0.3, 0.45, 0.9] + else: + control_points = [0.25, 0.3, 0.35, 0.4] + 
node_utils.assign_curve( + float_curve.mapping.curves[0], + [ + (0.0136, control_points[0] + N(0.0, 0.02)), + (0.3273, control_points[1] + N(0.0, 0.02)), + (0.7500, control_points[2] + N(0.0, 0.03)), + (1.0000, control_points[3] + N(0.0, 0.04)), + ], + ) + + multiply_add = nw.new_node( + Nodes.Math, + input_kwargs={0: float_curve, 1: 1.2000}, + attrs={"operation": "MULTIPLY_ADD"}, + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": multiply_add, "Y": 1.0000, "Z": 1.0000} + ) + + scale_instances = nw.new_node( + Nodes.ScaleInstances, + input_kwargs={"Instances": rotate_instances_1, "Scale": combine_xyz_1}, + ) + + realize_instances = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": scale_instances} + ) + + symmetric_clone = nw.new_node( + nodegroup_symmetric_clone().name, input_kwargs={"Geometry": realize_instances} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": simple_tube.outputs["Geometry"], + "Skeleton Curve": simple_tube.outputs["Skeleton Curve"], + "Feathers": symmetric_clone.outputs["Both"], + }, + ) + + +@node_utils.to_nodegroup( + "nodegroup_flying_bird_wing", singleton=False, type="GeometryNodeTree" +) +def nodegroup_flying_bird_wing(nw: NodeWrangler): + # Code generated using version 2.5.1 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVector", "length_rad1_rad2", (1.0000, 0.2600, 0.0000)), + ("NodeSocketFloat", "feather_density", 18.7000), + ("NodeSocketFloat", "aspect", 1.0000), + ("NodeSocketFloat", "fullness", 4.0000), + ("NodeSocketFloatFactor", "Wing Shape Sculpting", 1.0000), + ("NodeSocketVector", "Length Rad1 Rad2", (0.6000, 0.0400, 0.0400)), + ("NodeSocketFloat", "Extension", 1.6800), + ], + ) + + map_range_3 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Vector": group_input.outputs["Extension"], + 9: (-76.2600, 170.9500, -144.3800), + 10: (10.0000, -10.0000, 0.0000), + }, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + simple_tube_v2 = nw.new_node( + nodegroup_simple_tube_v2().name, + input_kwargs={ + "length_rad1_rad2": group_input.outputs["length_rad1_rad2"], + "angles_deg": map_range_3.outputs["Vector"], + "proportions": (0.2000, 0.2700, 0.5000), + "aspect": group_input.outputs["aspect"], + "do_bezier": False, + "fullness": group_input.outputs["fullness"], + }, + ) + + curve_length = nw.new_node( + Nodes.CurveLength, + input_kwargs={"Curve": simple_tube_v2.outputs["Skeleton Curve"]}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: curve_length, 1: group_input.outputs["feather_density"]}, + attrs={"operation": "MULTIPLY"}, + ) + + resample_curve = nw.new_node( + Nodes.ResampleCurve, + input_kwargs={ + "Curve": simple_tube_v2.outputs["Skeleton Curve"], + "Count": multiply, + }, + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, input_kwargs={"Curve": resample_curve} + ) + + reroute = nw.new_node(Nodes.Reroute, input_kwargs={"Input": curve_to_mesh}) + + feather = nw.new_node( + nodegroup_flying_feather().name, + input_kwargs={"Length Rad1 Rad2": group_input.outputs["Length Rad1 Rad2"]}, + ) + + index = nw.new_node(Nodes.Index) + + attribute_statistic = nw.new_node( + Nodes.AttributeStatistic, input_kwargs={"Geometry": curve_to_mesh, 2: index} + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": index, + 1: attribute_statistic.outputs["Min"], + 2: attribute_statistic.outputs["Max"], + }, + ) + + transfer_attribute_index = nw.new_node( + Nodes.SampleNearest, + input_kwargs={ + 
"Geometry": curve_to_mesh, + "Sample Position": map_range_1.outputs["Result"], + }, + ) + + transfer_attribute = nw.new_node( + Nodes.SampleIndex, + input_kwargs={"Geometry": curve_to_mesh, "Index": transfer_attribute_index}, + ) + + map_range_2 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": group_input.outputs["Extension"], + 3: 115.6500, + 4: 0.0000, + }, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"Y": map_range_2.outputs["Result"]} + ) + + wing_feathers = [] + + for i in range(3): + float_curve = nw.new_node( + Nodes.FloatCurve, + input_kwargs={ + "Factor": group_input.outputs["Wing Shape Sculpting"], + "Value": (transfer_attribute, "Value"), + }, + ) + node_utils.assign_curve( + float_curve.mapping.curves[0], + [ + (0.0000, 0.0000), + (0.25, 0.2), + (0.50, 0.4), + (0.75, 0.6), + (1.0000, 0.8 - i * 0.02 + N(0.0, 0.02)), + ], + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Vector": float_curve, + 9: (0.0000, 80.0000, 0.0000), + 10: combine_xyz, + }, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + add = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: map_range.outputs["Vector"], + 1: (0.0, -5 + 5 * i, (i - 1) * 8.0), + }, + ) + + deg2rad = nw.new_node( + nodegroup_deg2_rad().name, input_kwargs={"Deg": add.outputs["Vector"]} + ) + + vector_curves = nw.new_node( + Nodes.VectorCurve, + input_kwargs={ + "Fac": group_input.outputs["Wing Shape Sculpting"], + "Vector": (transfer_attribute, "Value"), + }, + ) + node_utils.assign_curve( + vector_curves.mapping.curves[0], + [ + (-1.0000, -0.0000), + (0.0218, 0.4), + (0.20, 0.45), + (0.5, 0.5), + (0.65000, 0.6), + (0.80, 0.7), + (1.0000, 0.78 + N(0.0, 0.02)), + ], + handles=["AUTO", "VECTOR", "AUTO", "AUTO", "VECTOR", "AUTO", "AUTO"], + ) + node_utils.assign_curve( + vector_curves.mapping.curves[1], [(-1.0000, 1.0000), (1.0000, 1.0000)] + ) + node_utils.assign_curve( + vector_curves.mapping.curves[2], [(-1.0000, 1.0000), (1.0000, 1.0000)] + ) + + scale = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: vector_curves, "Scale": U(1.6, 2.0) - i * 0.65}, + attrs={"operation": "SCALE"}, + ) + + instance_on_points = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={ + "Points": reroute, + "Instance": feather, + "Rotation": deg2rad, + "Scale": scale.outputs["Vector"], + }, + ) + + random_value_1 = nw.new_node( + Nodes.RandomValue, input_kwargs={2: -0.01, 3: 0.01} + ) + + random_value_2 = nw.new_node( + Nodes.RandomValue, input_kwargs={2: -0.03, 3: 0.03, "Seed": 1} + ) + + random_value_3 = nw.new_node( + Nodes.RandomValue, input_kwargs={2: -0.01, 3: 0.01, "Seed": 2} + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": random_value_1.outputs[1], + "Y": random_value_2.outputs[1], + "Z": random_value_3.outputs[1], + }, + ) + + rotate_instances_1 = nw.new_node( + Nodes.RotateInstances, + input_kwargs={"Instances": instance_on_points, "Rotation": combine_xyz}, + ) + wing_feathers.append(rotate_instances_1) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": wing_feathers} + ) + + realize_instances = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": join_geometry_1} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": simple_tube_v2.outputs["Geometry"], + "Skeleton Curve": simple_tube_v2.outputs["Skeleton Curve"], + "Feathers": realize_instances, + }, + ) + + +class FlyingBirdTail(PartFactory): + tags = ["tail", "wing"] + + def sample_params(self): + return { + "Feather Length Rad1 
Rad2": np.array((0.4, 0.06, 0.04)) + * N(1, 0.1) + * N(1, 0.1, 3), + "Feather Rot Extent": np.array((25, -10, -16)) * N(1, 0.1, 3), + "Feather Rot Rand Bounds": np.array((5.0, 5.0, 5.0)) + * N(1, 0.1) + * N(1, 0.05, 3), + "N Feathers": int(N(16, 3)), + } + + def make_part(self, params): + part = nodegroup_to_part(nodegroup_flying_bird_tail, params) + return part + + +class FlyingBirdWing(PartFactory): + tags = ["limb", "wing"] + + def sample_params(self): + return { + "length_rad1_rad2": np.array( + (clip_gaussian(1.2, 0.7, 0.4, 2), U(0.08, 0.13), 0.02) + ), + "feather_density": 40, + "aspect": N(0.35, 0.04), + "fullness": N(4, 0.1), + "Wing Shape Sculpting": U(0.6, 1), + "Length Rad1 Rad2": np.array((0.6 * N(1, 0.2), 0.04, 0.04)), + "Extension": U(0, 0.05) if U() < 0.8 else U(0.7, 1), + } + + def make_part(self, params): + # split extras is essential to make automatic rigging work. We will join them back together later + part = nodegroup_to_part(nodegroup_flying_bird_wing, params, split_extras=True) + part.joints = { + 0: Joint( + rest=(0, 0, 0), bounds=np.array([[-35, 0, -70], [35, 0, 70]]) + ), # shoulder + 0.27: Joint(rest=(0, 0, 0), bounds=np.array([[-35, 0, -70], [35, 0, 70]])), + 0.65: Joint( + rest=(0, 0, 0), bounds=np.array([[-35, 0, -70], [35, 0, 70]]) + ), # elbow + } + part.iks = {1.0: IKParams(name="wingtip", chain_length=3)} + part.settings["parent_extras_rigid"] = True + return part diff --git a/infinigen/assets/creatures/reptile.py b/infinigen/assets/objects/creatures/reptile.py similarity index 65% rename from infinigen/assets/creatures/reptile.py rename to infinigen/assets/objects/creatures/reptile.py index 1931fe40e..b0c187af0 100644 --- a/infinigen/assets/creatures/reptile.py +++ b/infinigen/assets/objects/creatures/reptile.py @@ -1,50 +1,38 @@ # Copyright (c) Princeton University. # This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
-# Authors: +# Authors: # - Hongyu Wen: primary author # - Alexander Raistrick: snake curve following animation -import pdb -import gin import logging import bpy - +import gin import numpy as np -from numpy.random import normal as N, uniform as U - -from infinigen.assets.creatures.util import genome -from infinigen.assets.creatures.util.genome import Joint -from infinigen.assets.creatures import parts - -from infinigen.assets.creatures.util.creature_util import euler +from numpy.random import normal as N +from numpy.random import uniform as U -import infinigen.assets.materials.spot_sparse_attr -import infinigen.assets.materials.snake_scale -import infinigen.assets.materials.snake_shaders import infinigen.assets.materials.bird import infinigen.assets.materials.scale - -from infinigen.assets.creatures.util import creature, joining, animation as creature_animation -from infinigen.core.util import blender as butil - -from infinigen.assets.materials import bone, tongue, eyeball, nose, horn +import infinigen.assets.materials.snake_scale +import infinigen.assets.materials.snake_shaders +import infinigen.assets.materials.spot_sparse_attr +from infinigen.assets.materials import bone, eyeball, nose, tongue +from infinigen.assets.objects.creatures import parts +from infinigen.assets.objects.creatures.util import animation as creature_animation +from infinigen.assets.objects.creatures.util import creature, genome, joining +from infinigen.assets.objects.creatures.util.animation import curve_slither +from infinigen.assets.objects.creatures.util.animation.run_cycle import follow_path +from infinigen.assets.objects.creatures.util.genome import Joint from infinigen.core import surface - +from infinigen.core.placement import animation_policy from infinigen.core.placement.factory import AssetFactory - -from infinigen.assets.creatures.util import creature, joining - -from infinigen.core.util.math import clip_gaussian, FixedSeed from infinigen.core.util import blender as butil +from infinigen.core.util.math import FixedSeed, clip_gaussian from infinigen.core.util.random import random_general -from infinigen.assets.creatures.util.animation import curve_slither -from infinigen.core.placement import animation_policy - -from infinigen.assets.creatures.util.animation.run_cycle import follow_path def dinosaur(): open_mouth = U() > 0 @@ -53,7 +41,7 @@ def dinosaur(): # 'scale_y': 1, # 'scale_z': 1, # } - body_fac = parts.body_tube.ReptileBody(type='dinosaur_body') + body_fac = parts.body_tube.ReptileBody(type="dinosaur_body") body = genome.part(body_fac) shoulder_bounds = np.array([[-20, -20, -20], [20, 20, 20]]) @@ -80,7 +68,6 @@ def dinosaur(): # neck = genome.part(neck_fac) # genome.attach(neck, body, coord=(0.1, 0, 0.2), joint=Joint(rest=(180, 180, 0)), rotation_basis='global', bridge_rad=0.2, smooth_rad=0.1) - # head_size = { # 'scale_x': 0.8 + N(0, 0.02), # 'scale_y': 0.3 + N(0, 0.02), @@ -104,7 +91,6 @@ def dinosaur(): # horn = genome.part(horn_fac) # genome.attach(horn, head, coord=(t, splay, 0.8), joint=Joint(rest=(30, 130, -20)), rotation_basis='global', side=side) - # jaw_fac = parts.reptile_detail.ReptileLowerHead(head_size) # jaw = genome.part(jaw_fac) # genome.attach(jaw, neck, coord=(0.88, 0, 0.1), joint=Joint(rest=(180, 170, 180)), rotation_basis='global', bridge_rad=0.1, smooth_rad=0.1) @@ -112,13 +98,14 @@ def dinosaur(): return genome.CreatureGenome( parts=body, postprocess_params=dict( - animation=dict(), + animation=dict(), surface_registry=[ (infinigen.assets.materials.snake_scale, 1), - ] - ) + 
], + ), ) + def lizard_genome(): open_mouth = U() > 0 # body_fac = parts.reptile_detail.ReptileBody(type='lizard', n_bones=15, shoulder_ik_ts=[0.0, 0.3, 0.6, 1.0]) @@ -143,10 +130,10 @@ def lizard_genome(): # leg = genome.attach(genome.part(toe_fac), leg, coord=(0.98, 0.5, -0.3), joint=Joint(rest=(0,0,-13))) # leg = genome.attach(genome.part(toe_fac), leg, coord=(0.97, 0.5, -0.6), joint=Joint(rest=(0,0,-40))) # genome.attach(leg, body, coord=(U(0.21, 0.23), 0.5, 0.6), joint=Joint(rest=(0, 0, 80), bounds=shoulder_bounds), side=side, bridge_rad=0.1, smooth_rad=0.1) - + head_size = { - 'scale_x': 0.8 + N(0, 0.02), - 'scale_y': 0.3 + N(0, 0.02), + "scale_x": 0.8 + N(0, 0.02), + "scale_y": 0.3 + N(0, 0.02), } head_fac = parts.reptile_detail.ReptileUpperHead(head_size) head = genome.part(head_fac) @@ -167,70 +154,115 @@ def lizard_genome(): # horn = genome.part(horn_fac) # genome.attach(horn, head, coord=(t, splay, 0.8), joint=Joint(rest=(30, 130, -20)), rotation_basis='global', side=side) - # jaw_fac = parts.reptile_detail.ReptileLowerHead(head_size) # jaw = genome.part(jaw_fac) # genome.attach(jaw, body, coord=(0.01, 0, 0.1), joint=Joint(rest=(180, 150, 0)), rotation_basis='global', bridge_rad=0.1, smooth_rad=0.1) return genome.CreatureGenome( - parts=head, + parts=head, postprocess_params=dict( anim=lizard_run_params(), surface_registry=[ (infinigen.assets.materials.snake_scale, 1), - ] - ) + ], + ), ) + def snake_genome(): open_mouth = U() > 0 w_mod = N(1, 0.05) h_mod = N(1, 0.05) - body_fac = parts.reptile_detail.ReptileBody(type='snake', n_bones=15, shoulder_ik_ts=[0.0, 0.3, 0.6, 1.0], mod=(1, w_mod, h_mod)) + body_fac = parts.reptile_detail.ReptileBody( + type="snake", + n_bones=15, + shoulder_ik_ts=[0.0, 0.3, 0.6, 1.0], + mod=(1, w_mod, h_mod), + ) body = genome.part(body_fac) head_size = { - 'scale_x': 0.8 + N(0, 0.02), - 'scale_y': 0.3 + N(0, 0.02), + "scale_x": 0.8 + N(0, 0.02), + "scale_y": 0.3 + N(0, 0.02), } head_fac = parts.reptile_detail.ReptileUpperHead(head_size, mod=(1, w_mod, h_mod)) head = genome.part(head_fac) - genome.attach(head, body, coord=(0.01, 0, 0.2), joint=Joint(rest=(180, 180, 0)), rotation_basis='global', bridge_rad=0.2, smooth_rad=0.1) + genome.attach( + head, + body, + coord=(0.01, 0, 0.2), + joint=Joint(rest=(180, 180, 0)), + rotation_basis="global", + bridge_rad=0.2, + smooth_rad=0.1, + ) - eye_fac = parts.eye.MammalEye({'Radius': N(0.03, 0.005)}) - t, splay = U(0.7, 0.7), 100/180 + eye_fac = parts.eye.MammalEye({"Radius": N(0.03, 0.005)}) + t, splay = U(0.7, 0.7), 100 / 180 r = 1 rot = np.array([0, 0, 90]) * N(1, 0.1, 3) for side in [-1, 1]: eye = genome.part(eye_fac) - genome.attach(eye, head, coord=(t, splay, r), joint=Joint(rest=(0,0,0)), rotation_basis='normal', side=side) + genome.attach( + eye, + head, + coord=(t, splay, r), + joint=Joint(rest=(0, 0, 0)), + rotation_basis="normal", + side=side, + ) # teeth - horn_fac = parts.horn.Horn({'depth_of_ridge': 0, 'length': U(0.2, 0.3), 'rad1': U(0.4, 0.4), 'rad2': U(0.3, 0.3), 'thickness': U(0.04, 0.08), 'height': 0}) - t, splay = U(0.67, 0.7), 60/180 + horn_fac = parts.horn.Horn( + { + "depth_of_ridge": 0, + "length": U(0.2, 0.3), + "rad1": U(0.4, 0.4), + "rad2": U(0.3, 0.3), + "thickness": U(0.04, 0.08), + "height": 0, + } + ) + t, splay = U(0.67, 0.7), 60 / 180 for side in [-1, 1]: horn = genome.part(horn_fac) - genome.attach(horn, head, coord=(t, splay, 0.8), joint=Joint(rest=(30, 130, -20)), rotation_basis='global', side=side) + genome.attach( + horn, + head, + coord=(t, splay, 0.8), + 
joint=Joint(rest=(30, 130, -20)), + rotation_basis="global", + side=side, + ) jaw_fac = parts.reptile_detail.ReptileLowerHead(head_size, mod=(1, w_mod, h_mod)) jaw = genome.part(jaw_fac) mouth_open_deg = 0 - genome.attach(jaw, body, coord=(0.01, 0, 0.15), joint=Joint(rest=(180, 180 - mouth_open_deg, 0)), rotation_basis='global', bridge_rad=0.1, smooth_rad=0.1) + genome.attach( + jaw, + body, + coord=(0.01, 0, 0.15), + joint=Joint(rest=(180, 180 - mouth_open_deg, 0)), + rotation_basis="global", + bridge_rad=0.1, + smooth_rad=0.1, + ) return genome.CreatureGenome( - parts=body, + parts=body, postprocess_params=dict( anim=snake_swim_params(), surface_registry=[ (infinigen.assets.materials.snake_scale, 1), - ] - ) + ], + ), ) + def chameleon_genome(): open_mouth = U() > 0 @@ -238,19 +270,20 @@ def chameleon_genome(): body = genome.part(body_fac) return genome.CreatureGenome( - parts=body, + parts=body, postprocess_params=dict( anim=snake_swim_params(), surface_registry=[ (infinigen.assets.materials.snake_scale, 1), - ] - ) + ], + ), ) + def frog_genome(): - #body_fac = parts.reptile_detail.ReptileHeadBody(params={'open_mouth': False}, type='frog') - #body = genome.part(body_fac) - #shoulder_bounds = np.array([[-20, -20, -20], [20, 20, 20]]) + # body_fac = parts.reptile_detail.ReptileHeadBody(params={'open_mouth': False}, type='frog') + # body = genome.part(body_fac) + # shoulder_bounds = np.array([[-20, -20, -20], [20, 20, 20]]) # open_mouth = U() > 0 # body_fac = parts.body_tube.ReptileBody(type='frog_body') # body = genome.part(body_fac) @@ -274,9 +307,9 @@ def frog_genome(): # leg = genome.attach(genome.part(toe_fac), leg, coord=(0.98, 0.5, -0.3), joint=Joint(rest=(0,0,-13))) # leg = genome.attach(genome.part(toe_fac), leg, coord=(0.97, 0.5, -0.6), joint=Joint(rest=(0,0,-40))) # genome.attach(leg, body, coord=(U(0.70, 0.75), 0.45, 0.8), joint=Joint(rest=(0, 0, 50), bounds=shoulder_bounds), side=side) - + head_size = { - 'scale_y': 0.4 + N(0, 0.02), + "scale_y": 0.4 + N(0, 0.02), } head_fac = parts.reptile_detail.ReptileUpperHead(head_size) head = genome.part(head_fac) @@ -298,127 +331,174 @@ def frog_genome(): parts=head, postprocess_func=reptile_postprocessing, postprocess_params=dict( - animation=dict( - mode='swim', - speed_m_s=0.5 - ), + animation=dict(mode="swim", speed_m_s=0.5), surface_registry=[ (infinigen.assets.materials.snake_scale, 1), - ] - ) + ], + ), ) + def snake_swim_params(): swim_freq = 2 * clip_gaussian(1, 0.3, 0.1, 2) swim_mag = N(200, 3) return dict( swim_mag=swim_mag, swim_freq=swim_freq, - flipper_freq = 2 * clip_gaussian(1, 0.5, 0.1, 3) * swim_freq, - flipper_mag = 0.25 * N(1, 0.1) * swim_mag, - flipper_var = U(0, 0.2), + flipper_freq=2 * clip_gaussian(1, 0.5, 0.1, 3) * swim_freq, + flipper_mag=0.25 * N(1, 0.1) * swim_mag, + flipper_var=U(0, 0.2), ) + def chameleon_eye_params(): swim_freq = 0.2 * clip_gaussian(1, 0.3, 0.1, 2) swim_mag = N(20, 3) return dict( swim_mag=swim_mag, swim_freq=swim_freq, - flipper_freq = 2 * clip_gaussian(1, 0.5, 0.1, 3) * swim_freq, - flipper_mag = 0.25 * N(1, 0.1) * swim_mag, - flipper_var = U(0, 0.2), + flipper_freq=2 * clip_gaussian(1, 0.5, 0.1, 3) * swim_freq, + flipper_mag=0.25 * N(1, 0.1) * swim_mag, + flipper_var=U(0, 0.2), ) def animate_snake_swim(root, arma, params, ik_targets): - spine = [b for b in arma.pose.bones if 'Body' in b.name] + spine = [b for b in arma.pose.bones if "Body" in b.name] creature_animation.animate_wiggle_bones( - arma=arma, bones=spine, fixed_head=False, off=1/2, - mag_deg=params['swim_mag'], 
freq=params['swim_freq'], wavelength=U(0.2, 0.4)) - + arma=arma, + bones=spine, + fixed_head=False, + off=1 / 2, + mag_deg=params["swim_mag"], + freq=params["swim_freq"], + wavelength=U(0.2, 0.4), + ) + + def animate_chameleon_eye(root, arma, params, ik_targets): - spine = [b for b in arma.pose.bones if 'Eye' in b.name] + spine = [b for b in arma.pose.bones if "Eye" in b.name] creature_animation.animate_wiggle_bones( - arma=arma, bones=spine, fixed_head=False, off=1/2, - mag_deg=params['swim_mag'], freq=params['swim_freq'], wavelength=U(0.2, 0.4)) - + arma=arma, + bones=spine, + fixed_head=False, + off=1 / 2, + mag_deg=params["swim_mag"], + freq=params["swim_freq"], + wavelength=U(0.2, 0.4), + ) + + def lizard_run_params(): swim_freq = 1 * clip_gaussian(1, 0.3, 0.1, 2) swim_mag = N(50, 3) return dict( swim_mag=swim_mag, swim_freq=swim_freq, - flipper_freq = 2 * clip_gaussian(1, 0.5, 0.1, 3) * swim_freq, - flipper_mag = 0.25 * N(1, 0.1) * swim_mag, - flipper_var = U(0, 0.2), + flipper_freq=2 * clip_gaussian(1, 0.5, 0.1, 3) * swim_freq, + flipper_mag=0.25 * N(1, 0.1) * swim_mag, + flipper_var=U(0, 0.2), ) + def animate_lizard_run(root, arma, params, ik_targets): - spine = [b for b in arma.pose.bones if 'Body' in b.name] + spine = [b for b in arma.pose.bones if "Body" in b.name] creature_animation.animate_wiggle_bones( - arma=arma, bones=spine, fixed_head=False, off=1/2, - mag_deg=params['swim_mag'], freq=params['swim_freq'], wavelength=U(1, 1.2)) - - spine = [b for b in arma.pose.bones if 'FrontLeg' in b.name] + arma=arma, + bones=spine, + fixed_head=False, + off=1 / 2, + mag_deg=params["swim_mag"], + freq=params["swim_freq"], + wavelength=U(1, 1.2), + ) + + spine = [b for b in arma.pose.bones if "FrontLeg" in b.name] print(spine) creature_animation.animate_running_front_leg( - arma=arma, bones=spine, fixed_head=False, off=1/2, - mag_deg=params['swim_mag'], freq=params['swim_freq'], wavelength=U(1, 1.2)) + arma=arma, + bones=spine, + fixed_head=False, + off=1 / 2, + mag_deg=params["swim_mag"], + freq=params["swim_freq"], + wavelength=U(1, 1.2), + ) - spine = [b for b in arma.pose.bones if 'BackLeg' in b.name] + spine = [b for b in arma.pose.bones if "BackLeg" in b.name] print(spine) creature_animation.animate_running_back_leg( - arma=arma, bones=spine, fixed_head=False, off=0, - mag_deg=params['swim_mag'], freq=params['swim_freq'], wavelength=U(1, 1.2)) + arma=arma, + bones=spine, + fixed_head=False, + off=0, + mag_deg=params["swim_mag"], + freq=params["swim_freq"], + wavelength=U(1, 1.2), + ) # creature_animation.animate_run(root, arma, ik_targets) + def reptile_postprocessing(body_parts, extras, params): - get_extras = lambda k: [o for o in extras if k in o.name] - main_template = surface.registry.sample_registry(params['surface_registry']) - body = body_parts + get_extras('BodyExtra') + def get_extras(k): + return [o for o in extras if k in o.name] + + main_template = surface.registry.sample_registry(params["surface_registry"]) + body = body_parts + get_extras("BodyExtra") main_template.apply(body) - tongue.apply(get_extras('Tongue')) - bone.apply(get_extras('Horn')) - eyeball.apply(get_extras('Eyeball'), shader_kwargs={"coord": "X"}) - nose.apply(get_extras('Nose')) + tongue.apply(get_extras("Tongue")) + bone.apply(get_extras("Horn")) + eyeball.apply(get_extras("Eyeball"), shader_kwargs={"coord": "X"}) + nose.apply(get_extras("Nose")) + def chameleon_postprocessing(body_parts, extras, params): - get_extras = lambda k: [o for o in extras if k in o.name] - main_template = 
surface.registry.sample_registry(params['surface_registry']) - body = body_parts + get_extras('BodyExtra') + def get_extras(k): + return [o for o in extras if k in o.name] + + main_template = surface.registry.sample_registry(params["surface_registry"]) + body = body_parts + get_extras("BodyExtra") main_template.apply(body) - #chameleon_eye.apply(get_extras('Eye')) + # chameleon_eye.apply(get_extras('Eye')) + @gin.configurable class LizardFactory(AssetFactory): - max_distance = 40 def __init__(self, factory_seed, bvh=None, coarse=False): super().__init__(factory_seed, coarse) self.bvh = bvh - def create_asset(self, i, animate=False, rigging=False, cloth=False, **kwargs): + def create_asset(self, i, animate=False, rigging=False, cloth=False, **kwargs): genome = lizard_genome() - root, parts = creature.genome_to_creature(genome, name=f'lizard({self.factory_seed}, {i})') - - joined, extras, arma, ik_targets = joining.join_and_rig_parts(root, parts, genome, - postprocess_func=reptile_postprocessing, adapt_mode='remesh', rigging=rigging, **kwargs) + root, parts = creature.genome_to_creature( + genome, name=f"lizard({self.factory_seed}, {i})" + ) + + joined, extras, arma, ik_targets = joining.join_and_rig_parts( + root, + parts, + genome, + postprocess_func=reptile_postprocessing, + adapt_mode="remesh", + rigging=rigging, + **kwargs, + ) if animate and arma is not None: - pass + pass else: joined = butil.join_objects([joined] + extras) - + butil.purge_empty_materials(joined) return root - + + @gin.configurable class FrogFactory(AssetFactory): - max_distance = 40 def __init__(self, factory_seed, bvh=None, coarse=False): @@ -426,34 +506,52 @@ def __init__(self, factory_seed, bvh=None, coarse=False): self.bvh = bvh def create_asset(self, i, animate=False, rigging=False, simulate=False, **kwargs): - genome = frog_genome() - root, parts = creature.genome_to_creature(genome, name=f'frog({self.factory_seed}, {i})') - - joined, extras, arma, ik_targets = joining.join_and_rig_parts(root, parts, genome, - postprocess_func=reptile_postprocessing, adapt_mode='remesh', rigging=rigging, **kwargs) + root, parts = creature.genome_to_creature( + genome, name=f"frog({self.factory_seed}, {i})" + ) + + joined, extras, arma, ik_targets = joining.join_and_rig_parts( + root, + parts, + genome, + postprocess_func=reptile_postprocessing, + adapt_mode="remesh", + rigging=rigging, + **kwargs, + ) if animate and arma is not None: - pass + pass if simulate: pass else: joined = butil.join_objects([joined] + extras) - + return root - + + @gin.configurable class SnakeFactory(AssetFactory): - max_distance = 40 - def __init__(self, factory_seed, bvh=None, coarse=False, snake_length=('uniform', 0.5, 3), **kwargs): + def __init__( + self, + factory_seed, + bvh=None, + coarse=False, + snake_length=("uniform", 0.5, 3), + **kwargs, + ): super().__init__(factory_seed, coarse) self.bvh = bvh with FixedSeed(factory_seed): self.snake_length = random_general(snake_length) self.policy = animation_policy.AnimPolicyRandomForwardWalk( - forward_vec=(1, 0, 0), speed=min(self.snake_length, 2)*U(0.5, 1), - step_range=(0.2, 0.2), yaw_dist=("uniform", -7, 7)) # take very small steps, to avoid clipping into convex surfaces + forward_vec=(1, 0, 0), + speed=min(self.snake_length, 2) * U(0.5, 1), + step_range=(0.2, 0.2), + yaw_dist=("uniform", -7, 7), + ) # take very small steps, to avoid clipping into convex surfaces def create_placeholder(self, i, loc, rot, **kwargs): p = butil.spawn_cube(size=self.snake_length) @@ -462,58 +560,84 @@ def 
create_placeholder(self, i, loc, rot, **kwargs): if self.bvh is None: return p - - curve = animation_policy.policy_create_bezier_path(p, self.bvh, self.policy, eval_offset=(0, 0, 0.5), retry_rotation=True) - curve.name = f'animhelper:{self}.create_placeholder({i}).path' + + curve = animation_policy.policy_create_bezier_path( + p, self.bvh, self.policy, eval_offset=(0, 0, 0.5), retry_rotation=True + ) + curve.name = f"animhelper:{self}.create_placeholder({i}).path" slither_curve = butil.deep_clone_obj(curve) curve_slither.add_curve_slithers(slither_curve, snake_length=self.snake_length) - if slither_curve.type != 'CURVE': - logging.warning(f'{self.__class__.__name__} created invalid path {curve.name} with {curve.type=}') + if slither_curve.type != "CURVE": + logging.warning( + f"{self.__class__.__name__} created invalid path {curve.name} with {curve.type=}" + ) return p curve_slither.snap_curve_to_floor(slither_curve, self.bvh) butil.parent_to(curve, slither_curve, keep_transform=True) # animate the placeholder to the APPROX location of the snake, so the camera can follow it - follow_path(p, curve, use_curve_follow=True, offset=0, - duration=bpy.context.scene.frame_end-bpy.context.scene.frame_start) - curve.data.driver_add('eval_time').driver.expression = 'frame' + follow_path( + p, + curve, + use_curve_follow=True, + offset=0, + duration=bpy.context.scene.frame_end - bpy.context.scene.frame_start, + ) + curve.data.driver_add("eval_time").driver.expression = "frame" return p def create_asset(self, i, placeholder, **kwargs): - genome = snake_genome() - root, parts = creature.genome_to_creature(genome, name=f'snake({self.factory_seed}, {i})') - - joined, extras, arma, ik_targets = joining.join_and_rig_parts(root, parts, genome, - postprocess_func=reptile_postprocessing, adaptive_resolution=False, rigging=False, **kwargs) + root, parts = creature.genome_to_creature( + genome, name=f"snake({self.factory_seed}, {i})" + ) + + joined, extras, arma, ik_targets = joining.join_and_rig_parts( + root, + parts, + genome, + postprocess_func=reptile_postprocessing, + adaptive_resolution=False, + rigging=False, + **kwargs, + ) joined = butil.join_objects([joined] + extras) - s = self.snake_length / 20 # convert to real units. existing code averages 20m length + s = ( + self.snake_length / 20 + ) # convert to real units. 
existing code averages 20m length joined.scale = (s, s, s) butil.apply_transform(joined, scale=True) - if len(placeholder.constraints) and placeholder.constraints[0].type == 'FOLLOW_PATH': + if ( + len(placeholder.constraints) + and placeholder.constraints[0].type == "FOLLOW_PATH" + ): curve = placeholder.constraints[0].target.parent - assert curve.type == 'CURVE', curve.type + assert curve.type == "CURVE", curve.type if len(curve.data.splines[0].points) > 3: - orig_len = curve.data.splines[0].calc_length() - + joined.parent = None - curve_slither.slither_along_path(joined, curve, speed=self.policy.speed, orig_len=orig_len) + curve_slither.slither_along_path( + joined, curve, speed=self.policy.speed, orig_len=orig_len + ) - root.parent = butil.spawn_empty('snake_parent_temp') # so AssetFactory.spawn_asset doesnt attempt to parent + root.parent = butil.spawn_empty( + "snake_parent_temp" + ) # so AssetFactory.spawn_asset doesnt attempt to parent butil.parent_to(joined, root, keep_transform=True) butil.purge_empty_materials(joined) return joined + @gin.configurable class ChameleonFactory(AssetFactory): max_distance = 40 @@ -530,13 +654,21 @@ def create_placeholder(self, i, loc, rot, **kwargs): return p def create_asset(self, i, placeholder, **kwargs): - genome = chameleon_genome() - root, parts = creature.genome_to_creature(genome, name=f'snake({self.factory_seed}, {i})') - - joined, extras, arma, ik_targets = joining.join_and_rig_parts(root, parts, genome, - postprocess_func=reptile_postprocessing, adaptive_resolution=False, rigging=False, **kwargs) + root, parts = creature.genome_to_creature( + genome, name=f"snake({self.factory_seed}, {i})" + ) + + joined, extras, arma, ik_targets = joining.join_and_rig_parts( + root, + parts, + genome, + postprocess_func=reptile_postprocessing, + adaptive_resolution=False, + rigging=False, + **kwargs, + ) joined = butil.join_objects([joined] + extras) - return root \ No newline at end of file + return root diff --git a/infinigen/assets/creatures/util/__init__.py b/infinigen/assets/objects/creatures/util/__init__.py similarity index 100% rename from infinigen/assets/creatures/util/__init__.py rename to infinigen/assets/objects/creatures/util/__init__.py diff --git a/infinigen/assets/creatures/util/animation/__init__.py b/infinigen/assets/objects/creatures/util/animation/__init__.py similarity index 100% rename from infinigen/assets/creatures/util/animation/__init__.py rename to infinigen/assets/objects/creatures/util/animation/__init__.py diff --git a/infinigen/assets/objects/creatures/util/animation/curve_slither.py b/infinigen/assets/objects/creatures/util/animation/curve_slither.py new file mode 100644 index 000000000..4fd197db9 --- /dev/null +++ b/infinigen/assets/objects/creatures/util/animation/curve_slither.py @@ -0,0 +1,327 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
+ +# Authors: Alexander Raistrick + + +import logging + +import bpy +import numpy as np +from mathutils import Vector +from numpy.random import normal as N +from numpy.random import uniform as U + +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.util import blender as butil + + +@node_utils.to_nodegroup( + "nodegroup_add_wiggles", singleton=True, type="GeometryNodeTree" +) +def nodegroup_add_wiggles(nw: NodeWrangler): + # Code generated using version 2.6.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketFloat", "Magnitude", 1.6800), + ("NodeSocketFloat", "MagRandom", 0.5000), + ("NodeSocketVector", "Up", (0.0000, 0.0000, 1.0000)), + ], + ) + + curve_tangent = nw.new_node(Nodes.CurveTangent) + + cross_product = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: curve_tangent, 1: group_input.outputs["Up"]}, + attrs={"operation": "CROSS_PRODUCT"}, + ) + + index = nw.new_node(Nodes.Index) + + modulo = nw.new_node( + Nodes.Math, input_kwargs={0: index, 1: 4.0000}, attrs={"operation": "MODULO"} + ) + + less_than = nw.new_node( + Nodes.Math, + input_kwargs={0: modulo, 1: 2.0000}, + attrs={"operation": "LESS_THAN"}, + ) + + map_range = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": less_than, 3: -1.0000} + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: map_range.outputs["Result"], + 1: group_input.outputs["Magnitude"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: 1.0000, 1: group_input.outputs["MagRandom"]}, + attrs={"operation": "SUBTRACT"}, + ) + + random_value = nw.new_node(Nodes.RandomValue, input_kwargs={2: subtract}) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply, 1: random_value.outputs[1]}, + attrs={"operation": "MULTIPLY"}, + ) + + scale = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: cross_product.outputs["Vector"], "Scale": multiply_1}, + attrs={"operation": "SCALE"}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + "Offset": scale.outputs["Vector"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": set_position}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_add_loopbacks", singleton=True, type="GeometryNodeTree" +) +def nodegroup_add_loopbacks(nw: NodeWrangler): + # Code generated using version 2.6.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketVector", "Vector", (0.0000, 0.0000, 0.0000)), + ("NodeSocketFloat", "Amount", 0.5800), + ("NodeSocketFloat", "Randomness", 0.0000), + ], + ) + + index_1 = nw.new_node(Nodes.Index) + + add = nw.new_node(Nodes.Math, input_kwargs={0: index_1, 1: 1.0000}) + + modulo = nw.new_node( + Nodes.Math, input_kwargs={0: add, 1: 2.0000}, attrs={"operation": "MODULO"} + ) + + less_than = nw.new_node( + Nodes.Math, input_kwargs={0: modulo}, attrs={"operation": "LESS_THAN"} + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": less_than, 3: -1.0000} + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: map_range_1.outputs["Result"], + 1: group_input.outputs["Amount"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: 
1.0000, 1: group_input.outputs["Randomness"]}, + attrs={"operation": "SUBTRACT"}, + ) + + random_value = nw.new_node( + Nodes.RandomValue, input_kwargs={2: subtract, "ID": index_1} + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply, 1: random_value.outputs[1]}, + attrs={"operation": "MULTIPLY"}, + ) + + scale = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: group_input.outputs["Vector"], "Scale": multiply_1}, + attrs={"operation": "SCALE"}, + ) + + set_position_1 = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + "Offset": scale.outputs["Vector"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": set_position_1}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup("nodegroup_wiggles", singleton=True, type="GeometryNodeTree") +def nodegroup_wiggles(nw: NodeWrangler): + # Code generated using version 2.6.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketFloatDistance", "Wavelength", 2.3300), + ("NodeSocketFloat", "Magnitude", 1.6800), + ("NodeSocketFloat", "MagRandom", 1.0000), + ("NodeSocketFloat", "Loopyness", 0.5800), + ("NodeSocketFloat", "LoopRandom", 0.0000), + ("NodeSocketFloat", "AltitudeOffset", 0.00), + ("NodeSocketVector", "Up", (0.0000, 0.0000, 1.0000)), + ], + ) + + curve_tangent_1 = nw.new_node(Nodes.CurveTangent) + + capture_attribute = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={"Geometry": group_input.outputs["Geometry"], 1: curve_tangent_1}, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + divide = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Wavelength"], 1: 4.0000}, + attrs={"operation": "DIVIDE"}, + ) + + resample_curve = nw.new_node( + Nodes.ResampleCurve, + input_kwargs={"Curve": capture_attribute.outputs["Geometry"], "Length": divide}, + attrs={"mode": "LENGTH"}, + ) + + addwiggles = nw.new_node( + nodegroup_add_wiggles().name, + input_kwargs={ + "Geometry": resample_curve, + "Magnitude": group_input.outputs["Magnitude"], + "MagRandom": group_input.outputs["MagRandom"], + "Up": group_input.outputs["Up"], + }, + ) + + addloopbacks = nw.new_node( + nodegroup_add_loopbacks().name, + input_kwargs={ + "Geometry": addwiggles, + "Vector": capture_attribute.outputs["Attribute"], + "Amount": group_input.outputs["Loopyness"], + "Randomness": group_input.outputs["LoopRandom"], + }, + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, input_kwargs={"Curve": addloopbacks, "Fill Caps": True} + ) + + subdivision_surface = nw.new_node( + Nodes.SubdivisionSurface, input_kwargs={"Mesh": curve_to_mesh, "Level": 3} + ) + + mesh_to_curve = nw.new_node( + Nodes.MeshToCurve, input_kwargs={"Mesh": subdivision_surface} + ) + + off = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"Z": group_input.outputs["AltitudeOffset"]} + ) + result = nw.new_node( + Nodes.SetPosition, input_kwargs={"Geometry": mesh_to_curve, "Offset": off} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": result}, + attrs={"is_active_output": True}, + ) + + +def add_curve_slithers(curve, snake_length): + params = { + "Wavelength": snake_length / U(2, 4), + "Magnitude": snake_length * 0.05 * N(1, 0.2), + "MagRandom": U(0, 0.7), + "Loopyness": 0, + "LoopRandom": 0, + "AltitudeOffset": 0.02, + } + butil.modify_mesh( + curve, + "NODES", + node_group=nodegroup_wiggles(), + ng_inputs=params, + apply=False, + 
show_viewport=True, + ) + with butil.SelectObjects(curve): + bpy.ops.object.convert(target="MESH") + bpy.ops.object.convert(target="CURVE") + return curve + + +def slither_along_path(obj, curve, speed, zoff_pct=0.7, orig_len=None): + if not curve.type == "CURVE": + with butil.SelectObjects(curve): + bpy.ops.object.convert(target="CURVE") + curve = bpy.context.active_object + if curve.type != "CURVE": + message = f"slither_along_path failed, {curve.name=} had {curve.type=} but expected CURVE" + if curve.type == "MESH": + message == f". {len(curve.data.vertices)=}" + logging.warning(message) + return + + curve.data.twist_mode = "Z_UP" + + xmax = max(v[0] for v in obj.bound_box) + + l = curve.data.splines[0].calc_length() + + zoff = zoff_pct * obj.dimensions[-1] / 2 + obj.location = (xmax, 0, zoff) + obj.keyframe_insert(data_path="location", frame=0) + obj.location = (l, 0, zoff) + obj.keyframe_insert(data_path="location", frame=bpy.context.scene.frame_end) + + for fc in obj.animation_data.action.fcurves: + for k in fc.keyframe_points: + k.interpolation = "LINEAR" + + butil.modify_mesh(obj, "CURVE", object=curve, apply=False, show_viewport=True) + obj.rotation_euler = (0, 0, np.pi) + + +def snap_curve_to_floor(curve, bvh, step_height=1): + s = curve.data.splines[0] + for p in s.points: + raystart = Vector(p.co[:3]) + Vector((0, 0, step_height)) + loc, *_ = bvh.ray_cast(raystart, Vector((0, 0, -1))) + if loc is not None: + p.co = (*loc, 1) diff --git a/infinigen/assets/creatures/util/animation/driver_repeated.py b/infinigen/assets/objects/creatures/util/animation/driver_repeated.py similarity index 69% rename from infinigen/assets/creatures/util/animation/driver_repeated.py rename to infinigen/assets/objects/creatures/util/animation/driver_repeated.py index b1f1d64f4..1eb8b156d 100644 --- a/infinigen/assets/creatures/util/animation/driver_repeated.py +++ b/infinigen/assets/objects/creatures/util/animation/driver_repeated.py @@ -5,7 +5,7 @@ import numpy as np -from numpy.random import uniform as U, uniform +from numpy.random import uniform from infinigen.core.util.math import FixedSeed, int_hash @@ -16,9 +16,9 @@ def repeated_driver(start, end, freq, off=None, seed=None): if seed is None: seed = np.random.randint(1e5) with FixedSeed(seed): - phase = uniform(.2, .8) - u = phase * uniform(.8, 1.) - v = (1 - phase) * uniform(.8, 1.) 
+ phase = uniform(0.2, 0.8) + u = phase * uniform(0.8, 1.0) + v = (1 - phase) * uniform(0.8, 1.0) t = f"{freq: .4f} * frame+{off:.4f}" t = f"{t}-floor({t})" return f"{start}+{end - start}*(smoothstep(0,{u},{t})-smoothstep({phase},{phase + v},{t}))" @@ -27,22 +27,33 @@ def repeated_driver(start, end, freq, off=None, seed=None): def bend_bones_lerp(arma, bones, total, freq, rot=None, symmetric=True): bone_lengths = [] for bone in bones: - length = bone.bone['length'] if isinstance(bone.bone['length'], (int, float)) else 0 + length = ( + bone.bone["length"] if isinstance(bone.bone["length"], (int, float)) else 0 + ) if length >= 0: bone_lengths.append((bone, length)) bone_lengths = list(sorted(bone_lengths, key=lambda _: _[1])) bones = [b for (b, _) in bone_lengths] if bone_lengths else bones bone = bones[0].bone - hashable = arma.parent.name, bone['factory_class'], bone['index'] + (bone['side'] > 0) * symmetric + hashable = ( + arma.parent.name, + bone["factory_class"], + bone["index"] + (bone["side"] > 0) * symmetric, + ) with FixedSeed(int_hash(hashable)): ratio = uniform(1, 2, len(bones)) ratio /= ratio.sum() - total = [(t if uniform(0, 1) < .5 else (t[1], t[0])) if isinstance(t, tuple) else (t, t) for t in total] + total = [ + (t if uniform(0, 1) < 0.5 else (t[1], t[0])) + if isinstance(t, tuple) + else (t, t) + for t in total + ] o0, o1, o2 = uniform(0, 1, 3) seed = np.random.randint(1e5) for i, (bone, r) in enumerate(zip(bones, ratio)): - s = bone.bone['side'] - bone.rotation_mode = 'XYZ' + s = bone.bone["side"] + bone.rotation_mode = "XYZ" (x0, x1), (y0, y1), (z0, z1) = total if rot is not None and i == 0: x0 += rot[0] @@ -51,7 +62,9 @@ def bend_bones_lerp(arma, bones, total, freq, rot=None, symmetric=True): y1 += rot[1] z0 += rot[2] z1 += rot[2] - driver_x, driver_y, driver_z = [_.driver for _ in bone.driver_add('rotation_euler')] + driver_x, driver_y, driver_z = [ + _.driver for _ in bone.driver_add("rotation_euler") + ] driver_x.expression = f"({repeated_driver(x0, x1, freq, o0, seed)})*{r}" driver_y.expression = f"({repeated_driver(y0, y1, freq, o1, seed)})*{r}" driver_z.expression = f"({repeated_driver(z0, z1, freq, o2, seed)})*{s * r}" diff --git a/infinigen/assets/creatures/util/animation/driver_wiggle.py b/infinigen/assets/objects/creatures/util/animation/driver_wiggle.py similarity index 58% rename from infinigen/assets/creatures/util/animation/driver_wiggle.py rename to infinigen/assets/objects/creatures/util/animation/driver_wiggle.py index 1f86c2f10..76c00732c 100644 --- a/infinigen/assets/creatures/util/animation/driver_wiggle.py +++ b/infinigen/assets/objects/creatures/util/animation/driver_wiggle.py @@ -7,51 +7,47 @@ import logging import bpy -import bpy_types -import mathutils - import numpy as np -from numpy.random import uniform as U, normal as N - -import pdb -from infinigen.assets.creatures.util import creature, creature_util as cutil -from infinigen.core.util.math import clip_gaussian, randomspacing, lerp from infinigen.core.util import blender as butil logger = logging.getLogger(__name__) + def sinusoid_driver(driver, mag, freq, off): - driver.expression = f'{mag:.4f}*sin(({freq:.4f}*frame+{off:.4f})/(2*pi))' + driver.expression = f"{mag:.4f}*sin(({freq:.4f}*frame+{off:.4f})/(2*pi))" + def remove_ik_constraints(bones): for b in bones: for c in b.constraints: - logger.debug(f'Removing {c.name} from {b.name=}') - if hasattr(c, 'target'): + logger.debug(f"Removing {c.name} from {b.name=}") + if hasattr(c, "target"): butil.delete(c.target) b.constraints.remove(c) 
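
For context on how these driver-expression helpers are consumed: `repeated_driver` only returns a Blender driver expression string, so callers attach it to a property via `driver_add`, as `bend_bones_lerp` does above. A minimal usage sketch follows; the import path assumes the post-rename module location introduced in this diff, and the driven property, frequency, offset, and seed values are illustrative rather than taken from the codebase.

```python
import bpy

from infinigen.assets.objects.creatures.util.animation.driver_repeated import (
    repeated_driver,
)

obj = bpy.context.active_object  # any object with an animatable location

# Drive the Z location between 0.0 and 0.3, cycling roughly once per 24 frames.
# `off` and `seed` are passed explicitly so the sketch is deterministic.
fcurve = obj.driver_add("location", 2)
fcurve.driver.expression = repeated_driver(0.0, 0.3, freq=1 / 24, off=0.0, seed=0)
```
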
-def animate_wiggle_bones(arma, bones, mag_deg, freq, off=0, wavelength=1, remove_iks=True): - ''' +def animate_wiggle_bones( + arma, bones, mag_deg, freq, off=0, wavelength=1, remove_iks=True +): + """ mag_deg = sum of magnitudes across al bones freq = flaps per second off = global time offset wavelength = how many flaps fit into one creature - ''' + """ - logger.debug(f'animate_wiggle_bones on {len(bones)=} {mag_deg=}') + logger.debug(f"animate_wiggle_bones on {len(bones)=} {mag_deg=}") # remove any iks, we will be overriding them if remove_iks: remove_ik_constraints(bones) - + mag = np.deg2rad(mag_deg) / len(bones) frame_period = int(bpy.context.scene.render.fps / freq) for i, b in enumerate(bones): b_off = -(off + i / len(bones)) * frame_period / wavelength - b.rotation_mode = 'XYZ' - sinusoid_driver(b.driver_add('rotation_euler')[0].driver, mag, freq, b_off) \ No newline at end of file + b.rotation_mode = "XYZ" + sinusoid_driver(b.driver_add("rotation_euler")[0].driver, mag, freq, b_off) diff --git a/infinigen/assets/objects/creatures/util/animation/idle.py b/infinigen/assets/objects/creatures/util/animation/idle.py new file mode 100644 index 000000000..19d2e903f --- /dev/null +++ b/infinigen/assets/objects/creatures/util/animation/idle.py @@ -0,0 +1,199 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Alexander Raistrick + + +import logging + +import bpy +import mathutils +import numpy as np +from numpy.random import normal as N +from numpy.random import uniform as U + + +def compute_ik_length_height(targets): + bounds = [] + for i in range(3): + vmin = min(t.matrix_world.translation[i] for t in targets) + vmax = max(t.matrix_world.translation[i] for t in targets) + bounds.append([vmin, vmax]) + return np.array(bounds) + + +def snap_iks_to_floor(targets, floor_bvh, minweight=0.7): + assert floor_bvh is not None + + bpy.context.view_layer.update() + + def get_targets(k): + return [t for t in targets if k in t.name] + + bounds = compute_ik_length_height(targets) + + def find_floor_offset(t): + ray_origin = mathutils.Vector( + (t.matrix_world.translation.x, t.matrix_world.translation.y, bounds[2, 1]) + ) + location, normal, index, dist = floor_bvh.ray_cast( + ray_origin, mathutils.Vector((0, 0, -1)) + ) + if location is None: + return None + return location - t.matrix_world.translation + + feet = get_targets("foot") + feet_offsets = [find_floor_offset(f) for f in feet] + + if any(off is None for off in feet_offsets): + logging.warning( + f"snap_iks_to_floor found {feet_offsets=}, aborting snap operation" + ) + return + + # dont allow the pose diff to be too large (ie, prevent weird behavior at cliffs) + for i, o in enumerate(feet_offsets): + if o.length > bounds[2, 1] - bounds[2, 0]: + logging.warning(f"snap_iks_to_floor ignoring too-long offset {o.length=}") + feet_offsets[i] = mathutils.Vector() + + for ( + f, + fo, + ) in zip(feet, feet_offsets): + f.location += fo + + hips = get_targets("body") + if len(feet) == len(hips) * 2: + # hips seem coupled to pairs of feet, take that into consideration + # TODO: Restructure to make detecting this more robust + + hip_offsets = [] + for i in range(len(feet) // 2): + o1, o2 = feet_offsets[2 * i], feet_offsets[2 * i + 1] + hip_off = minweight * min(o1, o2) + (1 - minweight) * max(o1, o2) + hip_offsets.append(hip_off) + + for h, ho in zip(hips, hip_offsets): + h.location += ho + + for o in 
get_targets("head"): # front-associated + o.location += hip_offsets[-1] + for o in get_targets("tail"): # back associated + o.location += hip_offsets[0] + + else: + logging.warning("Couldnt establish feet-hip mapping") + off = mathutils.Vector(np.array(feet_offsets).mean(axis=0)) + for o in targets: + if o in feet: + continue + o.location += off + + +def idle_body_noise_drivers( + targets, foot_motion_chance=0.2, head_benddown=1.0, body_mag=1.0, wing_mag=1.0 +): + # all magnitudes are determined as multiples of the creatures overall length/height/width + bounds = compute_ik_length_height(targets) + ls = bounds[:, 1] - bounds[:, 0] + + # scalars for the whole creature + freq_scalar = N(1, 0.15) + mag_scalar = N(1, 0.15) + + def add_noise(t, k, axis, mag, freq, off=0, mode="noise", seeds=None): + d = t.driver_add(k, axis) + p = getattr(t, k)[axis] + + if k == "location": + mag *= ls[axis] + + freq = freq / bpy.context.scene.render.fps + + freq *= freq_scalar + mag *= mag_scalar + + if mode == "noise": + s1, s2 = ( + seeds if seeds is not None else U(0, 1000, 2) + ) # random offsets as 'seeds' + varying = f"noise.noise(({freq:.6f}*frame, {s1:.2f}, {s2:.2f}))" + elif mode == "sin": + varying = f"sin({freq:6f}*frame*2*pi)" + else: + raise ValueError(mode) + + d.driver.expression = f"{p:.4f}+{mag:.4f}*({off:.4f}+{varying})" + + def get_targets(k): + return [t for t in targets if k in t.name] + + for i, t in enumerate(get_targets("body")): + add_noise( + t, "location", 0, mag=body_mag * 0.025 * N(1, 0.2), freq=0.25 * N(1, 0.2) + ) + if i != 0: + add_noise( + t, + "location", + 2, + mag=body_mag * 0.015 * N(1, 0.2), + freq=0.5 * N(1, 0.2), + mode="sin", + ) + + for t in get_targets("foot"): + if U() < foot_motion_chance: + add_noise(t, "location", 0, mag=0.07 * N(1, 0.1), freq=U(0.2, 0.7)) + add_noise(t, "location", 2, mag=0.04 * N(1, 0.1), freq=U(0.2, 0.7)) + + for t in get_targets("head"): + headfreq = 0.4 + add_noise( + t, + "location", + 0, + mag=0.07 * N(1, 0.1), + freq=headfreq, + off=-0.5 * head_benddown, + ) + add_noise(t, "location", 1, mag=0.03 * N(1, 0.1), freq=headfreq) + add_noise( + t, + "location", + 2, + mag=0.2 * N(1, 0.1), + freq=headfreq / 2, + off=-0.7 * head_benddown, + ) + # add_noise(t, 'rotation_euler', 0, mag=0.4*N(1, 0.1), freq=U(0.1, 0.4)) + # add_noise(t, 'rotation_euler', 1, mag=0.4*N(1, 0.1), freq=U(0.1, 0.4)) + + seeds = U(0, 1000, 2) # synchronize wing motion a little bit + for t in get_targets("wingtip"): + add_noise( + t, + "location", + 0, + mag=wing_mag * 0.1 * N(1, 0.1), + freq=U(0.6, 4), + seeds=seeds + N(0, 0.2, 2), + ) + add_noise( + t, + "location", + 2, + mag=wing_mag * 0.2 * N(1, 0.1), + freq=U(0.6, 4), + seeds=seeds + N(0, 0.2, 2), + ) + + for t in get_targets("tail"): + for i in range(3): + add_noise(t, "location", 0, mag=0.07 * N(1, 0.1), freq=headfreq, off=-0.5) + + +def head_look_around(targets): + pass diff --git a/infinigen/assets/creatures/util/animation/run_cycle.py b/infinigen/assets/objects/creatures/util/animation/run_cycle.py similarity index 52% rename from infinigen/assets/creatures/util/animation/run_cycle.py rename to infinigen/assets/objects/creatures/util/animation/run_cycle.py index a442ff085..dce9389e0 100644 --- a/infinigen/assets/creatures/util/animation/run_cycle.py +++ b/infinigen/assets/objects/creatures/util/animation/run_cycle.py @@ -5,82 +5,80 @@ import logging - import re -import bpy -import bpy_types -import mathutils - -import numpy as np -from numpy.random import uniform as U, normal as N from math import pi -import pdb 
+import bpy +import numpy as np +from numpy.random import normal as N +from numpy.random import uniform as U -from infinigen.assets.creatures.util import creature, creature_util as cutil -from infinigen.core.util.math import clip_gaussian, randomspacing, lerp from infinigen.core.util import blender as butil +from infinigen.core.util.math import clip_gaussian, lerp logger = logging.getLogger(__name__) -def foot_path(length, height, upturn, down_stroke, curve_resolution=8): - curve = bpy.data.curves.new('foot_path', 'CURVE') - curve.dimensions = '3D' +def foot_path(length, height, upturn, down_stroke, curve_resolution=8): + curve = bpy.data.curves.new("foot_path", "CURVE") + curve.dimensions = "3D" curve.resolution_u = curve_resolution curve.render_resolution_u = curve_resolution - obj = bpy.data.objects.new('foot_path', curve) + obj = bpy.data.objects.new("foot_path", curve) bpy.context.scene.collection.objects.link(obj) - s = curve.splines.new(type='NURBS') - s.use_cyclic_u = True - s.use_bezier_u = False + s = curve.splines.new(type="NURBS") + s.use_cyclic_u = True + s.use_bezier_u = False s.points.add(4 - len(s.points)) - s.points[0].co = (0, 0, -down_stroke, 1) - s.points[1].co = (-length/2, 0, -down_stroke + upturn, 1) - s.points[2].co = (0, 0, height, 1) - s.points[3].co = (length/2, 0, -down_stroke + upturn, 1) + s.points[0].co = (0, 0, -down_stroke, 1) + s.points[1].co = (-length / 2, 0, -down_stroke + upturn, 1) + s.points[2].co = (0, 0, height, 1) + s.points[3].co = (length / 2, 0, -down_stroke + upturn, 1) curve.splines[0].order_u = 3 return obj -def body_path(length, height, upturn, down_stroke, curve_resolution=8): - curve = bpy.data.curves.new('body_path', 'CURVE') - curve.dimensions = '3D' +def body_path(length, height, upturn, down_stroke, curve_resolution=8): + curve = bpy.data.curves.new("body_path", "CURVE") + curve.dimensions = "3D" curve.resolution_u = curve_resolution curve.render_resolution_u = curve_resolution - obj = bpy.data.objects.new('body_path', curve) + obj = bpy.data.objects.new("body_path", curve) bpy.context.scene.collection.objects.link(obj) - s = curve.splines.new(type='NURBS') - s.use_cyclic_u = True - s.use_bezier_u = False + s = curve.splines.new(type="NURBS") + s.use_cyclic_u = True + s.use_bezier_u = False s.points.add(4 - len(s.points)) - s.points[0].co = (0, -down_stroke, 0, 1) - s.points[1].co = (-length/2, -down_stroke + upturn, 0, 1) - s.points[2].co = (0, height, 0, 1) - s.points[3].co = (length/2, -down_stroke + upturn, 0, 1) + s.points[0].co = (0, -down_stroke, 0, 1) + s.points[1].co = (-length / 2, -down_stroke + upturn, 0, 1) + s.points[2].co = (0, height, 0, 1) + s.points[3].co = (length / 2, -down_stroke + upturn, 0, 1) curve.splines[0].order_u = 3 return obj -def follow_path(target, path, duration: int, offset: float = 0, reset_rot=True, **kwargs): - + +def follow_path( + target, path, duration: int, offset: float = 0, reset_rot=True, **kwargs +): target.location = (0, 0, 0) if reset_rot: target.rotation_euler = (0, 0, 0) - c = butil.constrain_object(target, 'FOLLOW_PATH', - target=path, offset=offset, **kwargs) - + c = butil.constrain_object( + target, "FOLLOW_PATH", target=path, offset=offset, **kwargs + ) + path.data.use_path = True path.data.path_duration = duration - -def follow_gait_path(targets, path_dims, period, offset, spread): - offsets = offset + np.linspace(-spread/2, spread/2, len(targets), endpoint=True) + +def follow_gait_path(targets, path_dims, period, offset, spread): + offsets = offset + np.linspace(-spread / 2, 
spread / 2, len(targets), endpoint=True) offsets *= period paths = [] @@ -93,9 +91,9 @@ def follow_gait_path(targets, path_dims, period, offset, spread): return paths -def follow_body_path(targets, path_dims, period, offset, spread): - offsets = offset + np.linspace(-spread/2, spread/2, len(targets), endpoint=True) +def follow_body_path(targets, path_dims, period, offset, spread): + offsets = offset + np.linspace(-spread / 2, spread / 2, len(targets), endpoint=True) offsets *= period paths = [] @@ -108,80 +106,127 @@ def follow_body_path(targets, path_dims, period, offset, spread): return paths -def animate_run(root, arma, targets, steps_per_sec=1, body=True, motion=True, squash_gait_pct=0.1): - ''' +def animate_run( + root, arma, targets, steps_per_sec=1, body=True, motion=True, squash_gait_pct=0.1 +): + """ Animate creature by moving its IK targets - ''' - - assert arma.type == 'ARMATURE' + """ + + assert arma.type == "ARMATURE" stride_length = 0.7 * clip_gaussian(0.4, 0.3, 0.3, 0.6) * arma.dimensions.x spread = clip_gaussian(0.15, 0.1, 0, 0.5) stride_height = U(0.15, 0.4) body_height = stride_height * clip_gaussian(0.6, 0.4, 0.3, 1.2) - + base_offset = U(0, 1) offset = U(0.5, 0.7) frame_period = int(bpy.context.scene.render.fps / steps_per_sec) - get_targets = lambda k: [t for t in targets if k in t.name] - - feet_targets = get_targets('foot') - - foot_paths = [] - foot_paths += follow_gait_path(targets=feet_targets[:2], period=frame_period, - path_dims=(stride_length, stride_height, 0.0, 0.0), offset=base_offset, spread=spread) - foot_paths += follow_gait_path(targets=feet_targets[2:], period=frame_period, - path_dims=(stride_length, stride_height, 0.0, 0.0), offset=base_offset+offset, spread=spread) + def get_targets(k): + return [t for t in targets if k in t.name] + + feet_targets = get_targets("foot") + + foot_paths = [] + foot_paths += follow_gait_path( + targets=feet_targets[:2], + period=frame_period, + path_dims=(stride_length, stride_height, 0.0, 0.0), + offset=base_offset, + spread=spread, + ) + foot_paths += follow_gait_path( + targets=feet_targets[2:], + period=frame_period, + path_dims=(stride_length, stride_height, 0.0, 0.0), + offset=base_offset + offset, + spread=spread, + ) for p in foot_paths: p.parent = root - feet_targets = get_targets('knee') - knee_paths = [] - knee_paths += follow_gait_path(targets=feet_targets[:2], period=frame_period, - path_dims=(0.1 * stride_length, 0.1 * stride_height, 0.0, 0.0), offset=base_offset, spread=spread) - knee_paths += follow_gait_path(targets=feet_targets[2:], period=frame_period, - path_dims=(0.1 * stride_length, 0.1 * stride_height, 0.0, 0.0), offset=base_offset+offset, spread=spread) + feet_targets = get_targets("knee") + knee_paths = [] + knee_paths += follow_gait_path( + targets=feet_targets[:2], + period=frame_period, + path_dims=(0.1 * stride_length, 0.1 * stride_height, 0.0, 0.0), + offset=base_offset, + spread=spread, + ) + knee_paths += follow_gait_path( + targets=feet_targets[2:], + period=frame_period, + path_dims=(0.1 * stride_length, 0.1 * stride_height, 0.0, 0.0), + offset=base_offset + offset, + spread=spread, + ) for p in knee_paths: p.location.z = -0.1 p.parent = root - + body_paths = [] if body: - body_paths += follow_gait_path(targets=get_targets('body_0'), period=frame_period, - path_dims=(0, body_height, 0.0, 0.0), offset=base_offset+1-offset/2, spread=0) - body_paths += follow_gait_path(targets=get_targets('body_1'), period=frame_period, - path_dims=(0, body_height, 0.0, 0.0), offset=base_offset+offset/2, 
spread=0) - #body_paths += animate_feet(targets=get_targets('tail'), period=frame_period, + body_paths += follow_gait_path( + targets=get_targets("body_0"), + period=frame_period, + path_dims=(0, body_height, 0.0, 0.0), + offset=base_offset + 1 - offset / 2, + spread=0, + ) + body_paths += follow_gait_path( + targets=get_targets("body_1"), + period=frame_period, + path_dims=(0, body_height, 0.0, 0.0), + offset=base_offset + offset / 2, + spread=0, + ) + # body_paths += animate_feet(targets=get_targets('tail'), period=frame_period, # path_dims=(0.1, 0.4, 0.0, 0.0), offset=0, spread=0) - body_paths += follow_gait_path(targets=get_targets('head'), period=frame_period, - path_dims=(0, body_height*0.5*N(1, 0.05), 0.0, 0.0), offset=0, spread=0) + body_paths += follow_gait_path( + targets=get_targets("head"), + period=frame_period, + path_dims=(0, body_height * 0.5 * N(1, 0.05), 0.0, 0.0), + offset=0, + spread=0, + ) flap_height = U(0.3, 1) - flap_speed_mult = 1 #uniform(0.7, 2) - body_paths += follow_gait_path(targets=get_targets('wingtip'), period=int(frame_period/flap_speed_mult), - path_dims=(0, flap_height, 0.0, flap_height), offset=base_offset+offset, spread=0) - + flap_speed_mult = 1 # uniform(0.7, 2) + body_paths += follow_gait_path( + targets=get_targets("wingtip"), + period=int(frame_period / flap_speed_mult), + path_dims=(0, flap_height, 0.0, flap_height), + offset=base_offset + offset, + spread=0, + ) + for p in body_paths: if len(foot_paths): - p.location.z = (1 - squash_gait_pct) * p.location.z + squash_gait_pct * foot_paths[0].location.z + p.location.z = ( + 1 - squash_gait_pct + ) * p.location.z + squash_gait_pct * foot_paths[0].location.z p.parent = root all_paths = foot_paths + knee_paths + body_paths if motion: for p in all_paths: - p.data.driver_add('eval_time').driver.expression = 'frame' + p.data.driver_add("eval_time").driver.expression = "frame" return all_paths -def animate_wiggle_body_iks(root, arma, targets, compression_ratio=1.01, cycles_per_bodylen=1.5, fix_head=True): - assert arma.type == 'ARMATURE' +def animate_wiggle_body_iks( + root, arma, targets, compression_ratio=1.01, cycles_per_bodylen=1.5, fix_head=True +): + assert arma.type == "ARMATURE" - logger.info('Starting animate_wiggle') + logger.info("Starting animate_wiggle") steps_per_sec = clip_gaussian(1.5, 0.5, 0.1, 3) @@ -191,74 +236,83 @@ def animate_wiggle_body_iks(root, arma, targets, compression_ratio=1.01, cycles_ frame_period = int(bpy.context.scene.render.fps / steps_per_sec) body_paths = [] - targets = [t for t in targets if 'body_' in t.name] + targets = [t for t in targets if "body_" in t.name] for i, t in enumerate(targets): - offset = cycles_per_bodylen * (i/len(targets)) * frame_period - w = lerp(start_percent * width, width, 1 - i/len(targets)) + offset = cycles_per_bodylen * (i / len(targets)) * frame_period + w = lerp(start_percent * width, width, 1 - i / len(targets)) dims = (w, 0, 0.0, 0.0) - body_paths += follow_gait_path(targets=[t], path_dims=dims, period=frame_period, - offset=offset, spread=0) + body_paths += follow_gait_path( + targets=[t], path_dims=dims, period=frame_period, offset=offset, spread=0 + ) - bstart = body_paths[0].location.x + bstart = body_paths[0].location.x for p in body_paths: p.rotation_euler.z += np.pi / 2 p.location.x = (p.location.x - bstart) * compression_ratio + bstart p.parent = root - p.data.driver_add('eval_time').expression = 'frame' + p.data.driver_add("eval_time").expression = "frame" return body_paths + def sinusoid_driver(driver, mag, freq, off): - 
driver.expression = f'{mag:.4f}*sin(({freq:.4f}*frame+{off:.4f})/(2*pi))' + driver.expression = f"{mag:.4f}*sin(({freq:.4f}*frame+{off:.4f})/(2*pi))" + def cosusoid_driver(driver, mag, freq, off): - driver.expression = f'{mag:.4f}*cos(({freq:.4f}*frame+{off:.4f})/(2*pi))' + driver.expression = f"{mag:.4f}*cos(({freq:.4f}*frame+{off:.4f})/(2*pi))" -def animate_wiggle_bones(arma, bones, mag_deg, freq, off=0, wavelength=1, remove_iks=True, fixed_head=True): - ''' +def animate_wiggle_bones( + arma, bones, mag_deg, freq, off=0, wavelength=1, remove_iks=True, fixed_head=True +): + """ mag_deg = sum of magnitudes across al bones freq = flaps per second off = global time offset wavelength = how many flaps fit into one creature - ''' + """ # remove any iks, we will be overriding them if remove_iks: for b in bones: for c in b.constraints: - if hasattr(c, 'target'): + if hasattr(c, "target"): butil.delete(c.target) b.constraints.remove(c) mag = np.deg2rad(mag_deg) / len(bones) frame_period = int(bpy.context.scene.render.fps / freq) - print('freq:', freq) + print("freq:", freq) for i, b in enumerate(bones): b_off = -(off + i / len(bones)) * frame_period / wavelength - b.rotation_mode = 'XYZ' - sinusoid_driver(b.driver_add('rotation_euler')[0].driver, mag, freq, b_off) - if not fixed_head and i == 0: # move head - cosusoid_driver(b.driver_add('location')[2].driver, mag / (freq / (2 * pi)), freq, b_off) + b.rotation_mode = "XYZ" + sinusoid_driver(b.driver_add("rotation_euler")[0].driver, mag, freq, b_off) + if not fixed_head and i == 0: # move head + cosusoid_driver( + b.driver_add("location")[2].driver, mag / (freq / (2 * pi)), freq, b_off + ) # sinusoid_driver(b.driver_add('rotation_euler')[0].driver, -mag, freq, b_off) -def animate_running_front_leg(arma, bones, mag_deg, freq, off=0, wavelength=1, remove_iks=True, fixed_head=True): - ''' +def animate_running_front_leg( + arma, bones, mag_deg, freq, off=0, wavelength=1, remove_iks=True, fixed_head=True +): + """ mag_deg = sum of magnitudes across al bones freq = flaps per second off = global time offset wavelength = how many flaps fit into one creature - ''' + """ # remove any iks, we will be overriding them if remove_iks: for b in bones: for c in b.constraints: - if hasattr(c, 'target'): + if hasattr(c, "target"): butil.delete(c.target) b.constraints.remove(c) @@ -279,27 +333,29 @@ def number_finder(s): for i, b in enumerate(bones): rr = number_finder(b.name) b_off = -(off + (1 / 2 * (rr == right))) * frame_period / wavelength - b.rotation_mode = 'XYZ' - sinusoid_driver(b.driver_add('rotation_euler')[0].driver, mag, freq, b_off) - sinusoid_driver(b.driver_add('rotation_euler')[2].driver, 3 * mag, freq, b_off) - b.driver_add('location')[2].driver.expression = '-0.1' + b.rotation_mode = "XYZ" + sinusoid_driver(b.driver_add("rotation_euler")[0].driver, mag, freq, b_off) + sinusoid_driver(b.driver_add("rotation_euler")[2].driver, 3 * mag, freq, b_off) + b.driver_add("location")[2].driver.expression = "-0.1" # sinusoid_driver(b.driver_add('location')[0].driver, 5 * mag, freq, b_off) # sinusoid_driver(b.driver_add('location')[2].driver, 0.1 * mag, freq, b_off) -def animate_running_back_leg(arma, bones, mag_deg, freq, off=0, wavelength=1, remove_iks=True, fixed_head=True): - ''' +def animate_running_back_leg( + arma, bones, mag_deg, freq, off=0, wavelength=1, remove_iks=True, fixed_head=True +): + """ mag_deg = sum of magnitudes across al bones freq = flaps per second off = global time offset wavelength = how many flaps fit into one creature - ''' + """ # 
remove any iks, we will be overriding them if remove_iks: for b in bones: for c in b.constraints: - if hasattr(c, 'target'): + if hasattr(c, "target"): butil.delete(c.target) b.constraints.remove(c) @@ -320,14 +376,9 @@ def number_finder(s): for i, b in enumerate(bones): rr = number_finder(b.name) b_off = -(off + (1 / 2 * (rr == right))) * frame_period / wavelength - b.rotation_mode = 'XYZ' - sinusoid_driver(b.driver_add('rotation_euler')[0].driver, 3 * mag, freq, b_off) + b.rotation_mode = "XYZ" + sinusoid_driver(b.driver_add("rotation_euler")[0].driver, 3 * mag, freq, b_off) # sinusoid_driver(b.driver_add('rotation_euler')[1].driver, 3 * mag, freq, b_off) - b.driver_add('location')[2].driver.expression = '-0.1' + b.driver_add("location")[2].driver.expression = "-0.1" # sinusoid_driver(b.driver_add('location')[0].driver, 5 * mag, freq, b_off) # sinusoid_driver(b.driver_add('location')[2].driver, 0.1 * mag, freq, b_off) - - - - - \ No newline at end of file diff --git a/infinigen/assets/creatures/util/boid_swarm.py b/infinigen/assets/objects/creatures/util/boid_swarm.py similarity index 52% rename from infinigen/assets/creatures/util/boid_swarm.py rename to infinigen/assets/objects/creatures/util/boid_swarm.py index c5c6ba5f2..539c171ff 100644 --- a/infinigen/assets/creatures/util/boid_swarm.py +++ b/infinigen/assets/objects/creatures/util/boid_swarm.py @@ -4,95 +4,105 @@ # Authors: Alexander Raistrick -from functools import partial import logging import bpy - import numpy as np -from numpy.random import uniform as U, normal as N +from numpy.random import normal as N +from infinigen.core.placement import animation_policy, particles from infinigen.core.placement.factory import AssetFactory -from infinigen.core.placement import particles, animation_policy - from infinigen.core.util import blender as butil -from infinigen.core.util.math import FixedSeed from infinigen.core.util.random import random_general logger = logging.getLogger(__name__) -def creature_col_to_particle_col(col, name, prefix='particleassets'): +def creature_col_to_particle_col(col, name, prefix="particleassets"): roots = [o for o in col.objects if o.parent is None] objs = [] - + for root in roots: - first, *rest = [o for o in butil.iter_object_tree(root) if o.type == 'MESH'] + first, *rest = [o for o in butil.iter_object_tree(root) if o.type == "MESH"] if len(rest): - logger.warning(f'{col.name=} had {root.name=} with multiple child meshes, taking {first.name=}, but it should have been joined with {rest}') + logger.warning( + f"{col.name=} had {root.name=} with multiple child meshes, taking {first.name=}, but it should have been joined with {rest}" + ) objs.append(first) - root.location.z -= 100 # we will have hide_render=False so make sure the base asset is not visible + root.location.z -= 100 # we will have hide_render=False so make sure the base asset is not visible - col = butil.group_in_collection(objs, name=f'{prefix}:{name}', exclusive=False) + col = butil.group_in_collection(objs, name=f"{prefix}:{name}", exclusive=False) col.hide_viewport = True col.hide_render = False return col -class BoidSwarmFactory(AssetFactory): +class BoidSwarmFactory(AssetFactory): def __init__( - self, factory_seed, child_col, settings, bvh, - collider_col, volume=0.1, coarse=False + self, + factory_seed, + child_col, + settings, + bvh, + collider_col, + volume=0.1, + coarse=False, ): super().__init__(factory_seed, coarse) - + self.collider_col = collider_col self.settings = settings self.bvh = bvh self.target_child_volume = volume - 
self.col = creature_col_to_particle_col(child_col, name=f'{self}.children') + self.col = creature_col_to_particle_col(child_col, name=f"{self}.children") def create_placeholder(self, loc, rot, **kwargs) -> bpy.types.Object: - p = butil.spawn_cube(size=4) p.location = loc p.rotation_euler = rot - speed_keys = ['land_speed_max', 'air_speed_max', 'climb_speed_max'] - speed = max(self.settings['boids_settings'].get(k, 0) for k in speed_keys) + speed_keys = ["land_speed_max", "air_speed_max", "climb_speed_max"] + speed = max(self.settings["boids_settings"].get(k, 0) for k in speed_keys) step_size_range = speed * 3 * N(1, 0.1) * np.array([0.5, 1.5]) - policy = animation_policy.AnimPolicyRandomForwardWalk(forward_vec=(1, 0, 0), - speed=speed*N(1, 0.1), step_range=step_size_range, yaw_dist=("normal", 0, 70)) - animation_policy.animate_trajectory(p, self.bvh, policy, retry_rotation=True, max_full_retries=20) + policy = animation_policy.AnimPolicyRandomForwardWalk( + forward_vec=(1, 0, 0), + speed=speed * N(1, 0.1), + step_range=step_size_range, + yaw_dist=("normal", 0, 70), + ) + animation_policy.animate_trajectory( + p, self.bvh, policy, retry_rotation=True, max_full_retries=20 + ) return p def create_asset(self, placeholder, **params) -> bpy.types.Object: - assert self.col is not None - size = self.settings['particle_size'] - size_random = self.settings['size_random'] - avg_size = (size + size_random*size)/2 + size = self.settings["particle_size"] + size_random = self.settings["size_random"] + avg_size = (size + size_random * size) / 2 child_vol = butil.avg_approx_vol(self.col.objects) - count = random_general(self.target_child_volume) / float(avg_size**3 * child_vol) - self.settings['count'] = int(count) + count = random_general(self.target_child_volume) / float( + avg_size**3 * child_vol + ) + self.settings["count"] = int(count) emitter, system = particles.particle_system( - emitter=placeholder, subject=self.col, + emitter=placeholder, + subject=self.col, collision_collection=self.collider_col, - settings=self.settings) + settings=self.settings, + ) for r in system.settings.boids.states[0].rules: - if r.type in ['GOAL', 'FOLLOW_LEADER']: + if r.type in ["GOAL", "FOLLOW_LEADER"]: r.object = placeholder - logger.info(f'Baking {emitter.name=} with {self.col.name=}') + logger.info(f"Baking {emitter.name=} with {self.col.name=}") particles.bake(emitter, system) emitter.hide_render = False - - return emitter - + return emitter diff --git a/infinigen/assets/objects/creatures/util/cloth_sim.py b/infinigen/assets/objects/creatures/util/cloth_sim.py new file mode 100644 index 000000000..f052d50da --- /dev/null +++ b/infinigen/assets/objects/creatures/util/cloth_sim.py @@ -0,0 +1,161 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Alexander Raistrick +# Acknowledgment: This file draws inspiration from https://www.youtube.com/watch?v=YDrbyITWMGU by Mr. 
Cheebs + + +import logging + +import bpy +from numpy.random import normal, uniform + +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.surface import attribute_to_vertex_group +from infinigen.core.util import blender as butil +from infinigen.core.util.logging import Timer + +logger = logging.getLogger(__name__) + + +def local_pos_rigity_mask(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketFloat", "To Min", 0.4), + ("NodeSocketFloat", "To Max", 0.9), + ], + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, + input_kwargs={"Vector": nw.expose_input("Local Pos", attribute="local_pos")}, + ) + + clamp = nw.new_node( + Nodes.Clamp, + input_kwargs={ + "Value": nw.expose_input("Radius", attribute="skeleton_rad"), + "Min": 0.03, + "Max": 0.49, + }, + ) + + multiply = nw.new_node( + Nodes.Math, input_kwargs={0: clamp, 1: -1.0}, attrs={"operation": "MULTIPLY"} + ) + + multiply_1 = nw.new_node( + Nodes.Math, input_kwargs={0: clamp, 1: 1.5}, attrs={"operation": "MULTIPLY"} + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": separate_xyz.outputs["Z"], 1: multiply, 2: multiply_1}, + ) + + musgrave_texture = nw.new_node( + Nodes.MusgraveTexture, + input_kwargs={"W": uniform(1e3), "Scale": normal(10, 1)}, + attrs={"musgrave_dimensions": "4D"}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: musgrave_texture, 1: normal(0.07, 0.007)}, + attrs={"operation": "MULTIPLY"}, + ) + + musgrave_texture_1 = nw.new_node( + Nodes.MusgraveTexture, + input_kwargs={"Scale": normal(5, 0.5), "W": uniform(1e3)}, + attrs={"musgrave_dimensions": "4D"}, + ) + + multiply_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: musgrave_texture_1, 1: normal(0.12, 0.01)}, + attrs={"operation": "MULTIPLY"}, + ) + + add = nw.new_node(Nodes.Math, input_kwargs={0: multiply_2, 1: multiply_3}) + + add_1 = nw.new_node( + Nodes.Math, input_kwargs={0: map_range.outputs["Result"], 1: add} + ) + + colorramp = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": add_1}) + colorramp.color_ramp.elements.new(1) + colorramp.color_ramp.elements[0].position = normal(0.23, 0.05) + colorramp.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) + colorramp.color_ramp.elements[1].position = normal(0.6, 0.05) + colorramp.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) + colorramp.color_ramp.elements[2].position = 1.0 + colorramp.color_ramp.elements[2].color = (0.0, 0.0, 0.0, 1.0) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": colorramp.outputs["Color"], + 3: group_input.outputs["To Min"], + 4: group_input.outputs["To Max"], + }, + ) + + musgrave_texture_2 = nw.new_node(Nodes.MusgraveTexture) + + multiply_4 = nw.new_node( + Nodes.Math, + input_kwargs={0: musgrave_texture_2, 1: normal(0.1, 0.02)}, + attrs={"operation": "MULTIPLY"}, + ) + + return nw.new_node( + Nodes.Math, input_kwargs={0: map_range_1.outputs["Result"], 1: multiply_4} + ) + + +def bake_cloth(obj, settings=None, attributes=None, frame_start=None, frame_end=None): + if frame_start is None: + frame_start = bpy.context.scene.frame_start + if frame_end is None: + frame_end = bpy.context.scene.frame_end + if settings is None: + settings = {} + if attributes is None: + attributes = {} + + mod = obj.modifiers.new("bake_cloth", "CLOTH") + + mod.settings.effector_weights.gravity = settings.pop("gravity", 1) + 
mod.collision_settings.distance_min = settings.pop("distance_min", 0.015) + mod.collision_settings.use_self_collision = settings.pop( + "use_self_collision", False + ) + + for k, v in settings.items(): + setattr(mod.settings, k, v) + + with butil.DisableModifiers(obj): + for name, attr in attributes.items(): + vgroup = attribute_to_vertex_group(obj, attr, name=f"skin_sim.{name}") + setattr(mod.settings, name, vgroup.name) + + mod.point_cache.frame_start = frame_start + mod.point_cache.frame_end = frame_end + with ( + butil.ViewportMode(obj, mode="OBJECT"), + butil.SelectObjects(obj), + Timer("Baking fish cloth"), + ): + override = { + "scene": bpy.context.scene, + "active_object": obj, + "point_cache": mod.point_cache, + } + bpy.ops.ptcache.bake(override, bake=True) + + return mod diff --git a/infinigen/assets/creatures/util/creature.py b/infinigen/assets/objects/creatures/util/creature.py similarity index 62% rename from infinigen/assets/creatures/util/creature.py rename to infinigen/assets/objects/creatures/util/creature.py index 8675831f2..cf835cdc0 100644 --- a/infinigen/assets/creatures/util/creature.py +++ b/infinigen/assets/objects/creatures/util/creature.py @@ -4,30 +4,23 @@ # Authors: Alexander Raistrick -import pdb -from dataclasses import dataclass, field -import typing -import warnings import logging +import warnings +from dataclasses import dataclass, field import bpy import mathutils -from mathutils.bvhtree import BVHTree - import numpy as np +from mathutils.bvhtree import BVHTree -from infinigen.assets.creatures.util.geometry import lofting, skin_ops -from infinigen.assets.creatures.util.creature_util import interp_dict, euler -from infinigen.assets.creatures.util import genome -from infinigen.assets.creatures.util import tree, join_smoothing - -from infinigen.assets.creatures.util import genome - +from infinigen.assets.objects.creatures.util import genome, tree +from infinigen.assets.objects.creatures.util.creature_util import euler, interp_dict +from infinigen.assets.utils.geometry import lofting from infinigen.core import surface -from infinigen.core.placement import detail -from infinigen.core.util import blender as butil, logging as logging_util -from infinigen.core.util.math import homogenize, lerp_sample, lerp from infinigen.core.nodes.node_wrangler import Nodes +from infinigen.core.util import blender as butil +from infinigen.core.util import logging as logging_util +from infinigen.core.util.math import homogenize, lerp, lerp_sample logger = logging.getLogger(__name__) @@ -39,7 +32,9 @@ def infer_skeleton_from_mesh(obj): v_xmax = vs[vs[:, 0].argmax()] return np.array([v_xmin, v_xmax]) except ValueError: - warnings.warn(f'infer_skeleton_from_mesh({obj=}) failed, returning null skeleton') + warnings.warn( + f"infer_skeleton_from_mesh({obj=}) failed, returning null skeleton" + ) return np.array([[0, 0, 0], [0.1, 0, 0]]) @@ -64,26 +59,27 @@ def __post_init__(self): def bvh(self): if self._bvh is None: - logger.debug(f'Computing part bvh for {self.obj.name}') + logger.debug(f"Computing part bvh for {self.obj.name}") target = self.attach_basemesh or self.obj - assert target.type == 'MESH' + assert target.type == "MESH" depsgraph = bpy.context.evaluated_depsgraph_get() self._bvh = BVHTree.FromObject(target, depsgraph) return self._bvh def __repr__(self): - return f'{self.__class__.__name__}(obj.name={repr(self.obj.name)}, skeleton.shape=' \ - f'{self.skeleton.shape if self.skeleton is not None else None})' + return ( + 
f"{self.__class__.__name__}(obj.name={repr(self.obj.name)}, skeleton.shape=" + f"{self.skeleton.shape if self.skeleton is not None else None})" + ) def skeleton_global(self): return homogenize(self.skeleton) @ np.array(self.obj.matrix_world)[:-1].T -ALL_TAGS = ['body', 'neck', 'head', 'jaw', 'head_detail', 'limb', 'foot', 'rigid'] +ALL_TAGS = ["body", "neck", "head", "jaw", "head_detail", "limb", "foot", "rigid"] class PartFactory: - def __init__(self, params=None, sample=True): if sample: self.params = self.sample_params() @@ -106,11 +102,13 @@ def __call__(self, rand=0) -> Part: other_sample = self.sample_params() params = interp_dict(params, other_sample, rand) - logger.debug(f'Computing {self}.make_part()') + logger.debug(f"Computing {self}.make_part()") part = self.make_part(params) if part is None: - raise ValueError(f'{self}.make_part() returned None, did you forget a return?') + raise ValueError( + f"{self}.make_part() returned None, did you forget a return?" + ) return part @@ -131,7 +129,9 @@ def quat_align_vecs(a, b): return mathutils.Quaternion(a.cross(b), a.angle(b)) -def raycast_surface(part: Part, idx_pct, dir_rot: mathutils.Quaternion, r=1, debug=False): +def raycast_surface( + part: Part, idx_pct, dir_rot: mathutils.Quaternion, r=1, debug=False +): # figure out axis of rotation idx = np.array([idx_pct]) * (len(part.skeleton) - 1) tangents = lofting.skeleton_to_tangents(part.skeleton) @@ -139,24 +139,28 @@ def raycast_surface(part: Part, idx_pct, dir_rot: mathutils.Quaternion, r=1, deb # raycast to find surface of the part origin = mathutils.Vector(lerp_sample(part.skeleton, idx).reshape(-1)) - basis = part.obj.rotation_euler.to_quaternion() @ quat_align_vecs((1, 0, 0), forward.reshape(-1)) + basis = part.obj.rotation_euler.to_quaternion() @ quat_align_vecs( + (1, 0, 0), forward.reshape(-1) + ) direction = basis @ dir_rot @ mathutils.Vector([1, 0, 0]) location, normal, index, dist = part.bvh().ray_cast(origin, direction) if location is None: - logger.warning(f'Raycast did not intersect {part} with {dist=} {dir_rot=} {idx_pct=}') + logger.warning( + f"Raycast did not intersect {part} with {dist=} {dir_rot=} {idx_pct=}" + ) location = origin dist = 0 normal = (1, 0, 0) elif debug: - o = butil.spawn_empty('origin') + o = butil.spawn_empty("origin") o.location = origin - d = butil.spawn_empty('dir') - d.location = (origin + 0.05 * direction) + d = butil.spawn_empty("dir") + d.location = origin + 0.05 * direction - e = butil.spawn_empty('hit') + e = butil.spawn_empty("hit") e.location = location for v in [o, d, e]: @@ -174,71 +178,88 @@ def write_local_attributes(part, idx, tags): n = len(part.obj.data.vertices) # local position - surface.write_attribute(part.obj, lambda nw: nw.new_node(Nodes.InputPosition), name='local_pos', apply=True) + surface.write_attribute( + part.obj, + lambda nw: nw.new_node(Nodes.InputPosition), + name="local_pos", + apply=True, + ) # float repr of integer part idx, useful after join/remesh - part_idx_attr = part.obj.data.attributes.new('part_idx', 'FLOAT', 'POINT') - part_idx_attr.data.foreach_set('value', np.full(n, idx)) + part_idx_attr = part.obj.data.attributes.new("part_idx", "FLOAT", "POINT") + part_idx_attr.data.foreach_set("value", np.full(n, idx)) for t in tags: - attr = part.obj.data.attributes.new(f'tag_{t}', 'FLOAT', 'POINT') - attr.data.foreach_set('value', np.ones(n)) + attr = part.obj.data.attributes.new(f"tag_{t}", "FLOAT", "POINT") + attr.data.foreach_set("value", np.ones(n)) def write_global_attributes(part): skeleton = 
part.skeleton_global() verts = np.array([part.obj.matrix_world @ v.co for v in part.obj.data.vertices]) - dists = np.linalg.norm(skeleton.reshape(1, -1, 3) - verts.reshape(-1, 1, 3), axis=-1) + dists = np.linalg.norm( + skeleton.reshape(1, -1, 3) - verts.reshape(-1, 1, 3), axis=-1 + ) closest_idx = dists.argmin(axis=1) rads = dists[np.arange(dists.shape[0]), closest_idx] - rad_attr = part.obj.data.attributes.new('skeleton_rad', 'FLOAT', 'POINT') - rad_attr.data.foreach_set('value', rads) + rad_attr = part.obj.data.attributes.new("skeleton_rad", "FLOAT", "POINT") + rad_attr.data.foreach_set("value", rads) # location of nearest skeleton point - skeleton_loc_attr = part.obj.data.attributes.new('skeleton_loc', 'FLOAT_VECTOR', 'POINT') - skeleton_loc_attr.data.foreach_set('vector', skeleton[closest_idx].reshape(-1)) + skeleton_loc_attr = part.obj.data.attributes.new( + "skeleton_loc", "FLOAT_VECTOR", "POINT" + ) + skeleton_loc_attr.data.foreach_set("vector", skeleton[closest_idx].reshape(-1)) # location of the parent of the nearest skeleton point parent_loc = skeleton[np.clip(closest_idx - 1, 0, len(skeleton))] if skeleton.shape[0] > 1: parent_loc[closest_idx == 0] = skeleton[0] - (skeleton[1] - skeleton[0]) - parent_skeleton_loc_attr = part.obj.data.attributes.new('parent_skeleton_loc', 'FLOAT_VECTOR', 'POINT') - parent_skeleton_loc_attr.data.foreach_set('vector', parent_loc.reshape(-1)) + parent_skeleton_loc_attr = part.obj.data.attributes.new( + "parent_skeleton_loc", "FLOAT_VECTOR", "POINT" + ) + parent_skeleton_loc_attr.data.foreach_set("vector", parent_loc.reshape(-1)) def sanitize_for_boolean(o): - ''' + """ Attempt to clean up `o` to make boolean operations more likely to succeed - ''' + """ with butil.SelectObjects(o), logging_util.Suppress(): bpy.ops.object.transform_apply(scale=True) - bpy.ops.object.mode_set(mode='EDIT') + bpy.ops.object.mode_set(mode="EDIT") bpy.ops.mesh.select_all() bpy.ops.mesh.remove_doubles() bpy.ops.mesh.normals_make_consistent(inside=False) - bpy.ops.object.mode_set(mode='OBJECT') + bpy.ops.object.mode_set(mode="OBJECT") def apply_attach_transform(part, target, att): u, v, rad = att.coord - loc, normal, tangent = raycast_surface(target, idx_pct=u, dir_rot=euler(180 * v, 0, 0) @ euler(0, 90, 0), - r=rad, debug=False) + loc, normal, tangent = raycast_surface( + target, + idx_pct=u, + dir_rot=euler(180 * v, 0, 0) @ euler(0, 90, 0), + r=rad, + debug=False, + ) - if att.rotation_basis == 'global': + if att.rotation_basis == "global": basis_rot = mathutils.Quaternion() - elif att.rotation_basis == 'normal': + elif att.rotation_basis == "normal": basis_rot = quat_align_vecs((1, 0, 0), normal) - elif att.rotation_basis == 'tangent': + elif att.rotation_basis == "tangent": basis_rot = quat_align_vecs((1, 0, 0), tangent) else: - raise ValueError(f'Unrecognized {att.rotation_basis=}') + raise ValueError(f"Unrecognized {att.rotation_basis=}") rot = basis_rot @ euler(*att.joint.rest) att.joint.rest = np.rad2deg( - np.array(rot.to_euler())) # write back so subsequent steps can use updated global pose + np.array(rot.to_euler()) + ) # write back so subsequent steps can use updated global pose part.obj.parent = target.obj part.obj.location = loc @@ -250,15 +271,17 @@ def apply_attach_transform(part, target, att): if obj is None: continue part.side = target.side * att.side - obj.matrix_world = mathutils.Matrix.Scale(att.side, 4, ( - 0, 1, 0)) @ obj.matrix_world # # butil.apply_transform(obj, loc=False, rot=False, scale=True) + obj.matrix_world = ( + 
mathutils.Matrix.Scale(att.side, 4, (0, 1, 0)) @ obj.matrix_world + ) # # butil.apply_transform(obj, loc=False, rot=False, scale=True) def attach(part: Part, target: Part, att: genome.Attachment): - if target.obj.type != 'MESH': + if target.obj.type != "MESH": raise ValueError( - f'attach() recieved {target.obj=} with {target.obj.type=} which is not valid for raycast ' - f'attachment, please convert to type=MESH') + f"attach() recieved {target.obj=} with {target.obj.type=} which is not valid for raycast " + f"attachment, please convert to type=MESH" + ) apply_attach_transform(part, target, att) @@ -269,10 +292,17 @@ def attach(part: Part, target: Part, att: genome.Attachment): # bevel_obj.parent = part.obj # Cut any cutters from the parent - cutter_extras = [o for o in butil.iter_object_tree(part.obj) if 'Cutter' in o.name] + cutter_extras = [o for o in butil.iter_object_tree(part.obj) if "Cutter" in o.name] for o in cutter_extras: sanitize_for_boolean(o) - butil.modify_mesh(target.obj, 'BOOLEAN', object=o, operation='DIFFERENCE', apply=True, solver='FAST') + butil.modify_mesh( + target.obj, + "BOOLEAN", + object=o, + operation="DIFFERENCE", + apply=True, + solver="FAST", + ) butil.delete(o) @@ -281,30 +311,32 @@ def genome_to_creature(genome: genome.CreatureGenome, name: str): for i, (part, cnode) in enumerate(zip(parts, genome.parts)): factory_class = cnode.part_factory.__class__.__name__ - part.obj.name = f'{name}.parts({i}, factory={factory_class})' - part.obj['factory_class'] = factory_class - part.obj['index'] = i + part.obj.name = f"{name}.parts({i}, factory={factory_class})" + part.obj["factory_class"] = factory_class + part.obj["index"] = i for extra in part.obj.children: - extra.name = f'{name}.parts({i}).extra({extra.name}, {i})' + extra.name = f"{name}.parts({i}).extra({extra.name}, {i})" extra.parent = part.obj - extra['factory_class'] = factory_class - extra['index'] = i + extra["factory_class"] = factory_class + extra["index"] = i # write attribute values that must come before posing/arrangement - logger.debug(f'Writing local attributes') + logger.debug("Writing local attributes") for i, (part, genode) in enumerate(tree.tzip(parts, genome.parts)): tags = genode.part_factory.tags write_local_attributes(part, i, tags) - for genome, (parent, part) in zip(tree.iter_items(genome.parts, postorder=True), - tree.iter_parent_child(parts, postorder=True)): + for genome, (parent, part) in zip( + tree.iter_items(genome.parts, postorder=True), + tree.iter_parent_child(parts, postorder=True), + ): if parent is None: continue # root object doesnt need attaching - logger.debug(f'Attaching {part} to {parent}') + logger.debug(f"Attaching {part} to {parent}") attach(part, parent, genome.att) # write any attributes that must come after posign/arrangement - logger.debug(f'Writing global attributes') + logger.debug("Writing global attributes") for part in parts: write_global_attributes(part) @@ -315,7 +347,7 @@ def genome_to_creature(genome: genome.CreatureGenome, name: str): p.attach_basemesh = None p._bvh = None with butil.SelectObjects(p.obj): - bpy.ops.object.parent_clear(type='CLEAR_KEEP_TRANSFORM') + bpy.ops.object.parent_clear(type="CLEAR_KEEP_TRANSFORM") p.obj.parent = root return root, parts diff --git a/infinigen/assets/creatures/util/creature_parser.py b/infinigen/assets/objects/creatures/util/creature_parser.py similarity index 61% rename from infinigen/assets/creatures/util/creature_parser.py rename to infinigen/assets/objects/creatures/util/creature_parser.py index 
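The `write_global_attributes` hunk above stores, for every vertex of a part mesh, its distance to the nearest skeleton point plus the location of that point and of its parent. A minimal NumPy-only sketch of that nearest-skeleton computation, with illustrative array names and no Blender objects, might look like:

```python
import numpy as np


def nearest_skeleton_attributes(verts, skeleton):
    """Sketch: per-vertex distance/location of the closest skeleton point.

    verts:    (V, 3) world-space vertex positions (illustrative input)
    skeleton: (S, 3) world-space skeleton polyline
    """
    # pairwise distances between every vertex and every skeleton point -> (V, S)
    dists = np.linalg.norm(
        skeleton.reshape(1, -1, 3) - verts.reshape(-1, 1, 3), axis=-1
    )

    closest_idx = dists.argmin(axis=1)                 # nearest skeleton point per vertex
    rads = dists[np.arange(len(verts)), closest_idx]   # its distance, i.e. a local radius

    # location of the parent of the nearest skeleton point, clamped at the root
    parent_loc = skeleton[np.clip(closest_idx - 1, 0, len(skeleton) - 1)]
    if len(skeleton) > 1:
        # extrapolate a parent for vertices whose nearest point is the root
        parent_loc[closest_idx == 0] = skeleton[0] - (skeleton[1] - skeleton[0])

    return rads, skeleton[closest_idx], parent_loc


verts = np.random.rand(100, 3)
skeleton = np.linspace([0.0, 0.0, 0.0], [1.0, 0.0, 0.0], 8)
rads, skel_loc, parent_loc = nearest_skeleton_attributes(verts, skeleton)
```

In the diff itself the three results are written back to the mesh as the `skeleton_rad`, `skeleton_loc` and `parent_skeleton_loc` point attributes.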
5f6560432..4518ce26b 100644 --- a/infinigen/assets/creatures/util/creature_parser.py +++ b/infinigen/assets/objects/creatures/util/creature_parser.py @@ -5,70 +5,77 @@ from pathlib import Path -import pdb import numpy as np +from infinigen.assets.utils.geometry import lofting +from infinigen.core.nodes.node_transpiler.transpiler import indent from infinigen.core.util import blender as butil -from infinigen.core.nodes.node_transpiler.transpiler import transpile, indent -from infinigen.assets.creatures.util.geometry import lofting + def prefix(): return ( "import numpy as np\n" - "from infinigen.assets.creatures.util.creature import CreatureGenome, PartGenome, Attachment, Joint\n" + "from infinigen.assets.objects.creatures.util.creature import CreatureGenome, PartGenome, Attachment, Joint\n" ) + def repr_np_array(v): v = np.round(v, 3) - return f'np.{repr(v)}' + return f"np.{repr(v)}" + def basename(obj): - return obj.name.split('.')[0] + return obj.name.split(".")[0] -def parse_nurbs_data(obj, i=0): - ''' +def parse_nurbs_data(obj, i=0): + """ Given a blender object, read it's handles out as a (n,m,3) vertex array TODO: Read out knotvector. Function should yield all data necessary to define that NURBS - ''' - - assert obj.type == 'SURFACE' - + """ + + assert obj.type == "SURFACE" + spline = obj.data.splines[i] m, n = spline.point_count_u, spline.point_count_v - + points = np.array([p.co for p in spline.points]) points = points.reshape(n, m, -1) return points + def parse_part(nurbs_part, mesh_part, profiles_folder): - name = basename(nurbs_part) part_genome_kwargs = {} handles = parse_nurbs_data(nurbs_part) - skeleton, ts, rads, profiles_norm = lofting.factorize_nurbs_handles(handles[..., :-1]) + skeleton, ts, rads, profiles_norm = lofting.factorize_nurbs_handles( + handles[..., :-1] + ) - part_genome_kwargs['skeleton'] = repr_np_array(skeleton) - part_genome_kwargs['rads'] = repr_np_array(rads.reshape(-1)) + part_genome_kwargs["skeleton"] = repr_np_array(skeleton) + part_genome_kwargs["rads"] = repr_np_array(rads.reshape(-1)) - path = Path(profiles_folder)/f'profile_{name}.npy' + path = Path(profiles_folder) / f"profile_{name}.npy" np.save(path, profiles_norm) - print(f'Saving {path}') - part_genome_kwargs['profile'] = f'np.load({repr(str(path))})' + print(f"Saving {path}") + part_genome_kwargs["profile"] = f"np.load({repr(str(path))})" body = f"return {repr_function_call('PartGenome', part_genome_kwargs)}" - code = f'def {name}():\n' + indent(body) + code = f"def {name}():\n" + indent(body) return name, code + def find_approx_uvr_coord(child, parent_mesh, parent_nurbs): - assert parent_mesh.type == 'MESH' + assert parent_mesh.type == "MESH" loc = np.array(child.matrix_world.translation) - verts = np.array([parent_mesh.matrix_world @ v.co for v in parent_mesh.data.vertices]) + verts = np.array( + [parent_mesh.matrix_world @ v.co for v in parent_mesh.data.vertices] + ) dists = np.linalg.norm(verts - loc, axis=-1) i = dists.argmin() @@ -88,29 +95,30 @@ def find_approx_uvr_coord(child, parent_mesh, parent_nurbs): return np.array([u, v, r]) -def parse_attachment(part, parent_mesh, parent_nurbs): +def parse_attachment(part, parent_mesh, parent_nurbs): uvr = find_approx_uvr_coord(part, parent_mesh, parent_nurbs) - kwargs = { - 'target': repr(basename(parent_mesh)), - 'coord': tuple(np.round(uvr, 2)), - 'joint': f'Joint(rest={tuple(np.round(part.rotation_euler, 2))})', + kwargs = { + "target": repr(basename(parent_mesh)), + "coord": tuple(np.round(uvr, 2)), + "joint": 
f"Joint(rest={tuple(np.round(part.rotation_euler, 2))})", } - return repr_function_call('Attachment', kwargs, spacing=' ') + return repr_function_call("Attachment", kwargs, spacing=" ") + + +def repr_function_call(funcname, kwargs, spacing="\n", multiline=True): + kwargs_str = f",{spacing}".join([f"{k}={v}" for k, v in kwargs.items()]) + paren_sep = "\n" if multiline else "" + return f"{funcname}({paren_sep}{indent(kwargs_str)}{paren_sep})" -def repr_function_call(funcname, kwargs, spacing='\n', multiline=True): - kwargs_str = f',{spacing}'.join([f'{k}={v}' for k, v in kwargs.items()]) - paren_sep = '\n' if multiline else '' - return f'{funcname}({paren_sep}{indent(kwargs_str)}{paren_sep})' def parse_creature(nurbs_root, mesh_root, profiles_folder): - - assert nurbs_root.type == 'SURFACE' - assert mesh_root.type == 'MESH' + assert nurbs_root.type == "SURFACE" + assert mesh_root.type == "MESH" - code = prefix() + '\n' + code = prefix() + "\n" nurbs_parts = list(butil.iter_object_tree(nurbs_root)) mesh_parts = list(butil.iter_object_tree(mesh_root)) @@ -119,20 +127,21 @@ def parse_creature(nurbs_root, mesh_root, profiles_folder): names = [] atts = {} for nurbs_part, mesh_part in zip(nurbs_parts, mesh_parts): - assert basename(nurbs_part) == basename(mesh_part) - print(f'Processing {basename(nurbs_part)}') + print(f"Processing {basename(nurbs_part)}") name, new_code = parse_part(nurbs_part, mesh_part, profiles_folder) names.append(name) - code += new_code + '\n\n' + code += new_code + "\n\n" if mesh_part.parent is not None: - atts[name] = parse_attachment(mesh_part, mesh_part.parent, nurbs_part.parent) + atts[name] = parse_attachment( + mesh_part, mesh_part.parent, nurbs_part.parent + ) joiningome_args = { - 'parts': repr_function_call('dict', {name: f'{name}()' for name in names}), - 'attachments': repr_function_call('dict', atts) + "parts": repr_function_call("dict", {name: f"{name}()" for name in names}), + "attachments": repr_function_call("dict", atts), } body = f"return {repr_function_call('CreatureGenome', joiningome_args)}" diff --git a/infinigen/assets/creatures/util/creature_util.py b/infinigen/assets/objects/creatures/util/creature_util.py similarity index 75% rename from infinigen/assets/creatures/util/creature_util.py rename to infinigen/assets/objects/creatures/util/creature_util.py index 4335874bd..7dfe5622b 100644 --- a/infinigen/assets/creatures/util/creature_util.py +++ b/infinigen/assets/objects/creatures/util/creature_util.py @@ -4,44 +4,46 @@ # Authors: Alexander Raistrick -from dataclasses import dataclass import numbers -import bpy import mathutils -from mathutils import Vector, Euler, Quaternion - import numpy as np +from mathutils import Euler, Quaternion, Vector -from infinigen.core.util.math import lerp from infinigen.core.util import blender as butil +from infinigen.core.util.math import lerp + def euler(r, p, y): return mathutils.Euler(np.deg2rad([r, p, y])).to_quaternion() -def interp_dict(a: dict, b: dict, t: float, keys='assert', fill=0, recurse=True, lerp=lerp): - ''' +def interp_dict( + a: dict, b: dict, t: float, keys="assert", fill=0, recurse=True, lerp=lerp +): + """ keys: 'a', 'b', 'intersect', 'union', 'asset', 'switch' - ''' + """ - if keys == 'switch': - keys = 'b' if t > 0.5 else 'a' + if keys == "switch": + keys = "b" if t > 0.5 else "a" - if keys == 'assert': + if keys == "assert": if not a.keys() == b.keys(): - raise ValueError(f'lerp_dict(..., {keys=}) recieved {a.keys()=}, {b.keys()}=') + raise ValueError( + f"lerp_dict(..., {keys=}) recieved 
{a.keys()=}, {b.keys()}=" + ) out_keys = a.keys() - elif keys == 'a': + elif keys == "a": out_keys = a.keys() - elif keys == 'b': + elif keys == "b": out_keys = b.keys() - elif keys == 'union': + elif keys == "union": out_keys = set(a.keys()).union(b.keys()) - elif keys == 'intersect': + elif keys == "intersect": out_keys = set(a.keys()).intersection(b.keys()) else: - raise ValueError(f'Unrecognized lerp_dict(..., {keys=})') + raise ValueError(f"Unrecognized lerp_dict(..., {keys=})") res = {} for k in out_keys: @@ -50,16 +52,18 @@ def interp_dict(a: dict, b: dict, t: float, keys='assert', fill=0, recurse=True, elif k not in a: res[k] = b[k] elif recurse and isinstance(a[k], dict): - res[k] = interp_dict(a[k], b[k], t, keys=keys, fill=fill, recurse=recurse, lerp=lerp) + res[k] = interp_dict( + a[k], b[k], t, keys=keys, fill=fill, recurse=recurse, lerp=lerp + ) elif isinstance(a[k], numbers.Number) or isinstance(a[k], np.ndarray): res[k] = lerp(a[k], b[k], t) else: - raise TypeError(f'interp_dict could not handle {type(a[k])=}') + raise TypeError(f"interp_dict could not handle {type(a[k])=}") return res -def polar_skeleton(rads, eulers): +def polar_skeleton(rads, eulers): assert len(rads.shape) == 1 # if too few eulers are provided, we will assume the user only cares about the latter angles @@ -83,8 +87,8 @@ def polar_skeleton(rads, eulers): return positions + def offset_center(obj, x=True, z=True): - # find all bbox corners vs = [] for ob in butil.iter_object_tree(obj): @@ -97,4 +101,4 @@ def offset_center(obj, x=True, z=True): zoff = -vs[:, -1].min() if z else 0 offset = mathutils.Vector((xoff, 0, zoff)) for ob in obj.children: - ob.location += offset \ No newline at end of file + ob.location += offset diff --git a/infinigen/assets/creatures/util/genome.py b/infinigen/assets/objects/creatures/util/genome.py similarity index 74% rename from infinigen/assets/creatures/util/genome.py rename to infinigen/assets/objects/creatures/util/genome.py index c60ed5334..dc80a78d0 100644 --- a/infinigen/assets/creatures/util/genome.py +++ b/infinigen/assets/objects/creatures/util/genome.py @@ -4,29 +4,27 @@ # Authors: Alexander Raistrick -from dataclasses import dataclass, field +import copy import itertools import typing -import pdb -import copy +from dataclasses import dataclass, field import numpy as np -from scipy.sparse.csgraph import maximum_bipartite_matching from scipy.sparse import csr_matrix +from scipy.sparse.csgraph import maximum_bipartite_matching -from infinigen.assets.creatures.util.tree import Tree -from infinigen.assets.creatures.util.creature_util import interp_dict - +from infinigen.assets.objects.creatures.util.creature_util import interp_dict +from infinigen.assets.objects.creatures.util.tree import Tree from infinigen.core.util.math import lerp @dataclass class IKParams: name: str - chain_parts: int = 1 # how many parts up the hierarchy can this IK affect + chain_parts: int = 1 # how many parts up the hierarchy can this IK affect chain_length: int = None rotation_weight: float = 0 - mode: str = 'iksolve' # iksolve, pin + mode: str = "iksolve" # iksolve, pin target_size: float = 0.2 @@ -56,7 +54,7 @@ class Attachment: joint: Joint = None bridge: str = None side: int = 1 - rotation_basis: str = 'global' + rotation_basis: str = "global" bridge_rad: float = 0.0 smooth_rad: float = 0.0 @@ -86,7 +84,7 @@ def match_cost(a: Tree, b: Tree): cost_matrix = cost_matrix.reshape(len(a), len(b)) cost_matrix = csr_matrix(cost_matrix) - perm = maximum_bipartite_matching(-cost_matrix, 
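`interp_dict` above blends two parameter dictionaries recursively, with several key-handling modes. A simplified standalone sketch of the core idea, covering only the strict matching-keys case and using a hypothetical `lerp` helper:

```python
import numbers

import numpy as np


def lerp(a, b, t):
    return a * (1 - t) + b * t


def interp_dict_sketch(a: dict, b: dict, t: float) -> dict:
    """Sketch: recursively interpolate two dicts that share the same keys."""
    if a.keys() != b.keys():
        raise ValueError(f"expected matching keys, got {a.keys()=} {b.keys()=}")
    out = {}
    for k in a:
        if isinstance(a[k], dict):
            out[k] = interp_dict_sketch(a[k], b[k], t)  # recurse into nested params
        elif isinstance(a[k], (numbers.Number, np.ndarray)):
            out[k] = lerp(a[k], b[k], t)                # numeric params get blended
        else:
            raise TypeError(f"cannot interpolate {type(a[k])=}")
    return out


interp_dict_sketch({"length": 1.0, "limb": {"rad": 0.2}},
                   {"length": 2.0, "limb": {"rad": 0.4}}, t=0.5)
# -> {"length": 1.5, "limb": {"rad": 0.3}}
```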
perm_type='column') + perm = maximum_bipartite_matching(-cost_matrix, perm_type="column") res = [] for ai, bi in enumerate(perm): @@ -117,18 +115,24 @@ def interp_attachment(a: Attachment, b: Attachment, t: float): joint = Joint(rest=lerp(a.joint.rest, b.joint.rest, t), bounds=s.joint.bounds) - att = Attachment(coord=lerp(a.coord, b.coord, t), joint=joint, bridge=s.bridge, side=s.side) + att = Attachment( + coord=lerp(a.coord, b.coord, t), joint=joint, bridge=s.bridge, side=s.side + ) return att def interp_creature_node(a: CreatureNode, b: CreatureNode, t): - s = b if t > 0.5 else a # which of a,b should we take non-interpolatable things from + s = ( + b if t > 0.5 else a + ) # which of a,b should we take non-interpolatable things from fac = copy.copy(s.part_factory) - fac.params = interp_dict(a.part_factory.params, b.part_factory.params, t, keys='switch', lerp=lerp_any) + fac.params = interp_dict( + a.part_factory.params, b.part_factory.params, t, keys="switch", lerp=lerp_any + ) - #att = interp_attachment(a.att, b.att, t) - att = a.att # TODO: Enable attachment interp later, debug symmetry + # att = interp_attachment(a.att, b.att, t) + att = a.att # TODO: Enable attachment interp later, debug symmetry return CreatureNode(part_factory=fac, att=att) @@ -158,30 +162,43 @@ def interp_genome(a: CreatureGenome, b: CreatureGenome, t: float) -> CreatureGen return a elif t == 1: return b - - #postprocess = interp_dict(a.postprocess_params, b.postprocess_params, t, recurse=True, keys='switch') - #TODO a.postprocess_params + + # postprocess = interp_dict(a.postprocess_params, b.postprocess_params, t, recurse=True, keys='switch') + # TODO a.postprocess_params postprocess = a.postprocess_params - return CreatureGenome(parts=interp_part_tree(a.parts, b.parts, t), - postprocess_params=postprocess) + return CreatureGenome( + parts=interp_part_tree(a.parts, b.parts, t), postprocess_params=postprocess + ) ################ # Syntactic sugar to make defining trees of part params less verbose ################ + def part(fac): return Tree(CreatureNode(fac, None)) -def attach(child: Tree, parent: Tree, coord=None, joint=None, bridge=None, side=1, rotation_basis='global', - bridge_rad=.0, smooth_rad=.0): +def attach( + child: Tree, + parent: Tree, + coord=None, + joint=None, + bridge=None, + side=1, + rotation_basis="global", + bridge_rad=0.0, + smooth_rad=0.0, +): assert child.item.att is None if coord is None: coord = np.array([0, 0, 0]) if joint is None: joint = Joint((0, 0, 0)) - child.item.att = Attachment(coord, joint, bridge, side, rotation_basis, bridge_rad, smooth_rad) + child.item.att = Attachment( + coord, joint, bridge, side, rotation_basis, bridge_rad, smooth_rad + ) parent.children.append(child) return parent diff --git a/infinigen/assets/creatures/util/geonode_part.py b/infinigen/assets/objects/creatures/util/geonode_part.py similarity index 57% rename from infinigen/assets/creatures/util/geonode_part.py rename to infinigen/assets/objects/creatures/util/geonode_part.py index ad7b91e4b..c42b7b6a0 100644 --- a/infinigen/assets/creatures/util/geonode_part.py +++ b/infinigen/assets/objects/creatures/util/geonode_part.py @@ -5,14 +5,15 @@ import numpy as np -from infinigen.assets.creatures.util.creature import Part, Joint, infer_skeleton_from_mesh +from infinigen.assets.objects.creatures.util.creature import ( + Part, + infer_skeleton_from_mesh, +) +from infinigen.assets.utils.extract_nodegroup_parts import extract_nodegroup_geo from infinigen.core.util import blender as butil -from 
infinigen.core.nodes.node_wrangler import NodeWrangler, Nodes, geometry_node_group_empty_new -from infinigen.assets.utils.extract_nodegroup_parts import extract_nodegroup_geo class GeonodePartFactory: - def __init__(self, nodegroup_func, joints=None): self.nodegroup_func = nodegroup_func self.joints = joints @@ -21,40 +22,42 @@ def __init__(self, nodegroup_func, joints=None): def base_obj(self): # May be overridden - return butil.spawn_vert('temp') + return butil.spawn_vert("temp") def params(self): # Must be overridden - raise NotImplementedError(f'{self.__class__} did not override abstract base method GeonodePartFactory.params') + raise NotImplementedError( + f"{self.__class__} did not override abstract base method GeonodePartFactory.params" + ) def _extract_geo_results(self): - ng_params = self.species_params with butil.TemporaryObject(self.base_obj()) as base_obj: ng = self.nodegroup_func() - geo_outputs = [o for o in ng.outputs if o.bl_socket_idname == 'NodeSocketGeometry'] + geo_outputs = [ + o for o in ng.outputs if o.bl_socket_idname == "NodeSocketGeometry" + ] results = { - o.name: extract_nodegroup_geo( - base_obj, ng, o.name, ng_params=ng_params - ) + o.name: extract_nodegroup_geo(base_obj, ng, o.name, ng_params=ng_params) for o in geo_outputs } - + return results def __call__(self): - objs = self._extract_geo_results() - skin_obj = objs.pop('Geometry') - attach_basemesh = objs.pop('Base Mesh', None) + skin_obj = objs.pop("Geometry") + attach_basemesh = objs.pop("Base Mesh", None) - if 'Skeleton Curve' in objs: - skeleton_obj = objs.pop('Skeleton Curve') + if "Skeleton Curve" in objs: + skeleton_obj = objs.pop("Skeleton Curve") skeleton = np.array([v.co for v in skeleton_obj.data.vertices]) if len(skeleton) == 0: - raise ValueError(f"Skeleton export failed for {self}, {skeleton_obj}, got {skeleton.shape=}") + raise ValueError( + f"Skeleton export failed for {self}, {skeleton_obj}, got {skeleton.shape=}" + ) butil.delete(skeleton_obj) else: skeleton = infer_skeleton_from_mesh(skin_obj) @@ -62,17 +65,9 @@ def __call__(self): # Handle any 'Extras' exported by the nodegroup for k, o in objs.items(): o.name = k - o.mesh.name = k + '.mesh' + o.mesh.name = k + ".mesh" o.parent = skin_obj return Part( - skeleton, - obj=skin_obj, - attach_basemesh=attach_basemesh, - joints=self.joints + skeleton, obj=skin_obj, attach_basemesh=attach_basemesh, joints=self.joints ) - - - - - \ No newline at end of file diff --git a/infinigen/assets/objects/creatures/util/hair.py b/infinigen/assets/objects/creatures/util/hair.py new file mode 100644 index 000000000..14708f302 --- /dev/null +++ b/infinigen/assets/objects/creatures/util/hair.py @@ -0,0 +1,584 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
+ +# Authors: Alexander Raistrick +# Acknowledgement: This file draws inspiration from https://www.youtube.com/watch?v=dCIKH649gac by Hey Pictures + +import logging +import warnings + +import bpy +import numpy as np +from scipy.spatial import KDTree + +from infinigen.assets.utils.nodegroups.hair import ( + nodegroup_comb_direction, + nodegroup_comb_hairs, + nodegroup_duplicate_to_clumps, + nodegroup_hair_length_rescale, + nodegroup_hair_position, + nodegroup_snap_roots_to_surface, + nodegroup_strand_noise, +) +from infinigen.core import surface +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.util import blender as butil + +logger = logging.getLogger(__name__) + + +def add_hair_particles(obj, params, props): + _, mod = butil.modify_mesh(obj, "PARTICLE_SYSTEM", apply=False, return_mod=True) + + settings = mod.particle_system.settings + settings.type = "HAIR" + for k, v in params.items(): + setattr(settings, k, v) + + for k, v in props.items(): + setattr(mod.particle_system, k, v) + + +def as_hair_bsdf(mat, hair_bsdf_params): + assert mat.use_nodes + + new_mat = mat.copy() + new_mat.name = f"as_hair_bsdf({mat.name})" + ng = new_mat.node_tree + + def child(inp): + return next(link.from_node for link in ng.links if link.to_socket == inp) + + try: + out = ng.nodes["Material Output"] + shader = child(out.inputs["Surface"]) + rgb = child(shader.inputs["Base Color"]) + except StopIteration: + # shader didnt match expected structure, abort and use original shader + warnings.warn( + f"as_hair_bsdf failed for {mat.name=}, did not match expected structure" + ) + return new_mat + + nw = NodeWrangler(ng) + hair_bsdf = nw.new_node( + Nodes.PrincipledHairBSDF, input_kwargs={"Color": rgb, **hair_bsdf_params} + ) + nw.new_node(Nodes.MaterialOutput, input_kwargs={"Surface": hair_bsdf}) + + return new_mat + + +def compute_hair_placement_vertgroup(obj, root, avoid_features_dist): + avoid_types = ["Eyeball", "Teeth", "Tongue"] # , 'Nose'] + extras = [o for o in butil.iter_object_tree(root) if "extra" in o.name] + avoid_extras = [o for o in extras if any(n in o.name for n in avoid_types)] + + avoid_verts = [] + for o in avoid_extras: + for v in o.data.vertices: + avoid_verts.append(o.matrix_world @ v.co) + avoid_verts = np.array(avoid_verts).reshape(-1, 3) + + verts = np.array([obj.matrix_world @ v.co for v in obj.data.vertices]) + if len(avoid_verts): + kd = KDTree(avoid_verts) + dists, _ = kd.query(verts, k=1) + else: + dists = np.full(len(verts), 1e5) + + tag_bald_mask = np.zeros(len(verts), dtype=np.float32) + if "tag_bald" in obj.data.attributes: + obj.data.attributes["tag_bald"].data.foreach_get("value", tag_bald_mask) + + idxs = np.where((dists > avoid_features_dist) & (tag_bald_mask < 0.5))[0] + + group = obj.vertex_groups.new(name="hair_placement") + group.add( + idxs.tolist(), 1.0, "ADD" + ) # .tolist() necessary to avoid np.int64 type error + + return group + + +@node_utils.to_nodegroup( + "nodegroup_decode_noise", singleton=True, type="GeometryNodeTree" +) +def nodegroup_decode_noise(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVector", "MinMaxScale", (0.0, 0.0, 0.0)), + ("NodeSocketGeometry", "Source", None), + ("NodeSocketVector", "Source Position", (0.0, 0.0, 0.0)), + ], + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": group_input.outputs["MinMaxScale"]} + ) + + 
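`compute_hair_placement_vertgroup` above restricts hair growth to vertices that are far from "avoid" geometry (eyes, teeth, tongue) and not tagged bald, using a KD-tree distance query. A standalone sketch of that masking step with plain arrays in place of Blender meshes (the threshold and array names are illustrative):

```python
import numpy as np
from scipy.spatial import KDTree


def hair_placement_mask(verts, avoid_verts, bald_mask, avoid_dist=0.05):
    """Sketch: indices of vertices eligible for hair placement."""
    if len(avoid_verts):
        kd = KDTree(avoid_verts)
        dists, _ = kd.query(verts, k=1)   # distance to the nearest avoid-feature point
    else:
        dists = np.full(len(verts), 1e5)  # nothing to avoid

    # keep vertices that are far from avoid features and not marked bald
    return np.where((dists > avoid_dist) & (bald_mask < 0.5))[0]


verts = np.random.rand(1000, 3)
avoid_verts = np.random.rand(20, 3)
bald_mask = np.zeros(len(verts), dtype=np.float32)
keep_idxs = hair_placement_mask(verts, avoid_verts, bald_mask)
```

In the diff, the surviving indices are then added to a `hair_placement` vertex group that weights the particle system density.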
noise_texture = nw.new_node( + Nodes.MusgraveTexture, + input_kwargs={"Scale": separate_xyz.outputs["Z"], "Detail": 5.0}, + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": noise_texture.outputs["Fac"], + 3: separate_xyz.outputs["X"], + 4: separate_xyz.outputs["Y"], + }, + ) + + transfer_attribute = nw.new_node( + Nodes.SampleNearestSurface, + input_kwargs={ + "Mesh": group_input.outputs["Source"], + "Value": map_range_1.outputs["Result"], + "Sample Position": group_input.outputs["Source Position"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Attribute": (transfer_attribute, "Value")} + ) + + +@node_utils.to_nodegroup( + "nodegroup_hair_grooming", singleton=True, type="GeometryNodeTree" +) +def nodegroup_hair_grooming(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketObject", "Object", None), + ("NodeSocketVector", "Length MinMaxScale", (0.014, 0.04, 40.0)), + ("NodeSocketVector", "Puff MinMaxScale", (0.14, 0.40, 40.0)), + ("NodeSocketFloat", "Combing", 0.0), + ("NodeSocketFloat", "Strand Random Mag", 0.001), + ("NodeSocketFloat", "Strand Perlin Mag", 0.05), + ("NodeSocketFloat", "Strand Perlin Scale", 33.38), + ("NodeSocketInt", "Tuft Amount", 1), + ("NodeSocketFloat", "Tuft Spread", 0.005), + ("NodeSocketFloat", "Tuft Clumping", 0.5), + ("NodeSocketFloat", "Root Radius", 0.01), + ("NodeSocketFloat", "Post Clump Noise Mag", 0.0), + ("NodeSocketFloat", "Hair Length Pct Min", 0.7), + ], + ) + + hairposition = nw.new_node( + nodegroup_hair_position().name, + input_kwargs={"Curves": group_input.outputs["Geometry"]}, + ) + + object_info = nw.new_node( + Nodes.ObjectInfo, input_kwargs={"Object": group_input.outputs["Object"]} + ) + + combdirection = nw.new_node( + nodegroup_comb_direction().name, + input_kwargs={ + "Surface": object_info.outputs["Geometry"], + "Root Positiion": hairposition.outputs["Root Position"], + }, + ) + + decode_length = nw.new_node( + nodegroup_decode_noise().name, + input_kwargs={ + "MinMaxScale": group_input.outputs["Length MinMaxScale"], + "Source": object_info.outputs["Geometry"], + "Source Position": hairposition.outputs["Root Position"], + }, + label="Decode Length", + ) + + decode_puff = nw.new_node( + nodegroup_decode_noise().name, + input_kwargs={ + "MinMaxScale": group_input.outputs["Puff MinMaxScale"], + "Source": object_info.outputs["Geometry"], + "Source Position": hairposition.outputs["Root Position"], + }, + label="Decode Puff", + ) + + combhairs = nw.new_node( + nodegroup_comb_hairs().name, + input_kwargs={ + "Curves": group_input.outputs["Geometry"], + "Root Position": hairposition.outputs["Root Position"], + "Comb Dir": combdirection.outputs["Combing Direction"], + "Surface Normal": combdirection.outputs["Surface Normal"], + "Length": decode_length, + "Puiff": group_input.outputs["Combing"], + "Comb": decode_puff, + }, + ) + + strandnoise = nw.new_node( + nodegroup_strand_noise().name, + input_kwargs={ + "Geometry": combhairs, + "Random Mag": group_input.outputs["Strand Random Mag"], + "Perlin Mag": group_input.outputs["Strand Perlin Mag"], + "Perlin Scale": group_input.outputs["Strand Perlin Scale"], + }, + ) + + duplicatetoclumps = nw.new_node( + nodegroup_duplicate_to_clumps().name, + input_kwargs={ + "Geometry": strandnoise, + "Surface Normal": combdirection.outputs["Surface Normal"], + "Amount": group_input.outputs["Tuft Amount"], 
+ "Tuft Spread": group_input.outputs["Tuft Spread"], + "Tuft Clumping": group_input.outputs["Tuft Clumping"], + }, + ) + + random_value = nw.new_node( + Nodes.RandomValue, + input_kwargs={0: (-1.0, -1.0, -1.0)}, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + scale = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: random_value.outputs["Value"], + "Scale": group_input.outputs["Post Clump Noise Mag"], + }, + attrs={"operation": "SCALE"}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={"Geometry": duplicatetoclumps, "Offset": scale.outputs["Vector"]}, + ) + + hairlengthrescale = nw.new_node( + nodegroup_hair_length_rescale().name, + input_kwargs={ + "Curves": set_position, + "Min": group_input.outputs["Hair Length Pct Min"], + }, + ) + + snaprootstosurface = nw.new_node( + nodegroup_snap_roots_to_surface().name, + input_kwargs={ + "Target": object_info.outputs["Geometry"], + "Curves": hairlengthrescale, + }, + ) + + spline_parameter = nw.new_node(Nodes.SplineParameter) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": spline_parameter.outputs["Factor"], + 3: group_input.outputs["Root Radius"], + 4: 0.0, + }, + ) + + set_curve_radius = nw.new_node( + Nodes.SetCurveRadius, + input_kwargs={ + "Curve": snaprootstosurface, + "Radius": map_range.outputs["Result"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_curve_radius} + ) + + +def mat_attr_dependencies(node_tree): + attrs = set() + for node in node_tree.nodes: + if node.bl_idname == Nodes.Attribute: + attrs.add(node.attribute_name) + elif node.bl_idname == "ShaderNodeGroup": + attrs = attrs | mat_attr_dependencies(node.node_tree) + + return attrs + + +def geo_transfer_hair_attributes(nw, obj, attrs): + group_input = nw.new_node(Nodes.GroupInput) + + hairposition = nw.new_node( + nodegroup_hair_position().name, + input_kwargs={"Curves": group_input.outputs["Geometry"]}, + ) + + object_info = nw.new_node(Nodes.ObjectInfo, input_kwargs={"Object": obj}) + + attrs_out = {} + for attr_name in attrs: + if attr_name not in obj.data.attributes: + logger.warn( + f"Attempted to geo_transfer_hair_attributes() including {attr_name=} which is not present on {obj=}. 
Available are {list(obj.data.attributes.keys())}" + ) + continue + + obj_attr = obj.data.attributes[attr_name] + + named_attr = nw.new_node( + Nodes.NamedAttribute, + attrs={"data_type": obj_attr.data_type}, + input_kwargs={"Name": attr_name}, + ) + transfer = nw.new_node( + Nodes.SampleNearestSurface, + attrs={"data_type": obj_attr.data_type}, + input_kwargs={ + "Mesh": object_info.outputs["Geometry"], + "Value": named_attr, + "Sample Position": hairposition, + }, + ) + attrs_out[attr_name] = (transfer, "Value") + + nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": group_input.outputs["Geometry"], **attrs_out}, + ) + + +def configure_hair(obj, root, hair_genome: dict, apply=True, is_dynamic=None): + if is_dynamic is None: + is_dynamic = any(m.type == "ARMATURE" for m in obj.modifiers) + + # re-parameterize density params + sa = butil.surface_area(obj) + count = int(sa * hair_genome["density"]) + n_guide_hairs = count // hair_genome["clump_n"] + hair_genome["grooming"]["Tuft Amount"] = hair_genome["clump_n"] + + logger.debug("Computing hair placement vertex group") + avoid_group = compute_hair_placement_vertgroup( + obj, root, avoid_features_dist=hair_genome["avoid_features_dist"] + ) + + logger.debug(f"Add particle system with {n_guide_hairs=}") + add_hair_particles( + obj, + params={"count": n_guide_hairs}, + props={"vertex_group_density": avoid_group.name}, + ) + + logger.debug("Converting particles to curves") + with butil.SelectObjects(obj): + for m in obj.modifiers: + if m.type == "PARTICLE_SYSTEM": + m.show_viewport = True + bpy.ops.curves.convert_from_particle_system() + curves = bpy.context.active_object + + with butil.SelectObjects(obj): + bpy.ops.object.particle_system_remove() + + logger.debug("Performing geonodes hair grooming") + with butil.DisableModifiers(obj): + _, mod = butil.modify_mesh(curves, "NODES", apply=False, return_mod=True) + mod.node_group = nodegroup_hair_grooming() + butil.set_geomod_inputs(mod, {"Object": obj, **hair_genome["grooming"]}) + + if apply: + butil.apply_modifiers(curves, mod=mod) + + curves.parent = obj + curves.matrix_parent_inverse = ( + obj.matrix_world.inverted() + ) # keep prexisting transform + curves.data.surface = obj + + if len(obj.material_slots) == 0: + return + + if obj.active_material is not None: + hair_mat = as_hair_bsdf(obj.active_material, hair_genome["material"]) + + logger.debug("Transfer material attr dependencies from surf to curves") + attr_deps = mat_attr_dependencies(hair_mat.node_tree) + attr_deps = [a for a in attr_deps if a in obj.data.attributes] + surface.add_geomod( + curves, + geo_transfer_hair_attributes, + apply=apply, + input_kwargs=dict(obj=obj, attrs=attr_deps), + attributes=attr_deps, + ) + curves.active_material = hair_mat + + if is_dynamic: + attach_hair_to_surface(curves, obj) + + curves.name = obj.name + ".hair_curves" + + return curves + + +@node_utils.to_nodegroup("nodegroup_transfer_uvs_to_curves_vec3", singleton=True) +def nodegroup_transfer_uvs_to_curves_vec3(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketObject", "Object", None), + ("NodeSocketString", "from_uv", None), + ("NodeSocketString", "to_attr", None), + ], + ) + + object_info = nw.new_node( + Nodes.ObjectInfo, + input_kwargs={"Object": group_input.outputs["Object"]}, + attrs={"transform_space": "RELATIVE"}, + ) + obj = object_info.outputs["Geometry"] + + domain = "POINT" + 
uvtype = "FLOAT_VECTOR" + + uv = nw.new_node( + Nodes.NamedAttribute, + input_kwargs={"Name": group_input.outputs["from_uv"]}, + attrs={"data_type": uvtype}, + ) + + capture = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={"Geometry": obj, "Value": uv}, + attrs={"data_type": uvtype, "domain": domain}, + ) + + root_pos = nw.new_node( + nodegroup_hair_position().name, [group_input.outputs["Geometry"]] + ) + + nearest_idx = nw.new_node( + Nodes.SampleNearest, + input_kwargs={ + "Geometry": capture.outputs["Geometry"], + "Sample Position": root_pos, + }, + attrs={"domain": domain}, + ) + # transfer_attribute = nw.new_node(Nodes.SampleNearest, + # input_kwargs={ + # 'Mesh': capture.outputs['Geometry'], + # 'Value': capture.outputs["Attribute"], + # 'Sample Position': root_pos + # }, + # attrs={'data_type': 'FLOAT_VECTOR'}) + transfer_attribute = nw.new_node( + Nodes.SampleIndex, + input_kwargs={ + "Geometry": capture.outputs["Geometry"], + "Index": nearest_idx.outputs["Index"], + "Value": capture.outputs["Attribute"], + }, + attrs={"data_type": uvtype, "domain": domain}, + ) + + store_named_attribute = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + "Name": group_input.outputs["to_attr"], + "Value": transfer_attribute, + }, + attrs={"data_type": "FLOAT_VECTOR", "domain": "CURVE"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": store_named_attribute} + ) + + +def transfer_uvs_to_curves(curves, target, uv_name): + # blender doesnt seem to support writing directly to FLOAT2 uv attributes. + # lets write to a FLOAT_VECTOR then change it over to a FLOAT2 + + curve_uv_attr = "surface_uv_coordinate" + butil.modify_mesh( + curves, + "NODES", + node_group=nodegroup_transfer_uvs_to_curves_vec3(), + ng_inputs={"Object": target, "from_uv": uv_name, "to_attr": curve_uv_attr}, + apply=True, + ) + + # rip uvs to np array + n = len(curves.data.curves) + uvs = np.empty(3 * n, dtype=np.float32) + attr = curves.data.attributes[curve_uv_attr] + assert attr.domain == "CURVE" and attr.data_type == "FLOAT_VECTOR" + attr.data.foreach_get("vector", uvs) + curves.data.attributes.remove(attr) + + # write back as FLOAT2 + uvs = uvs.reshape(n, 3)[:, :2].reshape(-1) + attr = curves.data.attributes.new(curve_uv_attr, type="FLOAT2", domain="CURVE") + attr.data.foreach_set("vector", uvs) + + +@node_utils.to_nodegroup("nodegroup_deform_curves_on_surface", singleton=True) +def nodegroup_deform_curves_on_surface(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) + + deform_curves_on_surface = nw.new_node( + "GeometryNodeDeformCurvesOnSurface", + input_kwargs={"Curves": group_input.outputs["Geometry"]}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": deform_curves_on_surface} + ) + + +def attach_hair_to_surface(curves, target): + # target object needs UVMap and rest_position attribute, + # curves obj needs surface_uv_coordinate attribute + # defined in https://docs.blender.org/manual/en/latest/modeling/geometry_nodes/curve/deform_curves_on_surface.html + + surface.write_attribute( + target, lambda nw: nw.new_node(Nodes.InputPosition), "rest_position", apply=True + ) + with butil.ViewportMode(target, mode="EDIT"): + bpy.ops.mesh.select_all(action="SELECT") + bpy.ops.uv.smart_project(island_margin=0.03) + assert len(target.data.uv_layers) > 0 + + 
curves.data.surface = target + curves.data.surface_uv_map = target.data.uv_layers[-1].name + transfer_uvs_to_curves(curves, target, curves.data.surface_uv_map) + + butil.modify_mesh( + curves, + "NODES", + apply=False, + show_viewport=True, + node_group=nodegroup_deform_curves_on_surface(), + ) diff --git a/infinigen/assets/creatures/util/join_smoothing.py b/infinigen/assets/objects/creatures/util/join_smoothing.py similarity index 72% rename from infinigen/assets/creatures/util/join_smoothing.py rename to infinigen/assets/objects/creatures/util/join_smoothing.py index 2a480f9da..6784e5ec0 100644 --- a/infinigen/assets/creatures/util/join_smoothing.py +++ b/infinigen/assets/objects/creatures/util/join_smoothing.py @@ -4,25 +4,24 @@ # Authors: Alexander Raistrick -import pdb -import warnings - -import bpy, mathutils +import bpy +import mathutils +import numpy as np +from mathutils import geometry from mathutils.bvhtree import BVHTree -from mathutils import geometry, Vector +from infinigen.assets.utils.geometry.nurbs import ( + blender_mesh_from_pydata, + compute_cylinder_topology, +) from infinigen.core.util import blender as butil -from infinigen.assets.creatures.util.geometry.nurbs import compute_cylinder_topology, blender_mesh_from_pydata - -import numpy as np def invert_line(line, point, eps=1e-4): - - ''' + """ assumes point is on line = (p, v) returns t st `p + v * t = point` - ''' + """ if line[0] is None or line[1] is None: raise ValueError() @@ -31,14 +30,15 @@ def invert_line(line, point, eps=1e-4): div = div[~np.isnan(div)] return div.mean() + def intersect_line_seg(line, seg): v1, v2 = seg line_start, line_dir = line res = geometry.intersect_line_line(line_start, line_start + line_dir, v1, v2) - + if res is None: return None, None - + lp, vp = res t = invert_line((v1, v2 - v1), vp) @@ -47,12 +47,12 @@ def intersect_line_seg(line, seg): return lp, vp -def find_poly_line_bounds(mesh, poly_idx, line, eps=1e-5): - ''' +def find_poly_line_bounds(mesh, poly_idx, line, eps=1e-5): + """ assumes `mesh.polygons[poly_idx]` is valid, convex, and contains `line` returns t1, t2 such that `for all t1 tmax: return None - + p0 = np.array(line[0] + tmin * line[1]) p1 = np.array(line[0] + tmax * line[1]) - + if not return_normals: return p0, p1 raise NotImplementedError - + + def normal_offset_verts(verts, pusher_bvh, snap_to_bvh, dist): offset_verts = np.empty_like(verts) for i, v in enumerate(verts): @@ -113,63 +118,78 @@ def normal_offset_verts(verts, pusher_bvh, snap_to_bvh, dist): offset_verts[i] = snapped return offset_verts -def compute_intersection_curve(a, b, a_bvh, b_bvh, simplify_thresh=1.5e-2): +def compute_intersection_curve(a, b, a_bvh, b_bvh, simplify_thresh=1.5e-2): overlap = a_bvh.overlap(b_bvh) segs = [intersect_poly_poly(a.data, b.data, ai, bi) for ai, bi in overlap] segs = np.array([s for s in segs if s is not None]) - + # join and merge by distance m = len(overlap) - loop_verts = segs.reshape(2*m, 3) - pair_edges = np.arange(2*m).reshape(-1, 2) + loop_verts = segs.reshape(2 * m, 3) + pair_edges = np.arange(2 * m).reshape(-1, 2) obj = blender_mesh_from_pydata(loop_verts, pair_edges, []) butil.merge_by_distance(obj, simplify_thresh) return obj + def create_bevel_connection( - a, b, a_bvh: BVHTree, b_bvh: BVHTree, - width: float, segments=9, - close_caps=True, intersection_curve=None, + a, + b, + a_bvh: BVHTree, + b_bvh: BVHTree, + width: float, + segments=9, + close_caps=True, + intersection_curve=None, ): - inter = intersection_curve or compute_intersection_curve(a, b, a_bvh, 
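`invert_line` above recovers the parameter t with p + v * t == point by dividing componentwise and averaging whatever components are well defined (zero direction components divide to NaN and are discarded). A standalone sketch of the same computation:

```python
import numpy as np


def invert_line_sketch(p, v, point):
    """Sketch: solve p + v * t == point for t, assuming `point` lies on the line."""
    p, v, point = (np.asarray(x, dtype=float) for x in (p, v, point))
    with np.errstate(divide="ignore", invalid="ignore"):
        t = (point - p) / v          # componentwise estimates of t; 0/0 -> nan
    t = t[np.isfinite(t)]            # discard components with zero direction
    return t.mean()


invert_line_sketch(p=[0, 0, 0], v=[2, 0, 0], point=[1, 0, 0])  # -> 0.5
```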
b_bvh) - verts = np.empty((len(inter.data.vertices), 3)) + verts = np.empty((len(inter.data.vertices), 3)) edges = np.empty((len(inter.data.edges), 2), dtype=int) inter.data.vertices.foreach_get("co", verts.ravel()) - inter.data.edges.foreach_get('vertices', edges.ravel()) + inter.data.edges.foreach_get("vertices", edges.ravel()) if intersection_curve is None: # only delete it if we made it ourselvse butil.delete(inter) if len(verts) < 3: - raise ValueError(f'create_bevel_connection({a=}, {b=}) had only {len(verts)=} intersecting points') + raise ValueError( + f"create_bevel_connection({a=}, {b=}) had only {len(verts)=} intersecting points" + ) a_offset = normal_offset_verts(verts, a_bvh, b_bvh, width) b_offset = normal_offset_verts(verts, b_bvh, a_bvh, width) - final_vert_parts = [a_offset, verts, b_offset ] + final_vert_parts = [a_offset, verts, b_offset] if close_caps: - close = lambda vs: np.ones_like(vs) * vs.mean(axis=0, keepdims=True) + + def close(vs): + return np.ones_like(vs) * vs.mean(axis=0, keepdims=True) + final_vert_parts = [close(a_offset)] + final_vert_parts + [close(b_offset)] final_verts = np.concatenate(final_vert_parts, axis=0) - final_edges, final_faces = compute_cylinder_topology(len(final_vert_parts), len(verts), cyclic=True, h_neighbors=edges) - final = blender_mesh_from_pydata(final_verts, final_edges.reshape(-1, 2), final_faces) + final_edges, final_faces = compute_cylinder_topology( + len(final_vert_parts), len(verts), cyclic=True, h_neighbors=edges + ) + final = blender_mesh_from_pydata( + final_verts, final_edges.reshape(-1, 2), final_faces + ) def select_loop(li): - in_loop = lambda vi: (li * len(verts) <= vi) and (vi < (li + 1) * len(verts)) + def in_loop(vi): + return li * len(verts) <= vi and vi < (li + 1) * len(verts) + for vi, v in enumerate(final.data.vertices): v.select = in_loop(vi) for e in final.data.edges: e.select = in_loop(e.vertices[0]) and in_loop(e.vertices[1]) - with butil.ViewportMode(final, 'EDIT'): - + with butil.ViewportMode(final, "EDIT"): if close_caps: select_loop(0) bpy.ops.mesh.mark_sharp() @@ -177,21 +197,21 @@ def select_loop(li): bpy.ops.mesh.mark_sharp() center_part_idx = next(i for i, v in enumerate(final_vert_parts) if v is verts) - select_loop(center_part_idx) - bpy.ops.mesh.bevel(offset_type='PERCENT', offset_pct=98, segments=segments) + select_loop(center_part_idx) + bpy.ops.mesh.bevel(offset_type="PERCENT", offset_pct=98, segments=segments) - bpy.ops.mesh.select_all(action='SELECT') - bpy.ops.mesh.quads_convert_to_tris(quad_method='BEAUTY', ngon_method='BEAUTY') + bpy.ops.mesh.select_all(action="SELECT") + bpy.ops.mesh.quads_convert_to_tris(quad_method="BEAUTY", ngon_method="BEAUTY") bpy.ops.mesh.normals_make_consistent(inside=False) return final + def smooth_around_line(obj, line_obj, rad, iters=30, factor=0.9): - - ''' + """ Assumes: polyline is fairly densely sampled with points, obj and line_obj have same transform - ''' + """ assert obj.matrix_world == line_obj.matrix_world @@ -202,22 +222,7 @@ def smooth_around_line(obj, line_obj, rad, iters=30, factor=0.9): ds = np.array([kd.find(v.co)[2] for v in obj.data.vertices]) for i, v in enumerate(obj.data.vertices): - v.select = (ds[i] < rad) + v.select = ds[i] < rad - with butil.ViewportMode(obj, mode='EDIT'): + with butil.ViewportMode(obj, mode="EDIT"): bpy.ops.mesh.vertices_smooth(repeat=iters, factor=0.9) - - - - - - - - - - - - - - - diff --git a/infinigen/assets/creatures/util/joining.py b/infinigen/assets/objects/creatures/util/joining.py similarity index 56% rename 
from infinigen/assets/creatures/util/joining.py rename to infinigen/assets/objects/creatures/util/joining.py index 08cb5998d..766ea3b88 100644 --- a/infinigen/assets/creatures/util/joining.py +++ b/infinigen/assets/objects/creatures/util/joining.py @@ -4,15 +4,13 @@ # Authors: Alexander Raistrick -import bpy import logging +import bpy import numpy as np -from infinigen.assets.creatures.util import tree, join_smoothing - -from infinigen.assets.creatures.util import rigging as creature_rigging - +from infinigen.assets.objects.creatures.util import join_smoothing, tree +from infinigen.assets.objects.creatures.util import rigging as creature_rigging from infinigen.core import surface from infinigen.core.placement import detail from infinigen.core.util import blender as butil @@ -20,22 +18,21 @@ logger = logging.getLogger(__name__) -def compute_joining_effects(genome, parts): - ''' - Compute all joining curves between parts +def compute_joining_effects(genome, parts): + """ + Compute all joining curves between parts (only those with some bridge or smooth rad specified in attachment params), - and compute any bridge parts requested. + and compute any bridge parts requested. ASSUMES: All parts have same matrix_world, and are triangles only - ''' + """ inter_curves, bridge_objs = {}, [] g_items = enumerate(tree.iter_items(genome.parts, postorder=True)) part_items = tree.iter_parent_child(parts, postorder=True) for (i, genome), (parent, part) in zip(g_items, part_items): - if genome.att is None: continue @@ -43,25 +40,36 @@ def compute_joining_effects(genome, parts): if not br > 0 and not sr > 0: continue - logger.debug(f'Computing joining geometry for {i=} with {br=} and {sr=}') + logger.debug(f"Computing joining geometry for {i=} with {br=} and {sr=}") try: inter = join_smoothing.compute_intersection_curve( - parent.obj, part.obj, parent.bvh(), part.bvh()) - inter.name = 'intersection_curve' + parent.obj, part.obj, parent.bvh(), part.bvh() + ) + inter.name = "intersection_curve" except ValueError as e: - logger.warning(f'join_smoothing.compute_intersection_curve for threw {e}, skipping') + logger.warning( + f"join_smoothing.compute_intersection_curve for threw {e}, skipping" + ) inter = None - + if inter is not None and len(inter.data.vertices) < 4: - logger.warning(f'join_smoothing.compute_intersection_curve found too few verts, skipping') + logger.warning( + "join_smoothing.compute_intersection_curve found too few verts, skipping" + ) inter = None if br > 0 and inter is not None: b = join_smoothing.create_bevel_connection( - parent.obj, part.obj, parent.bvh(), part.bvh(), - width=br, intersection_curve=inter, segments=5) - b.name = part.obj.name + '.bevel_connector' + parent.obj, + part.obj, + parent.bvh(), + part.bvh(), + width=br, + intersection_curve=inter, + segments=5, + ) + b.name = part.obj.name + ".bevel_connector" b.parent = parent.obj bridge_objs.append(b) @@ -69,9 +77,9 @@ def compute_joining_effects(genome, parts): return inter_curves, bridge_objs + def select_large_component(o, thresh=0.95, tries=5): - - with butil.ViewportMode(o, 'EDIT'): + with butil.ViewportMode(o, "EDIT"): bpy.ops.mesh.select_all(action="DESELECT") r = 0 @@ -79,9 +87,9 @@ def select_large_component(o, thresh=0.95, tries=5): o.data.vertices[r].select = False r = np.random.randint(len(o.data.vertices)) o.data.vertices[r].select = True - - with butil.ViewportMode(o, 'EDIT'): - bpy.ops.mesh.select_mode(type='VERT') + + with butil.ViewportMode(o, "EDIT"): + bpy.ops.mesh.select_mode(type="VERT") 
bpy.ops.mesh.select_linked() pct = np.array([v.select for v in o.data.vertices]).mean() @@ -90,38 +98,56 @@ def select_large_component(o, thresh=0.95, tries=5): return 0 + def join_and_rig_parts( - root, parts, genome, face_size, postprocess_func, - adaptive_resolution=True, adapt_mode='remesh', min_remesh_size=0.01, - smooth_joins=True, smooth_attrs=False, - rigging=False, constraints=False, rig_before_subdiv=False, - materials=True, roll='GLOBAL_POS_Y', - **_ + root, + parts, + genome, + face_size, + postprocess_func, + adaptive_resolution=True, + adapt_mode="remesh", + min_remesh_size=0.01, + smooth_joins=True, + smooth_attrs=False, + rigging=False, + constraints=False, + rig_before_subdiv=False, + materials=True, + roll="GLOBAL_POS_Y", + **_, ): - - body_parts = [o for o in root.children if o.type == 'MESH'] - extras = [o for o in butil.iter_object_tree(root) if not o in body_parts and o is not root] + body_parts = [o for o in root.children if o.type == "MESH"] + extras = [ + o for o in butil.iter_object_tree(root) if o not in body_parts and o is not root + ] if rigging: - logger.debug(f'Computing creature rig') - arma, ik_targets = creature_rigging.creature_rig(root, genome, parts, constraints=constraints, roll=roll) - arma.show_in_front=True + logger.debug("Computing creature rig") + arma, ik_targets = creature_rigging.creature_rig( + root, genome, parts, constraints=constraints, roll=roll + ) + arma.show_in_front = True with butil.SelectObjects(extras): - bpy.ops.object.parent_clear(type='CLEAR_KEEP_TRANSFORM') - - with butil.SelectObjects(body_parts), butil.CursorLocation(root.location), Suppress(): - # must convert to all transforms applied & triangles only, + bpy.ops.object.parent_clear(type="CLEAR_KEEP_TRANSFORM") + + with ( + butil.SelectObjects(body_parts), + butil.CursorLocation(root.location), + Suppress(), + ): + # must convert to all transforms applied & triangles only, # in case we want to do join_smoothing bpy.ops.object.transform_apply(location=True, rotation=True, scale=True) - bpy.ops.object.origin_set(type='ORIGIN_CURSOR') + bpy.ops.object.origin_set(type="ORIGIN_CURSOR") bpy.ops.object.mode_set(mode="EDIT") - bpy.ops.mesh.select_all(action='SELECT') + bpy.ops.mesh.select_all(action="SELECT") bpy.ops.mesh.remove_doubles(threshold=0.01) - bpy.ops.mesh.quads_convert_to_tris(quad_method='BEAUTY', ngon_method='BEAUTY') + bpy.ops.mesh.quads_convert_to_tris(quad_method="BEAUTY", ngon_method="BEAUTY") bpy.ops.mesh.normals_make_consistent(inside=False) - bpy.ops.object.mode_set(mode='OBJECT') - + bpy.ops.object.mode_set(mode="OBJECT") + # bvhs no longer valid due to transform / triangulate for p in parts: p._bvh = None @@ -130,7 +156,7 @@ def join_and_rig_parts( inter_curves, bridge_objs = compute_joining_effects(genome, parts) body_parts += bridge_objs - logger.debug(f'Joining {len(body_parts)=}') + logger.debug(f"Joining {len(body_parts)=}") joined = butil.join_objects(body_parts, check_attributes=False) body_parts = [joined] joined.parent = root @@ -138,76 +164,80 @@ def join_and_rig_parts( for o in extras: o.parent = root for p in parts: - p.obj = None # deleted by join, should not be referenced + p.obj = None # deleted by join, should not be referenced def rig(): - with Timer(f'Computing creature rig weights'): + with Timer("Computing creature rig weights"): with butil.SelectObjects(body_parts + extras, active=-1), Suppress(): bpy.ops.object.mode_set(mode="EDIT") - bpy.ops.mesh.select_all(action='SELECT') + bpy.ops.mesh.select_all(action="SELECT") 
bpy.ops.mesh.remove_doubles(threshold=0.001) bpy.ops.object.mode_set(mode="OBJECT") with butil.SelectObjects(body_parts + extras + [arma], active=-1): - bpy.ops.object.parent_set(type='ARMATURE_AUTO') + bpy.ops.object.parent_set(type="ARMATURE_AUTO") arma.parent = root if rigging and rig_before_subdiv: rig() if adaptive_resolution: + if adapt_mode == "remesh": + butil.modify_mesh(joined, "SUBSURF", levels=1) - if adapt_mode == 'remesh': - butil.modify_mesh(joined, 'SUBSURF', levels=1) - - logger.debug(f'Adapting {joined.name=}') - detail.adapt_mesh_resolution(joined, - face_size=max(face_size, min_remesh_size), - method=adapt_mode, apply=True) + logger.debug(f"Adapting {joined.name=}") + detail.adapt_mesh_resolution( + joined, + face_size=max(face_size, min_remesh_size), + method=adapt_mode, + apply=True, + ) # remeshing can create outlier islands that mess with rigging. Clear them out percent = select_large_component(joined, thresh=0.9) - if percent < 0.99: - logger.warning(f'Creature had largest component {percent=}') + if percent < 0.99: + logger.warning(f"Creature had largest component {percent=}") else: - with butil.ViewportMode(joined, 'EDIT'): - bpy.ops.mesh.select_all(action='INVERT') - bpy.ops.mesh.delete(type='VERT') - - #for e in extras: + with butil.ViewportMode(joined, "EDIT"): + bpy.ops.mesh.select_all(action="INVERT") + bpy.ops.mesh.delete(type="VERT") + + # for e in extras: # detail.adapt_mesh_resolution(e, face_size=face_size, method='subdivide', apply=True) # Apply smoothing around any intersection curves found before remeshing if adaptive_resolution and smooth_joins: - assert 'inter_curves' in locals() + assert "inter_curves" in locals() for i, g in enumerate(tree.iter_items(genome.parts, postorder=True)): - if g.att is None or g.att.smooth_rad == 0: continue - if not (l := inter_curves.get(i)): continue - logger.debug(f'Smoothing mesh geometry around {i, l}') + if g.att is None or g.att.smooth_rad == 0: + continue + if not (l := inter_curves.get(i)): + continue + logger.debug(f"Smoothing mesh geometry around {i, l}") join_smoothing.smooth_around_line(joined, l, g.att.smooth_rad) # Cleanup any remaining join-smoothing-curves - if smooth_joins and 'inter_curves' in locals(): + if smooth_joins and "inter_curves" in locals(): for o in inter_curves.values(): if o is None: continue butil.delete(o) - + if adaptive_resolution and smooth_attrs: for attr in joined.data.attributes.keys(): if butil.blender_internal_attr(attr): continue - logger.debug(f'Smoothing attr {attr}') + logger.debug(f"Smoothing attr {attr}") surface.smooth_attribute(joined, attr, iters=10) if materials: - logger.debug(f'Applying postprocess func') + logger.debug("Applying postprocess func") with butil.DisableModifiers(body_parts): postprocess_func(body_parts, extras, genome.postprocess_params) - - logger.debug(f'Finalizing material geomods') + + logger.debug("Finalizing material geomods") for o in body_parts: for m in o.modifiers: - if m.type == 'NODES': + if m.type == "NODES": butil.apply_modifiers(o, mod=m) if rigging and not rig_before_subdiv: diff --git a/infinigen/assets/creatures/util/part_util.py b/infinigen/assets/objects/creatures/util/part_util.py similarity index 68% rename from infinigen/assets/creatures/util/part_util.py rename to infinigen/assets/objects/creatures/util/part_util.py index e42805929..db4ecd0a7 100644 --- a/infinigen/assets/creatures/util/part_util.py +++ b/infinigen/assets/objects/creatures/util/part_util.py @@ -4,45 +4,52 @@ # Authors: Alexander Raistrick -import pdb -from 
pathlib import Path import logging import bpy import numpy as np -from infinigen.assets.creatures.util.creature import Part, PartFactory, infer_skeleton_from_mesh -from infinigen.assets.creatures.util.geometry import nurbs -from infinigen.core.util import blender as butil - -from infinigen.core.nodes.node_wrangler import NodeWrangler, Nodes - +from infinigen.assets.objects.creatures.util.creature import ( + Part, + infer_skeleton_from_mesh, +) from infinigen.assets.utils.extract_nodegroup_parts import extract_nodegroup_geo +from infinigen.assets.utils.geometry import nurbs +from infinigen.core.util import blender as butil -def nodegroup_to_part(nodegroup_func, params, kwargs=None, base_obj=None, split_extras=False): +def nodegroup_to_part( + nodegroup_func, params, kwargs=None, base_obj=None, split_extras=False +): if base_obj is None: - base_obj = butil.spawn_vert('temp') + base_obj = butil.spawn_vert("temp") with butil.TemporaryObject(base_obj) as base_obj: if kwargs is not None: ng = nodegroup_func(**kwargs) else: ng = nodegroup_func() - geo_outputs = [o for o in ng.outputs if o.bl_socket_idname == 'NodeSocketGeometry'] - objs = {o.name: extract_nodegroup_geo(base_obj, ng, o.name, ng_params=params) for o in geo_outputs} - - skin_obj = objs.pop('Geometry', None) + geo_outputs = [ + o for o in ng.outputs if o.bl_socket_idname == "NodeSocketGeometry" + ] + objs = { + o.name: extract_nodegroup_geo(base_obj, ng, o.name, ng_params=params) + for o in geo_outputs + } + + skin_obj = objs.pop("Geometry", None) if skin_obj is None: - skin_obj = butil.spawn_vert('nodegroup_to_part.no_geo_temp') + skin_obj = butil.spawn_vert("nodegroup_to_part.no_geo_temp") - attach_basemesh = objs.pop('Base Mesh', None) + attach_basemesh = objs.pop("Base Mesh", None) - if 'Skeleton Curve' in objs: - skeleton_obj = objs.pop('Skeleton Curve') + if "Skeleton Curve" in objs: + skeleton_obj = objs.pop("Skeleton Curve") skeleton = np.array([v.co for v in skeleton_obj.data.vertices]) if len(skeleton) == 0: - raise ValueError(f"Skeleton export failed for {nodegroup_func}, {skeleton_obj}, got {skeleton.shape=}") + raise ValueError( + f"Skeleton export failed for {nodegroup_func}, {skeleton_obj}, got {skeleton.shape=}" + ) butil.delete(skeleton_obj) else: skeleton = infer_skeleton_from_mesh(skin_obj) @@ -51,33 +58,32 @@ def nodegroup_to_part(nodegroup_func, params, kwargs=None, base_obj=None, split_ for k, o in objs.items(): if split_extras: for i, piece in enumerate(butil.split_object(o)): - logging.debug(f'Processing piece {i} for split_extras on {nodegroup_func}') + logging.debug( + f"Processing piece {i} for split_extras on {nodegroup_func}" + ) with butil.SelectObjects(piece): - bpy.ops.object.origin_set(type='ORIGIN_GEOMETRY') - piece.name = f'{k}_{i}' + bpy.ops.object.origin_set(type="ORIGIN_GEOMETRY") + piece.name = f"{k}_{i}" piece.parent = skin_obj else: o.parent = skin_obj o.name = k return Part( - skeleton, - obj=skin_obj, - attach_basemesh=attach_basemesh, - joints=None, iks=None + skeleton, obj=skin_obj, attach_basemesh=attach_basemesh, joints=None, iks=None ) -def nurbs_to_part(handles, face_size=0.07): +def nurbs_to_part(handles, face_size=0.07): assert handles.shape[-1] == 3 skeleton = handles.mean(axis=1) - obj = nurbs.nurbs(handles, method='geomdl', face_size=face_size) + obj = nurbs.nurbs(handles, method="geomdl", face_size=face_size) # first and last ring are used to close the part, need not be included in skeleton - skeleton = skeleton[1:-1] - skeleton_obj = butil.spawn_line('skeleton_subdiv_temp', 
skeleton) - butil.modify_mesh(skeleton_obj, 'SUBSURF', levels=2, apply=True) + skeleton = skeleton[1:-1] + skeleton_obj = butil.spawn_line("skeleton_subdiv_temp", skeleton) + butil.modify_mesh(skeleton_obj, "SUBSURF", levels=2, apply=True) mesh = skeleton_obj.data verts = [mesh.vertices[0].co] @@ -90,16 +96,16 @@ def nurbs_to_part(handles, face_size=0.07): verts.append(mesh.vertices[curr].co) curr = edge.vertices[1] - skeleton = np.array(verts) butil.delete(skeleton_obj) return Part(skeleton=skeleton, obj=obj) + def linear_combination(corners, weights): assert len(corners) == len(weights) first = corners[0] - + if not isinstance(first, dict): ret = sum(corners[i] * weights[i] for i in range(len(corners))) return ret @@ -110,11 +116,11 @@ def linear_combination(corners, weights): results[k] = linear_combination(new_corners, weights) return results -def rdict_comb(corners, weights): - ''' +def rdict_comb(corners, weights): + """ Take a linear combination of the dicts in `corners`, according to correspondng `weights` - ''' + """ norm = sum(weights.values()) for k in weights: @@ -128,6 +134,7 @@ def rdict_comb(corners, weights): return linear_combination(corners_list, weights_list) + # def rdict_comb(corners, weights): # ''' @@ -146,13 +153,13 @@ def rdict_comb(corners, weights): # res[k] = int(res[k]) # return res -def random_convex_coord(names, select=None, temp=1): - ''' +def random_convex_coord(names, select=None, temp=1): + """ corners: dict[dict[]] select: str | dict temp: float - like softmax, high temp = more even numbers, low temp = more 0s and 1s - ''' + """ if isinstance(temp, (float, int)): temp = temp * np.ones(len(names)) @@ -161,26 +168,26 @@ def random_convex_coord(names, select=None, temp=1): elif isinstance(temp, np.ndarray): pass else: - raise ValueError(f'Unrecognized {temp=}') - + raise ValueError(f"Unrecognized {temp=}") if isinstance(select, str): - if not select in names: - raise ValueError(f'Attempted to random_convex_comb({names=}, {select=}) but select is invalid') + if select not in names: + raise ValueError( + f"Attempted to random_convex_comb({names=}, {select=}) but select is invalid" + ) return {n: 1 if n == select else 0 for n in names} - + if isinstance(select, dict): if any(k not in names for k in select): - raise ValueError(f'Attempted to random_convex_comb({names=}, {select.keys()=}) but select is invalid') + raise ValueError( + f"Attempted to random_convex_comb({names=}, {select.keys()=}) but select is invalid" + ) weights = select norm = sum(weights.values()) for k, v in weights.items(): weights[k] = v / norm return weights - vs = np.random.dirichlet(temp) weights = {k: vs[i] for i, k in enumerate(names)} return weights - - diff --git a/infinigen/assets/creatures/util/rigging.py b/infinigen/assets/objects/creatures/util/rigging.py similarity index 66% rename from infinigen/assets/creatures/util/rigging.py rename to infinigen/assets/objects/creatures/util/rigging.py index f085d9463..33e5c6f45 100644 --- a/infinigen/assets/creatures/util/rigging.py +++ b/infinigen/assets/objects/creatures/util/rigging.py @@ -3,29 +3,28 @@ # Authors: Alexander Raistrick -import re +import logging import math -from numbers import Number +import re from functools import partial -import logging +from numbers import Number import bpy -import mathutils import numpy as np -from tqdm import tqdm -from infinigen.core.util import blender as butil, math as mutil -from infinigen.assets.creatures.util import tree -from infinigen.assets.creatures.util.creature import Part, 
infer_skeleton_from_mesh -from infinigen.assets.creatures.util.genome import Joint, IKParams +from infinigen.assets.objects.creatures.util import tree +from infinigen.assets.objects.creatures.util.creature import infer_skeleton_from_mesh +from infinigen.assets.objects.creatures.util.genome import IKParams, Joint +from infinigen.core.util import blender as butil +from infinigen.core.util import math as mutil logger = logging.getLogger(__name__) -IK_TARGET_PREFIX = 'ik_target' +IK_TARGET_PREFIX = "ik_target" def bone(editbones, head, tail, parent): - bone = editbones.new('bone') # name overriden later + bone = editbones.new("bone") # name overriden later bone.head = head bone.tail = tail bone.parent = parent @@ -33,7 +32,6 @@ def bone(editbones, head, tail, parent): def get_bone_idxs(part_node: tree.Tree): - part, att = part_node.item child_ts = [c.item[1].coord[0] for c in part_node.children] @@ -42,7 +40,7 @@ def get_bone_idxs(part_node: tree.Tree): bounds = [0.0, 1.0] - tr = part.settings.get('trim_bounds_child_margin', 0.15) + tr = part.settings.get("trim_bounds_child_margin", 0.15) if tr > 0 and len(child_ts): if min(child_ts) < tr: bounds[0] = min(child_ts) @@ -54,14 +52,14 @@ def get_bone_idxs(part_node: tree.Tree): idxs = idxs.union(part.joints.keys()) return sorted(list(idxs)) + def create_part_bones(part_node: tree.Tree, editbones, parent): - bones = {} part, att = part_node.item skeleton = part.skeleton_global() idxs = get_bone_idxs(part_node) - if part.settings.get('rig_reverse_skeleton', False): + if part.settings.get("rig_reverse_skeleton", False): idxs = list(reversed(idxs)) for idx1, idx2 in zip(idxs[:-1], idxs[1:]): @@ -70,23 +68,25 @@ def create_part_bones(part_node: tree.Tree, editbones, parent): parent = bone(editbones, head, tail, parent) bones[idx1] = parent - if part.settings.get('rig_extras', False): + if part.settings.get("rig_extras", False): for i, extra in enumerate(part.obj.children): - if extra.type != 'MESH': + if extra.type != "MESH": continue - skeleton = mutil.homogenize(infer_skeleton_from_mesh(extra)) @ np.array(extra.matrix_world)[:-1].T + skeleton = ( + mutil.homogenize(infer_skeleton_from_mesh(extra)) + @ np.array(extra.matrix_world)[:-1].T + ) head = mutil.lerp_sample(skeleton, 0 * (len(skeleton) - 1)).reshape(-1) tail = mutil.lerp_sample(skeleton, 1 * (len(skeleton) - 1)).reshape(-1) - extra_id = re.fullmatch('.*\.extra\((.*),.*', extra.name).group(1) + extra_id = re.fullmatch(".*\.extra\((.*),.*", extra.name).group(1) bones[extra_id] = bone(editbones, head, tail, parent) return bones -def create_bones(parts_atts, arma): +def create_bones(parts_atts, arma): def make_parent_connector_bone(part, att, parent_bones, parent_bone_t): - u, v, r = att.coord parent_bone = parent_bones[parent_bone_t] @@ -114,14 +114,20 @@ def make_bones(node: tree.Tree, parent_bones: dict, editbones): bones = {} parent_bone = None - + if parent_bones is not None: bonekeys = [k for k in parent_bones.keys() if not isinstance(k, str)] - parent_bone_t = max((i for i in bonekeys if i <= att.coord[0]), default=min(bonekeys)) + parent_bone_t = max( + (i for i in bonekeys if i <= att.coord[0]), default=min(bonekeys) + ) parent_bone = parent_bones[parent_bone_t] - - if att.coord[-1] > part.settings.get('connector_collapse_margin_radpct', 0.5): - bones[-1] = parent_bone = make_parent_connector_bone(part, att, parent_bones, parent_bone_t) + + if att.coord[-1] > part.settings.get( + "connector_collapse_margin_radpct", 0.5 + ): + bones[-1] = parent_bone = make_parent_connector_bone( + part, 
att, parent_bones, parent_bone_t + ) part_bones = create_part_bones(node, editbones, parent=parent_bone) bones.update(part_bones) @@ -131,23 +137,25 @@ def make_bones(node: tree.Tree, parent_bones: dict, editbones): def finalize_bonedict_to_leave_editmode(bones): # the edit bones wont continue to exist once we leave edit mode, store their names instead for j, b in bones.items(): - partname = part.obj.name.split('.')[-1] + partname = part.obj.name.split(".")[-1] if isinstance(j, (int, float)): - b.name = f'{partname}.side({part.side}).bone({j:.2f})' + b.name = f"{partname}.side({part.side}).bone({j:.2f})" elif isinstance(j, str): - b.name = f'{partname}.side({part.side}).extra_bone({j})' + b.name = f"{partname}.side({part.side}).extra_bone({j})" else: - raise ValueError(f'Unrecognized {j=}') - b['side'] = part.side - b['factory_class'] = part.obj['factory_class'] - b['index'] = part.obj['index'] - b['length'] = j + raise ValueError(f"Unrecognized {j=}") + b["side"] = part.side + b["factory_class"] = part.obj["factory_class"] + b["index"] = part.obj["index"] + b["length"] = j bones[j] = b.name - with butil.ViewportMode(arma, mode='EDIT'): + with butil.ViewportMode(arma, mode="EDIT"): editbones = arma.data.edit_bones - part_bones = tree.map_parent_child(parts_atts, partial(make_bones, editbones=editbones)) + part_bones = tree.map_parent_child( + parts_atts, partial(make_bones, editbones=editbones) + ) for (part, _), bones in tree.tzip(parts_atts, part_bones): finalize_bonedict_to_leave_editmode(bones) @@ -155,7 +163,6 @@ def finalize_bonedict_to_leave_editmode(bones): def compute_chain_length(parts_atts: tree.Tree, bones, part, ik: IKParams): - if ik.chain_parts is None: assert ik.chain_length is not None return ik.chain_length @@ -165,7 +172,9 @@ def compute_chain_length(parts_atts: tree.Tree, bones, part, ik: IKParams): chain_length = 0 for i in range(math.ceil(ik.chain_parts)): p = 1 if i < int(ik.chain_parts) else (ik.chain_parts - int(ik.chain_parts)) - n_skeleton_bones = len([b for b in nodes[curr_idx][1].values() if 'extra' not in b]) + n_skeleton_bones = len( + [b for b in nodes[curr_idx][1].values() if "extra" not in b] + ) chain_length += math.ceil(p * n_skeleton_bones) if curr_idx not in parents: break @@ -178,57 +187,55 @@ def compute_chain_length(parts_atts: tree.Tree, bones, part, ik: IKParams): def create_ik_targets(arma, parts_atts: tree.Tree, bones): - def make_target(part_node, part_bones, ik: IKParams): - part, att = part_node.item joint_ts = get_bone_idxs(part_node) - bone_idx = t if t != joint_ts[-1] else joint_ts[ - -2] # the last idx doesnt have its own bone, it is just the endpoint - base_keys =[k for k in part_bones.keys() if isinstance(k,Number)] + bone_idx = ( + t if t != joint_ts[-1] else joint_ts[-2] + ) # the last idx doesnt have its own bone, it is just the endpoint + base_keys = [k for k in part_bones.keys() if isinstance(k, Number)] bone_idx = max((i for i in base_keys if i <= bone_idx), default=min(base_keys)) name = part_bones[bone_idx] pbone = arma.pose.bones[name] - if ik.mode == 'iksolve': - con = pbone.constraints.new('IK') + if ik.mode == "iksolve": + con = pbone.constraints.new("IK") con.chain_count = compute_chain_length(parts_atts, bones, part, ik) - elif ik.mode == 'pin': - con = pbone.constraints.new('COPY_LOCATION') + elif ik.mode == "pin": + con = pbone.constraints.new("COPY_LOCATION") else: - raise ValueError(f'Unrecognized {ik.mode=}') + raise ValueError(f"Unrecognized {ik.mode=}") - con.target = 
butil.spawn_empty(f'{IK_TARGET_PREFIX}({ik.name})', disp_type='CUBE', s=ik.target_size) + con.target = butil.spawn_empty( + f"{IK_TARGET_PREFIX}({ik.name})", disp_type="CUBE", s=ik.target_size + ) con.target.location = pbone.tail if t != 0 else pbone.head if ik.rotation_weight > 0: - if ik.mode == 'iksolve': + if ik.mode == "iksolve": con.use_rotation = True con.orient_weight = ik.rotation_weight con.target.rotation_euler = (pbone.matrix).to_euler() else: - rot_con = pbone.constraints.new('COPY_ROTATION') + rot_con = pbone.constraints.new("COPY_ROTATION") rot_con.target = con.target rot_con.influence = ik.rotation_weight return con.target targets = [] - with butil.ViewportMode(arma, mode='POSE'): + with butil.ViewportMode(arma, mode="POSE"): # TODO: risky zip, silent fail on non-matching topology - data_iter = zip( - tree.iter_nodes(parts_atts), - tree.iter_nodes(bones) - ) - for part_node, bones_node in data_iter: + data_iter = zip(tree.iter_nodes(parts_atts), tree.iter_nodes(bones)) + for part_node, bones_node in data_iter: part, att = part_node.item assert part.iks is not None, part for t, ik in part.iks.items(): targets.append(make_target(part_node, bones_node.item, ik)) - col = butil.get_collection('ik_targets') + col = butil.get_collection("ik_targets") for t in targets: butil.put_in_collection(t, col) @@ -242,34 +249,35 @@ def apply_joint_constraint(joint: Joint, pose_bone, eps=1e-2): bounds = np.deg2rad(joint.bounds) if not bounds.shape == (2, 3): - raise ValueError(f'Encountered invalid {joint.bounds=}, {joint.bounds.shape=}') + raise ValueError( + f"Encountered invalid {joint.bounds=}, {joint.bounds.shape=}" + ) ranges = bounds[1] - bounds[0] - for i, ax in enumerate('xyz'): + for i, ax in enumerate("xyz"): if ranges[i] > eps: - setattr(pb, f'use_ik_limit_{ax}', True) - setattr(pb, f'ik_min_{ax}', bounds[0, i]) - setattr(pb, f'ik_max_{ax}', bounds[1, i]) + setattr(pb, f"use_ik_limit_{ax}", True) + setattr(pb, f"ik_min_{ax}", bounds[0, i]) + setattr(pb, f"ik_max_{ax}", bounds[1, i]) else: - setattr(pb, f'lock_ik_{ax}', True) + setattr(pb, f"lock_ik_{ax}", True) else: - for ax in 'xyz': - setattr(pb, f'use_ik_limit_{ax}', False) - setattr(pb, f'lock_ik_{ax}', False) + for ax in "xyz": + setattr(pb, f"use_ik_limit_{ax}", False) + setattr(pb, f"lock_ik_{ax}", False) if joint.stretch is not None: pb.ik_stretch = joint.stretch if joint.stiffness is not None: s = joint.stiffness - if not (hasattr(s, '__len__') and len(s) == 3): + if not (hasattr(s, "__len__") and len(s) == 3): s = (s,) * 3 pb.ik_stiffness_x, pb.ik_stiffness_y, pb.ik_stiffness_z = s def constrain_bones(arma, parts_atts, bones, shoulder_auto_stiffness=0.85): - def constrain_bone(part, att, skeleton_idx, bname): pb = arma.pose.bones[bname] @@ -285,20 +293,23 @@ def constrain_bone(part, att, skeleton_idx, bname): if skeleton_idx < 0 and shoulder_auto_stiffness > 0: # shoulder bones have index < 1, and were added automatically # make them stiff to minimally affect final outcome - pb.ik_stiffness_x, pb.ik_stiffness_y, pb.ik_stiffness_z = (shoulder_auto_stiffness,) * 3 + pb.ik_stiffness_x, pb.ik_stiffness_y, pb.ik_stiffness_z = ( + shoulder_auto_stiffness, + ) * 3 pb.lock_ik_x = True pb.lock_ik_y = True pb.lock_ik_z = True - - with butil.ViewportMode(arma, mode='POSE'): + + with butil.ViewportMode(arma, mode="POSE"): for (part, att), part_bones in tree.tzip(parts_atts, bones): for skeleton_idx, bname in part_bones.items(): if not isinstance(skeleton_idx, int): continue constrain_bone(part, att, skeleton_idx, bname) + def 
pose_bones(arma, parts_atts, bones): - with butil.ViewportMode(arma, mode='POSE'): + with butil.ViewportMode(arma, mode="POSE"): for (part, att), part_bones in tree.tzip(parts_atts, bones): if part.joints is None: continue @@ -317,36 +328,39 @@ def parent_to_bones(objs, arma): for obj in objs: save_pos = obj.location with butil.SelectObjects([obj, arma], active=arma): - bpy.ops.object.parent_set(type='ARMATURE_AUTO') + bpy.ops.object.parent_set(type="ARMATURE_AUTO") obj.location = save_pos def parent_bones_by_part(creature, arma, part_bones): assert creature.parts[0] is creature.root for i, part in enumerate(creature.parts[1:]): - with butil.SelectObjects([part.obj, arma]), butil.ViewportMode(arma, mode='POSE'): + with ( + butil.SelectObjects([part.obj, arma]), + butil.ViewportMode(arma, mode="POSE"), + ): for bone in arma.pose.bones: - select = (bone.name in part_bones[i].values()) + select = bone.name in part_bones[i].values() arma.data.bones[bone.name].select = select bone.bone.select = select - bpy.ops.object.parent_set(type='ARMATURE_AUTO') + bpy.ops.object.parent_set(type="ARMATURE_AUTO") - with butil.ViewportMode(arma, mode='POSE'): + with butil.ViewportMode(arma, mode="POSE"): for bone in arma.pose.bones: bone.bone.select = False -def creature_rig(root, genome, parts, constraints=True, roll='GLOBAL_POS_Y'): - data = bpy.data.armatures.new(name=f'{root.name}.armature_data') - arma = bpy.data.objects.new(f'{root.name}_armature', data) +def creature_rig(root, genome, parts, constraints=True, roll="GLOBAL_POS_Y"): + data = bpy.data.armatures.new(name=f"{root.name}.armature_data") + arma = bpy.data.objects.new(f"{root.name}_armature", data) bpy.context.scene.collection.objects.link(arma) parts_atts = tree.tzip(parts, tree.map(genome.parts, lambda n: n.att)) bones = create_bones(parts_atts, arma) # force recalculate roll to eliminate bad guesses made by blender - with butil.ViewportMode(arma, mode='EDIT'): - bpy.ops.armature.select_all(action='SELECT') + with butil.ViewportMode(arma, mode="EDIT"): + bpy.ops.armature.select_all(action="SELECT") bpy.ops.armature.calculate_roll(type=roll) targets = create_ik_targets(arma, parts_atts, bones) @@ -361,9 +375,11 @@ def creature_rig(root, genome, parts, constraints=True, roll='GLOBAL_POS_Y'): return arma, targets -def create_ragdoll(root, arma, min_col_length=0.1, col_joint_margin=0.2, col_radius=0.07): +def create_ragdoll( + root, arma, min_col_length=0.1, col_joint_margin=0.2, col_radius=0.07 +): def include_bone(b): - if '-1' in b.name: + if "-1" in b.name: return False if (b.head - b.tail).length < min_col_length: return False @@ -373,16 +389,16 @@ def create_bone_collider(pbone): col_head = mutil.lerp(pbone.head, pbone.tail, col_joint_margin) col_tail = mutil.lerp(pbone.head, pbone.tail, 1 - col_joint_margin) - col = butil.spawn_line(pbone.name + '.col', np.array([col_head, col_tail])) + col = butil.spawn_line(pbone.name + ".col", np.array([col_head, col_tail])) with butil.SelectObjects(col), butil.CursorLocation(col_head): - bpy.ops.object.origin_set(type='ORIGIN_CURSOR', center='MEDIAN') + bpy.ops.object.origin_set(type="ORIGIN_CURSOR", center="MEDIAN") - skin_mod = butil.modify_mesh(col, 'SKIN', apply=False) + skin_mod = butil.modify_mesh(col, "SKIN", apply=False) for svert in col.data.skin_vertices[0].data: svert.radius = (col_radius, col_radius) butil.apply_modifiers(col, mod=skin_mod) - con = pbone.constraints.new('CHILD_OF') + con = pbone.constraints.new("CHILD_OF") con.target = col with butil.SelectObjects(col): 
bpy.ops.rigidbody.object_add() @@ -392,33 +408,33 @@ def create_bone_collider(pbone): return col def configure_rigidbody_joint(child_bone, child_obj, parent_obj): - o = butil.spawn_empty(child_bone.name + '.phys_joint') + o = butil.spawn_empty(child_bone.name + ".phys_joint") o.location = child_bone.head with butil.SelectObjects(o): bpy.ops.rigidbody.constraint_add() cons = bpy.context.object.rigid_body_constraint - cons.type = 'GENERIC_SPRING' + cons.type = "GENERIC_SPRING" cons.object1 = child_obj cons.object2 = parent_obj # no linear sliding - for ax in 'xyz': - setattr(cons, f'use_limit_lin_{ax}', True) - setattr(cons, f'limit_lin_{ax}_lower', 0) - setattr(cons, f'limit_lin_{ax}_upper', 0) + for ax in "xyz": + setattr(cons, f"use_limit_lin_{ax}", True) + setattr(cons, f"limit_lin_{ax}_lower", 0) + setattr(cons, f"limit_lin_{ax}_upper", 0) # copy over any angle constraints - for ax in 'xyz': - do_limit = getattr(child_bone, f'use_ik_limit_{ax}') - setattr(cons, f'use_limit_ang_{ax}', do_limit) - for ck, bk in (('lower', 'min'), ('upper', 'max')): - lim = getattr(child_bone, f'ik_{bk}_{ax}') - setattr(cons, f'limit_ang_{ax}_{ck}', lim / 3) + for ax in "xyz": + do_limit = getattr(child_bone, f"use_ik_limit_{ax}") + setattr(cons, f"use_limit_ang_{ax}", do_limit) + for ck, bk in (("lower", "min"), ("upper", "max")): + lim = getattr(child_bone, f"ik_{bk}_{ax}") + setattr(cons, f"limit_ang_{ax}_{ck}", lim / 3) - for ax in 'xyz': - setattr(cons, f'use_spring_ang_{ax}', True) + for ax in "xyz": + setattr(cons, f"use_spring_ang_{ax}", True) return o @@ -427,7 +443,7 @@ def ancestors(pbone): pbone = pbone.parent yield pbone - with butil.ViewportMode(arma, mode='POSE'): + with butil.ViewportMode(arma, mode="POSE"): # remove any ik constraints for b in arma.pose.bones: for c in b.constraints: @@ -447,15 +463,18 @@ def ancestors(pbone): hinge_target = next(b for b in ancestors(b) if b in col_bones) except StopIteration: continue - joint_obj = configure_rigidbody_joint(b, col_objs[b.name], col_objs[hinge_target.name]) + joint_obj = configure_rigidbody_joint( + b, col_objs[b.name], col_objs[hinge_target.name] + ) joint_obj.parent = root - col_bone_names = [b.name for b in - col_bones] # store names so we can reference outside of pose mode in the next step + col_bone_names = [ + b.name for b in col_bones + ] # store names so we can reference outside of pose mode in the next step # animation will be applied wrong if children inherit physics transformations from parents - unparent all # bones - with butil.ViewportMode(arma, mode='EDIT'): + with butil.ViewportMode(arma, mode="EDIT"): for b in arma.data.edit_bones: b.select = b.name in col_bone_names - bpy.ops.armature.parent_clear(type='CLEAR') + bpy.ops.armature.parent_clear(type="CLEAR") diff --git a/infinigen/assets/creatures/util/tree.py b/infinigen/assets/objects/creatures/util/tree.py similarity index 80% rename from infinigen/assets/creatures/util/tree.py rename to infinigen/assets/objects/creatures/util/tree.py index 65daaf752..be7a08c58 100644 --- a/infinigen/assets/creatures/util/tree.py +++ b/infinigen/assets/objects/creatures/util/tree.py @@ -4,31 +4,35 @@ # Authors: Alexander Raistrick -from dataclasses import dataclass, field import typing -import itertools +from dataclasses import dataclass, field + @dataclass class Tree: item: typing.Any children: list = field(default_factory=list) + def iter_nodes(t: Tree, postorder=False): if not postorder: - yield t + yield t for c in t.children: yield from iter_nodes(c, postorder=postorder) if 
postorder: yield t + def iter_items(t: Tree, postorder=False): for n in iter_nodes(t, postorder=postorder): yield n.item + Tree.__iter__ = iter_items inorder = iter_items + def iter_parent_child(t: Tree, parent=None, postorder=False): if not postorder: yield None if parent is None else parent.item, t.item @@ -36,28 +40,42 @@ def iter_parent_child(t: Tree, parent=None, postorder=False): yield from iter_parent_child(c, parent=t, postorder=postorder) if postorder: yield None if parent is None else parent.item, t.item - + + def fold(t: Tree, func): child_res = [fold(func, node=child) for child in t.children] return func(t.item, child_res) + def map(t: Tree, func) -> Tree: return Tree(item=func(t.item), children=[map(c, func) for c in t.children]) + def map_parent_child(t, func, parent_node=None, parent_res=None, **opts) -> Tree: - arg = (t, parent_node) if opts.get('include_parent_node', False) else t + arg = (t, parent_node) if opts.get("include_parent_node", False) else t res = func(arg, parent_res) - return Tree(res, children=[map_parent_child(c, func, parent_node=t, parent_res=res, **opts) for c in t.children]) + return Tree( + res, + children=[ + map_parent_child(c, func, parent_node=t, parent_res=res, **opts) + for c in t.children + ], + ) + def tzip(*trees): - return Tree(tuple(t.item for t in trees), - children=[tzip(*children) for children in zip(*[t.children for t in trees])]) + return Tree( + tuple(t.item for t in trees), + children=[tzip(*children) for children in zip(*[t.children for t in trees])], + ) + def to_node_parent(t): nodes = list(iter_items(t)) parents = {} - index = lambda x: next(i for i, v in enumerate(nodes) if v is x) + def index(x): + return next(i for i, v in enumerate(nodes) if v is x) for parent, child in iter_parent_child(t): if parent is None: @@ -65,5 +83,3 @@ def to_node_parent(t): parents[index(child)] = index(parent) return nodes, parents - - diff --git a/infinigen/assets/objects/decor/__init__.py b/infinigen/assets/objects/decor/__init__.py new file mode 100644 index 000000000..01ea0ca11 --- /dev/null +++ b/infinigen/assets/objects/decor/__init__.py @@ -0,0 +1 @@ +from .aquarium_tank import AquariumTankFactory diff --git a/infinigen/assets/decor/aquarium_tank.py b/infinigen/assets/objects/decor/aquarium_tank.py similarity index 55% rename from infinigen/assets/decor/aquarium_tank.py rename to infinigen/assets/objects/decor/aquarium_tank.py index 627c5db35..64f356516 100644 --- a/infinigen/assets/decor/aquarium_tank.py +++ b/infinigen/assets/objects/decor/aquarium_tank.py @@ -6,47 +6,57 @@ import numpy as np from numpy.random import uniform -from infinigen.assets.rocks.boulder import BoulderFactory -from infinigen.assets.cactus import CactusFactory -from infinigen.assets.corals import CoralFactory -from infinigen.assets.mollusk import MolluskFactory -from infinigen.assets.mushroom import MushroomFactory -from infinigen.assets.underwater.seaweed import SeaweedFactory -from infinigen.assets.materials import metal, water -from infinigen.assets.materials import glass +from infinigen.assets.material_assignments import AssetList +from infinigen.assets.objects import ( + cactus, + corals, + mollusk, + mushroom, + rocks, + underwater, +) from infinigen.assets.utils.decorate import read_co, write_attribute from infinigen.assets.utils.object import join_objects, new_bbox, new_cube, new_plane from infinigen.core.placement.factory import AssetFactory +from infinigen.core.util import blender as butil from infinigen.core.util.blender import deep_clone_obj from 
infinigen.core.util.math import FixedSeed from infinigen.core.util.random import log_uniform -from infinigen.core.util import blender as butil -from infinigen.assets.material_assignments import AssetList class AquariumTankFactory(AssetFactory): - dry_factories = [MushroomFactory, CactusFactory, BoulderFactory] - wet_factories = [MolluskFactory, CoralFactory, SeaweedFactory] + dry_factories = [ + mushroom.MushroomFactory, + cactus.CactusFactory, + rocks.BoulderFactory, + ] + wet_factories = [ + mollusk.MolluskFactory, + corals.CoralFactory, + underwater.SeaweedFactory, + ] def __init__(self, factory_seed, coarse=False): super(AquariumTankFactory, self).__init__(factory_seed, coarse) with FixedSeed(self.factory_seed): - self.is_wet = uniform() < .5 - base_factory_fn = np.random.choice(self.wet_factories if self.is_wet else self.dry_factories) + self.is_wet = uniform() < 0.5 + base_factory_fn = np.random.choice( + self.wet_factories if self.is_wet else self.dry_factories + ) self.base_factory = base_factory_fn(self.factory_seed) - self.width = log_uniform(.5, 1) - self.depth = log_uniform(.5, .8) - self.height = log_uniform(.5, 1) - self.thickness = uniform(.01, .02) - self.belt_thickness = log_uniform(.02, .05) + self.width = log_uniform(0.5, 1) + self.depth = log_uniform(0.5, 0.8) + self.height = log_uniform(0.5, 1) + self.thickness = uniform(0.01, 0.02) + self.belt_thickness = log_uniform(0.02, 0.05) - materials = AssetList['AquariumTankFactory']() - self.glass_surface = materials['glass_surface'].assign_material() - self.belt_surface = materials['belt_surface'].assign_material() - self.water_surface = materials['water_surface'].assign_material() + materials = AssetList["AquariumTankFactory"]() + self.glass_surface = materials["glass_surface"].assign_material() + self.belt_surface = materials["belt_surface"].assign_material() + self.water_surface = materials["water_surface"].assign_material() - scratch_prob, edge_wear_prob = materials['wear_tear_prob'] - self.scratch, self.edge_wear = materials['wear_tear'] + scratch_prob, edge_wear_prob = materials["wear_tear_prob"] + self.scratch, self.edge_wear = materials["wear_tear"] is_scratch = uniform() < scratch_prob is_edge_wear = uniform() < edge_wear_prob if not is_scratch: @@ -56,7 +66,12 @@ def __init__(self, factory_seed, coarse=False): def create_placeholder(self, **kwargs) -> bpy.types.Object: return new_bbox( - -self.thickness - self.depth, self.thickness, -self.thickness, self.width + self.thickness, 0, self.height + -self.thickness - self.depth, + self.thickness, + -self.thickness, + self.width + self.thickness, + 0, + self.height, ) def create_asset(self, **params) -> bpy.types.Object: @@ -64,14 +79,16 @@ def create_asset(self, **params) -> bpy.types.Object: butil.apply_transform(tank, loc=True) tank.scale = self.width / 2, self.depth / 2, self.height / 2 butil.apply_transform(tank) - butil.modify_mesh(tank, 'SOLIDIFY', thickness=self.thickness) - write_attribute(tank, 1, 'glass', 'FACE') + butil.modify_mesh(tank, "SOLIDIFY", thickness=self.thickness) + write_attribute(tank, 1, "glass", "FACE") parts = [tank] parts.extend(self.make_belts()) base_obj = self.base_factory.create_asset(**params) co = read_co(base_obj) x_min, x_max = np.amin(co, 0), np.amax(co, 0) - scale = uniform(.7, .9) / np.max((x_max - x_min) / np.array([self.width, self.depth, self.height])) + scale = uniform(0.7, 0.9) / np.max( + (x_max - x_min) / np.array([self.width, self.depth, self.height]) + ) base_obj.location = -(x_min + x_max) * np.array(base_obj.scale) / 2 
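
The `Tree` helpers relocated earlier in this patch (now `infinigen/assets/objects/creatures/util/tree.py`) underpin the rigging code above and are small enough to illustrate directly. A minimal sketch, not part of the patch, assuming the post-move import path resolves in an installed Infinigen environment; the toy trees and values are illustrative only:

```python
# Illustrative sketch only -- not part of the patch. Assumes the post-move
# module path from this PR (infinigen.assets.objects.creatures.util.tree).
from infinigen.assets.objects.creatures.util import tree

# Two small trees with identical topology.
a = tree.Tree("root", children=[tree.Tree("left"), tree.Tree("right")])
b = tree.Tree(0, children=[tree.Tree(1), tree.Tree(2)])

# Tree.__iter__ is aliased to iter_items, which by default yields each item
# depth-first with the parent before its children.
assert list(a) == ["root", "left", "right"]

# tzip pairs corresponding nodes of same-topology trees, like zip() for trees.
assert list(tree.tzip(a, b)) == [("root", 0), ("left", 1), ("right", 2)]
```

The rigging code above relies on exactly this pairing behaviour, e.g. `tree.tzip(parts_atts, bones)` when constraining and posing bones.
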
base_obj.location[-1] = -(x_min * base_obj.scale)[-1] butil.apply_transform(base_obj, True) @@ -86,18 +103,20 @@ def create_asset(self, **params) -> bpy.types.Object: def make_belts(self): belt = new_plane() - with butil.ViewportMode(belt, 'EDIT'): - bpy.ops.mesh.select_all(action='SELECT') - bpy.ops.mesh.delete(type='ONLY_FACE') + with butil.ViewportMode(belt, "EDIT"): + bpy.ops.mesh.select_all(action="SELECT") + bpy.ops.mesh.delete(type="ONLY_FACE") belt.location = self.width / 2, self.depth / 2, 0 belt.scale = self.width / 2, self.depth / 2, 0 butil.apply_transform(belt, loc=True) - with butil.ViewportMode(belt, 'EDIT'): - bpy.ops.mesh.select_mode(type='EDGE') - bpy.ops.mesh.select_all(action='SELECT') - bpy.ops.mesh.extrude_edges_move(TRANSFORM_OT_translate={'value': (0, 0, self.belt_thickness)}) - butil.modify_mesh(belt, 'SOLIDIFY', thickness=self.thickness) - write_attribute(belt, 1, 'belt', 'FACE') + with butil.ViewportMode(belt, "EDIT"): + bpy.ops.mesh.select_mode(type="EDGE") + bpy.ops.mesh.select_all(action="SELECT") + bpy.ops.mesh.extrude_edges_move( + TRANSFORM_OT_translate={"value": (0, 0, self.belt_thickness)} + ) + butil.modify_mesh(belt, "SOLIDIFY", thickness=self.thickness) + write_attribute(belt, 1, "belt", "FACE") belt_ = deep_clone_obj(belt) belt_.location[-1] = self.height - self.belt_thickness @@ -105,8 +124,8 @@ def make_belts(self): return [belt, belt_] def finalize_assets(self, assets): - self.glass_surface.apply(assets, selection='glass') - self.belt_surface.apply(assets, selection='belt') + self.glass_surface.apply(assets, selection="glass") + self.belt_surface.apply(assets, selection="belt") if self.scratch: self.scratch.apply(assets) diff --git a/infinigen/assets/deformed_trees/__init__.py b/infinigen/assets/objects/deformed_trees/__init__.py similarity index 100% rename from infinigen/assets/deformed_trees/__init__.py rename to infinigen/assets/objects/deformed_trees/__init__.py index d7d76d5c8..86bef7a17 100644 --- a/infinigen/assets/deformed_trees/__init__.py +++ b/infinigen/assets/objects/deformed_trees/__init__.py @@ -1,5 +1,5 @@ from .fallen import FallenTreeFactory +from .generate import DeformedTreeFactory +from .hollow import HollowTreeFactory from .rotten import RottenTreeFactory from .truncated import TruncatedTreeFactory -from .hollow import HollowTreeFactory -from .generate import DeformedTreeFactory diff --git a/infinigen/assets/objects/deformed_trees/base.py b/infinigen/assets/objects/deformed_trees/base.py new file mode 100644 index 000000000..1c57ff0df --- /dev/null +++ b/infinigen/assets/objects/deformed_trees/base.py @@ -0,0 +1,85 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory +# of this source tree. 
+ +# Authors: Lingjie Mei + + +import colorsys + +from numpy.random import uniform + +from infinigen.assets.objects.trees.generate import GenericTreeFactory, random_species +from infinigen.core import surface +from infinigen.core.nodes.node_info import Nodes +from infinigen.core.nodes.node_wrangler import NodeWrangler +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.surface import NoApply +from infinigen.core.util.color import hsv2rgba +from infinigen.core.util.math import FixedSeed +from infinigen.core.util.random import log_uniform + + +class BaseDeformedTreeFactory(AssetFactory): + def __init__(self, factory_seed, coarse=False): + super(BaseDeformedTreeFactory, self).__init__(factory_seed, coarse) + with FixedSeed(factory_seed): + (tree_params, _, _), _ = random_species() + tree_params.skinning.update({"Scaling": 0.2}) + self.base_factory = GenericTreeFactory( + factory_seed, tree_params, None, NoApply, coarse + ) + self.trunk_surface = surface.registry("bark") + self.base_hue = uniform(0.02, 0.08) + self.material = surface.shaderfunc_to_material( + self.shader_rings, self.base_hue + ) + + def build_tree(self, i, distance, **kwargs): + return self.base_factory.spawn_asset(i=i, distance=distance) + + @staticmethod + def geo_xyz(nw: NodeWrangler): + geometry = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) + for name, component in zip( + "xyz", nw.separate(nw.new_node(Nodes.InputPosition)) + ): + geometry = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={"Geometry": geometry, "Name": name, "Value": component}, + ) + nw.new_node(Nodes.GroupOutput, input_kwargs={"Geometry": geometry}) + + @staticmethod + def shader_rings(nw: NodeWrangler, base_hue): + position = nw.combine( + *map( + lambda n: nw.new_node(Nodes.Attribute, attrs={"attribute_name": n}), + "xyz", + ) + ) + ratio = nw.new_node( + Nodes.WaveTexture, + [position], + input_kwargs={"Scale": uniform(10, 20), "Distortion": uniform(4, 10)}, + attrs={"wave_type": "RINGS", "rings_direction": "Z", "wave_profile": "SAW"}, + ) + bright_color = hsv2rgba(base_hue, uniform(0.4, 0.8), log_uniform(0.2, 0.8)) + dark_color = ( + *colorsys.hsv_to_rgb( + (base_hue + uniform(-0.02, 0.02)) % 1, + uniform(0.4, 0.8), + log_uniform(0.02, 0.05), + ), + 1.0, + ) + color = nw.new_node(Nodes.MixRGB, [ratio, dark_color, bright_color]) + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, input_kwargs={"Base Color": color} + ) + return principled_bsdf + + def create_asset(self, face_size, **params): + raise NotImplementedError diff --git a/infinigen/assets/objects/deformed_trees/fallen.py b/infinigen/assets/objects/deformed_trees/fallen.py new file mode 100644 index 000000000..d32ff8058 --- /dev/null +++ b/infinigen/assets/objects/deformed_trees/fallen.py @@ -0,0 +1,145 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory +# of this source tree. 
+ +# Authors: Lingjie Mei + + +import bpy +import numpy as np +from numpy.random import uniform + +from infinigen.assets.deformed_trees.base import BaseDeformedTreeFactory +from infinigen.assets.utils.decorate import remove_vertices +from infinigen.assets.utils.draw import cut_plane +from infinigen.assets.utils.misc import assign_material +from infinigen.assets.utils.object import join_objects, separate_loose +from infinigen.core import surface +from infinigen.core.nodes.node_info import Nodes +from infinigen.core.nodes.node_wrangler import NodeWrangler +from infinigen.core.tagging import tag_object +from infinigen.core.util import blender as butil +from infinigen.core.util.blender import deep_clone_obj + + +class FallenTreeFactory(BaseDeformedTreeFactory): + @staticmethod + def geo_cutter(nw: NodeWrangler, strength, scale, radius, metric_fn): + geometry = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) + x, y, z = nw.separate(nw.new_node(Nodes.InputPosition)) + selection = nw.compare( + "LESS_THAN", nw.scalar_add(nw.power(x, 2), nw.power(y, 2)), 1 + ) + offset = nw.scalar_multiply( + nw.new_node( + Nodes.Clamp, + [ + nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": nw.new_node(Nodes.InputPosition), + "Scale": scale, + }, + ), + 0.3, + 0.7, + ], + ), + strength, + ) + offset = nw.scalar_multiply( + offset, nw.build_float_curve(x, [(-radius, 1), (radius, 0)]) + ) + anchors = (-1, 0), (-0.5, 0), (0, -1), (0.5, 0), (1, 0) + offset = nw.scalar_multiply( + offset, nw.build_float_curve(surface.eval_argument(nw, metric_fn), anchors) + ) + geometry = nw.new_node( + Nodes.SetPosition, [geometry, selection, None, nw.combine(0, 0, offset)] + ) + nw.new_node(Nodes.GroupOutput, input_kwargs={"Geometry": geometry}) + + def build_half( + self, + obj, + cut_center, + cut_normal, + noise_strength, + noise_scale, + radius, + is_up=True, + ): + obj, cut = cut_plane(obj, cut_center, cut_normal, not is_up) + assign_material(cut, self.material) + obj = join_objects([obj, cut]) + with butil.ViewportMode(obj, "EDIT"), butil.Suppress(): + bpy.ops.mesh.select_all(action="SELECT") + bpy.ops.mesh.region_to_loop() + bpy.ops.mesh.remove_doubles(threshold=1e-2) + with butil.ViewportMode(obj, "EDIT"): + bpy.ops.mesh.select_all(action="SELECT") + bpy.ops.mesh.fill_holes() + + def metric_fn(nw): + return nw.dot( + nw.sub(nw.new_node(Nodes.InputPosition), cut_center), cut_normal + ) + + surface.add_geomod( + obj, + self.geo_cutter, + apply=True, + input_args=[noise_strength, noise_scale, radius, metric_fn], + ) + obj = separate_loose(obj) + surface.add_geomod(obj, self.geo_xyz, apply=True) + return obj + + def create_asset(self, i, distance=0, **params): + upper = self.build_tree(i, distance, **params) + radius = max( + [ + np.sqrt(v.co[0] ** 2 + v.co[1] ** 2) + for v in upper.data.vertices + if v.co[-1] < 0.1 + ] + ) + self.trunk_surface.apply(upper) + butil.apply_modifiers(upper) + lower = deep_clone_obj(upper, keep_materials=True) + cut_center = np.array([0, 0, uniform(0.6, 1.2)]) + cut_normal = np.array([uniform(0.1, 0.2), 0, 1]) + noise_strength = uniform(0.3, 0.5) + noise_scale = uniform(10, 15) + upper = self.build_half( + upper, cut_center, cut_normal, noise_strength, noise_scale, radius, True + ) + lower = self.build_half( + lower, cut_center, cut_normal, noise_strength, noise_scale, radius, False + ) + + ortho = np.array([-cut_normal[0], 0, 1]) + locations = np.array([v.co for v in lower.data.vertices]) + highest = locations[np.argmax(locations @ ortho)] 
+ np.array( + [-uniform(0.05, 0.15), 0, -uniform(0.05, 0.15)] + ) + upper.location = -highest + butil.apply_transform(upper, loc=True) + + x, _, z = np.mean(np.stack([v.co for v in upper.data.vertices]), 0) + r = np.sqrt(x * x + z * z) + if r > 0: + upper.rotation_euler[1] = ( + np.pi / 2 + + np.arcsin((highest[-1] - uniform(0, 0.2)) / r) + - np.arctan(x / z) + ) + upper.location = highest + butil.apply_transform(upper, loc=True) + remove_vertices(upper, lambda x, y, z: z < -0.5) + upper = separate_loose(upper) + obj = join_objects([upper, lower]) + tag_object(obj, "fallen_tree") + return obj diff --git a/infinigen/assets/deformed_trees/generate.py b/infinigen/assets/objects/deformed_trees/generate.py similarity index 65% rename from infinigen/assets/deformed_trees/generate.py rename to infinigen/assets/objects/deformed_trees/generate.py index 24ac17a63..6ad37b6fa 100644 --- a/infinigen/assets/deformed_trees/generate.py +++ b/infinigen/assets/objects/deformed_trees/generate.py @@ -4,24 +4,32 @@ # Authors: Lingjie Mei -import bpy import numpy as np -from infinigen.assets.deformed_trees import FallenTreeFactory, HollowTreeFactory, RottenTreeFactory +from infinigen.assets.deformed_trees import ( + FallenTreeFactory, + HollowTreeFactory, + RottenTreeFactory, +) from infinigen.assets.deformed_trees.truncated import TruncatedTreeFactory from infinigen.core.placement.factory import AssetFactory from infinigen.core.util.math import FixedSeed -from infinigen.core.tagging import tag_object, tag_nodegroup class DeformedTreeFactory(AssetFactory): - def __init__(self, factory_seed, coarse=False): super(DeformedTreeFactory, self).__init__(factory_seed, coarse) - self.maker_factories = [FallenTreeFactory, RottenTreeFactory, TruncatedTreeFactory, HollowTreeFactory] + self.maker_factories = [ + FallenTreeFactory, + RottenTreeFactory, + TruncatedTreeFactory, + HollowTreeFactory, + ] self.weights = np.array([1, 1, 1, 1]) with FixedSeed(factory_seed): - self.maker_factory = np.random.choice(self.maker_factories, p=self.weights / self.weights.sum()) + self.maker_factory = np.random.choice( + self.maker_factories, p=self.weights / self.weights.sum() + ) self.maker = self.maker_factory(factory_seed, coarse) def create_asset(self, **params): diff --git a/infinigen/assets/objects/deformed_trees/hollow.py b/infinigen/assets/objects/deformed_trees/hollow.py new file mode 100644 index 000000000..d25e71dbf --- /dev/null +++ b/infinigen/assets/objects/deformed_trees/hollow.py @@ -0,0 +1,130 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory +# of this source tree. 
+ +# Authors: Lingjie Mei + + +import bpy +import numpy as np +from numpy.random import uniform + +from infinigen.assets.deformed_trees.base import BaseDeformedTreeFactory +from infinigen.assets.utils.decorate import ( + read_co, + read_material_index, + write_material_index, +) +from infinigen.assets.utils.misc import assign_material +from infinigen.assets.utils.nodegroup import geo_selection +from infinigen.assets.utils.object import join_objects +from infinigen.core import surface +from infinigen.core.nodes.node_info import Nodes +from infinigen.core.nodes.node_wrangler import NodeWrangler +from infinigen.core.tagging import tag_object +from infinigen.core.util import blender as butil +from infinigen.core.util.blender import deep_clone_obj, select_none + + +class HollowTreeFactory(BaseDeformedTreeFactory): + @staticmethod + def geo_texture(nw: NodeWrangler, material_index): + geometry = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) + selection = nw.compare( + "EQUAL", nw.new_node(Nodes.MaterialIndex), material_index + ) + offset = nw.scale( + nw.scalar_multiply(nw.musgrave(uniform(10, 20)), -uniform(0.03, 0.06)), + nw.new_node(Nodes.InputNormal), + ) + geometry = nw.new_node(Nodes.SetPosition, [geometry, selection, None, offset]) + nw.new_node(Nodes.GroupOutput, input_kwargs={"Geometry": geometry}) + + @staticmethod + def filter_lower(obj): + select_none() + objs = butil.split_object(obj) + filtered = [o for o in objs if np.min(read_co(o)[:, -1]) < 0.5] + obj = filtered[np.argmax([len(o.data.vertices) for o in filtered])] + objs.remove(obj) + butil.delete(objs) + return obj + + def create_asset(self, i, distance=0, **params): + obj = self.build_tree(i, distance, **params) + scale = uniform(0.8, 1.0) + threshold = uniform(0.36, 0.4) + + def selection(nw: NodeWrangler): + x, y, z = nw.separate(nw.new_node(Nodes.InputPosition)) + radius = nw.power(nw.scalar_add(nw.power(x, 2), nw.power(y, 2)), 0.5) + vector = nw.combine( + nw.scalar_divide(x, radius), nw.scalar_divide(y, radius), z + ) + noise = nw.compare( + "GREATER_THAN", + nw.new_node( + Nodes.NoiseTexture, [vector], input_kwargs={"Scale": scale} + ), + threshold, + ) + r_outside = nw.compare( + "GREATER_THAN", nw.scalar_add(nw.power(x, 2), nw.power(y, 2)), 1 + ) + z_lower = nw.scalar_add( + 0.1, + nw.scale( + nw.new_node(Nodes.NoiseTexture, attrs={"noise_dimensions": "2D"}), + 0.4, + ), + ) + z_upper = nw.scalar_sub( + 3.5, + nw.scale( + nw.new_node(Nodes.NoiseTexture, attrs={"noise_dimensions": "2D"}), + 0.4, + ), + ) + z_outside = nw.boolean_math( + "OR", + nw.compare("LESS_THAN", z, z_lower), + nw.compare("GREATER_THAN", z, z_upper), + ) + return nw.boolean_math( + "OR", nw.boolean_math("OR", z_outside, noise), r_outside + ) + + surface.add_geomod(obj, geo_selection, apply=True, input_args=[selection]) + hollow = deep_clone_obj(obj) + + self.trunk_surface.apply(obj) + butil.apply_modifiers(obj) + assign_material(hollow, self.material) + obj = join_objects([self.filter_lower(obj), self.filter_lower(hollow)]) + + with butil.ViewportMode(obj, "EDIT"): + bpy.ops.mesh.select_all(action="SELECT") + bpy.ops.mesh.region_to_loop() + bpy.ops.mesh.bridge_edge_loops( + type="PAIRS", number_cuts=10, interpolation="LINEAR" + ) + + ring_material_index = list(obj.data.materials).index( + obj.data.materials["shader_rings"] + ) + surface.add_geomod( + obj, self.geo_texture, apply=True, input_args=[ring_material_index] + ) + + material_indices = read_material_index(obj) + null_indices = np.array( + [i 
for i, m in enumerate(obj.data.materials) if not hasattr(m, "name")] + ) + material_indices[ + np.any(material_indices[:, np.newaxis] == null_indices[np.newaxis, :], -1) + ] = ring_material_index + write_material_index(obj, material_indices) + tag_object(obj, "hollow_tree") + return obj diff --git a/infinigen/assets/objects/deformed_trees/rotten.py b/infinigen/assets/objects/deformed_trees/rotten.py new file mode 100644 index 000000000..2ccace381 --- /dev/null +++ b/infinigen/assets/objects/deformed_trees/rotten.py @@ -0,0 +1,164 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Lingjie Mei + + +import bpy +import numpy as np +from numpy.random import uniform + +from infinigen.assets.deformed_trees.base import BaseDeformedTreeFactory +from infinigen.assets.utils.decorate import ( + read_material_index, + remove_vertices, + write_material_index, +) +from infinigen.assets.utils.misc import assign_material +from infinigen.assets.utils.object import join_objects, new_icosphere, separate_loose +from infinigen.core import surface +from infinigen.core.nodes.node_info import Nodes +from infinigen.core.nodes.node_wrangler import NodeWrangler +from infinigen.core.tagging import tag_object +from infinigen.core.util import blender as butil +from infinigen.core.util.blender import deep_clone_obj +from infinigen.core.util.random import log_uniform + + +class RottenTreeFactory(BaseDeformedTreeFactory): + @staticmethod + def geo_cutter(nw: NodeWrangler, strength, scale, metric_fn): + geometry = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) + x, y, z = nw.separate(nw.new_node(Nodes.InputPosition)) + selection = nw.compare( + "LESS_THAN", nw.scalar_add(nw.power(x, 2), nw.power(y, 2)), 1 + ) + offset = nw.scalar_multiply( + nw.new_node( + Nodes.Clamp, + [ + nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": nw.new_node(Nodes.InputPosition), + "Scale": scale, + }, + attrs={"noise_dimensions": "2D"}, + ), + 0.3, + 0.7, + ], + ), + strength, + ) + anchors = (0, 1), (1.02, 1), (1.05, 0), (2, 0) + metric = surface.eval_argument(nw, metric_fn) + offset = nw.scalar_multiply(offset, nw.build_float_curve(metric, anchors)) + offset = nw.scalar_multiply( + offset, + nw.switch( + nw.compare( + "GREATER_THAN", nw.separate(nw.new_node(Nodes.InputNormal))[-1], 0 + ), + 1, + -1, + ), + ) + geometry = nw.new_node( + Nodes.SetPosition, [geometry, selection, None, nw.combine(0, 0, offset)] + ) + nw.new_node(Nodes.GroupOutput, input_kwargs={"Geometry": geometry}) + + def build_cutter(self, radius, height): + cutter = new_icosphere(subdivisions=6) + angle = uniform(-np.pi, 0) + depth = radius * uniform(0.4, 0.9) + cutter_scale = np.array( + [ + radius * uniform(0.8, 1.2), + radius * uniform(0.8, 1.2), + log_uniform(1.0, 1.2), + ] + ) + cutter_location = np.array( + [depth * np.cos(angle), depth * np.sin(angle), height] + ) + cutter.scale = cutter_scale + cutter.location = cutter_location + assign_material(cutter, self.material) + + def metric(x, y, z): + return np.linalg.norm( + (np.stack([x, y, z], -1) - cutter_location[np.newaxis, :]) + / cutter_scale[np.newaxis, :], + axis=-1, + ) + + def fn(x, y, z): + return metric(x, y, z) < 1 + 0.0001 + + def inverse_fn(x, y, z): + return metric(x, y, z) > 1 + 0.0001 + + def metric_fn(nw): + return nw.vector_math( + "LENGTH", + nw.divide( + nw.sub(nw.new_node(Nodes.InputPosition), 
cutter_location), + cutter_scale, + ), + ) + + return cutter, fn, inverse_fn, metric_fn + + def create_asset(self, i, distance=0, **params): + outer = self.build_tree(i, distance, **params) + radius = max( + [ + np.sqrt(v.co[0] ** 2 + v.co[1] ** 2) + for v in outer.data.vertices + if v.co[-1] < 0.1 + ] + ) + height = uniform(0.8, 1.6) + cutter, fn, inverse_fn, metric_fn = self.build_cutter(radius, height) + butil.modify_mesh(outer, "BOOLEAN", object=cutter, operation="DIFFERENCE") + outer = separate_loose(outer) + inner = deep_clone_obj(outer) + remove_vertices(outer, fn) + remove_vertices(inner, inverse_fn) + self.trunk_surface.apply(outer) + butil.apply_modifiers(outer) + + obj = join_objects([outer, inner]) + with butil.ViewportMode(obj, "EDIT"): + bpy.ops.mesh.select_all(action="SELECT") + bpy.ops.mesh.region_to_loop() + bpy.ops.mesh.bridge_edge_loops(number_cuts=10, interpolation="LINEAR") + + ring_material_index = list(obj.data.materials).index( + obj.data.materials["shader_rings"] + ) + material_indices = read_material_index(obj) + null_indices = np.array( + [i for i, m in enumerate(obj.data.materials) if not hasattr(m, "name")] + ) + material_indices[ + np.any(material_indices[:, np.newaxis] == null_indices[np.newaxis, :], -1) + ] = ring_material_index + write_material_index(obj, material_indices) + + noise_strength = cutter.scale[-1] * uniform(0.5, 0.8) + noise_scale = uniform(10, 15) + surface.add_geomod( + obj, + self.geo_cutter, + apply=True, + input_args=[noise_strength, noise_scale, metric_fn], + ) + surface.add_geomod(obj, self.geo_xyz, apply=True) + butil.delete(cutter) + tag_object(outer, "rotten_tree") + return outer diff --git a/infinigen/assets/objects/deformed_trees/truncated.py b/infinigen/assets/objects/deformed_trees/truncated.py new file mode 100644 index 000000000..85a7bd72c --- /dev/null +++ b/infinigen/assets/objects/deformed_trees/truncated.py @@ -0,0 +1,66 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory +# of this source tree. 
+ +# Authors: Lingjie Mei + + +import numpy as np +from numpy.random import uniform + +from infinigen.assets.deformed_trees import FallenTreeFactory +from infinigen.assets.utils.decorate import read_co +from infinigen.core import surface +from infinigen.core.nodes.node_info import Nodes +from infinigen.core.nodes.node_wrangler import NodeWrangler +from infinigen.core.tagging import tag_object +from infinigen.core.util import blender as butil + + +class TruncatedTreeFactory(FallenTreeFactory): + @staticmethod + def geo_cutter(nw: NodeWrangler, strength, scale, radius, metric_fn): + geometry = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) + offset = nw.scalar_multiply( + nw.new_node( + Nodes.Clamp, + [ + nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": nw.new_node(Nodes.InputPosition), + "Scale": scale, + }, + ), + 0.3, + 0.7, + ], + ), + strength, + ) + anchors = (-1, 0), (-0.5, 0), (0, 1), (0.5, 0), (1, 0) + offset = nw.scalar_multiply( + offset, nw.build_float_curve(surface.eval_argument(nw, metric_fn), anchors) + ) + geometry = nw.new_node( + Nodes.SetPosition, [geometry, None, None, nw.combine(0, 0, offset)] + ) + nw.new_node(Nodes.GroupOutput, input_kwargs={"Geometry": geometry}) + + def create_asset(self, i, distance=0, **params): + obj = self.build_tree(i, distance, **params) + x, y, z = read_co(obj).T + radius = np.amax(np.sqrt(x**2 + y**2)[z < 0.1]) + self.trunk_surface.apply(obj) + butil.apply_modifiers(obj) + cut_center = np.array([0, 0, uniform(0.8, 1.5)]) + cut_normal = np.array([uniform(-0.4, 0.4), 0, 1]) + noise_strength = uniform(0.6, 1.0) + noise_scale = uniform(10, 15) + obj = self.build_half( + obj, cut_center, cut_normal, noise_strength, noise_scale, radius, False + ) + tag_object(obj, "truncated_tree") + return obj diff --git a/infinigen/assets/objects/elements/__init__.py b/infinigen/assets/objects/elements/__init__.py new file mode 100644 index 000000000..bb6ba94a0 --- /dev/null +++ b/infinigen/assets/objects/elements/__init__.py @@ -0,0 +1,29 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
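
Before moving on to the interior `elements` assets below, the deformed-tree files above fit together as follows: `BaseDeformedTreeFactory` grows a trunk from a random species and attaches a ring-texture material, the `Fallen`/`Rotten`/`Truncated`/`Hollow` factories each carve that trunk differently, and `DeformedTreeFactory` picks one of the four with equal weight under `FixedSeed`. A usage sketch, not part of the patch; it assumes Infinigen's Blender runtime and that the import paths introduced here resolve, and the seed values are arbitrary:

```python
# Illustrative sketch only -- not part of the patch. Must run inside
# Infinigen's Blender environment; seeds are arbitrary.
from infinigen.assets.objects.deformed_trees import DeformedTreeFactory

fac = DeformedTreeFactory(factory_seed=0)

# Under FixedSeed(factory_seed), generate.py chooses one of
# Fallen/Rotten/Truncated/HollowTreeFactory with equal probability and
# delegates create_asset to it. spawn_asset(i=..., distance=...) mirrors how
# base.py drives the underlying GenericTreeFactory.
tree_obj = fac.spawn_asset(i=0, distance=0)
```
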
+ +# Authors: Lingjie Mei + +from .doors import ( + DoorCasingFactory, + GlassPanelDoorFactory, + LiteDoorFactory, + LouverDoorFactory, + PanelDoorFactory, + random_door_factory, +) +from .nature_shelf_trinkets.generate import NatureShelfTrinketsFactory +from .pillars import PillarFactory +from .rug import RugFactory +from .staircases import ( + CantileverStaircaseFactory, + CurvedStaircaseFactory, + LShapedStaircaseFactory, + SpiralStaircaseFactory, + StraightStaircaseFactory, + UShapedStaircaseFactory, + random_staircase_factory, +) +from .warehouses import ( + PalletFactory, + RackFactory, +) diff --git a/infinigen/assets/elements/doors/__init__.py b/infinigen/assets/objects/elements/doors/__init__.py similarity index 84% rename from infinigen/assets/elements/doors/__init__.py rename to infinigen/assets/objects/elements/doors/__init__.py index e0d675979..0779c93e3 100644 --- a/infinigen/assets/elements/doors/__init__.py +++ b/infinigen/assets/objects/elements/doors/__init__.py @@ -6,16 +6,22 @@ import numpy as np from infinigen.core.placement.factory import AssetFactory +from infinigen.core.util import blender as butil from infinigen.core.util.math import FixedSeed -from .panel import PanelDoorFactory, GlassPanelDoorFactory + +from .casing import DoorCasingFactory from .lite import LiteDoorFactory from .louver import LouverDoorFactory -from .casing import DoorCasingFactory +from .panel import GlassPanelDoorFactory, PanelDoorFactory -from infinigen.core.util import blender as butil def random_door_factory(): - door_factories = [PanelDoorFactory, GlassPanelDoorFactory, LouverDoorFactory, LiteDoorFactory] + door_factories = [ + PanelDoorFactory, + GlassPanelDoorFactory, + LouverDoorFactory, + LiteDoorFactory, + ] door_probs = np.array([4, 2, 3, 3]) return np.random.choice(door_factories, p=door_probs / door_probs.sum()) diff --git a/infinigen/assets/objects/elements/doors/base.py b/infinigen/assets/objects/elements/doors/base.py new file mode 100644 index 000000000..243a5f1dc --- /dev/null +++ b/infinigen/assets/objects/elements/doors/base.py @@ -0,0 +1,276 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory +# of this source tree. 
+ +# Authors: +# - Lingjie Mei: primary author + +import bpy +import numpy as np +from numpy.random import uniform + +from infinigen.assets.materials import glass, metal, wood +from infinigen.assets.materials.common import unique_surface +from infinigen.assets.utils.autobevel import BevelSharp +from infinigen.assets.utils.decorate import mirror, read_co, write_attribute, write_co +from infinigen.assets.utils.draw import spin +from infinigen.assets.utils.nodegroup import geo_radius +from infinigen.assets.utils.object import ( + data2mesh, + join_objects, + mesh2obj, + new_cube, + new_line, +) +from infinigen.core import surface +from infinigen.core.constraints.example_solver.room import constants +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.util import blender as butil +from infinigen.core.util.bevelling import add_bevel, get_bevel_edges +from infinigen.core.util.blender import deep_clone_obj +from infinigen.core.util.math import FixedSeed +from infinigen.core.util.random import log_uniform + + +class BaseDoorFactory(AssetFactory): + def __init__(self, factory_seed, coarse=False): + super(BaseDoorFactory, self).__init__(factory_seed, coarse) + with FixedSeed(self.factory_seed): + self.width = constants.DOOR_WIDTH + self.height = constants.DOOR_SIZE + self.depth = uniform(0.04, 0.06) + self.panel_margin = log_uniform(0.08, 0.12) + self.bevel_width = uniform(0.005, 0.01) + self.out_bevel = uniform() < 0.7 + self.shrink_width = log_uniform(0.005, 0.06) + + surface_fn = np.random.choice([metal, wood], p=[0.2, 0.8]) + self.surface = unique_surface(surface_fn, self.factory_seed) + self.has_glass = False + self.glass_surface = glass + self.has_louver = False + self.louver_surface = np.random.choice([metal, wood], p=[0.2, 0.8]) + + self.handle_type = np.random.choice(["knob", "lever", "pull"]) + self.handle_surface = np.random.choice([metal, wood], p=[0.2, 0.8]) + self.handle_offset = self.panel_margin * 0.5 + self.handle_height = self.height * uniform(0.45, 0.5) + + self.knob_radius = uniform(0.03, 0.04) + base_radius = uniform(1.1, 1.2) + mid_radius = uniform(0.4, 0.5) + self.knob_radius_mid = ( + base_radius, + base_radius, + mid_radius, + mid_radius, + 1, + uniform(0.6, 0.8), + 0, + ) + self.knob_depth = uniform(0.08, 0.1) + self.knob_depth_mid = [ + 0, + uniform(0.1, 0.15), + uniform(0.25, 0.3), + uniform(0.35, 0.45), + uniform(0.6, 0.8), + 1, + 1 + 1e-3, + ] + + self.lever_radius = uniform(0.03, 0.04) + self.lever_mid_radius = uniform(0.01, 0.02) + self.lever_depth = uniform(0.05, 0.08) + self.lever_mid_depth = uniform(0.15, 0.25) + self.lever_length = log_uniform(0.15, 0.2) + self.level_type = np.random.choice(["wave", "cylinder", "bent"]) + + self.pull_size = log_uniform(0.1, 0.4) + self.pull_depth = uniform(0.05, 0.08) + self.pull_width = log_uniform(0.08, 0.15) + self.pull_extension = uniform(0.05, 0.15) + self.to_pull_bevel = uniform() < 0.5 + self.pull_bevel_width = uniform(0.02, 0.04) + self.pull_radius = uniform(0.01, 0.02) + self.pull_type = np.random.choice(["u", "tee", "zed"]) + self.is_pull_circular = uniform() < 0.5 or self.pull_type == "zed" + self.panel_surface = unique_surface(surface_fn, np.random.randint(1e5)) + self.auto_bevel = BevelSharp() + self.side_bevel = log_uniform(0.005, 0.015) + + self.metal_color = metal.sample_metal_color() + + def create_asset(self, **params) -> bpy.types.Object: + for _ in range(100): + obj = self._create_asset() + if max(obj.dimensions) < 5: + return obj + else: + raise ValueError("Bad door booleaning") + + 
def _create_asset(self): + obj = new_cube(location=(1, 1, 1)) + butil.apply_transform(obj, loc=True) + obj.scale = self.width / 2, self.depth / 2, self.height / 2 + butil.apply_transform(obj) + panels = self.make_panels() + extras = [] + for panel in panels: + extras.extend(panel["func"](obj, panel)) + match self.handle_type: + case "knob": + extras.extend(self.make_knobs()) + case "lever": + extras.extend(self.make_levers()) + case "pull": + extras.extend(self.make_pulls()) + obj = join_objects([obj] + extras) + self.auto_bevel(obj) + obj.location = -self.width, -self.depth, 0 + butil.apply_transform(obj, True) + obj = add_bevel(obj, get_bevel_edges(obj), offset=self.side_bevel) + return obj + + def make_panels(self): + return [] + + def finalize_assets(self, assets): + self.surface.apply(assets, metal_color=self.metal_color, vertical=True) + if self.has_glass: + self.glass_surface.apply(assets, selection="glass", clear=True) + if self.has_louver: + self.louver_surface.apply( + assets, selection="louver", metal_color=self.metal_color + ) + self.handle_surface.apply(assets, selection="handle", metal_color="natural") + + def make_knobs(self): + x_anchors = np.array(self.knob_radius_mid) * self.knob_radius + y_anchors = np.array(self.knob_depth_mid) * self.knob_depth + obj = spin([x_anchors, y_anchors, 0], [0, 2, 3], axis=(0, 1, 0)) + with butil.ViewportMode(obj, "EDIT"): + bpy.ops.mesh.select_all(action="SELECT") + bpy.ops.mesh.region_to_loop() + bpy.ops.mesh.edge_face_add() + return self.make_handles(obj) + + def make_handles(self, obj): + write_attribute(obj, 1, "handle", "FACE") + obj.location = self.handle_offset, 0, self.handle_height + butil.apply_transform(obj, loc=True) + other = deep_clone_obj(obj) + obj.location[1] += self.depth + butil.apply_transform(obj, loc=True) + mirror(other, 1) + return [obj, other] + + def make_levers(self): + x_anchors = ( + self.lever_radius, + self.lever_radius, + self.lever_mid_radius, + self.lever_mid_radius, + 0, + ) + y_anchors = ( + np.array([0, self.lever_mid_depth, self.lever_mid_depth, 1, 1 + 1e-3]) + * self.lever_depth + ) + obj = spin([x_anchors, y_anchors, 0], [0, 1, 2, 3], axis=(0, 1, 0)) + with butil.ViewportMode(obj, "EDIT"): + bpy.ops.mesh.select_all(action="SELECT") + bpy.ops.mesh.region_to_loop() + bpy.ops.mesh.fill() + lever = new_line(4) + if self.level_type == "wave": + co = read_co(lever) + co[1, -1] = -uniform(0.2, 0.3) + co[3, -1] = uniform(0.1, 0.15) + write_co(lever, co) + elif self.level_type == "bent": + co = read_co(lever) + co[4, 1] = -uniform(0.2, 0.3) + write_co(lever, co) + lever.scale = [self.lever_length] * 3 + butil.apply_transform(lever) + butil.select_none() + with butil.ViewportMode(lever, "EDIT"): + bpy.ops.mesh.select_mode(type="EDGE") + bpy.ops.mesh.select_all(action="SELECT") + bpy.ops.mesh.extrude_edges_move( + TRANSFORM_OT_translate={"value": (0, 0, self.lever_mid_radius * 2)} + ) + butil.modify_mesh( + lever, "SOLIDIFY", lever, thickness=self.lever_mid_radius, offset=0 + ) + butil.modify_mesh(lever, "SUBSURF", render_levels=1, levels=1) + lever.location = ( + -self.lever_mid_radius, + self.lever_depth, + -self.lever_mid_radius, + ) + butil.apply_transform(lever, loc=True) + obj = join_objects([obj, lever]) + return self.make_handles(obj) + + def make_pulls(self): + if self.pull_type == "u": + vertices = ( + (0, 0, self.pull_size), + (0, self.pull_depth, self.pull_size), + (0, self.pull_depth, 0), + ) + edges = (0, 1), (1, 2) + elif self.pull_type == "tee": + vertices = ( + (0, 0, self.pull_size), + (0, 
self.pull_depth, self.pull_size), + (0, self.pull_depth, 0), + (0, self.pull_depth, self.pull_size + self.pull_extension), + ) + edges = (0, 1), (1, 2), (1, 3) + else: + vertices = ( + (0, 0, self.pull_size), + (0, self.pull_depth, self.pull_size), + (self.pull_width, self.pull_depth, self.pull_size), + (self.pull_width, self.pull_depth, 0), + ) + edges = (0, 1), (1, 2), (2, 3) + obj = mesh2obj(data2mesh(vertices, edges)) + butil.modify_mesh(obj, "MIRROR", use_axis=(False, False, True)) + if self.to_pull_bevel: + butil.modify_mesh( + obj, "BEVEL", width=self.pull_bevel_width, segments=4, affect="VERTICES" + ) + if self.is_pull_circular: + surface.add_geomod( + obj, + geo_radius, + apply=True, + input_args=[self.pull_radius, 32], + input_kwargs={"to_align_tilt": False}, + ) + else: + with butil.ViewportMode(obj, "EDIT"): + bpy.ops.mesh.select_mode(type="EDGE") + bpy.ops.mesh.select_all(action="SELECT") + bpy.ops.mesh.extrude_edges_move( + TRANSFORM_OT_translate={"value": (self.pull_radius * 2, 0, 0)} + ) + bpy.ops.mesh.select_all(action="SELECT") + bpy.ops.mesh.normals_make_consistent(inside=False) + obj.location = -self.pull_radius, -self.pull_radius, -self.pull_radius + butil.apply_transform(obj, loc=True) + butil.modify_mesh(obj, "SOLIDIFY", thickness=self.pull_radius * 2, offset=0) + return self.make_handles(obj) + + @property + def casing_factory(self): + from infinigen.assets.objects.elements import DoorCasingFactory + + factory = DoorCasingFactory(self.factory_seed, self.coarse) + factory.surface = self.surface + factory.metal_color = self.metal_color + return factory diff --git a/infinigen/assets/elements/doors/casing.py b/infinigen/assets/objects/elements/doors/casing.py similarity index 68% rename from infinigen/assets/elements/doors/casing.py rename to infinigen/assets/objects/elements/doors/casing.py index 32317fe93..4989d358e 100644 --- a/infinigen/assets/elements/doors/casing.py +++ b/infinigen/assets/objects/elements/doors/casing.py @@ -8,13 +8,12 @@ import numpy as np from numpy.random import uniform -from infinigen.assets.materials import wood, metal +from infinigen.assets.materials import metal, wood from infinigen.assets.utils.decorate import read_edge_center, read_edge_direction from infinigen.assets.utils.mesh import bevel from infinigen.assets.utils.object import new_cube from infinigen.core.constraints.example_solver.room import constants from infinigen.core.placement.factory import AssetFactory - from infinigen.core.util import blender as butil from infinigen.core.util.math import FixedSeed @@ -23,9 +22,9 @@ class DoorCasingFactory(AssetFactory): def __init__(self, factory_seed, coarse=False): super(DoorCasingFactory, self).__init__(factory_seed, coarse) with FixedSeed(self.factory_seed): - self.margin = constants.DOOR_SIZE * uniform(.05, .1) - self.extrude = uniform(.02, .08) - self.bevel_all_sides = uniform() < .3 + self.margin = constants.DOOR_SIZE * uniform(0.05, 0.1) + self.extrude = uniform(0.02, 0.08) + self.bevel_all_sides = uniform() < 0.3 self.surface = np.random.choice([metal, wood]) self.metal_color = metal.sample_metal_color() @@ -35,27 +34,31 @@ def create_asset(self, **params) -> bpy.types.Object: butil.apply_transform(obj, True) w = constants.DOOR_WIDTH s = constants.DOOR_SIZE - obj.scale = w / 2 + self.margin, constants.WALL_THICKNESS / 2 + self.extrude, \ - s / 2 + self.margin / 2 + obj.scale = ( + w / 2 + self.margin, + constants.WALL_THICKNESS / 2 + self.extrude, + s / 2 + self.margin / 2, + ) butil.apply_transform(obj) cutter = new_cube() 
cutter.location = 0, 0, 1 - 1e-3 butil.apply_transform(cutter, True) cutter.scale = w / 2 - 1e-3, constants.WALL_THICKNESS + self.extrude, s / 2 butil.apply_transform(cutter) - butil.modify_mesh(obj, 'BOOLEAN', object=cutter, operation='DIFFERENCE') + butil.modify_mesh(obj, "BOOLEAN", object=cutter, operation="DIFFERENCE") butil.delete(cutter) x, y, z = read_edge_center(obj).T x_, y_, z_ = read_edge_direction(obj).T if self.bevel_all_sides: - selection = (np.abs(z_) > .5) | (np.abs(x_) > .5) + selection = (np.abs(z_) > 0.5) | (np.abs(x_) > 0.5) else: - selection = ((np.abs(z_) > .5) & (np.abs(x) < w / 2 + self.margin / 2)) | ( - (np.abs(x_) > .5) & (z < s + self.margin / 2)) - obj.data.edges.foreach_set('bevel_weight', selection) - bevel(obj, self.extrude, limit_method='WEIGHT') + selection = ((np.abs(z_) > 0.5) & (np.abs(x) < w / 2 + self.margin / 2)) | ( + (np.abs(x_) > 0.5) & (z < s + self.margin / 2) + ) + obj.data.edges.foreach_set("bevel_weight", selection) + bevel(obj, self.extrude, limit_method="WEIGHT") return obj def finalize_assets(self, assets): diff --git a/infinigen/assets/elements/doors/lite.py b/infinigen/assets/objects/elements/doors/lite.py similarity index 58% rename from infinigen/assets/elements/doors/lite.py rename to infinigen/assets/objects/elements/doors/lite.py index e96e0a663..44345186a 100644 --- a/infinigen/assets/elements/doors/lite.py +++ b/infinigen/assets/objects/elements/doors/lite.py @@ -6,6 +6,7 @@ from numpy.random import uniform from infinigen.core.util.math import FixedSeed + from .panel import PanelDoorFactory @@ -16,40 +17,50 @@ def __init__(self, factory_seed, coarse=False): r = uniform() subdivide_glass = False if r <= 1 / 6: - dimension = 0, 1, uniform(.4, .6), 1 + dimension = 0, 1, uniform(0.4, 0.6), 1 subdivide_glass = True elif r <= 1 / 3: dimension = 0, 1, 0, 1 subdivide_glass = True elif r <= 1 / 2: - dimension = 0, uniform(.3, .4), uniform(.4, .6), 1 + dimension = 0, uniform(0.3, 0.4), uniform(0.4, 0.6), 1 elif r <= 2 / 3: - dimension = 0, uniform(.3, .4), uniform(.4, .6), 1 + dimension = 0, uniform(0.3, 0.4), uniform(0.4, 0.6), 1 elif r <= 5 / 6: dimension = 0, 1, 0, 1 else: - x = uniform(.3, .35) - dimension = x, 1 - x, uniform(.7, .8), 1 + x = uniform(0.3, 0.35) + dimension = x, 1 - x, uniform(0.7, 0.8), 1 self.x_min, self.x_max, self.y_min, self.y_max = dimension if subdivide_glass: self.x_subdivisions = np.random.choice([1, 3]) - self.y_subdivisions = int(self.height / self.width * self.x_subdivisions) + np.random.randint( - -1, 2 - ) + self.y_subdivisions = int( + self.height / self.width * self.x_subdivisions + ) + np.random.randint(-1, 2) else: self.x_subdivisions = 1 self.y_subdivisions = 1 self.has_glass = True def make_panels(self): - x_range = np.linspace(self.x_min, self.x_max, self.x_subdivisions + 1) * ( - self.width - self.panel_margin * 2) + self.panel_margin - y_range = np.linspace(self.y_min, self.y_max, self.y_subdivisions + 1) * ( - self.height - self.panel_margin * 2) + self.panel_margin + x_range = ( + np.linspace(self.x_min, self.x_max, self.x_subdivisions + 1) + * (self.width - self.panel_margin * 2) + + self.panel_margin + ) + y_range = ( + np.linspace(self.y_min, self.y_max, self.y_subdivisions + 1) + * (self.height - self.panel_margin * 2) + + self.panel_margin + ) panels = [] for x_min, x_max in zip(x_range[:-1], x_range[1:]): for y_min, y_max in zip(y_range[:-1], y_range[1:]): panels.append( - {'dimension': (x_min, x_max, y_min, y_max), 'func': self.bevel, 'attribute_name': 'glass'} + { + "dimension": (x_min, 
x_max, y_min, y_max), + "func": self.bevel, + "attribute_name": "glass", + } ) return panels diff --git a/infinigen/assets/elements/doors/louver.py b/infinigen/assets/objects/elements/doors/louver.py similarity index 59% rename from infinigen/assets/elements/doors/louver.py rename to infinigen/assets/objects/elements/doors/louver.py index 48f8bee6e..8b50e2ba9 100644 --- a/infinigen/assets/elements/doors/louver.py +++ b/infinigen/assets/objects/elements/doors/louver.py @@ -5,12 +5,13 @@ import numpy as np from numpy.random import uniform -from .panel import PanelDoorFactory from infinigen.assets.utils.decorate import write_attribute, write_co from infinigen.assets.utils.object import new_cube, new_plane +from infinigen.core.util import blender as butil from infinigen.core.util.math import FixedSeed from infinigen.core.util.random import log_uniform -from infinigen.core.util import blender as butil + +from .panel import PanelDoorFactory class LouverDoorFactory(PanelDoorFactory): @@ -18,33 +19,44 @@ def __init__(self, factory_seed, coarse=False): super(LouverDoorFactory, self).__init__(factory_seed, coarse) with FixedSeed(self.factory_seed): self.x_subdivisions = 1 - self.y_subdivisions = np.clip(np.random.binomial(5, .4), 1, None) - self.has_panel = uniform() < .7 - self.has_upper_panel = uniform() < .5 - self.louver_width = uniform(.002, .004) - self.louver_margin = uniform(.02, .03) - self.louver_size = log_uniform(.05, .1) + self.y_subdivisions = np.clip(np.random.binomial(5, 0.4), 1, None) + self.has_panel = uniform() < 0.7 + self.has_upper_panel = uniform() < 0.5 + self.louver_width = uniform(0.002, 0.004) + self.louver_margin = uniform(0.02, 0.03) + self.louver_size = log_uniform(0.05, 0.1) self.louver_angle = uniform(np.pi / 4.5, np.pi / 3.5) self.has_louver = True def louver(self, obj, panel): - x_min, x_max, y_min, y_max = panel['dimension'] + x_min, x_max, y_min, y_max = panel["dimension"] cutter = new_cube(location=(1, 1, 1)) butil.apply_transform(cutter, loc=True) - write_attribute(cutter, 1, 'louver', 'FACE') - cutter.location = x_min - self.louver_margin, -self.louver_width, y_min - self.louver_margin - cutter.scale = [(x_max - x_min) / 2 + self.louver_margin, self.depth / 2 + self.louver_width, - (y_max - y_min) / 2 + self.louver_margin] + write_attribute(cutter, 1, "louver", "FACE") + cutter.location = ( + x_min - self.louver_margin, + -self.louver_width, + y_min - self.louver_margin, + ) + cutter.scale = [ + (x_max - x_min) / 2 + self.louver_margin, + self.depth / 2 + self.louver_width, + (y_max - y_min) / 2 + self.louver_margin, + ] butil.apply_transform(cutter, loc=True) - butil.modify_mesh(obj, 'BOOLEAN', object=cutter, operation='DIFFERENCE') + butil.modify_mesh(obj, "BOOLEAN", object=cutter, operation="DIFFERENCE") hole = new_cube(location=(1, 1, 1)) butil.apply_transform(hole, loc=True) - write_attribute(hole, 1, 'louver', 'FACE') + write_attribute(hole, 1, "louver", "FACE") hole.location = x_min, -self.louver_width * 2, y_min - hole.scale = (x_max - x_min) / 2, self.depth / 2 + self.louver_width * 2, (y_max - y_min) / 2 + hole.scale = ( + (x_max - x_min) / 2, + self.depth / 2 + self.louver_width * 2, + (y_max - y_min) / 2, + ) butil.apply_transform(hole, loc=True) - butil.modify_mesh(cutter, 'BOOLEAN', object=hole, operation='DIFFERENCE') + butil.modify_mesh(cutter, "BOOLEAN", object=hole, operation="DIFFERENCE") butil.delete(hole) louver = new_plane() @@ -53,28 +65,31 @@ def louver(self, obj, panel): y_upper = y_min + self.depth * np.tan(self.louver_angle) z = y_min, 
y_min, y_upper, y_upper write_co(louver, np.stack([x, y, z], -1)) - butil.modify_mesh(louver, 'SOLIDIFY', thickness=self.louver_width, offset=0) + butil.modify_mesh(louver, "SOLIDIFY", thickness=self.louver_width, offset=0) butil.modify_mesh( - louver, 'ARRAY', use_relative_offset=False, use_constant_offset=True, + louver, + "ARRAY", + use_relative_offset=False, + use_constant_offset=True, constant_offset_displace=(0, 0, self.louver_size), - count=int(np.ceil((y_max - y_min) / self.louver_size) + .5) + count=int(np.ceil((y_max - y_min) / self.louver_size) + 0.5), ) - write_attribute(louver, 1, 'louver', 'FACE') + write_attribute(louver, 1, "louver", "FACE") return [cutter, louver] def make_panels(self): panels = super(LouverDoorFactory, self).make_panels() if len(panels) == 1: - panels[0]['func'] = self.louver + panels[0]["func"] = self.louver elif len(panels) == 2: if not self.has_panel: - panels[0]['func'] = self.louver - panels[1]['func'] = self.louver + panels[0]["func"] = self.louver + panels[1]["func"] = self.louver else: if self.has_upper_panel: panels = [panels[0], panels[-1]] else: panels = [panels[0]] for panel in panels: - panel['func'] = self.louver + panel["func"] = self.louver return panels diff --git a/infinigen/assets/elements/doors/panel.py b/infinigen/assets/objects/elements/doors/panel.py similarity index 53% rename from infinigen/assets/elements/doors/panel.py rename to infinigen/assets/objects/elements/doors/panel.py index eb73f1434..d355e531c 100644 --- a/infinigen/assets/elements/doors/panel.py +++ b/infinigen/assets/objects/elements/doors/panel.py @@ -6,45 +6,49 @@ import numpy as np from numpy.random import uniform -from infinigen.core import surface -from infinigen.core.surface import write_attr_data, read_attr_data -from .casing import DoorCasingFactory -from infinigen.assets.elements.doors.base import BaseDoorFactory -from infinigen.assets.utils.decorate import write_attribute, select_faces, read_area +from infinigen.assets.objects.elements.doors.base import BaseDoorFactory +from infinigen.assets.utils.decorate import read_area, select_faces, write_attribute from infinigen.assets.utils.object import new_cube -from infinigen.core.util.math import FixedSeed +from infinigen.core.surface import read_attr_data, write_attr_data from infinigen.core.util import blender as butil +from infinigen.core.util.math import FixedSeed class PanelDoorFactory(BaseDoorFactory): def __init__(self, factory_seed, coarse=False): super(PanelDoorFactory, self).__init__(factory_seed, coarse) with FixedSeed(self.factory_seed): - self.x_subdivisions = 1 if uniform() < .5 else 2 - self.y_subdivisions = np.clip(np.random.binomial(5, .45), 1, None) + self.x_subdivisions = 1 if uniform() < 0.5 else 2 + self.y_subdivisions = np.clip(np.random.binomial(5, 0.45), 1, None) def bevel(self, obj, panel): - x_min, x_max, y_min, y_max = panel['dimension'] + x_min, x_max, y_min, y_max = panel["dimension"] assert x_min <= x_max and y_min <= y_max cutter = new_cube() butil.apply_transform(cutter, loc=True) - if panel['attribute_name'] is not None: - write_attribute(cutter, 1, panel['attribute_name'], 'FACE') - cutter.location = (x_max + x_min) / 2, self.bevel_width * .5 - .1, (y_max + y_min) / 2 - cutter.scale = (x_max - x_min) / 2 - 2e-3, .1, (y_max - y_min) / 2 - 2e-3 + if panel["attribute_name"] is not None: + write_attribute(cutter, 1, panel["attribute_name"], "FACE") + cutter.location = ( + (x_max + x_min) / 2, + self.bevel_width * 0.5 - 0.1, + (y_max + y_min) / 2, + ) + cutter.scale = (x_max - x_min) 
/ 2 - 2e-3, 0.1, (y_max - y_min) / 2 - 2e-3 butil.apply_transform(cutter, loc=True) # butil.modify_mesh(cutter, 'BEVEL', width=self.bevel_width) - write_attr_data(cutter, 'cut', np.ones(len(cutter.data.polygons), dtype=int), 'INT', 'FACE') - butil.modify_mesh(obj, 'BOOLEAN', object=cutter, operation='DIFFERENCE') - cutter.location[1] += .2 + self.depth - self.bevel_width + write_attr_data( + cutter, "cut", np.ones(len(cutter.data.polygons), dtype=int), "INT", "FACE" + ) + butil.modify_mesh(obj, "BOOLEAN", object=cutter, operation="DIFFERENCE") + cutter.location[1] += 0.2 + self.depth - self.bevel_width butil.apply_transform(cutter, loc=True) - butil.modify_mesh(obj, 'BOOLEAN', object=cutter, operation='DIFFERENCE') + butil.modify_mesh(obj, "BOOLEAN", object=cutter, operation="DIFFERENCE") butil.delete(cutter) - select_faces(obj, (read_area(obj) > .01) & (read_attr_data(obj, 'cut'))) - with butil.ViewportMode(obj, 'EDIT'): + select_faces(obj, (read_area(obj) > 0.01) & (read_attr_data(obj, "cut"))) + with butil.ViewportMode(obj, "EDIT"): bpy.ops.mesh.inset(thickness=self.shrink_width) bpy.ops.mesh.inset(thickness=self.bevel_width, depth=self.bevel_width) - obj.data.attributes.remove(obj.data.attributes['cut']) + obj.data.attributes.remove(obj.data.attributes["cut"]) return [] def make_panels(self): @@ -55,38 +59,50 @@ def make_panels(self): y_cuts = np.cumsum(y_cuts / y_cuts.sum()) for j in range(len(y_cuts)): for i in range(len(x_cuts)): - x_min = self.panel_margin + (self.width - self.panel_margin) * (x_cuts[i - 1] if i > 0 else 0) + x_min = self.panel_margin + (self.width - self.panel_margin) * ( + x_cuts[i - 1] if i > 0 else 0 + ) x_max = (self.width - self.panel_margin) * x_cuts[i] - y_min = self.panel_margin + (self.height - self.panel_margin) * (y_cuts[j - 1] if j > 0 else 0) + y_min = self.panel_margin + (self.height - self.panel_margin) * ( + y_cuts[j - 1] if j > 0 else 0 + ) y_max = (self.height - self.panel_margin) * y_cuts[j] panels.append( - {'dimension': (x_min, x_max, y_min, y_max), 'func': self.bevel, 'attribute_name': None} + { + "dimension": (x_min, x_max, y_min, y_max), + "func": self.bevel, + "attribute_name": None, + } ) return panels class GlassPanelDoorFactory(PanelDoorFactory): - def __init__(self, factory_seed, coarse=False): super(GlassPanelDoorFactory, self).__init__(factory_seed, coarse) with FixedSeed(self.factory_seed): self.x_subdivisions = 2 - self.y_subdivisions = np.clip(np.random.binomial(5, .5), 2, None) + self.y_subdivisions = np.clip(np.random.binomial(5, 0.5), 2, None) self.merge_glass = self.y_subdivisions < 4 self.has_glass = True def make_panels(self): panels = super(GlassPanelDoorFactory, self).make_panels() if self.merge_glass: - first_dimension = panels[-self.x_subdivisions]['dimension'] - last_dimension = panels[- 1]['dimension'] + first_dimension = panels[-self.x_subdivisions]["dimension"] + last_dimension = panels[-1]["dimension"] merged = { - 'dimension': (first_dimension[0], last_dimension[1], first_dimension[2], last_dimension[3]), - 'func': self.bevel, - 'attribute_name': 'glass' + "dimension": ( + first_dimension[0], + last_dimension[1], + first_dimension[2], + last_dimension[3], + ), + "func": self.bevel, + "attribute_name": "glass", } - return [merged, *panels[:self.x_subdivisions]] + return [merged, *panels[: self.x_subdivisions]] else: - for panel in panels[-self.x_subdivisions:]: - panel['attribute_name'] = 'glass' + for panel in panels[-self.x_subdivisions :]: + panel["attribute_name"] = "glass" return panels diff --git 
a/infinigen/assets/objects/elements/nature_shelf_trinkets/generate.py b/infinigen/assets/objects/elements/nature_shelf_trinkets/generate.py new file mode 100644 index 000000000..685ea8243 --- /dev/null +++ b/infinigen/assets/objects/elements/nature_shelf_trinkets/generate.py @@ -0,0 +1,96 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Stamatis Alexandropulos + + +import bpy +import mathutils +import numpy as np +import trimesh + +from infinigen.assets.objects import corals, creatures, mollusk, monocot, rocks +from infinigen.assets.utils import object as obj +from infinigen.assets.utils.object import join_objects +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.util import blender as butil +from infinigen.core.util.math import FixedSeed + + +class NatureShelfTrinketsFactory(AssetFactory): + factories = [ + corals.CoralFactory, + rocks.BlenderRockFactory, + rocks.BoulderFactory, + monocot.PineconeFactory, + mollusk.MolluskFactory, + mollusk.AugerFactory, + mollusk.ClamFactory, + mollusk.ConchFactory, + mollusk.MusselFactory, + mollusk.ScallopFactory, + mollusk.VoluteFactory, + creatures.CarnivoreFactory, + creatures.HerbivoreFactory, + ] + probs = np.array([1, 1, 1, 1, 3, 2, 3, 2, 2, 2, 2, 5, 5]) + + def __init__(self, factory_seed, coarse=False): + super(NatureShelfTrinketsFactory, self).__init__(factory_seed, coarse) + with FixedSeed(self.factory_seed): + base_factory_fn = np.random.choice( + self.factories, p=self.probs / self.probs.sum() + ) + + kwargs = {} + if base_factory_fn in [ + creatures.HerbivoreFactory, + creatures.CarnivoreFactory, + ]: + kwargs.update({"hair": False}) + + self.base_factory = base_factory_fn(self.factory_seed, **kwargs) + + def create_placeholder(self, **params) -> bpy.types.Object: + size = np.random.uniform(0.1, 0.15) + bpy.ops.mesh.primitive_cube_add(size=size, location=(0, 0, size / 2)) + placeholder = bpy.context.active_object + return placeholder + + def create_asset(self, i, placeholder=None, **params): + asset = self.base_factory.spawn_asset( + np.random.randint(1e7), distance=200, adaptive_resolution=False + ) + + if list(asset.children): + asset = join_objects(list(asset.children)) + + # butil.modify_mesh(asset, 'DECIMATE') + butil.apply_transform(asset, loc=True) + butil.apply_modifiers(asset) + if isinstance(self.base_factory, creatures.HerbivoreFactory) or isinstance( + self.base_factory, creatures.CarnivoreFactory + ): + pass + else: + if not isinstance(asset, trimesh.Trimesh): + mesh = obj.obj2trimesh(asset) + stable_poses, probs = trimesh.poses.compute_stable_poses(mesh) + stable_pose = stable_poses[np.argmax(probs)] + asset.rotation_euler = mathutils.Matrix(stable_pose[:3, :3]).to_euler() + butil.apply_transform(asset, rot=True) + dim = asset.dimensions + bounding_box = placeholder.dimensions + scale = min([bounding_box[i] / dim[i] for i in range(3)]) + asset.scale = [scale for i in range(3)] + # asset.dimensions = placeholder.dimensions + butil.apply_transform(asset, loc=True) + bounds = butil.bounds(asset) + cur_loc = asset.location + new_location = [ + cur_loc[i] - (bounds[0][i] + bounds[1][i]) / 2 for i in range(3) + ] + new_location[2] = cur_loc[2] - (bounds[0][2] + bounding_box[2] / 2) + asset.location = new_location + butil.apply_transform(asset, loc=True) + return asset diff --git a/infinigen/assets/elements/pillars.py b/infinigen/assets/objects/elements/pillars.py 
similarity index 51% rename from infinigen/assets/elements/pillars.py rename to infinigen/assets/objects/elements/pillars.py index 8507f1931..b1c949909 100644 --- a/infinigen/assets/elements/pillars.py +++ b/infinigen/assets/objects/elements/pillars.py @@ -4,24 +4,25 @@ # Authors: Lingjie Mei import bmesh import bpy -import gin import numpy as np from numpy.random import uniform from infinigen.assets.materials import marble_regular, marble_voronoi from infinigen.assets.utils.decorate import ( - read_co, read_edge_center, read_selected, select_edges, - subdivide_edge_ring, subsurf, write_co, -) -from infinigen.assets.utils.object import ( - join_objects, new_base_circle, new_base_cylinder, new_circle, - new_cylinder, + read_co, + read_edge_center, + read_selected, + select_edges, + subdivide_edge_ring, + subsurf, + write_co, ) +from infinigen.assets.utils.object import join_objects, new_base_circle, new_cylinder from infinigen.core.constraints.example_solver.room import constants from infinigen.core.placement.factory import AssetFactory +from infinigen.core.util import blender as butil from infinigen.core.util.blender import deep_clone_obj from infinigen.core.util.math import FixedSeed -from infinigen.core.util import blender as butil from infinigen.core.util.random import log_uniform @@ -31,69 +32,91 @@ def __init__(self, factory_seed, coarse=False): with FixedSeed(factory_seed): self.height = constants.WALL_HEIGHT - constants.WALL_THICKNESS self.n = np.random.randint(5, 10) - self.radius = uniform(.08, .12) + self.radius = uniform(0.08, 0.12) self.outer_radius = self.radius * uniform(1.3, 1.5) - self.lower_offset = uniform(.05, .15) - self.upper_offset = uniform(.05, .15) - self.detail_type = np.random.choice(['fluting', 'reeding']) + self.lower_offset = uniform(0.05, 0.15) + self.upper_offset = uniform(0.05, 0.15) + self.detail_type = np.random.choice(["fluting", "reeding"]) width = np.pi / 2 / self.n - self.inset_width = width * log_uniform(.1, .2) - self.inset_width_ = (width - self.inset_width * 2) * uniform(-.1, .3) - self.inset_depth = uniform(.1, .15) - self.inset_scale = uniform(.05, .1) + self.inset_width = width * log_uniform(0.1, 0.2) + self.inset_width_ = (width - self.inset_width * 2) * uniform(-0.1, 0.3) + self.inset_depth = uniform(0.1, 0.15) + self.inset_scale = uniform(0.05, 0.1) self.outer_n = np.random.choice([1, 2, self.n]) self.m = np.random.randint(12, 20) z_profile = uniform(1, 3, self.m) - self.z_profile = np.array([0, *(np.cumsum(z_profile) / np.sum(z_profile))[:-1]]) - alpha = uniform(.7, .85) + self.z_profile = np.array( + [0, *(np.cumsum(z_profile) / np.sum(z_profile))[:-1]] + ) + alpha = uniform(0.7, 0.85) r_profile = uniform(0, 1, self.m + 3) r_profile[[0, 1]] = 1 r_profile[[-2, -1]] = 0 - r_profile = np.convolve(r_profile, np.array([(1 - alpha) / 2, alpha, (1 - alpha) / 2])) - self.r_profile = np.array([1, *r_profile[2:-2]]) * (self.outer_radius - self.radius) + self.radius + r_profile = np.convolve( + r_profile, np.array([(1 - alpha) / 2, alpha, (1 - alpha) / 2]) + ) + self.r_profile = ( + np.array([1, *r_profile[2:-2]]) * (self.outer_radius - self.radius) + + self.radius + ) self.n_profile = np.where( - np.arange(self.m) < np.random.randint(2, self.m - 1), self.outer_n, - self.n + np.arange(self.m) < np.random.randint(2, self.m - 1), + self.outer_n, + self.n, ) - self.inset_profile = uniform(0, 1, self.m) < .3 + self.inset_profile = uniform(0, 1, self.m) < 0.3 self.surface = np.random.choice([marble_regular, marble_voronoi]) def create_asset(self, 
**params) -> bpy.types.Object: obj = new_cylinder(vertices=4 * self.n) - with butil.ViewportMode(obj, 'EDIT'): + with butil.ViewportMode(obj, "EDIT"): bm = bmesh.from_edit_mesh(obj.data) geom = [f for f in bm.faces if len(f.verts) > 4] - bmesh.ops.delete(bm, geom=geom, context='FACES_ONLY') + bmesh.ops.delete(bm, geom=geom, context="FACES_ONLY") bmesh.update_edit_mesh(obj.data) - obj.scale = self.radius, self.radius, (1 - self.lower_offset - self.upper_offset) * self.height + obj.scale = ( + self.radius, + self.radius, + (1 - self.lower_offset - self.upper_offset) * self.height, + ) obj.location[-1] = self.lower_offset * self.height butil.apply_transform(obj, True) - inset_scale = 1 + self.inset_scale * (1 if self.detail_type == 'reeding' else -1) - if self.detail_type in ['fluting', 'reeding']: - with butil.ViewportMode(obj, 'EDIT'): - bpy.ops.mesh.select_mode(type='FACE') - bpy.ops.mesh.select_all(action='SELECT') - bpy.ops.mesh.inset(thickness=self.inset_width * self.radius, use_individual=True) - bpy.ops.mesh.inset(thickness=self.inset_width_ * self.radius, use_individual=True) + inset_scale = 1 + self.inset_scale * ( + 1 if self.detail_type == "reeding" else -1 + ) + if self.detail_type in ["fluting", "reeding"]: + with butil.ViewportMode(obj, "EDIT"): + bpy.ops.mesh.select_mode(type="FACE") + bpy.ops.mesh.select_all(action="SELECT") + bpy.ops.mesh.inset( + thickness=self.inset_width * self.radius, use_individual=True + ) + bpy.ops.mesh.inset( + thickness=self.inset_width_ * self.radius, use_individual=True + ) bpy.ops.transform.resize(value=(inset_scale, inset_scale, 1)) subdivide_edge_ring(obj, 16) parts = [obj] - with butil.ViewportMode(obj, 'EDIT'): - bpy.ops.mesh.select_mode(type='EDGE') - bpy.ops.mesh.select_all(action='SELECT') + with butil.ViewportMode(obj, "EDIT"): + bpy.ops.mesh.select_mode(type="EDGE") + bpy.ops.mesh.select_all(action="SELECT") bpy.ops.mesh.region_to_loop() z_rot = np.pi / 2 * np.random.randint(2) - for z, r, n, i in zip(self.z_profile, self.r_profile, self.n_profile, self.inset_profile): + for z, r, n, i in zip( + self.z_profile, self.r_profile, self.n_profile, self.inset_profile + ): o = new_base_circle(vertices=4 * n) if i: co = read_co(o) stride = np.random.choice([2, 4, 8]) - co *= np.where(np.arange(len(co)) % stride == 0, 1, inset_scale)[:, np.newaxis] + co *= np.where(np.arange(len(co)) % stride == 0, 1, inset_scale)[ + :, np.newaxis + ] write_co(o, co) - with butil.ViewportMode(o, 'EDIT'): - bpy.ops.mesh.select_mode(type='EDGE') - bpy.ops.mesh.select_all(action='SELECT') + with butil.ViewportMode(o, "EDIT"): + bpy.ops.mesh.select_mode(type="EDGE") + bpy.ops.mesh.select_all(action="SELECT") bpy.ops.mesh.subdivide(number_cuts=self.n // n - 1) o.location[-1] = z * self.lower_offset * self.height r_ = r / np.cos(np.pi / 4 / n) @@ -105,16 +128,20 @@ def create_asset(self, **params) -> bpy.types.Object: butil.apply_transform(o_, True) parts.extend([o, o_]) obj = join_objects(parts) - selection = read_selected(obj, 'EDGE') + selection = read_selected(obj, "EDGE") z = read_edge_center(obj)[:, -1] number_cuts = 0 smoothness = uniform(1, 1.4) - select_edges(obj, selection & (z < .5)) - with butil.ViewportMode(obj, 'EDIT'): - bpy.ops.mesh.bridge_edge_loops(number_cuts=number_cuts, smoothness=smoothness) - select_edges(obj, selection & (z > .5)) - with butil.ViewportMode(obj, 'EDIT'): - bpy.ops.mesh.bridge_edge_loops(number_cuts=number_cuts, smoothness=smoothness) + select_edges(obj, selection & (z < 0.5)) + with butil.ViewportMode(obj, "EDIT"): + 
bpy.ops.mesh.bridge_edge_loops( + number_cuts=number_cuts, smoothness=smoothness + ) + select_edges(obj, selection & (z > 0.5)) + with butil.ViewportMode(obj, "EDIT"): + bpy.ops.mesh.bridge_edge_loops( + number_cuts=number_cuts, smoothness=smoothness + ) subsurf(obj, 1, True) subsurf(obj, 1) return obj diff --git a/infinigen/assets/elements/rug.py b/infinigen/assets/objects/elements/rug.py similarity index 61% rename from infinigen/assets/elements/rug.py rename to infinigen/assets/objects/elements/rug.py index 0f5072902..12e3da63a 100644 --- a/infinigen/assets/elements/rug.py +++ b/infinigen/assets/objects/elements/rug.py @@ -6,16 +6,14 @@ import numpy as np from numpy.random import uniform -from infinigen.assets.materials import rug -from infinigen.assets.materials.art import Art, ArtRug -from infinigen.assets.utils.object import new_bbox, new_circle, new_plane, new_base_circle +from infinigen.assets.material_assignments import AssetList +from infinigen.assets.materials.art import ArtRug +from infinigen.assets.utils.object import new_base_circle, new_bbox, new_plane from infinigen.assets.utils.uv import wrap_sides -from infinigen.core.nodes import NodeWrangler, Nodes from infinigen.core.placement.factory import AssetFactory -from infinigen.core.util.math import FixedSeed -from infinigen.core.util.random import log_uniform, clip_gaussian from infinigen.core.util import blender as butil -from infinigen.assets.material_assignments import AssetList +from infinigen.core.util.math import FixedSeed +from infinigen.core.util.random import clip_gaussian class RugFactory(AssetFactory): @@ -24,41 +22,50 @@ def __init__(self, factory_seed, coarse=False): with FixedSeed(self.factory_seed): self.width = clip_gaussian(3, 1, 2, 6) self.length = self.width * uniform(1, 1.5) - self.rug_shape = np.random.choice(['rectangle', 'circle', 'rounded', 'ellipse']) - if self.rug_shape == 'circle': + self.rug_shape = np.random.choice( + ["rectangle", "circle", "rounded", "ellipse"] + ) + if self.rug_shape == "circle": self.length = self.width - self.rounded_buffer = self.width * uniform(.1, .5) - self.thickness = uniform(.01, .02) - material_assignments = AssetList['RugFactory']() - self.surface = material_assignments['surface'].assign_material() + self.rounded_buffer = self.width * uniform(0.1, 0.5) + self.thickness = uniform(0.01, 0.02) + material_assignments = AssetList["RugFactory"]() + self.surface = material_assignments["surface"].assign_material() if self.surface == ArtRug: self.surface = self.surface(self.factory_seed) def build_shape(self): match self.rug_shape: - case 'rectangle': + case "rectangle": obj = new_plane() obj.scale = self.length / 2, self.width / 2, 1 butil.apply_transform(obj, True) - case 'rounded': + case "rounded": obj = new_plane() obj.scale = self.length / 2, self.width / 2, 1 butil.apply_transform(obj, True) - butil.modify_mesh(obj, 'BEVEL', width=self.rounded_buffer, segments=16) + butil.modify_mesh(obj, "BEVEL", width=self.rounded_buffer, segments=16) case _: obj = new_base_circle(vertices=128) - with butil.ViewportMode(obj, 'EDIT'): - bpy.ops.mesh.select_all(action='SELECT') + with butil.ViewportMode(obj, "EDIT"): + bpy.ops.mesh.select_all(action="SELECT") bpy.ops.mesh.edge_face_add() obj.scale = self.length / 2, self.width / 2, 1 butil.apply_transform(obj, True) return obj def create_placeholder(self, **kwargs) -> bpy.types.Object: - return new_bbox(-self.length / 2, self.length / 2, -self.width / 2, self.width / 2, 0, self.thickness) + return new_bbox( + -self.length / 2, + 
self.length / 2, + -self.width / 2, + self.width / 2, + 0, + self.thickness, + ) def create_asset(self, **params) -> bpy.types.Object: obj = self.build_shape() - wrap_sides(obj, self.surface, 'z', 'x', 'y') - butil.modify_mesh(obj, 'SOLIDIFY', thickness=self.thickness, offset=1) + wrap_sides(obj, self.surface, "z", "x", "y") + butil.modify_mesh(obj, "SOLIDIFY", thickness=self.thickness, offset=1) return obj diff --git a/infinigen/assets/elements/staircases/__init__.py b/infinigen/assets/objects/elements/staircases/__init__.py similarity index 68% rename from infinigen/assets/elements/staircases/__init__.py rename to infinigen/assets/objects/elements/staircases/__init__.py index 0558c4205..74998c43a 100644 --- a/infinigen/assets/elements/staircases/__init__.py +++ b/infinigen/assets/objects/elements/staircases/__init__.py @@ -3,16 +3,23 @@ # Authors: Lingjie Mei import numpy as np + +from .cantilever import CantileverStaircaseFactory from .curved import CurvedStaircaseFactory +from .l_shaped import LShapedStaircaseFactory from .spiral import SpiralStaircaseFactory from .straight import StraightStaircaseFactory -from .l_shaped import LShapedStaircaseFactory from .u_shaped import UShapedStaircaseFactory -from .cantilever import CantileverStaircaseFactory def random_staircase_factory(): - door_factories = [StraightStaircaseFactory, LShapedStaircaseFactory, UShapedStaircaseFactory, - SpiralStaircaseFactory, CurvedStaircaseFactory, CantileverStaircaseFactory] - door_probs = np.array([2, 2, 2, .5, 2, 2]) + door_factories = [ + StraightStaircaseFactory, + LShapedStaircaseFactory, + UShapedStaircaseFactory, + SpiralStaircaseFactory, + CurvedStaircaseFactory, + CantileverStaircaseFactory, + ] + door_probs = np.array([2, 2, 2, 0.5, 2, 2]) return np.random.choice(door_factories, p=door_probs / door_probs.sum()) diff --git a/infinigen/assets/elements/staircases/cantilever.py b/infinigen/assets/objects/elements/staircases/cantilever.py similarity index 61% rename from infinigen/assets/elements/staircases/cantilever.py rename to infinigen/assets/objects/elements/staircases/cantilever.py index 581a28672..0aa49f538 100644 --- a/infinigen/assets/elements/staircases/cantilever.py +++ b/infinigen/assets/objects/elements/staircases/cantilever.py @@ -5,16 +5,18 @@ import numpy as np import shapely import shapely.affinity -from infinigen.assets.elements.staircases.straight import StraightStaircaseFactory + +from infinigen.assets.objects.elements.staircases.straight import ( + StraightStaircaseFactory, +) from infinigen.assets.utils.decorate import read_co from infinigen.assets.utils.object import join_objects - from infinigen.core.util import blender as butil class CantileverStaircaseFactory(StraightStaircaseFactory): - support_types = 'wall' - handrail_types = 'weighted_choice', (2, 'horizontal-post'), (2, 'vertical-post') + support_types = "wall" + handrail_types = "weighted_choice", (2, "horizontal-post"), (2, "vertical-post") def valid_contour(self, offset, contour, doors, lower=True): valid = super().valid_contour(offset, contour, doors, lower) @@ -25,8 +27,14 @@ def valid_contour(self, offset, contour, doors, lower=True): butil.delete(obj) if self.mirror: co[:, 0] = -co[:, 0] - points = [shapely.affinity.translate(shapely.affinity.rotate(p, self.rot_z, (0, 0)), *offset) for p in - shapely.points(co)] + points = [ + shapely.affinity.translate( + shapely.affinity.rotate(p, self.rot_z, (0, 0)), *offset + ) + for p in shapely.points(co) + ] others = [shapely.ops.nearest_points(p, contour.boundary)[0] for p in 
points] - distance = np.array([np.abs(p.x - o.x) + np.abs(p.y - o.y) for p, o in zip(points, others)]) - return (distance < .1).sum() / len(distance) > .5 + distance = np.array( + [np.abs(p.x - o.x) + np.abs(p.y - o.y) for p, o in zip(points, others)] + ) + return (distance < 0.1).sum() / len(distance) > 0.5 diff --git a/infinigen/assets/elements/staircases/curved.py b/infinigen/assets/objects/elements/staircases/curved.py similarity index 83% rename from infinigen/assets/elements/staircases/curved.py rename to infinigen/assets/objects/elements/staircases/curved.py index 2190e1af3..dab01e79a 100644 --- a/infinigen/assets/elements/staircases/curved.py +++ b/infinigen/assets/objects/elements/staircases/curved.py @@ -2,24 +2,32 @@ # This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory # of this source tree. -# Authors: +# Authors: # - Lingjie Mei # - Karhan Kayan: fix constants import numpy as np -from infinigen.assets.elements.staircases.straight import StraightStaircaseFactory +from infinigen.assets.objects.elements.staircases.straight import ( + StraightStaircaseFactory, +) from infinigen.assets.utils.decorate import read_co, write_co from infinigen.core.constraints.example_solver.room import constants -from infinigen.core.util.random import log_uniform from infinigen.core.util.math import FixedSeed +from infinigen.core.util.random import log_uniform class CurvedStaircaseFactory(StraightStaircaseFactory): - support_types = 'weighted_choice', (2, 'single-rail'), (2, 'double-rail'), (4, 'side'), (4, 'solid'), ( - 4, 'hole') + support_types = ( + "weighted_choice", + (2, "single-rail"), + (2, "double-rail"), + (4, "side"), + (4, "solid"), + (4, "hole"), + ) - handrail_types = 'weighted_choice', (2, 'horizontal-post'), (2, 'vertical-post') + handrail_types = "weighted_choice", (2, "horizontal-post"), (2, "vertical-post") def __init__(self, factory_seed, coarse=False): self.full_angle, self.radius, self.theta = 0, 0, 0 @@ -34,7 +42,7 @@ def build_size_config(self): self.step_height = constants.WALL_HEIGHT / self.n self.theta = self.full_angle / self.n self.step_length = self.step_height * log_uniform(1, 1.5) - self.step_width = log_uniform(.9, 1.5) + self.step_width = log_uniform(0.9, 1.5) self.radius = self.step_length / self.theta if self.radius / self.step_width > 1.5: break diff --git a/infinigen/assets/elements/staircases/generate.py b/infinigen/assets/objects/elements/staircases/generate.py similarity index 76% rename from infinigen/assets/elements/staircases/generate.py rename to infinigen/assets/objects/elements/staircases/generate.py index 13e12ffef..1ca67f1f2 100644 --- a/infinigen/assets/elements/staircases/generate.py +++ b/infinigen/assets/objects/elements/staircases/generate.py @@ -7,23 +7,32 @@ from infinigen.core.placement.factory import AssetFactory from infinigen.core.util.math import FixedSeed -from .straight import StraightStaircaseFactory -from .l_shaped import LShapedStaircaseFactory -from .u_shaped import UShapedStaircaseFactory + from .cantilever import CantileverStaircaseFactory -from .spiral import SpiralStaircaseFactory from .curved import CurvedStaircaseFactory +from .l_shaped import LShapedStaircaseFactory +from .spiral import SpiralStaircaseFactory +from .straight import StraightStaircaseFactory +from .u_shaped import UShapedStaircaseFactory class StaircaseFactory(AssetFactory): - factories = [StraightStaircaseFactory, LShapedStaircaseFactory, UShapedStaircaseFactory, - SpiralStaircaseFactory, 
CurvedStaircaseFactory, CantileverStaircaseFactory] + factories = [ + StraightStaircaseFactory, + LShapedStaircaseFactory, + UShapedStaircaseFactory, + SpiralStaircaseFactory, + CurvedStaircaseFactory, + CantileverStaircaseFactory, + ] probs = np.array([4, 3, 3, 1, 2, 2]) def __init__(self, factory_seed, coarse=False): super(StaircaseFactory, self).__init__(factory_seed, coarse) with FixedSeed(self.factory_seed): - base_factory_fn = np.random.choice(self.factories, p=self.probs / self.probs.sum()) + base_factory_fn = np.random.choice( + self.factories, p=self.probs / self.probs.sum() + ) self.base_factory = base_factory_fn(self.factory_seed) def create_asset(self, **params) -> bpy.types.Object: diff --git a/infinigen/assets/elements/staircases/l_shaped.py b/infinigen/assets/objects/elements/staircases/l_shaped.py similarity index 65% rename from infinigen/assets/elements/staircases/l_shaped.py rename to infinigen/assets/objects/elements/staircases/l_shaped.py index 78014290e..0b6b9f81b 100644 --- a/infinigen/assets/elements/staircases/l_shaped.py +++ b/infinigen/assets/objects/elements/staircases/l_shaped.py @@ -6,43 +6,58 @@ import numpy as np from numpy.random import uniform -from .straight import StraightStaircaseFactory +import infinigen.core.util.blender as butil from infinigen.assets.utils.decorate import read_co, write_attribute, write_co from infinigen.assets.utils.object import new_cube, new_line from infinigen.core.util.math import FixedSeed -import infinigen.core.util.blender as butil + +from .straight import StraightStaircaseFactory class LShapedStaircaseFactory(StraightStaircaseFactory): def __init__(self, factory_seed, coarse=False): super(LShapedStaircaseFactory, self).__init__(factory_seed, coarse) with FixedSeed(self.factory_seed): - self.m = int(self.n * uniform(.4, .6)) + self.m = int(self.n * uniform(0.4, 0.6)) self.is_rail_circular = True def make_line(self, alpha): obj = new_line(self.n + 2) x = np.concatenate( - [np.full(self.m + 2, alpha * self.step_width), -np.arange(self.n - self.m + 1) * self.step_length] + [ + np.full(self.m + 2, alpha * self.step_width), + -np.arange(self.n - self.m + 1) * self.step_length, + ] ) y = np.concatenate( - [np.arange(self.m + 1) * self.step_length, [self.m * self.step_length + alpha * self.step_width], - np.full(self.n - self.m + 1, self.m * self.step_length + alpha * self.step_width)] + [ + np.arange(self.m + 1) * self.step_length, + [self.m * self.step_length + alpha * self.step_width], + np.full( + self.n - self.m + 1, + self.m * self.step_length + alpha * self.step_width, + ), + ] + ) + z = ( + np.concatenate( + [np.arange(self.m + 1), [self.m], np.arange(self.m, self.n + 1)] + ) + * self.step_height ) - z = np.concatenate([np.arange(self.m + 1), [self.m], np.arange(self.m, self.n + 1)]) * self.step_height write_co(obj, np.stack([x, y, z], -1)) return obj def make_line_offset(self, alpha): obj = self.make_line(alpha) co = read_co(obj) - co[self.m:self.m + 2] = co[self.m + 1:self.m + 3] + co[self.m : self.m + 2] = co[self.m + 1 : self.m + 3] x, y, z = co.T x[self.m + 1] += min(self.step_length / 2, alpha * self.step_width) - x[self.m + 2:] -= self.step_length / 2 - y[:self.m] += self.step_length / 2 + x[self.m + 2 :] -= self.step_length / 2 + y[: self.m] += self.step_length / 2 z += self.step_height - z[[self.m, self.m + 1, - 1]] -= self.step_height + z[[self.m, self.m + 1, -1]] -= self.step_height write_co(obj, np.stack([x, y, z], -1)) return obj @@ -52,9 +67,12 @@ def make_post_locs(self, alpha): butil.delete(temp) chunks = 
self.split(self.m - 1) chunks_ = self.split(self.m + 1, self.n + 2) - indices = list(c[0] for c in chunks) + [self.m - 1, self.m, self.m + 1] + list( - c[0] for c in chunks_ - ) + [self.n + 1, self.n + 2] + indices = ( + list(c[0] for c in chunks) + + [self.m - 1, self.m, self.m + 1] + + list(c[0] for c in chunks_) + + [self.n + 1, self.n + 2] + ) return cos[indices] def make_vertical_post_locs(self, alpha): @@ -68,13 +86,15 @@ def make_vertical_post_locs(self, alpha): mid_cos = [] mid = [self.m - 1, self.m] for m in mid: - for r in np.linspace(0, 1, self.post_k + 1 if m >= self.m else self.post_k + 2)[1:-1]: + for r in np.linspace( + 0, 1, self.post_k + 1 if m >= self.m else self.post_k + 2 + )[1:-1]: mid_cos.append(r * cos[m] + (1 - r) * cos[m + 1]) return np.concatenate([cos[indices], np.stack(mid_cos), cos[indices_]], 0) def make_steps(self): objs = super(LShapedStaircaseFactory, self).make_steps() - for obj in objs[self.m:]: + for obj in objs[self.m :]: obj.rotation_euler[-1] = np.pi / 2 obj.location = self.m * self.step_length, self.m * self.step_length, 0 butil.apply_transform(obj, loc=True) @@ -82,14 +102,18 @@ def make_steps(self): platform = new_cube(location=(1, 1, 1)) butil.apply_transform(platform, loc=True) platform.location = 0, self.step_length * self.m, lowest - platform.scale = self.step_width / 2, self.step_width / 2, (self.step_height * self.m - lowest) / 2 + platform.scale = ( + self.step_width / 2, + self.step_width / 2, + (self.step_height * self.m - lowest) / 2, + ) butil.apply_transform(platform, loc=True) - write_attribute(platform, 1, 'steps', 'FACE') + write_attribute(platform, 1, "steps", "FACE") return objs + [platform] def make_treads(self): objs = super(LShapedStaircaseFactory, self).make_treads() - for obj in objs[self.m:]: + for obj in objs[self.m :]: obj.rotation_euler[-1] = np.pi / 2 obj.location = self.m * self.step_length, self.m * self.step_length, 0 butil.apply_transform(obj, loc=True) @@ -98,12 +122,12 @@ def make_treads(self): platform.location = 0, self.step_length * self.m, self.step_height * self.m platform.scale = self.step_width / 2, self.step_width / 2, self.tread_height / 2 butil.apply_transform(platform, loc=True) - write_attribute(platform, 1, 'treads', 'FACE') + write_attribute(platform, 1, "treads", "FACE") return objs + [platform] def make_inner_sides(self): objs = super(LShapedStaircaseFactory, self).make_inner_sides() - for obj in objs[self.m:]: + for obj in objs[self.m :]: obj.rotation_euler[-1] = np.pi / 2 obj.location = self.m * self.step_length, self.m * self.step_length, 0 butil.apply_transform(obj, loc=True) @@ -112,32 +136,37 @@ def make_inner_sides(self): butil.apply_transform(top_cutter, loc=True) top_cutter.scale = [100] * 3 top_cutter.location[-1] = self.m * self.step_height + self.tread_height - for obj in objs[:self.m]: - butil.modify_mesh(obj, 'BOOLEAN', object=top_cutter, operation='DIFFERENCE') + for obj in objs[: self.m]: + butil.modify_mesh(obj, "BOOLEAN", object=top_cutter, operation="DIFFERENCE") butil.delete(top_cutter) return objs def make_outer_sides(self): objs = self.make_inner_sides() - for obj in objs[:self.m]: + for obj in objs[: self.m]: obj.location[0] += self.step_width butil.apply_transform(obj, loc=True) - for obj in objs[self.m:]: + for obj in objs[self.m :]: obj.location[1] += self.step_width butil.apply_transform(obj, loc=True) platform = new_line(2) x = self.step_width, self.step_width, 0 - y = self.m * self.step_length, self.m * self.step_length + self.step_width, self.m * self.step_length \ - + 
self.step_width + y = ( + self.m * self.step_length, + self.m * self.step_length + self.step_width, + self.m * self.step_length + self.step_width, + ) z = [self.m * self.step_height] * 3 write_co(platform, np.stack([x, y, z], -1)) butil.select_none() - with butil.ViewportMode(platform, 'EDIT'): - bpy.ops.mesh.select_mode(type='EDGE') - bpy.ops.mesh.select_all(action='SELECT') - bpy.ops.mesh.extrude_edges_move(TRANSFORM_OT_translate={'value': (0, 0, -self.side_height)}) - butil.modify_mesh(platform, 'SOLIDIFY', thickness=self.side_thickness) - write_attribute(platform, 1, 'sides', 'FACE') + with butil.ViewportMode(platform, "EDIT"): + bpy.ops.mesh.select_mode(type="EDGE") + bpy.ops.mesh.select_all(action="SELECT") + bpy.ops.mesh.extrude_edges_move( + TRANSFORM_OT_translate={"value": (0, 0, -self.side_height)} + ) + butil.modify_mesh(platform, "SOLIDIFY", thickness=self.side_thickness) + write_attribute(platform, 1, "sides", "FACE") return objs + [platform] @property diff --git a/infinigen/assets/elements/staircases/spiral.py b/infinigen/assets/objects/elements/staircases/spiral.py similarity index 81% rename from infinigen/assets/elements/staircases/spiral.py rename to infinigen/assets/objects/elements/staircases/spiral.py index 6d5a75c9d..86c00f217 100644 --- a/infinigen/assets/elements/staircases/spiral.py +++ b/infinigen/assets/objects/elements/staircases/spiral.py @@ -2,31 +2,31 @@ # This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory # of this source tree. -# Authors: +# Authors: # - Lingjie Mei # - Karhan Kayan: fix constants import numpy as np from numpy.random import uniform -from infinigen.assets.elements.staircases.curved import CurvedStaircaseFactory +import infinigen.core.util.blender as butil +from infinigen.assets.objects.elements.staircases.curved import CurvedStaircaseFactory from infinigen.assets.utils.decorate import read_co, remove_vertices, write_attribute -from infinigen.core.constraints.example_solver.room import constants -from infinigen.core.util.random import log_uniform from infinigen.assets.utils.nodegroup import geo_radius from infinigen.assets.utils.object import new_line, separate_loose from infinigen.core import surface +from infinigen.core.constraints.example_solver.room import constants from infinigen.core.util.math import FixedSeed -import infinigen.core.util.blender as butil +from infinigen.core.util.random import log_uniform class SpiralStaircaseFactory(CurvedStaircaseFactory): - support_types = 'column' + support_types = "column" def __init__(self, factory_seed, coarse=False): super(SpiralStaircaseFactory, self).__init__(factory_seed, coarse) with FixedSeed(self.factory_seed): - self.column_radius = self.radius - self.step_width + uniform(.05, .08) + self.column_radius = self.radius - self.step_width + uniform(0.05, 0.08) self.has_column = True self.handrail_alphas = [1 - self.handrail_offset / self.step_width] @@ -38,22 +38,24 @@ def build_size_config(self): self.theta = self.full_angle / self.n self.step_length = self.step_height * log_uniform(1, 1.2) self.radius = self.step_length / self.theta - if .9 < self.radius < 1.5: - self.step_width = self.radius * uniform(.9, .95) + if 0.9 < self.radius < 1.5: + self.step_width = self.radius * uniform(0.9, 0.95) break def make_column(self): obj = new_line(self.n, self.step_height * self.n + self.post_height) - obj.rotation_euler[1] = - np.pi / 2 + obj.rotation_euler[1] = -np.pi / 2 butil.apply_transform(obj) - surface.add_geomod(obj, geo_radius, 
apply=True, input_args=[self.column_radius, 16]) - write_attribute(obj, 1, 'steps', 'FACE') + surface.add_geomod( + obj, geo_radius, apply=True, input_args=[self.column_radius, 16] + ) + write_attribute(obj, 1, "steps", "FACE") return obj def unmake_spiral(self, obj): obj = super().unmake_spiral(obj) x, y, z = read_co(obj).T - margin = .1 + margin = 0.1 if (x >= 0).sum() >= (x <= 0).sum(): remove_vertices(obj, lambda x, y, z: x < margin) else: diff --git a/infinigen/assets/elements/staircases/straight.py b/infinigen/assets/objects/elements/staircases/straight.py similarity index 54% rename from infinigen/assets/elements/staircases/straight.py rename to infinigen/assets/objects/elements/staircases/straight.py index 11a204524..aab5836b0 100644 --- a/infinigen/assets/elements/staircases/straight.py +++ b/infinigen/assets/objects/elements/staircases/straight.py @@ -2,49 +2,69 @@ # This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory # of this source tree. -# Authors: +# Authors: # - Lingjie Mei # - Karhan Kayan: fix constants -import bpy import bmesh -import gin +import bpy import numpy as np import shapely from numpy.random import uniform from shapely import LineString, Polygon +from infinigen.assets.materials import fabrics, glass, metal, plaster, wood from infinigen.assets.materials.stone_and_concrete import concrete -from infinigen.assets.utils.mesh import canonicalize_ls, convert2ls -from infinigen.assets.utils.shapes import cut_polygon_by_line -from infinigen.assets.materials import metal, glass, plaster, wood, fabrics from infinigen.assets.utils.decorate import ( - mirror, read_co, remove_faces, remove_vertices, subsurf, - write_attribute, write_co, + mirror, + read_co, + remove_faces, + remove_vertices, + subsurf, + write_attribute, + write_co, ) +from infinigen.assets.utils.mesh import canonicalize_ls, convert2ls from infinigen.assets.utils.nodegroup import geo_radius from infinigen.assets.utils.object import ( - data2mesh, join_objects, mesh2obj, new_circle, new_cube, new_line, + data2mesh, + join_objects, + mesh2obj, + new_circle, + new_cube, + new_line, separate_loose, ) +from infinigen.assets.utils.shapes import cut_polygon_by_line +from infinigen.core import surface +from infinigen.core import tags as t from infinigen.core.constraints.example_solver.room import constants -from infinigen.core.nodes import NodeWrangler, Nodes +from infinigen.core.nodes import Nodes, NodeWrangler from infinigen.core.placement.detail import sharp_remesh_with_attrs from infinigen.core.placement.factory import AssetFactory -from infinigen.core import surface from infinigen.core.surface import read_attr_data, write_attr_data from infinigen.core.tagging import PREFIX from infinigen.core.util import blender as butil from infinigen.core.util.math import FixedSeed, normalize -from infinigen.core.util.random import log_uniform, random_general as rg - -from infinigen.core import tags as t +from infinigen.core.util.random import log_uniform +from infinigen.core.util.random import random_general as rg class StraightStaircaseFactory(AssetFactory): - support_types = 'weighted_choice', (2, 'single-rail'), (2, 'double-rail'), (3, 'side'), (3, 'solid'), ( - 3, 'hole') - handrail_types = 'weighted_choice', (2, 'glass'), (2, 'horizontal-post'), (2, 'vertical-post') + support_types = ( + "weighted_choice", + (2, "single-rail"), + (2, "double-rail"), + (3, "side"), + (3, "solid"), + (3, "hole"), + ) + handrail_types = ( + "weighted_choice", + (2, "glass"), + (2, 
"horizontal-post"), + (2, "vertical-post"), + ) def __init__(self, factory_seed, coarse=False): super(StraightStaircaseFactory, self).__init__(factory_seed, coarse) @@ -53,74 +73,96 @@ def __init__(self, factory_seed, coarse=False): self.n, self.step_height, self.step_width, self.step_length = 0, 0, 0, 0 self.build_size_config() - self.has_step = self.support_type in ['solid', 'hole'] - self.hole_size = log_uniform(.6, 1.) + self.has_step = self.support_type in ["solid", "hole"] + self.hole_size = log_uniform(0.6, 1.0) probs = np.array([3, 2, 2, 2]) - self.step_surface = np.random.choice([wood, plaster, concrete, fabrics], p=probs / probs.sum()) + self.step_surface = np.random.choice( + [wood, plaster, concrete, fabrics], p=probs / probs.sum() + ) - self.has_rail = self.support_type in ['single-rail', 'double-rail'] - self.rail_offset = self.step_width * uniform(.15, .3) - self.is_rail_circular = uniform() < .5 - self.rail_width = log_uniform(.08, .2) - self.rail_height = log_uniform(.08, .12) + self.has_rail = self.support_type in ["single-rail", "double-rail"] + self.rail_offset = self.step_width * uniform(0.15, 0.3) + self.is_rail_circular = uniform() < 0.5 + self.rail_width = log_uniform(0.08, 0.2) + self.rail_height = log_uniform(0.08, 0.12) probs = np.array([3, 2, 2, 1]) - self.rail_surface = np.random.choice([metal, plaster, concrete, fabrics], p=probs / probs.sum()) + self.rail_surface = np.random.choice( + [metal, plaster, concrete, fabrics], p=probs / probs.sum() + ) - self.has_tread = not self.has_step or uniform() < .75 - self.tread_height = uniform(.01, .02) if self.has_step else uniform(.06, .08) - self.tread_length = self.step_length + uniform(.01, .02) - self.tread_width = self.step_width + uniform(.01, .02) if uniform() < .8 else self.step_width + self.has_tread = not self.has_step or uniform() < 0.75 + self.tread_height = ( + uniform(0.01, 0.02) if self.has_step else uniform(0.06, 0.08) + ) + self.tread_length = self.step_length + uniform(0.01, 0.02) + self.tread_width = ( + self.step_width + uniform(0.01, 0.02) + if uniform() < 0.8 + else self.step_width + ) probs = np.array([3, 3, 1]) - self.tread_surface = np.random.choice([wood, metal, glass], p=probs / probs.sum()) + self.tread_surface = np.random.choice( + [wood, metal, glass], p=probs / probs.sum() + ) - self.has_sides = self.support_type in ['side', 'solid', 'hole'] - self.side_type = np.random.choice(['zig-zag', 'straight']) - self.side_height = self.step_height * log_uniform(.2, .8) - self.side_thickness = uniform(.03, .08) + self.has_sides = self.support_type in ["side", "solid", "hole"] + self.side_type = np.random.choice(["zig-zag", "straight"]) + self.side_height = self.step_height * log_uniform(0.2, 0.8) + self.side_thickness = uniform(0.03, 0.08) probs = np.array([3, 3, 1, 2]) - self.side_surface = np.random.choice([wood, metal, plaster, fabrics], p=probs / probs.sum()) + self.side_surface = np.random.choice( + [wood, metal, plaster, fabrics], p=probs / probs.sum() + ) - self.has_column = self.support_type == 'chord' + self.has_column = self.support_type == "chord" self.handrail_type = rg(self.handrail_types) - self.is_handrail_circular = uniform() < .7 - self.handrail_width = log_uniform(.02, .06) - self.handrail_height = log_uniform(.02, .06) + self.is_handrail_circular = uniform() < 0.7 + self.handrail_width = log_uniform(0.02, 0.06) + self.handrail_height = log_uniform(0.02, 0.06) self.handrail_offset = self.handrail_width * log_uniform(1, 2) - self.handrail_extension = uniform(.1, .2) - 
self.handrail_alphas = [self.handrail_offset / self.step_width, - 1 - self.handrail_offset / self.step_width] + self.handrail_extension = uniform(0.1, 0.2) + self.handrail_alphas = [ + self.handrail_offset / self.step_width, + 1 - self.handrail_offset / self.step_width, + ] probs = np.array([3, 2, 3]) - self.handrail_surface = np.random.choice([wood, metal, fabrics], p=probs / probs.sum()) + self.handrail_surface = np.random.choice( + [wood, metal, fabrics], p=probs / probs.sum() + ) - self.post_height = log_uniform(.8, 1.2) + self.post_height = log_uniform(0.8, 1.2) self.post_k = int(np.ceil(self.step_width / self.step_length)) - self.post_width = self.handrail_width * log_uniform(.6, .8) - self.post_minor_width = self.post_width * log_uniform(.3, .5) - self.is_post_circular = uniform() < .5 + self.post_width = self.handrail_width * log_uniform(0.6, 0.8) + self.post_minor_width = self.post_width * log_uniform(0.3, 0.5) + self.is_post_circular = uniform() < 0.5 probs = np.array([3, 3, 2]) - self.post_surface = np.random.choice([wood, metal, fabrics], p=probs / probs.sum()) - self.has_vertical_post = self.handrail_type == 'vertical-post' + self.post_surface = np.random.choice( + [wood, metal, fabrics], p=probs / probs.sum() + ) + self.has_vertical_post = self.handrail_type == "vertical-post" - self.has_bars = self.handrail_type == 'horizontal-post' - self.bar_size = log_uniform(.1, .2) - self.n_bars = int(np.floor(self.post_height / self.bar_size * uniform(.35, .75))) + self.has_bars = self.handrail_type == "horizontal-post" + self.bar_size = log_uniform(0.1, 0.2) + self.n_bars = int( + np.floor(self.post_height / self.bar_size * uniform(0.35, 0.75)) + ) - self.has_glasses = self.handrail_type == 'glass' - self.glass_height = self.post_height - uniform(0, .05) - self.glass_margin = self.step_height / 2 + uniform(0, .05) + self.has_glasses = self.handrail_type == "glass" + self.glass_height = self.post_height - uniform(0, 0.05) + self.glass_margin = self.step_height / 2 + uniform(0, 0.05) self.glass_surface = glass self.has_spiral = False - self.mirror = uniform() < .5 + self.mirror = uniform() < 0.5 self.rot_z = np.random.randint(4) * np.pi / 2 self.end_margin = self.step_length * 8 def build_size_config(self): self.n = np.random.randint(13, 21) self.step_height = constants.WALL_HEIGHT / self.n - self.step_width = uniform(.8, 1.6) - self.step_length = self.step_height * log_uniform(.8, 1.2) + self.step_width = uniform(0.8, 1.6) + self.step_length = self.step_height * log_uniform(0.8, 1.2) def make_line(self, alpha): obj = new_line(self.n) @@ -159,14 +201,20 @@ def make_vertical_post_locs(self, alpha): def split(self, start, end=None): return np.array_split( np.arange(start, end), - np.ceil((start if end is None else end - start) / self.post_k) + np.ceil((start if end is None else end - start) / self.post_k), ) @staticmethod def triangulate(obj): - butil.modify_mesh(obj, 'TRIANGULATE', min_vertices=3) + butil.modify_mesh(obj, "TRIANGULATE", min_vertices=3) levels = 1 - butil.modify_mesh(obj, 'SUBSURF', levels=levels, render_levels=levels, subdivision_type='SIMPLE') + butil.modify_mesh( + obj, + "SUBSURF", + levels=levels, + render_levels=levels, + subdivision_type="SIMPLE", + ) return obj def vertical_cut(self, p): @@ -176,9 +224,17 @@ def vertical_cut(self, p): for p in polygons: coords = p.boundary.coords[:][:-1] part = new_circle(vertices=len(coords)) - with butil.ViewportMode(part, 'EDIT'): + with butil.ViewportMode(part, "EDIT"): bpy.ops.mesh.edge_face_add() - write_co(part, 
np.array(list([0, y * self.step_length, z * self.step_height] for y, z in coords))) + write_co( + part, + np.array( + list( + [0, y * self.step_length, z * self.step_height] + for y, z in coords + ) + ), + ) parts.append(part) return parts @@ -188,42 +244,51 @@ def make_steps(self): coords.extend([(i, i + 1), (i + 1, i + 1)]) coords.extend([(self.n, 0), (0, 0)]) p = Polygon(LineString(coords)) - if self.support_type == 'hole': + if self.support_type == "hole": hole = Polygon( - [((1 - self.hole_size) * self.n, 0), (self.n, self.hole_size * self.n), (self.n, 0), - ((1 - self.hole_size) * self.n, 0)] + [ + ((1 - self.hole_size) * self.n, 0), + (self.n, self.hole_size * self.n), + (self.n, 0), + ((1 - self.hole_size) * self.n, 0), + ] ) p = p.difference(hole) objs = self.vertical_cut(p) for obj in objs: - butil.modify_mesh(obj, 'SOLIDIFY', thickness=self.step_width) + butil.modify_mesh(obj, "SOLIDIFY", thickness=self.step_width) self.triangulate(obj) - write_attribute(obj, 1, 'steps', 'FACE') + write_attribute(obj, 1, "steps", "FACE") return objs def make_rails(self): parts = [] - if self.support_type == 'single-rail': - alphas = [.5] + if self.support_type == "single-rail": + alphas = [0.5] else: - alphas = [self.rail_offset / self.step_width, 1 - self.rail_offset / self.step_width] + alphas = [ + self.rail_offset / self.step_width, + 1 - self.rail_offset / self.step_width, + ] for alpha in alphas: obj = self.make_line(alpha) if self.is_rail_circular: - surface.add_geomod(obj, geo_radius, apply=True, input_args=[self.rail_width, 16]) + surface.add_geomod( + obj, geo_radius, apply=True, input_args=[self.rail_width, 16] + ) obj.location[-1] = -self.rail_width butil.apply_transform(obj, loc=True) else: butil.select_none() - with butil.ViewportMode(obj, 'EDIT'): - bpy.ops.mesh.select_mode(type='EDGE') - bpy.ops.mesh.select_all(action='SELECT') + with butil.ViewportMode(obj, "EDIT"): + bpy.ops.mesh.select_mode(type="EDGE") + bpy.ops.mesh.select_all(action="SELECT") bpy.ops.mesh.extrude_edges_move( - TRANSFORM_OT_translate={'value': (0, 0, -self.rail_height * 2)} + TRANSFORM_OT_translate={"value": (0, 0, -self.rail_height * 2)} ) - butil.modify_mesh(obj, 'SOLIDIFY', thickness=self.rail_width, offset=0) + butil.modify_mesh(obj, "SOLIDIFY", thickness=self.rail_width, offset=0) self.triangulate(obj) - write_attribute(obj, 1, 'rails', 'FACE') + write_attribute(obj, 1, "rails", "FACE") parts.append(obj) return parts @@ -231,11 +296,14 @@ def make_treads(self): tread = new_cube(location=(1, 1, 1)) butil.apply_transform(tread, loc=True) tread.scale = self.tread_width / 2, self.tread_length / 2, self.tread_height / 2 - tread.location = -(self.tread_width - self.step_width) / 2, -( - self.tread_length - self.step_length), self.step_height + tread.location = ( + -(self.tread_width - self.step_width) / 2, + -(self.tread_length - self.step_length), + self.step_height, + ) butil.apply_transform(tread, loc=True) self.triangulate(tread) - write_attribute(tread, 1, 'treads', 'FACE') + write_attribute(tread, 1, "treads", "FACE") treads = [tread] + list(butil.deep_clone_obj(tread) for _ in range(self.n - 1)) for i in range(1, self.n): treads[i].location = 0, self.step_length * i, self.step_height * i @@ -244,15 +312,27 @@ def make_treads(self): def make_inner_sides(self): offset = -self.side_height / self.step_height - if self.side_type == 'zig-zag': + if self.side_type == "zig-zag": coords = [(0, 0)] for i in range(self.n): coords.extend([(i, i + 1), (i + 1, i + 1)]) l = LineString(coords) - p = l.buffer(offset, 
join_style='mitre', single_sided=True, ) + p = l.buffer( + offset, + join_style="mitre", + single_sided=True, + ) else: p = Polygon( - LineString([(0, offset), (0, 1), (self.n, self.n + 1), (self.n, self.n + offset), (0, offset)]) + LineString( + [ + (0, offset), + (0, 1), + (self.n, self.n + 1), + (self.n, self.n + offset), + (0, offset), + ] + ) ) objs = self.vertical_cut(p) @@ -266,10 +346,10 @@ def make_inner_sides(self): top_cutter.location[-1] = self.n * self.step_height + self.tread_height for obj in objs: - butil.modify_mesh(obj, 'SOLIDIFY', thickness=self.side_thickness, offset=0) - write_attribute(obj, 1, 'sides', 'FACE') + butil.modify_mesh(obj, "SOLIDIFY", thickness=self.side_thickness, offset=0) + write_attribute(obj, 1, "sides", "FACE") for cutter in [top_cutter, bottom_cutter]: - butil.modify_mesh(obj, 'BOOLEAN', object=cutter, operation='DIFFERENCE') + butil.modify_mesh(obj, "BOOLEAN", object=cutter, operation="DIFFERENCE") butil.delete([top_cutter, bottom_cutter]) return objs @@ -295,27 +375,35 @@ def make_single_handrail(self, obj): self.extend_line(obj, self.handrail_extension) if self.is_handrail_circular: surface.add_geomod( - obj, geo_radius, apply=True, input_args=[self.handrail_width, 32], - input_kwargs={'to_align_tilt': False} + obj, + geo_radius, + apply=True, + input_args=[self.handrail_width, 32], + input_kwargs={"to_align_tilt": False}, ) else: butil.select_none() - with butil.ViewportMode(obj, 'EDIT'): - bpy.ops.mesh.select_mode(type='EDGE') - bpy.ops.mesh.select_all(action='SELECT') + with butil.ViewportMode(obj, "EDIT"): + bpy.ops.mesh.select_mode(type="EDGE") + bpy.ops.mesh.select_all(action="SELECT") bpy.ops.mesh.extrude_edges_move( - TRANSFORM_OT_translate={'value': (0, 0, -self.handrail_height * 2)} + TRANSFORM_OT_translate={"value": (0, 0, -self.handrail_height * 2)} ) butil.modify_mesh( - obj, 'SOLIDIFY', thickness=self.handrail_width * 2, offset=0, - solidify_mode='NON_MANIFOLD' + obj, + "SOLIDIFY", + thickness=self.handrail_width * 2, + offset=0, + solidify_mode="NON_MANIFOLD", ) butil.modify_mesh( - obj, 'BEVEL', width=self.handrail_width * uniform(.2, .5), - segments=np.random.randint(4, 7) + obj, + "BEVEL", + width=self.handrail_width * uniform(0.2, 0.5), + segments=np.random.randint(4, 7), ) obj.location[-1] += self.handrail_height - write_attribute(obj, 1, 'handrails', 'FACE') + write_attribute(obj, 1, "handrails", "FACE") obj.location[-1] += self.post_height butil.apply_transform(obj, loc=True) self.triangulate(obj) @@ -324,7 +412,7 @@ def make_single_handrail(self, obj): def extend_line(obj, extension): if len(obj.data.vertices) <= 1: return - with butil.ViewportMode(obj, 'EDIT'): + with butil.ViewportMode(obj, "EDIT"): bm = bmesh.from_edit_mesh(obj.data) bm.verts.ensure_lookup_table() v0, v1, v2, v3 = bm.verts[0], bm.verts[1], bm.verts[-1], bm.verts[-2] @@ -345,28 +433,36 @@ def make_posts(self, locs, widths): existing = np.concatenate([existing, loc[:1]], 0) cos = [0] for i, l in enumerate(loc): - if i > 0 and np.min( - np.linalg.norm(existing - l[np.newaxis, :], axis=1) - ) > self.handrail_width * 2: + if ( + i > 0 + and np.min(np.linalg.norm(existing - l[np.newaxis, :], axis=1)) + > self.handrail_width * 2 + ): cos.append(i) - existing = np.concatenate([existing, loc[i:i + 1]], 0) + existing = np.concatenate([existing, loc[i : i + 1]], 0) obj = mesh2obj(data2mesh(loc[cos])) - with butil.ViewportMode(obj, 'EDIT'): - bpy.ops.mesh.select_all(action='SELECT') - bpy.ops.mesh.extrude_vertices_move(TRANSFORM_OT_translate={'value': (0, 0, 
self.post_height)}) + with butil.ViewportMode(obj, "EDIT"): + bpy.ops.mesh.select_all(action="SELECT") + bpy.ops.mesh.extrude_vertices_move( + TRANSFORM_OT_translate={"value": (0, 0, self.post_height)} + ) if self.is_post_circular: surface.add_geomod(obj, geo_radius, apply=True, input_args=[width, 32]) else: - with butil.ViewportMode(obj, 'EDIT'): - bpy.ops.mesh.select_mode(type='EDGE') - bpy.ops.mesh.select_all(action='SELECT') - bpy.ops.mesh.extrude_edges_move(TRANSFORM_OT_translate={'value': (width * 2, 0, 0)}) - bpy.ops.mesh.select_mode(type='FACE') - bpy.ops.mesh.select_all(action='SELECT') - bpy.ops.mesh.extrude_region_move(TRANSFORM_OT_translate={'value': (0, width * 2, 0)}) + with butil.ViewportMode(obj, "EDIT"): + bpy.ops.mesh.select_mode(type="EDGE") + bpy.ops.mesh.select_all(action="SELECT") + bpy.ops.mesh.extrude_edges_move( + TRANSFORM_OT_translate={"value": (width * 2, 0, 0)} + ) + bpy.ops.mesh.select_mode(type="FACE") + bpy.ops.mesh.select_all(action="SELECT") + bpy.ops.mesh.extrude_region_move( + TRANSFORM_OT_translate={"value": (0, width * 2, 0)} + ) obj.location = -width, -width, 0 butil.apply_transform(obj, loc=True) - write_attribute(obj, 1, 'posts', 'FACE') + write_attribute(obj, 1, "posts", "FACE") parts.append(obj) return parts @@ -378,10 +474,12 @@ def make_bars(self, locs): obj = new_line() write_co(obj, np.stack([loc, loc_])) subsurf(obj, 4) - surface.add_geomod(obj, geo_radius, apply=True, input_args=[self.post_minor_width]) + surface.add_geomod( + obj, geo_radius, apply=True, input_args=[self.post_minor_width] + ) obj.location[-1] += self.post_height - (i + 1) * self.bar_size butil.apply_transform(obj, loc=True) - write_attribute(obj, 1, 'posts', 'FACE') + write_attribute(obj, 1, "posts", "FACE") parts.append(obj) return parts @@ -391,16 +489,18 @@ def make_glasses(self, locs): for loc, loc_ in zip(loc[:-1], loc[1:]): obj = new_line() write_co(obj, np.stack([loc, loc_])) - with butil.ViewportMode(obj, 'EDIT'): - bpy.ops.mesh.select_mode(type='EDGE') - bpy.ops.mesh.select_all(action='SELECT') + with butil.ViewportMode(obj, "EDIT"): + bpy.ops.mesh.select_mode(type="EDGE") + bpy.ops.mesh.select_all(action="SELECT") bpy.ops.mesh.extrude_edges_move( - TRANSFORM_OT_translate={'value': (0, 0, self.glass_height - self.glass_margin)} + TRANSFORM_OT_translate={ + "value": (0, 0, self.glass_height - self.glass_margin) + } ) - butil.modify_mesh(obj, 'SOLIDIFY', thickness=self.post_minor_width) + butil.modify_mesh(obj, "SOLIDIFY", thickness=self.post_minor_width) obj.location[-1] += self.glass_margin butil.apply_transform(obj, loc=True) - write_attribute(obj, 1, 'glasses', 'FACE') + write_attribute(obj, 1, "glasses", "FACE") parts.append(obj) return parts @@ -411,14 +511,11 @@ def unmake_spiral(self, obj): return obj def create_placeholder(self, **kwargs) -> bpy.types.Object: - obj = self.make_line_offset(.5) + obj = self.make_line_offset(0.5) if self.has_spiral: self.make_spiral(obj) self.extend_line(obj, self.end_margin) - self.decorate_line( - obj, constants.WALL_THICKNESS / 2, - constants.DOOR_SIZE - ) + self.decorate_line(obj, constants.WALL_THICKNESS / 2, constants.DOOR_SIZE) if self.mirror: mirror(obj) obj.rotation_euler[-1] = self.rot_z @@ -426,7 +523,7 @@ def create_placeholder(self, **kwargs) -> bpy.types.Object: return obj def create_cutter(self, **kwargs) -> bpy.types.Object: - obj = self.make_line_offset(.5) + obj = self.make_line_offset(0.5) if self.has_spiral: self.make_spiral(obj) self.decorate_line(obj, 0, constants.DOOR_SIZE) @@ -451,13 +548,14 @@ def 
create_asset(self, **params) -> bpy.types.Object: parts.extend(self.make_handrails()) post_locs = list(self.make_post_locs(alpha) for alpha in self.handrail_alphas) if self.has_vertical_post: - vertical_post_locs = list(self.make_vertical_post_locs(alpha) for alpha in self.handrail_alphas) + vertical_post_locs = list( + self.make_vertical_post_locs(alpha) for alpha in self.handrail_alphas + ) parts.extend( self.make_posts( post_locs + vertical_post_locs, - [self.post_width] * len(post_locs) + [self.post_minor_width] * len( - vertical_post_locs - ) + [self.post_width] * len(post_locs) + + [self.post_minor_width] * len(vertical_post_locs), ) ) else: @@ -480,73 +578,90 @@ def create_asset(self, **params) -> bpy.types.Object: def decorate_line(self, line, low, high): end = np.zeros(len(line.data.vertices)) end[[0, -1]] = 1 - write_attr_data(line, 'end', end) - with butil.ViewportMode(line, 'EDIT'): - bpy.ops.mesh.select_all(action='SELECT') + write_attr_data(line, "end", end) + with butil.ViewportMode(line, "EDIT"): + bpy.ops.mesh.select_all(action="SELECT") bpy.ops.mesh.extrude_edges_move( - TRANSFORM_OT_translate={ - 'value': (0, 0, high - low) - } + TRANSFORM_OT_translate={"value": (0, 0, high - low)} ) bpy.ops.mesh.normals_make_consistent(inside=False) line.location[-1] -= low - butil.modify_mesh(line, 'SOLIDIFY', thickness=self.step_width, offset=0, use_even_offset=True) + butil.modify_mesh( + line, "SOLIDIFY", thickness=self.step_width, offset=0, use_even_offset=True + ) self.triangulate(line) line.location[-1] -= constants.WALL_THICKNESS / 2 butil.apply_transform(line, True) write_attribute( - line, lambda nw: nw.compare('LESS_THAN', surface.eval_argument(nw, 'end'), .99), - f'staircase_wall', 'FACE', 'INT' + line, + lambda nw: nw.compare("LESS_THAN", surface.eval_argument(nw, "end"), 0.99), + "staircase_wall", + "FACE", + "INT", ) - sharp_remesh_with_attrs(line, .05) + sharp_remesh_with_attrs(line, 0.05) zeros = np.zeros(len(line.data.polygons), dtype=int) ones = np.ones(len(line.data.polygons), dtype=int) - write_attr_data(line, f'{PREFIX}{t.Subpart.Ceiling.value}', zeros, 'INT', 'FACE') - write_attr_data(line, f'{PREFIX}{t.Subpart.SupportSurface.value}', zeros, 'INT', 'FACE') - write_attr_data(line, f'{PREFIX}{t.Subpart.Wall.value}', ones, 'INT', 'FACE') - write_attr_data(line, f'{PREFIX}{t.Subpart.Visible.value}', ones, 'INT', 'FACE') - with butil.ViewportMode(line, 'EDIT'): - bpy.ops.mesh.select_all(action='SELECT') + write_attr_data( + line, f"{PREFIX}{t.Subpart.Ceiling.value}", zeros, "INT", "FACE" + ) + write_attr_data( + line, f"{PREFIX}{t.Subpart.SupportSurface.value}", zeros, "INT", "FACE" + ) + write_attr_data(line, f"{PREFIX}{t.Subpart.Wall.value}", ones, "INT", "FACE") + write_attr_data(line, f"{PREFIX}{t.Subpart.Visible.value}", ones, "INT", "FACE") + with butil.ViewportMode(line, "EDIT"): + bpy.ops.mesh.select_all(action="SELECT") bpy.ops.mesh.normals_make_consistent(inside=False) def finalize_assets(self, assets): if self.has_step: - self.step_surface.apply(assets, selection='steps', metal_color='bw+natural') + self.step_surface.apply(assets, selection="steps", metal_color="bw+natural") if self.has_tread: - self.tread_surface.apply(assets, selection='treads', metal_color='bw+natural') + self.tread_surface.apply( + assets, selection="treads", metal_color="bw+natural" + ) if self.has_rail: - self.rail_surface.apply(assets, selection='rails') + self.rail_surface.apply(assets, selection="rails") if self.has_sides: - self.side_surface.apply(assets, selection='sides') - 
self.handrail_surface.apply(assets, selection='handrails') - self.post_surface.apply(assets, selection='posts') + self.side_surface.apply(assets, selection="sides") + self.handrail_surface.apply(assets, selection="handrails") + self.post_surface.apply(assets, selection="posts") if self.has_glasses: - self.glass_surface.apply(assets, selection='glasses') + self.glass_surface.apply(assets, selection="glasses") def make_guardrail(self, mesh): def geo_extrude(nw: NodeWrangler): - geometry = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketGeometry', 'Geometry', None)]) + geometry = nw.new_node( + Nodes.GroupInput, + expose_input=[("NodeSocketGeometry", "Geometry", None)], + ) x, y, _ = nw.separate(nw.new_node(Nodes.InputNormal)) - offset = nw.scale(-self.handrail_offset, nw.vector_math('NORMALIZE', nw.combine(x, y, 0))) - geometry = nw.new_node(Nodes.SetPosition, [geometry], input_kwargs={'Offset': offset}) - nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': geometry}) + offset = nw.scale( + -self.handrail_offset, nw.vector_math("NORMALIZE", nw.combine(x, y, 0)) + ) + geometry = nw.new_node( + Nodes.SetPosition, [geometry], input_kwargs={"Offset": offset} + ) + nw.new_node(Nodes.GroupOutput, input_kwargs={"Geometry": geometry}) self.unmake_spiral(mesh) - with butil.ViewportMode(mesh, 'EDIT'): - bpy.ops.mesh.select_all(action='SELECT') + with butil.ViewportMode(mesh, "EDIT"): + bpy.ops.mesh.select_all(action="SELECT") bpy.ops.mesh.normals_make_consistent(inside=False) surface.add_geomod(mesh, geo_extrude, apply=True) - remove_faces(mesh, read_attr_data(mesh, 'staircase_wall') == 0) - with butil.ViewportMode(mesh, 'EDIT'): - bpy.ops.mesh.select_all(action='SELECT') + remove_faces(mesh, read_attr_data(mesh, "staircase_wall") == 0) + with butil.ViewportMode(mesh, "EDIT"): + bpy.ops.mesh.select_all(action="SELECT") bpy.ops.mesh.region_to_loop() - bpy.ops.mesh.select_all(action='INVERT') - bpy.ops.mesh.delete(type='EDGE') + bpy.ops.mesh.select_all(action="INVERT") + bpy.ops.mesh.delete(type="EDGE") remove_vertices( - mesh, lambda x, y, z: (z < constants.WALL_THICKNESS / 4) | ( - z > constants.WALL_THICKNESS * 3 / 4) + mesh, + lambda x, y, z: (z < constants.WALL_THICKNESS / 4) + | (z > constants.WALL_THICKNESS * 3 / 4), ) - butil.modify_mesh(mesh, 'WELD', merge_threshold=constants.WALL_THICKNESS / 4) + butil.modify_mesh(mesh, "WELD", merge_threshold=constants.WALL_THICKNESS / 4) name = mesh.name mesh = separate_loose(mesh) ls = shapely.force_2d(convert2ls(mesh)) @@ -566,7 +681,8 @@ def geo_extrude(nw: NodeWrangler): parts.extend( self.make_posts( locs + minor_locs, - [self.post_width] * len(locs) + [self.post_minor_width] * len(minor_locs) + [self.post_width] * len(locs) + + [self.post_minor_width] * len(minor_locs), ) ) if self.has_bars: @@ -576,16 +692,16 @@ def geo_extrude(nw: NodeWrangler): butil.select_none() obj = join_objects(parts) self.make_spiral(obj) - self.handrail_surface.apply(obj, selection='handrails') - self.post_surface.apply(obj, selection='posts') + self.handrail_surface.apply(obj, selection="handrails") + self.post_surface.apply(obj, selection="posts") if self.has_glasses: - self.glass_surface.apply(obj, selection='glasses') + self.glass_surface.apply(obj, selection="glasses") obj.name = name return obj @property def lower(self): - return - np.pi / 2 + return -np.pi / 2 @property def upper(self): @@ -599,6 +715,9 @@ def valid_contour(self, offset, contour, doors, lower=True): t = self.lower if lower else self.upper t = (np.pi - t if self.mirror else t) + self.rot_z 
v = np.array([np.cos(t), np.sin(t)]) - if normalize(np.array([door.location[0] - x, door.location[1] - y])) @ v >= -.5: + if ( + normalize(np.array([door.location[0] - x, door.location[1] - y])) @ v + >= -0.5 + ): return True return False diff --git a/infinigen/assets/elements/staircases/u_shaped.py b/infinigen/assets/objects/elements/staircases/u_shaped.py similarity index 69% rename from infinigen/assets/elements/staircases/u_shaped.py rename to infinigen/assets/objects/elements/staircases/u_shaped.py index 7c0aeebf1..5c126c713 100644 --- a/infinigen/assets/elements/staircases/u_shaped.py +++ b/infinigen/assets/objects/elements/staircases/u_shaped.py @@ -2,20 +2,21 @@ # This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory # of this source tree. -# Authors: +# Authors: # - Lingjie Mei # - Karhan Kayan: fix constants import bpy import numpy as np -from infinigen.core.constraints.example_solver.room import constants -from .straight import StraightStaircaseFactory +import infinigen.core.util.blender as butil from infinigen.assets.utils.decorate import read_co, write_attribute, write_co from infinigen.assets.utils.object import new_cube, new_line +from infinigen.core.constraints.example_solver.room import constants from infinigen.core.util.math import FixedSeed from infinigen.core.util.random import log_uniform -import infinigen.core.util.blender as butil + +from .straight import StraightStaircaseFactory class UShapedStaircaseFactory(StraightStaircaseFactory): @@ -28,35 +29,44 @@ def __init__(self, factory_seed, coarse=False): def build_size_config(self): self.n = int(np.random.randint(13, 21) / 2) * 2 self.step_height = constants.WALL_HEIGHT / self.n - self.step_width = log_uniform(.9, 1.5) + self.step_width = log_uniform(0.9, 1.5) self.step_length = self.step_height * log_uniform(1, 1.2) def make_line(self, alpha): obj = new_line(self.n + 4) x = np.concatenate( - [np.full(self.m + 2, alpha * self.step_width), [0], np.full(self.m + 2, -alpha * self.step_width)] + [ + np.full(self.m + 2, alpha * self.step_width), + [0], + np.full(self.m + 2, -alpha * self.step_width), + ] ) y = np.concatenate( - [np.arange(self.m + 1) * self.step_length, + [ + np.arange(self.m + 1) * self.step_length, [self.m * self.step_length + alpha * self.step_width] * 3, - np.arange(self.m, -1, -1) * self.step_length] + np.arange(self.m, -1, -1) * self.step_length, + ] + ) + z = ( + np.concatenate( + [np.arange(self.m + 1), [self.m] * 3, np.arange(self.m, self.n + 1)] + ) + * self.step_height ) - z = np.concatenate( - [np.arange(self.m + 1), [self.m] * 3, np.arange(self.m, self.n + 1)] - ) * self.step_height write_co(obj, np.stack([x, y, z], -1)) return obj def make_line_offset(self, alpha): obj = self.make_line(alpha) co = read_co(obj) - co[self.m:self.m + 4] = co[self.m + 1:self.m + 5] + co[self.m : self.m + 4] = co[self.m + 1 : self.m + 5] x, y, z = co.T - y[:self.m] += self.step_length / 2 + y[: self.m] += self.step_length / 2 y[self.m + 3] += min(self.step_length / 2, alpha * self.step_width) - y[self.m + 4:] -= self.step_length / 2 + y[self.m + 4 :] -= self.step_length / 2 z += self.step_height - z[[self.m, self.m + 1, self.m + 2, self.m + 3, - 1]] -= self.step_height + z[[self.m, self.m + 1, self.m + 2, self.m + 3, -1]] -= self.step_height write_co(obj, np.stack([x, y, z], -1)) return obj @@ -67,7 +77,12 @@ def make_post_locs(self, alpha): chunks = self.split(self.m - 1) chunks_ = self.split(self.m + 3, self.n + 4) mid = [self.m - 1, self.m, self.m + 1, self.m 
+ 2, self.m + 3] - indices = list(c[0] for c in chunks) + mid + list(c[0] for c in chunks_) + [self.n + 3, self.n + 4] + indices = ( + list(c[0] for c in chunks) + + mid + + list(c[0] for c in chunks_) + + [self.n + 3, self.n + 4] + ) return cos[indices] def make_vertical_post_locs(self, alpha): @@ -75,19 +90,23 @@ def make_vertical_post_locs(self, alpha): cos = read_co(temp) butil.delete(temp) chunks = self.split(self.m - 1) - chunks_ = np.array_split(np.arange(self.m + 3, self.n + 4), np.ceil((self.n - self.m) / self.post_k)) + chunks_ = np.array_split( + np.arange(self.m + 3, self.n + 4), np.ceil((self.n - self.m) / self.post_k) + ) indices = sum(list(c[1:].tolist() for c in chunks + chunks_), []) indices_ = sum(list(c[1:].tolist() for c in chunks_), []) mid_cos = [] mid = [self.m - 1, self.m, self.m + 1, self.m + 2] for m in mid: - for r in np.linspace(0, 1, self.post_k + 1 if m >= self.m else self.post_k + 2)[1:-1]: + for r in np.linspace( + 0, 1, self.post_k + 1 if m >= self.m else self.post_k + 2 + )[1:-1]: mid_cos.append(r * cos[m] + (1 - r) * cos[m + 1]) return np.concatenate([cos[indices], np.stack(mid_cos), cos[indices_]], 0) def make_steps(self): objs = super(UShapedStaircaseFactory, self).make_steps() - for obj in objs[self.m:]: + for obj in objs[self.m :]: obj.rotation_euler[-1] = np.pi obj.location = 0, 2 * self.m * self.step_length, 0 butil.apply_transform(obj, loc=True) @@ -95,14 +114,18 @@ def make_steps(self): platform = new_cube(location=(0, 1, 1)) butil.apply_transform(platform, loc=True) platform.location = 0, self.step_length * self.m, lowest - platform.scale = self.step_width, self.step_width / 2, (self.step_height * self.m - lowest) / 2 + platform.scale = ( + self.step_width, + self.step_width / 2, + (self.step_height * self.m - lowest) / 2, + ) butil.apply_transform(platform, loc=True) - write_attribute(platform, 1, 'steps', 'FACE') + write_attribute(platform, 1, "steps", "FACE") return objs + [platform] def make_treads(self): objs = super(UShapedStaircaseFactory, self).make_treads() - for obj in objs[self.m:]: + for obj in objs[self.m :]: obj.rotation_euler[-1] = np.pi obj.location = 0, 2 * self.m * self.step_length, 0 butil.apply_transform(obj, loc=True) @@ -111,12 +134,12 @@ def make_treads(self): platform.location = 0, self.step_length * self.m, self.step_height * self.m platform.scale = self.step_width, self.step_width / 2, self.tread_height / 2 butil.apply_transform(platform, loc=True) - write_attribute(platform, 1, 'treads', 'FACE') + write_attribute(platform, 1, "treads", "FACE") return objs + [platform] def make_inner_sides(self): objs = super(UShapedStaircaseFactory, self).make_inner_sides() - for obj in objs[self.m:]: + for obj in objs[self.m :]: obj.rotation_euler[-1] = np.pi obj.location = 0, 2 * self.m * self.step_length, 0 butil.apply_transform(obj, loc=True) @@ -125,17 +148,17 @@ def make_inner_sides(self): butil.apply_transform(top_cutter, loc=True) top_cutter.scale = [100] * 3 top_cutter.location[-1] = self.m * self.step_height + self.tread_height - for obj in objs[:self.m]: - butil.modify_mesh(obj, 'BOOLEAN', object=top_cutter, operation='DIFFERENCE') + for obj in objs[: self.m]: + butil.modify_mesh(obj, "BOOLEAN", object=top_cutter, operation="DIFFERENCE") butil.delete(top_cutter) return objs def make_outer_sides(self): objs = self.make_inner_sides() - for obj in objs[:self.m]: + for obj in objs[: self.m]: obj.location[0] += self.step_width butil.apply_transform(obj, loc=True) - for obj in objs[self.m:]: + for obj in objs[self.m :]: 
obj.location[0] -= self.step_width butil.apply_transform(obj, loc=True) platform = new_line(4) @@ -145,12 +168,14 @@ def make_outer_sides(self): z = [self.m * self.step_height] * 5 write_co(platform, np.stack([x, y, z], -1)) butil.select_none() - with butil.ViewportMode(platform, 'EDIT'): - bpy.ops.mesh.select_mode(type='EDGE') - bpy.ops.mesh.select_all(action='SELECT') - bpy.ops.mesh.extrude_edges_move(TRANSFORM_OT_translate={'value': (0, 0, -self.side_height)}) - butil.modify_mesh(platform, 'SOLIDIFY', thickness=self.side_thickness) - write_attribute(platform, 1, 'sides', 'FACE') + with butil.ViewportMode(platform, "EDIT"): + bpy.ops.mesh.select_mode(type="EDGE") + bpy.ops.mesh.select_all(action="SELECT") + bpy.ops.mesh.extrude_edges_move( + TRANSFORM_OT_translate={"value": (0, 0, -self.side_height)} + ) + butil.modify_mesh(platform, "SOLIDIFY", thickness=self.side_thickness) + write_attribute(platform, 1, "sides", "FACE") return objs + [platform] @property diff --git a/infinigen/assets/elements/warehouses/__init__.py b/infinigen/assets/objects/elements/warehouses/__init__.py similarity index 99% rename from infinigen/assets/elements/warehouses/__init__.py rename to infinigen/assets/objects/elements/warehouses/__init__.py index ad3fe7df2..cc3b176fa 100644 --- a/infinigen/assets/elements/warehouses/__init__.py +++ b/infinigen/assets/objects/elements/warehouses/__init__.py @@ -2,5 +2,6 @@ # This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. # Authors: Lingjie Mei -from .rack import RackFactory + from .pallet import PalletFactory +from .rack import RackFactory diff --git a/infinigen/assets/elements/warehouses/pallet.py b/infinigen/assets/objects/elements/warehouses/pallet.py similarity index 54% rename from infinigen/assets/elements/warehouses/pallet.py rename to infinigen/assets/objects/elements/warehouses/pallet.py index 754ab60d1..5bcf54df0 100644 --- a/infinigen/assets/elements/warehouses/pallet.py +++ b/infinigen/assets/objects/elements/warehouses/pallet.py @@ -9,12 +9,12 @@ from infinigen.assets.materials import wood from infinigen.assets.utils.decorate import read_normal from infinigen.assets.utils.object import join_objects, new_bbox, new_cube +from infinigen.core import tags as t from infinigen.core.placement.factory import AssetFactory from infinigen.core.surface import write_attr_data from infinigen.core.tagging import PREFIX from infinigen.core.util import blender as butil from infinigen.core.util.blender import deep_clone_obj -from infinigen.core import tagging, tags as t class PalletFactory(AssetFactory): @@ -22,16 +22,21 @@ def __init__(self, factory_seed, coarse=False): super(PalletFactory, self).__init__(factory_seed, coarse) self.depth = uniform(1.2, 1.4) self.width = uniform(1.2, 1.4) - self.thickness = uniform(.01, .015) - self.tile_width = uniform(.06, .1) + self.thickness = uniform(0.01, 0.015) + self.tile_width = uniform(0.06, 0.1) self.tile_slackness = uniform(1.5, 2) - self.height = uniform(.2, .25) + self.height = uniform(0.2, 0.25) self.surface = wood def create_placeholder(self, **kwargs) -> bpy.types.Object: bbox = new_bbox(0, self.width, 0, self.depth, 0, self.height) - write_attr_data(bbox, f'{PREFIX}{t.Subpart.SupportSurface.value}', read_normal(bbox)[:, -1] > .5, 'INT', - 'FACE') + write_attr_data( + bbox, + f"{PREFIX}{t.Subpart.SupportSurface.value}", + read_normal(bbox)[:, -1] > 0.5, + "INT", + "FACE", + ) return bbox def create_asset(self, **params) -> bpy.types.Object: @@ 
-53,10 +58,25 @@ def make_vertical(self): butil.apply_transform(obj, True) obj.scale = self.tile_width / 2, self.depth / 2, self.thickness / 2 butil.apply_transform(obj) - count = int(np.floor((self.width - self.tile_width) / self.tile_width / self.tile_slackness) / 2) * 2 - butil.modify_mesh(obj, 'ARRAY', use_relative_offset=False, use_constant_offset=True, - constant_offset_displace=((self.width - self.tile_width) / count, 0, 0), - count=count + 1) + count = ( + int( + np.floor( + (self.width - self.tile_width) + / self.tile_width + / self.tile_slackness + ) + / 2 + ) + * 2 + ) + butil.modify_mesh( + obj, + "ARRAY", + use_relative_offset=False, + use_constant_offset=True, + constant_offset_displace=((self.width - self.tile_width) / count, 0, 0), + count=count + 1, + ) return obj def make_horizontal(self): @@ -65,22 +85,53 @@ def make_horizontal(self): butil.apply_transform(obj, True) obj.scale = self.width / 2, self.tile_width / 2, self.thickness / 2 butil.apply_transform(obj) - count = int(np.floor((self.depth - self.tile_width) / self.tile_width / self.tile_slackness) / 2) * 2 - butil.modify_mesh(obj, 'ARRAY', use_relative_offset=False, use_constant_offset=True, - constant_offset_displace=(0, (self.depth - self.tile_width) / count, 0), - count=count + 1) + count = ( + int( + np.floor( + (self.depth - self.tile_width) + / self.tile_width + / self.tile_slackness + ) + / 2 + ) + * 2 + ) + butil.modify_mesh( + obj, + "ARRAY", + use_relative_offset=False, + use_constant_offset=True, + constant_offset_displace=(0, (self.depth - self.tile_width) / count, 0), + count=count + 1, + ) return obj def make_support(self): obj = new_cube() obj.location = 1, 1, 1 butil.apply_transform(obj, True) - obj.scale = self.tile_width / 2, self.tile_width / 2, self.height / 2 - 2 * self.thickness + obj.scale = ( + self.tile_width / 2, + self.tile_width / 2, + self.height / 2 - 2 * self.thickness, + ) butil.apply_transform(obj) - butil.modify_mesh(obj, 'ARRAY', use_relative_offset=False, use_constant_offset=True, - constant_offset_displace=((self.width - self.tile_width) / 2, 0, 0), count=3) - butil.modify_mesh(obj, 'ARRAY', use_relative_offset=False, use_constant_offset=True, - constant_offset_displace=(0, (self.depth - self.tile_width) / 2, 0), count=3) + butil.modify_mesh( + obj, + "ARRAY", + use_relative_offset=False, + use_constant_offset=True, + constant_offset_displace=((self.width - self.tile_width) / 2, 0, 0), + count=3, + ) + butil.modify_mesh( + obj, + "ARRAY", + use_relative_offset=False, + use_constant_offset=True, + constant_offset_displace=(0, (self.depth - self.tile_width) / 2, 0), + count=3, + ) return obj def finalize_assets(self, assets): diff --git a/infinigen/assets/elements/warehouses/rack.py b/infinigen/assets/objects/elements/warehouses/rack.py similarity index 66% rename from infinigen/assets/elements/warehouses/rack.py rename to infinigen/assets/objects/elements/warehouses/rack.py index 7a7dd2023..6bcf5d11b 100644 --- a/infinigen/assets/elements/warehouses/rack.py +++ b/infinigen/assets/objects/elements/warehouses/rack.py @@ -6,22 +6,31 @@ import numpy as np from numpy.random import uniform -from infinigen.assets.elements.warehouses.pallet import PalletFactory from infinigen.assets.materials import metal -from infinigen.assets.materials.metal import galvanized_metal -from infinigen.assets.utils.decorate import read_co, remove_faces, solidify, write_attribute, write_co +from infinigen.assets.objects.elements.warehouses.pallet import PalletFactory +from infinigen.assets.utils.decorate 
import ( + read_co, + remove_faces, + solidify, + write_attribute, + write_co, +) from infinigen.assets.utils.nodegroup import geo_radius from infinigen.assets.utils.object import ( - join_objects, new_base_cylinder, new_bbox, new_cube, new_line, + join_objects, + new_base_cylinder, + new_bbox, + new_cube, + new_line, new_plane, ) from infinigen.core import surface +from infinigen.core import tags as t from infinigen.core.placement.factory import AssetFactory from infinigen.core.surface import write_attr_data from infinigen.core.tagging import PREFIX from infinigen.core.util import blender as butil from infinigen.core.util.blender import deep_clone_obj -from infinigen.core import tagging, tags as t from infinigen.core.util.math import FixedSeed @@ -30,24 +39,28 @@ def __init__(self, factory_seed, coarse=False): super(RackFactory, self).__init__(factory_seed, coarse) with FixedSeed(factory_seed): self.depth = uniform(1, 1.2) - self.width = uniform(4., 5.) + self.width = uniform(4.0, 5.0) self.height = uniform(1.6, 1.8) self.steps = np.random.randint(3, 6) - self.thickness = uniform(.06, .08) - self.hole_radius = self.thickness / 2 * uniform(.5, .6) + self.thickness = uniform(0.06, 0.08) + self.hole_radius = self.thickness / 2 * uniform(0.5, 0.6) self.support_angle = uniform(np.pi / 6, np.pi / 4) - self.is_support_round = uniform() < .5 + self.is_support_round = uniform() < 0.5 self.frame_height = self.thickness * uniform(3, 4) self.frame_count = np.random.randint(20, 30) self.stand_surface = self.support_surface = self.frame_surface = metal self.pallet_factory = PalletFactory(self.factory_seed) - self.margin_range = .3, .5 + self.margin_range = 0.3, 0.5 def create_placeholder(self, **kwargs) -> bpy.types.Object: bbox = new_bbox( - -self.depth - self.thickness / 2, self.thickness / 2, -self.thickness / 2, self.width + self.thickness / 2, - 0, self.height * self.steps + -self.depth - self.thickness / 2, + self.thickness / 2, + -self.thickness / 2, + self.width + self.thickness / 2, + 0, + self.height * self.steps, ) objs = [bbox] for i in range(self.steps): @@ -55,7 +68,13 @@ def create_placeholder(self, **kwargs) -> bpy.types.Object: obj.scale = self.depth / 2, self.width / 2 - self.thickness, 1 obj.location = -self.depth / 2, self.width / 2, self.height * i butil.apply_transform(obj, True) - write_attr_data(obj, f'{PREFIX}{t.Subpart.SupportSurface.value}', np.ones(1).astype(bool), 'INT', 'FACE') + write_attr_data( + obj, + f"{PREFIX}{t.Subpart.SupportSurface.value}", + np.ones(1).astype(bool), + "INT", + "FACE", + ) objs.append(obj) obj = join_objects(objs) return obj @@ -72,8 +91,11 @@ def create_asset(self, **params) -> bpy.types.Object: for i, p in enumerate(pallets): p.parent = obj margin = uniform(*self.margin_range) - p.location = margin if i % 2 else self.width - margin - p.dimensions[0], (self.depth - p.dimensions[ - 1]) / 2, i // 2 * self.height + p.location = ( + margin if i % 2 else self.width - margin - p.dimensions[0], + (self.depth - p.dimensions[1]) / 2, + i // 2 * self.height, + ) self.pallet_factory.finalize_assets(pallets) for p in pallets: p.parent = obj @@ -90,24 +112,28 @@ def make_stands(self): cylinder.scale = self.hole_radius, self.hole_radius, self.thickness * 2 cylinder.rotation_euler[1] = np.pi / 2 butil.apply_transform(cylinder) - butil.modify_mesh(obj, 'BOOLEAN', object=cylinder, operation='DIFFERENCE') + butil.modify_mesh(obj, "BOOLEAN", object=cylinder, operation="DIFFERENCE") cylinder.rotation_euler[-1] = np.pi / 2 butil.apply_transform(cylinder) - 
butil.modify_mesh(obj, 'BOOLEAN', object=cylinder, operation='DIFFERENCE') + butil.modify_mesh(obj, "BOOLEAN", object=cylinder, operation="DIFFERENCE") butil.delete(cylinder) remove_faces( obj, - lambda x, y, z: (np.abs(x) < self.thickness * .49) & (np.abs(y) < self.thickness * .49) & ( - np.abs(z) < self.thickness * .49) + lambda x, y, z: (np.abs(x) < self.thickness * 0.49) + & (np.abs(y) < self.thickness * 0.49) + & (np.abs(z) < self.thickness * 0.49), ) - remove_faces(obj, lambda x, y, z: np.abs(x) + np.abs(y) < self.thickness * .1) + remove_faces(obj, lambda x, y, z: np.abs(x) + np.abs(y) < self.thickness * 0.1) obj.location[-1] = self.thickness / 2 butil.apply_transform(obj, True) butil.modify_mesh( - obj, 'ARRAY', count=int(np.ceil(self.height / self.thickness * self.steps)), - relative_offset_displace=(0, 0, 1), use_merge_vertices=True + obj, + "ARRAY", + count=int(np.ceil(self.height / self.thickness * self.steps)), + relative_offset_displace=(0, 0, 1), + use_merge_vertices=True, ) - write_attribute(obj, 1, 'stand', 'FACE') + write_attribute(obj, 1, "stand", "FACE") stands = [obj] for locs in [(0, 1), (1, 1), (1, 0)]: o = deep_clone_obj(obj) @@ -117,7 +143,9 @@ def make_stands(self): return stands def make_supports(self): - n = int(np.floor(self.height * self.steps / self.depth / np.tan(self.support_angle))) + n = int( + np.floor(self.height * self.steps / self.depth / np.tan(self.support_angle)) + ) obj = new_line(n, self.height * self.steps) obj.rotation_euler[1] = -np.pi / 2 butil.apply_transform(obj, True) @@ -125,10 +153,12 @@ def make_supports(self): co[1::2, 1] = self.depth write_co(obj, co) if self.is_support_round: - surface.add_geomod(obj, geo_radius, apply=True, input_args=[self.thickness / 2, 16]) + surface.add_geomod( + obj, geo_radius, apply=True, input_args=[self.thickness / 2, 16] + ) else: solidify(obj, 1, self.thickness) - write_attribute(obj, 1, 'support', 'FACE') + write_attribute(obj, 1, "support", "FACE") o = deep_clone_obj(obj) o.location[0] = self.width return [obj, o] @@ -147,8 +177,12 @@ def make_frames(self): y_bar.location = margin, self.depth / 2, self.height - self.thickness / 2 butil.apply_transform(y_bar, True) butil.modify_mesh( - y_bar, 'ARRAY', use_relative_offset=False, use_constant_offset=True, - count=self.frame_count - 1, constant_offset_displace=(margin, 0, 0) + y_bar, + "ARRAY", + use_relative_offset=False, + use_constant_offset=True, + count=self.frame_count - 1, + constant_offset_displace=(margin, 0, 0), ) frames = [x_bar, x_bar_, y_bar] for i in range(1, self.steps - 1): @@ -159,10 +193,10 @@ def make_frames(self): frames.append(o) for o in frames: - write_attribute(o, 1, 'frame', 'FACE') + write_attribute(o, 1, "frame", "FACE") return frames def finalize_assets(self, assets): - self.stand_surface.apply(assets, 'stand', metal_color='bw') - self.support_surface.apply(assets, 'support', metal_color='bw') - self.frame_surface.apply(assets, 'frame', metal_color='bw') + self.stand_surface.apply(assets, "stand", metal_color="bw") + self.support_surface.apply(assets, "support", metal_color="bw") + self.frame_surface.apply(assets, "frame", metal_color="bw") diff --git a/infinigen/assets/fruits/__init__.py b/infinigen/assets/objects/fruits/__init__.py similarity index 85% rename from infinigen/assets/fruits/__init__.py rename to infinigen/assets/objects/fruits/__init__.py index 32831e2e3..1400bd850 100644 --- a/infinigen/assets/fruits/__init__.py +++ b/infinigen/assets/objects/fruits/__init__.py @@ -2,8 +2,8 @@ from .blackberry import 
FruitFactoryBlackberry from .coconutgreen import FruitFactoryCoconutgreen from .coconuthairy import FruitFactoryCoconuthairy +from .compositional_fruit import FruitFactoryCompositional from .durian import FruitFactoryDurian from .pineapple import FruitFactoryPineapple from .starfruit import FruitFactoryStarfruit from .strawberry import FruitFactoryStrawberry -from .compositional_fruit import FruitFactoryCompositional \ No newline at end of file diff --git a/infinigen/assets/objects/fruits/apple.py b/infinigen/assets/objects/fruits/apple.py new file mode 100644 index 000000000..f9f3d55ce --- /dev/null +++ b/infinigen/assets/objects/fruits/apple.py @@ -0,0 +1,108 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Yiming Zuo + + +import numpy as np +from numpy.random import normal, uniform + +from infinigen.assets.objects.fruits.general_fruit import FruitFactoryGeneralFruit +from infinigen.core.util.color import hsv2rgba + + +class FruitFactoryApple(FruitFactoryGeneralFruit): + def __init__(self, factory_seed, scale=1.0, coarse=False): + super().__init__(factory_seed, scale=scale, coarse=coarse) + self.name = "apple" + + def sample_cross_section_params(self, surface_resolution=256): + return { + "cross_section_name": "circle_cross_section", + "cross_section_func_args": {}, + "cross_section_input_args": { + "random seed": uniform(-100, 100), + "radius": normal(1.5, 0.05), + "Resolution": surface_resolution, + }, + "cross_section_output_args": {}, + } + + def sample_shape_params(self, surface_resolution=256): + return { + "shape_name": "shape_quadratic", + "shape_func_args": { + "radius_control_points": [ + (0.0, 0.0), + (0.1227, 0.4281), + (0.4705, 0.6625), + (0.8886, 0.4156), + (1.0, 0.0), + ], + }, + "shape_input_args": { + "Profile Curve": "noderef-crosssection-Geometry", + "noise amount tilt": 0.0, + "noise scale pos": 0.5, + "noise amount pos": 0.1, + "Resolution": surface_resolution, + "Start": (uniform(-0.1, 0.1), uniform(-0.1, 0.1), uniform(-0.9, -1.1)), + "End": (0.0, 0.0, 1.0), + }, + "shape_output_args": {}, + } + + def sample_surface_params(self): + base_color = np.array((uniform(-0.05, 0.1), 0.999, 0.799)) + base_color[1] += normal(0.0, 0.05) + base_color[2] += normal(0.0, 0.05) + base_color_rgba = hsv2rgba(base_color) + + alt_color = np.copy(base_color) + alt_color[0] += normal(0.05, 0.02) + alt_color[1] += normal(0.0, 0.05) + alt_color[2] += normal(0.0, 0.05) + alt_color_rgba = hsv2rgba(alt_color) + + return { + "surface_name": "apple_surface", + "surface_func_args": { + "color1": base_color_rgba, + "color2": alt_color_rgba, + "random_seed": uniform(-100, 100), + }, + "surface_input_args": { + "Geometry": "noderef-shapequadratic-Mesh", + "spline parameter": "noderef-shapequadratic-spline parameter", + "spline tangent": "noderef-shapequadratic-spline tangent", + "distance to center": "noderef-shapequadratic-radius to center", + }, + "surface_output_args": {}, + "surface_resolution": 64, + "scale_multiplier": 1.0, + } + + def sample_stem_params(self): + stem_color = np.array((0.10, 0.96, 0.13)) + stem_color[0] += normal(0.0, 0.02) + stem_color[1] += normal(0.0, 0.05) + stem_color[2] += normal(0.0, 0.05) + stem_color_rgba = hsv2rgba(stem_color) + + return { + "stem_name": "basic_stem", + "stem_func_args": {"stem_color": stem_color_rgba}, + "stem_input_args": { + "quad_mid": ( + uniform(-0.1, 0.1), + uniform(-0.1, 0.1), + uniform(0.15, 0.2), + ), + 
"quad_end": (uniform(-0.2, 0.2), uniform(-0.2, 0.2), uniform(0.3, 0.4)), + "quad_res": 32, + "cross_radius": uniform(0.025, 0.035), + "cross_res": 32, + "Translation": (0.0, 0.0, 0.6), + }, + "stem_output_args": {}, + } diff --git a/infinigen/assets/objects/fruits/blackberry.py b/infinigen/assets/objects/fruits/blackberry.py new file mode 100644 index 000000000..b8eb7be21 --- /dev/null +++ b/infinigen/assets/objects/fruits/blackberry.py @@ -0,0 +1,89 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Yiming Zuo + + +import numpy as np +from numpy.random import normal, uniform + +from infinigen.assets.objects.fruits.general_fruit import FruitFactoryGeneralFruit +from infinigen.core.util.color import hsv2rgba + + +class FruitFactoryBlackberry(FruitFactoryGeneralFruit): + def __init__(self, factory_seed, scale=1.0, coarse=False): + super().__init__(factory_seed, scale=scale, coarse=coarse) + self.name = "blackberry" + + def sample_cross_section_params(self, surface_resolution=256): + return { + "cross_section_name": "circle_cross_section", + "cross_section_func_args": {}, + "cross_section_input_args": { + "random seed": uniform(-100, 100), + "radius": normal(0.9, 0.05), + "Resolution": surface_resolution, + }, + "cross_section_output_args": {}, + } + + def sample_shape_params(self, surface_resolution=256): + return { + "shape_name": "shape_quadratic", + "shape_func_args": { + "radius_control_points": [ + (0.0, 0.0), + (0.0841, 0.3469), + (uniform(0.4, 0.6), 0.8), + (0.9432, 0.4781), + (1.0, 0.0), + ] + }, + "shape_input_args": { + "Profile Curve": "noderef-crosssection-Geometry", + "Start": (uniform(-0.5, 0.5), uniform(-0.5, 0.5), uniform(-0.5, -3.0)), + "End": (0.0, 0.0, 1.0), + "random seed tilt": uniform(-100, 100), + "noise amount tilt": 1.0, + "Resolution": surface_resolution, + }, + "shape_output_args": {}, + } + + def sample_surface_params(self): + berry_color = np.array((0.667, 0.254, 0.0)) + berry_color[0] += np.random.normal(0.0, 0.02) + berry_color[1] += np.random.normal(0.0, 0.05) + berry_color[2] += np.random.normal(0.0, 0.005) + berry_color_rgba = hsv2rgba(berry_color) + + return { + "surface_name": "blackberry_surface", + "surface_func_args": {"berry_color": berry_color_rgba}, + "surface_input_args": { + "Geometry": "noderef-shapequadratic-Mesh", + "spline parameter": "noderef-shapequadratic-spline parameter", + }, + "surface_output_args": {}, + "surface_resolution": 64, + "scale_multiplier": 0.3, + } + + def sample_stem_params(self): + stem_color = np.array((0.179, 0.836, 0.318)) + stem_color[0] += np.random.normal(0.0, 0.02) + stem_color[1] += np.random.normal(0.0, 0.05) + stem_color[2] += np.random.normal(0.0, 0.05) + stem_color_rgba = hsv2rgba(stem_color) + + return { + "stem_name": "basic_stem", + "stem_func_args": {"stem_color": stem_color_rgba}, + "stem_input_args": { + "cross_radius": normal(0.075, 0.005), + "quad_mid": (uniform(-0.1, 0.1), uniform(-0.1, 0.1), uniform(0.2, 0.3)), + "quad_end": (uniform(-0.2, 0.2), uniform(-0.2, 0.2), uniform(0.4, 0.6)), + }, + "stem_output_args": {}, + } diff --git a/infinigen/assets/objects/fruits/coconutgreen.py b/infinigen/assets/objects/fruits/coconutgreen.py new file mode 100644 index 000000000..1238d891e --- /dev/null +++ b/infinigen/assets/objects/fruits/coconutgreen.py @@ -0,0 +1,126 @@ +# Copyright (c) Princeton University. 
+# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Yiming Zuo + + +import numpy as np +from numpy.random import normal, randint, uniform + +from infinigen.assets.objects.fruits.general_fruit import FruitFactoryGeneralFruit +from infinigen.core.util.color import hsv2rgba + + +class FruitFactoryCoconutgreen(FruitFactoryGeneralFruit): + def __init__(self, factory_seed, scale=1.0, coarse=False): + super().__init__(factory_seed, scale=scale, coarse=coarse) + self.name = "coconut_green" + + def sample_cross_section_params(self, surface_resolution=256): + rad_small = uniform(0.65, 0.75) + + return { + "cross_section_name": "coconut_cross_section", + "cross_section_func_args": { + "control_points": [(0.0, rad_small), (0.1, rad_small), (1.0, 0.76)] + }, + "cross_section_input_args": { + "random seed": uniform(-100, 100), + "radius": normal(1.8, 0.1), + "noise scale": 20.0, + "noise amount": 0.02, + "Resolution": surface_resolution, + }, + "cross_section_output_args": { + "crosssection_coordinate": "noderef-crosssection-curve parameters" + }, + } + + def sample_shape_params(self, surface_resolution=256): + return { + "shape_name": "shape_quadratic", + "shape_func_args": { + "radius_control_points": [ + (0.0, 0.0), + (0.0591, 0.3156), + (uniform(0.2, 0.3), 0.6125), + (uniform(0.6, 0.7), 0.675), + (0.9636, 0.3625), + (1.0, 0.0), + ] + }, + "shape_input_args": { + "Profile Curve": "noderef-crosssection-Geometry", + "Start": (uniform(-0.1, 0.1), uniform(-0.1, 0.1), normal(-1.0, 0.1)), + "End": (0.0, 0.0, 1.0), + "Resolution": surface_resolution, + }, + "shape_output_args": { + "shape_coordinate": "noderef-shapequadratic-spline parameter" + }, + } + + def sample_surface_params(self): + bottom_color = np.array((0.282, 0.951, 0.266)) + bottom_color[0] += np.random.normal(0.0, 0.02) + bottom_color[1] += np.random.normal(0.0, 0.05) + bottom_color[2] += np.random.normal(0.0, 0.05) + bottom_color_rgba = hsv2rgba(bottom_color) + + basic_color = np.array((0.235, 0.989, 0.413)) + basic_color[0] += np.random.normal(0.0, 0.025) + basic_color[1] += np.random.normal(0.0, 0.05) + basic_color[2] += np.random.normal(0.0, 0.05) + basic_color_rgba = hsv2rgba(basic_color) + + return { + "surface_name": "coconutgreen_surface", + "surface_func_args": { + "basic_color": basic_color_rgba, + "bottom_color": bottom_color_rgba, + }, + "surface_input_args": { + "Geometry": "noderef-shapequadratic-Mesh", + "spline parameter": "noderef-shapequadratic-spline parameter", + "spline tangent": "noderef-shapequadratic-spline tangent", + "distance to center": "noderef-shapequadratic-radius to center", + "cross section paramater": "noderef-crosssection-curve parameters", + }, + "surface_output_args": {}, + "surface_resolution": 256, + "scale_multiplier": 1.5, + } + + def sample_stem_params(self): + bottom_color = np.array((0.282, 0.951, 0.266)) + bottom_color[0] += np.random.normal(0.0, 0.02) + bottom_color[1] += np.random.normal(0.0, 0.05) + bottom_color[2] += np.random.normal(0.0, 0.05) + bottom_color_rgba = hsv2rgba(bottom_color) + + calyx_edge_color = np.array((0.039, 0.96, 0.037)) + calyx_edge_color[0] += np.random.normal(0.0, 0.02) + calyx_edge_color[1] += np.random.normal(0.0, 0.05) + calyx_edge_color[2] += np.random.normal(0.0, 0.05) + calyx_edge_color_rgba = hsv2rgba(calyx_edge_color) + + stem_x = uniform(-0.4, 0.4) + stem_y = uniform(-0.4, 0.4) + + return { + "stem_name": "coconut_stem", + "stem_func_args": { + "basic_color": 
bottom_color_rgba, + "edge_color": calyx_edge_color_rgba, + }, + "stem_input_args": { + "Target": "noderef-fruitsurface-Geometry", + "radius": 0.001, + "calyx width": uniform(0.2, 0.25), + "Count": randint(4, 6), + "stem_radius": normal(0.04, 0.005), + "stem_mid": (stem_x, stem_y, 0.0), + "stem_end": (2 * stem_x, 2 * stem_y, uniform(0.3, 0.5)), + }, + "stem_output_args": {"distance to edge": "noderef-stem-distance to edge"}, + } diff --git a/infinigen/assets/objects/fruits/coconuthairy.py b/infinigen/assets/objects/fruits/coconuthairy.py new file mode 100644 index 000000000..6a207281b --- /dev/null +++ b/infinigen/assets/objects/fruits/coconuthairy.py @@ -0,0 +1,84 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Yiming Zuo + + +import numpy as np +from numpy.random import normal, uniform + +from infinigen.assets.objects.fruits.general_fruit import FruitFactoryGeneralFruit +from infinigen.core.util.color import hsv2rgba + + +class FruitFactoryCoconuthairy(FruitFactoryGeneralFruit): + def __init__(self, factory_seed, scale=1.0, coarse=False): + super().__init__(factory_seed, scale=scale, coarse=coarse) + self.name = "coconut_hairy" + + def sample_cross_section_params(self, surface_resolution=256): + rad_small = uniform(0.65, 0.75) + + return { + "cross_section_name": "coconut_cross_section", + "cross_section_func_args": { + "control_points": [(0.0, rad_small), (0.1, rad_small), (1.0, 0.76)] + }, + "cross_section_input_args": { + "random seed": uniform(-100, 100), + "radius": normal(1.8, 0.1), + "noise scale": 20.0, + "noise amount": 0.02, + "Resolution": surface_resolution, + }, + "cross_section_output_args": {}, + } + + def sample_shape_params(self, surface_resolution=256): + return { + "shape_name": "shape_quadratic", + "shape_func_args": { + "radius_control_points": [ + (0.0, 0.0), + (0.0591, 0.3156), + (uniform(0.2, 0.3), 0.6125), + (uniform(0.6, 0.7), 0.675), + (0.9636, 0.3625), + (1.0, 0.0), + ] + }, + "shape_input_args": { + "Profile Curve": "noderef-crosssection-Geometry", + "Start": (uniform(-0.1, 0.1), uniform(-0.1, 0.1), normal(-1.0, 0.1)), + "End": (0.0, 0.0, 1.0), + "Resolution": surface_resolution, + }, + "shape_output_args": {}, + } + + def sample_surface_params(self): + basic_color = np.array((0.05, 0.97, 0.6)) + basic_color[0] += np.random.normal(0.0, 0.01) + basic_color[1] += np.random.normal(0.0, 0.05) + basic_color[2] += np.random.normal(0.0, 0.1) + basic_color_rgba = hsv2rgba(basic_color) + + return { + "surface_name": "coconuthairy_surface", + "surface_func_args": {"basic_color": basic_color_rgba}, + "surface_input_args": { + "Geometry": "noderef-shapequadratic-Mesh", + "spline parameter": "noderef-shapequadratic-spline parameter", + }, + "surface_output_args": {}, + "surface_resolution": 256, + "scale_multiplier": 1.5, + } + + def sample_stem_params(self): + return { + "stem_name": "empty_stem", + "stem_func_args": {}, + "stem_input_args": {}, + "stem_output_args": {}, + } diff --git a/infinigen/assets/objects/fruits/compositional_fruit.py b/infinigen/assets/objects/fruits/compositional_fruit.py new file mode 100644 index 000000000..a01666c75 --- /dev/null +++ b/infinigen/assets/objects/fruits/compositional_fruit.py @@ -0,0 +1,62 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
+ +# Authors: Yiming Zuo + + +import numpy as np + +from infinigen.assets.objects.fruits.apple import FruitFactoryApple +from infinigen.assets.objects.fruits.blackberry import FruitFactoryBlackberry +from infinigen.assets.objects.fruits.coconutgreen import FruitFactoryCoconutgreen +from infinigen.assets.objects.fruits.coconuthairy import FruitFactoryCoconuthairy +from infinigen.assets.objects.fruits.durian import FruitFactoryDurian +from infinigen.assets.objects.fruits.general_fruit import FruitFactoryGeneralFruit +from infinigen.assets.objects.fruits.pineapple import FruitFactoryPineapple +from infinigen.assets.objects.fruits.starfruit import FruitFactoryStarfruit +from infinigen.assets.objects.fruits.strawberry import FruitFactoryStrawberry +from infinigen.core.util.math import FixedSeed + +fruit_names = { + "Apple": FruitFactoryApple, + "Pineapple": FruitFactoryPineapple, + "Starfruit": FruitFactoryStarfruit, + "Strawberry": FruitFactoryStrawberry, + "Blackberry": FruitFactoryBlackberry, + "Coconuthairy": FruitFactoryCoconuthairy, + "Coconutgreen": FruitFactoryCoconutgreen, + "Durian": FruitFactoryDurian, +} + + +class FruitFactoryCompositional(FruitFactoryGeneralFruit): + def __init__(self, factory_seed, scale=1.0, coarse=False): + super(FruitFactoryCompositional, self).__init__( + factory_seed, scale=scale, coarse=coarse + ) + + self.name = "compositional" + self.factories = {} + + for name, factory in fruit_names.items(): + self.factories[name] = factory(factory_seed, scale, coarse) + + with FixedSeed(factory_seed): + self.cross_section_source = np.random.choice(list(fruit_names.keys())) + self.shape_source = np.random.choice(list(fruit_names.keys())) + self.surface_source = np.random.choice(list(fruit_names.keys())) + self.stem_source = np.random.choice(list(fruit_names.keys())) + + def sample_cross_section_params(self, surface_resolution=256): + return self.factories[self.cross_section_source].sample_cross_section_params( + surface_resolution + ) + + def sample_shape_params(self, surface_resolution=256): + return self.factories[self.shape_source].sample_shape_params(surface_resolution) + + def sample_surface_params(self): + return self.factories[self.surface_source].sample_surface_params() + + def sample_stem_params(self): + return self.factories[self.stem_source].sample_stem_params() diff --git a/infinigen/assets/objects/fruits/cross_section_lib.py b/infinigen/assets/objects/fruits/cross_section_lib.py new file mode 100644 index 000000000..90f69006c --- /dev/null +++ b/infinigen/assets/objects/fruits/cross_section_lib.py @@ -0,0 +1,420 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
+ +# Authors: Yiming Zuo + + +from infinigen.assets.objects.fruits.fruit_utils import nodegroup_rot_semmetry +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler + + +@node_utils.to_nodegroup( + "nodegroup_circle_cross_section", singleton=False, type="GeometryNodeTree" +) +def nodegroup_circle_cross_section(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "random seed", 0.0), + ("NodeSocketFloat", "noise scale", 0.5), + ("NodeSocketFloat", "noise amount", 0.1), + ("NodeSocketInt", "Resolution", 256), + ("NodeSocketFloat", "radius", 0.0), + ], + ) + + value = nw.new_node(Nodes.Value) + value.outputs[0].default_value = 0.5 + + curve_circle = nw.new_node( + Nodes.CurveCircle, + input_kwargs={"Resolution": group_input.outputs["Resolution"]}, + ) + + position = nw.new_node(Nodes.InputPosition) + + add = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: position, 1: group_input.outputs["random seed"]}, + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": add.outputs["Vector"], + "Scale": group_input.outputs["noise scale"], + }, + ) + + subtract = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: noise_texture.outputs["Color"], 1: (0.5, 0.5, 0.5)}, + attrs={"operation": "SUBTRACT"}, + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": subtract.outputs["Vector"]} + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": separate_xyz.outputs["X"], "Y": separate_xyz.outputs["Y"]}, + ) + + scale = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: combine_xyz, "Scale": group_input.outputs["noise amount"]}, + attrs={"operation": "SCALE"}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": curve_circle.outputs["Curve"], + "Offset": scale.outputs["Vector"], + }, + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": set_position, "Scale": group_input.outputs["radius"]}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": transform, "curve parameters": value}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_star_cross_section", singleton=False, type="GeometryNodeTree" +) +def nodegroup_star_cross_section(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "random seed", 0.0), + ("NodeSocketFloat", "noise scale", 2.4), + ("NodeSocketFloat", "noise amount", 0.2), + ("NodeSocketInt", "Resolution", 256), + ("NodeSocketFloat", "radius", 1.0), + ], + ) + + curve_circle = nw.new_node( + Nodes.CurveCircle, + input_kwargs={"Resolution": group_input.outputs["Resolution"]}, + ) + + spline_parameter = nw.new_node(Nodes.SplineParameter) + + rotsemmetry = nw.new_node( + nodegroup_rot_semmetry().name, + input_kwargs={"N": 5, "spline parameter": spline_parameter.outputs["Factor"]}, + ) + + capture_attribute = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={ + "Geometry": curve_circle.outputs["Curve"], + 2: rotsemmetry.outputs["Result"], + }, + ) + + float_curve = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": rotsemmetry.outputs["Result"]} + ) + node_utils.assign_curve( + float_curve.mapping.curves[0], [(0.0, 0.4156), (0.65, 0.8125), (1.0, 1.0)] + ) + + position = nw.new_node(Nodes.InputPosition) + + scale = nw.new_node( + 
Nodes.VectorMath, + input_kwargs={0: position, "Scale": float_curve}, + attrs={"operation": "SCALE"}, + ) + + add = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: position, 1: group_input.outputs["random seed"]}, + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": add.outputs["Vector"], + "Scale": group_input.outputs["noise scale"], + }, + ) + + subtract = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: noise_texture.outputs["Color"], 1: (0.5, 0.5, 0.5)}, + attrs={"operation": "SUBTRACT"}, + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": subtract.outputs["Vector"]} + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": separate_xyz.outputs["X"], "Y": separate_xyz.outputs["Y"]}, + ) + + scale_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: combine_xyz, "Scale": group_input.outputs["noise amount"]}, + attrs={"operation": "SCALE"}, + ) + + add_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: scale.outputs["Vector"], 1: scale_1.outputs["Vector"]}, + ) + + scale_2 = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: add_1.outputs["Vector"], + "Scale": group_input.outputs["radius"], + }, + attrs={"operation": "SCALE"}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": capture_attribute.outputs["Geometry"], + "Position": scale_2.outputs["Vector"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": set_position, + "curve parameters": capture_attribute.outputs[2], + }, + ) + + +@node_utils.to_nodegroup( + "nodegroup_cylax_cross_section", singleton=False, type="GeometryNodeTree" +) +def nodegroup_cylax_cross_section(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketInt", "fork number", 10), + ("NodeSocketFloat", "bottom radius", 0.0), + ("NodeSocketFloatDistance", "noise random seed", 0.0), + ("NodeSocketFloat", "noise amount", 0.4), + ("NodeSocketFloatDistance", "radius", 1.0), + ], + ) + + curve_circle = nw.new_node( + Nodes.CurveCircle, + input_kwargs={"Resolution": 256, "Radius": group_input.outputs["radius"]}, + ) + + position = nw.new_node(Nodes.InputPosition) + + spline_parameter = nw.new_node(Nodes.SplineParameter) + + rotsemmetry = nw.new_node( + nodegroup_rot_semmetry().name, + input_kwargs={ + "N": group_input.outputs["fork number"], + "spline parameter": spline_parameter.outputs["Factor"], + }, + ) + + float_curve = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": rotsemmetry.outputs["Result"]} + ) + node_utils.assign_curve( + float_curve.mapping.curves[0], [(0.0, 0.0), (0.65, 0.8125), (1.0, 1.0)] + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": float_curve, 3: group_input.outputs["bottom radius"]}, + ) + + scale = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: position, "Scale": map_range_1.outputs["Result"]}, + attrs={"operation": "SCALE"}, + ) + + add = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: position, 1: group_input.outputs["noise random seed"]}, + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, input_kwargs={"Vector": add.outputs["Vector"], "Scale": 2.4} + ) + + value = nw.new_node(Nodes.Value) + value.outputs[0].default_value = 0.5 + + subtract = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: noise_texture.outputs["Color"], 1: value}, + attrs={"operation": "SUBTRACT"}, + ) + + separate_xyz = nw.new_node( + 
Nodes.SeparateXYZ, input_kwargs={"Vector": subtract.outputs["Vector"]} + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": separate_xyz.outputs["X"], "Y": separate_xyz.outputs["Y"]}, + ) + + scale_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: combine_xyz, "Scale": group_input.outputs["noise amount"]}, + attrs={"operation": "SCALE"}, + ) + + add_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: scale.outputs["Vector"], 1: scale_1.outputs["Vector"]}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": curve_circle.outputs["Curve"], + "Position": add_1.outputs["Vector"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_position} + ) + + +@node_utils.to_nodegroup( + "nodegroup_coconut_cross_section", singleton=False, type="GeometryNodeTree" +) +def nodegroup_coconut_cross_section( + nw: NodeWrangler, control_points=[(0.0, 0.7156), (0.1023, 0.7156), (1.0, 0.7594)] +): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "random seed", 0.0), + ("NodeSocketFloat", "noise scale", 2.4), + ("NodeSocketFloat", "noise amount", 0.2), + ("NodeSocketInt", "Resolution", 256), + ("NodeSocketFloat", "radius", 1.0), + ], + ) + + curve_circle = nw.new_node( + Nodes.CurveCircle, + input_kwargs={"Resolution": group_input.outputs["Resolution"]}, + ) + + spline_parameter = nw.new_node(Nodes.SplineParameter) + + rot_semmetry = nw.new_node( + nodegroup_rot_semmetry().name, + input_kwargs={"N": 3, "spline parameter": spline_parameter.outputs["Factor"]}, + ) + + capture_attribute = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={"Geometry": curve_circle.outputs["Curve"], 2: rot_semmetry}, + ) + + position = nw.new_node(Nodes.InputPosition) + + float_curve_1 = nw.new_node(Nodes.FloatCurve, input_kwargs={"Value": rot_semmetry}) + node_utils.assign_curve(float_curve_1.mapping.curves[0], control_points) + + scale = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: position, "Scale": float_curve_1}, + attrs={"operation": "SCALE"}, + ) + + add = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: position, 1: group_input.outputs["random seed"]}, + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": add.outputs["Vector"], + "Scale": group_input.outputs["noise scale"], + }, + ) + + subtract = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: noise_texture.outputs["Color"], 1: (0.5, 0.5, 0.5)}, + attrs={"operation": "SUBTRACT"}, + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": subtract.outputs["Vector"]} + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": separate_xyz.outputs["X"], "Y": separate_xyz.outputs["Y"]}, + ) + + scale_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: combine_xyz, "Scale": group_input.outputs["noise amount"]}, + attrs={"operation": "SCALE"}, + ) + + add_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: scale.outputs["Vector"], 1: scale_1.outputs["Vector"]}, + ) + + scale_2 = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: add_1.outputs["Vector"], + "Scale": group_input.outputs["radius"], + }, + attrs={"operation": "SCALE"}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": capture_attribute.outputs["Geometry"], + "Position": scale_2.outputs["Vector"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + 
"Geometry": set_position, + "curve parameters": capture_attribute.outputs[2], + }, + ) diff --git a/infinigen/assets/objects/fruits/durian.py b/infinigen/assets/objects/fruits/durian.py new file mode 100644 index 000000000..9f326b6f8 --- /dev/null +++ b/infinigen/assets/objects/fruits/durian.py @@ -0,0 +1,112 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Yiming Zuo + + +import gin +import numpy as np +from numpy.random import normal, uniform + +from infinigen.assets.objects.fruits.general_fruit import FruitFactoryGeneralFruit +from infinigen.core.util.color import hsv2rgba + + +@gin.register +class FruitFactoryDurian(FruitFactoryGeneralFruit): + def __init__(self, factory_seed, scale=1.0, coarse=False): + super().__init__(factory_seed, scale=scale, coarse=coarse) + self.name = "durian" + + def sample_cross_section_params(self, surface_resolution=256): + return { + "cross_section_name": "circle_cross_section", + "cross_section_func_args": {}, + "cross_section_input_args": { + "random seed": uniform(-100, 100), + "radius": normal(1.2, 0.03), + "Resolution": surface_resolution, + }, + "cross_section_output_args": {}, + } + + def sample_shape_params(self, surface_resolution=256): + return { + "shape_name": "shape_quadratic", + "shape_func_args": { + "radius_control_points": [ + (0.0, 0.0031), + (0.0841, 0.3469), + (uniform(0.4, 0.6), 0.8), + (0.8886, 0.6094), + (1.0, 0.0), + ] + }, + "shape_input_args": { + "Profile Curve": "noderef-crosssection-Geometry", + "noise amount tilt": 5.0, + "noise scale tilt": 0.5, + "random seed tilt": uniform(-100, 100), + "Resolution": surface_resolution, + "Start": (uniform(-0.3, 0.3), uniform(-0.3, 0.3), uniform(-0.5, -1.5)), + "End": (0.0, 0.0, 1.0), + }, + "shape_output_args": {}, + } + + def sample_surface_params(self): + base_color = np.array((0.15, 0.74, 0.32)) + base_color[0] += np.random.normal(0.0, 0.02) + base_color[1] += np.random.normal(0.0, 0.05) + base_color[2] += np.random.normal(0.0, 0.05) + base_color_rgba = hsv2rgba(base_color) + + peak_color = np.array((0.09, 0.87, 0.24)) + peak_color[0] += np.random.normal(0.0, 0.025) + peak_color[1] += np.random.normal(0.0, 0.05) + peak_color[2] += np.random.normal(0.0, 0.05) + peak_color_rgba = hsv2rgba(peak_color) + + return { + "surface_name": "durian_surface", + "surface_func_args": { + "thorn_control_points": [(0.0, 0.0), (0.7318, 0.4344), (1.0, 1.0)], + "peak_color": peak_color_rgba, + "base_color": base_color_rgba, + }, + "surface_input_args": { + "Geometry": "noderef-shapequadratic-Mesh", + "spline parameter": "noderef-shapequadratic-spline parameter", + "distance Min": uniform(0.07, 0.13), + "displacement": uniform(0.25, 0.35), + "noise amount": 0.2, + }, + "surface_output_args": { + "durian thorn coordiante": "noderef-fruitsurface-distance to center" + }, + "surface_resolution": 512, + "scale_multiplier": 2.0, + } + + def sample_stem_params(self): + stem_color = np.array((0.10, 0.96, 0.13)) + stem_color[0] += np.random.normal(0.0, 0.02) + stem_color[1] += np.random.normal(0.0, 0.05) + stem_color[2] += np.random.normal(0.0, 0.05) + stem_color_rgba = hsv2rgba(stem_color) + + return { + "stem_name": "basic_stem", + "stem_func_args": {"stem_color": stem_color_rgba}, + "stem_input_args": { + "cross_radius": uniform(0.07, 0.09), + "quad_mid": ( + uniform(-0.1, 0.1), + uniform(-0.1, 0.1), + uniform(0.15, 0.2), + ), + "quad_end": (uniform(-0.2, 0.2), uniform(-0.2, 
0.2), uniform(0.3, 0.4)), + "Translation": (0.0, 0.0, 0.9), + }, + "stem_output_args": {}, + } diff --git a/infinigen/assets/objects/fruits/fruit_utils.py b/infinigen/assets/objects/fruits/fruit_utils.py new file mode 100644 index 000000000..3f0e1f9cf --- /dev/null +++ b/infinigen/assets/objects/fruits/fruit_utils.py @@ -0,0 +1,1077 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Yiming Zuo + + +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler + + +@node_utils.to_nodegroup( + "nodegroup_random_rotation_scale", singleton=False, type="GeometryNodeTree" +) +def nodegroup_random_rotation_scale(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "random seed", 0.0), + ("NodeSocketFloat", "noise scale", 10.0), + ("NodeSocketVector", "rot mean", (0.0, 0.0, 0.0)), + ("NodeSocketFloat", "rot std", 1.0), + ("NodeSocketFloat", "scale mean", 0.35), + ("NodeSocketFloat", "scale std", 0.1), + ], + ) + + position_3 = nw.new_node(Nodes.InputPosition) + + add = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: position_3, 1: group_input.outputs["random seed"]}, + ) + + noise_texture_2 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": add.outputs["Vector"], + "Scale": group_input.outputs["noise scale"], + }, + ) + + value_2 = nw.new_node(Nodes.Value) + value_2.outputs[0].default_value = 0.5 + + subtract = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: noise_texture_2.outputs["Color"], 1: value_2}, + attrs={"operation": "SUBTRACT"}, + ) + + separate_xyz_2 = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": subtract.outputs["Vector"]} + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: separate_xyz_2.outputs["X"], + 1: group_input.outputs["rot std"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply}) + + add_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: group_input.outputs["rot mean"], 1: combine_xyz}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: separate_xyz_2.outputs["Y"], + 1: group_input.outputs["scale std"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + add_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_1, 1: group_input.outputs["scale mean"]}, + attrs={"use_clamp": True}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Vector": add_1.outputs["Vector"], "Value": add_2}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_surface_bump", singleton=False, type="GeometryNodeTree" +) +def nodegroup_surface_bump(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketFloat", "Displacement", 0.02), + ("NodeSocketFloat", "Scale", 50.0), + ], + ) + + normal = nw.new_node(Nodes.InputNormal) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, input_kwargs={"Scale": group_input.outputs["Scale"]} + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: noise_texture.outputs["Fac"]}, + attrs={"operation": "SUBTRACT"}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract, 1: group_input.outputs["Displacement"]}, + attrs={"operation": 
"MULTIPLY"}, + ) + + multiply_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: normal, 1: multiply}, + attrs={"operation": "MULTIPLY"}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + "Offset": multiply_1.outputs["Vector"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_position} + ) + + +@node_utils.to_nodegroup( + "nodegroup_point_on_mesh", singleton=False, type="GeometryNodeTree" +) +def nodegroup_point_on_mesh(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Mesh", None), + ("NodeSocketFloat", "spline parameter", 0.0), + ("NodeSocketFloatDistance", "Distance Min", 0.2), + ("NodeSocketFloat", "parameter max", 1.0), + ("NodeSocketFloat", "parameter min", 0.0), + ("NodeSocketFloat", "noise amount", 1.0), + ("NodeSocketFloat", "noise scale", 5.0), + ], + ) + + distribute_points_on_faces = nw.new_node( + Nodes.DistributePointsOnFaces, + input_kwargs={ + "Mesh": group_input.outputs["Mesh"], + "Distance Min": group_input.outputs["Distance Min"], + "Density Max": 10000.0, + }, + attrs={"distribute_method": "POISSON"}, + ) + + greater_than = nw.new_node( + Nodes.Compare, + input_kwargs={ + 0: group_input.outputs["spline parameter"], + 1: group_input.outputs["parameter min"], + }, + ) + + less_than = nw.new_node( + Nodes.Compare, + input_kwargs={ + 0: group_input.outputs["spline parameter"], + 1: group_input.outputs["parameter max"], + }, + attrs={"operation": "LESS_THAN"}, + ) + + nand = nw.new_node( + Nodes.BooleanMath, + input_kwargs={0: greater_than, 1: less_than}, + attrs={"operation": "NAND"}, + ) + + delete_geometry = nw.new_node( + Nodes.DeleteGeometry, + input_kwargs={ + "Geometry": distribute_points_on_faces.outputs["Points"], + "Selection": nand, + }, + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, input_kwargs={"Scale": group_input.outputs["noise scale"]} + ) + + value = nw.new_node(Nodes.Value) + value.outputs[0].default_value = 0.5 + + subtract = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: noise_texture.outputs["Color"], 1: value}, + attrs={"operation": "SUBTRACT"}, + ) + + scale = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: subtract.outputs["Vector"], + "Scale": group_input.outputs["noise amount"], + }, + attrs={"operation": "SCALE"}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={"Geometry": delete_geometry, "Offset": scale.outputs["Vector"]}, + ) + + geometry_proximity = nw.new_node( + Nodes.Proximity, input_kwargs={"Target": group_input.outputs["Mesh"]} + ) + + set_position_1 = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": set_position, + "Position": geometry_proximity.outputs["Position"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": set_position_1, + "Rotation": distribute_points_on_faces.outputs["Rotation"], + }, + ) + + +@node_utils.to_nodegroup( + "nodegroup_instance_on_points", singleton=False, type="GeometryNodeTree" +) +def nodegroup_instance_on_points(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVectorEuler", "rotation base", (0.0, 0.0, 0.0)), + ("NodeSocketVectorEuler", "rotation delta", (0.0, 0.0, 0.0)), + ("NodeSocketVectorTranslation", "translation", (0.0, 0.0, 0.0)), + 
("NodeSocketFloat", "scale", 0.0), + ("NodeSocketGeometry", "Points", None), + ("NodeSocketGeometry", "Instance", None), + ], + ) + + rotate_euler_1 = nw.new_node( + Nodes.RotateEuler, + input_kwargs={ + "Rotation": group_input.outputs["rotation base"], + "Rotate By": group_input.outputs["rotation delta"], + }, + attrs={"space": "LOCAL"}, + ) + + instance_on_points_1 = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={ + "Points": group_input.outputs["Points"], + "Instance": group_input.outputs["Instance"], + "Rotation": rotate_euler_1, + "Scale": group_input.outputs["scale"], + }, + ) + + translate_instances = nw.new_node( + Nodes.TranslateInstances, + input_kwargs={ + "Instances": instance_on_points_1, + "Translation": group_input.outputs["translation"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Instances": translate_instances} + ) + + +@node_utils.to_nodegroup( + "nodegroup_shape_quadratic", singleton=False, type="GeometryNodeTree" +) +def nodegroup_shape_quadratic( + nw: NodeWrangler, radius_control_points=[(0.0, 0.5), (1.0, 0.5)] +): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Profile Curve", None), + ("NodeSocketFloat", "random seed tilt", 0.5), + ("NodeSocketFloat", "noise scale tilt", 0.5), + ("NodeSocketFloat", "noise amount tilt", 0.0), + ("NodeSocketFloat", "random seed pos", 0.0), + ("NodeSocketFloat", "noise scale pos", 0.0), + ("NodeSocketFloat", "noise amount pos", 0.0), + ("NodeSocketIntUnsigned", "Resolution", 256), + ("NodeSocketVectorTranslation", "Start", (0.0, 0.0, -1.5)), + ("NodeSocketVectorTranslation", "Middle", (0.0, 0.0, 0.0)), + ("NodeSocketVectorTranslation", "End", (0.0, 0.0, 1.5)), + ], + ) + + quadratic_bezier = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Resolution": group_input.outputs["Resolution"], + "Start": group_input.outputs["Start"], + "Middle": group_input.outputs["Middle"], + "End": group_input.outputs["End"], + }, + ) + + spline_parameter_2 = nw.new_node(Nodes.SplineParameter) + + capture_attribute = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={ + "Geometry": quadratic_bezier, + 2: spline_parameter_2.outputs["Factor"], + }, + ) + + curve_tangent = nw.new_node(Nodes.CurveTangent) + + capture_attribute_1 = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={ + "Geometry": capture_attribute.outputs["Geometry"], + 1: curve_tangent, + }, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + position = nw.new_node(Nodes.InputPosition) + + add = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: position, 1: group_input.outputs["random seed pos"]}, + ) + + noise_texture_3 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": add.outputs["Vector"], + "Scale": group_input.outputs["noise scale pos"], + }, + ) + + value_1 = nw.new_node(Nodes.Value) + value_1.outputs[0].default_value = 0.5 + + subtract = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: noise_texture_3.outputs["Color"], 1: value_1}, + attrs={"operation": "SUBTRACT"}, + ) + + scale = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: subtract.outputs["Vector"], + "Scale": spline_parameter_2.outputs["Factor"], + }, + attrs={"operation": "SCALE"}, + ) + + scale_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: scale.outputs["Vector"], + "Scale": group_input.outputs["noise amount pos"], + }, + attrs={"operation": "SCALE"}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + 
"Geometry": capture_attribute_1.outputs["Geometry"], + "Offset": scale_1.outputs["Vector"], + }, + ) + + spline_parameter = nw.new_node(Nodes.SplineParameter) + + add_1 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: spline_parameter.outputs["Factor"], + 1: group_input.outputs["random seed tilt"], + }, + ) + + noise_texture_1 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={"W": add_1, "Scale": group_input.outputs["noise scale tilt"]}, + attrs={"noise_dimensions": "1D"}, + ) + + subtract_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: noise_texture_1.outputs["Fac"]}, + attrs={"operation": "SUBTRACT"}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract_1, 1: group_input.outputs["noise amount tilt"]}, + attrs={"operation": "MULTIPLY"}, + ) + + set_curve_tilt = nw.new_node( + Nodes.SetCurveTilt, input_kwargs={"Curve": set_position, "Tilt": multiply} + ) + + spline_parameter_1 = nw.new_node(Nodes.SplineParameter) + + float_curve_2 = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": spline_parameter_1.outputs["Factor"]} + ) + node_utils.assign_curve(float_curve_2.mapping.curves[0], radius_control_points) + + set_curve_radius = nw.new_node( + Nodes.SetCurveRadius, + input_kwargs={"Curve": set_curve_tilt, "Radius": float_curve_2}, + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": set_curve_radius, + "Profile Curve": group_input.outputs["Profile Curve"], + "Fill Caps": True, + }, + ) + + curve_to_points = nw.new_node( + Nodes.CurveToPoints, + input_kwargs={"Curve": set_position}, + attrs={"mode": "EVALUATED"}, + ) + + geometry_proximity = nw.new_node( + Nodes.Proximity, + input_kwargs={"Target": curve_to_points.outputs["Points"]}, + attrs={"target_element": "POINTS"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Mesh": curve_to_mesh, + "spline parameter": capture_attribute.outputs[2], + "spline tangent": capture_attribute_1.outputs["Attribute"], + "radius to center": geometry_proximity.outputs["Distance"], + }, + ) + + +@node_utils.to_nodegroup("nodegroup_add_dent", singleton=False, type="GeometryNodeTree") +def nodegroup_add_dent(nw: NodeWrangler, dent_control_points): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketFloat", "spline parameter", 0.0), + ("NodeSocketVector", "spline tangent", (0.0, 0.0, 0.0)), + ("NodeSocketFloat", "distance to center", 0.0), + ("NodeSocketBool", "bottom", False), + ("NodeSocketFloat", "intensity", 1.0), + ("NodeSocketFloat", "max radius", 1.0), + ], + ) + + greater_than = nw.new_node( + Nodes.Compare, input_kwargs={0: group_input.outputs["spline parameter"], 1: 0.5} + ) + + op_not = nw.new_node( + Nodes.BooleanMath, input_kwargs={0: greater_than}, attrs={"operation": "NOT"} + ) + + switch = nw.new_node( + Nodes.Switch, + input_kwargs={0: group_input.outputs["bottom"], 6: greater_than, 7: op_not}, + attrs={"input_type": "BOOLEAN"}, + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": group_input.outputs["distance to center"], + 2: group_input.outputs["max radius"], + }, + ) + + float_curve_3 = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": map_range.outputs["Result"]} + ) + node_utils.assign_curve(float_curve_3.mapping.curves[0], dent_control_points) + + map_range_1 = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": float_curve_3, 3: -1.0} + ) + + multiply = nw.new_node( + Nodes.Math, 
+ input_kwargs={ + 0: map_range_1.outputs["Result"], + 1: group_input.outputs["intensity"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + scale = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: group_input.outputs["spline tangent"], "Scale": multiply}, + attrs={"operation": "SCALE"}, + ) + + set_position_2 = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + "Selection": switch.outputs[2], + "Offset": scale.outputs["Vector"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_position_2} + ) + + +@node_utils.to_nodegroup( + "nodegroup_add_crater", singleton=False, type="GeometryNodeTree" +) +def nodegroup_add_crater(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketGeometry", "Points", None), + ("NodeSocketFloat", "Strength", 1.5), + ], + ) + + normal = nw.new_node(Nodes.InputNormal) + + geometry_proximity = nw.new_node( + Nodes.Proximity, + input_kwargs={"Target": group_input.outputs["Points"]}, + attrs={"target_element": "POINTS"}, + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": geometry_proximity.outputs["Distance"], + 2: 0.08, + 3: -0.04, + 4: 0.0, + }, + ) + + smooth_min = nw.new_node( + Nodes.Math, + input_kwargs={0: map_range_1.outputs["Result"], 1: 0.0, 2: 0.05}, + attrs={"operation": "SMOOTH_MIN"}, + ) + + scale = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: normal, "Scale": smooth_min}, + attrs={"operation": "SCALE"}, + ) + + scale_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: scale.outputs["Vector"], + "Scale": group_input.outputs["Strength"], + }, + attrs={"operation": "SCALE"}, + ) + + set_position_1 = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + "Offset": scale_1.outputs["Vector"], + }, + ) + + subdivision_surface = nw.new_node( + Nodes.SubdivisionSurface, input_kwargs={"Mesh": set_position_1} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": subdivision_surface} + ) + + +@node_utils.to_nodegroup( + "nodegroup_mix_vector", singleton=False, type="GeometryNodeTree" +) +def nodegroup_mix_vector(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVector", "Vector 1", (0.0, 0.0, 0.0)), + ("NodeSocketVector", "Vector 2", (0.0, 0.0, 0.0)), + ("NodeSocketFloat", "alpha", 0.5), + ], + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: 1.0, 1: group_input.outputs["alpha"]}, + attrs={"operation": "SUBTRACT"}, + ) + + scale = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: group_input.outputs["Vector 1"], "Scale": subtract}, + attrs={"operation": "SCALE"}, + ) + + scale_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: group_input.outputs["Vector 2"], + "Scale": group_input.outputs["alpha"], + }, + attrs={"operation": "SCALE"}, + ) + + add = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: scale.outputs["Vector"], 1: scale_1.outputs["Vector"]}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Vector": add.outputs["Vector"]} + ) + + +@node_utils.to_nodegroup( + "nodegroup_add_noise_scalar", singleton=False, type="GeometryNodeTree" +) +def nodegroup_add_noise_scalar(nw: NodeWrangler): + # Code generated using version 2.4.3 of the 
node_transpiler + + position = nw.new_node(Nodes.InputPosition) + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "value", 0.5), + ("NodeSocketFloat", "noise random seed", 0.0), + ("NodeSocketFloat", "noise scale", 5.0), + ("NodeSocketFloat", "noise amount", 0.5), + ], + ) + + add = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: position, 1: group_input.outputs["noise random seed"]}, + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": add.outputs["Vector"], + "Scale": group_input.outputs["noise scale"], + }, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: noise_texture.outputs["Fac"]}, + attrs={"operation": "SUBTRACT"}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract, 1: group_input.outputs["noise amount"]}, + attrs={"operation": "MULTIPLY"}, + ) + + add_1 = nw.new_node( + Nodes.Math, input_kwargs={0: multiply, 1: group_input.outputs["value"]} + ) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"value": add_1}) + + +@node_utils.to_nodegroup( + "nodegroup_attach_to_nearest", singleton=False, type="GeometryNodeTree" +) +def nodegroup_attach_to_nearest(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketGeometry", "Target", None), + ("NodeSocketFloat", "threshold", 0.0), + ("NodeSocketFloat", "multiplier", 0.5), + ("NodeSocketVectorTranslation", "Offset", (0.0, 0.0, 0.0)), + ], + ) + + position = nw.new_node(Nodes.InputPosition) + + geometry_proximity = nw.new_node( + Nodes.Proximity, input_kwargs={"Target": group_input.outputs["Target"]} + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["threshold"], + 1: geometry_proximity.outputs["Distance"], + }, + attrs={"operation": "SUBTRACT"}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract, 1: group_input.outputs["multiplier"]}, + attrs={"operation": "MULTIPLY"}, + ) + + exponent = nw.new_node( + Nodes.Math, input_kwargs={0: multiply}, attrs={"operation": "EXPONENT"} + ) + + clamp = nw.new_node(Nodes.Clamp, input_kwargs={"Value": exponent}) + + mixvector = nw.new_node( + nodegroup_mix_vector().name, + input_kwargs={ + "Vector 1": position, + "Vector 2": geometry_proximity.outputs["Position"], + "alpha": clamp, + }, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + "Position": mixvector, + "Offset": group_input.outputs["Offset"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_position} + ) + + +@node_utils.to_nodegroup( + "nodegroup_manhattan", singleton=False, type="GeometryNodeTree" +) +def nodegroup_manhattan(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVector", "v1", (0.0, 0.0, 0.0)), + ("NodeSocketVector", "v2", (0.0, 0.0, 0.0)), + ], + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": group_input.outputs["v1"]} + ) + + separate_xyz_1 = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": group_input.outputs["v2"]} + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["X"], 1: separate_xyz_1.outputs["X"]}, + attrs={"operation": "SUBTRACT"}, + ) + + absolute = nw.new_node( + Nodes.Math, 
input_kwargs={0: subtract}, attrs={"operation": "ABSOLUTE"} + ) + + subtract_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["Y"], 1: separate_xyz_1.outputs["Y"]}, + attrs={"operation": "SUBTRACT"}, + ) + + absolute_1 = nw.new_node( + Nodes.Math, input_kwargs={0: subtract_1}, attrs={"operation": "ABSOLUTE"} + ) + + add = nw.new_node(Nodes.Math, input_kwargs={0: absolute, 1: absolute_1}) + + subtract_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["Z"], 1: separate_xyz_1.outputs["Z"]}, + attrs={"operation": "SUBTRACT"}, + ) + + absolute_2 = nw.new_node( + Nodes.Math, input_kwargs={0: subtract_2}, attrs={"operation": "ABSOLUTE"} + ) + + add_1 = nw.new_node(Nodes.Math, input_kwargs={0: add, 1: absolute_2}) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Value": add_1}) + + +@node_utils.to_nodegroup( + "nodegroup_rot_semmetry", singleton=False, type="GeometryNodeTree" +) +def nodegroup_rot_semmetry(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketInt", "N", 0), + ("NodeSocketFloat", "spline parameter", 0.5), + ], + ) + + divide = nw.new_node( + Nodes.Math, + input_kwargs={1: group_input.outputs["N"]}, + attrs={"operation": "DIVIDE"}, + ) + + pingpong = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["spline parameter"], 1: divide}, + attrs={"operation": "PINGPONG"}, + ) + + map_range = nw.new_node(Nodes.MapRange, input_kwargs={"Value": pingpong, 2: divide}) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Result": map_range.outputs["Result"]} + ) + + +@node_utils.to_nodegroup( + "nodegroup_scale_mesh", singleton=False, type="GeometryNodeTree" +) +def nodegroup_scale_mesh(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketFloat", "Scale", 1.0), + ], + ) + + position = nw.new_node(Nodes.InputPosition) + + scale = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: position, "Scale": group_input.outputs["Scale"]}, + attrs={"operation": "SCALE"}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + "Position": scale.outputs["Vector"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_position} + ) + + +@node_utils.to_nodegroup("nodegroup_hair", singleton=False, type="GeometryNodeTree") +def nodegroup_hair(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "shape noise random seed", 0.0), + ("NodeSocketFloat", "shape noise amount", 1.0), + ("NodeSocketIntUnsigned", "length resolution", 8), + ("NodeSocketInt", "cross section resolution", 4), + ("NodeSocketFloat", "scale", 0.0), + ("NodeSocketFloatDistance", "Radius", 0.01), + ("NodeSocketMaterial", "Material", None), + ("NodeSocketVectorTranslation", "Start", (0.0, 0.0, 0.0)), + ("NodeSocketVectorTranslation", "Middle", (0.0, 0.3, 1.0)), + ("NodeSocketVectorTranslation", "End", (0.0, -1.4, 2.0)), + ], + ) + + quadratic_bezier_1 = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Resolution": group_input.outputs["length resolution"], + "Start": group_input.outputs["Start"], + "Middle": group_input.outputs["Middle"], + "End": group_input.outputs["End"], 
+ }, + ) + + subdivide_curve = nw.new_node( + Nodes.SubdivideCurve, input_kwargs={"Curve": quadratic_bezier_1} + ) + + position = nw.new_node(Nodes.InputPosition) + + add = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: position, 1: group_input.outputs["shape noise random seed"]}, + ) + + noise_texture_3 = nw.new_node( + Nodes.NoiseTexture, input_kwargs={"Vector": add.outputs["Vector"], "Scale": 1.0} + ) + + value_1 = nw.new_node(Nodes.Value) + value_1.outputs[0].default_value = 0.5 + + subtract = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: noise_texture_3.outputs["Color"], 1: value_1}, + attrs={"operation": "SUBTRACT"}, + ) + + spline_parameter_2 = nw.new_node(Nodes.SplineParameter) + + scale = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: subtract.outputs["Vector"], + "Scale": spline_parameter_2.outputs["Factor"], + }, + attrs={"operation": "SCALE"}, + ) + + scale_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: scale.outputs["Vector"], + "Scale": group_input.outputs["shape noise amount"], + }, + attrs={"operation": "SCALE"}, + ) + + set_position_1 = nw.new_node( + Nodes.SetPosition, + input_kwargs={"Geometry": subdivide_curve, "Offset": scale_1.outputs["Vector"]}, + ) + + curve_circle_1 = nw.new_node( + Nodes.CurveCircle, + input_kwargs={ + "Resolution": group_input.outputs["cross section resolution"], + "Radius": group_input.outputs["Radius"], + }, + ) + + curve_to_mesh_1 = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": set_position_1, + "Profile Curve": curve_circle_1.outputs["Curve"], + "Fill Caps": True, + }, + ) + + transform_1 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": curve_to_mesh_1, + "Scale": group_input.outputs["scale"], + }, + ) + + set_material_1 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": transform_1, + "Material": group_input.outputs["Material"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_material_1} + ) + + +@node_utils.to_nodegroup( + "nodegroup_align_top_to_horizon", singleton=False, type="GeometryNodeTree" +) +def nodegroup_align_top_to_horizon(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) + + bounding_box = nw.new_node( + Nodes.BoundingBox, input_kwargs={"Geometry": group_input.outputs["Geometry"]} + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": bounding_box.outputs["Max"]} + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["Z"], 1: -1.0}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply}) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + "Offset": combine_xyz, + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_position} + ) diff --git a/infinigen/assets/objects/fruits/general_fruit.py b/infinigen/assets/objects/fruits/general_fruit.py new file mode 100644 index 000000000..9196b7c4b --- /dev/null +++ b/infinigen/assets/objects/fruits/general_fruit.py @@ -0,0 +1,231 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
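+# Module note (descriptive, derived from the code below): FruitFactoryGeneralFruit
+# assembles a fruit from four sampled parameter dicts (cross section, shape, surface,
+# stem). String values of the form "noderef-<node>-<output>" are resolved by
+# parse_args() into the matching node socket, so the dicts can wire node groups
+# together declaratively; keys listed in the *_output_args dicts are exported as mesh
+# attributes via surface.add_geomod in create_asset.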
+ +# Authors: Yiming Zuo + + +import bpy +from numpy.random import normal + +from infinigen.assets.objects.fruits.cross_section_lib import ( + nodegroup_circle_cross_section, + nodegroup_coconut_cross_section, + nodegroup_star_cross_section, +) +from infinigen.assets.objects.fruits.fruit_utils import ( + nodegroup_align_top_to_horizon, + nodegroup_shape_quadratic, +) +from infinigen.assets.objects.fruits.stem_lib import ( + nodegroup_basic_stem, + nodegroup_calyx_stem, + nodegroup_coconut_stem, + nodegroup_empty_stem, + nodegroup_pineapple_stem, +) +from infinigen.assets.objects.fruits.surfaces.apple_surface import ( + nodegroup_apple_surface, +) +from infinigen.assets.objects.fruits.surfaces.blackberry_surface import ( + nodegroup_blackberry_surface, +) +from infinigen.assets.objects.fruits.surfaces.coconutgreen_surface import ( + nodegroup_coconutgreen_surface, +) +from infinigen.assets.objects.fruits.surfaces.coconuthairy_surface import ( + nodegroup_coconuthairy_surface, +) +from infinigen.assets.objects.fruits.surfaces.durian_surface import ( + nodegroup_durian_surface, +) +from infinigen.assets.objects.fruits.surfaces.pineapple_surface import ( + nodegroup_pineapple_surface, +) +from infinigen.assets.objects.fruits.surfaces.starfruit_surface import ( + nodegroup_starfruit_surface, +) +from infinigen.assets.objects.fruits.surfaces.strawberry_surface import ( + nodegroup_strawberry_surface, +) +from infinigen.core import surface +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.tagging import tag_object +from infinigen.core.util import blender as butil +from infinigen.core.util.math import FixedSeed + +crosssectionlib = { + "circle_cross_section": nodegroup_circle_cross_section, + "star_cross_section": nodegroup_star_cross_section, + "coconut_cross_section": nodegroup_coconut_cross_section, +} + +shapelib = {"shape_quadratic": nodegroup_shape_quadratic} + +surfacelib = { + "apple_surface": nodegroup_apple_surface, + "pineapple_surface": nodegroup_pineapple_surface, + "starfruit_surface": nodegroup_starfruit_surface, + "strawberry_surface": nodegroup_strawberry_surface, + "blackberry_surface": nodegroup_blackberry_surface, + "coconuthairy_surface": nodegroup_coconuthairy_surface, + "coconutgreen_surface": nodegroup_coconutgreen_surface, + "durian_surface": nodegroup_durian_surface, +} + +stemlib = { + "basic_stem": nodegroup_basic_stem, + "pineapple_stem": nodegroup_pineapple_stem, + "calyx_stem": nodegroup_calyx_stem, + "empty_stem": nodegroup_empty_stem, + "coconut_stem": nodegroup_coconut_stem, +} + + +def parse_args(nodeinfo, dictionary): + for k1, v1 in dictionary.items(): + if isinstance(v1, str) and v1.startswith("noderef"): + _, nodename, outputname = v1.split("-") + dictionary[k1] = nodeinfo[nodename].outputs[outputname] + + return dictionary + + +def general_fruit_geometry_nodes( + nw: NodeWrangler, cross_section_params, shape_params, surface_params, stem_params +): + nodeinfo = {} + + parse_args(nodeinfo, cross_section_params["cross_section_input_args"]) + crosssection = nw.new_node( + crosssectionlib[cross_section_params["cross_section_name"]]( + **cross_section_params["cross_section_func_args"] + ).name, + input_kwargs=cross_section_params["cross_section_input_args"], + ) + nodeinfo["crosssection"] = crosssection + parse_args(nodeinfo, cross_section_params["cross_section_output_args"]) + + parse_args(nodeinfo, shape_params["shape_input_args"]) + shapequadratic = nw.new_node( + 
shapelib[shape_params["shape_name"]](**shape_params["shape_func_args"]).name, + input_kwargs=shape_params["shape_input_args"], + ) + nodeinfo["shapequadratic"] = shapequadratic + parse_args(nodeinfo, shape_params["shape_output_args"]) + + parse_args(nodeinfo, surface_params["surface_input_args"]) + fruitsurface = nw.new_node( + surfacelib[surface_params["surface_name"]]( + **surface_params["surface_func_args"] + ).name, + input_kwargs=surface_params["surface_input_args"], + ) + nodeinfo["fruitsurface"] = fruitsurface + parse_args(nodeinfo, surface_params["surface_output_args"]) + + parse_args(nodeinfo, stem_params["stem_input_args"]) + stem = nw.new_node( + stemlib[stem_params["stem_name"]](**stem_params["stem_func_args"]).name, + input_kwargs=stem_params["stem_input_args"], + ) + nodeinfo["stem"] = stem + parse_args(nodeinfo, stem_params["stem_output_args"]) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [fruitsurface, stem]} + ) + + realize_instances = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": join_geometry} + ) + + align = nw.new_node( + nodegroup_align_top_to_horizon().name, + input_kwargs={"Geometry": realize_instances}, + ) + + output_dict = {"Geometry": align} + output_dict.update(cross_section_params["cross_section_output_args"]) + output_dict.update(shape_params["shape_output_args"]) + output_dict.update(surface_params["surface_output_args"]) + output_dict.update(stem_params["stem_output_args"]) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs=output_dict) + + +class FruitFactoryGeneralFruit(AssetFactory): + def __init__(self, factory_seed, scale=1.0, coarse=False): + super(FruitFactoryGeneralFruit, self).__init__(factory_seed, coarse=coarse) + + self.scale = scale + + def sample_cross_section_params(self, surface_resolution=256): + raise NotImplementedError + + def sample_shape_params(self, surface_resolution=256): + raise NotImplementedError + + def sample_surface_params(self): + raise NotImplementedError + + def sample_stem_params(self): + raise NotImplementedError + + def sample_geo_genome(self): + surface_params = self.sample_surface_params() + surface_resolution = surface_params["surface_resolution"] + + cross_section_params = self.sample_cross_section_params(surface_resolution) + shape_params = self.sample_shape_params(surface_resolution) + stem_params = self.sample_stem_params() + + return cross_section_params, shape_params, surface_params, stem_params + + def create_asset(self, **params): + bpy.ops.mesh.primitive_plane_add( + size=4, + enter_editmode=False, + align="WORLD", + location=(0, 0, 0), + scale=(1, 1, 1), + ) + obj = bpy.context.active_object + + with FixedSeed(self.factory_seed): + cross_section_params, shape_params, surface_params, stem_params = ( + self.sample_geo_genome() + ) + + scale_multiplier = surface_params["scale_multiplier"] + + output_list = [] + output_list.extend(cross_section_params["cross_section_output_args"].keys()) + output_list.extend(shape_params["shape_output_args"].keys()) + output_list.extend(surface_params["surface_output_args"].keys()) + output_list.extend(stem_params["stem_output_args"].keys()) + + surface.add_geomod( + obj, + general_fruit_geometry_nodes, + attributes=output_list, + apply=False, + input_args=[ + cross_section_params, + shape_params, + surface_params, + stem_params, + ], + ) + + bpy.ops.object.convert(target="MESH") + + obj = bpy.context.object + obj.scale *= normal(1, 0.1) * self.scale * scale_multiplier + butil.apply_transform(obj) + + # TODO 
remove when empty materials from geonodes is debugged + butil.purge_empty_materials(obj) + + tag_object(obj, "fruit_" + self.name) + return obj diff --git a/infinigen/assets/objects/fruits/pineapple.py b/infinigen/assets/objects/fruits/pineapple.py new file mode 100644 index 000000000..d187188bb --- /dev/null +++ b/infinigen/assets/objects/fruits/pineapple.py @@ -0,0 +1,124 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Yiming Zuo + + +import numpy as np +from numpy.random import normal, randint, uniform + +from infinigen.assets.objects.fruits.general_fruit import FruitFactoryGeneralFruit +from infinigen.core.util.color import hsv2rgba + + +class FruitFactoryPineapple(FruitFactoryGeneralFruit): + def __init__(self, factory_seed, scale=1.0, coarse=False): + super().__init__(factory_seed, scale=scale, coarse=coarse) + self.name = "pineapple" + + def sample_cross_section_params(self, surface_resolution=256): + return { + "cross_section_name": "circle_cross_section", + "cross_section_func_args": {}, + "cross_section_input_args": { + "random seed": uniform(-100, 100), + "radius": normal(1.2, 0.05), + "Resolution": surface_resolution, + }, + "cross_section_output_args": {}, + } + + def sample_shape_params(self, surface_resolution=256): + return { + "shape_name": "shape_quadratic", + "shape_func_args": { + "radius_control_points": [ + (0.0, 0.1031), + (0.1182, 0.5062), + (uniform(0.3, 0.7), 0.5594), + (0.8364, 0.425), + (0.9864, 0.1406), + (1.0, 0.0), + ] + }, + "shape_input_args": { + "Profile Curve": "noderef-crosssection-Geometry", + "Start": (uniform(-0.1, 0.1), uniform(-0.1, 0.1), uniform(-0.8, -1.2)), + "End": (0.0, 0.0, 1.0), + "random seed pos": uniform(-100, 100), + "noise scale pos": 0.5, + "noise amount pos": 0.4, + "Resolution": surface_resolution, + }, + "shape_output_args": {}, + } + + def sample_surface_params(self): + bottom_color = np.array((0.192, 0.898, 0.095)) + bottom_color[0] += np.random.normal(0.0, 0.025) + bottom_color[1] += np.random.normal(0.0, 0.05) + bottom_color[2] += np.random.normal(0.0, 0.05) + bottom_color_rgba = hsv2rgba(bottom_color) + + mid_color = np.array((0.05, 0.96, 0.55)) + mid_color[0] += np.random.normal(0.0, 0.025) + mid_color[1] += np.random.normal(0.0, 0.05) + mid_color[2] += np.random.normal(0.0, 0.05) + mid_color_rgba = hsv2rgba(mid_color) + + top_color = np.array((0.04, 0.99, 0.45)) + top_color[0] += np.random.normal(0.0, 0.025) + top_color[1] += np.random.normal(0.0, 0.05) + top_color[2] += np.random.normal(0.0, 0.05) + top_color_rgba = hsv2rgba(top_color) + + center_color = np.array((0.07, 0.63, 0.84)) + center_color[0] += np.random.normal(0.0, 0.025) + center_color[1] += np.random.normal(0.0, 0.05) + center_color[2] += np.random.normal(0.0, 0.05) + center_color_rgba = hsv2rgba(center_color) + + cell_distance = uniform(0.18, 0.22) + + return { + "surface_name": "pineapple_surface", + "surface_func_args": { + "color_bottom": bottom_color_rgba, + "color_mid": mid_color_rgba, + "color_top": top_color_rgba, + "color_center": center_color_rgba, + }, + "surface_input_args": { + "Geometry": "noderef-shapequadratic-Mesh", + "spline parameter": "noderef-shapequadratic-spline parameter", + "point distance": cell_distance, + "cell scale": cell_distance + 0.02, + }, + "surface_output_args": {"radius": "noderef-fruitsurface-spline parameter"}, + "surface_resolution": 64, + "scale_multiplier": 1.8, + } + + def 
sample_stem_params(self): + leaf_color = np.array((0.32, 0.79, 0.20)) + leaf_color[0] += np.random.normal(0.0, 0.025) + leaf_color[1] += np.random.normal(0.0, 0.05) + leaf_color[2] += np.random.normal(0.0, 0.05) + leaf_color_rgba = hsv2rgba(leaf_color) + + return { + "stem_name": "pineapple_stem", + "stem_func_args": {"basic_color": leaf_color_rgba}, + "stem_input_args": { + "rotation base": (-uniform(0.5, 0.55), 0.0, 0.0), + "noise amount": 0.1, + "noise scale": uniform(10, 30), + "number of leaves": randint(40, 80), + "scale base": normal(0.5, 0.05), + "scale z base": normal(0.15, 0.03), + "scale z top": normal(0.62, 0.03), + "rot z base": normal(-0.62, 0.03), + "rot z top": normal(0.54, 0.03), + }, + "stem_output_args": {}, + } diff --git a/infinigen/assets/objects/fruits/seed_lib.py b/infinigen/assets/objects/fruits/seed_lib.py new file mode 100644 index 000000000..5d71da66a --- /dev/null +++ b/infinigen/assets/objects/fruits/seed_lib.py @@ -0,0 +1,105 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Yiming Zuo + + +from infinigen.core import surface +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler + + +def shader_seed_shader(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + texture_coordinate = nw.new_node(Nodes.TextureCoord) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={"Vector": texture_coordinate.outputs["Object"], "Scale": 7.8}, + ) + + mix = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": noise_texture.outputs["Fac"], + "Color1": (0.807, 0.624, 0.0704, 1.0), + "Color2": (0.3467, 0.2623, 0.0296, 1.0), + }, + ) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, input_kwargs={"Base Color": mix, "Roughness": 0.5114} + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf} + ) + + +@node_utils.to_nodegroup( + "nodegroup_strawberry_seed", singleton=False, type="GeometryNodeTree" +) +def nodegroup_strawberry_seed(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketIntUnsigned", "Resolution", 8)] + ) + + quadratic_bezier = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Resolution": group_input.outputs["Resolution"], + "Start": (0.0, 0.0, -0.5), + "Middle": (0.0, 0.0, 0.0), + "End": (0.0, 0.0, 0.5), + }, + ) + + spline_parameter_1 = nw.new_node(Nodes.SplineParameter) + + float_curve_1 = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": spline_parameter_1.outputs["Factor"]} + ) + node_utils.assign_curve( + float_curve_1.mapping.curves[0], + [(0.0, 0.0281), (0.7023, 0.2781), (1.0, 0.0281)], + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: float_curve_1, 1: 0.9}, + attrs={"operation": "MULTIPLY"}, + ) + + set_curve_radius = nw.new_node( + Nodes.SetCurveRadius, + input_kwargs={"Curve": quadratic_bezier, "Radius": multiply}, + ) + + curve_circle = nw.new_node( + Nodes.CurveCircle, + input_kwargs={"Resolution": group_input.outputs["Resolution"]}, + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": set_curve_radius, + "Profile Curve": curve_circle.outputs["Curve"], + "Fill Caps": True, + }, + ) + + set_material_1 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": curve_to_mesh, + 
"Material": surface.shaderfunc_to_material(shader_seed_shader), + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_material_1} + ) diff --git a/infinigen/assets/objects/fruits/starfruit.py b/infinigen/assets/objects/fruits/starfruit.py new file mode 100644 index 000000000..62ce61f43 --- /dev/null +++ b/infinigen/assets/objects/fruits/starfruit.py @@ -0,0 +1,112 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Yiming Zuo + + +import numpy as np +from numpy.random import normal, uniform + +from infinigen.assets.objects.fruits.general_fruit import FruitFactoryGeneralFruit +from infinigen.core.util.color import hsv2rgba + + +class FruitFactoryStarfruit(FruitFactoryGeneralFruit): + def __init__(self, factory_seed, scale=1.0, coarse=False): + super().__init__(factory_seed, scale=scale, coarse=coarse) + self.name = "starfruit" + + def sample_cross_section_params(self, surface_resolution=256): + return { + "cross_section_name": "star_cross_section", + "cross_section_func_args": {}, + "cross_section_input_args": { + "random seed": uniform(-100, 100), + "radius": normal(1.3, 0.05), + "Resolution": surface_resolution, + }, + "cross_section_output_args": { + "star parameters": "noderef-crosssection-curve parameters" + }, + } + + def sample_shape_params(self, surface_resolution=256): + return { + "shape_name": "shape_quadratic", + "shape_func_args": { + "radius_control_points": [ + (0.0727, 0.2), + (0.2636, 0.6063), + (uniform(0.45, 0.65), uniform(0.7, 0.9)), + (0.8886, 0.6094), + (1.0, 0.0), + ], + }, + "shape_input_args": { + "Profile Curve": "noderef-crosssection-Geometry", + "Resolution": surface_resolution, + "Start": (uniform(-0.3, 0.3), uniform(-0.3, 0.3), uniform(-1.0, -2.0)), + "End": (0.0, 0.0, 1.0), + }, + "shape_output_args": {}, + } + + def sample_surface_params(self): + base_color = np.array((0.10, 0.999, 0.799)) + base_color[0] += normal(0.0, 0.025) + base_color[1] += normal(0.0, 0.05) + base_color[2] += normal(0.0, 0.005) + base_color_rgba = hsv2rgba(base_color) + + ridge_color = np.copy(base_color) + ridge_color[0] += normal(0.04, 0.02) + ridge_color[2] += normal(-0.2, 0.02) + ridge_color_rgba = hsv2rgba(ridge_color) + + return { + "surface_name": "starfruit_surface", + "surface_func_args": { + "dent_control_points": [ + (0.0, 0.4219), + (0.0977, 0.4469), + (0.2273, 0.4844), + (0.5568, 0.5125), + (1.0, 0.5), + ], + "base_color": base_color_rgba, + "ridge_color": ridge_color_rgba, + }, + "surface_input_args": { + "Geometry": "noderef-shapequadratic-Mesh", + "spline parameter": "noderef-shapequadratic-spline parameter", + "spline tangent": "noderef-shapequadratic-spline tangent", + "distance to center": "noderef-shapequadratic-radius to center", + "dent intensity": normal(1.0, 0.1), + }, + "surface_output_args": {}, + "surface_resolution": 256, + "scale_multiplier": 1.0, + } + + def sample_stem_params(self): + stem_color = np.array((0.10, 0.96, 0.13)) + stem_color[0] += normal(0.0, 0.02) + stem_color[1] += normal(0.0, 0.05) + stem_color[2] += normal(0.0, 0.05) + stem_color_rgba = hsv2rgba(stem_color) + + return { + "stem_name": "basic_stem", + "stem_func_args": {"stem_color": stem_color_rgba}, + "stem_input_args": { + "quad_mid": ( + uniform(-0.1, 0.1), + uniform(-0.1, 0.1), + uniform(0.15, 0.2), + ), + "quad_end": (uniform(-0.2, 0.2), uniform(-0.2, 0.2), uniform(0.3, 0.4)), + "cross_radius": uniform(0.03, 
0.05), + "Translation": (0.0, 0.0, 0.8), + }, + "stem_output_args": {}, + } diff --git a/infinigen/assets/objects/fruits/stem_lib.py b/infinigen/assets/objects/fruits/stem_lib.py new file mode 100644 index 000000000..47dfaf439 --- /dev/null +++ b/infinigen/assets/objects/fruits/stem_lib.py @@ -0,0 +1,1138 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Yiming Zuo + + +from infinigen.assets.objects.fruits.cross_section_lib import ( + nodegroup_cylax_cross_section, +) +from infinigen.assets.objects.fruits.fruit_utils import ( + nodegroup_add_noise_scalar, + nodegroup_attach_to_nearest, + nodegroup_scale_mesh, + nodegroup_surface_bump, +) +from infinigen.core import surface +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler + + +@node_utils.to_nodegroup( + "nodegroup_empty_stem", singleton=False, type="GeometryNodeTree" +) +def nodegroup_empty_stem(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + points = nw.new_node("GeometryNodePoints", input_kwargs={"Count": 0}) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Geometry": points}) + + +def shader_basic_stem_shader(nw: NodeWrangler, stem_color): + # Code generated using version 2.4.3 of the node_transpiler + + texture_coordinate = nw.new_node(Nodes.TextureCoord) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": texture_coordinate.outputs["Object"], + "Scale": 0.8, + "Detail": 10.0, + "Roughness": 0.7, + }, + ) + + separate_rgb = nw.new_node( + Nodes.SeparateColor, input_kwargs={"Color": noise_texture.outputs["Color"]} + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": separate_rgb.outputs["Green"], + 1: 0.4, + 2: 0.7, + 3: 0.48, + 4: 0.55, + }, + attrs={"interpolation_type": "SMOOTHSTEP"}, + ) + + map_range_2 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": separate_rgb.outputs["Blue"], 1: 0.4, 2: 0.7, 3: 0.4}, + attrs={"interpolation_type": "SMOOTHSTEP"}, + ) + + hue_saturation_value = nw.new_node( + "ShaderNodeHueSaturation", + input_kwargs={ + "Hue": map_range_1.outputs["Result"], + "Value": map_range_2.outputs["Result"], + "Color": stem_color, + }, + ) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": hue_saturation_value, + "Specular": 0.1205, + "Roughness": 0.5068, + }, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf} + ) + + +@node_utils.to_nodegroup( + "nodegroup_basic_stem", singleton=False, type="GeometryNodeTree" +) +def nodegroup_basic_stem(nw: NodeWrangler, stem_color=(0.179, 0.836, 0.318, 1.0)): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVectorTranslation", "quad_start", (0.0, 0.0, 0.0)), + ("NodeSocketVectorTranslation", "quad_mid", (0.0, -0.05, 0.2)), + ("NodeSocketVectorTranslation", "quad_end", (-0.1, 0.0, 0.4)), + ("NodeSocketIntUnsigned", "quad_res", 128), + ("NodeSocketFloatDistance", "cross_radius", 0.08), + ("NodeSocketInt", "cross_res", 128), + ("NodeSocketVectorTranslation", "Translation", (0.0, 0.0, 1.0)), + ("NodeSocketVectorXYZ", "Scale", (1.0, 1.0, 2.0)), + ], + ) + + quadratic_bezier_2 = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Resolution": group_input.outputs["quad_res"], + 
"Start": group_input.outputs["quad_start"], + "Middle": group_input.outputs["quad_mid"], + "End": group_input.outputs["quad_end"], + }, + ) + + curve_circle_2 = nw.new_node( + Nodes.CurveCircle, + input_kwargs={ + "Resolution": group_input.outputs["cross_res"], + "Radius": group_input.outputs["cross_radius"], + }, + ) + + curve_to_mesh_2 = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": quadratic_bezier_2, + "Profile Curve": curve_circle_2.outputs["Curve"], + "Fill Caps": True, + }, + ) + + surfacebump = nw.new_node( + nodegroup_surface_bump().name, + input_kwargs={"Geometry": curve_to_mesh_2, "Displacement": 0.01, "Scale": 2.9}, + ) + + surfacebump_1 = nw.new_node( + nodegroup_surface_bump().name, + input_kwargs={"Geometry": surfacebump, "Scale": 20.0}, + ) + + transform_3 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": surfacebump_1, + "Translation": group_input.outputs["Translation"], + "Scale": group_input.outputs["Scale"], + }, + ) + + set_material = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": transform_3, + "Material": surface.shaderfunc_to_material( + shader_basic_stem_shader, stem_color + ), + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_material} + ) + + +def shader_calyx_shader(nw: NodeWrangler, stem_color): + # Code generated using version 2.4.3 of the node_transpiler + + noise_texture_1 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={"Scale": 2.8, "Detail": 10.0, "Roughness": 0.7}, + ) + + separate_rgb = nw.new_node( + Nodes.SeparateColor, input_kwargs={"Color": noise_texture_1.outputs["Color"]} + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": separate_rgb.outputs["Green"], + 1: 0.4, + 2: 0.7, + 3: 0.48, + 4: 0.55, + }, + attrs={"interpolation_type": "SMOOTHSTEP"}, + ) + + map_range_2 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": separate_rgb.outputs["Blue"], 1: 0.4, 2: 0.7, 3: 0.4}, + attrs={"interpolation_type": "SMOOTHSTEP"}, + ) + + hue_saturation_value = nw.new_node( + "ShaderNodeHueSaturation", + input_kwargs={ + "Hue": map_range_1.outputs["Result"], + "Value": map_range_2.outputs["Result"], + "Color": stem_color, + }, + ) + + translucent_bsdf = nw.new_node( + Nodes.TranslucentBSDF, input_kwargs={"Color": hue_saturation_value} + ) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": hue_saturation_value, + "Specular": 0.5136, + "Roughness": 0.7614, + }, + ) + + mix_shader = nw.new_node( + Nodes.MixShader, + input_kwargs={"Fac": 0.5083, 1: translucent_bsdf, 2: principled_bsdf}, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": mix_shader} + ) + + +### straberry calyx ### +@node_utils.to_nodegroup( + "nodegroup_calyx_stem", singleton=False, type="GeometryNodeTree" +) +def nodegroup_calyx_stem(nw: NodeWrangler, stem_color=(0.1678, 0.4541, 0.0397, 1.0)): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketInt", "fork number", 10), + ("NodeSocketFloatDistance", "outer radius", 1.0), + ("NodeSocketFloat", "inner radius", 0.2), + ("NodeSocketFloat", "cross section noise amount", 0.4), + ("NodeSocketFloat", "z noise amount", 1.0), + ("NodeSocketFloatDistance", "noise random seed", 0.0), + ("NodeSocketVectorTranslation", "quad_start", (0.0, 0.0, 0.0)), + ("NodeSocketVectorTranslation", "quad_mid", (0.0, -0.05, 0.2)), + 
("NodeSocketVectorTranslation", "quad_end", (-0.1, 0.0, 0.4)), + ("NodeSocketVectorTranslation", "Translation", (0.0, 0.0, 1.0)), + ("NodeSocketFloatDistance", "cross_radius", 0.04), + ], + ) + + cylaxcrosssection = nw.new_node( + nodegroup_cylax_cross_section().name, + input_kwargs={ + "fork number": group_input.outputs["fork number"], + "bottom radius": group_input.outputs["inner radius"], + "noise random seed": group_input.outputs["noise random seed"], + "noise amount": group_input.outputs["cross section noise amount"], + "radius": group_input.outputs["outer radius"], + }, + ) + + fill_curve = nw.new_node(Nodes.FillCurve, input_kwargs={"Curve": cylaxcrosssection}) + + triangulate = nw.new_node( + "GeometryNodeTriangulate", input_kwargs={"Mesh": fill_curve} + ) + + subdivide_mesh = nw.new_node( + Nodes.SubdivideMesh, input_kwargs={"Mesh": triangulate, "Level": 3} + ) + + position = nw.new_node(Nodes.InputPosition) + + separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": position}) + + addnoisescalar = nw.new_node( + nodegroup_add_noise_scalar().name, + input_kwargs={ + "value": separate_xyz.outputs["Z"], + "noise random seed": group_input.outputs["noise random seed"], + "noise scale": 1.0, + "noise amount": group_input.outputs["z noise amount"], + }, + ) + + length = nw.new_node( + Nodes.VectorMath, input_kwargs={0: position}, attrs={"operation": "LENGTH"} + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: addnoisescalar, 1: length.outputs["Value"]}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": separate_xyz.outputs["X"], + "Y": separate_xyz.outputs["Y"], + "Z": multiply, + }, + ) + + set_position_1 = nw.new_node( + Nodes.SetPosition, + input_kwargs={"Geometry": subdivide_mesh, "Position": combine_xyz}, + ) + + basicstem = nw.new_node( + nodegroup_basic_stem().name, + input_kwargs={ + "quad_start": group_input.outputs["quad_start"], + "quad_mid": group_input.outputs["quad_mid"], + "quad_end": group_input.outputs["quad_end"], + "quad_res": 16, + "cross_radius": group_input.outputs["cross_radius"], + "cross_res": 16, + "Translation": (0.0, 0.0, 0.0), + }, + ) + + join_geometry_2 = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [set_position_1, basicstem]} + ) + + set_material = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": join_geometry_2, + "Material": surface.shaderfunc_to_material(shader_calyx_shader, stem_color), + }, + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": set_material, + "Translation": group_input.outputs["Translation"], + "Scale": (1.0, 1.0, 1.0), + }, + ) + + attachtonearest = nw.new_node( + nodegroup_attach_to_nearest().name, + input_kwargs={ + "Geometry": transform, + "Target": group_input.outputs["Geometry"], + "threshold": 0.1, + "multiplier": 10.0, + "Offset": (0.0, 0.0, 0.05), + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": attachtonearest} + ) + + +### coconutgreen ### +@node_utils.to_nodegroup("nodegroup_jigsaw", singleton=False, type="GeometryNodeTree") +def nodegroup_jigsaw(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + spline_parameter = nw.new_node(Nodes.SplineParameter) + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "Value", 0.5), + ("NodeSocketFloat", "noise scale", 30.0), + ("NodeSocketFloatFactor", "noise randomness", 0.7), + ("NodeSocketFloat", "From Max", 0.15), + 
("NodeSocketFloat", "To Min", 0.9), + ], + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={1: group_input.outputs["Value"]}, + attrs={"operation": "SUBTRACT"}, + ) + + add = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Value"]}) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": spline_parameter.outputs["Factor"], 1: subtract, 2: add}, + ) + + voronoi_texture = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={ + "W": map_range_1.outputs["Result"], + "Scale": group_input.outputs["noise scale"], + "Randomness": group_input.outputs["noise randomness"], + }, + attrs={"voronoi_dimensions": "1D", "feature": "DISTANCE_TO_EDGE"}, + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": voronoi_texture.outputs["Distance"], + 2: group_input.outputs["From Max"], + 3: group_input.outputs["To Min"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Result": map_range.outputs["Result"]} + ) + + +def shader_coconut_calyx_shader(nw: NodeWrangler, basic_color, edge_color): + # Code generated using version 2.4.3 of the node_transpiler + + texture_coordinate = nw.new_node(Nodes.TextureCoord) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": texture_coordinate.outputs["Object"], + "Scale": 10.0, + "Detail": 10.0, + "Roughness": 0.7, + }, + ) + + separate_rgb = nw.new_node( + Nodes.SeparateColor, input_kwargs={"Color": noise_texture.outputs["Color"]} + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": separate_rgb.outputs["Green"], + 1: 0.4, + 2: 0.7, + 3: 0.45, + 4: 0.52, + }, + attrs={"interpolation_type": "SMOOTHSTEP"}, + ) + + map_range_2 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": separate_rgb.outputs["Blue"], 1: 0.4, 2: 0.7, 3: 0.6}, + attrs={"interpolation_type": "SMOOTHSTEP"}, + ) + + attribute = nw.new_node( + Nodes.Attribute, attrs={"attribute_name": "distance to edge"} + ) + + noise_texture_1 = nw.new_node(Nodes.NoiseTexture, input_kwargs={"Scale": 3.0}) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: noise_texture_1.outputs["Fac"]}, + attrs={"operation": "SUBTRACT"}, + ) + + multiply = nw.new_node( + Nodes.Math, input_kwargs={0: subtract, 1: 0.1}, attrs={"operation": "MULTIPLY"} + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: attribute.outputs["Fac"], 1: multiply} + ) + + colorramp = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": add}) + colorramp.color_ramp.elements.new(0) + colorramp.color_ramp.elements[0].position = 0.0159 + colorramp.color_ramp.elements[0].color = edge_color # (0.0369, 0.0086, 0.0, 1.0) + colorramp.color_ramp.elements[1].position = 0.0716 + colorramp.color_ramp.elements[1].color = basic_color # (0.1119, 0.2122, 0.008, 1.0) + colorramp.color_ramp.elements[2].position = 1.0 + colorramp.color_ramp.elements[2].color = basic_color # (0.1119, 0.2122, 0.008, 1.0) + + hue_saturation_value = nw.new_node( + "ShaderNodeHueSaturation", + input_kwargs={ + "Hue": map_range_1.outputs["Result"], + "Value": map_range_2.outputs["Result"], + "Color": colorramp.outputs["Color"], + }, + ) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={"Base Color": hue_saturation_value, "Roughness": 0.90}, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf} + ) + + +@node_utils.to_nodegroup( + "nodegroup_coconut_calyx", singleton=False, type="GeometryNodeTree" +) +def nodegroup_coconut_calyx(nw: NodeWrangler, basic_color, 
edge_color): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "width", 0.5), + ("NodeSocketInt", "resolution", 128), + ("NodeSocketFloatDistance", "radius", 1.0), + ("NodeSocketInt", "subdivision", 5), + ("NodeSocketFloat", "bump displacement", 0.16), + ("NodeSocketFloat", "bump scale", 3.22), + ], + ) + + curve_circle = nw.new_node( + Nodes.CurveCircle, + input_kwargs={ + "Resolution": group_input.outputs["resolution"], + "Radius": group_input.outputs["radius"], + }, + ) + + jigsaw = nw.new_node( + nodegroup_jigsaw().name, + input_kwargs={"Value": group_input.outputs["width"], "noise scale": 30.22}, + ) + + scale_mesh = nw.new_node( + nodegroup_scale_mesh().name, + input_kwargs={"Geometry": curve_circle.outputs["Curve"], "Scale": jigsaw}, + label="ScaleMesh", + ) + + spline_parameter_1 = nw.new_node(Nodes.SplineParameter) + + value = nw.new_node(Nodes.Value) + value.outputs[0].default_value = 0.5 + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: spline_parameter_1.outputs["Factor"], 1: value}, + attrs={"operation": "SUBTRACT"}, + ) + + absolute = nw.new_node( + Nodes.Math, input_kwargs={0: subtract}, attrs={"operation": "ABSOLUTE"} + ) + + map_range_2 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": absolute, 1: value, 2: group_input.outputs["width"]}, + ) + + float_curve = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": map_range_2.outputs["Result"]} + ) + node_utils.assign_curve( + float_curve.mapping.curves[0], + [(0.0, 0.0), (0.2409, 0.0), (0.7068, 0.275), (1.0, 0.9781)], + ) + + scale_mesh_1 = nw.new_node( + nodegroup_scale_mesh().name, + input_kwargs={"Geometry": scale_mesh, "Scale": float_curve}, + label="ScaleMesh", + ) + + fill_curve = nw.new_node( + Nodes.FillCurve, input_kwargs={"Curve": scale_mesh_1}, attrs={"mode": "NGONS"} + ) + + subdivide_mesh = nw.new_node( + Nodes.SubdivideMesh, + input_kwargs={"Mesh": fill_curve, "Level": group_input.outputs["subdivision"]}, + ) + + surfacebump = nw.new_node( + nodegroup_surface_bump().name, + input_kwargs={ + "Geometry": subdivide_mesh, + "Displacement": group_input.outputs["bump displacement"], + "Scale": group_input.outputs["bump scale"], + }, + ) + + set_material = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": surfacebump, + "Material": surface.shaderfunc_to_material( + shader_coconut_calyx_shader, basic_color, edge_color + ), + }, + ) + + geometry_proximity = nw.new_node( + Nodes.Proximity, + input_kwargs={"Target": fill_curve}, + attrs={"target_element": "EDGES"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": set_material, + "distance to edge": geometry_proximity.outputs["Distance"], + }, + ) + + +@node_utils.to_nodegroup( + "nodegroup_coconut_stem", singleton=False, type="GeometryNodeTree" +) +def nodegroup_coconut_stem( + nw: NodeWrangler, + basic_color=(0.1119, 0.2122, 0.008, 1.0), + edge_color=(0.0369, 0.0086, 0.0, 1.0), +): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Target", None), + ("NodeSocketFloat", "radius", 0.0), + ("NodeSocketVectorTranslation", "Translation", (0.0, 0.0, 1.08)), + ("NodeSocketInt", "Count", 6), + ("NodeSocketFloat", "base scale", 0.3), + ("NodeSocketFloat", "top scale", 0.24), + ("NodeSocketFloat", "attach threshold", 0.1), + ("NodeSocketFloat", "attach multiplier", 10.0), + ("NodeSocketFloat", "calyx 
width", 0.5), + ("NodeSocketVectorTranslation", "stem_mid", (0.0, 0.0, 1.0)), + ("NodeSocketVectorTranslation", "stem_end", (0.0, 0.0, 1.0)), + ("NodeSocketFloat", "stem_radius", 0.5), + ], + ) + + coconutcalyx = nw.new_node( + nodegroup_coconut_calyx(basic_color=basic_color, edge_color=edge_color).name, + input_kwargs={"width": group_input.outputs["calyx width"]}, + ) + + capture_attribute_1 = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={ + "Geometry": coconutcalyx.outputs["Geometry"], + 2: coconutcalyx.outputs["distance to edge"], + }, + ) + + spiral = nw.new_node( + "GeometryNodeCurveSpiral", + input_kwargs={ + "Rotations": 1.0, + "Start Radius": group_input.outputs["radius"], + "End Radius": group_input.outputs["radius"], + "Height": 0.0, + }, + ) + + spline_parameter = nw.new_node(Nodes.SplineParameter) + + capture_attribute = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={"Geometry": spiral, 2: spline_parameter.outputs["Factor"]}, + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": capture_attribute.outputs["Geometry"], + "Translation": group_input.outputs["Translation"], + }, + ) + + curve_to_points = nw.new_node( + Nodes.CurveToPoints, + input_kwargs={"Curve": transform, "Count": group_input.outputs["Count"]}, + ) + + align_euler_to_vector = nw.new_node( + Nodes.AlignEulerToVector, + input_kwargs={"Rotation": curve_to_points.outputs["Rotation"]}, + attrs={"axis": "Z"}, + ) + + map_range_2 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": capture_attribute.outputs[2], + 3: group_input.outputs["base scale"], + 4: group_input.outputs["top scale"], + }, + attrs={"interpolation_type": "SMOOTHERSTEP"}, + ) + + instance_on_points = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={ + "Points": curve_to_points.outputs["Points"], + "Instance": capture_attribute_1.outputs["Geometry"], + "Rotation": align_euler_to_vector, + "Scale": map_range_2.outputs["Result"], + }, + ) + + realize_instances = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": instance_on_points} + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": capture_attribute.outputs[2], 4: 0.01} + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"Z": map_range_1.outputs["Result"]} + ) + + attachtonearest = nw.new_node( + nodegroup_attach_to_nearest().name, + input_kwargs={ + "Geometry": realize_instances, + "Target": group_input.outputs["Target"], + "threshold": group_input.outputs["attach threshold"], + "multiplier": group_input.outputs["attach multiplier"], + "Offset": combine_xyz, + }, + ) + + basicstem = nw.new_node( + nodegroup_basic_stem(basic_color).name, + input_kwargs={ + "cross_radius": group_input.outputs["stem_radius"], + "quad_mid": group_input.outputs["stem_mid"], + "quad_end": group_input.outputs["stem_end"], + "Translation": (0.0, 0.0, 0.98), + "Scale": (1.0, 1.0, 1.0), + }, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [basicstem, attachtonearest]} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": join_geometry, + "distance to edge": capture_attribute_1.outputs[2], + }, + ) + + +### pineapple ### +def shader_leaf(nw: NodeWrangler, basic_color): + # Code generated using version 2.4.3 of the node_transpiler + + texture_coordinate_1 = nw.new_node(Nodes.TextureCoord) + + noise_texture_1 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": texture_coordinate_1.outputs["Object"], + "Scale": 3.48, + "Detail": 10.0, + 
"Roughness": 0.7, + }, + ) + + separate_rgb = nw.new_node( + Nodes.SeparateColor, input_kwargs={"Color": noise_texture_1.outputs["Color"]} + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": separate_rgb.outputs["Green"], + 1: 0.4, + 2: 0.7, + 3: 0.48, + 4: 0.55, + }, + attrs={"interpolation_type": "SMOOTHSTEP"}, + ) + + map_range_3 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": separate_rgb.outputs["Red"], + 1: 0.52, + 2: 0.48, + 3: 0.32, + 4: 0.74, + }, + attrs={"interpolation_type": "SMOOTHSTEP"}, + ) + + map_range_2 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": separate_rgb.outputs["Blue"], + 1: 0.4, + 2: 0.7, + 3: 0.94, + 4: 1.1, + }, + attrs={"interpolation_type": "SMOOTHSTEP"}, + ) + + hue_saturation_value = nw.new_node( + "ShaderNodeHueSaturation", + input_kwargs={ + "Hue": map_range_1.outputs["Result"], + "Saturation": map_range_3.outputs["Result"], + "Value": map_range_2.outputs["Result"], + "Color": basic_color, + }, + ) # (0.0545, 0.1981, 0.0409, 1.0) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": hue_saturation_value, + "Specular": 0.5955, + "Roughness": 1.0, + }, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf} + ) + + +@node_utils.to_nodegroup( + "nodegroup_pineapple_leaf", singleton=False, type="GeometryNodeTree" +) +def nodegroup_pineapple_leaf(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketIntUnsigned", "Resolution", 8), + ("NodeSocketVectorTranslation", "Start", (0.0, 0.0, 0.0)), + ("NodeSocketVectorTranslation", "Middle", (0.0, -0.32, 3.72)), + ("NodeSocketVectorTranslation", "End", (0.0, 0.92, 4.32)), + ], + ) + + quadratic_bezier_1 = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Resolution": group_input.outputs["Resolution"], + "Start": group_input.outputs["Start"], + "Middle": group_input.outputs["Middle"], + "End": group_input.outputs["End"], + }, + ) + + spline_parameter_1 = nw.new_node(Nodes.SplineParameter) + + float_curve_1 = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": spline_parameter_1.outputs["Factor"]} + ) + node_utils.assign_curve( + float_curve_1.mapping.curves[0], [(0.0, 1.0), (0.6818, 0.5063), (1.0, 0.0)] + ) + + set_curve_radius_1 = nw.new_node( + Nodes.SetCurveRadius, + input_kwargs={"Curve": quadratic_bezier_1, "Radius": float_curve_1}, + ) + + curve_circle_1 = nw.new_node( + Nodes.CurveCircle, + input_kwargs={"Resolution": group_input.outputs["Resolution"]}, + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": curve_circle_1.outputs["Curve"], + "Scale": (0.5, 0.1, 1.0), + }, + ) + + position = nw.new_node(Nodes.InputPosition) + + separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": position}) + + absolute = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["X"]}, + attrs={"operation": "ABSOLUTE"}, + ) + + multiply = nw.new_node( + Nodes.Math, input_kwargs={0: absolute}, attrs={"operation": "MULTIPLY"} + ) + + combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Y": multiply}) + + set_position = nw.new_node( + Nodes.SetPosition, input_kwargs={"Geometry": transform, "Offset": combine_xyz} + ) + + curve_to_mesh_1 = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": set_curve_radius_1, + "Profile Curve": set_position, + "Fill Caps": True, + }, + ) + + group_output = nw.new_node( + 
Nodes.GroupOutput, input_kwargs={"Geometry": curve_to_mesh_1} + ) + + +@node_utils.to_nodegroup( + "nodegroup_pineapple_crown", singleton=False, type="GeometryNodeTree" +) +def nodegroup_pineapple_crown(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + spiral_1 = nw.new_node( + "GeometryNodeCurveSpiral", + input_kwargs={ + "Resolution": 10, + "Rotations": 5.0, + "Start Radius": 0.01, + "End Radius": 0.01, + "Height": 0.0, + }, + ) + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Leaf", None), + ("NodeSocketVectorTranslation", "translation", (0.0, 0.0, 0.7)), + ("NodeSocketVectorEuler", "rotation base", (-0.4363, 0.0, 0.0)), + ("NodeSocketInt", "number of leaves", 75), + ("NodeSocketFloat", "noise amount", 0.1), + ("NodeSocketFloat", "noise scale", 50.0), + ("NodeSocketFloat", "scale base", 0.4), + ("NodeSocketFloat", "scale z base", 0.12), + ("NodeSocketFloat", "scale z top", 0.68), + ("NodeSocketFloat", "rot z base", -0.64), + ("NodeSocketFloat", "rot z top", 0.38), + ], + ) + + transform_4 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": spiral_1, + "Translation": group_input.outputs["translation"], + }, + ) + + resample_curve_1 = nw.new_node( + Nodes.ResampleCurve, + input_kwargs={ + "Curve": transform_4, + "Count": group_input.outputs["number of leaves"], + }, + ) + + surfacebump = nw.new_node( + nodegroup_surface_bump().name, + input_kwargs={ + "Geometry": resample_curve_1, + "Displacement": group_input.outputs["noise amount"], + "Scale": group_input.outputs["noise scale"], + }, + ) + + curve_tangent_1 = nw.new_node(Nodes.CurveTangent) + + align_euler_to_vector_1 = nw.new_node( + Nodes.AlignEulerToVector, input_kwargs={"Vector": curve_tangent_1} + ) + + rotate_euler_3 = nw.new_node( + Nodes.RotateEuler, + input_kwargs={ + "Rotation": align_euler_to_vector_1, + "Rotate By": group_input.outputs["rotation base"], + }, + attrs={"space": "LOCAL"}, + ) + + spline_parameter_2 = nw.new_node(Nodes.SplineParameter) + + random_value = nw.new_node(Nodes.RandomValue, input_kwargs={2: -0.1, 3: 0.1}) + + add = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: spline_parameter_2.outputs["Factor"], + 1: random_value.outputs[1], + }, + ) + + map_range_2 = nw.new_node(Nodes.MapRange, input_kwargs={"Value": add, 3: 0.2}) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": map_range_2.outputs["Result"], + 3: group_input.outputs["rot z base"], + 4: group_input.outputs["rot z top"], + }, + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": map_range_1.outputs["Result"]} + ) + + rotate_euler_2 = nw.new_node( + Nodes.RotateEuler, + input_kwargs={"Rotation": rotate_euler_3, "Rotate By": combine_xyz_1}, + attrs={"space": "LOCAL"}, + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": map_range_2.outputs["Result"], + 3: group_input.outputs["scale z base"], + 4: group_input.outputs["scale z top"], + }, + attrs={"interpolation_type": "SMOOTHERSTEP"}, + ) + + combine_xyz_3 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": group_input.outputs["scale base"], + "Y": map_range.outputs["Result"], + "Z": map_range.outputs["Result"], + }, + ) + + instance_on_points_2 = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={ + "Points": surfacebump, + "Instance": group_input.outputs["Leaf"], + "Rotation": rotate_euler_2, + "Scale": combine_xyz_3, + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": 
instance_on_points_2} + ) + + +@node_utils.to_nodegroup( + "nodegroup_pineapple_stem", singleton=False, type="GeometryNodeTree" +) +def nodegroup_pineapple_stem(nw: NodeWrangler, basic_color): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketIntUnsigned", "Resolution", 16), + ("NodeSocketVectorTranslation", "Start", (0.0, 0.0, 0.0)), + ("NodeSocketVectorTranslation", "Middle", (0.0, -0.32, 3.72)), + ("NodeSocketVectorTranslation", "End", (0.0, 0.92, 4.32)), + ("NodeSocketVectorTranslation", "translation", (0.0, 0.0, 0.7)), + ("NodeSocketVectorEuler", "rotation base", (-0.5236, 0.0, 0.0)), + ("NodeSocketInt", "number of leaves", 75), + ("NodeSocketFloat", "noise amount", 0.1), + ("NodeSocketFloat", "noise scale", 20.0), + ("NodeSocketFloat", "scale base", 0.5), + ("NodeSocketFloat", "scale z base", 0.15), + ("NodeSocketFloat", "scale z top", 0.62), + ("NodeSocketFloat", "rot z base", -0.62), + ("NodeSocketFloat", "rot z top", 0.54), + ], + ) + + pineappleleaf = nw.new_node( + nodegroup_pineapple_leaf().name, + input_kwargs={ + "Resolution": group_input.outputs["Resolution"], + "Start": group_input.outputs["Start"], + "Middle": group_input.outputs["Middle"], + "End": group_input.outputs["End"], + }, + ) + + set_material_2 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": pineappleleaf, + "Material": surface.shaderfunc_to_material(shader_leaf, basic_color), + }, + ) + + pineapplecrown = nw.new_node( + nodegroup_pineapple_crown().name, + input_kwargs={ + "Leaf": set_material_2, + "translation": group_input.outputs["translation"], + "rotation base": group_input.outputs["rotation base"], + "noise amount": group_input.outputs["noise amount"], + "noise scale": group_input.outputs["noise scale"], + "scale base": group_input.outputs["scale base"], + "scale z base": group_input.outputs["scale z base"], + "scale z top": group_input.outputs["scale z top"], + "rot z base": group_input.outputs["rot z base"], + "rot z top": group_input.outputs["rot z top"], + "number of leaves": group_input.outputs["number of leaves"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": pineapplecrown} + ) diff --git a/infinigen/assets/objects/fruits/strawberry.py b/infinigen/assets/objects/fruits/strawberry.py new file mode 100644 index 000000000..bd0f817c8 --- /dev/null +++ b/infinigen/assets/objects/fruits/strawberry.py @@ -0,0 +1,119 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
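+#
+# Defines FruitFactoryStrawberry: samples randomized cross-section, quadratic
+# shape, seeded-surface and calyx-stem parameters for FruitFactoryGeneralFruit.
+#
+# Minimal usage sketch (an assumption: the factory exposes the spawn_asset(i)
+# interface used by other Infinigen asset factories):
+#
+#     factory = FruitFactoryStrawberry(factory_seed=0)
+#     strawberry_obj = factory.spawn_asset(0)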
+ +# Authors: Yiming Zuo + + +import numpy as np +from numpy.random import normal, randint, uniform + +from infinigen.assets.objects.fruits.general_fruit import FruitFactoryGeneralFruit +from infinigen.core.util.color import hsv2rgba + + +class FruitFactoryStrawberry(FruitFactoryGeneralFruit): + def __init__(self, factory_seed, scale=1.0, coarse=False): + super().__init__(factory_seed, scale=scale, coarse=coarse) + self.name = "strawberry" + + def sample_cross_section_params(self, surface_resolution=256): + return { + "cross_section_name": "circle_cross_section", + "cross_section_func_args": {}, + "cross_section_input_args": { + "random seed": uniform(-100, 100), + "radius": normal(1.0, 0.02), + "Resolution": surface_resolution, + }, + "cross_section_output_args": {}, + } + + def sample_shape_params(self, surface_resolution=256): + return { + "shape_name": "shape_quadratic", + "shape_func_args": { + "radius_control_points": [ + (0.0, 0.0), + (0.0227, 0.1313), + (0.2227, 0.4406), + (uniform(0.55, 0.7), uniform(0.7, 0.78)), + (0.925, 0.4719), + (1.0, 0.0), + ] + }, + "shape_input_args": { + "Profile Curve": "noderef-crosssection-Geometry", + "Start": (uniform(-0.2, 0.2), uniform(-0.2, 0.2), uniform(-0.5, -1.0)), + "End": (0.0, 0.0, 1.0), + "random seed pos": uniform(-100, 100), + "Resolution": surface_resolution, + }, + "shape_output_args": {}, + } + + def sample_surface_params(self): + main_color = np.array((0.0, 0.995, 0.85)) + main_color[0] += np.random.normal(0.0, 0.02) + main_color[1] += np.random.normal(0.0, 0.05) + main_color[2] += np.random.normal(0.0, 0.05) + main_color_rgba = hsv2rgba(main_color) + + top_color = np.array((0.15, 0.75, 0.75)) + top_color[0] += np.random.normal(0.0, 0.02) + top_color[1] += np.random.normal(0.0, 0.05) + top_color[2] += np.random.normal(0.0, 0.05) + top_color_rgba = hsv2rgba(top_color) + + return { + "surface_name": "strawberry_surface", + "surface_func_args": { + "top_pos": uniform(0.85, 0.95), + "main_color": main_color_rgba, + "top_color": top_color_rgba, + }, + "surface_input_args": { + "Geometry": "noderef-shapequadratic-Mesh", + "spline parameter": "noderef-shapequadratic-spline parameter", + "Distance Min": 0.15, + "Strength": 1.5, + "noise random seed": uniform(-100, 100), + }, + "surface_output_args": { + "strawberry seed height": "noderef-fruitsurface-curve parameters" + }, + "surface_resolution": 64, + "scale_multiplier": 0.5, + } + + def sample_stem_params(self): + stem_color = np.array((0.28, 0.91, 0.45)) + stem_color[0] += np.random.normal(0.0, 0.02) + stem_color[1] += np.random.normal(0.0, 0.05) + stem_color[2] += np.random.normal(0.0, 0.05) + stem_color_rgba = hsv2rgba(stem_color) + + stem_color = np.array((0.28, 0.91, 0.45)) + stem_color[0] += np.random.normal(0.0, 0.02) + stem_color[1] += np.random.normal(0.0, 0.05) + stem_color[2] += np.random.normal(0.0, 0.05) + stem_color_rgba = hsv2rgba(stem_color) + + return { + "stem_name": "calyx_stem", + "stem_func_args": {"stem_color": stem_color_rgba}, + "stem_input_args": { + "Geometry": "noderef-fruitsurface-Geometry", + "fork number": randint(8, 13), + "outer radius": uniform(0.7, 0.9), + "noise random seed": uniform(-100, 100), + "quad_mid": ( + uniform(-0.1, 0.1), + uniform(-0.1, 0.1), + uniform(0.15, 0.2), + ), + "quad_end": (uniform(-0.2, 0.2), uniform(-0.2, 0.2), uniform(0.3, 0.4)), + "cross_radius": uniform(0.035, 0.045), + "Translation": (0.0, 0.0, 0.97), + }, + "stem_output_args": {}, + } diff --git a/infinigen/assets/objects/fruits/surfaces/apple_surface.py 
b/infinigen/assets/objects/fruits/surfaces/apple_surface.py new file mode 100644 index 000000000..49a09635e --- /dev/null +++ b/infinigen/assets/objects/fruits/surfaces/apple_surface.py @@ -0,0 +1,159 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Yiming Zuo + + +from infinigen.assets.objects.fruits.fruit_utils import nodegroup_add_dent +from infinigen.core import surface +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler + + +def shader_apple_shader(nw: NodeWrangler, color1, color2, random_seed): + # Code generated using version 2.4.3 of the node_transpiler + + texture_coordinate = nw.new_node(Nodes.TextureCoord) + + value = nw.new_node(Nodes.Value) + value.outputs[0].default_value = random_seed + + add = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: texture_coordinate.outputs["Object"], 1: value}, + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": add.outputs["Vector"]} + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["Z"], 1: 0.2}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": separate_xyz.outputs["X"], + "Y": separate_xyz.outputs["Y"], + "Z": multiply, + }, + ) + + musgrave_texture_2 = nw.new_node( + Nodes.MusgraveTexture, + input_kwargs={ + "Vector": combine_xyz, + "Scale": 10.0, + "Detail": 10.0, + "Dimension": 0.3, + "Lacunarity": 3.0, + }, + ) + + musgrave_texture = nw.new_node( + Nodes.MusgraveTexture, + input_kwargs={"Vector": add.outputs["Vector"], "Scale": 0.6, "Lacunarity": 1.0}, + ) + + rgb = nw.new_node(Nodes.RGB) + rgb.outputs[0].default_value = color1 # + + rgb_1 = nw.new_node(Nodes.RGB) + rgb_1.outputs[0].default_value = color2 # + + mix = nw.new_node( + Nodes.MixRGB, + input_kwargs={"Fac": musgrave_texture, "Color1": rgb, "Color2": rgb_1}, + ) + + hue_saturation_value = nw.new_node( + "ShaderNodeHueSaturation", input_kwargs={"Hue": 0.55, "Color": mix} + ) + + mix_3 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": musgrave_texture_2, + "Color1": mix, + "Color2": hue_saturation_value, + }, + ) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, input_kwargs={"Base Color": mix_3} + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf} + ) + + +@node_utils.to_nodegroup( + "nodegroup_apple_surface", singleton=False, type="GeometryNodeTree" +) +def nodegroup_apple_surface( + nw: NodeWrangler, + color1=(0.2881, 0.6105, 0.0709, 1.0), + color2=(0.7454, 0.6172, 0.0296, 1.0), + random_seed=0.0, + dent_control_points=[ + (0.0045, 0.3719), + (0.0727, 0.4532), + (0.2273, 0.4844), + (0.5568, 0.5125), + (1.0, 0.5), + ], +): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketFloat", "spline parameter", 0.0), + ("NodeSocketVector", "spline tangent", (0.0, 0.0, 0.0)), + ("NodeSocketFloat", "distance to center", 0.0), + ], + ) + + adddent = nw.new_node( + nodegroup_add_dent(dent_control_points=dent_control_points).name, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + "spline parameter": group_input.outputs["spline parameter"], + "spline tangent": group_input.outputs["spline tangent"], + "distance to center": group_input.outputs["distance 
to center"], + "intensity": 1.5, + "max radius": 1.5, + }, + ) + + adddent_1 = nw.new_node( + nodegroup_add_dent(dent_control_points=dent_control_points).name, + input_kwargs={ + "Geometry": adddent, + "spline parameter": group_input.outputs["spline parameter"], + "spline tangent": group_input.outputs["spline tangent"], + "distance to center": group_input.outputs["distance to center"], + "bottom": True, + "intensity": -1.0, + "max radius": 1.5, + }, + ) + + set_material = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": adddent_1, + "Material": surface.shaderfunc_to_material( + shader_apple_shader, color1, color2, random_seed + ), + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_material} + ) diff --git a/infinigen/assets/objects/fruits/surfaces/blackberry_surface.py b/infinigen/assets/objects/fruits/surfaces/blackberry_surface.py new file mode 100644 index 000000000..7ad95e3e7 --- /dev/null +++ b/infinigen/assets/objects/fruits/surfaces/blackberry_surface.py @@ -0,0 +1,219 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Yiming Zuo + + +from infinigen.assets.objects.fruits.cross_section_lib import ( + nodegroup_circle_cross_section, +) +from infinigen.assets.objects.fruits.fruit_utils import ( + nodegroup_instance_on_points, + nodegroup_point_on_mesh, + nodegroup_random_rotation_scale, + nodegroup_shape_quadratic, + nodegroup_surface_bump, +) +from infinigen.core import surface +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler + + +def shader_berry_shader(nw: NodeWrangler, berry_color): + # Code generated using version 2.4.3 of the node_transpiler + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={"Base Color": berry_color, "Specular": 0.5705, "Roughness": 0.2}, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf} + ) + + +def shader_hair_shader(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + texture_coordinate = nw.new_node(Nodes.TextureCoord) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": texture_coordinate.outputs["Object"], + "Scale": 0.8, + "Detail": 10.0, + "Roughness": 0.7, + }, + ) + + separate_rgb = nw.new_node( + Nodes.SeparateColor, + input_kwargs={"Color": noise_texture.outputs["Color"]}, + attrs={"mode": "HSV"}, + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": separate_rgb.outputs["Green"], + 1: 0.4, + 2: 0.7, + 3: 0.48, + 4: 0.55, + }, + attrs={"interpolation_type": "SMOOTHSTEP"}, + ) + + map_range_2 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": separate_rgb.outputs["Blue"], 1: 0.4, 2: 0.7, 3: 0.4}, + attrs={"interpolation_type": "SMOOTHSTEP"}, + ) + + hue_saturation_value = nw.new_node( + "ShaderNodeHueSaturation", + input_kwargs={ + "Hue": map_range_1.outputs["Result"], + "Value": map_range_2.outputs["Result"], + "Color": (0.6939, 0.2307, 0.0529, 1.0), + }, + ) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, input_kwargs={"Base Color": hue_saturation_value} + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf} + ) + + +@node_utils.to_nodegroup( + "nodegroup_blackberry_surface", singleton=False, type="GeometryNodeTree" +) +def nodegroup_blackberry_surface( + nw: 
NodeWrangler, berry_color=(0.0212, 0.0212, 0.0284, 1.0) +): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketFloat", "spline parameter", 0.5), + ], + ) + + surfacebump = nw.new_node( + nodegroup_surface_bump().name, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + "Displacement": 0.5, + "Scale": 0.5, + }, + ) + + pointonmesh = nw.new_node( + nodegroup_point_on_mesh().name, + input_kwargs={ + "Mesh": surfacebump, + "Distance Min": 0.4, + "spline parameter": group_input.outputs["spline parameter"], + "noise amount": 0.5, + "noise scale": 2.0, + }, + ) + + randomrotationscale = nw.new_node( + nodegroup_random_rotation_scale().name, + input_kwargs={"rot mean": (3.89, 0.0, 0.0)}, + ) + + uv_sphere_2 = nw.new_node( + Nodes.MeshUVSphere, input_kwargs={"Segments": 32, "Rings": 16} + ) + + surfacebump_1 = nw.new_node( + nodegroup_surface_bump().name, + input_kwargs={"Geometry": uv_sphere_2, "Displacement": 0.5, "Scale": 0.3}, + ) + + subdivision_surface = nw.new_node( + Nodes.SubdivisionSurface, input_kwargs={"Mesh": surfacebump_1} + ) + + set_material = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": subdivision_surface, + "Material": surface.shaderfunc_to_material( + shader_berry_shader, berry_color + ), + }, + ) + + circlecrosssection_1 = nw.new_node( + nodegroup_circle_cross_section().name, + input_kwargs={"noise amount": 0.0, "Resolution": 8, "radius": 0.15}, + ) + + shapequadratic_1 = nw.new_node( + nodegroup_shape_quadratic().name, + input_kwargs={ + "Profile Curve": circlecrosssection_1, + "random seed tilt": 0.0, + "noise scale tilt": 0.0, + "noise amount tilt": 0.0, + "noise scale pos": 1.0, + "noise amount pos": 2.0, + "Resolution": 8, + "Start": (0.0, 0.0, 0.0), + "Middle": (0.0, 0.0, -1.0), + "End": (0.0, 0.0, -2.0), + }, + ) + + value_4 = nw.new_node(Nodes.Value) + value_4.outputs[0].default_value = 0.2 + + transform_3 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": shapequadratic_1, + "Translation": (0.0, 0.0, -1.0), + "Scale": value_4, + }, + ) + + set_material_3 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": transform_3, + "Material": surface.shaderfunc_to_material(shader_hair_shader), + }, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [set_material, set_material_3]} + ) + + instanceonpoints = nw.new_node( + nodegroup_instance_on_points().name, + input_kwargs={ + "rotation base": pointonmesh.outputs["Rotation"], + "rotation delta": randomrotationscale.outputs["Vector"], + "translation": (0.0, -0.5, 0.0), + "scale": randomrotationscale.outputs["Value"], + "Points": pointonmesh.outputs["Geometry"], + "Instance": join_geometry, + }, + ) + + realize_instances = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": instanceonpoints} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": realize_instances} + ) diff --git a/infinigen/assets/objects/fruits/surfaces/coconutgreen_surface.py b/infinigen/assets/objects/fruits/surfaces/coconutgreen_surface.py new file mode 100644 index 000000000..99f3cad1e --- /dev/null +++ b/infinigen/assets/objects/fruits/surfaces/coconutgreen_surface.py @@ -0,0 +1,210 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
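+#
+# Green-coconut husk surface: adds surface bumps and a bottom dent whose
+# intensity follows the cross-section parameter and the distance to center,
+# then applies a striped two-tone shader driven by the "shape_coordinate" and
+# "crosssection_coordinate" attributes (assumed to be written upstream by the
+# general fruit node graph).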
+ +# Authors: Yiming Zuo + + +from infinigen.assets.objects.fruits.fruit_utils import ( + nodegroup_add_dent, + nodegroup_surface_bump, +) +from infinigen.assets.objects.fruits.surfaces.surface_utils import ( + nodegroup_stripe_pattern, +) +from infinigen.core import surface +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler + + +def shader_coconut_green_shader(nw: NodeWrangler, basic_color, bottom_color): + # Code generated using version 2.4.3 of the node_transpiler + + texture_coordinate_1 = nw.new_node(Nodes.TextureCoord) + + noise_texture_1 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": texture_coordinate_1.outputs["Object"], + "Scale": 1.0, + "Detail": 10.0, + "Roughness": 0.7, + }, + ) + + separate_rgb = nw.new_node( + Nodes.SeparateColor, input_kwargs={"Color": noise_texture_1.outputs["Color"]} + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": separate_rgb.outputs["Green"], + 1: 0.4, + 2: 0.7, + 3: 0.48, + 4: 0.52, + }, + attrs={"interpolation_type": "SMOOTHSTEP"}, + ) + + map_range_2 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": separate_rgb.outputs["Blue"], 1: 0.4, 2: 0.7, 3: 0.6}, + attrs={"interpolation_type": "SMOOTHSTEP"}, + ) + + attribute_1 = nw.new_node( + Nodes.Attribute, attrs={"attribute_name": "shape_coordinate"} + ) + + colorramp = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": attribute_1.outputs["Fac"]} + ) + colorramp.color_ramp.elements.new(0) + colorramp.color_ramp.elements[0].position = 0.0 + colorramp.color_ramp.elements[ + 0 + ].color = bottom_color # (0.0908, 0.2664, 0.013, 1.0) + colorramp.color_ramp.elements[1].position = 0.01 + colorramp.color_ramp.elements[ + 1 + ].color = bottom_color # (0.0908, 0.2664, 0.013, 1.0) + colorramp.color_ramp.elements[2].position = 1.0 + colorramp.color_ramp.elements[ + 2 + ].color = basic_color # (0.2462, 0.4125, 0.0044, 1.0) + + hue_saturation_value_1 = nw.new_node( + "ShaderNodeHueSaturation", + input_kwargs={ + "Hue": map_range_1.outputs["Result"], + "Value": map_range_2.outputs["Result"], + "Color": colorramp.outputs["Color"], + }, + ) + + attribute_2 = nw.new_node( + Nodes.Attribute, attrs={"attribute_name": "crosssection_coordinate"} + ) + + group = nw.new_node( + nodegroup_stripe_pattern().name, + input_kwargs={ + "Color": hue_saturation_value_1, + "attribute": attribute_2.outputs["Fac"], + "seed": 10.0, + }, + ) + + group_1 = nw.new_node( + nodegroup_stripe_pattern().name, + input_kwargs={ + "Color": group, + "attribute": attribute_1.outputs["Fac"], + "voronoi scale": 10.0, + "voronoi randomness": 0.6446, + "seed": -10.0, + "noise amount": 0.48, + "hue min": 1.32, + "hue max": 0.9, + }, + ) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={"Base Color": group_1, "Specular": 0.4773, "Roughness": 0.4455}, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf} + ) + + +@node_utils.to_nodegroup( + "nodegroup_coconutgreen_surface", singleton=False, type="GeometryNodeTree" +) +def nodegroup_coconutgreen_surface( + nw: NodeWrangler, + basic_color=(0.2462, 0.4125, 0.0044, 1.0), + bottom_color=(0.0908, 0.2664, 0.013, 1.0), +): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketFloat", "spline parameter", 0.0), + ("NodeSocketVector", "spline tangent", (0.0, 0.0, 0.0)), + ("NodeSocketFloat", 
"distance to center", 0.0), + ("NodeSocketFloat", "cross section paramater", 0.5), + ], + ) + + surfacebump = nw.new_node( + nodegroup_surface_bump().name, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + "Displacement": 0.2, + "Scale": 0.5, + }, + ) + + surfacebump_1 = nw.new_node( + nodegroup_surface_bump().name, + input_kwargs={"Geometry": surfacebump, "Displacement": 0.0, "Scale": 10.0}, + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": group_input.outputs["distance to center"], + 1: 0.05, + 2: 0.2, + 4: 0.68, + }, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["cross section paramater"], + 1: map_range.outputs["Result"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + adddent = nw.new_node( + nodegroup_add_dent( + dent_control_points=[ + (0.0, 0.4219), + (0.0977, 0.4469), + (0.2273, 0.4844), + (0.5568, 0.5125), + (1.0, 0.5), + ] + ).name, + input_kwargs={ + "Geometry": surfacebump_1, + "spline parameter": group_input.outputs["spline parameter"], + "spline tangent": group_input.outputs["spline tangent"], + "distance to center": group_input.outputs["distance to center"], + "bottom": True, + "intensity": multiply, + "max radius": 3.0, + }, + ) + + set_material_3 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": adddent, + "Material": surface.shaderfunc_to_material( + shader_coconut_green_shader, basic_color, bottom_color + ), + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_material_3} + ) diff --git a/infinigen/assets/objects/fruits/surfaces/coconuthairy_surface.py b/infinigen/assets/objects/fruits/surfaces/coconuthairy_surface.py new file mode 100644 index 000000000..c65afccc9 --- /dev/null +++ b/infinigen/assets/objects/fruits/surfaces/coconuthairy_surface.py @@ -0,0 +1,197 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
+ +# Authors: Yiming Zuo + + +from infinigen.assets.objects.fruits.fruit_utils import ( + nodegroup_hair, + nodegroup_instance_on_points, + nodegroup_point_on_mesh, + nodegroup_random_rotation_scale, +) +from infinigen.core import surface +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler + + +def shader_hair_shader(nw: NodeWrangler, basic_color): + # Code generated using version 2.4.3 of the node_transpiler + + texture_coordinate = nw.new_node(Nodes.TextureCoord) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": texture_coordinate.outputs["Object"], + "Scale": 0.5, + "Detail": 10.0, + "Roughness": 0.7, + }, + ) + + separate_rgb = nw.new_node( + Nodes.SeparateColor, input_kwargs={"Color": noise_texture.outputs["Color"]} + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": separate_rgb.outputs["Green"], + 1: 0.4, + 2: 0.7, + 3: 0.48, + 4: 0.55, + }, + attrs={"interpolation_type": "SMOOTHSTEP"}, + ) + + map_range_2 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": separate_rgb.outputs["Blue"], 1: 0.4, 2: 0.7, 3: 0.4}, + attrs={"interpolation_type": "SMOOTHSTEP"}, + ) + + hue_saturation_value = nw.new_node( + "ShaderNodeHueSaturation", + input_kwargs={ + "Hue": map_range_1.outputs["Result"], + "Value": map_range_2.outputs["Result"], + "Color": basic_color, + }, + ) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, input_kwargs={"Base Color": hue_saturation_value} + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf} + ) + + +@node_utils.to_nodegroup( + "nodegroup_coconuthairy_surface", singleton=False, type="GeometryNodeTree" +) +def nodegroup_coconuthairy_surface( + nw: NodeWrangler, basic_color=(0.9473, 0.552, 0.2623, 1.0) +): + # Code generated using version 2.4.3 of the node_transpiler + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketFloat", "spline parameter", 0.0), + ], + ) + + material = nw.new_node("GeometryNodeInputMaterial") + material.material = surface.shaderfunc_to_material(shader_hair_shader, basic_color) + + set_material = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + "Material": material, + }, + ) + + pointonmesh = nw.new_node( + nodegroup_point_on_mesh().name, + input_kwargs={ + "Mesh": group_input.outputs["Geometry"], + "spline parameter": group_input.outputs["spline parameter"], + "Distance Min": 0.03, + "noise amount": 0.0, + "noise scale": 0.0, + }, + ) + + randomrotationscale = nw.new_node( + nodegroup_random_rotation_scale().name, + input_kwargs={ + "noise scale": 100.0, + "rot mean": (0.47, 0.0, 4.8), + "rot std": 100.0, + "scale mean": 0.2, + "scale std": 0.0, + }, + ) + + hair = nw.new_node( + nodegroup_hair().name, + input_kwargs={ + "length resolution": 1, + "cross section resolution": 1, + "scale": 0.3, + "Radius": 0.03, + "Material": material, + "Middle": (0.0, 0.3, 1.0), + "End": (0.0, -1.4, 2.0), + }, + ) + + instanceonpoints = nw.new_node( + nodegroup_instance_on_points().name, + input_kwargs={ + "rotation base": pointonmesh.outputs["Rotation"], + "rotation delta": randomrotationscale.outputs["Vector"], + "translation": (0.0, 0.0, 0.0), + "scale": randomrotationscale.outputs["Value"], + "Points": pointonmesh.outputs["Geometry"], + "Instance": hair, + }, + ) + + pointonmesh_1 = nw.new_node( + nodegroup_point_on_mesh().name, + input_kwargs={ + 
"Mesh": group_input.outputs["Geometry"], + "spline parameter": group_input.outputs["spline parameter"], + "Distance Min": 0.06, + "parameter min": 0.2, + "noise amount": 0.5, + "noise scale": 2.0, + }, + ) + + randomrotationscale_1 = nw.new_node( + nodegroup_random_rotation_scale().name, + input_kwargs={ + "rot mean": (1.3, 0.0, 0.0), + "rot std": 3.0, + "scale mean": 0.3, + "scale std": 0.5, + }, + ) + + hair_1 = nw.new_node( + nodegroup_hair().name, + input_kwargs={ + "scale": 1.0, + "Material": material, + "Middle": (0.0, 0.5, 1.0), + "End": (0.0, -1.9, 2.0), + }, + ) + + instanceonpoints_1 = nw.new_node( + nodegroup_instance_on_points().name, + input_kwargs={ + "rotation base": pointonmesh_1.outputs["Rotation"], + "rotation delta": randomrotationscale_1.outputs["Vector"], + "translation": (0.0, 0.0, 0.0), + "scale": randomrotationscale_1.outputs["Value"], + "Points": pointonmesh_1.outputs["Geometry"], + "Instance": hair_1, + }, + ) + + join_geometry_2 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [set_material, instanceonpoints, instanceonpoints_1]}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": join_geometry_2} + ) diff --git a/infinigen/assets/objects/fruits/surfaces/durian_surface.py b/infinigen/assets/objects/fruits/surfaces/durian_surface.py new file mode 100644 index 000000000..290e40bb0 --- /dev/null +++ b/infinigen/assets/objects/fruits/surfaces/durian_surface.py @@ -0,0 +1,207 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Yiming Zuo + + +from infinigen.assets.objects.fruits.fruit_utils import ( + nodegroup_manhattan, + nodegroup_point_on_mesh, + nodegroup_surface_bump, +) +from infinigen.core import surface +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler + + +def shader_durian_shader(nw: NodeWrangler, peak_color, base_color): + # Code generated using version 2.4.3 of the node_transpiler + + texture_coordinate = nw.new_node(Nodes.TextureCoord) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": texture_coordinate.outputs["Object"], + "Scale": 0.8, + "Detail": 10.0, + "Roughness": 0.7, + }, + ) + + separate_rgb = nw.new_node( + Nodes.SeparateColor, input_kwargs={"Color": noise_texture.outputs["Color"]} + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": separate_rgb.outputs["Green"], + 1: 0.4, + 2: 0.7, + 3: 0.48, + 4: 0.55, + }, + attrs={"interpolation_type": "SMOOTHSTEP"}, + ) + + map_range_2 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": separate_rgb.outputs["Blue"], 1: 0.4, 2: 0.7, 3: 0.6}, + attrs={"interpolation_type": "SMOOTHSTEP"}, + ) + + attribute = nw.new_node( + Nodes.Attribute, attrs={"attribute_name": "durian thorn coordiante"} + ) + + colorramp = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": attribute.outputs["Fac"]} + ) + colorramp.color_ramp.elements[0].position = 0.0 + colorramp.color_ramp.elements[0].color = peak_color + colorramp.color_ramp.elements[1].position = 0.2705 + colorramp.color_ramp.elements[1].color = base_color + + hue_saturation_value = nw.new_node( + "ShaderNodeHueSaturation", + input_kwargs={ + "Hue": map_range_1.outputs["Result"], + "Value": map_range_2.outputs["Result"], + "Color": colorramp.outputs["Color"], + }, + ) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base 
Color": hue_saturation_value, + "Specular": 0.1205, + "Roughness": 0.5068, + }, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf} + ) + + +@node_utils.to_nodegroup( + "nodegroup_durian_surface", singleton=False, type="GeometryNodeTree" +) +def nodegroup_durian_surface( + nw: NodeWrangler, + thorn_control_points=[(0.0, 0.0), (0.7318, 0.4344), (1.0, 1.0)], + peak_color=(0.2401, 0.1455, 0.0313, 1.0), + base_color=(0.3278, 0.3005, 0.0704, 1.0), +): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketFloat", "displacement", 1.0), + ("NodeSocketFloat", "spline parameter", 0.0), + ("NodeSocketFloatDistance", "distance Min", 0.1), + ("NodeSocketFloat", "noise amount", 0.3), + ("NodeSocketFloat", "noise scale", 5.0), + ], + ) + + surfacebump = nw.new_node( + nodegroup_surface_bump().name, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + "Displacement": 0.5, + "Scale": 0.5, + }, + ) + + normal = nw.new_node(Nodes.InputNormal) + + pointonmesh = nw.new_node( + nodegroup_point_on_mesh().name, + input_kwargs={ + "Mesh": surfacebump, + "spline parameter": group_input.outputs["spline parameter"], + "Distance Min": group_input.outputs["distance Min"], + "noise amount": group_input.outputs["noise amount"], + "noise scale": group_input.outputs["noise scale"], + }, + ) + + position_1 = nw.new_node(Nodes.InputPosition) + + geometry_proximity = nw.new_node( + Nodes.Proximity, + input_kwargs={ + "Target": pointonmesh.outputs["Geometry"], + "Source Position": position_1, + }, + attrs={"target_element": "POINTS"}, + ) + + manhattan = nw.new_node( + nodegroup_manhattan().name, + input_kwargs={"v1": geometry_proximity.outputs["Position"], "v2": position_1}, + label="manhattan", + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["distance Min"], 1: 2.0}, + attrs={"operation": "MULTIPLY"}, + ) + + map_range = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": manhattan, 2: multiply, 3: 1.0, 4: 0.0} + ) + + float_curve = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": map_range.outputs["Result"]} + ) + node_utils.assign_curve(float_curve.mapping.curves[0], thorn_control_points) + + scale = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: normal, "Scale": float_curve}, + attrs={"operation": "SCALE"}, + ) + + scale_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: scale.outputs["Vector"], + "Scale": group_input.outputs["displacement"], + }, + attrs={"operation": "SCALE"}, + ) + + set_position_1 = nw.new_node( + Nodes.SetPosition, + input_kwargs={"Geometry": surfacebump, "Offset": scale_1.outputs["Vector"]}, + ) + + capture_attribute = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={"Geometry": set_position_1, 2: map_range.outputs["Result"]}, + ) + + set_material = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": capture_attribute.outputs["Geometry"], + "Material": surface.shaderfunc_to_material( + shader_durian_shader, peak_color, base_color + ), + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": set_material, + "distance to center": capture_attribute.outputs[2], + }, + ) diff --git a/infinigen/assets/objects/fruits/surfaces/pineapple_surface.py b/infinigen/assets/objects/fruits/surfaces/pineapple_surface.py new file mode 100644 index 000000000..475df34f8 --- /dev/null +++ 
b/infinigen/assets/objects/fruits/surfaces/pineapple_surface.py @@ -0,0 +1,332 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Yiming Zuo + + +from infinigen.assets.objects.fruits.cross_section_lib import ( + nodegroup_circle_cross_section, +) +from infinigen.assets.objects.fruits.fruit_utils import ( + nodegroup_instance_on_points, + nodegroup_point_on_mesh, + nodegroup_random_rotation_scale, + nodegroup_surface_bump, +) +from infinigen.assets.objects.fruits.stem_lib import nodegroup_pineapple_leaf +from infinigen.core import surface +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler + + +@node_utils.to_nodegroup( + "nodegroup_pineapple_surface", singleton=False, type="GeometryNodeTree" +) +def nodegroup_pineapple_surface( + nw: NodeWrangler, + color_bottom=(0.0823, 0.0953, 0.0097, 1.0), + color_mid=(0.552, 0.1845, 0.0222, 1.0), + color_top=(0.4508, 0.0999, 0.0003, 1.0), + color_center=(0.8388, 0.5395, 0.314, 1.0), +): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketFloat", "spline parameter", 0.0), + ("NodeSocketFloatDistance", "point distance", 0.22), + ("NodeSocketFloat", "cell scale", 0.2), + ("NodeSocketFloat", "random seed", 0.0), + ], + ) + + pointonmesh = nw.new_node( + nodegroup_point_on_mesh().name, + input_kwargs={ + "Mesh": group_input.outputs["Geometry"], + "spline parameter": group_input.outputs["spline parameter"], + "Distance Min": group_input.outputs["point distance"], + "parameter max": 0.999, + "noise amount": 0.05, + }, + ) + + randomrotationscale = nw.new_node( + nodegroup_random_rotation_scale().name, + input_kwargs={ + "random seed": group_input.outputs["random seed"], + "rot std": 0.3, + "scale mean": group_input.outputs["cell scale"], + }, + ) + + pineapplecellbody = nw.new_node( + nodegroup_pineapple_cell_body().name, + input_kwargs={"resolution": 16, "scale diff": -0.3}, + ) + + set_material = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": pineapplecellbody.outputs["Geometry"], + "Material": surface.shaderfunc_to_material( + shader_cell, color_bottom, color_mid, color_top, color_center + ), + }, + ) + + pineappleleaf = nw.new_node( + nodegroup_pineapple_leaf().name, + input_kwargs={"Middle": (0.0, -0.1, 1.0), "End": (0.0, 0.9, 2.5)}, + ) + + value = nw.new_node(Nodes.Value) + value.outputs[0].default_value = 0.3 + + transform_2 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": pineappleleaf, + "Translation": (0.0, -0.1, 0.3), + "Rotation": (-1.0315, 0.0, 0.0), + "Scale": value, + }, + ) + + set_material_3 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": transform_2, + "Material": surface.shaderfunc_to_material( + shader_needle, color_center, color_top + ), + }, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [set_material, set_material_3]} + ) + + surfacebump = nw.new_node( + nodegroup_surface_bump().name, + input_kwargs={"Geometry": join_geometry, "Displacement": 0.2, "Scale": 10.0}, + ) + + instanceonpoints = nw.new_node( + nodegroup_instance_on_points().name, + input_kwargs={ + "rotation base": pointonmesh.outputs["Rotation"], + "rotation delta": randomrotationscale.outputs["Vector"], + "translation": (0.0, 0.0, 0.0), + "scale": 
randomrotationscale.outputs["Value"], + "Points": pointonmesh.outputs["Geometry"], + "Instance": surfacebump, + }, + ) + + set_material_1 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + "Material": surface.shaderfunc_to_material( + shader_cell, color_bottom, color_mid, color_top, color_center + ), + }, + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [instanceonpoints, set_material_1]}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": join_geometry_1, + "spline parameter": pineapplecellbody.outputs["spline parameter"], + }, + ) + + +def shader_needle(nw: NodeWrangler, color1, color2): + # Code generated using version 2.4.3 of the node_transpiler + + texture_coordinate = nw.new_node(Nodes.TextureCoord) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": texture_coordinate.outputs["Object"], + "Scale": 8.0, + "Detail": 0.0, + }, + ) + + mix = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": noise_texture.outputs["Fac"], + "Color1": color1, # (0.7758, 0.4678, 0.2346, 1.0) + "Color2": color2, + }, + ) # (0.3467, 0.0595, 0.0, 1.0) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, input_kwargs={"Base Color": mix} + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf} + ) + + +def shader_cell(nw: NodeWrangler, color_bottom, color_mid, color_top, color_center): + # Code generated using version 2.4.3 of the node_transpiler + + texture_coordinate = nw.new_node(Nodes.TextureCoord) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={"Vector": texture_coordinate.outputs["Object"], "Scale": 4.6}, + ) + + attribute = nw.new_node(Nodes.Attribute, attrs={"attribute_name": "radius"}) + + colorramp = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": attribute.outputs["Fac"]} + ) + colorramp.color_ramp.elements.new(0) + colorramp.color_ramp.elements.new(0) + colorramp.color_ramp.elements[0].position = 0.0 + colorramp.color_ramp.elements[ + 0 + ].color = color_bottom # (0.0823, 0.0953, 0.0097, 1.0) + colorramp.color_ramp.elements[1].position = 0.67 + colorramp.color_ramp.elements[1].color = color_mid # (0.552, 0.1845, 0.0222, 1.0) + colorramp.color_ramp.elements[2].position = 0.93 + colorramp.color_ramp.elements[2].color = color_top # (0.4508, 0.0999, 0.0003, 1.0) + colorramp.color_ramp.elements[3].position = 1.0 + colorramp.color_ramp.elements[ + 3 + ].color = color_center # (0.8388, 0.5395, 0.314, 1.0) + + hue_saturation_value = nw.new_node( + "ShaderNodeHueSaturation", + input_kwargs={"Hue": 0.55, "Color": colorramp.outputs["Color"]}, + ) + + mix = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": noise_texture.outputs["Fac"], + "Color1": hue_saturation_value, + "Color2": colorramp.outputs["Color"], + }, + ) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, input_kwargs={"Base Color": mix, "Roughness": 0.2} + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf} + ) + + +@node_utils.to_nodegroup( + "nodegroup_pineapple_cell_body", singleton=False, type="GeometryNodeTree" +) +def nodegroup_pineapple_cell_body(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketInt", "resolution", 0), + ("NodeSocketFloat", "scale diff", 0.0), + ], + ) + + quadratic_bezier = nw.new_node( + 
Nodes.QuadraticBezier, + input_kwargs={ + "Resolution": group_input.outputs["resolution"], + "Start": (0.0, 0.0, 0.0), + "Middle": (0.0, 0.0, 0.2), + "End": (0.0, 0.0, 0.4), + }, + ) + + spline_parameter = nw.new_node(Nodes.SplineParameter) + + capture_attribute = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={ + "Geometry": quadratic_bezier, + 2: spline_parameter.outputs["Factor"], + }, + ) + + float_curve = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": spline_parameter.outputs["Factor"]} + ) + node_utils.assign_curve( + float_curve.mapping.curves[0], + [(0.0, 1.0), (0.1568, 0.875), (0.8045, 0.5313), (1.0, 0.0)], + ) + + set_curve_radius = nw.new_node( + Nodes.SetCurveRadius, + input_kwargs={ + "Curve": capture_attribute.outputs["Geometry"], + "Radius": float_curve, + }, + ) + + circlecrosssection = nw.new_node( + nodegroup_circle_cross_section().name, + input_kwargs={ + "noise scale": 8.0, + "noise amount": 0.4, + "Resolution": 64, + "radius": 1.0, + }, + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={"Curve": set_curve_radius, "Profile Curve": circlecrosssection}, + ) + + position_1 = nw.new_node(Nodes.InputPosition) + + separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": position_1}) + + greater_than = nw.new_node( + Nodes.Compare, input_kwargs={0: separate_xyz.outputs["Y"]} + ) + + multiply = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: position_1, 1: group_input.outputs["scale diff"]}, + attrs={"operation": "MULTIPLY"}, + ) + + set_position_1 = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": curve_to_mesh, + "Selection": greater_than, + "Offset": multiply.outputs["Vector"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": set_position_1, + "spline parameter": capture_attribute.outputs[2], + }, + ) diff --git a/infinigen/assets/objects/fruits/surfaces/starfruit_surface.py b/infinigen/assets/objects/fruits/surfaces/starfruit_surface.py new file mode 100644 index 000000000..c27b3bbe2 --- /dev/null +++ b/infinigen/assets/objects/fruits/surfaces/starfruit_surface.py @@ -0,0 +1,115 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
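+
+# Starfruit surface: nodegroup_starfruit_surface carves the star ridges with
+# nodegroup_add_dent (driven by spline parameter, spline tangent and distance
+# to center), adds a fine nodegroup_surface_bump, and assigns
+# shader_starfruit_shader, which mixes a translucent and a glossy BSDF and
+# tints only the narrow ridge band with ridge_color.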
+ +# Authors: Yiming Zuo + + +from infinigen.assets.objects.fruits.fruit_utils import ( + nodegroup_add_dent, + nodegroup_surface_bump, +) +from infinigen.core import surface +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler + + +def shader_starfruit_shader(nw: NodeWrangler, base_color, ridge_color): + # Code generated using version 2.4.3 of the node_transpiler + + attribute = nw.new_node( + Nodes.Attribute, attrs={"attribute_name": "star parameters"} + ) + + colorramp = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": attribute.outputs["Color"]} + ) + colorramp.color_ramp.elements.new(0) + colorramp.color_ramp.elements.new(0) + colorramp.color_ramp.elements[0].position = 0.0 + colorramp.color_ramp.elements[0].color = base_color + colorramp.color_ramp.elements[1].position = 0.9 + colorramp.color_ramp.elements[1].color = base_color + colorramp.color_ramp.elements[2].position = 0.95 + colorramp.color_ramp.elements[2].color = ridge_color + colorramp.color_ramp.elements[3].position = 1.0 + colorramp.color_ramp.elements[3].color = base_color + + translucent_bsdf = nw.new_node( + Nodes.TranslucentBSDF, input_kwargs={"Color": colorramp.outputs["Color"]} + ) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": colorramp.outputs["Color"], + "Specular": 0.775, + "Roughness": 0.2, + }, + ) + + mix_shader = nw.new_node( + Nodes.MixShader, + input_kwargs={"Fac": 0.7, 1: translucent_bsdf, 2: principled_bsdf}, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": mix_shader} + ) + + +@node_utils.to_nodegroup( + "nodegroup_starfruit_surface", singleton=False, type="GeometryNodeTree" +) +def nodegroup_starfruit_surface( + nw: NodeWrangler, + dent_control_points=[ + (0.0, 0.4219), + (0.0977, 0.4469), + (0.2273, 0.4844), + (0.5568, 0.5125), + (1.0, 0.5), + ], + base_color=(0.7991, 0.6038, 0.0009, 1.0), + ridge_color=(0.3712, 0.4179, 0.0006, 1.0), +): + # Code generated using version 2.4.3 of the node_transpiler + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketFloat", "spline parameter", 0.0), + ("NodeSocketVector", "spline tangent", (0.0, 0.0, 0.0)), + ("NodeSocketFloat", "distance to center", 0.0), + ("NodeSocketFloat", "dent intensity", 1.0), + ], + ) + + adddent = nw.new_node( + nodegroup_add_dent(dent_control_points=dent_control_points).name, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + "spline parameter": group_input.outputs["spline parameter"], + "spline tangent": group_input.outputs["spline tangent"], + "distance to center": group_input.outputs["distance to center"], + "intensity": group_input.outputs["dent intensity"], + }, + ) + + surfacebump_002 = nw.new_node( + nodegroup_surface_bump().name, + input_kwargs={"Geometry": adddent, "Displacement": 0.03, "Scale": 10.0}, + ) + + set_material = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": surfacebump_002, + "Material": surface.shaderfunc_to_material( + shader_starfruit_shader, base_color, ridge_color + ), + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_material} + ) diff --git a/infinigen/assets/objects/fruits/surfaces/strawberry_surface.py b/infinigen/assets/objects/fruits/surfaces/strawberry_surface.py new file mode 100644 index 000000000..392f3f2f1 --- /dev/null +++ b/infinigen/assets/objects/fruits/surfaces/strawberry_surface.py @@ -0,0 +1,191 @@ +# 
Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Yiming Zuo + + +from infinigen.assets.objects.fruits.fruit_utils import ( + nodegroup_add_crater, + nodegroup_add_noise_scalar, + nodegroup_instance_on_points, + nodegroup_point_on_mesh, + nodegroup_random_rotation_scale, + nodegroup_surface_bump, +) +from infinigen.assets.objects.fruits.seed_lib import nodegroup_strawberry_seed +from infinigen.core import surface +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler + + +def shader_strawberry_shader(nw: NodeWrangler, top_pos, main_color, top_color): + # Code generated using version 2.4.3 of the node_transpiler + + texture_coordinate = nw.new_node(Nodes.TextureCoord) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={"Vector": texture_coordinate.outputs["Object"], "Scale": 0.5}, + ) + + attribute = nw.new_node( + Nodes.Attribute, attrs={"attribute_name": "strawberry seed height"} + ) + + colorramp_1 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": attribute.outputs["Color"]} + ) + colorramp_1.color_ramp.elements.new(0) + colorramp_1.color_ramp.elements[0].position = 0.0 + colorramp_1.color_ramp.elements[0].color = main_color + colorramp_1.color_ramp.elements[1].position = top_pos + colorramp_1.color_ramp.elements[1].color = main_color + colorramp_1.color_ramp.elements[2].position = 1.0 + colorramp_1.color_ramp.elements[2].color = top_color + + hue_saturation_value = nw.new_node( + "ShaderNodeHueSaturation", + input_kwargs={ + "Hue": 0.55, + "Saturation": 1.5, + "Value": 0.2, + "Fac": 0.3, + "Color": colorramp_1.outputs["Color"], + }, + ) + + mix = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": noise_texture.outputs["Fac"], + "Color1": colorramp_1.outputs["Color"], + "Color2": hue_saturation_value, + }, + ) + + translucent_bsdf = nw.new_node(Nodes.TranslucentBSDF, input_kwargs={"Color": mix}) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={"Base Color": mix, "Specular": 1.0, "Roughness": 0.15}, + ) + + mix_shader = nw.new_node( + Nodes.MixShader, + input_kwargs={"Fac": 0.8, 1: translucent_bsdf, 2: principled_bsdf}, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": mix_shader} + ) + + +@node_utils.to_nodegroup( + "nodegroup_strawberry_surface", singleton=False, type="GeometryNodeTree" +) +def nodegroup_strawberry_surface( + nw: NodeWrangler, + top_pos=0.9, + main_color=(0.8879, 0.0097, 0.0319, 1.0), + top_color=(0.8148, 0.6105, 0.1746, 1.0), +): + # Code generated using version 2.4.3 of the node_transpiler + strawberryseed = nw.new_node(nodegroup_strawberry_seed().name) + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketFloat", "spline parameter", 0.0), + ("NodeSocketFloatDistance", "Distance Min", 0.12), + ("NodeSocketFloat", "Strength", 0.74), + ("NodeSocketFloat", "noise random seed", 0.0), + ], + ) + + surfacebump = nw.new_node( + nodegroup_surface_bump().name, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + "Displacement": 0.4, + "Scale": 0.5, + }, + ) + + addnoisescalar = nw.new_node( + nodegroup_add_noise_scalar().name, + input_kwargs={ + "noise random seed": group_input.outputs["noise random seed"], + "value": group_input.outputs["spline parameter"], + "noise amount": 0.2, + }, + ) + + pointonmesh = 
nw.new_node( + nodegroup_point_on_mesh().name, + input_kwargs={ + "Mesh": surfacebump, + "spline parameter": addnoisescalar, + "Distance Min": group_input.outputs["Distance Min"], + "parameter max": top_pos, + "noise amount": 0.1, + "noise scale": 2.0, + }, + ) + + addcrater = nw.new_node( + nodegroup_add_crater().name, + input_kwargs={ + "Geometry": surfacebump, + "Points": pointonmesh.outputs["Geometry"], + "Strength": group_input.outputs["Strength"], + }, + ) + + surfacebump_1 = nw.new_node( + nodegroup_surface_bump().name, + input_kwargs={"Geometry": addcrater, "Displacement": 0.03, "Scale": 20.0}, + ) + + set_material = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": surfacebump_1, + "Material": surface.shaderfunc_to_material( + shader_strawberry_shader, top_pos, main_color, top_color + ), + }, + ) + + randomrotationscale = nw.new_node( + nodegroup_random_rotation_scale().name, + input_kwargs={"rot mean": (-1.571, 0.0, 0.0), "scale mean": 0.08}, + ) + + instanceonpoints = nw.new_node( + nodegroup_instance_on_points().name, + input_kwargs={ + "rotation base": pointonmesh.outputs["Rotation"], + "rotation delta": randomrotationscale.outputs["Vector"], + "translation": (0.0, 0.3, 0.0), + "scale": randomrotationscale.outputs["Value"], + "Points": pointonmesh.outputs["Geometry"], + "Instance": strawberryseed, + }, + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [set_material, instanceonpoints]} + ) + + realize_instances = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": join_geometry_1} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": realize_instances, + "curve parameters": addnoisescalar, + }, + ) diff --git a/infinigen/assets/objects/fruits/surfaces/surface_utils.py b/infinigen/assets/objects/fruits/surfaces/surface_utils.py new file mode 100644 index 000000000..439ea009d --- /dev/null +++ b/infinigen/assets/objects/fruits/surfaces/surface_utils.py @@ -0,0 +1,93 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
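+
+# Shared shader utility: nodegroup_stripe_pattern runs an input attribute
+# through a 1D Voronoi texture, perturbs the result with object-space noise,
+# remaps it between the "hue min" / "hue max" inputs, and applies it as the
+# Value (brightness) of the given base color, yielding stripe-like shading.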
+ +# Authors: Yiming Zuo + + +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler + + +@node_utils.to_nodegroup( + "nodegroup_stripe_pattern", singleton=False, type="ShaderNodeTree" +) +def nodegroup_stripe_pattern(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + texture_coordinate = nw.new_node(Nodes.TextureCoord) + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketColor", "Color", (0.8, 0.8, 0.8, 1.0)), + ("NodeSocketFloat", "attribute", 0.0), + ("NodeSocketFloat", "voronoi scale", 50.0), + ("NodeSocketFloatFactor", "voronoi randomness", 1.0), + ("NodeSocketFloat", "seed", 0.0), + ("NodeSocketFloat", "noise scale", 10.0), + ("NodeSocketFloat", "noise amount", 1.4), + ("NodeSocketFloat", "hue min", 0.6), + ("NodeSocketFloat", "hue max", 1.085), + ], + ) + + add = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: texture_coordinate.outputs["Object"], + 1: group_input.outputs["seed"], + }, + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": add.outputs["Vector"], + "Scale": group_input.outputs["noise scale"], + "Detail": 1.0, + }, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: noise_texture.outputs["Fac"], + 1: group_input.outputs["noise amount"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + voronoi_texture = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={ + "W": group_input.outputs["attribute"], + "Scale": group_input.outputs["voronoi scale"], + "Randomness": group_input.outputs["voronoi randomness"], + }, + attrs={"voronoi_dimensions": "1D"}, + ) + + add_1 = nw.new_node( + Nodes.Math, input_kwargs={0: multiply, 1: voronoi_texture.outputs["Distance"]} + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": add_1, + 3: group_input.outputs["hue min"], + 4: group_input.outputs["hue max"], + }, + ) + + hue_saturation_value = nw.new_node( + "ShaderNodeHueSaturation", + input_kwargs={ + "Value": map_range.outputs["Result"], + "Color": group_input.outputs["Color"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Color": hue_saturation_value} + ) diff --git a/infinigen/assets/grassland/__init__.py b/infinigen/assets/objects/grassland/__init__.py similarity index 100% rename from infinigen/assets/grassland/__init__.py rename to infinigen/assets/objects/grassland/__init__.py index 93856183c..c6019dea2 100644 --- a/infinigen/assets/grassland/__init__.py +++ b/infinigen/assets/objects/grassland/__init__.py @@ -5,6 +5,6 @@ from .dandelion import DandelionFactory, DandelionSeedFactory +from .flower import FlowerFactory from .flowerplant import FlowerPlantFactory from .grass_tuft import GrassTuftFactory -from .flower import FlowerFactory diff --git a/infinigen/assets/objects/grassland/dandelion.py b/infinigen/assets/objects/grassland/dandelion.py new file mode 100644 index 000000000..508bdb798 --- /dev/null +++ b/infinigen/assets/objects/grassland/dandelion.py @@ -0,0 +1,1012 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
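+
+# Procedural dandelion: a bezier stem (nodegroup_stem_geometry) carries a
+# spherical seed head whose surface is instanced with pappus-like seed stalks
+# (nodegroup_pedal_stem and its sub-groups); nodegroup_pedal_selection drops
+# seeds per mode, so DandelionFactory can generate full, sparse, or partially
+# bare heads, while DandelionSeedFactory produces a single detached seed.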
+ +# Authors: Beining Han +# Acknowledgement: This file draws inspiration from https://www.youtube.com/watch?v=61Sk8j1Ml9c by BradleyAnimation + +import bpy +import numpy as np +from numpy.random import normal, randint, uniform + +from infinigen.assets.materials import simple_brownish, simple_greenery, simple_whitish +from infinigen.core import surface +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.tagging import tag_nodegroup, tag_object + + +@node_utils.to_nodegroup( + "nodegroup_pedal_stem_head_geometry", singleton=False, type="GeometryNodeTree" +) +def nodegroup_pedal_stem_head_geometry(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVectorTranslation", "Translation", (0.0, 0.0, 1.0)), + ("NodeSocketFloatDistance", "Radius", 0.04), + ], + ) + + uv_sphere_1 = nw.new_node( + Nodes.MeshUVSphere, + input_kwargs={"Segments": 64, "Radius": group_input.outputs["Radius"]}, + ) + + transform_1 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": uv_sphere_1, + "Translation": group_input.outputs["Translation"], + }, + ) + + set_material = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": transform_1, + "Material": surface.shaderfunc_to_material( + simple_brownish.shader_simple_brown + ), + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_material} + ) + + +@node_utils.to_nodegroup( + "nodegroup_pedal_stem_end_geometry", singleton=False, type="GeometryNodeTree" +) +def nodegroup_pedal_stem_end_geometry(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Points", None)] + ) + + endpoint_selection = nw.new_node( + "GeometryNodeCurveEndpointSelection", input_kwargs={"End Size": 0} + ) + + uv_sphere = nw.new_node( + Nodes.MeshUVSphere, input_kwargs={"Segments": 64, "Radius": 0.04} + ) + + vector = nw.new_node(Nodes.Vector) + vector.vector = (uniform(0.45, 0.7), uniform(0.45, 0.7), uniform(2, 3)) + + transform = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": uv_sphere, "Scale": vector} + ) + + cone = nw.new_node( + "GeometryNodeMeshCone", input_kwargs={"Radius Bottom": 0.0040, "Depth": 0.0040} + ) + + normal = nw.new_node(Nodes.InputNormal) + + align_euler_to_vector_1 = nw.new_node( + Nodes.AlignEulerToVector, input_kwargs={"Vector": normal}, attrs={"axis": "Z"} + ) + + instance_on_points_1 = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={ + "Points": transform, + "Instance": cone.outputs["Mesh"], + "Rotation": align_euler_to_vector_1, + }, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [instance_on_points_1, transform]} + ) + + set_material = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": join_geometry, + "Material": surface.shaderfunc_to_material( + simple_brownish.shader_simple_brown + ), + }, + ) + + geometry_to_instance = nw.new_node( + "GeometryNodeGeometryToInstance", input_kwargs={"Geometry": set_material} + ) + + curve_tangent = nw.new_node(Nodes.CurveTangent) + + align_euler_to_vector = nw.new_node( + Nodes.AlignEulerToVector, + input_kwargs={"Vector": curve_tangent}, + attrs={"axis": "Z"}, + ) + + instance_on_points = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={ + 
"Points": group_input.outputs["Points"], + "Selection": endpoint_selection, + "Instance": geometry_to_instance, + "Rotation": align_euler_to_vector, + }, + ) + + realize_instances = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": instance_on_points} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": realize_instances} + ) + + +@node_utils.to_nodegroup( + "nodegroup_pedal_stem_branch_shape", singleton=False, type="GeometryNodeTree" +) +def nodegroup_pedal_stem_branch_shape(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + pedal_stem_branches_num = nw.new_node( + Nodes.Integer, label="pedal_stem_branches_num" + ) + pedal_stem_branches_num.integer = 40 + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketFloatDistance", "Radius", 0.0100)] + ) + + curve_circle_1 = nw.new_node( + Nodes.CurveCircle, + input_kwargs={ + "Resolution": pedal_stem_branches_num, + "Radius": group_input.outputs["Radius"], + }, + ) + + pedal_stem_branch_length = nw.new_node( + Nodes.Value, label="pedal_stem_branch_length" + ) + pedal_stem_branch_length.outputs[0].default_value = 0.5000 + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": pedal_stem_branch_length} + ) + + curve_line_1 = nw.new_node(Nodes.CurveLine, input_kwargs={"End": combine_xyz_1}) + + resample_curve = nw.new_node( + Nodes.ResampleCurve, input_kwargs={"Curve": curve_line_1, "Count": 40} + ) + + spline_parameter = nw.new_node(Nodes.SplineParameter) + + float_curve = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": spline_parameter.outputs["Factor"]} + ) + node_utils.assign_curve( + float_curve.mapping.curves[0], + [ + (0.0000, 0.0000), + (0.2, 0.08 * np.random.normal(1.0, 0.15)), + (0.4, 0.22 * np.random.normal(1.0, 0.2)), + (0.6, 0.45 * np.random.normal(1.0, 0.2)), + (0.8, 0.7 * np.random.normal(1.0, 0.1)), + (1.0000, 1.0000), + ], + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: float_curve, 1: uniform(0.15, 0.4)}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply}) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={"Geometry": resample_curve, "Offset": combine_xyz}, + ) + + normal = nw.new_node(Nodes.InputNormal) + + align_euler_to_vector = nw.new_node( + Nodes.AlignEulerToVector, input_kwargs={"Vector": normal} + ) + + instance_on_points = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={ + "Points": curve_circle_1.outputs["Curve"], + "Instance": set_position, + "Rotation": align_euler_to_vector, + }, + ) + + random_value_1 = nw.new_node( + Nodes.RandomValue, input_kwargs={2: -0.2000, 3: 0.2000, "Seed": 2} + ) + + random_value_2 = nw.new_node( + Nodes.RandomValue, input_kwargs={2: -0.2000, 3: 0.2000, "Seed": 1} + ) + + random_value = nw.new_node(Nodes.RandomValue, input_kwargs={2: -0.2000, 3: 0.2000}) + + combine_xyz_2 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": random_value_1.outputs[1], + "Y": random_value_2.outputs[1], + "Z": random_value.outputs[1], + }, + ) + + rotate_instances = nw.new_node( + Nodes.RotateInstances, + input_kwargs={"Instances": instance_on_points, "Rotation": combine_xyz_2}, + ) + + random_value_3 = nw.new_node(Nodes.RandomValue, input_kwargs={2: 0.8000}) + + scale_instances = nw.new_node( + Nodes.ScaleInstances, + input_kwargs={ + "Instances": rotate_instances, + "Scale": random_value_3.outputs[1], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + 
input_kwargs={"Instances": scale_instances}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_pedal_stem_branch_contour", singleton=False, type="GeometryNodeTree" +) +def nodegroup_pedal_stem_branch_contour(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) + + realize_instances = nw.new_node( + Nodes.RealizeInstances, + input_kwargs={"Geometry": group_input.outputs["Geometry"]}, + ) + + pedal_stem_branch_rsample = nw.new_node( + Nodes.Value, label="pedal_stem_branch_rsample" + ) + pedal_stem_branch_rsample.outputs[0].default_value = 10.0 + + resample_curve = nw.new_node( + Nodes.ResampleCurve, + input_kwargs={"Curve": realize_instances, "Count": pedal_stem_branch_rsample}, + ) + + index = nw.new_node(Nodes.Index) + + capture_attribute = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={"Geometry": resample_curve, 5: index}, + attrs={"domain": "CURVE", "data_type": "INT"}, + ) + + spline_parameter = nw.new_node(Nodes.SplineParameter) + + float_curve = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": spline_parameter.outputs["Factor"]} + ) + + # generate pedal branch contour + dist = uniform(-0.05, -0.25) + node_utils.assign_curve( + float_curve.mapping.curves[0], + [ + (0.0, 0.0), + (0.2, 0.2 + (dist + normal(0, 0.05)) / 2.0), + (0.4, 0.4 + (dist + normal(0, 0.05))), + (0.6, 0.6 + (dist + normal(0, 0.05)) / 1.2), + (0.8, 0.8 + (dist + normal(0, 0.05)) / 2.4), + (1.0, 0.95 + normal(0, 0.05)), + ], + ) + + random_value = nw.new_node( + Nodes.RandomValue, + input_kwargs={2: 0.05, 3: 0.35, "ID": capture_attribute.outputs[5]}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: float_curve, 1: random_value.outputs[1]}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply}) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": capture_attribute.outputs["Geometry"], + "Offset": combine_xyz, + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_position} + ) + + +@node_utils.to_nodegroup( + "nodegroup_pedal_stem_branch_geometry", singleton=False, type="GeometryNodeTree" +) +def nodegroup_pedal_stem_branch_geometry(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Curve", None), + ("NodeSocketVectorTranslation", "Translation", (0.0, 0.0, 1.0)), + ], + ) + + set_curve_radius_1 = nw.new_node( + Nodes.SetCurveRadius, + input_kwargs={"Curve": group_input.outputs["Curve"], "Radius": 1.0}, + ) + + curve_circle_2 = nw.new_node( + Nodes.CurveCircle, + input_kwargs={"Radius": uniform(0.001, 0.0025), "Resolution": 4}, + ) + + curve_to_mesh_1 = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": set_curve_radius_1, + "Profile Curve": curve_circle_2.outputs["Curve"], + "Fill Caps": True, + }, + ) + + transform_2 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": curve_to_mesh_1, + "Translation": group_input.outputs["Translation"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": transform_2} + ) + + +@node_utils.to_nodegroup( + "nodegroup_pedal_stem_geometry", singleton=False, type="GeometryNodeTree" +) +def nodegroup_pedal_stem_geometry(nw: NodeWrangler): + # Code generated using version 2.4.3 of the 
node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVectorTranslation", "End", (0.0, 0.0, 1.0)), + ("NodeSocketVectorTranslation", "Middle", (0.0, 0.0, 0.5)), + ("NodeSocketFloatDistance", "Radius", 0.05), + ], + ) + + quadratic_bezier = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Start": (0.0, 0.0, 0.0), + "Middle": group_input.outputs["Middle"], + "End": group_input.outputs["End"], + }, + ) + + set_curve_radius = nw.new_node( + Nodes.SetCurveRadius, + input_kwargs={ + "Curve": quadratic_bezier, + "Radius": group_input.outputs["Radius"], + }, + ) + + curve_circle = nw.new_node( + Nodes.CurveCircle, input_kwargs={"Radius": 0.2, "Resolution": 8} + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": set_curve_radius, + "Profile Curve": curve_circle.outputs["Curve"], + "Fill Caps": True, + }, + ) + + set_material_2 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": curve_to_mesh, + "Material": surface.shaderfunc_to_material( + simple_whitish.shader_simple_white + ), + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": set_material_2, "Curve": quadratic_bezier}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_pedal_selection", singleton=False, type="GeometryNodeTree" +) +def nodegroup_pedal_selection(nw: NodeWrangler, params): + # Code generated using version 2.4.3 of the node_transpiler + + random_value = nw.new_node(Nodes.RandomValue, input_kwargs={5: 1}) + + greater_than = nw.new_node( + Nodes.Math, + input_kwargs={0: params["random_dropout"], 1: random_value.outputs[1]}, + attrs={"operation": "GREATER_THAN"}, + ) + + index_1 = nw.new_node(Nodes.Index) + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketFloat", "num_segments", 0.5)] + ) + + divide = nw.new_node( + Nodes.Math, + input_kwargs={0: index_1, 1: group_input.outputs["num_segments"]}, + attrs={"operation": "DIVIDE"}, + ) + + less_than = nw.new_node( + Nodes.Math, + input_kwargs={0: divide, 1: params["row_less_than"]}, + attrs={"operation": "LESS_THAN"}, + ) + + greater_than_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: divide, 1: params["row_great_than"]}, + attrs={"operation": "GREATER_THAN"}, + ) + + op_and = nw.new_node( + Nodes.BooleanMath, input_kwargs={0: less_than, 1: greater_than_1} + ) + + modulo = nw.new_node( + Nodes.Math, + input_kwargs={0: index_1, 1: group_input.outputs["num_segments"]}, + attrs={"operation": "MODULO"}, + ) + + less_than_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: modulo, 1: params["col_less_than"]}, + attrs={"operation": "LESS_THAN"}, + ) + + greater_than_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: modulo, 1: params["col_great_than"]}, + attrs={"operation": "GREATER_THAN"}, + ) + + op_and_1 = nw.new_node( + Nodes.BooleanMath, input_kwargs={0: less_than_1, 1: greater_than_2} + ) + + nand = nw.new_node( + Nodes.BooleanMath, + input_kwargs={0: op_and, 1: op_and_1}, + attrs={"operation": "NAND"}, + ) + + op_and_2 = nw.new_node(Nodes.BooleanMath, input_kwargs={0: greater_than, 1: nand}) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Boolean": op_and_2}) + + +@node_utils.to_nodegroup( + "nodegroup_stem_geometry", singleton=False, type="GeometryNodeTree" +) +def nodegroup_stem_geometry(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Curve", None)] + ) + + spline_parameter = 
nw.new_node(Nodes.SplineParameter) + + value = nw.new_node(Nodes.Value) + value.outputs[0].default_value = uniform(0.2, 0.4) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": spline_parameter.outputs["Factor"], 3: 0.4, 4: value}, + ) + + set_curve_radius_2 = nw.new_node( + Nodes.SetCurveRadius, + input_kwargs={ + "Curve": group_input.outputs["Curve"], + "Radius": map_range.outputs["Result"], + }, + ) + + stem_radius = nw.new_node(Nodes.Value, label="stem_radius") + stem_radius.outputs[0].default_value = uniform(0.01, 0.024) + + curve_circle_3 = nw.new_node( + Nodes.CurveCircle, input_kwargs={"Radius": stem_radius} + ) + + curve_to_mesh_2 = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": set_curve_radius_2, + "Profile Curve": curve_circle_3.outputs["Curve"], + "Fill Caps": True, + }, + ) + + set_material = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": curve_to_mesh_2, + "Material": surface.shaderfunc_to_material( + simple_greenery.shader_simple_greenery + ), + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Mesh": tag_nodegroup(nw, set_material, "stem")}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_pedal_stem", singleton=False, type="GeometryNodeTree" +) +def nodegroup_pedal_stem(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + pedal_stem_top_point = nw.new_node(Nodes.Vector, label="pedal_stem_top_point") + pedal_stem_top_point.vector = (0.0, 0.0, 1.0) + + pedal_stem_mid_point = nw.new_node(Nodes.Vector, label="pedal_stem_mid_point") + pedal_stem_mid_point.vector = (normal(0, 0.05), normal(0, 0.05), 0.5) + + pedal_stem_radius = nw.new_node(Nodes.Value, label="pedal_stem_radius") + pedal_stem_radius.outputs[0].default_value = uniform(0.02, 0.045) + + pedal_stem_geometry = nw.new_node( + nodegroup_pedal_stem_geometry().name, + input_kwargs={ + "End": pedal_stem_top_point, + "Middle": pedal_stem_mid_point, + "Radius": pedal_stem_radius, + }, + ) + + pedal_stem_top_radius = nw.new_node(Nodes.Value, label="pedal_stem_top_radius") + pedal_stem_top_radius.outputs[0].default_value = uniform(0.005, 0.008) + + pedal_stem_branch_shape = nw.new_node( + nodegroup_pedal_stem_branch_shape().name, + input_kwargs={"Radius": pedal_stem_top_radius}, + ) + + pedal_stem_branch_geometry = nw.new_node( + nodegroup_pedal_stem_branch_geometry().name, + input_kwargs={ + "Curve": pedal_stem_branch_shape, + "Translation": pedal_stem_top_point, + }, + ) + + set_material_3 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": pedal_stem_branch_geometry, + "Material": surface.shaderfunc_to_material( + simple_whitish.shader_simple_white + ), + }, + ) + + resample_curve = nw.new_node( + Nodes.ResampleCurve, + input_kwargs={"Curve": pedal_stem_geometry.outputs["Curve"]}, + ) + + pedal_stem_end_geometry = nw.new_node( + nodegroup_pedal_stem_end_geometry().name, + input_kwargs={"Points": resample_curve}, + ) + + pedal_stem_head_geometry = nw.new_node( + nodegroup_pedal_stem_head_geometry().name, + input_kwargs={ + "Translation": pedal_stem_top_point, + "Radius": pedal_stem_top_radius, + }, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={ + "Geometry": [ + pedal_stem_geometry.outputs["Geometry"], + set_material_3, + pedal_stem_end_geometry, + pedal_stem_head_geometry, + ] + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": join_geometry} + ) + + +@node_utils.to_nodegroup( + "nodegroup_flower_geometry", singleton=False, 
type="GeometryNodeTree" +) +def nodegroup_flower_geometry(nw: NodeWrangler, params): + # Code generated using version 2.4.3 of the node_transpiler + + num_core_segments = nw.new_node( + Nodes.Integer, label="num_core_segments", attrs={"integer": 10} + ) + num_core_segments.integer = randint(8, 25) + + num_core_rings = nw.new_node( + Nodes.Integer, label="num_core_rings", attrs={"integer": 10} + ) + num_core_rings.integer = randint(8, 20) + + uv_sphere_2 = nw.new_node( + Nodes.MeshUVSphere, + input_kwargs={ + "Segments": num_core_segments, + "Rings": num_core_rings, + "Radius": uniform(0.02, 0.05), + }, + ) + + flower_core_shape = nw.new_node(Nodes.Vector, label="flower_core_shape") + flower_core_shape.vector = (uniform(0.8, 1.2), uniform(0.8, 1.2), uniform(0.5, 0.8)) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": uv_sphere_2, "Scale": flower_core_shape}, + ) + + selection_params = { + "random_dropout": params["random_dropout"], + "row_less_than": int(params["row_less_than"] * num_core_rings.integer), + "row_great_than": int(params["row_great_than"] * num_core_rings.integer), + "col_less_than": int(params["col_less_than"] * num_core_segments.integer), + "col_great_than": int(params["col_less_than"] * num_core_segments.integer), + } + pedal_selection = nw.new_node( + nodegroup_pedal_selection(params=selection_params).name, + input_kwargs={"num_segments": num_core_segments}, + ) + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Instance", None)] + ) + + normal_1 = nw.new_node(Nodes.InputNormal) + + align_euler_to_vector_1 = nw.new_node( + Nodes.AlignEulerToVector, input_kwargs={"Vector": normal_1}, attrs={"axis": "Z"} + ) + + random_value_1 = nw.new_node(Nodes.RandomValue, input_kwargs={2: 0.4, 3: 0.7}) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: random_value_1.outputs[1]}, + attrs={"operation": "MULTIPLY"}, + ) + + instance_on_points_1 = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={ + "Points": transform, + "Selection": pedal_selection, + "Instance": group_input.outputs["Instance"], + "Rotation": align_euler_to_vector_1, + "Scale": multiply, + }, + ) + + realize_instances_1 = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": instance_on_points_1} + ) + + set_material = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": transform, + "Material": surface.shaderfunc_to_material( + simple_whitish.shader_simple_white + ), + }, + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [realize_instances_1, set_material]}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": tag_nodegroup(nw, join_geometry_1, "flower")}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_flower_on_stem", singleton=False, type="GeometryNodeTree" +) +def nodegroup_flower_on_stem(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Points", None), + ("NodeSocketGeometry", "Instance", None), + ], + ) + + endpoint_selection = nw.new_node( + "GeometryNodeCurveEndpointSelection", input_kwargs={"Start Size": 0} + ) + + curve_tangent = nw.new_node(Nodes.CurveTangent) + + align_euler_to_vector_2 = nw.new_node( + Nodes.AlignEulerToVector, + input_kwargs={"Vector": curve_tangent}, + attrs={"axis": "Z"}, + ) + + instance_on_points_2 = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={ + "Points": 
group_input.outputs["Points"], + "Selection": endpoint_selection, + "Instance": group_input.outputs["Instance"], + "Rotation": align_euler_to_vector_2, + }, + ) + + realize_instances_2 = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": instance_on_points_2} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Instances": realize_instances_2} + ) + + +def geometry_dandelion_nodes(nw: NodeWrangler, **kwargs): + # Code generated using version 2.4.3 of the node_transpiler + + quadratic_bezier_1 = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Start": (0.0, 0.0, 0.0), + "Middle": (normal(0, 0.1), normal(0, 0.1), 0.5), + "End": (normal(0, 0.1), normal(0, 0.1), 1.0), + }, + ) + + resample_curve = nw.new_node( + Nodes.ResampleCurve, input_kwargs={"Curve": quadratic_bezier_1} + ) + + pedal_stem = nw.new_node(nodegroup_pedal_stem().name) + + geometry_to_instance = nw.new_node( + "GeometryNodeGeometryToInstance", input_kwargs={"Geometry": pedal_stem} + ) + + flower_geometry = nw.new_node( + nodegroup_flower_geometry(kwargs).name, + input_kwargs={"Instance": geometry_to_instance}, + ) + + geometry_to_instance_1 = nw.new_node( + "GeometryNodeGeometryToInstance", input_kwargs={"Geometry": flower_geometry} + ) + + value_2 = nw.new_node(Nodes.Value) + value_2.outputs[0].default_value = uniform(-0.15, -0.5) + + transform_3 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": geometry_to_instance_1, "Scale": value_2}, + ) + + flower_on_stem = nw.new_node( + nodegroup_flower_on_stem().name, + input_kwargs={"Points": resample_curve, "Instance": transform_3}, + ) + + stem_geometry = nw.new_node( + nodegroup_stem_geometry().name, input_kwargs={"Curve": quadratic_bezier_1} + ) + + join_geometry_2 = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [flower_on_stem, stem_geometry]} + ) + + realize_instances = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": join_geometry_2} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": realize_instances} + ) + + +def geometry_dandelion_seed_nodes(nw: NodeWrangler, **kwargs): + # Code generated using version 2.4.3 of the node_transpiler + + pedal_stem = nw.new_node(nodegroup_pedal_stem().name) + + geometry_to_instance = nw.new_node( + "GeometryNodeGeometryToInstance", input_kwargs={"Geometry": pedal_stem} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": geometry_to_instance} + ) + + +class DandelionFactory(AssetFactory): + def __init__(self, factory_seed, coarse=False): + super(DandelionFactory, self).__init__(factory_seed, coarse=coarse) + self.flower_mode = [ + "full_flower", + "no_flower", + "top_half_flower", + "top_missing_flower", + "sparse_flower", + ] + self.flower_mode_pb = [0.4, 0.04, 0.23, 0.13, 0.2] + + def get_mode_params(self, mode): + if mode == "full_flower": + # generate a flower with full seeds + return { + "random_dropout": uniform(0.5, 1.0), + "row_less_than": 0.0, + "row_great_than": 0.0, + "col_less_than": 0.0, + "col_great_than": 0.0, + } + elif mode == "no_flower": + # generate a flower with no seeds + return { + "random_dropout": 0.0, + "row_less_than": 1.0, + "row_great_than": 0.0, + "col_less_than": 1.0, + "col_great_than": 0.0, + } + elif mode == "top_half_flower": + # generate a flower with no seeds at bottom half + return { + "random_dropout": uniform(0.6, 1.0), + "row_less_than": uniform(0.3, 0.5), + "row_great_than": 0.0, + "col_less_than": 1.0, + "col_great_than": 0.0, + } + elif mode == 
"top_missing_flower": + # generate a flower with no seeds at bottom half + col = uniform(0.3, 1.0) + return { + "random_dropout": uniform(0.5, 0.9), + "row_less_than": 1.0, + "row_great_than": uniform(0.5, 0.7), + "col_less_than": col, + "col_great_than": col - uniform(0.2, 0.4), + } + elif mode == "sparse_flower": + # generate a flower with no seeds at bottom half + return { + "random_dropout": uniform(0.3, 0.5), + "row_less_than": 0.0, + "row_great_than": 0.0, + "col_less_than": 0.0, + "col_great_than": 0.0, + } + else: + raise NotImplementedError + + def create_asset(self, **params): + bpy.ops.mesh.primitive_plane_add( + size=1, + enter_editmode=False, + align="WORLD", + location=(0, 0, 0), + scale=(1, 1, 1), + ) + obj = bpy.context.active_object + + mode = np.random.choice(self.flower_mode, p=self.flower_mode_pb) + params = self.get_mode_params(mode) + + surface.add_geomod( + obj, + geometry_dandelion_nodes, + apply=True, + attributes=[], + input_kwargs=params, + ) + tag_object(obj, "dandelion") + return obj + + +class DandelionSeedFactory(AssetFactory): + def __init__(self, factory_seed, coarse=False): + super(DandelionSeedFactory, self).__init__(factory_seed, coarse=coarse) + + def create_asset(self, **params): + bpy.ops.mesh.primitive_plane_add( + size=1, + enter_editmode=False, + align="WORLD", + location=(0, 0, 0), + scale=(1, 1, 1), + ) + obj = bpy.context.active_object + + surface.add_geomod( + obj, + geometry_dandelion_seed_nodes, + apply=True, + attributes=[], + input_kwargs=params, + ) + tag_object(obj, "seed") + return obj + + +if __name__ == "__main__": + f = DandelionSeedFactory(0) + obj = f.create_asset() diff --git a/infinigen/assets/objects/grassland/flower.py b/infinigen/assets/objects/grassland/flower.py new file mode 100644 index 000000000..a185f240f --- /dev/null +++ b/infinigen/assets/objects/grassland/flower.py @@ -0,0 +1,956 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
+ +# Authors: Alexander Raistrick, Alejandro Newell + + +# Code generated using version v2.0.1 of the node_transpiler +import bpy +import numpy as np +from numpy.random import normal, uniform + +from infinigen.core import surface +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.tagging import tag_nodegroup, tag_object +from infinigen.core.util import blender as butil +from infinigen.core.util import color +from infinigen.core.util.math import FixedSeed, dict_lerp + + +@node_utils.to_nodegroup("nodegroup_polar_to_cart_old", singleton=True) +def nodegroup_polar_to_cart_old(nw): + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVector", "Addend", (0.0, 0.0, 0.0)), + ("NodeSocketFloat", "Value", 0.5), + ("NodeSocketVector", "Vector", (0.0, 0.0, 0.0)), + ], + ) + + cosine = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Value"]}, + attrs={"operation": "COSINE"}, + ) + + sine = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Value"]}, + attrs={"operation": "SINE"}, + ) + + combine_xyz_4 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Y": cosine, "Z": sine}) + + multiply_add = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: group_input.outputs["Vector"], + 1: combine_xyz_4, + 2: group_input.outputs["Addend"], + }, + attrs={"operation": "MULTIPLY_ADD"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Vector": multiply_add.outputs["Vector"]} + ) + + +@node_utils.to_nodegroup("nodegroup_follow_curve", singleton=True) +def nodegroup_follow_curve(nw): + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketGeometry", "Curve", None), + ("NodeSocketFloat", "Curve Min", 0.5), + ("NodeSocketFloat", "Curve Max", 1.0), + ], + ) + + position = nw.new_node(Nodes.InputPosition) + + capture_attribute = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={"Geometry": group_input.outputs["Geometry"], 1: position}, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, + input_kwargs={"Vector": capture_attribute.outputs["Attribute"]}, + ) + + attribute_statistic = nw.new_node( + Nodes.AttributeStatistic, + input_kwargs={ + "Geometry": capture_attribute.outputs["Geometry"], + 2: separate_xyz.outputs["Z"], + }, + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": separate_xyz.outputs["Z"], + 1: attribute_statistic.outputs["Min"], + 2: attribute_statistic.outputs["Max"], + 3: group_input.outputs["Curve Min"], + 4: group_input.outputs["Curve Max"], + }, + ) + + curve_length = nw.new_node( + Nodes.CurveLength, input_kwargs={"Curve": group_input.outputs["Curve"]} + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: map_range.outputs["Result"], 1: curve_length}, + attrs={"operation": "MULTIPLY"}, + ) + + sample_curve = nw.new_node( + Nodes.SampleCurve, + input_kwargs={"Curves": group_input.outputs["Curve"], "Length": multiply}, + attrs={"mode": "LENGTH"}, + ) + + cross_product = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: sample_curve.outputs["Tangent"], + 1: sample_curve.outputs["Normal"], + }, + attrs={"operation": "CROSS_PRODUCT"}, + ) + + scale = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: cross_product.outputs["Vector"], + "Scale": separate_xyz.outputs["X"], + }, + attrs={"operation": "SCALE"}, + ) + + scale_1 = 
nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: sample_curve.outputs["Normal"], + "Scale": separate_xyz.outputs["Y"], + }, + attrs={"operation": "SCALE"}, + ) + + add = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: scale.outputs["Vector"], 1: scale_1.outputs["Vector"]}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": capture_attribute.outputs["Geometry"], + "Position": sample_curve.outputs["Position"], + "Offset": add.outputs["Vector"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_position} + ) + + +@node_utils.to_nodegroup("nodegroup_norm_index", singleton=True) +def nodegroup_norm_index(nw): + index = nw.new_node(Nodes.Index) + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketInt", "Count", 0)] + ) + + divide = nw.new_node( + Nodes.Math, + input_kwargs={0: index, 1: group_input.outputs["Count"]}, + attrs={"operation": "DIVIDE"}, + ) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"T": divide}) + + +@node_utils.to_nodegroup("nodegroup_flower_petal", singleton=True) +def nodegroup_flower_petal(nw): + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketFloat", "Length", 0.2), + ("NodeSocketFloat", "Point", 1.0), + ("NodeSocketFloat", "Point height", 0.5), + ("NodeSocketFloat", "Bevel", 6.8), + ("NodeSocketFloat", "Base width", 0.2), + ("NodeSocketFloat", "Upper width", 0.3), + ("NodeSocketInt", "Resolution H", 8), + ("NodeSocketInt", "Resolution V", 4), + ("NodeSocketFloat", "Wrinkle", 0.1), + ("NodeSocketFloat", "Curl", 0.0), + ], + ) + + multiply_add = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Resolution H"], 1: 2.0, 2: 1.0}, + attrs={"operation": "MULTIPLY_ADD"}, + ) + + grid = nw.new_node( + Nodes.MeshGrid, + input_kwargs={ + "Vertices X": group_input.outputs["Resolution V"], + "Vertices Y": multiply_add, + }, + ) + + position = nw.new_node(Nodes.InputPosition) + + capture_attribute = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={"Geometry": grid, 1: position}, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, + input_kwargs={"Vector": capture_attribute.outputs["Attribute"]}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["X"], 1: 0.05}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": multiply, "Y": separate_xyz.outputs["Y"]} + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": combine_xyz, + "Scale": 7.9, + "Detail": 0.0, + "Distortion": 0.2, + }, + attrs={"noise_dimensions": "2D"}, + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: noise_texture.outputs["Fac"], 1: -0.5} + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: add, 1: group_input.outputs["Wrinkle"]}, + attrs={"operation": "MULTIPLY"}, + ) + + separate_xyz_1 = nw.new_node( + Nodes.SeparateXYZ, + input_kwargs={"Vector": capture_attribute.outputs["Attribute"]}, + ) + + add_1 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz_1.outputs["X"]}) + + absolute = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_1.outputs["Y"]}, + attrs={"operation": "ABSOLUTE"}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, input_kwargs={0: absolute, 1: 2.0}, attrs={"operation": "MULTIPLY"} + ) + + power = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_2, 1: 
group_input.outputs["Bevel"]}, + attrs={"operation": "POWER"}, + ) + + multiply_add_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: power, 1: -1.0, 2: 1.0}, + attrs={"operation": "MULTIPLY_ADD"}, + ) + + multiply_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: add_1, 1: multiply_add_1}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_add_2 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: multiply_3, + 1: group_input.outputs["Upper width"], + 2: group_input.outputs["Base width"], + }, + attrs={"operation": "MULTIPLY_ADD"}, + ) + + multiply_4 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_1.outputs["Y"], 1: multiply_add_2}, + attrs={"operation": "MULTIPLY"}, + ) + + power_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: absolute, 1: group_input.outputs["Point"]}, + attrs={"operation": "POWER"}, + ) + + multiply_add_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: power_1, 1: -1.0, 2: 1.0}, + attrs={"operation": "MULTIPLY_ADD"}, + ) + + multiply_5 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_add_3, 1: group_input.outputs["Point height"]}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_add_4 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Point height"], 1: -1.0, 2: 1.0}, + attrs={"operation": "MULTIPLY_ADD"}, + ) + + add_2 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_5, 1: multiply_add_4}) + + multiply_6 = nw.new_node( + Nodes.Math, + input_kwargs={0: add_2, 1: multiply_add_1}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_7 = nw.new_node( + Nodes.Math, + input_kwargs={0: add_1, 1: multiply_6}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": multiply_1, "Y": multiply_4, "Z": multiply_7}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": capture_attribute.outputs["Geometry"], + "Position": combine_xyz_1, + }, + ) + + multiply_8 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Length"]}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Y": multiply_8}) + + reroute = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": group_input.outputs["Curl"]} + ) + + group_1 = nw.new_node( + nodegroup_polar_to_cart_old().name, + input_kwargs={"Addend": combine_xyz_3, "Value": reroute, "Vector": multiply_8}, + ) + + quadratic_bezier = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Resolution": 8, + "Start": (0.0, 0.0, 0.0), + "Middle": combine_xyz_3, + "End": group_1, + }, + ) + + group = nw.new_node( + nodegroup_follow_curve().name, + input_kwargs={ + "Geometry": set_position, + "Curve": quadratic_bezier, + "Curve Min": 0.0, + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": tag_nodegroup(nw, group, "petal")} + ) + + +@node_utils.to_nodegroup("nodegroup_phyllo_points", singleton=True) +def nodegroup_phyllo_points(nw): + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketInt", "Count", 50), + ("NodeSocketFloat", "Min Radius", 0.0), + ("NodeSocketFloat", "Max Radius", 2.0), + ("NodeSocketFloat", "Radius exp", 0.5), + ("NodeSocketFloat", "Min angle", -0.5236), + ("NodeSocketFloat", "Max angle", 0.7854), + ("NodeSocketFloat", "Min z", 0.0), + ("NodeSocketFloat", "Max z", 1.0), + ("NodeSocketFloat", "Clamp z", 1.0), + ("NodeSocketFloat", "Yaw offset", -1.5708), + ], + ) + + mesh_line = nw.new_node( + Nodes.MeshLine, input_kwargs={"Count": group_input.outputs["Count"]} + ) + + 
mesh_to_points = nw.new_node(Nodes.MeshToPoints, input_kwargs={"Mesh": mesh_line}) + + position = nw.new_node(Nodes.InputPosition) + + capture_attribute = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={"Geometry": mesh_to_points, 1: position}, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + index = nw.new_node(Nodes.Index) + + cosine = nw.new_node( + Nodes.Math, input_kwargs={0: index}, attrs={"operation": "COSINE"} + ) + + sine = nw.new_node(Nodes.Math, input_kwargs={0: index}, attrs={"operation": "SINE"}) + + combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={"X": cosine, "Y": sine}) + + divide = nw.new_node( + Nodes.Math, + input_kwargs={0: index, 1: group_input.outputs["Count"]}, + attrs={"operation": "DIVIDE"}, + ) + + power = nw.new_node( + Nodes.Math, + input_kwargs={0: divide, 1: group_input.outputs["Radius exp"]}, + attrs={"operation": "POWER"}, + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": power, + 3: group_input.outputs["Min Radius"], + 4: group_input.outputs["Max Radius"], + }, + ) + + multiply = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: combine_xyz, 1: map_range.outputs["Result"]}, + attrs={"operation": "MULTIPLY"}, + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": multiply.outputs["Vector"]} + ) + + map_range_2 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": divide, + 2: group_input.outputs["Clamp z"], + 3: group_input.outputs["Min z"], + 4: group_input.outputs["Max z"], + }, + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": separate_xyz.outputs["X"], + "Y": separate_xyz.outputs["Y"], + "Z": map_range_2.outputs["Result"], + }, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": capture_attribute.outputs["Geometry"], + "Position": combine_xyz_1, + }, + ) + + map_range_3 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": divide, + 3: group_input.outputs["Min angle"], + 4: group_input.outputs["Max angle"], + }, + ) + + random_value = nw.new_node(Nodes.RandomValue, input_kwargs={2: -0.1, 3: 0.1}) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: index, 1: group_input.outputs["Yaw offset"]} + ) + + combine_xyz_2 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": map_range_3.outputs["Result"], + "Y": random_value.outputs[1], + "Z": add, + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Points": set_position, "Rotation": combine_xyz_2}, + ) + + +@node_utils.to_nodegroup("nodegroup_plant_seed", singleton=True) +def nodegroup_plant_seed(nw): + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVector", "Dimensions", (0.0, 0.0, 0.0)), + ("NodeSocketIntUnsigned", "U", 4), + ("NodeSocketInt", "V", 8), + ], + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": group_input.outputs["Dimensions"]} + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": separate_xyz.outputs["X"]} + ) + + multiply_add = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: combine_xyz, 1: (0.5, 0.5, 0.5)}, + attrs={"operation": "MULTIPLY_ADD"}, + ) + + quadratic_bezier_1 = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Resolution": group_input.outputs["U"], + "Start": (0.0, 0.0, 0.0), + "Middle": multiply_add.outputs["Vector"], + "End": combine_xyz, + }, + ) + + group = nw.new_node( + nodegroup_norm_index().name, input_kwargs={"Count": group_input.outputs["U"]} + ) + + float_curve = nw.new_node(Nodes.FloatCurve, 
input_kwargs={"Value": group}) + node_utils.assign_curve( + float_curve.mapping.curves[0], [(0.0, 0.0), (0.3159, 0.4469), (1.0, 0.0156)] + ) + + map_range = nw.new_node(Nodes.MapRange, input_kwargs={"Value": float_curve, 4: 3.0}) + + set_curve_radius = nw.new_node( + Nodes.SetCurveRadius, + input_kwargs={ + "Curve": quadratic_bezier_1, + "Radius": map_range.outputs["Result"], + }, + ) + + curve_circle = nw.new_node( + Nodes.CurveCircle, + input_kwargs={ + "Resolution": group_input.outputs["V"], + "Radius": separate_xyz.outputs["Y"], + }, + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": set_curve_radius, + "Profile Curve": curve_circle.outputs["Curve"], + "Fill Caps": True, + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Mesh": tag_nodegroup(nw, curve_to_mesh, "seed")}, + ) + + +def shader_flower_center(nw): + ambient_occlusion = nw.new_node(Nodes.AmbientOcclusion) + + colorramp = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": ambient_occlusion.outputs["Color"]} + ) + colorramp.color_ramp.elements.new(1) + colorramp.color_ramp.elements[0].position = 0.4841 + colorramp.color_ramp.elements[0].color = (0.0127, 0.0075, 0.0026, 1.0) + colorramp.color_ramp.elements[1].position = 0.8591 + colorramp.color_ramp.elements[1].color = (0.0848, 0.0066, 0.0007, 1.0) + colorramp.color_ramp.elements[2].position = 1.0 + colorramp.color_ramp.elements[2].color = (1.0, 0.6228, 0.1069, 1.0) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, input_kwargs={"Base Color": colorramp.outputs["Color"]} + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf} + ) + + +def shader_petal(nw): + translucent_color_change = uniform(0.1, 0.6) + specular = normal(0.6, 0.1) + roughness = normal(0.4, 0.05) + translucent_amt = normal(0.3, 0.05) + + petal_color = nw.new_node(Nodes.RGB) + petal_color.outputs[0].default_value = color.color_category("petal") + + translucent_color = nw.new_node( + Nodes.MixRGB, + [translucent_color_change, petal_color, color.color_category("petal")], + ) + + translucent_bsdf = nw.new_node( + Nodes.TranslucentBSDF, input_kwargs={"Color": translucent_color} + ) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": petal_color, + "Specular": specular, + "Roughness": roughness, + }, + ) + + mix_shader = nw.new_node( + Nodes.MixShader, + input_kwargs={"Fac": translucent_amt, 1: principled_bsdf, 2: translucent_bsdf}, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": mix_shader} + ) + + +def geo_flower(nw, petal_material, center_material): + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketFloat", "Center Rad", 0.0), + ("NodeSocketVector", "Petal Dims", (0.0, 0.0, 0.0)), + ("NodeSocketFloat", "Seed Size", 0.0), + ("NodeSocketFloat", "Min Petal Angle", 0.1), + ("NodeSocketFloat", "Max Petal Angle", 1.36), + ("NodeSocketFloat", "Wrinkle", 0.01), + ("NodeSocketFloat", "Curl", 13.89), + ], + ) + + uv_sphere = nw.new_node( + Nodes.MeshUVSphere, + input_kwargs={ + "Segments": 8, + "Rings": 8, + "Radius": group_input.outputs["Center Rad"], + }, + ) + + transform = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": uv_sphere, "Scale": (1.0, 1.0, 0.05)} + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Seed Size"], 1: 1.5}, + attrs={"operation": "MULTIPLY"}, + ) + + distribute_points_on_faces = 
nw.new_node( + Nodes.DistributePointsOnFaces, + input_kwargs={ + "Mesh": transform, + "Distance Min": multiply, + "Density Max": 50000.0, + }, + attrs={"distribute_method": "POISSON"}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Seed Size"], 1: 10.0}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": multiply_1, "Y": group_input.outputs["Seed Size"]}, + ) + + group_3 = nw.new_node( + nodegroup_plant_seed().name, + input_kwargs={"Dimensions": combine_xyz, "U": 6, "V": 6}, + ) + + musgrave_texture = nw.new_node( + Nodes.MusgraveTexture, + input_kwargs={"W": 13.8, "Scale": 2.41}, + attrs={"musgrave_dimensions": "4D"}, + ) + + map_range = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": musgrave_texture, 3: 0.34, 4: 1.21} + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": map_range.outputs["Result"], "Y": 1.0, "Z": 1.0}, + ) + + instance_on_points_1 = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={ + "Points": distribute_points_on_faces.outputs["Points"], + "Instance": group_3, + "Rotation": (0.0, -1.5708, 0.0541), + "Scale": combine_xyz_1, + }, + ) + + realize_instances = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": instance_on_points_1} + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [realize_instances, transform]} + ) + + set_material_1 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={"Geometry": join_geometry_1, "Material": center_material}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Center Rad"], 1: 6.2832}, + attrs={"operation": "MULTIPLY"}, + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": group_input.outputs["Petal Dims"]} + ) + + divide = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_2, 1: separate_xyz.outputs["Y"]}, + attrs={"operation": "DIVIDE"}, + ) + + multiply_3 = nw.new_node( + Nodes.Math, input_kwargs={0: divide, 1: 1.2}, attrs={"operation": "MULTIPLY"} + ) + + reroute_3 = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": group_input.outputs["Center Rad"]} + ) + + reroute_1 = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": group_input.outputs["Min Petal Angle"]} + ) + + reroute = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": group_input.outputs["Max Petal Angle"]} + ) + + group_1 = nw.new_node( + nodegroup_phyllo_points().name, + input_kwargs={ + "Count": multiply_3, + "Min Radius": reroute_3, + "Max Radius": reroute_3, + "Radius exp": 0.0, + "Min angle": reroute_1, + "Max angle": reroute, + "Max z": 0.0, + }, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["Z"], 1: separate_xyz.outputs["Y"]}, + attrs={"operation": "SUBTRACT", "use_clamp": True}, + ) + + reroute_2 = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": group_input.outputs["Wrinkle"]} + ) + + reroute_4 = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": group_input.outputs["Curl"]} + ) + + group = nw.new_node( + nodegroup_flower_petal().name, + input_kwargs={ + "Length": separate_xyz.outputs["X"], + "Point": 0.56, + "Point height": -0.1, + "Bevel": 1.83, + "Base width": separate_xyz.outputs["Y"], + "Upper width": subtract, + "Resolution H": 8, + "Resolution V": 16, + "Wrinkle": reroute_2, + "Curl": reroute_4, + }, + ) + + instance_on_points = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={ + "Points": group_1.outputs["Points"], + "Instance": group, + "Rotation": 
group_1.outputs["Rotation"], + }, + ) + + realize_instances_1 = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": instance_on_points} + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={"Scale": 3.73, "Detail": 5.41, "Distortion": -1.0}, + ) + + subtract_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: noise_texture.outputs["Color"], 1: (0.5, 0.5, 0.5)}, + attrs={"operation": "SUBTRACT"}, + ) + + value = nw.new_node(Nodes.Value) + value.outputs[0].default_value = 0.025 + + multiply_4 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: subtract_1.outputs["Vector"], 1: value}, + attrs={"operation": "MULTIPLY"}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": realize_instances_1, + "Offset": multiply_4.outputs["Vector"], + }, + ) + + set_material = nw.new_node( + Nodes.SetMaterial, + input_kwargs={"Geometry": set_position, "Material": petal_material}, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [set_material_1, set_material]} + ) + + set_shade_smooth = nw.new_node( + Nodes.SetShadeSmooth, + input_kwargs={"Geometry": join_geometry, "Shade Smooth": False}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_shade_smooth} + ) + + +class FlowerFactory(AssetFactory): + def __init__(self, factory_seed, rad=0.15, diversity_fac=0.25): + super(FlowerFactory, self).__init__(factory_seed=factory_seed) + + self.rad = rad + self.diversity_fac = diversity_fac + + with FixedSeed(factory_seed): + self.petal_material = surface.shaderfunc_to_material(shader_petal) + self.center_material = surface.shaderfunc_to_material(shader_flower_center) + self.species_params = self.get_flower_params(self.rad) + + @staticmethod + def get_flower_params(overall_rad=0.05): + pct_inner = uniform(0.05, 0.4) + base_width = 2 * np.pi * overall_rad * pct_inner / normal(20, 5) + top_width = overall_rad * np.clip(normal(0.7, 0.3), base_width * 1.2, 100) + + min_angle, max_angle = np.deg2rad(np.sort(uniform(-20, 100, 2))) + + return { + "Center Rad": overall_rad * pct_inner, + "Petal Dims": np.array( + [overall_rad * (1 - pct_inner), base_width, top_width], dtype=np.float32 + ), + "Seed Size": uniform(0.005, 0.01), + "Min Petal Angle": min_angle, + "Max Petal Angle": max_angle, + "Wrinkle": uniform(0.003, 0.02), + "Curl": np.deg2rad(normal(30, 50)), + } + + def create_asset(self, **kwargs) -> bpy.types.Object: + vert = butil.spawn_vert("flower") + mod = surface.add_geomod( + vert, + geo_flower, + input_kwargs={ + "petal_material": self.petal_material, + "center_material": self.center_material, + }, + ) + + inst_params = self.get_flower_params(self.rad * normal(1, 0.05)) + params = dict_lerp(self.species_params, inst_params, 0.25) + butil.set_geomod_inputs(mod, params) + + butil.apply_modifiers(vert, mod=mod) + + vert.rotation_euler.z = uniform(0, 360) + tag_object(vert, "flower") + return vert diff --git a/infinigen/assets/objects/grassland/flowerplant.py b/infinigen/assets/objects/grassland/flowerplant.py new file mode 100644 index 000000000..a9e76be28 --- /dev/null +++ b/infinigen/assets/objects/grassland/flowerplant.py @@ -0,0 +1,868 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
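+
+# FlowerPlantFactory (defined below) assembles a flowering plant procedurally: it
+# first builds temporary leaf meshes (leaf_general.LeafFactory) and flower meshes
+# (flower.FlowerFactory), instances them along a curved stem and random side
+# branches via the geo_flowerplant geometry-node modifier, and removes the
+# temporary template objects before returning the finished, tagged mesh.
+#
+# Minimal usage sketch (seed and variable names chosen arbitrarily; must run
+# inside a Blender session, since asset creation uses bpy operators):
+#
+#   from infinigen.assets.objects.grassland.flowerplant import FlowerPlantFactory
+#   factory = FlowerPlantFactory(factory_seed=0)
+#   plant = factory.create_asset()  # bpy.types.Object tagged "flowerplant"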
+ +# Authors: Beining Han + + +import bpy +import numpy as np +from numpy.random import normal, randint, uniform + +from infinigen.assets.materials import simple_greenery +from infinigen.assets.objects.grassland import flower as Flower +from infinigen.assets.objects.small_plants import leaf_general as Leaf +from infinigen.core import surface +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.tagging import tag_nodegroup, tag_object +from infinigen.core.util import blender as butil + + +@node_utils.to_nodegroup( + "nodegroup_stem_branch_leaf_s_r", singleton=False, type="GeometryNodeTree" +) +def nodegroup_stem_branch_leaf_s_r(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + random_value = nw.new_node(Nodes.RandomValue, input_kwargs={2: 0.2, 3: 0.7}) + + curve_tangent_1 = nw.new_node(Nodes.CurveTangent) + + align_euler_to_vector_1 = nw.new_node( + Nodes.AlignEulerToVector, + input_kwargs={"Vector": curve_tangent_1}, + attrs={"axis": "Z"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Value": random_value.outputs[1], + "Rotation": align_euler_to_vector_1, + }, + ) + + +@node_utils.to_nodegroup( + "nodegroup_stem_branch_leaf_selection", singleton=False, type="GeometryNodeTree" +) +def nodegroup_stem_branch_leaf_selection(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + spline_parameter_1 = nw.new_node(Nodes.SplineParameter) + + colorramp_1 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": spline_parameter_1.outputs["Factor"]} + ) + colorramp_1.color_ramp.interpolation = "CONSTANT" + colorramp_1.color_ramp.elements.new(0) + colorramp_1.color_ramp.elements[0].position = 0.0 + colorramp_1.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) + colorramp_1.color_ramp.elements[1].position = 0.20 + colorramp_1.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) + colorramp_1.color_ramp.elements[2].position = 0.80 + colorramp_1.color_ramp.elements[2].color = (0.0, 0.0, 0.0, 1.0) + + integer = randint(10, 30, size=(1,))[0] + random_value_3 = nw.new_node( + Nodes.RandomValue, input_kwargs={5: integer}, attrs={"data_type": "INT"} + ) + + op_not = nw.new_node( + Nodes.BooleanMath, + input_kwargs={0: random_value_3.outputs[2]}, + attrs={"operation": "NOT"}, + ) + + op_and = nw.new_node( + Nodes.BooleanMath, input_kwargs={0: colorramp_1.outputs["Color"], 1: op_not} + ) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Boolean": op_and}) + + +@node_utils.to_nodegroup( + "nodegroup_stem_branch_leaves", singleton=False, type="GeometryNodeTree" +) +def nodegroup_stem_branch_leaves(nw: NodeWrangler, leaves): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Curve", None)] + ) + + resample_curve_3 = nw.new_node( + Nodes.ResampleCurve, + input_kwargs={"Curve": group_input.outputs["Curve"], "Count": 100}, + ) + + stembranchleafselection = nw.new_node(nodegroup_stem_branch_leaf_selection().name) + + leaf_id = randint(0, len(leaves), size=(1,))[0] + leaf = leaves[leaf_id] + object_info_2 = nw.new_node(Nodes.ObjectInfo, input_kwargs={"Object": leaf}) + + stembranchleafsr = nw.new_node(nodegroup_stem_branch_leaf_s_r().name) + + instance_on_points_4 = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={ + "Points": resample_curve_3, + "Selection": 
stembranchleafselection, + "Instance": object_info_2.outputs["Geometry"], + "Rotation": stembranchleafsr.outputs["Rotation"], + "Scale": stembranchleafsr.outputs["Value"], + }, + ) + + random_value_3 = nw.new_node( + Nodes.RandomValue, + input_kwargs={"Max": (0.6, 0.6, 6.28), "Seed": 30}, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + rotate_instances_2 = nw.new_node( + Nodes.RotateInstances, + input_kwargs={ + "Instances": instance_on_points_4, + "Rotation": random_value_3.outputs["Value"], + }, + ) + + realize_instances = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": rotate_instances_2} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Instances": realize_instances} + ) + + +@node_utils.to_nodegroup( + "nodegroup_stem_branch_geometry", singleton=False, type="GeometryNodeTree" +) +def nodegroup_stem_branch_geometry(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Curve", None)] + ) + + spline_parameter = nw.new_node(Nodes.SplineParameter) + + colorramp = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": spline_parameter.outputs["Factor"]} + ) + colorramp.color_ramp.elements[0].position = 0.0 + colorramp.color_ramp.elements[0].color = (1.0, 1.0, 1.0, 1.0) + colorramp.color_ramp.elements[1].position = 1.0 + colorramp.color_ramp.elements[1].color = (0.4, 0.4, 0.4, 1.0) + + set_curve_radius = nw.new_node( + Nodes.SetCurveRadius, + input_kwargs={ + "Curve": group_input.outputs["Curve"], + "Radius": colorramp.outputs["Color"], + }, + ) + + r = uniform(0.015, 0.022, size=(1,))[0] + curve_circle = nw.new_node( + Nodes.CurveCircle, input_kwargs={"Resolution": 10, "Radius": r} + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": set_curve_radius, + "Profile Curve": curve_circle.outputs["Curve"], + "Fill Caps": True, + }, + ) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Mesh": curve_to_mesh}) + + +@node_utils.to_nodegroup( + "nodegroup_stem_branch_rotation", singleton=False, type="GeometryNodeTree" +) +def nodegroup_stem_branch_rotation(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + position = nw.new_node(Nodes.InputPosition) + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) + + bounding_box = nw.new_node( + Nodes.BoundingBox, input_kwargs={"Geometry": group_input.outputs["Geometry"]} + ) + + multiply = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: bounding_box.outputs["Max"], 1: (0.0, 0.0, 1.0)}, + attrs={"operation": "MULTIPLY"}, + ) + + index = nw.new_node(Nodes.Index) + + map_range = nw.new_node(Nodes.MapRange, input_kwargs={"Value": index, 2: 20.0}) + + curvature = uniform(-0.5, 0.5, (1,))[0] + float_curve = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": map_range.outputs["Result"]} + ) + node_utils.assign_curve( + float_curve.mapping.curves[0], + [ + (0.0, 0.5), + (0.1, curvature / 5.0 + 0.5), + (0.25, curvature / 2.5 + 0.5), + (0.45, curvature / 1.5 + 0.5), + (0.6, curvature / 1.2 + 0.5), + (1.0, curvature + 0.5), + ], + ) + + add = nw.new_node(Nodes.Math, input_kwargs={0: float_curve, 1: -0.5}) + + multiply_1 = nw.new_node( + Nodes.Math, input_kwargs={0: add, 1: 1.0}, attrs={"operation": "MULTIPLY"} + ) + + vector_rotate = nw.new_node( + Nodes.VectorRotate, + input_kwargs={ + "Vector": position, + "Center": multiply.outputs["Vector"], + "Angle": multiply_1, + }, + 
attrs={"rotation_type": "X_AXIS"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Vector": vector_rotate} + ) + + +@node_utils.to_nodegroup( + "nodegroup_stem_leaf_s_r", singleton=False, type="GeometryNodeTree" +) +def nodegroup_stem_leaf_s_r(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + random_value = nw.new_node(Nodes.RandomValue, input_kwargs={2: 0.3, 3: 0.6}) + + curve_tangent_1 = nw.new_node(Nodes.CurveTangent) + + align_euler_to_vector_1 = nw.new_node( + Nodes.AlignEulerToVector, + input_kwargs={"Vector": curve_tangent_1}, + attrs={"axis": "Z"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Value": random_value.outputs[1], + "Rotation": align_euler_to_vector_1, + }, + ) + + +@node_utils.to_nodegroup( + "nodegroup_stem_leaf_selection", singleton=False, type="GeometryNodeTree" +) +def nodegroup_stem_leaf_selection(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + spline_parameter_1 = nw.new_node(Nodes.SplineParameter) + + colorramp_1 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": spline_parameter_1.outputs["Factor"]} + ) + colorramp_1.color_ramp.interpolation = "CONSTANT" + colorramp_1.color_ramp.elements.new(0) + colorramp_1.color_ramp.elements[0].position = 0.0 + colorramp_1.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) + colorramp_1.color_ramp.elements[1].position = 0.30 + colorramp_1.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) + colorramp_1.color_ramp.elements[2].position = 0.85 + colorramp_1.color_ramp.elements[2].color = (0.0, 0.0, 0.0, 1.0) + + integer = randint(5, 15, size=(1,))[0] + random_value_3 = nw.new_node( + Nodes.RandomValue, input_kwargs={5: integer}, attrs={"data_type": "INT"} + ) + + op_not = nw.new_node( + Nodes.BooleanMath, + input_kwargs={0: random_value_3.outputs[2]}, + attrs={"operation": "NOT"}, + ) + + op_and = nw.new_node( + Nodes.BooleanMath, input_kwargs={0: colorramp_1.outputs["Color"], 1: op_not} + ) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Boolean": op_and}) + + +@node_utils.to_nodegroup( + "nodegroup_stem_branch", singleton=False, type="GeometryNodeTree" +) +def nodegroup_stem_branch(nw: NodeWrangler, flowers, leaves): + # Code generated using version 2.4.3 of the node_transpiler + + curve_line_2 = nw.new_node(Nodes.CurveLine) + + resample_curve_4 = nw.new_node( + Nodes.ResampleCurve, input_kwargs={"Curve": curve_line_2, "Count": 20} + ) + + stembranchrotation = nw.new_node(nodegroup_stem_branch_rotation().name) + + set_position_2 = nw.new_node( + Nodes.SetPosition, + input_kwargs={"Geometry": resample_curve_4, "Position": stembranchrotation}, + ) + + branchflowersetting = nw.new_node( + nodegroup_branch_flower_setting(flowers=flowers).name + ) + + instance_on_points_3 = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={ + "Points": set_position_2, + "Selection": branchflowersetting.outputs["Selection"], + "Instance": branchflowersetting.outputs["Geometry"], + "Rotation": branchflowersetting.outputs["Rotation"], + "Scale": branchflowersetting.outputs["Value"], + }, + ) + + random_value = nw.new_node(Nodes.RandomValue, input_kwargs={2: 0.4, 3: 0.7}) + + scale_instances = nw.new_node( + Nodes.ScaleInstances, + input_kwargs={ + "Instances": instance_on_points_3, + "Scale": random_value.outputs[1], + }, + ) + + stembranchgeometry = nw.new_node( + nodegroup_stem_branch_geometry().name, input_kwargs={"Curve": set_position_2} + ) + + stembranchleaves = nw.new_node( + 
nodegroup_stem_branch_leaves(leaves=leaves).name, + input_kwargs={"Curve": set_position_2}, + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [stembranchgeometry, stembranchleaves]}, + ) + + join_geometry_1 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": join_geometry_1, + "Material": surface.shaderfunc_to_material( + simple_greenery.shader_simple_greenery + ), + }, + ) + + realize_instances = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": scale_instances} + ) + + join_geometry_2 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [realize_instances, join_geometry_1]}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": join_geometry_2} + ) + + +@node_utils.to_nodegroup( + "nodegroup_stem_branch_selection", singleton=False, type="GeometryNodeTree" +) +def nodegroup_stem_branch_selection(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + spline_parameter_1 = nw.new_node(Nodes.SplineParameter) + + colorramp_1 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": spline_parameter_1.outputs["Factor"]} + ) + colorramp_1.color_ramp.interpolation = "CONSTANT" + colorramp_1.color_ramp.elements.new(0) + colorramp_1.color_ramp.elements[0].position = 0.0 + colorramp_1.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) + colorramp_1.color_ramp.elements[1].position = 0.50 + colorramp_1.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) + colorramp_1.color_ramp.elements[2].position = 0.80 + colorramp_1.color_ramp.elements[2].color = (0.0, 0.0, 0.0, 1.0) + + seed = randint(0, 10000, size=(1,))[0] + threshold = uniform(0.05, 0.1, size=(1,))[0] + random_value = nw.new_node( + Nodes.RandomValue, input_kwargs={"Min": 0.0, "Max": 1.0, "Seed": seed} + ) + less_equal = nw.new_node( + Nodes.Compare, + input_kwargs={0: random_value, 1: threshold}, + attrs={"operation": "LESS_EQUAL"}, + ) + + op_and = nw.new_node( + Nodes.BooleanMath, input_kwargs={0: colorramp_1.outputs["Color"], 1: less_equal} + ) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Boolean": op_and}) + + +@node_utils.to_nodegroup( + "nodegroup_stem_leaves", singleton=False, type="GeometryNodeTree" +) +def nodegroup_stem_leaves(nw: NodeWrangler, leaves): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Curve", None)] + ) + + stemleafselection = nw.new_node(nodegroup_stem_leaf_selection().name) + + leaf_id = randint(0, len(leaves), size=(1,))[0] + leaf = leaves[leaf_id] + object_info_2 = nw.new_node(Nodes.ObjectInfo, input_kwargs={"Object": leaf}) + + stemleafsr = nw.new_node(nodegroup_stem_leaf_s_r().name) + + instance_on_points_1 = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={ + "Points": group_input.outputs["Curve"], + "Selection": stemleafselection, + "Instance": object_info_2.outputs["Geometry"], + "Rotation": stemleafsr.outputs["Rotation"], + "Scale": stemleafsr.outputs["Value"], + }, + ) + + random_value_2 = nw.new_node( + Nodes.RandomValue, + input_kwargs={"Max": (0.5, 0.5, 6.28), "Seed": 30}, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + rotate_instances = nw.new_node( + Nodes.RotateInstances, + input_kwargs={ + "Instances": instance_on_points_1, + "Rotation": random_value_2.outputs["Value"], + }, + ) + + realize_instances = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": rotate_instances} + ) + + group_output = nw.new_node( + 
Nodes.GroupOutput, input_kwargs={"Instances": realize_instances} + ) + + +@node_utils.to_nodegroup( + "nodegroup_main_flower_setting", singleton=False, type="GeometryNodeTree" +) +def nodegroup_main_flower_setting(nw: NodeWrangler, flowers): + # Code generated using version 2.4.3 of the node_transpiler + + flower_id = randint(0, len(flowers), size=(1,))[0] + scale = uniform(0.25, 0.45, size=(1,))[0] + flower = flowers[flower_id] + object_info_2 = nw.new_node(Nodes.ObjectInfo, input_kwargs={"Object": flower}) + transform = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": object_info_2.outputs["Geometry"], + "Scale": (scale, scale, scale), + }, + ) + + value = nw.new_node(Nodes.Value) + value.outputs[0].default_value = 0.5 + + endpoint_selection = nw.new_node( + "GeometryNodeCurveEndpointSelection", input_kwargs={"Start Size": 0} + ) + + curve_tangent = nw.new_node(Nodes.CurveTangent) + + align_euler_to_vector = nw.new_node( + Nodes.AlignEulerToVector, + input_kwargs={"Vector": curve_tangent}, + attrs={"axis": "Z"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": transform, + "Value": value, + "Selection": endpoint_selection, + "Rotation": align_euler_to_vector, + }, + ) + + +@node_utils.to_nodegroup( + "nodegroup_branch_flower_setting", singleton=False, type="GeometryNodeTree" +) +def nodegroup_branch_flower_setting(nw: NodeWrangler, flowers): + # Code generated using version 2.4.3 of the node_transpiler + + flower_id = randint(0, len(flowers), size=(1,))[0] + scale = uniform(0.4, 0.6, size=(1,))[0] + flower = flowers[flower_id] + object_info_2 = nw.new_node(Nodes.ObjectInfo, input_kwargs={"Object": flower}) + transform = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": object_info_2.outputs["Geometry"], + "Scale": (scale, scale, scale), + }, + ) + + value = nw.new_node(Nodes.Value) + value.outputs[0].default_value = 0.5 + + endpoint_selection = nw.new_node( + "GeometryNodeCurveEndpointSelection", input_kwargs={"Start Size": 0} + ) + + curve_tangent = nw.new_node(Nodes.CurveTangent) + + align_euler_to_vector = nw.new_node( + Nodes.AlignEulerToVector, + input_kwargs={"Vector": curve_tangent}, + attrs={"axis": "Z"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": transform, + "Value": value, + "Selection": endpoint_selection, + "Rotation": align_euler_to_vector, + }, + ) + + +@node_utils.to_nodegroup( + "nodegroup_stem_rotation", singleton=False, type="GeometryNodeTree" +) +def nodegroup_stem_rotation(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + position = nw.new_node(Nodes.InputPosition) + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) + + bounding_box = nw.new_node( + Nodes.BoundingBox, input_kwargs={"Geometry": group_input.outputs["Geometry"]} + ) + + multiply = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: bounding_box.outputs["Max"], 1: (0.0, 0.0, 1.0)}, + attrs={"operation": "MULTIPLY"}, + ) + + index = nw.new_node(Nodes.Index) + + map_range = nw.new_node(Nodes.MapRange, input_kwargs={"Value": index, 2: 20.0}) + + curvature = np.clip(np.abs(normal(0, 0.4, (1,))[0]), 0.0, 0.8) + float_curve = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": map_range.outputs["Result"]} + ) + node_utils.assign_curve( + float_curve.mapping.curves[0], + [ + (0.0, 0.0), + (0.1, curvature / 5.0), + (0.25, curvature / 2.5), + (0.45, curvature / 1.5), + (0.6, curvature / 1.2), + (1.0, 
curvature), + ], + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: float_curve, 1: 1.2}, + attrs={"operation": "MULTIPLY"}, + ) + + vector_rotate = nw.new_node( + Nodes.VectorRotate, + input_kwargs={ + "Vector": position, + "Center": multiply.outputs["Vector"], + "Angle": multiply_2, + }, + attrs={"rotation_type": "X_AXIS"}, + ) + + noise_texture = nw.new_node(Nodes.NoiseTexture, input_kwargs={"Scale": 0.3}) + + add_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: (-0.5, -0.5, -0.5), 1: noise_texture.outputs["Color"]}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Rotation": vector_rotate, "Noise": add_1.outputs["Vector"]}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_stem_geometry", singleton=False, type="GeometryNodeTree" +) +def nodegroup_stem_geometry(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Curve", None)] + ) + + spline_parameter = nw.new_node(Nodes.SplineParameter) + + colorramp = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": spline_parameter.outputs["Factor"]} + ) + colorramp.color_ramp.elements[0].position = 0.0 + colorramp.color_ramp.elements[0].color = (1.0, 1.0, 1.0, 1.0) + colorramp.color_ramp.elements[1].position = 1.0 + colorramp.color_ramp.elements[1].color = (0.4, 0.4, 0.4, 1.0) + + set_curve_radius = nw.new_node( + Nodes.SetCurveRadius, + input_kwargs={ + "Curve": group_input.outputs["Curve"], + "Radius": colorramp.outputs["Color"], + }, + ) + + rad = uniform(0.01, 0.02, size=(1,))[0] + curve_circle = nw.new_node( + Nodes.CurveCircle, input_kwargs={"Resolution": 10, "Radius": rad} + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": set_curve_radius, + "Profile Curve": curve_circle.outputs["Curve"], + "Fill Caps": True, + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Mesh": tag_nodegroup(nw, curve_to_mesh, "stem")}, + ) + + +def geo_flowerplant(nw: NodeWrangler, **kwargs): + # Code generated using version 2.4.3 of the node_transpiler + leaves = kwargs["leaves"] + flowers = kwargs["flowers"] + curve_line = nw.new_node(Nodes.CurveLine) + + resample_curve = nw.new_node( + Nodes.ResampleCurve, input_kwargs={"Curve": curve_line, "Count": 20} + ) + + stemrotation = nw.new_node( + nodegroup_stem_rotation().name, input_kwargs={"Geometry": curve_line} + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": resample_curve, + "Position": stemrotation.outputs["Rotation"], + "Offset": stemrotation.outputs["Noise"], + }, + ) + + stemgeometry = nw.new_node( + nodegroup_stem_geometry().name, input_kwargs={"Curve": set_position} + ) + + mainflowersetting = nw.new_node(nodegroup_main_flower_setting(flowers=flowers).name) + + instance_on_points = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={ + "Points": set_position, + "Selection": mainflowersetting.outputs["Selection"], + "Instance": mainflowersetting.outputs["Geometry"], + "Rotation": mainflowersetting.outputs["Rotation"], + "Scale": mainflowersetting.outputs["Value"], + }, + ) + + resample_curve_1 = nw.new_node( + Nodes.ResampleCurve, input_kwargs={"Curve": set_position, "Count": 150} + ) + + stemleaves = nw.new_node( + nodegroup_stem_leaves(leaves=leaves).name, + input_kwargs={"Curve": resample_curve_1}, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [stemgeometry, stemleaves]} + ) + + join_geometry = 
nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": join_geometry, + "Material": surface.shaderfunc_to_material( + simple_greenery.shader_simple_greenery + ), + }, + ) + + num_versions = randint(0, 3, size=(1,))[0] + branches = [] + for version in range(num_versions): + resample_num = randint(80, 100, size=(1,))[0] + resample_curve_2 = nw.new_node( + Nodes.ResampleCurve, + input_kwargs={"Curve": set_position, "Count": resample_num}, + ) + stembranchselection = nw.new_node(nodegroup_stem_branch_selection().name) + stembranch = nw.new_node( + nodegroup_stem_branch(flowers=flowers, leaves=leaves).name + ) + random_value_1 = nw.new_node( + Nodes.RandomValue, + input_kwargs={"Min": (0.4, 0.4, 0.4)}, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + instance_on_points_2 = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={ + "Points": resample_curve_2, + "Selection": stembranchselection, + "Instance": stembranch, + "Scale": (random_value_1, "Value"), + }, + ) + random_value_4 = nw.new_node( + Nodes.RandomValue, + input_kwargs={ + "Min": (0.15, 0.15, 0.0), + "Max": (0.45, 0.45, 6.28), + "Seed": 30, + }, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + rotate_instances_1 = nw.new_node( + Nodes.RotateInstances, + input_kwargs={ + "Instances": instance_on_points_2, + "Rotation": (random_value_4, "Value"), + }, + ) + realize_instances_1 = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": rotate_instances_1} + ) + branches.append(realize_instances_1) + + realize_instances = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": instance_on_points} + ) + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [join_geometry, realize_instances] + branches}, + ) + + z_rotate = uniform(0, 6.28, size=(1,))[0] + transform = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": join_geometry_1, "Rotation": (0.0, 0.0, z_rotate)}, + ) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Geometry": transform}) + + +class FlowerPlantFactory(AssetFactory): + def __init__(self, factory_seed, coarse=False): + super(FlowerPlantFactory, self).__init__(factory_seed, coarse=coarse) + self.leaves_version_num = 4 + self.flowers_version_num = 1 + + def create_asset(self, **params): + bpy.ops.mesh.primitive_plane_add( + size=1, + enter_editmode=False, + align="WORLD", + location=(0, 0, 0), + scale=(1, 1, 1), + ) + obj = bpy.context.active_object + + # Make the Leaf and Delete It Later + leaves = [] + for _ in range(self.leaves_version_num): + lf_seed = randint(0, 1000, size=(1,))[0] + leaf_model = Leaf.LeafFactory( + genome={"leaf_width": 0.35, "width_rand": 0.1}, factory_seed=lf_seed + ) + leaf = leaf_model.create_asset() + leaves.append(leaf) + + flowers = [] + for _ in range(self.flowers_version_num): + fw_seed = randint(0, 1000, size=(1,))[0] + rad = uniform(0.4, 0.7, size=(1,))[0] + flower_model = Flower.FlowerFactory(rad=rad, factory_seed=fw_seed) + flower = flower_model.create_asset() + flowers.append(flower) + + params["leaves"] = leaves + params["flowers"] = flowers + + mod = surface.add_geomod( + obj, geo_flowerplant, apply=False, attributes=[], input_kwargs=params + ) + butil.delete(leaves + flowers) + with butil.SelectObjects(obj): + bpy.ops.object.material_slot_remove() + bpy.ops.object.shade_flat() + + butil.apply_modifiers(obj) + + tag_object(obj, "flowerplant") + return obj diff --git a/infinigen/assets/grassland/grass_tuft.py b/infinigen/assets/objects/grassland/grass_tuft.py similarity index 73% rename from 
infinigen/assets/grassland/grass_tuft.py rename to infinigen/assets/objects/grassland/grass_tuft.py index 9b61fb06b..d720d2837 100644 --- a/infinigen/assets/grassland/grass_tuft.py +++ b/infinigen/assets/objects/grassland/grass_tuft.py @@ -5,24 +5,18 @@ import bpy - import numpy as np -from numpy.random import uniform, normal - -from infinigen.assets.creatures.util.geometry.curve import Curve -from infinigen.core.util.blender import deep_clone_obj +from numpy.random import normal, uniform from infinigen.assets.materials import grass_blade_texture - +from infinigen.assets.utils.geometry.curve import Curve from infinigen.core.placement.factory import AssetFactory - +from infinigen.core.tagging import tag_object from infinigen.core.util import blender as butil -from infinigen.core.tagging import tag_object, tag_nodegroup -class GrassTuftFactory(AssetFactory): +class GrassTuftFactory(AssetFactory): def __init__(self, seed): - super(GrassTuftFactory, self).__init__(seed) self.n_seg = 4 @@ -37,22 +31,25 @@ def __init__(self, seed): self.blade_width_var = uniform(0, 0.05) self.taper_var = uniform(0, 0.1) - self.taper_y = np.linspace(1, 0, self.n_seg) * normal(1, self.taper_var, self.n_seg) + self.taper_y = np.linspace(1, 0, self.n_seg) * normal( + 1, self.taper_var, self.n_seg + ) self.taper_x = np.linspace(0, 1, self.n_seg) self.taper_points = np.stack([self.taper_x, self.taper_y], axis=-1) - self.base_spread = uniform(0, self.length_mean/4) + self.base_spread = uniform(0, self.length_mean / 4) self.base_angle_var = uniform(0, 15) def create_asset(self, **params) -> bpy.types.Object: - n_blades = np.random.randint(30, 60) - + blade_lengths = normal(self.length_mean, self.length_std, (n_blades, 1)) - seg_lens = (blade_lengths / self.n_seg) - - seg_curls = normal(self.curl_mean, self.curl_std, (n_blades, self.n_seg)) - seg_curls *= np.power(np.linspace(0, 1, self.n_seg).reshape(1, self.n_seg), self.curl_power) + seg_lens = blade_lengths / self.n_seg + + seg_curls = normal(self.curl_mean, self.curl_std, (n_blades, self.n_seg)) + seg_curls *= np.power( + np.linspace(0, 1, self.n_seg).reshape(1, self.n_seg), self.curl_power + ) seg_curls = np.deg2rad(seg_curls) point_rads = np.arange(self.n_seg).reshape(1, self.n_seg) * seg_lens @@ -65,14 +62,18 @@ def create_asset(self, **params) -> bpy.types.Object: taper = Curve(self.taper_points).to_curve_obj() - widths = blade_lengths.reshape(-1) * normal(self.blade_width_pct_mean, self.blade_width_var, n_blades) + widths = blade_lengths.reshape(-1) * normal( + self.blade_width_pct_mean, self.blade_width_var, n_blades + ) objs = [] for i in range(n_blades): - obj = Curve(points[i], taper=taper).to_curve_obj(name=f'_blade_{i}', extrude=widths[i], resu=2) + obj = Curve(points[i], taper=taper).to_curve_obj( + name=f"_blade_{i}", extrude=widths[i], resu=2 + ) objs.append(obj) with butil.SelectObjects(objs): - bpy.ops.object.convert(target='MESH') + bpy.ops.object.convert(target="MESH") butil.delete(taper) # Randomly pose and arrange the blades in a circle-ish cluster @@ -81,7 +82,7 @@ def create_asset(self, **params) -> bpy.types.Object: facing_offsets = np.rad2deg(normal(0, self.base_angle_var, n_blades)) for a, r, off, obj in zip(base_angles, base_rads, facing_offsets, objs): obj.location = (-r * np.cos(a), r * np.sin(a), -0.05 * self.length_mean) - obj.rotation_euler = (np.pi/2, -np.pi/2, -a + off) + obj.rotation_euler = (np.pi / 2, -np.pi / 2, -a + off) with butil.SelectObjects(objs): bpy.ops.object.transform_apply(location=True, rotation=True, scale=True) @@ 
-91,9 +92,9 @@ def create_asset(self, **params) -> bpy.types.Object: bpy.ops.object.shade_flat() parent = objs[0] - tag_object(parent, 'grass_tuft') - + tag_object(parent, "grass_tuft") + return parent def finalize_assets(self, assets): - grass_blade_texture.apply(assets) \ No newline at end of file + grass_blade_texture.apply(assets) diff --git a/infinigen/assets/objects/lamp/__init__.py b/infinigen/assets/objects/lamp/__init__.py new file mode 100644 index 000000000..4c8445322 --- /dev/null +++ b/infinigen/assets/objects/lamp/__init__.py @@ -0,0 +1,3 @@ +from .ceiling_classic_lamp import CeilingClassicLampFactory +from .ceiling_lights import CeilingLightFactory +from .lamp import DeskLampFactory, FloorLampFactory, LampFactory diff --git a/infinigen/assets/objects/lamp/ceiling_classic_lamp.py b/infinigen/assets/objects/lamp/ceiling_classic_lamp.py new file mode 100644 index 000000000..2e4ee2342 --- /dev/null +++ b/infinigen/assets/objects/lamp/ceiling_classic_lamp.py @@ -0,0 +1,438 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Stamatis Alexandropoulos + +from numpy.random import randint, uniform + +from infinigen.assets.lighting.indoor_lights import PointLampFactory +from infinigen.assets.materials.ceiling_light_shaders import ( + shader_lamp_bulb_nonemissive, +) +from infinigen.core import surface, tagging +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.util import blender as butil +from infinigen.core.util.color import color_category +from infinigen.core.util.math import FixedSeed + + +def shader_lamp_material(nw: NodeWrangler): + # Code generated using version 2.6.5 of the node_transpiler + + rgb = nw.new_node(Nodes.RGB) + rgb.outputs[0].default_value = color_category("textile") + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": rgb, + "Subsurface Radius": (0.1000, 0.1000, 0.1000), + "Roughness": uniform(0.2, 0.9), + "Sheen": 0.2068, + "Clearcoat Roughness": 0.1436, + "Transmission": 0.4045, + "Transmission Roughness": 0.6932, + "Emission": (0.9858, 0.9858, 0.9858, 1.0000), + "Emission Strength": 0.0000, + "Alpha": 0.8614, + }, + ) + + voronoi_texture = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={"Scale": 104.3000, "Randomness": 0.0000}, + attrs={"feature": "SMOOTH_F1"}, + ) + + displacement = nw.new_node( + Nodes.Displacement, + input_kwargs={"Height": voronoi_texture.outputs["Distance"], "Scale": 0.4000}, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, + input_kwargs={"Surface": principled_bsdf, "Displacement": displacement}, + attrs={"is_active_output": True}, + ) + + +def shader_inside_medal(nw: NodeWrangler): + # Code generated using version 2.6.5 of the node_transpiler + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": (0.0018, 0.0015, 0.0000, 1.0000), + "Metallic": 1.0000, + "Roughness": 0.0682, + }, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, + input_kwargs={"Surface": principled_bsdf}, + attrs={"is_active_output": True}, + ) + + +def shader_cable(nw: NodeWrangler): + # Code generated using version 2.6.5 of the node_transpiler + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": (0.0000, 0.0000, 0.0000, 1.0000), + "Metallic": 1.0000, + 
"Roughness": 0.4273, + }, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, + input_kwargs={"Surface": principled_bsdf}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup("geometry_nodes", singleton=True, type="GeometryNodeTree") +def geometry_nodes(nw: NodeWrangler): + # Code generated using version 2.6.5 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "cable_length", 0.7000), + ("NodeSocketFloat", "cable_radius", 0.0500), + ("NodeSocketFloat", "height", 0.0000), + ("NodeSocketFloat", "bottom_radius", 0.0000), + ("NodeSocketFloat", "top_radius", 0.0000), + ("NodeSocketFloat", "Thickness", 0.5000), + ("NodeSocketFloatDistance", "Amount", 1.0000), + ], + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"Z": group_input.outputs["cable_length"]} + ) + + curve_line = nw.new_node(Nodes.CurveLine, input_kwargs={"End": combine_xyz}) + + curve_circle = nw.new_node( + Nodes.CurveCircle, + input_kwargs={"Resolution": 87, "Radius": group_input.outputs["cable_radius"]}, + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": curve_line, + "Profile Curve": curve_circle.outputs["Curve"], + }, + ) + + transform_geometry = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": curve_to_mesh, "Scale": (1.0000, 1.0000, -1.0000)}, + ) + + set_material = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": transform_geometry, + "Material": surface.shaderfunc_to_material(shader_cable), + }, + ) + + curve_circle_3 = nw.new_node( + Nodes.CurveCircle, input_kwargs={"Radius": group_input.outputs["top_radius"]} + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["height"], 1: -0.5000}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_4 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply}) + + transform_geometry_4 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": curve_circle_3.outputs["Curve"], + "Translation": combine_xyz_4, + }, + ) + + curve_line_3 = nw.new_node( + Nodes.CurveLine, + input_kwargs={ + "Start": (-1.0000, 0.0000, 0.0000), + "End": (1.0000, 0.0000, 0.0000), + }, + ) + + geometry_to_instance = nw.new_node( + "GeometryNodeGeometryToInstance", input_kwargs={"Geometry": curve_line_3} + ) + + reroute = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": group_input.outputs["Amount"]} + ) + + duplicate_elements = nw.new_node( + Nodes.DuplicateElements, + input_kwargs={"Geometry": geometry_to_instance, "Amount": reroute}, + attrs={"domain": "INSTANCE"}, + ) + + realize_instances_1 = nw.new_node( + Nodes.RealizeInstances, + input_kwargs={"Geometry": duplicate_elements.outputs["Geometry"]}, + ) + + endpoint_selection_1 = nw.new_node( + Nodes.EndpointSelection, input_kwargs={"Start Size": 0} + ) + + divide = nw.new_node( + Nodes.Math, input_kwargs={0: 1.0000, 1: reroute}, attrs={"operation": "DIVIDE"} + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: duplicate_elements.outputs["Duplicate Index"], 1: divide}, + attrs={"operation": "MULTIPLY"}, + ) + + sample_curve = nw.new_node( + Nodes.SampleCurve, + input_kwargs={"Curves": transform_geometry_4, "Factor": multiply_1}, + attrs={"use_all_curves": True}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": realize_instances_1, + "Selection": endpoint_selection_1, + "Position": sample_curve.outputs["Position"], + }, + ) + + endpoint_selection_2 = nw.new_node( + Nodes.EndpointSelection, 
input_kwargs={"End Size": 0} + ) + + multiply_add = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Thickness"], 2: 0.0000}, + attrs={"operation": "MULTIPLY_ADD"}, + ) + + curve_circle_4 = nw.new_node( + Nodes.CurveCircle, input_kwargs={"Radius": multiply_add} + ) + + transform_geometry_5 = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": curve_circle_4.outputs["Curve"]} + ) + + sample_curve_1 = nw.new_node( + Nodes.SampleCurve, + input_kwargs={"Curves": transform_geometry_5, "Factor": multiply_1}, + attrs={"use_all_curves": True}, + ) + + set_position_1 = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": set_position, + "Selection": endpoint_selection_2, + "Position": sample_curve_1.outputs["Position"], + }, + ) + + join_geometry_3 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={ + "Geometry": [transform_geometry_4, set_position_1, transform_geometry_5] + }, + ) + + curve_circle_5 = nw.new_node( + Nodes.CurveCircle, input_kwargs={"Radius": group_input.outputs["Thickness"]} + ) + + curve_to_mesh_3 = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": join_geometry_3, + "Profile Curve": curve_circle_5.outputs["Curve"], + "Fill Caps": True, + }, + ) + + transform_geometry_6 = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": curve_to_mesh_3} + ) + + set_material_1 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": transform_geometry_6, + "Material": surface.shaderfunc_to_material(shader_inside_medal), + }, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: -1.5000, 1: -0.1000}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply_2}) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["height"], 1: 0.0000}, + attrs={"operation": "SUBTRACT"}, + ) + + multiply_3 = nw.new_node( + Nodes.Math, input_kwargs={1: -1.0000}, attrs={"operation": "MULTIPLY"} + ) + + multiply_4 = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract, 1: multiply_3}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply_4}) + + curve_line_2 = nw.new_node( + Nodes.CurveLine, input_kwargs={"Start": combine_xyz_1, "End": combine_xyz_2} + ) + + spline_parameter = nw.new_node(Nodes.SplineParameter) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": spline_parameter.outputs["Factor"], + 3: group_input.outputs["bottom_radius"], + 4: group_input.outputs["top_radius"], + }, + ) + + set_curve_radius = nw.new_node( + Nodes.SetCurveRadius, + input_kwargs={"Curve": curve_line_2, "Radius": map_range.outputs["Result"]}, + ) + + curve_circle_2 = nw.new_node(Nodes.CurveCircle) + + curve_to_mesh_2 = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": set_curve_radius, + "Profile Curve": curve_circle_2.outputs["Curve"], + }, + ) + + flip_faces = nw.new_node(Nodes.FlipFaces, input_kwargs={"Mesh": curve_to_mesh_2}) + + extrude_mesh = nw.new_node( + Nodes.ExtrudeMesh, + input_kwargs={ + "Mesh": curve_to_mesh_2, + "Offset Scale": 0.0050, + "Individual": False, + }, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [flip_faces, extrude_mesh.outputs["Mesh"]]}, + ) + + transform_geometry_2 = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": join_geometry} + ) + + set_material_2 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": transform_geometry_2, + "Material": 
surface.shaderfunc_to_material(shader_lamp_material), + }, + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [set_material_1, set_material_2]} + ) + + ico_sphere = nw.new_node( + Nodes.MeshIcoSphere, input_kwargs={"Radius": 0.0500, "Subdivisions": 4} + ) + + set_material_3 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": ico_sphere.outputs["Mesh"], + "Material": surface.shaderfunc_to_material(shader_lamp_bulb_nonemissive), + }, + ) + + join_geometry_2 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [set_material, join_geometry_1, set_material_3]}, + ) + + transform_geometry_3 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": join_geometry_2, + "Rotation": (0.0000, 3.1416, 0.0000), + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": transform_geometry_3}, + attrs={"is_active_output": True}, + ) + + +class CeilingClassicLampFactory(AssetFactory): + def __init__(self, factory_seed): + super(CeilingClassicLampFactory, self).__init__(factory_seed) + with FixedSeed(factory_seed): + self.params = { + "cable_length": uniform(0.6, 0.710), + "cable_radius": uniform(0.015, 0.02), + "height": uniform(0.4, 0.710), + "top_radius": uniform(0.05, 0.2), + "bottom_radius": uniform(0.22, 0.35), + "Thickness": uniform(0.002, 0.006), + "Amount": randint(1, 8), + } + self.light_factory = PointLampFactory(factory_seed) + + # self.beveler = BevelSharp(mult=uniform(1, 3)) + + def create_placeholder(self, **_): + obj = butil.spawn_cube() + butil.modify_mesh( + obj, "NODES", node_group=geometry_nodes(), ng_inputs=self.params, apply=True + ) + tagging.tag_system.relabel_obj(obj) + return obj + + def create_asset(self, i, placeholder, face_size, **_): + obj = butil.deep_clone_obj(placeholder, keep_materials=True) + light = self.light_factory.spawn_asset(i) + butil.parent_to(light, obj) + return obj diff --git a/infinigen/assets/objects/lamp/ceiling_lights.py b/infinigen/assets/objects/lamp/ceiling_lights.py new file mode 100644 index 000000000..860db0e70 --- /dev/null +++ b/infinigen/assets/objects/lamp/ceiling_lights.py @@ -0,0 +1,324 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory +# of this source tree. 
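+
+# CeilingLightFactory (defined below) builds a disc-shaped ceiling light: the
+# shade and inner diffuser come from the nodegroup_ceiling_light_geometry node
+# group, materials and wear are sampled from AssetList["CeilingLightFactory"],
+# and a point light from PointLampFactory is parented under the fixture.
+#
+# Minimal usage sketch (seed and variable names chosen arbitrarily; must run
+# inside a Blender session):
+#
+#   from infinigen.assets.objects.lamp import CeilingLightFactory
+#   factory = CeilingLightFactory(factory_seed=0)
+#   light = factory.spawn_asset(0)  # placeholder mesh + bevel + parented point lamp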
+ +# Authors: +# - +# - Alexander Raistrick: add point light + + +import numpy as np +from numpy.random import randint as RI +from numpy.random import uniform as U + +from infinigen.assets.lighting.indoor_lights import PointLampFactory +from infinigen.assets.material_assignments import AssetList +from infinigen.assets.utils.autobevel import BevelSharp +from infinigen.core import surface +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.util import blender as butil +from infinigen.core.util.math import FixedSeed, clip_gaussian + + +class CeilingLightFactory(AssetFactory): + def __init__(self, factory_seed, coarse=False, dimensions=[1.0, 1.0, 1.0]): + super(CeilingLightFactory, self).__init__(factory_seed, coarse=coarse) + + self.dimensions = dimensions + self.ceiling_light_default_params = [ + { + "Radius": 0.2, + "Thickness": 0.001, + "InnerRadius": 0.2, + "Height": 0.1, + "InnerHeight": 0.1, + "Curvature": 0.1, + }, + { + "Radius": 0.18, + "Thickness": 0.05, + "InnerRadius": 0.18, + "Height": 0.1, + "InnerHeight": 0.1, + "Curvature": 0.25, + }, + { + "Radius": 0.2, + "Thickness": 0.005, + "InnerRadius": 0.18, + "Height": 0.1, + "InnerHeight": 0.03, + "Curvature": 0.4, + }, + ] + with FixedSeed(factory_seed): + self.light_factory = PointLampFactory(factory_seed) + self.params = self.sample_parameters(dimensions) + self.material_params, self.scratch, self.edge_wear = ( + self.get_material_params() + ) + + self.params.update(self.material_params) + self.beveler = BevelSharp(mult=U(1, 3)) + + def get_material_params(self): + material_assignments = AssetList["CeilingLightFactory"]() + black_material = material_assignments["black_material"].assign_material() + white_material = material_assignments["white_material"].assign_material() + + wrapped_params = { + "BlackMaterial": surface.shaderfunc_to_material(black_material), + "WhiteMaterial": surface.shaderfunc_to_material(white_material), + } + scratch_prob, edge_wear_prob = material_assignments["wear_tear_prob"] + scratch, edge_wear = material_assignments["wear_tear"] + + is_scratch = np.random.uniform() < scratch_prob + is_edge_wear = np.random.uniform() < edge_wear_prob + if not is_scratch: + scratch = None + + if not is_edge_wear: + edge_wear = None + + return wrapped_params, scratch, edge_wear + + def sample_parameters(self, dimensions, use_default=False): + if use_default: + return self.ceiling_light_default_params[ + RI(0, len(self.ceiling_light_default_params)) + ] + else: + Radius = clip_gaussian(0.12, 0.04, 0.1, 0.25) + Thickness = U(0.005, 0.05) + InnerRadius = Radius * U(0.4, 0.9) + Height = 0.7 * clip_gaussian(0.09, 0.03, 0.07, 0.15) + InnerHeight = Height * U(0.5, 1.1) + Curvature = U(0.1, 0.5) + params = { + "Radius": Radius, + "Thickness": Thickness, + "InnerRadius": InnerRadius, + "Height": Height, + "InnerHeight": InnerHeight, + "Curvature": Curvature, + } + return params + + def create_placeholder(self, i, **params): + obj = butil.spawn_cube() + butil.modify_mesh( + obj, + "NODES", + node_group=nodegroup_ceiling_light_geometry(), + ng_inputs=self.params, + apply=True, + ) + return obj + + def create_asset(self, i, placeholder, **params): + obj = butil.copy(placeholder, keep_materials=True) + self.beveler(obj) + + lamp = self.light_factory.spawn_asset(i, loc=(0, 0, 0), rot=(0, 0, 0)) + + butil.parent_to(lamp, obj, no_transform=True, no_inverse=True) + lamp.location.z -= 0.03 + + return 
obj + + def finalize_assets(self, assets): + if self.scratch: + self.scratch.apply(assets) + if self.edge_wear: + self.edge_wear.apply(assets) + + +@node_utils.to_nodegroup( + "nodegroup_ceiling_light_geometry", singleton=True, type="GeometryNodeTree" +) +def nodegroup_ceiling_light_geometry(nw: NodeWrangler): + # Code generated using version 2.6.5 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloatDistance", "Radius", 0.2000), + ("NodeSocketFloat", "Thickness", 0.0050), + ("NodeSocketFloat", "InnerRadius", 0.1800), + ("NodeSocketFloat", "Height", 0.1000), + ("NodeSocketFloat", "InnerHeight", 0.0300), + ("NodeSocketFloat", "Curvature", 0.4000), + ("NodeSocketMaterial", "BlackMaterial", None), + ("NodeSocketMaterial", "WhiteMaterial", None), + ], + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Height"], 1: -1.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply}) + + curve_line = nw.new_node(Nodes.CurveLine, input_kwargs={"End": combine_xyz}) + + curve_circle = nw.new_node( + Nodes.CurveCircle, + input_kwargs={"Resolution": 512, "Radius": group_input.outputs["Radius"]}, + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": curve_line, + "Profile Curve": curve_circle.outputs["Curve"], + }, + ) + + extrude_mesh = nw.new_node( + Nodes.ExtrudeMesh, + input_kwargs={ + "Mesh": curve_to_mesh, + "Offset Scale": group_input.outputs["Thickness"], + "Individual": False, + }, + ) + + flip_faces = nw.new_node(Nodes.FlipFaces, input_kwargs={"Mesh": curve_to_mesh}) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [extrude_mesh.outputs["Mesh"], flip_faces]}, + ) + + set_shade_smooth = nw.new_node( + Nodes.SetShadeSmooth, + input_kwargs={"Geometry": join_geometry, "Shade Smooth": False}, + ) + + mesh_circle = nw.new_node( + Nodes.MeshCircle, + input_kwargs={"Radius": group_input.outputs["Radius"]}, + attrs={"fill_type": "NGON"}, + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [set_shade_smooth, mesh_circle]} + ) + + set_material = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": join_geometry_1, + "Material": group_input.outputs["BlackMaterial"], + }, + ) + + ico_sphere_1 = nw.new_node( + Nodes.MeshIcoSphere, + input_kwargs={"Radius": group_input.outputs["InnerRadius"], "Subdivisions": 5}, + ) + + store_named_attribute = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": ico_sphere_1.outputs["Mesh"], + "Name": "UVMap", + 3: ico_sphere_1.outputs["UV Map"], + }, + attrs={"domain": "CORNER", "data_type": "FLOAT_VECTOR"}, + ) + + position_2 = nw.new_node(Nodes.InputPosition) + + separate_xyz_2 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": position_2}) + + less_than = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_2.outputs["Z"], 1: 0.0010}, + attrs={"operation": "LESS_THAN"}, + ) + + separate_geometry_1 = nw.new_node( + Nodes.SeparateGeometry, + input_kwargs={"Geometry": store_named_attribute, "Selection": less_than}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["InnerHeight"], 1: -1.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply_1}) + + combine_xyz_3 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": 1.0000, "Y": 1.0000, "Z": group_input.outputs["Curvature"]}, + 
) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": separate_geometry_1.outputs["Selection"], + "Translation": combine_xyz_2, + "Scale": combine_xyz_3, + }, + ) + + combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply_1}) + + curve_line_1 = nw.new_node( + Nodes.CurveLine, + input_kwargs={"Start": (0.0000, 0.0000, -0.0010), "End": combine_xyz_1}, + ) + + curve_circle_1 = nw.new_node( + Nodes.CurveCircle, input_kwargs={"Radius": group_input.outputs["InnerRadius"]} + ) + + curve_to_mesh_1 = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": curve_line_1, + "Profile Curve": curve_circle_1.outputs["Curve"], + "Fill Caps": True, + }, + ) + + join_geometry_2 = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [transform, curve_to_mesh_1]} + ) + + set_material_1 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": join_geometry_2, + "Material": group_input.outputs["WhiteMaterial"], + }, + ) + + join_geometry_3 = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [set_material, set_material_1]} + ) + + bounding_box = nw.new_node( + Nodes.BoundingBox, input_kwargs={"Geometry": join_geometry_3} + ) + + vector = nw.new_node(Nodes.Vector) + vector.vector = (0.0000, 0.0000, 0.0000) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": join_geometry_3, + "Bounding Box": bounding_box.outputs["Bounding Box"], + "LightPosition": vector, + }, + attrs={"is_active_output": True}, + ) diff --git a/infinigen/assets/objects/lamp/lamp.py b/infinigen/assets/objects/lamp/lamp.py new file mode 100644 index 000000000..5397ec1fc --- /dev/null +++ b/infinigen/assets/objects/lamp/lamp.py @@ -0,0 +1,980 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory +# of this source tree. 
+ +# Authors: +# - Hongyu Wen: primary author +# - Alexander Raistrick: add point light + +import random + +import bpy +import numpy as np +from numpy.random import uniform as U + +from infinigen.assets.lighting.indoor_lights import PointLampFactory +from infinigen.assets.material_assignments import AssetList +from infinigen.core import surface +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.util import blender as butil +from infinigen.core.util.math import FixedSeed + + +class LampFactory(AssetFactory): + def __init__( + self, + factory_seed, + coarse=False, + dimensions=[1.0, 1.0, 1.0], + lamp_type="FloorLamp", + ): + super(LampFactory, self).__init__(factory_seed, coarse=coarse) + + self.bulb_fac = PointLampFactory(factory_seed) + self.bulb_fac.params["Temperature"] = max( + self.bulb_fac.params["Temperature"] * 0.6, 2500 + ) + self.bulb_fac.params["Wattage"] *= 0.5 + + self.dimensions = dimensions + self.lamp_type = lamp_type + self.lamp_default_params = { + "DeskLamp": { + "StandRadius": 0.01, + "StandHeight": 0.3, + "BaseRadius": 0.07, + "BaseHeight": 0.02, + "ShadeHeight": 0.18, + "HeadTopRadius": 0.08, + "HeadBotRadius": 0.11, + "ReverseLamp": True, + "RackThickness": 0.002, + "CurvePoint1": (0.0, 0.0, 0.0), + "CurvePoint2": (0.0, 0.0, 0.2), + "CurvePoint3": (0.0, 0.0, 0.3), + }, + "FloorLamp1": { + "StandRadius": 0.01, + "StandHeight": 0.3, + "BaseRadius": 0.1, + "BaseHeight": 0.02, + "ShadeHeight": 0.2, + "HeadTopRadius": 0.1, + "HeadBotRadius": 0.12, + "ReverseLamp": False, + "RackThickness": 0.002, + "CurvePoint1": (0.0, 0.0, 1.0), + "CurvePoint2": (0.05, 0.0, 1.2), + "CurvePoint3": (0.2, 0.0, 1.0), + }, + "FloorLamp2": { + "StandRadius": 0.01, + "StandHeight": 0.3, + "BaseRadius": 0.1, + "BaseHeight": 0.02, + "ShadeHeight": 0.2, + "HeadTopRadius": 0.1, + "HeadBotRadius": 0.11, + "ReverseLamp": True, + "RackThickness": 0.002, + "CurvePoint1": (0.0, 0.0, 1.0), + "CurvePoint2": (0.0, 0.0, 1.1), + "CurvePoint3": (0.0, 0.0, 1.2), + }, + } + with FixedSeed(factory_seed): + self.params = self.sample_parameters(dimensions) + self.material_params, self.scratch, self.edge_wear = ( + self.get_material_params() + ) + + self.params.update(self.material_params) + + def get_material_params(self): + material_assignments = AssetList["LampFactory"]() + black_material = material_assignments["black_material"].assign_material() + white_material = material_assignments["metal"].assign_material() + lampshade_material = material_assignments["lampshade"].assign_material() + + wrapped_params = { + "BlackMaterial": surface.shaderfunc_to_material(black_material), + "MetalMaterial": surface.shaderfunc_to_material(white_material), + "LampshadeMaterial": surface.shaderfunc_to_material(lampshade_material), + } + scratch_prob, edge_wear_prob = material_assignments["wear_tear_prob"] + scratch, edge_wear = material_assignments["wear_tear"] + + is_scratch = np.random.uniform() < scratch_prob + is_edge_wear = np.random.uniform() < edge_wear_prob + if not is_scratch: + scratch = None + + if not is_edge_wear: + edge_wear = None + + return wrapped_params, scratch, edge_wear + + def sample_parameters(self, dimensions, use_default=False): + if use_default: + if self.lamp_type == "DeskLamp": + return self.lamp_default_params["DeskLamp"] + else: + return random.choice( + [ + self.lamp_default_params["FloorLamp1"], + self.lamp_default_params["FloorLamp2"], + ] + ) + else: + 
stand_radius = U(0.005, 0.015) + base_radius = U(0.05, 0.15) + base_height = U(0.01, 0.03) + shade_height = U(0.18, 0.3) + head_top_radius = U(0.07, 0.15) + head_bot_radius = head_top_radius + U(0, 0.05) + rack_thickness = U(0.001, 0.003) + reverse_lamp = True + + if self.lamp_type == "DeskLamp": + height = U(0.25, 0.4) + else: + height = U(1, 1.5) + + z1 = U(base_height, height) + z2 = U(z1, height) + z3 = height + + x1, x2, x3 = 0, 0, 0 + # if self.lamp_type == "FloorLamp" and U() < 0.5: + # x2 = U(0.03, 0.1) + # x3 = U(0.2, 0.4) + # z2, z3 = z3, z2 + # reverse_lamp = False + + params = { + "StandRadius": stand_radius, + "BaseRadius": base_radius, + "BaseHeight": base_height, + "ShadeHeight": shade_height, + "HeadTopRadius": head_top_radius, + "HeadBotRadius": head_bot_radius, + "ReverseLamp": reverse_lamp, + "RackThickness": rack_thickness, + "CurvePoint1": (x1, 0.0, z1), + "CurvePoint2": (x2, 0.0, z2), + "CurvePoint3": (x3, 0.0, z3), + } + return params + + def create_asset(self, i, **params): + obj = butil.spawn_cube() + butil.modify_mesh( + obj, + "NODES", + node_group=nodegroup_lamp_geometry(), + ng_inputs=self.params, + apply=True, + ) + + if np.random.uniform() < 0.6: + bulb = self.bulb_fac(i) + butil.parent_to(bulb, obj, no_inverse=True, no_transform=True) + bulb.location.z = obj.bound_box[-2][2] - self.params["ShadeHeight"] * 0.5 + + with butil.SelectObjects(obj): + bpy.ops.object.shade_flat() + + return obj + + def finalize_assets(self, assets): + if self.scratch: + self.scratch.apply(assets) + if self.edge_wear: + self.edge_wear.apply(assets) + + +class DeskLampFactory(LampFactory): + def __init__(self, factory_seed, coarse=False): + super().__init__(factory_seed, coarse=coarse, lamp_type="DeskLamp") + + +class FloorLampFactory(LampFactory): + def __init__(self, factory_seed, coarse=False): + super().__init__( + factory_seed, + coarse, + lamp_type=np.random.choice(["FloorLamp1", "FloorLamp2"]), + ) + + +@node_utils.to_nodegroup("nodegroup_bulb", singleton=False, type="GeometryNodeTree") +def nodegroup_bulb(nw: NodeWrangler): + # Code generated using version 2.6.5 of the node_transpiler + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketMaterial", "LampshadeMaterial", None), + ("NodeSocketMaterial", "MetalMaterial", None), + ], + ) + + curve_line_1 = nw.new_node( + Nodes.CurveLine, + input_kwargs={ + "Start": (0.0000, 0.0000, -0.2000), + "End": (0.0000, 0.0000, 0.0000), + }, + ) + + curve_circle_1 = nw.new_node( + Nodes.CurveCircle, input_kwargs={"Radius": 0.1500, "Resolution": 100} + ) + + curve_to_mesh_1 = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": curve_line_1, + "Profile Curve": curve_circle_1.outputs["Curve"], + "Fill Caps": True, + }, + ) + + spiral = nw.new_node( + "GeometryNodeCurveSpiral", + input_kwargs={ + "Rotations": 5.0000, + "Start Radius": 0.1500, + "End Radius": 0.1500, + "Height": 0.2000, + }, + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": spiral, "Translation": (0.0000, 0.0000, -0.2000)}, + ) + + curve_circle_2 = nw.new_node( + Nodes.CurveCircle, input_kwargs={"Radius": 0.0150, "Resolution": 100} + ) + + curve_to_mesh_2 = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": transform, + "Profile Curve": curve_circle_2.outputs["Curve"], + "Fill Caps": True, + }, + ) + + curve_line_2 = nw.new_node( + Nodes.CurveLine, + input_kwargs={ + "Start": (0.0000, 0.0000, -0.2000), + "End": (0.0000, 0.0000, -0.3000), + }, + ) + + resample_curve_1 = nw.new_node( + Nodes.ResampleCurve, 
input_kwargs={"Curve": curve_line_2, "Count": 100} + ) + + spline_parameter_1 = nw.new_node(Nodes.SplineParameter) + + float_curve_1 = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": spline_parameter_1.outputs["Factor"]} + ) + node_utils.assign_curve( + float_curve_1.mapping.curves[0], + [(0.0000, 1.0000), (0.4432, 0.5500), (1.0000, 0.2750)], + handles=["AUTO", "VECTOR", "AUTO"], + ) + + set_curve_radius_1 = nw.new_node( + Nodes.SetCurveRadius, + input_kwargs={"Curve": resample_curve_1, "Radius": float_curve_1}, + ) + + curve_circle_3 = nw.new_node( + Nodes.CurveCircle, input_kwargs={"Radius": 0.1500, "Resolution": 100} + ) + + curve_to_mesh_3 = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": set_curve_radius_1, + "Profile Curve": curve_circle_3.outputs["Curve"], + "Fill Caps": True, + }, + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [curve_to_mesh_1, curve_to_mesh_2, curve_to_mesh_3]}, + ) + + set_material = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": join_geometry_1, + "Material": group_input.outputs["MetalMaterial"], + }, + ) + + curve_line = nw.new_node(Nodes.CurveLine) + + resample_curve = nw.new_node( + Nodes.ResampleCurve, input_kwargs={"Curve": curve_line, "Count": 100} + ) + + spline_parameter = nw.new_node(Nodes.SplineParameter) + + float_curve = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": spline_parameter.outputs["Factor"]} + ) + node_utils.assign_curve( + float_curve.mapping.curves[0], + [ + (0.0000, 0.1500), + (0.0500, 0.1700), + (0.1500, 0.2000), + (0.5500, 0.3800), + (0.8000, 0.3500), + (0.9568, 0.2200), + (1.0000, 0.0000), + ], + ) + + set_curve_radius = nw.new_node( + Nodes.SetCurveRadius, + input_kwargs={"Curve": resample_curve, "Radius": float_curve}, + ) + + curve_circle = nw.new_node(Nodes.CurveCircle, input_kwargs={"Resolution": 100}) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": set_curve_radius, + "Profile Curve": curve_circle.outputs["Curve"], + }, + ) + + set_material_1 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": curve_to_mesh, + "Material": group_input.outputs["LampshadeMaterial"], + }, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [set_material, set_material_1]} + ) + + transform_geometry = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": join_geometry, + "Translation": (0.0000, 0.0000, 0.3000), + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": transform_geometry}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_bulb_rack", singleton=False, type="GeometryNodeTree" +) +def nodegroup_bulb_rack(nw: NodeWrangler): + # Code generated using version 2.6.5 of the node_transpiler + + amount = nw.new_node( + Nodes.GroupInput, + label="amount", + expose_input=[ + ("NodeSocketFloatDistance", "Thickness", 0.0200), + ("NodeSocketInt", "Amount", 3), + ("NodeSocketFloatDistance", "InnerRadius", 1.0000), + ("NodeSocketFloatDistance", "OuterRadius", 1.0000), + ("NodeSocketFloat", "InnerHeight", 0.0000), + ("NodeSocketFloat", "OuterHeight", 0.0000), + ], + ) + + curve_circle_2 = nw.new_node( + Nodes.CurveCircle, + input_kwargs={"Radius": amount.outputs["OuterRadius"], "Resolution": 100}, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"Z": amount.outputs["OuterHeight"]} + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": 
curve_circle_2.outputs["Curve"], + "Translation": combine_xyz, + }, + ) + + curve_line = nw.new_node( + Nodes.CurveLine, + input_kwargs={ + "Start": (-1.0000, 0.0000, 0.0000), + "End": (1.0000, 0.0000, 0.0000), + }, + ) + + geometry_to_instance = nw.new_node( + "GeometryNodeGeometryToInstance", input_kwargs={"Geometry": curve_line} + ) + + reroute = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": amount.outputs["Amount"]} + ) + + duplicate_elements = nw.new_node( + Nodes.DuplicateElements, + input_kwargs={"Geometry": geometry_to_instance, "Amount": reroute}, + attrs={"domain": "INSTANCE"}, + ) + + realize_instances = nw.new_node( + Nodes.RealizeInstances, + input_kwargs={"Geometry": duplicate_elements.outputs["Geometry"]}, + ) + + endpoint_selection = nw.new_node( + Nodes.EndpointSelection, input_kwargs={"Start Size": 0} + ) + + divide = nw.new_node( + Nodes.Math, input_kwargs={0: 1.0000, 1: reroute}, attrs={"operation": "DIVIDE"} + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: duplicate_elements.outputs["Duplicate Index"], 1: divide}, + attrs={"operation": "MULTIPLY"}, + ) + + sample_curve = nw.new_node( + Nodes.SampleCurve, + input_kwargs={"Curves": transform, "Factor": multiply}, + attrs={"use_all_curves": True}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": realize_instances, + "Selection": endpoint_selection, + "Position": sample_curve.outputs["Position"], + }, + ) + + endpoint_selection_1 = nw.new_node( + Nodes.EndpointSelection, input_kwargs={"End Size": 0} + ) + + multiply_add = nw.new_node( + Nodes.Math, + input_kwargs={0: amount.outputs["Thickness"], 2: amount.outputs["InnerRadius"]}, + attrs={"operation": "MULTIPLY_ADD"}, + ) + + curve_circle = nw.new_node( + Nodes.CurveCircle, input_kwargs={"Radius": multiply_add, "Resolution": 100} + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"Z": amount.outputs["InnerHeight"]} + ) + + transform_1 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": curve_circle.outputs["Curve"], + "Translation": combine_xyz_1, + }, + ) + + sample_curve_1 = nw.new_node( + Nodes.SampleCurve, + input_kwargs={"Curves": transform_1, "Factor": multiply}, + attrs={"use_all_curves": True}, + ) + + set_position_1 = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": set_position, + "Selection": endpoint_selection_1, + "Position": sample_curve_1.outputs["Position"], + }, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [transform, set_position_1, transform_1]}, + ) + + curve_circle_1 = nw.new_node( + Nodes.CurveCircle, + input_kwargs={"Radius": amount.outputs["Thickness"], "Resolution": 100}, + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": join_geometry, + "Profile Curve": curve_circle_1.outputs["Curve"], + "Fill Caps": True, + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": curve_to_mesh}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_reversiable_bulb", singleton=False, type="GeometryNodeTree" +) +def nodegroup_reversiable_bulb(nw: NodeWrangler): + # Code generated using version 2.6.5 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "Scale", 0.3000), + ("NodeSocketBool", "Reverse", False), + ("NodeSocketMaterial", "BlackMaterial", None), + ("NodeSocketMaterial", "LampshadeMaterial", None), + ("NodeSocketMaterial", "MetalMaterial", None), + 
], + ) + + bulb = nw.new_node( + nodegroup_bulb().name, + input_kwargs={ + "LampshadeMaterial": group_input.outputs["LampshadeMaterial"], + "MetalMaterial": group_input.outputs["MetalMaterial"], + }, + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": group_input.outputs["Scale"], + "Y": group_input.outputs["Scale"], + "Z": group_input.outputs["Scale"], + }, + ) + + transform = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": bulb, "Scale": combine_xyz_1} + ) + + geometry_to_instance = nw.new_node( + "GeometryNodeGeometryToInstance", input_kwargs={"Geometry": transform} + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Reverse"], 1: 3.1415}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Y": multiply}) + + rotate_instances = nw.new_node( + Nodes.RotateInstances, + input_kwargs={"Instances": geometry_to_instance, "Rotation": combine_xyz_2}, + ) + + multiply_add = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Reverse"], 1: 2.0000, 2: -1.0000}, + attrs={"operation": "MULTIPLY_ADD"}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: -0.0150, 1: multiply_add}, + attrs={"operation": "MULTIPLY"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": rotate_instances, "RackSupport": multiply_1}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_lamp_head", singleton=False, type="GeometryNodeTree" +) +def nodegroup_lamp_head(nw: NodeWrangler): + # Code generated using version 2.6.5 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "ShadeHeight", 0.0000), + ("NodeSocketFloat", "TopRadius", 0.3000), + ("NodeSocketFloat", "BotRadius", 0.5000), + ("NodeSocketBool", "ReverseBulb", True), + ("NodeSocketFloatDistance", "RackThickness", 0.0050), + ("NodeSocketFloat", "RackHeight", 0.5000), + ("NodeSocketMaterial", "BlackMaterial", None), + ("NodeSocketMaterial", "LampshadeMaterial", None), + ("NodeSocketMaterial", "MetalMaterial", None), + ], + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["TopRadius"], 1: 0.8000}, + attrs={"operation": "MULTIPLY"}, + ) + + reversiable_bulb = nw.new_node( + nodegroup_reversiable_bulb().name, + input_kwargs={ + "Scale": multiply, + "BlackMaterial": group_input.outputs["BlackMaterial"], + "LampshadeMaterial": group_input.outputs["LampshadeMaterial"], + "MetalMaterial": group_input.outputs["MetalMaterial"], + }, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply, 1: 0.1500}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_add = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["ReverseBulb"], 1: 2.0000, 2: -1.0000}, + attrs={"operation": "MULTIPLY_ADD"}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["RackHeight"], 1: multiply_add}, + attrs={"operation": "MULTIPLY"}, + ) + + bulb_rack = nw.new_node( + nodegroup_bulb_rack().name, + input_kwargs={ + "Thickness": group_input.outputs["RackThickness"], + "InnerRadius": multiply_1, + "OuterRadius": group_input.outputs["TopRadius"], + "InnerHeight": reversiable_bulb.outputs["RackSupport"], + "OuterHeight": multiply_2, + }, + ) + + set_material = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": bulb_rack, + "Material": group_input.outputs["BlackMaterial"], + }, + ) + + combine_xyz_1 = 
nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply_2}) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["ShadeHeight"], + 1: group_input.outputs["RackHeight"], + }, + attrs={"operation": "SUBTRACT"}, + ) + + multiply_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_add, 1: -1.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_4 = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract, 1: multiply_3}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply_4}) + + curve_line = nw.new_node( + Nodes.CurveLine, input_kwargs={"Start": combine_xyz_1, "End": combine_xyz} + ) + + spline_parameter = nw.new_node(Nodes.SplineParameter) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": spline_parameter.outputs["Factor"], + 3: group_input.outputs["TopRadius"], + 4: group_input.outputs["BotRadius"], + }, + ) + + set_curve_radius = nw.new_node( + Nodes.SetCurveRadius, + input_kwargs={"Curve": curve_line, "Radius": map_range.outputs["Result"]}, + ) + + curve_circle = nw.new_node(Nodes.CurveCircle, input_kwargs={"Resolution": 100}) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": set_curve_radius, + "Profile Curve": curve_circle.outputs["Curve"], + }, + ) + + flip_faces = nw.new_node(Nodes.FlipFaces, input_kwargs={"Mesh": curve_to_mesh}) + + extrude_mesh = nw.new_node( + Nodes.ExtrudeMesh, + input_kwargs={ + "Mesh": curve_to_mesh, + "Offset Scale": 0.0050, + "Individual": False, + }, + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [flip_faces, extrude_mesh.outputs["Mesh"]]}, + ) + + set_material_1 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": join_geometry_1, + "Material": group_input.outputs["LampshadeMaterial"], + }, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={ + "Geometry": [ + reversiable_bulb.outputs["Geometry"], + set_material, + set_material_1, + ] + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": join_geometry}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_lamp_geometry", singleton=False, type="GeometryNodeTree" +) +def nodegroup_lamp_geometry(nw: NodeWrangler): + # Code generated using version 2.6.5 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloatDistance", "StandRadius", 0.0200), + ("NodeSocketFloatDistance", "BaseRadius", 0.1000), + ("NodeSocketFloat", "BaseHeight", 0.0200), + ("NodeSocketFloat", "ShadeHeight", 0.0000), + ("NodeSocketFloat", "HeadTopRadius", 0.3000), + ("NodeSocketFloat", "HeadBotRadius", 0.5000), + ("NodeSocketBool", "ReverseLamp", True), + ("NodeSocketFloatDistance", "RackThickness", 0.0050), + ("NodeSocketVectorTranslation", "CurvePoint1", (0.0000, 0.0000, 0.0000)), + ("NodeSocketVectorTranslation", "CurvePoint2", (0.0000, 0.0000, 0.0000)), + ("NodeSocketVectorTranslation", "CurvePoint3", (0.0000, 0.0000, 0.0000)), + ("NodeSocketMaterial", "BlackMaterial", None), + ("NodeSocketMaterial", "LampshadeMaterial", None), + ("NodeSocketMaterial", "MetalMaterial", None), + ], + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"Z": group_input.outputs["BaseHeight"]} + ) + + curve_line_1 = nw.new_node(Nodes.CurveLine, input_kwargs={"End": combine_xyz_1}) + + curve_circle_1 = nw.new_node( + Nodes.CurveCircle, + input_kwargs={"Radius": 
group_input.outputs["BaseRadius"], "Resolution": 100}, + ) + + curve_to_mesh_1 = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": curve_line_1, + "Profile Curve": curve_circle_1.outputs["Curve"], + "Fill Caps": True, + }, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"Z": group_input.outputs["BaseHeight"]} + ) + + bezier_segment = nw.new_node( + Nodes.CurveBezierSegment, + input_kwargs={ + "Start": combine_xyz, + "Start Handle": group_input.outputs["CurvePoint1"], + "End Handle": group_input.outputs["CurvePoint2"], + "End": group_input.outputs["CurvePoint3"], + "Resolution": 100, + }, + ) + + curve_line = nw.new_node(Nodes.CurveLine, input_kwargs={"End": combine_xyz}) + + join_geometry_2 = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [bezier_segment, curve_line]} + ) + + curve_circle = nw.new_node( + Nodes.CurveCircle, + input_kwargs={"Radius": group_input.outputs["StandRadius"], "Resolution": 100}, + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": join_geometry_2, + "Profile Curve": curve_circle.outputs["Curve"], + "Fill Caps": True, + }, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [curve_to_mesh_1, curve_to_mesh]} + ) + + set_material = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": join_geometry, + "Material": group_input.outputs["BlackMaterial"], + }, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["ShadeHeight"], 1: 0.4000}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["ShadeHeight"], 1: 0.2000}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_add = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: multiply, + 1: group_input.outputs["ReverseLamp"], + 2: multiply_1, + }, + attrs={"operation": "MULTIPLY_ADD"}, + ) + + lamp_head = nw.new_node( + nodegroup_lamp_head().name, + input_kwargs={ + "ShadeHeight": group_input.outputs["ShadeHeight"], + "TopRadius": group_input.outputs["HeadTopRadius"], + "BotRadius": group_input.outputs["HeadBotRadius"], + "ReverseBulb": group_input.outputs["ReverseLamp"], + "RackThickness": group_input.outputs["RackThickness"], + "RackHeight": multiply_add, + "BlackMaterial": group_input.outputs["BlackMaterial"], + "LampshadeMaterial": group_input.outputs["LampshadeMaterial"], + "MetalMaterial": group_input.outputs["MetalMaterial"], + }, + ) + + sample_curve = nw.new_node( + Nodes.SampleCurve, + input_kwargs={"Curves": bezier_segment, "Factor": 1.0000}, + attrs={"use_all_curves": True}, + ) + + align_euler_to_vector = nw.new_node( + Nodes.AlignEulerToVector, + input_kwargs={"Vector": sample_curve.outputs["Tangent"]}, + attrs={"axis": "Z"}, + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": lamp_head, + "Translation": sample_curve.outputs["Position"], + "Rotation": align_euler_to_vector, + }, + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [set_material, transform]} + ) + + bounding_box = nw.new_node( + Nodes.BoundingBox, input_kwargs={"Geometry": join_geometry_1} + ) + + curve_line_2 = nw.new_node( + Nodes.CurveLine, input_kwargs={"End": (0.0000, 0.0000, 0.1000)} + ) + + transform_geometry = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": curve_line_2, + "Translation": sample_curve.outputs["Position"], + "Rotation": align_euler_to_vector, + }, + ) + + sample_curve_1 = nw.new_node( + Nodes.SampleCurve, 
input_kwargs={"Curves": transform_geometry, "Factor": 1.0000} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": join_geometry_1, + "Bounding Box": bounding_box.outputs["Bounding Box"], + "LightPosition": sample_curve_1.outputs["Position"], + }, + attrs={"is_active_output": True}, + ) diff --git a/infinigen/assets/leaves/__init__.py b/infinigen/assets/objects/leaves/__init__.py similarity index 83% rename from infinigen/assets/leaves/__init__.py rename to infinigen/assets/objects/leaves/__init__.py index 64eb41321..875fdb4a8 100644 --- a/infinigen/assets/leaves/__init__.py +++ b/infinigen/assets/objects/leaves/__init__.py @@ -4,4 +4,4 @@ from .leaf_maple import LeafFactoryMaple from .leaf_pine import LeafFactoryPine from .leaf_v2 import LeafFactoryV2 -from .leaf_wrapped import LeafFactoryWrapped \ No newline at end of file +from .leaf_wrapped import LeafFactoryWrapped diff --git a/infinigen/assets/leaves/leaf.py b/infinigen/assets/objects/leaves/leaf.py similarity index 78% rename from infinigen/assets/leaves/leaf.py rename to infinigen/assets/objects/leaves/leaf.py index 0caaedd47..a9bd31249 100644 --- a/infinigen/assets/leaves/leaf.py +++ b/infinigen/assets/objects/leaves/leaf.py @@ -4,27 +4,21 @@ # Authors: Alejandro Newell, Yiming Zuo, Alexander Raistrick -import pdb - -import numpy as np - import bpy -from mathutils import Vector, Matrix - -from infinigen.assets.trees.utils import helper, mesh, materials +import numpy as np +from infinigen.assets.objects.trees.utils import mesh from infinigen.core.placement.factory import AssetFactory from infinigen.core.util import blender as butil -from infinigen.core.tagging import tag_object, tag_nodegroup C = bpy.context D = bpy.data + class LeafFactory(AssetFactory): - scale = 0.3 - def __init__(self, factory_seed, genome: dict=None, coarse=False): + def __init__(self, factory_seed, genome: dict = None, coarse=False): super(LeafFactory, self).__init__(factory_seed, coarse=coarse) self.genome = dict( leaf_width=0.5, @@ -33,7 +27,7 @@ def __init__(self, factory_seed, genome: dict=None, coarse=False): x_offset=0, flip_leaf=False, z_scaling=0, - width_rand=0.33 + width_rand=0.33, ) if genome: for k, g in genome.items(): @@ -41,15 +35,15 @@ def __init__(self, factory_seed, genome: dict=None, coarse=False): self.genome[k] = g def create_asset(self, **params) -> bpy.types.Object: - # bpy.ops.object.mode_set(mode = 'OBJECT') - bpy.ops.mesh.primitive_circle_add(enter_editmode=False, align='WORLD', - location=(0, 0, 0), scale=(1, 1, 1)) + bpy.ops.mesh.primitive_circle_add( + enter_editmode=False, align="WORLD", location=(0, 0, 0), scale=(1, 1, 1) + ) bpy.ops.object.editmode_toggle() bpy.ops.mesh.edge_face_add() obj = bpy.context.active_object - min_radius = .02 + min_radius = 0.02 radii_ref = [1] n = len(obj.data.vertices) // 2 @@ -58,23 +52,33 @@ def create_asset(self, **params) -> bpy.types.Object: bpy.ops.mesh.subdivide() a = np.linspace(0, np.pi, n) - if self.genome['flip_leaf']: + if self.genome["flip_leaf"]: a = a[::-1] - x = np.sin(a) * (self.genome['leaf_width'] + np.random.randn() * self.genome['width_rand']) + self.genome['x_offset'] - y = -np.cos(.9 * (a - self.genome['alpha'])) - z = x ** 2 * self.genome['z_scaling'] - - full_coords = np.concatenate([np.stack([x, y, z], 1), - np.stack([-x[::-1], y[::-1], z], 1), - np.array([[0, y[0], 0]])]).flatten() - bpy.ops.object.mode_set(mode='OBJECT') - obj.data.vertices.foreach_set('co', full_coords) - - if self.genome['use_wave']: - 
bpy.ops.object.modifier_add(type='WAVE') - bpy.context.object.modifiers["Wave"].height = np.random.randn() * .3 - bpy.context.object.modifiers["Wave"].width = 0.75 + \ - np.random.randn() * .1 + x = ( + np.sin(a) + * ( + self.genome["leaf_width"] + + np.random.randn() * self.genome["width_rand"] + ) + + self.genome["x_offset"] + ) + y = -np.cos(0.9 * (a - self.genome["alpha"])) + z = x**2 * self.genome["z_scaling"] + + full_coords = np.concatenate( + [ + np.stack([x, y, z], 1), + np.stack([-x[::-1], y[::-1], z], 1), + np.array([[0, y[0], 0]]), + ] + ).flatten() + bpy.ops.object.mode_set(mode="OBJECT") + obj.data.vertices.foreach_set("co", full_coords) + + if self.genome["use_wave"]: + bpy.ops.object.modifier_add(type="WAVE") + bpy.context.object.modifiers["Wave"].height = np.random.randn() * 0.3 + bpy.context.object.modifiers["Wave"].width = 0.75 + np.random.randn() * 0.1 bpy.context.object.modifiers["Wave"].speed = np.random.rand() mesh.finalize_obj(obj) @@ -88,7 +92,8 @@ def create_asset(self, **params) -> bpy.types.Object: return obj -''' + +""" class BerryFactory(AssetFactory): def __init__(self, factory_seed, genome, coarse=False): @@ -117,9 +122,9 @@ def create_asset(self, **params) -> bpy.types.Object: tag_object(obj, 'leaf') return obj -''' +""" -''' +""" def init_berries(n_leaves=5, im_mat=None, **leaf_kargs): # Initializing leaves leaves = [create_leaf(**leaf_kargs) for _ in range(n_leaves)] @@ -154,4 +159,4 @@ def init_berries(n_leaves=5, im_mat=None, **leaf_kargs): c_name = helper.create_collection('Leaves', leaves) return leaves, c_name -''' +""" diff --git a/infinigen/assets/objects/leaves/leaf_broadleaf.py b/infinigen/assets/objects/leaves/leaf_broadleaf.py new file mode 100644 index 000000000..7a1fc9153 --- /dev/null +++ b/infinigen/assets/objects/leaves/leaf_broadleaf.py @@ -0,0 +1,1315 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Yiming Zuo +# Acknowledgment: This file draws inspiration from https://www.youtube.com/watch?v=pfOKB1GKJHM by Dr. 
Blender + +import bpy +import numpy as np +from numpy.random import normal, uniform + +from infinigen.assets.objects.leaves.leaf_maple import nodegroup_leaf_shader +from infinigen.assets.objects.leaves.leaf_v2 import ( + nodegroup_apply_wave, + nodegroup_move_to_origin, +) +from infinigen.core import surface +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.tagging import tag_object +from infinigen.core.util import blender as butil +from infinigen.core.util.color import hsv2rgba +from infinigen.core.util.math import FixedSeed + + +@node_utils.to_nodegroup( + "nodegroup_random_mask_vein", singleton=False, type="GeometryNodeTree" +) +def nodegroup_random_mask_vein(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "Coord", 0.0), + ("NodeSocketFloat", "Shape", 0.5), + ("NodeSocketFloat", "Density", 0.5), + ("NodeSocketFloat", "Random Scale Seed", 0.5), + ], + ) + + vein = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={ + "W": group_input.outputs["Coord"], + "Scale": group_input.outputs["Density"], + "Randomness": 0.2, + }, + label="Vein", + attrs={"voronoi_dimensions": "1D"}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["Density"], + 1: group_input.outputs["Random Scale Seed"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + vein_1 = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={"W": group_input.outputs["Coord"], "Scale": multiply}, + label="Vein", + attrs={"voronoi_dimensions": "1D"}, + ) + + add = nw.new_node(Nodes.Math, input_kwargs={0: vein_1.outputs["Distance"], 1: 0.35}) + + round = nw.new_node(Nodes.Math, input_kwargs={0: add}, attrs={"operation": "ROUND"}) + + add_1 = nw.new_node( + Nodes.Math, input_kwargs={0: vein.outputs["Distance"], 1: round} + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": add_1, 2: 0.02, 3: 0.95, 4: 0.0} + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["Shape"], + 1: map_range_1.outputs["Result"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + map_range_2 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": multiply_1, 1: 0.001, 2: 0.005, 3: 1.0, 4: 0.0}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Result": map_range_2.outputs["Result"]} + ) + + +@node_utils.to_nodegroup( + "nodegroup_nodegroup_vein_coord_001", singleton=False, type="GeometryNodeTree" +) +def nodegroup_nodegroup_vein_coord_001(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "X Modulated", 0.5), + ("NodeSocketFloat", "Y", 0.5), + ("NodeSocketFloat", "Vein Asymmetry", 0.0), + ("NodeSocketFloat", "Vein Angle", 2.0), + ("NodeSocketFloat", "Leaf Shape", 0.0), + ], + ) + + sign = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["X Modulated"]}, + attrs={"operation": "SIGN"}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Vein Asymmetry"], 1: sign}, + attrs={"operation": "MULTIPLY"}, + ) + + map_range = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": group_input.outputs["Y"], 1: -1.0} + ) + + vein_shape = nw.new_node( + Nodes.FloatCurve, + input_kwargs={"Value": group_input.outputs["X Modulated"]}, + 
label="Vein Shape", + ) + node_utils.assign_curve( + vein_shape.mapping.curves[0], + [(0.0, 0.0), (0.0182, 0.05), (0.3364, 0.2386), (0.7227, 0.75), (1.0, 1.0)], + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": vein_shape, 4: 1.9} + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: map_range_1.outputs["Result"], + 1: group_input.outputs["Vein Angle"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: map_range.outputs["Result"], 1: multiply_1}, + attrs={"operation": "MULTIPLY"}, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_2, 1: group_input.outputs["Y"]}, + attrs={"operation": "SUBTRACT"}, + ) + + add = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: subtract}) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Vein Coord": add}) + + +@node_utils.to_nodegroup( + "nodegroup_nodegroup_shape_with_jigsaw", singleton=False, type="GeometryNodeTree" +) +def nodegroup_nodegroup_shape_with_jigsaw(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "Midrib Value", 1.0), + ("NodeSocketFloat", "Vein Coord", 0.0), + ("NodeSocketFloat", "Leaf Shape", 0.5), + ("NodeSocketFloat", "Jigsaw Scale", 18.0), + ("NodeSocketFloat", "Jigsaw Depth", 0.5), + ], + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": group_input.outputs["Midrib Value"], 3: 1.0, 4: 0.0}, + ) + + jigsaw = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={ + "W": group_input.outputs["Vein Coord"], + "Scale": group_input.outputs["Jigsaw Scale"], + }, + label="Jigsaw", + attrs={"voronoi_dimensions": "1D"}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Jigsaw Depth"], 1: 0.05}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_add = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: jigsaw.outputs["Distance"], + 1: multiply, + 2: group_input.outputs["Leaf Shape"], + }, + attrs={"operation": "MULTIPLY_ADD", "use_clamp": True}, + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": multiply_add, 1: 0.001, 2: 0.002, 3: 1.0, 4: 0.0}, + ) + + maximum = nw.new_node( + Nodes.Math, + input_kwargs={0: map_range.outputs["Result"], 1: map_range_1.outputs["Result"]}, + attrs={"operation": "MAXIMUM"}, + ) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Value": maximum}) + + +@node_utils.to_nodegroup( + "nodegroup_nodegroup_vein_coord", singleton=False, type="GeometryNodeTree" +) +def nodegroup_nodegroup_vein_coord( + nw: NodeWrangler, vein_curve_control_points, vein_curve_control_handles +): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "X Modulated", 0.5), + ("NodeSocketFloat", "Y", 0.5), + ("NodeSocketFloat", "Vein Asymmetry", 0.0), + ("NodeSocketFloat", "Vein Angle", 2.0), + ("NodeSocketFloat", "Leaf Shape", 0.0), + ], + ) + + sign = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["X Modulated"]}, + attrs={"operation": "SIGN"}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Vein Asymmetry"], 1: sign}, + attrs={"operation": "MULTIPLY"}, + ) + + map_range = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": group_input.outputs["Y"], 1: -1.0} + ) + + absolute = nw.new_node( + Nodes.Math, + input_kwargs={0: 
group_input.outputs["X Modulated"]}, + attrs={"operation": "ABSOLUTE", "use_clamp": True}, + ) + + divide = nw.new_node( + Nodes.Math, + input_kwargs={0: absolute, 1: group_input.outputs["Leaf Shape"]}, + attrs={"operation": "DIVIDE", "use_clamp": True}, + ) + + vein_shape = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": divide}, label="Vein Shape" + ) + node_utils.assign_curve( + vein_shape.mapping.curves[0], + vein_curve_control_points, + vein_curve_control_handles, + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": vein_shape, 4: 1.9} + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: map_range_1.outputs["Result"], + 1: group_input.outputs["Vein Angle"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: map_range.outputs["Result"], 1: multiply_1}, + attrs={"operation": "MULTIPLY"}, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_2, 1: group_input.outputs["Y"]}, + attrs={"operation": "SUBTRACT"}, + ) + + add = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: subtract}) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Vein Coord": add}) + + +@node_utils.to_nodegroup( + "nodegroup_nodegroup_shape", singleton=False, type="GeometryNodeTree" +) +def nodegroup_nodegroup_shape( + nw: NodeWrangler, + shape_curve_control_points=[ + (0.0, 0.0), + (0.15, 0.2), + (0.3864, 0.2625), + (0.6227, 0.2), + (0.7756, 0.1145), + (0.8955, 0.0312), + (1.0, 0.0), + ], +): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "X Modulated", 0.0), + ("NodeSocketFloat", "Y", 0.0), + ], + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": group_input.outputs["X Modulated"], + "Y": group_input.outputs["Y"], + }, + ) + + clamp = nw.new_node( + Nodes.Clamp, + input_kwargs={"Value": group_input.outputs["Y"], "Min": -0.6, "Max": 0.6}, + ) + + combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Y": clamp}) + + subtract = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: combine_xyz, 1: combine_xyz_1}, + attrs={"operation": "SUBTRACT"}, + ) + + length = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: subtract.outputs["Vector"]}, + attrs={"operation": "LENGTH"}, + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": group_input.outputs["Y"], 1: -0.6, 2: 0.6}, + ) + + leaf_shape = nw.new_node( + Nodes.FloatCurve, + input_kwargs={"Value": map_range.outputs["Result"]}, + label="Leaf shape", + ) + node_utils.assign_curve(leaf_shape.mapping.curves[0], shape_curve_control_points) + + subtract_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: length.outputs["Value"], 1: leaf_shape}, + attrs={"operation": "SUBTRACT"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Leaf Shape": subtract_1, "Value": leaf_shape} + ) + + +@node_utils.to_nodegroup( + "nodegroup_nodegroup_midrib", singleton=False, type="GeometryNodeTree" +) +def nodegroup_nodegroup_midrib( + nw: NodeWrangler, + midrib_curve_control_points=[ + (0.0, 0.5), + (0.2455, 0.5078), + (0.5, 0.4938), + (0.75, 0.503), + (0.8773, 0.5125), + (1.0, 0.5), + ], +): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "X", 0.5), + ("NodeSocketFloat", "Y", -0.6), + ("NodeSocketFloat", "Midrib Length", 0.4), + ("NodeSocketFloat", "Midrib Width", 1.0), + 
("NodeSocketFloat", "Stem Length", 0.8), + ], + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": group_input.outputs["Y"], 1: -0.6, 2: 0.6}, + ) + + stem_shape = nw.new_node( + Nodes.FloatCurve, + input_kwargs={"Value": map_range.outputs["Result"]}, + label="Stem shape", + ) + node_utils.assign_curve(stem_shape.mapping.curves[0], midrib_curve_control_points) + + map_range_1 = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": stem_shape, 3: -1.0} + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: map_range_1.outputs["Result"], 1: group_input.outputs["X"]}, + attrs={"operation": "SUBTRACT"}, + ) + + noise_texture = nw.new_node(Nodes.NoiseTexture, input_kwargs={"Scale": 20.0}) + + map_range_5 = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": noise_texture.outputs["Fac"], 3: -1.0} + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: map_range_5.outputs["Result"], 1: 0.01}, + attrs={"operation": "MULTIPLY"}, + ) + + map_range_2 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": group_input.outputs["Y"], + 1: -70.0, + 2: group_input.outputs["Midrib Length"], + 3: group_input.outputs["Midrib Width"], + 4: 0.0, + }, + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: multiply, 1: map_range_2.outputs["Result"]} + ) + + absolute = nw.new_node( + Nodes.Math, input_kwargs={0: subtract}, attrs={"operation": "ABSOLUTE"} + ) + + subtract_1 = nw.new_node( + Nodes.Math, input_kwargs={0: add, 1: absolute}, attrs={"operation": "SUBTRACT"} + ) + + absolute_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Y"]}, + attrs={"operation": "ABSOLUTE"}, + ) + + map_range_3 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": absolute_1, + 2: group_input.outputs["Stem Length"], + 3: 1.0, + 4: 0.0, + }, + ) + + smooth_min = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract_1, 1: map_range_3.outputs["Result"], 2: 0.06}, + attrs={"operation": "SMOOTH_MIN"}, + ) + + divide = nw.new_node( + Nodes.Math, + input_kwargs={0: 1.0, 1: smooth_min}, + attrs={"operation": "DIVIDE", "use_clamp": True}, + ) + + map_range_4 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": divide, 1: 0.001, 2: 0.03, 3: 1.0, 4: 0.0}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "X Modulated": subtract, + "Midrib Value": map_range_4.outputs["Result"], + }, + ) + + +@node_utils.to_nodegroup( + "nodegroup_nodegroup_apply_vein_midrib", singleton=False, type="GeometryNodeTree" +) +def nodegroup_nodegroup_apply_vein_midrib(nw: NodeWrangler, random_scale_seed=1.08): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "Midrib Value", 0.5), + ("NodeSocketFloat", "Leaf Shape", 1.0), + ("NodeSocketFloat", "Vein Density", 6.0), + ("NodeSocketFloat", "Vein Coord - main", 0.0), + ("NodeSocketFloat", "Vein Coord - 1", 0.0), + ("NodeSocketFloat", "Vein Coord - 2", 0.0), + ], + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": group_input.outputs["Leaf Shape"], + 1: -0.3, + 2: 0.05, + 3: 0.015, + 4: 0.0, + }, + ) + + nodegroup = nw.new_node( + nodegroup_random_mask_vein().name, + input_kwargs={ + "Coord": group_input.outputs["Vein Coord - 2"], + "Shape": map_range.outputs["Result"], + "Density": group_input.outputs["Vein Density"], + "Random Scale Seed": random_scale_seed * 2.7, + }, + ) + + nodegroup_1 = nw.new_node( + nodegroup_random_mask_vein().name, + input_kwargs={ + "Coord": 
group_input.outputs["Vein Coord - 1"], + "Shape": map_range.outputs["Result"], + "Density": group_input.outputs["Vein Density"], + "Random Scale Seed": random_scale_seed, + }, + ) + + vein = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={ + "W": group_input.outputs["Vein Coord - main"], + "Scale": group_input.outputs["Vein Density"], + "Randomness": 0.2, + }, + label="Vein", + attrs={"voronoi_dimensions": "1D"}, + ) + + position = nw.new_node(Nodes.InputPosition) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, input_kwargs={"Vector": position, "Scale": 20.0} + ) + + map_range_3 = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": noise_texture.outputs["Fac"], 3: -1.0} + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: map_range_3.outputs["Result"], 1: 0.02}, + attrs={"operation": "MULTIPLY"}, + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: vein.outputs["Distance"], 1: multiply} + ) + + map_range_4 = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": add, 2: 0.03, 3: 1.0, 4: 0.0} + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: map_range.outputs["Result"], 1: map_range_4.outputs["Result"]}, + attrs={"operation": "MULTIPLY"}, + ) + + map_range_5 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": multiply_1, 1: 0.001, 2: 0.01, 3: 1.0, 4: 0.0}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: nodegroup_1, 1: map_range_5.outputs["Result"]}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: nodegroup, 1: multiply_2}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_4 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Midrib Value"], 1: multiply_3}, + attrs={"operation": "MULTIPLY"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Vein Value": multiply_4} + ) + + +@node_utils.to_nodegroup( + "nodegroup_nodegroup_sub_vein", singleton=False, type="GeometryNodeTree" +) +def nodegroup_nodegroup_sub_vein(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[("NodeSocketFloat", "X", 0.5), ("NodeSocketFloat", "Y", 0.0)], + ) + + absolute = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["X"]}, + attrs={"operation": "ABSOLUTE"}, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": absolute, "Y": group_input.outputs["Y"]} + ) + + voronoi_texture = nw.new_node( + Nodes.VoronoiTexture, input_kwargs={"Vector": combine_xyz, "Scale": 30.0} + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": voronoi_texture.outputs["Distance"], 2: 0.1, 4: 2.0}, + attrs={"clamp": False}, + ) + + voronoi_texture_1 = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={"Vector": combine_xyz, "Scale": 150.0}, + attrs={"feature": "DISTANCE_TO_EDGE"}, + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": voronoi_texture_1.outputs["Distance"], 2: 0.1}, + ) + + add = nw.new_node( + Nodes.Math, + input_kwargs={0: map_range.outputs["Result"], 1: map_range_1.outputs["Result"]}, + ) + + multiply = nw.new_node( + Nodes.Math, input_kwargs={0: add, 1: -1.0}, attrs={"operation": "MULTIPLY"} + ) + + map_range_3 = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": map_range_1.outputs["Result"], 4: -1.0} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Value": multiply, "Color Value": map_range_3.outputs["Result"]}, + ) + + +@node_utils.to_nodegroup( + 
"nodegroup_nodegroup_leaf_gen", singleton=False, type="GeometryNodeTree" +) +def nodegroup_nodegroup_leaf_gen(nw: NodeWrangler, **kwargs): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Mesh", None), + ("NodeSocketFloat", "Displancement scale", 0.5), + ("NodeSocketFloat", "Vein Asymmetry", 0.0), + ("NodeSocketFloat", "Vein Density", 6.0), + ("NodeSocketFloat", "Jigsaw Scale", 18.0), + ("NodeSocketFloat", "Jigsaw Depth", 0.07), + ("NodeSocketFloat", "Vein Angle", 1.0), + ("NodeSocketFloat", "Sub-vein Displacement", 0.5), + ("NodeSocketFloat", "Sub-vein Scale", 50.0), + ("NodeSocketFloat", "Wave Displacement", 0.1), + ("NodeSocketFloat", "Midrib Length", 0.4), + ("NodeSocketFloat", "Midrib Width", 1.0), + ("NodeSocketFloat", "Stem Length", 0.8), + ], + ) + + position = nw.new_node(Nodes.InputPosition) + + separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": position}) + + nodegroup_midrib = nw.new_node( + nodegroup_nodegroup_midrib( + midrib_curve_control_points=kwargs["midrib_curve_control_points"] + ).name, + input_kwargs={ + "X": separate_xyz.outputs["X"], + "Y": separate_xyz.outputs["Y"], + "Midrib Length": group_input.outputs["Midrib Length"], + "Midrib Width": group_input.outputs["Midrib Width"], + "Stem Length": group_input.outputs["Stem Length"], + }, + ) + + nodegroup_shape = nw.new_node( + nodegroup_nodegroup_shape( + shape_curve_control_points=kwargs["shape_curve_control_points"] + ).name, + input_kwargs={ + "X Modulated": nodegroup_midrib.outputs["X Modulated"], + "Y": separate_xyz.outputs["Y"], + }, + ) + + nodegroup_vein_coord = nw.new_node( + nodegroup_nodegroup_vein_coord( + vein_curve_control_points=[ + (0.0, 0.0), + (0.0182, 0.05), + (0.3364, 0.2386), + (0.6045, 0.4812), + (0.7, 0.725), + (0.8273, 0.8437), + (1.0, 1.0), + ], + vein_curve_control_handles=[ + "AUTO", + "AUTO", + "AUTO", + "VECTOR", + "AUTO", + "AUTO", + "AUTO", + ], + ).name, + input_kwargs={ + "X Modulated": nodegroup_midrib.outputs["X Modulated"], + "Y": separate_xyz.outputs["Y"], + "Vein Asymmetry": group_input.outputs["Vein Asymmetry"], + "Vein Angle": group_input.outputs["Vein Angle"], + "Leaf Shape": nodegroup_shape.outputs["Value"], + }, + ) + + nodegroup_vein_coord_002 = nw.new_node( + nodegroup_nodegroup_vein_coord( + vein_curve_control_points=[ + (0.0, 0.0), + (0.0182, 0.05), + (0.3364, 0.2386), + (0.8091, 0.7312), + (1.0, 0.9937), + ], + vein_curve_control_handles=["AUTO", "AUTO", "AUTO", "AUTO", "AUTO"], + ).name, + input_kwargs={ + "X Modulated": nodegroup_midrib.outputs["X Modulated"], + "Y": separate_xyz.outputs["Y"], + "Vein Asymmetry": group_input.outputs["Vein Asymmetry"], + "Vein Angle": group_input.outputs["Vein Angle"], + "Leaf Shape": nodegroup_shape.outputs["Value"], + }, + ) + + nodegroup_vein_coord_003 = nw.new_node( + nodegroup_nodegroup_vein_coord( + vein_curve_control_points=[ + (0.0, 0.0), + (0.0182, 0.05), + (0.2909, 0.2199), + (0.4182, 0.3063), + (0.7045, 0.3), + (1.0, 0.8562), + ], + vein_curve_control_handles=[ + "AUTO", + "AUTO", + "AUTO", + "VECTOR", + "AUTO", + "AUTO", + ], + ).name, + input_kwargs={ + "X Modulated": nodegroup_midrib.outputs["X Modulated"], + "Y": separate_xyz.outputs["Y"], + "Vein Asymmetry": group_input.outputs["Vein Asymmetry"], + "Vein Angle": group_input.outputs["Vein Angle"], + "Leaf Shape": nodegroup_shape.outputs["Value"], + }, + ) + + nodegroup_apply_vein_midrib = nw.new_node( + nodegroup_nodegroup_apply_vein_midrib( + 
random_scale_seed=kwargs["vein_mask_random_seed"] + ).name, + input_kwargs={ + "Midrib Value": nodegroup_midrib.outputs["Midrib Value"], + "Leaf Shape": nodegroup_shape.outputs["Leaf Shape"], + "Vein Density": group_input.outputs["Vein Density"], + "Vein Coord - main": nodegroup_vein_coord_002, + "Vein Coord - 1": nodegroup_vein_coord, + "Vein Coord - 2": nodegroup_vein_coord_003, + }, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["Displancement scale"], + 1: nodegroup_apply_vein_midrib, + }, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply}) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={"Geometry": group_input.outputs["Mesh"], "Offset": combine_xyz}, + ) + + nodegroup_shape_with_jigsaw = nw.new_node( + nodegroup_nodegroup_shape_with_jigsaw().name, + input_kwargs={ + "Midrib Value": nodegroup_midrib.outputs["Midrib Value"], + "Vein Coord": nodegroup_vein_coord_002, + "Leaf Shape": nodegroup_shape.outputs["Leaf Shape"], + "Jigsaw Scale": group_input.outputs["Jigsaw Scale"], + "Jigsaw Depth": group_input.outputs["Jigsaw Depth"], + }, + ) + + less_than = nw.new_node( + Nodes.Compare, + input_kwargs={0: nodegroup_shape_with_jigsaw, 1: 0.5}, + attrs={"operation": "LESS_THAN"}, + ) + + delete_geometry = nw.new_node( + Nodes.DeleteGeom, + input_kwargs={"Geometry": set_position, "Selection": less_than}, + ) + + capture_attribute = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={"Geometry": delete_geometry, 2: nodegroup_apply_vein_midrib}, + ) + + position_1 = nw.new_node(Nodes.InputPosition) + + separate_xyz_1 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": position_1}) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": separate_xyz_1.outputs["Y"], 1: -0.6, 2: 0.6}, + ) + + float_curve_1 = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": map_range_1.outputs["Result"]} + ) + node_utils.assign_curve( + float_curve_1.mapping.curves[0], [(0.0, 0.0), (0.5182, 1.0), (1.0, 1.0)] + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": nodegroup_shape.outputs["Leaf Shape"], 2: -1.0}, + ) + + float_curve = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": map_range.outputs["Result"]} + ) + node_utils.assign_curve( + float_curve.mapping.curves[0], + [(0.0045, 0.0063), (0.0409, 0.0375), (0.4182, 0.05), (1.0, 0.0)], + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: float_curve_1, 1: float_curve}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_1, 1: 0.7}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply_2}) + + set_position_1 = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": capture_attribute.outputs["Geometry"], + "Offset": combine_xyz_1, + }, + ) + + nodegroup_vein_coord_001 = nw.new_node( + nodegroup_nodegroup_vein_coord_001().name, + input_kwargs={ + "X Modulated": nodegroup_midrib.outputs["X Modulated"], + "Y": separate_xyz.outputs["Y"], + "Vein Asymmetry": group_input.outputs["Vein Asymmetry"], + "Vein Angle": group_input.outputs["Vein Angle"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Mesh": set_position_1, + "Attribute": capture_attribute.outputs[2], + "X Modulated": nodegroup_midrib.outputs["X Modulated"], + "Vein Coord": nodegroup_vein_coord_001, + "Vein Value": nodegroup_apply_vein_midrib, + }, + ) 
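+
+# The leaf-gen node group above splits each vertex position into (X, Y), warps
+# X around the procedural midrib, cuts the blade silhouette (with its jigsaw
+# margin) by deleting faces where the shape mask falls below 0.5, and offsets Z
+# by the scaled vein/midrib mask; two float curves then curl the blade along Y.
+# Its "Vein Value" output is exported by geo_leaf_broadleaf as the "vein value"
+# attribute that the shader below uses to blend in the vein color.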
+ + +def shader_material(nw: NodeWrangler, **kwargs): + # Code generated using version 2.4.3 of the node_transpiler + + attribute_1 = nw.new_node(Nodes.Attribute, attrs={"attribute_name": "vein value"}) + + # rgb_3 = nw.new_node(Nodes.RGB) + # rgb_3.outputs[0].default_value = (0.9823, 0.8388, 0.117, 1.0) + + texture_coordinate = nw.new_node(Nodes.TextureCoord) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": texture_coordinate.outputs["Object"], + "Scale": 6.8, + "Detail": 10.0, + "Roughness": 0.7, + }, + ) + + separate_rgb = nw.new_node( + Nodes.SeparateRGB, input_kwargs={"Image": noise_texture.outputs["Color"]} + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": separate_rgb.outputs["G"], + 1: 0.4, + 2: 0.7, + 3: 0.48, + 4: 0.52, + }, + ) + + map_range_2 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": separate_rgb.outputs["B"], + 1: 0.4, + 2: 0.7, + 3: 0.8, + 4: 1.2, + }, + ) + + attribute = nw.new_node(Nodes.Attribute, attrs={"attribute_name": "subvein offset"}) + + map_range = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": attribute.outputs["Color"], 2: -0.94} + ) + + # rgb_1 = nw.new_node(Nodes.RGB) + # rgb_1.outputs[0].default_value = (0.1878, 0.305, 0.0762, 1.0) + + # rgb = nw.new_node(Nodes.RGB) + # rgb.outputs[0].default_value = (0.0762, 0.1441, 0.0529, 1.0) + + hue_saturation_value_1 = nw.new_node( + "ShaderNodeHueSaturation", + input_kwargs={"Value": 2.0, "Color": kwargs["color_base"]}, + ) + + mix = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": map_range.outputs["Result"], + "Color1": hue_saturation_value_1, + "Color2": kwargs["color_base"], + }, + ) + + hue_saturation_value = nw.new_node( + "ShaderNodeHueSaturation", + input_kwargs={ + "Hue": map_range_1.outputs["Result"], + "Value": map_range_2.outputs["Result"], + "Color": mix, + }, + ) + + mix_1 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": attribute_1.outputs["Color"], + "Color1": kwargs["color_vein"], + "Color2": hue_saturation_value, + }, + ) + + leaf_shader = nw.new_node( + nodegroup_leaf_shader().name, input_kwargs={"Color": mix_1} + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": leaf_shader} + ) + + +def geo_leaf_broadleaf(nw: NodeWrangler, **kwargs): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) + + subdivide_mesh = nw.new_node( + Nodes.SubdivideMesh, + input_kwargs={"Mesh": group_input.outputs["Geometry"], "Level": 10}, + ) + + # subdivide_mesh_1 = nw.new_node(Nodes.SubdivideMesh, + # input_kwargs={'Mesh': subdivide_mesh}) + + position = nw.new_node(Nodes.InputPosition) + + capture_attribute = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={"Geometry": subdivide_mesh, 1: position}, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + nodegroup_leaf_gen = nw.new_node( + nodegroup_nodegroup_leaf_gen(**kwargs).name, + input_kwargs={ + "Mesh": capture_attribute.outputs["Geometry"], + "Displancement scale": 0.005, + "Vein Asymmetry": kwargs["vein_asymmetry"], # 0.3023 + "Vein Density": kwargs["vein_density"], # 7.0 + "Jigsaw Scale": kwargs["jigsaw_scale"], # 50 + "Jigsaw Depth": kwargs["jigsaw_depth"], # 0.3 + "Vein Angle": kwargs["vein_angle"], # 0.3 + "Midrib Length": kwargs["midrib_length"], # 0.3336 + "Midrib Width": kwargs["midrib_length"], # 0.6302, + "Stem Length": kwargs["stem_length"], + }, + ) + + nodegroup_sub_vein = nw.new_node( + 
nodegroup_nodegroup_sub_vein().name, + input_kwargs={ + "X": nodegroup_leaf_gen.outputs["X Modulated"], + "Y": nodegroup_leaf_gen.outputs["Vein Coord"], + }, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: nodegroup_sub_vein.outputs["Value"], 1: 0.0002}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply}) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": nodegroup_leaf_gen.outputs["Mesh"], + "Offset": combine_xyz, + }, + ) + + capture_attribute_1 = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={ + "Geometry": set_position, + 2: nodegroup_sub_vein.outputs["Color Value"], + }, + ) + + capture_attribute_2 = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={ + "Geometry": capture_attribute_1.outputs["Geometry"], + 2: nodegroup_leaf_gen.outputs["Vein Value"], + }, + ) + + apply_wave = nw.new_node( + nodegroup_apply_wave( + y_wave_control_points=kwargs["y_wave_control_points"], + x_wave_control_points=kwargs["x_wave_control_points"], + ).name, + input_kwargs={ + "Geometry": capture_attribute_2.outputs["Geometry"], + "Wave Scale X": 0.2, + "Wave Scale Y": 1.0, + "X Modulated": nodegroup_leaf_gen.outputs["X Modulated"], + }, + ) + + move_to_origin = nw.new_node( + nodegroup_move_to_origin().name, input_kwargs={"Geometry": apply_wave} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": move_to_origin, + "Offset": nodegroup_leaf_gen.outputs["Attribute"], + "Coordinate": capture_attribute.outputs["Attribute"], + "subvein offset": capture_attribute_1.outputs[2], + "vein value": capture_attribute_2.outputs[2], + }, + ) + + +class LeafFactoryBroadleaf(AssetFactory): + scale = 0.5 + + def __init__(self, factory_seed, season="autumn", coarse=False): + super(LeafFactoryBroadleaf, self).__init__(factory_seed, coarse=coarse) + + with FixedSeed(factory_seed): + self.genome = self.sample_geo_genome() + + t = uniform(0.0, 1.0) + + if season == "autumn": + hsvcol_blade = [uniform(0.0, 0.20), 0.85, 0.9] + hsvcol_vein = np.copy(hsvcol_blade) + hsvcol_vein[2] = 0.7 + + elif season == "summer" or season == "spring": + hsvcol_blade = [uniform(0.28, 0.32), uniform(0.6, 0.7), 0.9] + hsvcol_vein = np.copy(hsvcol_blade) + hsvcol_blade[2] = uniform(0.1, 0.5) + hsvcol_vein[2] = uniform(0.1, 0.5) + + elif season == "winter": + hsvcol_blade = [ + uniform(0.0, 0.10), + uniform(0.2, 0.6), + uniform(0.0, 0.1), + ] + hsvcol_vein = [uniform(0.0, 0.10), uniform(0.2, 0.6), uniform(0.0, 0.1)] + + else: + raise NotImplementedError + + self.blade_color = hsvcol_blade + self.vein_color = hsvcol_vein + + self.color_randomness = uniform(0.05, 0.10) + + # if t < 0.5: + # self.blade_color = np.array((0.2346, 0.4735, 0.0273, 1.0)) + # else: + # self.blade_color = np.array((1.000, 0.855, 0.007, 1.0)) + + @staticmethod + def sample_geo_genome(): + leaf_width_1 = uniform(0.2, 0.4) + leaf_width_2 = uniform(0.1, leaf_width_1) + + leaf_offset_1 = uniform(0.49, 0.51) + + return { + "midrib_length": uniform(0.0, 0.8), + "midrib_width": uniform(0.5, 1.0), + "stem_length": uniform(0.7, 0.9), + "vein_asymmetry": uniform(0.0, 1.0), + "vein_angle": uniform(0.4, 1.0), + "vein_density": uniform(3.0, 8.0), + "subvein_scale": uniform(10.0, 20.0), + "jigsaw_scale": uniform(30.0, 70.0), + "jigsaw_depth": uniform(0.0, 0.6), + "vein_mask_random_seed": uniform(0.0, 100.0), + "midrib_curve_control_points": [ + (0.0, 0.5), + (0.25, leaf_offset_1), + (0.75, 1.0 - leaf_offset_1), + (1.0, 0.5), + ], + 
"shape_curve_control_points": [ + (0.0, 0.0), + (uniform(0.2, 0.4), leaf_width_1), + (uniform(0.6, 0.8), leaf_width_2), + (1.0, 0.0), + ], + "vein_curve_control_points": [ + (0.0, 0.0), + (0.25, uniform(0.1, 0.4)), + (0.75, uniform(0.6, 0.9)), + (1.0, 1.0), + ], + } + + def create_asset(self, **params): + bpy.ops.mesh.primitive_plane_add( + size=2, + enter_editmode=False, + align="WORLD", + location=(0, 0, 0), + scale=(1, 1, 1), + ) + obj = bpy.context.active_object + + # add noise to the genotype output + # hue_noise = np.random.randn() * 0 + # hsv_blade = self.hsv_blade + hue_noise + # hsv_vein = self.hsv_vein + hue_noise + + phenome = self.genome.copy() + + phenome["y_wave_control_points"] = [ + (0.0, 0.5), + (uniform(0.25, 0.75), uniform(0.50, 0.60)), + (1.0, 0.5), + ] + x_wave_val = np.random.uniform(0.50, 0.58) + phenome["x_wave_control_points"] = [ + (0.0, 0.5), + (0.4, x_wave_val), + (0.5, 0.5), + (0.6, x_wave_val), + (1.0, 0.5), + ] + + material_kwargs = phenome.copy() + material_kwargs["color_base"] = np.copy( + self.blade_color + ) # (0.2346, 0.4735, 0.0273, 1.0), + material_kwargs["color_base"][0] += np.random.normal(0.0, 0.02) + material_kwargs["color_base"][1] += np.random.normal(0.0, self.color_randomness) + material_kwargs["color_base"][2] += np.random.normal(0.0, self.color_randomness) + material_kwargs["color_base"] = hsv2rgba(material_kwargs["color_base"]) + + material_kwargs["color_vein"] = np.copy( + self.vein_color + ) # (0.2346, 0.4735, 0.0273, 1.0), + material_kwargs["color_vein"][0] += np.random.normal(0.0, 0.02) + material_kwargs["color_vein"][1] += np.random.normal(0.0, self.color_randomness) + material_kwargs["color_vein"][2] += np.random.normal(0.0, self.color_randomness) + material_kwargs["color_vein"] = hsv2rgba(material_kwargs["color_vein"]) + + surface.add_geomod( + obj, + geo_leaf_broadleaf, + apply=False, + attributes=["offset", "coordinate", "subvein offset", "vein value"], + input_kwargs=phenome, + ) + surface.add_material( + obj, shader_material, reuse=False, input_kwargs=material_kwargs + ) + + bpy.ops.object.convert(target="MESH") + + obj = bpy.context.object + obj.scale *= normal(1, 0.1) * self.scale + butil.apply_transform(obj) + tag_object(obj, "leaf_broadleaf") + + return obj diff --git a/infinigen/assets/objects/leaves/leaf_ginko.py b/infinigen/assets/objects/leaves/leaf_ginko.py new file mode 100644 index 000000000..c86065589 --- /dev/null +++ b/infinigen/assets/objects/leaves/leaf_ginko.py @@ -0,0 +1,844 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
+ +# Authors: Yiming Zuo + + +import bpy +import numpy as np +from numpy.random import normal, uniform + +from infinigen.assets.objects.leaves.leaf_maple import nodegroup_leaf_shader +from infinigen.assets.objects.leaves.leaf_v2 import ( + nodegroup_apply_wave, + nodegroup_move_to_origin, +) +from infinigen.core import surface +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.tagging import tag_object +from infinigen.core.util import blender as butil +from infinigen.core.util.color import hsv2rgba +from infinigen.core.util.math import FixedSeed + + +def deg2rad(deg): + return deg / 180.0 * np.pi + + +@node_utils.to_nodegroup( + "nodegroup_ginko_stem", singleton=False, type="GeometryNodeTree" +) +def nodegroup_ginko_stem( + nw: NodeWrangler, + stem_curve_control_points=[ + (0.0, 0.4938), + (0.3659, 0.4969), + (0.7477, 0.4688), + (1.0, 0.4969), + ], +): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVector", "Coordinate", (0.0, 0.0, 0.0)), + ("NodeSocketFloat", "Length", 0.64), + ("NodeSocketFloat", "Value", 0.005), + ], + ) + + add = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: group_input.outputs["Coordinate"], 1: (0.0, 0.03, 0.0)}, + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": add.outputs["Vector"]} + ) + + map_range_2 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": separate_xyz.outputs["Y"], 1: -1.0, 2: 0.0}, + ) + + float_curve_1 = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": map_range_2.outputs["Result"]} + ) + node_utils.assign_curve(float_curve_1.mapping.curves[0], stem_curve_control_points) + + map_range_3 = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": float_curve_1, 3: -1.0} + ) + + add_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: map_range_3.outputs["Result"], 1: separate_xyz.outputs["X"]}, + ) + + absolute = nw.new_node( + Nodes.Math, input_kwargs={0: add_1}, attrs={"operation": "ABSOLUTE"} + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": separate_xyz.outputs["Y"], + 1: -1.72, + 2: -0.35, + 3: 0.03, + 4: 0.008, + }, + attrs={"interpolation_type": "SMOOTHSTEP"}, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: absolute, 1: map_range.outputs["Result"]}, + attrs={"operation": "SUBTRACT"}, + ) + + add_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["Y"], 1: group_input.outputs["Length"]}, + ) + + absolute_1 = nw.new_node( + Nodes.Math, input_kwargs={0: add_2}, attrs={"operation": "ABSOLUTE"} + ) + + subtract_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: absolute_1, 1: group_input.outputs["Length"]}, + attrs={"operation": "SUBTRACT"}, + ) + + smooth_max = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract, 1: subtract_1, 2: 0.02}, + attrs={"operation": "SMOOTH_MAX"}, + ) + + subtract_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: smooth_max, 1: group_input.outputs["Value"]}, + attrs={"operation": "SUBTRACT"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Stem": subtract_2, "Stem Raw": absolute} + ) + + +@node_utils.to_nodegroup( + "nodegroup_ginko_vein", singleton=False, type="GeometryNodeTree" +) +def nodegroup_ginko_vein(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ 
+ ("NodeSocketVector", "Vector", (0.0, 0.0, 0.0)), + ("NodeSocketFloat", "Scale Vein", 80.0), + ("NodeSocketFloat", "Scale Wave", 5.0), + ], + ) + + subtract = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: group_input.outputs["Vector"], 1: (-0.18, 0.0, 0.0)}, + attrs={"operation": "SUBTRACT"}, + ) + + noise_texture_1 = nw.new_node( + Nodes.NoiseTexture, input_kwargs={"Vector": subtract.outputs["Vector"]} + ) + + gradient_texture_1 = nw.new_node( + Nodes.GradientTexture, + input_kwargs={"Vector": subtract.outputs["Vector"]}, + attrs={"gradient_type": "RADIAL"}, + ) + + pingpong = nw.new_node( + Nodes.Math, + input_kwargs={0: gradient_texture_1.outputs["Fac"]}, + attrs={"operation": "PINGPONG"}, + ) + + length = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: subtract.outputs["Vector"]}, + attrs={"operation": "LENGTH"}, + ) + + subtract_1 = nw.new_node( + Nodes.Math, input_kwargs={0: pingpong}, attrs={"operation": "SUBTRACT"} + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract_1, 1: -0.44}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: length.outputs["Value"], 1: multiply}, + attrs={"operation": "MULTIPLY"}, + ) + + add = nw.new_node(Nodes.Math, input_kwargs={0: pingpong, 1: multiply_1}) + + multiply_add = nw.new_node( + Nodes.Math, + input_kwargs={0: noise_texture_1.outputs["Fac"], 1: 0.005, 2: add}, + attrs={"operation": "MULTIPLY_ADD"}, + ) + + combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"X": multiply_add}) + + wave_texture_1 = nw.new_node( + Nodes.WaveTexture, + input_kwargs={ + "Vector": combine_xyz_2, + "Scale": group_input.outputs["Scale Vein"], + "Distortion": 0.6, + "Detail": 3.0, + "Detail Scale": 5.0, + "Detail Roughness": 1.0, + "Phase Offset": -4.62, + }, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: wave_texture_1.outputs["Color"], 1: length.outputs["Value"]}, + attrs={"operation": "MULTIPLY"}, + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": multiply_2, 1: 0.15, 2: -0.32, 4: -0.02} + ) + + multiply_add_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: noise_texture_1.outputs["Fac"], 1: 0.03, 2: add}, + attrs={"operation": "MULTIPLY_ADD"}, + ) + + combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"X": multiply_add_1}) + + wave_texture_2 = nw.new_node( + Nodes.WaveTexture, + input_kwargs={ + "Vector": combine_xyz_3, + "Scale": group_input.outputs["Scale Wave"], + "Distortion": -0.42, + "Detail": 10.0, + "Detail Roughness": 1.0, + "Phase Offset": -4.62, + }, + ) + + multiply_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: wave_texture_2.outputs["Fac"], 1: length.outputs["Value"]}, + attrs={"operation": "MULTIPLY"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Vein": map_range_1.outputs["Result"], "Wave": multiply_3}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_ginko_shape", singleton=False, type="GeometryNodeTree" +) +def nodegroup_ginko_shape( + nw: NodeWrangler, + shape_curve_control_points=[ + (0.0, 0.0), + (0.523, 0.1156), + (0.5805, 0.7469), + (0.7742, 0.7719), + (0.9461, 0.7531), + (1.0, 0.0), + ], +): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVector", "Coordinate", (0.0, 0.0, 0.0)), + ("NodeSocketFloat", "Multiplier", 1.980), + ("NodeSocketFloat", "Scale Margin", 6.6), + ], + ) + + multiply = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: group_input.outputs["Coordinate"], 
1: (0.9, 1.0, 0.0)}, + attrs={"operation": "MULTIPLY"}, + ) + + length = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: multiply.outputs["Vector"]}, + attrs={"operation": "LENGTH"}, + ) + + gradient_texture = nw.new_node( + "ShaderNodeTexGradient", + input_kwargs={"Vector": group_input.outputs["Coordinate"]}, + ) + + gradient_texture = nw.new_node( + Nodes.GradientTexture, + input_kwargs={"Vector": group_input.outputs["Coordinate"]}, + attrs={"gradient_type": "RADIAL"}, + ) + + pingpong = nw.new_node( + Nodes.Math, + input_kwargs={0: gradient_texture.outputs["Fac"]}, + attrs={"operation": "PINGPONG"}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: pingpong, 1: group_input.outputs["Multiplier"]}, + attrs={"operation": "MULTIPLY"}, + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={"W": gradient_texture.outputs["Fac"]}, + attrs={"noise_dimensions": "1D"}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: noise_texture.outputs["Fac"], 1: 0.3}, + attrs={"operation": "MULTIPLY"}, + ) + + add = nw.new_node(Nodes.Math, input_kwargs={0: multiply_1, 1: multiply_2}) + + combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"X": add}) + + wave_texture = nw.new_node( + Nodes.WaveTexture, + input_kwargs={ + "Vector": combine_xyz_1, + "Scale": group_input.outputs["Scale Margin"], + "Distortion": 5.82, + "Detail": 1.52, + "Detail Roughness": 1.0, + }, + ) + + multiply_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: wave_texture.outputs["Fac"], 1: 0.02}, + attrs={"operation": "MULTIPLY"}, + ) + + float_curve = nw.new_node(Nodes.FloatCurve, input_kwargs={"Value": multiply_1}) + node_utils.assign_curve(float_curve.mapping.curves[0], shape_curve_control_points) + + add_1 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_3, 1: float_curve}) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: length.outputs["Value"], 1: add_1}, + attrs={"operation": "SUBTRACT"}, + ) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Value": subtract}) + + +@node_utils.to_nodegroup( + "nodegroup_valid_area", singleton=False, type="GeometryNodeTree" +) +def nodegroup_valid_area(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketFloat", "Value", 0.5)] + ) + + sign = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Value"]}, + attrs={"operation": "SIGN"}, + ) + + map_range_4 = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": sign, 1: -1.0, 3: 1.0, 4: 0.0} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Result": map_range_4.outputs["Result"]} + ) + + +@node_utils.to_nodegroup("nodegroup_ginko", singleton=False, type="GeometryNodeTree") +def nodegroup_ginko( + nw: NodeWrangler, stem_curve_control_points, shape_curve_control_points +): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Mesh", None), + ("NodeSocketFloat", "Vein Length", 0.64), + ("NodeSocketFloat", "Vein Width", 0.005), + ("NodeSocketFloatAngle", "Angle", -1.7617), + ("NodeSocketFloat", "Displacenment", 0.5), + ("NodeSocketFloat", "Multiplier", 1.980), + ("NodeSocketFloat", "Scale Vein", 80.0), + ("NodeSocketFloat", "Scale Wave", 5.0), + ("NodeSocketFloat", "Scale Margin", 6.6), + ("NodeSocketInt", "Level", 9), + ], + ) + + subdivide_mesh = nw.new_node( + Nodes.SubdivideMesh, + input_kwargs={ + "Mesh": 
group_input.outputs["Mesh"], + "Level": group_input.outputs["Level"], + }, + ) + + position = nw.new_node(Nodes.InputPosition) + + vector_rotate = nw.new_node( + Nodes.VectorRotate, + input_kwargs={"Vector": position, "Angle": group_input.outputs["Angle"]}, + attrs={"rotation_type": "Z_AXIS"}, + ) + + ginkoshape = nw.new_node( + nodegroup_ginko_shape( + shape_curve_control_points=shape_curve_control_points + ).name, + input_kwargs={ + "Coordinate": vector_rotate, + "Multiplier": group_input.outputs["Multiplier"], + "Scale Margin": group_input.outputs["Scale Margin"], + }, + ) + + validarea = nw.new_node( + nodegroup_valid_area().name, input_kwargs={"Value": ginkoshape} + ) + + ginkovein = nw.new_node( + nodegroup_ginko_vein().name, + input_kwargs={ + "Vector": vector_rotate, + "Scale Vein": group_input.outputs["Scale Vein"], + "Scale Wave": group_input.outputs["Scale Wave"], + }, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: validarea, 1: ginkovein.outputs["Vein"]}, + attrs={"operation": "MULTIPLY"}, + ) + + map_range_4 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": ginkoshape, 1: -1.0, 2: 0.0, 3: -5.0, 4: 0.0}, + attrs={"clamp": False}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply, 1: map_range_4.outputs["Result"]}, + attrs={"operation": "MULTIPLY", "use_clamp": True}, + ) + + clamp = nw.new_node(Nodes.Clamp, input_kwargs={"Value": multiply_1, "Max": 0.01}) + + capture_attribute_1 = nw.new_node( + Nodes.CaptureAttribute, input_kwargs={"Geometry": subdivide_mesh, 2: clamp} + ) + + capture_attribute = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={ + "Geometry": capture_attribute_1.outputs["Geometry"], + 2: ginkoshape, + }, + ) + + ginkostem = nw.new_node( + nodegroup_ginko_stem(stem_curve_control_points=stem_curve_control_points).name, + input_kwargs={ + "Coordinate": position, + "Length": group_input.outputs["Vein Length"], + "Value": group_input.outputs["Vein Width"], + }, + ) + + smooth_min = nw.new_node( + Nodes.Math, + input_kwargs={0: ginkoshape, 1: ginkostem.outputs["Stem"], 2: 0.1}, + attrs={"operation": "SMOOTH_MIN"}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: smooth_min, 1: -1.0}, + attrs={"operation": "MULTIPLY"}, + ) + + stem_length = nw.new_node( + Nodes.Compare, + input_kwargs={0: multiply_2, 1: 0.0}, + label="stem length", + attrs={"operation": "LESS_THAN"}, + ) + + delete_geometry = nw.new_node( + Nodes.DeleteGeom, + input_kwargs={ + "Geometry": capture_attribute.outputs["Geometry"], + "Selection": stem_length, + }, + ) + + validarea_1 = nw.new_node( + nodegroup_valid_area().name, input_kwargs={"Value": ginkostem.outputs["Stem"]} + ) + + multiply_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: validarea_1, 1: ginkostem.outputs["Stem Raw"]}, + attrs={"operation": "MULTIPLY"}, + ) + + add = nw.new_node(Nodes.Math, input_kwargs={0: multiply_3, 1: clamp}) + + multiply_4 = nw.new_node( + Nodes.Math, + input_kwargs={0: add, 1: group_input.outputs["Displacenment"]}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply_4}) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={"Geometry": delete_geometry, "Offset": combine_xyz}, + ) + + validarea_2 = nw.new_node( + nodegroup_valid_area().name, input_kwargs={"Value": ginkoshape} + ) + + multiply_5 = nw.new_node( + Nodes.Math, + input_kwargs={0: validarea_2, 1: ginkovein.outputs["Wave"]}, + attrs={"operation": "MULTIPLY"}, + ) + + group_output = nw.new_node( + 
Nodes.GroupOutput, + input_kwargs={ + "Geometry": set_position, + "Vein": capture_attribute_1.outputs[2], + "Shape": capture_attribute.outputs[2], + "Wave": multiply_5, + }, + ) + + +def shader_material(nw: NodeWrangler, **kwargs): + # Code generated using version 2.4.3 of the node_transpiler + + attribute = nw.new_node(Nodes.Attribute, attrs={"attribute_name": "vein"}) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": attribute.outputs["Color"], 2: 0.12, 4: 6.26}, + ) + + attribute_1 = nw.new_node(Nodes.Attribute, attrs={"attribute_name": "shape"}) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": attribute_1.outputs["Color"], + 1: -0.74, + 2: 0.01, + 3: 2.0, + 4: 0.0, + }, + ) + + float_curve = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": map_range_1.outputs["Result"]} + ) + node_utils.assign_curve( + float_curve.mapping.curves[0], [(0.0, 0.0), (0.3795, 0.6344), (1.0, 1.0)] + ) + + separate_hsv = nw.new_node( + "ShaderNodeSeparateHSV", input_kwargs={"Color": kwargs["color_base"]} + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_hsv.outputs["V"], 1: 0.2}, + attrs={"operation": "SUBTRACT"}, + ) + + combine_hsv = nw.new_node( + Nodes.CombineHSV, + input_kwargs={ + "H": separate_hsv.outputs["H"], + "S": separate_hsv.outputs["S"], + "V": subtract, + }, + ) + + mix_1 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": float_curve, + "Color1": kwargs["color_base"], + "Color2": combine_hsv, + }, + ) + + mix = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": map_range.outputs["Result"], + "Color1": mix_1, + "Color2": kwargs["color_vein"], + }, + ) + + group = nw.new_node(nodegroup_leaf_shader().name, input_kwargs={"Color": mix}) + + material_output = nw.new_node(Nodes.MaterialOutput, input_kwargs={"Surface": group}) + + +def geo_leaf_ginko(nw: NodeWrangler, **kwargs): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) + + nodegroup = nw.new_node( + nodegroup_ginko( + stem_curve_control_points=kwargs["stem_curve_control_points"], + shape_curve_control_points=kwargs["shape_curve_control_points"], + ).name, + input_kwargs={ + "Mesh": group_input.outputs["Geometry"], + "Vein Length": kwargs["vein_length"], + "Angle": deg2rad(kwargs["angle"]), + "Multiplier": kwargs["multiplier"], + "Scale Vein": kwargs["scale_vein"], + "Scale Wave": kwargs["scale_wave"], + "Scale Margin": kwargs["scale_margin"], + }, + ) + + map_range = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": nodegroup.outputs["Wave"], 4: 0.04} + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"Z": map_range.outputs["Result"]} + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={"Geometry": nodegroup.outputs["Geometry"], "Offset": combine_xyz}, + ) + + position = nw.new_node(Nodes.InputPosition) + + separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": position}) + + apply_wave = nw.new_node( + nodegroup_apply_wave( + y_wave_control_points=kwargs["y_wave_control_points"], + x_wave_control_points=kwargs["x_wave_control_points"], + ).name, + input_kwargs={ + "Geometry": set_position, + "Wave Scale X": 0.0, + "Wave Scale Y": 1.0, + "X Modulated": separate_xyz.outputs["X"], + }, + ) + + move_to_origin = nw.new_node( + nodegroup_move_to_origin().name, input_kwargs={"Geometry": apply_wave} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + 
"Geometry": move_to_origin, + "Vein": nodegroup.outputs["Vein"], + "Shape": nodegroup.outputs["Shape"], + }, + ) + + +class LeafFactoryGinko(AssetFactory): + scale = 0.3 + + def __init__(self, factory_seed, season="autumn", coarse=False): + super(LeafFactoryGinko, self).__init__(factory_seed, coarse=coarse) + + with FixedSeed(factory_seed): + self.genome = self.sample_geo_genome() + + t = uniform(0.0, 1.0) + + # self.blade_color = hsv2rgba([0.125 + 0.16 * factory_seed / 10, 0.95, 0.6]) + + if season == "autumn": + self.blade_color = [uniform(0.125, 0.2), 0.95, 0.6] + elif season == "summer" or season == "spring": + self.blade_color = [uniform(0.25, 0.3), 0.95, 0.6] + elif season == "winter": + self.blade_color = [uniform(0.125, 0.2), 0.95, 0.6] + else: + raise NotImplementedError + + self.color_randomness = 0.05 + + @staticmethod + def sample_geo_genome(): + return { + "midrib_length": uniform(0.0, 0.8), + "midrib_width": uniform(0.5, 1.0), + "stem_length": uniform(0.7, 0.9), + "vein_asymmetry": uniform(0.0, 1.0), + "vein_angle": uniform(0.2, 2.0), + "vein_density": uniform(5.0, 20.0), + "subvein_scale": uniform(10.0, 20.0), + "jigsaw_scale": uniform(5.0, 20.0), + "jigsaw_depth": uniform(0.0, 2.0), + "midrib_shape_control_points": [ + (0.0, 0.5), + (0.25, uniform(0.48, 0.52)), + (0.75, uniform(0.48, 0.52)), + (1.0, 0.5), + ], + "leaf_shape_control_points": [ + (0.0, 0.0), + (uniform(0.2, 0.4), uniform(0.1, 0.4)), + (uniform(0.6, 0.8), uniform(0.1, 0.4)), + (1.0, 0.0), + ], + "vein_shape_control_points": [ + (0.0, 0.0), + (0.25, uniform(0.1, 0.4)), + (0.75, uniform(0.6, 0.9)), + (1.0, 1.0), + ], + } + + def create_asset(self, **params): + bpy.ops.mesh.primitive_plane_add( + size=2, + enter_editmode=False, + align="WORLD", + location=(0, 0, 0), + scale=(1, 1, 1), + ) + obj = bpy.context.active_object + + # add noise to the genotype output + # hue_noise = np.random.randn() * 0 + # hsv_blade = self.hsv_blade + hue_noise + # hsv_vein = self.hsv_vein + hue_noise + + phenome = self.genome.copy() + + phenome["y_wave_control_points"] = [ + (0.0, 0.5), + (uniform(0.25, 0.75), uniform(0.50, 0.60)), + (1.0, 0.5), + ] + x_wave_val = np.random.uniform(0.50, 0.58) + phenome["x_wave_control_points"] = [ + (0.0, 0.5), + (0.4, x_wave_val), + (0.5, 0.5), + (0.6, x_wave_val), + (1.0, 0.5), + ] + + phenome["stem_curve_control_points"] = [ + (0.0, 0.5), + (uniform(0.2, 0.3), uniform(0.45, 0.55)), + (uniform(0.7, 0.8), uniform(0.45, 0.55)), + (1.0, 0.5), + ] + phenome["shape_curve_control_points"] = [ + (0.0, 0.0), + (0.523, 0.1156), + (0.5805, 0.7469), + (0.7742, 0.7719), + (0.9461, 0.7531), + (1.0, 0.0), + ] + phenome["vein_length"] = uniform(0.4, 0.5) + phenome["angle"] = uniform(-110.0, -70.0) + phenome["multiplier"] = uniform(1.90, 1.98) + + phenome["scale_vein"] = uniform(70.0, 90.0) + phenome["scale_wave"] = uniform(4.0, 6.0) + phenome["scale_margin"] = uniform(5.5, 7.5) + + material_kwargs = phenome.copy() + material_kwargs["color_base"] = np.copy( + self.blade_color + ) # (0.2346, 0.4735, 0.0273, 1.0), + material_kwargs["color_base"][0] += np.random.normal(0.0, 0.02) + material_kwargs["color_base"][1] += np.random.normal(0.0, self.color_randomness) + material_kwargs["color_base"][2] += np.random.normal(0.0, self.color_randomness) + material_kwargs["color_base"] = hsv2rgba(material_kwargs["color_base"]) + + material_kwargs["color_vein"] = hsv2rgba(np.copy(self.blade_color)) + + surface.add_geomod( + obj, + geo_leaf_ginko, + apply=False, + attributes=["vein", "shape"], + input_kwargs=phenome, + ) + 
surface.add_material( + obj, shader_material, reuse=False, input_kwargs=material_kwargs + ) + + bpy.ops.object.convert(target="MESH") + + obj = bpy.context.object + obj.scale *= normal(1, 0.2) * self.scale + butil.apply_transform(obj) + tag_object(obj, "leaf_ginko") + + return obj diff --git a/infinigen/assets/objects/leaves/leaf_maple.py b/infinigen/assets/objects/leaves/leaf_maple.py new file mode 100644 index 000000000..620292bbc --- /dev/null +++ b/infinigen/assets/objects/leaves/leaf_maple.py @@ -0,0 +1,1356 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Yiming Zuo +# Acknowledgement: This file draws inspiration https://www.youtube.com/watch?v=X9YmJ0zGWHw by Creative Shrimp + + +import bpy +import numpy as np +from numpy.random import normal, uniform + +from infinigen.assets.objects.leaves.leaf_v2 import nodegroup_apply_wave +from infinigen.core import surface +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.tagging import tag_object +from infinigen.core.util import blender as butil +from infinigen.core.util.color import hsv2rgba +from infinigen.core.util.math import FixedSeed + + +def deg2rad(deg): + return deg / 180.0 * np.pi + + +@node_utils.to_nodegroup("nodegroup_vein", singleton=False, type="GeometryNodeTree") +def nodegroup_vein(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVector", "Vector", (0.0, 0.0, 0.0)), + ("NodeSocketFloatAngle", "Angle", 0.0), + ("NodeSocketFloat", "Length", 0.0), + ("NodeSocketFloat", "Start", 0.0), + ("NodeSocketFloat", "X Modulated", 0.0), + ("NodeSocketFloat", "Anneal", 0.4), + ("NodeSocketFloat", "Phase Offset", 0.0), + ], + ) + + absolute = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["X Modulated"]}, + attrs={"operation": "ABSOLUTE"}, + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": group_input.outputs["Vector"]} + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": absolute, + "Y": separate_xyz.outputs["Y"], + "Z": separate_xyz.outputs["Z"], + }, + ) + + vector_rotate = nw.new_node( + Nodes.VectorRotate, + input_kwargs={"Vector": combine_xyz_1, "Angle": group_input.outputs["Angle"]}, + attrs={"rotation_type": "Z_AXIS"}, + ) + + separate_xyz_3 = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": vector_rotate} + ) + + separate_xyz_1 = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": combine_xyz_1} + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": separate_xyz_1.outputs["X"], 2: 0.3} + ) + + float_curve = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": map_range_1.outputs["Result"]} + ) + node_utils.assign_curve( + float_curve.mapping.curves[0], [(0.0, 0.0), (0.5932, 0.1969), (1.0, 1.0)] + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: float_curve, 1: 0.2}, + attrs={"operation": "MULTIPLY"}, + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: separate_xyz_3.outputs["X"], 1: multiply} + ) + + sign = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["X Modulated"]}, + attrs={"operation": "SIGN"}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, input_kwargs={0: sign, 1: 0.1}, 
attrs={"operation": "MULTIPLY"} + ) + + add_1 = nw.new_node(Nodes.Math, input_kwargs={0: add, 1: multiply_1}) + + add_2 = nw.new_node( + Nodes.Math, input_kwargs={0: add_1, 1: group_input.outputs["Phase Offset"]} + ) + + voronoi_texture = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={"W": add_2, "Scale": 8.0, "Randomness": 0.7125}, + attrs={"voronoi_dimensions": "1D"}, + ) + + length = nw.new_node( + Nodes.VectorMath, input_kwargs={0: vector_rotate}, attrs={"operation": "LENGTH"} + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: 0.05, 1: length.outputs["Value"]}, + attrs={"operation": "MULTIPLY", "use_clamp": True}, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: 0.08, 1: multiply_2}, + attrs={"operation": "SUBTRACT", "use_clamp": True}, + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": voronoi_texture.outputs["Distance"], + 2: subtract, + 3: 1.0, + 4: 0.0, + }, + ) + + absolute_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["X Modulated"]}, + attrs={"operation": "ABSOLUTE"}, + ) + + subtract_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_1.outputs["Y"], 1: 0.0}, + attrs={"operation": "SUBTRACT"}, + ) + + multiply_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract_1, 1: group_input.outputs["Anneal"]}, + attrs={"operation": "MULTIPLY"}, + ) + + less_than = nw.new_node( + Nodes.Math, + input_kwargs={0: absolute_1, 1: multiply_3}, + attrs={"operation": "LESS_THAN"}, + ) + + multiply_4 = nw.new_node( + Nodes.Math, + input_kwargs={0: map_range.outputs["Result"], 1: less_than}, + attrs={"operation": "MULTIPLY"}, + ) + + less_than_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: add, 1: group_input.outputs["Start"]}, + attrs={"operation": "LESS_THAN"}, + ) + + multiply_5 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_4, 1: less_than_1}, + attrs={"operation": "MULTIPLY"}, + ) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Result": multiply_5}) + + +@node_utils.to_nodegroup( + "nodegroup_leaf_shader", singleton=False, type="ShaderNodeTree" +) +def nodegroup_leaf_shader(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[("NodeSocketColor", "Color", (0.8, 0.8, 0.8, 1.0))], + ) + + diffuse_bsdf = nw.new_node( + "ShaderNodeBsdfDiffuse", input_kwargs={"Color": group_input.outputs["Color"]} + ) + + glossy_bsdf = nw.new_node( + "ShaderNodeBsdfGlossy", + input_kwargs={"Color": group_input.outputs["Color"], "Roughness": 0.3}, + ) + + mix_shader = nw.new_node( + Nodes.MixShader, input_kwargs={"Fac": 0.2, 1: diffuse_bsdf, 2: glossy_bsdf} + ) + + translucent_bsdf = nw.new_node( + Nodes.TranslucentBSDF, input_kwargs={"Color": group_input.outputs["Color"]} + ) + + mix_shader_1 = nw.new_node( + Nodes.MixShader, input_kwargs={"Fac": 0.3, 1: mix_shader, 2: translucent_bsdf} + ) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Shader": mix_shader_1}) + + +@node_utils.to_nodegroup( + "nodegroup_node_group_002", singleton=False, type="GeometryNodeTree" +) +def nodegroup_node_group_002(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + position = nw.new_node(Nodes.InputPosition) + + length = nw.new_node( + Nodes.VectorMath, input_kwargs={0: position}, attrs={"operation": "LENGTH"} + ) + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketFloat", "Shape", 0.5)] + ) + + multiply = nw.new_node( + Nodes.Math, + 
input_kwargs={0: length.outputs["Value"], 1: group_input.outputs["Shape"]}, + attrs={"operation": "MULTIPLY"}, + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": multiply, 1: -1.0, 2: 0.0, 3: -0.1, 4: 0.1}, + attrs={"clamp": False}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Result": map_range_1.outputs["Result"]} + ) + + +@node_utils.to_nodegroup( + "nodegroup_nodegroup_sub_vein", singleton=False, type="GeometryNodeTree" +) +def nodegroup_nodegroup_sub_vein(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[("NodeSocketFloat", "X", 0.5), ("NodeSocketFloat", "Y", 0.0)], + ) + + absolute = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["X"]}, + attrs={"operation": "ABSOLUTE"}, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": absolute, "Y": group_input.outputs["Y"]} + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, input_kwargs={"Vector": combine_xyz} + ) + + mix = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": 0.9, + "Color1": noise_texture.outputs["Color"], + "Color2": combine_xyz, + }, + ) + + voronoi_texture = nw.new_node( + Nodes.VoronoiTexture, input_kwargs={"Vector": mix, "Scale": 30.0} + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": voronoi_texture.outputs["Distance"], 2: 0.1, 4: 2.0}, + attrs={"clamp": False}, + ) + + voronoi_texture_1 = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={"Vector": mix, "Scale": 150.0}, + attrs={"feature": "DISTANCE_TO_EDGE"}, + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": voronoi_texture_1.outputs["Distance"], 2: 0.1}, + ) + + add = nw.new_node( + Nodes.Math, + input_kwargs={0: map_range.outputs["Result"], 1: map_range_1.outputs["Result"]}, + ) + + multiply = nw.new_node( + Nodes.Math, input_kwargs={0: add, 1: -1.0}, attrs={"operation": "MULTIPLY"} + ) + + map_range_3 = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": map_range_1.outputs["Result"], 4: -1.0} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Value": multiply, "Color Value": map_range_3.outputs["Result"]}, + ) + + +@node_utils.to_nodegroup("nodegroup_midrib", singleton=False, type="GeometryNodeTree") +def nodegroup_midrib(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVector", "Vector", (0.0, 0.0, 0.0)), + ("NodeSocketFloatAngle", "Angle", 0.8238), + ("NodeSocketFloatAngle", "vein Angle", 0.7854), + ("NodeSocketFloat", "vein Length", 0.2), + ("NodeSocketFloat", "vein Start", -0.2), + ("NodeSocketFloat", "Anneal", 0.4), + ("NodeSocketFloat", "Phase Offset", 0.0), + ], + ) + + vector_rotate_1 = nw.new_node( + Nodes.VectorRotate, + input_kwargs={ + "Vector": group_input.outputs["Vector"], + "Angle": group_input.outputs["Angle"], + }, + attrs={"rotation_type": "Z_AXIS"}, + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": vector_rotate_1} + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": separate_xyz.outputs["Y"]} + ) + + float_curve = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": map_range_1.outputs["Result"]} + ) + node_utils.assign_curve( + float_curve.mapping.curves[0], + [ + (0.0, 0.5), + (0.1432, 0.5406), + (0.2591, 0.5062), + (0.3705, 0.5406), + (0.4591, 0.425), + (0.5932, 0.4562), + (0.7432, 0.3562), + 
(0.8727, 0.5062), + (1.0, 0.5), + ], + ) + + value = nw.new_node(Nodes.Value) + value.outputs[0].default_value = 0.1 + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: float_curve, 1: value}, + attrs={"operation": "MULTIPLY"}, + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: separate_xyz.outputs["X"], 1: multiply} + ) + + multiply_1 = nw.new_node( + Nodes.Math, input_kwargs={0: value}, attrs={"operation": "MULTIPLY"} + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: add, 1: multiply_1}, + attrs={"operation": "SUBTRACT"}, + ) + + vein = nw.new_node( + nodegroup_vein().name, + input_kwargs={ + "Vector": vector_rotate_1, + "Angle": group_input.outputs["vein Angle"], + "Length": group_input.outputs["vein Length"], + "Start": group_input.outputs["vein Start"], + "X Modulated": subtract, + "Anneal": group_input.outputs["Anneal"], + "Phase Offset": group_input.outputs["Phase Offset"], + }, + ) + + absolute = nw.new_node( + Nodes.Math, input_kwargs={0: subtract}, attrs={"operation": "ABSOLUTE"} + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, input_kwargs={"Vector": vector_rotate_1, "Scale": 10.0} + ) + + subtract_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: noise_texture.outputs["Fac"]}, + attrs={"operation": "SUBTRACT"}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract_1, 1: 0.01}, + attrs={"operation": "MULTIPLY"}, + ) + + add_1 = nw.new_node(Nodes.Math, input_kwargs={0: absolute, 1: multiply_2}) + + map_range = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": add_1, 2: 0.01, 3: 1.0, 4: 0.0} + ) + + greater_than = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["Y"], 1: 0.0}, + attrs={"operation": "GREATER_THAN"}, + ) + + multiply_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: map_range.outputs["Result"], 1: greater_than}, + attrs={"operation": "MULTIPLY"}, + ) + + maximum = nw.new_node( + Nodes.Math, + input_kwargs={0: vein, 1: multiply_3}, + attrs={"operation": "MAXIMUM"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Result": maximum, "Vector": vector_rotate_1} + ) + + +@node_utils.to_nodegroup( + "nodegroup_valid_area", singleton=False, type="GeometryNodeTree" +) +def nodegroup_valid_area(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketFloat", "Value", 0.5)] + ) + + sign = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Value"]}, + attrs={"operation": "SIGN"}, + ) + + map_range_4 = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": sign, 1: -1.0, 3: 1.0, 4: 0.0} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Result": map_range_4.outputs["Result"]} + ) + + +@node_utils.to_nodegroup( + "nodegroup_maple_shape", singleton=False, type="GeometryNodeTree" +) +def nodegroup_maple_shape(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVector", "Coordinate", (0.0, 0.0, 0.0)), + ("NodeSocketFloat", "Multiplier", 1.96), + ("NodeSocketFloat", "Noise Level", 0.02), + ], + ) + + multiply = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: group_input.outputs["Coordinate"], 1: (0.9, 1.0, 0.0)}, + attrs={"operation": "MULTIPLY"}, + ) + + length = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: multiply.outputs["Vector"]}, + attrs={"operation": "LENGTH"}, + ) + + gradient_texture = nw.new_node( + 
Nodes.GradientTexture, + input_kwargs={"Vector": group_input.outputs["Coordinate"]}, + attrs={"gradient_type": "RADIAL"}, + ) + + pingpong = nw.new_node( + Nodes.Math, + input_kwargs={0: gradient_texture.outputs["Fac"]}, + attrs={"operation": "PINGPONG"}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: pingpong, 1: group_input.outputs["Multiplier"]}, + attrs={"operation": "MULTIPLY"}, + ) + + float_curve = nw.new_node(Nodes.FloatCurve, input_kwargs={"Value": multiply_1}) + node_utils.assign_curve( + float_curve.mapping.curves[0], + [ + (0.0, 0.0), + (0.1156, 0.075), + (0.2109, 0.2719), + (0.2602, 0.2344), + (0.3633, 0.2625), + (0.4171, 0.5545), + (0.4336, 0.5344), + (0.4568, 0.7094), + (0.4749, 0.6012), + (0.4882, 0.6636), + (0.5352, 0.4594), + (0.5484, 0.4375), + (0.5648, 0.4469), + (0.6366, 0.7331), + (0.6719, 0.6562), + (0.7149, 0.8225), + (0.768, 0.6344), + (0.7928, 0.6853), + (0.8156, 0.5125), + (0.8297, 0.4906), + (0.85, 0.5125), + (0.8988, 0.747), + (0.9297, 0.6937), + (0.9648, 0.8937), + (0.9797, 0.8656), + (0.9883, 0.8938), + (1.0, 1.0), + ], + handles=[ + "AUTO", + "AUTO", + "VECTOR", + "AUTO", + "AUTO", + "VECTOR", + "AUTO", + "VECTOR", + "AUTO", + "VECTOR", + "AUTO", + "AUTO", + "AUTO", + "VECTOR", + "AUTO", + "VECTOR", + "AUTO", + "VECTOR", + "AUTO", + "AUTO", + "AUTO", + "VECTOR", + "AUTO", + "VECTOR", + "AUTO", + "VECTOR", + "AUTO", + ], + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: length.outputs["Value"], 1: float_curve}, + attrs={"operation": "SUBTRACT"}, + ) + + subtract_1 = nw.new_node( + Nodes.Math, input_kwargs={0: subtract, 1: 0.06}, attrs={"operation": "SUBTRACT"} + ) + + float_curve_1 = nw.new_node(Nodes.FloatCurve, input_kwargs={"Value": multiply_1}) + node_utils.assign_curve( + float_curve_1.mapping.curves[0], + [ + (0.0, 0.0), + (0.1156, 0.075), + (0.2109, 0.2719), + (0.2602, 0.2344), + (0.3633, 0.2625), + (0.4336, 0.5344), + (0.4568, 0.7094), + (0.4749, 0.6012), + (0.5352, 0.4594), + (0.5484, 0.4375), + (0.5648, 0.4469), + (0.6719, 0.6562), + (0.7149, 0.8225), + (0.768, 0.6344), + (0.8156, 0.5125), + (0.8297, 0.4906), + (0.85, 0.5125), + (0.9297, 0.6937), + (0.9883, 0.8938), + (1.0, 1.0), + ], + handles=[ + "AUTO", + "AUTO", + "VECTOR", + "AUTO", + "AUTO", + "AUTO", + "VECTOR", + "AUTO", + "AUTO", + "AUTO", + "AUTO", + "AUTO", + "VECTOR", + "AUTO", + "AUTO", + "AUTO", + "AUTO", + "AUTO", + "VECTOR", + "AUTO", + ], + ) + + subtract_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: length.outputs["Value"], 1: float_curve_1}, + attrs={"operation": "SUBTRACT"}, + ) + + subtract_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract_2, 1: 0.06}, + attrs={"operation": "SUBTRACT"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Shape": subtract_1, "Displacement": subtract_3}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_maple_stem", singleton=False, type="GeometryNodeTree" +) +def nodegroup_maple_stem(nw: NodeWrangler, stem_curve_control_points): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVector", "Coordinate", (0.0, 0.0, 0.0)), + ("NodeSocketFloat", "Length", 0.64), + ("NodeSocketFloat", "Value", 0.005), + ], + ) + + add = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: group_input.outputs["Coordinate"], 1: (0.0, 0.08, 0.0)}, + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": add.outputs["Vector"]} + ) + + map_range_2 = nw.new_node( + Nodes.MapRange, + 
input_kwargs={"Value": separate_xyz.outputs["Y"], 1: -1.0, 2: 0.0}, + ) + + float_curve_1 = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": map_range_2.outputs["Result"]} + ) + node_utils.assign_curve(float_curve_1.mapping.curves[0], stem_curve_control_points) + + map_range_3 = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": float_curve_1, 3: -1.0} + ) + + add_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: map_range_3.outputs["Result"], 1: separate_xyz.outputs["X"]}, + ) + + absolute = nw.new_node( + Nodes.Math, input_kwargs={0: add_1}, attrs={"operation": "ABSOLUTE"} + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": separate_xyz.outputs["Y"], + 1: -1.72, + 2: -0.35, + 3: 0.03, + 4: 0.008, + }, + attrs={"interpolation_type": "SMOOTHSTEP"}, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: absolute, 1: map_range.outputs["Result"]}, + attrs={"operation": "SUBTRACT"}, + ) + + add_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["Y"], 1: group_input.outputs["Length"]}, + ) + + absolute_1 = nw.new_node( + Nodes.Math, input_kwargs={0: add_2}, attrs={"operation": "ABSOLUTE"} + ) + + subtract_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: absolute_1, 1: group_input.outputs["Length"]}, + attrs={"operation": "SUBTRACT"}, + ) + + smooth_max = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract, 1: subtract_1, 2: 0.02}, + attrs={"operation": "SMOOTH_MAX"}, + ) + + subtract_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: smooth_max, 1: group_input.outputs["Value"]}, + attrs={"operation": "SUBTRACT"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Stem": subtract_2, "Stem Raw": absolute} + ) + + +@node_utils.to_nodegroup( + "nodegroup_move_to_origin", singleton=False, type="GeometryNodeTree" +) +def nodegroup_move_to_origin(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) + + position = nw.new_node(Nodes.InputPosition) + + separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": position}) + + attribute_statistic = nw.new_node( + Nodes.AttributeStatistic, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + 2: separate_xyz.outputs["Y"], + }, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: 0.0, 1: attribute_statistic.outputs["Min"]}, + attrs={"operation": "SUBTRACT"}, + ) + + attribute_statistic_1 = nw.new_node( + Nodes.AttributeStatistic, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + 2: separate_xyz.outputs["Z"], + }, + ) + + subtract_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: 0.0, 1: attribute_statistic_1.outputs["Max"]}, + attrs={"operation": "SUBTRACT"}, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"Y": subtract, "Z": subtract_1} + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + "Offset": combine_xyz, + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_position} + ) + + +def shader_material(nw: NodeWrangler, **kwargs): + # Code generated using version 2.4.3 of the node_transpiler + + texture_coordinate = nw.new_node(Nodes.TextureCoord) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": texture_coordinate.outputs["Object"], + "Detail": 10.0, + "Roughness": 0.7, + }, + ) + + separate_rgb = nw.new_node( + 
Nodes.SeparateRGB, input_kwargs={"Image": noise_texture.outputs["Color"]} + ) + + map_range_4 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": separate_rgb.outputs["G"], + 1: 0.4, + 2: 0.7, + 3: 0.48, + 4: 0.55, + }, + attrs={"interpolation_type": "SMOOTHSTEP"}, + ) + + map_range_6 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": separate_rgb.outputs["B"], 1: 0.4, 2: 0.7, 3: 0.4}, + attrs={"interpolation_type": "SMOOTHSTEP"}, + ) + + attribute = nw.new_node(Nodes.Attribute, attrs={"attribute_name": "vein"}) + + mix = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": attribute.outputs["Color"], + "Color1": kwargs["color_vein"], + "Color2": kwargs["color_base"], + }, + ) + + hue_saturation_value = nw.new_node( + "ShaderNodeHueSaturation", + input_kwargs={ + "Hue": map_range_4.outputs["Result"], + "Value": map_range_6.outputs["Result"], + "Color": mix, + }, + ) + + group = nw.new_node( + nodegroup_leaf_shader().name, input_kwargs={"Color": hue_saturation_value} + ) + + material_output = nw.new_node(Nodes.MaterialOutput, input_kwargs={"Surface": group}) + + +def geo_leaf_maple(nw: NodeWrangler, **kwargs): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) + + # subdivide_mesh_1 = nw.new_node(Nodes.SubdivideMesh, + # input_kwargs={'Mesh': group_input.outputs["Geometry"]}) + + subdivide_mesh = nw.new_node( + Nodes.SubdivideMesh, + input_kwargs={"Mesh": group_input.outputs["Geometry"], "Level": 11}, + ) + + position = nw.new_node(Nodes.InputPosition) + + maplestem = nw.new_node( + nodegroup_maple_stem( + stem_curve_control_points=kwargs["stem_curve_control_points"] + ).name, + input_kwargs={"Coordinate": position, "Length": 0.32, "Value": 0.005}, + ) + + vector_rotate_1 = nw.new_node( + Nodes.VectorRotate, + input_kwargs={"Vector": position, "Angle": deg2rad(kwargs["angle"])}, + attrs={"rotation_type": "Z_AXIS"}, + ) + + vector_rotate = nw.new_node( + Nodes.VectorRotate, + input_kwargs={"Vector": vector_rotate_1, "Angle": -1.5708}, + attrs={"rotation_type": "Z_AXIS"}, + ) + + mapleshape = nw.new_node( + nodegroup_maple_shape().name, + input_kwargs={ + "Coordinate": vector_rotate, + "Multiplier": kwargs["multiplier"], + "Noise Level": 0.04, + }, + ) + + smooth_min = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: maplestem.outputs["Stem"], + 1: mapleshape.outputs["Shape"], + 2: 0.0, + }, + attrs={"operation": "SMOOTH_MIN"}, + ) + + stem_length = nw.new_node( + Nodes.Compare, input_kwargs={0: smooth_min}, label="stem length" + ) + + delete_geometry = nw.new_node( + Nodes.DeleteGeom, + input_kwargs={"Geometry": subdivide_mesh, "Selection": stem_length}, + ) + + validarea = nw.new_node( + nodegroup_valid_area().name, input_kwargs={"Value": mapleshape.outputs["Shape"]} + ) + + midrib = nw.new_node( + nodegroup_midrib().name, + input_kwargs={ + "Vector": vector_rotate_1, + "Angle": 1.693, + "vein Length": 0.12, + "vein Start": -0.12, + "Phase Offset": uniform(0, 100), + }, + ) + + midrib_1 = nw.new_node( + nodegroup_midrib().name, + input_kwargs={ + "Vector": vector_rotate_1, + "Angle": -1.7279, + "vein Length": 0.12, + "vein Start": -0.12, + "Phase Offset": uniform(0, 100), + }, + ) + + maximum = nw.new_node( + Nodes.Math, + input_kwargs={0: midrib.outputs["Result"], 1: midrib_1.outputs["Result"]}, + attrs={"operation": "MAXIMUM"}, + ) + + midrib_2 = nw.new_node( + nodegroup_midrib().name, + input_kwargs={ + "Vector": vector_rotate_1, + "Angle": 0.8901, + 
"vein Length": 0.2, + "vein Start": 0.0, + "Phase Offset": uniform(0, 100), + }, + ) + + midrib_3 = nw.new_node( + nodegroup_midrib().name, + input_kwargs={ + "Vector": vector_rotate_1, + "Angle": -0.9041, + "vein Start": 0.0, + "Phase Offset": uniform(0, 100), + }, + ) + + maximum_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: midrib_2.outputs["Result"], 1: midrib_3.outputs["Result"]}, + attrs={"operation": "MAXIMUM"}, + ) + + maximum_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: maximum, 1: maximum_1}, + attrs={"operation": "MAXIMUM"}, + ) + + midrib_4 = nw.new_node( + nodegroup_midrib().name, + input_kwargs={ + "Vector": vector_rotate_1, + "Angle": 0.0, + "vein Length": 1.64, + "vein Start": -0.12, + "Phase Offset": uniform(0, 100), + }, + ) + + midrib_5 = nw.new_node( + nodegroup_midrib().name, + input_kwargs={ + "Vector": vector_rotate_1, + "Angle": 3.1416, + "vein Angle": 0.761, + "vein Length": -10.56, + "vein Start": 0.02, + "Anneal": 10.0, + "Phase Offset": uniform(0, 100), + }, + ) + + maximum_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: midrib_4.outputs["Result"], 1: midrib_5.outputs["Result"]}, + attrs={"operation": "MAXIMUM"}, + ) + + maximum_4 = nw.new_node( + Nodes.Math, + input_kwargs={0: maximum_2, 1: maximum_3}, + attrs={"operation": "MAXIMUM"}, + ) + + separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": position}) + + nodegroup_sub_vein = nw.new_node( + nodegroup_nodegroup_sub_vein().name, + input_kwargs={"X": separate_xyz.outputs["X"], "Y": separate_xyz.outputs["Y"]}, + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": nodegroup_sub_vein.outputs["Color Value"], + 2: -0.94, + 3: 1.0, + 4: 0.0, + }, + ) + + maximum_5 = nw.new_node( + Nodes.Math, + input_kwargs={0: maximum_4, 1: map_range.outputs["Result"]}, + attrs={"operation": "MAXIMUM"}, + ) + + subtract = nw.new_node( + Nodes.Math, input_kwargs={0: 1.0, 1: maximum_5}, attrs={"operation": "SUBTRACT"} + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: validarea, 1: subtract}, + attrs={"operation": "MULTIPLY"}, + ) + + capture_attribute = nw.new_node( + Nodes.CaptureAttribute, input_kwargs={"Geometry": delete_geometry, 2: multiply} + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: nodegroup_sub_vein.outputs["Value"], 1: -0.03}, + attrs={"operation": "MULTIPLY"}, + ) + + maximum_6 = nw.new_node( + Nodes.Math, + input_kwargs={0: maximum_4, 1: multiply_1}, + attrs={"operation": "MAXIMUM"}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: maximum_6, 1: 0.015}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_2, 1: -1.0}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_4 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_3, 1: validarea}, + attrs={"operation": "MULTIPLY"}, + ) + + validarea_1 = nw.new_node( + nodegroup_valid_area().name, input_kwargs={"Value": maplestem.outputs["Stem"]} + ) + + subtract_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: maplestem.outputs["Stem Raw"], 1: 0.01}, + attrs={"operation": "SUBTRACT"}, + ) + + multiply_5 = nw.new_node( + Nodes.Math, + input_kwargs={0: validarea_1, 1: subtract_1}, + attrs={"operation": "MULTIPLY"}, + ) + + add = nw.new_node(Nodes.Math, input_kwargs={0: multiply_4, 1: multiply_5}) + + multiply_6 = nw.new_node( + Nodes.Math, input_kwargs={0: add}, attrs={"operation": "MULTIPLY"} + ) + + nodegroup_002 = nw.new_node( + nodegroup_node_group_002().name, + input_kwargs={"Shape": 
mapleshape.outputs["Displacement"]}, + ) + + add_1 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_6, 1: nodegroup_002}) + + combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": add_1}) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": capture_attribute.outputs["Geometry"], + "Offset": combine_xyz, + }, + ) + + separate_xyz_1 = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": vector_rotate_1} + ) + + move_to_origin = nw.new_node( + nodegroup_move_to_origin().name, input_kwargs={"Geometry": set_position} + ) + + apply_wave = nw.new_node( + nodegroup_apply_wave( + y_wave_control_points=kwargs["y_wave_control_points"], + x_wave_control_points=kwargs["x_wave_control_points"], + ).name, + input_kwargs={ + "Geometry": move_to_origin, + "Wave Scale X": 0.5, + "Wave Scale Y": 1.0, + "X Modulated": separate_xyz_1.outputs["X"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": apply_wave, "Vein": capture_attribute.outputs[2]}, + ) + + +class LeafFactoryMaple(AssetFactory): + scale = 0.5 + + def __init__(self, factory_seed, season="autumn", coarse=False): + super().__init__(factory_seed, coarse=coarse) + + with FixedSeed(factory_seed): + self.genome = self.sample_geo_genome() + + t = uniform(0.0, 1.0) + + if season == "autumn": + hsvcol_blade = [uniform(0.0, 0.20), 0.85, 0.9] + hsvcol_vein = np.copy(hsvcol_blade) + hsvcol_vein[2] = 0.7 + + elif season == "summer" or season == "spring": + hsvcol_blade = [uniform(0.28, 0.32), uniform(0.6, 0.7), 0.9] + hsvcol_vein = np.copy(hsvcol_blade) + hsvcol_blade[2] = uniform(0.1, 0.5) + hsvcol_vein[2] = uniform(0.1, 0.5) + + elif season == "winter": + hsvcol_blade = [ + uniform(0.0, 0.10), + uniform(0.2, 0.6), + uniform(0.0, 0.1), + ] + hsvcol_vein = [uniform(0.0, 0.10), uniform(0.2, 0.6), uniform(0.0, 0.1)] + + else: + raise NotImplementedError + + self.blade_color = hsvcol_blade + self.vein_color = hsvcol_vein + + self.color_randomness = uniform(0.05, 0.10) + + # if t < 0.5: + # self.blade_color = np.array((0.2346, 0.4735, 0.0273, 1.0)) + # else: + # self.blade_color = np.array((1.000, 0.855, 0.007, 1.0)) + + @staticmethod + def sample_geo_genome(): + return { + "midrib_length": uniform(0.0, 0.8), + "midrib_width": uniform(0.5, 1.0), + "stem_length": uniform(0.7, 0.9), + "vein_asymmetry": uniform(0.0, 1.0), + "vein_angle": uniform(0.2, 2.0), + "vein_density": uniform(5.0, 20.0), + "subvein_scale": uniform(10.0, 20.0), + "jigsaw_scale": uniform(5.0, 20.0), + "jigsaw_depth": uniform(0.0, 2.0), + "midrib_shape_control_points": [ + (0.0, 0.5), + (0.25, uniform(0.48, 0.52)), + (0.75, uniform(0.48, 0.52)), + (1.0, 0.5), + ], + "leaf_shape_control_points": [ + (0.0, 0.0), + (uniform(0.2, 0.4), uniform(0.1, 0.4)), + (uniform(0.6, 0.8), uniform(0.1, 0.4)), + (1.0, 0.0), + ], + "vein_shape_control_points": [ + (0.0, 0.0), + (0.25, uniform(0.1, 0.4)), + (0.75, uniform(0.6, 0.9)), + (1.0, 1.0), + ], + } + + def create_asset(self, **params): + bpy.ops.mesh.primitive_plane_add( + size=4, + enter_editmode=False, + align="WORLD", + location=(0, 0, 0), + scale=(1, 1, 1), + ) + obj = bpy.context.active_object + + # add noise to the genotype output + # hue_noise = np.random.randn() * 0 + # hsv_blade = self.hsv_blade + hue_noise + # hsv_vein = self.hsv_vein + hue_noise + + phenome = self.genome.copy() + + phenome["y_wave_control_points"] = [ + (0.0, 0.5), + (uniform(0.25, 0.75), uniform(0.50, 0.60)), + (1.0, 0.5), + ] + x_wave_val = np.random.uniform(0.50, 0.58) + 
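        # The x-wave control points below are symmetric about 0.5, with matching
        # bumps of height x_wave_val at 0.4 and 0.6, so the blade curls equally on
        # both sides of the midrib; the y-wave points sampled above give a single
        # gentle bend along the leaf's length. Both are re-sampled on every
        # create_asset call and applied later by nodegroup_apply_wave, so each
        # leaf gets slightly different curvature.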
phenome["x_wave_control_points"] = [ + (0.0, 0.5), + (0.4, x_wave_val), + (0.5, 0.5), + (0.6, x_wave_val), + (1.0, 0.5), + ] + + phenome["stem_curve_control_points"] = [ + (0.0, 0.5), + (uniform(0.2, 0.3), uniform(0.45, 0.55)), + (uniform(0.7, 0.8), uniform(0.45, 0.55)), + (1.0, 0.5), + ] + phenome["shape_curve_control_points"] = [ + (0.0, 0.0), + (0.523, 0.1156), + (0.5805, 0.7469), + (0.7742, 0.7719), + (0.9461, 0.7531), + (1.0, 0.0), + ] + phenome["vein_length"] = uniform(0.4, 0.5) + phenome["angle"] = uniform(-15.0, 15.0) + phenome["multiplier"] = uniform(1.92, 2.00) + + phenome["scale_vein"] = uniform(70.0, 90.0) + phenome["scale_wave"] = uniform(4.0, 6.0) + phenome["scale_margin"] = uniform(5.5, 7.5) + + material_kwargs = phenome.copy() + material_kwargs["color_base"] = np.copy( + self.blade_color + ) # (0.2346, 0.4735, 0.0273, 1.0), + material_kwargs["color_base"][0] += np.random.normal(0.0, 0.02) + material_kwargs["color_base"][1] += np.random.normal(0.0, self.color_randomness) + material_kwargs["color_base"][2] += np.random.normal(0.0, self.color_randomness) + material_kwargs["color_base"] = hsv2rgba(material_kwargs["color_base"]) + + material_kwargs["color_vein"] = np.copy( + self.vein_color + ) # (0.2346, 0.4735, 0.0273, 1.0), + material_kwargs["color_vein"][0] += np.random.normal(0.0, 0.02) + material_kwargs["color_vein"][1] += np.random.normal(0.0, self.color_randomness) + material_kwargs["color_vein"][2] += np.random.normal(0.0, self.color_randomness) + material_kwargs["color_vein"] = hsv2rgba(material_kwargs["color_vein"]) + + surface.add_geomod( + obj, geo_leaf_maple, apply=False, attributes=["vein"], input_kwargs=phenome + ) + surface.add_material( + obj, shader_material, reuse=False, input_kwargs=material_kwargs + ) + + bpy.ops.object.convert(target="MESH") + + obj = bpy.context.object + obj.scale *= normal(1, 0.1) * self.scale + butil.apply_transform(obj) + tag_object(obj, "leaf_maple") + + return obj diff --git a/infinigen/assets/objects/leaves/leaf_pine.py b/infinigen/assets/objects/leaves/leaf_pine.py new file mode 100644 index 000000000..e07ee8fe6 --- /dev/null +++ b/infinigen/assets/objects/leaves/leaf_pine.py @@ -0,0 +1,602 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
+ +# Authors: Yiming Zuo + + +from random import randint + +import bpy +from numpy.random import normal, uniform + +from infinigen.core import surface +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.tagging import tag_object +from infinigen.core.util import blender as butil + +######## code for creating pine needles ######## + + +def shader_needle(nw): + # Code generated using version 2.3.1 of the node_transpiler + + velvet_bsdf = nw.new_node( + "ShaderNodeBsdfVelvet", input_kwargs={"Color": (0.016, 0.2241, 0.0252, 1.0)} + ) + + glossy_bsdf = nw.new_node( + "ShaderNodeBsdfGlossy", + input_kwargs={"Color": (0.5771, 0.8, 0.5713, 1.0), "Roughness": 0.4}, + ) + + mix_shader = nw.new_node( + Nodes.MixShader, input_kwargs={"Fac": 0.3, 1: velvet_bsdf, 2: glossy_bsdf} + ) + + translucent_bsdf = nw.new_node( + Nodes.TranslucentBSDF, input_kwargs={"Color": (0.0116, 0.4409, 0.0262, 1.0)} + ) + + mix_shader_1 = nw.new_node( + Nodes.MixShader, input_kwargs={"Fac": 0.1, 1: mix_shader, 2: translucent_bsdf} + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": mix_shader_1} + ) + + +def geometry_needle(nw): + # Code generated using version 2.3.1 of the node_transpiler + + cone = nw.new_node( + "GeometryNodeMeshCone", + input_kwargs={ + "Vertices": 4, + "Radius Top": 0.01, + "Radius Bottom": 0.02, + "Depth": 1.0, + }, + ) + + set_material = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": cone.outputs["Mesh"], + "Material": surface.shaderfunc_to_material(shader_needle), + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_material} + ) + + +def apply_needle(obj, selection=None, **kwargs): + surface.add_geomod(obj, geometry_needle, selection=selection, attributes=[]) + + +def make_needle(name="Needle"): + if bpy.context.scene.objects.get(name): + return bpy.context.scene.objects.get(name) + + else: + bpy.ops.mesh.primitive_plane_add( + size=2, + enter_editmode=False, + align="WORLD", + location=(0, 0, 0), + scale=(1, 1, 1), + ) + needle = bpy.context.active_object + needle.name = name + apply_needle(needle) + + bpy.ops.object.convert(target="MESH") + + return needle + + +######## code for creating pine needles ######## + +######## code for creating pine twigs ######## + + +@node_utils.to_nodegroup( + "nodegroup_instance_needle", singleton=True, type="GeometryNodeTree" +) +def nodegroup_instance_needle(nw): + # Code generated using version 2.3.2 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Curve", None), + ("NodeSocketFloatFactor", "Needle Density", 0.9), + ("NodeSocketInt", "Seed", 0), + ("NodeSocketGeometry", "Instance", None), + ("NodeSocketFloat", "X Angle Mean", 0.5), + ("NodeSocketFloat", "X Angle Range", 0.0), + ], + ) + + spline_parameter_1 = nw.new_node("GeometryNodeSplineParameter") + + greater_than = nw.new_node( + Nodes.Compare, input_kwargs={0: spline_parameter_1.outputs["Factor"], 1: 0.1} + ) + + random_value_3 = nw.new_node( + Nodes.RandomValue, + input_kwargs={ + "Probability": group_input.outputs["Needle Density"], + "Seed": group_input.outputs["Seed"], + }, + attrs={"data_type": "BOOLEAN"}, + ) + + op_and = nw.new_node( + Nodes.BooleanMath, input_kwargs={0: greater_than, 1: random_value_3.outputs[3]} + ) + + curve_tangent = nw.new_node("GeometryNodeInputTangent") + + align_euler_to_vector = nw.new_node( 
+ Nodes.AlignEulerToVector, + input_kwargs={"Vector": curve_tangent}, + attrs={"axis": "Y"}, + ) + + random_value = nw.new_node( + Nodes.RandomValue, input_kwargs={2: 0.6, "Seed": group_input.outputs["Seed"]} + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": 0.8, "Y": 0.8, "Z": random_value.outputs[1]}, + ) + + value_1 = nw.new_node(Nodes.Value) + value_1.outputs[0].default_value = 0.3 + + multiply = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: combine_xyz, 1: value_1}, + attrs={"operation": "MULTIPLY"}, + ) + + instance_on_points = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={ + "Points": group_input.outputs["Curve"], + "Selection": op_and, + "Instance": group_input.outputs["Instance"], + "Rotation": align_euler_to_vector, + "Scale": multiply.outputs["Vector"], + }, + ) + + add = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["X Angle Mean"], + 1: group_input.outputs["X Angle Range"], + }, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["X Angle Mean"], + 1: group_input.outputs["X Angle Range"], + }, + attrs={"operation": "SUBTRACT"}, + ) + + random_value_2 = nw.new_node( + Nodes.RandomValue, + input_kwargs={2: add, 3: subtract, "Seed": group_input.outputs["Seed"]}, + ) + + radians = nw.new_node( + Nodes.Math, + input_kwargs={0: random_value_2.outputs[1]}, + attrs={"operation": "RADIANS"}, + ) + + random_value_1 = nw.new_node( + Nodes.RandomValue, input_kwargs={3: 360.0, "Seed": group_input.outputs["Seed"]} + ) + + radians_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: random_value_1.outputs[1]}, + attrs={"operation": "RADIANS"}, + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": radians, "Y": radians_1} + ) + + rotate_instances = nw.new_node( + "GeometryNodeRotateInstances", + input_kwargs={"Instances": instance_on_points, "Rotation": combine_xyz_1}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Instances": rotate_instances} + ) + + +@node_utils.to_nodegroup("nodegroup_needle5", singleton=True, type="GeometryNodeTree") +def nodegroup_needle5(nw): + # Code generated using version 2.3.2 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Curve", None), + ("NodeSocketGeometry", "Instance", None), + ("NodeSocketFloat", "X Angle Mean", 0.5), + ("NodeSocketFloat", "X Angle Range", 0.0), + ("NodeSocketFloatFactor", "Needle Density", 0.9), + ("NodeSocketInt", "Seed", 0), + ], + ) + + instanceneedle = nw.new_node( + nodegroup_instance_needle().name, + input_kwargs={ + "Curve": group_input.outputs["Curve"], + "Needle Density": group_input.outputs["Needle Density"], + "Seed": group_input.outputs["Seed"], + "Instance": group_input.outputs["Instance"], + "X Angle Mean": group_input.outputs["X Angle Mean"], + "X Angle Range": group_input.outputs["X Angle Range"], + }, + ) + + add = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Seed"], 1: 1.0}) + + instanceneedle_1 = nw.new_node( + nodegroup_instance_needle().name, + input_kwargs={ + "Curve": group_input.outputs["Curve"], + "Needle Density": group_input.outputs["Needle Density"], + "Seed": add, + "X Angle Mean": group_input.outputs["X Angle Mean"], + "X Angle Range": group_input.outputs["X Angle Range"], + }, + ) + + add_1 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["Seed"], 1: 2.0} + ) + + instanceneedle_2 = nw.new_node( + nodegroup_instance_needle().name, + input_kwargs={ + "Curve": 
group_input.outputs["Curve"], + "Needle Density": group_input.outputs["Needle Density"], + "Seed": add_1, + "Instance": group_input.outputs["Instance"], + "X Angle Mean": group_input.outputs["X Angle Mean"], + "X Angle Range": group_input.outputs["X Angle Range"], + }, + ) + + add_2 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["Seed"], 1: 3.0} + ) + + instanceneedle_3 = nw.new_node( + nodegroup_instance_needle().name, + input_kwargs={ + "Curve": group_input.outputs["Curve"], + "Needle Density": group_input.outputs["Needle Density"], + "Seed": add_2, + "Instance": group_input.outputs["Instance"], + "X Angle Mean": group_input.outputs["X Angle Mean"], + "X Angle Range": group_input.outputs["X Angle Range"], + }, + ) + + add_3 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["Seed"], 1: 4.0} + ) + + instanceneedle_4 = nw.new_node( + nodegroup_instance_needle().name, + input_kwargs={ + "Curve": group_input.outputs["Curve"], + "Needle Density": group_input.outputs["Needle Density"], + "Seed": add_3, + "Instance": group_input.outputs["Instance"], + "X Angle Mean": group_input.outputs["X Angle Mean"], + "X Angle Range": group_input.outputs["X Angle Range"], + }, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={ + "Geometry": [ + instanceneedle, + instanceneedle_1, + instanceneedle_2, + instanceneedle_3, + instanceneedle_4, + ] + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Instances": join_geometry} + ) + + +def shader_twig(nw): + # Code generated using version 2.3.2 of the node_transpiler + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": (0.08, 0.0329, 0.0414, 1.0), + "Specular": 0.0527, + "Roughness": 0.4491, + }, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf} + ) + + +@node_utils.to_nodegroup( + "nodegroup_pine_twig", singleton=False, type="GeometryNodeTree" +) +def nodegroup_pine_twig(nw): + # Code generated using version 2.3.2 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketIntUnsigned", "Resolution", 20), + ("NodeSocketFloat", "Middle Y", 0.0), + ("NodeSocketFloat", "Middle Z", 0.0), + ("NodeSocketFloatFactor", "Needle Density", 0.9), + ("NodeSocketGeometry", "Instance", None), + ("NodeSocketFloat", "X Angle Mean", 0.5), + ("NodeSocketFloat", "X Angle Range", 0.0), + ("NodeSocketInt", "Seed", 0), + ], + ) + + divide = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Resolution"], 1: 30.0}, + attrs={"operation": "DIVIDE"}, + ) + + divide_1 = nw.new_node( + Nodes.Math, input_kwargs={0: divide, 1: 2.0}, attrs={"operation": "DIVIDE"} + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": group_input.outputs["Middle Y"], + "Y": divide_1, + "Z": group_input.outputs["Middle Z"], + }, + ) + + combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Y": divide}) + + quadratic_bezier = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Resolution": group_input.outputs["Resolution"], + "Start": (0.0, 0.0, 0.0), + "Middle": combine_xyz, + "End": combine_xyz_1, + }, + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, input_kwargs={"W": -1.7}, attrs={"noise_dimensions": "4D"} + ) + + value = nw.new_node(Nodes.Value) + value.outputs[0].default_value = 0.5 + + subtract = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: noise_texture.outputs["Color"], 1: value}, + attrs={"operation": "SUBTRACT"}, + 
) + + spline_parameter = nw.new_node("GeometryNodeSplineParameter") + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: spline_parameter.outputs["Factor"], 1: 0.1}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: subtract.outputs["Vector"], 1: multiply}, + attrs={"operation": "MULTIPLY"}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": quadratic_bezier, + "Offset": multiply_1.outputs["Vector"], + }, + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": spline_parameter.outputs["Factor"], 3: 1.0, 4: 0.0}, + ) + + power = nw.new_node( + Nodes.Math, + input_kwargs={0: 2.0, 1: map_range.outputs["Result"]}, + attrs={"operation": "POWER"}, + ) + + set_curve_radius = nw.new_node( + Nodes.SetCurveRadius, input_kwargs={"Curve": set_position, "Radius": power} + ) + + curve_circle = nw.new_node( + Nodes.CurveCircle, input_kwargs={"Resolution": 16, "Radius": 0.01} + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": set_curve_radius, + "Profile Curve": curve_circle.outputs["Curve"], + "Fill Caps": True, + }, + ) + + set_material = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": curve_to_mesh, + "Material": surface.shaderfunc_to_material(shader_twig), + }, + ) + + needle5 = nw.new_node( + nodegroup_needle5().name, + input_kwargs={ + "Curve": set_position, + "Instance": group_input.outputs["Instance"], + "X Angle Mean": group_input.outputs["X Angle Mean"], + "X Angle Range": group_input.outputs["X Angle Range"], + "Needle Density": group_input.outputs["Needle Density"], + "Seed": group_input.outputs["Seed"], + }, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [set_material, needle5]} + ) + + realize_instances = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": join_geometry} + ) + + set_shade_smooth = nw.new_node( + Nodes.SetShadeSmooth, + input_kwargs={"Geometry": realize_instances, "Shade Smooth": False}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_shade_smooth} + ) + + +def geometry_node_pine_twig( + nw, + needle_name="Needle", + length=30, + middle_y=0.0, + middle_z=0.0, + seed=0, + x_angle_mean=-50.0, + x_angle_range=10.0, +): + # Code generated using version 2.3.2 of the node_transpiler + + object_info = nw.new_node( + Nodes.ObjectInfo, input_kwargs={"Object": bpy.data.objects[needle_name]} + ) + + pine_needle = nw.new_node( + nodegroup_pine_twig().name, + input_kwargs={ + "Resolution": length, + "Middle Y": middle_y, + "Middle Z": middle_z, + "Instance": object_info.outputs["Geometry"], + "X Angle Mean": x_angle_mean, + "X Angle Range": x_angle_range, + "Seed": seed, + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": pine_needle} + ) + + +def apply_twig(obj, selection=None, **kwargs): + surface.add_geomod( + obj, + geometry_node_pine_twig, + selection=selection, + attributes=[], + input_kwargs=kwargs, + ) + surface.add_material(obj, shader_twig, selection=selection) + + +def make_pine_twig(**kwargs): + bpy.ops.mesh.primitive_plane_add( + size=2, enter_editmode=False, align="WORLD", location=(0, 0, 0), scale=(1, 1, 1) + ) + twig = bpy.context.active_object + twig.name = "Twig" + apply_twig(twig, **kwargs) + + # bpy.ops.object.convert(target='MESH') + + return twig + + +class LeafFactoryPine(AssetFactory): + scale = 0.7 + + def __init__(self, factory_seed, season="autumn", coarse=False): + 
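        # __init__ builds (or reuses, via make_needle) a single hidden "Needle"
        # template object; every twig this factory generates instances its needles
        # from that one mesh, so the cone geometry is only constructed once per
        # scene. The `season` argument is not referenced anywhere in this factory.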
super(LeafFactoryPine, self).__init__(factory_seed, coarse=coarse) + self.needle = make_needle("Needle") + self.needle.hide_viewport = True + self.needle.hide_render = True + + def create_asset(self, **params): + # with FixedSeed(self.factory_seed): + seed = randint(0, 1e6) + middle_y = normal(0.0, 0.1) + middle_z = normal(0.0, 0.1) + length = randint(25, 35) + x_angle_mean = uniform(-40, -60) + + obj = make_pine_twig( + needle_name="Needle", + length=length, + middle_y=middle_y, + middle_z=middle_z, + seed=seed, + x_angle_mean=x_angle_mean, + x_angle_range=10.0, + ) + + bpy.ops.object.convert(target="MESH") + + obj = bpy.context.object + obj.scale *= normal(1, 0.05) * self.scale + butil.apply_transform(obj) + butil.purge_empty_materials(obj) # TODO remove when geonodes emptymats solved + tag_object(obj, "leaf_pine") + + return obj diff --git a/infinigen/assets/objects/leaves/leaf_v2.py b/infinigen/assets/objects/leaves/leaf_v2.py new file mode 100644 index 000000000..8f503a94b --- /dev/null +++ b/infinigen/assets/objects/leaves/leaf_v2.py @@ -0,0 +1,1693 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Yiming Zuo + + +import logging + +import bpy +import numpy as np +from numpy.random import normal, uniform + +from infinigen.core import surface +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.tagging import tag_object +from infinigen.core.util import blender as butil +from infinigen.core.util.color import color_category +from infinigen.core.util.math import FixedSeed + + +@node_utils.to_nodegroup( + "shader_nodegroup_sub_vein", singleton=False, type="ShaderNodeTree" +) +def shader_nodegroup_sub_vein(nw): + # Code generated using version 2.3.2 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "X Modulated", 0.5), + ("NodeSocketFloat", "Y", 0.0), + ], + ) + + absolute = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["X Modulated"]}, + attrs={"operation": "ABSOLUTE", "use_clamp": True}, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": absolute, "Y": group_input.outputs["Y"]} + ) + + voronoi_texture = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={"Vector": combine_xyz, "Scale": 30.0, "Randomness": 0.754}, + attrs={"feature": "DISTANCE_TO_EDGE"}, + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": voronoi_texture.outputs["Distance"], 2: 0.1, 4: 3.0}, + ) + + voronoi_texture_1 = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={"Vector": combine_xyz, "Scale": 10.0, "Randomness": 0.754}, + attrs={"feature": "DISTANCE_TO_EDGE"}, + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": voronoi_texture_1.outputs["Distance"], 2: 0.1, 4: 3.0}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: map_range.outputs["Result"], 1: map_range_1.outputs["Result"]}, + attrs={"operation": "MULTIPLY"}, + ) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Value": multiply}) + + +@node_utils.to_nodegroup( + "shader_nodegroup_midrib", singleton=False, type="ShaderNodeTree" +) +def shader_nodegroup_midrib( + nw, + midrib_curve_control_points=[ + (0.0, 0.5), + (0.2809, 0.4868), + (0.7448, 0.5164), + (1.0, 0.5), + ], +): + # Code generated using version 2.3.2 of the 
node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "X", 0.5), + ("NodeSocketFloat", "Y", -0.6), + ("NodeSocketFloat", "Midrib Length", 0.4), + ("NodeSocketFloat", "Midrib Width", 1.0), + ("NodeSocketFloat", "Stem Length", 0.8), + ], + ) + + map_range_6 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": group_input.outputs["Y"], 1: -0.6, 2: 0.6}, + ) + + stem_shape = nw.new_node( + Nodes.FloatCurve, + input_kwargs={"Value": map_range_6.outputs["Result"]}, + label="Stem shape", + ) + node_utils.assign_curve(stem_shape.mapping.curves[0], midrib_curve_control_points) + + map_range_7 = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": stem_shape, 3: -1.0} + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: map_range_7.outputs["Result"], 1: group_input.outputs["X"]}, + attrs={"operation": "SUBTRACT"}, + ) + + map_range_8 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": group_input.outputs["Y"], + 1: -70.0, + 2: group_input.outputs["Midrib Length"], + 3: group_input.outputs["Midrib Width"], + 4: 0.0, + }, + ) + + absolute = nw.new_node( + Nodes.Math, input_kwargs={0: subtract}, attrs={"operation": "ABSOLUTE"} + ) + + subtract_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: map_range_8.outputs["Result"], 1: absolute}, + attrs={"operation": "SUBTRACT"}, + ) + + absolute_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Y"]}, + attrs={"operation": "ABSOLUTE"}, + ) + + map_range_9 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": absolute_1, + 2: group_input.outputs["Stem Length"], + 3: 1.0, + 4: 0.0, + }, + ) + + smooth_min = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract_1, 1: map_range_9.outputs["Result"], 2: 0.06}, + attrs={"operation": "SMOOTH_MIN"}, + ) + + divide = nw.new_node( + Nodes.Math, + input_kwargs={0: map_range_8.outputs["Result"], 1: smooth_min}, + attrs={"operation": "DIVIDE", "use_clamp": True}, + ) + + map_range_11 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": divide, 1: 0.001, 2: 0.03, 3: 1.0, 4: 0.0}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "X Modulated": subtract, + "Midrib Value": map_range_11.outputs["Result"], + }, + ) + + +@node_utils.to_nodegroup( + "shader_nodegroup_vein_coord", singleton=False, type="ShaderNodeTree" +) +def shader_nodegroup_vein_coord( + nw, + vein_curve_control_points=[ + (0.0, 0.0), + (0.3608, 0.2434), + (0.7454, 0.4951), + (1.0, 1.0), + ], +): + # Code generated using version 2.3.2 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "X Modulated", 0.5), + ("NodeSocketFloat", "Y", 0.5), + ("NodeSocketFloat", "Vein Asymmetry", 0.0), + ("NodeSocketFloat", "Vein Angle", 2.0), + ], + ) + + sign = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["X Modulated"]}, + attrs={"operation": "SIGN"}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: sign, 1: group_input.outputs["Vein Asymmetry"]}, + attrs={"operation": "MULTIPLY"}, + ) + + map_range_13 = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": group_input.outputs["Y"], 1: -1.0} + ) + + absolute = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["X Modulated"]}, + attrs={"operation": "ABSOLUTE", "use_clamp": True}, + ) + + vein__shape = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": absolute}, label="Vein Shape" + ) + node_utils.assign_curve(vein__shape.mapping.curves[0], vein_curve_control_points) + + 
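    # The remaining math folds three terms into one scalar "Vein Coord":
    # a signed offset sign(X) * Vein Asymmetry that staggers veins between the
    # two halves of the blade, a slope term (remapped Y) * (vein-shape curve) *
    # Vein Angle that makes the veins sweep toward the tip, and a -Y term.
    # Downstream this scalar is used as the W input of a 1D Voronoi texture,
    # whose thresholded distance field produces roughly evenly spaced stripes
    # that read as the secondary veins.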
map_range_4 = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": vein__shape, 2: 0.9, 4: 1.9} + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: map_range_4.outputs["Result"], + 1: group_input.outputs["Vein Angle"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: map_range_13.outputs["Result"], 1: multiply_1}, + attrs={"operation": "MULTIPLY"}, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_2, 1: group_input.outputs["Y"]}, + attrs={"operation": "SUBTRACT"}, + ) + + add = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: subtract}) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Vein Coord": add}) + + +@node_utils.to_nodegroup( + "shader_nodegroup_shape", singleton=False, type="ShaderNodeTree" +) +def shader_nodegroup_shape( + nw, shape_curve_control_points=[(0.0, 0.0), (0.3454, 0.2336), (1.0, 0.0)] +): + # Code generated using version 2.3.2 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "X Modulated", 0.0), + ("NodeSocketFloat", "Y", 0.0), + ], + ) + + combine_xyz_2 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": group_input.outputs["X Modulated"], + "Y": group_input.outputs["Y"], + }, + ) + + clamp = nw.new_node( + "ShaderNodeClamp", + input_kwargs={"Value": group_input.outputs["Y"], "Min": -0.6, "Max": 0.6}, + ) + + combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Y": clamp}) + + subtract = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: combine_xyz_2, 1: combine_xyz_1}, + attrs={"operation": "SUBTRACT"}, + ) + + length = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: subtract.outputs["Vector"]}, + attrs={"operation": "LENGTH"}, + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": group_input.outputs["Y"], 1: -0.6, 2: 0.6}, + ) + + leaf_shape = nw.new_node( + Nodes.FloatCurve, + input_kwargs={"Value": map_range_1.outputs["Result"]}, + label="Leaf shape", + ) + node_utils.assign_curve(leaf_shape.mapping.curves[0], shape_curve_control_points) + + subtract_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: length.outputs["Value"], 1: leaf_shape}, + attrs={"operation": "SUBTRACT"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Leaf Shape": subtract_1} + ) + + +@node_utils.to_nodegroup( + "shader_nodegroup_apply_vein_midrib", singleton=False, type="ShaderNodeTree" +) +def shader_nodegroup_apply_vein_midrib(nw): + # Code generated using version 2.3.2 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "Vein Coord", 0.0), + ("NodeSocketFloat", "Midrib Value", 0.5), + ("NodeSocketFloat", "Leaf Shape", 1.0), + ("NodeSocketFloat", "Vein Density", 6.0), + ], + ) + + map_range_5 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": group_input.outputs["Leaf Shape"], + 1: -0.3, + 2: 0.0, + 3: 0.015, + 4: 0.0, + }, + ) + + vein = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={ + "W": group_input.outputs["Vein Coord"], + "Scale": group_input.outputs["Vein Density"], + "Randomness": 0.2, + }, + label="Vein", + attrs={"voronoi_dimensions": "1D"}, + ) + + map_range_3 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": vein.outputs["Distance"], + 1: 0.001, + 2: 0.05, + 3: 1.0, + 4: 0.0, + }, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: map_range_5.outputs["Result"], + 1: map_range_3.outputs["Result"], + }, + attrs={"operation": 
"MULTIPLY"}, + ) + + map_range_10 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": multiply, 1: 0.001, 2: 0.03, 3: 1.0, 4: 0.0}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["Midrib Value"], + 1: map_range_10.outputs["Result"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Vein Value": multiply_1} + ) + + +@node_utils.to_nodegroup( + "shader_nodegroup_leaf_gen", singleton=False, type="ShaderNodeTree" +) +def shader_nodegroup_leaf_gen( + nw, + midrib_curve_control_points, + vein_curve_control_points, + shape_curve_control_points, +): + # Code generated using version 2.3.2 of the node_transpiler + input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Mesh", None), + ("NodeSocketFloat", "Displancement scale", 0.01), + ("NodeSocketFloat", "Vein Asymmetry", 0.8), + ("NodeSocketFloat", "Vein Density", 10.0), + ("NodeSocketFloat", "Jigsaw Scale", 18.0), + ("NodeSocketFloat", "Jigsaw Depth", 1.0), + ("NodeSocketFloat", "Vein Angle", 1.0), + ("NodeSocketFloat", "Sub-vein Displacement", 0.5), + ("NodeSocketFloat", "Sub-vein Scale", 20.0), + ("NodeSocketFloat", "Wave Displacement", 0.05), + ("NodeSocketFloat", "Midrib Length", 0.4), + ("NodeSocketFloat", "Midrib Width", 1.0), + ("NodeSocketFloat", "Stem Length", 0.8), + ], + ) + + coordinate = nw.new_node(Nodes.Attribute, attrs={"attribute_name": "coordinate"}) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": coordinate.outputs["Vector"]} + ) + + midrib = nw.new_node( + shader_nodegroup_midrib( + midrib_curve_control_points=midrib_curve_control_points + ).name, + input_kwargs={ + "X": separate_xyz.outputs["X"], + "Y": separate_xyz.outputs["Y"], + "Midrib Length": input.outputs["Midrib Length"], + "Midrib Width": input.outputs["Midrib Width"], + "Stem Length": input.outputs["Stem Length"], + }, + ) + + veincoord = nw.new_node( + shader_nodegroup_vein_coord( + vein_curve_control_points=vein_curve_control_points + ).name, + input_kwargs={ + "X Modulated": midrib.outputs["X Modulated"], + "Y": separate_xyz.outputs["Y"], + "Vein Asymmetry": input.outputs["Vein Asymmetry"], + "Vein Angle": input.outputs["Vein Angle"], + }, + ) + + shape = nw.new_node( + shader_nodegroup_shape( + shape_curve_control_points=shape_curve_control_points + ).name, + input_kwargs={ + "X Modulated": midrib.outputs["X Modulated"], + "Y": separate_xyz.outputs["Y"], + }, + ) + + applyveinmidrib = nw.new_node( + shader_nodegroup_apply_vein_midrib().name, + input_kwargs={ + "Vein Coord": veincoord, + "Midrib Value": midrib.outputs["Midrib Value"], + "Leaf Shape": shape, + "Vein Density": input.outputs["Vein Density"], + }, + ) + + subvein = nw.new_node( + shader_nodegroup_sub_vein().name, + input_kwargs={"X Modulated": midrib.outputs["X Modulated"], "Y": veincoord}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Vein Value": applyveinmidrib, "Sub Vein Value": subvein}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_shape_with_jigsaw", singleton=False, type="GeometryNodeTree" +) +def nodegroup_shape_with_jigsaw(nw): + # Code generated using version 2.3.2 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "Midrib Value", 1.0), + ("NodeSocketFloat", "Vein Coord", 0.0), + ("NodeSocketFloat", "Leaf Shape", 0.5), + ("NodeSocketFloat", "Jigsaw Scale", 18.0), + ("NodeSocketFloat", "Jigsaw Depth", 0.5), + ], + ) + + map_range_12 = 
nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": group_input.outputs["Midrib Value"], 3: 1.0, 4: 0.0}, + ) + + jigsaw = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={ + "W": group_input.outputs["Vein Coord"], + "Scale": group_input.outputs["Jigsaw Scale"], + }, + label="Jigsaw", + attrs={"voronoi_dimensions": "1D"}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Jigsaw Depth"], 1: 0.05}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_add = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: jigsaw.outputs["Distance"], + 1: multiply, + 2: group_input.outputs["Leaf Shape"], + }, + attrs={"operation": "MULTIPLY_ADD", "use_clamp": True}, + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": multiply_add, 1: 0.001, 2: 0.002, 3: 1.0, 4: 0.0}, + ) + + maximum = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: map_range_12.outputs["Result"], + 1: map_range.outputs["Result"], + }, + attrs={"operation": "MAXIMUM"}, + ) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Value": maximum}) + + +@node_utils.to_nodegroup("nodegroup_shape", singleton=False, type="GeometryNodeTree") +def nodegroup_shape( + nw, shape_curve_control_points=[(0.0, 0.0), (0.3454, 0.2336), (1.0, 0.0)] +): + # Code generated using version 2.3.2 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "X Modulated", 0.0), + ("NodeSocketFloat", "Y", 0.0), + ], + ) + + combine_xyz_2 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": group_input.outputs["X Modulated"], + "Y": group_input.outputs["Y"], + }, + ) + + clamp = nw.new_node( + "ShaderNodeClamp", + input_kwargs={"Value": group_input.outputs["Y"], "Min": -0.6, "Max": 0.6}, + ) + + combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Y": clamp}) + + subtract = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: combine_xyz_2, 1: combine_xyz_1}, + attrs={"operation": "SUBTRACT"}, + ) + + length = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: subtract.outputs["Vector"]}, + attrs={"operation": "LENGTH"}, + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": group_input.outputs["Y"], 1: -0.6, 2: 0.6}, + ) + + leaf_shape = nw.new_node( + Nodes.FloatCurve, + input_kwargs={"Value": map_range_1.outputs["Result"]}, + label="Leaf shape", + ) + node_utils.assign_curve(leaf_shape.mapping.curves[0], shape_curve_control_points) + + subtract_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: length.outputs["Value"], 1: leaf_shape}, + attrs={"operation": "SUBTRACT"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Leaf Shape": subtract_1} + ) + + +@node_utils.to_nodegroup("nodegroup_midrib", singleton=False, type="GeometryNodeTree") +def nodegroup_midrib( + nw, + midrib_curve_control_points=[ + (0.0, 0.5), + (0.2809, 0.4868), + (0.7448, 0.5164), + (1.0, 0.5), + ], +): + # Code generated using version 2.3.2 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "X", 0.5), + ("NodeSocketFloat", "Y", -0.6), + ("NodeSocketFloat", "Midrib Length", 0.4), + ("NodeSocketFloat", "Midrib Width", 1.0), + ("NodeSocketFloat", "Stem Length", 0.8), + ], + ) + + map_range_6 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": group_input.outputs["Y"], 1: -0.6, 2: 0.6}, + ) + + stem_shape = nw.new_node( + Nodes.FloatCurve, + input_kwargs={"Value": map_range_6.outputs["Result"]}, + label="Stem shape", + ) + 
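    # The control points assigned on the next line let the midrib drift sideways
    # as Y runs from base to tip; "X Modulated" computed below is then the signed
    # horizontal offset from that curved midrib rather than from a straight
    # center line.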
node_utils.assign_curve(stem_shape.mapping.curves[0], midrib_curve_control_points) + + map_range_7 = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": stem_shape, 3: -1.0} + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: map_range_7.outputs["Result"], 1: group_input.outputs["X"]}, + attrs={"operation": "SUBTRACT"}, + ) + + map_range_8 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": group_input.outputs["Y"], + 1: -70.0, + 2: group_input.outputs["Midrib Length"], + 3: group_input.outputs["Midrib Width"], + 4: 0.0, + }, + ) + + absolute = nw.new_node( + Nodes.Math, input_kwargs={0: subtract}, attrs={"operation": "ABSOLUTE"} + ) + + subtract_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: map_range_8.outputs["Result"], 1: absolute}, + attrs={"operation": "SUBTRACT"}, + ) + + absolute_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Y"]}, + attrs={"operation": "ABSOLUTE"}, + ) + + map_range_9 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": absolute_1, + 2: group_input.outputs["Stem Length"], + 3: 1.0, + 4: 0.0, + }, + ) + + smooth_min = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract_1, 1: map_range_9.outputs["Result"], 2: 0.06}, + attrs={"operation": "SMOOTH_MIN"}, + ) + + divide = nw.new_node( + Nodes.Math, + input_kwargs={0: map_range_8.outputs["Result"], 1: smooth_min}, + attrs={"operation": "DIVIDE", "use_clamp": True}, + ) + + map_range_11 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": divide, 1: 0.001, 2: 0.03, 3: 1.0, 4: 0.0}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "X Modulated": subtract, + "Midrib Value": map_range_11.outputs["Result"], + }, + ) + + +@node_utils.to_nodegroup( + "nodegroup_vein_coord", singleton=False, type="GeometryNodeTree" +) +def nodegroup_vein_coord( + nw, + vein_curve_control_points=[ + (0.0, 0.0), + (0.3608, 0.2434), + (0.7454, 0.4951), + (1.0, 1.0), + ], +): + # Code generated using version 2.3.2 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "X Modulated", 0.5), + ("NodeSocketFloat", "Y", 0.5), + ("NodeSocketFloat", "Vein Asymmetry", 0.0), + ("NodeSocketFloat", "Vein Angle", 2.0), + ], + ) + + sign = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["X Modulated"]}, + attrs={"operation": "SIGN"}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: sign, 1: group_input.outputs["Vein Asymmetry"]}, + attrs={"operation": "MULTIPLY"}, + ) + + map_range_13 = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": group_input.outputs["Y"], 1: -1.0} + ) + + absolute = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["X Modulated"]}, + attrs={"operation": "ABSOLUTE", "use_clamp": True}, + ) + + vein__shape = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": absolute}, label="Vein Shape" + ) + node_utils.assign_curve(vein__shape.mapping.curves[0], vein_curve_control_points) + + map_range_4 = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": vein__shape, 2: 0.9, 4: 1.9} + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: map_range_4.outputs["Result"], + 1: group_input.outputs["Vein Angle"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: map_range_13.outputs["Result"], 1: multiply_1}, + attrs={"operation": "MULTIPLY"}, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_2, 1: group_input.outputs["Y"]}, + attrs={"operation": 
"SUBTRACT"}, + ) + + add = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: subtract}) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Vein Coord": add}) + + +@node_utils.to_nodegroup( + "nodegroup_apply_vein_midrib", singleton=False, type="GeometryNodeTree" +) +def nodegroup_apply_vein_midrib(nw): + # Code generated using version 2.3.2 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "Vein Coord", 0.0), + ("NodeSocketFloat", "Midrib Value", 0.5), + ("NodeSocketFloat", "Leaf Shape", 1.0), + ("NodeSocketFloat", "Vein Density", 6.0), + ], + ) + + map_range_5 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": group_input.outputs["Leaf Shape"], + 1: -0.3, + 2: 0.0, + 3: 0.015, + 4: 0.0, + }, + ) + + vein = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={ + "W": group_input.outputs["Vein Coord"], + "Scale": group_input.outputs["Vein Density"], + "Randomness": 0.2, + }, + label="Vein", + attrs={"voronoi_dimensions": "1D"}, + ) + + map_range_3 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": vein.outputs["Distance"], + 1: 0.001, + 2: 0.05, + 3: 1.0, + 4: 0.0, + }, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: map_range_5.outputs["Result"], + 1: map_range_3.outputs["Result"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + map_range_10 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": multiply, 1: 0.001, 2: 0.01, 3: 1.0, 4: 0.0}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["Midrib Value"], + 1: map_range_10.outputs["Result"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Vein Value": multiply_1} + ) + + +@node_utils.to_nodegroup("nodegroup_leaf_gen", singleton=False, type="GeometryNodeTree") +def nodegroup_leaf_gen( + nw, + midrib_curve_control_points, + vein_curve_control_points, + shape_curve_control_points, +): + # Code generated using version 2.3.2 of the node_transpiler + + geometry = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Mesh", None), + ("NodeSocketFloat", "Displancement scale", 0.5), + ("NodeSocketFloat", "Vein Asymmetry", 0.0), + ("NodeSocketFloat", "Vein Density", 6.0), + ("NodeSocketFloat", "Jigsaw Scale", 18.0), + ("NodeSocketFloat", "Jigsaw Depth", 0.07), + ("NodeSocketFloat", "Vein Angle", 1.0), + ("NodeSocketFloat", "Sub-vein Displacement", 0.5), + ("NodeSocketFloat", "Sub-vein Scale", 50.0), + ("NodeSocketFloat", "Wave Displacement", 0.1), + ("NodeSocketFloat", "Midrib Length", 0.4), + ("NodeSocketFloat", "Midrib Width", 1.0), + ("NodeSocketFloat", "Stem Length", 0.8), + ], + ) + + position = nw.new_node(Nodes.InputPosition) + + separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": position}) + + midrib = nw.new_node( + nodegroup_midrib(midrib_curve_control_points=midrib_curve_control_points).name, + input_kwargs={ + "X": separate_xyz.outputs["X"], + "Y": separate_xyz.outputs["Y"], + "Midrib Length": geometry.outputs["Midrib Length"], + "Midrib Width": geometry.outputs["Midrib Width"], + "Stem Length": geometry.outputs["Stem Length"], + }, + ) + + veincoord = nw.new_node( + nodegroup_vein_coord(vein_curve_control_points=vein_curve_control_points).name, + input_kwargs={ + "X Modulated": midrib.outputs["X Modulated"], + "Y": separate_xyz.outputs["Y"], + "Vein Asymmetry": geometry.outputs["Vein Asymmetry"], + "Vein Angle": geometry.outputs["Vein Angle"], + }, + ) + + shape = 
nw.new_node( + nodegroup_shape(shape_curve_control_points=shape_curve_control_points).name, + input_kwargs={ + "X Modulated": midrib.outputs["X Modulated"], + "Y": separate_xyz.outputs["Y"], + }, + ) + + applyveinmidrib = nw.new_node( + nodegroup_apply_vein_midrib().name, + input_kwargs={ + "Vein Coord": veincoord, + "Midrib Value": midrib.outputs["Midrib Value"], + "Leaf Shape": shape, + "Vein Density": geometry.outputs["Vein Density"], + }, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: geometry.outputs["Displancement scale"], 1: applyveinmidrib}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply}) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={"Geometry": geometry.outputs["Mesh"], "Offset": combine_xyz}, + ) + + shapewithjigsaw = nw.new_node( + nodegroup_shape_with_jigsaw().name, + input_kwargs={ + "Midrib Value": midrib.outputs["Midrib Value"], + "Vein Coord": veincoord, + "Leaf Shape": shape, + "Jigsaw Scale": geometry.outputs["Jigsaw Scale"], + "Jigsaw Depth": geometry.outputs["Jigsaw Depth"], + }, + ) + + less_than = nw.new_node( + Nodes.Compare, + input_kwargs={0: shapewithjigsaw, 1: 0.5}, + attrs={"operation": "LESS_THAN"}, + ) + + delete_geometry = nw.new_node( + "GeometryNodeDeleteGeometry", + input_kwargs={"Geometry": set_position, "Selection": less_than}, + ) + + capture_attribute = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={"Geometry": delete_geometry, 2: applyveinmidrib}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Mesh": capture_attribute, + "Attribute": capture_attribute.outputs[2], + "X Modulated": midrib.outputs["X Modulated"], + "Vein Coord": veincoord, + }, + ) + + +@node_utils.to_nodegroup("nodegroup_sub_vein", singleton=False, type="GeometryNodeTree") +def nodegroup_sub_vein(nw): + # Code generated using version 2.3.2 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[("NodeSocketFloat", "X", 0.5), ("NodeSocketFloat", "Y", 0.0)], + ) + + absolute = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["X"]}, + attrs={"operation": "ABSOLUTE", "use_clamp": True}, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": absolute, "Y": group_input.outputs["Y"]} + ) + + voronoi_texture = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={"Vector": combine_xyz, "Scale": 30.0, "Randomness": 0.754}, + attrs={"feature": "DISTANCE_TO_EDGE"}, + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": voronoi_texture.outputs["Distance"], 2: 0.1}, + ) + + voronoi_texture_1 = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={"Vector": combine_xyz, "Scale": 10.0, "Randomness": 0.754}, + attrs={"feature": "DISTANCE_TO_EDGE"}, + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": voronoi_texture_1.outputs["Distance"], 2: 0.1}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: map_range.outputs["Result"], 1: map_range_1.outputs["Result"]}, + attrs={"operation": "MULTIPLY"}, + ) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Value": multiply}) + + +@node_utils.to_nodegroup( + "nodegroup_add_noise", singleton=False, type="GeometryNodeTree" +) +def nodegroup_add_noise(nw): + # Code generated using version 2.3.2 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketFloat", "Displacement", 0.05), + 
("NodeSocketFloat", "Scale", 10.0), + ], + ) + + position_1 = nw.new_node(Nodes.InputPosition) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={"Vector": position_1, "Scale": group_input.outputs["Scale"]}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: noise_texture.outputs["Fac"], + 1: group_input.outputs["Displacement"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply}) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + "Offset": combine_xyz, + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_position} + ) + + +@node_utils.to_nodegroup( + "nodegroup_apply_wave", singleton=False, type="GeometryNodeTree" +) +def nodegroup_apply_wave(nw, y_wave_control_points, x_wave_control_points): + # Code generated using version 2.3.2 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketFloat", "Wave Scale Y", 1.0), + ("NodeSocketFloat", "Wave Scale X", 1.0), + ("NodeSocketFloat", "X Modulated", None), + ], + ) + + position = nw.new_node(Nodes.InputPosition) + + separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": position}) + + position_1 = nw.new_node(Nodes.InputPosition) + + separate_xyz_1 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": position_1}) + + attribute_statistic = nw.new_node( + Nodes.AttributeStatistic, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + 2: separate_xyz_1.outputs["Y"], + }, + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": separate_xyz.outputs["Y"], + 1: attribute_statistic.outputs["Min"], + 2: attribute_statistic.outputs["Max"], + }, + ) + + float_curves = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": map_range.outputs["Result"]} + ) + node_utils.assign_curve(float_curves.mapping.curves[0], y_wave_control_points) + + map_range_2 = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": float_curves, 3: -1.0} + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: map_range_2.outputs["Result"], + 1: group_input.outputs["Wave Scale Y"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply}) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + "Offset": combine_xyz, + }, + ) + + attribute_statistic_1 = nw.new_node( + Nodes.AttributeStatistic, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + 2: group_input.outputs["X Modulated"], + }, + ) + + map_range_7 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": group_input.outputs["X Modulated"], + 1: attribute_statistic_1.outputs["Min"], + 2: attribute_statistic_1.outputs["Max"], + }, + ) + + float_curves_2 = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": map_range_7.outputs["Result"]} + ) + node_utils.assign_curve(float_curves_2.mapping.curves[0], x_wave_control_points) + float_curves_2.mapping.curves[0].points[2].handle_type = "VECTOR" + + map_range_4 = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": float_curves_2, 3: -1.0} + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: map_range_4.outputs["Result"], + 1: group_input.outputs["Wave Scale X"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_1 = 
nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply_1}) + + set_position_1 = nw.new_node( + Nodes.SetPosition, + input_kwargs={"Geometry": set_position, "Offset": combine_xyz_1}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_position_1} + ) + + +@node_utils.to_nodegroup( + "nodegroup_move_to_origin", singleton=False, type="GeometryNodeTree" +) +def nodegroup_move_to_origin(nw): + # Code generated using version 2.3.2 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) + + position = nw.new_node(Nodes.InputPosition) + + separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": position}) + + attribute_statistic = nw.new_node( + Nodes.AttributeStatistic, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + 2: separate_xyz.outputs["Y"], + }, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: 0.0, 1: attribute_statistic.outputs["Min"]}, + attrs={"operation": "SUBTRACT"}, + ) + + combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Y": subtract}) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + "Offset": combine_xyz, + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_position} + ) + + +@node_utils.to_nodegroup("nodegroup_blight", singleton=False, type="ShaderNodeTree") +def nodegroup_blight(nw): + # Code generated using version 2.3.2 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVector", "Coordinate", (0.0, 0.0, 0.0)), + ("NodeSocketColor", "Leaf Color", (0.5, 0.5, 0.5, 1.0)), + ("NodeSocketColor", "Blight Color", (0.5, 0.3992, 0.035, 1.0)), + ("NodeSocketFloat", "Random Seed", 18.3), + ("NodeSocketFloat", "Offset", 0.5), + ], + ) + + musgrave_texture = nw.new_node( + Nodes.MusgraveTexture, + input_kwargs={ + "Vector": group_input.outputs["Coordinate"], + "W": group_input.outputs["Random Seed"], + "Scale": 4.0, + "Detail": 10.0, + "Dimension": 10.0, + "Lacunarity": 5.0, + "Offset": group_input.outputs["Offset"], + }, + attrs={"musgrave_dimensions": "4D", "musgrave_type": "HETERO_TERRAIN"}, + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": musgrave_texture, 4: 0.8} + ) + + mix_4 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": map_range_1.outputs["Result"], + "Color1": group_input.outputs["Leaf Color"], + "Color2": group_input.outputs["Blight Color"], + }, + ) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Color": mix_4}) + + +@node_utils.to_nodegroup( + "nodegroup_dotted_blight", singleton=False, type="ShaderNodeTree" +) +def nodegroup_dotted_blight(nw): + # Code generated using version 2.3.2 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVector", "Coord", (0.0, 0.0, 0.0)), + ("NodeSocketColor", "Leaf Color", (0.5, 0.5, 0.5, 1.0)), + ("NodeSocketColor", "Blight Color", (0.4969, 0.2831, 0.0273, 1.0)), + ], + ) + + voronoi_texture = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={"Vector": group_input.outputs["Coord"], "Scale": 20.0}, + attrs={"voronoi_dimensions": "2D"}, + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": voronoi_texture.outputs["Distance"], + 2: 0.15, + 3: 1.0, + 4: 0.0, + }, + ) + + mix_5 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": map_range.outputs["Result"], + "Color1": 
group_input.outputs["Blight Color"], + "Color2": (0.0, 0.0, 0.0, 1.0), + }, + ) + + mix_3 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": map_range.outputs["Result"], + "Color1": group_input.outputs["Leaf Color"], + "Color2": mix_5, + }, + ) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Color": mix_3}) + + +def shader_leaf_new(nw, **kwargs): + # Code generated using version 2.3.2 of the node_transpiler + leafgen = nw.new_node( + shader_nodegroup_leaf_gen( + midrib_curve_control_points=kwargs["midrib_shape_control_points"], + vein_curve_control_points=kwargs["vein_shape_control_points"], + shape_curve_control_points=kwargs["leaf_shape_control_points"], + ).name, + input_kwargs={ + "Displancement scale": 0.01, + "Vein Asymmetry": kwargs["vein_asymmetry"], + "Vein Angle": kwargs["vein_angle"], + "Vein Density": kwargs["vein_density"], + "Jigsaw Scale": kwargs["jigsaw_scale"], + "Jigsaw Depth": kwargs["jigsaw_depth"], + "Midrib Length": kwargs["midrib_length"], + "Midrib Width": kwargs["midrib_width"], + "Stem Length": kwargs["stem_length"], + }, + ) + + rgb = nw.new_node(Nodes.RGB) + rgb.outputs[0].default_value = kwargs["blade_color"] + + mix = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": kwargs["vein_color_mix_factor"], + "Color1": rgb, + "Color2": (0.35, 0.35, 0.35, 1.0), + }, + ) + + mix_1 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": leafgen.outputs["Sub Vein Value"], + "Color1": mix, + "Color2": rgb, + }, + ) + + mix_2 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": leafgen.outputs["Vein Value"], + "Color1": mix, + "Color2": mix_1, + }, + ) + + texture_coordinate = nw.new_node(Nodes.TextureCoord) + + rgb_1 = nw.new_node(Nodes.RGB) + rgb_1.outputs[0].default_value = kwargs["blight_color"] + + group_1 = nw.new_node( + nodegroup_dotted_blight().name, + input_kwargs={ + "Coord": texture_coordinate.outputs["Generated"], + "Leaf Color": mix_2, + "Blight Color": rgb_1, + }, + ) + + mix_3 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": kwargs["dotted_blight_weight"], + "Color1": mix_2, + "Color2": group_1, + }, + ) + + group_2 = nw.new_node( + nodegroup_blight().name, + input_kwargs={ + "Coordinate": texture_coordinate.outputs["Generated"], + "Leaf Color": mix_3, + "Blight Color": rgb_1, + "Random Seed": kwargs["blight_random_seed"], + "Offset": kwargs["blight_area_factor"], + }, + ) + + mix_4 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": kwargs["blight_weight"], + "Color1": mix_3, + "Color2": group_2, + }, + ) + + translucent_bsdf = nw.new_node(Nodes.TranslucentBSDF, input_kwargs={"Color": mix_4}) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, input_kwargs={"Base Color": mix_4} + ) + + mix_shader = nw.new_node( + Nodes.MixShader, + input_kwargs={"Fac": 0.7, 1: translucent_bsdf, 2: principled_bsdf}, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": mix_shader} + ) + + +def geo_leaf_v2(nw, **kwargs): + # Code generated using version 2.3.2 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) + + subdivide_mesh = nw.new_node( + Nodes.SubdivideMesh, + input_kwargs={"Mesh": group_input.outputs["Geometry"], "Level": 10}, + ) + + position = nw.new_node(Nodes.InputPosition) + + capture_attribute = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={"Geometry": subdivide_mesh, 1: position}, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + leafgen = nw.new_node( + nodegroup_leaf_gen( + 
midrib_curve_control_points=kwargs["midrib_shape_control_points"], + vein_curve_control_points=kwargs["vein_shape_control_points"], + shape_curve_control_points=kwargs["leaf_shape_control_points"], + ).name, + input_kwargs={ + "Mesh": capture_attribute.outputs["Geometry"], + "Displancement scale": 0.005, + "Vein Asymmetry": kwargs["vein_asymmetry"], + "Vein Angle": kwargs["vein_angle"], + "Vein Density": kwargs["vein_density"], + "Jigsaw Scale": kwargs["jigsaw_scale"], + "Jigsaw Depth": kwargs["jigsaw_depth"], + "Midrib Length": kwargs["midrib_length"], + "Midrib Width": kwargs["midrib_width"], + "Stem Length": kwargs["stem_length"], + }, + ) + + # addnoise = nw.new_node(nodegroup_add_noise().name, + # input_kwargs={'Geometry': leafgen.outputs["Mesh"], 'Displacement': 0.03, 'Scale': 10.0}) + + subvein = nw.new_node( + nodegroup_sub_vein().name, + input_kwargs={ + "X": leafgen.outputs["X Modulated"], + "Y": leafgen.outputs["Vein Coord"], + }, + ) + + multiply = nw.new_node( + Nodes.Math, input_kwargs={0: subvein, 1: 0.001}, attrs={"operation": "MULTIPLY"} + ) + + combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply}) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={"Geometry": leafgen.outputs["Mesh"], "Offset": combine_xyz}, + ) + + logging.warning("Disabling set_position to avoid LeafV2 segfault") + set_position = leafgen.outputs["Mesh"] + + applywave = nw.new_node( + nodegroup_apply_wave( + y_wave_control_points=kwargs["y_wave_control_points"], + x_wave_control_points=kwargs["x_wave_control_points"], + ).name, + input_kwargs={ + "Geometry": set_position, + "Wave Scale X": 0.15, + "Wave Scale Y": 1.5, + "X Modulated": leafgen.outputs["X Modulated"], + }, + ) + + movetoorigin = nw.new_node( + nodegroup_move_to_origin().name, input_kwargs={"Geometry": applywave} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": movetoorigin, + "Attribute": leafgen.outputs["Attribute"], + "Coordinate": capture_attribute.outputs["Attribute"], + }, + ) + + +class LeafFactoryV2(AssetFactory): + scale = 0.5 + + def __init__(self, factory_seed, coarse=False): + super(LeafFactoryV2, self).__init__(factory_seed, coarse=coarse) + + with FixedSeed(factory_seed): + self.genome = self.sample_geo_genome() + + t = uniform(0.0, 1.0) + + if t < 0.8: + self.blade_color = color_category("greenery") + elif t < 0.9: + self.blade_color = color_category("yellowish") + else: + self.blade_color = color_category("red") + + self.blight_color = color_category("yellowish") + self.vein_color_mix_factor = uniform(0.2, 0.6) + + @staticmethod + def sample_geo_genome(): + return { + "midrib_length": uniform(0.0, 0.8), + "midrib_width": uniform(0.5, 1.0), + "stem_length": uniform(0.7, 0.9), + "vein_asymmetry": uniform(0.0, 1.0), + "vein_angle": uniform(0.2, 2.0), + "vein_density": uniform(5.0, 20.0), + "subvein_scale": uniform(10.0, 20.0), + "jigsaw_scale": uniform(5.0, 20.0), + "jigsaw_depth": uniform(0.0, 2.0), + "midrib_shape_control_points": [ + (0.0, 0.5), + (0.25, uniform(0.48, 0.52)), + (0.75, uniform(0.48, 0.52)), + (1.0, 0.5), + ], + "leaf_shape_control_points": [ + (0.0, 0.0), + (uniform(0.2, 0.4), uniform(0.1, 0.4)), + (uniform(0.6, 0.8), uniform(0.1, 0.4)), + (1.0, 0.0), + ], + "vein_shape_control_points": [ + (0.0, 0.0), + (0.25, uniform(0.1, 0.4)), + (0.75, uniform(0.6, 0.9)), + (1.0, 1.0), + ], + } + + def create_asset(self, **params): + bpy.ops.mesh.primitive_plane_add( + size=2, + enter_editmode=False, + align="WORLD", + location=(0, 0, 0), + 
scale=(1, 1, 1), + ) + obj = bpy.context.active_object + + # add noise to the genotype output + # hue_noise = np.random.randn() * 0 + # hsv_blade = self.hsv_blade + hue_noise + # hsv_vein = self.hsv_vein + hue_noise + + phenome = self.genome.copy() + + phenome["y_wave_control_points"] = [ + (0.0, 0.5), + (np.random.uniform(0.25, 0.75), np.random.uniform(0.50, 0.60)), + (1.0, 0.5), + ] + x_wave_val = np.random.uniform(0.50, 0.58) + phenome["x_wave_control_points"] = [ + (0.0, 0.5), + (0.4, x_wave_val), + (0.5, 0.5), + (0.6, x_wave_val), + (1.0, 0.5), + ] + + material_kwargs = phenome.copy() + material_kwargs["blade_color"] = self.blade_color + material_kwargs["blade_color"][0] += np.random.normal(0.0, 0.03) + material_kwargs["blade_color"][1] += np.random.normal(0.0, 0.03) + material_kwargs["blade_color"][2] += np.random.normal(0.0, 0.03) + + material_kwargs["blight_color"] = self.blight_color + + material_kwargs["vein_color_mix_factor"] = self.vein_color_mix_factor + material_kwargs["blight_weight"] = np.random.binomial(1, 0.1) + material_kwargs["dotted_blight_weight"] = np.random.binomial(1, 0.1) + material_kwargs["blight_random_seed"] = np.random.uniform(0.0, 100.0) + material_kwargs["blight_area_factor"] = np.random.uniform(0.2, 0.8) + + # TODO: add more phenome attributes + + surface.add_geomod( + obj, + geo_leaf_v2, + apply=False, + attributes=["offset", "coordinate"], + input_kwargs=phenome, + ) + surface.add_material( + obj, shader_leaf_new, reuse=False, input_kwargs=material_kwargs + ) + + bpy.ops.object.convert(target="MESH") + + obj = bpy.context.object + obj.scale *= normal(1, 0.05) * self.scale + butil.apply_transform(obj) + tag_object(obj, "leaf") + + return obj diff --git a/infinigen/assets/objects/leaves/leaf_wrapped.py b/infinigen/assets/objects/leaves/leaf_wrapped.py new file mode 100644 index 000000000..0f5919954 --- /dev/null +++ b/infinigen/assets/objects/leaves/leaf_wrapped.py @@ -0,0 +1,240 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
+ +# Authors: Yiming Zuo + + +import bpy +import numpy as np +from numpy.random import randint, uniform + +from infinigen.assets.objects.leaves.leaf_broadleaf import LeafFactoryBroadleaf +from infinigen.assets.objects.leaves.leaf_ginko import LeafFactoryGinko +from infinigen.assets.objects.leaves.leaf_maple import LeafFactoryMaple +from infinigen.core import surface +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.placement.factory import AssetFactory + + +def nodegroup_nodegroup_apply_wrap(nw: NodeWrangler, **kwargs): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) + + angle = nw.new_node(Nodes.Value, label="angle") + angle.outputs[0].default_value = kwargs["angle"] + + radians = nw.new_node( + Nodes.Math, input_kwargs={0: angle}, attrs={"operation": "RADIANS"} + ) + + combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": radians}) + + transform_2 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + "Rotation": combine_xyz_2, + }, + ) + + position_1 = nw.new_node(Nodes.InputPosition) + + separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": position_1}) + + rotation = nw.new_node(Nodes.Value, label="rotation") + rotation.outputs[0].default_value = kwargs["rotation"] + + value = nw.new_node(Nodes.Value) + value.outputs[0].default_value = 1.0 + + end_radius = nw.new_node(Nodes.Value, label="end_radius") + end_radius.outputs[0].default_value = kwargs["end_radius"] + + spiral = nw.new_node( + "GeometryNodeCurveSpiral", + input_kwargs={ + "Resolution": 1000, + "Rotations": rotation, + "Start Radius": value, + "End Radius": end_radius, + "Height": 0.0, + }, + ) + + curve_length = nw.new_node(Nodes.CurveLength, input_kwargs={"Curve": spiral}) + + position = nw.new_node(Nodes.InputPosition) + + separate_xyz_1 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": position}) + + attribute_statistic = nw.new_node( + Nodes.AttributeStatistic, + input_kwargs={"Geometry": transform_2, 2: separate_xyz_1.outputs["Y"]}, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: attribute_statistic.outputs["Max"], + 1: attribute_statistic.outputs["Min"], + }, + attrs={"operation": "SUBTRACT"}, + ) + + divide = nw.new_node( + Nodes.Math, + input_kwargs={0: curve_length, 1: subtract}, + attrs={"operation": "DIVIDE"}, + ) + + divide_1 = nw.new_node( + Nodes.Math, input_kwargs={0: value, 1: divide}, attrs={"operation": "DIVIDE"} + ) + + divide_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: end_radius, 1: divide}, + attrs={"operation": "DIVIDE"}, + ) + + spiral_1 = nw.new_node( + "GeometryNodeCurveSpiral", + input_kwargs={ + "Resolution": 1000, + "Rotations": rotation, + "Start Radius": divide_1, + "End Radius": divide_2, + "Height": 0.0, + }, + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": spiral_1, "Rotation": (0.0, 1.5708, 3.1416)}, + ) + + noise_texture = nw.new_node(Nodes.NoiseTexture, input_kwargs={"Scale": 2.0}) + + subtract_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: noise_texture.outputs["Color"], 1: (0.5, 0.5, 0.5)}, + attrs={"operation": "SUBTRACT"}, + ) + + noise_level = nw.new_node(Nodes.Value, label="noise_level") + noise_level.outputs[0].default_value = kwargs["noise_level"] + + multiply = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: subtract_1.outputs["Vector"], 1: noise_level}, + 
attrs={"operation": "MULTIPLY"}, + ) + + set_position_2 = nw.new_node( + Nodes.SetPosition, + input_kwargs={"Geometry": transform, "Offset": multiply.outputs["Vector"]}, + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": separate_xyz_1.outputs["Y"], + 1: attribute_statistic.outputs["Min"], + 2: attribute_statistic.outputs["Max"], + }, + ) + + sample_curve = nw.new_node( + Nodes.SampleCurve, + input_kwargs={"Curve": set_position_2, "Factor": map_range.outputs["Result"]}, + attrs={"mode": "FACTOR"}, + ) + + separate_xyz_2 = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": sample_curve.outputs["Position"]} + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": separate_xyz.outputs["X"], + "Y": separate_xyz_2.outputs["Y"], + "Z": separate_xyz_2.outputs["Z"], + }, + ) + + normalize = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: sample_curve.outputs["Position"]}, + attrs={"operation": "NORMALIZE"}, + ) + + multiply_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: separate_xyz.outputs["Z"], 1: normalize.outputs["Vector"]}, + attrs={"operation": "MULTIPLY"}, + ) + + add = nw.new_node( + Nodes.VectorMath, input_kwargs={0: combine_xyz, 1: multiply_1.outputs["Vector"]} + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={"Geometry": transform_2, "Position": add.outputs["Vector"]}, + ) + + subtract_2 = nw.new_node( + Nodes.Math, input_kwargs={0: 0.0, 1: radians}, attrs={"operation": "SUBTRACT"} + ) + + combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": subtract_2}) + + transform_3 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": set_position, "Rotation": combine_xyz_3}, + ) + + combine_xyz_4 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": divide_1}) + + transform_4 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": transform_3, "Translation": combine_xyz_4}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": transform_4} + ) + + +class LeafFactoryWrapped(AssetFactory): + def __init__(self, factory_seed, season="autumn", coarse=False): + super().__init__(factory_seed, coarse=coarse) + self.factory_list = [ + LeafFactoryMaple(factory_seed, season=season, coarse=coarse), + LeafFactoryBroadleaf(factory_seed, season=season, coarse=coarse), + LeafFactoryGinko(factory_seed, season=season, coarse=coarse), + ] + + def create_asset(self, **params): + fac_id = randint(len(self.factory_list)) + fac = self.factory_list[fac_id] + + wrap_params = { + "angle": uniform(-70, 70), + "rotation": uniform(0.2, 2.0), + "end_radius": np.exp(uniform(-2.0, 2.0)), + "noise_level": uniform(0.0, 0.5), + } + + obj = fac.create_asset() + surface.add_geomod( + obj, nodegroup_nodegroup_apply_wrap, apply=False, input_kwargs=wrap_params + ) + + bpy.ops.object.convert(target="MESH") + + return obj diff --git a/infinigen/assets/objects/mollusk/__init__.py b/infinigen/assets/objects/mollusk/__init__.py new file mode 100644 index 000000000..4ee6f9a6b --- /dev/null +++ b/infinigen/assets/objects/mollusk/__init__.py @@ -0,0 +1,29 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
+ +# Authors: Lingjie Mei + + +from .generate import ( + AugerFactory, + ClamFactory, + ConchFactory, + MolluskFactory, + MusselFactory, + NautilusFactory, + ScallopFactory, + VoluteFactory, +) +from .shell import ( + ClamBaseFactory, + MusselBaseFactory, + ScallopBaseFactory, + ShellBaseFactory, +) +from .snail import ( + AugerBaseFactory, + ConchBaseFactory, + NautilusBaseFactory, + SnailBaseFactory, + VoluteBaseFactory, +) diff --git a/infinigen/assets/mollusk/base.py b/infinigen/assets/objects/mollusk/base.py similarity index 86% rename from infinigen/assets/mollusk/base.py rename to infinigen/assets/objects/mollusk/base.py index d3528b188..50ba656b0 100644 --- a/infinigen/assets/mollusk/base.py +++ b/infinigen/assets/objects/mollusk/base.py @@ -10,8 +10,8 @@ class BaseMolluskFactory(AssetFactory): - max_expected_radius = .5 - noise_strength = .02 + max_expected_radius = 0.5 + noise_strength = 0.02 ratio = 1 x_scale = 2 z_scale = 1 @@ -21,4 +21,4 @@ def __init__(self, factory_seed, coarse=False): super(BaseMolluskFactory, self).__init__(factory_seed, coarse) def create_asset(self, **params) -> bpy.types.Object: - raise NotImplemented + raise NotImplementedError diff --git a/infinigen/assets/mollusk/generate.py b/infinigen/assets/objects/mollusk/generate.py similarity index 52% rename from infinigen/assets/mollusk/generate.py rename to infinigen/assets/objects/mollusk/generate.py index 103068df0..5320f15b4 100644 --- a/infinigen/assets/mollusk/generate.py +++ b/infinigen/assets/objects/mollusk/generate.py @@ -11,18 +11,30 @@ from numpy.random import uniform import infinigen.core.util.blender as butil -from .base import BaseMolluskFactory -from .shell import ShellBaseFactory, ScallopBaseFactory, ClamBaseFactory, MusselBaseFactory -from .snail import SnailBaseFactory, ConchBaseFactory, AugerBaseFactory, VoluteBaseFactory, NautilusBaseFactory -from infinigen.core.nodes.node_utils import build_color_ramp -from infinigen.core.util.random import log_uniform from infinigen.assets.utils.decorate import subsurface2face_size from infinigen.assets.utils.misc import assign_material -from infinigen.core.nodes.node_wrangler import NodeWrangler, Nodes from infinigen.core import surface +from infinigen.core.nodes.node_utils import build_color_ramp +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler from infinigen.core.placement.factory import AssetFactory +from infinigen.core.tagging import tag_object from infinigen.core.util.math import FixedSeed -from infinigen.core.tagging import tag_object, tag_nodegroup +from infinigen.core.util.random import log_uniform + +from .base import BaseMolluskFactory +from .shell import ( + ClamBaseFactory, + MusselBaseFactory, + ScallopBaseFactory, + ShellBaseFactory, +) +from .snail import ( + AugerBaseFactory, + ConchBaseFactory, + NautilusBaseFactory, + SnailBaseFactory, + VoluteBaseFactory, +) class MolluskFactory(AssetFactory): @@ -37,9 +49,14 @@ def __init__(self, factory_seed, coarse=False, factory_method=None): self.factory: BaseMolluskFactory = factory_method(factory_seed, coarse) base_hue = self.build_base_hue() - self.material = surface.shaderfunc_to_material(self.shader_mollusk, base_hue, self.factory.ratio, - self.factory.x_scale, self.factory.z_scale, - self.factory.distortion) + self.material = surface.shaderfunc_to_material( + self.shader_mollusk, + base_hue, + self.factory.ratio, + self.factory.x_scale, + self.factory.z_scale, + self.factory.distortion, + ) def create_asset(self, face_size=0.01, **params): obj = 
self.factory.create_asset(**params) @@ -48,55 +65,82 @@ def create_asset(self, face_size=0.01, **params): def decorate_mollusk(self, obj, face_size): subsurface2face_size(obj, face_size) - butil.modify_mesh(obj, 'SOLIDIFY', True, thickness=.005) - t = np.random.choice(['STUCCI', 'MARBLE']) - texture = bpy.data.textures.new(name='mollusk', type=t) - texture.noise_scale = log_uniform(.1, .2) - butil.modify_mesh(obj, 'DISPLACE', strength=self.factory.noise_strength, mid_level=0, texture=texture) + butil.modify_mesh(obj, "SOLIDIFY", True, thickness=0.005) + t = np.random.choice(["STUCCI", "MARBLE"]) + texture = bpy.data.textures.new(name="mollusk", type=t) + texture.noise_scale = log_uniform(0.1, 0.2) + butil.modify_mesh( + obj, + "DISPLACE", + strength=self.factory.noise_strength, + mid_level=0, + texture=texture, + ) assign_material(obj, self.material) - tag_object(obj, 'mollusk') + tag_object(obj, "mollusk") return obj @staticmethod def build_base_hue(): - if uniform(0, 1) < .4: - return uniform(0, .2) + if uniform(0, 1) < 0.4: + return uniform(0, 0.2) else: - return uniform(.05, .12) + return uniform(0.05, 0.12) @staticmethod - def shader_mollusk(nw: NodeWrangler, base_hue, ratio=0, x_scale=2, z_scale=1, distortion=5): - roughness = uniform(.2, .8) - specular = .3 + def shader_mollusk( + nw: NodeWrangler, base_hue, ratio=0, x_scale=2, z_scale=1, distortion=5 + ): + roughness = uniform(0.2, 0.8) + specular = 0.3 value_scale = log_uniform(1, 20) - saturation_scale = log_uniform(.4, 1) + saturation_scale = log_uniform(0.4, 1) def dark_color(): - return *colorsys.hsv_to_rgb(base_hue + uniform(-.06, .06), uniform(.6, 1.) * saturation_scale, - .005 * value_scale ** 1.5), 1 + return *colorsys.hsv_to_rgb( + base_hue + uniform(-0.06, 0.06), + uniform(0.6, 1.0) * saturation_scale, + 0.005 * value_scale**1.5, + ), 1 def light_color(): - return *colorsys.hsv_to_rgb(base_hue + uniform(-.06, .06), uniform(.6, 1.) 
* saturation_scale, - .05 * value_scale), 1 + return *colorsys.hsv_to_rgb( + base_hue + uniform(-0.06, 0.06), + uniform(0.6, 1.0) * saturation_scale, + 0.05 * value_scale, + ), 1 def color_fn(dark_prob): return dark_color() if uniform(0, 1) < dark_prob else light_color() - vector = nw.new_node(Nodes.Attribute, attrs={'attribute_name': 'vector'}).outputs['Vector'] + vector = nw.new_node( + Nodes.Attribute, attrs={"attribute_name": "vector"} + ).outputs["Vector"] n = np.random.randint(3, 5) - texture_0 = nw.new_node(Nodes.WaveTexture, - input_kwargs={'Vector': vector, 'Distortion': distortion, 'Scale': x_scale}, - attrs={'wave_profile': 'SAW', 'bands_direction': 'X'}) - cr_0 = build_color_ramp(nw, texture_0, np.sort(uniform(0, 1, n)), [color_fn(.4) for _ in range(n)]) - texture_1 = nw.new_node(Nodes.WaveTexture, - input_kwargs={'Vector': vector, 'Distortion': distortion, 'Scale': z_scale}, - attrs={'wave_profile': 'SAW', 'bands_direction': 'Z'}) - cr_1 = build_color_ramp(nw, texture_1, np.sort(uniform(0, 1, n)), [color_fn(.4) for _ in range(n)]) - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, input_kwargs={ - 'Base Color': nw.new_node(Nodes.MixRGB, [ratio, cr_0, cr_1]), - 'Specular': specular, - 'Roughness': roughness - }) + texture_0 = nw.new_node( + Nodes.WaveTexture, + input_kwargs={"Vector": vector, "Distortion": distortion, "Scale": x_scale}, + attrs={"wave_profile": "SAW", "bands_direction": "X"}, + ) + cr_0 = build_color_ramp( + nw, texture_0, np.sort(uniform(0, 1, n)), [color_fn(0.4) for _ in range(n)] + ) + texture_1 = nw.new_node( + Nodes.WaveTexture, + input_kwargs={"Vector": vector, "Distortion": distortion, "Scale": z_scale}, + attrs={"wave_profile": "SAW", "bands_direction": "Z"}, + ) + cr_1 = build_color_ramp( + nw, texture_1, np.sort(uniform(0, 1, n)), [color_fn(0.4) for _ in range(n)] + ) + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": nw.new_node(Nodes.MixRGB, [ratio, cr_0, cr_1]), + "Specular": specular, + "Roughness": roughness, + }, + ) return principled_bsdf diff --git a/infinigen/assets/mollusk/shell.py b/infinigen/assets/objects/mollusk/shell.py similarity index 58% rename from infinigen/assets/mollusk/shell.py rename to infinigen/assets/objects/mollusk/shell.py index 3babd5753..f25b37fd2 100644 --- a/infinigen/assets/mollusk/shell.py +++ b/infinigen/assets/objects/mollusk/shell.py @@ -6,24 +6,22 @@ import bpy import numpy as np -from numpy.random import normal, uniform +from numpy.random import uniform import infinigen.core.util.blender as butil -from infinigen.assets.creatures.util.animation.driver_repeated import repeated_driver -from infinigen.assets.mollusk.base import BaseMolluskFactory -from infinigen.assets.utils.object import join_objects, mesh2obj, data2mesh, new_circle -from infinigen.assets.utils.draw import shape_by_angles -from infinigen.core.util.random import log_uniform +from infinigen.assets.objects.mollusk.base import BaseMolluskFactory from infinigen.assets.utils.decorate import displace_vertices +from infinigen.assets.utils.draw import shape_by_angles +from infinigen.assets.utils.object import data2mesh, join_objects, mesh2obj, new_circle +from infinigen.core import surface from infinigen.core.nodes.node_info import Nodes from infinigen.core.nodes.node_wrangler import NodeWrangler -from infinigen.core import surface +from infinigen.core.tagging import tag_object from infinigen.core.util.math import FixedSeed -from infinigen.core.tagging import tag_object, tag_nodegroup +from 
infinigen.core.util.random import log_uniform class ShellBaseFactory(BaseMolluskFactory): - def __init__(self, factory_seed, coarse=False): super().__init__(factory_seed, coarse) with FixedSeed(factory_seed): @@ -31,67 +29,82 @@ def __init__(self, factory_seed, coarse=False): self.maker = np.random.choice(self.makers) self.z_scale = log_uniform(2, 10) - def build_ellipse(self, viewpoint=(0., 0, 1.), softness=.3): + def build_ellipse(self, viewpoint=(0.0, 0, 1.0), softness=0.3): viewpoint = np.array(viewpoint) obj = new_circle(vertices=1024) - with butil.ViewportMode(obj, 'EDIT'): + with butil.ViewportMode(obj, "EDIT"): bpy.ops.mesh.fill_grid() - surface.add_geomod(obj, self.geo_shader_vector, apply=True, attributes=['vector']) + surface.add_geomod( + obj, self.geo_shader_vector, apply=True, attributes=["vector"] + ) butil.apply_transform(obj, loc=True) def displace(x, y, z): - r = np.sqrt((x - 1) ** 2 + y ** 2 + z ** 2) - t = 1 - softness + softness * r ** 4 - return ((1 - t)[:, np.newaxis] * (viewpoint[np.newaxis, :] - np.stack([x, y, z], -1))).T + r = np.sqrt((x - 1) ** 2 + y**2 + z**2) + t = 1 - softness + softness * r**4 + return ( + (1 - t)[:, np.newaxis] + * (viewpoint[np.newaxis, :] - np.stack([x, y, z], -1)) + ).T displace_vertices(obj, displace) return obj @staticmethod def geo_shader_vector(nw: NodeWrangler): - geometry = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketGeometry', 'Geometry', None)]) + geometry = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) pos = nw.new_node(Nodes.InputPosition) x, y, z = nw.separate(pos) - vector = nw.combine(x, y, nw.vector_math('DISTANCE', pos, [1, 0, 0])) - nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': geometry, 'Vector': vector}) + vector = nw.combine(x, y, nw.vector_math("DISTANCE", pos, [1, 0, 0])) + nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": geometry, "Vector": vector} + ) return geometry def scallop_make(self): obj = self.build_ellipse() obj.scale = 1, 1.2, 1 butil.apply_transform(obj) - boundary = .42 - outer = uniform(.28, .32) - inner = uniform(.18, .22) - s = uniform(.6, .7) + boundary = 0.42 + outer = uniform(0.28, 0.32) + inner = uniform(0.18, 0.22) + s = uniform(0.6, 0.7) angles = [-boundary, -outer, -inner, inner, outer, boundary] scales = [0, s, 1, 1, s, 0] shape_by_angles(obj, np.array(angles) * np.pi, scales) self.add_radial_groove(obj) obj = self.add_hinge(obj) - tag_object(obj, 'scallop') + tag_object(obj, "scallop") return obj def clam_make(self): - obj = self.build_ellipse(softness=.5) + obj = self.build_ellipse(softness=0.5) obj.scale = 1, 1.2, 1 butil.apply_transform(obj) - s = uniform(.6, .7) - angles = [-uniform(.4, .5), -uniform(.3, .35), uniform(-.25, .25), uniform(.3, .35), uniform(.4, .5)] + s = uniform(0.6, 0.7) + angles = [ + -uniform(0.4, 0.5), + -uniform(0.3, 0.35), + uniform(-0.25, 0.25), + uniform(0.3, 0.35), + uniform(0.4, 0.5), + ] scales = [0, s, 1, s, 0] shape_by_angles(obj, np.array(angles) * np.pi, scales) - tag_object(obj, 'clam') + tag_object(obj, "clam") return obj def mussel_make(self): - obj = self.build_ellipse(softness=.5) + obj = self.build_ellipse(softness=0.5) obj.scale = 1, 3, 1 butil.apply_transform(obj) - s = uniform(.6, .8) - angles = [-.5, -uniform(.1, .15), uniform(0., .25), .5] - scales = [0, s, 1, uniform(.6, .8)] + s = uniform(0.6, 0.8) + angles = [-0.5, -uniform(0.1, 0.15), uniform(0.0, 0.25), 0.5] + scales = [0, s, 1, uniform(0.6, 0.8)] shape_by_angles(obj, np.array(angles) * np.pi, scales) - 
tag_object(obj, 'mussel') + tag_object(obj, "mussel") return obj @staticmethod @@ -102,21 +115,32 @@ def add_radial_groove(obj): def displace(x, y, z): a = np.arctan(y / (x + 1e-6 * (x >= 0))) r = np.sqrt(x * x + y * y + z * z) - return scale * np.cos(a * frequency) * np.clip(r - .25, 0, None) + return scale * np.cos(a * frequency) * np.clip(r - 0.25, 0, None) displace_vertices(obj, displace) return obj @staticmethod def add_hinge(obj): - length = .4 - width = .1 - x = uniform(.8, 1.) - vertices = [[0, -length, 0], [width, -length * x, 0], [width, length * x, 0], [0, length, 0]] - o = mesh2obj(data2mesh(vertices, [], [[0, 1, 2, 3]], 'trap')) - butil.modify_mesh(o, 'SUBSURF', render_levels=2, levels=2, subdivision_type='SIMPLE') - butil.modify_mesh(o, 'DISPLACE', strength=.2, - texture=bpy.data.textures.new(name='hinge', type='STUCCI')) + length = 0.4 + width = 0.1 + x = uniform(0.8, 1.0) + vertices = [ + [0, -length, 0], + [width, -length * x, 0], + [width, length * x, 0], + [0, length, 0], + ] + o = mesh2obj(data2mesh(vertices, [], [[0, 1, 2, 3]], "trap")) + butil.modify_mesh( + o, "SUBSURF", render_levels=2, levels=2, subdivision_type="SIMPLE" + ) + butil.modify_mesh( + o, + "DISPLACE", + strength=0.2, + texture=bpy.data.textures.new(name="hinge", type="STUCCI"), + ) obj = join_objects([obj, o]) return obj @@ -124,15 +148,15 @@ def create_asset(self, **params): upper = self.maker() dim = np.sqrt(upper.dimensions[0] * upper.dimensions[1] + 0.01) upper.scale = [1 / dim] * 3 - upper.location[-1] += .005 + upper.location[-1] += 0.005 butil.apply_transform(upper, loc=True) lower = butil.deep_clone_obj(upper) lower.scale[-1] = -1 butil.apply_transform(lower) base = uniform(0, np.pi / 4) - lower.rotation_euler[1] = - base - upper.rotation_euler[1] = - base - uniform(np.pi / 6, np.pi / 3) + lower.rotation_euler[1] = -base + upper.rotation_euler[1] = -base - uniform(np.pi / 6, np.pi / 3) obj = join_objects([lower, upper]) return obj diff --git a/infinigen/assets/objects/mollusk/snail.py b/infinigen/assets/objects/mollusk/snail.py new file mode 100644 index 000000000..f8554e46a --- /dev/null +++ b/infinigen/assets/objects/mollusk/snail.py @@ -0,0 +1,237 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
+ +# Authors: Lingjie Mei + + +import bpy +import numpy as np +from numpy.random import uniform + +import infinigen.core.util.blender as butil +from infinigen.assets.objects.mollusk.base import BaseMolluskFactory +from infinigen.assets.utils.object import center, data2mesh, mesh2obj, new_empty +from infinigen.core import surface +from infinigen.core.nodes.node_info import Nodes +from infinigen.core.nodes.node_wrangler import NodeWrangler +from infinigen.core.tagging import tag_object +from infinigen.core.util.math import FixedSeed +from infinigen.core.util.random import log_uniform + + +class SnailBaseFactory(BaseMolluskFactory): + freq = 256 + + def __init__(self, factory_seed, coarse=False): + super(SnailBaseFactory, self).__init__(factory_seed, coarse) + with FixedSeed(factory_seed): + self.makers = [ + self.volute_make, + self.nautilus_make, + self.snail_make, + self.conch_make, + ] + self.maker = np.random.choice(self.makers) + self.ratio = uniform(0, 0.3) if uniform(0, 1) < 0.5 else uniform(0.7, 1.0) + self.z_scale = log_uniform(0.2, 1) + self.distortion = log_uniform(2, 20) + + @staticmethod + def build_cross_section(n=64, affine=1, spike=0.0, concave=2.2): + perturb = 1 / (5 * n) + angles = (np.arange(n) / n + uniform(-perturb, perturb, n)) * 2 * np.pi + radius = np.abs(np.cos(angles)) ** concave + np.abs(np.sin(angles)) ** concave + radius *= 1 + uniform(0, spike, n) * (uniform(0, 1, n) < 0.2) + vertices = np.stack( + [ + np.cos(angles) * radius, + np.sin(angles) * radius * affine, + np.zeros_like(angles), + ] + ).T + edges = np.stack([np.arange(n), np.roll(np.arange(n), -1)]).T + obj = mesh2obj(data2mesh(vertices, edges, [], "circle")) + obj.rotation_euler = 0, 0, uniform(0, np.pi / 12) + butil.apply_transform(obj) + return obj + + def snail_make( + self, + lateral=0.15, + longitudinal=0.04, + freq=28, + scale=0.99, + loop=8, + affine=1, + spike=0.0, + ): + n = 40 + resolution = loop * freq + concave = uniform(1.9, 2.1) + obj = self.build_cross_section(n, affine, spike, concave) + empty = new_empty( + location=(longitudinal * np.random.choice([-1, 1]), 0, 0), + rotation=(2 * np.pi / freq, 0, 0), + scale=[scale] * 3, + ) + butil.modify_mesh( + obj, + "ARRAY", + apply=True, + use_relative_offset=False, + use_constant_offset=True, + use_object_offset=True, + constant_offset_displace=(0, 0, lateral), + count=resolution, + offset_object=empty, + ) + butil.delete(empty) + surface.add_geomod( + obj, + self.geo_shader_vector, + apply=True, + input_args=[n, lateral], + attributes=["vector"], + ) + + with butil.ViewportMode(obj, "EDIT"): + bpy.ops.mesh.select_mode(type="EDGE") + bpy.ops.mesh.select_all(action="SELECT") + bpy.ops.mesh.bridge_edge_loops() + + return obj + + @staticmethod + def geo_shader_vector(nw: NodeWrangler, n, interval): + geometry = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) + id = nw.new_node(Nodes.InputID) + angle = nw.scalar_multiply(nw.math("MODULO", id, n), 2 * np.pi / n) + height = nw.scalar_multiply(nw.math("FLOOR", nw.scalar_divide(id, n)), interval) + vector = nw.combine(nw.math("COSINE", angle), nw.math("SINE", angle), height) + nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": geometry, "Vector": vector} + ) + + @staticmethod + def solve_longitude(ratio, freq, scale): + return ratio * (1 + scale**freq) / freq + + @staticmethod + def solve_lateral(ratio, freq, scale): + return ( + ratio + / ( + np.sin(2 * np.pi / freq * np.arange(freq)) * scale ** np.arange(freq) + ).sum() + ) + + @staticmethod + def 
solve_scale(shrink, freq): + return shrink ** (1 / freq) + + def conch_make(self): + scale = self.solve_scale(uniform(0.7, 0.8), self.freq) + lateral = self.solve_lateral(uniform(0.3, 0.4), self.freq, scale) + longitude = self.solve_longitude(uniform(0.7, 0.8), self.freq, scale) + loop = np.random.randint(8, 10) + obj = self.snail_make( + lateral, + longitude, + self.freq, + scale, + loop, + affine=uniform(0.8, 0.9), + spike=0.1, + ) + tag_object(obj, "conch") + return obj + + def auger_make(self): + scale = self.solve_scale(uniform(0.7, 0.8), self.freq) + lateral = self.solve_lateral(uniform(0.1, 0.15), self.freq, scale) + longitude = self.solve_longitude(uniform(0.9, 1.0), self.freq, scale) + loop = np.random.randint(8, 12) + obj = self.snail_make( + lateral, longitude, self.freq, scale, loop, affine=uniform(0.5, 0.6) + ) + tag_object(obj, "auger") + return obj + + def volute_make(self): + scale = self.solve_scale(uniform(0.5, 0.6), self.freq) + lateral = self.solve_lateral(uniform(0.4, 0.5), self.freq, scale) + longitude = self.solve_longitude(uniform(0.6, 0.7), self.freq, scale) + loop = np.random.randint(4, 5) + obj = self.snail_make(lateral, longitude, self.freq, scale, loop) + tag_object(obj, "volute") + return obj + + def nautilus_make(self): + scale = self.solve_scale(uniform(0.4, 0.5), self.freq) + lateral = self.solve_lateral(uniform(1.2, 1.4), self.freq, scale) + longitude = self.solve_longitude(uniform(0.2, 0.3), self.freq, scale) + loop = np.random.randint(4, 5) + obj = self.snail_make(lateral, longitude, self.freq, scale, loop) + tag_object(obj, "nautilus") + return obj + + @staticmethod + def geo_affine(nw: NodeWrangler): + geometry = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) + affine = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": geometry, + "Offset": nw.combine( + *[ + nw.vector_math( + "DOT_PRODUCT", + uniform(-0.1, 0.1, 3), + nw.new_node(Nodes.InputPosition), + ) + for _ in range(3) + ] + ), + }, + ) + return affine + + def create_asset(self, **params): + obj = self.maker() + obj.scale = [1 / max(obj.dimensions)] * 3 + obj.rotation_euler = uniform(0, np.pi * 2, 3) + butil.apply_transform(obj) + obj.location = -center(obj) + obj.location[-1] += obj.dimensions[-1] * 0.4 + butil.apply_transform(obj, loc=True) + surface.add_geomod(obj, self.geo_affine, apply=True) + tag_object(obj, "snail") + return obj + + +class VoluteBaseFactory(SnailBaseFactory): + def __init__(self, factory_seed, coarse=False): + super(VoluteBaseFactory, self).__init__(factory_seed, coarse) + self.maker = self.volute_make + + +class NautilusBaseFactory(SnailBaseFactory): + def __init__(self, factory_seed, coarse=False): + super(NautilusBaseFactory, self).__init__(factory_seed, coarse) + self.maker = self.nautilus_make + + +class ConchBaseFactory(SnailBaseFactory): + def __init__(self, factory_seed, coarse=False): + super(ConchBaseFactory, self).__init__(factory_seed, coarse) + self.maker = self.conch_make + + +class AugerBaseFactory(SnailBaseFactory): + def __init__(self, factory_seed, coarse=False): + super(AugerBaseFactory, self).__init__(factory_seed, coarse) + self.maker = self.auger_make diff --git a/infinigen/assets/monocot/__init__.py b/infinigen/assets/objects/monocot/__init__.py similarity index 69% rename from infinigen/assets/monocot/__init__.py rename to infinigen/assets/objects/monocot/__init__.py index 833443f62..afee96c08 100644 --- a/infinigen/assets/monocot/__init__.py +++ 
b/infinigen/assets/objects/monocot/__init__.py @@ -1,8 +1,13 @@ from .agave import AgaveMonocotFactory -from .grasses import GrassesMonocotFactory, WheatMonocotFactory, WheatEarMonocotFactory, MaizeMonocotFactory -from .tussock import TussockMonocotFactory -from .pinecone import PineconeFactory -from .generate import MonocotFactory from .banana import BananaMonocotFactory, TaroMonocotFactory -from .veratrum import VeratrumMonocotFactory +from .generate import MonocotFactory +from .grasses import ( + GrassesMonocotFactory, + MaizeMonocotFactory, + WheatEarMonocotFactory, + WheatMonocotFactory, +) from .kelp import KelpMonocotFactory +from .pinecone import PineconeFactory +from .tussock import TussockMonocotFactory +from .veratrum import VeratrumMonocotFactory diff --git a/infinigen/assets/monocot/agave.py b/infinigen/assets/objects/monocot/agave.py similarity index 59% rename from infinigen/assets/monocot/agave.py rename to infinigen/assets/objects/monocot/agave.py index bf8fd4deb..a53980bba 100644 --- a/infinigen/assets/monocot/agave.py +++ b/infinigen/assets/objects/monocot/agave.py @@ -4,22 +4,20 @@ # Authors: Lingjie Mei -import colorsys - import bpy import numpy as np from numpy.random import uniform import infinigen.core.util.blender as butil -from infinigen.assets.monocot.growth import MonocotGrowthFactory -from infinigen.assets.utils.decorate import distance2boundary, displace_vertices -from infinigen.assets.utils.object import join_objects +from infinigen.assets.objects.monocot.growth import MonocotGrowthFactory +from infinigen.assets.utils.decorate import displace_vertices, distance2boundary from infinigen.assets.utils.draw import cut_plane, leaf -from infinigen.core.util.random import log_uniform -from infinigen.core.surface import shaderfunc_to_material +from infinigen.assets.utils.object import join_objects +from infinigen.core.tagging import tag_object from infinigen.core.util.blender import deep_clone_obj from infinigen.core.util.math import FixedSeed -from infinigen.core.tagging import tag_object, tag_nodegroup +from infinigen.core.util.random import log_uniform + class AgaveMonocotFactory(MonocotGrowthFactory): use_distance = True @@ -27,45 +25,51 @@ class AgaveMonocotFactory(MonocotGrowthFactory): def __init__(self, factory_seed, coarse=False): super(AgaveMonocotFactory, self).__init__(factory_seed, coarse) with FixedSeed(factory_seed): - self.stem_offset = uniform(.0, .5) + self.stem_offset = uniform(0.0, 0.5) self.angle = uniform(np.pi / 9, np.pi / 6) - self.z_drag = uniform(.05, .1) - self.min_y_angle = uniform(np.pi * .1, np.pi * .15) - self.max_y_angle = uniform(np.pi * .4, np.pi * .52) + self.z_drag = uniform(0.05, 0.1) + self.min_y_angle = uniform(np.pi * 0.1, np.pi * 0.15) + self.max_y_angle = uniform(np.pi * 0.4, np.pi * 0.52) self.count = int(log_uniform(32, 64)) - self.scale_curve = [(0, uniform(.8, 1.)), (.5, 1), (1, uniform(.6, 1.))] + self.scale_curve = [ + (0, uniform(0.8, 1.0)), + (0.5, 1), + (1, uniform(0.6, 1.0)), + ] self.bud_angle = uniform(np.pi / 8, np.pi / 4) - self.cut_prob = 0 if uniform(0, 1) < .5 else uniform(.2, .4) + self.cut_prob = 0 if uniform(0, 1) < 0.5 else uniform(0.2, 0.4) @staticmethod def build_base_hue(): - return uniform(.12, .32) + return uniform(0.12, 0.32) def build_leaf(self, face_size): - x_anchors = 0, .2 * np.cos(self.bud_angle), uniform(1., 1.4), 1.5 - y_anchors = 0, .2 * np.sin(self.bud_angle), uniform(.1, .15), 0 + x_anchors = 0, 0.2 * np.cos(self.bud_angle), uniform(1.0, 1.4), 1.5 + y_anchors = 0, 0.2 * 
np.sin(self.bud_angle), uniform(0.1, 0.15), 0 obj = leaf(x_anchors, y_anchors, face_size=face_size) distance = distance2boundary(obj) lower = deep_clone_obj(obj) - z_offset = -log_uniform(.08, .16) + z_offset = -log_uniform(0.08, 0.16) z_ratio = uniform(1.5, 2.5) - displace_vertices(lower, lambda x, y, z: (0, 0, (1 - (1 - distance) ** z_ratio) * z_offset)) + displace_vertices( + lower, lambda x, y, z: (0, 0, (1 - (1 - distance) ** z_ratio) * z_offset) + ) obj = join_objects([lower, obj]) butil.modify_mesh(obj, "WELD", merge_threshold=2e-4) if uniform(0, 1) < self.cut_prob: angle = uniform(-np.pi / 3, np.pi / 3) - cut_center = np.array([uniform(1., 1.4), 0, 0]) + cut_center = np.array([uniform(1.0, 1.4), 0, 0]) cut_normal = np.array([np.cos(angle), np.sin(angle), 0]) obj, cut = cut_plane(obj, cut_center, cut_normal) obj = join_objects([obj, cut]) - with butil.ViewportMode(obj, 'EDIT'), butil.Suppress(): - bpy.ops.mesh.select_all(action='SELECT') + with butil.ViewportMode(obj, "EDIT"), butil.Suppress(): + bpy.ops.mesh.select_all(action="SELECT") bpy.ops.mesh.region_to_loop() bpy.ops.mesh.remove_doubles(threshold=1e-2) self.decorate_leaf(obj) - tag_object(obj, 'agave') + tag_object(obj, "agave") return obj diff --git a/infinigen/assets/monocot/banana.py b/infinigen/assets/objects/monocot/banana.py similarity index 50% rename from infinigen/assets/monocot/banana.py rename to infinigen/assets/objects/monocot/banana.py index 74fa33461..3071150cb 100644 --- a/infinigen/assets/monocot/banana.py +++ b/infinigen/assets/objects/monocot/banana.py @@ -3,61 +3,66 @@ # Authors: Lingjie Mei -import bpy import bmesh import numpy as np from numpy.random import uniform +from infinigen.assets.objects.monocot.growth import MonocotGrowthFactory from infinigen.assets.utils.decorate import displace_vertices, read_co from infinigen.assets.utils.draw import bezier_curve, leaf from infinigen.assets.utils.nodegroup import geo_radius from infinigen.assets.utils.object import join_objects, origin2lowest from infinigen.core import surface -from infinigen.assets.monocot.growth import MonocotGrowthFactory -from infinigen.core.util.random import log_uniform +from infinigen.core.tagging import tag_object from infinigen.core.util import blender as butil from infinigen.core.util.math import FixedSeed -from infinigen.core.tagging import tag_object, tag_nodegroup +from infinigen.core.util.random import log_uniform -class BananaMonocotFactory(MonocotGrowthFactory): +class BananaMonocotFactory(MonocotGrowthFactory): def __init__(self, factory_seed, coarse=False): super(BananaMonocotFactory, self).__init__(factory_seed, coarse) with FixedSeed(factory_seed): - self.stem_offset = uniform(.6, 1.) 
+ self.stem_offset = uniform(0.6, 1.0) self.angle = uniform(np.pi / 4, np.pi / 3) self.z_scale = uniform(1, 1.5) - self.z_drag = uniform(.1, .2) - self.min_y_angle = uniform(np.pi * .05, np.pi * .1) - self.max_y_angle = uniform(np.pi * .25, np.pi * .45) - self.leaf_range = uniform(.5, .7), 1 + self.z_drag = uniform(0.1, 0.2) + self.min_y_angle = uniform(np.pi * 0.05, np.pi * 0.1) + self.max_y_angle = uniform(np.pi * 0.25, np.pi * 0.45) + self.leaf_range = uniform(0.5, 0.7), 1 self.count = int(log_uniform(16, 24)) - self.scale_curve = [(0, uniform(.4, 1.)), (1, uniform(.6, 1.))] - self.radius = uniform(.04, .06) + self.scale_curve = [(0, uniform(0.4, 1.0)), (1, uniform(0.6, 1.0))] + self.radius = uniform(0.04, 0.06) self.bud_angle = uniform(np.pi / 8, np.pi / 6) self.cut_angle = self.bud_angle + uniform(np.pi / 20, np.pi / 12) self.freq = log_uniform(100, 300) - self.n_cuts = np.random.randint(6, 10) if uniform(0, 1) < .8 else 0 + self.n_cuts = np.random.randint(6, 10) if uniform(0, 1) < 0.8 else 0 @staticmethod def build_base_hue(): - return uniform(.15, .35) + return uniform(0.15, 0.35) def cut_leaf(self, obj): coords = read_co(obj) x, y, z = coords.T - coords = coords[(np.abs(y) < .08) & (np.abs(y) > .01)] + coords = coords[(np.abs(y) < 0.08) & (np.abs(y) > 0.01)] positive_coords = coords[coords.T[1] > 0] positive_coords = positive_coords[np.argsort(positive_coords[:, 0])] negative_coords = coords[coords.T[1] < 0] negative_coords = negative_coords[np.argsort(negative_coords[:, 0])] - positive_coords = positive_coords[np.random.choice(len(positive_coords), self.n_cuts, replace=False)] - negative_coords = negative_coords[np.random.choice(len(negative_coords), self.n_cuts, replace=False)] - - for (x1, y1, _), (x2, y2, _) in zip(np.concatenate([positive_coords[:-1], negative_coords[:-1]], 0), - np.concatenate([positive_coords[1:], negative_coords[1:]], 0)): + positive_coords = positive_coords[ + np.random.choice(len(positive_coords), self.n_cuts, replace=False) + ] + negative_coords = negative_coords[ + np.random.choice(len(negative_coords), self.n_cuts, replace=False) + ] + + for (x1, y1, _), (x2, y2, _) in zip( + np.concatenate([positive_coords[:-1], negative_coords[:-1]], 0), + np.concatenate([positive_coords[1:], negative_coords[1:]], 0), + ): coeff = 1 if y1 > 0 else -1 - ratio = uniform(-2., .4) + ratio = uniform(-2.0, 0.4) exponent = uniform(1.2, 1.6) def cut(x, y, z): @@ -65,69 +70,99 @@ def cut(x, y, z): m2 = x2 * np.sin(self.cut_angle) - y2 * np.cos(self.cut_angle) * coeff m = x * np.sin(self.cut_angle) - y * np.cos(self.cut_angle) * coeff dist = ((x - x1) * (y1 - y2) + (y - y1) * (x1 - x2)) / np.sqrt( - (x1 - x2) ** 2 + (y1 - y2) ** 2 + .1) - return 0, 0, np.where((m1 < m) & (m < m2) & (dist * coeff < 0), - ratio * np.abs(dist) ** exponent, 0) + (x1 - x2) ** 2 + (y1 - y2) ** 2 + 0.1 + ) + return ( + 0, + 0, + np.where( + (m1 < m) & (m < m2) & (dist * coeff < 0), + ratio * np.abs(dist) ** exponent, + 0, + ), + ) displace_vertices(obj, cut) - with butil.ViewportMode(obj, 'EDIT'): + with butil.ViewportMode(obj, "EDIT"): bm = bmesh.from_edit_mesh(obj.data) - geom = [e for e in bm.edges if e.calc_length() > .02] - bmesh.ops.delete(bm, geom=geom, context='EDGES') + geom = [e for e in bm.edges if e.calc_length() > 0.02] + bmesh.ops.delete(bm, geom=geom, context="EDGES") bmesh.update_edit_mesh(obj.data) def build_leaf(self, face_size): - x_anchors = 0, .2 * np.cos(self.bud_angle), uniform(.8, 1.2), 2. 
- y_anchors = 0, .2 * np.sin(self.bud_angle), uniform(.2, .25), 0 + x_anchors = 0, 0.2 * np.cos(self.bud_angle), uniform(0.8, 1.2), 2.0 + y_anchors = 0, 0.2 * np.sin(self.bud_angle), uniform(0.2, 0.25), 0 obj = leaf(x_anchors, y_anchors, face_size=face_size) self.cut_leaf(obj) self.displace_veins(obj) self.decorate_leaf(obj) - tag_object(obj, 'banana') + tag_object(obj, "banana") return obj def displace_veins(self, obj): - vg = obj.vertex_groups.new(name='distance') + vg = obj.vertex_groups.new(name="distance") x, y, z = read_co(obj).T branch = np.cos( - (np.abs(y) * np.cos(self.cut_angle) - x * np.sin(self.cut_angle)) * self.freq) > uniform(.85, .9, - len(x)) - leaf = np.abs(y) < uniform(.002, .008, len(x)) + (np.abs(y) * np.cos(self.cut_angle) - x * np.sin(self.cut_angle)) + * self.freq + ) > uniform(0.85, 0.9, len(x)) + leaf = np.abs(y) < uniform(0.002, 0.008, len(x)) weights = branch | leaf for i, l in enumerate(weights): - vg.add([i], l, 'REPLACE') - butil.modify_mesh(obj, 'DISPLACE', strength=-uniform(5e-3, 8e-3), mid_level=0, vertex_group='distance') + vg.add([i], l, "REPLACE") + butil.modify_mesh( + obj, + "DISPLACE", + strength=-uniform(5e-3, 8e-3), + mid_level=0, + vertex_group="distance", + ) class TaroMonocotFactory(BananaMonocotFactory): def __init__(self, factory_seed, coarse=False): super(TaroMonocotFactory, self).__init__(factory_seed, coarse) with FixedSeed(factory_seed): - self.stem_offset = uniform(.05, .1) - self.radius = uniform(.02, .04) - self.z_drag = uniform(.2, .3) - self.bud_angle = uniform(np.pi * .6, np.pi * .7) + self.stem_offset = uniform(0.05, 0.1) + self.radius = uniform(0.02, 0.04) + self.z_drag = uniform(0.2, 0.3) + self.bud_angle = uniform(np.pi * 0.6, np.pi * 0.7) self.freq = log_uniform(10, 20) self.count = int(log_uniform(12, 16)) - self.n_cuts = np.random.randint(1, 2) if uniform(0, 1) < .5 else 0 - self.min_y_angle = uniform(-np.pi * .25, -np.pi * .05) - self.max_y_angle = uniform(-np.pi * .05, 0) + self.n_cuts = np.random.randint(1, 2) if uniform(0, 1) < 0.5 else 0 + self.min_y_angle = uniform(-np.pi * 0.25, -np.pi * 0.05) + self.max_y_angle = uniform(-np.pi * 0.05, 0) def displace_veins(self, obj): - vg = obj.vertex_groups.new(name='distance') + vg = obj.vertex_groups.new(name="distance") x, y, z = read_co(obj).T - branch = np.cos(uniform(0, np.pi * 2) + np.arctan2(y - np.where(y > 0, -1, 1) * uniform(.1, .2), - x - uniform(.1, .4)) * self.freq) > uniform(.98, .99, - len(x)) - leaf = np.abs(y) < uniform(.002, .008, len(x)) + branch = np.cos( + uniform(0, np.pi * 2) + + np.arctan2( + y - np.where(y > 0, -1, 1) * uniform(0.1, 0.2), x - uniform(0.1, 0.4) + ) + * self.freq + ) > uniform(0.98, 0.99, len(x)) + leaf = np.abs(y) < uniform(0.002, 0.008, len(x)) weights = branch | leaf for i, l in enumerate(weights): - vg.add([i], l, 'REPLACE') - butil.modify_mesh(obj, 'DISPLACE', strength=-uniform(5e-3, 8e-3), mid_level=0, vertex_group='distance') + vg.add([i], l, "REPLACE") + butil.modify_mesh( + obj, + "DISPLACE", + strength=-uniform(5e-3, 8e-3), + mid_level=0, + vertex_group="distance", + ) def build_leaf(self, face_size): - x_anchors = 0, .2 * np.cos(self.bud_angle), uniform(.4, 1.), uniform(.8, 1.) 
- y_anchors = 0, .2 * np.sin(self.bud_angle), uniform(.25, .3), 0 + x_anchors = ( + 0, + 0.2 * np.cos(self.bud_angle), + uniform(0.4, 1.0), + uniform(0.8, 1.0), + ) + y_anchors = 0, 0.2 * np.sin(self.bud_angle), uniform(0.25, 0.3), 0 obj = leaf(x_anchors, y_anchors, face_size=face_size) self.cut_leaf(obj) self.displace_veins(obj) @@ -135,16 +170,18 @@ def build_leaf(self, face_size): bezier = self.build_branch() obj = join_objects([obj, bezier]) origin2lowest(obj) - tag_object(obj, 'taro') + tag_object(obj, "taro") return obj def build_branch(self): - offset = uniform(.2, .3) + offset = uniform(0.2, 0.3) length = uniform(1, 2) - x_anchors = 0, -.05, - offset - uniform(.01, .02), -offset - z_anchors = 0, 0, - length + .1, -length + x_anchors = 0, -0.05, -offset - uniform(0.01, 0.02), -offset + z_anchors = 0, 0, -length + 0.1, -length bezier = bezier_curve([x_anchors, 0, z_anchors]) - surface.add_geomod(bezier, geo_radius, apply=True, input_args=[uniform(.02, .03), 32]) + surface.add_geomod( + bezier, geo_radius, apply=True, input_args=[uniform(0.02, 0.03), 32] + ) return bezier def build_instance(self, i, face_size): diff --git a/infinigen/assets/monocot/generate.py b/infinigen/assets/objects/monocot/generate.py similarity index 62% rename from infinigen/assets/monocot/generate.py rename to infinigen/assets/objects/monocot/generate.py index 98b16eb63..b172aae0c 100644 --- a/infinigen/assets/monocot/generate.py +++ b/infinigen/assets/objects/monocot/generate.py @@ -8,51 +8,71 @@ import numpy as np from numpy.random import uniform -from .veratrum import VeratrumMonocotFactory -from .banana import BananaMonocotFactory, TaroMonocotFactory +from infinigen.assets.utils.mesh import polygon_angles +from infinigen.assets.utils.object import join_objects +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.tagging import tag_object +from infinigen.core.util.math import FixedSeed + from .agave import AgaveMonocotFactory +from .banana import BananaMonocotFactory, TaroMonocotFactory from .grasses import GrassesMonocotFactory, MaizeMonocotFactory, WheatMonocotFactory from .growth import MonocotGrowthFactory from .tussock import TussockMonocotFactory -from infinigen.core.placement.factory import AssetFactory -from infinigen.core.util.math import FixedSeed -from infinigen.assets.utils.object import join_objects -from infinigen.assets.utils.mesh import polygon_angles -from infinigen.core.tagging import tag_object, tag_nodegroup +from .veratrum import VeratrumMonocotFactory + class MonocotFactory(AssetFactory): max_cluster = 10 def create_asset(self, i, **params) -> bpy.types.Object: - params['decorate'] = True + params["decorate"] = True if self.factory.is_grass: n = np.random.randint(1, 6) angles = polygon_angles(n, np.pi / 4, np.pi * 2) - radius = uniform(.08, .16, n) - monocots = [self.factory.create_asset(**params, i=j + i * self.max_cluster) for j in range(n)] + radius = uniform(0.08, 0.16, n) + monocots = [ + self.factory.create_asset(**params, i=j + i * self.max_cluster) + for j in range(n) + ] for m, a, r in zip(monocots, angles, radius): m.location = r * np.cos(a), r * np.sin(a), 0 obj = join_objects(monocots) - tag_object(obj, 'monocot') + tag_object(obj, "monocot") return obj else: m = self.factory.create_asset(**params) - tag_object(m, 'monocot') + tag_object(m, "monocot") return m def __init__(self, factory_seed, coarse=False, factory_method=None, grass=None): super(MonocotFactory, self).__init__(factory_seed, coarse) with FixedSeed(factory_seed): - grass_factory = 
[TussockMonocotFactory, GrassesMonocotFactory, WheatMonocotFactory, - MaizeMonocotFactory] - nongrass_factory = [AgaveMonocotFactory, BananaMonocotFactory, TaroMonocotFactory, - VeratrumMonocotFactory] + grass_factory = [ + TussockMonocotFactory, + GrassesMonocotFactory, + WheatMonocotFactory, + MaizeMonocotFactory, + ] + nongrass_factory = [ + AgaveMonocotFactory, + BananaMonocotFactory, + TaroMonocotFactory, + VeratrumMonocotFactory, + ] # noinspection PyTypeChecker - self.factory_methods = grass_factory + nongrass_factory if grass is None else grass_factory if \ - grass else nongrass_factory + self.factory_methods = ( + grass_factory + nongrass_factory + if grass is None + else grass_factory + if grass + else nongrass_factory + ) weights = np.array([1] * len(self.factory_methods)) self.weights = weights / weights.sum() if factory_method is None: with FixedSeed(self.factory_seed): - factory_method = np.random.choice(self.factory_methods, p=self.weights) + factory_method = np.random.choice( + self.factory_methods, p=self.weights + ) self.factory: MonocotGrowthFactory = factory_method(factory_seed, coarse) diff --git a/infinigen/assets/monocot/grasses.py b/infinigen/assets/objects/monocot/grasses.py similarity index 57% rename from infinigen/assets/monocot/grasses.py rename to infinigen/assets/objects/monocot/grasses.py index 0fea24138..7ee174fa0 100644 --- a/infinigen/assets/monocot/grasses.py +++ b/infinigen/assets/objects/monocot/grasses.py @@ -4,66 +4,68 @@ # Authors: Lingjie Mei -import colorsys - import bpy import numpy as np from numpy.random import uniform -from infinigen.assets.monocot.growth import MonocotGrowthFactory -from infinigen.assets.utils.decorate import remove_vertices, write_attribute, \ - write_material_index -from infinigen.assets.utils.misc import assign_material -from infinigen.assets.utils.object import join_objects +from infinigen.assets.objects.monocot.growth import MonocotGrowthFactory +from infinigen.assets.utils.decorate import ( + remove_vertices, + write_attribute, + write_material_index, +) from infinigen.assets.utils.draw import bezier_curve, leaf, spin from infinigen.assets.utils.mesh import polygon_angles -from infinigen.core.util.color import hsv2rgba -from infinigen.core.util.random import log_uniform - +from infinigen.assets.utils.misc import assign_material +from infinigen.assets.utils.object import join_objects +from infinigen.core import surface from infinigen.core.nodes.node_info import Nodes from infinigen.core.nodes.node_wrangler import NodeWrangler - -from infinigen.core.placement.factory import AssetFactory, make_asset_collection from infinigen.core.placement.detail import remesh_with_attrs -from infinigen.core import surface -from infinigen.core.surface import read_attr_data, shaderfunc_to_material +from infinigen.core.placement.factory import make_asset_collection +from infinigen.core.surface import shaderfunc_to_material +from infinigen.core.tagging import tag_object from infinigen.core.util import blender as butil +from infinigen.core.util.color import hsv2rgba from infinigen.core.util.math import FixedSeed -from infinigen.core.tagging import tag_object, tag_nodegroup +from infinigen.core.util.random import log_uniform -class GrassesMonocotFactory(MonocotGrowthFactory): +class GrassesMonocotFactory(MonocotGrowthFactory): def __init__(self, factory_seed, coarse=False): super(GrassesMonocotFactory, self).__init__(factory_seed, coarse) with FixedSeed(factory_seed): - self.stem_offset = uniform(1.5, 2.) 
+ self.stem_offset = uniform(1.5, 2.0) self.angle = uniform(np.pi / 6, np.pi / 3) - self.z_drag = uniform(.0, .2) - self.min_y_angle = uniform(np.pi * .35, np.pi * .45) - self.max_y_angle = uniform(np.pi * .45, np.pi * .5) + self.z_drag = uniform(0.0, 0.2) + self.min_y_angle = uniform(np.pi * 0.35, np.pi * 0.45) + self.max_y_angle = uniform(np.pi * 0.45, np.pi * 0.5) self.count = int(log_uniform(16, 64)) - self.scale_curve = [(0, 1.), (1, .2)] + self.scale_curve = [(0, 1.0), (1, 0.2)] self.bend_angle = np.pi / 2 @staticmethod def build_base_hue(): - if uniform(0, 1) < .6: - return uniform(.08, .12) + if uniform(0, 1) < 0.6: + return uniform(0.08, 0.12) else: - return uniform(.2, .25) + return uniform(0.2, 0.25) def build_leaf(self, face_size): - x_anchors = np.array([0, uniform(.1, .2), uniform(.5, .7), 1.]) - y_anchors = np.array([0, uniform(.02, .03), uniform(.02, .03), 0]) + x_anchors = np.array([0, uniform(0.1, 0.2), uniform(0.5, 0.7), 1.0]) + y_anchors = np.array([0, uniform(0.02, 0.03), uniform(0.02, 0.03), 0]) obj = leaf(x_anchors, y_anchors, face_size=face_size) - cut_prob = .4 + cut_prob = 0.4 if uniform(0, 1) < cut_prob: - x_cutoff = uniform(.5, 1.) + x_cutoff = uniform(0.5, 1.0) angle = uniform(-np.pi / 3, np.pi / 3) - remove_vertices(obj, lambda x, y, z: (x - x_cutoff) * np.cos(angle) + y * np.sin(angle) > 0) + remove_vertices( + obj, + lambda x, y, z: (x - x_cutoff) * np.cos(angle) + y * np.sin(angle) > 0, + ) self.decorate_leaf(obj) - tag_object(obj, 'grasses') + tag_object(obj, "grasses") return obj @property @@ -72,91 +74,92 @@ def is_grass(self): class WheatEarMonocotFactory(MonocotGrowthFactory): - def __init__(self, factory_seed, coarse=False): super(WheatEarMonocotFactory, self).__init__(factory_seed, coarse) with FixedSeed(factory_seed): - self.stem_offset = uniform(.4, .5) + self.stem_offset = uniform(0.4, 0.5) self.angle = uniform(np.pi / 6, np.pi / 4) self.min_y_angle = uniform(np.pi / 4, np.pi / 3) self.max_y_angle = np.pi / 2 - self.leaf_prob = uniform(.9, 1) + self.leaf_prob = uniform(0.9, 1) self.count = int(log_uniform(96, 128)) self.bend_angle = np.pi @staticmethod def build_base_hue(): - return uniform(.12, .28) + return uniform(0.12, 0.28) def build_leaf(self, face_size): - x_anchors = np.array([0, .05, .1]) - y_anchors = np.array([0, uniform(.01, .015), 0]) + x_anchors = np.array([0, 0.05, 0.1]) + y_anchors = np.array([0, uniform(0.01, 0.015), 0]) curves = [] for angle in polygon_angles(np.random.randint(4, 6)): anchors = [x_anchors, np.cos(angle) * y_anchors, np.sin(angle) * y_anchors] curves.append(bezier_curve(anchors)) obj = butil.join_objects(curves) - with butil.ViewportMode(obj, 'EDIT'): - bpy.ops.mesh.select_all(action='SELECT') + with butil.ViewportMode(obj, "EDIT"): + bpy.ops.mesh.select_all(action="SELECT") bpy.ops.mesh.convex_hull() remesh_with_attrs(obj, face_size / 2) - tag_object(obj, 'wheat_ear') + tag_object(obj, "wheat_ear") return obj class WheatMonocotFactory(GrassesMonocotFactory): - def __init__(self, factory_seed, coarse=False): super(WheatMonocotFactory, self).__init__(factory_seed, coarse) with FixedSeed(factory_seed): self.ear_factory = WheatEarMonocotFactory(factory_seed, coarse) - self.scale_curve = [(0, 1.), (1, .6)] - self.leaf_range = .1, .7 + self.scale_curve = [(0, 1.0), (1, 0.6)] + self.leaf_range = 0.1, 0.7 @staticmethod def build_base_hue(): - return uniform(.08, .12) + return uniform(0.08, 0.12) def create_asset(self, **params): obj = super().create_raw(**params) ear = self.ear_factory.create_asset(**params) - 
butil.modify_mesh(ear, 'SIMPLE_DEFORM', deform_method='BEND', - angle=uniform(0, self.ear_factory.bend_angle)) - ear.location[-1] = self.stem_offset - .02 + butil.modify_mesh( + ear, + "SIMPLE_DEFORM", + deform_method="BEND", + angle=uniform(0, self.ear_factory.bend_angle), + ) + ear.location[-1] = self.stem_offset - 0.02 obj = join_objects([obj, ear]) self.decorate_monocot(obj) - tag_object(obj, 'wheat') + tag_object(obj, "wheat") return obj class MaizeMonocotFactory(GrassesMonocotFactory): - def __init__(self, factory_seed, coarse=False): super(MaizeMonocotFactory, self).__init__(factory_seed, coarse) with FixedSeed(factory_seed): - self.stem_offset = uniform(2., 2.5) - self.scale_curve = [(0, 1.), (1, .6)] - self.leaf_range = .1, .7 + self.stem_offset = uniform(2.0, 2.5) + self.scale_curve = [(0, 1.0), (1, 0.6)] + self.leaf_range = 0.1, 0.7 def build_leaf(self, face_size): - x_anchors = np.array([0, uniform(.1, .2), uniform(.5, .7), 1.]) - y_anchors = np.array([0, uniform(.03, .06), uniform(.03, .06), 0]) + x_anchors = np.array([0, uniform(0.1, 0.2), uniform(0.5, 0.7), 1.0]) + y_anchors = np.array([0, uniform(0.03, 0.06), uniform(0.03, 0.06), 0]) obj = leaf(x_anchors, y_anchors, face_size=face_size) self.decorate_leaf(obj) - tag_object(obj, 'maize_leaf') + tag_object(obj, "maize_leaf") return obj def build_husk(self): - x_anchors = 0, uniform(.04, .05), uniform(.03, .03), 0 - z_anchors = 0, .01, uniform(.24, .3), uniform(.35, .4) + x_anchors = 0, uniform(0.04, 0.05), uniform(0.03, 0.03), 0 + z_anchors = 0, 0.01, uniform(0.24, 0.3), uniform(0.35, 0.4) anchors = x_anchors, 0, z_anchors husk = spin(anchors) - texture = bpy.data.textures.new(name='husk', type='STUCCI') - texture.noise_scale = .01 - butil.modify_mesh(husk, 'DISPLACE', strength=.02, texture=texture) - husk.location[-1] = self.stem_offset - .02 - husk.rotation_euler[0] = uniform(0, np.pi * .2) - tag_object(husk, 'maize_husk') + texture = bpy.data.textures.new(name="husk", type="STUCCI") + texture.noise_scale = 0.01 + butil.modify_mesh(husk, "DISPLACE", strength=0.02, texture=texture) + husk.location[-1] = self.stem_offset - 0.02 + husk.rotation_euler[0] = uniform(0, np.pi * 0.2) + tag_object(husk, "maize_husk") return husk def create_asset(self, **params): @@ -164,31 +167,30 @@ def create_asset(self, **params): husk = self.build_husk() obj = join_objects([obj, husk]) self.decorate_monocot(obj) - tag_object(obj, 'maize') + tag_object(obj, "maize") return obj class ReedEarMonocotFactory(MonocotGrowthFactory): - def __init__(self, factory_seed, coarse=False): super(ReedEarMonocotFactory, self).__init__(factory_seed, coarse) with FixedSeed(factory_seed): - self.stem_offset = uniform(.3, .4) + self.stem_offset = uniform(0.3, 0.4) self.min_y_angle = uniform(np.pi / 4, np.pi / 3) self.max_y_angle = self.min_y_angle + np.pi / 12 self.count = int(log_uniform(48, 96)) - self.radius = .002 + self.radius = 0.002 def build_leaf(self, face_size): - x_anchors = np.array([0, uniform(.02, .03), .05]) - y_anchors = np.array([0, uniform(.005, .01), 0]) + x_anchors = np.array([0, uniform(0.02, 0.03), 0.05]) + y_anchors = np.array([0, uniform(0.005, 0.01), 0]) obj = leaf(x_anchors, y_anchors, face_size=face_size) return obj def create_raw(self, **params): obj = super(ReedEarMonocotFactory, self).create_raw(**params) - write_attribute(obj, 1, 'ear', 'FACE') - tag_object(obj, 'reed_ear') + write_attribute(obj, 1, "ear", "FACE") + tag_object(obj, "reed_ear") return obj @@ -198,59 +200,65 @@ class ReedBranchMonocotFactory(MonocotGrowthFactory): def 
__init__(self, factory_seed, coarse=False): super(ReedBranchMonocotFactory, self).__init__(factory_seed, coarse) with FixedSeed(factory_seed): - self.stem_offset = uniform(.6, .8) + self.stem_offset = uniform(0.6, 0.8) self.ear_factory = ReedEarMonocotFactory(self.factory_seed) - self.scale_curve = (0, 1), (.5, .6), (1, .1) + self.scale_curve = (0, 1), (0.5, 0.6), (1, 0.1) self.min_y_angle = uniform(-np.pi / 10, -np.pi / 8) self.max_y_angle = uniform(-np.pi / 6, -np.pi / 8) self.angle = 0 - self.radius = .005 + self.radius = 0.005 def make_collection(self, face_size): - return make_asset_collection(self.ear_factory.create_raw, 2, 'leaves', verbose=False, - face_size=face_size) + return make_asset_collection( + self.ear_factory.create_raw, 2, "leaves", verbose=False, face_size=face_size + ) class ReedMonocotFactory(GrassesMonocotFactory): def __init__(self, factory_seed, coarse=False): super(ReedMonocotFactory, self).__init__(factory_seed, coarse) with FixedSeed(factory_seed): - self.stem_offset = uniform(3., 4.) - self.scale_curve = [(0, 1.2), (1, .8)] + self.stem_offset = uniform(3.0, 4.0) + self.scale_curve = [(0, 1.2), (1, 0.8)] self.branch_factory = ReedBranchMonocotFactory(factory_seed, coarse) self.branch_material = shaderfunc_to_material(self.shader_ear) @staticmethod def build_base_hue(): - return uniform(.08, .12) + return uniform(0.08, 0.12) def create_asset(self, **params): obj = super().create_raw(**params) branch = self.branch_factory.create_asset(**params) self.branch_factory.decorate_monocot(branch) - branch.location[-1] = self.stem_offset - .02 + branch.location[-1] = self.stem_offset - 0.02 obj = join_objects([obj, branch]) - butil.modify_mesh(obj, 'WELD', merge_threshold=1e-3) + butil.modify_mesh(obj, "WELD", merge_threshold=1e-3) self.decorate_monocot(obj) assign_material(obj, [self.material, self.branch_material]) - write_material_index(obj, surface.read_attr_data(obj, 'ear', 'FACE').astype(int)[:, 0]) - tag_object(obj, 'reed') + write_material_index( + obj, surface.read_attr_data(obj, "ear", "FACE").astype(int)[:, 0] + ) + tag_object(obj, "reed") return obj @staticmethod def shader_ear(nw: NodeWrangler): - color = hsv2rgba(uniform(.06, .1), uniform(.2, .5), log_uniform(.2, .5)) - specular = uniform(.0, .2) - clearcoat = 0 if uniform(0, 1) < .8 else uniform(.2, .5) - noise_texture = nw.new_node(Nodes.NoiseTexture, input_kwargs={'Scale': 50}) - roughness = nw.build_float_curve(noise_texture, [(0, .5), (1, .8)]) - bsdf = nw.new_node(Nodes.PrincipledBSDF, input_kwargs={ - 'Base Color': color, - 'Roughness': roughness, - 'Specular': specular, - 'Clearcoat': clearcoat, - 'Subsurface': .01, - 'Subsurface Radius': (.01, .01, .01), - }) + color = hsv2rgba(uniform(0.06, 0.1), uniform(0.2, 0.5), log_uniform(0.2, 0.5)) + specular = uniform(0.0, 0.2) + clearcoat = 0 if uniform(0, 1) < 0.8 else uniform(0.2, 0.5) + noise_texture = nw.new_node(Nodes.NoiseTexture, input_kwargs={"Scale": 50}) + roughness = nw.build_float_curve(noise_texture, [(0, 0.5), (1, 0.8)]) + bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": color, + "Roughness": roughness, + "Specular": specular, + "Clearcoat": clearcoat, + "Subsurface": 0.01, + "Subsurface Radius": (0.01, 0.01, 0.01), + }, + ) return bsdf diff --git a/infinigen/assets/objects/monocot/growth.py b/infinigen/assets/objects/monocot/growth.py new file mode 100644 index 000000000..e1782fc9c --- /dev/null +++ b/infinigen/assets/objects/monocot/growth.py @@ -0,0 +1,307 @@ +# Copyright (c) Princeton University. 
+# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory +# of this source tree. + +# Authors: Lingjie Mei + + +from functools import reduce + +import bpy +import numpy as np +from numpy.random import uniform + +from infinigen.assets.utils.decorate import displace_vertices, geo_extension +from infinigen.assets.utils.misc import assign_material +from infinigen.assets.utils.nodegroup import geo_radius +from infinigen.assets.utils.object import data2mesh, mesh2obj, origin2leftmost +from infinigen.core import surface +from infinigen.core.nodes.node_info import Nodes +from infinigen.core.nodes.node_utils import build_color_ramp +from infinigen.core.nodes.node_wrangler import NodeWrangler +from infinigen.core.placement.detail import adapt_mesh_resolution +from infinigen.core.placement.factory import AssetFactory, make_asset_collection +from infinigen.core.surface import shaderfunc_to_material +from infinigen.core.tagging import tag_object +from infinigen.core.util import blender as butil +from infinigen.core.util.color import hsv2rgba +from infinigen.core.util.math import FixedSeed +from infinigen.core.util.random import log_uniform + + +class MonocotGrowthFactory(AssetFactory): + use_distance = False + + def __init__(self, factory_seed, coarse=False): + super(MonocotGrowthFactory, self).__init__(factory_seed, coarse) + with FixedSeed(factory_seed): + self.count = 128 + self.perturb = 0.05 + self.angle = np.pi / 6 + self.min_y_angle = 0.0 + self.max_y_angle = np.pi / 2 + self.leaf_prob = uniform(0.8, 0.9) + self.leaf_range = 0, 1 + self.stem_offset = 0.2 + self.scale_curve = [(0, 1), (1, 1)] + self.radius = 0.01 + self.bend_angle = np.pi / 4 + self.twist_angle = np.pi / 6 + self.z_drag = 0.0 + self.z_scale = uniform(1.0, 1.2) + self.align_factor = 0 + self.align_direction = 1, 0, 0 + self.base_hue = self.build_base_hue() + self.bright_color = hsv2rgba( + self.base_hue, uniform(0.6, 0.8), log_uniform(0.05, 0.1) + ) + self.dark_color = hsv2rgba( + (self.base_hue + uniform(-0.03, 0.03)) % 1, + uniform(0.8, 1.0), + log_uniform(0.05, 0.2), + ) + self.material = shaderfunc_to_material( + self.shader_monocot, + self.dark_color, + self.bright_color, + self.use_distance, + ) + + @staticmethod + def build_base_hue(): + return uniform(0.15, 0.35) + + @property + def is_grass(self): + return False + + def build_leaf(self, face_size): + raise NotImplementedError + + @staticmethod + def decorate_leaf( + obj, + y_ratio=4, + y_bend_angle=np.pi / 6, + z_bend_angle=np.pi / 6, + noise_scale=0.1, + strength=0.02, + leftmost=True, + ): + obj.rotation_euler[1] = -np.pi / 2 + butil.apply_transform(obj) + butil.modify_mesh( + obj, + "SIMPLE_DEFORM", + deform_method="BEND", + angle=uniform(0.5, 1) * y_bend_angle, + deform_axis="Y", + ) + obj.rotation_euler[1] = np.pi / 2 + butil.apply_transform(obj) + butil.modify_mesh( + obj, + "SIMPLE_DEFORM", + deform_method="BEND", + angle=uniform(-1, 1) * z_bend_angle, + deform_axis="Z", + ) + + displace_vertices(obj, lambda x, y, z: (0, 0, y_ratio * uniform(0, 1) * y * y)) + surface.add_geomod(obj, geo_extension, apply=True) + + texture = bpy.data.textures.new(name="grasses", type="STUCCI") + texture.noise_scale = noise_scale + butil.modify_mesh(obj, "DISPLACE", strength=strength, texture=texture) + + for direction, width in zip("XY", obj.dimensions[:2]): + texture = bpy.data.textures.new(name="grasses", type="STUCCI") + texture.noise_scale = noise_scale + butil.modify_mesh( + obj, + "DISPLACE", + strength=uniform(0.01, 0.02) * 
width, + texture=texture, + direction=direction, + ) + if leftmost: + origin2leftmost(obj) + return obj + + def make_geo_flower(self): + def geo_flower(nw: NodeWrangler, leaves): + stem = nw.new_node( + Nodes.GroupInput, + expose_input=[("NodeSocketGeometry", "Geometry", None)], + ) + line = nw.new_node( + Nodes.CurveLine, input_kwargs={"End": (0, 0, self.stem_offset)} + ) + points = nw.new_node(Nodes.ResampleCurve, [line, None, self.count]) + parameter = nw.new_node(Nodes.SplineParameter) + y_rotation = nw.build_float_curve( + parameter, [(0, -self.min_y_angle), (1, -self.max_y_angle)] + ) + z_rotation = nw.new_node( + Nodes.AccumulateField, + [None, nw.uniform(self.angle * 0.95, self.angle * 1.05)], + ) + rotation = nw.combine(0, y_rotation, z_rotation) + scale = nw.build_float_curve(parameter, self.scale_curve, "AUTO") + if self.perturb: + rotation = nw.add( + rotation, nw.uniform([-self.perturb] * 3, [self.perturb] * 3) + ) + scale = nw.add( + scale, nw.uniform([-self.perturb] * 3, [self.perturb] * 3) + ) + if self.align_factor: + rotation = nw.new_node( + Nodes.AlignEulerToVector, + input_kwargs={ + "Rotation": rotation, + "Factor": surface.eval_argument(nw, self.align_factor), + "Vector": self.align_direction, + }, + attrs={"pivot_axis": "Z"}, + ) + points, _, z_rotation = nw.new_node( + Nodes.CaptureAttribute, [points, None, z_rotation] + ).outputs[:3] + leaves = nw.new_node(Nodes.CollectionInfo, [leaves, True, True]) + is_leaf = reduce( + lambda *xs: nw.boolean_math("AND", *xs), + [ + nw.bernoulli(self.leaf_prob), + nw.compare("GREATER_EQUAL", parameter, self.leaf_range[0]), + nw.compare("LESS_EQUAL", parameter, self.leaf_range[1]), + ], + ) + instances = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={ + "Points": points, + "Selection": is_leaf, + "Instance": leaves, + "Pick Instance": True, + "Rotation": rotation, + "Scale": scale, + }, + ) + geometry = nw.new_node(Nodes.RealizeInstances, [instances]) + geometry = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": geometry, + "Name": "z_rotation", + "Value": z_rotation, + }, + ) + geometry = nw.new_node(Nodes.JoinGeometry, [[stem, geometry]]) + nw.new_node(Nodes.GroupOutput, input_kwargs={"Geometry": geometry}) + + return geo_flower + + def build_instance(self, i, face_size): + obj = self.build_leaf(face_size) + origin2leftmost(obj) + obj.location[0] -= 0.01 + butil.apply_transform(obj, loc=True) + return obj + + def make_collection(self, face_size): + return make_asset_collection( + self.build_instance, 10, "leaves", verbose=False, face_size=face_size + ) + + def build_stem(self, face_size): + obj = mesh2obj(data2mesh([[0, 0, 0], [0, 0, self.stem_offset]], [[0, 1]])) + butil.modify_mesh(obj, "SUBSURF", True, levels=9, render_levels=9) + surface.add_geomod(obj, geo_radius, apply=True, input_args=[self.radius, 16]) + adapt_mesh_resolution(obj, face_size, "subdivide") + + texture = bpy.data.textures.new(name="grasses", type="STUCCI") + texture.noise_scale = 0.1 + butil.modify_mesh(obj, "DISPLACE", strength=0.01, texture=texture) + tag_object(obj, "stem") + return obj + + def create_asset(self, **params): + obj = self.create_raw(**params) + self.decorate_monocot(obj) + tag_object(obj, "monocot_growth") + return obj + + def create_raw(self, face_size=0.01, apply=True, **params): + if self.angle != 0: + frequency = 2 * np.pi / self.angle + if 0.01 < frequency - int(frequency) < 0.05: + frequency += 0.05 + elif -0.05 < frequency - int(frequency) < -0.01: + frequency -= 0.05 + self.angle = 2 * np.pi / 
frequency + leaves = self.make_collection(face_size) + obj = self.build_stem(face_size) + surface.add_geomod( + obj, self.make_geo_flower(), apply=apply, input_args=[leaves] + ) + if apply: + butil.delete_collection(leaves) + tag_object(obj, "flower") + return obj + + def decorate_monocot(self, obj): + displace_vertices(obj, lambda x, y, z: (0, 0, -self.z_drag * (x * x + y * y))) + surface.add_geomod(obj, geo_extension, apply=True, input_args=[0.4]) + butil.modify_mesh( + obj, + "SIMPLE_DEFORM", + deform_method="TWIST", + angle=uniform(-self.twist_angle, self.twist_angle), + deform_axis="Z", + ) + butil.modify_mesh( + obj, + "SIMPLE_DEFORM", + deform_method="BEND", + angle=uniform(0, self.bend_angle), + ) + obj.scale = uniform(0.8, 1.2), uniform(0.8, 1.2), self.z_scale + obj.rotation_euler[-1] = uniform(0, np.pi * 2) + butil.apply_transform(obj) + assign_material(obj, self.material) + + @staticmethod + def shader_monocot(nw: NodeWrangler, dark_color, bright_color, use_distance): + specular = uniform(0.0, 0.2) + clearcoat = 0 if uniform(0, 1) < 0.8 else uniform(0.2, 0.5) + if use_distance: + distance = nw.new_node( + Nodes.Attribute, attrs={"attribute_name": "distance"} + ).outputs["Fac"] + exponent = uniform(1.8, 3.5) + ratio = nw.scalar_sub( + 1, nw.math("POWER", nw.scalar_sub(1, distance), exponent) + ) + color = nw.new_node(Nodes.MixRGB, [ratio, bright_color, dark_color]) + else: + color = build_color_ramp( + nw, + nw.musgrave(10), + [0.0, 0.3, 0.7, 1.0], + [bright_color, bright_color, dark_color, dark_color], + ) + noise_texture = nw.new_node(Nodes.NoiseTexture, input_kwargs={"Scale": 50}) + roughness = nw.build_float_curve(noise_texture, [(0, 0.5), (1, 0.8)]) + bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": color, + "Roughness": roughness, + "Specular": specular, + "Clearcoat": clearcoat, + "Subsurface": 0.01, + "Subsurface Radius": (0.01, 0.01, 0.01), + }, + ) + return bsdf diff --git a/infinigen/assets/monocot/kelp.py b/infinigen/assets/objects/monocot/kelp.py similarity index 62% rename from infinigen/assets/monocot/kelp.py rename to infinigen/assets/objects/monocot/kelp.py index 5851f9cc2..f32cdcbf5 100644 --- a/infinigen/assets/monocot/kelp.py +++ b/infinigen/assets/objects/monocot/kelp.py @@ -9,15 +9,17 @@ from numpy.random import uniform import infinigen.core.util.blender as butil -from infinigen.assets.creatures.util.animation.driver_repeated import repeated_driver -from infinigen.assets.monocot.growth import MonocotGrowthFactory +from infinigen.assets.objects.creatures.util.animation.driver_repeated import ( + repeated_driver, +) +from infinigen.assets.objects.monocot.growth import MonocotGrowthFactory from infinigen.assets.utils.draw import bezier_curve, leaf from infinigen.assets.utils.misc import assign_material -from infinigen.core.util.random import log_uniform from infinigen.assets.utils.object import join_objects, origin2leftmost from infinigen.core.nodes.node_wrangler import NodeWrangler from infinigen.core.placement.detail import remesh_with_attrs from infinigen.core.util.math import FixedSeed +from infinigen.core.util.random import log_uniform class KelpMonocotFactory(MonocotGrowthFactory): @@ -27,66 +29,79 @@ class KelpMonocotFactory(MonocotGrowthFactory): def __init__(self, factory_seed, coarse=False): super(KelpMonocotFactory, self).__init__(factory_seed, coarse) with FixedSeed(factory_seed): - self.stem_offset = 10. 
+ self.stem_offset = 10.0 self.angle = uniform(np.pi / 6, np.pi / 4) - self.z_drag = uniform(.0, .2) - self.min_y_angle = uniform(0, np.pi * .1) + self.z_drag = uniform(0.0, 0.2) + self.min_y_angle = uniform(0, np.pi * 0.1) self.max_y_angle = self.min_y_angle self.bend_angle = uniform(0, np.pi / 6) self.twist_angle = uniform(0, np.pi / 6) self.count = 512 - self.leaf_prob = uniform(.6, .7) + self.leaf_prob = uniform(0.6, 0.7) self.align_angle = uniform(np.pi / 30, np.pi / 15) - self.radius = .02 + self.radius = 0.02 self.align_factor = self.make_align_factor() self.align_direction = self.make_align_direction() flow_angle = uniform(0, np.pi * 2) - self.align_direction = np.cos(flow_angle), np.sin(flow_angle), uniform(-.2, .2) + self.align_direction = ( + np.cos(flow_angle), + np.sin(flow_angle), + uniform(-0.2, 0.2), + ) self.anim_freq = 1 / log_uniform(100, 200) self.anim_offset = uniform(0, 1) self.anim_seed = np.random.randint(1e5) def make_align_factor(self): def align_factor(nw: NodeWrangler): - rand = nw.uniform(.7, .95) - driver = rand.inputs[2].driver_add('default_value').driver - driver.expression = repeated_driver(.7, .85, self.anim_freq, self.anim_offset, self.anim_seed) - return nw.scalar_multiply(nw.bernoulli(.9), rand) + rand = nw.uniform(0.7, 0.95) + driver = rand.inputs[2].driver_add("default_value").driver + driver.expression = repeated_driver( + 0.7, 0.85, self.anim_freq, self.anim_offset, self.anim_seed + ) + return nw.scalar_multiply(nw.bernoulli(0.9), rand) return align_factor def make_align_direction(self): def align_direction(nw: NodeWrangler): direction = nw.combine(1, 0, 0) - driver = direction.inputs[2].driver_add('default_value').driver - driver.expression = repeated_driver(-.5, -.1, self.anim_freq, self.anim_offset, self.anim_seed) + driver = direction.inputs[2].driver_add("default_value").driver + driver.expression = repeated_driver( + -0.5, -0.1, self.anim_freq, self.anim_offset, self.anim_seed + ) return direction return align_direction @staticmethod def build_base_hue(): - return uniform(.05, .25) + return uniform(0.05, 0.25) def build_instance(self, i, face_size): - x_anchors = np.array([0, -.02, -.04]) - y_anchors = np.array([0, uniform(.01, .02), 0]) + x_anchors = np.array([0, -0.02, -0.04]) + y_anchors = np.array([0, uniform(0.01, 0.02), 0]) curves = [] for angle in np.linspace(0, np.pi * 2, 6): anchors = [x_anchors, np.cos(angle) * y_anchors, np.sin(angle) * y_anchors] curves.append(bezier_curve(anchors)) bud = butil.join_objects(curves) - bud.location[0] += .02 - with butil.ViewportMode(bud, 'EDIT'): - bpy.ops.mesh.select_all(action='SELECT') + bud.location[0] += 0.02 + with butil.ViewportMode(bud, "EDIT"): + bpy.ops.mesh.select_all(action="SELECT") bpy.ops.mesh.convex_hull() remesh_with_attrs(bud, face_size) - x_anchors = 0, uniform(.35, .65), uniform(.8, 1.2) - y_anchors = 0, uniform(.06, .08), 0 + x_anchors = 0, uniform(0.35, 0.65), uniform(0.8, 1.2) + y_anchors = 0, uniform(0.06, 0.08), 0 obj = leaf(x_anchors, y_anchors, face_size=face_size) obj = join_objects([obj, bud]) - self.decorate_leaf(obj, uniform(-2, 2), uniform(-np.pi / 4, np.pi / 4), uniform(-np.pi / 4, np.pi / 4)) + self.decorate_leaf( + obj, + uniform(-2, 2), + uniform(-np.pi / 4, np.pi / 4), + uniform(-np.pi / 4, np.pi / 4), + ) origin2leftmost(obj) return obj diff --git a/infinigen/assets/objects/monocot/pinecone.py b/infinigen/assets/objects/monocot/pinecone.py new file mode 100644 index 000000000..e863689cb --- /dev/null +++ b/infinigen/assets/objects/monocot/pinecone.py @@ -0,0 
+1,100 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Lingjie Mei + + +import bpy +import numpy as np +from numpy.random import uniform + +import infinigen.core.util.blender as butil +from infinigen.assets.objects.monocot.growth import MonocotGrowthFactory +from infinigen.assets.utils.draw import shape_by_angles, shape_by_xs +from infinigen.assets.utils.object import new_circle +from infinigen.core.nodes.node_info import Nodes +from infinigen.core.nodes.node_utils import build_color_ramp +from infinigen.core.nodes.node_wrangler import NodeWrangler +from infinigen.core.placement.detail import remesh_with_attrs +from infinigen.core.surface import shaderfunc_to_material +from infinigen.core.tagging import tag_object +from infinigen.core.util.color import hsv2rgba +from infinigen.core.util.math import FixedSeed +from infinigen.core.util.random import log_uniform + + +class PineconeFactory(MonocotGrowthFactory): + def __init__(self, factory_seed, coarse=False): + super().__init__(factory_seed, coarse) + with FixedSeed(factory_seed): + self.angle = 2 * np.pi / (np.random.randint(4, 8) + 0.5) + self.max_y_angle = uniform(0.7, 0.8) * np.pi / 2 + self.leaf_prob = uniform(0.9, 0.95) + self.count = int(log_uniform(64, 96)) + self.stem_offset = uniform(0.2, 0.4) + self.perturb = 0 + self.scale_curve = [ + (0, 0.5), + (0.5, uniform(0.6, 1.0)), + (1, uniform(0.1, 0.2)), + ] + self.bright_color = hsv2rgba(uniform(0.02, 0.06), uniform(0.8, 1.0), 0.01) + self.dark_color = hsv2rgba(uniform(0.02, 0.06), uniform(0.8, 1.0), 0.005) + self.material = shaderfunc_to_material( + self.shader_monocot, + self.dark_color, + self.bright_color, + self.use_distance, + ) + + def build_leaf(self, face_size): + obj = new_circle(vertices=128) + with butil.ViewportMode(obj, "EDIT"): + bpy.ops.mesh.fill_grid() + angles = np.array([-1, -0.8, -0.5, 0, 0.5, 0.8, 1]) * self.angle / 2 + scale = uniform(0.9, 0.95) + scales = [0, 0.7, scale, 1, scale, 0.7, 0] + displacement = [0, 0, 0, -uniform(0.2, 0.3), 0, 0, 0] + shape_by_angles(obj, angles, scales, displacement) + + with butil.ViewportMode(obj, "EDIT"): + bpy.ops.mesh.convex_hull() + + xs = [0, 1, 2] + displacement = [0, 0, 0.5] + shape_by_xs(obj, xs, displacement) + + obj.scale = [0.1] * 3 + obj.rotation_euler[1] -= uniform(np.pi / 18, np.pi / 12) + butil.apply_transform(obj) + remesh_with_attrs(obj, face_size) + + texture = bpy.data.textures.new(name="pinecone", type="STUCCI") + texture.noise_scale = log_uniform(0.002, 0.005) + butil.modify_mesh( + obj, "DISPLACE", True, strength=0.001, mid_level=0, texture=texture + ) + + tag_object(obj, "pinecone") + return obj + + @staticmethod + def shader_monocot(nw: NodeWrangler, dark_color, bright_color, use_distance): + specular = uniform(0.2, 0.4) + color = build_color_ramp( + nw, + nw.musgrave(10), + [0.0, 0.3, 0.7, 1.0], + [bright_color, bright_color, dark_color, dark_color], + ) + noise_texture = nw.new_node(Nodes.NoiseTexture, input_kwargs={"Scale": 50}) + roughness = nw.build_float_curve(noise_texture, [(0, 0.5), (1, 0.8)]) + bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": color, + "Roughness": roughness, + "Specular": specular, + }, + ) + return bsdf diff --git a/infinigen/assets/monocot/tussock.py b/infinigen/assets/objects/monocot/tussock.py similarity index 64% rename from infinigen/assets/monocot/tussock.py rename to infinigen/assets/objects/monocot/tussock.py 
index 225427d8a..13bf520fe 100644 --- a/infinigen/assets/monocot/tussock.py +++ b/infinigen/assets/objects/monocot/tussock.py @@ -6,36 +6,36 @@ import numpy as np from numpy.random import uniform +from infinigen.assets.objects.monocot.growth import MonocotGrowthFactory from infinigen.assets.utils.draw import leaf -from infinigen.assets.monocot.growth import MonocotGrowthFactory -from infinigen.core.util.random import log_uniform -from infinigen.core.util.math import FixedSeed from infinigen.core.tagging import tag_object +from infinigen.core.util.math import FixedSeed +from infinigen.core.util.random import log_uniform class TussockMonocotFactory(MonocotGrowthFactory): def __init__(self, factory_seed, coarse=False): super(TussockMonocotFactory, self).__init__(factory_seed, coarse) with FixedSeed(factory_seed): - self.stem_offset = uniform(.0, .2) + self.stem_offset = uniform(0.0, 0.2) self.angle = uniform(np.pi / 20, np.pi / 18) - self.z_drag = uniform(.1, .2) - self.min_y_angle = uniform(np.pi * .2, np.pi * .25) + self.z_drag = uniform(0.1, 0.2) + self.min_y_angle = uniform(np.pi * 0.2, np.pi * 0.25) self.max_y_angle = np.pi / 2 self.count = int(log_uniform(512, 1024)) - self.scale_curve = [(0, uniform(.6, 1.)), (1, uniform(.6, 1.))] + self.scale_curve = [(0, uniform(0.6, 1.0)), (1, uniform(0.6, 1.0))] @staticmethod def build_base_hue(): - if uniform(0, 1) < .5: - return uniform(.1, .15) + if uniform(0, 1) < 0.5: + return uniform(0.1, 0.15) else: - return uniform(.25, .35) + return uniform(0.25, 0.35) def build_leaf(self, face_size): - x_anchors = np.array([0, uniform(.3, .7), 1.]) - y_anchors = np.array([0, .01, 0]) + x_anchors = np.array([0, uniform(0.3, 0.7), 1.0]) + y_anchors = np.array([0, 0.01, 0]) obj = leaf(x_anchors, y_anchors, face_size=face_size) self.decorate_leaf(obj) - tag_object(obj, 'tussock') + tag_object(obj, "tussock") return obj diff --git a/infinigen/assets/monocot/veratrum.py b/infinigen/assets/objects/monocot/veratrum.py similarity index 52% rename from infinigen/assets/monocot/veratrum.py rename to infinigen/assets/objects/monocot/veratrum.py index a7906be1e..01cef3027 100644 --- a/infinigen/assets/monocot/veratrum.py +++ b/infinigen/assets/objects/monocot/veratrum.py @@ -5,43 +5,49 @@ # Authors: Lingjie Mei -import colorsys - import bpy import numpy as np from numpy.random import uniform -from infinigen.assets.monocot.growth import MonocotGrowthFactory -from infinigen.assets.utils.decorate import distance2boundary, write_attribute, write_material_index +from infinigen.assets.objects.monocot.growth import MonocotGrowthFactory +from infinigen.assets.utils.decorate import ( + distance2boundary, + write_attribute, + write_material_index, +) +from infinigen.assets.utils.draw import leaf, spin from infinigen.assets.utils.misc import assign_material from infinigen.assets.utils.object import join_objects -from infinigen.assets.utils.draw import leaf, spin -from infinigen.core.util.color import hsv2rgba -from infinigen.core.util.random import log_uniform +from infinigen.core import surface from infinigen.core.nodes.node_info import Nodes from infinigen.core.nodes.node_wrangler import NodeWrangler from infinigen.core.placement.factory import AssetFactory -from infinigen.core import surface from infinigen.core.surface import shaderfunc_to_material -from infinigen.core.util.math import FixedSeed +from infinigen.core.tagging import tag_object from infinigen.core.util import blender as butil -from infinigen.core.tagging import tag_object, tag_nodegroup +from 
infinigen.core.util.color import hsv2rgba +from infinigen.core.util.math import FixedSeed +from infinigen.core.util.random import log_uniform class VeratrumMonocotFactory(MonocotGrowthFactory): - def __init__(self, factory_seed, coarse=False): super(VeratrumMonocotFactory, self).__init__(factory_seed, coarse) with FixedSeed(factory_seed): - self.stem_offset = uniform(1., 1.5) + self.stem_offset = uniform(1.0, 1.5) self.angle = uniform(np.pi / 4, np.pi / 3) - self.z_drag = uniform(.4, .5) + self.z_drag = uniform(0.4, 0.5) self.bend_angle = np.pi / 2 - self.min_y_angle = uniform(np.pi * .25, np.pi * .35) - self.max_y_angle = uniform(np.pi * .6, np.pi * .7) + self.min_y_angle = uniform(np.pi * 0.25, np.pi * 0.35) + self.max_y_angle = uniform(np.pi * 0.6, np.pi * 0.7) self.count = int(log_uniform(32, 64)) - self.scale_curve = (0, uniform(.8, 1.)), (.4, .6), (.8, uniform(0, .1)), (1, 0) - self.leaf_range = 0, uniform(.7, .8) + self.scale_curve = ( + (0, uniform(0.8, 1.0)), + (0.4, 0.6), + (0.8, uniform(0, 0.1)), + (1, 0), + ) + self.leaf_range = 0, uniform(0.7, 0.8) self.bud_angle = uniform(np.pi / 15, np.pi / 12) self.freq = uniform(25, 50) self.branches_factory = VeratrumBranchMonocotFactory(factory_seed, coarse) @@ -49,49 +55,60 @@ def __init__(self, factory_seed, coarse=False): @staticmethod def build_base_hue(): - return uniform(.12, .32) + return uniform(0.12, 0.32) @staticmethod def shader_ear(nw: NodeWrangler): - color = hsv2rgba(uniform(.1, .35), uniform(.1, .5), log_uniform(.2, .5)) - specular = uniform(.0, .2) - clearcoat = 0 if uniform(0, 1) < .8 else uniform(.2, .5) - noise_texture = nw.new_node(Nodes.NoiseTexture, input_kwargs={'Scale': 50}) - roughness = nw.build_float_curve(noise_texture, [(0, .5), (1, .8)]) - bsdf = nw.new_node(Nodes.PrincipledBSDF, input_kwargs={ - 'Base Color': color, - 'Roughness': roughness, - 'Specular': specular, - 'Clearcoat': clearcoat, - 'Subsurface': .01, - 'Subsurface Radius': (.01, .01, .01), - }) + color = hsv2rgba(uniform(0.1, 0.35), uniform(0.1, 0.5), log_uniform(0.2, 0.5)) + specular = uniform(0.0, 0.2) + clearcoat = 0 if uniform(0, 1) < 0.8 else uniform(0.2, 0.5) + noise_texture = nw.new_node(Nodes.NoiseTexture, input_kwargs={"Scale": 50}) + roughness = nw.build_float_curve(noise_texture, [(0, 0.5), (1, 0.8)]) + bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": color, + "Roughness": roughness, + "Specular": specular, + "Clearcoat": clearcoat, + "Subsurface": 0.01, + "Subsurface Radius": (0.01, 0.01, 0.01), + }, + ) return bsdf def build_leaf(self, face_size): - x_anchors = 0, .2 * np.cos(self.bud_angle), uniform(.6, .7), .8 - y_anchors = 0, .2 * np.sin(self.bud_angle), uniform(.06, .1), 0 + x_anchors = 0, 0.2 * np.cos(self.bud_angle), uniform(0.6, 0.7), 0.8 + y_anchors = 0, 0.2 * np.sin(self.bud_angle), uniform(0.06, 0.1), 0 obj = leaf(x_anchors, y_anchors, face_size=face_size) distance = distance2boundary(obj) - vg = obj.vertex_groups.new(name='distance') + vg = obj.vertex_groups.new(name="distance") weights = np.cos(self.freq * distance) ** 4 for i, w in enumerate(weights): - vg.add([i], w, 'REPLACE') - butil.modify_mesh(obj, 'DISPLACE', strength=-uniform(5e-3, 8e-3), mid_level=0, vertex_group='distance') + vg.add([i], w, "REPLACE") + butil.modify_mesh( + obj, + "DISPLACE", + strength=-uniform(5e-3, 8e-3), + mid_level=0, + vertex_group="distance", + ) self.decorate_leaf(obj, 8, np.pi / 2) return obj def create_asset(self, **params): obj = super().create_raw(**params) branches = 
self.branches_factory.create_asset(**params) - branches.location[-1] = self.stem_offset - .02 + branches.location[-1] = self.stem_offset - 0.02 obj = join_objects([obj, branches]) self.decorate_monocot(obj) assign_material(obj, [self.material, self.branch_material]) - write_material_index(obj, surface.read_attr_data(obj, 'ear', 'FACE').astype(int)) - tag_object(obj, 'veratrum') + write_material_index( + obj, surface.read_attr_data(obj, "ear", "FACE").astype(int) + ) + tag_object(obj, "veratrum") return obj @@ -100,12 +117,14 @@ class VeratrumBranchMonocotFactory(AssetFactory): def __init__(self, factory_seed, coarse=False): super(VeratrumBranchMonocotFactory, self).__init__(factory_seed, coarse) - self.branch_factories = [VeratrumEarMonocotFactory(self.factory_seed * self.max_branches + i, coarse) - for i in range(np.random.randint(3, self.max_branches) + 1)] - self.primary_stem_offset = uniform(.4, .8) + self.branch_factories = [ + VeratrumEarMonocotFactory(self.factory_seed * self.max_branches + i, coarse) + for i in range(np.random.randint(3, self.max_branches) + 1) + ] + self.primary_stem_offset = uniform(0.4, 0.8) for i, f in enumerate(self.branch_factories): - scale = log_uniform(.3, .6) if i > 0 else 1 + scale = log_uniform(0.3, 0.6) if i > 0 else 1 f.stem_offset = scale * self.primary_stem_offset f.count = int(log_uniform(64, 238) * scale) @@ -113,32 +132,41 @@ def create_asset(self, **params) -> bpy.types.Object: branches = [f.create_asset(**params) for f in self.branch_factories] for i, branch in enumerate(branches): if i > 0: - branch.location[-1] = self.primary_stem_offset * uniform(0, .6) - branch.rotation_euler = uniform(np.pi * .25, np.pi * .4), 0, uniform(0, np.pi * 2) + branch.location[-1] = self.primary_stem_offset * uniform(0, 0.6) + branch.rotation_euler = ( + uniform(np.pi * 0.25, np.pi * 0.4), + 0, + uniform(0, np.pi * 2), + ) obj = join_objects(branches) - tag_object(obj, 'veratrum_branch') + tag_object(obj, "veratrum_branch") return obj class VeratrumEarMonocotFactory(MonocotGrowthFactory): - def __init__(self, factory_seed, coarse=False): super(VeratrumEarMonocotFactory, self).__init__(factory_seed, coarse) self.angle = uniform(np.pi / 4, np.pi / 3) - self.min_y_angle = uniform(np.pi * .25, np.pi * .3) - self.max_y_angle = uniform(np.pi * .3, np.pi * .35) + self.min_y_angle = uniform(np.pi * 0.25, np.pi * 0.3) + self.max_y_angle = uniform(np.pi * 0.3, np.pi * 0.35) self.count = np.random.randint(64, 128) - self.leaf_prob = uniform(.6, .8) - self.leaf_range = 0, .98 + self.leaf_prob = uniform(0.6, 0.8) + self.leaf_range = 0, 0.98 def build_leaf(self, face_size): - x_anchors = 0, .04, .06, .04, 0 - y_anchors = 0, .01, 0, -.01, 0 - z_anchors = 0, - .01, -.01, -.006, 0 + x_anchors = 0, 0.04, 0.06, 0.04, 0 + y_anchors = 0, 0.01, 0, -0.01, 0 + z_anchors = 0, -0.01, -0.01, -0.006, 0 anchors = [x_anchors, y_anchors, z_anchors] - obj = spin(anchors, [0, 2, 4], dupli=True, loop=True, resolution=np.random.randint(3, 5), - axis=(1, 0, 0)) - butil.modify_mesh(obj, 'WELD', merge_threshold=face_size / 2) - write_attribute(obj, 1, 'ear', 'FACE') - tag_object(obj, 'veratrum_ear') + obj = spin( + anchors, + [0, 2, 4], + dupli=True, + loop=True, + resolution=np.random.randint(3, 5), + axis=(1, 0, 0), + ) + butil.modify_mesh(obj, "WELD", merge_threshold=face_size / 2) + write_attribute(obj, 1, "ear", "FACE") + tag_object(obj, "veratrum_ear") return obj diff --git a/infinigen/assets/mushroom/__init__.py b/infinigen/assets/objects/mushroom/__init__.py similarity index 52% rename from 
infinigen/assets/mushroom/__init__.py rename to infinigen/assets/objects/mushroom/__init__.py index c344738fe..df7ffbf96 100644 --- a/infinigen/assets/mushroom/__init__.py +++ b/infinigen/assets/objects/mushroom/__init__.py @@ -1,2 +1,2 @@ +from .generate import MushroomFactory from .growth import MushroomGrowthFactory -from .generate import MushroomFactory \ No newline at end of file diff --git a/infinigen/assets/objects/mushroom/cap.py b/infinigen/assets/objects/mushroom/cap.py new file mode 100644 index 000000000..d2e3da10a --- /dev/null +++ b/infinigen/assets/objects/mushroom/cap.py @@ -0,0 +1,598 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Lingjie Mei + + +import colorsys + +import bpy +import numpy as np +from numpy.random import uniform + +from infinigen.assets.utils.decorate import ( + displace_vertices, + geo_extension, + subsurface2face_size, +) +from infinigen.assets.utils.draw import spin +from infinigen.assets.utils.mesh import polygon_angles +from infinigen.assets.utils.misc import assign_material +from infinigen.assets.utils.object import data2mesh, join_objects, mesh2obj +from infinigen.core import surface +from infinigen.core.nodes.node_info import Nodes +from infinigen.core.nodes.node_utils import build_color_ramp +from infinigen.core.nodes.node_wrangler import NodeWrangler +from infinigen.core.placement.detail import remesh_with_attrs +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.tagging import tag_object +from infinigen.core.util import blender as butil +from infinigen.core.util.color import hsv2rgba +from infinigen.core.util.math import FixedSeed +from infinigen.core.util.random import log_uniform + + +class MushroomCapFactory(AssetFactory): + def __init__(self, factory_seed, base_hue, material_func, coarse=False): + super().__init__(factory_seed, coarse) + with FixedSeed(factory_seed): + self.x_scale, self.z_scale = uniform(0.7, 1.4, 2) + self.cap_configs = [ + self.campanulate, + self.conical, + self.convex, + self.depressed, + self.flat, + self.infundiuliform, + self.ovate, + self.umbillicate, + self.umbonate, + ] + config_weights = np.array([2, 2, 2, 1, 2, 1, 2, 1, 1]) + cap_config = np.random.choice( + self.cap_configs, p=config_weights / config_weights.sum() + ) + self.cap_config = { + **cap_config, + "x_anchors": [_ * self.x_scale for _ in cap_config["x_anchors"]], + "z_anchors": [_ * self.z_scale for _ in cap_config["z_anchors"]], + } + + self.radius = max(self.cap_config["x_anchors"]) + self.inner_radius = log_uniform(0.2, 0.35) * self.radius + + self.gill_configs = [self.adnexed_gill, self.decurrent_gill, None] + gill_configs = np.array([1, 1, 1]) + self.gill_config = np.random.choice( + self.gill_configs, p=gill_configs / gill_configs.sum() + ) + if not self.cap_config["has_gill"]: + self.gill_config = None + + self.shader_funcs = [ + self.shader_cap, + self.shader_noise, + self.shader_voronoi, + self.shader_speckle, + ] + shader_weights = np.array([2, 1, 1, 1]) + self.shader_func = np.random.choice( + self.shader_funcs, p=shader_weights / shader_weights.sum() + ) + + self.is_morel = uniform(0, 1) < 0.5 and self.shader_func == self.shader_cap + + self.base_hue = base_hue + self.material_cap = surface.shaderfunc_to_material( + self.shader_func, self.base_hue + ) + self.material = material_func() + + @property + def campanulate(self): + x = uniform(0.12, 0.15) + return { + "x_anchors": 
[0, x, x, 0.08, 0.04, 0], + "z_anchors": [ + 0, + 0, + uniform(0.03, 0.05), + uniform(0.1, 0.12), + uniform(0.16, 0.2), + 0.2, + ], + "vector_locations": [], + "has_gill": True, + } + + @property + def conical(self): + z = uniform(0.2, 0.3) + return { + "x_anchors": [0, uniform(0.12, 0.15), 0.01, 0], + "z_anchors": [0, 0, z, z], + "vector_locations": [1], + "has_gill": True, + } + + @property + def convex(self): + z = uniform(0.14, 0.16) + return { + "x_anchors": [0, 0.15, 0.12, 0.01, 0], + "z_anchors": [0, 0, uniform(0.04, 0.06), z, z], + "vector_locations": [1], + "has_gill": True, + } + + @property + def depressed(self): + z = uniform(0.03, 0.05) + return { + "x_anchors": [0, 0.15, 0.12, 0], + "z_anchors": [0, 0, uniform(0.06, 0.08), z], + "vector_locations": [1], + "has_gill": True, + } + + @property + def flat(self): + z = uniform(0.05, 0.07) + return { + "x_anchors": [0, 0.15, 0.12, 0], + "z_anchors": [0, 0, z, z], + "vector_locations": [1], + "has_gill": True, + } + + @property + def infundiuliform(self): + z = uniform(0.08, 0.12) + x = uniform(0.12, 0.15) + return { + "x_anchors": [0, 0.03, x, x - 0.01, 0], + "z_anchors": [0, 0, z, z + uniform(0.005, 0.01), 0.02], + "vector_locations": [], + "has_gill": False, + } + + @property + def ovate(self): + z = uniform(0.2, 0.3) + return { + "x_anchors": [0, uniform(0.12, 0.15), 0.08, 0.01, 0], + "z_anchors": [0, 0, 0.8 * z, z, z], + "vector_locations": [1], + "has_gill": True, + } + + @property + def umbillicate(self): + z = uniform(0.03, 0.05) + return { + "x_anchors": [0, 0.15, 0.12, 0.02, 0], + "z_anchors": [0, 0.04, uniform(0.06, 0.08), z + 0.02, z], + "vector_locations": [], + "has_gill": False, + } + + @property + def umbonate(self): + z = uniform(0.05, 0.07) + z_ = z + uniform(0.02, 0.04) + return { + "x_anchors": [0, 0.15, 0.12, 0.06, 0.02, 0], + "z_anchors": [0, 0, z - 0.01, z, z_, z_], + "vector_locations": [1], + "has_gill": True, + } + + @property + def adnexed_gill(self): + return { + "x_anchors": [ + self.radius, + (self.radius + self.inner_radius) / 2, + self.inner_radius, + self.inner_radius, + self.radius, + ], + "z_anchors": [0, -uniform(0.05, 0.08), -uniform(0, 0.02), 0, 0], + "vector_locations": [3], + } + + @property + def decurrent_gill(self): + return { + "x_anchors": [ + self.radius, + (self.radius + self.inner_radius) / 2, + self.inner_radius, + 0, + self.radius, + ], + "z_anchors": [0, -uniform(0.05, 0.08), -uniform(0.08, 0.1), 0, 0], + "vector_locations": [2], + } + + @staticmethod + def geo_xyz(nw: NodeWrangler): + geometry = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) + for name, component in zip( + "xyz", nw.separate(nw.new_node(Nodes.InputPosition)) + ): + component = nw.math("ABSOLUTE", component) + m = nw.new_node( + Nodes.AttributeStatistic, [geometry, None, component] + ).outputs["Max"] + geometry = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": geometry, + "Name": name, + "Value": nw.scalar_divide(component, m), + }, + ) + nw.new_node(Nodes.GroupOutput, input_kwargs={"Geometry": geometry}) + + @staticmethod + def geo_morel(nw: NodeWrangler): + geometry = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) + selection = nw.compare( + "LESS_THAN", + nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={"Scale": uniform(15, 20), "Randomness": uniform(0.5, 1)}, + attrs={"feature": "DISTANCE_TO_EDGE"}, + ), + 0.05, + ) + geometry = nw.new_node( + Nodes.StoreNamedAttribute, + 
input_kwargs={"Geometry": geometry, "Name": "morel", "Value": selection}, + ) + nw.new_node(Nodes.GroupOutput, input_kwargs={"Geometry": geometry}) + + def apply_cut(self, obj): + if max(self.cap_config["x_anchors"]) > 0.1: + return + n_cuts = np.random.randint(0, 5) + angles = polygon_angles(n_cuts, np.pi / 4, np.pi * 2) + for a in angles: + width = uniform(0.15, 0.2) * 0.4 + vertices = [ + [0, 0, 0.4], + [0.4, -width, 0.4], + [0.4, width, 0.4], + [0, 0, -1], + [0.4, -width, -0.01], + [0.4, width, -0.01], + ] + faces = [[0, 1, 2], [1, 0, 3, 4], [2, 1, 4, 5], [0, 2, 5, 3], [5, 4, 3]] + cutter = mesh2obj(data2mesh(vertices, [], faces)) + displace_vertices(cutter, lambda x, y, z: (0, 2 * y * y, 0)) + butil.modify_mesh( + cutter, "SUBSURF", render_levels=5, levels=5, subdivision_type="SIMPLE" + ) + depth = self.radius * uniform(0.4, 0.7) + cutter.location = np.cos(a) * depth, np.sin(a) * depth, 0 + cutter.rotation_euler = 0, 0, a + uniform(-np.pi / 4, np.pi / 4) + butil.modify_mesh(obj, "WELD", merge_threshold=0.002) + butil.modify_mesh( + obj, "BOOLEAN", object=cutter, operation="DIFFERENCE", apply=True + ) + butil.delete(cutter) + + def create_asset(self, face_size, **params) -> bpy.types.Object: + cap_config = self.cap_config + anchors = cap_config["x_anchors"], 0, cap_config["z_anchors"] + obj = spin(anchors, cap_config["vector_locations"]) + self.apply_cut(obj) + remesh_with_attrs(obj, face_size) + surface.add_geomod(obj, self.geo_xyz, apply=True) + surface.add_geomod(obj, self.geo_morel, apply=True) + assign_material(obj, self.material_cap) + + if self.is_morel: + with butil.SelectObjects(obj): + surface.set_active(obj, "morel") + bpy.ops.geometry.attribute_convert(mode="VERTEX_GROUP") + butil.modify_mesh( + obj, "DISPLACE", vertex_group="morel", strength=0.04, mid_level=0.7 + ) + + if self.gill_config is not None: + gill_config = self.gill_config + anchors = gill_config["x_anchors"], 0, gill_config["z_anchors"] + gill = spin( + anchors, + gill_config["vector_locations"], + dupli=True, + loop=True, + resolution=np.random.randint(8, 20), + ) + subsurface2face_size(gill, face_size) + assign_material(gill, self.material) + obj = join_objects([obj, gill]) + + texture = bpy.data.textures.new( + name="cap", type=np.random.choice(["STUCCI", "MARBLE"]) + ) + texture.noise_scale = log_uniform(0.01, 0.05) + butil.modify_mesh(obj, "DISPLACE", strength=0.008, texture=texture, mid_level=0) + + surface.add_geomod(obj, geo_extension, apply=True, input_args=[0.1]) + butil.modify_mesh( + obj, + "SIMPLE_DEFORM", + deform_method="TWIST", + angle=uniform(-np.pi / 4, np.pi / 4), + deform_axis="X", + ) + r1, r2, r3, r4 = uniform(-0.25, 0.25, 4) + displace_vertices( + obj, + lambda x, y, z: ( + np.where(x > 0, r1, r2) * x, + np.where(y > 0, r3, r4) * y, + 0, + ), + ) + tag_object(obj, "cap") + return obj + + @staticmethod + def shader_voronoi(nw: NodeWrangler, base_hue): + bright_color = hsv2rgba(base_hue, uniform(0.4, 0.8), log_uniform(0.05, 0.2)) + dark_color = ( + *colorsys.hsv_to_rgb( + (base_hue + uniform(-0.05, 0.05)) % 1, + uniform(0.4, 0.8), + log_uniform(0.01, 0.05), + ), + 1, + ) + subsurface_color = ( + *colorsys.hsv_to_rgb( + (base_hue + uniform(-0.05, 0.05)) % 1, + uniform(0.4, 0.8), + log_uniform(0.05, 0.2), + ), + 1, + ) + light_color = hsv2rgba(base_hue, uniform(0, 0.1), uniform(0.2, 0.8)) + anchors = [0.0, 0.3, 0.6, 1.0] if uniform(0, 1) < 0.5 else [0.0, 0.4, 0.7, 1.0] + color = build_color_ramp( + nw, + nw.musgrave(500), + anchors, + [dark_color, dark_color, bright_color, bright_color], + ) 
+ + x = nw.new_node(Nodes.Attribute, attrs={"attribute_name": "x"}).outputs["Fac"] + y = nw.new_node(Nodes.Attribute, attrs={"attribute_name": "y"}).outputs["Fac"] + r = nw.power(nw.add(nw.power(x, 2), nw.power(y, 2)), 0.5) + coord = nw.scale( + nw.combine(x, y, 0), + nw.build_float_curve(r, [(0, 1), (uniform(0.5, 0.7), 2), (1, 8)]), + ) + + perturbed_position = nw.add( + coord, + nw.scale( + nw.new_node(Nodes.NoiseTexture, attrs={"noise_dimensions": "2D"}), 0.2 + ), + ) + voronoi = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={"Scale": uniform(2, 2.5), "Vector": perturbed_position}, + attrs={"voronoi_dimensions": "2D", "feature": "DISTANCE_TO_EDGE"}, + ) + + ratio = nw.divide( + voronoi, nw.scalar_add(1, nw.scalar_multiply(5, nw.power(r, 2))) + ) + ratio = nw.build_float_curve(ratio, [(0, 0.4), (0.04, 0)]) + ratio = nw.scalar_multiply( + ratio, + nw.new_node( + Nodes.MapRange, + [ + nw.new_node(Nodes.MusgraveTexture, input_kwargs={"Scale": 20}), + -0.2, + 0.1, + 0, + 1, + ], + ), + ) + color = nw.new_node(Nodes.MixRGB, [ratio, color, light_color]) + + roughness = uniform(0.2, 0.5) if uniform(0, 1) < 0.5 else uniform(0.8, 1.0) + specular = uniform(0.2, 0.8) + clearcoat = uniform(0.2, 0.5) if uniform(0, 1) < 0.25 else 0 + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": color, + "Roughness": roughness, + "Specular": specular, + "Clearcoat": clearcoat, + "Subsurface Color": subsurface_color, + "Subsurface": 0.01, + "Subsurface Radius": (0.05, 0.05, 0.05), + }, + ) + return principled_bsdf + + @staticmethod + def shader_speckle(nw: NodeWrangler, base_hue): + bright_color = hsv2rgba(base_hue, uniform(0.4, 0.8), log_uniform(0.05, 0.2)) + dark_color = ( + *colorsys.hsv_to_rgb( + (base_hue + uniform(-0.05, 0.05)) % 1, + uniform(0.4, 0.8), + log_uniform(0.01, 0.05), + ), + 1, + ) + subsurface_color = ( + *colorsys.hsv_to_rgb( + (base_hue + uniform(-0.05, 0.05)) % 1, + uniform(0.4, 0.8), + log_uniform(0.05, 0.2), + ), + 1, + ) + light_color = hsv2rgba(base_hue, uniform(0, 0.1), uniform(0.2, 0.8)) + anchors = [0.0, 0.3, 0.6, 1.0] if uniform(0, 1) < 0.5 else [0.0, 0.4, 0.7, 1.0] + color = build_color_ramp( + nw, + nw.musgrave(500), + anchors, + [dark_color, dark_color, bright_color, bright_color], + ) + + musgrave = nw.build_float_curve(nw.musgrave(50), [(0.7, 0), (0.72, 1.0)]) + color = nw.new_node(Nodes.MixRGB, [musgrave, color, light_color]) + + roughness = uniform(0.2, 0.5) if uniform(0, 1) < 0.5 else uniform(0.8, 1.0) + specular = uniform(0.2, 0.8) + clearcoat = uniform(0.2, 0.5) if uniform(0, 1) < 0.25 else 0 + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": color, + "Roughness": roughness, + "Specular": specular, + "Clearcoat": clearcoat, + "Subsurface Color": subsurface_color, + "Subsurface": 0.01, + "Subsurface Radius": (0.05, 0.05, 0.05), + }, + ) + return principled_bsdf + + @staticmethod + def shader_noise(nw: NodeWrangler, base_hue): + bright_color = hsv2rgba(base_hue, uniform(0.4, 0.8), log_uniform(0.05, 0.2)) + dark_color = ( + *colorsys.hsv_to_rgb( + (base_hue + uniform(-0.05, 0.05)) % 1, + uniform(0.4, 0.8), + log_uniform(0.01, 0.05), + ), + 1, + ) + subsurface_color = ( + *colorsys.hsv_to_rgb( + (base_hue + uniform(-0.05, 0.05)) % 1, + uniform(0.4, 0.8), + log_uniform(0.05, 0.2), + ), + 1, + ) + light_color = hsv2rgba(base_hue, uniform(0, 0.1), uniform(0.2, 0.8)) + anchors = [0.0, 0.3, 0.6, 1.0] if uniform(0, 1) < 0.5 else [0.0, 0.4, 0.7, 1.0] + color = build_color_ramp( + nw, + nw.musgrave(500), + 
anchors, + [dark_color, dark_color, bright_color, bright_color], + ) + + ratio = nw.build_float_curve( + nw.musgrave(10), [(0.52, 0), (0.56, 0.2), (0.6, 0.0)] + ) + ratio = nw.scalar_multiply( + ratio, + nw.new_node( + Nodes.MapRange, + [ + nw.new_node(Nodes.MusgraveTexture, input_kwargs={"Scale": 20}), + -0.2, + 0.1, + 0, + 1, + ], + ), + ) + color = nw.new_node(Nodes.MixRGB, [ratio, color, light_color]) + + roughness = uniform(0.2, 0.5) if uniform(0, 1) < 0.5 else uniform(0.8, 1.0) + specular = uniform(0.2, 0.8) + clearcoat = uniform(0.2, 0.5) if uniform(0, 1) < 0.25 else 0 + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": color, + "Roughness": roughness, + "Specular": specular, + "Clearcoat": clearcoat, + "Subsurface Color": subsurface_color, + "Subsurface": 0.01, + "Subsurface Radius": (0.05, 0.05, 0.05), + }, + ) + return principled_bsdf + + @staticmethod + def shader_cap(nw: NodeWrangler, base_hue): + bright_color = hsv2rgba(base_hue, uniform(0.6, 0.8), log_uniform(0.05, 0.2)) + dark_color = ( + *colorsys.hsv_to_rgb( + (base_hue + uniform(-0.05, 0.05)) % 1, + uniform(0.4, 0.8), + log_uniform(0.01, 0.05), + ), + 1, + ) + light_color = hsv2rgba(base_hue, uniform(0, 0.1), uniform(0.6, 0.8)) + subsurface_color = ( + *colorsys.hsv_to_rgb( + (base_hue + uniform(-0.05, 0.05)) % 1, + uniform(0.6, 0.8), + log_uniform(0.05, 0.2), + ), + 1, + ) + + anchors = [0.0, 0.3, 0.6, 1.0] if uniform(0, 1) < 0.5 else [0.0, 0.4, 0.7, 1.0] + color = build_color_ramp( + nw, + nw.musgrave(500), + anchors, + [dark_color, dark_color, bright_color, bright_color], + ) + + z = nw.new_node(Nodes.Attribute, attrs={"attribute_name": "z"}) + musgrave = nw.build_float_curve( + z, + [ + (uniform(0, 0.2), uniform(0.95, 0.98)), + (uniform(0.2, 0.4), uniform(0.98, 1)), + (0.8, 1), + ], + ) + color = nw.new_node(Nodes.MixRGB, [musgrave, light_color, color]) + + roughness = uniform(0.2, 0.5) if uniform(0, 1) < 0.5 else uniform(0.8, 1.0) + specular = uniform(0.2, 0.8) + clearcoat = uniform(0.2, 0.5) if uniform(0, 1) < 0.25 else 0 + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": color, + "Roughness": roughness, + "Specular": specular, + "Clearcoat": clearcoat, + "Subsurface Color": subsurface_color, + "Subsurface": 0.01, + "Subsurface Radius": (0.05, 0.05, 0.05), + }, + ) + return principled_bsdf diff --git a/infinigen/assets/mushroom/generate.py b/infinigen/assets/objects/mushroom/generate.py similarity index 77% rename from infinigen/assets/mushroom/generate.py rename to infinigen/assets/objects/mushroom/generate.py index 1f4c49459..0b5891b8a 100644 --- a/infinigen/assets/mushroom/generate.py +++ b/infinigen/assets/objects/mushroom/generate.py @@ -4,21 +4,20 @@ # Authors: Lingjie Mei -from copy import deepcopy - import numpy as np from mathutils import Euler, kdtree from numpy.random import uniform -from infinigen.core.util.blender import deep_clone_obj -from infinigen.core.util.math import FixedSeed -from .growth import MushroomGrowthFactory -from infinigen.assets.utils.object import join_objects from infinigen.assets.utils.mesh import polygon_angles +from infinigen.assets.utils.object import join_objects from infinigen.core.placement.factory import AssetFactory +from infinigen.core.tagging import tag_object from infinigen.core.util import blender as butil -from infinigen.core.util.random import log_uniform -from infinigen.core.tagging import tag_object, tag_nodegroup +from infinigen.core.util.blender import deep_clone_obj +from 
infinigen.core.util.math import FixedSeed + +from .growth import MushroomGrowthFactory + class MushroomFactory(AssetFactory): max_cluster = 10 @@ -28,9 +27,9 @@ def __init__(self, factory_seed, coarse=False): with FixedSeed(factory_seed): self.makers = [self.directional_make, self.cluster_make] self.maker = np.random.choice(self.makers) - self.lowered = uniform(0, 1) < .5 + self.lowered = uniform(0, 1) < 0.5 self.factory = MushroomGrowthFactory(factory_seed, coarse) - self.tolerant_length = uniform(0, .2) + self.tolerant_length = uniform(0, 0.2) def create_asset(self, i, face_size, **params): mushrooms, keypoints = self.build_mushrooms(i, face_size) @@ -41,20 +40,29 @@ def create_asset(self, i, face_size, **params): m.scale = s butil.apply_transform(m, loc=True) obj = join_objects(mushrooms) - butil.modify_mesh(obj, 'SIMPLE_DEFORM', deform_method='BEND', angle=uniform(- np.pi / 8, np.pi / 8), - deform_axis=np.random.choice(['X', 'Y'])) - tag_object(obj, 'mushroom') + butil.modify_mesh( + obj, + "SIMPLE_DEFORM", + deform_method="BEND", + angle=uniform(-np.pi / 8, np.pi / 8), + deform_axis=np.random.choice(["X", "Y"]), + ) + tag_object(obj, "mushroom") return obj - def build_mushrooms(self, i, face_size=.01): + def build_mushrooms(self, i, face_size=0.01): n = np.random.randint(1, 6) mushrooms, keypoints = [], [] for j in range(n): - obj = self.factory.create_asset(i=j + i * self.max_cluster, face_size=face_size / 2) + obj = self.factory.create_asset( + i=j + i * self.max_cluster, face_size=face_size / 2 + ) clone = deep_clone_obj(obj) - butil.modify_mesh(clone, 'REMESH', voxel_size=.04) + butil.modify_mesh(clone, "REMESH", voxel_size=0.04) mushrooms.append(obj) - k = np.array([v.co for v in clone.data.vertices if v.co[-1] > self.tolerant_length]) + k = np.array( + [v.co for v in clone.data.vertices if v.co[-1] > self.tolerant_length] + ) if len(k) == 0: k = np.array([v.co for v in clone.data.vertices]) if len(k) == 0: @@ -70,9 +78,13 @@ def radius(self): def find_closest(self, keypoints, rotations, start_locs, directions): vertices = [k.copy() for k in keypoints] locations, scales = [np.zeros(3)], [] - scales = np.tile(uniform(.3, 1.2, len(keypoints))[:, np.newaxis], 3) + scales = np.tile(uniform(0.3, 1.2, len(keypoints))[:, np.newaxis], 3) for i in range(len(vertices)): - vertices[i] = (np.array(Euler(rotations[i]).to_matrix()) @ np.diag(scales[i]) @ vertices[i].T).T + vertices[i] = ( + np.array(Euler(rotations[i]).to_matrix()) + @ np.diag(scales[i]) + @ vertices[i].T + ).T for i in range(1, len(vertices)): basis = np.concatenate(vertices[:i]) kd = kdtree.KDTree(len(basis)) @@ -81,7 +93,7 @@ def find_closest(self, keypoints, rotations, start_locs, directions): kd.balance() for d in np.linspace(0, 4, 20) * self.radius: offset = start_locs[i] + directions[i] * d - if min(kd.find(v + offset)[-1] for v in vertices[i]) > .008: + if min(kd.find(v + offset)[-1] for v in vertices[i]) > 0.008: break else: offset = start_locs[i] + directions[i] * 4 * self.radius @@ -104,6 +116,8 @@ def directional_make(self, keypoints): rot_y = uniform(0, np.pi / 6, n) if self.lowered else np.zeros(n) rot_z = -np.pi / 2 + uniform(-np.pi / 8, np.pi / 8, n) rotations = np.stack([np.zeros(n), rot_y, rot_z], -1) - start_locs = np.stack([np.linspace(0, self.radius * n * .4, n), np.zeros(n), np.zeros(n)], -1) + start_locs = np.stack( + [np.linspace(0, self.radius * n * 0.4, n), np.zeros(n), np.zeros(n)], -1 + ) directions = np.tile([0, 1, 0], (n, 1)) return self.find_closest(keypoints, rotations, start_locs, directions) 
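`MushroomFactory.find_closest` above packs each new mushroom cluster against the ones already placed by sliding it along a direction until its keypoints keep a small clearance from everything placed so far. Below is a minimal standalone sketch of that placement rule, using brute-force NumPy distances instead of `mathutils.kdtree`; the function and variable names are illustrative only, not part of Infinigen.

```python
import numpy as np


def nearest_distance(placed, cloud):
    """Smallest distance from any point in `cloud` to any point in `placed`."""
    # (n, 1, 3) - (1, m, 3) -> (n, m, 3); fine for the small clouds used here.
    diff = placed[:, None, :] - cloud[None, :, :]
    return np.sqrt((diff**2).sum(-1)).min()


def place_with_clearance(placed, cloud, start, direction, radius, clearance=0.008):
    """Slide `cloud` from `start` along `direction` until it clears `placed`."""
    for d in np.linspace(0, 4, 20) * radius:
        offset = start + direction * d
        if nearest_distance(placed, cloud + offset) > clearance:
            return offset
    # Same fallback as find_closest: give up at the farthest step.
    return start + direction * 4 * radius


rng = np.random.default_rng(0)
placed = rng.uniform(-0.05, 0.05, (50, 3))  # keypoints of clusters already placed
cloud = rng.uniform(-0.05, 0.05, (30, 3))   # keypoints of the new cluster
print(place_with_clearance(placed, cloud, np.zeros(3), np.array([0.0, 1.0, 0.0]), 0.05))
```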
diff --git a/infinigen/assets/objects/mushroom/growth.py b/infinigen/assets/objects/mushroom/growth.py new file mode 100644 index 000000000..3ba06f728 --- /dev/null +++ b/infinigen/assets/objects/mushroom/growth.py @@ -0,0 +1,99 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Lingjie Mei + + +import colorsys + +from numpy.random import uniform + +from infinigen.assets.utils.object import join_objects, origin2lowest +from infinigen.core import surface +from infinigen.core.nodes.node_info import Nodes +from infinigen.core.nodes.node_utils import build_color_ramp +from infinigen.core.nodes.node_wrangler import NodeWrangler +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.util.math import FixedSeed +from infinigen.core.util.random import log_uniform + +from .cap import MushroomCapFactory +from .stem import MushroomStemFactory + + +class MushroomGrowthFactory(AssetFactory): + def __init__(self, factory_seed, coarse=False): + super().__init__(factory_seed, coarse) + with FixedSeed(factory_seed): + self.base_hue = self.build_base_hue() + self.material_func = lambda: surface.shaderfunc_to_material( + self.shader_mushroom, self.base_hue + ) + self.cap_factory = MushroomCapFactory( + factory_seed, self.base_hue, self.material_func, coarse + ) + self.stem_factory = MushroomStemFactory( + factory_seed, self.cap_factory.inner_radius, self.material_func, coarse + ) + + @staticmethod + def build_base_hue(): + if uniform(0, 1) < 0.4: + return uniform(0, 1) + else: + return uniform(0.02, 0.15) + + def create_asset(self, **params): + cap = self.cap_factory(**params) + stem = self.stem_factory(**params) + obj = join_objects([cap, stem]) + origin2lowest(obj) + return cap + + @staticmethod + def shader_mushroom(nw: NodeWrangler, base_hue): + roughness = 0.8 + front_color = ( + *colorsys.hsv_to_rgb( + (base_hue + uniform(-0.1, 0.1)) % 1, + uniform(0.1, 0.3), + log_uniform(0.02, 0.5), + ), + 1, + ) + back_color = ( + *colorsys.hsv_to_rgb( + (base_hue + uniform(-0.1, 0.1)) % 1, + uniform(0.1, 0.3), + log_uniform(0.02, 0.5), + ), + 1, + ) + + x, y, z = nw.separate(nw.new_node(Nodes.TextureCoord).outputs["Generated"]) + musgrave = nw.new_node( + Nodes.MapRange, + [ + nw.new_node( + Nodes.MusgraveTexture, + [nw.combine(x, y, nw.scalar_multiply(uniform(5, 10), z))], + input_kwargs={"Scale": 200}, + ), + -1, + 1, + 0, + 1, + ], + ) + + color = build_color_ramp( + nw, + musgrave, + [0, 0.3, 0.7, 1], + [front_color, front_color, back_color, back_color], + ) + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={"Base Color": color, "Roughness": roughness}, + ) + return principled_bsdf diff --git a/infinigen/assets/objects/mushroom/stem.py b/infinigen/assets/objects/mushroom/stem.py new file mode 100644 index 000000000..7e56fa42a --- /dev/null +++ b/infinigen/assets/objects/mushroom/stem.py @@ -0,0 +1,182 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
+ +# Authors: Lingjie Mei + + +import bpy +import numpy as np +from numpy.random import uniform + +from infinigen.assets.utils.decorate import geo_extension, subsurface2face_size +from infinigen.assets.utils.draw import spin +from infinigen.assets.utils.misc import assign_material +from infinigen.assets.utils.object import join_objects +from infinigen.core import surface +from infinigen.core.nodes.node_info import Nodes +from infinigen.core.nodes.node_wrangler import NodeWrangler +from infinigen.core.placement.detail import remesh_with_attrs +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.tagging import tag_object +from infinigen.core.util import blender as butil +from infinigen.core.util.math import FixedSeed +from infinigen.core.util.random import log_uniform + + +class MushroomStemFactory(AssetFactory): + def __init__(self, factory_seed, inner_radius, material_func, coarse=False): + super().__init__(factory_seed, coarse) + with FixedSeed(factory_seed): + self.web_builders = [self.build_hollow_web, self.build_solid_web, None] + web_weights = np.array([1, 1, 2]) + self.web_builder = np.random.choice( + self.web_builders, p=web_weights / web_weights.sum() + ) + self.has_band = uniform(0, 1) < 0.75 + + self.material = material_func() + self.material_web = material_func() + self.inner_radius = inner_radius + + def build_solid_web(self, inner_radius): + outer_radius = inner_radius * uniform(1.5, 3.5) + z = uniform(0.0, 0.05) + length = uniform(0.15, 0.2) + x_anchors = inner_radius, (outer_radius + inner_radius) / 2, outer_radius + z_anchors = -z, -z - uniform(0.3, 0.4) * length, -z - length + anchors = x_anchors, 0, z_anchors + obj = spin(anchors) + surface.add_geomod( + obj, self.geo_inverse_band, apply=True, input_args=[-uniform(0.008, 0.01)] + ) + tag_object(obj, "web") + return obj + + @staticmethod + def geo_voronoi(nw: NodeWrangler): + geometry = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) + selection = nw.compare( + "LESS_THAN", + nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={"Scale": uniform(15, 20)}, + attrs={"feature": "DISTANCE_TO_EDGE"}, + ), + 0.06, + ) + geometry = nw.new_node(Nodes.SeparateGeometry, [geometry, selection]) + nw.new_node(Nodes.GroupOutput, input_kwargs={"Geometry": geometry}) + + def build_hollow_web(self, inner_radius): + outer_radius = inner_radius * uniform(2, 3.5) + z = uniform(0.0, 0.05) + length = log_uniform(0.2, 0.4) + x_anchors = inner_radius, (outer_radius + inner_radius) / 2, outer_radius + z_anchors = -z, -z - uniform(0.3, 0.4) * length, -z - length + anchors = x_anchors, 0, z_anchors + obj = spin(anchors) + levels = 3 + butil.modify_mesh(obj, "SUBSURF", True, render_levels=levels, levels=levels) + surface.add_geomod(obj, self.geo_voronoi, apply=True) + butil.modify_mesh(obj, "SMOOTH", iterations=2) + tag_object(obj, "web") + return obj + + def create_asset(self, face_size, **params) -> bpy.types.Object: + length = log_uniform(0.4, 0.8) + x_anchors = ( + 0, + self.inner_radius, + log_uniform(1, 2) * self.inner_radius, + self.inner_radius * uniform(1, 1.2), + 0, + ) + z_anchors = 0, 0, -length * uniform(0.3, 0.7), -length, -length + anchors = x_anchors, 0, z_anchors + obj = spin(anchors, [1, 4]) + remesh_with_attrs(obj, face_size) + if self.has_band: + surface.add_geomod( + obj, + self.geo_band, + apply=True, + input_args=[length, uniform(0.008, 0.01)], + ) + assign_material(obj, self.material) + + if self.web_builder is not None: + web = 
self.web_builder(self.inner_radius) + surface.add_geomod(web, geo_extension, apply=True) + subsurface2face_size(web, face_size / 2) + assign_material(obj, self.material_web) + obj = join_objects([web, obj]) + + texture = bpy.data.textures.new(name="cap", type="STUCCI") + texture.noise_scale = uniform(0.005, 0.01) + butil.modify_mesh(obj, "DISPLACE", strength=0.008, texture=texture, mid_level=0) + + butil.modify_mesh( + obj, + "SIMPLE_DEFORM", + deform_method="BEND", + angle=-uniform(0, np.pi / 2), + deform_axis="Y", + ) + tag_object(obj, "stem") + return obj + + @staticmethod + def geo_band(nw: NodeWrangler, length, scale): + geometry = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) + wave = nw.new_node( + Nodes.WaveTexture, + input_kwargs={ + "Scale": log_uniform(5, 10), + "Distortion": uniform(5, 10), + "Detail Scale": 2, + }, + attrs={"bands_direction": "Z", "wave_profile": "SAW"}, + ).outputs["Fac"] + selection = nw.compare( + "LESS_THAN", + nw.separate(nw.new_node(Nodes.InputPosition))[-1], + -uniform(0.3, 0.7) * length, + ) + normal = nw.vector_math( + "NORMALIZE", nw.add(nw.new_node(Nodes.InputNormal), (0, 0, 2)) + ) + geometry = nw.new_node( + Nodes.SetPosition, + [geometry, selection, None, nw.scale(nw.scale(wave, scale), normal)], + ) + nw.new_node(Nodes.GroupOutput, input_kwargs={"Geometry": geometry}) + + @staticmethod + def geo_inverse_band(nw: NodeWrangler, scale): + geometry = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) + x, y, z = nw.separate(nw.new_node(Nodes.InputPosition)) + vector = nw.combine(x, y, nw.scalar_multiply(-1, z)) + wave = nw.new_node( + Nodes.WaveTexture, + input_kwargs={ + "Vector": vector, + "Scale": log_uniform(5, 10), + "Distortion": uniform(5, 10), + "Detail Scale": 2, + }, + attrs={"bands_direction": "Z", "wave_profile": "SAW"}, + ).outputs["Fac"] + normal = nw.vector_math( + "NORMALIZE", nw.add(nw.new_node(Nodes.InputNormal), (0, 0, 2)) + ) + geometry = nw.new_node( + Nodes.SetPosition, + [geometry, None, None, nw.scale(nw.scale(wave, scale), normal)], + ) + nw.new_node(Nodes.GroupOutput, input_kwargs={"Geometry": geometry}) diff --git a/infinigen/assets/organizer/__init__.py b/infinigen/assets/objects/organizer/__init__.py similarity index 82% rename from infinigen/assets/organizer/__init__.py rename to infinigen/assets/objects/organizer/__init__.py index 4eeef6039..9faff406e 100644 --- a/infinigen/assets/organizer/__init__.py +++ b/infinigen/assets/objects/organizer/__init__.py @@ -6,4 +6,4 @@ from .basket import BasketBaseFactory from .hook import HookBaseFactory, SpatulaOnHookBaseFactory -from .plate_rack import PlateRackBaseFactory, PlateOnRackBaseFactory +from .plate_rack import PlateOnRackBaseFactory, PlateRackBaseFactory diff --git a/infinigen/assets/objects/organizer/basket.py b/infinigen/assets/objects/organizer/basket.py new file mode 100644 index 000000000..66636a1c1 --- /dev/null +++ b/infinigen/assets/objects/organizer/basket.py @@ -0,0 +1,511 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
+ +# Authors: Beining Han + +import bpy +import numpy as np +from numpy.random import uniform + +from infinigen.assets.materials.plastics.plastic_rough import shader_rough_plastic +from infinigen.core import surface, tagging +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.placement.factory import AssetFactory + + +@node_utils.to_nodegroup("nodegroup_holes", singleton=False, type="GeometryNodeTree") +def nodegroup_holes(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "Value1", 0.5000), + ("NodeSocketFloat", "Value2", 0.5000), + ("NodeSocketFloat", "Value3", 0.5000), + ("NodeSocketFloat", "Value4", 0.5000), + ("NodeSocketFloat", "Value5", 0.5000), + ("NodeSocketFloat", "Value6", 0.5000), + ], + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["Value3"], 1: 0.0000} + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Value1"], 1: add}, + attrs={"operation": "SUBTRACT"}, + ) + + add_1 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["Value6"], 1: 0.0000} + ) + + subtract_1 = nw.new_node( + Nodes.Math, input_kwargs={0: add_1, 1: add}, attrs={"operation": "SUBTRACT"} + ) + + add_2 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["Value4"], 1: 0.0000} + ) + + add_3 = nw.new_node( + Nodes.Math, input_kwargs={0: add_2, 1: group_input.outputs["Value2"]} + ) + + divide = nw.new_node( + Nodes.Math, input_kwargs={0: subtract, 1: add_3}, attrs={"operation": "DIVIDE"} + ) + + divide_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract_1, 1: add_3}, + attrs={"operation": "DIVIDE"}, + ) + + grid = nw.new_node( + Nodes.MeshGrid, + input_kwargs={ + "Size X": subtract, + "Size Y": subtract_1, + "Vertices X": divide, + "Vertices Y": divide_1, + }, + ) + + store_named_attribute = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": grid.outputs["Mesh"], + "Name": "uv_map", + 3: grid.outputs["UV Map"], + }, + attrs={"domain": "CORNER", "data_type": "FLOAT_VECTOR"}, + ) + + transform_1 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": store_named_attribute, + "Rotation": (0.0000, 1.5708, 0.0000), + }, + ) + + add_4 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["Value5"], 1: 0.0000} + ) + + add_5 = nw.new_node(Nodes.Math, input_kwargs={0: add_4, 1: 0.1}) + + combine_xyz_3 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": add_5, "Y": add_2, "Z": add_2} + ) + + cube_2 = nw.new_node(Nodes.MeshCube, input_kwargs={"Size": combine_xyz_3}) + + store_named_attribute_1 = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": cube_2.outputs["Mesh"], + "Name": "uv_map", + 3: cube_2.outputs["UV Map"], + }, + attrs={"domain": "CORNER", "data_type": "FLOAT_VECTOR"}, + ) + + instance_on_points = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={"Points": transform_1, "Instance": store_named_attribute_1}, + ) + + subtract_2 = nw.new_node( + Nodes.Math, input_kwargs={0: add_4, 1: add}, attrs={"operation": "SUBTRACT"} + ) + + divide_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract_2, 1: add_3}, + attrs={"operation": "DIVIDE"}, + ) + + grid_1 = nw.new_node( + Nodes.MeshGrid, + input_kwargs={ + "Size X": subtract_2, + "Size Y": subtract, + "Vertices X": divide_2, + "Vertices Y": divide, + }, + ) + + store_named_attribute_2 = nw.new_node( + 
Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": grid_1.outputs["Mesh"], + "Name": "uv_map", + 3: grid_1.outputs["UV Map"], + }, + attrs={"domain": "CORNER", "data_type": "FLOAT_VECTOR"}, + ) + + transform_2 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": store_named_attribute_2, + "Rotation": (1.5708, 0.0000, 0.0000), + }, + ) + + add_6 = nw.new_node(Nodes.Math, input_kwargs={0: add_1, 1: 0.1}) + + combine_xyz_4 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": add_2, "Y": add_6, "Z": add_2} + ) + + cube_3 = nw.new_node(Nodes.MeshCube, input_kwargs={"Size": combine_xyz_4}) + + store_named_attribute_3 = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": cube_3.outputs["Mesh"], + "Name": "uv_map", + 3: cube_3.outputs["UV Map"], + }, + attrs={"domain": "CORNER", "data_type": "FLOAT_VECTOR"}, + ) + + instance_on_points_1 = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={"Points": transform_2, "Instance": store_named_attribute_3}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Instances1": instance_on_points, + "Instances2": instance_on_points_1, + }, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_handle_hole", singleton=False, type="GeometryNodeTree" +) +def nodegroup_handle_hole(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "X", 0.0000), + ("NodeSocketFloat", "Z", 0.0000), + ("NodeSocketFloat", "Value", 0.5000), + ("NodeSocketFloat", "Value2", 0.5000), + ("NodeSocketInt", "Level", 0), + ], + ) + + combine_xyz_3 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": group_input.outputs["X"], + "Y": 1.0000, + "Z": group_input.outputs["Z"], + }, + ) + + cube_2 = nw.new_node(Nodes.MeshCube, input_kwargs={"Size": combine_xyz_3}) + + store_named_attribute = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": cube_2.outputs["Mesh"], + "Name": "uv_map", + 3: cube_2.outputs["UV Map"], + }, + attrs={"domain": "CORNER", "data_type": "FLOAT_VECTOR"}, + ) + + subdivide_mesh_2 = nw.new_node( + Nodes.SubdivideMesh, input_kwargs={"Mesh": store_named_attribute} + ) + + subdivision_surface_2 = nw.new_node( + Nodes.SubdivisionSurface, + input_kwargs={"Mesh": subdivide_mesh_2, "Level": group_input.outputs["Level"]}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Value"]}, + attrs={"operation": "MULTIPLY"}, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply, 1: group_input.outputs["Value2"]}, + attrs={"operation": "SUBTRACT"}, + ) + + combine_xyz_4 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": subtract}) + + transform_1 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": subdivision_surface_2, "Translation": combine_xyz_4}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": transform_1}, + attrs={"is_active_output": True}, + ) + + +def geometry_nodes(nw: NodeWrangler, **kwargs): + # Code generated using version 2.6.4 of the node_transpiler + + depth = nw.new_node(Nodes.Value, label="depth") + depth.outputs[0].default_value = kwargs["depth"] + + width = nw.new_node(Nodes.Value, label="width") + width.outputs[0].default_value = kwargs["width"] + + height = nw.new_node(Nodes.Value, label="height") + height.outputs[0].default_value = kwargs["height"] + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": depth, "Y": 
width, "Z": height} + ) + + cube = nw.new_node(Nodes.MeshCube, input_kwargs={"Size": combine_xyz}) + + store_named_attribute = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": cube.outputs["Mesh"], + "Name": "uv_map", + 3: cube.outputs["UV Map"], + }, + attrs={"domain": "CORNER", "data_type": "FLOAT_VECTOR"}, + ) + + subdivide_mesh = nw.new_node( + Nodes.SubdivideMesh, input_kwargs={"Mesh": store_named_attribute, "Level": 2} + ) + + sub_level = nw.new_node(Nodes.Integer, label="sub_level") + sub_level.integer = kwargs["frame_sub_level"] + + subdivision_surface = nw.new_node( + Nodes.SubdivisionSurface, + input_kwargs={"Mesh": subdivide_mesh, "Level": sub_level}, + ) + + differences = [] + + if kwargs["has_handle"]: + hole_depth = nw.new_node(Nodes.Value, label="hole_depth") + hole_depth.outputs[0].default_value = kwargs["handle_depth"] + + hole_height = nw.new_node(Nodes.Value, label="hole_height") + hole_height.outputs[0].default_value = kwargs["handle_height"] + + hole_dist = nw.new_node(Nodes.Value, label="hole_dist") + hole_dist.outputs[0].default_value = kwargs["handle_dist_to_top"] + + handle_level = nw.new_node(Nodes.Integer, label="handle_level") + handle_level.integer = kwargs["handle_sub_level"] + handle_hole = nw.new_node( + nodegroup_handle_hole().name, + input_kwargs={ + "X": hole_depth, + "Z": hole_height, + "Value": height, + "Value2": hole_dist, + "Level": handle_level, + }, + ) + differences.append(handle_hole) + + thickness = nw.new_node(Nodes.Value, label="thickness") + thickness.outputs[0].default_value = kwargs["thickness"] + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: depth, 1: thickness}, + attrs={"operation": "SUBTRACT"}, + ) + + subtract_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: width, 1: thickness}, + attrs={"operation": "SUBTRACT"}, + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": subtract, "Y": subtract_1, "Z": height} + ) + + cube_1 = nw.new_node(Nodes.MeshCube, input_kwargs={"Size": combine_xyz_1}) + + store_named_attribute_1 = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": cube_1.outputs["Mesh"], + "Name": "uv_map", + 3: cube_1.outputs["UV Map"], + }, + attrs={"domain": "CORNER", "data_type": "FLOAT_VECTOR"}, + ) + + subdivide_mesh_1 = nw.new_node( + Nodes.SubdivideMesh, input_kwargs={"Mesh": store_named_attribute_1, "Level": 2} + ) + + subdivision_surface_1 = nw.new_node( + Nodes.SubdivisionSurface, + input_kwargs={"Mesh": subdivide_mesh_1, "Level": sub_level}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: thickness, 2: 0.2500}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply}) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": subdivision_surface_1, "Translation": combine_xyz_2}, + ) + + if kwargs["has_holes"]: + gap_size = nw.new_node(Nodes.Value, label="gap_size") + gap_size.outputs[0].default_value = kwargs["hole_gap_size"] + + hole_edge_gap = nw.new_node(Nodes.Value, label="hole_edge_gap") + hole_edge_gap.outputs[0].default_value = kwargs["hole_edge_gap"] + + hole_size = nw.new_node(Nodes.Value, label="hole_size") + hole_size.outputs[0].default_value = kwargs["hole_size"] + holes = nw.new_node( + nodegroup_holes().name, + input_kwargs={ + "Value1": height, + "Value2": gap_size, + "Value3": hole_edge_gap, + "Value4": hole_size, + "Value5": depth, + "Value6": width, + }, + ) + differences.extend([holes.outputs["Instances1"], 
holes.outputs["Instances2"]]) + + difference = nw.new_node( + Nodes.MeshBoolean, + input_kwargs={ + "Mesh 1": subdivision_surface, + "Mesh 2": [transform] + differences, + }, + ) + + realize_instances = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": difference.outputs["Mesh"]} + ) + + multiply_1 = nw.new_node( + Nodes.Math, input_kwargs={0: height}, attrs={"operation": "MULTIPLY"} + ) + + combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply_1}) + + transform_geometry = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": realize_instances, "Translation": combine_xyz_3}, + ) + + set_material = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": transform_geometry, + "Material": surface.shaderfunc_to_material(shader_rough_plastic), + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": set_material}, + attrs={"is_active_output": True}, + ) + + +class BasketBaseFactory(AssetFactory): + def __init__(self, factory_seed, params={}, coarse=False): + super(BasketBaseFactory, self).__init__(factory_seed, coarse=coarse) + self.params = params + + def sample_params(self): + return self.params.copy() + + def get_asset_params(self, i=0): + params = self.sample_params() + if params.get("depth", None) is None: + params["depth"] = uniform(0.15, 0.4) + if params.get("width", None) is None: + params["width"] = uniform(0.2, 0.6) + if params.get("height", None) is None: + params["height"] = uniform(0.06, 0.24) + if params.get("frame_sub_level", None) is None: + params["frame_sub_level"] = np.random.choice([0, 3], p=[0.5, 0.5]) + if params.get("thickness", None) is None: + params["thickness"] = uniform(0.001, 0.005) + + if params.get("has_handle", None) is None: + params["has_handle"] = np.random.choice([True, False], p=[0.8, 0.2]) + if params.get("handle_sub_level", None) is None: + params["handle_sub_level"] = np.random.choice([0, 1, 2], p=[0.2, 0.4, 0.4]) + if params.get("handle_depth", None) is None: + params["handle_depth"] = params["depth"] * uniform(0.2, 0.4) + if params.get("handle_height", None) is None: + params["handle_height"] = params["height"] * uniform(0.1, 0.25) + if params.get("handle_dist_to_top", None) is None: + params["handle_dist_to_top"] = params["handle_height"] * 0.5 + params[ + "height" + ] * uniform(0.08, 0.15) + + if params.get("has_holes", None) is None: + if params["height"] < 0.12: + params["has_holes"] = False + else: + params["has_holes"] = np.random.choice([True, False], p=[0.5, 0.5]) + if params.get("hole_size", None) is None: + params["hole_size"] = uniform(0.005, 0.01) + if params.get("hole_gap_size", None) is None: + params["hole_gap_size"] = params["hole_size"] * uniform(0.8, 1.1) + if params.get("hole_edge_gap", None) is None: + params["hole_edge_gap"] = uniform(0.04, 0.06) + + return params + + def create_asset(self, i=0, **params): + bpy.ops.mesh.primitive_plane_add( + size=1, + enter_editmode=False, + align="WORLD", + location=(0, 0, 0), + scale=(1, 1, 1), + ) + obj = bpy.context.active_object + + obj_params = self.get_asset_params(i) + surface.add_geomod( + obj, geometry_nodes, attributes=[], apply=True, input_kwargs=obj_params + ) + tagging.tag_system.relabel_obj(obj) + + return obj diff --git a/infinigen/assets/objects/organizer/hook.py b/infinigen/assets/objects/organizer/hook.py new file mode 100644 index 000000000..5be1a829d --- /dev/null +++ b/infinigen/assets/objects/organizer/hook.py @@ -0,0 +1,577 @@ +# Copyright (c) Princeton University. 
+# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Beining Han + +import bpy +import numpy as np +from numpy.random import randint, uniform + +from infinigen.assets.materials import shader_brushed_metal, shader_rough_plastic +from infinigen.core import surface, tagging +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.util import blender as butil + + +def hook_geometry_nodes(nw: NodeWrangler, **kwargs): + # Code generated using version 2.6.5 of the node_transpiler + + hook_num = nw.new_node(Nodes.Integer, label="hook_num") + hook_num.integer = kwargs["num_hook"] + + add = nw.new_node(Nodes.Math, input_kwargs={0: hook_num, 1: -1.0000}) + + hook_gap = nw.new_node(Nodes.Value, label="hook_gap") + hook_gap.outputs[0].default_value = kwargs["hook_gap"] + + multiply = nw.new_node( + Nodes.Math, input_kwargs={0: hook_gap, 1: add}, attrs={"operation": "MULTIPLY"} + ) + + multiply_1 = nw.new_node( + Nodes.Math, input_kwargs={0: multiply}, attrs={"operation": "MULTIPLY"} + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_1, 1: -1.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"X": multiply_2}) + + combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"X": multiply_1}) + + mesh_line = nw.new_node( + Nodes.MeshLine, + input_kwargs={ + "Count": add, + "Start Location": combine_xyz_2, + "Offset": combine_xyz_1, + }, + attrs={"mode": "END_POINTS"}, + ) + + bezier_segment = nw.new_node( + Nodes.CurveBezierSegment, + input_kwargs={ + "Start": (0.0000, 0.0000, 0.0000), + "Start Handle": (0.0000, 0.0000, kwargs["init_handle"]), + "End Handle": kwargs["curve_handle"], + "End": kwargs["curve_end_point"], + }, + ) + + curve_line = nw.new_node(Nodes.CurveLine) + + join_geometry_3 = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [bezier_segment, curve_line]} + ) + + spline_parameter = nw.new_node(Nodes.SplineParameter) + + float_curve = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Factor": spline_parameter.outputs["Factor"]} + ) + node_utils.assign_curve( + float_curve.mapping.curves[0], [(0.0000, 0.8), (0.5, 0.8), (1.0000, 0.8)] + ) + + raduis = nw.new_node(Nodes.Value, label="raduis") + raduis.outputs[0].default_value = kwargs["hook_radius"] + + multiply_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: float_curve, 1: raduis}, + attrs={"operation": "MULTIPLY"}, + ) + + set_curve_radius = nw.new_node( + Nodes.SetCurveRadius, + input_kwargs={"Curve": join_geometry_3, "Radius": multiply_3}, + ) + + curve_circle = nw.new_node( + Nodes.CurveCircle, + input_kwargs={ + "Resolution": kwargs["hook_resolution"], + "Point 1": (1.0000, 0.0000, 0.0000), + "Point 3": (-1.0000, 0.0000, 0.0000), + }, + attrs={"mode": "POINTS"}, + ) + + hook_reshape = nw.new_node(Nodes.Vector, label="hook_reshape") + hook_reshape.vector = (1.0000, 1.0000, 1.0000) + + transform_geometry_2 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": curve_circle.outputs["Curve"], "Scale": hook_reshape}, + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": set_curve_radius, + "Profile Curve": transform_geometry_2, + "Fill Caps": True, + }, + ) + + hook_size = nw.new_node(Nodes.Value, label="hook_size") + hook_size.outputs[0].default_value = kwargs["hook_size"] + + 
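+    # The hook profile so far: a cubic Bezier segment (plus a straight CurveLine
+    # joined at its start) gets a near-constant radius from the float curve, is
+    # swept into a capped tube by CurveToMesh using the (optionally reshaped)
+    # circle profile, and is scaled uniformly by hook_size below before being
+    # instanced along mesh_line. get_hang_points() later evaluates the same
+    # Bezier control points analytically to find where a hung object rests.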
transform_geometry = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": curve_to_mesh, "Scale": hook_size} + ) + + realize_instances_1 = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": transform_geometry} + ) + + merge_by_distance_1 = nw.new_node( + Nodes.MergeByDistance, input_kwargs={"Geometry": realize_instances_1} + ) + + instance_on_points = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={"Points": mesh_line, "Instance": merge_by_distance_1}, + ) + + scale_instances = nw.new_node( + Nodes.ScaleInstances, input_kwargs={"Instances": instance_on_points} + ) + + set_material = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": scale_instances, + "Material": surface.shaderfunc_to_material(shader_brushed_metal), + }, + ) + + board_side_gap = nw.new_node(Nodes.Value, label="board_side_gap") + board_side_gap.outputs[0].default_value = kwargs["board_side_gap"] + + add_1 = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: board_side_gap}) + + board_thickness = nw.new_node(Nodes.Value, label="board_thickness") + board_thickness.outputs[0].default_value = kwargs["board_thickness"] + + board_height = nw.new_node(Nodes.Value, label="board_height") + board_height.outputs[0].default_value = kwargs["board_height"] + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": add_1, "Y": board_thickness, "Z": board_height}, + ) + + cube = nw.new_node(Nodes.MeshCube, input_kwargs={"Size": combine_xyz}) + + multiply_4 = nw.new_node( + Nodes.Math, + input_kwargs={0: board_thickness, 1: -0.5000}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_5 = nw.new_node( + Nodes.Math, input_kwargs={0: board_height}, attrs={"operation": "MULTIPLY"} + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: hook_size, 1: multiply_5}, + attrs={"operation": "SUBTRACT"}, + ) + + combine_xyz_3 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"Y": multiply_4, "Z": subtract} + ) + + transform_geometry_1 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": cube.outputs["Mesh"], "Translation": combine_xyz_3}, + ) + + set_material_1 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": transform_geometry_1, + "Material": surface.shaderfunc_to_material(shader_rough_plastic), + }, + ) + + join_geometry_2 = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [set_material, set_material_1]} + ) + + realize_instances = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": join_geometry_2} + ) + + triangulate = nw.new_node( + "GeometryNodeTriangulate", input_kwargs={"Mesh": realize_instances} + ) + + transform_geometry_3 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": triangulate, "Rotation": (0.0000, 0.0000, -1.5708)}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": transform_geometry_3}, + attrs={"is_active_output": True}, + ) + + +def spatula_geometry_nodes(nw: NodeWrangler, **kwargs): + # Code generated using version 2.6.5 of the node_transpiler + + handle_length = nw.new_node(Nodes.Value, label="handle_length") + handle_length.outputs[0].default_value = kwargs["handle_length"] + + combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": handle_length}) + + mesh_line = nw.new_node( + Nodes.MeshLine, + input_kwargs={"Count": 64, "Offset": combine_xyz}, + attrs={"mode": "END_POINTS"}, + ) + + mesh_to_curve = nw.new_node(Nodes.MeshToCurve, input_kwargs={"Mesh": mesh_line}) + + handle_radius = nw.new_node(Nodes.Value, label="handle_radius") + 
handle_radius.outputs[0].default_value = kwargs["handle_radius"] + + spline_parameter = nw.new_node(Nodes.SplineParameter) + + float_curve = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": spline_parameter.outputs["Factor"]} + ) + node_utils.assign_curve( + float_curve.mapping.curves[0], kwargs["handle_control_points"] + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: handle_radius, 1: float_curve}, + attrs={"operation": "MULTIPLY"}, + ) + + set_curve_radius = nw.new_node( + Nodes.SetCurveRadius, input_kwargs={"Curve": mesh_to_curve, "Radius": multiply} + ) + + curve_circle = nw.new_node(Nodes.CurveCircle) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": set_curve_radius, + "Profile Curve": curve_circle.outputs["Curve"], + "Fill Caps": True, + }, + ) + + transform_geometry = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": curve_to_mesh, + "Scale": (kwargs["handle_ratio"], 1.0, 1.0), + }, + ) + + hole_radius = nw.new_node(Nodes.Value, label="hole_radius") + hole_radius.outputs[0].default_value = kwargs["hole_radius"] + + cylinder = nw.new_node( + "GeometryNodeMeshCylinder", + input_kwargs={"Radius": hole_radius, "Depth": 0.1000}, + ) + + hole_place_ratio = nw.new_node(Nodes.Value, label="hole_placement") + hole_place_ratio.outputs[0].default_value = kwargs["hole_placement"] + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: handle_length, 1: hole_place_ratio}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply_1}) + + transform_geometry_1 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": cylinder.outputs["Mesh"], + "Translation": combine_xyz_1, + "Rotation": (0.0000, 1.5708, 0.0000), + "Scale": (kwargs["hole_ratio"], 1.0000, 1.0000), + }, + ) + + difference = nw.new_node( + Nodes.MeshBoolean, + input_kwargs={"Mesh 1": transform_geometry, "Mesh 2": transform_geometry_1}, + ) + + cube = nw.new_node( + Nodes.MeshCube, + input_kwargs={ + "Size": ( + kwargs["plate_thickness"], + kwargs["plate_width"], + kwargs["plate_length"], + ), + "Vertices X": 4, + "Vertices Y": 4, + "Vertices Z": 4, + }, + ) + + transform_geometry_3 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": cube.outputs["Mesh"], + "Translation": (0.0000, 0.0000, -kwargs["plate_length"] / 2.0), + }, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [difference.outputs["Mesh"], transform_geometry_3]}, + ) + + realize_instances = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": join_geometry} + ) + + triangulate = nw.new_node( + "GeometryNodeTriangulate", input_kwargs={"Mesh": realize_instances} + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_1, 1: -1.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply_2}) + + transform_geometry_2 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": triangulate, "Translation": combine_xyz_2}, + ) + + set_material = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": transform_geometry_2, + "Material": surface.shaderfunc_to_material(shader_rough_plastic), + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": set_material}, + attrs={"is_active_output": True}, + ) + + +class HookBaseFactory(AssetFactory): + def __init__(self, factory_seed, params={}, coarse=False): + super(HookBaseFactory, self).__init__(factory_seed, 
coarse=coarse) + self.params = params + + def sample_params(self): + return self.params.copy() + + def get_hang_points(self, params): + # compute the lowest point in the bezier curve + x = params["init_handle"] + y = params["curve_handle"][2] - params["init_handle"] + z = params["curve_end_point"][2] - params["curve_handle"][2] + + t1 = (x - y + np.sqrt(y**2 - x * z)) / (x + z - 2 * y) + t2 = (x - y - np.sqrt(y**2 - x * z)) / (x + z - 2 * y) + + t = 0 + if t1 >= 0 and t1 <= 1: + t = max(t1, t) + if t2 >= 0 and t2 <= 1: + t = max(t2, t) + if t == 0: + t = 0.5 + + # get x, z coordinate + alpha1 = 3 * ((1 - t) ** 2) * t + alpha2 = 3 * (1 - t) * (t**2) + alpha3 = t**3 + + z = ( + alpha1 * params["init_handle"] + + alpha2 * params["curve_handle"][-1] + + alpha3 * params["curve_end_point"][-1] + ) + x = alpha2 * params["curve_handle"][-2] + alpha3 * params["curve_end_point"][-2] + + ys = [] + total_length = ( + params["board_side_gap"] + (params["num_hook"] - 1) * params["hook_gap"] + ) + for i in range(params["num_hook"]): + y = ( + -total_length / 2.0 + + params["board_side_gap"] / 2.0 + + i * params["hook_gap"] + ) + ys.append(y) + + hang_points = [] + for y in ys: + hang_points.append((x * params["hook_size"], y, z * params["hook_size"])) + + return hang_points + + def get_asset_params(self, i=0): + params = self.sample_params() + if params.get("num_hook", None) is None: + params["num_hook"] = randint(3, 6) + if params.get("hook_size", None) is None: + params["hook_size"] = uniform(0.05, 0.1) + if params.get("hook_radius", None) is None: + params["hook_radius"] = uniform(0.002, 0.004) / params["hook_size"] + else: + params["hook_radius"] = params["hook_radius"] / params["hook_size"] + + if params.get("hook_resolution", None) is None: + params["hook_resolution"] = np.random.choice([4, 32], p=[0.5, 0.5]) + + if params.get("hook_gap", None) is None: + params["hook_gap"] = uniform(0.04, 0.08) + if params.get("board_height", None) is None: + params["board_height"] = params["hook_size"] + uniform(-0.02, 0.01) + if params.get("board_thickness", None) is None: + params["board_thickness"] = uniform(0.005, 0.015) + if params.get("board_side_gap", None) is None: + params["board_side_gap"] = uniform(0.03, 0.05) + + params["init_handle"] = uniform(-0.15, -0.25) + params["curve_handle"] = (0, uniform(0.15, 0.35), uniform(-0.15, -0.35)) + params["curve_end_point"] = (0, uniform(0.35, 0.55), uniform(-0.05, 0.15)) + + return params + + def create_asset(self, i=0, **params): + bpy.ops.mesh.primitive_plane_add( + size=1, + enter_editmode=False, + align="WORLD", + location=(0, 0, 0), + scale=(1, 1, 1), + ) + obj = bpy.context.active_object + + obj_params = self.get_asset_params(i) + surface.add_geomod( + obj, hook_geometry_nodes, attributes=[], apply=True, input_kwargs=obj_params + ) + tagging.tag_system.relabel_obj(obj) + + hang_points = self.get_hang_points(obj_params) + + return obj, hang_points + + +class SpatulaBaseFactory(AssetFactory): + def __init__(self, factory_seed, params={}, coarse=False): + super(SpatulaBaseFactory, self).__init__(factory_seed, coarse=coarse) + self.params = params + + def sample_params(self): + return self.params.copy() + + def get_asset_params(self, i=0): + params = self.sample_params() + + if params.get("hole_radius", None) is None: + params["hole_radius"] = uniform(0.003, 0.008) + if params.get("hole_placement", None) is None: + params["hole_placement"] = uniform(0.75, 0.9) + if params.get("hole_ratio", None) is None: + params["hole_ratio"] = uniform(0.8, 2.0) + + if 
params.get("handle_length", None) is None: + params["handle_length"] = uniform(0.15, 0.25) + + if params.get("handle_ratio", None) is None: + params["handle_ratio"] = uniform(0.1, 0.4) + if params.get("handle_control_points", None) is None: + params["handle_control_points"] = [ + (0, 0.5), + (0.5, uniform(0.45, 0.65)), + (1.0, uniform(0.4, 0.6)), + ] + if params.get("handle_radius", None) is None: + params["handle_radius"] = ( + params["hole_radius"] / params["handle_control_points"][0][1] + ) / uniform(0.6, 0.8) + + if params.get("plate_thickness", None) is None: + params["plate_thickness"] = uniform(0.005, 0.01) + if params.get("plate_width", None) is None: + params["plate_width"] = uniform(0.04, 0.06) + if params.get("plate_length", None) is None: + params["plate_length"] = uniform(0.05, 0.08) + + return params + + def create_asset(self, i=0, **params): + bpy.ops.mesh.primitive_plane_add( + size=1, + enter_editmode=False, + align="WORLD", + location=(0, 0, 0), + scale=(1, 1, 1), + ) + obj = bpy.context.active_object + + obj_params = self.get_asset_params(i) + surface.add_geomod( + obj, + spatula_geometry_nodes, + attributes=[], + apply=True, + input_kwargs=obj_params, + ) + tagging.tag_system.relabel_obj(obj) + + return obj + + +class SpatulaOnHookBaseFactory(AssetFactory): + def __init__(self, factory_seed, params={}, coarse=False): + super(SpatulaOnHookBaseFactory, self).__init__(factory_seed, coarse=coarse) + self.params = params + + self.hook_fac = HookBaseFactory(factory_seed, params=params) + self.spatula_fac = SpatulaBaseFactory(factory_seed, params=params) + + def get_asset_params(self, i): + if self.params.get("hook_radius", None) is None: + r = uniform(0.002, 0.0035) + self.hook_fac.params["hook_radius"] = r + self.spatula_fac.params["hole_radius"] = r / uniform(0.3, 0.6) + + def create_asset(self, i, **params): + self.get_asset_params(i) + hook, hang_points = self.hook_fac.create_asset(i) + spatula = self.spatula_fac.create_asset(i) + + spatula.location = hang_points[0] + butil.apply_transform(spatula, loc=True) + + return hook diff --git a/infinigen/assets/objects/organizer/plate_rack.py b/infinigen/assets/objects/organizer/plate_rack.py new file mode 100644 index 000000000..23885c86f --- /dev/null +++ b/infinigen/assets/objects/organizer/plate_rack.py @@ -0,0 +1,501 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
+ +# Authors: Beining Han + +import bpy +from numpy.random import randint, uniform + +from infinigen.assets.materials import shader_wood +from infinigen.assets.materials.plastics.plastic_rough import shader_rough_plastic +from infinigen.core import surface, tagging +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.util import blender as butil + + +@node_utils.to_nodegroup( + "nodegroup_plate_rack_connect", singleton=False, type="GeometryNodeTree" +) +def nodegroup_plate_rack_connect(nw: NodeWrangler): + # Code generated using version 2.6.5 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloatDistance", "Radius", 1.0000), + ("NodeSocketFloat", "Value1", 0.5000), + ("NodeSocketFloat", "Value", 0.5000), + ], + ) + + multiply_add = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Value1"], 1: 2.0000, 2: -0.0020}, + attrs={"operation": "MULTIPLY_ADD"}, + ) + + cylinder = nw.new_node( + "GeometryNodeMeshCylinder", + input_kwargs={"Radius": group_input.outputs["Radius"], "Depth": multiply_add}, + ) + + store_named_attribute = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": cylinder.outputs["Mesh"], + "Name": "uv_map", + 3: cylinder.outputs["UV Map"], + }, + attrs={"data_type": "FLOAT_VECTOR", "domain": "CORNER"}, + ) + + multiply_add_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Value"], 2: -uniform(0.02, 0.045)}, + attrs={"operation": "MULTIPLY_ADD"}, + ) + + combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={"X": multiply_add_1}) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": store_named_attribute, + "Translation": combine_xyz, + "Rotation": (1.5708, 0.0000, 0.0000), + }, + ) + + transform_2 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": transform, "Scale": (-1.0000, 1.0000, 1.0000)}, + ) + + join_geometry_2 = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [transform_2, transform]} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": join_geometry_2}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup("nodegroup_rack_cyn", singleton=False, type="GeometryNodeTree") +def nodegroup_rack_cyn(nw: NodeWrangler): + # Code generated using version 2.6.5 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloatDistance", "Radius", 1.0000), + ("NodeSocketFloat", "Value", 0.5000), + ], + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["Value"], 1: 0.0000} + ) + + cylinder = nw.new_node( + "GeometryNodeMeshCylinder", + input_kwargs={"Radius": group_input.outputs["Radius"], "Depth": add}, + ) + + store_named_attribute = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": cylinder.outputs["Mesh"], + "Name": "uv_map", + 3: cylinder.outputs["UV Map"], + }, + attrs={"data_type": "FLOAT_VECTOR", "domain": "CORNER"}, + ) + + multiply_add = nw.new_node( + Nodes.Math, + input_kwargs={0: add, 2: 0.0010}, + attrs={"operation": "MULTIPLY_ADD"}, + ) + + combine_xyz_4 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply_add}) + + transform_2 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": store_named_attribute, "Translation": combine_xyz_4}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + 
input_kwargs={"Geometry": transform_2}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_rack_base", singleton=False, type="GeometryNodeTree" +) +def nodegroup_rack_base(nw: NodeWrangler): + # Code generated using version 2.6.5 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Instance", None), + ("NodeSocketFloat", "Value1", 0.5000), + ("NodeSocketFloat", "Value2", 0.5000), + ("NodeSocketFloat", "Value3", 0.5000), + ("NodeSocketInt", "Count", 10), + ], + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["Value1"], 1: 0.0000} + ) + + add_1 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["Value2"], 1: 0.0000} + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": add, "Y": add_1, "Z": add_1} + ) + + cube = nw.new_node(Nodes.MeshCube, input_kwargs={"Size": combine_xyz}) + + store_named_attribute = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": cube.outputs["Mesh"], + "Name": "uv_map", + 3: cube.outputs["UV Map"], + }, + attrs={"data_type": "FLOAT_VECTOR", "domain": "CORNER"}, + ) + + add_2 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["Value3"], 1: 0.0000} + ) + + combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Y": add_2}) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": store_named_attribute, "Translation": combine_xyz_1}, + ) + + multiply_add = nw.new_node( + Nodes.Math, + input_kwargs={0: add, 2: -0.0150}, + attrs={"operation": "MULTIPLY_ADD"}, + ) + + combine_xyz_2 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": multiply_add, "Y": add_2} + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_add, 1: -1.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_3 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": multiply, "Y": add_2} + ) + + mesh_line = nw.new_node( + Nodes.MeshLine, + input_kwargs={ + "Count": group_input.outputs["Count"], + "Start Location": combine_xyz_2, + "Offset": combine_xyz_3, + }, + attrs={"mode": "END_POINTS"}, + ) + + instance_on_points = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={"Points": mesh_line, "Instance": group_input.outputs["Instance"]}, + ) + + realize_instances = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": instance_on_points} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Base": transform, "Racks": realize_instances}, + attrs={"is_active_output": True}, + ) + + +def rack_geometry_nodes(nw: NodeWrangler, **kwargs): + # Code generated using version 2.6.5 of the node_transpiler + + rack_radius = nw.new_node(Nodes.Value, label="rack_radius") + rack_radius.outputs[0].default_value = kwargs["rack_radius"] + + rack_height = nw.new_node(Nodes.Value, label="rack_height") + rack_height.outputs[0].default_value = kwargs["rack_height"] + + rack_cyn = nw.new_node( + nodegroup_rack_cyn().name, + input_kwargs={"Radius": rack_radius, "Value": rack_height}, + ) + + base_length = nw.new_node(Nodes.Value, label="base_length") + base_length.outputs[0].default_value = kwargs["base_length"] + + base_width = nw.new_node(Nodes.Value, label="base_width") + base_width.outputs[0].default_value = kwargs["base_width"] + + base_gap = nw.new_node(Nodes.Value, label="base_gap") + base_gap.outputs[0].default_value = kwargs["base_gap"] + + integer = nw.new_node(Nodes.Integer) + integer.integer = kwargs["num_rack"] + + rack_base = nw.new_node( + 
nodegroup_rack_base().name, + input_kwargs={ + "Instance": rack_cyn, + "Value1": base_length, + "Value2": base_width, + "Value3": base_gap, + "Count": integer, + }, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={ + "Geometry": [rack_base.outputs["Base"], rack_base.outputs["Racks"]] + }, + ) + + transform_1 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": join_geometry, "Scale": (1.0000, -1.0000, 1.0000)}, + ) + + plate_rack_connect = nw.new_node( + nodegroup_plate_rack_connect().name, + input_kwargs={"Radius": rack_radius, "Value1": base_gap, "Value": base_length}, + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [transform_1, join_geometry, plate_rack_connect]}, + ) + + multiply = nw.new_node( + Nodes.Math, input_kwargs={0: base_width}, attrs={"operation": "MULTIPLY"} + ) + + combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply}) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": join_geometry_1, "Translation": combine_xyz}, + ) + + realize_instances = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": transform} + ) + + triangulate = nw.new_node( + "GeometryNodeTriangulate", input_kwargs={"Mesh": realize_instances} + ) + + set_material = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": triangulate, + "Material": surface.shaderfunc_to_material(shader_wood), + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": set_material}, + attrs={"is_active_output": True}, + ) + + +def plate_geometry_nodes(nw: NodeWrangler, **kwargs): + # Code generated using version 2.6.5 of the node_transpiler + + radius = nw.new_node(Nodes.Value, label="radius") + radius.outputs[0].default_value = kwargs["radius"] + + thickness = nw.new_node(Nodes.Value, label="thickness") + thickness.outputs[0].default_value = kwargs["thickness"] + + cylinder = nw.new_node( + "GeometryNodeMeshCylinder", + input_kwargs={"Vertices": 64, "Radius": radius, "Depth": thickness}, + ) + + combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": radius}) + + transform_geometry = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": cylinder.outputs["Mesh"], + "Translation": combine_xyz, + "Rotation": (0.0000, 1.5708, 0.0000), + }, + ) + + triangulate = nw.new_node( + "GeometryNodeTriangulate", input_kwargs={"Mesh": transform_geometry} + ) + + set_material = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": triangulate, + "Material": surface.shaderfunc_to_material(shader_rough_plastic), + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": set_material}, + attrs={"is_active_output": True}, + ) + + +class PlateRackBaseFactory(AssetFactory): + def __init__(self, factory_seed, params={}, coarse=False): + super(PlateRackBaseFactory, self).__init__(factory_seed, coarse=coarse) + self.params = params + + def sample_params(self): + return self.params.copy() + + def get_place_points(self, params): + # compute the lowest point in the bezier curve + xs = [] + for i in range(params["num_rack"] - 1): + l = params["base_length"] + d = (l - 0.03) / (params["num_rack"] - 1) + x = -l / 2.0 + 0.015 + (i + 0.5) * d + xs.append(x) + + y = 0 + z = params["base_width"] + + place_points = [] + for x in xs: + place_points.append((x, y, z)) + + return place_points + + def get_asset_params(self, i=0): + params = self.sample_params() + if params.get("num_rack", None) is None: + params["num_rack"] = randint(3, 7) + 
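+        # Only values missing from self.params are sampled in this method, so
+        # PlateOnRackBaseFactory can pin base_gap up front and size the plate
+        # radius from it before both sub-factories build their assets.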
if params.get("rack_radius", None) is None: + params["rack_radius"] = uniform(0.0025, 0.006) + if params.get("rack_height", None) is None: + params["rack_height"] = uniform(0.08, 0.15) + if params.get("base_length", None) is None: + params["base_length"] = (params["num_rack"] - 1) * uniform( + 0.03, 0.06 + ) + 0.03 + if params.get("base_gap", None) is None: + params["base_gap"] = uniform(0.05, 0.08) + if params.get("base_width", None) is None: + params["base_width"] = uniform(0.015, 0.03) + + return params + + def create_asset(self, i=0, **params): + bpy.ops.mesh.primitive_plane_add( + size=1, + enter_editmode=False, + align="WORLD", + location=(0, 0, 0), + scale=(1, 1, 1), + ) + obj = bpy.context.active_object + + obj_params = self.get_asset_params(i) + surface.add_geomod( + obj, rack_geometry_nodes, attributes=[], apply=True, input_kwargs=obj_params + ) + tagging.tag_system.relabel_obj(obj) + + place_points = self.get_place_points(obj_params) + + return obj, place_points + + +class PlateBaseFactory(AssetFactory): + def __init__(self, factory_seed, params={}, coarse=False): + super(PlateBaseFactory, self).__init__(factory_seed, coarse=coarse) + self.params = params + + def sample_params(self): + return self.params.copy() + + def get_asset_params(self, i=0): + params = self.sample_params() + if params.get("radius", None) is None: + params["radius"] = uniform(0.15, 0.25) + if params.get("thickness", None) is None: + params["thickness"] = uniform(0.01, 0.025) + + return params + + def create_asset(self, i=0, **params): + bpy.ops.mesh.primitive_plane_add( + size=1, + enter_editmode=False, + align="WORLD", + location=(0, 0, 0), + scale=(1, 1, 1), + ) + obj = bpy.context.active_object + + obj_params = self.get_asset_params(i) + surface.add_geomod( + obj, + plate_geometry_nodes, + attributes=[], + apply=True, + input_kwargs=obj_params, + ) + tagging.tag_system.relabel_obj(obj) + + return obj + + +class PlateOnRackBaseFactory(AssetFactory): + def __init__(self, factory_seed, params={}, coarse=False): + super(PlateOnRackBaseFactory, self).__init__(factory_seed, coarse=coarse) + self.params = params + + self.rack_fac = PlateRackBaseFactory(factory_seed, params=params) + self.plate_fac = PlateBaseFactory(factory_seed, params=params) + + def get_asset_params(self, i): + if self.params.get("base_gap", None) is None: + d = uniform(0.05, 0.08) + self.rack_fac.params["base_gap"] = d + self.plate_fac.params["radius"] = d + uniform(0.025, 0.06) + + def create_asset(self, i, **params): + self.get_asset_params(i) + rack, place_points = self.rack_fac.create_asset(i) + plate = self.plate_fac.create_asset(i) + + plate.location = place_points[0] + butil.apply_transform(plate, loc=True) + + return plate diff --git a/infinigen/assets/objects/particles/__init__.py b/infinigen/assets/objects/particles/__init__.py new file mode 100644 index 000000000..f0045e029 --- /dev/null +++ b/infinigen/assets/objects/particles/__init__.py @@ -0,0 +1,4 @@ +from .lichen import LichenFactory +from .moss import MossFactory +from .particles import DustMoteFactory, RaindropFactory, SnowflakeFactory +from .pine_needle import PineNeedleFactory diff --git a/infinigen/assets/objects/particles/lichen.py b/infinigen/assets/objects/particles/lichen.py new file mode 100644 index 000000000..e0337d522 --- /dev/null +++ b/infinigen/assets/objects/particles/lichen.py @@ -0,0 +1,123 @@ +# Copyright (c) Princeton University. 
+# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Lingjie Mei + + +import bpy +import numpy as np +from numpy.random import normal as N +from numpy.random import uniform + +from infinigen.assets.utils.mesh import polygon_angles +from infinigen.assets.utils.misc import assign_material +from infinigen.assets.utils.object import data2mesh +from infinigen.core import surface +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.tagging import tag_object +from infinigen.core.util import blender as butil +from infinigen.core.util.color import hsv2rgba +from infinigen.infinigen_gpl.extras.diff_growth import build_diff_growth + + +class LichenFactory(AssetFactory): + def __init__(self, factory_seed): + super(LichenFactory, self).__init__(factory_seed) + self.max_polygon = 1e4 + self.base_hue = uniform(0.15, 0.3) + + @staticmethod + def build_lichen_circle_mesh(n): + angles = polygon_angles(n) + z_jitter = N(0.0, 0.02, n) + r_jitter = np.exp(uniform(-0.2, 0.0, n)) + vertices = np.concatenate( + [ + np.stack( + [np.cos(angles) * r_jitter, np.sin(angles) * r_jitter, z_jitter] + ).T, + np.zeros((1, 3)), + ], + 0, + ) + faces = np.stack([np.arange(n), np.roll(np.arange(n), 1), np.full(n, n)]).T + mesh = data2mesh(vertices, [], faces, "circle") + return mesh + + @staticmethod + def shader_lichen(nw: NodeWrangler, base_hue=0.2, **params): + h_perturb = uniform(-0.02, 0.02) + s_perturb = uniform(-0.05, -0.0) + v_perturb = uniform(1.0, 1.5) + + def map_perturb(h, s, v): + return hsv2rgba(h + h_perturb, s + s_perturb, v / v_perturb) + + subsurface_ratio = 0.02 + roughness = 1.0 + + cr = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": nw.musgrave(5000)}) + elements = cr.color_ramp.elements + elements.new(1) + elements[0].position = 0.0 + elements[1].position = 0.5 + elements[2].position = 1.0 + elements[0].color = map_perturb(base_hue, 1, 0.05) + elements[1].color = map_perturb((base_hue + 0.05) % 1, 1, 0.05) + elements[2].color = 0.0, 0.0, 0.0, 1.0 + + background = map_perturb(base_hue, 0.5, 0.3) + mix_rgb = nw.new_node( + Nodes.MixRGB, + [ + nw.new_node(Nodes.ObjectInfo_Shader).outputs["Random"], + cr.outputs["Color"], + background, + ], + ) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": mix_rgb, + "Subsurface": subsurface_ratio, + "Subsurface Radius": (0.01, 0.01, 0.01), + "Subsurface Color": background, + "Roughness": roughness, + }, + ) + + return principled_bsdf + + def create_asset(self, **kwargs): + n = np.random.randint(4, 6) + mesh = self.build_lichen_circle_mesh(n) + obj = bpy.data.objects.new("lichen", mesh) + bpy.context.scene.collection.objects.link(obj) + bpy.context.view_layer.objects.active = obj + + boundary = obj.vertex_groups.new(name="Boundary") + boundary.add(list(range(n)), 1.0, "REPLACE") + + growth_scale = 1, 1, 0.5 + build_diff_growth( + obj, + boundary.index, + max_polygons=self.max_polygon * uniform(0.2, 1), + growth_scale=growth_scale, + inhibit_shell=4, + repulsion_radius=2, + dt=0.25, + ) + obj.scale = [0.004] * 3 + butil.apply_transform(obj) + assign_material( + obj, + surface.shaderfunc_to_material( + LichenFactory.shader_lichen, (self.base_hue + uniform(-0.04, 0.04)) % 1 + ), + ) + + tag_object(obj, "lichen") + return obj diff --git a/infinigen/assets/objects/particles/moss.py b/infinigen/assets/objects/particles/moss.py new file mode 100644 
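`LichenFactory` builds a small fan mesh with 4 or 5 outer vertices, grows it with `build_diff_growth` up to about `max_polygon` faces, and assigns a hue-perturbed shader keyed off each object's random value. A usage sketch, assuming Blender's Python; the import path follows the package `__init__` added in this patch:

```python
from infinigen.assets.objects.particles import LichenFactory

fac = LichenFactory(factory_seed=0)
patches = [fac.create_asset() for _ in range(3)]  # each call grows and shades one lichen patch
```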
index 000000000..811236582 --- /dev/null +++ b/infinigen/assets/objects/particles/moss.py @@ -0,0 +1,116 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory +# of this source tree. + + +# Authors: Lingjie Mei +import math + +from numpy.random import uniform as U + +from infinigen.assets.utils.misc import assign_material +from infinigen.assets.utils.object import new_cube +from infinigen.core import surface +from infinigen.core.nodes.node_utils import build_color_ramp +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.tagging import tag_object +from infinigen.core.util.color import hsv2rgba + + +class MossFactory(AssetFactory): + def __init__(self, factory_seed): + super(MossFactory, self).__init__(factory_seed) + self.max_polygon = 1e4 + self.base_hue = U(0.2, 0.24) + + @staticmethod + def shader_moss(nw: NodeWrangler, base_hue=0.3): + h_perturb = U(-0.02, 0.02) + s_perturb = U(-0.1, -0.0) + v_perturb = U(1.0, 1.5) + + def map_perturb(h, s, v): + return hsv2rgba(h + h_perturb, s + s_perturb, v / v_perturb) + + subsurface_ratio = 0.05 + roughness = 1.0 + mix_ratio = 0.2 + + cr = build_color_ramp( + nw, + nw.new_node(Nodes.NoiseTexture, input_kwargs={"Scale": 5.0}).outputs["Fac"], + [0, 0.5, 1], + [ + map_perturb(base_hue, 0.8, 0.1), + map_perturb(base_hue - 0.05, 0.8, 0.1), + (0.0, 0.0, 0.0, 1.0), + ], + ) + + background = map_perturb(base_hue, 0.8, 0.02) + mix_rgb = nw.new_node( + Nodes.MixRGB, + [ + nw.new_node(Nodes.ObjectInfo_Shader).outputs["Random"], + cr.outputs["Color"], + background, + ], + ) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": mix_rgb, + "Subsurface": subsurface_ratio, + "Subsurface Radius": (0.01, 0.01, 0.01), + "Subsurface Color": background, + "Roughness": roughness, + }, + ) + + translucent_bsdf = nw.new_node( + Nodes.TranslucentBSDF, input_kwargs={"Color": mix_rgb} + ) + + mix_shader = nw.new_node( + Nodes.MixShader, [mix_ratio, principled_bsdf, translucent_bsdf] + ) + return mix_shader + + def create_asset(self, face_size=0.01, **params): + obj = new_cube() + surface.add_geomod( + obj, self.geo_moss_instance, apply=True, input_args=[face_size] + ) + assign_material( + obj, + surface.shaderfunc_to_material( + MossFactory.shader_moss, (self.base_hue + U(-0.02, 0.02) % 1) + ), + ) + tag_object(obj, "moss") + return obj + + @staticmethod + def geo_moss_instance(nw: NodeWrangler, face_size): + radius = 0.008 + start = (0.0, 0.0, 0.0) + start_handle = (-0.03, 0.0, 0.02) + end = (-0.04, 0.0, U(0.04, 0.05)) + end_handle = (end[0] + U(-0.03, -0.02), 0.0, end[2] + U(-0.01, 0.0)) + bezier = nw.new_node( + Nodes.CurveBezierSegment, + input_kwargs={ + "Resolution": 10 * math.ceil(0.01 / face_size), + "Start": start, + "Start Handle": start_handle, + "End Handle": end_handle, + "End": end, + }, + ) + circle = nw.new_node( + Nodes.CurveCircle, input_kwargs={"Resolution": 4, "Radius": radius} + ).outputs["Curve"] + mesh = nw.curve2mesh(bezier, circle) + nw.new_node(Nodes.GroupOutput, input_kwargs={"Geometry": mesh}) diff --git a/infinigen/assets/objects/particles/particles.py b/infinigen/assets/objects/particles/particles.py new file mode 100644 index 000000000..a514bcd55 --- /dev/null +++ b/infinigen/assets/objects/particles/particles.py @@ -0,0 +1,131 @@ +# Copyright (c) Princeton University. 
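`MossFactory.geo_moss_instance` sweeps a 4-sided profile along a short bezier strand whose resolution is `10 * ceil(0.01 / face_size)`, so a smaller `face_size` yields a smoother strand. A usage sketch under the same assumptions as above (Blender's Python, package import from this patch):

```python
from infinigen.assets.objects.particles import MossFactory

fac = MossFactory(factory_seed=0)
strand = fac.create_asset(face_size=0.005)  # finer face_size -> higher curve resolution
```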
+# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Hei Law, Alexander Raistrick + + +import bpy +from numpy.random import normal as N + +from infinigen.assets.materials import dirt +from infinigen.core import surface +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.tagging import tag_object +from infinigen.infinigen_gpl.surfaces import snow + + +def shader_raindrop(nw): + glass_bsdf = nw.new_node( + "ShaderNodeBsdfGlass", + input_kwargs={ + "IOR": 1.33, + }, + ) + material_output = nw.new_node( + Nodes.MaterialOutput, + input_kwargs={ + "Surface": glass_bsdf, + }, + ) + + +def geo_raindrop(nw): + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ( + "NodeSocketGeometry", + "Geometry", + None, + ) + ], + ) + + position = nw.new_node(Nodes.InputPosition) + + vector_curves = nw.new_node( + Nodes.VectorCurve, + input_kwargs={ + "Vector": position, + }, + ) + node_utils.assign_curve( + vector_curves.mapping.curves[0], + [(-1.0, -1.0), (1.0, 1.0)], + ) + node_utils.assign_curve( + vector_curves.mapping.curves[1], + [(-1.0, -1.0), (1.0, 1.0)], + ) + node_utils.assign_curve( + vector_curves.mapping.curves[2], + [(-1.0, -0.15 * N(1, 0.15)), (-0.6091, -0.0938), (1.0, 1.0)], + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + "Position": vector_curves, + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": set_position, + }, + ) + + +class RaindropFactory(AssetFactory): + def create_asset(self, **kwargs): + bpy.ops.mesh.primitive_ico_sphere_add( + radius=1, + enter_editmode=False, + subdivisions=5, + align="WORLD", + location=(0, 0, 0), + scale=(1, 1, 1), + ) + + sphere = bpy.context.object + + surface.add_geomod(sphere, geo_raindrop, apply=True) + tag_object(sphere, "raindrop") + return sphere + + def finalize_assets(self, assets): + surface.add_material(assets, shader_raindrop) + + +class DustMoteFactory(AssetFactory): + def create_asset(self, **kwargs): + bpy.ops.mesh.primitive_ico_sphere_add( + radius=1, + subdivisions=2, + enter_editmode=False, + align="WORLD", + location=(0, 0, 0), + scale=(1, 1, 1), + ) + tag_object(bpy.context.object, "dustmote") + return bpy.context.object + + def finalize_assets(self, assets): + dirt.apply(assets) + + +class SnowflakeFactory(AssetFactory): + def create_asset(self, **params) -> bpy.types.Object: + bpy.ops.mesh.primitive_circle_add( + vertices=6, + fill_type="TRIFAN", + ) + tag_object(bpy.context.object, "snowflake") + return bpy.context.object + + def finalize_assets(self, assets): + snow.apply(assets, subsurface=0) diff --git a/infinigen/assets/objects/particles/pine_needle.py b/infinigen/assets/objects/particles/pine_needle.py new file mode 100644 index 000000000..778764d9d --- /dev/null +++ b/infinigen/assets/objects/particles/pine_needle.py @@ -0,0 +1,133 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
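All three particle factories defer shading to `finalize_assets`, so geometry can be generated in a batch and materialed in one pass afterwards. A sketch, assuming the base `AssetFactory` constructor takes just the seed (as in the subclasses elsewhere in this patch) and that `finalize_assets` accepts the batch it is handed:

```python
from infinigen.assets.objects.particles import RaindropFactory

fac = RaindropFactory(factory_seed=0)
drops = [fac.create_asset() for _ in range(10)]  # deformed ico-spheres
fac.finalize_assets(drops)                       # glass shader applied after geometry creation
```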
+ +# Authors: Alexander Raistrick, Lingjie Mei + +from numpy.random import normal as N + +from infinigen.core import surface +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.tagging import tag_object +from infinigen.core.util import blender as butil +from infinigen.core.util.color import color_category + + +def shader_material(nw: NodeWrangler): + # Code generated using version 2.6.3 of the node_transpiler + + object_info = nw.new_node(Nodes.ObjectInfo_Shader) + + colorramp = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": object_info.outputs["Random"]} + ) + colorramp.color_ramp.elements[0].position = 0.0000 + colorramp.color_ramp.elements[0].color = color_category("pine_needle") + colorramp.color_ramp.elements[1].position = 1.0000 + colorramp.color_ramp.elements[1].color = color_category("pine_needle") + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, input_kwargs={"Base Color": colorramp} + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf} + ) + + +@node_utils.to_nodegroup( + "nodegroup_pine_needle", singleton=False, type="GeometryNodeTree" +) +def nodegroup_pine_needle(nw: NodeWrangler): + # Code generated using version 2.6.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "Scale", 0.0400), + ("NodeSocketFloat", "Bend", 0.0300), + ("NodeSocketFloatDistance", "Radius", 0.0010), + ], + ) + + scale = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: (-1.0000, 0.0000, 0.0000), + "Scale": group_input.outputs["Scale"], + }, + attrs={"operation": "SCALE"}, + ) + + scale_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: (0.0000, 1.0000, 0.0000), + "Scale": group_input.outputs["Bend"], + }, + attrs={"operation": "SCALE"}, + ) + + scale_2 = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: (1.0000, 0.0000, 0.0000), + "Scale": group_input.outputs["Scale"], + }, + attrs={"operation": "SCALE"}, + ) + + quadratic_bezier = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Resolution": 5, + "Start": scale.outputs["Vector"], + "Middle": scale_1.outputs["Vector"], + "End": scale_2.outputs["Vector"], + }, + ) + + curve_circle = nw.new_node( + Nodes.CurveCircle, + input_kwargs={"Resolution": 6, "Radius": group_input.outputs["Radius"]}, + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": quadratic_bezier, + "Profile Curve": curve_circle.outputs["Curve"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": curve_to_mesh}, + attrs={"is_active_output": True}, + ) + + +class PineNeedleFactory(AssetFactory): + def sample_params(self): + s = N(1, 0.2) + return { + "Scale": 0.04 * s, + "Bend": 0.03 * s * N(1, 0.2), + "Radius": 0.001 * s * N(1, 0.2), + } + + def create_asset(self, **_): + obj = butil.spawn_vert("pine_needle") + butil.modify_mesh( + obj, + "NODES", + apply=True, + node_group=nodegroup_pine_needle(), + ng_inputs=self.sample_params(), + ) + tag_object(obj, "pine_needle") + return obj + + def finalize_assets(self, objs): + surface.add_material(objs, shader_material) diff --git a/infinigen/assets/rocks/__init__.py b/infinigen/assets/objects/rocks/__init__.py similarity index 63% rename from infinigen/assets/rocks/__init__.py rename to infinigen/assets/objects/rocks/__init__.py index b132ae749..c4d5123e0 100644 --- 
a/infinigen/assets/rocks/__init__.py +++ b/infinigen/assets/objects/rocks/__init__.py @@ -1,3 +1,3 @@ from .blender_rock import BlenderRockFactory from .boulder import BoulderFactory -from .glowing_rocks import GlowingRocksFactory \ No newline at end of file +from .glowing_rocks import GlowingRocksFactory diff --git a/infinigen/assets/rocks/blender_rock.py b/infinigen/assets/objects/rocks/blender_rock.py similarity index 64% rename from infinigen/assets/rocks/blender_rock.py rename to infinigen/assets/objects/rocks/blender_rock.py index 76fb9d1ce..217c36015 100644 --- a/infinigen/assets/rocks/blender_rock.py +++ b/infinigen/assets/objects/rocks/blender_rock.py @@ -5,20 +5,17 @@ import bpy -from mathutils import Vector import numpy as np -from numpy.random import uniform as U, normal as N +from numpy.random import uniform as U -from infinigen.core.util.blender import deep_clone_obj -from infinigen.core.util.math import FixedSeed -from infinigen.core.util import blender as butil from infinigen.core.placement.factory import AssetFactory -from infinigen.core.tagging import tag_object, tag_nodegroup +from infinigen.core.tagging import tag_object +from infinigen.core.util import blender as butil -bpy.ops.preferences.addon_enable(module='add_mesh_extra_objects') +bpy.ops.preferences.addon_enable(module="add_mesh_extra_objects") -class BlenderRockFactory(AssetFactory): +class BlenderRockFactory(AssetFactory): def __init__(self, factory_seed, detail=1): super(BlenderRockFactory, self).__init__(factory_seed) self.detail = detail @@ -36,11 +33,14 @@ def create_asset(self, **params): kwargs = dict( use_random_seed=False, user_seed=seed, - display_detail=self.detail, detail=self.detail, - scale_Z=(zrand*zscale, zscale), scale_fac=(1, 1, 1), - scale_X=(1.00, 1.01), scale_Y=(1.00, 1.01), # Bug occurs otherwise, I think - deform=U(2, 10), - rough=U(0.5, 1.0) # Higher than 1.0 can cause self-intersection + display_detail=self.detail, + detail=self.detail, + scale_Z=(zrand * zscale, zscale), + scale_fac=(1, 1, 1), + scale_X=(1.00, 1.01), + scale_Y=(1.00, 1.01), # Bug occurs otherwise, I think + deform=U(2, 10), + rough=U(0.5, 1.0), # Higher than 1.0 can cause self-intersection ) # The rock generator is poorly built. # It uses a weibull distribution to sample from a list, which will fail w/ 1.111% probability. @@ -56,6 +56,6 @@ def create_asset(self, **params): butil.apply_modifiers(obj) - tag_object(obj, 'blender_rock') + tag_object(obj, "blender_rock") - return obj \ No newline at end of file + return obj diff --git a/infinigen/assets/objects/rocks/boulder.py b/infinigen/assets/objects/rocks/boulder.py new file mode 100644 index 000000000..0feb92571 --- /dev/null +++ b/infinigen/assets/objects/rocks/boulder.py @@ -0,0 +1,191 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
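`BlenderRockFactory` is consumed later in this patch (see `glowing_rocks.py` below) by pre-generating a small collection of base rocks and instancing from it; a condensed version of that call, assuming Blender's Python environment:

```python
import numpy as np

from infinigen.assets.objects.rocks.blender_rock import BlenderRockFactory
from infinigen.core.placement.factory import make_asset_collection

# Build five low-detail base rocks once, then instance them as needed.
rock_collection = make_asset_collection(
    BlenderRockFactory(np.random.randint(1e5), detail=1),
    name="glow_rock_base",
    n=5,
)
```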
+ +# Authors: Lingjie Mei + + +import logging +from functools import reduce + +import bpy +import gin +import numpy as np +import trimesh.convex +from numpy.random import uniform + +from infinigen.assets.utils.decorate import geo_extension +from infinigen.assets.utils.object import trimesh2obj +from infinigen.core import surface +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.placement import detail +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.placement.split_in_view import split_inview +from infinigen.core.tagging import tag_object +from infinigen.core.util import blender as butil +from infinigen.core.util.blender import deep_clone_obj +from infinigen.core.util.math import FixedSeed +from infinigen.core.util.random import log_uniform + +logger = logging.getLogger(__name__) + + +class BoulderFactory(AssetFactory): + config_mappings = {"boulder": [True, False], "slab": [False, True]} + + def __init__( + self, + factory_seed, + meshing_camera=None, + adapt_mesh_method="remesh", + cam_meshing_max_dist=1e7, + coarse=False, + do_voronoi=True, + ): + super(BoulderFactory, self).__init__(factory_seed, coarse) + + self.camera = meshing_camera + self.cam_meshing_max_dist = cam_meshing_max_dist + self.adapt_mesh_method = adapt_mesh_method + + self.octree_depth = 3 + self.do_voronoi = do_voronoi + self.weights = [0.8, 0.2] + self.configs = ["boulder", "slab"] + with FixedSeed(factory_seed): + self.rock_surface = surface.registry("rock_collection") + method = np.random.choice(self.configs, p=self.weights) + self.has_horizontal_cut, self.is_slab = self.config_mappings[method] + + @gin.configurable + def create_placeholder(self, boulder_scale=1, **kwargs) -> bpy.types.Object: + butil.select_none() + + vertices = np.random.uniform(-1, 1, (32, 3)) + obj = trimesh2obj(trimesh.convex.convex_hull(vertices)) + surface.add_geomod(obj, self.geo_extrusion, apply=True) + butil.modify_mesh( + obj, "SUBSURF", render_levels=2, levels=2, subdivision_type="SIMPLE" + ) + + obj.location[-1] += obj.dimensions[-1] * 0.2 + butil.apply_transform(obj, loc=True) + if self.is_slab: + obj.scale = *log_uniform(0.5, 2.0, 2), log_uniform(0.1, 0.15) + else: + obj.scale = *log_uniform(0.4, 1.2, 2), log_uniform(0.4, 0.8) + + obj.scale *= boulder_scale + butil.apply_transform(obj) + obj.rotation_euler[0] = uniform(-np.pi / 24, np.pi / 24) + butil.apply_transform(obj) + obj.rotation_euler[-1] = uniform(0, np.pi * 2) + butil.apply_transform(obj) + + with butil.SelectObjects(obj): + bpy.ops.geometry.attribute_convert(mode="VERTEX_GROUP") + + butil.modify_mesh( + obj, + "BEVEL", + limit_method="VGROUP", + vertex_group="top", + invert_vertex_group=True, + offset_type="PERCENT", + width_pct=10, + ) + butil.modify_mesh( + obj, "REMESH", apply=True, mode="SHARP", octree_depth=self.octree_depth + ) + surface.add_geomod(obj, geo_extension, apply=True) + + if self.do_voronoi: + voronoi_texture = bpy.data.textures.new(name="boulder", type="VORONOI") + voronoi_texture.noise_scale = log_uniform(0.2, 0.5) + voronoi_texture.distance_metric = "DISTANCE" + butil.modify_mesh( + obj, "DISPLACE", texture=voronoi_texture, strength=0.01, mid_level=0 + ) + + voronoi_texture = bpy.data.textures.new(name="boulder", type="VORONOI") + voronoi_texture.noise_scale = log_uniform(0.05, 0.1) + voronoi_texture.distance_metric = "DISTANCE" + butil.modify_mesh( + obj, "DISPLACE", texture=voronoi_texture, strength=0.01, mid_level=0 + ) + + return obj + + def finalize_placeholders(self, 
placeholders): + with FixedSeed(self.factory_seed): + self.rock_surface.apply(placeholders, is_rock=True) + + @staticmethod + def geo_extrusion(nw: NodeWrangler, extrude_scale=1): + geometry = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) + face_area = nw.new_node(Nodes.InputMeshFaceArea) + + tops = [] + extrude_configs = [(uniform(0.2, 0.3), 0.8, 0.4), (0.6, 0.2, 0.6)] + top_facing = nw.compare_direction( + "LESS_THAN", nw.new_node(Nodes.InputNormal), (0, 0, 1), np.pi * 2 / 3 + ) + for prob, extrude, scale in extrude_configs: + extrude = extrude * extrude_scale + face_area_stats = nw.new_node( + Nodes.AttributeStatistic, + [geometry, None, face_area], + attrs={"domain": "FACE"}, + ).outputs + selection = reduce( + lambda *xs: nw.boolean_math("AND", *xs), + [ + top_facing, + nw.bernoulli(prob), + nw.compare("GREATER_THAN", face_area, face_area_stats["Mean"]), + ], + ) + geometry, top, side = nw.new_node( + Nodes.ExtrudeMesh, + [geometry, selection, None, nw.uniform(extrude * 0.5, extrude)], + ).outputs + geometry = nw.new_node( + Nodes.ScaleElements, [geometry, top, nw.uniform(scale * 0.5, scale)] + ) + tops.append(top) + + geometry = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": geometry, + "Name": "top", + "Value": reduce(lambda *xs: nw.boolean_math("OR", *xs), tops), + }, + ) + nw.new_node(Nodes.GroupOutput, input_kwargs={"Geometry": geometry}) + + def create_asset(self, i, placeholder, face_size=0.01, distance=0, **params): + if self.camera is not None and distance < self.cam_meshing_max_dist: + assert self.adapt_mesh_method != "remesh" + skin_obj, outofview, vert_dists, _ = split_inview( + placeholder, cam=self.camera, vis_margin=0.15 + ) + butil.parent_to(outofview, skin_obj, no_inverse=True, no_transform=True) + face_size = detail.target_face_size(vert_dists.min()) + else: + skin_obj = deep_clone_obj( + placeholder, keep_modifiers=True, keep_materials=True + ) + + butil.parent_to(skin_obj, placeholder, no_inverse=True, no_transform=True) + + with butil.DisableModifiers(skin_obj): + detail.adapt_mesh_resolution( + skin_obj, face_size, method=self.adapt_mesh_method, apply=True + ) + + butil.apply_modifiers(skin_obj) + tag_object(skin_obj, "boulder") + + return skin_obj diff --git a/infinigen/assets/rocks/glowing_rocks.py b/infinigen/assets/objects/rocks/glowing_rocks.py similarity index 63% rename from infinigen/assets/rocks/glowing_rocks.py rename to infinigen/assets/objects/rocks/glowing_rocks.py index a52dbfe21..fafff3d3b 100644 --- a/infinigen/assets/rocks/glowing_rocks.py +++ b/infinigen/assets/objects/rocks/glowing_rocks.py @@ -7,50 +7,68 @@ import bpy import gin import numpy as np -from infinigen.core.util import blender as butil +from infinigen.assets.objects.rocks.blender_rock import BlenderRockFactory +from infinigen.core import surface from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler from infinigen.core.placement.factory import AssetFactory, make_asset_collection -from infinigen.assets.rocks.blender_rock import BlenderRockFactory -from infinigen.core import surface +from infinigen.core.tagging import tag_object +from infinigen.core.util import blender as butil from infinigen.core.util.color import color_category -from infinigen.core.tagging import tag_object, tag_nodegroup + def shader_glowrock(nw: NodeWrangler, transparent_for_bounce=True): object_info = nw.new_node(Nodes.ObjectInfo_Shader) - white_noise = nw.new_node(Nodes.WhiteNoiseTexture, attrs={"noise_dimensions": "4D"}, - 
input_kwargs={"Vector": (object_info, "Random")}) - mix_rgb = nw.new_node(Nodes.MixRGB, [0.6, (white_noise, "Color"), tuple(color_category("gem"))]) + white_noise = nw.new_node( + Nodes.WhiteNoiseTexture, + attrs={"noise_dimensions": "4D"}, + input_kwargs={"Vector": (object_info, "Random")}, + ) + mix_rgb = nw.new_node( + Nodes.MixRGB, [0.6, (white_noise, "Color"), tuple(color_category("gem"))] + ) translucent_bsdf = nw.new_node(Nodes.TranslucentBSDF, [mix_rgb]) transparent_bsdf = nw.new_node(Nodes.TransparentBSDF, [mix_rgb]) is_camera_ray = nw.new_node(Nodes.LightPath) if transparent_for_bounce else 1 - mix_shader = nw.new_node(Nodes.MixShader, [is_camera_ray, transparent_bsdf, translucent_bsdf]) + mix_shader = nw.new_node( + Nodes.MixShader, [is_camera_ray, transparent_bsdf, translucent_bsdf] + ) nw.new_node(Nodes.MaterialOutput, [mix_shader]) + @gin.configurable class GlowingRocksFactory(AssetFactory): - def quickly_resample(obj): assert obj.type == "EMPTY", obj.type obj.rotation_euler[:] = np.random.uniform(-np.pi, np.pi, size=(3,)) - def __init__(self, factory_seed, coarse=False, transparent_for_bounce=True, watt_power_range=(400, 800), **kwargs): + def __init__( + self, + factory_seed, + coarse=False, + transparent_for_bounce=True, + watt_power_range=(400, 800), + **kwargs, + ): super().__init__(factory_seed, coarse=coarse) if coarse: return self.watt_power_range = watt_power_range - self.rock_collection = make_asset_collection(BlenderRockFactory(np.random.randint(1e5), detail=1), - name="glow_rock_base", n=5) - + self.rock_collection = make_asset_collection( + BlenderRockFactory(np.random.randint(1e5), detail=1), + name="glow_rock_base", + n=5, + ) + for o in self.rock_collection.objects: - butil.modify_mesh(o, 'SUBSURF', levels=2) + butil.modify_mesh(o, "SUBSURF", levels=2) self.material = surface.shaderfunc_to_material(shader_glowrock) def create_placeholder(self, i, loc, rot): - placeholder = butil.spawn_empty('placeholder', disp_type='SPHERE', s=0.1) + placeholder = butil.spawn_empty("placeholder", disp_type="SPHERE", s=0.1) return placeholder - + def create_asset(self, *args, **kwargs) -> bpy.types.Object: src_obj = np.random.choice(list(self.rock_collection.objects)) new_obj = butil.deep_clone_obj(src_obj) @@ -62,13 +80,19 @@ def create_asset(self, *args, **kwargs) -> bpy.types.Object: min_side_length = (bbox.max(axis=0) - bbox.min(axis=0)).min() # Diameter is set to half the shortest edge of the bbox - bpy.ops.object.light_add(type='POINT', radius=min_side_length * 1.0, align='WORLD', location=(0, 0, 0), - rotation=(0, 0, 0), scale=(1, 1, 1)) + bpy.ops.object.light_add( + type="POINT", + radius=min_side_length * 1.0, + align="WORLD", + location=(0, 0, 0), + rotation=(0, 0, 0), + scale=(1, 1, 1), + ) point_light = bpy.context.selected_objects[0] point_light.data.energy = round(np.random.uniform(*self.watt_power_range)) point_light.parent = new_obj butil.apply_transform(new_obj) - tag_object(new_obj, 'glowing_rocks') - + tag_object(new_obj, "glowing_rocks") + return new_obj diff --git a/infinigen/assets/rocks/pile.py b/infinigen/assets/objects/rocks/pile.py similarity index 83% rename from infinigen/assets/rocks/pile.py rename to infinigen/assets/objects/rocks/pile.py index e6d37fd0b..320754ca3 100644 --- a/infinigen/assets/rocks/pile.py +++ b/infinigen/assets/objects/rocks/pile.py @@ -5,26 +5,25 @@ import bpy -import tqdm import numpy as np +import tqdm from numpy.random import uniform -from infinigen.assets.rocks.boulder import BoulderFactory -from 
infinigen.assets.utils.physics import free_fall -from infinigen.core.placement.detail import remesh_with_attrs -from infinigen.core.placement.factory import AssetFactory import infinigen.core.util.blender as butil +from infinigen.assets.objects.rocks.boulder import BoulderFactory from infinigen.assets.utils.decorate import multi_res +from infinigen.assets.utils.draw import surface_from_func from infinigen.assets.utils.misc import toggle_hide from infinigen.assets.utils.object import join_objects -from infinigen.assets.utils.draw import surface_from_func -from infinigen.core.util.blender import deep_clone_obj +from infinigen.assets.utils.physics import free_fall +from infinigen.core.placement.detail import remesh_with_attrs +from infinigen.core.placement.factory import AssetFactory from infinigen.core.tagging import tag_object +from infinigen.core.util.blender import deep_clone_obj from infinigen.core.util.random import log_uniform class BoulderPileFactory(AssetFactory): - def __init__(self, factory_seed, coarse=False): super().__init__(factory_seed, coarse) self.factory = BoulderFactory(factory_seed, coarse) @@ -39,7 +38,7 @@ def floor_fn(x, y): return np.maximum(x, alpha * x) mesh = surface_from_func(floor_fn, 32, 32, 12, 12) - obj = bpy.data.objects.new('floor', mesh) + obj = bpy.data.objects.new("floor", mesh) bpy.context.scene.collection.objects.link(obj) return obj @@ -51,12 +50,18 @@ def place_boulder(obj, height): def create_placeholder(self, **kwargs): n = np.random.randint(3, 5) - empty = butil.spawn_empty('placeholder', disp_type='CUBE', s=8) + empty = butil.spawn_empty("placeholder", disp_type="CUBE", s=8) objects = [] for i in range(n): - empty_ = butil.spawn_empty('placeholder', disp_type='CUBE', s=8) - scale = [1, log_uniform(.4, .6), log_uniform(.2, .4), log_uniform(.2, .4), log_uniform(.2, .4), - log_uniform(.1, .2)] + empty_ = butil.spawn_empty("placeholder", disp_type="CUBE", s=8) + scale = [ + 1, + log_uniform(0.4, 0.6), + log_uniform(0.2, 0.4), + log_uniform(0.2, 0.4), + log_uniform(0.2, 0.4), + log_uniform(0.1, 0.2), + ] p = self.factory.create_placeholder() p.parent = empty_ objects.append(p.children[0]) @@ -76,7 +81,7 @@ def create_placeholder(self, **kwargs): def create_asset(self, placeholder, face_size=0.01, **params) -> bpy.types.Object: objects = [] - for c in tqdm.tqdm(placeholder.children, desc='Creating boulder assets'): + for c in tqdm.tqdm(placeholder.children, desc="Creating boulder assets"): p = c.children[0] a = self.factory.create_asset(placeholder=p) a.location = p.children[0].location @@ -96,5 +101,5 @@ def create_asset(self, placeholder, face_size=0.01, **params) -> bpy.types.Objec butil.delete(c) multi_res(obj) remesh_with_attrs(obj, face_size) - tag_object(obj, 'pile') + tag_object(obj, "pile") return obj diff --git a/infinigen/assets/seating/__init__.py b/infinigen/assets/objects/seating/__init__.py similarity index 71% rename from infinigen/assets/seating/__init__.py rename to infinigen/assets/objects/seating/__init__.py index 5abb955c1..8cc29004c 100644 --- a/infinigen/assets/seating/__init__.py +++ b/infinigen/assets/objects/seating/__init__.py @@ -3,9 +3,13 @@ # Authors: Lingjie Mei -from .sofa import SofaFactory, ArmChairFactory +from .bed import BedFactory from .bedframe import BedFrameFactory -from .pillow import PillowFactory +from .chairs import ( + BarChairFactory, + ChairFactory, + OfficeChairFactory, +) from .mattress import MattressFactory -from .bed import BedFactory -from .chairs import * +from .pillow import PillowFactory +from 
.sofa import ArmChairFactory, SofaFactory diff --git a/infinigen/assets/seating/bed.py b/infinigen/assets/objects/seating/bed.py similarity index 65% rename from infinigen/assets/seating/bed.py rename to infinigen/assets/objects/seating/bed.py index 7a437dcd1..c6ba18c7f 100644 --- a/infinigen/assets/seating/bed.py +++ b/infinigen/assets/objects/seating/bed.py @@ -9,80 +9,85 @@ import trimesh from numpy.random import uniform -from ..scatters import clothes -from . import BedFrameFactory, MattressFactory, PillowFactory -from ..scatters.clothes import ClothesCover -from ..utils.decorate import decimate, read_co, subsurf -from ..utils.object import obj2trimesh -from ...core import surface +from infinigen.assets.objects.seating import bedframe, mattress, pillow +from infinigen.assets.scatters import clothes +from infinigen.assets.utils.decorate import decimate, read_co, subsurf +from infinigen.assets.utils.object import obj2trimesh from infinigen.core.util import blender as butil -from infinigen.core.util.random import random_general as rg, log_uniform -from ...core.util.blender import deep_clone_obj +from infinigen.core.util.blender import deep_clone_obj +from infinigen.core.util.random import log_uniform +from infinigen.core.util.random import random_general as rg -class BedFactory(BedFrameFactory): - mattress_types = 'weighted_choice', (1, 'coiled'), (3, 'wrapped') - sheet_types = 'weighted_choice', (4, 'quilt'), (4, 'comforter'), (4, 'box_comforter'), (1, 'none') +class BedFactory(bedframe.BedFrameFactory): + mattress_types = "weighted_choice", (1, "coiled"), (3, "wrapped") + sheet_types = ( + "weighted_choice", + (4, "quilt"), + (4, "comforter"), + (4, "box_comforter"), + (1, "none"), + ) def __init__(self, factory_seed, coarse=False): super(BedFactory, self).__init__(factory_seed, coarse) self.sheet_type = rg(self.sheet_types) - self.sheet_folded = uniform() < .5 - self.has_cover = uniform() < .5 - self.clothes_scatter = ClothesCover((.3, .7, .3, .7)) if uniform() < 0.2 else surface.NoApply + self.sheet_folded = uniform() < 0.5 + self.has_cover = uniform() < 0.5 @cached_property def mattress_factory(self): - factory = MattressFactory(self.factory_seed, self.coarse) + factory = mattress.MattressFactory(self.factory_seed, self.coarse) factory.type = rg(self.mattress_types) - factory.width = self.width * uniform(.88, .96) - factory.size = self.size * uniform(.88, .96) + factory.width = self.width * uniform(0.88, 0.96) + factory.size = self.size * uniform(0.88, 0.96) return factory @cached_property def quilt_factory(self): from ..clothes.blanket import BlanketFactory + factory = BlanketFactory(self.factory_seed, self.coarse) factory.width = self.mattress_factory.width * uniform(1.4, 1.6) - factory.size = self.mattress_factory.size * uniform(.9, 1.1) + factory.size = self.mattress_factory.size * uniform(0.9, 1.1) return factory @cached_property def comforter_factory(self): from ..clothes.blanket import ComforterFactory + factory = ComforterFactory(self.factory_seed, self.coarse) factory.width = self.mattress_factory.width * uniform(1.4, 1.8) - factory.size = self.mattress_factory.size * uniform(.9, 1.2) + factory.size = self.mattress_factory.size * uniform(0.9, 1.2) return factory @cached_property def box_comforter_factory(self): from ..clothes.blanket import BoxComforterFactory + factory = BoxComforterFactory(self.factory_seed, self.coarse) factory.width = self.mattress_factory.width * uniform(1.4, 1.8) - factory.size = self.mattress_factory.size * uniform(.9, 1.2) + factory.size = 
self.mattress_factory.size * uniform(0.9, 1.2) return factory @cached_property def cover_factory(self): from ..clothes.blanket import BlanketFactory + factory = BlanketFactory(self.factory_seed, self.coarse) factory.width = self.mattress_factory.width * uniform(1.6, 1.8) - factory.size = self.mattress_factory.size * uniform(.3, .4) + factory.size = self.mattress_factory.size * uniform(0.3, 0.4) return factory @cached_property def towel_factory(self): from ..clothes import TowelFactory - return TowelFactory(self.factory_seed) - @cached_property - def cloth_scatter(self): - return ClothesCover((.3, .7, .3, .7)) if uniform() < 0.0 else surface.NoApply + return TowelFactory(self.factory_seed) @cached_property def pillow_factory(self): - return PillowFactory(self.factory_seed, self.coarse) + return pillow.PillowFactory(self.factory_seed, self.coarse) def create_asset(self, i, **params) -> bpy.types.Object: frame = super().create_asset(i=i, **params) @@ -90,7 +95,6 @@ def create_asset(self, i, **params) -> bpy.types.Object: mattress = self.make_mattress(i) sheet = self.make_sheet(i, mattress, frame) cover = self.make_cover(i, sheet, mattress) - self.cloth_scatter.apply(sheet) n_pillows = np.random.randint(2, 4) if n_pillows > 0: @@ -100,9 +104,12 @@ def create_asset(self, i, **params) -> bpy.types.Object: pillows = [] self.pillow_factory.finalize_assets(pillows) points = np.stack( - [uniform(.1, .4, 10) * self.size, - uniform(-.3, .3, 10) * self.width, - np.full(10, 1)], -1 + [ + uniform(0.1, 0.4, 10) * self.size, + uniform(-0.3, 0.3, 10) * self.width, + np.full(10, 1), + ], + -1, ) self.scatter(pillows, points, [sheet, mattress]) @@ -114,9 +121,12 @@ def create_asset(self, i, **params) -> bpy.types.Object: towels = [] self.towel_factory.finalize_assets(towels) points = np.stack( - [uniform(.5, .8, 10) * self.size, - uniform(-.3, .3, 10) * self.width, - np.full(10, 1)], -1 + [ + uniform(0.5, 0.8, 10) * self.size, + uniform(-0.3, 0.3, 10) * self.width, + np.full(10, 1), + ], + -1, ) self.scatter(towels, points, [sheet, mattress]) @@ -135,12 +145,12 @@ def make_mattress(self, i): def make_sheet(self, i, mattress, obj): match self.sheet_type: - case 'quilt': + case "quilt": factory = self.quilt_factory pressure = 0 - case 'comforter': + case "comforter": factory = self.comforter_factory - pressure = uniform(1., 1.5) + pressure = uniform(1.0, 1.5) case _: factory = self.box_comforter_factory pressure = log_uniform(8, 15) @@ -149,12 +159,18 @@ def make_sheet(self, i, mattress, obj): factory.fold(sheet) factory.finalize_assets(sheet) z_sheet = mattress.location[-1] + np.max(read_co(mattress)[:, -1]) - sheet.location = factory.size / 2 + uniform(0, .15), 0, z_sheet + sheet.location = factory.size / 2 + uniform(0, 0.15), 0, z_sheet sheet.rotation_euler[-1] = np.pi / 2 butil.apply_transform(sheet, True) clothes.cloth_sim( - sheet, [mattress, obj], mass=.05, tension_stiffness=2, distance_min=5e-3, use_pressure=True, - uniform_pressure_force=pressure, use_self_collision=self.sheet_folded + sheet, + [mattress, obj], + mass=0.05, + tension_stiffness=2, + distance_min=5e-3, + use_pressure=True, + uniform_pressure_force=pressure, + use_self_collision=self.sheet_folded, ) subsurf(sheet, 2) return sheet @@ -163,11 +179,16 @@ def make_cover(self, i, sheet, mattress): cover = self.cover_factory(i) self.cover_factory.finalize_assets(cover) z_sheet = sheet.location[-1] + np.max(read_co(sheet)[:, -1]) - cover.location = self.size / 2 + uniform(0, .3), 0, z_sheet + cover.location = self.size / 2 + uniform(0, 0.3), 0, 
z_sheet cover.rotation_euler[-1] = np.pi / 2 butil.apply_transform(cover, True) clothes.cloth_sim( - cover, [sheet, mattress], 80, mass=.05, tension_stiffness=2, distance_min=5e-3 + cover, + [sheet, mattress], + 80, + mass=0.05, + tension_stiffness=2, + distance_min=5e-3, ) subsurf(cover, 2) return cover @@ -177,10 +198,13 @@ def scatter(self, pillows, points, bases): lengths = np.full(len(points), np.inf) for b in bases: lengths = np.minimum( - lengths, trimesh.proximity.longest_ray(obj2trimesh(b), points, np.repeat(dir, len(points), 0)) + lengths, + trimesh.proximity.longest_ray( + obj2trimesh(b), points, np.repeat(dir, len(points), 0) + ), ) points += dir * lengths[:, np.newaxis] for a, loc in zip(pillows, decimate(points, len(pillows))): a.location = loc - a.location[-1] += .02 - np.min(read_co(a)[:, -1]) + a.location[-1] += 0.02 - np.min(read_co(a)[:, -1]) a.rotation_euler[-1] = uniform(0, np.pi) diff --git a/infinigen/assets/objects/seating/bedframe.py b/infinigen/assets/objects/seating/bedframe.py new file mode 100644 index 000000000..d6555856b --- /dev/null +++ b/infinigen/assets/objects/seating/bedframe.py @@ -0,0 +1,231 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Lingjie Mei +import bpy +import numpy as np +from numpy.random import uniform + +from infinigen.assets.material_assignments import AssetList +from infinigen.assets.objects.seating.chairs.chair import ChairFactory +from infinigen.assets.objects.seating.mattress import make_coiled +from infinigen.assets.utils.decorate import ( + read_co, + read_normal, + remove_faces, + select_faces, + subdivide_edge_ring, + write_attribute, + write_co, +) +from infinigen.assets.utils.object import join_objects, new_grid +from infinigen.core import surface +from infinigen.core.util import blender as butil +from infinigen.core.util.blender import deep_clone_obj +from infinigen.core.util.math import FixedSeed +from infinigen.core.util.random import log_uniform +from infinigen.core.util.random import random_general as rg + + +class BedFrameFactory(ChairFactory): + scale = 1.0 + leg_decor_types = ( + "weighted_choice", + (2, "coiled"), + (2, "pad"), + (1, "plain"), + (2, "legs"), + ) + back_types = ( + "weighted_choice", + (3, "coiled"), + (3, "pad"), + (2, "whole"), + (1, "horizontal-bar"), + (1, "vertical-bar"), + ) + + def __init__(self, factory_seed, coarse=False): + super().__init__(factory_seed, coarse) + with FixedSeed(self.factory_seed): + self.width = log_uniform(1.4, 2.4) + self.size = uniform(2, 2.4) + self.thickness = uniform(0.05, 0.12) + self.has_all_legs = uniform() < 0.2 + self.leg_thickness = uniform(0.08, 0.12) + self.leg_height = uniform(0.2, 0.6) + self.leg_decor_type = rg(self.leg_decor_types) + self.leg_decor_wrapped = uniform() < 0.5 + self.back_height = uniform(0.5, 1.3) + self.seat_back = 1 + self.seat_subdivisions_x = np.random.randint(1, 4) + self.seat_subdivisions_y = int(log_uniform(4, 10)) + self.has_arm = False + self.leg_type = "vertical" + self.leg_x_offset = 0 + self.leg_y_offset = 0, 0 + self.back_x_offset = 0 + self.back_y_offset = 0 + + materials = AssetList["BedFrameFactory"]() + self.surface = materials["surface"].assign_material() + self.limb_surface = materials["limb_surface"].assign_material() + + scratch_prob, edge_wear_prob = materials["wear_tear_prob"] + self.scratch, self.edge_wear = materials["wear_tear"] + self.scratch = None if uniform() > scratch_prob 
else self.scratch + self.edge_wear = None if uniform() > edge_wear_prob else self.edge_wear + + self.clothes_scatter = surface.NoApply + self.dot_distance = log_uniform(0.16, 0.2) + self.dot_size = uniform(0.005, 0.02) + self.dot_depth = uniform(0.04, 0.08) + self.panel_distance = uniform(0.3, 0.5) + self.panel_margin = uniform(0.01, 0.02) + self.post_init() + + def make_seat(self): + obj = new_grid( + x_subdivisions=self.seat_subdivisions_x, + y_subdivisions=self.seat_subdivisions_y, + ) + obj.scale = ( + (self.width - self.leg_thickness) / 2, + (self.size - self.leg_thickness) / 2, + 1, + ) + butil.apply_transform(obj, True) + with butil.ViewportMode(obj, "EDIT"): + bpy.ops.mesh.select_all(action="SELECT") + bpy.ops.mesh.delete(type="ONLY_FACE") + bpy.ops.mesh.select_mode(type="EDGE") + bpy.ops.mesh.select_all(action="SELECT") + bpy.ops.mesh.extrude_edges_move( + TRANSFORM_OT_translate={"value": (0, 0, self.thickness)} + ) + butil.modify_mesh( + obj, + "SOLIDIFY", + thickness=self.leg_thickness - 1e-3, + offset=0, + solidify_mode="NON_MANIFOLD", + ) + obj.location = 0, -self.size / 2, -self.thickness / 2 + butil.apply_transform(obj, True) + butil.modify_mesh(obj, "BEVEL", width=self.bevel_width, segments=8) + return obj + + def make_legs(self): + legs = super().make_legs() + if self.has_all_legs: + leg_starts = np.array( + [[-1, -0.5, 0], [0, -1, 0], [0, 0, 0], [1, -0.5, 0]] + ) * np.array([[self.width / 2, self.size, 0]]) + leg_ends = leg_starts.copy() + leg_ends[0, 0] -= self.leg_x_offset + leg_ends[3, 0] += self.leg_x_offset + leg_ends[2, 1] += self.leg_y_offset[0] + leg_ends[1, 1] -= self.leg_y_offset[1] + leg_ends[:, -1] = -self.leg_height + legs += self.make_limb(leg_ends, leg_starts) + return legs + + def make_leg_decors(self, legs): + if self.leg_decor_type == "none": + return super().make_leg_decors(legs) + obj = join_objects([deep_clone_obj(_) for _ in legs]) + x, y, z = read_co(obj).T + z = np.maximum(z, -self.leg_height * uniform(0.7, 0.9)) + write_co(obj, np.stack([x, y, z], -1)) + with butil.ViewportMode(obj, "EDIT"): + bpy.ops.mesh.select_all(action="SELECT") + bpy.ops.mesh.convex_hull() + bpy.ops.mesh.normals_make_consistent(inside=False) + remove_faces(obj, np.abs(read_normal(obj)[:, -1]) > 0.5) + if self.leg_decor_wrapped: + x, y, z = read_co(obj).T + x[x < 0] -= self.leg_thickness / 2 + 1e-3 + x[x > 0] += self.leg_thickness / 2 + 1e-3 + y[y < -self.size / 2] -= self.leg_thickness / 2 + 1e-3 + y[y > -self.size / 2] += self.leg_thickness / 2 + 1e-3 + write_co(obj, np.stack([x, y, z], -1)) + match self.leg_decor_type: + case "coiled": + self.divide(obj, self.dot_distance) + make_coiled(obj, self.dot_distance, self.dot_depth, self.dot_size) + case "pad": + self.divide(obj, self.panel_distance) + with butil.ViewportMode(obj, "EDIT"): + bpy.ops.mesh.select_all(action="SELECT") + bpy.ops.mesh.inset( + thickness=self.panel_margin, + depth=self.panel_margin, + use_individual=True, + ) + butil.modify_mesh(obj, "BEVEL", segments=4) + write_attribute(obj, 1, "panel", "FACE") + return [obj] + + def divide(self, obj, distance): + for i, size in enumerate(obj.dimensions): + axis = np.zeros(3) + axis[i] = 1 + distance = distance if i != 2 else distance * uniform(0.5, 1.0) + subdivide_edge_ring(obj, int(np.ceil(size / distance)), axis) + + def make_back_decors(self, backs, finalize=True): + decors = super().make_back_decors(backs) + match self.back_type: + case "coiled": + obj = self.make_back(backs) + self.divide(obj, self.dot_distance) + make_coiled(obj, self.dot_distance, 
self.dot_depth, self.dot_size) + obj.scale = (1 - 1e-3,) * 3 + write_attribute(obj, 1, "panel", "FACE") + with butil.ViewportMode(decors[0], "EDIT"): + bpy.ops.mesh.select_all(action="SELECT") + bpy.ops.mesh.bisect( + plane_co=(0, 0, self.back_height), + plane_no=(0, 0, 1), + clear_inner=True, + ) + return [obj] + decors + case "pad": + obj = self.make_back(backs) + self.divide(obj, self.panel_distance) + with butil.ViewportMode(obj, "EDIT"): + select_faces(obj, np.abs(read_normal(obj)[:, 1]) > 0.5) + bpy.ops.mesh.inset( + thickness=self.panel_margin, + depth=self.panel_margin, + use_individual=True, + ) + butil.modify_mesh(obj, "BEVEL", segments=4) + write_attribute(obj, 1, "panel", "FACE") + obj.scale = (1 - 1e-3,) * 3 + with butil.ViewportMode(decors[0], "EDIT"): + bpy.ops.mesh.select_all(action="SELECT") + bpy.ops.mesh.bisect( + plane_co=(0, 0, self.back_height), + plane_no=(0, 0, 1), + clear_inner=True, + ) + return [obj] + decors + case _: + return decors + + def make_back(self, backs): + obj = join_objects([deep_clone_obj(b) for b in backs]) + with butil.ViewportMode(obj, "EDIT"): + bpy.ops.mesh.select_all(action="SELECT") + bpy.ops.mesh.convex_hull() + butil.modify_mesh( + obj, + "SOLIDIFY", + thickness=np.minimum(self.thickness, self.leg_thickness), + offset=0, + ) + with butil.ViewportMode(obj, "EDIT"): + bpy.ops.mesh.select_all(action="SELECT") + bpy.ops.mesh.normals_make_consistent(inside=False) + return obj diff --git a/infinigen/assets/seating/chairs/__init__.py b/infinigen/assets/objects/seating/chairs/__init__.py similarity index 100% rename from infinigen/assets/seating/chairs/__init__.py rename to infinigen/assets/objects/seating/chairs/__init__.py index 5d10c9c3b..a7f276345 100644 --- a/infinigen/assets/seating/chairs/__init__.py +++ b/infinigen/assets/objects/seating/chairs/__init__.py @@ -3,5 +3,5 @@ # Authors: Lingjie Mei from .bar_chair import BarChairFactory -from .office_chair import OfficeChairFactory from .chair import ChairFactory +from .office_chair import OfficeChairFactory diff --git a/infinigen/assets/objects/seating/chairs/bar_chair.py b/infinigen/assets/objects/seating/chairs/bar_chair.py new file mode 100644 index 000000000..0bb805b4f --- /dev/null +++ b/infinigen/assets/objects/seating/chairs/bar_chair.py @@ -0,0 +1,207 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
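`BedFrameFactory` reuses the `ChairFactory` build pipeline (defined later in this patch under `chairs/chair.py`), overriding the seat, leg, and decor steps to produce a bed frame with optional coiled or padded panels. A usage sketch, importing through the `seating` package `__init__` updated above and assuming Blender's Python:

```python
from infinigen.assets.objects.seating import BedFrameFactory

fac = BedFrameFactory(factory_seed=0)
bbox = fac.create_placeholder()  # coarse bounds inherited from ChairFactory
frame = fac.create_asset()       # seat grid, legs, back and decors joined into one mesh
fac.finalize_assets(frame)       # scratch / edge-wear, if sampled
```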
+ +# Authors: Yiming Zuo + + +import bpy +from numpy.random import choice, uniform + +from infinigen.assets.material_assignments import AssetList +from infinigen.assets.objects.seating.chairs.seats.round_seats import ( + generate_round_seats, +) +from infinigen.assets.objects.tables.cocktail_table import geometry_create_legs +from infinigen.core import surface, tagging +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.util.math import FixedSeed + + +def geometry_assemble_chair(nw: NodeWrangler, **kwargs): + # Code generated using version 2.6.4 of the node_transpiler + generateseat = nw.new_node( + generate_round_seats( + thickness=kwargs["Top Thickness"], + radius=kwargs["Top Profile Width"], + seat_material=kwargs["SeatMaterial"], + ).name + ) + + seat_instance = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": generateseat, + "Translation": (0.0000, 0.0000, kwargs["Top Height"]), + }, + ) + + legs = nw.new_node(geometry_create_legs(**kwargs).name) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [seat_instance, legs]} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": join_geometry}, + attrs={"is_active_output": True}, + ) + + +class BarChairFactory(AssetFactory): + def __init__(self, factory_seed, coarse=False, dimensions=None): + super(BarChairFactory, self).__init__(factory_seed, coarse=coarse) + + self.dimensions = dimensions + + with FixedSeed(factory_seed): + self.params, leg_style = self.sample_parameters(dimensions) + self.material_params, self.scratch, self.edge_wear = ( + self.get_material_params(leg_style) + ) + + self.params.update(self.material_params) + + def get_material_params(self, leg_style): + material_assignments = AssetList["BarChairFactory"](leg_style=leg_style) + + params = { + "SeatMaterial": material_assignments["seat"].assign_material(), + "LegMaterial": material_assignments["leg"].assign_material(), + } + wrapped_params = { + k: surface.shaderfunc_to_material(v) for k, v in params.items() + } + + scratch_prob, edge_wear_prob = material_assignments["wear_tear_prob"] + scratch, edge_wear = material_assignments["wear_tear"] + + is_scratch = uniform() < scratch_prob + is_edge_wear = uniform() < edge_wear_prob + if not is_scratch: + scratch = None + + if not is_edge_wear: + edge_wear = None + + return wrapped_params, scratch, edge_wear + + @staticmethod + def sample_parameters(dimensions): + # all in meters + if dimensions is None: + x = uniform(0.35, 0.45) + z = uniform(0.7, 1) + dimensions = (x, x, z) + + x, y, z = dimensions + + top_thickness = uniform(0.06, 0.10) + + leg_style = choice(["straight", "single_stand", "wheeled"]) + + parameters = { + "Top Profile Width": x, + "Top Thickness": top_thickness, + "Height": z, + "Top Height": z - top_thickness, + "Leg Style": leg_style, + "Leg NGon": choice([4, 32]), + "Leg Placement Top Relative Scale": 0.7, + "Leg Placement Bottom Relative Scale": uniform(1.1, 1.3), + "Leg Height": 1.0, + } + + if leg_style == "single_stand": + leg_number = 1 + leg_diameter = uniform(0.7 * x, 0.9 * x) + + leg_curve_ctrl_pts = [ + (0.0, uniform(0.1, 0.2)), + (0.5, uniform(0.1, 0.2)), + (0.9, uniform(0.2, 0.3)), + (1.0, 1.0), + ] + + parameters.update( + { + "Leg Number": leg_number, + "Leg Diameter": leg_diameter, + "Leg Curve Control Points": leg_curve_ctrl_pts, + # 'Leg Material': choice(['metal', 'wood']) + } + ) + + elif leg_style == "straight": + 
leg_diameter = uniform(0.04, 0.06) + leg_number = choice([3, 4]) + + leg_curve_ctrl_pts = [ + (0.0, 1.0), + (0.4, uniform(0.85, 0.95)), + (1.0, uniform(0.4, 0.6)), + ] + + parameters.update( + { + "Leg Number": leg_number, + "Leg Diameter": leg_diameter, + "Leg Curve Control Points": leg_curve_ctrl_pts, + # 'Leg Material': choice(['metal', 'wood']), + "Strecher Relative Pos": uniform(0.6, 0.9), + "Strecher Increament": choice([0, 1, 2]), + } + ) + + elif leg_style == "wheeled": + leg_diameter = uniform(0.03, 0.05) + leg_number = 1 + pole_number = choice([4, 5]) + joint_height = uniform(0.5, 0.8) * (z - top_thickness) + wheel_arc_sweep_angle = uniform(120, 240) + wheel_width = uniform(0.11, 0.15) + wheel_rot = uniform(0, 360) + pole_length = uniform(1.6, 2.0) + + parameters.update( + { + "Leg Number": leg_number, + "Leg Pole Number": pole_number, + "Leg Diameter": leg_diameter, + "Leg Joint Height": joint_height, + "Leg Wheel Arc Sweep Angle": wheel_arc_sweep_angle, + "Leg Wheel Width": wheel_width, + "Leg Wheel Rot": wheel_rot, + "Leg Pole Length": pole_length, + # 'Leg Material': choice(['metal']) + } + ) + + else: + raise NotImplementedError + + return parameters, leg_style + + def create_asset(self, **params): + bpy.ops.mesh.primitive_plane_add( + size=2, + enter_editmode=False, + align="WORLD", + location=(0, 0, 0), + scale=(1, 1, 1), + ) + obj = bpy.context.active_object + + surface.add_geomod( + obj, geometry_assemble_chair, apply=True, input_kwargs=self.params + ) + tagging.tag_system.relabel_obj(obj) + + return obj + + def finalize_assets(self, assets): + if self.scratch: + self.scratch.apply(assets) + if self.edge_wear: + self.edge_wear.apply(assets) diff --git a/infinigen/assets/objects/seating/chairs/chair.py b/infinigen/assets/objects/seating/chairs/chair.py new file mode 100644 index 000000000..d83e3f395 --- /dev/null +++ b/infinigen/assets/objects/seating/chairs/chair.py @@ -0,0 +1,456 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
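`BarChairFactory` samples all of its parameters once in `__init__` (under `FixedSeed`) and assembles the chair with a single geometry-nodes modifier, so one factory instance corresponds to one fixed design; wear passes are applied separately in `finalize_assets`. A usage sketch, assuming Blender's Python:

```python
from infinigen.assets.objects.seating.chairs import BarChairFactory

fac = BarChairFactory(factory_seed=7, dimensions=(0.4, 0.4, 0.85))  # (x, y, z) in meters
chair = fac.create_asset()
fac.finalize_assets(chair)  # scratch / edge-wear, if sampled
```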
+ +# Authors: Lingjie Mei +import bpy +import numpy as np +from numpy.random import uniform + +from infinigen.assets.material_assignments import AssetList +from infinigen.assets.utils.decorate import ( + read_co, + read_edge_center, + read_edge_direction, + remove_edges, + remove_vertices, + select_edges, + solidify, + subsurf, + write_attribute, + write_co, +) +from infinigen.assets.utils.draw import align_bezier, bezier_curve +from infinigen.assets.utils.nodegroup import geo_radius +from infinigen.assets.utils.object import join_objects, new_bbox +from infinigen.core import surface +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.surface import NoApply +from infinigen.core.util import blender as butil +from infinigen.core.util.blender import deep_clone_obj +from infinigen.core.util.math import FixedSeed +from infinigen.core.util.random import log_uniform +from infinigen.core.util.random import random_general as rg + + +class ChairFactory(AssetFactory): + back_types = ( + "weighted_choice", + (1, "whole"), + (1, "partial"), + (1, "horizontal-bar"), + (1, "vertical-bar"), + ) + + def __init__(self, factory_seed, coarse=False): + super().__init__(factory_seed, coarse) + with FixedSeed(self.factory_seed): + self.width = uniform(0.4, 0.5) + self.size = uniform(0.38, 0.45) + self.thickness = uniform(0.04, 0.08) + self.bevel_width = self.thickness * (0.1 if uniform() < 0.4 else 0.5) + self.seat_back = uniform(0.7, 1.0) if uniform() < 0.75 else 1.0 + self.seat_mid = uniform(0.7, 0.8) + self.seat_mid_x = uniform( + self.seat_back + self.seat_mid * (1 - self.seat_back), 1 + ) + self.seat_mid_z = uniform(0, 0.5) + self.seat_front = uniform(1.0, 1.2) + self.is_seat_round = uniform() < 0.6 + self.is_seat_subsurf = uniform() < 0.5 + + self.leg_thickness = uniform(0.04, 0.06) + self.limb_profile = uniform(1.5, 2.5) + self.leg_height = uniform(0.45, 0.5) + self.back_height = uniform(0.4, 0.5) + self.is_leg_round = uniform() < 0.5 + self.leg_type = np.random.choice( + ["vertical", "straight", "up-curved", "down-curved"] + ) + + self.leg_x_offset = 0 + self.leg_y_offset = 0, 0 + self.back_x_offset = 0 + self.back_y_offset = 0 + + self.has_leg_x_bar = uniform() < 0.6 + self.has_leg_y_bar = uniform() < 0.6 + self.leg_offset_bar = uniform(0.2, 0.4), uniform(0.6, 0.8) + + self.has_arm = uniform() < 0.7 + self.arm_thickness = uniform(0.04, 0.06) + self.arm_height = self.arm_thickness * uniform(0.6, 1) + self.arm_y = uniform(0.8, 1) * self.size + self.arm_z = uniform(0.3, 0.6) * self.back_height + self.arm_mid = np.array( + [uniform(-0.03, 0.03), uniform(-0.03, 0.09), uniform(-0.09, 0.03)] + ) + self.arm_profile = log_uniform(0.1, 3, 2) + + self.back_thickness = uniform(0.04, 0.05) + self.back_type = rg(self.back_types) + self.back_profile = [(0, 1)] + self.back_vertical_cuts = np.random.randint(1, 4) + self.back_partial_scale = uniform(1, 1.4) + + materials = AssetList["ChairFactory"]() + self.limb_surface = materials["limb"].assign_material() + self.surface = materials["surface"].assign_material() + if uniform() < 0.3: + self.panel_surface = self.surface + else: + self.panel_surface = materials["panel"].assign_material() + + scratch_prob, edge_wear_prob = materials["wear_tear_prob"] + self.scratch, self.edge_wear = materials["wear_tear"] + is_scratch = uniform() < scratch_prob + is_edge_wear = uniform() < edge_wear_prob + if not is_scratch: + self.scratch = None + if not is_edge_wear: + self.edge_wear = None + + # from infinigen.assets.clothes import blanket + # from 
infinigen.assets.scatters.clothes import ClothesCover + # self.clothes_scatter = ClothesCover(factory_fn=blanket.BlanketFactory, width=log_uniform(.8, 1.2), + # size=uniform(.8, 1.2)) if uniform() < .3 else NoApply() + self.clothes_scatter = NoApply() + self.post_init() + + def post_init(self): + with FixedSeed(self.factory_seed): + if self.leg_type == "vertical": + self.leg_x_offset = 0 + self.leg_y_offset = 0, 0 + self.back_x_offset = 0 + self.back_y_offset = 0 + else: + self.leg_x_offset = self.width * uniform(0.05, 0.2) + self.leg_y_offset = self.size * uniform(0.05, 0.2, 2) + self.back_x_offset = self.width * uniform(-0.1, 0.15) + self.back_y_offset = self.size * uniform(0.1, 0.25) + + match self.back_type: + case "partial": + self.back_profile = ((uniform(0.4, 0.8), 1),) + case "horizontal-bar": + n_cuts = np.random.randint(2, 4) + locs = uniform(1, 2, n_cuts).cumsum() + locs = locs / locs[-1] + ratio = uniform(0.5, 0.75) + locs = np.array( + [ + (p + ratio * (l - p), l) + for p, l in zip([0, *locs[:-1]], locs) + ] + ) + lowest = uniform(0, 0.4) + self.back_profile = locs * (1 - lowest) + lowest + case "vertical-bar": + self.back_profile = ((uniform(0.8, 0.9), 1),) + case _: + self.back_profile = [(0, 1)] + + def create_placeholder(self, **kwargs) -> bpy.types.Object: + obj = new_bbox( + -self.width / 2 - max(self.leg_x_offset, self.back_x_offset), + self.width / 2 + max(self.leg_x_offset, self.back_x_offset), + -self.size - self.leg_y_offset[1] - self.leg_thickness * 0.5, + max(self.leg_y_offset[0], self.back_y_offset), + -self.leg_height, + self.back_height * 1.2, + ) + obj.rotation_euler.z += np.pi / 2 + butil.apply_transform(obj) + return obj + + def create_asset(self, **params) -> bpy.types.Object: + obj = self.make_seat() + legs = self.make_legs() + backs = self.make_backs() + + parts = [obj] + legs + backs + parts.extend(self.make_leg_decors(legs)) + if self.has_arm: + parts.extend(self.make_arms(obj, backs)) + parts.extend(self.make_back_decors(backs)) + + for obj in legs: + self.solidify(obj, 2) + for obj in backs: + self.solidify(obj, 2, self.back_thickness) + + obj = join_objects(parts) + obj.rotation_euler.z += np.pi / 2 + butil.apply_transform(obj) + + with FixedSeed(self.factory_seed): + # TODO: wasteful to create unique materials for each individual asset + self.surface.apply(obj) + self.panel_surface.apply(obj, selection="panel") + self.limb_surface.apply(obj, selection="limb") + + return obj + + def finalize_assets(self, assets): + if self.scratch: + self.scratch.apply(assets) + if self.edge_wear: + self.edge_wear.apply(assets) + + def make_seat(self): + x_anchors = ( + np.array( + [ + 0, + -self.seat_back, + -self.seat_mid_x, + -1, + 0, + 1, + self.seat_mid_x, + self.seat_back, + 0, + ] + ) + * self.width + / 2 + ) + y_anchors = ( + np.array( + [0, 0, -self.seat_mid, -1, -self.seat_front, -1, -self.seat_mid, 0, 0] + ) + * self.size + ) + z_anchors = ( + np.array([0, 0, self.seat_mid_z, 0, 0, 0, self.seat_mid_z, 0, 0]) + * self.thickness + ) + vector_locations = [1, 7] if self.is_seat_round else [1, 3, 5, 7] + obj = bezier_curve((x_anchors, y_anchors, z_anchors), vector_locations, 8) + with butil.ViewportMode(obj, "EDIT"): + bpy.ops.mesh.select_all(action="SELECT") + bpy.ops.mesh.fill_grid(use_interp_simple=True) + butil.modify_mesh(obj, "SOLIDIFY", thickness=self.thickness, offset=0) + subsurf(obj, 1, not self.is_seat_subsurf) + butil.modify_mesh(obj, "BEVEL", width=self.bevel_width, segments=8) + return obj + + def make_legs(self): + leg_starts = np.array( + 
[[-self.seat_back, 0, 0], [-1, -1, 0], [1, -1, 0], [self.seat_back, 0, 0]] + ) * np.array([[self.width / 2, self.size, 0]]) + leg_ends = leg_starts.copy() + leg_ends[[0, 1], 0] -= self.leg_x_offset + leg_ends[[2, 3], 0] += self.leg_x_offset + leg_ends[[0, 3], 1] += self.leg_y_offset[0] + leg_ends[[1, 2], 1] -= self.leg_y_offset[1] + leg_ends[:, -1] = -self.leg_height + return self.make_limb(leg_ends, leg_starts) + + def make_limb(self, leg_ends, leg_starts): + limbs = [] + for leg_start, leg_end in zip(leg_starts, leg_ends): + match self.leg_type: + case "up-curved": + axes = [(0, 0, 1), None] + scale = [self.limb_profile, 1] + case "down-curved": + axes = [None, (0, 0, 1)] + scale = [1, self.limb_profile] + case _: + axes = None + scale = None + limb = align_bezier( + np.stack([leg_start, leg_end], -1), axes, scale, resolution=64 + ) + limb.location = ( + np.array( + [ + 1 if leg_start[0] < 0 else -1, + 1 if leg_start[1] < -self.size / 2 else -1, + 0, + ] + ) + * self.leg_thickness + / 2 + ) + butil.apply_transform(limb, True) + limbs.append(limb) + return limbs + + def make_backs(self): + back_starts = ( + np.array([[-self.seat_back, 0, 0], [self.seat_back, 0, 0]]) * self.width / 2 + ) + back_ends = back_starts.copy() + back_ends[:, 0] += np.array([self.back_x_offset, -self.back_x_offset]) + back_ends[:, 1] = self.back_y_offset + back_ends[:, 2] = self.back_height + return self.make_limb(back_starts, back_ends) + + def make_leg_decors(self, legs): + decors = [] + if self.has_leg_x_bar: + z_height = -self.leg_height * uniform(*self.leg_offset_bar) + locs = [] + for leg in legs: + co = read_co(leg) + locs.append(co[np.argmin(np.abs(co[:, -1] - z_height))]) + decors.append( + self.solidify(bezier_curve(np.stack([locs[0], locs[3]], -1)), 0) + ) + decors.append( + self.solidify(bezier_curve(np.stack([locs[1], locs[2]], -1)), 0) + ) + if self.has_leg_y_bar: + z_height = -self.leg_height * uniform(*self.leg_offset_bar) + locs = [] + for leg in legs: + co = read_co(leg) + locs.append(co[np.argmin(np.abs(co[:, -1] - z_height))]) + decors.append( + self.solidify(bezier_curve(np.stack([locs[0], locs[1]], -1)), 1) + ) + decors.append( + self.solidify(bezier_curve(np.stack([locs[2], locs[3]], -1)), 1) + ) + for d in decors: + write_attribute(d, 1, "limb", "FACE") + return decors + + def make_back_decors(self, backs, finalize=True): + obj = join_objects([deep_clone_obj(b) for b in backs]) + x, y, z = read_co(obj).T + x += np.where(x > 0, self.back_thickness / 2, -self.back_thickness / 2) + write_co(obj, np.stack([x, y, z], -1)) + smoothness = uniform(0, 1) + profile_shape_factor = uniform(0, 0.4) + with butil.ViewportMode(obj, "EDIT"): + bpy.ops.mesh.select_mode(type="EDGE") + center = read_edge_center(obj) + for z_min, z_max in self.back_profile: + select_edges( + obj, + (z_min * self.back_height <= center[:, -1]) + & (center[:, -1] <= z_max * self.back_height), + ) + bpy.ops.mesh.bridge_edge_loops( + number_cuts=32, + interpolation="LINEAR", + smoothness=smoothness, + profile_shape_factor=profile_shape_factor, + ) + bpy.ops.mesh.select_loose() + bpy.ops.mesh.delete() + butil.modify_mesh( + obj, + "SOLIDIFY", + thickness=np.minimum(self.thickness, self.back_thickness), + offset=0, + ) + if finalize: + butil.modify_mesh(obj, "BEVEL", width=self.bevel_width, segments=8) + parts = [obj] + if self.back_type == "vertical-bar": + other = join_objects([deep_clone_obj(b) for b in backs]) + with butil.ViewportMode(other, "EDIT"): + bpy.ops.mesh.select_mode(type="EDGE") + 
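+ # Vertical-bar back: bridge the two cloned back rails, strip the bridged faces, and keep only the near-vertical edges, which are later solidified into slats.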
bpy.ops.mesh.select_all(action="SELECT") + bpy.ops.mesh.bridge_edge_loops( + number_cuts=self.back_vertical_cuts, + interpolation="LINEAR", + smoothness=smoothness, + profile_shape_factor=profile_shape_factor, + ) + bpy.ops.mesh.select_all(action="INVERT") + bpy.ops.mesh.delete() + bpy.ops.mesh.select_all(action="SELECT") + bpy.ops.mesh.delete(type="ONLY_FACE") + remove_edges(other, np.abs(read_edge_direction(other)[:, -1]) < 0.5) + remove_vertices(other, lambda x, y, z: z < -self.thickness / 2) + remove_vertices( + other, + lambda x, y, z: z + > (self.back_profile[0][0] + self.back_profile[0][1]) + * self.back_height + / 2, + ) + parts.append(self.solidify(other, 2, self.back_thickness)) + elif self.back_type == "partial": + co = read_co(obj) + co[:, 1] *= self.back_partial_scale + write_co(obj, co) + for p in parts: + write_attribute(p, 1, "panel", "FACE") + return parts + + def make_arms(self, base, backs): + co = read_co(base) + end = co[np.argmin(co[:, 0] - (np.abs(co[:, 1] + self.arm_y) < 0.02))] + end[0] += self.arm_thickness / 4 + end_ = end.copy() + end_[0] = -end[0] + arms = [] + co = read_co(backs[0]) + start = co[np.argmin(co[:, 0] - (np.abs(co[:, -1] - self.arm_z) < 0.02))] + start[0] -= self.arm_thickness / 4 + start_ = start.copy() + start_[0] = -start[0] + for start, end in zip([start, start_], [end, end_]): + mid = np.array( + [ + end[0] + self.arm_mid[0] * (-1 if end[0] > 0 else 1), + end[1] + self.arm_mid[1], + start[2] + self.arm_mid[2], + ] + ) + arm = align_bezier( + np.stack([start, mid, end], -1), + np.array( + [ + [end[0] - start[0], end[1] - start[1], 0], + [0, 1 / np.sqrt(2), 1 / np.sqrt(2)], + [0, 0, 1], + ] + ), + [1, *self.arm_profile, 1], + ) + if self.is_leg_round: + surface.add_geomod( + arm, + geo_radius, + apply=True, + input_args=[self.arm_thickness / 2, 32], + input_kwargs={"to_align_tilt": False}, + ) + else: + with butil.ViewportMode(arm, "EDIT"): + bpy.ops.mesh.select_all(action="SELECT") + bpy.ops.mesh.extrude_edges_move( + TRANSFORM_OT_translate={ + "value": ( + self.arm_thickness + if end[0] < 0 + else -self.arm_thickness, + 0, + 0, + ) + } + ) + butil.modify_mesh(arm, "SOLIDIFY", thickness=self.arm_height, offset=0) + write_attribute(arm, 1, "limb", "FACE") + arms.append(arm) + return arms + + def solidify(self, obj, axis, thickness=None): + if thickness is None: + thickness = self.leg_thickness + if self.is_leg_round: + solidify(obj, axis, thickness) + butil.modify_mesh(obj, "BEVEL", width=self.bevel_width, segments=8) + else: + surface.add_geomod( + obj, geo_radius, apply=True, input_args=[thickness / 2, 32] + ) + write_attribute(obj, 1, "limb", "FACE") + return obj diff --git a/infinigen/assets/objects/seating/chairs/office_chair.py b/infinigen/assets/objects/seating/chairs/office_chair.py new file mode 100644 index 000000000..329c5ba04 --- /dev/null +++ b/infinigen/assets/objects/seating/chairs/office_chair.py @@ -0,0 +1,236 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
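+# Office chair generator: assembles the curvy seat node group with cocktail-table legs (single-stand or wheeled base).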
+ +# Authors: Yiming Zuo + +import bpy +import numpy as np +from numpy.random import choice, uniform + +from infinigen.assets.material_assignments import AssetList +from infinigen.assets.objects.seating.chairs.seats.curvy_seats import ( + generate_curvy_seats, +) +from infinigen.assets.objects.tables.cocktail_table import geometry_create_legs +from infinigen.core import surface, tagging +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.util import blender as butil +from infinigen.core.util.math import FixedSeed + + +def geometry_assemble_chair(nw: NodeWrangler, **kwargs): + # Code generated using version 2.6.4 of the node_transpiler + + generateseat = nw.new_node( + generate_curvy_seats().name, + input_kwargs={ + "Width": kwargs["Top Profile Width"], + "Front Relative Width": kwargs["Top Front Relative Width"], + "Front Bent": kwargs["Top Front Bent"], + "Seat Bent": kwargs["Top Seat Bent"], + "Mid Bent": kwargs["Top Mid Bent"], + "Mid Relative Width": kwargs["Top Mid Relative Width"], + "Back Bent": kwargs["Top Back Bent"], + "Back Relative Width": kwargs["Top Back Relative Width"], + "Mid Pos": kwargs["Top Mid Pos"], + "Seat Height": kwargs["Top Thickness"], + }, + ) + + seat_instance = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": generateseat, + "Translation": (0.0000, 0.0000, kwargs["Top Height"]), + }, + ) + + seat_instance = nw.new_node( + Nodes.SetMaterial, + input_kwargs={"Geometry": seat_instance, "Material": kwargs["TopMaterial"]}, + ) + + legs = nw.new_node(geometry_create_legs(**kwargs).name) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [seat_instance, legs]} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": join_geometry}, + attrs={"is_active_output": True}, + ) + + +class OfficeChairFactory(AssetFactory): + def __init__(self, factory_seed, coarse=False, dimensions=None): + super(OfficeChairFactory, self).__init__(factory_seed, coarse=coarse) + + self.dimensions = dimensions + + with FixedSeed(factory_seed): + self.params, leg_style = self.sample_parameters(dimensions) + self.material_params, self.scratch, self.edge_wear = ( + self.get_material_params(leg_style) + ) + self.params.update(self.material_params) + + def get_material_params(self, leg_style): + material_assignments = AssetList["OfficeChairFactory"](leg_style) + params = { + "TopMaterial": material_assignments["top"].assign_material(), + "LegMaterial": material_assignments["leg"].assign_material(), + } + wrapped_params = { + k: surface.shaderfunc_to_material(v) for k, v in params.items() + } + + scratch_prob, edge_wear_prob = material_assignments["wear_tear_prob"] + scratch, edge_wear = material_assignments["wear_tear"] + + is_scratch = uniform() < scratch_prob + is_edge_wear = uniform() < edge_wear_prob + if not is_scratch: + scratch = None + + if not is_edge_wear: + edge_wear = None + + return wrapped_params, scratch, edge_wear + + @staticmethod + def sample_parameters(dimensions): + # all in meters + + if dimensions is None: + x = uniform(0.5, 0.6) + z = uniform(1.0, 1.4) + dimensions = (x, x, z) + + x, y, z = dimensions + + top_thickness = uniform(0.5, 0.7) + + # straight has the bug that seat and legs are disjoint, so disable for now. 
+ + # leg_style = choice(['straight', 'single_stand', 'wheeled']) + leg_style = choice(["single_stand", "wheeled"]) + + parameters = { + "Top Profile Width": x, + "Top Thickness": top_thickness, + "Top Front Relative Width": uniform(0.5, 0.8), + "Top Front Bent": uniform(-1.5, -0.4), + "Top Seat Bent": uniform(-1.5, -0.4), + "Top Mid Bent": uniform(-2.4, -0.5), + "Top Mid Relative Width": uniform(0.5, 0.9), + "Top Back Bent": uniform(-1, -0.1), + "Top Back Relative Width": uniform(0.6, 0.9), + "Top Mid Pos": uniform(0.4, 0.6), + # 'Top Material': choice(['leather', 'wood', 'plastic', 'glass']), + "Height": z, + "Top Height": z - top_thickness, + "Leg Style": leg_style, + "Leg NGon": choice([4, 32]), + "Leg Placement Top Relative Scale": 0.7, + "Leg Placement Bottom Relative Scale": uniform(1.1, 1.3), + "Leg Height": 1.0, + } + + if leg_style == "single_stand": + leg_number = 1 + leg_diameter = uniform(0.7 * x, 0.9 * x) + + leg_curve_ctrl_pts = [ + (0.0, uniform(0.1, 0.2)), + (0.5, uniform(0.1, 0.2)), + (0.9, uniform(0.2, 0.3)), + (1.0, 1.0), + ] + + parameters.update( + { + "Leg Number": leg_number, + "Leg Diameter": leg_diameter, + "Leg Curve Control Points": leg_curve_ctrl_pts, + # 'Leg Material': choice(['metal', 'wood']) + } + ) + + elif leg_style == "straight": + leg_diameter = uniform(0.04, 0.06) + leg_number = 4 + + leg_curve_ctrl_pts = [ + (0.0, 1.0), + (0.4, uniform(0.85, 0.95)), + (1.0, uniform(0.4, 0.6)), + ] + + parameters.update( + { + "Leg Number": leg_number, + "Leg Diameter": leg_diameter, + "Leg Curve Control Points": leg_curve_ctrl_pts, + # 'Leg Material': choice(['metal', 'wood']), + "Strecher Relative Pos": uniform(0.2, 0.6), + "Strecher Increament": choice([0, 1, 2]), + } + ) + + elif leg_style == "wheeled": + leg_diameter = uniform(0.03, 0.05) + leg_number = 1 + pole_number = choice([4, 5]) + joint_height = uniform(0.5, 0.8) * (z - top_thickness) + wheel_arc_sweep_angle = uniform(120, 240) + wheel_width = uniform(0.11, 0.15) + wheel_rot = uniform(0, 360) + pole_length = uniform(1.6, 2.0) + + parameters.update( + { + "Leg Number": leg_number, + "Leg Pole Number": pole_number, + "Leg Diameter": leg_diameter, + "Leg Joint Height": joint_height, + "Leg Wheel Arc Sweep Angle": wheel_arc_sweep_angle, + "Leg Wheel Width": wheel_width, + "Leg Wheel Rot": wheel_rot, + "Leg Pole Length": pole_length, + # 'Leg Material': choice(['metal']) + } + ) + + else: + raise NotImplementedError + + return parameters, leg_style + + def create_asset(self, **params): + bpy.ops.mesh.primitive_plane_add( + size=2, + enter_editmode=False, + align="WORLD", + location=(0, 0, 0), + scale=(1, 1, 1), + ) + obj = bpy.context.active_object + + surface.add_geomod( + obj, geometry_assemble_chair, apply=True, input_kwargs=self.params + ) + tagging.tag_system.relabel_obj(obj) + + obj.rotation_euler.z += np.pi / 2 + butil.apply_transform(obj) + + return obj + + def finalize_assets(self, assets): + if self.scratch: + self.scratch.apply(assets) + if self.edge_wear: + self.edge_wear.apply(assets) diff --git a/infinigen/assets/objects/seating/chairs/seats/curvy_seats.py b/infinigen/assets/objects/seating/chairs/seats/curvy_seats.py new file mode 100644 index 000000000..5e131c936 --- /dev/null +++ b/infinigen/assets/objects/seating/chairs/seats/curvy_seats.py @@ -0,0 +1,356 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
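+# Lofts a stack of bent profile circles into a seat/backrest surface, then warps it along a bezier curve.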
+ +# Authors: Yiming Zuo + + +from infinigen.assets.objects.table_decorations.utils import ( + nodegroup_lofting, + nodegroup_warp_around_curve, +) +from infinigen.assets.objects.tables.table_utils import nodegroup_bent +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler + +# TODO: set material automatically + + +@node_utils.to_nodegroup( + "generate_curvy_seats", singleton=False, type="GeometryNodeTree" +) +def generate_curvy_seats(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketInt", "U Resolution", 256), + ("NodeSocketInt", "V Resolution", 128), + ("NodeSocketFloat", "Width", 0.5000), + ("NodeSocketFloat", "Thickness", 0.0300), + ("NodeSocketFloat", "Front Relative Width", 0.5000), + ("NodeSocketFloat", "Front Bent", -0.3800), + ("NodeSocketFloat", "Seat Bent", -0.5600), + ("NodeSocketFloat", "Mid Relative Width", 0.5000), + ("NodeSocketFloat", "Mid Bent", -0.7000), + ("NodeSocketFloat", "Back Relative Width", 0.5000), + ("NodeSocketFloat", "Back Bent", -0.2000), + ("NodeSocketFloat", "Top Relative Width", 0.5000), + ("NodeSocketFloat", "Top Bent", -0.2000), + ("NodeSocketFloat", "Seat Height", 0.6000), + ("NodeSocketFloat", "Mid Pos", 0.5000), + ("NodeSocketMaterial", "SeatMaterial", None), + ], + ) + + curve_circle_1 = nw.new_node( + Nodes.CurveCircle, + input_kwargs={ + "Resolution": group_input.outputs["U Resolution"], + "Radius": 0.5000, + }, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": group_input.outputs["Width"], + "Y": group_input.outputs["Thickness"], + "Z": 1.0000, + }, + ) + + transform_geometry_1 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": curve_circle_1.outputs["Curve"], + "Translation": (0.0000, 0.0000, 0.5000), + "Scale": combine_xyz, + }, + ) + + bent = nw.new_node( + nodegroup_bent().name, + input_kwargs={ + "Geometry": transform_geometry_1, + "Amount": group_input.outputs["Seat Bent"], + }, + ) + + curve_circle_2 = nw.new_node( + Nodes.CurveCircle, + input_kwargs={ + "Resolution": group_input.outputs["U Resolution"], + "Radius": 0.5000, + }, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["Width"], + 1: group_input.outputs["Mid Relative Width"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_2 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": multiply, + "Y": group_input.outputs["Thickness"], + "Z": 1.0000, + }, + ) + + transform_geometry_2 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": curve_circle_2.outputs["Curve"], + "Translation": (0.0000, 0.0000, 1.0000), + "Scale": combine_xyz_2, + }, + ) + + bent_1 = nw.new_node( + nodegroup_bent().name, + input_kwargs={ + "Geometry": transform_geometry_2, + "Amount": group_input.outputs["Mid Bent"], + }, + ) + + curve_circle_3 = nw.new_node( + Nodes.CurveCircle, + input_kwargs={ + "Resolution": group_input.outputs["U Resolution"], + "Radius": 0.5000, + }, + ) + + transform_geometry_3 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": curve_circle_3.outputs["Curve"], + "Scale": (0.0000, 0.0050, 1.0000), + }, + ) + + curve_circle = nw.new_node( + Nodes.CurveCircle, + input_kwargs={ + "Resolution": group_input.outputs["U Resolution"], + "Radius": 0.5000, + }, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["Width"], + 1: group_input.outputs["Front Relative Width"], + 
}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": multiply_1, "Y": 0.0050, "Z": 1.0000} + ) + + transform_geometry = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": curve_circle.outputs["Curve"], + "Translation": (0.0000, 0.0000, 0.0600), + "Scale": combine_xyz_1, + }, + ) + + bent_2 = nw.new_node( + nodegroup_bent().name, + input_kwargs={ + "Geometry": transform_geometry, + "Amount": group_input.outputs["Front Bent"], + }, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [bent_1, bent, bent_2, transform_geometry_3]}, + ) + + curve_circle_4 = nw.new_node( + Nodes.CurveCircle, + input_kwargs={ + "Resolution": group_input.outputs["U Resolution"], + "Radius": 0.5000, + }, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["Width"], + 1: group_input.outputs["Back Relative Width"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_3 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": multiply_2, + "Y": group_input.outputs["Thickness"], + "Z": 1.0000, + }, + ) + + transform_geometry_4 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": curve_circle_4.outputs["Curve"], + "Translation": (0.0000, 0.0000, 1.5000), + "Scale": combine_xyz_3, + }, + ) + + bent_3 = nw.new_node( + nodegroup_bent().name, + input_kwargs={ + "Geometry": transform_geometry_4, + "Amount": group_input.outputs["Back Bent"], + }, + ) + + curve_circle_5 = nw.new_node( + Nodes.CurveCircle, + input_kwargs={ + "Resolution": group_input.outputs["U Resolution"], + "Radius": 0.5000, + }, + ) + + multiply_3 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["Width"], + 1: group_input.outputs["Top Relative Width"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_4 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": multiply_3, "Y": 0.0050, "Z": 1.0000} + ) + + transform_geometry_5 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": curve_circle_5.outputs["Curve"], + "Translation": (0.0000, 0.0000, 2.0200), + "Scale": combine_xyz_4, + }, + ) + + bent_4 = nw.new_node( + nodegroup_bent().name, + input_kwargs={ + "Geometry": transform_geometry_5, + "Amount": group_input.outputs["Top Bent"], + }, + ) + + curve_circle_6 = nw.new_node( + Nodes.CurveCircle, + input_kwargs={ + "Resolution": group_input.outputs["U Resolution"], + "Radius": 0.5000, + }, + ) + + transform_geometry_6 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": curve_circle_6.outputs["Curve"], + "Translation": (0.0000, 0.0000, 2.1000), + "Scale": (0.0000, 0.0050, 1.0000), + }, + ) + + join_geometry_2 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [transform_geometry_6, bent_4, bent_3]}, + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [join_geometry_2, join_geometry]} + ) + + lofting_001 = nw.new_node( + nodegroup_lofting().name, + input_kwargs={ + "Profile Curves": join_geometry_1, + "U Resolution": group_input.outputs["U Resolution"], + "V Resolution": group_input.outputs["V Resolution"], + }, + ) + + multiply_4 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Width"], 1: -0.5000}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_6 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"Y": multiply_4, "Z": 0.0300} + ) + + combine_xyz_7 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"Y": group_input.outputs["Mid Pos"], "Z": -0.0500}, + ) + + multiply_5 = 
nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Width"]}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_5 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"Y": multiply_5, "Z": group_input.outputs["Seat Height"]}, + ) + + bezier_segment = nw.new_node( + Nodes.CurveBezierSegment, + input_kwargs={ + "Resolution": 128, + "Start": combine_xyz_6, + "Start Handle": combine_xyz_7, + "End Handle": (0.0000, 0.1000, 0.1000), + "End": combine_xyz_5, + }, + ) + + warparoundcurvealt = nw.new_node( + nodegroup_warp_around_curve().name, + input_kwargs={ + "Geometry": lofting_001.outputs["Geometry"], + "Curve": bezier_segment, + }, + ) + + # material_func =np.random.choice([plastic.shader_rough_plastic, metal.get_shader(), wood_new.shader_wood, leather.shader_leather]) + + warparoundcurvealt = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": warparoundcurvealt, + "Material": group_input.outputs["SeatMaterial"], + }, + ) + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": warparoundcurvealt}, + attrs={"is_active_output": True}, + ) diff --git a/infinigen/assets/objects/seating/chairs/seats/round_seats.py b/infinigen/assets/objects/seating/chairs/seats/round_seats.py new file mode 100644 index 000000000..eea4cd75b --- /dev/null +++ b/infinigen/assets/objects/seating/chairs/seats/round_seats.py @@ -0,0 +1,67 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Yiming Zuo + + +from numpy.random import uniform + +from infinigen.assets.objects.tables.table_top import nodegroup_capped_cylinder +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler + + +@node_utils.to_nodegroup( + "generate_round_seats", singleton=False, type="GeometryNodeTree" +) +def generate_round_seats( + nw: NodeWrangler, + thickness=None, + radius=None, + cap_radius=None, + bevel_factor=None, + seat_material=None, +): + # Code generated using version 2.6.4 of the node_transpiler + if thickness is None: + thickness = uniform(0.05, 0.12) + if radius is None: + radius = uniform(0.35, 0.45) + if cap_radius is None: + cap_radius = uniform(2.0, 3.2) + if bevel_factor is None: + bevel_factor = uniform(0.01, 0.04) + + multiply = nw.new_node( + Nodes.Math, input_kwargs={0: thickness, 1: 1.0}, attrs={"operation": "MULTIPLY"} + ) + + divide = nw.new_node( + Nodes.Math, + input_kwargs={0: bevel_factor, 1: thickness}, + attrs={"operation": "DIVIDE"}, + ) + + cappedcylinder = nw.new_node( + nodegroup_capped_cylinder().name, + input_kwargs={ + "Thickness": multiply, + "Radius": radius, + "Cap Flatness": cap_radius, + "Fillet Radius Vertical": divide, + "Cap Relative Scale": 0.0140, + "Cap Relative Z Offset": -0.0020, + "Resolution": 128, + }, + ) + + seat = nw.new_node( + Nodes.SetMaterial, + input_kwargs={"Geometry": cappedcylinder, "Material": seat_material}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": seat}, + attrs={"is_active_output": True}, + ) diff --git a/infinigen/assets/seating/mattress.py b/infinigen/assets/objects/seating/mattress.py similarity index 54% rename from infinigen/assets/seating/mattress.py rename to infinigen/assets/objects/seating/mattress.py index 1d9a9612a..a7a0bbc28 100644 --- a/infinigen/assets/seating/mattress.py +++ b/infinigen/assets/objects/seating/mattress.py @@ -1,92 +1,100 @@ # Copyright (c) Princeton University. 
# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. +import bmesh + # Authors: Lingjie Mei import bpy -import bmesh import numpy as np from numpy.random import uniform +from infinigen.assets.material_assignments import AssetList from infinigen.assets.materials import fabrics from infinigen.assets.scatters import clothes -from infinigen.assets.utils.decorate import ( - subdivide_edge_ring, read_co, -) +from infinigen.assets.utils.decorate import read_co, subdivide_edge_ring from infinigen.assets.utils.object import new_bbox, new_cube from infinigen.core import surface -from infinigen.core.nodes import NodeWrangler, Nodes +from infinigen.core.nodes import Nodes, NodeWrangler from infinigen.core.placement.factory import AssetFactory from infinigen.core.surface import write_attr_data +from infinigen.core.util import blender as butil from infinigen.core.util.math import FixedSeed from infinigen.core.util.random import log_uniform -from infinigen.core.util import blender as butil from infinigen.core.util.random import random_general as rg -from infinigen.assets.material_assignments import AssetList - def make_coiled(obj, dot_distance, dot_depth, dot_size): - with butil.ViewportMode(obj, 'EDIT'): - bpy.ops.mesh.select_mode(type='FACE') - bpy.ops.mesh.select_all(action='SELECT') + with butil.ViewportMode(obj, "EDIT"): + bpy.ops.mesh.select_mode(type="FACE") + bpy.ops.mesh.select_all(action="SELECT") bpy.ops.mesh.poke() bpy.ops.mesh.tris_convert_to_quads() bpy.ops.mesh.poke() bpy.ops.mesh.poke() - bpy.ops.mesh.select_all(action='DESELECT') + bpy.ops.mesh.select_all(action="DESELECT") bm = bmesh.from_edit_mesh(obj.data) for v in bm.verts: if len(v.link_edges) == 16: v.select_set(True) bm.select_flush(False) bmesh.update_edit_mesh(obj.data) - radius = dot_distance * uniform(.06, .08) - bpy.ops.mesh.bevel(offset=radius, affect='VERTICES') - bpy.ops.mesh.extrude_region_shrink_fatten(TRANSFORM_OT_shrink_fatten={'value': -dot_depth}) - bpy.ops.mesh.extrude_region_shrink_fatten(TRANSFORM_OT_shrink_fatten={'value': dot_depth}) + radius = dot_distance * uniform(0.06, 0.08) + bpy.ops.mesh.bevel(offset=radius, affect="VERTICES") + bpy.ops.mesh.extrude_region_shrink_fatten( + TRANSFORM_OT_shrink_fatten={"value": -dot_depth} + ) + bpy.ops.mesh.extrude_region_shrink_fatten( + TRANSFORM_OT_shrink_fatten={"value": dot_depth} + ) bpy.ops.mesh.select_more() bpy.ops.mesh.select_more() - write_attr_data(obj, 'tip', np.zeros(len(obj.data.polygons)), domain='FACE') - surface.set_active(obj, 'tip') - with butil.ViewportMode(obj, 'EDIT'): + write_attr_data(obj, "tip", np.zeros(len(obj.data.polygons)), domain="FACE") + surface.set_active(obj, "tip") + with butil.ViewportMode(obj, "EDIT"): bpy.ops.mesh.attribute_set(value_float=1) def geo_scale(nw: NodeWrangler): - geometry = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketGeometry', 'Geometry', None)]) - selection = nw.new_node(Nodes.NamedAttribute, ['tip']) + geometry = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) + selection = nw.new_node(Nodes.NamedAttribute, ["tip"]) geometry = nw.new_node( Nodes.ScaleElements, - [geometry, selection, nw.combine(*([dot_size / radius] * 3))] + [geometry, selection, nw.combine(*([dot_size / radius] * 3))], ) - nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': geometry}) + nw.new_node(Nodes.GroupOutput, input_kwargs={"Geometry": geometry}) surface.add_geomod(obj, geo_scale, apply=True) - 
butil.modify_mesh(obj, 'TRIANGULATE', min_vertices=4) - butil.modify_mesh(obj, 'SMOOTH', factor=uniform(.5, 1.), iterations=5) + butil.modify_mesh(obj, "TRIANGULATE", min_vertices=4) + butil.modify_mesh(obj, "SMOOTH", factor=uniform(0.5, 1.0), iterations=5) class MattressFactory(AssetFactory): - types = 'weighted_choice', (1, 'coiled'), (1, 'wrapped') + types = "weighted_choice", (1, "coiled"), (1, "wrapped") def __init__(self, factory_seed, coarse=False): super().__init__(factory_seed, coarse) with FixedSeed(self.factory_seed): - self.width = log_uniform(.9, 2.) + self.width = log_uniform(0.9, 2.0) self.size = uniform(2, 2.4) - self.thickness = uniform(.2, .35) - self.dot_distance = log_uniform(.16, .2) - self.dot_size = uniform(.005, .02) - self.dot_depth = uniform(.04, .08) - self.wrap_distance = .05 - self.surface = fabrics - self.type= rg(self.types) - materials = AssetList['MattressFactory']() - self.surface = materials['surface'].assign_material() + self.thickness = uniform(0.2, 0.35) + self.dot_distance = log_uniform(0.16, 0.2) + self.dot_size = uniform(0.005, 0.02) + self.dot_depth = uniform(0.04, 0.08) + self.wrap_distance = 0.05 + self.surface = fabrics.fabric_random + self.type = rg(self.types) + materials = AssetList["MattressFactory"]() + self.surface = materials["surface"].assign_material() def create_placeholder(self, **kwargs) -> bpy.types.Object: return new_bbox( - -self.width / 2, self.width / 2, -self.size / 2, self.size / 2, -self.thickness / 2, - self.thickness / 2 + -self.width / 2, + self.width / 2, + -self.size / 2, + self.size / 2, + -self.thickness / 2, + self.thickness / 2, ) def create_asset(self, **params) -> bpy.types.Object: @@ -94,9 +102,9 @@ def create_asset(self, **params) -> bpy.types.Object: obj.scale = self.width / 2, self.size / 2, self.thickness / 2 butil.apply_transform(obj) match self.type: - case 'coiled': + case "coiled": self.make_coiled(obj) - case 'wrapped': + case "wrapped": self.make_wrapped(obj) return obj @@ -112,14 +120,19 @@ def make_wrapped(self, obj): axis = np.zeros(3) axis[i] = 1 subdivide_edge_ring(obj, int(np.ceil(size / self.wrap_distance)), axis) - butil.modify_mesh(obj, 'BEVEL', width=self.wrap_distance / 3, segments=2) - vg = obj.vertex_groups.new(name='pin') - vg.add(np.nonzero((read_co(obj)[:, -1] < 1e-1 - self.thickness / 2))[0].tolist(), 1, 'REPLACE') + butil.modify_mesh(obj, "BEVEL", width=self.wrap_distance / 3, segments=2) + vg = obj.vertex_groups.new(name="pin") + vg.add( + np.nonzero((read_co(obj)[:, -1] < 1e-1 - self.thickness / 2))[0].tolist(), + 1, + "REPLACE", + ) clothes.cloth_sim( - obj, gravity=0, + obj, + gravity=0, use_pressure=True, - uniform_pressure_force=uniform(.1, .2), - vertex_group_mass='pin' + uniform_pressure_force=uniform(0.1, 0.2), + vertex_group_mass="pin", ) def finalize_assets(self, assets): diff --git a/infinigen/assets/seating/pillow.py b/infinigen/assets/objects/seating/pillow.py similarity index 53% rename from infinigen/assets/seating/pillow.py rename to infinigen/assets/objects/seating/pillow.py index f5f871a24..d87d4a3a6 100644 --- a/infinigen/assets/seating/pillow.py +++ b/infinigen/assets/objects/seating/pillow.py @@ -6,91 +6,115 @@ import numpy as np from numpy.random import uniform +from infinigen.assets.material_assignments import AssetList from infinigen.assets.materials import art, fabrics from infinigen.assets.scatters import clothes -from infinigen.assets.utils.decorate import read_normal, read_selected, select_faces, subsurf, set_shade_smooth -from infinigen.assets.utils.object 
import center, join_objects, new_base_circle, new_grid +from infinigen.assets.utils.decorate import ( + read_normal, + read_selected, + select_faces, + set_shade_smooth, + subsurf, +) +from infinigen.assets.utils.object import ( + center, + join_objects, + new_base_circle, + new_grid, +) from infinigen.assets.utils.uv import unwrap_faces from infinigen.core.placement.factory import AssetFactory -from infinigen.core.util.random import log_uniform from infinigen.core.util import blender as butil +from infinigen.core.util.random import log_uniform from infinigen.core.util.random import random_general as rg -from infinigen.assets.material_assignments import AssetList class PillowFactory(AssetFactory): - shapes = 'weighted_choice', (4, 'square'), (4, 'rectangle'), (1, 'circle'), (1, 'torus') + shapes = ( + "weighted_choice", + (4, "square"), + (4, "rectangle"), + (1, "circle"), + (1, "torus"), + ) def __init__(self, factory_seed, coarse=False): super(PillowFactory, self).__init__(factory_seed, coarse) self.shape = rg(self.shapes) - self.width = uniform(.4, .7) + self.width = uniform(0.4, 0.7) match self.shape: - case 'square': + case "square": self.size = self.width case _: - self.size = self.width * log_uniform(.6, .8) - self.bevel_width = uniform(.02, .05) - self.thickness = log_uniform(.006, .008) - self.extrude_thickness = self.thickness * log_uniform(1, 8) if uniform() < .5 else 0 + self.size = self.width * log_uniform(0.6, 0.8) + self.bevel_width = uniform(0.02, 0.05) + self.thickness = log_uniform(0.006, 0.008) + self.extrude_thickness = ( + self.thickness * log_uniform(1, 8) if uniform() < 0.5 else 0 + ) self.surface = np.random.choice([art.ArtFabric(self.factory_seed), fabrics]) - self.has_seam = uniform() < .3 and not self.shape == 'torus' - self.seam_radius = uniform(.01, .02) + self.has_seam = uniform() < 0.3 and not self.shape == "torus" + self.seam_radius = uniform(0.01, 0.02) - materials = AssetList['PillowFactory']() - self.surface = materials['surface'].assign_material() + materials = AssetList["PillowFactory"]() + self.surface = materials["surface"].assign_material() if self.surface == art.ArtFabric: self.surface = self.surface(self.factory_seed) def create_asset(self, **params) -> bpy.types.Object: match self.shape: - case 'circle': + case "circle": obj = new_base_circle(vertices=128) - with butil.ViewportMode(obj, 'EDIT'): + with butil.ViewportMode(obj, "EDIT"): bpy.ops.mesh.fill_grid() - case 'torus': + case "torus": obj = new_base_circle(vertices=128) - inner = new_base_circle(vertices=128, radius=uniform(.2, .4)) + inner = new_base_circle(vertices=128, radius=uniform(0.2, 0.4)) obj = join_objects([obj, inner]) - with butil.ViewportMode(obj, 'EDIT'): - bpy.ops.mesh.select_all(action='SELECT') - bpy.ops.mesh.bridge_edge_loops(number_cuts=12, interpolation='LINEAR') + with butil.ViewportMode(obj, "EDIT"): + bpy.ops.mesh.select_all(action="SELECT") + bpy.ops.mesh.bridge_edge_loops( + number_cuts=12, interpolation="LINEAR" + ) obj = bpy.context.active_object case _: obj = new_grid(x_subdivisions=32, y_subdivisions=32) obj.scale = self.width / 2, self.size / 2, 1 butil.apply_transform(obj, True) unwrap_faces(obj) - butil.modify_mesh(obj, 'SOLIDIFY', thickness=self.thickness, offset=0) + butil.modify_mesh(obj, "SOLIDIFY", thickness=self.thickness, offset=0) normal = read_normal(obj) - group = obj.vertex_groups.new(name='pin') + group = obj.vertex_groups.new(name="pin") if self.has_seam: - with butil.ViewportMode(obj, 'EDIT'): - bpy.ops.mesh.select_mode(type='FACE') - 
select_faces(obj, lambda x, y, z: (x ** 2 + y ** 2 < self.seam_radius ** 2) & (z > 0)) + with butil.ViewportMode(obj, "EDIT"): + bpy.ops.mesh.select_mode(type="FACE") + select_faces( + obj, lambda x, y, z: (x**2 + y**2 < self.seam_radius**2) & (z > 0) + ) bpy.ops.mesh.region_to_loop() - bpy.ops.mesh.select_mode(type='VERT') + bpy.ops.mesh.select_mode(type="VERT") selection = read_selected(obj) - group.add(np.nonzero(selection)[0].tolist(), 1, 'REPLACE') - select_faces(obj, np.abs(normal[:, -1]) < .1) - + group.add(np.nonzero(selection)[0].tolist(), 1, "REPLACE") + select_faces(obj, np.abs(normal[:, -1]) < 0.1) + match self.shape: - case 'torus': + case "torus": pressure = uniform(8, 12) case _: pressure = uniform(1, 2) clothes.cloth_sim( - obj, tension_stiffness=uniform(0, 5), + obj, + tension_stiffness=uniform(0, 5), gravity=0, use_pressure=True, uniform_pressure_force=pressure, - vertex_group_mass='pin' if self.has_seam else "" + vertex_group_mass="pin" if self.has_seam else "", ) if self.extrude_thickness > 0: - with butil.ViewportMode(obj, 'EDIT'): + with butil.ViewportMode(obj, "EDIT"): bpy.ops.mesh.extrude_region_shrink_fatten( - TRANSFORM_OT_shrink_fatten={'value': self.extrude_thickness} + TRANSFORM_OT_shrink_fatten={"value": self.extrude_thickness} ) obj.location = -center(obj) butil.apply_transform(obj, True) @@ -100,16 +124,20 @@ def create_asset(self, **params) -> bpy.types.Object: def make_circle(self): obj = new_base_circle(vertices=128) - with butil.ViewportMode(obj, 'EDIT'): + with butil.ViewportMode(obj, "EDIT"): bpy.ops.mesh.fill_grid() - select_faces(obj, lambda x, y, z: x ** 2 + y ** 2 < self.seam_radius ** 2) + select_faces(obj, lambda x, y, z: x**2 + y**2 < self.seam_radius**2) bpy.ops.mesh.region_to_loop() return obj def make_gird(self): obj = new_grid(x_subdivisions=64, y_subdivisions=64) - with butil.ViewportMode(obj, 'EDIT'): - select_faces(obj, lambda x, y, z: (np.abs(x) < self.seam_radius) & (np.abs(y) < self.seam_radius)) + with butil.ViewportMode(obj, "EDIT"): + select_faces( + obj, + lambda x, y, z: (np.abs(x) < self.seam_radius) + & (np.abs(y) < self.seam_radius), + ) bpy.ops.mesh.region_to_loop() return obj diff --git a/infinigen/assets/objects/seating/sofa.py b/infinigen/assets/objects/seating/sofa.py new file mode 100644 index 000000000..4ab7c1931 --- /dev/null +++ b/infinigen/assets/objects/seating/sofa.py @@ -0,0 +1,1475 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
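+# Procedural sofa: geometry node groups assemble arms (square, round, or angular), a backboard, baseboard, seat cushions, legs, and an optional footrest from corner-cube and cone primitives.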
+ +# Authors: Alexander Raistrick, Stamatis Alexandropolous, Yiming Zuo + + +import bpy +import numpy as np +from numpy.random import uniform + +from infinigen.assets.material_assignments import AssetList +from infinigen.core import surface, tagging +from infinigen.core import tags as t +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.util import blender as butil +from infinigen.core.util.math import FixedSeed +from infinigen.core.util.random import clip_gaussian + + +@node_utils.to_nodegroup( + "nodegroup_array_fill_line", singleton=False, type="GeometryNodeTree" +) +def nodegroup_array_fill_line(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVector", "Line Start", (0.0000, 0.0000, 0.0000)), + ("NodeSocketVector", "Line End", (0.0000, 0.0000, 0.0000)), + ("NodeSocketVector", "Instance Dimensions", (0.0000, 0.0000, 0.0000)), + ("NodeSocketInt", "Count", 10), + ("NodeSocketGeometry", "Instance", None), + ], + ) + + multiply = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: group_input.outputs["Instance Dimensions"], + 1: (0.0000, -0.5000, 0.0000), + }, + attrs={"operation": "MULTIPLY"}, + ) + + add = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: group_input.outputs["Line End"], + 1: multiply.outputs["Vector"], + }, + ) + + subtract = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: group_input.outputs["Line Start"], + 1: multiply.outputs["Vector"], + }, + attrs={"operation": "SUBTRACT"}, + ) + + mesh_line = nw.new_node( + Nodes.MeshLine, + input_kwargs={ + "Count": group_input.outputs["Count"], + "Start Location": add.outputs["Vector"], + "Offset": subtract.outputs["Vector"], + }, + attrs={"mode": "END_POINTS"}, + ) + + instance_on_points_1 = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={"Points": mesh_line, "Instance": group_input.outputs["Instance"]}, + ) + + realize_instances_1 = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": instance_on_points_1} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": realize_instances_1}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_corner_cube", singleton=False, type="GeometryNodeTree" +) +def nodegroup_corner_cube(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVectorTranslation", "Location", (0.0000, 0.0000, 0.0000)), + ("NodeSocketVectorTranslation", "CenteringLoc", (0.5000, 0.5000, 0.0000)), + ("NodeSocketVectorTranslation", "Dimensions", (1.0000, 1.0000, 1.0000)), + ("NodeSocketFloat", "SupportingEdgeFac", 0.0000), + ("NodeSocketInt", "Vertices X", 4), + ("NodeSocketInt", "Vertices Y", 4), + ("NodeSocketInt", "Vertices Z", 4), + ], + ) + + cube = nw.new_node( + Nodes.MeshCube, + input_kwargs={ + "Size": group_input.outputs["Dimensions"], + "Vertices X": group_input.outputs["Vertices X"], + "Vertices Y": group_input.outputs["Vertices Y"], + "Vertices Z": group_input.outputs["Vertices Z"], + }, + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Vector": group_input.outputs["CenteringLoc"], + 9: (0.5000, 0.5000, 0.5000), + 10: (-0.5000, -0.5000, -0.5000), + }, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + multiply_add = nw.new_node( + 
Nodes.VectorMath, + input_kwargs={ + 0: map_range.outputs["Vector"], + 1: group_input.outputs["Dimensions"], + 2: group_input.outputs["Location"], + }, + attrs={"operation": "MULTIPLY_ADD"}, + ) + + transform_geometry = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": cube.outputs["Mesh"], + "Translation": multiply_add.outputs["Vector"], + }, + ) + + store_named_attribute = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": transform_geometry, + "Name": "UVMap", + 3: cube.outputs["UV Map"], + }, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": store_named_attribute}, + attrs={"is_active_output": True}, + ) + + +ARM_TYPE_SQUARE = 0 +ARM_TYPE_ROUND = 1 +ARM_TYPE_ANGULAR = 2 + + +@node_utils.to_nodegroup( + "nodegroup_sofa_geometry", singleton=False, type="GeometryNodeTree" +) +def nodegroup_sofa_geometry(nw: NodeWrangler): + # Code generated using version 2.6.5 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketVector", "Dimensions", (0.0000, 0.9000, 2.5000)), + ("NodeSocketVector", "Arm Dimensions", (0.0000, 0.0000, 0.0000)), + ("NodeSocketVector", "Back Dimensions", (0.0000, 0.0000, 0.0000)), + ("NodeSocketVector", "Seat Dimensions", (0.0000, 0.0000, 0.0000)), + ("NodeSocketVector", "Foot Dimensions", (0.0000, 0.0000, 0.0000)), + ("NodeSocketFloat", "Baseboard Height", 0.1300), + ("NodeSocketFloat", "Backrest Width", 0.1100), + ("NodeSocketFloat", "Seat Margin", 0.9700), + ("NodeSocketFloat", "Backrest Angle", -0.2000), + ("NodeSocketFloatFactor", "arm_width", 0.7000), + ("NodeSocketInt", "Arm Type", 0), + ("NodeSocketFloatFactor", "Arm_height", 0.7318), + ("NodeSocketFloatAngle", "arms_angle", 0.8727), + ("NodeSocketBool", "Footrest", False), + ("NodeSocketInt", "Count", 4), + ("NodeSocketFloat", "Scaling footrest", 1.5000), + ("NodeSocketInt", "Reflection", 0), + ("NodeSocketBool", "leg_type", False), + ("NodeSocketFloat", "leg_dimensions", 0.5000), + ("NodeSocketFloat", "leg_z", 1.0000), + ("NodeSocketInt", "leg_faces", 20), + ("NodeSocketBool", "Subdivide", True), + ], + ) + + multiply = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: group_input.outputs["Dimensions"], + 1: (0.0000, 0.5000, 0.0000), + }, + attrs={"operation": "MULTIPLY"}, + ) + + reroute = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": group_input.outputs["Arm Dimensions"]} + ) + + arm_cube = nw.new_node( + nodegroup_corner_cube().name, + input_kwargs={ + "Location": multiply.outputs["Vector"], + "CenteringLoc": (0.0000, 1.0000, 0.0000), + "Dimensions": reroute, + "Vertices Z": 10, + }, + label="ArmCube", + ) + + reroute_1 = nw.new_node(Nodes.Reroute, input_kwargs={"Input": arm_cube}) + + position = nw.new_node(Nodes.InputPosition) + + separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": position}) + + separate_xyz_1 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": reroute}) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": separate_xyz.outputs["Z"], + 1: -0.1000, + 2: separate_xyz_1.outputs["Z"], + 3: -0.1000, + 4: 0.2000, + }, + ) + + float_curve = nw.new_node( + Nodes.FloatCurve, + input_kwargs={ + "Factor": group_input.outputs["arm_width"], + "Value": map_range.outputs["Result"], + }, + ) + node_utils.assign_curve( + float_curve.mapping.curves[0], + [ + (0.0092, 0.7688), + (0.1011, 0.5937), + (0.1494, 0.4062), + (0.3954, 0.0781), + (1.0000, 
0.2187), + ], + ) + + separate_xyz_2 = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": multiply.outputs["Vector"]} + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["Y"], 1: separate_xyz_2.outputs["Y"]}, + attrs={"operation": "SUBTRACT"}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: float_curve, 1: subtract}, + attrs={"operation": "MULTIPLY"}, + ) + + position_1 = nw.new_node(Nodes.InputPosition) + + separate_xyz_14 = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": position_1} + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": separate_xyz_14.outputs["X"], + 1: -1.0000, + 2: 0.6000, + 3: 2.1000, + 4: -1.1000, + }, + ) + + float_curve_1 = nw.new_node( + Nodes.FloatCurve, + input_kwargs={ + "Factor": group_input.outputs["Arm_height"], + "Value": map_range_1.outputs["Result"], + }, + ) + node_utils.assign_curve( + float_curve_1.mapping.curves[0], + [(0.1341, 0.2094), (0.7386, 1.0000), (0.9682, 0.0781), (1.0000, 0.0000)], + ) + + separate_xyz_15 = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": (-2.9000, 3.3000, 0.0000)} + ) + + subtract_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_14.outputs["Z"], 1: separate_xyz_15.outputs["Z"]}, + attrs={"operation": "SUBTRACT"}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: float_curve_1, 1: subtract_1}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"Y": multiply_1, "Z": multiply_2} + ) + + vector_rotate = nw.new_node( + Nodes.VectorRotate, + input_kwargs={ + "Vector": combine_xyz, + "Axis": (1.0000, 0.0000, 0.0000), + "Angle": group_input.outputs["arms_angle"], + }, + ) + + set_position = nw.new_node( + Nodes.SetPosition, input_kwargs={"Geometry": reroute_1, "Offset": vector_rotate} + ) + + multiply_3 = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: group_input.outputs["Dimensions"], + 1: (0.0000, 0.5000, 0.0000), + }, + attrs={"operation": "MULTIPLY"}, + ) + + separate_xyz_3 = nw.new_node( + Nodes.SeparateXYZ, + input_kwargs={"Vector": group_input.outputs["Arm Dimensions"]}, + ) + + subtract_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_3.outputs["Z"], 1: separate_xyz_3.outputs["Y"]}, + attrs={"operation": "SUBTRACT"}, + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": separate_xyz_3.outputs["X"], + "Y": separate_xyz_3.outputs["Y"], + "Z": subtract_2, + }, + ) + + reroute_2 = nw.new_node(Nodes.Reroute, input_kwargs={"Input": combine_xyz_1}) + + arm_cube_1 = nw.new_node( + nodegroup_corner_cube().name, + input_kwargs={ + "Location": multiply_3.outputs["Vector"], + "CenteringLoc": (0.0000, 1.0000, 0.0000), + "Dimensions": reroute_2, + }, + label="ArmCube", + ) + + separate_xyz_4 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": reroute_2}) + + multiply_4 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_4.outputs["X"], 1: 1.0001}, + attrs={"operation": "MULTIPLY"}, + ) + + reroute_3 = nw.new_node(Nodes.Reroute, input_kwargs={"Input": multiply_4}) + + arm_cylinder = nw.new_node( + "GeometryNodeMeshCylinder", + input_kwargs={ + "Side Segments": 4, + "Radius": separate_xyz_4.outputs["Y"], + "Depth": reroute_3, + }, + attrs={"fill_type": "TRIANGLE_FAN"}, + ) + + arm_cylinder = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": arm_cylinder.outputs["Mesh"], + "Name": "UVMap", + 3: arm_cylinder.outputs["UV Map"], + }, + attrs={"data_type": "FLOAT_VECTOR"}, + 
) + + divide = nw.new_node( + Nodes.Math, + input_kwargs={0: reroute_3, 1: 2.0000}, + attrs={"operation": "DIVIDE"}, + ) + + separate_xyz_5 = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": multiply_3.outputs["Vector"]} + ) + + combine_xyz_2 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": divide, + "Y": separate_xyz_5.outputs["Y"], + "Z": separate_xyz_4.outputs["Z"], + }, + ) + + arm_cylinder = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": arm_cylinder, + "Translation": combine_xyz_2, + "Rotation": (0.0000, 1.5708, 0.0000), + }, + ) + + roundtop = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [arm_cube_1, arm_cylinder]} + ) + + square_or_round = nw.new_node( + Nodes.Switch, + input_kwargs={ + "Switch": nw.compare( + "EQUAL", group_input.outputs["Arm Type"], ARM_TYPE_SQUARE + ), + "False": roundtop, + "True": arm_cube_1, + }, + ) + + angular_or_squareround = nw.new_node( + Nodes.Switch, + input_kwargs={ + "Switch": nw.compare( + "EQUAL", group_input.outputs["Arm Type"], ARM_TYPE_ANGULAR + ), + "False": square_or_round, + "True": set_position, + }, + ) + + transform_geometry_1 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": angular_or_squareround, + "Scale": (1.0000, -1.0000, 1.0000), + }, + ) + + flip_faces = nw.new_node( + Nodes.FlipFaces, input_kwargs={"Mesh": transform_geometry_1} + ) + + join_geometry_2 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [flip_faces, angular_or_squareround]}, + ) + + separate_xyz_6 = nw.new_node( + Nodes.SeparateXYZ, + input_kwargs={"Vector": group_input.outputs["Back Dimensions"]}, + ) + + separate_xyz_7 = nw.new_node( + Nodes.SeparateXYZ, + input_kwargs={"Vector": group_input.outputs["Arm Dimensions"]}, + ) + + separate_xyz_8 = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": group_input.outputs["Dimensions"]} + ) + + multiply_add = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: separate_xyz_7.outputs["Y"], + 1: -2.0000, + 2: separate_xyz_8.outputs["Y"], + }, + attrs={"operation": "MULTIPLY_ADD"}, + ) + + combine_xyz_3 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": separate_xyz_6.outputs["X"], + "Y": multiply_add, + "Z": separate_xyz_6.outputs["Z"], + }, + ) + + back_board = nw.new_node( + nodegroup_corner_cube().name, + input_kwargs={ + "CenteringLoc": (0.0000, 0.5000, -1.0000), + "Dimensions": combine_xyz_3, + "Vertices X": 2, + "Vertices Y": 2, + "Vertices Z": 2, + }, + label="BackBoard", + ) + + join_geometry_3 = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [join_geometry_2, back_board]} + ) + + multiply_5 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: combine_xyz_3, 1: (1.0000, 0.0000, 0.0000)}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_add_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: group_input.outputs["Arm Dimensions"], + 1: (0.0000, -2.0000, 0.0000), + 2: group_input.outputs["Dimensions"], + }, + attrs={"operation": "MULTIPLY_ADD"}, + ) + + multiply_add_2 = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: group_input.outputs["Back Dimensions"], + 1: (-1.0000, 0.0000, 0.0000), + 2: multiply_add_1.outputs["Vector"], + }, + attrs={"operation": "MULTIPLY_ADD"}, + ) + + separate_xyz_9 = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": multiply_add_2.outputs["Vector"]} + ) + + combine_xyz_4 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": separate_xyz_9.outputs["X"], + "Y": separate_xyz_9.outputs["Y"], + "Z": group_input.outputs["Baseboard Height"], + }, + ) + + base_board 
= nw.new_node( + nodegroup_corner_cube().name, + input_kwargs={ + "Location": multiply_5.outputs["Vector"], + "CenteringLoc": (0.0000, 0.5000, -1.0000), + "Dimensions": combine_xyz_4, + "Vertices X": 2, + "Vertices Y": 2, + "Vertices Z": 2, + }, + label="BaseBoard", + ) + + reroute_13 = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": group_input.outputs["Count"]} + ) + + equal = nw.new_node( + Nodes.Compare, + input_kwargs={2: reroute_13, 3: 4}, + attrs={"operation": "EQUAL", "data_type": "INT"}, + ) + + reroute_5 = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": separate_xyz_9.outputs["Y"]} + ) + + separate_xyz_10 = nw.new_node( + Nodes.SeparateXYZ, + input_kwargs={"Vector": group_input.outputs["Seat Dimensions"]}, + ) + + divide_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: reroute_5, 1: separate_xyz_10.outputs["Y"]}, + attrs={"operation": "DIVIDE"}, + ) + + ceil = nw.new_node( + Nodes.Math, input_kwargs={0: divide_1}, attrs={"operation": "CEIL"} + ) + + combine_xyz_14 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": 1.0000, "Y": ceil, "Z": 1.0000} + ) + + divide_2 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: combine_xyz_4, 1: combine_xyz_14}, + attrs={"operation": "DIVIDE"}, + ) + + reroute_12 = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": divide_2.outputs["Vector"]} + ) + + base_board_1 = nw.new_node( + nodegroup_corner_cube().name, + input_kwargs={ + "Location": multiply_5.outputs["Vector"], + "CenteringLoc": (0.0000, 0.5000, -1.0000), + "Dimensions": reroute_12, + "Vertices X": 2, + "Vertices Y": 2, + "Vertices Z": 2, + }, + label="BaseBoard", + ) + + equal_1 = nw.new_node( + Nodes.Compare, + input_kwargs={0: 4.0000, 2: reroute_13, 3: 4}, + attrs={"operation": "EQUAL", "data_type": "INT"}, + ) + + switch_8 = nw.new_node( + Nodes.Switch, + input_kwargs={0: equal_1, 8: divide_2.outputs["Vector"], 9: combine_xyz_4}, + attrs={"input_type": "VECTOR"}, + ) + + separate_xyz_16 = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": switch_8.outputs[3]} + ) + + multiply_6 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_16.outputs["Y"], 1: 0.7000}, + attrs={"operation": "MULTIPLY"}, + ) + + grid_1 = nw.new_node( + Nodes.MeshGrid, + input_kwargs={"Size Y": multiply_6, "Vertices X": 1, "Vertices Y": 2}, + ) + + combine_xyz_18 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": 0.1000, + "Y": separate_xyz_16.outputs["Y"], + "Z": separate_xyz_16.outputs["Z"], + }, + ) + + subtract_3 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: switch_8.outputs[3], 1: combine_xyz_18}, + attrs={"operation": "SUBTRACT"}, + ) + + multiply_7 = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: group_input.outputs["Back Dimensions"], + 1: (1.0000, 0.0000, 0.0000), + }, + attrs={"operation": "MULTIPLY"}, + ) + + add = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: subtract_3.outputs["Vector"], 1: multiply_7.outputs["Vector"]}, + ) + + transform_geometry_10 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": grid_1.outputs["Mesh"], + "Translation": add.outputs["Vector"], + "Scale": (1.0000, 1.0000, 0.9000), + }, + ) + + cone = nw.new_node( + "GeometryNodeMeshCone", + input_kwargs={ + "Vertices": group_input.outputs["leg_faces"], + "Side Segments": 4, + "Radius Top": 0.0100, + "Radius Bottom": 0.0250, + "Depth": 0.0700, + }, + ) + + reroute_9 = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": group_input.outputs["leg_dimensions"]} + ) + + combine_xyz_17 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": reroute_9, + "Y": 
reroute_9, + "Z": group_input.outputs["leg_z"], + }, + ) + + transform_geometry_9 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": cone.outputs["Mesh"], + "Translation": (0.0000, 0.0000, 0.0100), + "Rotation": (0.0000, 3.1416, 0.0000), + "Scale": combine_xyz_17, + }, + ) + + foot_cube = nw.new_node( + nodegroup_corner_cube().name, + input_kwargs={ + "CenteringLoc": (0.5000, 0.5000, 0.9000), + "Dimensions": group_input.outputs["Foot Dimensions"], + }, + label="FootCube", + ) + + transform_geometry_12 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": foot_cube, "Scale": (0.5000, 0.8000, 0.8000)}, + ) + + switch_6 = nw.new_node( + Nodes.Switch, + input_kwargs={ + 1: group_input.outputs["leg_type"], + 14: transform_geometry_9, + 15: transform_geometry_12, + }, + ) + + transform_geometry_8 = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": switch_6.outputs[6]} + ) + + instance_on_points_1 = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={ + "Points": transform_geometry_10, + "Instance": transform_geometry_8, + "Scale": (1.0000, 1.0000, 1.2000), + }, + ) + + realize_instances_1 = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": instance_on_points_1} + ) + + join_geometry_10 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [base_board_1, realize_instances_1]}, + ) + + subtract_4 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: combine_xyz_14, 1: (1.0000, 1.0000, 1.0000)}, + attrs={"operation": "SUBTRACT"}, + ) + + multiply_8 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: subtract_4.outputs["Vector"], 1: (0.0000, 0.5000, 0.0000)}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_9 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: divide_2.outputs["Vector"], 1: multiply_8.outputs["Vector"]}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_16 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": 1.0000, "Y": group_input.outputs["Reflection"], "Z": 1.0000}, + ) + + multiply_10 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: multiply_9.outputs["Vector"], 1: combine_xyz_16}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_12 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": group_input.outputs["Scaling footrest"], + "Y": 1.0000, + "Z": 1.0000, + }, + ) + + transform_geometry_5 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": join_geometry_10, + "Translation": multiply_10.outputs["Vector"], + "Scale": combine_xyz_12, + }, + ) + + switch_2 = nw.new_node( + Nodes.Switch, + input_kwargs={1: group_input.outputs["Footrest"], 15: transform_geometry_5}, + ) + + combine_xyz_19 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": group_input.outputs["Scaling footrest"], + "Y": 1.3000, + "Z": 1.0000, + }, + ) + + transform_geometry_11 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": realize_instances_1, "Scale": combine_xyz_19}, + ) + + base_board_2 = nw.new_node( + nodegroup_corner_cube().name, + input_kwargs={ + "Location": multiply_5.outputs["Vector"], + "CenteringLoc": (0.0000, 0.5000, -1.0000), + "Dimensions": combine_xyz_4, + "Vertices X": 3, + "Vertices Y": 3, + "Vertices Z": 3, + }, + label="BaseBoard", + ) + + combine_xyz_13 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": group_input.outputs["Scaling footrest"], + "Y": 1.0000, + "Z": 1.0000, + }, + ) + + transform_geometry_6 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": base_board_2, "Scale": combine_xyz_13}, + ) + + join_geometry_11 = nw.new_node( + Nodes.JoinGeometry, 
+ input_kwargs={"Geometry": [transform_geometry_11, transform_geometry_6]}, + ) + + switch_4 = nw.new_node( + Nodes.Switch, + input_kwargs={1: group_input.outputs["Footrest"], 15: join_geometry_11}, + ) + + switch_5 = nw.new_node( + Nodes.Switch, + input_kwargs={1: equal, 14: switch_2.outputs[6], 15: switch_4.outputs[6]}, + ) + + join_geometry_4 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [join_geometry_3, base_board, switch_5.outputs[6]]}, + ) + + grid = nw.new_node(Nodes.MeshGrid, input_kwargs={"Vertices X": 2, "Vertices Y": 2}) + + multiply_11 = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: group_input.outputs["Dimensions"], + 1: (0.5000, 0.0000, 0.0000), + }, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_12 = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: group_input.outputs["Dimensions"], + 1: (1.0000, 1.0000, 0.0000), + }, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_13 = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: group_input.outputs["Foot Dimensions"], + 1: (2.5000, 2.5000, 0.0000), + }, + attrs={"operation": "MULTIPLY"}, + ) + + subtract_5 = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: multiply_12.outputs["Vector"], + 1: multiply_13.outputs["Vector"], + }, + attrs={"operation": "SUBTRACT"}, + ) + + transform_geometry_2 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": grid.outputs["Mesh"], + "Translation": multiply_11.outputs["Vector"], + "Scale": subtract_5.outputs["Vector"], + }, + ) + + instance_on_points = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={"Points": transform_geometry_2, "Instance": transform_geometry_8}, + ) + + realize_instances = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": instance_on_points} + ) + + join_geometry_5 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [join_geometry_4, realize_instances]}, + ) + + reroute_10 = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": group_input.outputs["Count"]} + ) + + equal_2 = nw.new_node( + Nodes.Compare, + input_kwargs={1: 4.0000, 2: reroute_10, 3: 4}, + attrs={"operation": "EQUAL", "data_type": "INT"}, + ) + + reroute_4 = nw.new_node(Nodes.Reroute, input_kwargs={"Input": combine_xyz_4}) + + multiply_14 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: reroute_4, 1: (0.0000, -0.5000, 1.0000)}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_15 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: reroute_4, 1: (0.0000, 0.5000, 1.0000)}, + attrs={"operation": "MULTIPLY"}, + ) + + equal_3 = nw.new_node( + Nodes.Compare, + input_kwargs={1: 4.0000, 2: reroute_10, 3: 4}, + attrs={"operation": "EQUAL", "data_type": "INT"}, + ) + + reroute_11 = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": group_input.outputs["Reflection"]} + ) + + switch_7 = nw.new_node( + Nodes.Switch, + input_kwargs={0: equal_3, 4: reroute_11, 5: 1}, + attrs={"input_type": "INT"}, + ) + + combine_xyz_15 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": 1.0000, "Y": switch_7.outputs[1], "Z": 1.1000}, + ) + + multiply_16 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: multiply_15.outputs["Vector"], 1: combine_xyz_15}, + attrs={"operation": "MULTIPLY"}, + ) + + divide_3 = nw.new_node( + Nodes.Math, input_kwargs={0: reroute_5, 1: ceil}, attrs={"operation": "DIVIDE"} + ) + + combine_xyz_5 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": separate_xyz_10.outputs["X"], + "Y": divide_3, + "Z": separate_xyz_10.outputs["Z"], + }, + ) + + reroute_6 = nw.new_node(Nodes.Reroute, 
input_kwargs={"Input": combine_xyz_5}) + + multiply_17 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: reroute_6, 1: combine_xyz_15}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_18 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: combine_xyz_5, 1: (1.0000, 1.0300, 1.0000)}, + attrs={"operation": "MULTIPLY"}, + ) + + seat_cushion = nw.new_node( + nodegroup_corner_cube().name, + input_kwargs={ + "CenteringLoc": (0.0000, 0.5000, 0.0000), + "Dimensions": multiply_18.outputs["Vector"], + "Vertices X": 2, + "Vertices Y": 2, + "Vertices Z": 2, + }, + label="SeatCushion", + ) + + upwards_part = nw.new_node( + Nodes.Compare, + input_kwargs={"A": nw.new_node(Nodes.Index), "B": 2}, + attrs={"data_type": "INT", "operation": "EQUAL"}, + ) + seat_cushion = tagging.tag_nodegroup( + nw, seat_cushion, t.Subpart.SupportSurface, selection=upwards_part + ) + + index = nw.new_node(Nodes.Index) + + equal_4 = nw.new_node( + Nodes.Compare, + input_kwargs={2: index, 3: 1}, + attrs={"operation": "EQUAL", "data_type": "INT"}, + ) + + store_named_attribute_1 = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": seat_cushion, + "Selection": equal_4, + "Name": "TAG_support", + 6: True, + }, + attrs={"data_type": "BOOLEAN", "domain": "FACE"}, + ) + + value = nw.new_node(Nodes.Value) + value.outputs[0].default_value = 1.0000 + + store_named_attribute_2 = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": store_named_attribute_1, + "Selection": value, + "Name": "TAG_cushion", + 6: True, + }, + attrs={"data_type": "BOOLEAN", "domain": "FACE"}, + ) + + combine_xyz_6 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": group_input.outputs["Seat Margin"], + "Y": group_input.outputs["Seat Margin"], + "Z": 1.0000, + }, + ) + + transform_geometry_3 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": store_named_attribute_2, "Scale": combine_xyz_6}, + ) + + combine_xyz_11 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": group_input.outputs["Scaling footrest"], + "Y": 1.0000, + "Z": 1.1000, + }, + ) + + transform_geometry_7 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": transform_geometry_3, "Scale": combine_xyz_11}, + ) + + nodegroup_array_fill_line_002 = nw.new_node( + nodegroup_array_fill_line().name, + input_kwargs={ + "Line Start": multiply_14.outputs["Vector"], + "Line End": multiply_16.outputs["Vector"], + "Instance Dimensions": multiply_17.outputs["Vector"], + "Count": reroute_10, + "Instance": transform_geometry_7, + }, + ) + + separate_xyz_17 = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": multiply_16.outputs["Vector"]} + ) + + combine_xyz_21 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"Z": separate_xyz_17.outputs["Z"]} + ) + + reroute_14 = nw.new_node(Nodes.Reroute, input_kwargs={"Input": ceil}) + + combine_xyz_20 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": 1.0000, "Y": reroute_14, "Z": 1.0000} + ) + + transform_geometry_13 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": transform_geometry_7, "Scale": combine_xyz_20}, + ) + + nodegroup_array_fill_line_002_1 = nw.new_node( + nodegroup_array_fill_line().name, + input_kwargs={ + "Line End": combine_xyz_21, + "Count": 1, + "Instance": transform_geometry_13, + }, + ) + + switch_9 = nw.new_node( + Nodes.Switch, + input_kwargs={ + 1: equal_2, + 14: nodegroup_array_fill_line_002, + 15: nodegroup_array_fill_line_002_1, + }, + ) + + switch_3 = nw.new_node( + Nodes.Switch, + input_kwargs={1: group_input.outputs["Footrest"], 15: 
switch_9.outputs[6]}, + ) + + nodegroup_array_fill_line_002_2 = nw.new_node( + nodegroup_array_fill_line().name, + input_kwargs={ + "Line Start": multiply_14.outputs["Vector"], + "Line End": multiply_15.outputs["Vector"], + "Instance Dimensions": reroute_6, + "Count": reroute_14, + "Instance": transform_geometry_3, + }, + ) + + join_geometry_9 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={ + "Geometry": [switch_3.outputs[6], nodegroup_array_fill_line_002_2] + }, + ) + + subdivide_mesh = nw.new_node( + Nodes.SubdivideMesh, input_kwargs={"Mesh": join_geometry_9, "Level": 2} + ) + + separate_xyz_11 = nw.new_node( + Nodes.SeparateXYZ, + input_kwargs={"Vector": group_input.outputs["Seat Dimensions"]}, + ) + + combine_xyz_7 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": group_input.outputs["Backrest Width"], + "Z": separate_xyz_11.outputs["Z"], + }, + ) + + add_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: multiply_14.outputs["Vector"], 1: combine_xyz_7}, + ) + + add_2 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: multiply_15.outputs["Vector"], 1: combine_xyz_7}, + ) + + separate_xyz_12 = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": group_input.outputs["Dimensions"]} + ) + + subtract_6 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_12.outputs["Z"], 1: separate_xyz_11.outputs["Z"]}, + attrs={"operation": "SUBTRACT"}, + ) + + subtract_7 = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract_6, 1: group_input.outputs["Baseboard Height"]}, + attrs={"operation": "SUBTRACT"}, + ) + + combine_xyz_8 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": subtract_7, + "Y": divide_3, + "Z": group_input.outputs["Backrest Width"], + }, + ) + + seat_cushion_1 = nw.new_node( + nodegroup_corner_cube().name, + input_kwargs={ + "CenteringLoc": (0.1000, 0.5000, 1.0000), + "Dimensions": combine_xyz_8, + "Vertices X": 2, + "Vertices Y": 2, + "Vertices Z": 2, + }, + label="SeatCushion", + ) + + extrude_mesh = nw.new_node( + Nodes.ExtrudeMesh, input_kwargs={"Mesh": seat_cushion_1, "Offset Scale": 0.0300} + ) + + scale_elements = nw.new_node( + Nodes.ScaleElements, + input_kwargs={ + "Geometry": extrude_mesh.outputs["Mesh"], + "Selection": extrude_mesh.outputs["Top"], + "Scale": 0.6000, + }, + ) + + subdivision_surface_1 = nw.new_node( + Nodes.SubdivisionSurface, input_kwargs={"Mesh": scale_elements} + ) + + random_value = nw.new_node(Nodes.RandomValue, attrs={"data_type": "FLOAT_VECTOR"}) + + store_named_attribute_3 = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": subdivision_surface_1, + "Name": "UVMap", + 3: random_value.outputs["Value"], + }, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + multiply_19 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Backrest Width"], 1: -1.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + separate_xyz_13 = nw.new_node( + Nodes.SeparateXYZ, + input_kwargs={"Vector": group_input.outputs["Back Dimensions"]}, + ) + + add_3 = nw.new_node( + Nodes.Math, input_kwargs={0: separate_xyz_13.outputs["X"], 1: 0.1000} + ) + + add_4 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_19, 1: add_3}) + + combine_xyz_9 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"X": add_4}) + + add_5 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["Backrest Angle"], 1: -1.5708} + ) + + combine_xyz_10 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Y": add_5}) + + transform_geometry_4 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": store_named_attribute_3, + 
"Translation": combine_xyz_9, + "Rotation": combine_xyz_10, + "Scale": combine_xyz_6, + }, + ) + + nodegroup_array_fill_line_003 = nw.new_node( + nodegroup_array_fill_line().name, + input_kwargs={ + "Line Start": add_1.outputs["Vector"], + "Line End": add_2.outputs["Vector"], + "Instance Dimensions": reroute_6, + "Count": ceil, + "Instance": transform_geometry_4, + }, + ) + + join_geometry_6 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [subdivide_mesh, nodegroup_array_fill_line_003]}, + ) + + join_geometry_7 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={ + "Geometry": [join_geometry_5, realize_instances, join_geometry_6] + }, + ) + + subdivide_mesh_1 = nw.new_node( + Nodes.SubdivideMesh, input_kwargs={"Mesh": join_geometry_5, "Level": 2} + ) + + join_geometry_8 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={ + "Geometry": [subdivide_mesh_1, realize_instances, join_geometry_6] + }, + ) + + subdivision_surface_2 = nw.new_node( + Nodes.SubdivisionSurface, input_kwargs={"Mesh": join_geometry_8, "Level": 1} + ) + + switch_1 = nw.new_node( + Nodes.Switch, + input_kwargs={1: True, 14: join_geometry_7, 15: subdivision_surface_2}, + ) + switch = nw.new_node( + Nodes.Switch, + input_kwargs={ + 1: group_input.outputs["Subdivide"], + 14: join_geometry_7, + 15: subdivision_surface_2, + }, + ) + + bounding_box = nw.new_node( + nodegroup_corner_cube().name, + input_kwargs={ + "CenteringLoc": (0.0000, 0.5000, -1.0000), + "Dimensions": group_input.outputs["Dimensions"], + "Vertices X": 2, + "Vertices Y": 2, + "Vertices Z": 2, + }, + label="BoundingBox", + ) + + reroute_7 = nw.new_node(Nodes.Reroute, input_kwargs={"Input": bounding_box}) + + reroute_8 = nw.new_node(Nodes.Reroute, input_kwargs={"Input": reroute_7}) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": switch_1.outputs[6], "BoundingBox": reroute_8}, + attrs={"is_active_output": True}, + ) + + +def sofa_parameter_distribution(dimensions=None): + if dimensions is None: + dimensions = ( + uniform(0.95, 1.1), + clip_gaussian(1.75, 0.75, 0.9, 3), + uniform(0.69, 0.97), + ) + + return { + "Dimensions": dimensions, + "Arm Dimensions": ( + uniform(1, 1), + uniform(0.06, 0.15), + uniform(0.5, 0.75), + ), + "Back Dimensions": (uniform(0.15, 0.25), 0.0000, uniform(0.5, 0.75)), + "Seat Dimensions": (dimensions[0], uniform(0.7, 1), uniform(0.15, 0.3)), + "Foot Dimensions": (uniform(0.07, 0.25), 0.06, 0.06), + "Baseboard Height": uniform(0.05, 0.09), + "Backrest Width": uniform(0.1, 0.2), + "Seat Margin": uniform(0.9700, 1), + "Backrest Angle": uniform(-0.15, -0.5), + "Arm Type": np.random.choice( + [ARM_TYPE_SQUARE, ARM_TYPE_ROUND, ARM_TYPE_ANGULAR], p=[0.4, 0.2, 0.4] + ), + "arm_width": uniform(0.6, 0.9), + "Arm_height": uniform(0.7, 1.0), + "arms_angle": uniform(0.0, 1.08), + "Footrest": True if uniform() > 0.5 and dimensions[1] > 2 else False, + "Count": 1 if uniform() > 0.2 else 4, + "Scaling footrest": uniform(1.3, 1.6), + "Reflection": 1 if uniform() > 0.5 else -1, + "leg_type": True if uniform() > 0.5 else False, + "leg_dimensions": uniform(0.4, 0.9), + "leg_z": uniform(1.1, 2.5), + "leg_faces": uniform(4, 25), + } + + +class SofaFactory(AssetFactory): + def __init__(self, factory_seed): + super().__init__(factory_seed) + with FixedSeed(factory_seed): + self.params = sofa_parameter_distribution() + # from infinigen.assets.scatters.clothes import ClothesCover + # self.clothes_scatter = ClothesCover(factory_fn=blanket.BlanketFactory, width=log_uniform(1, 1.5), + # size=uniform(.8, 1.2)) if 
uniform() < .3 else NoApply() + materials = AssetList["SofaFactory"]() + self.sofa_fabric = materials["sofa_fabric"].assign_material() + + def create_placeholder(self, **_): + obj = butil.spawn_vert() + butil.modify_mesh( + obj, + "NODES", + node_group=nodegroup_sofa_geometry(), + ng_inputs={ + **self.params, + }, + apply=True, + ) + tagging.tag_system.relabel_obj(obj) + surface.add_material(obj, self.sofa_fabric) + return obj + + def create_asset(self, i, placeholder, face_size, **_): + hipoly = butil.copy(placeholder, keep_materials=True) + + butil.modify_mesh(hipoly, "SUBSURF", levels=1, apply=True) + + with butil.SelectObjects(hipoly): + bpy.ops.object.shade_smooth() + + return hipoly + + +class ArmChairFactory(SofaFactory): + def __init__(self, factory_seed): + super().__init__(factory_seed) + with FixedSeed(factory_seed): + dimensions = (uniform(0.8, 1), uniform(0.9, 1.1), uniform(0.69, 0.97)) + self.params = sofa_parameter_distribution(dimensions=dimensions) diff --git a/infinigen/assets/shelves/__init__.py b/infinigen/assets/objects/shelves/__init__.py similarity index 75% rename from infinigen/assets/shelves/__init__.py rename to infinigen/assets/objects/shelves/__init__.py index 7fea19c8b..fa9b44b14 100644 --- a/infinigen/assets/shelves/__init__.py +++ b/infinigen/assets/objects/shelves/__init__.py @@ -1,10 +1,10 @@ -from .simple_desk import SimpleDeskFactory, SidetableDeskFactory -from .simple_bookcase import SimpleBookcaseFactory from .cell_shelf import CellShelfFactory, TVStandFactory -from .triangle_shelf import TriangleShelfFactory -from .large_shelf import LargeShelfFactory +from .countertop import CountertopFactory from .doors import CabinetDoorBaseFactory -from .single_cabinet import SingleCabinetFactory from .kitchen_cabinet import KitchenCabinetFactory -from .kitchen_space import KitchenSpaceFactory, KitchenIslandFactory -from .countertop import CountertopFactory +from .kitchen_space import KitchenIslandFactory, KitchenSpaceFactory +from .large_shelf import LargeShelfFactory +from .simple_bookcase import SimpleBookcaseFactory +from .simple_desk import SidetableDeskFactory, SimpleDeskFactory +from .single_cabinet import SingleCabinetFactory +from .triangle_shelf import TriangleShelfFactory diff --git a/infinigen/assets/objects/shelves/cabinet.py b/infinigen/assets/objects/shelves/cabinet.py new file mode 100644 index 000000000..93527d63f --- /dev/null +++ b/infinigen/assets/objects/shelves/cabinet.py @@ -0,0 +1,1592 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
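+#
+# Overview: this module assembles cabinet assets from geometry-node subgraphs.
+# geometry_door_nodes builds a door panel from ramped edge frames, an optional
+# mid board and a knob handle; geometry_cabinet_nodes joins the LargeShelf-based
+# frame, the door(s) and the hinge/attachment hardware; CabinetBaseFactory and
+# CabinetFactory orchestrate shelf, door and material parameters into the final asset.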
+ +# Authors: Beining Han + +import bpy +import numpy as np +from numpy.random import normal, randint, uniform + +from infinigen.assets.materials.shelf_shaders import get_shelf_material +from infinigen.assets.objects.shelves.large_shelf import LargeShelfBaseFactory +from infinigen.core import surface +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.util import blender as butil +from infinigen.core.util.math import FixedSeed + + +@node_utils.to_nodegroup( + "nodegroup_node_group", singleton=False, type="GeometryNodeTree" +) +def nodegroup_node_group(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + cube = nw.new_node(Nodes.MeshCube, input_kwargs={"Size": (0.0120, 0.00060, 0.0400)}) + + cylinder = nw.new_node( + "GeometryNodeMeshCylinder", + input_kwargs={"Vertices": 64, "Radius": 0.0100, "Depth": 0.00050}, + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": cylinder.outputs["Mesh"], + "Translation": (0.0050, 0.0000, 0.0000), + "Rotation": (1.5708, 0.0000, 0.0000), + }, + ) + + cube_1 = nw.new_node( + Nodes.MeshCube, input_kwargs={"Size": (0.0200, 0.0006, 0.0120)} + ) + + transform_1 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": cube_1, "Translation": (0.0080, 0.0000, 0.0000)}, + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [cube, transform, transform_1]} + ) + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "attach_height", 0.1000), + ("NodeSocketFloat", "door_width", 0.5000), + ], + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["door_width"]}, + attrs={"operation": "MULTIPLY"}, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply, 1: 0.0181}, + attrs={"operation": "SUBTRACT"}, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": subtract, "Z": group_input.outputs["attach_height"]}, + ) + + transform_2 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": join_geometry_1, "Translation": combine_xyz}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": transform_2}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_knob_handle", singleton=False, type="GeometryNodeTree" +) +def nodegroup_knob_handle(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloatDistance", "Radius", 0.0100), + ("NodeSocketFloat", "thickness_1", 0.5000), + ("NodeSocketFloat", "thickness_2", 0.5000), + ("NodeSocketFloat", "length", 0.5000), + ("NodeSocketFloat", "knob_mid_height", 0.0000), + ("NodeSocketFloat", "edge_width", 0.5000), + ("NodeSocketFloat", "door_width", 0.5000), + ], + ) + + add = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["thickness_2"], + 1: group_input.outputs["thickness_1"], + }, + ) + + add_1 = nw.new_node( + Nodes.Math, input_kwargs={0: add, 1: group_input.outputs["length"]} + ) + + cylinder = nw.new_node( + "GeometryNodeMeshCylinder", + input_kwargs={ + "Vertices": 64, + "Radius": group_input.outputs["Radius"], + "Depth": add_1, + }, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["door_width"], + 1: group_input.outputs["edge_width"], + }, + attrs={"operation": 
"SUBTRACT"}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract, 1: -0.5000}, + attrs={"operation": "MULTIPLY"}, + ) + + add_2 = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: -0.005}) + + multiply_1 = nw.new_node( + Nodes.Math, input_kwargs={0: add_1}, attrs={"operation": "MULTIPLY"} + ) + + combine_xyz_6 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": add_2, + "Y": multiply_1, + "Z": group_input.outputs["knob_mid_height"], + }, + ) + + transform_6 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": cylinder.outputs["Mesh"], + "Translation": combine_xyz_6, + "Rotation": (1.5708, 0.0000, 0.0000), + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": transform_6}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_mid_board", singleton=False, type="GeometryNodeTree" +) +def nodegroup_mid_board(nw: NodeWrangler, **kwargs): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "height", 0.5000), + ("NodeSocketFloat", "thickness", 0.5000), + ("NodeSocketFloat", "width", 0.5000), + ], + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["width"], 1: -0.0001} + ) + + add_1 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["thickness"], 1: 0.0000} + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["height"]}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_k = nw.new_node( + Nodes.Math, input_kwargs={0: add_1, 1: 0.5000}, attrs={"operation": "MULTIPLY"} + ) + + add_k = nw.new_node(Nodes.Math, input_kwargs={0: multiply_k, 1: 0.004}) + + add_2 = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: -0.0001}) + + combine_xyz_3 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": add, "Y": add_1, "Z": add_2} + ) + + cube = nw.new_node( + Nodes.MeshCube, + input_kwargs={ + "Size": combine_xyz_3, + "Vertices X": 5, + "Vertices Y": 5, + "Vertices Z": 5, + }, + ) + + multiply_1 = nw.new_node( + Nodes.Math, input_kwargs={0: multiply}, attrs={"operation": "MULTIPLY"} + ) + + combine_xyz_4 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"Y": add_k, "Z": multiply_1} + ) + + transform_4 = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": cube, "Translation": combine_xyz_4} + ) + + set_material = nw.new_node( + Nodes.SetMaterial, + input_kwargs={"Geometry": transform_4, "Material": kwargs["material"][0]}, + ) + + combine_xyz_7 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": add, "Y": add_1, "Z": add_2} + ) + + cube_1 = nw.new_node( + Nodes.MeshCube, + input_kwargs={ + "Size": combine_xyz_7, + "Vertices X": 5, + "Vertices Y": 5, + "Vertices Z": 5, + }, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply, 1: 1.5000}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_8 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"Y": add_k, "Z": multiply_2} + ) + + transform_7 = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": cube_1, "Translation": combine_xyz_8} + ) + + set_material_1 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={"Geometry": transform_7, "Material": kwargs["material"][1]}, + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [set_material, set_material_1]} + ) + + realize_instances = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": join_geometry_1} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, 
+ input_kwargs={"Geometry": realize_instances, "mid_height": multiply}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_mid_board_001", singleton=False, type="GeometryNodeTree" +) +def nodegroup_mid_board_001(nw: NodeWrangler, **kwargs): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "height", 0.5000), + ("NodeSocketFloat", "thickness", 0.5000), + ("NodeSocketFloat", "width", 0.5000), + ], + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["width"], 1: -0.0001} + ) + + add_1 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["thickness"], 1: 0.0000} + ) + + multiply_k = nw.new_node( + Nodes.Math, input_kwargs={0: add_1, 1: 0.5000}, attrs={"operation": "MULTIPLY"} + ) + + add_k = nw.new_node(Nodes.Math, input_kwargs={0: multiply_k, 1: 0.004}) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["height"], 1: 1.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + add_2 = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: -0.0001}) + + combine_xyz_3 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": add, "Y": add_1, "Z": add_2} + ) + + cube = nw.new_node( + Nodes.MeshCube, + input_kwargs={ + "Size": combine_xyz_3, + "Vertices X": 5, + "Vertices Y": 5, + "Vertices Z": 5, + }, + ) + + multiply_1 = nw.new_node( + Nodes.Math, input_kwargs={0: multiply}, attrs={"operation": "MULTIPLY"} + ) + + combine_xyz_4 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"Y": add_k, "Z": multiply_1} + ) + + transform_4 = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": cube, "Translation": combine_xyz_4} + ) + + set_material = nw.new_node( + Nodes.SetMaterial, + input_kwargs={"Geometry": transform_4, "Material": kwargs["material"][0]}, + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": set_material} + ) + + realize_instances = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": join_geometry_1} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": realize_instances, "mid_height": multiply}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_double_rampled_edge", singleton=False, type="GeometryNodeTree" +) +def nodegroup_double_rampled_edge(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "height", 0.5000), + ("NodeSocketFloat", "thickness_2", 0.5000), + ("NodeSocketFloat", "width", 0.5000), + ("NodeSocketFloat", "thickness_1", 0.5000), + ("NodeSocketFloat", "ramp_angle", 0.5000), + ], + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["height"], 1: 0.0000} + ) + + combine_xyz_10 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": add}) + + curve_line = nw.new_node(Nodes.CurveLine, input_kwargs={"End": combine_xyz_10}) + + curve_circle = nw.new_node( + Nodes.CurveCircle, input_kwargs={"Resolution": 3, "Radius": 0.0100} + ) + + endpoint_selection = nw.new_node( + Nodes.EndpointSelection, input_kwargs={"End Size": 0} + ) + + add_1 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["width"], 1: 0.0000} + ) + + add_2 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["ramp_angle"], 1: 0.0000} + ) + + tangent = nw.new_node( + Nodes.Math, input_kwargs={0: add_2}, attrs={"operation": "TANGENT"} + ) + + add_3 = 
nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["thickness_2"], 1: 0.0000} + ) + + multiply = nw.new_node( + Nodes.Math, input_kwargs={0: tangent, 1: add_3}, attrs={"operation": "MULTIPLY"} + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: 2.0000, 1: multiply}, + attrs={"operation": "MULTIPLY"}, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: add_1, 1: multiply_1}, + attrs={"operation": "SUBTRACT"}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, input_kwargs={0: subtract}, attrs={"operation": "MULTIPLY"} + ) + + multiply_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_2, 1: -1.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + add_4 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["thickness_1"], 1: 0.0000} + ) + + combine_xyz_7 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": multiply_3, "Y": add_4} + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": curve_circle.outputs["Curve"], + "Selection": endpoint_selection, + "Position": combine_xyz_7, + }, + ) + + endpoint_selection_1 = nw.new_node( + Nodes.EndpointSelection, input_kwargs={"Start Size": 0} + ) + + add_5 = nw.new_node(Nodes.Math, input_kwargs={0: add_4, 1: add_3}) + + combine_xyz_8 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": multiply_3, "Y": add_5} + ) + + set_position_1 = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": set_position, + "Selection": endpoint_selection_1, + "Position": combine_xyz_8, + }, + ) + + index = nw.new_node(Nodes.Index) + + less_than = nw.new_node( + Nodes.Math, input_kwargs={0: index, 1: 1.0100}, attrs={"operation": "LESS_THAN"} + ) + + greater_than = nw.new_node( + Nodes.Math, + input_kwargs={0: index, 1: 0.9900}, + attrs={"operation": "GREATER_THAN"}, + ) + + op_and = nw.new_node( + Nodes.BooleanMath, input_kwargs={0: less_than, 1: greater_than} + ) + + multiply_4 = nw.new_node( + Nodes.Math, input_kwargs={0: add_1}, attrs={"operation": "MULTIPLY"} + ) + + multiply_5 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_4, 1: -1.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_9 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": multiply_5, "Y": add_4} + ) + + set_position_2 = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": set_position_1, + "Selection": op_and, + "Position": combine_xyz_9, + }, + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": curve_line, + "Profile Curve": set_position_2, + "Fill Caps": True, + }, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": add_1, "Y": add_4, "Z": add} + ) + + cube = nw.new_node(Nodes.MeshCube, input_kwargs={"Size": combine_xyz}) + + multiply_6 = nw.new_node( + Nodes.Math, input_kwargs={0: add_4}, attrs={"operation": "MULTIPLY"} + ) + + combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Y": multiply_6}) + + transform = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": cube, "Translation": combine_xyz_2} + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": subtract, "Y": add_3, "Z": add} + ) + + cube_1 = nw.new_node(Nodes.MeshCube, input_kwargs={"Size": combine_xyz_1}) + + multiply_7 = nw.new_node( + Nodes.Math, input_kwargs={0: add_3}, attrs={"operation": "MULTIPLY"} + ) + + add_6 = nw.new_node(Nodes.Math, input_kwargs={0: add_4, 1: multiply_7}) + + combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Y": add_6}) + + transform_1 = nw.new_node( + Nodes.Transform, 
input_kwargs={"Geometry": cube_1, "Translation": combine_xyz_3} + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [transform, transform_1]} + ) + + multiply_8 = nw.new_node( + Nodes.Math, input_kwargs={0: add}, attrs={"operation": "MULTIPLY"} + ) + + combine_xyz_11 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply_8}) + + transform_4 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": join_geometry, "Translation": combine_xyz_11}, + ) + + combine_xyz_12 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": add}) + + curve_line_1 = nw.new_node(Nodes.CurveLine, input_kwargs={"End": combine_xyz_12}) + + transform_2 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": set_position_2, "Scale": (-1.0000, 1.0000, 1.0000)}, + ) + + curve_to_mesh_1 = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": curve_line_1, + "Profile Curve": transform_2, + "Fill Caps": True, + }, + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [curve_to_mesh, transform_4, curve_to_mesh_1]}, + ) + + merge_by_distance = nw.new_node( + Nodes.MergeByDistance, + input_kwargs={"Geometry": join_geometry_1, "Distance": 0.0001}, + ) + + realize_instances = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": merge_by_distance} + ) + + subdivide_mesh = nw.new_node( + Nodes.SubdivideMesh, input_kwargs={"Mesh": realize_instances, "Level": 4} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": subdivide_mesh}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_ramped_edge", singleton=False, type="GeometryNodeTree" +) +def nodegroup_ramped_edge(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "height", 0.5000), + ("NodeSocketFloat", "thickness_2", 0.5000), + ("NodeSocketFloat", "width", 0.5000), + ("NodeSocketFloat", "thickness_1", 0.5000), + ("NodeSocketFloat", "ramp_angle", 0.5000), + ], + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["height"], 1: 0.0000} + ) + + combine_xyz_10 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": add}) + + curve_line = nw.new_node(Nodes.CurveLine, input_kwargs={"End": combine_xyz_10}) + + curve_circle = nw.new_node( + Nodes.CurveCircle, input_kwargs={"Resolution": 3, "Radius": 0.0100} + ) + + endpoint_selection = nw.new_node( + Nodes.EndpointSelection, input_kwargs={"End Size": 0} + ) + + add_1 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["width"], 1: 0.0000} + ) + + multiply = nw.new_node( + Nodes.Math, input_kwargs={0: add_1}, attrs={"operation": "MULTIPLY"} + ) + + add_2 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["ramp_angle"], 1: 0.0000} + ) + + tangent = nw.new_node( + Nodes.Math, input_kwargs={0: add_2}, attrs={"operation": "TANGENT"} + ) + + add_3 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["thickness_2"], 1: 0.0000} + ) + + multiply_1 = nw.new_node( + Nodes.Math, input_kwargs={0: tangent, 1: add_3}, attrs={"operation": "MULTIPLY"} + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: add_1, 1: multiply_1}, + attrs={"operation": "SUBTRACT"}, + ) + + subtract_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply, 1: subtract}, + attrs={"operation": "SUBTRACT"}, + ) + + add_4 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["thickness_1"], 1: 0.0000} + ) + + 
combine_xyz_7 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": subtract_1, "Y": add_4} + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": curve_circle.outputs["Curve"], + "Selection": endpoint_selection, + "Position": combine_xyz_7, + }, + ) + + endpoint_selection_1 = nw.new_node( + Nodes.EndpointSelection, input_kwargs={"Start Size": 0} + ) + + add_5 = nw.new_node(Nodes.Math, input_kwargs={0: add_4, 1: add_3}) + + combine_xyz_8 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": subtract_1, "Y": add_5} + ) + + set_position_1 = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": set_position, + "Selection": endpoint_selection_1, + "Position": combine_xyz_8, + }, + ) + + index = nw.new_node(Nodes.Index) + + less_than = nw.new_node( + Nodes.Math, input_kwargs={0: index, 1: 1.0100}, attrs={"operation": "LESS_THAN"} + ) + + greater_than = nw.new_node( + Nodes.Math, + input_kwargs={0: index, 1: 0.9900}, + attrs={"operation": "GREATER_THAN"}, + ) + + op_and = nw.new_node( + Nodes.BooleanMath, input_kwargs={0: less_than, 1: greater_than} + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply, 1: -1.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_9 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": multiply_2, "Y": add_4} + ) + + set_position_2 = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": set_position_1, + "Selection": op_and, + "Position": combine_xyz_9, + }, + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": curve_line, + "Profile Curve": set_position_2, + "Fill Caps": True, + }, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": add_1, "Y": add_4, "Z": add} + ) + + cube = nw.new_node(Nodes.MeshCube, input_kwargs={"Size": combine_xyz}) + + multiply_3 = nw.new_node( + Nodes.Math, input_kwargs={0: add_4}, attrs={"operation": "MULTIPLY"} + ) + + combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Y": multiply_3}) + + transform = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": cube, "Translation": combine_xyz_2} + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": subtract, "Y": add_3, "Z": add} + ) + + cube_1 = nw.new_node(Nodes.MeshCube, input_kwargs={"Size": combine_xyz_1}) + + multiply_4 = nw.new_node( + Nodes.Math, input_kwargs={0: multiply_1}, attrs={"operation": "MULTIPLY"} + ) + + multiply_5 = nw.new_node( + Nodes.Math, input_kwargs={0: add_3}, attrs={"operation": "MULTIPLY"} + ) + + add_6 = nw.new_node(Nodes.Math, input_kwargs={0: add_4, 1: multiply_5}) + + combine_xyz_3 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": multiply_4, "Y": add_6} + ) + + transform_1 = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": cube_1, "Translation": combine_xyz_3} + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [transform, transform_1]} + ) + + multiply_6 = nw.new_node( + Nodes.Math, input_kwargs={0: add}, attrs={"operation": "MULTIPLY"} + ) + + combine_xyz_11 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply_6}) + + transform_4 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": join_geometry, "Translation": combine_xyz_11}, + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [curve_to_mesh, transform_4]} + ) + + merge_by_distance = nw.new_node( + Nodes.MergeByDistance, + input_kwargs={"Geometry": join_geometry_1, "Distance": 0.0001}, + ) + + realize_instances = nw.new_node( + 
Nodes.RealizeInstances, input_kwargs={"Geometry": merge_by_distance} + ) + + subdivide_mesh = nw.new_node( + Nodes.SubdivideMesh, input_kwargs={"Mesh": realize_instances, "Level": 4} + ) + + multiply_7 = nw.new_node( + Nodes.Math, input_kwargs={0: add_1, 1: -0.5000}, attrs={"operation": "MULTIPLY"} + ) + + combine_xyz_4 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"X": multiply_7}) + + transform_2 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": subdivide_mesh, "Translation": combine_xyz_4}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": transform_2}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_panel_edge_frame", singleton=False, type="GeometryNodeTree" +) +def nodegroup_panel_edge_frame(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "vertical_edge", None), + ("NodeSocketFloat", "door_width", 0.5000), + ("NodeSocketFloat", "door_height", 0.0000), + ("NodeSocketGeometry", "horizontal_edge", None), + ], + ) + + multiply_add = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["door_width"], 2: 0.0010}, + attrs={"operation": "MULTIPLY_ADD"}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_add, 1: -1.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + transform_7 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": group_input.outputs["horizontal_edge"], + "Translation": (0.0000, -0.0001, 0.0000), + "Scale": (0.9999, 1.0000, 1.0000), + }, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_add, 1: 1.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + add = nw.new_node(Nodes.Math, input_kwargs={0: multiply_1, 1: -0.0001}) + + add_1 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["door_height"], 1: 0.0001} + ) + + combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"X": add, "Z": add_1}) + + transform_3 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": transform_7, + "Translation": combine_xyz_2, + "Rotation": (0.0000, -1.5708, 0.0000), + }, + ) + + add_2 = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: 0.0001}) + + combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"X": add_2}) + + transform_2 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": transform_7, + "Translation": combine_xyz_1, + "Rotation": (0.0000, 1.5708, 0.0000), + }, + ) + + combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={"X": multiply_add}) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": group_input.outputs["vertical_edge"], + "Translation": combine_xyz, + }, + ) + + transform_1 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": transform, "Scale": (-1.0000, 1.0000, 1.0000)}, + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [transform_3, transform_2, transform_1, transform]}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Value": multiply, "Geometry": join_geometry_1}, + attrs={"is_active_output": True}, + ) + + +def geometry_door_nodes(nw: NodeWrangler, **kwargs): + # Code generated using version 2.6.4 of the node_transpiler + + door_height = nw.new_node(Nodes.Value, label="door_height") + door_height.outputs[0].default_value = kwargs["door_height"] + + door_edge_thickness_2 = nw.new_node(Nodes.Value, label="door_edge_thickness_2") + 
door_edge_thickness_2.outputs[0].default_value = kwargs["edge_thickness_2"] + + door_edge_width = nw.new_node(Nodes.Value, label="door_edge_width") + door_edge_width.outputs[0].default_value = kwargs["edge_width"] + + door_edge_thickness_1 = nw.new_node(Nodes.Value, label="door_edge_thickness_1") + door_edge_thickness_1.outputs[0].default_value = kwargs["edge_thickness_1"] + + door_edge_ramp_angle = nw.new_node(Nodes.Value, label="door_edge_ramp_angle") + door_edge_ramp_angle.outputs[0].default_value = kwargs["edge_ramp_angle"] + + ramped_edge = nw.new_node( + nodegroup_ramped_edge().name, + input_kwargs={ + "height": door_height, + "thickness_2": door_edge_thickness_2, + "width": door_edge_width, + "thickness_1": door_edge_thickness_1, + "ramp_angle": door_edge_ramp_angle, + }, + ) + + door_width = nw.new_node(Nodes.Value, label="door_width") + door_width.outputs[0].default_value = kwargs["door_width"] + + ramped_edge_1 = nw.new_node( + nodegroup_ramped_edge().name, + input_kwargs={ + "height": door_width, + "thickness_2": door_edge_thickness_2, + "width": door_edge_width, + "thickness_1": door_edge_thickness_1, + "ramp_angle": door_edge_ramp_angle, + }, + ) + + panel_edge_frame = nw.new_node( + nodegroup_panel_edge_frame().name, + input_kwargs={ + "vertical_edge": ramped_edge, + "door_width": door_width, + "door_height": door_height, + "horizontal_edge": ramped_edge_1, + }, + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: panel_edge_frame.outputs["Value"], 1: 0.0001} + ) + + mid_board_thickness = nw.new_node(Nodes.Value, label="mid_board_thickness") + mid_board_thickness.outputs[0].default_value = kwargs["board_thickness"] + + if kwargs["has_mid_ramp"]: + mid_board = nw.new_node( + nodegroup_mid_board(material=kwargs["board_material"]).name, + input_kwargs={ + "height": door_height, + "thickness": mid_board_thickness, + "width": door_width, + }, + ) + else: + mid_board = nw.new_node( + nodegroup_mid_board_001(material=kwargs["board_material"]).name, + input_kwargs={ + "height": door_height, + "thickness": mid_board_thickness, + "width": door_width, + }, + ) + + combine_xyz_5 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": add, "Y": -0.0001, "Z": mid_board.outputs["mid_height"]}, + ) + + frame = [panel_edge_frame.outputs["Geometry"]] + if kwargs["has_mid_ramp"]: + double_rampled_edge = nw.new_node( + nodegroup_double_rampled_edge().name, + input_kwargs={ + "height": door_width, + "thickness_2": door_edge_thickness_2, + "width": door_edge_width, + "thickness_1": door_edge_thickness_1, + "ramp_angle": door_edge_ramp_angle, + }, + ) + + transform_5 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": double_rampled_edge, + "Translation": combine_xyz_5, + "Rotation": (0.0000, 1.5708, 0.0000), + }, + ) + frame.append(transform_5) + + join_geometry_1 = nw.new_node(Nodes.JoinGeometry, input_kwargs={"Geometry": frame}) + + set_material_2 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": join_geometry_1, + "Material": kwargs["frame_material"], + }, + ) + + knob_raduis = nw.new_node(Nodes.Value, label="knob_raduis") + knob_raduis.outputs[0].default_value = kwargs["knob_R"] + + know_length = nw.new_node(Nodes.Value, label="know_length") + know_length.outputs[0].default_value = kwargs["knob_length"] + + multiply = nw.new_node( + Nodes.Math, input_kwargs={0: door_height}, attrs={"operation": "MULTIPLY"} + ) + + knob_handle = nw.new_node( + nodegroup_knob_handle().name, + input_kwargs={ + "Radius": knob_raduis, + "thickness_1": door_edge_thickness_1, + 
"thickness_2": door_edge_thickness_2, + "length": know_length, + "knob_mid_height": multiply, + "edge_width": door_edge_width, + "door_width": door_width, + }, + ) + + set_material_3 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={"Geometry": knob_handle, "Material": kwargs["frame_material"]}, + ) + + attach_gadgets = [] + + for h in kwargs["attach_height"]: + attach_height = nw.new_node(Nodes.Value, label="attach_height") + attach_height.outputs[0].default_value = h + + attach = nw.new_node( + nodegroup_node_group().name, + input_kwargs={"attach_height": attach_height, "door_width": door_width}, + ) + + set_material_1 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={"Geometry": attach, "Material": get_shelf_material("metal")}, + ) + attach_gadgets.append(set_material_1) + + geos = [ + set_material_2, + set_material_3, + mid_board.outputs["Geometry"], + ] + attach_gadgets + join_geometry = nw.new_node(Nodes.JoinGeometry, input_kwargs={"Geometry": geos}) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: door_width, 1: -0.5000}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={"X": multiply}) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": join_geometry, "Translation": combine_xyz}, + ) + + realize_instances_1 = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": transform} + ) + + triangulate = nw.new_node( + "GeometryNodeTriangulate", input_kwargs={"Mesh": realize_instances_1} + ) + + transform_1 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": triangulate, + "Scale": (-1.0 if kwargs["door_left_hinge"] else 1.0, 1.0000, 1.0000), + }, + ) + + transform_2 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": transform_1, "Rotation": (0.0000, 0.0000, -1.5708)}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": transform_2}, + attrs={"is_active_output": True}, + ) + + +def geometry_cabinet_nodes(nw: NodeWrangler, **kwargs): + # Code generated using version 2.6.4 of the node_transpiler + right_door_info = nw.new_node( + Nodes.ObjectInfo, input_kwargs={"Object": kwargs["door"][0]} + ) + left_door_info = nw.new_node( + Nodes.ObjectInfo, input_kwargs={"Object": kwargs["door"][1]} + ) + shelf_info = nw.new_node(Nodes.ObjectInfo, input_kwargs={"Object": kwargs["shelf"]}) + + doors = [] + transform_r = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": right_door_info.outputs["Geometry"], + "Translation": kwargs["door_hinge_pos"][0], + "Rotation": (0, 0, kwargs["door_open_angle"]), + }, + ) + doors.append(transform_r) + if len(kwargs["door_hinge_pos"]) > 1: + transform_l = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": left_door_info.outputs["Geometry"], + "Translation": kwargs["door_hinge_pos"][1], + "Rotation": (0, 0, kwargs["door_open_angle"]), + }, + ) + doors.append(transform_l) + + attaches = [] + for pos in kwargs["attach_pos"]: + cube = nw.new_node( + Nodes.MeshCube, input_kwargs={"Size": (0.0006, 0.0200, 0.04500)} + ) + + combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Y": -0.0100}) + + transform = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": cube, "Translation": combine_xyz} + ) + + cube_1 = nw.new_node( + Nodes.MeshCube, input_kwargs={"Size": (0.0005, 0.0340, 0.0200)} + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [transform, cube_1]} + ) + + transform_1 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": join_geometry, + 
"Translation": (0.0000, -0.0170, 0.0000), + }, + ) + + transform_2 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": transform_1, + "Rotation": (0.0000, 0.0000, -1.5708), + }, + ) + + transform_3 = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": transform_2, "Translation": pos} + ) + + attaches.append(transform_3) + + join_geometry_a = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": attaches} + ) + + set_material = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": join_geometry_a, + "Material": get_shelf_material("metal"), + }, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={ + "Geometry": [shelf_info.outputs["Geometry"]] + doors + [set_material] + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": join_geometry}, + attrs={"is_active_output": True}, + ) + + +class CabinetDoorBaseFactory(AssetFactory): + def __init__(self, factory_seed, params={}, coarse=False): + super(CabinetDoorBaseFactory, self).__init__(factory_seed, coarse=coarse) + self.params = {} + + def get_asset_params(self, i=0): + params = self.params.copy() + if params.get("door_height", None) is None: + params["door_height"] = uniform(0.7, 2.2) + if params.get("door_width", None) is None: + params["door_width"] = uniform(0.3, 0.4) + if params.get("edge_thickness_1", None) is None: + params["edge_thickness_1"] = uniform(0.01, 0.02) + if params.get("edge_width", None) is None: + params["edge_width"] = uniform(0.03, 0.05) + if params.get("edge_thickness_2", None) is None: + params["edge_thickness_2"] = uniform(0.005, 0.01) + if params.get("edge_ramp_angle", None) is None: + params["edge_ramp_angle"] = uniform(0.6, 0.8) + params["board_thickness"] = params["edge_thickness_1"] - 0.005 + if params.get("knob_R", None) is None: + params["knob_R"] = uniform(0.003, 0.006) + if params.get("knob_length", None) is None: + params["knob_length"] = uniform(0.018, 0.035) + if params.get("attach_height", None) is None: + gap = uniform(0.05, 0.15) + params["attach_height"] = [gap, params["door_height"] - gap] + if params.get("has_mid_ramp", None) is None: + params["has_mid_ramp"] = np.random.choice([True, False], p=[0.6, 0.4]) + if params.get("door_left_hinge", None) is None: + params["door_left_hinge"] = False + + if params.get("frame_material", None) is None: + params["frame_material"] = np.random.choice( + ["white", "black_wood", "wood"], p=[0.5, 0.2, 0.3] + ) + if params.get("board_material", None) is None: + if params["has_mid_ramp"]: + lower_mat = np.random.choice( + [params["frame_material"], "glass"], p=[0.7, 0.3] + ) + upper_mat = np.random.choice([lower_mat, "glass"], p=[0.6, 0.4]) + params["board_material"] = [lower_mat, upper_mat] + else: + params["board_material"] = [params["frame_material"]] + + params = self.get_material_func(params) + return params + + def get_material_func(self, params, randomness=True): + params["frame_material"] = get_shelf_material(params["frame_material"]) + materials = [] + if not isinstance(params["board_material"], list): + params["board_material"] = [params["board_material"]] + for mat in params["board_material"]: + materials.append(get_shelf_material(mat)) + params["board_material"] = materials + return params + + def create_asset(self, i=0, **params): + bpy.ops.mesh.primitive_plane_add( + size=1, + enter_editmode=False, + align="WORLD", + location=(0, 0, 0), + scale=(1, 1, 1), + ) + obj = bpy.context.active_object + + obj_params = self.get_asset_params(i) + surface.add_geomod( + obj, 
geometry_door_nodes, apply=True, attributes=[], input_kwargs=obj_params + ) + + if params.get("ret_params", False): + return obj, obj_params + + return obj + + +class CabinetDoorIkeaFactory(CabinetDoorBaseFactory): + def __init__(self, factory_seed, params={}, coarse=False): + super(CabinetDoorIkeaFactory, self).__init__(factory_seed, coarse=coarse) + self.params = { + "edge_thickness_1": 0.012, + "edge_thickness_2": 0.008, + "board_thickness": 0.006, + "edge_width": 0.02, + "edge_ramp_angle": 0.5, + "knob_R": 0.004, + "knob_length": 0.03, + "has_mid_ramp": False, + "attach_height": 0.08, + } + + def get_asset_params(self, i=0): + params = self.params.copy() + if params.get("door_height", None) is None: + params["door_height"] = uniform(0.7, 2.2) + if params.get("door_width", None) is None: + params["door_width"] = uniform(0.3, 0.4) + if params.get("door_left_hinge", None) is None: + params["door_left_hinge"] = False + + params["attach_height"] = [ + params["door_height"] - params["attach_height"], + params["attach_height"], + ] + params = self.get_material_func(params) + return params + + +class CabinetBaseFactory(AssetFactory): + def __init__(self, factory_seed, params={}, coarse=False): + super(CabinetBaseFactory, self).__init__(factory_seed, coarse=coarse) + self.shelf_params = {} + self.door_params = {} + self.mat_params = {} + self.shelf_fac = LargeShelfBaseFactory(factory_seed) + self.door_fac = CabinetDoorBaseFactory(factory_seed) + + def sample_params(self): + # Update fac params + pass + + def get_material_params(self): + with FixedSeed(self.factory_seed): + params = self.mat_params.copy() + if params.get("frame_material", None) is None: + params["frame_material"] = np.random.choice( + ["white", "black_wood", "wood"], p=[0.5, 0.2, 0.3] + ) + return params + + def get_shelf_params(self, i=0): + params = self.shelf_params.copy() + if params.get("shelf_cell_width", None) is None: + params["shelf_cell_width"] = [ + np.random.choice([0.76, 0.36], p=[0.5, 0.5]) + * np.clip(normal(1.0, 0.1), 0.75, 1.25) + ] + if params.get("shelf_cell_height", None) is None: + num_v_cells = randint(3, 7) + shelf_cell_height = [] + for i in range(num_v_cells): + shelf_cell_height.append(0.3 * np.clip(normal(1.0, 0.06), 0.75, 1.25)) + params["shelf_cell_height"] = shelf_cell_height + if params.get("frame_material", None) is None: + params["frame_material"] = self.mat_params["frame_material"] + + return params + + def get_door_params(self, i=0): + params = self.door_params.copy() + + # get door params + shelf_width = ( + self.shelf_params["shelf_width"] + + self.shelf_params["side_board_thickness"] * 2 + ) + if params.get("door_width", None) is None: + if shelf_width < 0.55: + params["door_width"] = shelf_width + params["num_door"] = 1 + else: + params["door_width"] = shelf_width / 2.0 - 0.0005 + params["num_door"] = 2 + if params.get("door_height", None) is None: + params["door_height"] = ( + self.shelf_params["division_board_z_translation"][-1] + - self.shelf_params["division_board_z_translation"][0] + + self.shelf_params["division_board_thickness"] + ) + if len( + self.shelf_params["division_board_z_translation"] + ) > 5 and np.random.choice([True, False], p=[0.5, 0.5]): + params["door_height"] = ( + self.shelf_params["division_board_z_translation"][3] + - self.shelf_params["division_board_z_translation"][0] + + self.shelf_params["division_board_thickness"] + ) + if params.get("frame_material", None) is None: + params["frame_material"] = self.mat_params["frame_material"] + + return params + + def 
get_cabinet_params(self, i=0): + params = dict() + + shelf_width = ( + self.shelf_params["shelf_width"] + + self.shelf_params["side_board_thickness"] * 2 + ) + if self.door_params["num_door"] == 1: + params["door_hinge_pos"] = [ + ( + self.shelf_params["shelf_depth"] / 2.0 + 0.0025, + -shelf_width / 2.0, + self.shelf_params["bottom_board_height"], + ) + ] + params["door_open_angle"] = 0 + params["attach_pos"] = [ + ( + self.shelf_params["shelf_depth"] / 2.0, + -self.shelf_params["shelf_width"] / 2.0, + self.shelf_params["bottom_board_height"] + z, + ) + for z in self.door_params["attach_height"] + ] + elif self.door_params["num_door"] == 2: + params["door_hinge_pos"] = [ + ( + self.shelf_params["shelf_depth"] / 2.0 + 0.008, + -shelf_width / 2.0, + self.shelf_params["bottom_board_height"], + ), + ( + self.shelf_params["shelf_depth"] / 2.0 + 0.008, + shelf_width / 2.0, + self.shelf_params["bottom_board_height"], + ), + ] + params["door_open_angle"] = 0 + params["attach_pos"] = [ + ( + self.shelf_params["shelf_depth"] / 2.0, + -self.shelf_params["shelf_width"] / 2.0, + self.shelf_params["bottom_board_height"] + z, + ) + for z in self.door_params["attach_height"] + ] + [ + ( + self.shelf_params["shelf_depth"] / 2.0, + self.shelf_params["shelf_width"] / 2.0, + self.shelf_params["bottom_board_height"] + z, + ) + for z in self.door_params["attach_height"] + ] + else: + raise NotImplementedError + + return params + + def get_cabinet_components(self, i): + # update material params + self.sample_params() + self.mat_params = self.get_material_params() + + # create shelf + shelf_params = self.get_shelf_params(i=i) + self.shelf_fac.params = shelf_params + shelf, shelf_params = self.shelf_fac.create_asset(i=i, ret_params=True) + shelf.name = "cabinet_frame" + self.shelf_params = shelf_params + + # create doors + door_params = self.get_door_params(i=i) + self.door_fac.params = door_params + self.door_fac.params["door_left_hinge"] = False + right_door, door_obj_params = self.door_fac.create_asset(i=i, ret_params=True) + right_door.name = "cabinet_right_door" + self.door_fac.params = door_obj_params + self.door_fac.params["door_left_hinge"] = True + left_door, _ = self.door_fac.create_asset(i=i, ret_params=True) + left_door.name = "cabinet_left_door" + self.door_params = door_obj_params + + return shelf, right_door, left_door + + def create_asset(self, i=0, **params): + bpy.ops.mesh.primitive_plane_add( + size=1, + enter_editmode=False, + align="WORLD", + location=(0, 0, 0), + scale=(1, 1, 1), + ) + obj = bpy.context.active_object + + shelf, right_door, left_door = self.get_cabinet_components(i=i) + + # create cabinet + cabinet_params = self.get_cabinet_params(i=i) + surface.add_geomod( + obj, + geometry_cabinet_nodes, + attributes=[], + input_kwargs={ + "door": [right_door, left_door], + "shelf": shelf, + "door_hinge_pos": cabinet_params["door_hinge_pos"], + "door_open_angle": cabinet_params["door_open_angle"], + "attach_pos": cabinet_params["attach_pos"], + }, + ) + butil.delete([shelf, left_door, right_door]) + return obj + + +class CabinetFactory(CabinetBaseFactory): + def sample_params(self): + params = dict() + params["Dimensions"] = ( + uniform(0.25, 0.35), + uniform(0.3, 0.7), + uniform(0.9, 1.8), + ) + + params["bottom_board_height"] = 0.083 + params["shelf_depth"] = params["Dimensions"][0] - 0.01 + num_h = int((params["Dimensions"][2] - 0.083) / 0.3) + params["shelf_cell_height"] = [ + (params["Dimensions"][2] - 0.083) / num_h for _ in range(num_h) + ] + params["shelf_cell_width"] = 
[params["Dimensions"][1]] + self.shelf_params = self.shelf_fac.sample_params() diff --git a/infinigen/assets/objects/shelves/cell_shelf.py b/infinigen/assets/objects/shelves/cell_shelf.py new file mode 100644 index 000000000..e36a60223 --- /dev/null +++ b/infinigen/assets/objects/shelves/cell_shelf.py @@ -0,0 +1,1598 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Beining Han + +import bpy +import numpy as np +from numpy.random import normal, randint, uniform + +from infinigen.assets.materials import metal +from infinigen.assets.materials.shelf_shaders import ( + shader_shelves_black_metallic, + shader_shelves_black_metallic_sampler, + shader_shelves_black_wood, + shader_shelves_black_wood_sampler, + shader_shelves_white, + shader_shelves_white_metallic, + shader_shelves_white_metallic_sampler, + shader_shelves_white_sampler, + shader_shelves_wood, + shader_shelves_wood_sampler, +) +from infinigen.assets.objects.shelves.utils import nodegroup_tagged_cube +from infinigen.assets.utils.object import new_bbox +from infinigen.core import surface, tagging +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.util.math import FixedSeed + + +@node_utils.to_nodegroup( + "nodegroup_screw_head", singleton=False, type="GeometryNodeTree" +) +def nodegroup_screw_head(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + cylinder = nw.new_node( + "GeometryNodeMeshCylinder", input_kwargs={"Radius": 0.0050, "Depth": 0.0010} + ) + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "Z", 0.5000), + ("NodeSocketFloat", "leg", 0.5000), + ("NodeSocketFloat", "X", 0.5000), + ("NodeSocketFloat", "external", 0.5000), + ("NodeSocketFloat", "depth", 0.5000), + ], + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["external"], 1: 0.0000} + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["X"], 1: add}, + attrs={"operation": "SUBTRACT"}, + ) + + multiply = nw.new_node( + Nodes.Math, input_kwargs={0: subtract}, attrs={"operation": "MULTIPLY"} + ) + + multiply_1 = nw.new_node( + Nodes.Math, input_kwargs={0: add}, attrs={"operation": "MULTIPLY"} + ) + + add_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Z"], 1: group_input.outputs["leg"]}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, input_kwargs={0: add, 1: 2.0000}, attrs={"operation": "MULTIPLY"} + ) + + add_2 = nw.new_node(Nodes.Math, input_kwargs={0: add_1, 1: multiply_2}) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": multiply, "Y": multiply_1, "Z": add_2} + ) + + transform_2 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": cylinder.outputs["Mesh"], "Translation": combine_xyz}, + ) + + subtract_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["depth"], 1: multiply_1}, + attrs={"operation": "SUBTRACT"}, + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": multiply, "Y": subtract_1, "Z": add_2} + ) + + transform_3 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": cylinder.outputs["Mesh"], + "Translation": combine_xyz_1, + }, + ) + + multiply_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply, 1: -1.0000}, + attrs={"operation": "MULTIPLY"}, + 
) + + combine_xyz_2 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": multiply_3, "Y": subtract_1, "Z": add_2} + ) + + transform_4 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": cylinder.outputs["Mesh"], + "Translation": combine_xyz_2, + }, + ) + + combine_xyz_3 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": multiply_3, "Y": multiply_1, "Z": add_2} + ) + + transform_5 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": cylinder.outputs["Mesh"], + "Translation": combine_xyz_3, + }, + ) + + join_geometry_3 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [transform_2, transform_3, transform_4, transform_5]}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": join_geometry_3}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_base_frame", singleton=False, type="GeometryNodeTree" +) +def nodegroup_base_frame(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "leg_height", 0.5000), + ("NodeSocketFloat", "leg_size", 0.5000), + ("NodeSocketFloat", "depth", 0.5000), + ("NodeSocketFloat", "bottom_x", 0.5000), + ], + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["leg_size"], 1: 0.0000} + ) + + add_1 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["leg_height"], 1: 0.0000} + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": add, "Y": add, "Z": add_1} + ) + + cube = nw.new_node( + Nodes.MeshCube, + input_kwargs={ + "Size": combine_xyz, + "Vertices X": 5, + "Vertices Y": 5, + "Vertices Z": 5, + }, + ) + + add_2 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["bottom_x"], 1: 0.0000} + ) + + multiply = nw.new_node( + Nodes.Math, input_kwargs={0: add_2}, attrs={"operation": "MULTIPLY"} + ) + + multiply_1 = nw.new_node( + Nodes.Math, input_kwargs={0: add}, attrs={"operation": "MULTIPLY"} + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply, 1: multiply_1}, + attrs={"operation": "SUBTRACT"}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, input_kwargs={0: add_1}, attrs={"operation": "MULTIPLY"} + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": subtract, "Y": multiply_1, "Z": multiply_2} + ) + + transform_2 = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": cube, "Translation": combine_xyz_1} + ) + + multiply_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract, 1: -1.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_2 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": multiply_3, "Y": multiply_1, "Z": multiply_2}, + ) + + transform_3 = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": cube, "Translation": combine_xyz_2} + ) + + add_3 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["depth"], 1: 0.0000} + ) + + add_4 = nw.new_node(Nodes.Math, input_kwargs={0: add_3, 1: 0.0000}) + + subtract_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: add_4, 1: multiply_1}, + attrs={"operation": "SUBTRACT"}, + ) + + combine_xyz_3 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": subtract, "Y": subtract_1, "Z": multiply_2} + ) + + transform_4 = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": cube, "Translation": combine_xyz_3} + ) + + combine_xyz_4 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": multiply_3, "Y": subtract_1, "Z": multiply_2}, + ) + + transform_5 = 
nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": cube, "Translation": combine_xyz_4} + ) + + multiply_4 = nw.new_node( + Nodes.Math, input_kwargs={0: add, 1: 2.0000}, attrs={"operation": "MULTIPLY"} + ) + + subtract_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: add_2, 1: multiply_4}, + attrs={"operation": "SUBTRACT"}, + ) + + combine_xyz_5 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": subtract_2, "Y": add, "Z": add} + ) + + cube_1 = nw.new_node( + Nodes.MeshCube, + input_kwargs={ + "Size": combine_xyz_5, + "Vertices X": 5, + "Vertices Y": 5, + "Vertices Z": 5, + }, + ) + + subtract_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: add_1, 1: multiply_1}, + attrs={"operation": "SUBTRACT"}, + ) + + combine_xyz_6 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"Y": multiply_1, "Z": subtract_3} + ) + + transform_6 = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": cube_1, "Translation": combine_xyz_6} + ) + + add_5 = nw.new_node(Nodes.Math, input_kwargs={0: add_3, 1: 0.0000}) + + subtract_4 = nw.new_node( + Nodes.Math, + input_kwargs={0: add_5, 1: multiply_1}, + attrs={"operation": "SUBTRACT"}, + ) + + combine_xyz_7 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"Y": subtract_4, "Z": subtract_3} + ) + + transform_7 = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": cube_1, "Translation": combine_xyz_7} + ) + + subtract_5 = nw.new_node( + Nodes.Math, + input_kwargs={0: add_3, 1: multiply_4}, + attrs={"operation": "SUBTRACT"}, + ) + + combine_xyz_8 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": add, "Y": subtract_5, "Z": add} + ) + + cube_2 = nw.new_node( + Nodes.MeshCube, + input_kwargs={ + "Size": combine_xyz_8, + "Vertices X": 5, + "Vertices Y": 5, + "Vertices Z": 5, + }, + ) + + subtract_6 = nw.new_node( + Nodes.Math, input_kwargs={0: add_2, 1: add}, attrs={"operation": "SUBTRACT"} + ) + + multiply_5 = nw.new_node( + Nodes.Math, input_kwargs={0: subtract_6}, attrs={"operation": "MULTIPLY"} + ) + + multiply_6 = nw.new_node( + Nodes.Math, input_kwargs={0: subtract_5}, attrs={"operation": "MULTIPLY"} + ) + + add_6 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_6, 1: add}) + + subtract_7 = nw.new_node( + Nodes.Math, + input_kwargs={0: add_1, 1: multiply_1}, + attrs={"operation": "SUBTRACT"}, + ) + + combine_xyz_9 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": multiply_5, "Y": add_6, "Z": subtract_7} + ) + + transform_8 = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": cube_2, "Translation": combine_xyz_9} + ) + + multiply_7 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_5, 1: -1.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_10 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": multiply_7, "Y": add_6, "Z": subtract_7} + ) + + transform_9 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": cube_2, "Translation": combine_xyz_10}, + ) + + join_geometry_3 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={ + "Geometry": [ + transform_2, + transform_3, + transform_4, + transform_5, + transform_6, + transform_7, + transform_8, + transform_9, + ] + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": join_geometry_3}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_back_board", singleton=False, type="GeometryNodeTree" +) +def nodegroup_back_board(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "X", 
0.0000), + ("NodeSocketFloat", "Z", 0.5000), + ("NodeSocketFloat", "leg", 0.5000), + ("NodeSocketFloat", "external", 0.5000), + ], + ) + + add = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Z"], 1: 0.0000}) + + combine_xyz_4 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": group_input.outputs["X"], "Y": 0.01, "Z": add}, + ) + + cube = nw.new_node( + Nodes.MeshCube, + input_kwargs={ + "Size": combine_xyz_4, + "Vertices X": 5, + "Vertices Y": 5, + "Vertices Z": 5, + }, + ) + + multiply = nw.new_node( + Nodes.Math, input_kwargs={0: add}, attrs={"operation": "MULTIPLY"} + ) + + add_1 = nw.new_node( + Nodes.Math, input_kwargs={0: multiply, 1: group_input.outputs["leg"]} + ) + + add_2 = nw.new_node( + Nodes.Math, input_kwargs={0: add_1, 1: group_input.outputs["external"]} + ) + + combine_xyz_5 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": add_2}) + + transform_6 = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": cube, "Translation": combine_xyz_5} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": transform_6}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_attach_gadget", singleton=False, type="GeometryNodeTree" +) +def nodegroup_attach_gadget(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "z", 0.5000), + ("NodeSocketFloat", "base_leg", 0.5000), + ("NodeSocketFloat", "x", 0.5000), + ("NodeSocketFloat", "thickness", 0.5000), + ("NodeSocketFloat", "size", 0.5000), + ], + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["size"], 1: 0.0000} + ) + + combine_xyz_4 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": add, "Y": 0.0010, "Z": add} + ) + + cube = nw.new_node(Nodes.MeshCube, input_kwargs={"Size": combine_xyz_4}) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["x"]}, + attrs={"operation": "MULTIPLY"}, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply, 1: group_input.outputs["thickness"]}, + attrs={"operation": "SUBTRACT"}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, input_kwargs={0: add}, attrs={"operation": "MULTIPLY"} + ) + + subtract_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract, 1: multiply_1}, + attrs={"operation": "SUBTRACT"}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract_1, 1: -1.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + add_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["base_leg"], 1: group_input.outputs["z"]}, + ) + + add_2 = nw.new_node( + Nodes.Math, input_kwargs={0: add_1, 1: group_input.outputs["thickness"]} + ) + + add_3 = nw.new_node(Nodes.Math, input_kwargs={0: add_2, 1: -0.02}) + + subtract_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: add_3, 1: multiply_1}, + attrs={"operation": "SUBTRACT"}, + ) + + combine_xyz_5 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": multiply_2, "Z": subtract_2} + ) + + transform_6 = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": cube, "Translation": combine_xyz_5} + ) + + combine_xyz_6 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": subtract_1, "Z": subtract_2} + ) + + transform_7 = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": cube, "Translation": combine_xyz_6} + ) + + join_geometry_5 = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [transform_6, transform_7]} + ) + + group_output = nw.new_node( + 
Nodes.GroupOutput, + input_kwargs={"Geometry": join_geometry_5}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_h_division_placement", singleton=False, type="GeometryNodeTree" +) +def nodegroup_h_division_placement(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "depth", 0.5000), + ("NodeSocketFloat", "cell_size", 0.5000), + ("NodeSocketFloat", "leg_height", 0.5000), + ("NodeSocketFloat", "division_board_thickness", 0.5000), + ("NodeSocketFloat", "external_board_thickness", 0.5000), + ("NodeSocketFloat", "index", 0.5000), + ], + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["depth"]}, + attrs={"operation": "MULTIPLY"}, + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["index"], 1: 0.0000} + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: add, 1: group_input.outputs["cell_size"]}, + attrs={"operation": "MULTIPLY"}, + ) + + add_1 = nw.new_node(Nodes.Math, input_kwargs={0: add, 1: -1.0000}) + + add_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["external_board_thickness"], 1: 0.0000}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, input_kwargs={0: add_1, 1: add_2}, attrs={"operation": "MULTIPLY"} + ) + + add_3 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_1, 1: multiply_2}) + + add_4 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["division_board_thickness"], + 1: group_input.outputs["leg_height"], + }, + ) + + multiply_3 = nw.new_node( + Nodes.Math, input_kwargs={0: add_2}, attrs={"operation": "MULTIPLY"} + ) + + add_5 = nw.new_node(Nodes.Math, input_kwargs={0: add_4, 1: multiply_3}) + + add_6 = nw.new_node(Nodes.Math, input_kwargs={0: add_3, 1: add_5}) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"Y": multiply, "Z": add_6} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Vector": combine_xyz}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_h_division_board", singleton=False, type="GeometryNodeTree" +) +def nodegroup_h_division_board(nw: NodeWrangler, tag_support=False): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "cell_size", 0.5000), + ("NodeSocketFloat", "horizontal_cell_num", 0.5000), + ("NodeSocketFloat", "division_board_thickness", 0.5000), + ("NodeSocketFloat", "depth", 0.0000), + ], + ) + + add = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["horizontal_cell_num"], 1: 0.0000}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: add, 1: group_input.outputs["cell_size"]}, + attrs={"operation": "MULTIPLY"}, + ) + + add_1 = nw.new_node(Nodes.Math, input_kwargs={0: add, 1: -1.0000}) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: add_1, 1: group_input.outputs["division_board_thickness"]}, + attrs={"operation": "MULTIPLY"}, + ) + + add_2 = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: multiply_1}) + + add_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["division_board_thickness"], 1: 0.0000}, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": add_2, "Y": group_input.outputs["depth"], "Z": add_3}, + ) + if tag_support: + cube = nw.new_node( + nodegroup_tagged_cube().name, input_kwargs={"Size": combine_xyz} + ) + 
else: + cube = nw.new_node( + Nodes.MeshCube, + input_kwargs={ + "Size": combine_xyz, + "Vertices X": 5, + "Vertices Y": 5, + "Vertices Z": 5, + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Mesh": cube}, attrs={"is_active_output": True} + ) + + +@node_utils.to_nodegroup( + "nodegroup_v_division_board_placement", singleton=False, type="GeometryNodeTree" +) +def nodegroup_v_division_board_placement(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "depth", 0.5000), + ("NodeSocketFloat", "base_leg", 0.5000), + ("NodeSocketFloat", "external_thickness", 0.5000), + ("NodeSocketFloat", "side_z", 0.5000), + ("NodeSocketFloat", "index", 0.5000), + ("NodeSocketFloat", "h_cell_num", 0.5000), + ("NodeSocketFloat", "division_thickness", 0.5000), + ("NodeSocketFloat", "cell_size", 0.5000), + ], + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["h_cell_num"], 1: 0.0000} + ) + + add_1 = nw.new_node(Nodes.Math, input_kwargs={0: add, 1: -1.0000}) + + multiply = nw.new_node( + Nodes.Math, input_kwargs={1: add_1}, attrs={"operation": "MULTIPLY"} + ) + + add_2 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["index"], 1: 0.0000} + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply, 1: add_2}, + attrs={"operation": "SUBTRACT"}, + ) + + add_3 = nw.new_node(Nodes.Math, input_kwargs={0: subtract}) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: add_3, 1: group_input.outputs["division_thickness"]}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, input_kwargs={0: add}, attrs={"operation": "MULTIPLY"} + ) + + subtract_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_2, 1: add_2}, + attrs={"operation": "SUBTRACT"}, + ) + + multiply_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["cell_size"], 1: subtract_1}, + attrs={"operation": "MULTIPLY"}, + ) + + add_4 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_1, 1: multiply_3}) + + multiply_4 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["depth"]}, + attrs={"operation": "MULTIPLY"}, + ) + + add_5 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["base_leg"], + 1: group_input.outputs["external_thickness"], + }, + ) + + multiply_5 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["side_z"]}, + attrs={"operation": "MULTIPLY"}, + ) + + add_6 = nw.new_node(Nodes.Math, input_kwargs={0: add_5, 1: multiply_5}) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": add_4, "Y": multiply_4, "Z": add_6} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Vector": combine_xyz_1}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_v_division_board", singleton=False, type="GeometryNodeTree" +) +def nodegroup_v_division_board(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "division_board_thickness", 0.0000), + ("NodeSocketFloat", "depth", 0.0000), + ("NodeSocketFloat", "cell_size", 0.5000), + ("NodeSocketFloat", "vertical_cell_num", 0.5000), + ], + ) + + add = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["vertical_cell_num"], 1: 0.0000}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: 
group_input.outputs["cell_size"], 1: add}, + attrs={"operation": "MULTIPLY"}, + ) + + subtract = nw.new_node( + Nodes.Math, input_kwargs={0: add, 1: 1.0000}, attrs={"operation": "SUBTRACT"} + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract, 1: group_input.outputs["division_board_thickness"]}, + attrs={"operation": "MULTIPLY"}, + ) + + add_1 = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: multiply_1}) + + add_200 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["depth"], 1: -0.001} + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": group_input.outputs["division_board_thickness"], + "Y": add_200, + "Z": add_1, + }, + ) + + cube = nw.new_node( + Nodes.MeshCube, + input_kwargs={ + "Size": combine_xyz, + "Vertices X": 5, + "Vertices Y": 5, + "Vertices Z": 5, + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Mesh": cube, "Value": add_1}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_top_bottom_board", singleton=False, type="GeometryNodeTree" +) +def nodegroup_top_bottom_board(nw: NodeWrangler, tag_support=False): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "base_leg_height", 0.5000), + ("NodeSocketFloat", "horizontal_cell_num", 0.5000), + ("NodeSocketFloat", "vertical_cell_num", 0.5000), + ("NodeSocketFloat", "cell_size", 0.5000), + ("NodeSocketFloat", "depth", 0.5000), + ("NodeSocketFloat", "division_board_thickness", 0.5000), + ("NodeSocketFloat", "external_board_thickness", 0.5000), + ], + ) + + add = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["external_board_thickness"], 1: 0.0000}, + ) + + multiply = nw.new_node( + Nodes.Math, input_kwargs={0: add, 1: 2.0000}, attrs={"operation": "MULTIPLY"} + ) + + add_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["division_board_thickness"], 1: 0.0000}, + ) + + add_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["horizontal_cell_num"], 1: 0.0000}, + ) + + add_3 = nw.new_node(Nodes.Math, input_kwargs={0: add_2, 1: -1.0000}) + + multiply_1 = nw.new_node( + Nodes.Math, input_kwargs={0: add_1, 1: add_3}, attrs={"operation": "MULTIPLY"} + ) + + add_4 = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: multiply_1}) + + add_5 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["cell_size"], 1: 0.0000} + ) + + multiply_2 = nw.new_node( + Nodes.Math, input_kwargs={0: add_5, 1: add_2}, attrs={"operation": "MULTIPLY"} + ) + + add_6 = nw.new_node(Nodes.Math, input_kwargs={0: add_4, 1: multiply_2}) + + add_7 = nw.new_node(Nodes.Math, input_kwargs={0: add_6, 1: 0.0020}) + + add_8 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["depth"], 1: 0.0000} + ) + + add_9 = nw.new_node(Nodes.Math, input_kwargs={0: add_8, 1: 0.0000}) + + combine_xyz_3 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": add_7, "Y": add_9, "Z": add} + ) + + if tag_support: + cube_1 = nw.new_node( + nodegroup_tagged_cube().name, input_kwargs={"Size": combine_xyz_3} + ) + else: + cube_1 = nw.new_node( + Nodes.MeshCube, + input_kwargs={ + "Size": combine_xyz_3, + "Vertices X": 5, + "Vertices Y": 5, + "Vertices Z": 5, + }, + ) + + multiply_3 = nw.new_node( + Nodes.Math, input_kwargs={0: add_8}, attrs={"operation": "MULTIPLY"} + ) + + multiply_4 = nw.new_node( + Nodes.Math, input_kwargs={0: add}, attrs={"operation": "MULTIPLY"} + ) + + add_10 = 
nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_4, 1: group_input.outputs["base_leg_height"]}, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"Y": multiply_3, "Z": add_10} + ) + + transform_2 = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": cube_1, "Translation": combine_xyz} + ) + + add_11 = nw.new_node(Nodes.Math, input_kwargs={0: add_10, 1: add}) + + add_12 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["vertical_cell_num"], 1: 0.0000}, + ) + + multiply_5 = nw.new_node( + Nodes.Math, input_kwargs={0: add_12, 1: add_5}, attrs={"operation": "MULTIPLY"} + ) + + add_13 = nw.new_node(Nodes.Math, input_kwargs={0: add_11, 1: multiply_5}) + + add_14 = nw.new_node(Nodes.Math, input_kwargs={0: add_12, 1: -1.0000}) + + multiply_6 = nw.new_node( + Nodes.Math, input_kwargs={0: add_1, 1: add_14}, attrs={"operation": "MULTIPLY"} + ) + + add_15 = nw.new_node(Nodes.Math, input_kwargs={0: add_13, 1: multiply_6}) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"Y": multiply_3, "Z": add_15} + ) + + transform = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": cube_1, "Translation": combine_xyz_1} + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [transform_2, transform]} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": join_geometry_1, "x": add_7}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_side_board", singleton=False, type="GeometryNodeTree" +) +def nodegroup_side_board(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "base_leg_height", 0.5000), + ("NodeSocketFloat", "horizontal_cell_num", 0.5000), + ("NodeSocketFloat", "vertical_cell_num", 0.5000), + ("NodeSocketFloat", "cell_size", 0.5000), + ("NodeSocketFloat", "depth", 0.5000), + ("NodeSocketFloat", "division_thickness", 0.5000), + ("NodeSocketFloat", "external_thickness", 0.5000), + ], + ) + + add = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["external_thickness"], 1: 0.0000}, + ) + + add_1 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["depth"], 1: 0.0000} + ) + + add_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["vertical_cell_num"], 1: 0.0000}, + ) + + subtract = nw.new_node( + Nodes.Math, input_kwargs={0: add_2, 1: 1.0000}, attrs={"operation": "SUBTRACT"} + ) + + add_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["division_thickness"], 1: 0.0000}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract, 1: add_3}, + attrs={"operation": "MULTIPLY"}, + ) + + add_4 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["cell_size"], 1: 0.0000} + ) + + multiply_1 = nw.new_node( + Nodes.Math, input_kwargs={0: add_2, 1: add_4}, attrs={"operation": "MULTIPLY"} + ) + + add_5 = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: multiply_1}) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": add, "Y": add_1, "Z": add_5} + ) + + cube = nw.new_node( + Nodes.MeshCube, + input_kwargs={ + "Size": combine_xyz, + "Vertices X": 5, + "Vertices Y": 5, + "Vertices Z": 5, + }, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: add_4, 1: group_input.outputs["horizontal_cell_num"]}, + attrs={"operation": "MULTIPLY"}, + ) + + subtract_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: 
group_input.outputs["horizontal_cell_num"], 1: 1.0000}, + attrs={"operation": "SUBTRACT"}, + ) + + multiply_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: add_3, 1: subtract_1}, + attrs={"operation": "MULTIPLY"}, + ) + + add_6 = nw.new_node(Nodes.Math, input_kwargs={0: add, 1: multiply_3}) + + add_7 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_2, 1: add_6}) + + multiply_4 = nw.new_node( + Nodes.Math, input_kwargs={1: add_7}, attrs={"operation": "MULTIPLY"} + ) + + multiply_5 = nw.new_node( + Nodes.Math, input_kwargs={0: add_1}, attrs={"operation": "MULTIPLY"} + ) + + multiply_6 = nw.new_node( + Nodes.Math, input_kwargs={0: add_5}, attrs={"operation": "MULTIPLY"} + ) + + add_8 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_6, 1: group_input.outputs["base_leg_height"]}, + ) + + add_9 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["external_thickness"], 1: add_8}, + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": multiply_4, "Y": multiply_5, "Z": add_9} + ) + + transform = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": cube, "Translation": combine_xyz_1} + ) + + multiply_7 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_4, 1: -1.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_2 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": multiply_7, "Y": multiply_5, "Z": add_9} + ) + + transform_1 = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": cube, "Translation": combine_xyz_2} + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [transform, transform_1]} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": join_geometry}, + attrs={"is_active_output": True}, + ) + + +def geometry_nodes(nw: NodeWrangler, **kwargs): + # Code generated using version 2.6.4 of the node_transpiler + + base_leg_height = nw.new_node(Nodes.Value, label="base_leg_height") + base_leg_height.outputs[0].default_value = kwargs["base_leg_height"] + + horizontal_cell_num = nw.new_node(Nodes.Integer, label="horizontal_cell_num") + horizontal_cell_num.integer = kwargs["horizontal_cell_num"] + + vertical_cell_num = nw.new_node(Nodes.Integer, label="vertical_cell_num") + vertical_cell_num.integer = kwargs["vertical_cell_num"] + + cell_size = nw.new_node(Nodes.Value, label="cell_size") + cell_size.outputs[0].default_value = kwargs["cell_size"] + + depth = nw.new_node(Nodes.Value, label="depth") + depth.outputs[0].default_value = kwargs["depth"] + + division_board_thickness = nw.new_node( + Nodes.Value, label="division_board_thickness" + ) + division_board_thickness.outputs[0].default_value = kwargs[ + "division_board_thickness" + ] + + external_board_thickness = nw.new_node( + Nodes.Value, label="external_board_thickness" + ) + external_board_thickness.outputs[0].default_value = kwargs[ + "external_board_thickness" + ] + + sideboard = nw.new_node( + nodegroup_side_board().name, + input_kwargs={ + "base_leg_height": base_leg_height, + "horizontal_cell_num": horizontal_cell_num, + "vertical_cell_num": vertical_cell_num, + "cell_size": cell_size, + "depth": depth, + "division_thickness": division_board_thickness, + "external_thickness": external_board_thickness, + }, + ) + + topbottomboard = nw.new_node( + nodegroup_top_bottom_board(tag_support=kwargs.get("tag_support", False)).name, + input_kwargs={ + "base_leg_height": base_leg_height, + "horizontal_cell_num": horizontal_cell_num, + "vertical_cell_num": vertical_cell_num, + "cell_size": cell_size, + "depth": 
depth, + "division_board_thickness": division_board_thickness, + "external_board_thickness": external_board_thickness, + }, + ) + + vdivisionboard = nw.new_node( + nodegroup_v_division_board().name, + input_kwargs={ + "division_board_thickness": division_board_thickness, + "depth": depth, + "cell_size": cell_size, + "vertical_cell_num": vertical_cell_num, + }, + ) + + all_components = [sideboard, topbottomboard.outputs["Geometry"]] + + v_division_boards = [] + for i in range(1, kwargs["horizontal_cell_num"]): + v_division_index = nw.new_node(Nodes.Integer, label="VDivisionIndex") + v_division_index.integer = i + + vdivisionboardplacement = nw.new_node( + nodegroup_v_division_board_placement().name, + input_kwargs={ + "depth": depth, + "base_leg": base_leg_height, + "external_thickness": external_board_thickness, + "side_z": vdivisionboard.outputs["Value"], + "index": v_division_index, + "h_cell_num": horizontal_cell_num, + "division_thickness": division_board_thickness, + "cell_size": cell_size, + }, + ) + + transform_1 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": vdivisionboard.outputs["Mesh"], + "Translation": vdivisionboardplacement, + }, + ) + v_division_boards.append(transform_1) + + if len(v_division_boards) > 0: + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": v_division_boards} + ) + all_components.append(join_geometry_1) + + hdivisionboard = nw.new_node( + nodegroup_h_division_board(tag_support=kwargs.get("tag_support", False)).name, + input_kwargs={ + "cell_size": cell_size, + "horizontal_cell_num": horizontal_cell_num, + "division_board_thickness": division_board_thickness, + "depth": depth, + }, + ) + + h_division_boards = [] + for j in range(1, kwargs["vertical_cell_num"]): + h_division_index = nw.new_node(Nodes.Integer, label="HDivisionIndex") + h_division_index.integer = j + + hdivisionplacement = nw.new_node( + nodegroup_h_division_placement().name, + input_kwargs={ + "depth": depth, + "cell_size": cell_size, + "leg_height": base_leg_height, + "division_board_thickness": external_board_thickness, + "external_board_thickness": division_board_thickness, + "index": h_division_index, + }, + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": hdivisionboard, + "Translation": hdivisionplacement, + }, + ) + h_division_boards.append(transform) + + if len(h_division_boards) > 0: + join_geometry = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": h_division_boards} + ) + all_components.append(join_geometry) + + if kwargs["has_backboard"]: + backboard = nw.new_node( + nodegroup_back_board().name, + input_kwargs={ + "X": topbottomboard.outputs["x"], + "Z": vdivisionboard.outputs["Value"], + "leg": base_leg_height, + "external": external_board_thickness, + }, + ) + all_components.append(backboard) + else: + attach_square_size = nw.new_node(Nodes.Value, label="attach_square_size") + attach_square_size.outputs[0].default_value = kwargs["attachment_size"] + + attachgadget = nw.new_node( + nodegroup_attach_gadget().name, + input_kwargs={ + "z": vdivisionboard.outputs["Value"], + "base_leg": base_leg_height, + "x": topbottomboard.outputs["x"], + "thickness": external_board_thickness, + "size": attach_square_size, + }, + ) + all_components.append(attachgadget) + + join_geometry_4 = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": all_components} + ) + + realize_instances = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": join_geometry_4} + ) + + set_material_1 = nw.new_node( 
+ Nodes.SetMaterial, + input_kwargs={ + "Geometry": realize_instances, + "Material": surface.shaderfunc_to_material(kwargs["wood_material"]), + }, + ) + + base_leg_size = nw.new_node(Nodes.Value, label="base_leg_size") + base_leg_size.outputs[0].default_value = kwargs["base_leg_size"] + + merge_components = [set_material_1] + if kwargs["has_base_frame"]: + baseframe = nw.new_node( + nodegroup_base_frame().name, + input_kwargs={ + "leg_height": base_leg_height, + "leg_size": base_leg_size, + "depth": depth, + "bottom_x": topbottomboard.outputs["x"], + }, + ) + + realize_instances_1 = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": baseframe} + ) + + set_material = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": realize_instances_1, + "Material": surface.shaderfunc_to_material(kwargs["base_material"]), + }, + ) + merge_components.append(set_material) + + screwhead = nw.new_node( + nodegroup_screw_head().name, + input_kwargs={ + "Z": vdivisionboard.outputs["Value"], + "leg": base_leg_height, + "X": topbottomboard.outputs["x"], + "external": external_board_thickness, + "depth": depth, + }, + ) + + realize_instances_2 = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": screwhead} + ) + + set_material_2 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": realize_instances_2, + "Material": surface.shaderfunc_to_material(metal.get_shader()), + }, + ) + merge_components.append(set_material_2) + + join_geometry_2 = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": merge_components} + ) + + triangulate = nw.new_node( + "GeometryNodeTriangulate", input_kwargs={"Mesh": join_geometry_2} + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": triangulate, "Rotation": (0.0000, 0.0000, -1.5708)}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": transform}, + attrs={"is_active_output": True}, + ) + + +class CellShelfBaseFactory(AssetFactory): + def __init__(self, factory_seed, coarse=False): + super(CellShelfBaseFactory, self).__init__(factory_seed, coarse=coarse) + with FixedSeed(factory_seed): + self.params = self.sample_params() + self.params = self.get_asset_params(self.params) + + def get_asset_params(self, params): + if params is None: + params = {} + + if params.get("depth", None) is None: + params["depth"] = np.clip(normal(0.39, 0.05), 0.29, 0.49) + if params.get("cell_size", None) is None: + params["cell_size"] = np.clip(normal(0.335, 0.03), 0.26, 0.40) + if params.get("vertical_cell_num", None) is None: + params["vertical_cell_num"] = randint(1, 7) + if params.get("horizontal_cell_num", None) is None: + params["horizontal_cell_num"] = randint(1, 7) + if params.get("division_board_thickness", None) is None: + params["division_board_thickness"] = np.clip( + normal(0.015, 0.005), 0.008, 0.022 + ) + if params.get("external_board_thickness", None) is None: + params["external_board_thickness"] = np.clip( + normal(0.04, 0.005), 0.028, 0.052 + ) + if params.get("has_backboard", None) is None: + params["has_backboard"] = False + if params.get("has_base_frame", None) is None: + params["has_base_frame"] = np.random.choice([True, False], p=[0.4, 0.6]) + if params["has_base_frame"]: + if params.get("base_leg_height", None) is None: + params["base_leg_height"] = np.clip(normal(0.174, 0.03), 0.1, 0.25) + if params.get("base_leg_size", None) is None: + params["base_leg_size"] = np.clip(normal(0.035, 0.007), 0.02, 0.05) + if params.get("base_material", None) is None: + 
params["base_material"] = np.random.choice( + ["black", "white"], p=[0.4, 0.6] + ) + else: + params["base_leg_height"] = 0.0 + params["base_leg_size"] = 0.0 + params["base_material"] = "white" + if params.get("attachment_size", None) is None: + params["attachment_size"] = np.clip(normal(0.05, 0.02), 0.02, 0.1) + if params.get("wood_material", None) is None: + params["wood_material"] = np.random.choice( + ["black_wood", "white", "wood"], p=[0.3, 0.2, 0.5] + ) + params["tag_support"] = True + params = self.get_material_func(params, randomness=True) + return params + + def get_material_func(self, params, randomness=True): + if params["wood_material"] == "white": + if randomness: + params["wood_material"] = lambda x: shader_shelves_white( + x, **shader_shelves_white_sampler() + ) + else: + params["wood_material"] = shader_shelves_white + elif params["wood_material"] == "black_wood": + if randomness: + params["wood_material"] = lambda x: shader_shelves_black_wood( + x, **shader_shelves_black_wood_sampler() + ) + else: + params["wood_material"] = shader_shelves_black_wood + elif params["wood_material"] == "wood": + if randomness: + params["wood_material"] = lambda x: shader_shelves_wood( + x, **shader_shelves_wood_sampler() + ) + else: + params["wood_material"] = shader_shelves_wood + else: + raise NotImplementedError + + if params["base_material"] == "white": + if randomness: + params["base_material"] = lambda x: shader_shelves_white_metallic( + x, **shader_shelves_white_metallic_sampler() + ) + else: + params["base_material"] = shader_shelves_white_metallic + elif params["base_material"] == "black": + if randomness: + params["base_material"] = lambda x: shader_shelves_black_metallic( + x, **shader_shelves_black_metallic_sampler() + ) + else: + params["base_material"] = shader_shelves_black_metallic + else: + raise NotImplementedError + + return params + + def create_asset(self, i=0, **params): + bpy.ops.mesh.primitive_plane_add( + size=1, + enter_editmode=False, + align="WORLD", + location=(0, 0, 0), + scale=(1, 1, 1), + ) + obj = bpy.context.active_object + + obj_params = self.params + surface.add_geomod( + obj, geometry_nodes, attributes=[], input_kwargs=obj_params, apply=True + ) + tagging.tag_system.relabel_obj(obj) + + return obj + + +class CellShelfFactory(CellShelfBaseFactory): + def sample_params(self): + params = dict() + params["Dimensions"] = ( + uniform(0.3, 0.45), + uniform(2 * 0.35, 6 * 0.35), + uniform(1 * 0.35, 6 * 0.35), + ) + h_cell_num = int(params["Dimensions"][1] / 0.35) + params["cell_size"] = params["Dimensions"][1] / h_cell_num + params["horizontal_cell_num"] = h_cell_num + params["vertical_cell_num"] = max( + int(params["Dimensions"][2] / params["cell_size"]), 1 + ) + params["depth"] = params["Dimensions"][0] + params["has_base_frame"] = False + params["Dimensions"] = list(params["Dimensions"]) + params["Dimensions"][2] = params["vertical_cell_num"] * params["cell_size"] + return params + + def create_placeholder(self, **kwargs) -> bpy.types.Object: + x, y, z = ( + self.params["Dimensions"][0], + self.params["Dimensions"][1], + self.params["Dimensions"][2], + ) + return new_bbox( + 0, + x, + -y / 2 * 1.1, + y / 2 * 1.1, + 0, + z + + (self.params["vertical_cell_num"] - 1) + * self.params["division_board_thickness"] + + 2 * self.params["external_board_thickness"], + ) + + +class TVStandFactory(CellShelfFactory): + def sample_params( + self, + ): # TODO HACK copied code just following the pattern to get this working + params = dict() + params["Dimensions"] = ( + 
uniform(0.3, 0.45), + uniform(2 * 0.35, 6 * 0.35), + uniform(0.3, 0.5), + ) + h_cell_num = int(params["Dimensions"][1] / 0.35) + params["cell_size"] = params["Dimensions"][1] / h_cell_num + params["horizontal_cell_num"] = h_cell_num + params["vertical_cell_num"] = max( + int(params["Dimensions"][2] / params["cell_size"]), 1 + ) + params["depth"] = params["Dimensions"][0] + params["has_base_frame"] = False + params["Dimensions"] = list(params["Dimensions"]) + params["Dimensions"][2] = params["vertical_cell_num"] * params["cell_size"] + return params diff --git a/infinigen/assets/shelves/countertop.py b/infinigen/assets/objects/shelves/countertop.py similarity index 69% rename from infinigen/assets/shelves/countertop.py rename to infinigen/assets/objects/shelves/countertop.py index 087503dce..e2ff31b74 100644 --- a/infinigen/assets/shelves/countertop.py +++ b/infinigen/assets/objects/shelves/countertop.py @@ -7,44 +7,51 @@ import shapely from numpy.random import uniform -from infinigen.assets.materials import marble, ceramic +from infinigen.assets.materials import ceramic, marble from infinigen.assets.materials.woods import wood_tile -from infinigen.assets.utils.decorate import read_normal, read_center, select_faces +from infinigen.assets.utils.decorate import read_center, read_normal, select_faces from infinigen.assets.utils.mesh import separate_selected, snap_mesh from infinigen.assets.utils.object import join_objects -from infinigen.assets.utils.shapes import obj2polygon, safe_polygon2obj, buffer, dissolve_limited +from infinigen.assets.utils.shapes import ( + buffer, + dissolve_limited, + obj2polygon, + safe_polygon2obj, +) from infinigen.core.placement.factory import AssetFactory, make_asset_collection +from infinigen.core.util import blender as butil from infinigen.core.util.blender import deep_clone_obj from infinigen.core.util.random import random_general as rg -from infinigen.core.util import blender as butil - class CountertopFactory(AssetFactory): - surfaces = 'weighted_choice', (5, marble), (2, ceramic), (2, wood_tile) + surfaces = "weighted_choice", (5, marble), (2, ceramic), (2, wood_tile) def __init__(self, factory_seed, coarse=False): super().__init__(factory_seed, coarse) self.surface = rg(self.surfaces) - self.thickness = uniform(.02, .06) - self.extrusion = 0 if uniform() < .4 else uniform(.02, .03) - self.h_snap = .5 - self.v_snap = .5 - self.v_merge = .1 - self.z_range = .5, 1.5 + self.thickness = uniform(0.02, 0.06) + self.extrusion = 0 if uniform() < 0.4 else uniform(0.02, 0.03) + self.h_snap = 0.5 + self.v_snap = 0.5 + self.v_merge = 0.1 + self.z_range = 0.5, 1.5 self.surface = rg(self.surfaces) @staticmethod def generate_shelves(): from .kitchen_cabinet import KitchenCabinetFactory from .simple_desk import SimpleDeskFactory + shelves = make_asset_collection( [ KitchenCabinetFactory(np.random.randint(1e7)), - SimpleDeskFactory(np.random.randint(1e7))], 10 + SimpleDeskFactory(np.random.randint(1e7)), + ], + 10, ) for s in shelves.objects: - s.location = *uniform(-1, 1, 2), uniform(0, .5) + s.location = *uniform(-1, 1, 2), uniform(0, 0.5) s.rotation_euler[-1] = np.pi / 2 * np.random.randint(4) return shelves @@ -59,7 +66,9 @@ def create_asset(self, shelves=None, **params) -> bpy.types.Object: t = deep_clone_obj(s) z = read_center(t)[:, -1] max_z = np.max(z[(self.z_range[0] < z) & (z < self.z_range[1])]) - selection = (read_normal(t)[:, -1] > .5) & (z - 1e-2 < max_z) & (max_z < z + 1e-2) + selection = ( + (read_normal(t)[:, -1] > 0.5) & (z - 1e-2 < max_z) & (max_z < z + 
1e-2) + ) select_faces(t, selection) r = separate_selected(t, True) r.location = s.location @@ -86,7 +95,10 @@ def create_asset(self, shelves=None, **params) -> bpy.types.Object: groups = [] for i in range(len(geoms)): for j in range(i): - if geoms[i].distance(geoms[j]) <= self.h_snap and zs[i] - zs[j] < self.v_snap: + if ( + geoms[i].distance(geoms[j]) <= self.h_snap + and zs[i] - zs[j] < self.v_snap + ): group = next(g for g in groups if j in g) group.add(i) break @@ -97,10 +109,15 @@ def create_asset(self, shelves=None, **params) -> bpy.types.Object: n = len(group) geoms_ = [geoms[i] for i in group] zs_ = [zs[i] for i in group] - geom_unions = [self.rebuffer(shapely.union_all(geoms_[i:]), self.h_snap / 2) for i in - range(n)] + geom_unions = [ + self.rebuffer(shapely.union_all(geoms_[i:]), self.h_snap / 2) + for i in range(n) + ] geom_unions.append(shapely.Point()) - shapes = [self.rebuffer(geom_unions[i].difference(geom_unions[i + 1]), -1e-4) for i in range(n)] + shapes = [ + self.rebuffer(geom_unions[i].difference(geom_unions[i + 1]), -1e-4) + for i in range(n) + ] for s, z in zip(shapes, zs_): if s.area > 0: o = safe_polygon2obj(self.rebuffer(s, -1e-4).buffer(0)) @@ -120,23 +137,28 @@ def create_asset(self, shelves=None, **params) -> bpy.types.Object: o = safe_polygon2obj(s) if o is None: continue - butil.modify_mesh(o, 'WELD', merge_threshold=5e-4) + butil.modify_mesh(o, "WELD", merge_threshold=5e-4) o.location[-1] = zs_[i] - with butil.ViewportMode(o, 'EDIT'): - bpy.ops.mesh.select_mode(type='EDGE') - bpy.ops.mesh.select_all(action='SELECT') + with butil.ViewportMode(o, "EDIT"): + bpy.ops.mesh.select_mode(type="EDGE") + bpy.ops.mesh.select_all(action="SELECT") bpy.ops.mesh.extrude_edges_move( - TRANSFORM_OT_translate={'value': (0, 0, zs_[j] - zs_[i])} + TRANSFORM_OT_translate={"value": (0, 0, zs_[j] - zs_[i])} ) objs.append(o) obj = join_objects(objs) snap_mesh(obj, 2e-2) dissolve_limited(obj) - with butil.ViewportMode(obj, 'EDIT'): - bpy.ops.mesh.select_all(action='SELECT') + with butil.ViewportMode(obj, "EDIT"): + bpy.ops.mesh.select_all(action="SELECT") bpy.ops.mesh.normals_make_consistent(inside=False) butil.modify_mesh( - obj, 'SOLIDIFY', thickness=self.thickness, use_even_offset=True, offset=1, use_quality_normals=True + obj, + "SOLIDIFY", + thickness=self.thickness, + use_even_offset=True, + offset=1, + use_quality_normals=True, ) if shelves_generated: @@ -146,8 +168,8 @@ def create_asset(self, shelves=None, **params) -> bpy.types.Object: @staticmethod def rebuffer(shape, distance): - return shape.buffer(distance, join_style='mitre', cap_style='flat').buffer( - -distance, join_style='mitre', cap_style='flat' + return shape.buffer(distance, join_style="mitre", cap_style="flat").buffer( + -distance, join_style="mitre", cap_style="flat" ) def finalize_assets(self, assets): diff --git a/infinigen/assets/objects/shelves/doors.py b/infinigen/assets/objects/shelves/doors.py new file mode 100644 index 000000000..0f237e794 --- /dev/null +++ b/infinigen/assets/objects/shelves/doors.py @@ -0,0 +1,1314 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
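+# Geometry-node groups used to build cabinet/shelf door panels (mid boards, ramped edge profiles, knob handles).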
+ +# Authors: Lingjie Mei + +import bpy +import numpy as np +from numpy.random import uniform + +from infinigen.assets.materials.shelf_shaders import ( + shader_glass, + shader_shelves_black_wood, + shader_shelves_black_wood_sampler, + shader_shelves_white, + shader_shelves_white_sampler, + shader_shelves_wood, + shader_shelves_wood_sampler, +) +from infinigen.core import surface, tagging +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.placement.factory import AssetFactory + + +@node_utils.to_nodegroup( + "nodegroup_node_group", singleton=False, type="GeometryNodeTree" +) +def nodegroup_node_group(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + cube = nw.new_node(Nodes.MeshCube, input_kwargs={"Size": (0.0120, 0.00060, 0.0400)}) + + cylinder = nw.new_node( + "GeometryNodeMeshCylinder", + input_kwargs={"Vertices": 64, "Radius": 0.0100, "Depth": 0.00050}, + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": cylinder.outputs["Mesh"], + "Translation": (0.0050, 0.0000, 0.0000), + "Rotation": (1.5708, 0.0000, 0.0000), + }, + ) + + cube_1 = nw.new_node( + Nodes.MeshCube, input_kwargs={"Size": (0.0200, 0.0006, 0.0120)} + ) + + transform_1 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": cube_1, "Translation": (0.0080, 0.0000, 0.0000)}, + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [cube, transform, transform_1]} + ) + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "attach_height", 0.1000), + ("NodeSocketFloat", "door_width", 0.5000), + ], + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["door_width"]}, + attrs={"operation": "MULTIPLY"}, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply, 1: 0.0181}, + attrs={"operation": "SUBTRACT"}, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": subtract, "Z": group_input.outputs["attach_height"]}, + ) + + transform_2 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": join_geometry_1, "Translation": combine_xyz}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": transform_2}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_knob_handle", singleton=False, type="GeometryNodeTree" +) +def nodegroup_knob_handle(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloatDistance", "Radius", 0.0100), + ("NodeSocketFloat", "thickness_1", 0.5000), + ("NodeSocketFloat", "thickness_2", 0.5000), + ("NodeSocketFloat", "length", 0.5000), + ("NodeSocketFloat", "knob_mid_height", 0.0000), + ("NodeSocketFloat", "edge_width", 0.5000), + ("NodeSocketFloat", "door_width", 0.5000), + ], + ) + + add = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["thickness_2"], + 1: group_input.outputs["thickness_1"], + }, + ) + + add_1 = nw.new_node( + Nodes.Math, input_kwargs={0: add, 1: group_input.outputs["length"]} + ) + + cylinder = nw.new_node( + "GeometryNodeMeshCylinder", + input_kwargs={ + "Vertices": 64, + "Radius": group_input.outputs["Radius"], + "Depth": add_1, + }, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["door_width"], + 1: group_input.outputs["edge_width"], + }, + attrs={"operation": "SUBTRACT"}, + 
) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract, 1: -0.5000}, + attrs={"operation": "MULTIPLY"}, + ) + + add_2 = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: -0.005}) + + multiply_1 = nw.new_node( + Nodes.Math, input_kwargs={0: add_1}, attrs={"operation": "MULTIPLY"} + ) + + combine_xyz_6 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": add_2, + "Y": multiply_1, + "Z": group_input.outputs["knob_mid_height"], + }, + ) + + transform_6 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": cylinder.outputs["Mesh"], + "Translation": combine_xyz_6, + "Rotation": (1.5708, 0.0000, 0.0000), + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": transform_6}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_mid_board", singleton=False, type="GeometryNodeTree" +) +def nodegroup_mid_board(nw: NodeWrangler, **kwargs): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "height", 0.5000), + ("NodeSocketFloat", "thickness", 0.5000), + ("NodeSocketFloat", "width", 0.5000), + ], + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["width"], 1: -0.0001} + ) + + add_1 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["thickness"], 1: 0.0000} + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["height"]}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_k = nw.new_node( + Nodes.Math, input_kwargs={0: add_1, 1: 0.5000}, attrs={"operation": "MULTIPLY"} + ) + + add_k = nw.new_node(Nodes.Math, input_kwargs={0: multiply_k, 1: 0.004}) + + add_2 = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: -0.0001}) + + combine_xyz_3 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": add, "Y": add_1, "Z": add_2} + ) + + cube = nw.new_node( + Nodes.MeshCube, + input_kwargs={ + "Size": combine_xyz_3, + "Vertices X": 5, + "Vertices Y": 5, + "Vertices Z": 5, + }, + ) + + multiply_1 = nw.new_node( + Nodes.Math, input_kwargs={0: multiply}, attrs={"operation": "MULTIPLY"} + ) + + combine_xyz_4 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"Y": add_k, "Z": multiply_1} + ) + + transform_4 = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": cube, "Translation": combine_xyz_4} + ) + + set_material = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": transform_4, + "Material": surface.shaderfunc_to_material(kwargs["material"][0]), + }, + ) + + combine_xyz_7 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": add, "Y": add_1, "Z": add_2} + ) + + cube_1 = nw.new_node( + Nodes.MeshCube, + input_kwargs={ + "Size": combine_xyz_7, + "Vertices X": 5, + "Vertices Y": 5, + "Vertices Z": 5, + }, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply, 1: 1.5000}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_8 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"Y": add_k, "Z": multiply_2} + ) + + transform_7 = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": cube_1, "Translation": combine_xyz_8} + ) + + set_material_1 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": transform_7, + "Material": surface.shaderfunc_to_material(kwargs["material"][1]), + }, + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [set_material, set_material_1]} + ) + + realize_instances = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": 
join_geometry_1} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": realize_instances, "mid_height": multiply}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_mid_board_001", singleton=False, type="GeometryNodeTree" +) +def nodegroup_mid_board_001(nw: NodeWrangler, **kwargs): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "height", 0.5000), + ("NodeSocketFloat", "thickness", 0.5000), + ("NodeSocketFloat", "width", 0.5000), + ], + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["width"], 1: -0.0001} + ) + + add_1 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["thickness"], 1: 0.0000} + ) + + multiply_k = nw.new_node( + Nodes.Math, input_kwargs={0: add_1, 1: 0.5000}, attrs={"operation": "MULTIPLY"} + ) + + add_k = nw.new_node(Nodes.Math, input_kwargs={0: multiply_k, 1: 0.004}) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["height"], 1: 1.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + add_2 = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: -0.0001}) + + combine_xyz_3 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": add, "Y": add_1, "Z": add_2} + ) + + cube = nw.new_node( + Nodes.MeshCube, + input_kwargs={ + "Size": combine_xyz_3, + "Vertices X": 5, + "Vertices Y": 5, + "Vertices Z": 5, + }, + ) + + multiply_1 = nw.new_node( + Nodes.Math, input_kwargs={0: multiply}, attrs={"operation": "MULTIPLY"} + ) + + combine_xyz_4 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"Y": add_k, "Z": multiply_1} + ) + + transform_4 = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": cube, "Translation": combine_xyz_4} + ) + + set_material = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": transform_4, + "Material": surface.shaderfunc_to_material(kwargs["material"][0]), + }, + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": set_material} + ) + + realize_instances = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": join_geometry_1} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": realize_instances, "mid_height": multiply}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_double_rampled_edge", singleton=False, type="GeometryNodeTree" +) +def nodegroup_double_rampled_edge(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "height", 0.5000), + ("NodeSocketFloat", "thickness_2", 0.5000), + ("NodeSocketFloat", "width", 0.5000), + ("NodeSocketFloat", "thickness_1", 0.5000), + ("NodeSocketFloat", "ramp_angle", 0.5000), + ], + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["height"], 1: 0.0000} + ) + + combine_xyz_10 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": add}) + + curve_line = nw.new_node(Nodes.CurveLine, input_kwargs={"End": combine_xyz_10}) + + curve_circle = nw.new_node( + Nodes.CurveCircle, input_kwargs={"Resolution": 3, "Radius": 0.0100} + ) + + endpoint_selection = nw.new_node( + Nodes.EndpointSelection, input_kwargs={"End Size": 0} + ) + + add_1 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["width"], 1: 0.0000} + ) + + add_2 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["ramp_angle"], 1: 0.0000} + ) + + 
tangent = nw.new_node( + Nodes.Math, input_kwargs={0: add_2}, attrs={"operation": "TANGENT"} + ) + + add_3 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["thickness_2"], 1: 0.0000} + ) + + multiply = nw.new_node( + Nodes.Math, input_kwargs={0: tangent, 1: add_3}, attrs={"operation": "MULTIPLY"} + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: 2.0000, 1: multiply}, + attrs={"operation": "MULTIPLY"}, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: add_1, 1: multiply_1}, + attrs={"operation": "SUBTRACT"}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, input_kwargs={0: subtract}, attrs={"operation": "MULTIPLY"} + ) + + multiply_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_2, 1: -1.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + add_4 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["thickness_1"], 1: 0.0000} + ) + + combine_xyz_7 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": multiply_3, "Y": add_4} + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": curve_circle.outputs["Curve"], + "Selection": endpoint_selection, + "Position": combine_xyz_7, + }, + ) + + endpoint_selection_1 = nw.new_node( + Nodes.EndpointSelection, input_kwargs={"Start Size": 0} + ) + + add_5 = nw.new_node(Nodes.Math, input_kwargs={0: add_4, 1: add_3}) + + combine_xyz_8 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": multiply_3, "Y": add_5} + ) + + set_position_1 = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": set_position, + "Selection": endpoint_selection_1, + "Position": combine_xyz_8, + }, + ) + + index = nw.new_node(Nodes.Index) + + less_than = nw.new_node( + Nodes.Math, input_kwargs={0: index, 1: 1.0100}, attrs={"operation": "LESS_THAN"} + ) + + greater_than = nw.new_node( + Nodes.Math, + input_kwargs={0: index, 1: 0.9900}, + attrs={"operation": "GREATER_THAN"}, + ) + + op_and = nw.new_node( + Nodes.BooleanMath, input_kwargs={0: less_than, 1: greater_than} + ) + + multiply_4 = nw.new_node( + Nodes.Math, input_kwargs={0: add_1}, attrs={"operation": "MULTIPLY"} + ) + + multiply_5 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_4, 1: -1.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_9 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": multiply_5, "Y": add_4} + ) + + set_position_2 = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": set_position_1, + "Selection": op_and, + "Position": combine_xyz_9, + }, + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": curve_line, + "Profile Curve": set_position_2, + "Fill Caps": True, + }, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": add_1, "Y": add_4, "Z": add} + ) + + cube = nw.new_node(Nodes.MeshCube, input_kwargs={"Size": combine_xyz}) + + multiply_6 = nw.new_node( + Nodes.Math, input_kwargs={0: add_4}, attrs={"operation": "MULTIPLY"} + ) + + combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Y": multiply_6}) + + transform = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": cube, "Translation": combine_xyz_2} + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": subtract, "Y": add_3, "Z": add} + ) + + cube_1 = nw.new_node(Nodes.MeshCube, input_kwargs={"Size": combine_xyz_1}) + + multiply_7 = nw.new_node( + Nodes.Math, input_kwargs={0: add_3}, attrs={"operation": "MULTIPLY"} + ) + + add_6 = nw.new_node(Nodes.Math, input_kwargs={0: add_4, 1: multiply_7}) + + combine_xyz_3 = 
nw.new_node(Nodes.CombineXYZ, input_kwargs={"Y": add_6}) + + transform_1 = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": cube_1, "Translation": combine_xyz_3} + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [transform, transform_1]} + ) + + multiply_8 = nw.new_node( + Nodes.Math, input_kwargs={0: add}, attrs={"operation": "MULTIPLY"} + ) + + combine_xyz_11 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply_8}) + + transform_4 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": join_geometry, "Translation": combine_xyz_11}, + ) + + combine_xyz_12 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": add}) + + curve_line_1 = nw.new_node(Nodes.CurveLine, input_kwargs={"End": combine_xyz_12}) + + transform_2 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": set_position_2, "Scale": (-1.0000, 1.0000, 1.0000)}, + ) + + curve_to_mesh_1 = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": curve_line_1, + "Profile Curve": transform_2, + "Fill Caps": True, + }, + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [curve_to_mesh, transform_4, curve_to_mesh_1]}, + ) + + merge_by_distance = nw.new_node( + Nodes.MergeByDistance, + input_kwargs={"Geometry": join_geometry_1, "Distance": 0.0001}, + ) + + realize_instances = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": merge_by_distance} + ) + + subdivide_mesh = nw.new_node( + Nodes.SubdivideMesh, input_kwargs={"Mesh": realize_instances, "Level": 4} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": subdivide_mesh}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_ramped_edge", singleton=False, type="GeometryNodeTree" +) +def nodegroup_ramped_edge(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "height", 0.5000), + ("NodeSocketFloat", "thickness_2", 0.5000), + ("NodeSocketFloat", "width", 0.5000), + ("NodeSocketFloat", "thickness_1", 0.5000), + ("NodeSocketFloat", "ramp_angle", 0.5000), + ], + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["height"], 1: 0.0000} + ) + + combine_xyz_10 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": add}) + + curve_line = nw.new_node(Nodes.CurveLine, input_kwargs={"End": combine_xyz_10}) + + curve_circle = nw.new_node( + Nodes.CurveCircle, input_kwargs={"Resolution": 3, "Radius": 0.0100} + ) + + endpoint_selection = nw.new_node( + Nodes.EndpointSelection, input_kwargs={"End Size": 0} + ) + + add_1 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["width"], 1: 0.0000} + ) + + multiply = nw.new_node( + Nodes.Math, input_kwargs={0: add_1}, attrs={"operation": "MULTIPLY"} + ) + + add_2 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["ramp_angle"], 1: 0.0000} + ) + + tangent = nw.new_node( + Nodes.Math, input_kwargs={0: add_2}, attrs={"operation": "TANGENT"} + ) + + add_3 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["thickness_2"], 1: 0.0000} + ) + + multiply_1 = nw.new_node( + Nodes.Math, input_kwargs={0: tangent, 1: add_3}, attrs={"operation": "MULTIPLY"} + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: add_1, 1: multiply_1}, + attrs={"operation": "SUBTRACT"}, + ) + + subtract_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply, 1: subtract}, + attrs={"operation": "SUBTRACT"}, + ) + + add_4 
= nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["thickness_1"], 1: 0.0000} + ) + + combine_xyz_7 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": subtract_1, "Y": add_4} + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": curve_circle.outputs["Curve"], + "Selection": endpoint_selection, + "Position": combine_xyz_7, + }, + ) + + endpoint_selection_1 = nw.new_node( + Nodes.EndpointSelection, input_kwargs={"Start Size": 0} + ) + + add_5 = nw.new_node(Nodes.Math, input_kwargs={0: add_4, 1: add_3}) + + combine_xyz_8 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": subtract_1, "Y": add_5} + ) + + set_position_1 = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": set_position, + "Selection": endpoint_selection_1, + "Position": combine_xyz_8, + }, + ) + + index = nw.new_node(Nodes.Index) + + less_than = nw.new_node( + Nodes.Math, input_kwargs={0: index, 1: 1.0100}, attrs={"operation": "LESS_THAN"} + ) + + greater_than = nw.new_node( + Nodes.Math, + input_kwargs={0: index, 1: 0.9900}, + attrs={"operation": "GREATER_THAN"}, + ) + + op_and = nw.new_node( + Nodes.BooleanMath, input_kwargs={0: less_than, 1: greater_than} + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply, 1: -1.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_9 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": multiply_2, "Y": add_4} + ) + + set_position_2 = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": set_position_1, + "Selection": op_and, + "Position": combine_xyz_9, + }, + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": curve_line, + "Profile Curve": set_position_2, + "Fill Caps": True, + }, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": add_1, "Y": add_4, "Z": add} + ) + + cube = nw.new_node(Nodes.MeshCube, input_kwargs={"Size": combine_xyz}) + + multiply_3 = nw.new_node( + Nodes.Math, input_kwargs={0: add_4}, attrs={"operation": "MULTIPLY"} + ) + + combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Y": multiply_3}) + + transform = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": cube, "Translation": combine_xyz_2} + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": subtract, "Y": add_3, "Z": add} + ) + + cube_1 = nw.new_node(Nodes.MeshCube, input_kwargs={"Size": combine_xyz_1}) + + multiply_4 = nw.new_node( + Nodes.Math, input_kwargs={0: multiply_1}, attrs={"operation": "MULTIPLY"} + ) + + multiply_5 = nw.new_node( + Nodes.Math, input_kwargs={0: add_3}, attrs={"operation": "MULTIPLY"} + ) + + add_6 = nw.new_node(Nodes.Math, input_kwargs={0: add_4, 1: multiply_5}) + + combine_xyz_3 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": multiply_4, "Y": add_6} + ) + + transform_1 = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": cube_1, "Translation": combine_xyz_3} + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [transform, transform_1]} + ) + + multiply_6 = nw.new_node( + Nodes.Math, input_kwargs={0: add}, attrs={"operation": "MULTIPLY"} + ) + + combine_xyz_11 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply_6}) + + transform_4 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": join_geometry, "Translation": combine_xyz_11}, + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [curve_to_mesh, transform_4]} + ) + + merge_by_distance = nw.new_node( + Nodes.MergeByDistance, + 
input_kwargs={"Geometry": join_geometry_1, "Distance": 0.0001}, + ) + + realize_instances = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": merge_by_distance} + ) + + subdivide_mesh = nw.new_node( + Nodes.SubdivideMesh, input_kwargs={"Mesh": realize_instances, "Level": 4} + ) + + multiply_7 = nw.new_node( + Nodes.Math, input_kwargs={0: add_1, 1: -0.5000}, attrs={"operation": "MULTIPLY"} + ) + + combine_xyz_4 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"X": multiply_7}) + + transform_2 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": subdivide_mesh, "Translation": combine_xyz_4}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": transform_2}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_panel_edge_frame", singleton=False, type="GeometryNodeTree" +) +def nodegroup_panel_edge_frame(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "vertical_edge", None), + ("NodeSocketFloat", "door_width", 0.5000), + ("NodeSocketFloat", "door_height", 0.0000), + ("NodeSocketGeometry", "horizontal_edge", None), + ], + ) + + multiply_add = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["door_width"], 2: 0.0010}, + attrs={"operation": "MULTIPLY_ADD"}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_add, 1: -1.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + transform_7 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": group_input.outputs["horizontal_edge"], + "Translation": (0.0000, -0.0001, 0.0000), + "Scale": (0.9999, 1.0000, 1.0000), + }, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_add, 1: 1.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + add = nw.new_node(Nodes.Math, input_kwargs={0: multiply_1, 1: -0.0001}) + + add_1 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["door_height"], 1: 0.0001} + ) + + combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"X": add, "Z": add_1}) + + transform_3 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": transform_7, + "Translation": combine_xyz_2, + "Rotation": (0.0000, -1.5708, 0.0000), + }, + ) + + add_2 = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: 0.0001}) + + combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"X": add_2}) + + transform_2 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": transform_7, + "Translation": combine_xyz_1, + "Rotation": (0.0000, 1.5708, 0.0000), + }, + ) + + combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={"X": multiply_add}) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": group_input.outputs["vertical_edge"], + "Translation": combine_xyz, + }, + ) + + transform_1 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": transform, "Scale": (-1.0000, 1.0000, 1.0000)}, + ) + + # transform_1 = nw.new_node(Nodes.FlipFaces, input_kwargs={'Mesh': transform_1}) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [transform_3, transform_2, transform_1, transform]}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Value": multiply, "Geometry": join_geometry_1}, + attrs={"is_active_output": True}, + ) + + +def geometry_door_nodes(nw: NodeWrangler, **kwargs): + # Code generated using version 2.6.4 of the node_transpiler + + door_height = nw.new_node(Nodes.Value, 
label="door_height") + door_height.outputs[0].default_value = kwargs["door_height"] + + door_edge_thickness_2 = nw.new_node(Nodes.Value, label="door_edge_thickness_2") + door_edge_thickness_2.outputs[0].default_value = kwargs["edge_thickness_2"] + + door_edge_width = nw.new_node(Nodes.Value, label="door_edge_width") + door_edge_width.outputs[0].default_value = kwargs["edge_width"] + + door_edge_thickness_1 = nw.new_node(Nodes.Value, label="door_edge_thickness_1") + door_edge_thickness_1.outputs[0].default_value = kwargs["edge_thickness_1"] + + door_edge_ramp_angle = nw.new_node(Nodes.Value, label="door_edge_ramp_angle") + door_edge_ramp_angle.outputs[0].default_value = kwargs["edge_ramp_angle"] + + ramped_edge = nw.new_node( + nodegroup_ramped_edge().name, + input_kwargs={ + "height": door_height, + "thickness_2": door_edge_thickness_2, + "width": door_edge_width, + "thickness_1": door_edge_thickness_1, + "ramp_angle": door_edge_ramp_angle, + }, + ) + + door_width = nw.new_node(Nodes.Value, label="door_width") + door_width.outputs[0].default_value = kwargs["door_width"] + + ramped_edge_1 = nw.new_node( + nodegroup_ramped_edge().name, + input_kwargs={ + "height": door_width, + "thickness_2": door_edge_thickness_2, + "width": door_edge_width, + "thickness_1": door_edge_thickness_1, + "ramp_angle": door_edge_ramp_angle, + }, + ) + + panel_edge_frame = nw.new_node( + nodegroup_panel_edge_frame().name, + input_kwargs={ + "vertical_edge": ramped_edge, + "door_width": door_width, + "door_height": door_height, + "horizontal_edge": ramped_edge_1, + }, + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: panel_edge_frame.outputs["Value"], 1: 0.0001} + ) + + mid_board_thickness = nw.new_node(Nodes.Value, label="mid_board_thickness") + mid_board_thickness.outputs[0].default_value = kwargs["board_thickness"] + + if kwargs["has_mid_ramp"]: + mid_board = nw.new_node( + nodegroup_mid_board(material=kwargs["panel_material"]).name, + input_kwargs={ + "height": door_height, + "thickness": mid_board_thickness, + "width": door_width, + }, + ) + else: + mid_board = nw.new_node( + nodegroup_mid_board_001(material=kwargs["panel_material"]).name, + input_kwargs={ + "height": door_height, + "thickness": mid_board_thickness, + "width": door_width, + }, + ) + + combine_xyz_5 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": add, "Y": -0.0001, "Z": mid_board.outputs["mid_height"]}, + ) + + frame = [panel_edge_frame.outputs["Geometry"]] + if kwargs["has_mid_ramp"]: + double_rampled_edge = nw.new_node( + nodegroup_double_rampled_edge().name, + input_kwargs={ + "height": door_width, + "thickness_2": door_edge_thickness_2, + "width": door_edge_width, + "thickness_1": door_edge_thickness_1, + "ramp_angle": door_edge_ramp_angle, + }, + ) + + transform_5 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": double_rampled_edge, + "Translation": combine_xyz_5, + "Rotation": (0.0000, 1.5708, 0.0000), + }, + ) + frame.append(transform_5) + + knob_raduis = nw.new_node(Nodes.Value, label="knob_raduis") + knob_raduis.outputs[0].default_value = kwargs["knob_R"] + + know_length = nw.new_node(Nodes.Value, label="know_length") + know_length.outputs[0].default_value = kwargs["knob_length"] + + multiply = nw.new_node( + Nodes.Math, input_kwargs={0: door_height}, attrs={"operation": "MULTIPLY"} + ) + + knob_handle = nw.new_node( + nodegroup_knob_handle().name, + input_kwargs={ + "Radius": knob_raduis, + "thickness_1": door_edge_thickness_1, + "thickness_2": door_edge_thickness_2, + "length": know_length, + 
"knob_mid_height": multiply, + "edge_width": door_edge_width, + "door_width": door_width, + }, + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": frame + [knob_handle]} + ) + + set_material_3 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": join_geometry_1, + "Material": surface.shaderfunc_to_material(kwargs["frame_material"]), + }, + ) + + geos = [set_material_3, mid_board.outputs["Geometry"]] + join_geometry = nw.new_node(Nodes.JoinGeometry, input_kwargs={"Geometry": geos}) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: door_width, 1: -0.5000}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={"X": multiply}) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": join_geometry, "Translation": combine_xyz}, + ) + + realize_instances_1 = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": transform} + ) + + triangulate = nw.new_node( + "GeometryNodeTriangulate", input_kwargs={"Mesh": realize_instances_1} + ) + + transform_1 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": triangulate, + "Scale": (-1.0 if kwargs["door_left_hinge"] else 1.0, 1.0000, 1.0000), + }, + ) + + if kwargs["door_left_hinge"]: + transform_1 = nw.new_node(Nodes.FlipFaces, input_kwargs={"Mesh": transform_1}) + + transform_2 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": transform_1, "Rotation": (0.0000, 0.0000, -1.5708)}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": transform_2}, + attrs={"is_active_output": True}, + ) + + +class CabinetDoorBaseFactory(AssetFactory): + def __init__(self, factory_seed, params={}, coarse=False): + super(CabinetDoorBaseFactory, self).__init__(factory_seed, coarse=coarse) + self.params = {} + + def get_asset_params(self, i=0): + params = self.params.copy() + if params.get("door_height", None) is None: + params["door_height"] = uniform(0.7, 2.2) + if params.get("door_width", None) is None: + params["door_width"] = uniform(0.3, 0.4) + if params.get("edge_thickness_1", None) is None: + params["edge_thickness_1"] = uniform(0.01, 0.018) + if params.get("edge_width", None) is None: + params["edge_width"] = uniform(0.03, 0.05) + if params.get("edge_thickness_2", None) is None: + params["edge_thickness_2"] = uniform(0.005, 0.008) + if params.get("edge_ramp_angle", None) is None: + params["edge_ramp_angle"] = uniform(0.6, 0.8) + params["board_thickness"] = params["edge_thickness_1"] - 0.005 + if params.get("knob_R", None) is None: + params["knob_R"] = uniform(0.003, 0.006) + if params.get("knob_length", None) is None: + params["knob_length"] = uniform(0.018, 0.035) + if params.get("attach_height", None) is None: + gap = uniform(0.05, 0.15) + params["attach_height"] = [gap, params["door_height"] - gap] + if params.get("has_mid_ramp", None) is None: + params["has_mid_ramp"] = np.random.choice([True, False], p=[0.6, 0.4]) + if params.get("door_left_hinge", None) is None: + params["door_left_hinge"] = False + + if params.get("frame_material", None) is None: + params["frame_material"] = np.random.choice( + ["white", "black_wood", "wood"], p=[0.5, 0.2, 0.3] + ) + if params.get("panel_material", None) is None: + if params["has_mid_ramp"]: + lower_mat = np.random.choice( + [params["frame_material"], "glass"], p=[0.7, 0.3] + ) + upper_mat = np.random.choice([lower_mat, "glass"], p=[0.6, 0.4]) + params["panel_material"] = [lower_mat, upper_mat] + else: + params["panel_material"] = 
[params["frame_material"]] + + params = self.get_material_func(params) + return params + + def get_material_func(self, params, randomness=True): + white_wood_params = shader_shelves_white_sampler() + black_wood_params = shader_shelves_black_wood_sampler() + normal_wood_params = shader_shelves_wood_sampler() + if params["frame_material"] == "white": + if randomness: + params["frame_material"] = lambda x: shader_shelves_white( + x, **white_wood_params + ) + else: + params["frame_material"] = shader_shelves_white + elif params["frame_material"] == "black_wood": + if randomness: + params["frame_material"] = lambda x: shader_shelves_black_wood( + x, **black_wood_params, z_axis_texture=True + ) + else: + params["frame_material"] = lambda x: shader_shelves_black_wood( + x, z_axis_texture=True + ) + elif params["frame_material"] == "wood": + if randomness: + params["frame_material"] = lambda x: shader_shelves_wood( + x, **normal_wood_params, z_axis_texture=True + ) + else: + params["frame_material"] = lambda x: shader_shelves_wood( + x, z_axis_texture=True + ) + + materials = [] + if not isinstance(params["panel_material"], list): + params["panel_material"] = [params["board_material"]] + for mat in params["panel_material"]: + if mat == "white": + if randomness: + + def mat(x): + return shader_shelves_white(x, **white_wood_params) + else: + mat = shader_shelves_white + elif mat == "black_wood": + if randomness: + + def mat(x): + return shader_shelves_black_wood( + x, **black_wood_params, z_axis_texture=True + ) + else: + + def mat(x): + return shader_shelves_black_wood(x, z_axis_texture=True) + elif mat == "wood": + if randomness: + + def mat(x): + return shader_shelves_wood( + x, **normal_wood_params, z_axis_texture=True + ) + else: + + def mat(x): + return shader_shelves_wood(x, z_axis_texture=True) + elif mat == "glass": + if randomness: + + def mat(x): + return shader_glass(x) + else: + mat = shader_glass + materials.append(mat) + params["panel_material"] = materials + return params + + def create_asset(self, i=0, **params): + bpy.ops.mesh.primitive_plane_add( + size=1, + enter_editmode=False, + align="WORLD", + location=(0, 0, 0), + scale=(1, 1, 1), + ) + obj = bpy.context.active_object + + obj_params = self.get_asset_params(i) + surface.add_geomod( + obj, geometry_door_nodes, apply=True, attributes=[], input_kwargs=obj_params + ) + tagging.tag_system.relabel_obj(obj) + + if params.get("ret_params", False): + return obj, obj_params + + return obj diff --git a/infinigen/assets/objects/shelves/drawers.py b/infinigen/assets/objects/shelves/drawers.py new file mode 100644 index 000000000..baed2c69e --- /dev/null +++ b/infinigen/assets/objects/shelves/drawers.py @@ -0,0 +1,717 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
+ +# Authors: Beining Han + +import bpy +import numpy as np +from numpy.random import uniform + +from infinigen.assets.materials import metal +from infinigen.assets.materials.shelf_shaders import ( + shader_shelves_black_wood, + shader_shelves_black_wood_sampler, + shader_shelves_white, + shader_shelves_white_sampler, + shader_shelves_wood, + shader_shelves_wood_sampler, +) +from infinigen.core import surface +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.placement.factory import AssetFactory + + +@node_utils.to_nodegroup( + "nodegroup_board_rail", singleton=False, type="GeometryNodeTree" +) +def nodegroup_board_rail(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + cylinder_1 = nw.new_node( + "GeometryNodeMeshCylinder", + input_kwargs={"Vertices": 64, "Radius": 0.0040, "Depth": 0.0050}, + ) + + store_named_attribute_2 = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": cylinder_1.outputs["Mesh"], + "Name": "uv_map", + 3: cylinder_1.outputs["UV Map"], + }, + attrs={"data_type": "FLOAT_VECTOR", "domain": "CORNER"}, + ) + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "width", 0.0000), + ("NodeSocketFloat", "thickness", 0.5000), + ("NodeSocketFloat", "depth", 0.5000), + ], + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["depth"], 1: 0.0000} + ) + + multiply = nw.new_node( + Nodes.Math, input_kwargs={0: add, 1: -0.5000}, attrs={"operation": "MULTIPLY"} + ) + + add_1 = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: 0.0200}) + + combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Y": add_1}) + + transform_5 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": store_named_attribute_2, + "Translation": combine_xyz_3, + "Rotation": (0.0000, 1.5708, 0.0000), + }, + ) + + subtract = nw.new_node( + Nodes.Math, input_kwargs={0: add, 1: 0.0300}, attrs={"operation": "SUBTRACT"} + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": 0.0020, "Y": subtract, "Z": group_input.outputs["width"]}, + ) + + cube = nw.new_node(Nodes.MeshCube, input_kwargs={"Size": combine_xyz}) + + store_named_attribute = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": cube.outputs["Mesh"], + "Name": "uv_map", + 3: cube.outputs["UV Map"], + }, + attrs={"data_type": "FLOAT_VECTOR", "domain": "CORNER"}, + ) + + transform = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": store_named_attribute} + ) + + cylinder = nw.new_node( + "GeometryNodeMeshCylinder", + input_kwargs={"Vertices": 64, "Radius": 0.0030, "Depth": subtract}, + ) + + store_named_attribute_1 = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": cylinder.outputs["Mesh"], + "Name": "uv_map", + 3: cylinder.outputs["UV Map"], + }, + attrs={"data_type": "FLOAT_VECTOR", "domain": "CORNER"}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["width"]}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply_1}) + + transform_1 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": store_named_attribute_1, + "Translation": combine_xyz_1, + "Rotation": (1.5708, 0.0000, 0.0000), + }, + ) + + transform_2 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": transform_1, "Scale": (1.0000, 1.0000, -1.0000)}, + ) + + join_geometry_2 = nw.new_node( + 
Nodes.JoinGeometry, input_kwargs={"Geometry": [transform_2, transform_1]} + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [transform_5, transform, join_geometry_2]}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["thickness"]}, + attrs={"operation": "MULTIPLY"}, + ) + + add_2 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_2, 1: 0.0030}) + + multiply_3 = nw.new_node( + Nodes.Math, input_kwargs={0: add, 1: -0.5000}, attrs={"operation": "MULTIPLY"} + ) + + add_3 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_1, 1: 0.0200}) + + combine_xyz_2 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": add_2, "Y": multiply_3, "Z": add_3} + ) + + transform_3 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": join_geometry_1, "Translation": combine_xyz_2}, + ) + + transform_4 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": transform_3, "Scale": (-1.0000, 1.0000, 1.0000)}, + ) + + join_geometry_3 = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [transform_4, transform_3]} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": join_geometry_3}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_kallax_drawer_frame", singleton=False, type="GeometryNodeTree" +) +def nodegroup_kallax_drawer_frame(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "depth", 0.5000), + ("NodeSocketFloat", "height", 0.5000), + ("NodeSocketFloat", "thickness", 0.5000), + ("NodeSocketFloat", "width", 0.5000), + ], + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["thickness"], 1: 0.0000} + ) + + add_1 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["depth"], 1: 0.0000} + ) + + add_2 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["height"], 1: 0.0000} + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": add, "Y": add_1, "Z": add_2} + ) + + cube = nw.new_node( + Nodes.MeshCube, + input_kwargs={ + "Size": combine_xyz, + "Vertices X": 4, + "Vertices Y": 4, + "Vertices Z": 4, + }, + ) + + store_named_attribute_1 = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": cube.outputs["Mesh"], + "Name": "uv_map", + 3: cube.outputs["UV Map"], + }, + attrs={"data_type": "FLOAT_VECTOR", "domain": "CORNER"}, + ) + + add_3 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["width"], 1: 0.0000} + ) + + multiply = nw.new_node( + Nodes.Math, input_kwargs={0: add_3}, attrs={"operation": "MULTIPLY"} + ) + + multiply_1 = nw.new_node( + Nodes.Math, input_kwargs={0: add_1, 1: -0.5000}, attrs={"operation": "MULTIPLY"} + ) + + add_4 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_1, 1: -0.0001}) + + multiply_add = nw.new_node( + Nodes.Math, + input_kwargs={0: add_2, 2: 0.0100}, + attrs={"operation": "MULTIPLY_ADD"}, + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": multiply, "Y": add_4, "Z": multiply_add} + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": store_named_attribute_1, + "Translation": combine_xyz_1, + }, + ) + + transform_1 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": transform, "Scale": (-1.0000, 1.0000, 1.0000)}, + ) + + add_5 = nw.new_node(Nodes.Math, input_kwargs={0: add, 1: -0.0001}) + + add_6 = nw.new_node(Nodes.Math, input_kwargs={0: 
add_3, 1: add_5}) + + combine_xyz_2 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": add_6, "Y": add_1, "Z": add} + ) + + cube_1 = nw.new_node( + Nodes.MeshCube, + input_kwargs={ + "Size": combine_xyz_2, + "Vertices X": 4, + "Vertices Y": 4, + "Vertices Z": 4, + }, + ) + + store_named_attribute_2 = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": cube_1.outputs["Mesh"], + "Name": "uv_map", + 3: cube_1.outputs["UV Map"], + }, + attrs={"data_type": "FLOAT_VECTOR", "domain": "CORNER"}, + ) + + multiply_add_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: add_1, 1: -0.5000, 2: -0.0001}, + attrs={"operation": "MULTIPLY_ADD"}, + ) + + combine_xyz_3 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"Y": multiply_add_1, "Z": 0.0100} + ) + + transform_2 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": store_named_attribute_2, + "Translation": combine_xyz_3, + }, + ) + + combine_xyz_4 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": add_3, "Y": add, "Z": add_2} + ) + + cube_2 = nw.new_node( + Nodes.MeshCube, + input_kwargs={ + "Size": combine_xyz_4, + "Vertices X": 4, + "Vertices Y": 4, + "Vertices Z": 4, + }, + ) + + store_named_attribute = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": cube_2.outputs["Mesh"], + "Name": "uv_map", + 3: cube_2.outputs["UV Map"], + }, + attrs={"data_type": "FLOAT_VECTOR", "domain": "CORNER"}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, input_kwargs={0: add}, attrs={"operation": "MULTIPLY"} + ) + + multiply_add_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: add_1, 1: -1.0000, 2: multiply_2}, + attrs={"operation": "MULTIPLY_ADD"}, + ) + + multiply_add_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: add_2, 2: 0.0100}, + attrs={"operation": "MULTIPLY_ADD"}, + ) + + combine_xyz_5 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"Y": multiply_add_2, "Z": multiply_add_3} + ) + + transform_3 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": store_named_attribute, "Translation": combine_xyz_5}, + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [transform_1, transform, transform_2, transform_3]}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": join_geometry_1}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_door_knob", singleton=False, type="GeometryNodeTree" +) +def nodegroup_door_knob(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloatDistance", "Radius", 0.0040), + ("NodeSocketFloat", "length", 0.5000), + ("NodeSocketFloat", "z", 0.5000), + ], + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["length"], 1: 0.0000} + ) + + cylinder = nw.new_node( + "GeometryNodeMeshCylinder", + input_kwargs={ + "Vertices": 64, + "Radius": group_input.outputs["Radius"], + "Depth": add, + }, + ) + + store_named_attribute = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": cylinder.outputs["Mesh"], + "Name": "uv_map", + 3: cylinder.outputs["UV Map"], + }, + attrs={"data_type": "FLOAT_VECTOR", "domain": "CORNER"}, + ) + + multiply = nw.new_node( + Nodes.Math, input_kwargs={0: add}, attrs={"operation": "MULTIPLY"} + ) + + add_1 = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: 0.0001}) + + add_2 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["z"], 1: 0.0000} + ) + + multiply_1 = nw.new_node( + 
Nodes.Math, input_kwargs={0: add_2}, attrs={"operation": "MULTIPLY"} + ) + + combine_xyz_2 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"Y": add_1, "Z": multiply_1} + ) + + transform_1 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": store_named_attribute, + "Translation": combine_xyz_2, + "Rotation": (1.5708, 0.0000, 0.0000), + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": transform_1}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_drawer_door_board", singleton=False, type="GeometryNodeTree" +) +def nodegroup_drawer_door_board(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "thickness", 0.5000), + ("NodeSocketFloat", "width", 0.5000), + ("NodeSocketFloat", "height", 0.5000), + ], + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["width"], 1: 0.0000} + ) + + add_1 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["thickness"], 1: 0.0000} + ) + + add_2 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["height"], 1: 0.0000} + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": add, "Y": add_1, "Z": add_2} + ) + + cube = nw.new_node( + Nodes.MeshCube, + input_kwargs={ + "Size": combine_xyz, + "Vertices X": 5, + "Vertices Y": 5, + "Vertices Z": 5, + }, + ) + + store_named_attribute = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": cube.outputs["Mesh"], + "Name": "uv_map", + 3: cube.outputs["UV Map"], + }, + attrs={"data_type": "FLOAT_VECTOR", "domain": "CORNER"}, + ) + + multiply = nw.new_node( + Nodes.Math, input_kwargs={0: add_1, 1: -0.5000}, attrs={"operation": "MULTIPLY"} + ) + + multiply_1 = nw.new_node( + Nodes.Math, input_kwargs={0: add_2}, attrs={"operation": "MULTIPLY"} + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"Y": multiply, "Z": multiply_1} + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": store_named_attribute, "Translation": combine_xyz_1}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": transform}, + attrs={"is_active_output": True}, + ) + + +def geometry_nodes(nw: NodeWrangler, **kwargs): + # Code generated using version 2.6.4 of the node_transpiler + + door_thickness = nw.new_node(Nodes.Value, label="door_thickness") + door_thickness.outputs[0].default_value = kwargs["drawer_board_thickness"] + + drawer_board_width = nw.new_node(Nodes.Value, label="drawer_board_width") + drawer_board_width.outputs[0].default_value = kwargs["drawer_board_width"] + + drawer_board_height = nw.new_node(Nodes.Value, label="drawer_board_height") + drawer_board_height.outputs[0].default_value = kwargs["drawer_board_height"] + + drawer_door_board = nw.new_node( + nodegroup_drawer_door_board().name, + input_kwargs={ + "thickness": door_thickness, + "width": drawer_board_width, + "height": drawer_board_height, + }, + ) + + knob_radius = nw.new_node(Nodes.Value, label="knob_radius") + knob_radius.outputs[0].default_value = kwargs["knob_radius"] + + knob_length = nw.new_node(Nodes.Value, label="knob_length") + knob_length.outputs[0].default_value = kwargs["knob_length"] + + door_knob = nw.new_node( + nodegroup_door_knob().name, + input_kwargs={ + "Radius": knob_radius, + "length": knob_length, + "z": drawer_board_height, + }, + ) + + drawer_depth = nw.new_node(Nodes.Value, 
label="drawer_depth") + drawer_depth.outputs[0].default_value = ( + kwargs["drawer_depth"] - kwargs["drawer_board_thickness"] + ) + + drawer_side_height = nw.new_node(Nodes.Value, label="drawer_side_height") + drawer_side_height.outputs[0].default_value = kwargs["drawer_side_height"] + + drawer_width = nw.new_node(Nodes.Value, label="drawer_width") + drawer_width.outputs[0].default_value = kwargs["drawer_width"] + + kallax_drawer_frame = nw.new_node( + nodegroup_kallax_drawer_frame().name, + input_kwargs={ + "depth": drawer_depth, + "height": drawer_side_height, + "thickness": door_thickness, + "width": drawer_width, + }, + ) + + side_tilt_width = nw.new_node(Nodes.Value, label="side_tilt_width") + side_tilt_width.outputs[0].default_value = kwargs["side_tilt_width"] + + join_geometry = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [door_knob, drawer_door_board, kallax_drawer_frame]}, + ) + + set_material_2 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": join_geometry, + "Material": surface.shaderfunc_to_material(kwargs["frame_material"]), + }, + ) + + realize_instances = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": set_material_2} + ) + + triangulate = nw.new_node( + "GeometryNodeTriangulate", input_kwargs={"Mesh": realize_instances} + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": triangulate, "Rotation": (0.0000, 0.0000, -1.5708)}, + ) + + group_output_1 = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": transform}, + attrs={"is_active_output": True}, + ) + + +class CabinetDrawerBaseFactory(AssetFactory): + def __init__(self, factory_seed, params={}, coarse=False): + super(CabinetDrawerBaseFactory, self).__init__(factory_seed, coarse=coarse) + self.params = {} + + def get_asset_params(self, i=0): + params = self.params.copy() + if params.get("drawer_board_thickness", None) is None: + params["drawer_board_thickness"] = uniform(0.005, 0.01) + if params.get("drawer_board_width", None) is None: + params["drawer_board_width"] = uniform(0.3, 0.7) + if params.get("drawer_board_height", None) is None: + params["drawer_board_height"] = uniform(0.25, 0.4) + if params.get("drawer_depth", None) is None: + params["drawer_depth"] = uniform(0.3, 0.4) + if params.get("drawer_side_height", None) is None: + params["drawer_side_height"] = uniform(0.05, 0.2) + if params.get("drawer_width", None) is None: + params["drawer_width"] = params["drawer_board_width"] - uniform( + 0.015, 0.025 + ) + if params.get("side_tilt_width", None) is None: + params["side_tilt_width"] = uniform(0.02, 0.03) + if params.get("knob_radius", None) is None: + params["knob_radius"] = uniform(0.003, 0.006) + if params.get("knob_length", None) is None: + params["knob_length"] = uniform(0.018, 0.035) + + if params.get("frame_material", None) is None: + params["frame_material"] = np.random.choice( + ["white", "black_wood", "wood"], p=[0.5, 0.2, 0.3] + ) + if params.get("knob_material", None) is None: + params["knob_material"] = np.random.choice( + [params["frame_material"], "metal"], p=[0.5, 0.5] + ) + + params = self.get_material_func(params) + return params + + def get_material_func(self, params, randomness=True): + white_wood_params = shader_shelves_white_sampler() + black_wood_params = shader_shelves_black_wood_sampler() + normal_wood_params = shader_shelves_wood_sampler() + if params["frame_material"] == "white": + if randomness: + params["frame_material"] = lambda x: shader_shelves_white( + x, **white_wood_params + ) + else: + 
params["frame_material"] = shader_shelves_white + elif params["frame_material"] == "black_wood": + if randomness: + params["frame_material"] = lambda x: shader_shelves_black_wood( + x, **black_wood_params, z_axis_texture=True + ) + else: + params["frame_material"] = lambda x: shader_shelves_black_wood( + x, z_axis_texture=True + ) + elif params["frame_material"] == "wood": + if randomness: + params["frame_material"] = lambda x: shader_shelves_wood( + x, **normal_wood_params, z_axis_texture=True + ) + else: + params["frame_material"] = lambda x: shader_shelves_wood( + x, z_axis_texture=True + ) + + if params["knob_material"] == "metal": + params["knob_material"] = metal.get_shader() + else: + params["knob_material"] = params["frame_material"] + + return params + + def create_asset(self, i=0, **params): + bpy.ops.mesh.primitive_plane_add( + size=1, + enter_editmode=False, + align="WORLD", + location=(0, 0, 0), + scale=(1, 1, 1), + ) + obj = bpy.context.active_object + + obj_params = self.get_asset_params(i) + surface.add_geomod( + obj, geometry_nodes, apply=True, attributes=[], input_kwargs=obj_params + ) + + if params.get("ret_params", False): + return obj, obj_params + + return obj diff --git a/infinigen/assets/objects/shelves/kitchen_cabinet.py b/infinigen/assets/objects/shelves/kitchen_cabinet.py new file mode 100644 index 000000000..d2d487ebe --- /dev/null +++ b/infinigen/assets/objects/shelves/kitchen_cabinet.py @@ -0,0 +1,423 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Beining Han + +import bpy +import numpy as np +from numpy.random import uniform + +from infinigen.assets.materials.shelf_shaders import ( + shader_shelves_black_wood, + shader_shelves_black_wood_sampler, + shader_shelves_white, + shader_shelves_white_sampler, + shader_shelves_wood, + shader_shelves_wood_sampler, +) +from infinigen.assets.objects.shelves.doors import CabinetDoorBaseFactory +from infinigen.assets.objects.shelves.drawers import CabinetDrawerBaseFactory +from infinigen.assets.objects.shelves.large_shelf import LargeShelfBaseFactory +from infinigen.assets.utils.object import new_bbox +from infinigen.core import surface, tagging +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.util import blender as butil +from infinigen.core.util.math import FixedSeed + + +def geometry_nodes(nw: NodeWrangler, **kwargs): + # Code generated using version 2.6.4 of the node_transpiler + cabinets = [] + for i, component in enumerate(kwargs["components"]): + frame_info = nw.new_node( + Nodes.ObjectInfo, input_kwargs={"Object": component[0]} + ) + + attachments = [] + if component[1] == "door": + right_door_info = nw.new_node( + Nodes.ObjectInfo, input_kwargs={"Object": component[2][0]} + ) + left_door_info = nw.new_node( + Nodes.ObjectInfo, input_kwargs={"Object": component[2][1]} + ) + + transform_r = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": right_door_info.outputs["Geometry"], + "Translation": component[2][2]["door_hinge_pos"][0], + "Rotation": (0, 0, component[2][2]["door_open_angle"]), + }, + ) + attachments.append(transform_r) + if len(component[2][2]["door_hinge_pos"]) > 1: + transform_l = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": left_door_info.outputs["Geometry"], + "Translation": component[2][2]["door_hinge_pos"][1], + "Rotation": (0, 0, 
component[2][2]["door_open_angle"]), + }, + ) + attachments.append(transform_l) + elif component[1] == "drawer": + for j, drawer in enumerate(component[2]): + drawer_info = nw.new_node( + Nodes.ObjectInfo, input_kwargs={"Object": drawer[0]} + ) + transform = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": drawer_info.outputs["Geometry"], + "Translation": drawer[1]["drawer_hinge_pos"], + }, + ) + attachments.append(transform) + else: + continue + + join_geometry = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": attachments} + ) + # [frame_info.outputs['Geometry']]}) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": join_geometry, + "Translation": (0, kwargs["y_translations"][i], 0), + }, + ) + cabinets.append(transform) + + try: + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": cabinets} + ) + except TypeError: + import pdb + + pdb.set_trace() + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": join_geometry_1}, + attrs={"is_active_output": True}, + ) + + +class KitchenCabinetBaseFactory(AssetFactory): + def __init__(self, factory_seed, params={}, coarse=False): + super(KitchenCabinetBaseFactory, self).__init__(factory_seed, coarse=coarse) + self.frame_params = {} + self.material_params = {} + self.cabinet_widths = [] + self.frame_fac = LargeShelfBaseFactory(factory_seed) + self.door_fac = CabinetDoorBaseFactory(factory_seed) + self.drawer_fac = CabinetDrawerBaseFactory(factory_seed) + self.drawer_only = False + with FixedSeed(factory_seed): + self.params = self.sample_params() + + def sample_params(self): + pass + + def get_material_params(self): + with FixedSeed(self.factory_seed): + params = self.material_params.copy() + if params.get("frame_material", None) is None: + with FixedSeed(self.factory_seed): + params["frame_material"] = np.random.choice( + ["white", "black_wood", "wood"], p=[0.4, 0.3, 0.3] + ) + params["board_material"] = params["frame_material"] + return self.get_material_func(params, randomness=True) + + def get_material_func(self, params, randomness=True): + with FixedSeed(self.factory_seed): + white_wood_params = shader_shelves_white_sampler() + black_wood_params = shader_shelves_black_wood_sampler() + normal_wood_params = shader_shelves_wood_sampler() + if params["frame_material"] == "white": + if randomness: + params["frame_material"] = lambda x: shader_shelves_white( + x, **white_wood_params + ) + else: + params["frame_material"] = shader_shelves_white + elif params["frame_material"] == "black_wood": + if randomness: + params["frame_material"] = lambda x: shader_shelves_black_wood( + x, **black_wood_params, z_axis_texture=True + ) + else: + params["frame_material"] = lambda x: shader_shelves_black_wood( + x, z_axis_texture=True + ) + elif params["frame_material"] == "wood": + if randomness: + params["frame_material"] = lambda x: shader_shelves_wood( + x, **normal_wood_params, z_axis_texture=True + ) + else: + params["frame_material"] = lambda x: shader_shelves_wood( + x, z_axis_texture=True + ) + + if params["board_material"] == "white": + if randomness: + params["board_material"] = lambda x: shader_shelves_white( + x, **white_wood_params + ) + else: + params["board_material"] = shader_shelves_white + elif params["board_material"] == "black_wood": + if randomness: + params["board_material"] = lambda x: shader_shelves_black_wood( + x, **black_wood_params + ) + else: + params["board_material"] = shader_shelves_black_wood + elif params["board_material"] == 
"wood": + if randomness: + params["board_material"] = lambda x: shader_shelves_wood( + x, **normal_wood_params + ) + else: + params["board_material"] = shader_shelves_wood + + params["panel_meterial"] = params["frame_material"] + params["knob_material"] = params["frame_material"] + return params + + def get_frame_params(self, width, i=0): + params = self.frame_params.copy() + params["shelf_cell_width"] = [width] + params.update(self.material_params.copy()) + return params + + def get_attach_params(self, attach_type, i=0): + param_sets = [] + if attach_type == "none": + pass + elif attach_type == "door": + params = dict() + shelf_width = ( + self.frame_params["shelf_width"] + + self.frame_params["side_board_thickness"] * 2 + ) + if shelf_width <= 0.6: + params["door_width"] = shelf_width + params["has_mid_ramp"] = False + params["edge_thickness_1"] = 0.01 + params["door_hinge_pos"] = [ + ( + self.frame_params["shelf_depth"] / 2.0 + 0.0025, + -shelf_width / 2.0, + self.frame_params["bottom_board_height"], + ) + ] + params["door_open_angle"] = 0 + else: + params["door_width"] = shelf_width / 2.0 - 0.0005 + params["has_mid_ramp"] = False + params["edge_thickness_1"] = 0.01 + params["door_hinge_pos"] = [ + ( + self.frame_params["shelf_depth"] / 2.0 + 0.008, + -shelf_width / 2.0, + self.frame_params["bottom_board_height"], + ), + ( + self.frame_params["shelf_depth"] / 2.0 + 0.008, + shelf_width / 2.0, + self.frame_params["bottom_board_height"], + ), + ] + params["door_open_angle"] = 0 + + params["door_height"] = ( + self.frame_params["division_board_z_translation"][-1] + - self.frame_params["division_board_z_translation"][0] + + self.frame_params["division_board_thickness"] + ) + params.update(self.material_params.copy()) + param_sets.append(params) + elif attach_type == "drawer": + for i, h in enumerate(self.frame_params["shelf_cell_height"]): + params = dict() + drawer_h = ( + self.frame_params["division_board_z_translation"][i + 1] + - self.frame_params["division_board_z_translation"][i] + - self.frame_params["division_board_thickness"] + ) + drawer_depth = self.frame_params["shelf_depth"] + params["drawer_board_width"] = self.frame_params["shelf_width"] + params["drawer_board_height"] = drawer_h + params["drawer_depth"] = drawer_depth + params["drawer_hinge_pos"] = ( + self.frame_params["shelf_depth"] / 2.0, + 0, + ( + self.frame_params["division_board_thickness"] / 2.0 + + self.frame_params["division_board_z_translation"][i] + ), + ) + params.update(self.material_params.copy()) + param_sets.append(params) + else: + raise NotImplementedError + + return param_sets + + def get_cabinet_params(self, i=0): + x_translations = [] + accum_w, thickness = ( + 0, + self.frame_params.get("side_board_thickness", 0.005), + ) # instructed by Beining + for w in self.cabinet_widths: + accum_w += thickness + w / 2.0 + x_translations.append(accum_w) + accum_w += thickness + w / 2.0 + 0.0005 + return x_translations + + def create_cabinet_components(self, i, drawer_only=False): + # update material params + self.material_params = self.get_material_params() + + components = [] + for k, w in enumerate(self.cabinet_widths): + # create frame + frame_params = self.get_frame_params(w, i=i) + self.frame_fac.params = frame_params + frame, frame_params = self.frame_fac.create_asset(i=i, ret_params=True) + frame.name = f"cabinet_frame_{k}" + self.frame_params = frame_params + + # create attach + if drawer_only: + attach_type = np.random.choice(["drawer", "door"], p=[0.5, 0.5]) + else: + attach_type = np.random.choice( + 
["drawer", "door", "none"], p=[0.4, 0.4, 0.2] + ) + + attach_params = self.get_attach_params(attach_type, i=i) + if attach_type == "door": + self.door_fac.params = attach_params[0] + self.door_fac.params["door_left_hinge"] = False + right_door, door_obj_params = self.door_fac.create_asset( + i=i, ret_params=True + ) + right_door.name = f"cabinet_right_door_{k}" + self.door_fac.params = door_obj_params + self.door_fac.params["door_left_hinge"] = True + left_door, _ = self.door_fac.create_asset(i=i, ret_params=True) + left_door.name = f"cabinet_left_door_{k}" + components.append( + [frame, "door", [right_door, left_door, attach_params[0]]] + ) + + elif attach_type == "drawer": + drawers = [] + for j, p in enumerate(attach_params): + self.drawer_fac.params = p + drawer = self.drawer_fac.create_asset(i=i) + drawer.name = f"drawer_{k}_layer{j}" + drawers.append([drawer, p]) + components.append([frame, "drawer", drawers]) + + elif attach_type == "none": + components.append([frame, "none"]) + + else: + raise NotImplementedError + + return components + + def create_asset(self, i=0, **params): + components = self.create_cabinet_components(i=i, drawer_only=self.drawer_only) + cabinet_params = self.get_cabinet_params(i=i) + join_objs = [] + + contain_attach = False + for com in components: + if com[1] == "none": + continue + else: + contain_attach = True + + if contain_attach: + bpy.ops.mesh.primitive_plane_add( + size=1, + enter_editmode=False, + align="WORLD", + location=(0, 0, 0), + scale=(1, 1, 1), + ) + obj = bpy.context.active_object + surface.add_geomod( + obj, + geometry_nodes, + attributes=[], + input_kwargs={ + "components": components, + "y_translations": cabinet_params, + }, + apply=True, + ) + + join_objs += [obj] + + for i, c in enumerate(components): + if c[1] == "door": + butil.delete(c[2][:-1]) + elif c[1] == "drawer": + butil.delete([x[0] for x in c[2]]) + c[0].location = (0, cabinet_params[i], 0) + butil.apply_transform(c[0], loc=True) + join_objs.append(c[0]) + + # butil.delete(c[:1]) + obj = butil.join_objects(join_objs) + tagging.tag_system.relabel_obj(obj) + + return obj + + +class KitchenCabinetFactory(KitchenCabinetBaseFactory): + def __init__( + self, factory_seed, params={}, coarse=False, dimensions=None, drawer_only=False + ): + self.dimensions = dimensions + super().__init__(factory_seed, params, coarse) + self.drawer_only = drawer_only + + def sample_params(self): + params = dict() + if self.dimensions is None: + dimensions = (uniform(0.25, 0.35), uniform(1.0, 4.0), uniform(0.5, 1.3)) + self.dimensions = dimensions + else: + dimensions = self.dimensions + params["Dimensions"] = dimensions + + params["bottom_board_height"] = 0.06 + params["shelf_depth"] = params["Dimensions"][0] - 0.01 + num_h = int((params["Dimensions"][2] - 0.06) / 0.3) + params["shelf_cell_height"] = [ + (params["Dimensions"][2] - 0.06) / num_h for _ in range(num_h) + ] + + self.frame_params = params + + n_cells = max(int(params["Dimensions"][1] / 0.45), 1) + intervals = np.random.uniform(0.55, 1.0, size=(n_cells,)) + intervals = intervals / intervals.sum() * params["Dimensions"][1] + self.cabinet_widths = intervals.tolist() + + def create_placeholder(self, **kwargs) -> bpy.types.Object: + x, y, z = self.dimensions + return new_bbox(-x / 2 * 1.2, x / 2 * 1.2, 0, y * 1.1, 0, (z + 0.06) * 1.03) diff --git a/infinigen/assets/objects/shelves/kitchen_space.py b/infinigen/assets/objects/shelves/kitchen_space.py new file mode 100644 index 000000000..a13f5776a --- /dev/null +++ 
b/infinigen/assets/objects/shelves/kitchen_space.py @@ -0,0 +1,326 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Yiming Zuo, Stamatis Alexandropoulos + +import bpy +from mathutils import Vector +from numpy.random import choice, uniform + +from infinigen.assets.materials.table_materials import shader_marble +from infinigen.assets.objects.shelves.kitchen_cabinet import KitchenCabinetFactory +from infinigen.assets.objects.tables.table_top import nodegroup_generate_table_top +from infinigen.assets.objects.wall_decorations.range_hood import RangeHoodFactory +from infinigen.assets.utils.object import new_bbox +from infinigen.core import surface, tagging +from infinigen.core import tags as t +from infinigen.core.constraints.example_solver.room.constants import ( + WALL_HEIGHT, + WALL_THICKNESS, +) +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.util import blender as butil +from infinigen.core.util.math import FixedSeed + + +def nodegroup_tag_cube(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) + + index = nw.new_node(Nodes.Index) + + equal = nw.new_node( + Nodes.Compare, + input_kwargs={2: index, 3: 5}, + attrs={"data_type": "INT", "operation": "EQUAL"}, + ) + + cube = tagging.tag_nodegroup( + nw, group_input.outputs["Geometry"], t.Subpart.SupportSurface, selection=equal + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": cube}, + attrs={"is_active_output": True}, + ) + + +def geometry_nodes_add_cabinet_top(nw: NodeWrangler): + # Code generated using version 2.6.5 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) + + value = nw.new_node(Nodes.Value) + value.outputs[0].default_value = 0.0500 + + bounding_box = nw.new_node( + Nodes.BoundingBox, input_kwargs={"Geometry": group_input.outputs["Geometry"]} + ) + + separate_xyz_1 = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": bounding_box.outputs["Max"]} + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": bounding_box.outputs["Min"]} + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_1.outputs["X"], 1: separate_xyz.outputs["X"]}, + attrs={"operation": "SUBTRACT"}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract, 1: 1.4140}, + attrs={"operation": "MULTIPLY"}, + ) + + subtract_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_1.outputs["Y"], 1: separate_xyz.outputs["Y"]}, + attrs={"operation": "SUBTRACT"}, + ) + + divide = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract_1, 1: subtract}, + attrs={"operation": "DIVIDE"}, + ) + + generatetabletop = nw.new_node( + nodegroup_generate_table_top().name, + input_kwargs={ + "Thickness": value, + "N-gon": 4, + "Profile Width": multiply, + "Aspect Ratio": divide, + "Fillet Ratio": 0.0100, + "Fillet Radius Vertical": 0.0100, + }, + ) + + set_material = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": generatetabletop, + "Material": surface.shaderfunc_to_material(shader_marble), + }, + ) + + add = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["Y"], 1: 
separate_xyz_1.outputs["Y"]}, + ) + + divide_1 = nw.new_node( + Nodes.Math, input_kwargs={0: add, 1: 2.0000}, attrs={"operation": "DIVIDE"} + ) + + separate_xyz_2 = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": bounding_box.outputs["Max"]} + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"Y": divide_1, "Z": separate_xyz_2.outputs["Z"]} + ) + + transform_geometry = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": set_material, "Translation": combine_xyz}, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={ + "Geometry": [group_input.outputs["Geometry"], transform_geometry] + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": join_geometry}, + attrs={"is_active_output": True}, + ) + + +def geometry_node_to_tagged_bbox(nw: NodeWrangler): + # Code generated using version 2.6.5 of the node_transpiler + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) + + bounding_box = nw.new_node( + Nodes.BoundingBox, input_kwargs={"Geometry": group_input.outputs["Geometry"]} + ) + + transform_geometry = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": bounding_box, "Scale": (0.9700, 0.9700, 1.000)}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": transform_geometry}, + attrs={"is_active_output": True}, + ) + + +def geometry_node_to_bbox(nw: NodeWrangler): + # Code generated using version 2.6.5 of the node_transpiler + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) + + bounding_box = nw.new_node( + Nodes.BoundingBox, input_kwargs={"Geometry": group_input.outputs["Geometry"]} + ) + + transform_geometry = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": bounding_box, "Scale": (0.9700, 0.9700, 1.000)}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": transform_geometry}, + attrs={"is_active_output": True}, + ) + + +class KitchenSpaceFactory(AssetFactory): + def __init__(self, factory_seed, coarse=False, dimensions=None, island=False): + super(KitchenSpaceFactory, self).__init__(factory_seed, coarse=coarse) + + with FixedSeed(factory_seed): + if dimensions is None: + dimensions = Vector( + ( + uniform(0.7, 1), + uniform(1.7, 5), + uniform(2.3, WALL_HEIGHT - WALL_THICKNESS), + ) + ) + + self.island = island + if self.island: + dimensions.x *= uniform(1.5, 2) + + self.dimensions = dimensions + + self.params = self.sample_parameters(dimensions) + + def sample_parameters(self, dimensions): + self.cabinet_bottom_height = uniform(0.8, 1.0) + self.cabinet_top_height = uniform(0.8, 1.0) + + def create_placeholder(self, **kwargs) -> bpy.types.Object: + x, y, z = self.dimensions + box = new_bbox( + -x / 2 * 1.08, x / 2 * 1.08, 0, y, 0, self.cabinet_bottom_height + 0.13 + ) + surface.add_geomod(box, nodegroup_tag_cube, apply=True) + + if not self.island: + box_top = new_bbox( + -x / 2, x * 0.16, 0, y, z - self.cabinet_top_height - 0.1, z + ) + box = butil.join_objects([box, box_top]) + + return box + + def create_asset(self, **params): + x, y, z = self.dimensions + parts = [] + + cabinet_bottom_height = self.cabinet_bottom_height + cabinet_top_height = self.cabinet_top_height + + cabinet_bottom_factory = KitchenCabinetFactory( + self.factory_seed, + dimensions=(x, y - 0.15, cabinet_bottom_height), + drawer_only=True, + ) + cabinet_bottom = cabinet_bottom_factory(i=0) + parts.append(cabinet_bottom) + + 
surface.add_geomod(cabinet_bottom, geometry_nodes_add_cabinet_top, apply=True) + + if not self.island: + # top + top_mid_width = uniform(1.0, 1.3) + cabinet_top_width = (y - top_mid_width) / 2.0 - 0.05 + + cabinet_top_factory = KitchenCabinetFactory( + self.factory_seed, + dimensions=(x / 2.0, cabinet_top_width, cabinet_top_height), + drawer_only=False, + ) + cabinet_top_left = cabinet_top_factory(i=0) + cabinet_top_right = cabinet_top_factory(i=1) + + cabinet_top_left.location = (-x / 4.0, 0.0, z - cabinet_top_height) + cabinet_top_right.location = ( + -x / 4.0, + y - cabinet_top_width, + z - cabinet_top_height, + ) + + # hood / cab + # mid_style = choice(['range_hood', 'cabinet']) + # mid_style = 'range_hood' + mid_style = choice(["cabinet"]) + if mid_style == "range_hood": + range_hood_factory = RangeHoodFactory( + self.factory_seed, + dimensions=(x * 0.66, top_mid_width + 0.15, cabinet_top_height), + ) + top_mid = range_hood_factory(i=0) + top_mid.location = (-x * 0.5, y / 2.0, z - cabinet_top_height + 0.05) + + elif mid_style == "cabinet": + cabinet_top_mid_factory = KitchenCabinetFactory( + self.factory_seed, + dimensions=(x * 0.66, top_mid_width, cabinet_top_height * 0.8), + drawer_only=False, + ) + top_mid = cabinet_top_mid_factory(i=0) + top_mid.location = ( + -x / 6.0, + y / 2.0 - top_mid_width / 2.0, + z - (cabinet_top_height * 0.8), + ) + + else: + raise NotImplementedError + + # parts += [sink, cabinet_top_left, cabinet_top_right, top_mid] + parts += [cabinet_top_left, cabinet_top_right, top_mid] + + kitchen_space = butil.join_objects( + parts + ) # [cabinet_bottom, sink, cabinet_top_left, cabinet_top_right, top_mid]) + + if not self.island: + kitchen_space.dimensions = self.dimensions + butil.apply_transform(kitchen_space) + + tagging.tag_system.relabel_obj(kitchen_space) + + return kitchen_space + + +class KitchenIslandFactory(KitchenSpaceFactory): + def __init__(self, factory_seed): + super(KitchenIslandFactory, self).__init__( + factory_seed=factory_seed, + island=True, + ) diff --git a/infinigen/assets/objects/shelves/large_shelf.py b/infinigen/assets/objects/shelves/large_shelf.py new file mode 100644 index 000000000..ae6ffe7e8 --- /dev/null +++ b/infinigen/assets/objects/shelves/large_shelf.py @@ -0,0 +1,966 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
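+# Large-shelf asset generator: geometry-node groups build the side, back, bottom and division boards
+# (with screw-head and wall-attachment details); LargeShelfBaseFactory samples dimensions, screw/attach
+# parameters and white / black-wood / wood shaders, and LargeShelfFactory derives shelf cell sizes from
+# the overall Dimensions.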
+ +# Authors: Beining Han + +import bpy +import numpy as np +from numpy.random import normal, randint, uniform + +from infinigen.assets.materials.shelf_shaders import ( + shader_shelves_black_wood, + shader_shelves_black_wood_sampler, + shader_shelves_white, + shader_shelves_white_sampler, + shader_shelves_wood, + shader_shelves_wood_sampler, +) +from infinigen.assets.objects.shelves.utils import nodegroup_tagged_cube +from infinigen.core import surface, tagging +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.placement.factory import AssetFactory + + +@node_utils.to_nodegroup( + "nodegroup_screw_head", singleton=False, type="GeometryNodeTree" +) +def nodegroup_screw_head(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloatDistance", "Depth", 0.0050), + ("NodeSocketFloatDistance", "Radius", 1.0000), + ("NodeSocketFloat", "division_thickness", 0.5000), + ("NodeSocketFloat", "width", 0.5000), + ("NodeSocketFloat", "depth", 0.5000), + ("NodeSocketFloat", "screw_width_gap", 0.5000), + ("NodeSocketFloat", "screw_depth_gap", 0.0000), + ], + ) + + cylinder = nw.new_node( + "GeometryNodeMeshCylinder", + input_kwargs={ + "Radius": group_input.outputs["Radius"], + "Depth": group_input.outputs["Depth"], + }, + attrs={"fill_type": "TRIANGLE_FAN"}, + ) + + transform = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": cylinder.outputs["Mesh"]} + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["width"]}, + attrs={"operation": "MULTIPLY"}, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply, 1: group_input.outputs["screw_width_gap"]}, + attrs={"operation": "SUBTRACT"}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["depth"]}, + attrs={"operation": "MULTIPLY"}, + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["screw_width_gap"], 1: 0.0000} + ) + + subtract_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_1, 1: add}, + attrs={"operation": "SUBTRACT"}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract_1, 1: -1.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["division_thickness"], 1: -0.5000}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": subtract, "Y": multiply_2, "Z": multiply_3} + ) + + transform_1 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": transform, "Translation": combine_xyz}, + ) + + combine_xyz_4 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": subtract, "Y": subtract_1, "Z": multiply_3} + ) + + transform_6 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": transform, "Translation": combine_xyz_4}, + ) + + join_geometry_2 = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [transform_1, transform_6]} + ) + + transform_4 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": join_geometry_2, "Scale": (-1.0000, 1.0000, 1.0000)}, + ) + + join_geometry_3 = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [transform_4, join_geometry_2]} + ) + + realize_instances = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": join_geometry_3} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": 
realize_instances}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_attachment", singleton=False, type="GeometryNodeTree" +) +def nodegroup_attachment(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "attach_thickness", 0.0000), + ("NodeSocketFloat", "attach_length", 0.0000), + ("NodeSocketFloat", "attach_z_translation", 0.0000), + ("NodeSocketFloat", "depth", 0.5000), + ("NodeSocketFloat", "width", 0.5000), + ("NodeSocketFloat", "attach_gap", 0.5000), + ("NodeSocketFloat", "attach_width", 0.5000), + ], + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["attach_width"], 1: 0.0000} + ) + + add_1 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["attach_length"], 1: 0.0000} + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": add, + "Y": add_1, + "Z": group_input.outputs["attach_thickness"], + }, + ) + + cube = nw.new_node( + Nodes.MeshCube, + input_kwargs={ + "Size": combine_xyz, + "Vertices X": 5, + "Vertices Y": 5, + "Vertices Z": 5, + }, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["width"]}, + attrs={"operation": "MULTIPLY"}, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply, 1: group_input.outputs["attach_gap"]}, + attrs={"operation": "SUBTRACT"}, + ) + + subtract_1 = nw.new_node( + Nodes.Math, input_kwargs={0: subtract, 1: add}, attrs={"operation": "SUBTRACT"} + ) + + multiply_1 = nw.new_node( + Nodes.Math, input_kwargs={0: add_1}, attrs={"operation": "MULTIPLY"} + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["depth"], 1: -0.5000}, + attrs={"operation": "MULTIPLY"}, + ) + + add_2 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_1, 1: multiply_2}) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": subtract_1, + "Y": add_2, + "Z": group_input.outputs["attach_z_translation"], + }, + ) + + transform = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": cube, "Translation": combine_xyz_1} + ) + + transform_1 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": transform, "Scale": (-1.0000, 1.0000, 1.0000)}, + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [transform_1, transform]} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": join_geometry_1}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_division_board", singleton=False, type="GeometryNodeTree" +) +def nodegroup_division_board(nw: NodeWrangler, material, tag_support=False): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "thickness", 0.0000), + ("NodeSocketFloat", "width", 0.0000), + ("NodeSocketFloat", "depth", 0.0000), + ("NodeSocketFloat", "z_translation", 0.0000), + ("NodeSocketFloat", "x_translation", 0.0000), + ("NodeSocketFloat", "screw_depth", 0.0000), + ("NodeSocketFloat", "screw_radius", 0.0000), + ("NodeSocketFloat", "screw_width_gap", 0.0000), + ("NodeSocketFloat", "screw_depth_gap", 0.0000), + ], + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": group_input.outputs["width"], + "Y": group_input.outputs["depth"], + "Z": group_input.outputs["thickness"], + }, + ) + + if tag_support: + cube = nw.new_node( + 
nodegroup_tagged_cube().name, input_kwargs={"Size": combine_xyz} + ) + else: + cube = nw.new_node( + Nodes.MeshCube, + input_kwargs={ + "Size": combine_xyz, + "Vertices X": 10, + "Vertices Y": 10, + "Vertices Z": 10, + }, + ) + + screw_head = nw.new_node( + nodegroup_screw_head().name, + input_kwargs={ + "Depth": group_input.outputs["screw_depth"], + "Radius": group_input.outputs["screw_radius"], + "division_thickness": group_input.outputs["thickness"], + "width": group_input.outputs["width"], + "depth": group_input.outputs["depth"], + "screw_width_gap": group_input.outputs["screw_width_gap"], + "screw_depth_gap": group_input.outputs["screw_depth_gap"], + }, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [cube, screw_head]} + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": group_input.outputs["x_translation"], + "Z": group_input.outputs["z_translation"], + }, + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": join_geometry, "Translation": combine_xyz_1}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": transform}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_bottom_board", singleton=False, type="GeometryNodeTree" +) +def nodegroup_bottom_board(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "thickness", 0.0000), + ("NodeSocketFloat", "depth", 0.5000), + ("NodeSocketFloat", "y_gap", 0.5000), + ("NodeSocketFloat", "x_translation", 0.0000), + ("NodeSocketFloat", "height", 0.5000), + ("NodeSocketFloat", "width", 0.0000), + ], + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["height"], 1: 0.0000} + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": group_input.outputs["width"], + "Y": group_input.outputs["thickness"], + "Z": add, + }, + ) + + cube = nw.new_node( + Nodes.MeshCube, + input_kwargs={ + "Size": combine_xyz, + "Vertices X": 10, + "Vertices Y": 10, + "Vertices Z": 10, + }, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["depth"]}, + attrs={"operation": "MULTIPLY"}, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply, 1: group_input.outputs["y_gap"]}, + attrs={"operation": "SUBTRACT"}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, input_kwargs={0: add}, attrs={"operation": "MULTIPLY"} + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": group_input.outputs["x_translation"], + "Y": subtract, + "Z": multiply_1, + }, + ) + + transform = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": cube, "Translation": combine_xyz_1} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": transform}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_back_board", singleton=False, type="GeometryNodeTree" +) +def nodegroup_back_board(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "width", 0.0000), + ("NodeSocketFloat", "thickness", 0.5000), + ("NodeSocketFloat", "height", 0.5000), + ("NodeSocketFloat", "depth", 0.5000), + ], + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["thickness"], 1: 0.0000} + ) + + add_1 = nw.new_node( + Nodes.Math, 
input_kwargs={0: group_input.outputs["height"], 1: 0.0000} + ) + + combine_xyz_4 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": group_input.outputs["width"], "Y": add, "Z": add_1}, + ) + + cube_2 = nw.new_node( + Nodes.MeshCube, + input_kwargs={ + "Size": combine_xyz_4, + "Vertices X": 10, + "Vertices Y": 10, + "Vertices Z": 10, + }, + ) + + add_2 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["depth"], 1: 0.0000} + ) + + multiply = nw.new_node( + Nodes.Math, input_kwargs={0: add, 1: -0.5000}, attrs={"operation": "MULTIPLY"} + ) + + multiply_add = nw.new_node( + Nodes.Math, + input_kwargs={0: add_2, 1: -0.5000, 2: multiply}, + attrs={"operation": "MULTIPLY_ADD"}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, input_kwargs={0: add_1}, attrs={"operation": "MULTIPLY"} + ) + + combine_xyz_5 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"Y": multiply_add, "Z": multiply_1} + ) + + transform_5 = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": cube_2, "Translation": combine_xyz_5} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": transform_5}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_side_board", singleton=False, type="GeometryNodeTree" +) +def nodegroup_side_board(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "board_thickness", 0.5000), + ("NodeSocketFloat", "depth", 0.5000), + ("NodeSocketFloat", "height", 0.5000), + ("NodeSocketFloat", "x_translation", 0.0000), + ], + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["board_thickness"], 1: 0.0000} + ) + + add_1 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["depth"], 1: 0.0000} + ) + + add_2 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["height"], 1: 0.0000} + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": add, "Y": add_1, "Z": add_2} + ) + + cube = nw.new_node( + Nodes.MeshCube, + input_kwargs={ + "Size": combine_xyz, + "Vertices X": 10, + "Vertices Y": 10, + "Vertices Z": 10, + }, + ) + + multiply = nw.new_node( + Nodes.Math, input_kwargs={0: add_2, 1: 0.5000}, attrs={"operation": "MULTIPLY"} + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": group_input.outputs["x_translation"], "Z": multiply}, + ) + + transform = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": cube, "Translation": combine_xyz_1} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": transform}, + attrs={"is_active_output": True}, + ) + + +def geometry_nodes(nw: NodeWrangler, **kwargs): + # Code generated using version 2.6.4 of the node_transpiler + + side_board_thickness = nw.new_node(Nodes.Value, label="side_board_thickness") + side_board_thickness.outputs[0].default_value = kwargs["side_board_thickness"] + + shelf_depth = nw.new_node(Nodes.Value, label="shelf_depth") + shelf_depth.outputs[0].default_value = kwargs["shelf_depth"] + + add = nw.new_node(Nodes.Math, input_kwargs={0: shelf_depth, 1: 0.0040}) + + shelf_height = nw.new_node(Nodes.Value, label="shelf_height") + shelf_height.outputs[0].default_value = kwargs["shelf_height"] + + add_1 = nw.new_node(Nodes.Math, input_kwargs={0: shelf_height, 1: 0.0020}) + add_2 = nw.new_node(Nodes.Math, input_kwargs={0: shelf_height, 1: -0.0010}) + side_boards = [] + + for x in kwargs["side_board_x_translation"]: + 
side_board_x_translation = nw.new_node( + Nodes.Value, label="side_board_x_translation" + ) + side_board_x_translation.outputs[0].default_value = x + + side_board = nw.new_node( + nodegroup_side_board().name, + input_kwargs={ + "board_thickness": side_board_thickness, + "depth": add, + "height": add_1, + "x_translation": side_board_x_translation, + }, + ) + side_boards.append(side_board) + + shelf_width = nw.new_node(Nodes.Value, label="shelf_width") + shelf_width.outputs[0].default_value = kwargs["shelf_width"] + + backboard_thickness = nw.new_node(Nodes.Value, label="backboard_thickness") + backboard_thickness.outputs[0].default_value = kwargs["backboard_thickness"] + + add_side = nw.new_node( + Nodes.Math, input_kwargs={0: shelf_width, 1: kwargs["side_board_thickness"] * 2} + ) + back_board = nw.new_node( + nodegroup_back_board().name, + input_kwargs={ + "width": add_side, + "thickness": backboard_thickness, + "height": add_2, + "depth": shelf_depth, + }, + ) + + bottom_board_y_gap = nw.new_node(Nodes.Value, label="bottom_board_y_gap") + bottom_board_y_gap.outputs[0].default_value = kwargs["bottom_board_y_gap"] + + bottom_board_height = nw.new_node(Nodes.Value, label="bottom_board_height") + bottom_board_height.outputs[0].default_value = kwargs["bottom_board_height"] + + bottom_boards = [] + for i in range(len(kwargs["shelf_cell_width"])): + bottom_gap_x_translation = nw.new_node( + Nodes.Value, label="bottom_gap_x_translation" + ) + bottom_gap_x_translation.outputs[0].default_value = kwargs[ + "bottom_gap_x_translation" + ][i] + + shelf_cell_width = nw.new_node(Nodes.Value, label="shelf_cell_width") + shelf_cell_width.outputs[0].default_value = kwargs["shelf_cell_width"][i] + + bottomboard = nw.new_node( + nodegroup_bottom_board().name, + input_kwargs={ + "thickness": side_board_thickness, + "depth": shelf_depth, + "y_gap": bottom_board_y_gap, + "x_translation": bottom_gap_x_translation, + "height": bottom_board_height, + "width": shelf_cell_width, + }, + ) + + bottom_boards.append(bottomboard) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [back_board] + side_boards + bottom_boards}, + ) + + realize_instances = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": join_geometry} + ) + + set_material = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": realize_instances, + "Material": surface.shaderfunc_to_material(kwargs["frame_material"]), + }, + ) + + division_board_thickness = nw.new_node( + Nodes.Value, label="division_board_thickness" + ) + division_board_thickness.outputs[0].default_value = kwargs[ + "division_board_thickness" + ] + + division_boards = [] + for i in range(len(kwargs["shelf_cell_width"])): + for j in range(len(kwargs["division_board_z_translation"])): + division_board_z_translation = nw.new_node( + Nodes.Value, label="division_board_z_translation" + ) + division_board_z_translation.outputs[0].default_value = kwargs[ + "division_board_z_translation" + ][j] + + division_board_x_translation = nw.new_node( + Nodes.Value, label="division_board_x_translation" + ) + division_board_x_translation.outputs[0].default_value = kwargs[ + "division_board_x_translation" + ][i] + + shelf_cell_width = nw.new_node(Nodes.Value, label="shelf_cell_width") + shelf_cell_width.outputs[0].default_value = kwargs["shelf_cell_width"][i] + + screw_depth_head = nw.new_node(Nodes.Value, label="screw_depth_head") + screw_depth_head.outputs[0].default_value = kwargs["screw_depth_head"] + + screw_head_radius = nw.new_node(Nodes.Value, 
label="screw_head_radius") + screw_head_radius.outputs[0].default_value = kwargs["screw_head_radius"] + + screw_width_gap = nw.new_node(Nodes.Value, label="screw_width_gap") + screw_width_gap.outputs[0].default_value = kwargs["screw_width_gap"] + + screw_depth_gap = nw.new_node(Nodes.Value, label="screw_depth_gap") + screw_depth_gap.outputs[0].default_value = kwargs["screw_depth_gap"] + + division_board = nw.new_node( + nodegroup_division_board( + material=kwargs["board_material"], + tag_support=kwargs.get("tag_support", False), + ).name, + input_kwargs={ + "thickness": division_board_thickness, + "width": shelf_cell_width, + "depth": shelf_depth, + "z_translation": division_board_z_translation, + "x_translation": division_board_x_translation, + "screw_depth": screw_depth_head, + "screw_radius": screw_head_radius, + "screw_width_gap": screw_width_gap, + "screw_depth_gap": screw_depth_gap, + }, + ) + division_boards.append(division_board) + + attach_thickness = nw.new_node(Nodes.Value, label="attach_thickness") + attach_thickness.outputs[0].default_value = kwargs["attach_thickness"] + + attach_length = nw.new_node(Nodes.Value, label="attach_length") + attach_length.outputs[0].default_value = kwargs["attach_length"] + + attach_z_translation = nw.new_node(Nodes.Value, label="attach_z_translation") + attach_z_translation.outputs[0].default_value = kwargs["attach_z_translation"] + + attach_gap = nw.new_node(Nodes.Value, label="attach_gap") + attach_gap.outputs[0].default_value = kwargs["attach_gap"] + + attach_width = nw.new_node(Nodes.Value, label="attach_width") + attach_width.outputs[0].default_value = kwargs["attach_width"] + + join_geometry_k = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": division_boards} + ) + + set_material_1 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": join_geometry_k, + "Material": surface.shaderfunc_to_material(kwargs["board_material"]), + }, + ) + + join_geometry_3 = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [set_material, set_material_1]} + ) + + realize_instances_3 = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": join_geometry_3} + ) + + triangulate = nw.new_node( + "GeometryNodeTriangulate", input_kwargs={"Mesh": realize_instances_3} + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": triangulate, "Rotation": (0.0000, 0.0000, -1.5708)}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": transform}, + attrs={"is_active_output": True}, + ) + + +class LargeShelfBaseFactory(AssetFactory): + def __init__(self, factory_seed, params={}, coarse=False): + super(LargeShelfBaseFactory, self).__init__(factory_seed, coarse=coarse) + self.params = {} + + def sample_params(self): + return self.params.copy() + + def get_asset_params(self, i=0): + params = self.sample_params() + if params.get("shelf_depth", None) is None: + params["shelf_depth"] = np.clip(normal(0.26, 0.03), 0.18, 0.36) + if params.get("side_board_thickness", None) is None: + params["side_board_thickness"] = np.clip(normal(0.02, 0.002), 0.015, 0.025) + if params.get("back_board_thickness", None) is None: + params["backboard_thickness"] = 0.01 + if params.get("bottom_board_y_gap", None) is None: + params["bottom_board_y_gap"] = uniform(0.01, 0.05) + if params.get("bottom_board_height", None) is None: + params["bottom_board_height"] = np.clip( + normal(0.083, 0.01), 0.05, 0.11 + ) * np.random.choice([1.0, 0.0], p=[0.8, 0.2]) + if params.get("division_board_thickness", None) is 
None: + params["division_board_thickness"] = np.clip( + normal(0.02, 0.002), 0.015, 0.025 + ) + if params.get("screw_depth_head", None) is None: + params["screw_depth_head"] = uniform(0.001, 0.004) + if params.get("screw_head_radius", None) is None: + params["screw_head_radius"] = uniform(0.001, 0.004) + if params.get("screw_width_gap", None) is None: + params["screw_width_gap"] = uniform(0.0, 0.02) + if params.get("screw_depth_gap", None) is None: + params["screw_depth_gap"] = uniform(0.025, 0.06) + if params.get("attach_length", None) is None: + params["attach_length"] = uniform(0.05, 0.1) + if params.get("attach_width", None) is None: + params["attach_width"] = uniform(0.01, 0.025) + if params.get("attach_thickness", None) is None: + params["attach_thickness"] = uniform(0.002, 0.005) + if params.get("attach_gap", None) is None: + params["attach_gap"] = uniform(0.0, 0.05) + if params.get("shelf_cell_width", None) is None: + num_h_cells = randint(1, 4) + shelf_cell_width = [] + for i in range(num_h_cells): + shelf_cell_width.append( + np.random.choice([0.76, 0.36], p=[0.5, 0.5]) + * np.clip(normal(1.0, 0.1), 0.75, 1.25) + ) + params["shelf_cell_width"] = shelf_cell_width + if params.get("shelf_cell_height", None) is None: + num_v_cells = randint(3, 8) + shelf_cell_height = [] + for i in range(num_v_cells): + shelf_cell_height.append(0.3 * np.clip(normal(1.0, 0.1), 0.75, 1.25)) + params["shelf_cell_height"] = shelf_cell_height + + params = self.update_translation_params(params) + if params.get("frame_material", None) is None: + params["frame_material"] = np.random.choice( + ["white", "black_wood", "wood"], p=[0.4, 0.3, 0.3] + ) + if params.get("board_material", None) is None: + params["board_material"] = params["frame_material"] + + params = self.get_material_func(params) + params["tag_support"] = True + return params + + def get_material_func(self, params, randomness=True): + white_wood_params = shader_shelves_white_sampler() + black_wood_params = shader_shelves_black_wood_sampler() + normal_wood_params = shader_shelves_wood_sampler() + if params["frame_material"] == "white": + if randomness: + params["frame_material"] = lambda x: shader_shelves_white( + x, **white_wood_params + ) + else: + params["frame_material"] = shader_shelves_white + elif params["frame_material"] == "black_wood": + if randomness: + params["frame_material"] = lambda x: shader_shelves_black_wood( + x, **black_wood_params, z_axis_texture=True + ) + else: + params["frame_material"] = lambda x: shader_shelves_black_wood( + x, z_axis_texture=True + ) + elif params["frame_material"] == "wood": + if randomness: + params["frame_material"] = lambda x: shader_shelves_wood( + x, **normal_wood_params, z_axis_texture=True + ) + else: + params["frame_material"] = lambda x: shader_shelves_wood( + x, z_axis_texture=True + ) + + if params["board_material"] == "white": + if randomness: + params["board_material"] = lambda x: shader_shelves_white( + x, **white_wood_params + ) + else: + params["board_material"] = shader_shelves_white + elif params["board_material"] == "black_wood": + if randomness: + params["board_material"] = lambda x: shader_shelves_black_wood( + x, **black_wood_params + ) + else: + params["board_material"] = shader_shelves_black_wood + elif params["board_material"] == "wood": + if randomness: + params["board_material"] = lambda x: shader_shelves_wood( + x, **normal_wood_params + ) + else: + params["board_material"] = shader_shelves_wood + + return params + + def update_translation_params(self, params): + cell_widths = 
params["shelf_cell_width"] + cell_heights = params["shelf_cell_height"] + side_thickness = params["side_board_thickness"] + div_thickness = params["division_board_thickness"] + + # get shelf_width and shelf_height + width = (len(cell_widths) - 1) * side_thickness * 2 + ( + len(cell_widths) - 1 + ) * 0.001 + height = (len(cell_heights) + 1) * div_thickness + params["bottom_board_height"] + for w in cell_widths: + width += w + for h in cell_heights: + height += h + + params["shelf_width"] = width + params["shelf_height"] = height + params["attach_z_translation"] = height - div_thickness + + # get side_board_x_translation + dist = -(width + side_thickness) / 2.0 + side_board_x_translation = [dist] + + for w in cell_widths: + dist += side_thickness + w + side_board_x_translation.append(dist) + dist += side_thickness + 0.001 + side_board_x_translation.append(dist) + side_board_x_translation = side_board_x_translation[:-1] + + # get division_board_z_translation + dist = params["bottom_board_height"] + div_thickness / 2.0 + division_board_z_translation = [dist] + for h in cell_heights: + dist += h + div_thickness + division_board_z_translation.append(dist) + + # get division_board_x_translation + division_board_x_translation = [] + for i in range(len(cell_widths)): + division_board_x_translation.append( + (side_board_x_translation[2 * i] + side_board_x_translation[2 * i + 1]) + / 2.0 + ) + + params["side_board_x_translation"] = side_board_x_translation + params["division_board_x_translation"] = division_board_x_translation + params["division_board_z_translation"] = division_board_z_translation + params["bottom_gap_x_translation"] = division_board_x_translation + + return params + + def create_asset(self, i=0, **params): + bpy.ops.mesh.primitive_plane_add( + size=1, + enter_editmode=False, + align="WORLD", + location=(0, 0, 0), + scale=(1, 1, 1), + ) + obj = bpy.context.active_object + + obj_params = self.get_asset_params(i) + surface.add_geomod( + obj, geometry_nodes, attributes=[], apply=True, input_kwargs=obj_params + ) + + if params.get("ret_params", False): + return obj, obj_params + + tagging.tag_system.relabel_obj(obj) + + return obj + + +class LargeShelfFactory(LargeShelfBaseFactory): + def sample_params(self): + params = dict() + params["Dimensions"] = ( + uniform(0.25, 0.35), + uniform(0.3, 2.0), + uniform(0.9, 2.0), + ) + + params["bottom_board_height"] = 0.083 + params["shelf_depth"] = params["Dimensions"][0] - 0.01 + num_h = int((params["Dimensions"][2] - 0.083) / 0.3) + params["shelf_cell_height"] = [ + (params["Dimensions"][2] - 0.083) / num_h for _ in range(num_h) + ] + num_v = max(int(params["Dimensions"][1] / 0.5), 1) + params["shelf_cell_width"] = [ + params["Dimensions"][1] / num_v for _ in range(num_v) + ] + return params diff --git a/infinigen/assets/objects/shelves/simple_bookcase.py b/infinigen/assets/objects/shelves/simple_bookcase.py new file mode 100644 index 000000000..4f6b83e84 --- /dev/null +++ b/infinigen/assets/objects/shelves/simple_bookcase.py @@ -0,0 +1,867 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
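+# Simple bookcase: geometry-node groups build two side boards, three shelf (division) boards, a back
+# board, screw heads and wall-attach gadgets; SimpleBookcaseBaseFactory samples the dimensions and the
+# white frame / metal hardware materials.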
+ +# Authors: Beining Han + +import bpy +import numpy as np +from numpy.random import normal, uniform + +from infinigen.assets.materials.shelf_shaders import get_shelf_material +from infinigen.assets.objects.shelves.utils import nodegroup_tagged_cube +from infinigen.core import surface, tagging +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.placement.factory import AssetFactory + + +@node_utils.to_nodegroup( + "nodegroup_attach_gadget", singleton=False, type="GeometryNodeTree" +) +def nodegroup_attach_gadget(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "division_thickness", 0.5000), + ("NodeSocketFloat", "height", 0.5000), + ("NodeSocketFloat", "attach_thickness", 0.5000), + ("NodeSocketFloat", "attach_width", 0.5000), + ("NodeSocketFloat", "attach_back_len", 0.5000), + ("NodeSocketFloat", "attach_top_len", 0.5000), + ("NodeSocketFloat", "depth", 0.5000), + ], + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["attach_width"], 1: 0.0000} + ) + + add_1 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["attach_top_len"], 1: 0.0000} + ) + + add_2 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["attach_thickness"], 1: 0.0000} + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": add, "Y": add_1, "Z": add_2} + ) + + cube = nw.new_node( + Nodes.MeshCube, + input_kwargs={ + "Size": combine_xyz, + "Vertices X": 5, + "Vertices Y": 5, + "Vertices Z": 5, + }, + ) + + add_3 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["depth"], 1: 0.0000} + ) + + subtract = nw.new_node( + Nodes.Math, input_kwargs={0: add_3, 1: add_1}, attrs={"operation": "SUBTRACT"} + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract, 1: -0.5000}, + attrs={"operation": "MULTIPLY"}, + ) + + subtract_1 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["height"], + 1: group_input.outputs["division_thickness"], + }, + attrs={"operation": "SUBTRACT"}, + ) + + combine_xyz_2 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"Y": multiply, "Z": subtract_1} + ) + + transform = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": cube, "Translation": combine_xyz_2} + ) + + add_4 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["attach_back_len"], 1: 0.0000} + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": add, "Y": add_2, "Z": add_4} + ) + + cube_1 = nw.new_node( + Nodes.MeshCube, + input_kwargs={ + "Size": combine_xyz_1, + "Vertices X": 5, + "Vertices Y": 5, + "Vertices Z": 5, + }, + ) + + multiply_1 = nw.new_node( + Nodes.Math, input_kwargs={0: add_3, 1: -0.5000}, attrs={"operation": "MULTIPLY"} + ) + + multiply_2 = nw.new_node( + Nodes.Math, input_kwargs={0: add_4}, attrs={"operation": "MULTIPLY"} + ) + + subtract_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract_1, 1: multiply_2}, + attrs={"operation": "SUBTRACT"}, + ) + + combine_xyz_3 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"Y": multiply_1, "Z": subtract_2} + ) + + transform_1 = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": cube_1, "Translation": combine_xyz_3} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"attach1": transform, "attach2": transform_1}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + 
"nodegroup_screw_head", singleton=False, type="GeometryNodeTree" +) +def nodegroup_screw_head(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloatDistance", "Depth", 0.0050), + ("NodeSocketFloatDistance", "Radius", 1.0000), + ("NodeSocketFloat", "bottom_gap", 0.5000), + ("NodeSocketFloat", "division_thickness", 0.5000), + ("NodeSocketFloat", "width", 0.5000), + ("NodeSocketFloat", "height", 0.5000), + ("NodeSocketFloat", "depth", 0.5000), + ("NodeSocketFloat", "screw_gap", 0.5000), + ], + ) + + cylinder = nw.new_node( + "GeometryNodeMeshCylinder", + input_kwargs={ + "Radius": group_input.outputs["Radius"], + "Depth": group_input.outputs["Depth"], + }, + attrs={"fill_type": "TRIANGLE_FAN"}, + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": cylinder.outputs["Mesh"], + "Rotation": (0.0000, 1.5708, 0.0000), + }, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["width"]}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["depth"]}, + attrs={"operation": "MULTIPLY"}, + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["screw_gap"], 1: 0.0000} + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_1, 1: add}, + attrs={"operation": "SUBTRACT"}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["division_thickness"]}, + attrs={"operation": "MULTIPLY"}, + ) + + subtract_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["height"], 1: multiply_2}, + attrs={"operation": "SUBTRACT"}, + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": multiply, "Y": subtract, "Z": subtract_1} + ) + + transform_2 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": transform, "Translation": combine_xyz_1}, + ) + + add_1 = nw.new_node( + Nodes.Math, input_kwargs={0: multiply_2, 1: group_input.outputs["bottom_gap"]} + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": multiply, "Y": subtract, "Z": add_1} + ) + + transform_1 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": transform, "Translation": combine_xyz}, + ) + + multiply_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract, 1: -1.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_2 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": multiply, "Y": multiply_3, "Z": subtract_1} + ) + + transform_3 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": transform, "Translation": combine_xyz_2}, + ) + + add_2 = nw.new_node(Nodes.Math, input_kwargs={0: subtract_1, 1: add_1}) + + multiply_4 = nw.new_node( + Nodes.Math, input_kwargs={0: add_2}, attrs={"operation": "MULTIPLY"} + ) + + combine_xyz_3 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": multiply, "Z": multiply_4} + ) + + transform_5 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": transform, "Translation": combine_xyz_3}, + ) + + combine_xyz_4 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": multiply, "Y": multiply_3, "Z": add_1} + ) + + transform_6 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": transform, "Translation": combine_xyz_4}, + ) + + join_geometry_2 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={ + "Geometry": [ + transform_2, + transform_1, + transform_3, + transform_5, + transform_6, + ] + }, + ) + + transform_4 = 
nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": join_geometry_2, "Scale": (-1.0000, 1.0000, 1.0000)}, + ) + + join_geometry_3 = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [transform_4, join_geometry_2]} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": join_geometry_3}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_back_board", singleton=False, type="GeometryNodeTree" +) +def nodegroup_back_board(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "width", 0.0000), + ("NodeSocketFloat", "thickness", 0.5000), + ("NodeSocketFloat", "height", 0.5000), + ("NodeSocketFloat", "depth", 0.5000), + ], + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["thickness"], 1: 0.0000} + ) + + add_1 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["height"], 1: 0.0000} + ) + + combine_xyz_4 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": group_input.outputs["width"], "Y": add, "Z": add_1}, + ) + + cube_2 = nw.new_node( + Nodes.MeshCube, + input_kwargs={ + "Size": combine_xyz_4, + "Vertices X": 10, + "Vertices Y": 10, + "Vertices Z": 10, + }, + ) + + add_2 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["depth"], 1: 0.0000} + ) + + multiply = nw.new_node( + Nodes.Math, input_kwargs={0: add, 1: -0.5000}, attrs={"operation": "MULTIPLY"} + ) + + multiply_add = nw.new_node( + Nodes.Math, + input_kwargs={0: add_2, 1: -0.5000, 2: multiply}, + attrs={"operation": "MULTIPLY_ADD"}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, input_kwargs={0: add_1}, attrs={"operation": "MULTIPLY"} + ) + + combine_xyz_5 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"Y": multiply_add, "Z": multiply_1} + ) + + transform_5 = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": cube_2, "Translation": combine_xyz_5} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": transform_5}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_division_board", singleton=False, type="GeometryNodeTree" +) +def nodegroup_division_board(nw: NodeWrangler, tag_support=False): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "board_thickness", 0.0000), + ("NodeSocketFloat", "depth", 0.5000), + ("NodeSocketFloat", "width", 0.5000), + ("NodeSocketFloat", "side_thickness", 0.5000), + ], + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["side_thickness"], 1: 2.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["width"], 1: multiply}, + attrs={"operation": "SUBTRACT"}, + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["depth"], 1: 0.0000} + ) + + combine_xyz_3 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": subtract, + "Y": add, + "Z": group_input.outputs["board_thickness"], + }, + ) + + if tag_support: + cube_1 = nw.new_node( + nodegroup_tagged_cube().name, input_kwargs={"Size": combine_xyz_3} + ) + else: + cube_1 = nw.new_node( + Nodes.MeshCube, + input_kwargs={ + "Size": combine_xyz_3, + "Vertices X": 10, + "Vertices Y": 10, + "Vertices Z": 10, + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Mesh": cube_1}, + 
attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_division_boards", singleton=False, type="GeometryNodeTree" +) +def nodegroup_division_boards(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "thickness", 0.5000), + ("NodeSocketFloat", "height", 0.5000), + ("NodeSocketFloat", "gap", 0.5000), + ("NodeSocketGeometry", "Geometry", None), + ], + ) + + realize_instances_1 = nw.new_node( + Nodes.RealizeInstances, + input_kwargs={"Geometry": group_input.outputs["Geometry"]}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["thickness"]}, + attrs={"operation": "MULTIPLY"}, + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["gap"], 1: multiply} + ) + + combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": add}) + + transform_2 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": realize_instances_1, "Translation": combine_xyz_1}, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["height"], 1: multiply}, + attrs={"operation": "SUBTRACT"}, + ) + + add_1 = nw.new_node(Nodes.Math, input_kwargs={0: subtract, 1: add}) + + multiply_1 = nw.new_node( + Nodes.Math, input_kwargs={0: add_1}, attrs={"operation": "MULTIPLY"} + ) + + combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply_1}) + + transform_3 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": realize_instances_1, "Translation": combine_xyz_2}, + ) + + combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": subtract}) + + transform_4 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": realize_instances_1, "Translation": combine_xyz}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "board1": transform_2, + "board2": transform_3, + "board3": transform_4, + }, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_side_board", singleton=False, type="GeometryNodeTree" +) +def nodegroup_side_board(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "board_thickness", 0.5000), + ("NodeSocketFloat", "depth", 0.5000), + ("NodeSocketFloat", "height", 0.5000), + ("NodeSocketFloat", "width", 0.5000), + ], + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["board_thickness"], 1: 0.0000} + ) + + add_1 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["depth"], 1: 0.0000} + ) + + add_2 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["height"], 1: 0.0000} + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": add, "Y": add_1, "Z": add_2} + ) + + cube = nw.new_node( + Nodes.MeshCube, + input_kwargs={ + "Size": combine_xyz, + "Vertices X": 10, + "Vertices Y": 10, + "Vertices Z": 10, + }, + ) + + add_3 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["width"], 1: 0.0000} + ) + + subtract = nw.new_node( + Nodes.Math, input_kwargs={0: add_3, 1: add}, attrs={"operation": "SUBTRACT"} + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract, 1: -0.5000}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, input_kwargs={0: add_2, 1: 0.5000}, attrs={"operation": "MULTIPLY"} + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, 
input_kwargs={"X": multiply, "Z": multiply_1} + ) + + transform = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": cube, "Translation": combine_xyz_1} + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract, 1: 0.5000}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_2 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": multiply_2, "Z": multiply_1} + ) + + transform_1 = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": cube, "Translation": combine_xyz_2} + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [transform, transform_1]} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": join_geometry_1}, + attrs={"is_active_output": True}, + ) + + +def geometry_nodes(nw: NodeWrangler, **kwargs): + # Code generated using version 2.6.4 of the node_transpiler + + side_board_thickness = nw.new_node(Nodes.Value, label="side_board_thickness") + side_board_thickness.outputs[0].default_value = kwargs["side_board_thickness"] + + shelf_depth = nw.new_node(Nodes.Value, label="shelf_depth") + shelf_depth.outputs[0].default_value = kwargs["depth"] + + shelf_height = nw.new_node(Nodes.Value, label="shelf_height") + shelf_height.outputs[0].default_value = kwargs["height"] + + shelf_width = nw.new_node(Nodes.Value, label="shelf_width") + shelf_width.outputs[0].default_value = kwargs["width"] + + side_board = nw.new_node( + nodegroup_side_board().name, + input_kwargs={ + "board_thickness": side_board_thickness, + "depth": shelf_depth, + "height": shelf_height, + "width": shelf_width, + }, + ) + + division_board_thickness = nw.new_node( + Nodes.Value, label="division_board_thickness" + ) + division_board_thickness.outputs[0].default_value = kwargs[ + "division_board_thickness" + ] + + bottom_gap = nw.new_node(Nodes.Value, label="bottom_gap") + bottom_gap.outputs[0].default_value = kwargs["bottom_gap"] + + division_board = nw.new_node( + nodegroup_division_board(tag_support=kwargs["tag_support"]).name, + input_kwargs={ + "board_thickness": division_board_thickness, + "depth": shelf_depth, + "width": shelf_width, + "side_thickness": side_board_thickness, + }, + ) + + division_boards = nw.new_node( + nodegroup_division_boards().name, + input_kwargs={ + "thickness": division_board_thickness, + "height": shelf_height, + "gap": bottom_gap, + "Geometry": division_board, + }, + ) + + backboard_thickness = nw.new_node(Nodes.Value, label="backboard_thickness") + backboard_thickness.outputs[0].default_value = kwargs["backboard_thickness"] + + back_board = nw.new_node( + nodegroup_back_board().name, + input_kwargs={ + "width": shelf_width, + "thickness": backboard_thickness, + "height": shelf_height, + "depth": shelf_depth, + }, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={ + "Geometry": [ + side_board, + division_boards.outputs["board1"], + division_boards.outputs["board2"], + back_board, + division_boards.outputs["board3"], + ] + }, + ) + + realize_instances = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": join_geometry} + ) + + set_material = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": realize_instances, + "Material": kwargs["frame_material"], + }, + ) + + screw_depth_head = nw.new_node(Nodes.Value, label="screw_depth_head") + screw_depth_head.outputs[0].default_value = kwargs["screw_head_depth"] + + screw_head_radius = nw.new_node(Nodes.Value, label="screw_head_radius") + screw_head_radius.outputs[0].default_value = 
kwargs["screw_head_radius"] + + screw_head_gap = nw.new_node(Nodes.Value, label="screw_head_gap") + screw_head_gap.outputs[0].default_value = kwargs["screw_head_dist"] + + screw_head = nw.new_node( + nodegroup_screw_head().name, + input_kwargs={ + "Depth": screw_depth_head, + "Radius": screw_head_radius, + "bottom_gap": bottom_gap, + "division_thickness": division_board_thickness, + "width": shelf_width, + "height": shelf_height, + "depth": shelf_depth, + "screw_gap": screw_head_gap, + }, + ) + + realize_instances_1 = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": screw_head} + ) + + set_material_1 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": realize_instances_1, + "Material": kwargs["metal_material"], + }, + ) + + attach_thickness = nw.new_node(Nodes.Value, label="attach_thickness") + attach_thickness.outputs[0].default_value = kwargs["attach_thickness"] + + attach_width = nw.new_node(Nodes.Value, label="attach_width") + attach_width.outputs[0].default_value = kwargs["attach_width"] + + attach_back_length = nw.new_node(Nodes.Value, label="attach_back_length") + attach_back_length.outputs[0].default_value = kwargs["attach_back_length"] + + attach_top_length = nw.new_node(Nodes.Value, label="attach_top_length") + attach_top_length.outputs[0].default_value = kwargs["attach_top_length"] + + attach_gadget = nw.new_node( + nodegroup_attach_gadget().name, + input_kwargs={ + "division_thickness": division_board_thickness, + "height": shelf_height, + "attach_thickness": attach_thickness, + "attach_width": attach_width, + "attach_back_len": attach_back_length, + "attach_top_len": attach_top_length, + "depth": shelf_depth, + }, + ) + + join_geometry_2 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={ + "Geometry": [ + attach_gadget.outputs["attach1"], + attach_gadget.outputs["attach2"], + ] + }, + ) + + realize_instances_2 = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": join_geometry_2} + ) + + set_material_2 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": realize_instances_2, + "Material": kwargs["metal_material"], + }, + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [set_material, set_material_1, set_material_2]}, + ) + + realize_instances_3 = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": join_geometry_1} + ) + + triangulate = nw.new_node( + "GeometryNodeTriangulate", input_kwargs={"Mesh": realize_instances_3} + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": triangulate, "Rotation": (0.0000, 0.0000, -1.5708)}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": transform}, + attrs={"is_active_output": True}, + ) + + +class SimpleBookcaseBaseFactory(AssetFactory): + def __init__(self, factory_seed, params={}, coarse=False): + super(SimpleBookcaseBaseFactory, self).__init__(factory_seed, coarse=coarse) + self.params = params + + def sample_params(self): + return self.params.copy() + + def get_asset_params(self, i=0): + params = self.sample_params() + if params.get("depth", None) is None: + params["depth"] = np.clip(normal(0.3, 0.05), 0.15, 0.45) + if params.get("width", None) is None: + params["width"] = np.clip(normal(0.5, 0.1), 0.25, 0.75) + if params.get("height", None) is None: + params["height"] = np.clip(normal(0.8, 0.1), 0.5, 1.0) + params["side_board_thickness"] = uniform(0.005, 0.03) + params["division_board_thickness"] = np.clip(normal(0.015, 0.005), 0.005, 0.025) + params["bottom_gap"] 
= np.clip(normal(0.14, 0.05), 0.0, 0.2) + params["backboard_thickness"] = uniform(0.01, 0.02) + params["screw_head_depth"] = uniform(0.002, 0.008) + params["screw_head_radius"] = uniform(0.003, 0.008) + params["screw_head_dist"] = uniform(0.03, 0.1) + params["attach_thickness"] = uniform(0.002, 0.005) + params["attach_width"] = uniform(0.01, 0.04) + params["attach_top_length"] = uniform(0.03, 0.1) + params["attach_back_length"] = uniform(0.02, 0.05) + params["frame_material"] = get_shelf_material("white") + params["metal_material"] = get_shelf_material("metal") + params["tag_support"] = True + return params + + def create_asset(self, i=0, **params): + bpy.ops.mesh.primitive_plane_add( + size=1, + enter_editmode=False, + align="WORLD", + location=(0, 0, 0), + scale=(1, 1, 1), + ) + obj = bpy.context.active_object + + obj_params = self.get_asset_params(i) + surface.add_geomod( + obj, geometry_nodes, apply=True, attributes=[], input_kwargs=obj_params + ) + tagging.tag_system.relabel_obj(obj) + + return obj + + +class SimpleBookcaseFactory(SimpleBookcaseBaseFactory): + def sample_params(self): + params = dict() + params["Dimensions"] = ( + uniform(0.25, 0.4), + uniform(0.5, 0.7), + uniform(0.7, 0.9), + ) + params["depth"] = params["Dimensions"][0] - 0.015 + params["width"] = params["Dimensions"][1] + params["height"] = params["Dimensions"][2] + return params diff --git a/infinigen/assets/objects/shelves/simple_desk.py b/infinigen/assets/objects/shelves/simple_desk.py new file mode 100644 index 000000000..422d09f22 --- /dev/null +++ b/infinigen/assets/objects/shelves/simple_desk.py @@ -0,0 +1,428 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
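+# Simple desk: a rectangular table top (tagged as a support surface) resting on four cylindrical legs,
+# assembled with geometry nodes.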
+ +# Authors: Beining Han + +import bpy +import numpy as np +from numpy.random import normal, uniform + +from infinigen.assets.materials.shelf_shaders import ( + shader_shelves_black_metallic, + shader_shelves_black_metallic_sampler, + shader_shelves_black_wood, + shader_shelves_black_wood_sampler, + shader_shelves_white, + shader_shelves_white_metallic, + shader_shelves_white_metallic_sampler, + shader_shelves_white_sampler, +) +from infinigen.assets.objects.shelves.utils import nodegroup_tagged_cube +from infinigen.core import surface, tagging +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.placement.factory import AssetFactory + + +@node_utils.to_nodegroup( + "nodegroup_table_legs", singleton=False, type="GeometryNodeTree" +) +def nodegroup_table_legs(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "thickness", 0.5000), + ("NodeSocketFloat", "height", 0.5000), + ("NodeSocketFloatDistance", "radius", 0.0200), + ("NodeSocketFloat", "width", 0.5000), + ("NodeSocketFloat", "depth", 0.5000), + ("NodeSocketFloat", "dist", 0.5000), + ], + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["height"], + 1: group_input.outputs["thickness"], + }, + attrs={"operation": "SUBTRACT"}, + ) + + cylinder = nw.new_node( + "GeometryNodeMeshCylinder", + input_kwargs={ + "Radius": group_input.outputs["radius"], + "Depth": subtract, + "Vertices": 128, + }, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["width"]}, + attrs={"operation": "MULTIPLY"}, + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["dist"], 1: 0.0000} + ) + + subtract_1 = nw.new_node( + Nodes.Math, input_kwargs={0: multiply, 1: add}, attrs={"operation": "SUBTRACT"} + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={1: group_input.outputs["depth"]}, + attrs={"operation": "MULTIPLY"}, + ) + + subtract_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_1, 1: add}, + attrs={"operation": "SUBTRACT"}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, input_kwargs={0: subtract}, attrs={"operation": "MULTIPLY"} + ) + + combine_xyz_2 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": subtract_1, "Y": subtract_2, "Z": multiply_2}, + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": cylinder.outputs["Mesh"], + "Translation": combine_xyz_2, + }, + ) + + multiply_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract_1, 1: -1.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_3 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": multiply_3, "Y": subtract_2, "Z": multiply_2}, + ) + + transform_2 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": cylinder.outputs["Mesh"], + "Translation": combine_xyz_3, + }, + ) + + multiply_4 = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract_2, 1: -1.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_4 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": subtract_1, "Y": multiply_4, "Z": multiply_2}, + ) + + transform_3 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": cylinder.outputs["Mesh"], + "Translation": combine_xyz_4, + }, + ) + + combine_xyz_5 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": multiply_3, "Y": multiply_4, "Z": multiply_2}, + ) + + transform_4 = nw.new_node( + 
Nodes.Transform, + input_kwargs={ + "Geometry": cylinder.outputs["Mesh"], + "Translation": combine_xyz_5, + }, + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [transform, transform_2, transform_3, transform_4]}, + ) + + realize_instances_1 = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": join_geometry_1} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": realize_instances_1}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_table_top", singleton=False, type="GeometryNodeTree" +) +def nodegroup_table_top(nw: NodeWrangler, tag_support=True): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "depth", 0.0000), + ("NodeSocketFloat", "width", 0.0000), + ("NodeSocketFloat", "height", 0.5000), + ("NodeSocketFloat", "thickness", 0.5000), + ], + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["thickness"], 1: 0.0000} + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": group_input.outputs["width"], + "Y": group_input.outputs["depth"], + "Z": add, + }, + ) + + if tag_support: + cube = nw.new_node( + nodegroup_tagged_cube().name, input_kwargs={"Size": combine_xyz} + ) + + else: + cube = nw.new_node( + Nodes.MeshCube, + input_kwargs={ + "Size": combine_xyz, + "Vertices X": 10, + "Vertices Y": 10, + "Vertices Z": 10, + }, + ) + + multiply = nw.new_node( + Nodes.Math, input_kwargs={0: add}, attrs={"operation": "MULTIPLY"} + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["height"], 1: multiply}, + attrs={"operation": "SUBTRACT"}, + ) + + combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": subtract}) + + transform_1 = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": cube, "Translation": combine_xyz_1} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": transform_1}, + attrs={"is_active_output": True}, + ) + + +def geometry_nodes(nw: NodeWrangler, **kwargs): + # Code generated using version 2.6.4 of the node_transpiler + + table_depth = nw.new_node(Nodes.Value, label="table_depth") + table_depth.outputs[0].default_value = kwargs["depth"] + + table_width = nw.new_node(Nodes.Value, label="table_width") + table_width.outputs[0].default_value = kwargs["width"] + + table_height = nw.new_node(Nodes.Value, label="table_height") + table_height.outputs[0].default_value = kwargs["height"] + + top_thickness = nw.new_node(Nodes.Value, label="top_thickness") + top_thickness.outputs[0].default_value = kwargs["thickness"] + + table_top = nw.new_node( + nodegroup_table_top(tag_support=True).name, + input_kwargs={ + "depth": table_depth, + "width": table_width, + "height": table_height, + "thickness": top_thickness, + }, + ) + + set_material = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": table_top, + "Material": surface.shaderfunc_to_material(kwargs["top_material"]), + }, + ) + + leg_radius = nw.new_node(Nodes.Value, label="leg_radius") + leg_radius.outputs[0].default_value = kwargs["leg_radius"] + + leg_center_to_edge = nw.new_node(Nodes.Value, label="leg_center_to_edge") + leg_center_to_edge.outputs[0].default_value = kwargs["leg_dist"] + + table_legs = nw.new_node( + nodegroup_table_legs().name, + input_kwargs={ + "thickness": top_thickness, + "height": table_height, + "radius": leg_radius, + "width": table_width, + "depth": 
table_depth, + "dist": leg_center_to_edge, + }, + ) + + set_material_1 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": table_legs, + "Material": surface.shaderfunc_to_material(kwargs["leg_material"]), + }, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [set_material, set_material_1]} + ) + + realize_instances = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": join_geometry} + ) + + triangulate = nw.new_node( + "GeometryNodeTriangulate", input_kwargs={"Mesh": realize_instances} + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": triangulate, "Rotation": (0.0000, 0.0000, 1.5708)}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": transform}, + attrs={"is_active_output": True}, + ) + + +class SimpleDeskBaseFactory(AssetFactory): + def __init__(self, factory_seed, params={}, coarse=False): + super(SimpleDeskBaseFactory, self).__init__(factory_seed, coarse=coarse) + self.params = params + + def sample_params(self): + return self.params.copy() + + def get_asset_params(self, i=0): + params = self.sample_params() + if params.get("depth", None) is None: + params["depth"] = np.clip(normal(0.6, 0.05), 0.45, 0.7) + if params.get("width", None) is None: + params["width"] = np.clip(normal(1.0, 0.1), 0.7, 1.3) + if params.get("height", None) is None: + params["height"] = np.clip(normal(0.73, 0.05), 0.6, 0.83) + if params.get("top_material", None) is None: + params["top_material"] = np.random.choice(["white", "black_wood"]) + if params.get("leg_material", None) is None: + params["leg_material"] = np.random.choice(["white", "black"]) + if params.get("leg_radius", None) is None: + params["leg_radius"] = uniform(0.01, 0.025) + if params.get("leg_dist", None) is None: + params["leg_dist"] = uniform(0.035, 0.07) + if params.get("thickness", None) is None: + params["thickness"] = uniform(0.01, 0.03) + + params = self.get_material_func(params) + return params + + def get_material_func(self, params, randomness=True): + if params["top_material"] == "white": + if randomness: + params["top_material"] = lambda x: shader_shelves_white( + x, **shader_shelves_white_sampler() + ) + else: + params["top_material"] = shader_shelves_white + elif params["top_material"] == "black_wood": + if randomness: + params["top_material"] = lambda x: shader_shelves_black_wood( + x, **shader_shelves_black_wood_sampler() + ) + else: + params["top_material"] = shader_shelves_black_wood + else: + raise NotImplementedError + + if params["leg_material"] == "white": + if randomness: + params["leg_material"] = lambda x: shader_shelves_white_metallic( + x, **shader_shelves_white_metallic_sampler() + ) + else: + params["leg_material"] = shader_shelves_white_metallic + elif params["leg_material"] == "black": + if randomness: + params["leg_material"] = lambda x: shader_shelves_black_metallic( + x, **shader_shelves_black_metallic_sampler() + ) + else: + params["leg_material"] = shader_shelves_black_metallic + else: + raise NotImplementedError + + return params + + def create_asset(self, i=0, **params): + bpy.ops.mesh.primitive_plane_add( + size=1, + enter_editmode=False, + align="WORLD", + location=(0, 0, 0), + scale=(1, 1, 1), + ) + obj = bpy.context.active_object + + obj_params = self.get_asset_params(i) + surface.add_geomod( + obj, geometry_nodes, attributes=[], apply=True, input_kwargs=obj_params + ) + tagging.tag_system.relabel_obj(obj) + + return obj + + +class SimpleDeskFactory(SimpleDeskBaseFactory): + def 
sample_params(self): + params = dict() + params["Dimensions"] = (uniform(0.5, 0.75), uniform(0.8, 2), uniform(0.6, 0.8)) + params["depth"] = params["Dimensions"][0] + params["width"] = params["Dimensions"][1] + params["height"] = params["Dimensions"][2] + return params + + +class SidetableDeskFactory(SimpleDeskBaseFactory): + def sample_params(self): + params = dict() + w = 0.55 * normal(1, 0.1) + params["Dimensions"] = (w, w, w * normal(1, 0.05)) + params["depth"] = params["Dimensions"][0] + params["width"] = params["Dimensions"][1] + params["height"] = params["Dimensions"][2] + return params diff --git a/infinigen/assets/objects/shelves/single_cabinet.py b/infinigen/assets/objects/shelves/single_cabinet.py new file mode 100644 index 000000000..f319ac392 --- /dev/null +++ b/infinigen/assets/objects/shelves/single_cabinet.py @@ -0,0 +1,324 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Beining Han + +import bpy +import numpy as np +from numpy.random import normal, randint, uniform + +from infinigen.assets.objects.shelves.doors import CabinetDoorBaseFactory +from infinigen.assets.objects.shelves.large_shelf import LargeShelfBaseFactory +from infinigen.assets.utils.object import new_bbox +from infinigen.core import surface, tagging +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.util import blender as butil +from infinigen.core.util.math import FixedSeed + + +def geometry_cabinet_nodes(nw: NodeWrangler, **kwargs): + # Code generated using version 2.6.4 of the node_transpiler + right_door_info = nw.new_node( + Nodes.ObjectInfo, input_kwargs={"Object": kwargs["door"][0]} + ) + left_door_info = nw.new_node( + Nodes.ObjectInfo, input_kwargs={"Object": kwargs["door"][1]} + ) + shelf_info = nw.new_node(Nodes.ObjectInfo, input_kwargs={"Object": kwargs["shelf"]}) + + doors = [] + transform_r = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": right_door_info.outputs["Geometry"], + "Translation": kwargs["door_hinge_pos"][0], + "Rotation": (0, 0, kwargs["door_open_angle"]), + }, + ) + doors.append(transform_r) + if len(kwargs["door_hinge_pos"]) > 1: + transform_l = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": left_door_info.outputs["Geometry"], + "Translation": kwargs["door_hinge_pos"][1], + "Rotation": (0, 0, kwargs["door_open_angle"]), + }, + ) + doors.append(transform_l) + + attaches = [] + for pos in kwargs["attach_pos"]: + cube = nw.new_node( + Nodes.MeshCube, input_kwargs={"Size": (0.0006, 0.0200, 0.04500)} + ) + + combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Y": -0.0100}) + + transform = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": cube, "Translation": combine_xyz} + ) + + cube_1 = nw.new_node( + Nodes.MeshCube, input_kwargs={"Size": (0.0005, 0.0340, 0.0200)} + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [transform, cube_1]} + ) + + transform_1 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": join_geometry, + "Translation": (0.0000, -0.0170, 0.0000), + }, + ) + + transform_2 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": transform_1, + "Rotation": (0.0000, 0.0000, -1.5708), + }, + ) + + transform_3 = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": transform_2, "Translation": pos} + ) + + attaches.append(transform_3) + + 
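+    # Join the per-position hinge plates into a single geometry and merge them
+    # with the translated door panels; the shelf frame itself is appended later
+    # in create_asset() via butil.join_objects.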
join_geometry_a = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": attaches} + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": doors + [join_geometry_a]} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": join_geometry}, + attrs={"is_active_output": True}, + ) + + +class SingleCabinetBaseFactory(AssetFactory): + def __init__(self, factory_seed, params={}, coarse=False): + super(SingleCabinetBaseFactory, self).__init__(factory_seed, coarse=coarse) + self.shelf_params = {} + self.door_params = {} + self.mat_params = {} + self.shelf_fac = LargeShelfBaseFactory(factory_seed) + self.door_fac = CabinetDoorBaseFactory(factory_seed) + with FixedSeed(factory_seed): + self.params = self.sample_params() + + def sample_params(self): + # Update fac params + pass + + def get_material_params(self): + with FixedSeed(self.factory_seed): + params = self.mat_params.copy() + if params.get("frame_material", None) is None: + params["frame_material"] = np.random.choice( + ["white", "black_wood", "wood"], p=[0.5, 0.2, 0.3] + ) + return params + + def get_shelf_params(self, i=0): + params = self.shelf_params.copy() + if params.get("shelf_cell_width", None) is None: + params["shelf_cell_width"] = [ + np.random.choice([0.76, 0.36], p=[0.5, 0.5]) + * np.clip(normal(1.0, 0.1), 0.75, 1.25) + ] + if params.get("shelf_cell_height", None) is None: + num_v_cells = randint(3, 7) + shelf_cell_height = [] + for i in range(num_v_cells): + shelf_cell_height.append(0.3 * np.clip(normal(1.0, 0.06), 0.75, 1.25)) + params["shelf_cell_height"] = shelf_cell_height + if params.get("frame_material", None) is None: + params["frame_material"] = self.mat_params["frame_material"] + + return params + + def get_door_params(self, i=0): + params = self.door_params.copy() + + # get door params + shelf_width = ( + self.shelf_params["shelf_width"] + + self.shelf_params["side_board_thickness"] * 2 + ) + if params.get("door_width", None) is None: + if shelf_width < 0.55: + params["door_width"] = shelf_width + params["num_door"] = 1 + else: + params["door_width"] = shelf_width / 2.0 - 0.0005 + params["num_door"] = 2 + if params.get("door_height", None) is None: + params["door_height"] = ( + self.shelf_params["division_board_z_translation"][-1] + - self.shelf_params["division_board_z_translation"][0] + + self.shelf_params["division_board_thickness"] + ) + if len( + self.shelf_params["division_board_z_translation"] + ) > 5 and np.random.choice([True, False], p=[0.5, 0.5]): + params["door_height"] = ( + self.shelf_params["division_board_z_translation"][3] + - self.shelf_params["division_board_z_translation"][0] + + self.shelf_params["division_board_thickness"] + ) + if params.get("frame_material", None) is None: + params["frame_material"] = self.mat_params["frame_material"] + + return params + + def get_cabinet_params(self, i=0): + params = dict() + + shelf_width = ( + self.shelf_params["shelf_width"] + + self.shelf_params["side_board_thickness"] * 2 + ) + if self.door_params["num_door"] == 1: + params["door_hinge_pos"] = [ + ( + self.shelf_params["shelf_depth"] / 2.0 + 0.0025, + -shelf_width / 2.0, + self.shelf_params["bottom_board_height"], + ) + ] + params["door_open_angle"] = 0 + params["attach_pos"] = [ + ( + self.shelf_params["shelf_depth"] / 2.0, + -self.shelf_params["shelf_width"] / 2.0, + self.shelf_params["bottom_board_height"] + z, + ) + for z in self.door_params["attach_height"] + ] + elif self.door_params["num_door"] == 2: + params["door_hinge_pos"] = [ + ( + 
self.shelf_params["shelf_depth"] / 2.0 + 0.008, + -shelf_width / 2.0, + self.shelf_params["bottom_board_height"], + ), + ( + self.shelf_params["shelf_depth"] / 2.0 + 0.008, + shelf_width / 2.0, + self.shelf_params["bottom_board_height"], + ), + ] + params["door_open_angle"] = 0 + params["attach_pos"] = [ + ( + self.shelf_params["shelf_depth"] / 2.0, + -self.shelf_params["shelf_width"] / 2.0, + self.shelf_params["bottom_board_height"] + z, + ) + for z in self.door_params["attach_height"] + ] + [ + ( + self.shelf_params["shelf_depth"] / 2.0, + self.shelf_params["shelf_width"] / 2.0, + self.shelf_params["bottom_board_height"] + z, + ) + for z in self.door_params["attach_height"] + ] + else: + raise NotImplementedError + + return params + + def get_cabinet_components(self, i): + # update material params + self.mat_params = self.get_material_params() + + # create shelf + shelf_params = self.get_shelf_params(i=i) + self.shelf_fac.params = shelf_params + shelf, shelf_params = self.shelf_fac.create_asset(i=i, ret_params=True) + shelf.name = "cabinet_frame" + self.shelf_params = shelf_params + + # create doors + door_params = self.get_door_params(i=i) + self.door_fac.params = door_params + self.door_fac.params["door_left_hinge"] = False + right_door, door_obj_params = self.door_fac.create_asset(i=i, ret_params=True) + right_door.name = "cabinet_right_door" + self.door_fac.params = door_obj_params + self.door_fac.params["door_left_hinge"] = True + left_door, _ = self.door_fac.create_asset(i=i, ret_params=True) + left_door.name = "cabinet_left_door" + self.door_params = door_obj_params + + return shelf, right_door, left_door + + def create_asset(self, i=0, **params): + bpy.ops.mesh.primitive_plane_add( + size=1, + enter_editmode=False, + align="WORLD", + location=(0, 0, 0), + scale=(1, 1, 1), + ) + obj = bpy.context.active_object + + shelf, right_door, left_door = self.get_cabinet_components(i=i) + + # create cabinet + cabinet_params = self.get_cabinet_params(i=i) + surface.add_geomod( + obj, + geometry_cabinet_nodes, + attributes=[], + apply=True, + input_kwargs={ + "door": [right_door, left_door], + "shelf": shelf, + "door_hinge_pos": cabinet_params["door_hinge_pos"], + "door_open_angle": cabinet_params["door_open_angle"], + "attach_pos": cabinet_params["attach_pos"], + }, + ) + butil.delete([left_door, right_door]) + obj = butil.join_objects([shelf, obj]) + + tagging.tag_system.relabel_obj(obj) + return obj + + +class SingleCabinetFactory(SingleCabinetBaseFactory): + def sample_params(self): + params = dict() + params["Dimensions"] = ( + uniform(0.25, 0.35), + uniform(0.3, 0.7), + uniform(0.9, 1.8), + ) + + params["bottom_board_height"] = 0.083 + params["shelf_depth"] = params["Dimensions"][0] - 0.01 + num_h = int((params["Dimensions"][2] - 0.083) / 0.3) + params["shelf_cell_height"] = [ + (params["Dimensions"][2] - 0.083) / num_h for _ in range(num_h) + ] + params["shelf_cell_width"] = [params["Dimensions"][1]] + self.shelf_params = params + self.dims = params["Dimensions"] + + def create_placeholder(self, **kwargs) -> bpy.types.Object: + x, y, z = self.dims + return new_bbox( + -x / 2 * 1.2, x / 2 * 1.2, -y / 2 * 1.2, y / 2 * 1.2, 0, (z + 0.083) * 1.02 + ) diff --git a/infinigen/assets/objects/shelves/triangle_shelf.py b/infinigen/assets/objects/shelves/triangle_shelf.py new file mode 100644 index 000000000..7d7fba9c6 --- /dev/null +++ b/infinigen/assets/objects/shelves/triangle_shelf.py @@ -0,0 +1,1394 @@ +# Copyright (c) Princeton University. 
+# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Beining Han + +import bpy +import numpy as np +from numpy.random import normal, uniform + +from infinigen.assets.materials.shelf_shaders import get_shelf_material +from infinigen.core import surface, tagging +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.placement.factory import AssetFactory + + +@node_utils.to_nodegroup( + "nodegroup_table_profile", singleton=False, type="GeometryNodeTree" +) +def nodegroup_table_profile(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketInt", "Profile N-gon", 4), + ("NodeSocketFloat", "Profile Width", 1.0000), + ("NodeSocketFloat", "Profile Aspect Ratio", 1.0000), + ("NodeSocketFloat", "Profile Fillet Ratio", 0.2000), + ], + ) + + value = nw.new_node(Nodes.Value) + value.outputs[0].default_value = 0.7071 + + curve_circle = nw.new_node( + Nodes.CurveCircle, + input_kwargs={ + "Resolution": group_input.outputs["Profile N-gon"], + "Radius": value, + }, + ) + + divide = nw.new_node( + Nodes.Math, + input_kwargs={0: 3.1416, 1: group_input.outputs["Profile N-gon"]}, + attrs={"operation": "DIVIDE"}, + ) + + combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": divide}) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": curve_circle.outputs["Curve"], + "Rotation": combine_xyz_1, + }, + ) + + transform_2 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": transform, "Rotation": (0.0000, 0.0000, -1.5708)}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["Profile Aspect Ratio"], + 1: group_input.outputs["Profile Width"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": group_input.outputs["Profile Width"], + "Y": multiply, + "Z": 1.0000, + }, + ) + + transform_1 = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": transform_2, "Scale": combine_xyz} + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["Profile Width"], + 1: group_input.outputs["Profile Fillet Ratio"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + fillet_curve_1 = nw.new_node( + "GeometryNodeFilletCurve", + input_kwargs={ + "Curve": transform_1, + "Count": 4, + "Radius": multiply_1, + "Limit Radius": True, + }, + attrs={"mode": "POLY"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Output": fillet_curve_1}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_curve_to_board", singleton=False, type="GeometryNodeTree" +) +def nodegroup_curve_to_board(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Profile Curve", None), + ("NodeSocketGeometry", "Shape Curve", None), + ("NodeSocketFloat", "Height", 0.5000), + ], + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Height"], 1: -1.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply}) + + curve_line = nw.new_node(Nodes.CurveLine, input_kwargs={"End": combine_xyz_1}) + + set_curve_tilt = nw.new_node( + Nodes.SetCurveTilt, 
input_kwargs={"Curve": curve_line, "Tilt": 3.1416} + ) + + resample_curve = nw.new_node( + Nodes.ResampleCurve, + input_kwargs={"Curve": set_curve_tilt, "Count": 128, "Length": 0.0500}, + ) + + spline_parameter_1 = nw.new_node(Nodes.SplineParameter) + + capture_attribute = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={ + "Geometry": resample_curve, + 2: spline_parameter_1.outputs["Factor"], + }, + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": capture_attribute.outputs["Geometry"], + "Profile Curve": group_input.outputs["Shape Curve"], + "Fill Caps": True, + }, + ) + + position_1 = nw.new_node(Nodes.InputPosition) + + separate_xyz_2 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": position_1}) + + sample_curve = nw.new_node( + Nodes.SampleCurve, + input_kwargs={ + "Curve": group_input.outputs["Profile Curve"], + "Factor": capture_attribute.outputs[2], + }, + attrs={"mode": "FACTOR"}, + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": sample_curve.outputs["Position"]} + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": separate_xyz.outputs["X"], "Y": separate_xyz.outputs["Y"]}, + ) + + length = nw.new_node( + Nodes.VectorMath, input_kwargs={0: combine_xyz}, attrs={"operation": "LENGTH"} + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_2.outputs["X"], 1: length.outputs["Value"]}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_2.outputs["Y"], 1: length.outputs["Value"]}, + attrs={"operation": "MULTIPLY"}, + ) + + position = nw.new_node(Nodes.InputPosition) + + separate_xyz_1 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": position}) + + attribute_statistic = nw.new_node( + Nodes.AttributeStatistic, + input_kwargs={ + "Geometry": group_input.outputs["Profile Curve"], + 2: separate_xyz_1.outputs["Z"], + }, + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": separate_xyz.outputs["Z"], + 1: attribute_statistic.outputs["Min"], + 2: attribute_statistic.outputs["Max"], + 3: multiply, + 4: 0.0000, + }, + ) + + combine_xyz_2 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": multiply_1, + "Y": multiply_2, + "Z": map_range.outputs["Result"], + }, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={"Geometry": curve_to_mesh, "Position": combine_xyz_2}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Mesh": set_position}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_leg_straight", singleton=False, type="GeometryNodeTree" +) +def nodegroup_leg_straight(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Profile Curve", None), + ("NodeSocketFloat", "Height", 0.5000), + ("NodeSocketInt", "N-gon", 0), + ("NodeSocketFloat", "Profile Width", 0.5000), + ("NodeSocketFloat", "Aspect Ratio", 0.5000), + ("NodeSocketFloat", "Fillet Ratio", 0.2000), + ("NodeSocketInt", "Resolution", 128), + ], + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Height"], 1: -1.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply}) + + curve_line = nw.new_node(Nodes.CurveLine, input_kwargs={"End": combine_xyz_1}) + + set_curve_tilt = nw.new_node( + Nodes.SetCurveTilt, 
input_kwargs={"Curve": curve_line, "Tilt": 3.1416} + ) + + resample_curve = nw.new_node( + Nodes.ResampleCurve, + input_kwargs={ + "Curve": set_curve_tilt, + "Count": group_input.outputs["Resolution"], + "Length": 0.0500, + }, + ) + + spline_parameter_1 = nw.new_node(Nodes.SplineParameter) + + capture_attribute = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={ + "Geometry": resample_curve, + 2: spline_parameter_1.outputs["Factor"], + }, + ) + + tableprofile = nw.new_node( + nodegroup_table_profile().name, + input_kwargs={ + "Profile N-gon": group_input.outputs["N-gon"], + "Profile Width": group_input.outputs["Profile Width"], + "Profile Aspect Ratio": group_input.outputs["Aspect Ratio"], + "Profile Fillet Ratio": group_input.outputs["Fillet Ratio"], + }, + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": capture_attribute.outputs["Geometry"], + "Profile Curve": tableprofile, + "Fill Caps": True, + }, + ) + + position_1 = nw.new_node(Nodes.InputPosition) + + separate_xyz_2 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": position_1}) + + sample_curve = nw.new_node( + Nodes.SampleCurve, + input_kwargs={ + "Curve": group_input.outputs["Profile Curve"], + "Factor": capture_attribute.outputs[2], + }, + attrs={"mode": "FACTOR"}, + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": sample_curve.outputs["Position"]} + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": separate_xyz.outputs["X"], "Y": separate_xyz.outputs["Y"]}, + ) + + length = nw.new_node( + Nodes.VectorMath, input_kwargs={0: combine_xyz}, attrs={"operation": "LENGTH"} + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_2.outputs["X"], 1: length.outputs["Value"]}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_2.outputs["Y"], 1: length.outputs["Value"]}, + attrs={"operation": "MULTIPLY"}, + ) + + position = nw.new_node(Nodes.InputPosition) + + separate_xyz_1 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": position}) + + attribute_statistic = nw.new_node( + Nodes.AttributeStatistic, + input_kwargs={ + "Geometry": group_input.outputs["Profile Curve"], + 2: separate_xyz_1.outputs["Z"], + }, + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": separate_xyz.outputs["Z"], + 1: attribute_statistic.outputs["Min"], + 2: attribute_statistic.outputs["Max"], + 3: multiply, + 4: 0.0000, + }, + ) + + combine_xyz_2 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": multiply_1, + "Y": multiply_2, + "Z": map_range.outputs["Result"], + }, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={"Geometry": curve_to_mesh, "Position": combine_xyz_2}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Mesh": set_position, "Profile Curve": tableprofile}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_curve_board", singleton=False, type="GeometryNodeTree" +) +def nodegroup_curve_board(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + curve_line = nw.new_node( + Nodes.CurveLine, + input_kwargs={ + "Start": (1.0000, 0.0000, -1.0000), + "End": (1.0000, 0.0000, 1.0000), + }, + ) + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "Thickness", 0.5000), + ("NodeSocketFloat", "Fillet Radius Vertical", 0.0000), + ("NodeSocketFloat", "width", 0.0000), + ("NodeSocketFloat", 
"extrude_length", 0.0000), + ], + ) + + combine_xyz_3 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": group_input.outputs["width"]} + ) + + curve_line_1 = nw.new_node(Nodes.CurveLine, input_kwargs={"End": combine_xyz_3}) + + combine_xyz_4 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"Y": group_input.outputs["width"]} + ) + + curve_line_2 = nw.new_node(Nodes.CurveLine, input_kwargs={"End": combine_xyz_4}) + + combine_xyz_6 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": group_input.outputs["width"], + "Y": group_input.outputs["extrude_length"], + }, + ) + + curve_line_3 = nw.new_node( + Nodes.CurveLine, input_kwargs={"Start": combine_xyz_3, "End": combine_xyz_6} + ) + + combine_xyz_5 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": group_input.outputs["extrude_length"], + "Y": group_input.outputs["width"], + }, + ) + + curve_line_4 = nw.new_node( + Nodes.CurveLine, input_kwargs={"Start": combine_xyz_4, "End": combine_xyz_5} + ) + + curve_line_5 = nw.new_node( + Nodes.CurveLine, input_kwargs={"Start": combine_xyz_6, "End": combine_xyz_5} + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={ + "Geometry": [ + curve_line_1, + curve_line_2, + curve_line_3, + curve_line_4, + curve_line_5, + ] + }, + ) + + curve_to_mesh_1 = nw.new_node( + Nodes.CurveToMesh, input_kwargs={"Curve": join_geometry_1} + ) + + merge_by_distance_1 = nw.new_node( + Nodes.MergeByDistance, input_kwargs={"Geometry": curve_to_mesh_1} + ) + + mesh_to_curve = nw.new_node( + Nodes.MeshToCurve, input_kwargs={"Mesh": merge_by_distance_1} + ) + + curve_to_board = nw.new_node( + nodegroup_curve_to_board().name, + input_kwargs={ + "Profile Curve": curve_line, + "Shape Curve": mesh_to_curve, + "Height": group_input.outputs["Thickness"], + }, + ) + + arc = nw.new_node( + "GeometryNodeCurveArc", + input_kwargs={"Resolution": 4, "Radius": 0.7071, "Sweep Angle": 4.7124}, + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": arc.outputs["Curve"], + "Rotation": (0.0000, 0.0000, -0.7854), + }, + ) + + transform_2 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": transform, "Rotation": (0.0000, 1.5708, 0.0000)}, + ) + + transform_3 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": transform_2, "Translation": (0.0000, 0.5000, 0.0000)}, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": 1.0000, "Y": group_input, "Z": 1.0000} + ) + + transform_4 = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": transform_3, "Scale": combine_xyz} + ) + + fillet_curve = nw.new_node( + "GeometryNodeFilletCurve", + input_kwargs={ + "Curve": transform_4, + "Count": 8, + "Radius": group_input, + "Limit Radius": True, + }, + attrs={"mode": "POLY"}, + ) + + transform_6 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": fillet_curve, + "Rotation": (1.5708, 1.5708, 0.0000), + "Scale": group_input.outputs["Thickness"], + }, + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, input_kwargs={"Profile Curve": transform_6} + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Thickness"], 1: -0.5000}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply}) + + transform_5 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": curve_to_mesh, "Translation": combine_xyz_1}, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [curve_to_board.outputs["Mesh"], transform_5]}, + ) + + 
merge_by_distance = nw.new_node( + Nodes.MergeByDistance, input_kwargs={"Geometry": join_geometry} + ) + + combine_xyz_2 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"Z": group_input.outputs["Thickness"]} + ) + + transform_1 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": merge_by_distance, "Translation": combine_xyz_2}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": transform_1}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup("nodegroup_side_leg", singleton=False, type="GeometryNodeTree") +def nodegroup_side_leg(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + curve_line = nw.new_node( + Nodes.CurveLine, + input_kwargs={ + "Start": (1.0000, 0.0000, -1.0000), + "End": (1.0000, 0.0000, 1.0000), + }, + ) + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "Thickness", 0.5000), + ("NodeSocketInt", "N-gon", 0), + ("NodeSocketFloat", "Profile Width", 0.5000), + ("NodeSocketFloat", "Aspect Ratio", 0.5000), + ("NodeSocketFloat", "Fillet Ratio", 0.2000), + ("NodeSocketFloat", "Fillet Radius Vertical", 0.0000), + ], + ) + + legstraight = nw.new_node( + nodegroup_leg_straight().name, + input_kwargs={ + "Profile Curve": curve_line, + "Height": group_input.outputs["Thickness"], + "N-gon": group_input.outputs["N-gon"], + "Profile Width": group_input.outputs["Profile Width"], + "Aspect Ratio": group_input.outputs["Aspect Ratio"], + "Fillet Ratio": group_input.outputs["Fillet Ratio"], + }, + ) + + arc = nw.new_node( + "GeometryNodeCurveArc", + input_kwargs={"Resolution": 4, "Radius": 0.7071, "Sweep Angle": 4.7124}, + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": arc.outputs["Curve"], + "Rotation": (0.0000, 0.0000, -0.7854), + }, + ) + + transform_2 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": transform, "Rotation": (0.0000, 1.5708, 0.0000)}, + ) + + transform_3 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": transform_2, "Translation": (0.0000, 0.5000, 0.0000)}, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": 1.0000, "Y": group_input, "Z": 1.0000} + ) + + transform_4 = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": transform_3, "Scale": combine_xyz} + ) + + fillet_curve = nw.new_node( + "GeometryNodeFilletCurve", + input_kwargs={ + "Curve": transform_4, + "Count": 8, + "Radius": group_input, + "Limit Radius": True, + }, + attrs={"mode": "POLY"}, + ) + + transform_6 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": fillet_curve, + "Rotation": (1.5708, 1.5708, 0.0000), + "Scale": group_input.outputs["Thickness"], + }, + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": legstraight.outputs["Profile Curve"], + "Profile Curve": transform_6, + }, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Thickness"], 1: -0.5000}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply}) + + transform_5 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": curve_to_mesh, "Translation": combine_xyz_1}, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [transform_5, legstraight.outputs["Mesh"]]}, + ) + + merge_by_distance = nw.new_node( + Nodes.MergeByDistance, input_kwargs={"Geometry": join_geometry} + ) + + combine_xyz_2 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"Z": 
group_input.outputs["Thickness"]} + ) + + transform_1 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": merge_by_distance, "Translation": combine_xyz_2}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": transform_1}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_side_boards", singleton=False, type="GeometryNodeTree" +) +def nodegroup_side_boards(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "Y", 0.0000), + ("NodeSocketFloat", "Z", 0.0000), + ("NodeSocketFloat", "x1", 0.5000), + ("NodeSocketFloat", "x2", 0.5000), + ("NodeSocketFloat", "x3", 0.0010), + ("NodeSocketFloat", "x4", 0.5000), + ("NodeSocketFloat", "x5", 0.5000), + ], + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["x5"], 1: 0.0000} + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": add, + "Y": group_input.outputs["Y"], + "Z": group_input.outputs["Z"], + }, + ) + + cube = nw.new_node( + Nodes.MeshCube, + input_kwargs={ + "Size": combine_xyz, + "Vertices X": 5, + "Vertices Y": 5, + "Vertices Z": 5, + }, + ) + + multiply = nw.new_node( + Nodes.Math, input_kwargs={0: add}, attrs={"operation": "MULTIPLY"} + ) + + add_1 = nw.new_node( + Nodes.Math, input_kwargs={0: multiply, 1: group_input.outputs["x3"]} + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["x1"]}, + attrs={"operation": "MULTIPLY"}, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["x2"], 1: multiply_1}, + attrs={"operation": "SUBTRACT"}, + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": add_1, "Z": subtract} + ) + + transform = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": cube, "Translation": combine_xyz_1} + ) + + subtract_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["x4"], 1: multiply_1}, + attrs={"operation": "SUBTRACT"}, + ) + + combine_xyz_2 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": add_1, "Z": subtract_1} + ) + + transform_1 = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": cube, "Translation": combine_xyz_2} + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [transform, transform_1]} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": join_geometry_1}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_shelf_boards", singleton=False, type="GeometryNodeTree" +) +def nodegroup_shelf_boards(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "Thickness", 0.0100), + ("NodeSocketFloat", "Bottom_z", 0.0000), + ("NodeSocketFloat", "Mid_z", 0.0000), + ("NodeSocketFloat", "Top_z", 0.0000), + ("NodeSocketFloat", "Board_width", 0.3000), + ("NodeSocketFloat", "Leg_gap", 0.5000), + ("NodeSocketFloat", "extrude_length", 0.5000), + ], + ) + + curve_board = nw.new_node( + nodegroup_curve_board().name, + input_kwargs={ + "Thickness": group_input.outputs["Thickness"], + "Fillet Radius Vertical": 0.0100, + "width": group_input.outputs["Board_width"], + "extrude_length": group_input.outputs["extrude_length"], + }, + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["Leg_gap"], 1: 0.0000} + ) + + combine_xyz_1 = nw.new_node( 
+ Nodes.CombineXYZ, input_kwargs={"X": add, "Z": group_input.outputs["Bottom_z"]} + ) + + transform_1 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": curve_board, + "Translation": combine_xyz_1, + "Rotation": (0.0000, 0.0000, -1.5708), + }, + ) + + combine_xyz_4 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": add, "Z": group_input.outputs["Mid_z"]} + ) + + transform_5 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": curve_board, + "Translation": combine_xyz_4, + "Rotation": (0.0000, 0.0000, -1.5708), + }, + ) + + combine_xyz_5 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": add, "Z": group_input.outputs["Top_z"]} + ) + + transform_6 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": curve_board, + "Translation": combine_xyz_5, + "Rotation": (0.0000, 0.0000, -1.5708), + }, + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [transform_1, transform_5, transform_6]}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": join_geometry_1}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_screw_head", singleton=False, type="GeometryNodeTree" +) +def nodegroup_screw_head(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + cylinder = nw.new_node( + "GeometryNodeMeshCylinder", input_kwargs={"Radius": 0.004, "Depth": 0.0030} + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": cylinder.outputs["Mesh"], + "Rotation": (1.5708, 0.0000, 0.0000), + }, + ) + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "leg_width", 0.5000), + ("NodeSocketFloat", "board_thickness", 0.5000), + ("NodeSocketFloat", "board_height", 0.5000), + ("NodeSocketFloat", "leg_gap", 0.5000), + ("NodeSocketFloat", "board_width", 0.5000), + ("NodeSocketFloat", "leg_depth", 0.0000), + ], + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["leg_width"]}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["leg_depth"]}, + attrs={"operation": "MULTIPLY"}, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: 0.0000, 1: multiply_1}, + attrs={"operation": "SUBTRACT"}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["board_thickness"]}, + attrs={"operation": "MULTIPLY"}, + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["board_height"], 1: multiply_2} + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": multiply, "Y": subtract, "Z": add} + ) + + transform_1 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": transform, "Translation": combine_xyz}, + ) + + add_1 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["board_width"], 1: 0.0000} + ) + + divide1 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["leg_depth"], 1: 0.5}, + attrs={"operation": "MULTIPLY"}, + ) + + add_2 = nw.new_node(Nodes.Math, input_kwargs={0: add_1, 1: divide1}) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": multiply, "Y": add_2, "Z": add} + ) + + transform_2 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": transform, "Translation": combine_xyz_1}, + ) + + multiply_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["leg_gap"], 1: 2.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + add_3 = nw.new_node(Nodes.Math, 
input_kwargs={0: add_1, 1: multiply_3}) + + subtract_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: add_3, 1: multiply}, + attrs={"operation": "SUBTRACT"}, + ) + + combine_xyz_2 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": subtract_1, "Y": subtract, "Z": add} + ) + + transform_3 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": transform, "Translation": combine_xyz_2}, + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [transform_1, transform_2, transform_3]}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": join_geometry_1}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_shelf_legs", singleton=False, type="GeometryNodeTree" +) +def nodegroup_shelf_legs(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "leg_gap", 0.5000), + ("NodeSocketFloat", "leg_curve_ratio", 0.5000), + ("NodeSocketFloat", "leg_width", 0.5000), + ("NodeSocketFloat", "leg_length", 0.5000), + ("NodeSocketFloat", "board_width", 0.5000), + ("NodeSocketFloat", "leg_depth", 0.0000), + ], + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["leg_width"], 1: 0.0000} + ) + + add_1 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["leg_length"], 1: 0.0000} + ) + + divide = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["leg_depth"], + 1: group_input.outputs["leg_length"], + }, + attrs={"operation": "DIVIDE"}, + ) + + add_2 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["leg_curve_ratio"], 1: 0.0000} + ) + + side_leg = nw.new_node( + nodegroup_side_leg().name, + input_kwargs={ + "Thickness": add, + "N-gon": 4, + "Profile Width": add_1, + "Aspect Ratio": divide, + "Fillet Ratio": add_2, + "Fillet Radius Vertical": add_2, + }, + ) + + multiply = nw.new_node( + Nodes.Math, input_kwargs={0: add_1}, attrs={"operation": "MULTIPLY"} + ) + + combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply}) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": side_leg, + "Translation": combine_xyz, + "Rotation": (0.0000, 1.5708, 0.0000), + }, + ) + + add_3 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["board_width"], 1: 0.0000} + ) + + subtract = nw.new_node( + Nodes.Math, input_kwargs={0: add_3, 1: add}, attrs={"operation": "SUBTRACT"} + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["leg_gap"], 1: 2.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + add_4 = nw.new_node(Nodes.Math, input_kwargs={0: subtract, 1: multiply_1}) + + combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"X": add_4}) + + transform_4 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": transform, "Translation": combine_xyz_3}, + ) + + combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Y": add_3}) + + transform_2 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": transform, "Translation": combine_xyz_2}, + ) + + transform_3 = nw.new_node(Nodes.Transform, input_kwargs={"Geometry": transform}) + + join_geometry_2 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [transform_4, transform_2, transform_3]}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": join_geometry_2}, + attrs={"is_active_output": True}, + ) + + +def geometry_nodes(nw: NodeWrangler, **kwargs): 
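+    # Assembles the full triangle shelf: three corner legs, three boards at the
+    # bottom/mid/top layer heights, metal screw heads at each layer, and two
+    # small side boards; the joined result is mirrored across X, triangulated,
+    # and rotated -90 degrees about Z before output.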
+ # Code generated using version 2.6.4 of the node_transpiler + + leg_gap = nw.new_node(Nodes.Value, label="leg_gap") + leg_gap.outputs[0].default_value = kwargs["leg_board_gap"] + + curvature_ratio = nw.new_node(Nodes.Value, label="curvature_ratio") + curvature_ratio.outputs[0].default_value = kwargs["leg_curvature_ratio"] + + leg_width = nw.new_node(Nodes.Value, label="leg_width") + leg_width.outputs[0].default_value = kwargs["leg_width"] + + leg_length = nw.new_node(Nodes.Value, label="leg_length") + leg_length.outputs[0].default_value = kwargs["leg_length"] + + leg_depth = nw.new_node(Nodes.Value, label="leg_depth") + leg_depth.outputs[0].default_value = kwargs["leg_depth"] + + board_width = nw.new_node(Nodes.Value, label="board_width") + board_width.outputs[0].default_value = kwargs["board_width"] + + shelf_legs = nw.new_node( + nodegroup_shelf_legs().name, + input_kwargs={ + "leg_gap": leg_gap, + "leg_curve_ratio": curvature_ratio, + "leg_width": leg_width, + "leg_length": leg_length, + "board_width": board_width, + "leg_depth": leg_depth, + }, + ) + + set_material = nw.new_node( + Nodes.SetMaterial, + input_kwargs={"Geometry": shelf_legs, "Material": kwargs["leg_material"]}, + ) + + board_thickness = nw.new_node(Nodes.Value, label="board_thickness") + board_thickness.outputs[0].default_value = kwargs["board_thickness"] + + board_extrude_length = nw.new_node(Nodes.Value, label="board_extrude_length") + board_extrude_length.outputs[0].default_value = kwargs["board_extrude_length"] + + bottom_layer_height = nw.new_node(Nodes.Value, label="bottom_layer_height") + bottom_layer_height.outputs[0].default_value = kwargs["bottom_layer_height"] + + mid_layer_height = nw.new_node(Nodes.Value, label="mid_layer_height") + mid_layer_height.outputs[0].default_value = kwargs["mid_layer_height"] + + top_layer_height = nw.new_node(Nodes.Value, label="top_layer_height") + top_layer_height.outputs[0].default_value = kwargs["top_layer_height"] + + screwhead1 = nw.new_node( + nodegroup_screw_head().name, + input_kwargs={ + "leg_width": leg_width, + "board_thickness": board_thickness, + "board_height": bottom_layer_height, + "leg_gap": leg_gap, + "board_width": board_width, + "leg_depth": leg_depth, + }, + ) + + screwhead2 = nw.new_node( + nodegroup_screw_head().name, + input_kwargs={ + "leg_width": leg_width, + "board_thickness": board_thickness, + "board_height": mid_layer_height, + "leg_gap": leg_gap, + "board_width": board_width, + "leg_depth": leg_depth, + }, + ) + + screwhead3 = nw.new_node( + nodegroup_screw_head().name, + input_kwargs={ + "leg_width": leg_width, + "board_thickness": board_thickness, + "board_height": top_layer_height, + "leg_gap": leg_gap, + "board_width": board_width, + "leg_depth": leg_depth, + }, + ) + + join_geometry2 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [screwhead1, screwhead2, screwhead3]}, + ) + + set_material_2 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": join_geometry2, + "Material": get_shelf_material("metal"), + }, + ) + + shelf_boards = nw.new_node( + nodegroup_shelf_boards().name, + input_kwargs={ + "Thickness": board_thickness, + "Bottom_z": bottom_layer_height, + "Mid_z": mid_layer_height, + "Top_z": top_layer_height, + "Board_width": board_width, + "Leg_gap": leg_gap, + "extrude_length": board_extrude_length, + }, + ) + + set_material_1 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={"Geometry": shelf_boards, "Material": kwargs["board_material"]}, + ) + + side_board_height = nw.new_node(Nodes.Value, 
label="side_board_height") + side_board_height.outputs[0].default_value = kwargs["side_board_height"] + + side_boards = nw.new_node( + nodegroup_side_boards().name, + input_kwargs={ + "Y": leg_depth, + "Z": side_board_height, + "x1": side_board_height, + "x2": bottom_layer_height, + "x3": leg_gap, + "x4": top_layer_height, + "x5": board_width, + }, + ) + + set_material_3 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={"Geometry": side_boards, "Material": kwargs["leg_material"]}, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={ + "Geometry": [set_material, set_material_2, set_material_1, set_material_3] + }, + ) + + realize_instances = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": join_geometry} + ) + + transform4 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": realize_instances, "Scale": (-1, 1, 1)}, + ) + + triangulate = nw.new_node( + "GeometryNodeTriangulate", input_kwargs={"Mesh": transform4} + ) + + transform5 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": triangulate, "Rotation": (0.0000, 0.0000, -1.5708)}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": transform5}, + attrs={"is_active_output": True}, + ) + + +class TriangleShelfBaseFactory(AssetFactory): + def __init__(self, factory_seed, params={}, coarse=False): + super(TriangleShelfBaseFactory, self).__init__(factory_seed, coarse=coarse) + self.params = {} + + def sample_params(self): + return self.params.copy() + + def get_asset_params(self, i=0): + params = self.sample_params() + if params.get("leg_board_gap", None) is None: + params["leg_board_gap"] = uniform(0.002, 0.005) + if params.get("leg_width", None) is None: + params["leg_width"] = uniform(0.01, 0.03) + if params.get("leg_depth", None) is None: + params["leg_depth"] = uniform(0.01, 0.02) + if params.get("leg_length", None) is None: + params["leg_length"] = np.clip(normal(0.6, 0.05), 0.45, 0.75) + if params.get("leg_curvature_ratio", None) is None: + params["leg_curvature_ratio"] = uniform(0.0, 0.02) + if params.get("board_thickness", None) is None: + params["board_thickness"] = uniform(0.01, 0.025) + if params.get("board_width", None) is None: + params["board_width"] = np.clip(normal(0.3, 0.03), 0.2, 0.4) + if params.get("board_extrude_length", None) is None: + params["board_extrude_length"] = uniform(0.03, 0.07) + if params.get("side_board_height", None) is None: + params["side_board_height"] = uniform(0.02, 0.04) + if params.get("bottom_layer_height", None) is None: + params["bottom_layer_height"] = uniform(0.05, 0.1) + if params.get("shelf_layer_height", None) is None: + params["top_layer_height"] = params["leg_length"] - uniform(0.02, 0.07) + if params.get("board_material", None) is None: + params["board_material"] = np.random.choice( + ["black_wood", "wood", "white"], p=[0.2, 0.6, 0.2] + ) + if params.get("leg_material", None) is None: + params["leg_material"] = np.random.choice( + ["black_wood", "wood", "white"], p=[0.2, 0.6, 0.2] + ) + params["mid_layer_height"] = ( + params["top_layer_height"] + params["bottom_layer_height"] + ) / 2.0 + + params = self.get_material_func(params) + return params + + def get_material_func(self, params, randomness=True): + params["board_material"] = get_shelf_material(params["board_material"]) + params["leg_material"] = get_shelf_material( + params["leg_material"], z_axis_texture=True + ) + return params + + def create_asset(self, i=0, **params): + bpy.ops.mesh.primitive_plane_add( + size=1, + enter_editmode=False, + 
align="WORLD", + location=(0, 0, 0), + scale=(1, 1, 1), + ) + obj = bpy.context.active_object + + obj_params = self.get_asset_params(i) + surface.add_geomod( + obj, geometry_nodes, attributes=[], input_kwargs=obj_params, apply=True + ) + tagging.tag_system.relabel_obj(obj) + + return obj + + +class TriangleShelfFactory(TriangleShelfBaseFactory): + def sample_params(self): + params = dict() + params["Dimensions"] = ( + uniform(0.25, 0.35), + uniform(0.25, 0.35), + uniform(0.5, 0.7), + ) + params["leg_length"] = params["Dimensions"][2] + params["board_width"] = params["Dimensions"][0] + return params diff --git a/infinigen/assets/objects/shelves/utils.py b/infinigen/assets/objects/shelves/utils.py new file mode 100644 index 000000000..2f0fa73f2 --- /dev/null +++ b/infinigen/assets/objects/shelves/utils.py @@ -0,0 +1,92 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Beining Han + +import bpy +import numpy as np + +from infinigen.assets.utils.extract_nodegroup_parts import extract_nodegroup_geo +from infinigen.core import tagging +from infinigen.core import tags as t +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.util import blender as butil + + +def get_nodegroup_assets(func, params): + bpy.ops.mesh.primitive_plane_add( + size=1, enter_editmode=False, align="WORLD", location=(0, 0, 0), scale=(1, 1, 1) + ) + obj = bpy.context.active_object + + with butil.TemporaryObject(obj) as base_obj: + node_group_func = func(**params) + geo_outputs = [ + o + for o in node_group_func.outputs + if o.bl_socket_idname == "NodeSocketGeometry" + ] + results = { + o.name: extract_nodegroup_geo( + base_obj, node_group_func, o.name, ng_params={} + ) + for o in geo_outputs + } + + return results + + +@node_utils.to_nodegroup( + "nodegroup_tagged_cube", singleton=False, type="GeometryNodeTree" +) +def nodegroup_tagged_cube(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVectorTranslation", "Size", (1.0000, 1.0000, 1.0000)) + ], + ) + + cube = nw.new_node( + Nodes.MeshCube, input_kwargs={"Size": group_input.outputs["Size"]} + ) + + index = nw.new_node(Nodes.Index) + + equal = nw.new_node( + Nodes.Compare, + input_kwargs={2: index, 3: 2}, + attrs={"data_type": "INT", "operation": "EQUAL"}, + ) + + cube = tagging.tag_nodegroup(nw, cube, t.Subpart.SupportSurface, selection=equal) + + # subdivide_mesh = nw.new_node(Nodes.SubdivideMesh, input_kwargs={'Mesh': cube, 'Level': 2}) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Mesh": cube}, attrs={"is_active_output": True} + ) + + +def blender_rotate(vec): + if isinstance(vec, tuple): + vec = list(vec) + if isinstance(vec, list): + vec = np.array(vec, dtype=np.float32) + if len(vec.shape) == 1: + vec = np.expand_dims(vec, axis=-1) + if vec.shape[0] == 3: + new_vec = np.array([[1, 0, 0], [0, 0, 1], [0, -1, 0]], dtype=np.float32) @ vec + return new_vec.squeeze() + if vec.shape[0] == 4: + new_vec = ( + np.array( + [[1, 0, 0, 0], [0, 0, 1, 0], [0, -1, 0, 0], [0, 0, 0, 1]], + dtype=np.float32, + ) + @ vec + ) + return new_vec.squeeze() diff --git a/infinigen/assets/small_plants/__init__.py b/infinigen/assets/objects/small_plants/__init__.py similarity index 72% rename from infinigen/assets/small_plants/__init__.py rename to 
infinigen/assets/objects/small_plants/__init__.py index 70eeaac6a..db5e5b2b3 100644 --- a/infinigen/assets/small_plants/__init__.py +++ b/infinigen/assets/objects/small_plants/__init__.py @@ -1,4 +1,4 @@ from .fern import FernFactory -from .succulent import SucculentFactory +from .snake_plant import SnakePlantFactory from .spider_plant import SpiderPlantFactory -from .snake_plant import SnakePlantFactory \ No newline at end of file +from .succulent import SucculentFactory diff --git a/infinigen/assets/objects/small_plants/fern.py b/infinigen/assets/objects/small_plants/fern.py new file mode 100644 index 000000000..4a66f72ed --- /dev/null +++ b/infinigen/assets/objects/small_plants/fern.py @@ -0,0 +1,1451 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Beining Han +# Acknowledgement: This file draws inspiration from https://www.youtube.com/watch?v=MGxNuS_-bpo by Bad Normals + + +import bpy +import gin +import numpy as np +from numpy.random import normal, randint, uniform + +from infinigen.assets.materials import simple_greenery +from infinigen.assets.objects.small_plants import leaf_general as Leaf +from infinigen.core import surface +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.tagging import tag_object +from infinigen.core.util import blender as butil + + +def random_pinnae_level2_curvature(): + z_max_curvature = uniform(0.3, 0.45, (1,))[0] + y_curvature_noise = np.clip(np.abs(normal(0.0, 0.2, (1,))), a_min=0.0, a_max=0.3)[0] + y_curvature_k = uniform(-0.04, 0.2, (1,))[0] + z_curvature, y_curvature = [0.25], [0.5] + for k in range(1, 6): + z_curvature.append(0.25 + z_max_curvature * k / 5.0) + y_curvature.append(0.5 + y_curvature_k + y_curvature_noise * k / 5.0) + x_curvature = [0.0 for _ in range(6)] + return x_curvature, y_curvature, z_curvature + + +@node_utils.to_nodegroup( + "nodegroup_pinnae_level1_yaxis_rotation", singleton=False, type="GeometryNodeTree" +) +def nodegroup_pinnae_level1_yaxis_rotation(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "From Max", 1.0), + ("NodeSocketFloat", "Value", 1.0), + ], + ) + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": group_input.outputs["Value"], + 2: group_input.outputs["From Max"], + }, + ) + curvature = np.clip(normal(0, 0.3, 1), a_min=-0.4, a_max=0.4) + float_curve = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": map_range.outputs["Result"]} + ) + node_utils.assign_curve( + float_curve.mapping.curves[0], + [ + (0.0, 0.5), + (0.1, curvature / 5.0 + 0.5), + (0.25, curvature / 2.5 + 0.5), + (0.45, curvature / 1.5 + 0.5), + (0.6, curvature / 1.2 + 0.5), + (1.0, curvature + 0.5), + ], + ) + add = nw.new_node( + Nodes.Math, input_kwargs={0: float_curve, 1: -0.5}, attrs={"operation": "ADD"} + ) + multiply = nw.new_node( + Nodes.Math, input_kwargs={0: add, 1: 1.0}, attrs={"operation": "MULTIPLY"} + ) + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Value": multiply}) + + +@node_utils.to_nodegroup( + "nodegroup_pinnae_level1_zaxis_rotation", singleton=False, type="GeometryNodeTree" +) +def nodegroup_pinnae_level1_zaxis_rotation(nw: NodeWrangler): + # Code generated using version 2.4.3 of the 
node_transpiler + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "From Max", 1.0), + ("NodeSocketFloat", "Value", 1.0), + ], + ) + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": group_input.outputs["Value"], + 2: group_input.outputs["From Max"], + }, + ) + curvature = normal(0, 0.2, 1) + float_curve = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": map_range.outputs["Result"]} + ) + node_utils.assign_curve( + float_curve.mapping.curves[0], + [ + (0.0, 0.5), + (0.1, curvature / 5.0 + 0.5), + (0.25, curvature / 2.5 + 0.5), + (0.45, curvature / 1.5 + 0.5), + (0.6, curvature / 1.2 + 0.5), + (1.0, curvature + 0.5), + ], + ) + add = nw.new_node( + Nodes.Math, input_kwargs={0: float_curve, 1: -0.5}, attrs={"operation": "ADD"} + ) + multiply = nw.new_node( + Nodes.Math, input_kwargs={0: add, 1: 1.0}, attrs={"operation": "MULTIPLY"} + ) + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Value": multiply}) + + +@node_utils.to_nodegroup( + "nodegroup_pinnae_level1_gravity_rotation", singleton=False, type="GeometryNodeTree" +) +def nodegroup_pinnae_level1_gravity_rotation(nw: NodeWrangler, gravity_rotation=1.0): + # Code generated using version 2.4.3 of the node_transpiler + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "From Max", 1.0), + ("NodeSocketFloat", "Value", 1.0), + ], + ) + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": group_input.outputs["Value"], + 2: group_input.outputs["From Max"], + }, + ) + curvature = uniform(0.25, 0.42, size=(1,))[0] * gravity_rotation + float_curve = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": map_range.outputs["Result"]} + ) + node_utils.assign_curve( + float_curve.mapping.curves[0], + [ + (0.0, 0.5), + (0.1, curvature / 5.0 + 0.5), + (0.25, curvature / 2.5 + 0.5), + (0.45, curvature / 1.67 + 0.5), + (0.6, curvature / 1.25 + 0.5), + (1.0, curvature + 0.5), + ], + ) + add = nw.new_node( + Nodes.Math, input_kwargs={0: float_curve, 1: -0.5}, attrs={"operation": "ADD"} + ) + multiply = nw.new_node( + Nodes.Math, input_kwargs={0: add, 1: 1}, attrs={"operation": "MULTIPLY"} + ) + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Value": multiply}) + + +@node_utils.to_nodegroup( + "nodegroup_pinnae_level1_xaxis_rotation", singleton=False, type="GeometryNodeTree" +) +def nodegroup_pinnae_level1_xaxis_rotation(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "From Max", 1.0000), + ("NodeSocketFloat", "Value1", 1.0000), + ("NodeSocketFloat", "Value2", 1.0000), + ], + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": group_input.outputs["Value1"], + 2: group_input.outputs["From Max"], + }, + attrs={"clamp": False}, + ) + + float_curve = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": map_range_1.outputs["Result"]} + ) + node_utils.assign_curve( + float_curve.mapping.curves[0], + [ + (0.0000, 0.0000), + (0.2000, 0.2563), + (0.4843, 0.4089), + (0.7882, 0.3441), + (1.0000, 0.0000), + ], + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": group_input.outputs["Value2"], 3: -1.5000, 4: 0.0000}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: float_curve, 1: map_range.outputs["Result"]}, + attrs={"operation": "MULTIPLY"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Value": multiply}, + 
attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_pinnae_level1_stein", singleton=False, type="GeometryNodeTree" +) +def nodegroup_pinnae_level1_stein(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Mesh", None), + ("NodeSocketFloat", "Value1", 0.5), + ("NodeSocketFloat", "Value2", 0.5), + ], + ) + mesh_to_curve = nw.new_node( + Nodes.MeshToCurve, input_kwargs={"Mesh": group_input.outputs["Mesh"]} + ) + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Value2"], 1: 0.01}, + attrs={"operation": "MULTIPLY"}, + ) + set_curve_radius = nw.new_node( + Nodes.SetCurveRadius, input_kwargs={"Curve": mesh_to_curve, "Radius": multiply} + ) + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Value1"], 1: 15.0}, + attrs={"operation": "MULTIPLY"}, + ) + curve_circle = nw.new_node( + Nodes.CurveCircle, input_kwargs={"Radius": multiply_1, "Resolution": 10} + ) + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": set_curve_radius, + "Profile Curve": curve_circle.outputs["Curve"], + }, + ) + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Mesh": curve_to_mesh}) + + +@node_utils.to_nodegroup( + "nodegroup_pinnae_level1_scale", singleton=False, type="GeometryNodeTree" +) +def nodegroup_pinnae_level1_scale(nw: NodeWrangler, pinnae_contour): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "Value1", 1.0), + ("NodeSocketFloat", "Value2", 1.0), + ], + ) + + pinnae_contour_float_curve = nw.new_node( + Nodes.FloatCurve, + input_kwargs={"Value": group_input.outputs["Value1"]}, + label="PinnaeContourFloatCurve", + ) + node_utils.assign_curve( + pinnae_contour_float_curve.mapping.curves[0], + [ + (0.0, pinnae_contour[0]), + (0.2, pinnae_contour[1]), + (0.4, pinnae_contour[2]), + (0.55, pinnae_contour[3]), + (0.7, pinnae_contour[4]), + (0.8, pinnae_contour[5]), + (0.9, pinnae_contour[6]), + (1.0, pinnae_contour[7]), + ], + ) + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": group_input.outputs["Value2"], 3: 1.0, 4: 3.0}, + ) + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: pinnae_contour_float_curve, 1: map_range.outputs["Result"]}, + attrs={"operation": "MULTIPLY"}, + ) + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Value": multiply}) + + +@node_utils.to_nodegroup( + "nodegroup_pinnae_level1_instance_rotation", + singleton=False, + type="GeometryNodeTree", +) +def nodegroup_pinnae_level1_instance_rotation(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "Value1", 0.5), + ("NodeSocketFloat", "Value2", 1.0), + ], + ) + map_range_8 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": group_input.outputs["Value2"], 3: 2, 4: 3.1}, + ) + add = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["Value1"], + 1: map_range_8.outputs["Result"], + }, + ) + combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={"X": add}) + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Vector": combine_xyz}) + + +@node_utils.to_nodegroup( + "nodegroup_pinnae_level1_rotation", singleton=False, type="GeometryNodeTree" +) +def nodegroup_pinnae_level1_rotation(nw: NodeWrangler, 
gravity_rotation=1): + # Code generated using version 2.4.3 of the node_transpiler + + position = nw.new_node(Nodes.InputPosition) + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketFloat", "Value1", 1.0), + ("NodeSocketFloat", "Value2", 0.5), + ], + ) + bounding_box = nw.new_node( + Nodes.BoundingBox, input_kwargs={"Geometry": group_input.outputs["Geometry"]} + ) + multiply = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: bounding_box.outputs["Max"], 1: (0.0, 0.0, 1.0)}, + attrs={"operation": "MULTIPLY"}, + ) + add = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["Value2"], 1: 0.0} + ) + pinnae_index = nw.new_node(Nodes.Index, label="PinnaeIndex") + pinnaelevel1xaxisrotation = nw.new_node( + nodegroup_pinnae_level1_xaxis_rotation().name, + input_kwargs={ + "From Max": add, + 1: pinnae_index, + 2: group_input.outputs["Value1"], + }, + ) + vector_rotate = nw.new_node( + Nodes.VectorRotate, + input_kwargs={ + "Vector": position, + "Center": (0, 0, 0), + "Angle": pinnaelevel1xaxisrotation, + }, + attrs={"rotation_type": "X_AXIS"}, + ) + pinnaelevel1gravityrotation = nw.new_node( + nodegroup_pinnae_level1_gravity_rotation( + gravity_rotation=gravity_rotation + ).name, + input_kwargs={"From Max": add, "Value": pinnae_index}, + ) + vector_rotate_1 = nw.new_node( + Nodes.VectorRotate, + input_kwargs={ + "Vector": vector_rotate, + "Center": (0, 0, 0), + "Angle": pinnaelevel1gravityrotation, + }, + attrs={"rotation_type": "X_AXIS"}, + ) + pinnaelevel1zaxisrotation = nw.new_node( + nodegroup_pinnae_level1_zaxis_rotation().name, + input_kwargs={"From Max": add, "Value": pinnae_index}, + ) + vector_rotate_2 = nw.new_node( + Nodes.VectorRotate, + input_kwargs={ + "Vector": vector_rotate_1, + "Center": multiply.outputs["Vector"], + "Angle": pinnaelevel1zaxisrotation, + }, + attrs={"rotation_type": "Z_AXIS"}, + ) + pinnaelevel1yaxisrotation = nw.new_node( + nodegroup_pinnae_level1_yaxis_rotation().name, + input_kwargs={"From Max": add, "Value": pinnae_index}, + ) + vector_rotate_3 = nw.new_node( + Nodes.VectorRotate, + input_kwargs={ + "Vector": vector_rotate_2, + "Center": multiply.outputs["Vector"], + "Angle": pinnaelevel1yaxisrotation, + }, + attrs={"rotation_type": "Y_AXIS"}, + ) + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Vector": vector_rotate_3, "Value": pinnaelevel1xaxisrotation}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_pinnae_level1_instance_position", + singleton=False, + type="GeometryNodeTree", +) +def nodegroup_pinnae_level1_instance_position(nw: NodeWrangler, pinnae_contour): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "Value1", 1.0), + ("NodeSocketFloat", "From Max", 1.0), + ("NodeSocketFloat", "Value2", 1.0), + ], + ) + + map_range_3 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": group_input.outputs["Value1"], + 2: group_input.outputs["From Max"], + 3: 1.0, + 4: 0.0, + }, + ) + + float_curve_2 = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": map_range_3.outputs["Result"]} + ) + node_utils.assign_curve( + float_curve_2.mapping.curves[0], + [ + (0.0, pinnae_contour[0]), + (0.2, pinnae_contour[1]), + (0.4, pinnae_contour[2]), + (0.55, pinnae_contour[3]), + (0.7, pinnae_contour[4]), + (0.8, pinnae_contour[5]), + (0.9, pinnae_contour[6]), + (1.0, pinnae_contour[7]), + ], + ) + accumulate_field_1 = nw.new_node( + Nodes.AccumulateField, 
input_kwargs={1: float_curve_2} + ) + # pinnae scale w.r.t fern age + map_range_5 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": group_input.outputs["Value2"], 3: 0.3, 4: 4.5}, + ) + multiply = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: accumulate_field_1.outputs[4], + 1: map_range_5.outputs["Result"], + }, + attrs={"operation": "MULTIPLY"}, + ) + combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply}) + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Vector": combine_xyz_1, "Result": map_range_3.outputs["Result"]}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_pinnae_level2_rotation", singleton=False, type="GeometryNodeTree" +) +def nodegroup_pinnae_level2_rotation( + nw: NodeWrangler, z_axis_rotate, y_axis_rotate, x_axis_rotate +): + # Code generated using version 2.4.3 of the node_transpiler + + position_1 = nw.new_node(Nodes.InputPosition) + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketFloat", "Value1", 1.0), + ("NodeSocketFloat", "Value2", 0.5), + ("NodeSocketFloat", "Value3", 0.5), + ], + ) + add = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["Value2"], 1: 0.0} + ) + add_1 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["Value3"], 1: 0.0} + ) + map_range_2 = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": add, "From Max": add_1} + ) + float_curve_1 = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": map_range_2.outputs["Result"]} + ) + node_utils.assign_curve( + float_curve_1.mapping.curves[0], + [ + (0.0, z_axis_rotate[0]), + (0.1, z_axis_rotate[1]), + (0.25, z_axis_rotate[2]), + (0.45, z_axis_rotate[3]), + (0.6, z_axis_rotate[4]), + (1.0, z_axis_rotate[5]), + ], + ) + add_2 = nw.new_node(Nodes.Math, input_kwargs={0: float_curve_1, 1: -0.25}) + + # pinna z-axis curvature w.r.t the fern age + map_range_7 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": group_input.outputs["Value1"], 3: 1.2, 4: 0.0}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: add_2, 1: map_range_7.outputs["Result"]}, + attrs={"operation": "MULTIPLY"}, + ) + vector_rotate_1 = nw.new_node( + Nodes.VectorRotate, + input_kwargs={"Vector": position_1, "Center": (0, 0, 0), "Angle": multiply_1}, + attrs={"rotation_type": "Z_AXIS"}, + ) + map_range = nw.new_node(Nodes.MapRange, input_kwargs={"Value": add, 2: add_1}) + float_curve = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": map_range.outputs["Result"]} + ) + node_utils.assign_curve( + float_curve.mapping.curves[0], + [ + (0.0, y_axis_rotate[0]), + (0.1, y_axis_rotate[1]), + (0.25, y_axis_rotate[2]), + (0.45, y_axis_rotate[3]), + (0.6, y_axis_rotate[4]), + (1.0, y_axis_rotate[5]), + ], + ) + + add_3 = nw.new_node(Nodes.Math, input_kwargs={0: float_curve, 1: -0.5}) + multiply_2 = nw.new_node( + Nodes.Math, input_kwargs={0: add_3, 1: 1.0}, attrs={"operation": "MULTIPLY"} + ) + vector_rotate = nw.new_node( + Nodes.VectorRotate, + input_kwargs={"Vector": vector_rotate_1, "Angle": multiply_2}, + attrs={"rotation_type": "Y_AXIS"}, + ) + map_range_1 = nw.new_node(Nodes.MapRange, input_kwargs={"Value": add, 2: add_1}) + float_curve_2 = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": map_range_1.outputs["Result"]} + ) + node_utils.assign_curve( + float_curve_2.mapping.curves[0], + [ + (0.0, x_axis_rotate[0]), + (0.1, x_axis_rotate[1]), + (0.25, x_axis_rotate[2]), + (0.45, x_axis_rotate[3]), + (0.6, x_axis_rotate[4]), + (1.0, x_axis_rotate[5]), + ], + ) 
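+ # The three FloatCurve ramps in this nodegroup take their control points from random_pinnae_level2_curvature(): + # float_curve_1 drives the Z-axis bend, float_curve the Y-axis bend, and float_curve_2 (applied just below) the + # X-axis bend; each ramp is scaled and fed to a VectorRotate node so leaf positions curl progressively along the pinna.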
+ multiply_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: float_curve_2, 1: 1.0}, + attrs={"operation": "MULTIPLY"}, + ) + vector_rotate_2 = nw.new_node( + Nodes.VectorRotate, + input_kwargs={"Vector": vector_rotate, "Angle": multiply_3}, + attrs={"rotation_type": "X_AXIS"}, + ) + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Vector": vector_rotate_2} + ) + + +@node_utils.to_nodegroup( + "nodegroup_pinnae_level2_set_point", singleton=False, type="GeometryNodeTree" +) +def nodegroup_pinnae_level2_set_point(nw: NodeWrangler, pinna_contour): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "Value1", 1.0), + ("NodeSocketFloat", "From Max", 1.0), + ("NodeSocketFloat", "Value2", 1.0), + ], + ) + map_range_4 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": group_input.outputs["Value1"], + 2: group_input.outputs["From Max"], + 3: 1.0, + 4: 0.0, + }, + ) + float_curve = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": map_range_4.outputs["Result"]} + ) + node_utils.assign_curve( + float_curve.mapping.curves[0], + [ + (0.0, pinna_contour[0]), + (0.38, pinna_contour[1]), + (0.55, pinna_contour[2]), + (0.75, pinna_contour[3]), + (0.9, pinna_contour[4]), + (1.0, pinna_contour[5]), + ], + ) + accumulate_field_2 = nw.new_node( + Nodes.AccumulateField, input_kwargs={1: float_curve} + ) + + # pinna scale w.r.t fern age + map_range_6 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": group_input.outputs["Value2"], 3: 0.5, 4: 2.0}, + ) + multiply = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: accumulate_field_2.outputs[4], + 1: map_range_6.outputs["Result"], + }, + attrs={"operation": "MULTIPLY"}, + ) + combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"X": multiply}) + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Vector": combine_xyz_2, + "Value": float_curve, + "Result": map_range_4.outputs["Result"], + }, + ) + + +@node_utils.to_nodegroup( + "nodegroup_pinnae_level2_instance_on_points", + singleton=False, + type="GeometryNodeTree", +) +def nodegroup_pinnae_level2_instance_on_points(nw: NodeWrangler, leaf, pinna_contour): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Points", None), + ("NodeSocketFloat", "Value1", 1.0), + ("NodeSocketFloat", "Value2", 0.5), + ("NodeSocketFloat", "Value3", 1.0), + ], + ) + index = nw.new_node(Nodes.Index) + object_info_2 = nw.new_node(Nodes.ObjectInfo, input_kwargs={"Object": leaf}) + transform = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": object_info_2.outputs["Geometry"], + "Scale": (1.2, -1.0, 1.0), + }, + ) + transform_2 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": object_info_2.outputs["Geometry"], + "Scale": (1.2, 1.0, 1.0), + }, + ) + join_geometry = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [transform, transform_2]} + ) + add = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["Value2"], 1: -0.3} + ) + combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"X": 1.57, "Z": add}) + float_curve_6 = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": group_input.outputs["Value1"]} + ) + node_utils.assign_curve( + float_curve_6.mapping.curves[0], + [ + (0.0, pinna_contour[0]), + (0.38, pinna_contour[1]), + (0.55, pinna_contour[2]), + (0.75, pinna_contour[3]), + (0.9, pinna_contour[4]), + (1.0, 
pinna_contour[5]), + ], + ) + # pinna leaf size w.r.t the fern age + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": group_input.outputs["Value3"], 3: 6, 4: 8}, + ) + multiply = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: float_curve_6, 1: map_range.outputs["Result"]}, + attrs={"operation": "MULTIPLY"}, + ) + instance_on_points_2 = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={ + "Points": group_input.outputs["Points"], + "Selection": index, + "Instance": join_geometry, + "Rotation": combine_xyz_3, + "Scale": multiply.outputs["Vector"], + }, + ) + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Instances": instance_on_points_2} + ) + + +@node_utils.to_nodegroup( + "nodegroup_pinnae_level2_stein", singleton=False, type="GeometryNodeTree" +) +def nodegroup_pinnae_level2_stein(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "Value1", 0.5), + ("NodeSocketFloat", "Value2", 0.5), + ("NodeSocketGeometry", "Mesh", None), + ], + ) + mesh_to_curve_1 = nw.new_node( + Nodes.MeshToCurve, input_kwargs={"Mesh": group_input.outputs["Mesh"]} + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Value1"], 1: 0.1}, + attrs={"operation": "MULTIPLY"}, + ) + set_curve_radius_1 = nw.new_node( + Nodes.SetCurveRadius, + input_kwargs={"Curve": mesh_to_curve_1, "Radius": multiply}, + ) + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Value2"], 1: 0.5}, + attrs={"operation": "MULTIPLY"}, + ) + curve_circle_1 = nw.new_node( + Nodes.CurveCircle, input_kwargs={"Radius": multiply_1, "Resolution": 10} + ) + curve_to_mesh_1 = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": set_curve_radius_1, + "Profile Curve": curve_circle_1.outputs["Curve"], + }, + ) + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Mesh": curve_to_mesh_1} + ) + + +@node_utils.to_nodegroup("nodegroup_pinnae", singleton=False, type="GeometryNodeTree") +def geometry_pinnae_nodes( + nw: NodeWrangler, + leaf, + leaf_num_param=18, + age_param=0.4, + pinna_num_param=40, + version_num_param=4, + gravity_rotation=1, +): + # Code generated using version 2.4.3 of the node_transpiler + + # Define Input Node + leaf_index = nw.new_node(Nodes.Index, label="LeafIndex") + pinna_index = nw.new_node(Nodes.Index, label="PinnaIndex") + pinna_num = nw.new_node(Nodes.Integer, label="PinnaNum", attrs={"integer": 10}) + pinna_num.integer = pinna_num_param + age = nw.new_node(Nodes.Value, label="Age") + age.outputs[0].default_value = age_param + + mesh_lines_left, selections_left = [], [] + mesh_lines_right, selections_right = [], [] + + # Generate Random Pinnae Contour, Two Modes: Linear+Noise, StepwiseLinear+Noise + mode_random_bit = randint(0, 2, size=(1,))[0] + if mode_random_bit: + pinnae_contour = [0, 0.2, 0.6, 1.4, 3.0, 4.0, 5.0, 6.0] + for i in range(8): + pinnae_contour[i] = (pinnae_contour[i] + normal(0, 0.04 * i, (1,))[0]) / 6.0 + else: + pinnae_contour = [0, 0.2, 0.6, 1.4, 3.0, 4.0, 5.0, 4.2] + for i in range(8): + pinnae_contour[i] = (pinnae_contour[i] + normal(0, 0.04 * i, (1,))[0]) / 6.0 + + # Common Components + pinnaelevel1instanceposition = nw.new_node( + nodegroup_pinnae_level1_instance_position(pinnae_contour).name, + input_kwargs={0: pinna_index, "From Max": pinna_num, 2: age}, + ) + left_noise, right_noise = ( + nw.new_node(Nodes.WhiteNoiseTexture), + nw.new_node(Nodes.WhiteNoiseTexture), + 
) + pinnaelevel1scale = nw.new_node( + nodegroup_pinnae_level1_scale(pinnae_contour).name, + input_kwargs={0: pinnaelevel1instanceposition.outputs["Result"], 1: age}, + ) + + # Left & Right Instance Point Selections for each Version + random_bit = randint(2, size=(1,))[0] + for i in range(version_num_param): + index = nw.new_node(Nodes.Index) + greater_equal = nw.new_node( + Nodes.Compare, + input_kwargs={0: left_noise.outputs["Value"], 1: i / version_num_param}, + attrs={"operation": "GREATER_EQUAL"}, + ) + less_equal = nw.new_node( + Nodes.Compare, + input_kwargs={ + 0: left_noise.outputs["Value"], + 1: (i + 1) / version_num_param, + }, + attrs={"operation": "LESS_EQUAL"}, + ) + op_and = nw.new_node( + Nodes.BooleanMath, input_kwargs={0: greater_equal, 1: less_equal} + ) + + greater_than = nw.new_node( + Nodes.Math, + input_kwargs={0: index, 1: 2.0}, + attrs={"operation": "GREATER_THAN"}, + ) + modulo = nw.new_node( + Nodes.Math, input_kwargs={0: index, 1: 2.0}, attrs={"operation": "MODULO"} + ) + if random_bit: + modulo = nw.new_node( + Nodes.Math, + input_kwargs={0: 1, 1: modulo}, + attrs={"operation": "SUBTRACT"}, + ) + op_and_1 = nw.new_node( + Nodes.BooleanMath, input_kwargs={0: greater_than, 1: modulo} + ) + op_and_2 = nw.new_node(Nodes.BooleanMath, input_kwargs={0: op_and, 1: op_and_1}) + selections_left.append(op_and_2) + + random_bit = randint(2, size=(1,))[0] + for i in range(version_num_param): + greater_equal = nw.new_node( + Nodes.Compare, + input_kwargs={0: right_noise.outputs["Value"], 1: i / version_num_param}, + attrs={"operation": "GREATER_EQUAL"}, + ) + less_equal = nw.new_node( + Nodes.Compare, + input_kwargs={ + 0: right_noise.outputs["Value"], + 1: (i + 1) / version_num_param, + }, + attrs={"operation": "LESS_EQUAL"}, + ) + op_and = nw.new_node( + Nodes.BooleanMath, input_kwargs={0: greater_equal, 1: less_equal} + ) + index = nw.new_node(Nodes.Index) + greater_than = nw.new_node( + Nodes.Math, + input_kwargs={0: index, 1: 2.0}, + attrs={"operation": "GREATER_THAN"}, + ) + modulo = nw.new_node( + Nodes.Math, input_kwargs={0: index, 1: 2.0}, attrs={"operation": "MODULO"} + ) + if random_bit: + modulo = nw.new_node( + Nodes.Math, + input_kwargs={0: 1, 1: modulo}, + attrs={"operation": "SUBTRACT"}, + ) + op_and_1 = nw.new_node( + Nodes.BooleanMath, input_kwargs={0: greater_than, 1: modulo} + ) + op_and_2 = nw.new_node(Nodes.BooleanMath, input_kwargs={0: op_and, 1: op_and_1}) + selections_right.append(op_and_2) + + # Each Pinna Version + rotation, pinnaelevel1rotation = True, None + for i in range(version_num_param): + # Define the Pinna Contour of each Version + pinna_contour = [] + k = uniform(0.5, 0.58, size=(1,))[0] + for j in range(6): + pinna_contour.append( + k * np.clip(j * (1.0 + normal(0, 0.1, (1,))[0]) / 5.0 + 0.08, 0, 0.7) + ) + # Define the Num Leaf of each Version + integer_2 = nw.new_node(Nodes.Integer, attrs={"integer": 10}) + integer_2.integer = leaf_num_param + randint(-1, 2, (1,))[0] + + mesh_line_pinna = nw.new_node( + Nodes.MeshLine, input_kwargs={"Count": pinna_num, "Offset": (0.0, 0.0, 0.0)} + ) + set_position_pinna = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": mesh_line_pinna, + "Position": pinnaelevel1instanceposition.outputs["Vector"], + }, + ) + if rotation: + pinnaelevel1rotation = nw.new_node( + nodegroup_pinnae_level1_rotation( + gravity_rotation=gravity_rotation + ).name, + input_kwargs={"Geometry": set_position_pinna, 1: age, 2: pinna_num}, + ) + rotation = False + pinnaelevel1instancerotation = nw.new_node( + 
nodegroup_pinnae_level1_instance_rotation().name, + input_kwargs={0: pinnaelevel1rotation.outputs["Value"], 1: age}, + ) + set_rotation_pinna = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": set_position_pinna, + "Position": pinnaelevel1rotation.outputs["Vector"], + }, + ) + mesh_line_leaf = nw.new_node( + Nodes.MeshLine, input_kwargs={"Count": integer_2, "Offset": (0.0, 0.0, 0.0)} + ) + pinnaelevel2setpoint = nw.new_node( + nodegroup_pinnae_level2_set_point(pinna_contour=pinna_contour).name, + input_kwargs={0: leaf_index, "From Max": integer_2, 2: age}, + ) + set_position_leaf = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": mesh_line_leaf, + "Position": pinnaelevel2setpoint.outputs["Vector"], + }, + ) + + x_curvature, y_curvature, z_curvature = random_pinnae_level2_curvature() + pinnaelevel2rotation = nw.new_node( + nodegroup_pinnae_level2_rotation( + z_axis_rotate=z_curvature, + y_axis_rotate=y_curvature, + x_axis_rotate=x_curvature, + ).name, + input_kwargs={ + "Geometry": set_position_leaf, + 1: age, + 2: leaf_index, + 3: integer_2, + }, + ) + set_rotation_leaf = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": set_position_leaf, + "Position": pinnaelevel2rotation, + }, + ) + pinna_on_pinnae = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={ + "Points": set_rotation_pinna, + "Selection": selections_left[i], + "Instance": set_rotation_leaf, + "Rotation": pinnaelevel1instancerotation, + "Scale": pinnaelevel1scale, + }, + ) + rotate_instances = nw.new_node( + Nodes.RotateInstances, + input_kwargs={ + "Instances": pinna_on_pinnae, + "Rotation": (-0.1571, 0.0, 0.0), + }, + ) + scale_instances = nw.new_node( + Nodes.ScaleInstances, + input_kwargs={"Instances": rotate_instances, "Scale": (-1.0, 1.0, 1.0)}, + ) + pinnaelevel2stein = nw.new_node( + nodegroup_pinnae_level2_stein().name, + input_kwargs={ + 0: pinnaelevel2setpoint.outputs["Result"], + "Mesh": scale_instances, + }, + ) + pinnaelevel2instanceonpoints = nw.new_node( + nodegroup_pinnae_level2_instance_on_points( + leaf=leaf, pinna_contour=pinna_contour + ).name, + input_kwargs={ + "Points": scale_instances, + 1: pinnaelevel2setpoint.outputs["Result"], + 2: 0.0, + 3: age, + }, + ) + join_geometry = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={ + "Geometry": [pinnaelevel2stein, pinnaelevel2instanceonpoints] + }, + ) + + mesh_lines_left.append(join_geometry) + if i == version_num_param - 1: + pinnaelevel1stein = nw.new_node( + nodegroup_pinnae_level1_stein().name, + input_kwargs={ + "Mesh": set_rotation_pinna, + 1: age, + 2: pinnaelevel1instanceposition.outputs["Result"], + }, + ) + mesh_lines_left.append(pinnaelevel1stein) + + for i in range(version_num_param): + # Define the Pinna Contour of each Version + pinna_contour = [] + k = uniform(0.5, 0.58, size=(1,))[0] + for j in range(6): + pinna_contour.append( + k * np.clip(j * (1.0 + normal(0, 0.1, (1,))[0]) / 5.0 + 0.08, 0, 0.7) + ) + # Define the Num Leaf of each Version + integer_2 = nw.new_node(Nodes.Integer, attrs={"integer": 10}) + integer_2.integer = leaf_num_param + randint(-1, 2, (1,))[0] + + mesh_line_pinna = nw.new_node( + Nodes.MeshLine, input_kwargs={"Count": pinna_num, "Offset": (0.0, 0.0, 0.0)} + ) + set_position_pinna = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": mesh_line_pinna, + "Position": pinnaelevel1instanceposition.outputs["Vector"], + }, + ) + pinnaelevel1instancerotation = nw.new_node( + nodegroup_pinnae_level1_instance_rotation().name, + input_kwargs={0: 
pinnaelevel1rotation.outputs["Value"], 1: age}, + ) + set_rotation_pinna = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": set_position_pinna, + "Position": pinnaelevel1rotation.outputs["Vector"], + }, + ) + mesh_line_leaf = nw.new_node( + Nodes.MeshLine, input_kwargs={"Count": integer_2, "Offset": (0.0, 0.0, 0.0)} + ) + + pinnaelevel2setpoint = nw.new_node( + nodegroup_pinnae_level2_set_point(pinna_contour=pinna_contour).name, + input_kwargs={0: leaf_index, "From Max": integer_2, 2: age}, + ) + + set_position_leaf = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": mesh_line_leaf, + "Position": pinnaelevel2setpoint.outputs["Vector"], + }, + ) + x_curvature, y_curvature, z_curvature = random_pinnae_level2_curvature() + pinnaelevel2rotation = nw.new_node( + nodegroup_pinnae_level2_rotation( + z_axis_rotate=z_curvature, + y_axis_rotate=y_curvature, + x_axis_rotate=x_curvature, + ).name, + input_kwargs={ + "Geometry": set_position_leaf, + 1: age, + 2: leaf_index, + 3: integer_2, + }, + ) + set_rotation_leaf = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": set_position_leaf, + "Position": pinnaelevel2rotation, + }, + ) + pinna_on_pinnae = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={ + "Points": set_rotation_pinna, + "Selection": selections_right[i], + "Instance": set_rotation_leaf, + "Scale": pinnaelevel1scale, + "Rotation": pinnaelevel1instancerotation, + }, + ) + rotate_instances = nw.new_node( + Nodes.RotateInstances, + input_kwargs={ + "Instances": pinna_on_pinnae, + "Rotation": (-0.1571, 0.0, 0.0), + }, + ) + scale_instances = nw.new_node( + Nodes.ScaleInstances, + input_kwargs={"Instances": rotate_instances, "Scale": (1.0, 1.0, 1.0)}, + ) + pinnaelevel2stein = nw.new_node( + nodegroup_pinnae_level2_stein().name, + input_kwargs={ + 0: pinnaelevel2setpoint.outputs["Result"], + "Mesh": scale_instances, + }, + ) + pinnaelevel2instanceonpoints = nw.new_node( + nodegroup_pinnae_level2_instance_on_points( + leaf=leaf, pinna_contour=pinna_contour + ).name, + input_kwargs={ + "Points": scale_instances, + 1: pinnaelevel2setpoint.outputs["Result"], + 2: 0.0, + 3: age, + }, + ) + join_geometry = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={ + "Geometry": [pinnaelevel2stein, pinnaelevel2instanceonpoints] + }, + ) + mesh_lines_right.append(join_geometry) + + join_geometry_whole = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": mesh_lines_left + mesh_lines_right}, + ) + realize_instances = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": join_geometry_whole} + ) + noise_texture = nw.new_node( + Nodes.NoiseTexture, input_kwargs={"Scale": 0.4, "Roughness": 0.2} + ) + set_positions = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": realize_instances, + "Offset": noise_texture.outputs["Color"], + }, + ) + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_positions} + ) + + +def check_vicinity(rotation, pinnae_rs): + for r in pinnae_rs: + if abs(rotation[1] - r[1]) < 0.1 and abs(rotation[2] - r[2]) < 0.15: + return True + return False + + +def geo_fern(nw: NodeWrangler, **kwargs): + pinnaes = [] + # Two modes: Random Like and Flatten Like + fern_mode = kwargs["fern_mode"] + pinnae_num = kwargs["pinnae_num"] + scale = kwargs["scale"] + version_num = kwargs["version_num"] + leaf = kwargs["leaf"] + if fern_mode == "young_and_grownup": + rotates = [] # Horizontal grownup pinnae + # Generate non-overlapping pinnae orientations + for i in range(pinnae_num): + flip_bit = 
randint(0, 3, (1,))[0] + if flip_bit: + rotate_z = uniform(2.74, 3.54, (1,))[0] + else: + rotate_z = uniform(-0.4, 0.4, (1,))[0] + rotate_x = uniform(0.8, 1.1, (1,))[0] + rotate_z2 = uniform(0, 6.28, (1,))[0] + if flip_bit: + gravity_dir = 1 + else: + gravity_dir = -1 + rotate = (rotate_z, rotate_x, rotate_z2, gravity_dir) + if check_vicinity(rotate, rotates): + continue + else: + rotates.append(rotate) + # Generate pinnae + for r in rotates: + random_age = uniform(0.7, 0.95, (1,))[0] + random_leaf_num = randint(15, 25, (1,))[0] + random_pinna_num = randint(60, 80, (1,))[0] + shape = nw.new_node( + geometry_pinnae_nodes( + leaf, + leaf_num_param=random_leaf_num, + age_param=random_age, + pinna_num_param=random_pinna_num, + version_num_param=version_num, + gravity_rotation=r[3], + ).name + ) + z_transform = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": shape, "Rotation": (0.0, 0.0, r[0])}, + ) + x_transform = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": z_transform, "Rotation": (-r[1], 0.0, 0.0)}, + ) + z2_transform = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": x_transform, "Rotation": (0.0, 0.0, r[2])}, + ) + pinnaes.append(z2_transform) + + # Vertical young pinnae + young_num = randint(0, 5, size=(1,))[0] + for i in range(young_num): + random_age = uniform(0.2, 0.5, (1,))[0] + random_leaf_num = randint(14, 20, (1,))[0] + random_pinna_num = randint(60, 100, (1,))[0] + rotate_z = uniform(0, 6.28, (1,)) + rotate_x = uniform(0, 0.4, (1,)) + rotate_z2 = uniform(0, 6.28, (1,)) + shape = nw.new_node( + geometry_pinnae_nodes( + leaf, + leaf_num_param=random_leaf_num, + age_param=random_age, + pinna_num_param=random_pinna_num, + version_num_param=version_num, + gravity_rotation=0, + ).name + ) + z_transform = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": shape, "Rotation": (0.0, 0.0, rotate_z[0])}, + ) + x_transform = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": z_transform, + "Rotation": (-rotate_x[0], 0.0, 0.0), + }, + ) + z2_transform = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": x_transform, + "Rotation": (0.0, 0.0, rotate_z2[0]), + }, + ) + pinnaes.append(z2_transform) + elif fern_mode == "all_grownup": + # Random grownup pinnae + rotates = [] + for i in range(pinnae_num): + rotate_z = normal(3.14, 0.2, (1,))[0] + rotate_x = uniform(0.5, 1.1, (1,))[0] + rotate_z2 = uniform(0, 6.28, (1,))[0] + rotate = (rotate_z, rotate_x, rotate_z2, 1) + if check_vicinity(rotate, rotates): + continue + else: + rotates.append(rotate) + + for r in rotates: + random_age = uniform(0.7, 0.9, (1,))[0] + random_leaf_num = randint(16, 25, (1,))[0] + random_pinna_num = randint(60, 80, (1,))[0] + shape = nw.new_node( + geometry_pinnae_nodes( + leaf, + leaf_num_param=random_leaf_num, + age_param=random_age, + pinna_num_param=random_pinna_num, + version_num_param=version_num, + gravity_rotation=r[3], + ).name + ) + z_transform = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": shape, "Rotation": (0.0, 0.0, r[0])}, + ) + x_transform = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": z_transform, "Rotation": (-r[1], 0.0, 0.0)}, + ) + z2_transform = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": x_transform, "Rotation": (0.0, 0.0, r[2])}, + ) + pinnaes.append(z2_transform) + elif fern_mode == "single_pinnae": + shape = nw.new_node( + geometry_pinnae_nodes( + leaf, + leaf_num_param=20, + age_param=kwargs["age"], + pinna_num_param=60, + version_num_param=version_num, + ).name + ) +
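# "single_pinnae" builds a single frond with fixed leaf (20) and pinna (60) counts; + # FernFactory.debug_asset below relies on this mode. +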
pinnaes.append(shape) + else: + raise NotImplementedError + + join_geometry = nw.new_node(Nodes.JoinGeometry, input_kwargs={"Geometry": pinnaes}) + geometry = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": join_geometry, "Scale": (scale, scale, scale)}, + ) + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Geometry": geometry}) + + +@gin.register +class FernFactory(AssetFactory): + def __init__(self, factory_seed, coarse=False): + super(FernFactory, self).__init__(factory_seed, coarse=coarse) + + def create_asset(self, **params): + bpy.ops.mesh.primitive_plane_add( + size=1, + enter_editmode=False, + align="WORLD", + location=(0, 0, 0), + scale=(1, 1, 1), + ) + obj = bpy.context.active_object + + if "fern_mode" not in params: + type_bit = randint(0, 2, (1,))[0] + if type_bit: + params["fern_mode"] = "young_and_grownup" + else: + params["fern_mode"] = "all_grownup" + + if "scale" not in params: + params["scale"] = 0.02 + + if "version_num" not in params: + params["version_num"] = 5 + + if "pinnae_num" not in params: + params["pinnae_num"] = randint(12, 30, size=(1,))[0] + + # Make the Leaf and Delete It Later + lf_seed = randint(0, 1000, size=(1,))[0] + leaf_model = Leaf.LeafFactory( + genome={"leaf_width": 0.4, "width_rand": 0.04}, factory_seed=lf_seed + ) + leaf = leaf_model.create_asset(material=False) + params["leaf"] = leaf + + surface.add_geomod( + obj, geo_fern, apply=True, attributes=[], input_kwargs=params + ) + butil.delete([leaf]) + with butil.SelectObjects(obj): + bpy.ops.object.material_slot_remove() + bpy.ops.object.shade_flat() + + simple_greenery.apply(obj) + + return obj + + def debug_asset(self, **params): + bpy.ops.mesh.primitive_plane_add( + size=1, + enter_editmode=False, + align="WORLD", + location=(0, 0, 0), + scale=(1, 1, 1), + ) + obj = bpy.context.active_object + params["fern_mode"] = "single_pinnae" + params["scale"] = 1.0 + params["version_num"] = 5 + params["pinnae_num"] = 1 + params["age"] = uniform(0.5, 0.9) + + leaf_model = Leaf.LeafFactory( + genome={"leaf_width": 0.4, "width_rand": 0.04}, factory_seed=0 + ) + leaf = leaf_model.create_asset(material=False) + params["leaf"] = leaf + surface.add_geomod( + obj, geo_fern, apply=True, attributes=[], input_kwargs=params + ) + + bpy.ops.object.convert(target="MESH") + butil.delete([leaf]) + tag_object(obj, "fern") + return obj + + +# if __name__ == '__main__': +# fern = FernFactory(0) +# obj = fern.debug_asset() +# simple_greenery.apply([obj]) diff --git a/infinigen/assets/small_plants/leaf_general.py b/infinigen/assets/objects/small_plants/leaf_general.py similarity index 56% rename from infinigen/assets/small_plants/leaf_general.py rename to infinigen/assets/objects/small_plants/leaf_general.py index 8f3222f7a..6882cb1e4 100644 --- a/infinigen/assets/small_plants/leaf_general.py +++ b/infinigen/assets/objects/small_plants/leaf_general.py @@ -3,26 +3,20 @@ # Authors: Beining Han -import pdb - -import numpy as np import bpy +import numpy as np -from infinigen.assets.trees.utils import helper, mesh, materials - +from infinigen.assets.objects.trees.utils import mesh from infinigen.core.placement.factory import AssetFactory +from infinigen.core.tagging import tag_object from infinigen.core.util import blender as butil -C = bpy.context -D = bpy.data -from infinigen.core.tagging import tag_object, tag_nodegroup class LeafFactory(AssetFactory): - scale = 0.3 - def __init__(self, factory_seed, genome: dict=None, coarse=False): + def __init__(self, factory_seed, genome: dict = None, coarse=False): 
super(LeafFactory, self).__init__(factory_seed, coarse=coarse) self.genome = dict( leaf_width=0.5, @@ -31,7 +25,7 @@ def __init__(self, factory_seed, genome: dict=None, coarse=False): x_offset=0, flip_leaf=False, z_scaling=0, - width_rand=0.33 + width_rand=0.33, ) if genome: for k, g in genome.items(): @@ -39,15 +33,15 @@ def __init__(self, factory_seed, genome: dict=None, coarse=False): self.genome[k] = g def create_asset(self, **params) -> bpy.types.Object: - # bpy.ops.object.mode_set(mode = 'OBJECT') - bpy.ops.mesh.primitive_circle_add(enter_editmode=False, align='WORLD', - location=(0, 0, 0), scale=(1, 1, 1)) + bpy.ops.mesh.primitive_circle_add( + enter_editmode=False, align="WORLD", location=(0, 0, 0), scale=(1, 1, 1) + ) bpy.ops.object.editmode_toggle() bpy.ops.mesh.edge_face_add() obj = bpy.context.active_object - min_radius = .02 + min_radius = 0.02 radii_ref = [1] n = len(obj.data.vertices) // 2 @@ -56,37 +50,47 @@ def create_asset(self, **params) -> bpy.types.Object: bpy.ops.mesh.subdivide() a = np.linspace(0, np.pi, n) - if self.genome['flip_leaf']: + if self.genome["flip_leaf"]: a = a[::-1] - x = np.sin(a) * (self.genome['leaf_width'] + np.random.randn() * self.genome['width_rand']) + self.genome['x_offset'] - y = -np.cos(.9 * (a - self.genome['alpha'])) - z = x ** 2 * self.genome['z_scaling'] - - full_coords = np.concatenate([np.stack([x, y, z], 1), - np.stack([-x[::-1], y[::-1], z], 1), - np.array([[0, y[0], 0]])]).flatten() - bpy.ops.object.mode_set(mode='OBJECT') - obj.data.vertices.foreach_set('co', full_coords) - - if self.genome['use_wave']: - bpy.ops.object.modifier_add(type='WAVE') - bpy.context.object.modifiers["Wave"].height = np.random.randn() * .3 - bpy.context.object.modifiers["Wave"].width = 0.75 + \ - np.random.randn() * .1 + x = ( + np.sin(a) + * ( + self.genome["leaf_width"] + + np.random.randn() * self.genome["width_rand"] + ) + + self.genome["x_offset"] + ) + y = -np.cos(0.9 * (a - self.genome["alpha"])) + z = x**2 * self.genome["z_scaling"] + + full_coords = np.concatenate( + [ + np.stack([x, y, z], 1), + np.stack([-x[::-1], y[::-1], z], 1), + np.array([[0, y[0], 0]]), + ] + ).flatten() + bpy.ops.object.mode_set(mode="OBJECT") + obj.data.vertices.foreach_set("co", full_coords) + + if self.genome["use_wave"]: + bpy.ops.object.modifier_add(type="WAVE") + bpy.context.object.modifiers["Wave"].height = np.random.randn() * 0.3 + bpy.context.object.modifiers["Wave"].width = 0.75 + np.random.randn() * 0.1 bpy.context.object.modifiers["Wave"].speed = np.random.rand() mesh.finalize_obj(obj) - C.scene.cursor.location = obj.data.vertices[-1].co + bpy.context.scene.cursor.location = obj.data.vertices[-1].co bpy.ops.object.origin_set(type="ORIGIN_CURSOR") obj.location = (0, 0, 0) obj.scale *= self.scale butil.apply_transform(obj) - tag_object(obj, 'leaf') + tag_object(obj, "leaf") return obj -if __name__ == '__main__': +if __name__ == "__main__": leaf = LeafFactory(factory_seed=0) - leaf.create_asset() \ No newline at end of file + leaf.create_asset() diff --git a/infinigen/assets/small_plants/leaf_heart.py b/infinigen/assets/objects/small_plants/leaf_heart.py similarity index 58% rename from infinigen/assets/small_plants/leaf_heart.py rename to infinigen/assets/objects/small_plants/leaf_heart.py index 83cc15249..52eda2981 100644 --- a/infinigen/assets/small_plants/leaf_heart.py +++ b/infinigen/assets/objects/small_plants/leaf_heart.py @@ -4,36 +4,31 @@ # Authors: Beining Han -import numpy as np import bpy -from infinigen.assets.trees.utils import mesh +import numpy as 
np + +from infinigen.assets.objects.trees.utils import mesh from infinigen.core.placement.factory import AssetFactory +from infinigen.core.tagging import tag_object from infinigen.core.util import blender as butil -C = bpy.context -D = bpy.data -from infinigen.core.tagging import tag_object, tag_nodegroup class LeafHeartFactory(AssetFactory): scale = 0.2 def __init__(self, factory_seed, genome: dict = None, coarse=False): super(LeafHeartFactory, self).__init__(factory_seed, coarse=coarse) - self.genome = dict( - leaf_width=1.0, - use_wave=True, - z_scaling=0, - width_rand=0.1 - ) + self.genome = dict(leaf_width=1.0, use_wave=True, z_scaling=0, width_rand=0.1) if genome: for k, g in genome.items(): assert k in self.genome self.genome[k] = g def create_asset(self, **params) -> bpy.types.Object: - # bpy.ops.object.mode_set(mode = 'OBJECT') - bpy.ops.mesh.primitive_circle_add(enter_editmode=False, align='WORLD', location=(0, 0, 0), scale=(1, 1, 1)) + bpy.ops.mesh.primitive_circle_add( + enter_editmode=False, align="WORLD", location=(0, 0, 0), scale=(1, 1, 1) + ) bpy.ops.object.editmode_toggle() bpy.ops.mesh.edge_face_add() @@ -45,31 +40,47 @@ def create_asset(self, **params) -> bpy.types.Object: bpy.ops.mesh.subdivide() a = np.linspace(0, np.pi, n) - x = 16. * (np.sin(a - np.pi) ** 3) * (self.genome['leaf_width'] + np.random.randn() * self.genome['width_rand']) - y = 13. * np.cos(a - np.pi) - 5 * np.cos(2 * (a - np.pi)) - 2 * np.cos(3 * (a - np.pi)) + x = ( + 16.0 + * (np.sin(a - np.pi) ** 3) + * ( + self.genome["leaf_width"] + + np.random.randn() * self.genome["width_rand"] + ) + ) + y = ( + 13.0 * np.cos(a - np.pi) + - 5 * np.cos(2 * (a - np.pi)) + - 2 * np.cos(3 * (a - np.pi)) + ) x, y = x * 0.3, y * 0.3 - z = x ** 2 * self.genome['z_scaling'] - - full_coords = np.concatenate([np.stack([x, y, z], 1), np.stack([-x[::-1], y[::-1], z], 1), - np.array([[0, y[0], 0]])]).flatten() - bpy.ops.object.mode_set(mode='OBJECT') - obj.data.vertices.foreach_set('co', full_coords) + z = x**2 * self.genome["z_scaling"] + + full_coords = np.concatenate( + [ + np.stack([x, y, z], 1), + np.stack([-x[::-1], y[::-1], z], 1), + np.array([[0, y[0], 0]]), + ] + ).flatten() + bpy.ops.object.mode_set(mode="OBJECT") + obj.data.vertices.foreach_set("co", full_coords) if self.genome["use_wave"]: - bpy.ops.object.modifier_add(type='WAVE') + bpy.ops.object.modifier_add(type="WAVE") bpy.context.object.modifiers["Wave"].height = 0.8 * np.random.randn() * 0.8 - bpy.context.object.modifiers["Wave"].width = 3.5 + np.random.randn() * 1. + bpy.context.object.modifiers["Wave"].width = 3.5 + np.random.randn() * 1.0 bpy.context.object.modifiers["Wave"].speed = 40 + np.random.uniform(-10, 20) mesh.finalize_obj(obj) - C.scene.cursor.location = obj.data.vertices[-1].co + bpy.context.scene.cursor.location = obj.data.vertices[-1].co bpy.ops.object.origin_set(type="ORIGIN_CURSOR") obj.location = (0, 0, 0) obj.scale *= self.scale butil.apply_transform(obj) - tag_object(obj, 'leaf_heart') + tag_object(obj, "leaf_heart") return obj @@ -77,5 +88,3 @@ def create_asset(self, **params) -> bpy.types.Object: # if __name__ == '__main__': # leaf = LeafHeartFactory(factory_seed=0) # leaf.create_asset() - - diff --git a/infinigen/assets/objects/small_plants/num_leaf_grass.py b/infinigen/assets/objects/small_plants/num_leaf_grass.py new file mode 100644 index 000000000..98f1246ab --- /dev/null +++ b/infinigen/assets/objects/small_plants/num_leaf_grass.py @@ -0,0 +1,279 @@ +# Copyright (c) Princeton University. 
+# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Beining Han + + +import bpy +import numpy as np +from numpy.random import normal, randint, uniform + +from infinigen.assets.materials import simple_greenery +from infinigen.assets.objects.small_plants.leaf_general import LeafFactory +from infinigen.assets.objects.small_plants.leaf_heart import LeafHeartFactory +from infinigen.core import surface +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.tagging import tag_nodegroup, tag_object +from infinigen.core.util import blender as butil + + +@node_utils.to_nodegroup( + "nodegroup_leafon_stem", singleton=False, type="GeometryNodeTree" +) +def nodegroup_leaf_on_stem( + nw: NodeWrangler, + z_rotation=( + 0, + 0, + 0, + ), + leaf_scale=1.0, + leaf=None, +): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Points", None)] + ) + + endpoint_selection = nw.new_node( + "GeometryNodeCurveEndpointSelection", input_kwargs={"Start Size": 0} + ) + + object_info = nw.new_node(Nodes.ObjectInfo, input_kwargs={"Object": leaf}) + + curve_tangent = nw.new_node(Nodes.CurveTangent) + + align_euler_to_vector = nw.new_node( + Nodes.AlignEulerToVector, + input_kwargs={"Vector": curve_tangent}, + attrs={"axis": "Z"}, + ) + + value = nw.new_node(Nodes.Value) + value.outputs[0].default_value = leaf_scale + + instance_on_points = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={ + "Points": group_input.outputs["Points"], + "Selection": endpoint_selection, + "Instance": object_info.outputs["Geometry"], + "Rotation": align_euler_to_vector, + "Scale": value, + }, + ) + + vector_1 = nw.new_node(Nodes.Vector) + vector_1.vector = z_rotation + + rotate_instances = nw.new_node( + Nodes.RotateInstances, + input_kwargs={"Instances": instance_on_points, "Rotation": vector_1}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Instances": rotate_instances} + ) + + +@node_utils.to_nodegroup( + "nodegroup_stem_geometry", singleton=False, type="GeometryNodeTree" +) +def nodegroup_stem_geometry(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Curve", None)] + ) + + spline_parameter = nw.new_node(Nodes.SplineParameter) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": spline_parameter.outputs["Factor"], 3: 1.0, 4: 0.4}, + ) + + set_curve_radius = nw.new_node( + Nodes.SetCurveRadius, + input_kwargs={ + "Curve": group_input.outputs["Curve"], + "Radius": map_range.outputs["Result"], + }, + ) + + curve_circle = nw.new_node( + Nodes.CurveCircle, input_kwargs={"Resolution": 12, "Radius": 0.03} + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": set_curve_radius, + "Profile Curve": curve_circle.outputs["Curve"], + "Fill Caps": True, + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Mesh": tag_nodegroup(nw, curve_to_mesh, "stem")}, + ) + + +def geo_face_colors(nw: NodeWrangler, **kwargs): + # Code generated using version 2.4.3 of the node_transpiler + + rotation_scale = kwargs["stem_rotation"] + leaf_num = kwargs["leaf_num"] + leaf = kwargs["leaf"] + mid_z = uniform(0.35, 0.65, 
size=(1,))[0] + mid_x = normal(0.0, rotation_scale, size=(1,))[0] + mid_y = normal(0.0, rotation_scale, size=(1,))[0] + vector_2 = nw.new_node(Nodes.Vector) + vector_2.vector = (mid_x, mid_y, mid_z) + + top_x = normal(0.0, rotation_scale, size=(1,))[0] + top_y = normal(0.0, rotation_scale, size=(1,))[0] + vector = nw.new_node(Nodes.Vector) + vector.vector = (top_x, top_y, 1.0) + + quadratic_bezier = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Resolution": 25, + "Start": (0.0, 0.0, 0.0), + "Middle": vector_2, + "End": vector, + }, + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, input_kwargs={"Scale": 1.0, "Roughness": 0.2} + ) + + add = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: noise_texture.outputs["Fac"], 1: (-0.5, -0.5, -0.5)}, + ) + + spline_parameter_1 = nw.new_node(Nodes.SplineParameter) + + multiply = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: add.outputs["Vector"], + 1: spline_parameter_1.outputs["Factor"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": quadratic_bezier, + "Offset": multiply.outputs["Vector"], + }, + ) + + stemgeometry = nw.new_node( + nodegroup_stem_geometry().name, input_kwargs={"Curve": set_position} + ) + + leaf_scale = uniform(0.15, 0.35, size=(1,))[0] * kwargs["leaf_scale"] + leaves = [] + rotation = 0 + for _ in range(leaf_num): + leaves.append( + nw.new_node( + nodegroup_leaf_on_stem( + z_rotation=(0, 0, rotation), leaf_scale=leaf_scale, leaf=leaf + ).name, + input_kwargs={"Points": set_position}, + ) + ) + rotation += 6.28 / leaf_num + + join_geometry = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": leaves + [stemgeometry]} + ) + + realize_instances = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": join_geometry} + ) + + colored = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": realize_instances, + "Material": surface.shaderfunc_to_material( + simple_greenery.shader_simple_greenery + ), + }, + ) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Geometry": colored}) + + +class NumLeafGrassFactory(AssetFactory): + def __init__(self, factory_seed, coarse=False): + super(NumLeafGrassFactory, self).__init__(factory_seed, coarse=coarse) + self.leaf_num = [2, 3, 4] + self.leaf_model = [LeafFactory, LeafHeartFactory] + + def create_asset(self, **params): + bpy.ops.mesh.primitive_plane_add( + size=1, + enter_editmode=False, + align="WORLD", + location=(0, 0, 0), + scale=(1, 1, 1), + ) + obj = bpy.context.active_object + + lf_seed = randint(0, 1000, size=(1,))[0] + leaf_num = np.random.choice(self.leaf_num, size=(1,), p=[0.2, 0.4, 0.4])[0] + z_offset = normal(0, 0.05, size=(1,))[0] + if leaf_num == 2: + leaf_model = LeafFactory( + genome={"leaf_width": 0.95, "width_rand": 0.1, "z_scaling": z_offset}, + factory_seed=lf_seed, + ) + leaf = leaf_model.create_asset() + params["leaf_scale"] = 2.0 + elif leaf_num == 3: + leaf_model = LeafHeartFactory( + genome={"leaf_width": 1.1, "width_rand": 0.05, "z_scaling": z_offset}, + factory_seed=lf_seed, + ) + leaf = leaf_model.create_asset() + params["leaf_scale"] = 1.0 + else: + leaf_model = LeafHeartFactory( + genome={"leaf_width": 0.85, "width_rand": 0.05, "z_scaling": z_offset}, + factory_seed=lf_seed, + ) + leaf = leaf_model.create_asset() + params["leaf_scale"] = 1.0 + + params["leaf"] = leaf + params["leaf_num"] = leaf_num + params["stem_rotation"] = 0.15 + + surface.add_geomod( + obj, geo_face_colors, apply=True, attributes=[], 
input_kwargs=params + ) + butil.delete([leaf]) + with butil.SelectObjects(obj): + bpy.ops.object.material_slot_remove() + bpy.ops.object.shade_flat() + + tag_object(obj, "num_leaf_grass") + return obj + + +# if __name__ == '__main__': +# grass = NumLeafGrassFactory(0) +# obj = grass.create_asset() diff --git a/infinigen/assets/objects/small_plants/snake_plant.py b/infinigen/assets/objects/small_plants/snake_plant.py new file mode 100644 index 000000000..e101a7aca --- /dev/null +++ b/infinigen/assets/objects/small_plants/snake_plant.py @@ -0,0 +1,404 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Beining Han +# Acknowledgements: This file draws inspiration from https://blenderartists.org/t/extrude-face-along-curve-with-geometry-nodes/1432653/3 + +import bpy +import numpy as np +from numpy.random import normal, randint, uniform + +from infinigen.assets.materials import snake_plant +from infinigen.core import surface +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.tagging import tag_object +from infinigen.core.util import blender as butil + + +@node_utils.to_nodegroup( + "nodegroup_pedal_thickness", singleton=False, type="GeometryNodeTree" +) +def nodegroup_pedal_thickness(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketFloat", "Value", 1.0)] + ) + + map_range_3 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": group_input.outputs["Value"], 3: 0.2, 4: 0.04}, + ) + + thickness = nw.new_node(Nodes.Value) + thickness.outputs[0].default_value = uniform(0.1, 0.35) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: map_range_3.outputs["Result"], 1: thickness}, + attrs={"operation": "MULTIPLY"}, + ) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Value": multiply}) + + +@node_utils.to_nodegroup( + "nodegroup_z_pedal_rotation", singleton=False, type="GeometryNodeTree" +) +def nodegroup_z_pedal_rotation(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + position_1 = nw.new_node(Nodes.InputPosition) + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketFloat", "Value", 1.0)] + ) + + float_curve = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": group_input.outputs["Value"]} + ) + node_utils.assign_curve( + float_curve.mapping.curves[0], + [ + (0.0, 0.0), + (0.25, 0.25 + uniform(-0.1, 0.1)), + (0.50, 0.5 + uniform(-0.15, 0.15)), + (0.75, 0.5 + uniform(0.25, 0.25)), + (1.0, 1.0), + ], + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: float_curve, 1: uniform(0.8, 2.0)}, + attrs={"operation": "MULTIPLY"}, + ) + + vector_rotate_1 = nw.new_node( + Nodes.VectorRotate, + input_kwargs={"Vector": position_1, "Angle": multiply}, + attrs={"rotation_type": "Z_AXIS"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Vector": vector_rotate_1} + ) + + +@node_utils.to_nodegroup( + "nodegroup_x_pedal_rotation", singleton=False, type="GeometryNodeTree" +) +def nodegroup_x_pedal_rotation(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + position_1 = nw.new_node(Nodes.InputPosition) + + spline_parameter_1 = nw.new_node(Nodes.SplineParameter) + + multiply = 
nw.new_node( + Nodes.Math, + input_kwargs={0: 0.5, 1: spline_parameter_1.outputs["Factor"]}, + attrs={"operation": "MULTIPLY"}, + ) + + vector_rotate = nw.new_node( + Nodes.VectorRotate, + input_kwargs={"Vector": position_1, "Angle": multiply}, + attrs={"rotation_type": "X_AXIS"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Vector": vector_rotate} + ) + + +@node_utils.to_nodegroup("nodegroup_setup", singleton=False, type="GeometryNodeTree") +def nodegroup_setup(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + quadratic_bezier = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Resolution": 25, + "Start": (0.0, 0.0, 0.0), + "Middle": (0.0, 0.0, 1.0), + "End": (uniform(-0.2, 0.2), uniform(0.2, 0.2), 2.0), + }, + ) + + x_pedal_rotation = nw.new_node(nodegroup_x_pedal_rotation().name) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={"Geometry": quadratic_bezier, "Offset": x_pedal_rotation}, + ) + + spline_parameter = nw.new_node(Nodes.SplineParameter) + + capture_attribute_1 = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={"Geometry": set_position, 2: spline_parameter.outputs["Factor"]}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Spline": capture_attribute_1.outputs[2], + "Geometry": capture_attribute_1.outputs["Geometry"], + }, + ) + + +@node_utils.to_nodegroup( + "nodegroup_edge_extrusion", singleton=False, type="GeometryNodeTree" +) +def nodegroup_edge_extrusion(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "Value", 1.0), + ("NodeSocketGeometry", "Geometry", None), + ], + ) + + init_width = uniform(0.15, 0.3) + + normal = nw.new_node(Nodes.InputNormal) + + capture_attribute = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={"Geometry": group_input.outputs["Geometry"], 1: normal}, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + float_curve = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": group_input.outputs["Value"]} + ) + node_utils.assign_curve( + float_curve.mapping.curves[0], + [ + (0.0, init_width), + (0.25, init_width + uniform(0.0, 0.1)), + (0.50, init_width + uniform(0.02, 0.18)), + (0.75, init_width + uniform(0.02, 0.1)), + (1.0, 0.0), + ], + ) + + combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={"X": float_curve}) + + set_position_1 = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": capture_attribute.outputs["Geometry"], + "Offset": combine_xyz, + }, + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, input_kwargs={"Curve": set_position_1} + ) + + extrude_mesh = nw.new_node( + Nodes.ExtrudeMesh, + input_kwargs={ + "Mesh": curve_to_mesh, + "Offset": capture_attribute.outputs["Attribute"], + "Offset Scale": float_curve, + }, + attrs={"mode": "EDGES"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Mesh": extrude_mesh.outputs["Mesh"]} + ) + + +@node_utils.to_nodegroup( + "nodegroup_face_extrusion", singleton=False, type="GeometryNodeTree" +) +def nodegroup_face_extrusion(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketFloat", "Value", 1.0), + ], + ) + + z_pedal_rotation = nw.new_node( + nodegroup_z_pedal_rotation().name, + input_kwargs={"Value": group_input.outputs["Value"]}, + ) + + set_position_2 = 
nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + "Offset": z_pedal_rotation, + }, + ) + + pedal_thickness = nw.new_node( + nodegroup_pedal_thickness().name, + input_kwargs={"Value": group_input.outputs["Value"]}, + ) + + extrude_mesh_2 = nw.new_node( + Nodes.ExtrudeMesh, + input_kwargs={ + "Mesh": set_position_2, + "Offset Scale": pedal_thickness, + "Individual": False, + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": extrude_mesh_2} + ) + + +@node_utils.to_nodegroup( + "nodegroup_single_pedal", singleton=False, type="GeometryNodeTree" +) +def nodegroup_single_pedal_nodes(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + setup = nw.new_node(nodegroup_setup().name) + + edge_extrusion = nw.new_node( + nodegroup_edge_extrusion().name, + input_kwargs={ + "Value": setup.outputs["Spline"], + "Geometry": setup.outputs["Geometry"], + }, + ) + + face_extrusion = nw.new_node( + nodegroup_face_extrusion().name, + input_kwargs={"Geometry": edge_extrusion, "Value": setup.outputs["Spline"]}, + ) + + subdivision_surface = nw.new_node( + Nodes.SubdivisionSurface, input_kwargs={"Mesh": face_extrusion, "Level": 2} + ) + + set_shade_smooth = nw.new_node( + Nodes.SetShadeSmooth, input_kwargs={"Geometry": subdivision_surface} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_shade_smooth} + ) + + +def check_vicinity(param, pedal_params): + for p in pedal_params: + r1 = max(param[0] * np.sin(param[1]), 0.2) + r2 = max(p[0] * np.sin(p[1]), 0.2) + dist = np.linalg.norm([param[2] - p[2], param[3] - p[3]]) + if r1 + r2 > dist: + return True + return False + + +def geometry_snake_plant_nodes(nw: NodeWrangler, **kwargs): + num_pedals = kwargs["num_pedals"] + pedals = [] + pedal_params = [] + c = 0 + while c < 50 and len(pedal_params) < num_pedals: + c += 1 + scale = uniform(0.7, 1.0) + x_rotation = normal(0, 0.15) + x, y = uniform(-0.7, 0.7), uniform(-0.7, 0.7) + param = (scale, x_rotation, x, y) + if check_vicinity(param, pedal_params): + continue + else: + pedal_params.append(param) + + for param in pedal_params: + scale = param[0] + z_rotation = uniform(0, 6.28) + x_rotation = param[1] + z2_rotation = uniform(0, 6.28) + x, y = param[2], param[3] + pedal = nw.new_node(nodegroup_single_pedal_nodes().name) + s_transform = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": pedal, + "Scale": (scale, scale, scale), + "Rotation": (0.0, 0.0, z_rotation), + }, + ) + x_transform = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": s_transform, "Rotation": (x_rotation, 0.0, 0.0)}, + ) + z_transform = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": x_transform, + "Rotation": (0.0, 0.0, z2_rotation), + "Translation": (x, y, 0), + }, + ) + pedals.append(z_transform) + pedals = nw.new_node(Nodes.JoinGeometry, input_kwargs={"Geometry": pedals}) + + set_material = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": pedals, + "Material": surface.shaderfunc_to_material(snake_plant.shader_snake_plant), + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_material} + ) + + +class SnakePlantFactory(AssetFactory): + def __init__(self, factory_seed, coarse=False): + super(SnakePlantFactory, self).__init__(factory_seed, coarse=coarse) + + def create_asset(self, **params): + bpy.ops.mesh.primitive_plane_add( + size=1, + enter_editmode=False, + align="WORLD", + location=(0, 0, 0), + scale=(1, 1, 
1), + ) + obj = bpy.context.active_object + + pedal_num = randint(4, 8) + params["num_pedals"] = pedal_num + + surface.add_geomod( + obj, geometry_snake_plant_nodes, apply=True, input_kwargs=params + ) + + # convert to appropriate units - TODO replace this + butil.apply_modifiers(obj) + obj.scale = (0.2, 0.2, 0.2) + butil.apply_transform(obj, scale=True) + + butil.purge_empty_materials(obj) + + tag_object(obj, "snake_plant") + return obj + + +if __name__ == "__main__": + grass = SnakePlantFactory(0) + obj = grass.create_asset() diff --git a/infinigen/assets/objects/small_plants/spider_plant.py b/infinigen/assets/objects/small_plants/spider_plant.py new file mode 100644 index 000000000..ff4b68567 --- /dev/null +++ b/infinigen/assets/objects/small_plants/spider_plant.py @@ -0,0 +1,430 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Beining Han +# Acknowledgements: This file draws inspiration from https://blenderartists.org/t/extrude-face-along-curve-with-geometry-nodes/1432653/3 + +import bpy +import numpy as np +from numpy.random import normal, randint, uniform + +from infinigen.assets.materials import spider_plant +from infinigen.core import surface +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.tagging import tag_object +from infinigen.core.util import blender as butil + + +@node_utils.to_nodegroup( + "nodegroup_set_leaf_countour", singleton=False, type="GeometryNodeTree" +) +def nodegroup_set_leaf_countour(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketFloat", "Value", 1.0)] + ) + + float_curve_2 = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": group_input.outputs["Value"]} + ) + k = uniform(0, 0.05) + node_utils.assign_curve( + float_curve_2.mapping.curves[0], + [ + (0.0, 0.1), + (0.2, 0.1 + k / 1.5), + (0.4, 0.1 + k / 1.5), + (0.6, 0.1), + (0.8, 0.1 - k), + (1.0, 0.0), + ], + handles=["AUTO", "AUTO", "AUTO", "AUTO", "AUTO", "VECTOR"], + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: float_curve_2, 1: uniform(0.8, 1.3)}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"X": multiply}) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Vector": combine_xyz_2, "Value": multiply} + ) + + +@node_utils.to_nodegroup( + "nodegroup_leaf_z_rotation", singleton=False, type="GeometryNodeTree" +) +def nodegroup_leaf_z_rotation(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + position_8 = nw.new_node(Nodes.InputPosition) + + spline_parameter_1 = nw.new_node(Nodes.SplineParameter) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": spline_parameter_1.outputs["Factor"], + 4: np.abs(normal(0, 0.6)), + }, + ) + + vector_rotate_6 = nw.new_node( + Nodes.VectorRotate, + input_kwargs={ + "Vector": position_8, + "Center": (0.0, 0.0, 0.5), + "Angle": map_range_1.outputs["Result"], + }, + attrs={"rotation_type": "Z_AXIS"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Vector": vector_rotate_6} + ) + + +@node_utils.to_nodegroup( + "nodegroup_leaf_x_rotation", singleton=False, type="GeometryNodeTree" +) +def nodegroup_leaf_x_rotation(nw: 
NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + position_5 = nw.new_node(Nodes.InputPosition) + + spline_parameter = nw.new_node(Nodes.SplineParameter) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": spline_parameter.outputs["Factor"], + 4: np.abs(normal(0, 1.2)), + }, + ) + + vector_rotate_4 = nw.new_node( + Nodes.VectorRotate, + input_kwargs={ + "Vector": position_5, + "Center": (0.0, 0.0, 0.5), + "Angle": map_range.outputs["Result"], + }, + attrs={"rotation_type": "X_AXIS"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Vector": vector_rotate_4} + ) + + +@node_utils.to_nodegroup( + "nodegroup_leaf_rotate_on_base", singleton=False, type="GeometryNodeTree" +) +def nodegroup_leaf_rotate_on_base(nw: NodeWrangler, x_R=0.0): + # Code generated using version 2.4.3 of the node_transpiler + + random_value_2 = nw.new_node(Nodes.RandomValue, input_kwargs={2: -0.3, 3: 0.3}) + + add = nw.new_node(Nodes.Math, input_kwargs={0: x_R, 1: random_value_2.outputs[1]}) + + random_value_3 = nw.new_node(Nodes.RandomValue, input_kwargs={2: -0.6, 3: 0.6}) + + noise_texture_1 = nw.new_node(Nodes.NoiseTexture) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": noise_texture_1.outputs["Fac"], 3: -0.5, 4: 0.5}, + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": add, + "Y": random_value_3.outputs[1], + "Z": map_range_1.outputs["Result"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Vector": combine_xyz_1} + ) + + +@node_utils.to_nodegroup( + "nodegroup_leaf_scale_align", singleton=False, type="GeometryNodeTree" +) +def nodegroup_leaf_scale_align(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + normal = nw.new_node(Nodes.InputNormal) + + align_euler_to_vector = nw.new_node( + Nodes.AlignEulerToVector, input_kwargs={"Vector": normal}, attrs={"axis": "Y"} + ) + + noise_texture = nw.new_node(Nodes.NoiseTexture) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": noise_texture.outputs["Fac"], 3: 0.6, 4: 1.1}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Rotation": align_euler_to_vector, + "Result": map_range.outputs["Result"], + }, + ) + + +@node_utils.to_nodegroup( + "nodegroup_leaf_geometry", singleton=False, type="GeometryNodeTree" +) +def nodegroup_leaf_geometry(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + quadratic_bezier = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Resolution": 100, + "Start": (0.0, 0.0, 0.0), + "Middle": (0.0, 0.0, 0.5), + "End": (0.0, 0.0, 1.0), + }, + ) + + leaf_x_rotation = nw.new_node(nodegroup_leaf_x_rotation().name) + + set_position_7 = nw.new_node( + Nodes.SetPosition, + input_kwargs={"Geometry": quadratic_bezier, "Offset": leaf_x_rotation}, + ) + + leaf_z_rotation = nw.new_node(nodegroup_leaf_z_rotation().name) + + set_position_2 = nw.new_node( + Nodes.SetPosition, + input_kwargs={"Geometry": set_position_7, "Offset": leaf_z_rotation}, + ) + + spline_parameter_3 = nw.new_node(Nodes.SplineParameter) + + capture_attribute_3 = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={ + "Geometry": set_position_2, + 2: spline_parameter_3.outputs["Factor"], + }, + ) + + normal_1 = nw.new_node(Nodes.InputNormal) + + capture_attribute_2 = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={"Geometry": capture_attribute_3.outputs["Geometry"], 1: normal_1}, + attrs={"data_type": 
"FLOAT_VECTOR"}, + ) + + set_leaf_countour = nw.new_node( + nodegroup_set_leaf_countour().name, + input_kwargs={"Value": capture_attribute_3.outputs[2]}, + ) + + set_position_8 = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": capture_attribute_2.outputs["Geometry"], + "Offset": set_leaf_countour.outputs["Vector"], + }, + ) + + curve_to_mesh_2 = nw.new_node( + Nodes.CurveToMesh, input_kwargs={"Curve": set_position_8, "Fill Caps": True} + ) + + extrude_mesh_3 = nw.new_node( + Nodes.ExtrudeMesh, + input_kwargs={ + "Mesh": curve_to_mesh_2, + "Offset": capture_attribute_2.outputs["Attribute"], + "Offset Scale": set_leaf_countour.outputs["Value"], + }, + attrs={"mode": "EDGES"}, + ) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Mesh": extrude_mesh_3}) + + +def geometry_spider_plant_nodes(nw: NodeWrangler, **kwargs): + # Code generated using version 2.4.3 of the node_transpiler + num_leaf_versions = kwargs["num_leaf_versions"] + num_plant_bases = kwargs["num_plant_bases"] + base_radius = kwargs["base_radius"] + leaf_x_R = kwargs["leaf_x_R"] + leaf_x_S = kwargs["leaf_x_S"] + + leaves, bases = [], [] + for _ in range(num_leaf_versions): + leaf = nw.new_node(nodegroup_leaf_geometry().name) + leaves.append(leaf) + + geometry_to_instance = nw.new_node( + "GeometryNodeGeometryToInstance", input_kwargs={"Geometry": leaves} + ) + + for i in range(num_plant_bases): + curve_circle = nw.new_node( + Nodes.CurveCircle, input_kwargs={"Radius": base_radius[i]} + ) + + resample_curve = nw.new_node( + Nodes.ResampleCurve, + input_kwargs={ + "Curve": curve_circle.outputs["Curve"], + "Count": randint(20, 40), + }, + ) + + random_value = nw.new_node( + Nodes.RandomValue, + input_kwargs={2: -0.3 * base_radius[i], 3: 0.3 * base_radius[i]}, + ) + + random_value_1 = nw.new_node( + Nodes.RandomValue, + input_kwargs={2: -0.3 * base_radius[i], 3: 0.3 * base_radius[i]}, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": random_value.outputs[1], "Y": random_value_1.outputs[1]}, + ) + + set_position_3 = nw.new_node( + Nodes.SetPosition, + input_kwargs={"Geometry": resample_curve, "Offset": combine_xyz}, + ) + + subdivision_surface = nw.new_node( + Nodes.SubdivisionSurface, input_kwargs={"Mesh": geometry_to_instance} + ) + + leaf_scale_align = nw.new_node(nodegroup_leaf_scale_align().name) + + instance_on_points = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={ + "Points": set_position_3, + "Instance": subdivision_surface, + "Pick Instance": True, + "Rotation": leaf_scale_align.outputs["Rotation"], + "Scale": leaf_scale_align.outputs["Result"], + }, + ) + + value = nw.new_node(Nodes.Value) + value.outputs[0].default_value = leaf_x_S[i] + + scale_instances = nw.new_node( + Nodes.ScaleInstances, + input_kwargs={"Instances": instance_on_points, "Scale": value}, + ) + + leaf_rotate_on_base = nw.new_node( + nodegroup_leaf_rotate_on_base(x_R=leaf_x_R[i]).name + ) + + rotate_instances = nw.new_node( + Nodes.RotateInstances, + input_kwargs={ + "Instances": scale_instances, + "Rotation": leaf_rotate_on_base, + }, + ) + + realize_instances = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": rotate_instances} + ) + bases.append(realize_instances) + + join_geometry = nw.new_node(Nodes.JoinGeometry, input_kwargs={"Geometry": bases}) + + set_shade_smooth = nw.new_node( + Nodes.SetShadeSmooth, input_kwargs={"Geometry": join_geometry} + ) + + set_material = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": set_shade_smooth, + "Material": 
surface.shaderfunc_to_material( + spider_plant.shader_spider_plant + ), + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_material} + ) + + +class SpiderPlantFactory(AssetFactory): + def __init__(self, factory_seed, coarse=False): + super(SpiderPlantFactory, self).__init__(factory_seed, coarse=coarse) + + def get_params(self): + params = {} + params["num_leaf_versions"] = randint(4, 8) + num_bases = randint(5, 12) + params["num_plant_bases"] = num_bases + base_radius, leaf_x_R, leaf_x_S = [], [], [] + init_base_radius = uniform(0.10, 0.20) + diff_base_radius = init_base_radius - 0.04 + init_x_R, diff_x_R = uniform(1.2, 1.5), uniform(0.7, 1.1) + init_x_S, diff_x_S = uniform(1.4, 2.0), uniform(0.2, 0.6) + for i in range(params["num_plant_bases"]): + base_radius.append(init_base_radius - (i * diff_base_radius) / num_bases) + leaf_x_R.append(init_x_R - (i * diff_x_R) / num_bases) + leaf_x_S.append(init_x_S - (i * diff_x_S) / num_bases) + params["base_radius"] = base_radius + params["leaf_x_R"] = leaf_x_R + params["leaf_x_S"] = leaf_x_S + + return params + + def create_asset(self, **params): + bpy.ops.mesh.primitive_plane_add( + size=1, + enter_editmode=False, + align="WORLD", + location=(0, 0, 0), + scale=(1, 1, 1), + ) + obj = bpy.context.active_object + + params = self.get_params() + + surface.add_geomod( + obj, geometry_spider_plant_nodes, apply=True, input_kwargs=params + ) + surface.add_material(obj, spider_plant.shader_spider_plant, selection=None) + + # convert to appropriate units - TODO replace this + butil.apply_modifiers(obj) + obj.scale = (0.1, 0.1, 0.1) + butil.apply_transform(obj, scale=True) + + tag_object(obj, "spider_plant") + return obj + + +if __name__ == "__main__": + fac = SpiderPlantFactory(0) + fac.create_asset() diff --git a/infinigen/assets/objects/small_plants/succulent.py b/infinigen/assets/objects/small_plants/succulent.py new file mode 100644 index 000000000..640ee5dcc --- /dev/null +++ b/infinigen/assets/objects/small_plants/succulent.py @@ -0,0 +1,726 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
+ +# Authors: Beining Han + + +import bpy +import numpy as np +from numpy.random import normal, randint, uniform + +from infinigen.assets.materials import succulent +from infinigen.core import surface +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.tagging import tag_object +from infinigen.core.util import blender as butil + + +@node_utils.to_nodegroup( + "nodegroup_pedal_cross_contour_top", singleton=False, type="GeometryNodeTree" +) +def nodegroup_pedal_cross_contour_top(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + normal_2 = nw.new_node(Nodes.InputNormal) + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[("NodeSocketFloat", "Y", 0.0), ("NodeSocketFloat", "X", 0.0)], + ) + + combine_xyz_3 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": group_input.outputs["X"], "Y": group_input.outputs["Y"]}, + ) + + multiply = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: normal_2, 1: combine_xyz_3}, + attrs={"operation": "MULTIPLY"}, + ) + + index_1 = nw.new_node(Nodes.Index) + + greater_than = nw.new_node( + Nodes.Math, + input_kwargs={0: index_1, 1: 63.0}, + attrs={"operation": "GREATER_THAN"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Vector": multiply.outputs["Vector"], "Value": greater_than}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_pedal_cross_contour_bottom", singleton=False, type="GeometryNodeTree" +) +def nodegroup_pedal_cross_contour_bottom(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + normal = nw.new_node(Nodes.InputNormal) + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[("NodeSocketFloat", "Y", 0.0), ("NodeSocketFloat", "X", 0.0)], + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": group_input.outputs["X"], "Y": group_input.outputs["Y"]}, + ) + + multiply = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: normal, 1: combine_xyz}, + attrs={"operation": "MULTIPLY"}, + ) + + index = nw.new_node(Nodes.Index) + + less_than = nw.new_node( + Nodes.Math, input_kwargs={0: index, 1: 64.0}, attrs={"operation": "LESS_THAN"} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Vector": multiply.outputs["Vector"], "Value": less_than}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_pedal_cross_contour", singleton=False, type="GeometryNodeTree" +) +def nodegroup_pedal_cross_contour(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + curve_circle = nw.new_node( + Nodes.CurveCircle, input_kwargs={"Resolution": 128, "Radius": 0.05} + ) + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "Y_bottom", 0.0), + ("NodeSocketFloat", "X", 0.0), + ("NodeSocketFloat", "Y_top", 0.0), + ], + ) + + pedal_cross_contour_bottom = nw.new_node( + nodegroup_pedal_cross_contour_bottom().name, + input_kwargs={ + "Y": group_input.outputs["Y_bottom"], + "X": group_input.outputs["X"], + }, + ) + + set_position_1 = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": curve_circle.outputs["Curve"], + "Selection": pedal_cross_contour_bottom.outputs["Value"], + "Offset": pedal_cross_contour_bottom.outputs["Vector"], + }, + ) + + pedal_cross_contour_top = nw.new_node( + nodegroup_pedal_cross_contour_top().name, + input_kwargs={"Y": group_input.outputs["Y_top"], "X": group_input.outputs["X"]}, + ) + + 
set_position_2 = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": set_position_1, + "Selection": pedal_cross_contour_top.outputs["Value"], + "Offset": pedal_cross_contour_top.outputs["Vector"], + }, + ) + + noise_texture_2 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={"W": 7.0, "Detail": 15.0}, + attrs={"noise_dimensions": "4D"}, + ) + + scale = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: noise_texture_2.outputs["Fac"], "Scale": uniform(0.00, 0.02)}, + attrs={"operation": "SCALE"}, + ) + + set_position_5 = nw.new_node( + Nodes.SetPosition, + input_kwargs={"Geometry": set_position_2, "Offset": scale.outputs["Vector"]}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_position_5} + ) + + +@node_utils.to_nodegroup( + "nodegroup_pedal_z_contour", singleton=False, type="GeometryNodeTree" +) +def nodegroup_pedal_z_contour(nw: NodeWrangler, curve_param=[]): + # Code generated using version 2.4.3 of the node_transpiler + + spline_parameter = nw.new_node(Nodes.SplineParameter) + + float_curve = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": spline_parameter.outputs["Factor"]} + ) + node_utils.assign_curve( + float_curve.mapping.curves[0], + [ + (0.0, curve_param[0]), + (0.2, curve_param[1] * (1.0 + normal(0, 0.04))), + (0.4, curve_param[2] * (1.0 + normal(0, 0.1))), + (0.6, curve_param[3] * (1.0 + normal(0, 0.03))), + (0.8, curve_param[4] * (1.0 + normal(0, 0.06))), + (0.9, curve_param[5] * (1.0 + normal(0, 0.04))), + (1.0, 0.0), + ], + ) + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketFloat", "Value", 0.5)] + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: float_curve, 1: group_input.outputs["Value"]}, + attrs={"operation": "MULTIPLY"}, + ) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Value": multiply}) + + +@node_utils.to_nodegroup( + "nodegroup_pedal_stem_curvature", singleton=False, type="GeometryNodeTree" +) +def nodegroup_pedal_stem_curvature(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + position_3 = nw.new_node(Nodes.InputPosition) + + spline_parameter_1 = nw.new_node(Nodes.SplineParameter) + + float_curve_1 = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": spline_parameter_1.outputs["Factor"]} + ) + k = uniform(0.0, 0.3) + node_utils.assign_curve( + float_curve_1.mapping.curves[0], + [ + (0.0, 0.0), + (0.2, 0.2 - k / 2.5), + (0.4, 0.4 - k / 1.1), + (0.6, 0.6 - k), + (0.8, 0.8 - k / 1.5), + (1.0, 1.0 - k / 3.0), + ], + ) + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketFloat", "Value", 0.2)] + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: float_curve_1, 1: group_input.outputs["Value"]}, + attrs={"operation": "MULTIPLY"}, + ) + + vector_rotate = nw.new_node( + Nodes.VectorRotate, + input_kwargs={ + "Vector": position_3, + "Center": (0.0, 0.0, 0.2), + "Angle": multiply, + }, + attrs={"rotation_type": "X_AXIS"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Vector": vector_rotate} + ) + + +@node_utils.to_nodegroup( + "nodegroup_pedal_rotation_on_base_circle", singleton=False, type="GeometryNodeTree" +) +def nodegroup_pedal_rotation_on_base_circle(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + random_value_1 = nw.new_node(Nodes.RandomValue, input_kwargs={2: -0.1, 3: 0.1}) + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "Value1", -1.3), + ("NodeSocketFloat", 
"Value2", -1.57), + ], + ) + + add = nw.new_node( + Nodes.Math, + input_kwargs={0: random_value_1.outputs[1], 1: group_input.outputs["Value1"]}, + ) + + random_value_2 = nw.new_node(Nodes.RandomValue, input_kwargs={2: -0.3, 3: 0.3}) + + add_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: random_value_2.outputs[1], 1: group_input.outputs["Value2"]}, + ) + + combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"X": add, "Z": add_1}) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Vector": combine_xyz_2} + ) + + +@node_utils.to_nodegroup( + "nodegroup_base_perturbation", singleton=False, type="GeometryNodeTree" +) +def nodegroup_base_perturbation(nw: NodeWrangler, R=1.0): + # Code generated using version 2.4.3 of the node_transpiler + + random_value_4 = nw.new_node( + Nodes.RandomValue, input_kwargs={2: -0.8 * R, 3: 0.8 * R} + ) + + random_value = nw.new_node( + Nodes.RandomValue, input_kwargs={2: -0.8 * R, 3: 0.8 * R} + ) + + random_value_1 = nw.new_node( + Nodes.RandomValue, input_kwargs={2: -0.2 * R, 3: 0.2 * R} + ) + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketFloat", "Value", 0.5)] + ) + + add = nw.new_node( + Nodes.Math, + input_kwargs={0: random_value_1.outputs[1], 1: group_input.outputs["Value"]}, + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": random_value_4.outputs[1], + "Y": random_value.outputs[1], + "Z": add, + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Vector": combine_xyz_1} + ) + + +@node_utils.to_nodegroup( + "nodegroup_pedal_geometry", singleton=False, type="GeometryNodeTree" +) +def nodegroup_pedal_geometry(nw: NodeWrangler, curve_param=[]): + # Code generated using version 2.4.3 of the node_transpiler + + curve_line = nw.new_node(Nodes.CurveLine, input_kwargs={"End": (0.0, 0.0, 0.2)}) + + integer = nw.new_node(Nodes.Integer, attrs={"integer": 64}) + integer.integer = 64 + + resample_curve = nw.new_node( + Nodes.ResampleCurve, input_kwargs={"Curve": curve_line, "Count": integer} + ) + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "Y_bottom", 0.0), + ("NodeSocketFloat", "X", 0.0), + ("NodeSocketFloat", "Y_top", 0.0), + ("NodeSocketFloat", "pedal_stem", 0.2), + ("NodeSocketFloat", "pedal_z", 0.5), + ], + ) + + pedal_stem_curvature = nw.new_node( + nodegroup_pedal_stem_curvature().name, + input_kwargs={"Value": group_input.outputs["pedal_stem"]}, + ) + + set_position_4 = nw.new_node( + Nodes.SetPosition, + input_kwargs={"Geometry": resample_curve, "Offset": pedal_stem_curvature}, + ) + + pedal_z_contour = nw.new_node( + nodegroup_pedal_z_contour(curve_param=curve_param).name, + input_kwargs={"Value": group_input.outputs["pedal_z"]}, + ) + + set_curve_radius = nw.new_node( + Nodes.SetCurveRadius, + input_kwargs={"Curve": set_position_4, "Radius": pedal_z_contour}, + ) + + pedal_cross_contour = nw.new_node( + nodegroup_pedal_cross_contour().name, + input_kwargs={ + "Y_bottom": group_input.outputs["Y_bottom"], + "X": group_input.outputs["X"], + "Y_top": group_input.outputs["Y_top"], + }, + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": set_curve_radius, + "Profile Curve": pedal_cross_contour, + "Fill Caps": True, + }, + ) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Mesh": curve_to_mesh}) + + +@node_utils.to_nodegroup( + "nodegroup_pedal_on_base", singleton=False, type="GeometryNodeTree" +) +def nodegroup_pedal_on_base(nw: NodeWrangler, R=1.0): + # Code 
generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloatDistance", "Radius", 0.1), + ("NodeSocketFloat", "x_R", -1.3), + ("NodeSocketFloat", "z_R", -1.57), + ("NodeSocketInt", "Resolution", 10), + ("NodeSocketGeometry", "Instance", None), + ("NodeSocketVectorXYZ", "Scale", (1.0, 1.0, 1.0)), + ("NodeSocketFloat", "base_z", 0.5), + ], + ) + + curve_circle_1 = nw.new_node( + Nodes.CurveCircle, + input_kwargs={ + "Resolution": group_input.outputs["Resolution"], + "Radius": group_input.outputs["Radius"], + }, + ) + + base_perturbation = nw.new_node( + nodegroup_base_perturbation(R=R).name, + input_kwargs={"Value": group_input.outputs["base_z"]}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": curve_circle_1.outputs["Curve"], + "Offset": base_perturbation, + }, + ) + + normal_1 = nw.new_node(Nodes.InputNormal) + + align_euler_to_vector_1 = nw.new_node( + Nodes.AlignEulerToVector, + input_kwargs={"Vector": normal_1}, + attrs={"pivot_axis": "Z"}, + ) + + random_value_3 = nw.new_node(Nodes.RandomValue, input_kwargs={2: 0.7, 3: 1.2}) + + instance_on_points_1 = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={ + "Points": set_position, + "Instance": group_input.outputs["Instance"], + "Rotation": align_euler_to_vector_1, + "Scale": random_value_3.outputs[1], + }, + ) + + realize_instances_1 = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": instance_on_points_1} + ) + + pedal_rotation_on_base_circle = nw.new_node( + nodegroup_pedal_rotation_on_base_circle().name, + input_kwargs={0: group_input.outputs["x_R"], 1: group_input.outputs["z_R"]}, + ) + + rotate_instances_1 = nw.new_node( + Nodes.RotateInstances, + input_kwargs={ + "Instances": realize_instances_1, + "Rotation": pedal_rotation_on_base_circle, + }, + ) + + scale_instances = nw.new_node( + Nodes.ScaleInstances, + input_kwargs={ + "Instances": rotate_instances_1, + "Scale": group_input.outputs["Scale"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Instances": scale_instances} + ) + + +def geometry_succulent_nodes(nw: NodeWrangler, **kwargs): + # Code generated using version 2.4.3 of the node_transpiler + pedal_bases = [] + + pedal_cross_coutour_y_bottom = nw.new_node( + Nodes.Value, label="pedal_cross_coutour_y_bottom" + ) + pedal_cross_coutour_y_bottom.outputs[0].default_value = kwargs["cross_y_bottom"] + + pedal_cross_coutour_x = nw.new_node(Nodes.Value, label="pedal_cross_coutour_x") + pedal_cross_coutour_x.outputs[0].default_value = kwargs["cross_x"] + + pedal_cross_coutour_y_top = nw.new_node( + Nodes.Value, label="pedal_cross_coutour_y_top" + ) + pedal_cross_coutour_y_top.outputs[0].default_value = kwargs["cross_y_top"] + pedal_stem_curvature_scale = nw.new_node( + Nodes.Value, label="pedal_stem_curvature_scale" + ) + pedal_stem_curvature_scale.outputs[0].default_value = np.abs(normal(0, 1.0)) + + pedal_z_coutour_scale = nw.new_node(Nodes.Value, label="pedal_z_coutour_scale") + pedal_z_coutour_scale.outputs[0].default_value = uniform(0.4, 0.9) + material = kwargs["material"] + + for i in range(kwargs["num_bases"]): + pedal_geometry = nw.new_node( + nodegroup_pedal_geometry(curve_param=kwargs["pedal_curve_param"]).name, + input_kwargs={ + "Y_bottom": pedal_cross_coutour_y_bottom, + "X": pedal_cross_coutour_x, + "Y_top": pedal_cross_coutour_y_top, + "pedal_stem": pedal_stem_curvature_scale, + "pedal_z": pedal_z_coutour_scale, + }, + ) + + base_circle_radius = 
nw.new_node(Nodes.Value, label="base_circle_radius") + base_circle_radius.outputs[0].default_value = kwargs["base_radius"][i] + + pedal_x_rotation = nw.new_node(Nodes.Value, label="pedal_x_rotation") + pedal_x_rotation.outputs[0].default_value = kwargs["pedal_x_R"][i] + + base_z_rotation = nw.new_node(Nodes.Value, label="base_z_rotation") + base_z_rotation.outputs[0].default_value = -1.57 + normal(0, 0.3) + + base_pedal_num = nw.new_node( + Nodes.Integer, label="base_pedal_num", attrs={"integer": 10} + ) + base_pedal_num.integer = kwargs["base_pedal_num"][i] + + pedal_scale = nw.new_node(Nodes.Value, label="pedal_scale") + pedal_scale.outputs[0].default_value = kwargs["base_pedal_scale"][i] + + base_z = nw.new_node(Nodes.Value, label="base_z") + base_z.outputs[0].default_value = kwargs["base_z"][i] + + pedal_on_base = nw.new_node( + nodegroup_pedal_on_base(R=kwargs["base_radius"][i]).name, + input_kwargs={ + "Radius": base_circle_radius, + "x_R": pedal_x_rotation, + "z_R": base_z_rotation, + "Resolution": base_pedal_num, + "Instance": pedal_geometry, + "Scale": pedal_scale, + "base_z": base_z, + }, + ) + pedal_bases.append(pedal_on_base) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": pedal_bases} + ) + + set_shade_smooth_1 = nw.new_node( + Nodes.SetShadeSmooth, input_kwargs={"Geometry": join_geometry} + ) + + set_material = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": set_shade_smooth_1, + "Material": surface.shaderfunc_to_material(material), + }, + ) + + realized = nw.new_node(Nodes.RealizeInstances, [set_material]) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Geometry": realized}) + + +class SucculentFactory(AssetFactory): + def __init__(self, factory_seed, coarse=False): + super(SucculentFactory, self).__init__(factory_seed, coarse=coarse) + self.mode = np.random.choice(["thin_pedal", "thick_pedal"], p=[0.65, 0.35]) + + def get_params(self, mode): + if mode == "thin_pedal": + params = {} + params["cross_y_bottom"] = uniform(0.08, 0.25) + params["cross_y_top"] = uniform(-0.04, 0.02) + params["cross_x"] = uniform(0.3, 0.6) + # get geometry params on each base + num_bases = randint(5, 8) + params["num_bases"] = num_bases + base_radius, pedal_x_R, base_pedal_num, base_pedal_scale, base_z = ( + [], + [], + [], + [], + [], + ) + init_base_radius, diff_base_radius = uniform(0.09, 0.11), 0.1 + init_x_R, diff_x_R = uniform(-1.2, -1.35), uniform(-0.7, -1.1) + init_pedal_num = randint(num_bases, 15) + diff_pedal_scale = uniform(0.5, 0.9) + for i in range(num_bases): + base_radius.append( + init_base_radius - (i * diff_base_radius) / num_bases + ) + pedal_x_R.append(init_x_R - (i * diff_x_R) / num_bases) + base_pedal_num.append(init_pedal_num - i + randint(0, 2)) + base_pedal_scale.append(1.0 - (i * diff_pedal_scale) / num_bases) + base_z.append(0.0 + i * uniform(0.005, 0.008)) + params["base_radius"] = base_radius + params["pedal_x_R"] = pedal_x_R + params["base_pedal_num"] = base_pedal_num + params["base_pedal_scale"] = base_pedal_scale + params["base_z"] = base_z + + contour_bit = randint(0, 3) + material_bit = randint(0, 3) + + if contour_bit == 0: + params["pedal_curve_param"] = [0.08, 0.4, 0.46, 0.36, 0.17, 0.05] + elif contour_bit == 1: + params["pedal_curve_param"] = [0.22, 0.37, 0.50, 0.49, 0.30, 0.08] + elif contour_bit == 2: + params["pedal_curve_param"] = [0.21, 0.26, 0.31, 0.36, 0.29, 0.16] + else: + raise NotImplementedError + + if material_bit == 0: + params["material"] = 
succulent.shader_green_transition_succulent + elif material_bit == 1: + params["material"] = succulent.shader_pink_transition_succulent + elif material_bit == 2: + params["material"] = succulent.shader_green_succulent + else: + raise NotImplementedError + + return params + + elif mode == "thick_pedal": + params = {} + params["cross_y_bottom"] = uniform(0.22, 0.30) + params["cross_y_top"] = uniform(0.08, 0.15) + params["cross_x"] = uniform(0.14, 0.16) + # get geometry params on each base + num_bases = randint(3, 6) + params["num_bases"] = num_bases + base_radius, pedal_x_R, base_pedal_num, base_pedal_scale, base_z = ( + [], + [], + [], + [], + [], + ) + init_base_radius, diff_base_radius = uniform(0.12, 0.14), 0.11 + init_x_R, diff_x_R = uniform(-1.3, -1.4), uniform(-0.1, -1.2) + init_pedal_num = randint(num_bases, 12) + diff_pedal_scale = uniform(0.6, 0.9) + for i in range(num_bases): + base_radius.append( + init_base_radius - (i * diff_base_radius) / num_bases + ) + pedal_x_R.append(init_x_R - (i * diff_x_R) / num_bases) + base_pedal_num.append(init_pedal_num - i + randint(0, 2)) + base_pedal_scale.append(1.0 - (i * diff_pedal_scale) / num_bases) + base_z.append(0.0 + i * uniform(0.005, 0.006)) + params["base_radius"] = base_radius + params["pedal_x_R"] = pedal_x_R + params["base_pedal_num"] = base_pedal_num + params["base_pedal_scale"] = base_pedal_scale + params["base_z"] = base_z + + contour_bit = randint(0, 2) + material_bit = randint(0, 2) + + if contour_bit == 0: + params["pedal_curve_param"] = [0.10, 0.36, 0.44, 0.45, 0.30, 0.24] + elif contour_bit == 1: + params["pedal_curve_param"] = [0.16, 0.35, 0.48, 0.42, 0.30, 0.18] + else: + raise NotImplementedError + + if material_bit == 0: + params["material"] = succulent.shader_yellow_succulent + elif material_bit == 1: + params["material"] = succulent.shader_whitish_green_succulent + else: + raise NotImplementedError + + return params + else: + raise NotImplementedError + + def create_asset(self, **params): + bpy.ops.mesh.primitive_plane_add( + size=1, + enter_editmode=False, + align="WORLD", + location=(0, 0, 0), + scale=(1, 1, 1), + ) + obj = bpy.context.active_object + + params = self.get_params(self.mode) + + surface.add_geomod( + obj, + geometry_succulent_nodes, + apply=True, + attributes=[], + input_kwargs=params, + ) + + obj.scale = (0.2, 0.2, 0.2) + obj.location.z += 0.01 + butil.apply_transform(obj, loc=True, scale=True) + + tag_object(obj, "succulent") + + return obj + + +if __name__ == "__main__": + fac = SucculentFactory(0) + fac.create_asset() diff --git a/infinigen/assets/table_decorations/__init__.py b/infinigen/assets/objects/table_decorations/__init__.py similarity index 53% rename from infinigen/assets/table_decorations/__init__.py rename to infinigen/assets/objects/table_decorations/__init__.py index a58696a3a..1768f2fb2 100644 --- a/infinigen/assets/table_decorations/__init__.py +++ b/infinigen/assets/objects/table_decorations/__init__.py @@ -1,3 +1,3 @@ -from .vase import VaseFactory +from .book import BookColumnFactory, BookFactory, BookStackFactory from .sink import SinkFactory, TapFactory -from .book import BookFactory, BookColumnFactory, BookStackFactory +from .vase import VaseFactory diff --git a/infinigen/assets/table_decorations/book.py b/infinigen/assets/objects/table_decorations/book.py similarity index 69% rename from infinigen/assets/table_decorations/book.py rename to infinigen/assets/objects/table_decorations/book.py index 785abedce..dc41242b0 100644 --- a/infinigen/assets/table_decorations/book.py +++ 
b/infinigen/assets/objects/table_decorations/book.py @@ -1,59 +1,59 @@ # Copyright (c) Princeton University. # This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. +import math + +import bmesh + # Authors: Lingjie Mei import bpy -import bmesh import numpy as np -import math -import trimesh from numpy.random import uniform -from trimesh import proximity +from infinigen.assets.material_assignments import AssetList from infinigen.assets.materials import text from infinigen.assets.utils.decorate import read_co, write_attribute, write_co -from infinigen.assets.utils.object import center, join_objects, new_bbox, new_cube, obj2trimesh from infinigen.assets.utils.mesh import longest_ray +from infinigen.assets.utils.object import center, join_objects, new_bbox, new_cube from infinigen.assets.utils.uv import wrap_front_back_side from infinigen.core.placement.factory import AssetFactory +from infinigen.core.util import blender as butil from infinigen.core.util.math import FixedSeed from infinigen.core.util.random import log_uniform -from infinigen.core.util import blender as butil -from infinigen.assets.material_assignments import AssetList class BookFactory(AssetFactory): def __init__(self, factory_seed, coarse=False): super(BookFactory, self).__init__(factory_seed, coarse) self.rel_scale = log_uniform(1, 1.5) self.skewness = log_uniform(1.3, 1.8) - self.unit = .0127 - self.is_paperback = uniform() < .5 - self.margin = uniform(.005, .01) - self.offset = 0 if uniform() < .5 else log_uniform(.002, .008) - self.thickness = uniform(.002, .003) - - materials = AssetList['BookFactory']() - self.surface = materials['surface'].assign_material() - self.cover_surface = materials['cover_surface'].assign_material() + self.unit = 0.0127 + self.is_paperback = uniform() < 0.5 + self.margin = uniform(0.005, 0.01) + self.offset = 0 if uniform() < 0.5 else log_uniform(0.002, 0.008) + self.thickness = uniform(0.002, 0.003) + + materials = AssetList["BookFactory"]() + self.surface = materials["surface"].assign_material() + self.cover_surface = materials["cover_surface"].assign_material() if self.cover_surface == text.Text: self.cover_surface = self.cover_surface(self.factory_seed) - scratch_prob, edge_wear_prob = materials['wear_tear_prob'] - self.scratch, self.edge_wear = materials['wear_tear'] + scratch_prob, edge_wear_prob = materials["wear_tear_prob"] + self.scratch, self.edge_wear = materials["wear_tear"] self.scratch = None if uniform() > scratch_prob else self.scratch self.edge_wear = None if uniform() > edge_wear_prob else self.edge_wear - - self.texture_shared = uniform() < .2 + + self.texture_shared = uniform() < 0.2 def create_asset(self, **params) -> bpy.types.Object: - width = int(log_uniform(.08, .15) * self.rel_scale / self.unit) * self.unit + width = int(log_uniform(0.08, 0.15) * self.rel_scale / self.unit) * self.unit height = int(width * self.skewness / self.unit) * self.unit - depth = uniform(.01, .02) * self.rel_scale + depth = uniform(0.01, 0.02) * self.rel_scale fn = self.make_paperback if self.is_paperback else self.make_hardcover # noinspection PyArgumentList obj = fn(width, height, depth) - + return obj def finalize_assets(self, assets): @@ -69,17 +69,17 @@ def make_paperback(self, width, height, depth): obj.scale = width / 2, height / 2, depth / 2 butil.apply_transform(obj, True) - with butil.ViewportMode(obj, 'EDIT'): + with butil.ViewportMode(obj, "EDIT"): bm = bmesh.from_edit_mesh(obj.data) geom = [] for e in 
bm.edges: u, v = e.verts if u.co[0] > 0 and v.co[0] > 0 and u.co[-1] != v.co[-1]: geom.append(e) - bmesh.ops.delete(bm, geom=geom, context='EDGES') + bmesh.ops.delete(bm, geom=geom, context="EDGES") self.make_cover(obj) - write_attribute(obj, 1, 'cover', 'FACE') + write_attribute(obj, 1, "cover", "FACE") obj = join_objects([paper, obj]) return obj @@ -95,27 +95,36 @@ def make_hardcover(self, width, height, depth): paper = self.make_paper(depth, height, width) obj = new_cube() count = 8 - butil.modify_mesh(obj, 'ARRAY', count=count, relative_offset_displace=(0, 0, 1), - use_merge_vertices=True) + butil.modify_mesh( + obj, + "ARRAY", + count=count, + relative_offset_displace=(0, 0, 1), + use_merge_vertices=True, + ) obj.location = 1, 1, 1 butil.apply_transform(obj, loc=True) - with butil.ViewportMode(obj, 'EDIT'): + with butil.ViewportMode(obj, "EDIT"): bm = bmesh.from_edit_mesh(obj.data) geom = [] for v in bm.verts: if v.co[0] > 0 and 0 < v.co[-1] < count * 2: geom.append(v) - bmesh.ops.delete(bm, geom=geom, context='VERTS') - obj.location = 0, - self.margin, 0 - obj.scale = (width + self.margin) / 2, height / 2 + self.margin, depth / 2 / count + bmesh.ops.delete(bm, geom=geom, context="VERTS") + obj.location = 0, -self.margin, 0 + obj.scale = ( + (width + self.margin) / 2, + height / 2 + self.margin, + depth / 2 / count, + ) butil.apply_transform(obj, True) x, y, z = read_co(obj).T ratio = np.minimum(z / depth, 1 - z / depth) x -= 4 * ratio * (1 - ratio) * self.offset write_co(obj, np.stack([x, y, z]).T) self.make_cover(obj) - butil.modify_mesh(obj, 'SOLIDIFY', thickness=self.thickness) - write_attribute(obj, 1, 'cover', 'FACE') + butil.modify_mesh(obj, "SOLIDIFY", thickness=self.thickness) + write_attribute(obj, 1, "cover", "FACE") obj = join_objects([paper, obj]) return obj @@ -131,16 +140,27 @@ class BookColumnFactory(AssetFactory): def __init__(self, factory_seed, coarse=False): super(BookColumnFactory, self).__init__(factory_seed, coarse) with FixedSeed(self.factory_seed): - self.base_factories = [BookFactory(np.random.randint(1e5)) for _ in range(np.random.randint(1, 4))] + self.base_factories = [ + BookFactory(np.random.randint(1e5)) + for _ in range(np.random.randint(1, 4)) + ] self.n_books = np.random.randint(10, 20) - self.max_angle = uniform(0, np.pi / 9) if uniform() < .7 else 0 + self.max_angle = uniform(0, np.pi / 9) if uniform() < 0.7 else 0 self.max_rel_scale = max(f.rel_scale for f in self.base_factories) self.max_skewness = max(f.skewness for f in self.base_factories) def create_placeholder(self, **kwargs) -> bpy.types.Object: - height = .15 * self.max_rel_scale * self.max_skewness - return new_bbox(0, (.02 + np.sin(self.max_angle) * height) * self.n_books * self.max_rel_scale, - -.15 * self.max_rel_scale, 0, 0, height) + height = 0.15 * self.max_rel_scale * self.max_skewness + return new_bbox( + 0, + (0.02 + np.sin(self.max_angle) * height) + * self.n_books + * self.max_rel_scale, + -0.15 * self.max_rel_scale, + 0, + 0, + height, + ) def create_asset(self, **params) -> bpy.types.Object: books = [] @@ -150,12 +170,20 @@ def create_asset(self, **params) -> bpy.types.Object: x, y, z = read_co(obj).T obj.location = [-np.max(x), -np.min(y), -np.min(z)] butil.apply_transform(obj, True) - if uniform() < .5: - obj.rotation_euler = np.pi / 2 - uniform(0, self.max_angle), 0, np.pi / 2 + if uniform() < 0.5: + obj.rotation_euler = ( + np.pi / 2 - uniform(0, self.max_angle), + 0, + np.pi / 2, + ) else: obj.location[-1] = -np.max(z) butil.apply_transform(obj, True) - 
obj.rotation_euler = np.pi / 2 + uniform(0, self.max_angle), 0, np.pi / 2 + obj.rotation_euler = ( + np.pi / 2 + uniform(0, self.max_angle), + 0, + np.pi / 2, + ) butil.apply_transform(obj) if i > 0: obj.location[0] = 10 @@ -171,24 +199,31 @@ def create_asset(self, **params) -> bpy.types.Object: butil.apply_transform(obj, True) return obj + def rotate(theta, x, y): - return x * math.cos(theta) - y * math.sin(theta), x * math.sin(theta) + y * math.cos(theta) + return x * math.cos(theta) - y * math.sin(theta), x * math.sin( + theta + ) + y * math.cos(theta) + class BookStackFactory(AssetFactory): def __init__(self, factory_seed, coarse=False): super(BookStackFactory, self).__init__(factory_seed, coarse) with FixedSeed(self.factory_seed): - self.base_factories = [BookFactory(np.random.randint(1e5)) for _ in range(np.random.randint(1, 4))] + self.base_factories = [ + BookFactory(np.random.randint(1e5)) + for _ in range(np.random.randint(1, 4)) + ] self.n_books = int(log_uniform(5, 15)) - self.max_angle = uniform(np.pi / 9, np.pi / 6) if uniform() < .7 else 0 + self.max_angle = uniform(np.pi / 9, np.pi / 6) if uniform() < 0.7 else 0 self.max_rel_scale = max(f.rel_scale for f in self.base_factories) self.max_skewness = max(f.skewness for f in self.base_factories) def create_placeholder(self, **kwargs) -> bpy.types.Object: - x_lo = -.15 * self.max_rel_scale / 2 - x_hi = .15 * self.max_rel_scale / 2 - y_lo = -.15 * self.max_rel_scale / 2 * self.max_skewness - y_hi = .15 * self.max_rel_scale / 2 * self.max_skewness + x_lo = -0.15 * self.max_rel_scale / 2 + x_hi = 0.15 * self.max_rel_scale / 2 + y_lo = -0.15 * self.max_rel_scale / 2 * self.max_skewness + y_hi = 0.15 * self.max_rel_scale / 2 * self.max_skewness theta = self.max_angle x_1, y_1 = rotate(theta, x_lo, y_lo) @@ -196,9 +231,14 @@ def create_placeholder(self, **kwargs) -> bpy.types.Object: x_3, y_3 = rotate(theta, x_hi, y_lo) x_4, y_4 = rotate(theta, x_hi, y_hi) - return new_bbox(min(min([x_1,x_2,x_3,x_4]), x_lo ), max(max([x_1,x_2,x_3,x_4]), x_hi), - min(min([y_1,y_2,y_3,y_4]), y_lo), max(max([y_1,y_2,y_3,y_4]), y_hi), - 0, self.n_books * .02 * self.max_rel_scale * 0.8) + return new_bbox( + min(min([x_1, x_2, x_3, x_4]), x_lo), + max(max([x_1, x_2, x_3, x_4]), x_hi), + min(min([y_1, y_2, y_3, y_4]), y_lo), + max(max([y_1, y_2, y_3, y_4]), y_hi), + 0, + self.n_books * 0.02 * self.max_rel_scale * 0.8, + ) def create_asset(self, **params) -> bpy.types.Object: books = [] diff --git a/infinigen/assets/objects/table_decorations/sink.py b/infinigen/assets/objects/table_decorations/sink.py new file mode 100644 index 000000000..c4899c0c4 --- /dev/null +++ b/infinigen/assets/objects/table_decorations/sink.py @@ -0,0 +1,1352 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
+ +# Authors: +# - Hongyu Wen: sink geometry +# - Meenal Parakh: material assignment +# - Stamatis Alexandropoulos: taps +# - Alexander Raistrick: placeholder, optimize detail, redo cutter + + +import bpy +import numpy as np +from numpy.random import uniform as U + +from infinigen.assets.material_assignments import AssetList +from infinigen.assets.utils import bbox_from_mesh +from infinigen.assets.utils.extract_nodegroup_parts import extract_nodegroup_geo +from infinigen.core import surface, tagging +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.util import blender as butil +from infinigen.core.util.math import FixedSeed + + +class SinkFactory(AssetFactory): + def __init__( + self, factory_seed, coarse=False, dimensions=[1.0, 1.0, 1.0], upper_height=None + ): + super(SinkFactory, self).__init__(factory_seed, coarse=coarse) + + self.dimensions = dimensions + self.factory_seed = factory_seed + with FixedSeed(factory_seed): + self.params = self.sample_parameters(dimensions, upper_height=upper_height) + self.material_params, self.scratch, self.edge_wear = ( + self.get_material_params() + ) + self.params.update(self.material_params) + + self.tap_factory = TapFactory(factory_seed) + + def get_material_params(self): + material_assignments = AssetList["SinkFactory"]() + params = { + "Sink": material_assignments["sink"].assign_material(), + "Tap": material_assignments["tap"].assign_material(), + } + wrapped_params = { + k: surface.shaderfunc_to_material(v) for k, v in params.items() + } + + scratch_prob, edge_wear_prob = material_assignments["wear_tear_prob"] + scratch, edge_wear = material_assignments["wear_tear"] + + is_scratch = U() < scratch_prob + is_edge_wear = U() < edge_wear_prob + if not is_scratch: + scratch = None + + if not is_edge_wear: + edge_wear = None + + return wrapped_params, scratch, edge_wear + + @staticmethod + def sample_parameters(dimensions, upper_height, use_default=False, open=False): + width = U(0.4, 1.0) + depth = U(0.4, 0.5) + curvature = U(1.0, 1.0) + if upper_height is None: + upper_height = U(0.2, 0.4) + lower_height = U(0.00, 0.01) + hole_radius = U(0.02, 0.05) + margin = U(0.02, 0.05) + watertap_margin = U(0.1, 0.12) + + params = { + "Width": width, + "Depth": depth, + "Curvature": curvature, + "Upper Height": upper_height, + "Lower Height": lower_height, + "HoleRadius": hole_radius, + "Margin": margin, + "WaterTapMargin": watertap_margin, + "ProtrudeAboveCounter": U(0.01, 0.025), + } + return params + + def _extract_geo_results(self): + params = self.params.copy() + params.pop("ProtrudeAboveCounter") + + with butil.TemporaryObject(butil.spawn_vert()) as temp: + obj = extract_nodegroup_geo( + temp, nodegroup_sink_geometry(), "Geometry", ng_params=params + ) + cutter = extract_nodegroup_geo( + temp, nodegroup_sink_geometry(), "Cutter", ng_params=params + ) + + return obj, cutter + + def create_placeholder(self, i, **kwargs) -> bpy.types.Object: + obj, cutter = self._extract_geo_results() + butil.delete(cutter) + + min_corner, max_corner = butil.bounds(obj) + min_corner[-1] = max_corner[-1] - self.params["ProtrudeAboveCounter"] + top_slice_placeholder = bbox_from_mesh.box_from_corners(min_corner, max_corner) + + butil.delete(obj) + + return top_slice_placeholder + + def create_asset(self, i, placeholder, state=None, **params): + obj, cutter = self._extract_geo_results() + tagging.tag_system.relabel_obj(obj) + + 
cutter.parent = obj + cutter.name = repr(self) + f".spawn_placeholder({i}).cutter" + cutter.hide_render = True + + tap_loc = (-self.params["Depth"] / 2, 0, self.params["Upper Height"]) + tap = self.tap_factory.spawn_asset(i, loc=tap_loc, rot=(0, 0, 0)) + tap.parent = obj + + return obj + + def finalize_assets(self, assets): + if self.scratch: + self.scratch.apply(assets) + if self.edge_wear: + self.edge_wear.apply(assets) + + +class TapFactory(AssetFactory): + def __init__(self, factory_seed): + super().__init__(factory_seed) + with FixedSeed(factory_seed): + self.params, self.scratch, self.edge_wear = self.get_material_params() + + @staticmethod + def tap_parameters(): + params = { + "base_width": U(0.570, 0.630), + "tap_head": U(0.7, 1.1), + "roation_z": U(5.5, 7.0), + "tap_height": U(0.5, 1), + "base_radius": U(0.0, 0.3), + "Switch": True if U() > 0.5 else False, + "Y": U(-0.5, -0.06), + "hand_type": True if U() > 0.2 else False, + "hands_length_x": U(0.750, 1.25), + "hands_length_Y": U(0.950, 1.550), + "one_side": True if U() > 0.5 else False, + "different_type": True if U() > 0.8 else False, + } + return params + + def get_material_params(self): + material_assignments = AssetList["TapFactory"]() + tap_material = material_assignments["tap"].assign_material() + + wrapped_params = {"Tap": surface.shaderfunc_to_material(tap_material)} + + scratch_prob, edge_wear_prob = material_assignments["wear_tear_prob"] + scratch, edge_wear = material_assignments["wear_tear"] + + is_scratch = U() < scratch_prob + is_edge_wear = U() < edge_wear_prob + if not is_scratch: + scratch = None + + if not is_edge_wear: + edge_wear = None + + return wrapped_params, scratch, edge_wear + + def create_asset(self, **_): + obj = butil.spawn_cube() + butil.modify_mesh( + obj, + "NODES", + node_group=nodegroup_water_tap(), + ng_inputs=self.params, + apply=True, + ) + obj.scale = (0.4,) * 3 + obj.rotation_euler.z += np.pi + butil.apply_transform(obj) + return obj + + def finalize_assets(self, assets): + if self.scratch: + self.scratch.apply(assets) + if self.edge_wear: + self.edge_wear.apply(assets) + + +@node_utils.to_nodegroup("nodegroup_handle", singleton=False, type="GeometryNodeTree") +def nodegroup_handle(nw: NodeWrangler): + # Code generated using version 2.6.5 of the node_transpiler + + bezier_segment = nw.new_node( + Nodes.CurveBezierSegment, + input_kwargs={ + "Start": (0.0000, 0.0000, 0.0000), + "Start Handle": (0.0000, 0.0000, 0.7000), + "End Handle": (0.2000, 0.0000, 0.7000), + "End": (1.0000, 0.0000, 0.9000), + }, + ) + + spline_parameter = nw.new_node(Nodes.SplineParameter) + + float_curve = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": spline_parameter.outputs["Factor"]} + ) + node_utils.assign_curve( + float_curve.mapping.curves[0], [(0.0000, 0.9750), (1.0000, 0.1625)] + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: float_curve, 1: 1.3000}, + attrs={"operation": "MULTIPLY"}, + ) + + set_curve_radius = nw.new_node( + Nodes.SetCurveRadius, input_kwargs={"Curve": bezier_segment, "Radius": multiply} + ) + + curve_circle = nw.new_node(Nodes.CurveCircle, input_kwargs={"Radius": 0.2000}) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": set_curve_radius, + "Profile Curve": curve_circle.outputs["Curve"], + "Fill Caps": True, + }, + ) + + position = nw.new_node(Nodes.InputPosition) + + separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": position}) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": 
separate_xyz.outputs["X"], + 1: 0.2000, + 3: 1.0000, + 4: 2.5000, + }, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["Y"], 1: map_range.outputs["Result"]}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": separate_xyz.outputs["X"], + "Y": multiply_1, + "Z": separate_xyz.outputs["Z"], + }, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={"Geometry": curve_to_mesh, "Position": combine_xyz}, + ) + + subdivision_surface = nw.new_node( + Nodes.SubdivisionSurface, input_kwargs={"Mesh": set_position, "Level": 2} + ) + + set_shade_smooth = nw.new_node( + Nodes.SetShadeSmooth, input_kwargs={"Geometry": subdivision_surface} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": set_shade_smooth}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_water_tap", singleton=False, type="GeometryNodeTree" +) +def nodegroup_water_tap(nw: NodeWrangler): + # Code generated using version 2.6.5 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloatDistance", "base_width", U(0.2, 0.3)), + ("NodeSocketFloat", "tap_head", U(0.7, 1.1)), + ("NodeSocketFloat", "roation_z", U(5.5, 7.0)), + ("NodeSocketFloat", "tap_height", U(0.5, 1)), + ("NodeSocketFloatDistance", "base_radius", U(0.0, 0.1)), + ("NodeSocketBool", "Switch", True if U() > 0.5 else False), + ("NodeSocketFloat", "Y", U(-0.5, -0.06)), + ("NodeSocketBool", "hand_type", True if U() > 0.2 else False), + ("NodeSocketFloat", "hands_length_x", U(0.750, 1.25)), + ("NodeSocketFloat", "hands_length_Y", U(0.950, 1.550)), + ("NodeSocketBool", "one_side", True if U() > 0.5 else False), + ("NodeSocketBool", "different_type", True if U() > 0.8 else False), + ("NodeSocketBool", "length_one_side", True if U() > 0.8 else False), + ], + ) + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketMaterial", "Tap", None)] + ) + curve_circle = nw.new_node(Nodes.CurveCircle, input_kwargs={"Radius": 0.0500}) + + fill_curve_1 = nw.new_node( + Nodes.FillCurve, input_kwargs={"Curve": curve_circle.outputs["Curve"]} + ) + + extrude_mesh_1 = nw.new_node( + Nodes.ExtrudeMesh, input_kwargs={"Mesh": fill_curve_1, "Offset Scale": 0.1500} + ) + + quadrilateral = nw.new_node( + "GeometryNodeCurvePrimitiveQuadrilateral", + input_kwargs={"Width": 0.2000, "Height": 0.7000}, + ) + + fillet_curve = nw.new_node( + "GeometryNodeFilletCurve", + input_kwargs={"Curve": quadrilateral, "Count": 19, "Radius": 0.1000}, + attrs={"mode": "POLY"}, + ) + + fill_curve = nw.new_node(Nodes.FillCurve, input_kwargs={"Curve": fillet_curve}) + + extrude_mesh = nw.new_node( + Nodes.ExtrudeMesh, input_kwargs={"Mesh": fill_curve, "Offset Scale": 0.0500} + ) + + curve_line = nw.new_node( + Nodes.CurveLine, input_kwargs={"End": (0.0000, 0.0000, 0.6000)} + ) + + curve_circle_1 = nw.new_node(Nodes.CurveCircle, input_kwargs={"Radius": 0.0300}) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": curve_line, + "Profile Curve": curve_circle_1.outputs["Curve"], + }, + ) + + curve_circle_2 = nw.new_node(Nodes.CurveCircle, input_kwargs={"Radius": 0.2000}) + + transform_geometry = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": curve_circle_2.outputs["Curve"], + "Translation": (0.0000, 0.2000, 0.0000), + }, + ) + + transform_geometry_1 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": transform_geometry, + 
"Rotation": (-1.5708, 1.5708, 0.0000), + "Scale": (1.0000, 0.7000, 1.0000), + }, + ) + + combine_xyz_3 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": 0.2000, "Y": group_input.outputs["Y"]} + ) + + bezier_segment = nw.new_node( + Nodes.CurveBezierSegment, + input_kwargs={ + "Resolution": 177, + "Start": (0.0000, 0.0000, 0.0000), + "Start Handle": (0.0000, 1.2000, 0.0000), + "End Handle": combine_xyz_3, + "End": (-0.0500, 0.1000, 0.0000), + }, + ) + + trim_curve = nw.new_node( + Nodes.TrimCurve, input_kwargs={"Curve": bezier_segment, 3: 0.6625, 5: 3.0000} + ) + + transform_geometry_6 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": trim_curve, + "Rotation": (1.5708, 0.0000, 2.5220), + "Scale": (5.2000, 0.5000, 7.8000), + }, + ) + + curve_circle_3 = nw.new_node(Nodes.CurveCircle, input_kwargs={"Radius": 0.0300}) + + curve_to_mesh_2 = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": transform_geometry_6, + "Profile Curve": curve_circle_3.outputs["Curve"], + }, + ) + + switch = nw.new_node( + Nodes.Switch, + input_kwargs={ + 1: group_input.outputs["Switch"], + 14: transform_geometry_1, + 15: curve_to_mesh_2, + }, + ) + + curve_to_mesh_1 = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": switch.outputs[6], + "Profile Curve": curve_circle_1.outputs["Curve"], + }, + ) + + position = nw.new_node(Nodes.InputPosition) + + separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": position}) + + greater_than = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["Z"], 1: -0.0100}, + attrs={"operation": "GREATER_THAN"}, + ) + + switch_1 = nw.new_node( + Nodes.Switch, + input_kwargs={0: group_input.outputs["Switch"], 2: greater_than, 3: 1.0000}, + attrs={"input_type": "FLOAT"}, + ) + + separate_geometry = nw.new_node( + Nodes.SeparateGeometry, + input_kwargs={ + "Geometry": curve_to_mesh_1, + "Selection": switch_1.outputs["Output"], + }, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": 1.0000, "Y": 1.0000, "Z": group_input.outputs["tap_head"]}, + ) + + switch_2 = nw.new_node( + Nodes.Switch, + input_kwargs={ + 0: group_input.outputs["Switch"], + 8: combine_xyz, + 9: (1.0000, 1.0000, 1.0000), + }, + attrs={"input_type": "VECTOR"}, + ) + + transform_geometry_2 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": separate_geometry.outputs["Selection"], + "Translation": (0.0000, 0.0000, 0.6000), + "Scale": switch_2.outputs[3], + }, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [curve_to_mesh, transform_geometry_2]}, + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"Z": group_input.outputs["roation_z"]} + ) + + combine_xyz_2 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": 1.0000, "Y": 1.0000, "Z": group_input.outputs["tap_height"]}, + ) + + transform_geometry_5 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": join_geometry, + "Rotation": combine_xyz_1, + "Scale": combine_xyz_2, + }, + ) + + handle = nw.new_node(nodegroup_handle().name) + + transform_geometry_4 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": handle, + "Translation": (0.0000, -0.2000, 0.0000), + "Rotation": (0.0000, 0.0000, 3.6652), + "Scale": (0.3000, 0.3000, 0.3000), + }, + ) + + transform_geometry_3 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": handle, + "Translation": (0.0000, 0.2000, 0.0000), + "Rotation": (0.0000, 0.0000, 2.6180), + "Scale": (0.3000, 0.3000, 0.3000), + }, + ) + + join_geometry_2 = 
nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [transform_geometry_4, transform_geometry_3]}, + ) + + cylinder = nw.new_node( + "GeometryNodeMeshCylinder", + input_kwargs={ + "Vertices": 41, + "Side Segments": 39, + "Radius": 0.0300, + "Depth": 0.1000, + }, + ) + + transform_geometry_7 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": cylinder.outputs["Mesh"], + "Translation": (0.0000, 0.0500, 0.1000), + "Rotation": (1.5708, 0.0000, 0.0000), + }, + ) + + switch_5 = nw.new_node( + Nodes.Switch, + input_kwargs={1: group_input.outputs["one_side"], 14: transform_geometry_7}, + ) + + transform_geometry_8 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": cylinder.outputs["Mesh"], + "Translation": (0.0000, -0.0500, 0.1000), + "Rotation": (1.5708, 0.0000, 0.0000), + }, + ) + + join_geometry_3 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [switch_5.outputs[6], transform_geometry_8]}, + ) + + cylinder_1 = nw.new_node( + "GeometryNodeMeshCylinder", + input_kwargs={ + "Vertices": 41, + "Side Segments": 39, + "Radius": 0.0050, + "Depth": 0.1000, + }, + ) + + transform_geometry_9 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": cylinder_1.outputs["Mesh"], + "Translation": (0.0000, 0.0800, 0.1500), + "Scale": (1.0000, 1.0000, 1.1000), + }, + ) + + switch_4 = nw.new_node( + Nodes.Switch, + input_kwargs={1: group_input.outputs["one_side"], 14: transform_geometry_9}, + ) + + transform_geometry_10 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": cylinder_1.outputs["Mesh"], + "Translation": (0.0000, -0.0800, 0.1500), + "Rotation": (0.0000, 0.0000, 0.0855), + "Scale": (1.0000, 1.0000, 1.1000), + }, + ) + + transform_geometry_17 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": transform_geometry_10, + "Translation": (0.0000, -0.0100, -0.0050), + "Scale": (4.1000, 1.0000, 1.0000), + }, + ) + + switch_8 = nw.new_node( + Nodes.Switch, + input_kwargs={ + 1: group_input.outputs["length_one_side"], + 14: transform_geometry_10, + 15: transform_geometry_17, + }, + ) + + switch_7 = nw.new_node( + Nodes.Switch, + input_kwargs={ + 1: group_input.outputs["one_side"], + 14: transform_geometry_10, + 15: switch_8.outputs[6], + }, + ) + + join_geometry_4 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [switch_4.outputs[6], switch_7.outputs[6]]}, + ) + + join_geometry_5 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [join_geometry_3, join_geometry_4]}, + ) + + combine_xyz_4 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": group_input.outputs["hands_length_x"], + "Y": group_input.outputs["hands_length_Y"], + "Z": 1.0000, + }, + ) + + transform_geometry_11 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": join_geometry_5, "Scale": combine_xyz_4}, + ) + + switch_3 = nw.new_node( + Nodes.Switch, + input_kwargs={ + 1: group_input.outputs["hand_type"], + 14: join_geometry_2, + 15: transform_geometry_11, + }, + ) + + curve_circle = nw.new_node(Nodes.CurveCircle, input_kwargs={"Radius": 0.0500}) + + fill_curve = nw.new_node( + Nodes.FillCurve, input_kwargs={"Curve": curve_circle.outputs["Curve"]} + ) + + extrude_mesh = nw.new_node( + Nodes.ExtrudeMesh, input_kwargs={"Mesh": fill_curve, "Offset Scale": 0.1500} + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={ + "Geometry": [ + transform_geometry_5, + switch_3.outputs[6], + extrude_mesh.outputs["Mesh"], + ] + }, + ) + + bezier_segment_1 = nw.new_node( + Nodes.CurveBezierSegment, + 
input_kwargs={ + "Resolution": 54, + "Start": (0.0000, 0.0000, 0.0000), + "Start Handle": (0.0000, 0.0000, 0.7000), + "End Handle": (0.2000, 0.0000, 0.7000), + "End": (1.0000, 0.0000, 0.9000), + }, + ) + + spline_parameter = nw.new_node(Nodes.SplineParameter) + + float_curve = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": spline_parameter.outputs["Factor"]} + ) + node_utils.assign_curve( + float_curve.mapping.curves[0], + [(0.0000, 0.9750), (0.6295, 0.4125), (1.0000, 0.1625)], + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: float_curve, 1: 1.3000}, + attrs={"operation": "MULTIPLY"}, + ) + + set_curve_radius = nw.new_node( + Nodes.SetCurveRadius, + input_kwargs={"Curve": bezier_segment_1, "Radius": multiply}, + ) + + curve_circle_4 = nw.new_node(Nodes.CurveCircle, input_kwargs={"Radius": 0.1000}) + + curve_to_mesh_3 = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": set_curve_radius, + "Profile Curve": curve_circle_4.outputs["Curve"], + "Fill Caps": True, + }, + ) + + position_1 = nw.new_node(Nodes.InputPosition) + + separate_xyz_1 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": position_1}) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": separate_xyz_1.outputs["X"], + 1: 0.2000, + 3: 1.0000, + 4: 2.5000, + }, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_1.outputs["Y"], 1: map_range.outputs["Result"]}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_5 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": separate_xyz_1.outputs["X"], + "Y": multiply_1, + "Z": separate_xyz_1.outputs["Z"], + }, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": curve_to_mesh_3, + "Position": combine_xyz_5, + "Offset": (0.0000, 0.0000, 0.0000), + }, + ) + + subdivision_surface = nw.new_node( + Nodes.SubdivisionSurface, input_kwargs={"Mesh": set_position, "Level": 1} + ) + + set_shade_smooth = nw.new_node( + Nodes.SetShadeSmooth, input_kwargs={"Geometry": subdivision_surface} + ) + + transform_geometry_12 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": set_shade_smooth, + "Translation": (0.0000, 0.0000, 0.1000), + "Rotation": (0.0000, 0.0000, 0.6807), + "Scale": (0.4000, 0.4000, 0.3000), + }, + ) + + curve_circle_5 = nw.new_node( + Nodes.CurveCircle, input_kwargs={"Resolution": 307, "Radius": 0.0550} + ) + + fill_curve_2 = nw.new_node( + Nodes.FillCurve, input_kwargs={"Curve": curve_circle_5.outputs["Curve"]} + ) + + extrude_mesh_2 = nw.new_node( + Nodes.ExtrudeMesh, input_kwargs={"Mesh": fill_curve_2, "Offset Scale": 0.1500} + ) + + cylinder_2 = nw.new_node( + "GeometryNodeMeshCylinder", + input_kwargs={"Vertices": 100, "Radius": 0.0100, "Depth": 0.7000}, + ) + + set_position_1 = nw.new_node( + Nodes.SetPosition, input_kwargs={"Geometry": cylinder_2.outputs["Mesh"]} + ) + + transform_geometry_13 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": set_position_1, + "Translation": (0.3000, 0.0000, 0.2500), + "Rotation": (0.0000, -2.0420, 0.0000), + "Scale": (1.7000, 3.1000, 1.0000), + }, + ) + + cylinder_3 = nw.new_node( + "GeometryNodeMeshCylinder", + input_kwargs={"Vertices": 318, "Radius": 0.0200, "Depth": 0.0300}, + ) + + transform_geometry_14 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": cylinder_3.outputs["Mesh"], + "Translation": (0.5950, 0.0000, 0.3800), + }, + ) + + join_geometry_7 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [transform_geometry_13, transform_geometry_14]}, + ) + 
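+    # Descriptive note: the nodes above and below assemble an alternate tap
+    # variant; switch_6 further down selects between it and join_geometry_1
+    # based on the "different_type" input.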
+ transform_geometry_15 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": join_geometry_7, "Scale": (0.9000, 1.0000, 1.0000)}, + ) + + join_geometry_8 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={ + "Geometry": [ + transform_geometry_12, + extrude_mesh_2.outputs["Mesh"], + transform_geometry_15, + ] + }, + ) + + transform_geometry_16 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": join_geometry_8, + "Rotation": (0.0000, 0.0000, 3.1416), + }, + ) + + switch_6 = nw.new_node( + Nodes.Switch, + input_kwargs={ + 1: group_input.outputs["different_type"], + 14: join_geometry_1, + 15: transform_geometry_16, + }, + ) + + quadrilateral = nw.new_node( + "GeometryNodeCurvePrimitiveQuadrilateral", + input_kwargs={"Width": group_input.outputs["base_width"], "Height": 0.7000}, + ) + + fillet_curve = nw.new_node( + Nodes.FilletCurve, + input_kwargs={ + "Curve": quadrilateral, + "Count": 19, + "Radius": group_input.outputs["base_radius"], + }, + attrs={"mode": "POLY"}, + ) + + fill_curve_1 = nw.new_node(Nodes.FillCurve, input_kwargs={"Curve": fillet_curve}) + + extrude_mesh_1 = nw.new_node( + Nodes.ExtrudeMesh, input_kwargs={"Mesh": fill_curve_1, "Offset Scale": 0.0500} + ) + + join_geometry_6 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={ + "Geometry": [switch_6.outputs[6], extrude_mesh_1.outputs["Mesh"]] + }, + ) + + set_material = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": join_geometry_6, + "Material": group_input.outputs["Tap"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": set_material}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_sink_geometry", singleton=False, type="GeometryNodeTree" +) +def nodegroup_sink_geometry(nw: NodeWrangler): + # Code generated using version 2.6.5 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloatDistance", "Width", 2.0000), + ("NodeSocketFloatDistance", "Depth", 2.0000), + ("NodeSocketFloat", "Curvature", 0.9500), + ("NodeSocketFloat", "Upper Height", 1.0000), + ("NodeSocketFloat", "Lower Height", -0.0500), + ("NodeSocketFloatDistance", "HoleRadius", 0.1000), + ("NodeSocketFloat", "Margin", 0.5000), + ("NodeSocketFloat", "WaterTapMargin", 0.5000), + ("NodeSocketMaterial", "Tap", None), + ("NodeSocketMaterial", "Sink", None), + ], + ) + + reroute_3 = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": group_input.outputs["Depth"]} + ) + + reroute_2 = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": group_input.outputs["Width"]} + ) + + quadrilateral = nw.new_node( + "GeometryNodeCurvePrimitiveQuadrilateral", + input_kwargs={"Width": reroute_3, "Height": reroute_2}, + ) + + minimum = nw.new_node( + Nodes.Math, + input_kwargs={0: reroute_3, 1: reroute_2}, + attrs={"operation": "MINIMUM"}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: minimum, 1: 0.1000}, + attrs={"operation": "MULTIPLY"}, + ) + + # inside of sink curve + sink_interior_border = nw.new_node( + "GeometryNodeFilletCurve", + input_kwargs={"Curve": quadrilateral, "Count": 50, "Radius": multiply}, + attrs={"mode": "POLY"}, + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": group_input.outputs["Curvature"], + "Y": group_input.outputs["Curvature"], + }, + ) + + transform_1 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": sink_interior_border, "Scale": combine_xyz_1}, + ) + + curve_circle = nw.new_node( + Nodes.CurveCircle, 
input_kwargs={"Radius": group_input.outputs["HoleRadius"]} + ) + + join_geometry_4 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [transform_1, curve_circle.outputs["Curve"]]}, + ) + + fill_curve_1 = nw.new_node(Nodes.FillCurve, input_kwargs={"Curve": join_geometry_4}) + + # fill_curve_1 = tagging.tag_nodegroup(nw, fill_curve_1, t.Subpart.SupportSurface) + + reroute = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": group_input.outputs["Lower Height"]} + ) + + combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": reroute}) + + transform_2 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": fill_curve_1, "Translation": combine_xyz_2}, + ) + + extrude_mesh_2 = nw.new_node( + Nodes.ExtrudeMesh, + input_kwargs={ + "Mesh": transform_2, + "Offset Scale": -0.0100, + "Individual": False, + }, + ) + + transform_5 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": curve_circle.outputs["Curve"], + "Scale": (0.7000, 0.7000, 1.0000), + }, + ) + + join_geometry_6 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [curve_circle.outputs["Curve"], transform_5]}, + ) + + fill_curve_4 = nw.new_node(Nodes.FillCurve, input_kwargs={"Curve": join_geometry_6}) + + add = nw.new_node(Nodes.Math, input_kwargs={0: reroute, 1: -0.0100}) + + combine_xyz_4 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": add}) + + transform_6 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": fill_curve_4, "Translation": combine_xyz_4}, + ) + + extrude_mesh_4 = nw.new_node( + Nodes.ExtrudeMesh, + input_kwargs={ + "Mesh": transform_6, + "Offset Scale": group_input.outputs["Lower Height"], + "Individual": False, + }, + ) + + add_1 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["Lower Height"], 1: -0.0100} + ) + + combine_xyz_6 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": add_1}) + + curve_line = nw.new_node(Nodes.CurveLine, input_kwargs={"End": combine_xyz_6}) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": curve_line, + "Profile Curve": curve_circle.outputs["Curve"], + }, + ) + + transform_7 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": curve_to_mesh, "Translation": combine_xyz_2}, + ) + + join_geometry_5 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={ + "Geometry": [ + extrude_mesh_2.outputs["Mesh"], + transform_2, + extrude_mesh_4.outputs["Mesh"], + transform_7, + ] + }, + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": sink_interior_border, + "Scale": (0.9900, 0.9900, 1.0000), + }, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [transform, sink_interior_border]} + ) + + fill_curve = nw.new_node(Nodes.FillCurve, input_kwargs={"Curve": join_geometry}) + + extrude_mesh_1 = nw.new_node( + Nodes.ExtrudeMesh, + input_kwargs={ + "Mesh": fill_curve, + "Offset Scale": group_input.outputs["Lower Height"], + }, + ) + + position = nw.new_node(Nodes.InputPosition) + + separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": position}) + + less_than = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["Z"], 1: 0.0000}, + attrs={"operation": "LESS_THAN"}, + ) + + position_1 = nw.new_node(Nodes.InputPosition) + + separate_xyz_1 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": position_1}) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: separate_xyz_1.outputs["X"], + 1: group_input.outputs["Curvature"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_2 = 
nw.new_node( + Nodes.Math, + input_kwargs={ + 0: separate_xyz_1.outputs["Y"], + 1: group_input.outputs["Curvature"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": multiply_1, + "Y": multiply_2, + "Z": separate_xyz_1.outputs["Z"], + }, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": extrude_mesh_1.outputs["Mesh"], + "Selection": less_than, + "Position": combine_xyz, + }, + ) + + add_2 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["Width"], + 1: group_input.outputs["Margin"], + }, + ) + + add_3 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["Depth"], + 1: group_input.outputs["Margin"], + }, + ) + + add_4 = nw.new_node( + Nodes.Math, input_kwargs={0: add_3, 1: group_input.outputs["WaterTapMargin"]} + ) + + quadrilateral_1 = nw.new_node( + "GeometryNodeCurvePrimitiveQuadrilateral", + input_kwargs={"Width": add_4, "Height": add_2}, + ) + + multiply_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["WaterTapMargin"], 1: -0.5000}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_7 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"X": multiply_3}) + + transform_8 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": quadrilateral_1, "Translation": combine_xyz_7}, + ) + + fillet_curve_1 = nw.new_node( + "GeometryNodeFilletCurve", + input_kwargs={"Curve": transform_8, "Count": 10, "Radius": multiply}, + attrs={"mode": "POLY"}, + ) + + join_geometry_2 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [sink_interior_border, fillet_curve_1]}, + ) + + fill_curve_2 = nw.new_node(Nodes.FillCurve, input_kwargs={"Curve": join_geometry_2}) + + multiply_4 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Lower Height"], 1: -1.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + add_5 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["Upper Height"], 1: multiply_4} + ) + + extrude_mesh_3 = nw.new_node( + Nodes.ExtrudeMesh, input_kwargs={"Mesh": fill_curve_2, "Offset Scale": add_5} + ) + + reroute_1 = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": group_input.outputs["Lower Height"]} + ) + + combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": reroute_1}) + + transform_3 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": extrude_mesh_3.outputs["Mesh"], + "Translation": combine_xyz_3, + }, + ) + + join_geometry_3 = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": transform_3} + ) + + # watertap = nw.new_node(nodegroup_water_tap().name, input_kwargs={'Tap': group_input.outputs['Tap']}) + + add_6 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["Depth"], + 1: group_input.outputs["WaterTapMargin"], + }, + ) + + multiply_5 = nw.new_node( + Nodes.Math, input_kwargs={0: add_6, 1: -0.5000}, attrs={"operation": "MULTIPLY"} + ) + + combine_xyz_8 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": multiply_5, "Z": group_input.outputs["Upper Height"]}, + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={ + "Geometry": [ + join_geometry_5, + set_position, + join_geometry_3, + ] # , transform_geometry] + }, + ) + + set_material = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": join_geometry_1, + "Material": group_input.outputs["Sink"], + }, + ) + + add_7 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["WaterTapMargin"], + 1: group_input.outputs["Margin"], + 
}, + ) + + divide = nw.new_node( + Nodes.Math, input_kwargs={0: add_7, 1: 2.5600}, attrs={"operation": "DIVIDE"} + ) + + combine_xyz_8 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"X": divide}) + + set_position_1 = nw.new_node( + Nodes.SetPosition, + input_kwargs={"Geometry": set_material, "Offset": combine_xyz_8}, + ) + + # region CREATE CUTTER (manually added by araistrick post-fact) + + sink_interior_border_simplified = nw.new_node( + "GeometryNodeFilletCurve", + input_kwargs={"Curve": quadrilateral, "Count": 3, "Radius": multiply}, + attrs={"mode": "POLY"}, + ) + + scaled_sink_interior_border = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": sink_interior_border_simplified, + "Scale": (1.01, 1.01, 1), # scale it up just a little to avoid zclip + }, + ) + + fill_interior = nw.new_node( + Nodes.FillCurve, + input_kwargs={"Curve": scaled_sink_interior_border}, + attrs={"mode": "NGONS"}, + ) + + extrude_amt = nw.scalar_add( + group_input.outputs["Lower Height"], group_input.outputs["Upper Height"], 0.05 + ) + extrude = nw.new_node( + Nodes.ExtrudeMesh, + input_kwargs={"Mesh": fill_interior, "Offset Scale": extrude_amt}, + ) + + # same translation as set_position_1, to keep it in sync + setpos_move_cutter = nw.new_node( + Nodes.SetPosition, input_kwargs={"Geometry": extrude, "Offset": combine_xyz_8} + ) + + # endregion + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": set_position_1, "Cutter": setpos_move_cutter}, + ) + + +def geometry_node_to_bbox(nw: NodeWrangler): + # Code generated using version 2.6.5 of the node_transpiler + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) + + bounding_box = nw.new_node( + Nodes.BoundingBox, input_kwargs={"Geometry": group_input.outputs["Geometry"]} + ) + + transform_geometry = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": bounding_box, "Scale": (0.100, 0.100, 0.1000)}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": transform_geometry}, + attrs={"is_active_output": True}, + ) diff --git a/infinigen/assets/objects/table_decorations/utils.py b/infinigen/assets/objects/table_decorations/utils.py new file mode 100644 index 000000000..340781c88 --- /dev/null +++ b/infinigen/assets/objects/table_decorations/utils.py @@ -0,0 +1,655 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
+ +# Authors: Yiming Zuo + + +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler + + +@node_utils.to_nodegroup( + "nodegroup_star_profile", singleton=False, type="GeometryNodeTree" +) +def nodegroup_star_profile(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketInt", "Resolution", 64), + ("NodeSocketInt", "Points", 64), + ("NodeSocketFloatDistance", "Inner Radius", 0.9000), + ], + ) + + star = nw.new_node( + "GeometryNodeCurveStar", + input_kwargs={ + "Points": group_input.outputs["Points"], + "Inner Radius": group_input.outputs["Inner Radius"], + "Outer Radius": 1.0000, + }, + ) + + resample_curve = nw.new_node( + Nodes.ResampleCurve, + input_kwargs={ + "Curve": star.outputs["Curve"], + "Count": group_input.outputs["Resolution"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Curve": resample_curve}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_flip_index", singleton=False, type="GeometryNodeTree" +) +def nodegroup_flip_index(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + index = nw.new_node(Nodes.Index) + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketInt", "V Resolution", 0), + ("NodeSocketInt", "U Resolution", 0), + ], + ) + + modulo = nw.new_node( + Nodes.Math, + input_kwargs={0: index, 1: group_input.outputs["V Resolution"]}, + attrs={"operation": "MODULO"}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: modulo, 1: group_input.outputs["U Resolution"]}, + attrs={"operation": "MULTIPLY"}, + ) + + divide = nw.new_node( + Nodes.Math, + input_kwargs={0: index, 1: group_input.outputs["V Resolution"]}, + attrs={"operation": "DIVIDE"}, + ) + + floor = nw.new_node( + Nodes.Math, input_kwargs={0: divide}, attrs={"operation": "FLOOR"} + ) + + add = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: floor}) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Index": add}, attrs={"is_active_output": True} + ) + + +@node_utils.to_nodegroup( + "nodegroup_cylinder_side", singleton=False, type="GeometryNodeTree" +) +def nodegroup_cylinder_side(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketInt", "U Resolution", 32), + ("NodeSocketInt", "V Resolution", 0), + ], + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["V Resolution"], 1: 1.0000}, + attrs={"operation": "SUBTRACT"}, + ) + + cylinder = nw.new_node( + "GeometryNodeMeshCylinder", + input_kwargs={ + "Vertices": group_input.outputs["U Resolution"], + "Side Segments": subtract, + }, + ) + + store_named_attribute = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": cylinder.outputs["Mesh"], + "Name": "uv_map", + 3: cylinder.outputs["UV Map"], + }, + attrs={"data_type": "FLOAT_VECTOR", "domain": "CORNER"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": store_named_attribute, + "Top": cylinder.outputs["Top"], + "Side": cylinder.outputs["Side"], + "Bottom": cylinder.outputs["Bottom"], + }, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_shifted_circle", singleton=False, type="GeometryNodeTree" +) +def nodegroup_shifted_circle(nw: NodeWrangler): + # Code generated 
using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketInt", "Resolution", 32), + ("NodeSocketFloatDistance", "Radius", 1.0000), + ("NodeSocketFloat", "Z", 0.0000), + ("NodeSocketFloat", "Rot Z", 0.0000), + ], + ) + + curve_circle_3 = nw.new_node( + Nodes.CurveCircle, + input_kwargs={ + "Resolution": group_input.outputs["Resolution"], + "Radius": group_input.outputs["Radius"], + }, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"Z": group_input.outputs["Z"]} + ) + + radians = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Rot Z"]}, + attrs={"operation": "RADIANS"}, + ) + + combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": radians}) + + transform_3 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": curve_circle_3.outputs["Curve"], + "Translation": combine_xyz, + "Rotation": combine_xyz_1, + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": transform_3}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup("nodegroup_lofting", singleton=False, type="GeometryNodeTree") +def nodegroup_lofting(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Profile Curves", None), + ("NodeSocketInt", "U Resolution", 32), + ("NodeSocketInt", "V Resolution", 32), + ("NodeSocketBool", "Use Nurb", False), + ], + ) + + cylinderside = nw.new_node( + nodegroup_cylinder_side().name, + input_kwargs={ + "U Resolution": group_input.outputs["U Resolution"], + "V Resolution": group_input.outputs["V Resolution"], + }, + ) + + index = nw.new_node(Nodes.Index) + + evaluate_on_domain = nw.new_node( + Nodes.EvaluateonDomain, + input_kwargs={1: index}, + attrs={"data_type": "INT", "domain": "CURVE"}, + ) + + equal = nw.new_node( + Nodes.Compare, + input_kwargs={2: evaluate_on_domain.outputs[1]}, + attrs={"data_type": "INT", "operation": "EQUAL"}, + ) + + curve_line = nw.new_node(Nodes.CurveLine) + + domain_size = nw.new_node( + Nodes.DomainSize, + input_kwargs={"Geometry": group_input.outputs["Profile Curves"]}, + attrs={"component": "CURVE"}, + ) + + resample_curve = nw.new_node( + Nodes.ResampleCurve, + input_kwargs={ + "Curve": curve_line, + "Count": domain_size.outputs["Spline Count"], + }, + ) + + instance_on_points_1 = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={ + "Points": group_input.outputs["Profile Curves"], + "Selection": equal, + "Instance": resample_curve, + }, + ) + + realize_instances = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": instance_on_points_1} + ) + + position = nw.new_node(Nodes.InputPosition) + + flipindex = nw.new_node( + nodegroup_flip_index().name, + input_kwargs={ + "V Resolution": domain_size.outputs["Spline Count"], + "U Resolution": group_input.outputs["U Resolution"], + }, + ) + + sample_index_2 = nw.new_node( + Nodes.SampleIndex, + input_kwargs={ + "Geometry": group_input.outputs["Profile Curves"], + 3: position, + "Index": flipindex, + }, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": realize_instances, + "Position": sample_index_2.outputs[2], + }, + ) + + set_spline_type_1 = nw.new_node( + Nodes.SplineType, + input_kwargs={"Curve": set_position}, + attrs={"spline_type": "CATMULL_ROM"}, + ) + + set_spline_type = nw.new_node( + Nodes.SplineType, + 
input_kwargs={"Curve": set_position}, + attrs={"spline_type": "NURBS"}, + ) + + switch = nw.new_node( + Nodes.Switch, + input_kwargs={ + 1: group_input.outputs["Use Nurb"], + 14: set_spline_type_1, + 15: set_spline_type, + }, + ) + + resample_curve_1 = nw.new_node( + Nodes.ResampleCurve, + input_kwargs={ + "Curve": switch.outputs[6], + "Count": group_input.outputs["V Resolution"], + }, + ) + + position_1 = nw.new_node(Nodes.InputPosition) + + flipindex_1 = nw.new_node( + nodegroup_flip_index().name, + input_kwargs={ + "V Resolution": group_input.outputs["U Resolution"], + "U Resolution": group_input.outputs["V Resolution"], + }, + ) + + sample_index_3 = nw.new_node( + Nodes.SampleIndex, + input_kwargs={ + "Geometry": resample_curve_1, + 3: position_1, + "Index": flipindex_1, + }, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + set_position_1 = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": cylinderside.outputs["Geometry"], + "Position": sample_index_3.outputs[2], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": set_position_1, + "Top": cylinderside.outputs["Top"], + "Side": cylinderside.outputs["Side"], + "Bottom": cylinderside.outputs["Bottom"], + }, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_lofting_poly", singleton=False, type="GeometryNodeTree" +) +def nodegroup_lofting_poly(nw: NodeWrangler): + # Code generated using version 2.6.5 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Profile Curves", None), + ("NodeSocketInt", "U Resolution", 32), + ("NodeSocketInt", "V Resolution", 32), + ("NodeSocketBool", "Use Nurb", False), + ], + ) + + reroute_2 = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": group_input.outputs["V Resolution"]} + ) + + cylinderside_001 = nw.new_node( + nodegroup_cylinder_side().name, + input_kwargs={ + "U Resolution": group_input.outputs["U Resolution"], + "V Resolution": reroute_2, + }, + ) + + index = nw.new_node(Nodes.Index) + + evaluate_on_domain = nw.new_node( + Nodes.EvaluateonDomain, + input_kwargs={1: index}, + attrs={"domain": "CURVE", "data_type": "INT"}, + ) + + equal = nw.new_node( + Nodes.Compare, + input_kwargs={2: evaluate_on_domain.outputs[1]}, + attrs={"operation": "EQUAL", "data_type": "INT"}, + ) + + curve_line = nw.new_node(Nodes.CurveLine) + + domain_size = nw.new_node( + Nodes.DomainSize, + input_kwargs={"Geometry": group_input.outputs["Profile Curves"]}, + attrs={"component": "CURVE"}, + ) + + resample_curve = nw.new_node( + Nodes.ResampleCurve, + input_kwargs={ + "Curve": curve_line, + "Count": domain_size.outputs["Spline Count"], + }, + ) + + instance_on_points_1 = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={ + "Points": group_input.outputs["Profile Curves"], + "Selection": equal, + "Instance": resample_curve, + }, + ) + + realize_instances = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": instance_on_points_1} + ) + + position = nw.new_node(Nodes.InputPosition) + + flipindex_001 = nw.new_node( + nodegroup_flip_index().name, + input_kwargs={ + "V Resolution": domain_size.outputs["Spline Count"], + "U Resolution": group_input.outputs["U Resolution"], + }, + ) + + sample_index_2 = nw.new_node( + Nodes.SampleIndex, + input_kwargs={ + "Geometry": group_input.outputs["Profile Curves"], + 3: position, + "Index": flipindex_001, + }, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + 
"Geometry": realize_instances, + "Position": sample_index_2.outputs[2], + }, + ) + + set_spline_type_1 = nw.new_node( + Nodes.SplineType, input_kwargs={"Curve": set_position} + ) + + set_spline_type = nw.new_node( + Nodes.SplineType, + input_kwargs={"Curve": set_position}, + attrs={"spline_type": "NURBS"}, + ) + + switch = nw.new_node( + Nodes.Switch, + input_kwargs={ + 1: group_input.outputs["Use Nurb"], + 14: set_spline_type_1, + 15: set_spline_type, + }, + ) + + resample_curve_1 = nw.new_node( + Nodes.ResampleCurve, + input_kwargs={"Curve": switch.outputs[6], "Count": reroute_2}, + ) + + position_1 = nw.new_node(Nodes.InputPosition) + + flipindex_001_1 = nw.new_node( + nodegroup_flip_index().name, + input_kwargs={ + "V Resolution": group_input.outputs["U Resolution"], + "U Resolution": reroute_2, + }, + ) + + sample_index_3 = nw.new_node( + Nodes.SampleIndex, + input_kwargs={ + "Geometry": resample_curve_1, + 3: position_1, + "Index": flipindex_001_1, + }, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + set_position_1 = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": cylinderside_001.outputs["Geometry"], + "Position": sample_index_3.outputs[2], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": set_position_1, + "Top": cylinderside_001.outputs["Top"], + "Side": cylinderside_001.outputs["Side"], + "Bottom": cylinderside_001.outputs["Bottom"], + }, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_warp_around_curve", singleton=False, type="GeometryNodeTree" +) +def nodegroup_warp_around_curve(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketGeometry", "Curve", None), + ("NodeSocketInt", "Curve Resolution", 1024), + ], + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["Curve Resolution"], 1: 1.0000} + ) + + resample_curve = nw.new_node( + Nodes.ResampleCurve, + input_kwargs={"Curve": group_input.outputs["Curve"], "Count": add}, + ) + + position_1 = nw.new_node(Nodes.InputPosition) + + position_2 = nw.new_node(Nodes.InputPosition) + + separate_xyz_3 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": position_2}) + + bounding_box = nw.new_node( + Nodes.BoundingBox, input_kwargs={"Geometry": group_input.outputs["Geometry"]} + ) + + separate_xyz_1 = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": bounding_box.outputs["Min"]} + ) + + separate_xyz_2 = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": bounding_box.outputs["Max"]} + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": separate_xyz_3.outputs["Z"], + 1: separate_xyz_1.outputs["Z"], + 2: separate_xyz_2.outputs["Z"], + }, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["Curve Resolution"], + 1: map_range.outputs["Result"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + round = nw.new_node( + Nodes.Math, input_kwargs={0: multiply}, attrs={"operation": "ROUND"} + ) + + sample_index_3 = nw.new_node( + Nodes.SampleIndex, + input_kwargs={"Geometry": resample_curve, 3: position_1, "Index": round}, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + normal = nw.new_node(Nodes.InputNormal) + + sample_index_5 = nw.new_node( + Nodes.SampleIndex, + input_kwargs={"Geometry": resample_curve, 3: normal, "Index": round}, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + position = 
nw.new_node(Nodes.InputPosition) + + separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": position}) + + scale = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: sample_index_5.outputs[2], "Scale": separate_xyz.outputs["X"]}, + attrs={"operation": "SCALE"}, + ) + + curve_tangent = nw.new_node(Nodes.CurveTangent) + + sample_index_4 = nw.new_node( + Nodes.SampleIndex, + input_kwargs={"Geometry": resample_curve, 3: curve_tangent, "Index": round}, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + cross_product = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: sample_index_4.outputs[2], 1: sample_index_5.outputs[2]}, + attrs={"operation": "CROSS_PRODUCT"}, + ) + + scale_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: cross_product.outputs["Vector"], + "Scale": separate_xyz.outputs["Y"], + }, + attrs={"operation": "SCALE"}, + ) + + add_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: scale.outputs["Vector"], 1: scale_1.outputs["Vector"]}, + ) + + add_2 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: sample_index_3.outputs[2], 1: add_1.outputs["Vector"]}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + "Position": add_2.outputs["Vector"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": set_position}, + attrs={"is_active_output": True}, + ) diff --git a/infinigen/assets/objects/table_decorations/vase.py b/infinigen/assets/objects/table_decorations/vase.py new file mode 100644 index 000000000..576d9c105 --- /dev/null +++ b/infinigen/assets/objects/table_decorations/vase.py @@ -0,0 +1,436 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
+ +# Authors: Yiming Zuo + +import bpy +from numpy.random import choice, randint, uniform + +import infinigen.core.util.blender as butil +from infinigen.assets.material_assignments import AssetList +from infinigen.assets.objects.table_decorations.utils import ( + nodegroup_lofting, + nodegroup_star_profile, +) +from infinigen.core import surface +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.util.math import FixedSeed + + +class VaseFactory(AssetFactory): + def __init__(self, factory_seed, coarse=False, dimensions=None): + super(VaseFactory, self).__init__(factory_seed, coarse=coarse) + + if dimensions is None: + z = uniform(0.17, 0.5) + x = z * uniform(0.3, 0.6) + dimensions = (x, x, z) + self.dimensions = dimensions + + with FixedSeed(factory_seed): + self.params = self.sample_parameters(dimensions) + self.material_params, self.scratch, self.edge_wear = ( + self.get_material_params() + ) + + self.params.update(self.material_params) + + def get_material_params(self): + material_assignments = AssetList["VaseFactory"]() + params = { + "Material": material_assignments["surface"].assign_material(), + } + wrapped_params = { + k: surface.shaderfunc_to_material(v) for k, v in params.items() + } + + scratch_prob, edge_wear_prob = material_assignments["wear_tear_prob"] + scratch, edge_wear = material_assignments["wear_tear"] + + is_scratch = uniform() < scratch_prob + is_edge_wear = uniform() < edge_wear_prob + if not is_scratch: + scratch = None + + if not is_edge_wear: + edge_wear = None + + return wrapped_params, scratch, edge_wear + + @staticmethod + def sample_parameters(dimensions): + # all in meters + if dimensions is None: + z = uniform(0.25, 0.40) + x = uniform(0.2, 0.4) * z + dimensions = (x, x, z) + + x, y, z = dimensions + + U_resolution = 64 + V_resolution = 64 + + neck_scale = uniform(0.2, 0.8) + + parameters = { + "Profile Inner Radius": choice([1.0, uniform(0.8, 1.0)]), + "Profile Star Points": randint(16, U_resolution // 2 + 1), + "U_resolution": U_resolution, + "V_resolution": V_resolution, + "Height": z, + "Diameter": x, + "Top Scale": neck_scale * uniform(0.8, 1.2), + "Neck Mid Position": uniform(0.7, 0.95), + "Neck Position": 0.5 * neck_scale + 0.5 + uniform(-0.05, 0.05), + "Neck Scale": neck_scale, + "Shoulder Position": uniform(0.3, 0.7), + "Shoulder Thickness": uniform(0.1, 0.25), + "Foot Scale": uniform(0.4, 0.6), + "Foot Height": uniform(0.01, 0.1), + "Material": choice(["glass", "ceramic"]), + } + + return parameters + + def create_asset(self, **params): + bpy.ops.mesh.primitive_plane_add( + size=2, + enter_editmode=False, + align="WORLD", + location=(0, 0, 0), + scale=(1, 1, 1), + ) + obj = bpy.context.active_object + + surface.add_geomod(obj, geometry_vases, apply=True, input_kwargs=self.params) + butil.modify_mesh(obj, "SOLIDIFY", apply=True, thickness=0.002) + butil.modify_mesh(obj, "SUBSURF", apply=True, levels=2, render_levels=2) + + return obj + + def finalize_assets(self, assets): + if self.scratch: + self.scratch.apply(assets) + if self.edge_wear: + self.edge_wear.apply(assets) + + +@node_utils.to_nodegroup( + "nodegroup_vase_profile", singleton=False, type="GeometryNodeTree" +) +def nodegroup_vase_profile(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Profile Curve", None), + 
("NodeSocketFloat", "Height", 0.0000), + ("NodeSocketFloat", "Diameter", 0.0000), + ("NodeSocketFloat", "Top Scale", 0.0000), + ("NodeSocketFloat", "Neck Mid Position", 0.0000), + ("NodeSocketFloat", "Neck Position", 0.5000), + ("NodeSocketFloat", "Neck Scale", 0.0000), + ("NodeSocketFloat", "Shoulder Position", 0.0000), + ("NodeSocketFloat", "Shoulder Thickness", 0.0000), + ("NodeSocketFloat", "Foot Scale", 0.0000), + ("NodeSocketFloat", "Foot Height", 0.0000), + ], + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"Z": group_input.outputs["Height"]} + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["Top Scale"], + 1: group_input.outputs["Diameter"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + neck_top = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": group_input.outputs["Profile Curve"], + "Translation": combine_xyz_1, + "Scale": multiply, + }, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["Height"], + 1: group_input.outputs["Neck Position"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply_1}) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["Diameter"], + 1: group_input.outputs["Neck Scale"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + neck = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": group_input.outputs["Profile Curve"], + "Translation": combine_xyz, + "Scale": multiply_2, + }, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: 1.0000, 1: group_input.outputs["Neck Position"]}, + attrs={"use_clamp": True, "operation": "SUBTRACT"}, + ) + + multiply_add = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: subtract, + 1: group_input.outputs["Neck Mid Position"], + 2: group_input.outputs["Neck Position"], + }, + attrs={"operation": "MULTIPLY_ADD"}, + ) + + multiply_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_add, 1: group_input.outputs["Height"]}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply_3}) + + add = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["Neck Scale"], + 1: group_input.outputs["Top Scale"], + }, + ) + + divide = nw.new_node( + Nodes.Math, input_kwargs={0: add, 1: 2.0000}, attrs={"operation": "DIVIDE"} + ) + + multiply_4 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Diameter"], 1: divide}, + attrs={"operation": "MULTIPLY"}, + ) + + neck_middle = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": group_input.outputs["Profile Curve"], + "Translation": combine_xyz_2, + "Scale": multiply_4, + }, + ) + + neck_geometry = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [neck, neck_middle, neck_top]} + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": group_input.outputs["Shoulder Position"], + 3: group_input.outputs["Foot Height"], + 4: group_input.outputs["Neck Position"], + }, + ) + + subtract_1 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["Neck Position"], + 1: group_input.outputs["Foot Height"], + }, + attrs={"operation": "SUBTRACT"}, + ) + + multiply_5 = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract_1, 1: group_input.outputs["Shoulder Thickness"]}, + attrs={"operation": "MULTIPLY"}, + ) + + add_1 = nw.new_node( + Nodes.Math, input_kwargs={0: map_range.outputs["Result"], 1: multiply_5} + ) + + minimum = 
nw.new_node( + Nodes.Math, + input_kwargs={0: add_1, 1: group_input.outputs["Neck Position"]}, + attrs={"operation": "MINIMUM"}, + ) + + multiply_6 = nw.new_node( + Nodes.Math, + input_kwargs={0: minimum, 1: group_input.outputs["Height"]}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply_6}) + + body_top = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": group_input.outputs["Profile Curve"], + "Translation": combine_xyz_3, + "Scale": group_input.outputs["Diameter"], + }, + ) + + subtract_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: map_range.outputs["Result"], 1: multiply_5}, + attrs={"operation": "SUBTRACT"}, + ) + + maximum = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract_2, 1: group_input.outputs["Foot Height"]}, + attrs={"operation": "MAXIMUM"}, + ) + + multiply_7 = nw.new_node( + Nodes.Math, + input_kwargs={0: maximum, 1: group_input.outputs["Height"]}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_5 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply_7}) + + body_bottom = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": group_input.outputs["Profile Curve"], + "Translation": combine_xyz_5, + "Scale": group_input.outputs["Diameter"], + }, + ) + + body_geometry = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [body_bottom, body_top]} + ) + + multiply_8 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["Foot Height"], + 1: group_input.outputs["Height"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_4 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply_8}) + + multiply_9 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["Diameter"], + 1: group_input.outputs["Foot Scale"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + foot_top = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": group_input, + "Translation": combine_xyz_4, + "Scale": multiply_9, + }, + ) + + foot_bottom = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": group_input, "Scale": multiply_9} + ) + + foot_geometry = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [foot_bottom, foot_top]} + ) + + join_geometry_2 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [foot_geometry, body_geometry, neck_geometry]}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": join_geometry_2}, + attrs={"is_active_output": True}, + ) + + +def geometry_vases(nw: NodeWrangler, **kwargs): + # Code generated using version 2.6.4 of the node_transpiler + starprofile = nw.new_node( + nodegroup_star_profile().name, + input_kwargs={ + "Resolution": kwargs["U_resolution"], + "Points": kwargs["Profile Star Points"], + "Inner Radius": kwargs["Profile Inner Radius"], + }, + ) + + vaseprofile = nw.new_node( + nodegroup_vase_profile().name, + input_kwargs={ + "Profile Curve": starprofile.outputs["Curve"], + "Height": kwargs["Height"], + "Diameter": kwargs["Diameter"], + "Top Scale": kwargs["Top Scale"], + "Neck Mid Position": kwargs["Neck Mid Position"], + "Neck Position": kwargs["Neck Position"], + "Neck Scale": kwargs["Neck Scale"], + "Shoulder Position": kwargs["Shoulder Position"], + "Shoulder Thickness": kwargs["Shoulder Thickness"], + "Foot Scale": kwargs["Foot Scale"], + "Foot Height": kwargs["Foot Height"], + }, + ) + + lofting = nw.new_node( + nodegroup_lofting().name, + input_kwargs={ + "Profile Curves": vaseprofile, + "U Resolution": 64, + "V Resolution": 64, + }, + ) 
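+    # Descriptive note: the loft above closes the vase at the top; the "Top"
+    # faces are deleted below so the vase is open, and the sampled material is
+    # then applied to the remaining surface.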
+ + delete_geometry = nw.new_node( + Nodes.DeleteGeometry, + input_kwargs={ + "Geometry": lofting.outputs["Geometry"], + "Selection": lofting.outputs["Top"], + }, + ) + + set_material = nw.new_node( + Nodes.SetMaterial, + input_kwargs={"Geometry": delete_geometry, "Material": kwargs["Material"]}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": set_material}, + attrs={"is_active_output": True}, + ) diff --git a/infinigen/assets/tables/__init__.py b/infinigen/assets/objects/tables/__init__.py similarity index 77% rename from infinigen/assets/tables/__init__.py rename to infinigen/assets/objects/tables/__init__.py index 96d9cfa0b..7fddf378a 100644 --- a/infinigen/assets/tables/__init__.py +++ b/infinigen/assets/objects/tables/__init__.py @@ -3,5 +3,5 @@ # Authors: Lingjie Mei from .cocktail_table import TableCocktailFactory -from .dining_table import TableDiningFactory, SideTableFactory, CoffeeTableFactory +from .dining_table import CoffeeTableFactory, SideTableFactory, TableDiningFactory from .table_top import TableTopFactory diff --git a/infinigen/assets/objects/tables/cocktail_table.py b/infinigen/assets/objects/tables/cocktail_table.py new file mode 100644 index 000000000..8decf48bf --- /dev/null +++ b/infinigen/assets/objects/tables/cocktail_table.py @@ -0,0 +1,344 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: +# - Yiming Zuo: primary author +# - Alexander Raistrick: implement placeholder + +import bpy +from numpy.random import choice, uniform + +from infinigen.assets.material_assignments import AssetList +from infinigen.assets.objects.tables.legs.single_stand import ( + nodegroup_generate_single_stand, +) +from infinigen.assets.objects.tables.legs.straight import ( + nodegroup_generate_leg_straight, +) +from infinigen.assets.objects.tables.legs.wheeled import nodegroup_wheeled_leg +from infinigen.assets.objects.tables.strechers import nodegroup_strecher +from infinigen.assets.objects.tables.table_top import nodegroup_generate_table_top +from infinigen.assets.objects.tables.table_utils import ( + nodegroup_create_anchors, + nodegroup_create_legs_and_strechers, +) +from infinigen.core import surface, tagging +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.surface import NoApply +from infinigen.core.util.math import FixedSeed + + +@node_utils.to_nodegroup( + "geometry_create_legs", singleton=False, type="GeometryNodeTree" +) +def geometry_create_legs(nw: NodeWrangler, **kwargs): + createanchors = nw.new_node( + nodegroup_create_anchors().name, + input_kwargs={ + "Profile N-gon": kwargs["Leg Number"], + "Profile Width": kwargs["Leg Placement Top Relative Scale"] + * kwargs["Top Profile Width"], + "Profile Aspect Ratio": 1.0000, + }, + ) + + if kwargs["Leg Style"] == "single_stand": + leg = nw.new_node( + nodegroup_generate_single_stand(**kwargs).name, + input_kwargs={ + "Leg Height": kwargs["Leg Height"], + "Leg Diameter": kwargs["Leg Diameter"], + "Resolution": 64, + }, + ) + + leg = nw.new_node( + nodegroup_create_legs_and_strechers().name, + input_kwargs={ + "Anchors": createanchors, + "Keep Legs": True, + "Leg Instance": leg, + "Table Height": kwargs["Top Height"], + "Leg Bottom Relative Scale": kwargs[ + "Leg Placement Bottom Relative Scale" + ], + "Align Leg X rot": 
True, + }, + ) + + elif kwargs["Leg Style"] == "straight": + leg = nw.new_node( + nodegroup_generate_leg_straight(**kwargs).name, + input_kwargs={ + "Leg Height": kwargs["Leg Height"], + "Leg Diameter": kwargs["Leg Diameter"], + "Resolution": 32, + "N-gon": kwargs["Leg NGon"], + "Fillet Ratio": 0.1, + }, + ) + + strecher = nw.new_node( + nodegroup_strecher().name, + input_kwargs={"Profile Width": kwargs["Leg Diameter"] * 0.5}, + ) + + leg = nw.new_node( + nodegroup_create_legs_and_strechers().name, + input_kwargs={ + "Anchors": createanchors, + "Keep Legs": True, + "Leg Instance": leg, + "Table Height": kwargs["Top Height"], + "Strecher Instance": strecher, + "Strecher Index Increment": kwargs["Strecher Increament"], + "Strecher Relative Position": kwargs["Strecher Relative Pos"], + "Leg Bottom Relative Scale": kwargs[ + "Leg Placement Bottom Relative Scale" + ], + "Align Leg X rot": True, + }, + ) + + elif kwargs["Leg Style"] == "wheeled": + leg = nw.new_node( + nodegroup_wheeled_leg(**kwargs).name, + input_kwargs={ + "Joint Height": kwargs["Leg Joint Height"], + "Leg Diameter": kwargs["Leg Diameter"], + "Top Height": kwargs["Top Height"], + "Wheel Width": kwargs["Leg Wheel Width"], + "Wheel Rotation": kwargs["Leg Wheel Rot"], + "Pole Length": kwargs["Leg Pole Length"], + "Leg Number": kwargs["Leg Pole Number"], + }, + ) + + else: + raise NotImplementedError + + leg = nw.new_node( + Nodes.SetMaterial, + input_kwargs={"Geometry": leg, "Material": kwargs["LegMaterial"]}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": leg}, + attrs={"is_active_output": True}, + ) + + +def geometry_assemble_table(nw: NodeWrangler, **kwargs): + # Code generated using version 2.6.4 of the node_transpiler + + generatetabletop = nw.new_node( + nodegroup_generate_table_top().name, + input_kwargs={ + "Thickness": kwargs["Top Thickness"], + "N-gon": kwargs["Top Profile N-gon"], + "Profile Width": kwargs["Top Profile Width"], + "Aspect Ratio": kwargs["Top Profile Aspect Ratio"], + "Fillet Ratio": kwargs["Top Profile Fillet Ratio"], + "Fillet Radius Vertical": kwargs["Top Vertical Fillet Ratio"], + }, + ) + + tabletop_instance = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": generatetabletop, + "Translation": (0.0000, 0.0000, kwargs["Top Height"]), + }, + ) + + tabletop_instance = nw.new_node( + Nodes.SetMaterial, + input_kwargs={"Geometry": tabletop_instance, "Material": kwargs["TopMaterial"]}, + ) + + legs = nw.new_node(geometry_create_legs(**kwargs).name) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [tabletop_instance, legs]} + ) + + resample_curve = nw.new_node( + Nodes.ResampleCurve, input_kwargs={"Curve": generatetabletop.outputs["Curve"]} + ) + fill_curve = nw.new_node(Nodes.FillCurve, input_kwargs={"Curve": resample_curve}) + + voff = kwargs["Top Height"] + kwargs["Top Thickness"] + extrude_mesh = nw.new_node( + Nodes.ExtrudeMesh, + input_kwargs={"Mesh": fill_curve, "Offset Scale": -voff, "Individual": False}, + ) + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [extrude_mesh.outputs["Mesh"], fill_curve]}, + ) + transform_geometry_1 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": join_geometry_1, "Translation": (0, 0, voff)}, + ) + switch = nw.new_node( + Nodes.Switch, + input_kwargs={ + 1: kwargs["is_placeholder"], + 14: join_geometry, + 15: transform_geometry_1, + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": switch}, + 
attrs={"is_active_output": True}, + ) + + +class TableCocktailFactory(AssetFactory): + def __init__(self, factory_seed, coarse=False, dimensions=None): + super(TableCocktailFactory, self).__init__(factory_seed, coarse=coarse) + + self.dimensions = dimensions + + with FixedSeed(factory_seed): + self.params = self.sample_parameters(dimensions) + + # self.clothes_scatter = ClothesCover(factory_fn=blanket.BlanketFactory, width=log_uniform(.8, 1.2), + # size=uniform(.8, 1.2)) if uniform() < .3 else NoApply() + self.clothes_scatter = NoApply() + self.material_params, self.scratch, self.edge_wear = ( + self.get_material_params() + ) + + self.params.update(self.material_params) + + def get_material_params(self): + material_assignments = AssetList["TableCocktailFactory"]() + params = { + "TopMaterial": material_assignments["top"].assign_material(), + "LegMaterial": material_assignments["leg"].assign_material(), + } + wrapped_params = { + k: surface.shaderfunc_to_material(v) for k, v in params.items() + } + + scratch_prob, edge_wear_prob = material_assignments["wear_tear_prob"] + scratch, edge_wear = material_assignments["wear_tear"] + + is_scratch = uniform() < scratch_prob + is_edge_wear = uniform() < edge_wear_prob + if not is_scratch: + scratch = None + + if not is_edge_wear: + edge_wear = None + + return wrapped_params, scratch, edge_wear + + @staticmethod + def sample_parameters(dimensions): + # all in meters + if dimensions is None: + x = uniform(0.5, 0.8) + z = uniform(1.0, 1.5) + dimensions = (x, x, z) + + x, y, z = dimensions + + NGon = choice([4, 32]) + if NGon >= 32: + round_table = True + else: + round_table = False + + leg_style = choice(["straight", "single_stand"]) + if leg_style == "single_stand": + leg_number = 1 + leg_diameter = uniform(0.7 * x, 0.9 * x) + + leg_curve_ctrl_pts = [ + (0.0, uniform(0.1, 0.2)), + (0.5, uniform(0.1, 0.2)), + (0.9, uniform(0.2, 0.3)), + (1.0, 1.0), + ] + + elif leg_style == "straight": + leg_diameter = uniform(0.05, 0.07) + + if round_table: + leg_number = choice([3, 4]) + else: + leg_number = NGon + + leg_curve_ctrl_pts = [ + (0.0, 1.0), + (0.4, uniform(0.85, 0.95)), + (1.0, uniform(0.4, 0.6)), + ] + + else: + raise NotImplementedError + + top_thickness = uniform(0.02, 0.05) + + parameters = { + "Top Profile N-gon": 32 if round_table else 4, + "Top Profile Width": x if round_table else 1.414 * x, + "Top Profile Aspect Ratio": 1.0, + "Top Profile Fillet Ratio": 0.499 if round_table else uniform(0.0, 0.05), + "Top Thickness": top_thickness, + "Top Vertical Fillet Ratio": uniform(0.1, 0.3), + # 'Top Material': choice(['marble', 'tiled_wood', 'plastic', 'glass']), + "Height": z, + "Top Height": z - top_thickness, + "Leg Number": leg_number, + "Leg Style": leg_style, + "Leg NGon": choice([4, 32]), + "Leg Placement Top Relative Scale": 0.7, + "Leg Placement Bottom Relative Scale": uniform(1.1, 1.3), + "Leg Height": 1.0, + "Leg Diameter": leg_diameter, + "Leg Curve Control Points": leg_curve_ctrl_pts, + # 'Leg Material': choice(['metal', 'wood', 'glass']), + "Strecher Relative Pos": uniform(0.2, 0.6), + "Strecher Increament": choice([0, 1, 2]), + } + + return parameters + + def _execute_geonodes(self, is_placeholder): + bpy.ops.mesh.primitive_plane_add( + size=2, + enter_editmode=False, + align="WORLD", + location=(0, 0, 0), + scale=(1, 1, 1), + ) + obj = bpy.context.active_object + + kwargs = {**self.params, "is_placeholder": is_placeholder} + surface.add_geomod( + obj, geometry_assemble_table, apply=True, input_kwargs=kwargs + ) + 
tagging.tag_system.relabel_obj(obj) + + return obj + + def create_placeholder(self, **kwargs) -> bpy.types.Object: + return self._execute_geonodes(is_placeholder=True) + + def create_asset(self, **_): + return self._execute_geonodes(is_placeholder=False) + + def finalize_assets(self, assets): + self.clothes_scatter.apply(assets) + if self.scratch: + self.scratch.apply(assets) + if self.edge_wear: + self.edge_wear.apply(assets) diff --git a/infinigen/assets/objects/tables/dining_table.py b/infinigen/assets/objects/tables/dining_table.py new file mode 100644 index 000000000..923ba07f8 --- /dev/null +++ b/infinigen/assets/objects/tables/dining_table.py @@ -0,0 +1,364 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Yiming Zuo + + +import bpy +from numpy.random import choice, normal, uniform + +from infinigen.assets.material_assignments import AssetList +from infinigen.assets.objects.tables.legs.single_stand import ( + nodegroup_generate_single_stand, +) +from infinigen.assets.objects.tables.legs.square import nodegroup_generate_leg_square +from infinigen.assets.objects.tables.legs.straight import ( + nodegroup_generate_leg_straight, +) +from infinigen.assets.objects.tables.strechers import nodegroup_strecher +from infinigen.assets.objects.tables.table_top import nodegroup_generate_table_top +from infinigen.assets.objects.tables.table_utils import ( + nodegroup_create_anchors, + nodegroup_create_legs_and_strechers, +) +from infinigen.core import surface, tagging +from infinigen.core import tags as t +from infinigen.core.nodes import node_utils + +# from infinigen.assets.materials import metal, metal_shader_list +# from infinigen.assets.materials.fabrics import fabric +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.surface import NoApply +from infinigen.core.util.math import FixedSeed + + +@node_utils.to_nodegroup( + "geometry_create_legs", singleton=False, type="GeometryNodeTree" +) +def geometry_create_legs(nw: NodeWrangler, **kwargs): + createanchors = nw.new_node( + nodegroup_create_anchors().name, + input_kwargs={ + "Profile N-gon": kwargs["Leg Number"], + "Profile Width": kwargs["Leg Placement Top Relative Scale"] + * kwargs["Top Profile Width"], + "Profile Aspect Ratio": kwargs["Top Profile Aspect Ratio"], + }, + ) + + if kwargs["Leg Style"] == "single_stand": + leg = nw.new_node( + nodegroup_generate_single_stand(**kwargs).name, + input_kwargs={ + "Leg Height": kwargs["Leg Height"], + "Leg Diameter": kwargs["Leg Diameter"], + "Resolution": 64, + }, + ) + + leg = nw.new_node( + nodegroup_create_legs_and_strechers().name, + input_kwargs={ + "Anchors": createanchors, + "Keep Legs": True, + "Leg Instance": leg, + "Table Height": kwargs["Top Height"], + "Leg Bottom Relative Scale": kwargs[ + "Leg Placement Bottom Relative Scale" + ], + "Align Leg X rot": True, + }, + ) + + elif kwargs["Leg Style"] == "straight": + leg = nw.new_node( + nodegroup_generate_leg_straight(**kwargs).name, + input_kwargs={ + "Leg Height": kwargs["Leg Height"], + "Leg Diameter": kwargs["Leg Diameter"], + "Resolution": 32, + "N-gon": kwargs["Leg NGon"], + "Fillet Ratio": 0.1, + }, + ) + + strecher = nw.new_node( + nodegroup_strecher().name, + input_kwargs={"Profile Width": kwargs["Leg Diameter"] * 0.5}, + ) + + leg = nw.new_node( + nodegroup_create_legs_and_strechers().name, + 
input_kwargs={ + "Anchors": createanchors, + "Keep Legs": True, + "Leg Instance": leg, + "Table Height": kwargs["Top Height"], + "Strecher Instance": strecher, + "Strecher Index Increment": kwargs["Strecher Increament"], + "Strecher Relative Position": kwargs["Strecher Relative Pos"], + "Leg Bottom Relative Scale": kwargs[ + "Leg Placement Bottom Relative Scale" + ], + "Align Leg X rot": True, + }, + ) + + elif kwargs["Leg Style"] == "square": + leg = nw.new_node( + nodegroup_generate_leg_square(**kwargs).name, + input_kwargs={ + "Height": kwargs["Leg Height"], + "Width": 0.707 + * kwargs["Leg Placement Top Relative Scale"] + * kwargs["Top Profile Width"] + * kwargs["Top Profile Aspect Ratio"], + "Has Bottom Connector": (kwargs["Strecher Increament"] > 0), + "Profile Width": kwargs["Leg Diameter"], + }, + ) + + leg = nw.new_node( + nodegroup_create_legs_and_strechers().name, + input_kwargs={ + "Anchors": createanchors, + "Keep Legs": True, + "Leg Instance": leg, + "Table Height": kwargs["Top Height"], + "Leg Bottom Relative Scale": kwargs[ + "Leg Placement Bottom Relative Scale" + ], + "Align Leg X rot": True, + }, + ) + + else: + raise NotImplementedError + + leg = nw.new_node( + Nodes.SetMaterial, + input_kwargs={"Geometry": leg, "Material": kwargs["LegMaterial"]}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": leg}, + attrs={"is_active_output": True}, + ) + + +def geometry_assemble_table(nw: NodeWrangler, **kwargs): + # Code generated using version 2.6.4 of the node_transpiler + + generatetabletop = nw.new_node( + nodegroup_generate_table_top().name, + input_kwargs={ + "Thickness": kwargs["Top Thickness"], + "N-gon": kwargs["Top Profile N-gon"], + "Profile Width": kwargs["Top Profile Width"], + "Aspect Ratio": kwargs["Top Profile Aspect Ratio"], + "Fillet Ratio": kwargs["Top Profile Fillet Ratio"], + "Fillet Radius Vertical": kwargs["Top Vertical Fillet Ratio"], + }, + ) + + tabletop_instance = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": generatetabletop, + "Translation": (0.0000, 0.0000, kwargs["Top Height"]), + }, + ) + + tabletop_instance = nw.new_node( + Nodes.SetMaterial, + input_kwargs={"Geometry": tabletop_instance, "Material": kwargs["TopMaterial"]}, + ) + + legs = nw.new_node(geometry_create_legs(**kwargs).name) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [tabletop_instance, legs]} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": join_geometry}, + attrs={"is_active_output": True}, + ) + + +class TableDiningFactory(AssetFactory): + def __init__(self, factory_seed, coarse=False, dimensions=None): + super(TableDiningFactory, self).__init__(factory_seed, coarse=coarse) + + self.dimensions = dimensions + + with FixedSeed(factory_seed): + self.params = self.sample_parameters(dimensions) + + # self.clothes_scatter = ClothesCover(factory_fn=blanket.BlanketFactory, width=log_uniform(.8, 1.2), + # size=uniform(.8, 1.2)) if uniform() < .3 else NoApply() + self.clothes_scatter = NoApply() + self.material_params, self.scratch, self.edge_wear = ( + self.get_material_params() + ) + + self.params.update(self.material_params) + + def get_material_params(self): + material_assignments = AssetList["TableDiningFactory"]() + params = { + "TopMaterial": material_assignments["top"].assign_material(), + "LegMaterial": material_assignments["leg"].assign_material(), + } + wrapped_params = { + k: surface.shaderfunc_to_material(v) for k, v in params.items() + } + + scratch_prob, 
edge_wear_prob = material_assignments["wear_tear_prob"] + scratch, edge_wear = material_assignments["wear_tear"] + + is_scratch = uniform() < scratch_prob + is_edge_wear = uniform() < edge_wear_prob + if not is_scratch: + scratch = None + + if not is_edge_wear: + edge_wear = None + + return wrapped_params, scratch, edge_wear + + @staticmethod + def sample_parameters(dimensions): + if dimensions is None: + width = uniform(0.91, 1.16) + + if uniform() < 0.7: + # oblong + length = uniform(1.4, 2.8) + else: + # approx square + length = width * normal(1, 0.1) + + dimensions = (length, width, uniform(0.65, 0.85)) + + # all in meters + x, y, z = dimensions + + NGon = 4 + + leg_style = choice(["straight", "single_stand", "square"], p=[0.5, 0.1, 0.4]) + # leg_style = choice(['straight']) + + if leg_style == "single_stand": + leg_number = 2 + leg_diameter = uniform(0.22 * x, 0.28 * x) + + leg_curve_ctrl_pts = [ + (0.0, uniform(0.1, 0.2)), + (0.5, uniform(0.1, 0.2)), + (0.9, uniform(0.2, 0.3)), + (1.0, 1.0), + ] + + top_scale = uniform(0.6, 0.7) + bottom_scale = 1.0 + + elif leg_style == "square": + leg_number = 2 + leg_diameter = uniform(0.07, 0.10) + + leg_curve_ctrl_pts = None + + top_scale = 0.8 + bottom_scale = 1.0 + + elif leg_style == "straight": + leg_diameter = uniform(0.05, 0.07) + + leg_number = 4 + + leg_curve_ctrl_pts = [ + (0.0, 1.0), + (0.4, uniform(0.85, 0.95)), + (1.0, uniform(0.4, 0.6)), + ] + + top_scale = 0.8 + bottom_scale = uniform(1.0, 1.2) + + else: + raise NotImplementedError + + top_thickness = uniform(0.03, 0.06) + + parameters = { + "Top Profile N-gon": NGon, + "Top Profile Width": 1.414 * x, + "Top Profile Aspect Ratio": y / x, + "Top Profile Fillet Ratio": uniform(0.0, 0.02), + "Top Thickness": top_thickness, + "Top Vertical Fillet Ratio": uniform(0.1, 0.3), + # 'Top Material': choice(['marble', 'tiled_wood', 'metal', 'fabric'], p=[.3, .3, .2, .2]), + "Height": z, + "Top Height": z - top_thickness, + "Leg Number": leg_number, + "Leg Style": leg_style, + "Leg NGon": 4, + "Leg Placement Top Relative Scale": top_scale, + "Leg Placement Bottom Relative Scale": bottom_scale, + "Leg Height": 1.0, + "Leg Diameter": leg_diameter, + "Leg Curve Control Points": leg_curve_ctrl_pts, + # 'Leg Material': choice(['metal', 'wood', 'glass', 'plastic']), + "Strecher Relative Pos": uniform(0.2, 0.6), + "Strecher Increament": choice([0, 1, 2]), + } + + return parameters + + def create_asset(self, **params): + bpy.ops.mesh.primitive_plane_add( + size=2, + enter_editmode=False, + align="WORLD", + location=(0, 0, 0), + scale=(1, 1, 1), + ) + obj = bpy.context.active_object + + # surface.add_geomod(obj, geometry_assemble_table, apply=False, input_kwargs=self.params) + surface.add_geomod( + obj, geometry_assemble_table, apply=True, input_kwargs=self.params + ) + tagging.tag_system.relabel_obj(obj) + assert tagging.tagged_face_mask(obj, {t.Subpart.SupportSurface}).sum() != 0 + + return obj + + def finalize_assets(self, assets): + if self.scratch: + self.scratch.apply(assets) + if self.edge_wear: + self.edge_wear.apply(assets) + + # def finalize_assets(self, assets): + # self.clothes_scatter.apply(assets) + + +class SideTableFactory(TableDiningFactory): + def __init__(self, factory_seed, coarse=False, dimensions=None): + if dimensions is None: + w = 0.55 * normal(1, 0.05) + h = 0.95 * w * normal(1, 0.05) + dimensions = (w, w, h) + super().__init__(factory_seed, coarse=coarse, dimensions=dimensions) + + +class CoffeeTableFactory(TableDiningFactory): + def __init__(self, factory_seed, coarse=False, 
dimensions=None): + if dimensions is None: + dimensions = (uniform(1, 1.5), uniform(0.6, 0.9), uniform(0.4, 0.5)) + super().__init__(factory_seed, coarse=coarse, dimensions=dimensions) diff --git a/infinigen/assets/objects/tables/legs/single_stand.py b/infinigen/assets/objects/tables/legs/single_stand.py new file mode 100644 index 000000000..45ad29833 --- /dev/null +++ b/infinigen/assets/objects/tables/legs/single_stand.py @@ -0,0 +1,52 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Yiming Zuo + + +from infinigen.assets.objects.tables.table_utils import ( + nodegroup_generate_radius_curve, + nodegroup_n_gon_cylinder, +) +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler + + +@node_utils.to_nodegroup( + "nodegroup_generate_single_stand", singleton=False, type="GeometryNodeTree" +) +def nodegroup_generate_single_stand(nw: NodeWrangler, **kwargs): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "Leg Height", 0.0000), + ("NodeSocketFloat", "Leg Diameter", 1.0000), + ("NodeSocketInt", "Resolution", 64), + ], + ) + + generateradiuscurve = nw.new_node( + nodegroup_generate_radius_curve(kwargs["Leg Curve Control Points"]).name, + input_kwargs={"Resolution": group_input.outputs["Resolution"]}, + ) + + ngoncylinder = nw.new_node( + nodegroup_n_gon_cylinder().name, + input_kwargs={ + "Radius Curve": generateradiuscurve, + "Height": group_input.outputs["Leg Height"], + "N-gon": group_input.outputs["Resolution"], + "Profile Width": group_input.outputs["Leg Diameter"], + "Aspect Ratio": 1.0000, + "Fillet Ratio": 0.0000, + "Resolution": group_input.outputs["Resolution"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": ngoncylinder.outputs["Mesh"]}, + attrs={"is_active_output": True}, + ) diff --git a/infinigen/assets/objects/tables/legs/square.py b/infinigen/assets/objects/tables/legs/square.py new file mode 100644 index 000000000..72d51f6c6 --- /dev/null +++ b/infinigen/assets/objects/tables/legs/square.py @@ -0,0 +1,154 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
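+
+# nodegroup_generate_leg_square (below) sweeps a small n-gon profile along a
+# rectangular arc, optionally closed along the floor via "Has Bottom Connector",
+# producing the panel-style legs used by the "square" leg style in dining_table.py.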
+ +# Authors: Yiming Zuo + + +from infinigen.assets.objects.tables.table_utils import ( + nodegroup_merge_curve, + nodegroup_n_gon_profile, +) +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler + + +@node_utils.to_nodegroup( + "nodegroup_generate_leg_square", singleton=False, type="GeometryNodeTree" +) +def nodegroup_generate_leg_square(nw: NodeWrangler, **kwargs): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "Width", 0.0000), + ("NodeSocketFloat", "Height", 0.0000), + ("NodeSocketFloatDistance", "Fillet Radius", 0.0300), + ("NodeSocketBool", "Has Bottom Connector", True), + ("NodeSocketInt", "Profile N-gon", 4), + ("NodeSocketFloatDistance", "Profile Width", 0.1000), + ("NodeSocketFloatDistance", "Profile Aspect Ratio", 0.5000), + ("NodeSocketFloat", "Profile Fillet Ratio", 0.1000), + ], + ) + + add = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Has Bottom Connector"], 1: 4.0000}, + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": group_input.outputs["Has Bottom Connector"], + 3: 4.7124, + 4: 6.2832, + }, + ) + + arc = nw.new_node( + "GeometryNodeCurveArc", + input_kwargs={ + "Resolution": add, + "Radius": 0.7071, + "Sweep Angle": map_range.outputs["Result"], + }, + ) + + mergecurve = nw.new_node( + nodegroup_merge_curve().name, input_kwargs={"Curve": arc.outputs["Curve"]} + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": group_input.outputs["Has Bottom Connector"], + 3: 1.5708, + 4: 3.1416, + }, + ) + + set_curve_tilt = nw.new_node( + Nodes.SetCurveTilt, + input_kwargs={"Curve": mergecurve, "Tilt": map_range_1.outputs["Result"]}, + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": set_curve_tilt, + "Rotation": (0.0000, 0.0000, -0.7854), + }, + ) + + transform_1 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": transform, + "Translation": (0.0000, 0.0000, -0.5000), + "Rotation": (1.5708, 0.0000, 0.0000), + }, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": group_input.outputs["Width"], + "Y": 1.0000, + "Z": group_input.outputs["Height"], + }, + ) + + transform_2 = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": transform_1, "Scale": combine_xyz} + ) + + set_curve_radius = nw.new_node( + Nodes.SetCurveRadius, input_kwargs={"Curve": transform_2, "Radius": 1.0000} + ) + + fillet_curve = nw.new_node( + Nodes.FilletCurve, + input_kwargs={ + "Curve": set_curve_radius, + "Count": 8, + "Radius": group_input.outputs["Fillet Radius"], + "Limit Radius": True, + }, + attrs={"mode": "POLY"}, + ) + + ngonprofile = nw.new_node( + nodegroup_n_gon_profile().name, + input_kwargs={ + "Profile N-gon": group_input.outputs["Profile N-gon"], + "Profile Width": group_input.outputs["Profile Width"], + "Profile Aspect Ratio": group_input.outputs["Profile Aspect Ratio"], + "Profile Fillet Ratio": group_input.outputs["Profile Fillet Ratio"], + }, + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": fillet_curve, + "Profile Curve": ngonprofile, + "Fill Caps": True, + }, + ) + + transform_3 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": curve_to_mesh, "Rotation": (0.0000, 0.0000, 1.5708)}, + ) + + set_shade_smooth = nw.new_node( + Nodes.SetShadeSmooth, + input_kwargs={"Geometry": transform_3, "Shade Smooth": False}, + ) + + group_output = 
nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": set_shade_smooth}, + attrs={"is_active_output": True}, + ) diff --git a/infinigen/assets/objects/tables/legs/straight.py b/infinigen/assets/objects/tables/legs/straight.py new file mode 100644 index 000000000..ca47ffed3 --- /dev/null +++ b/infinigen/assets/objects/tables/legs/straight.py @@ -0,0 +1,54 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Yiming Zuo + + +from infinigen.assets.objects.tables.table_utils import ( + nodegroup_generate_radius_curve, + nodegroup_n_gon_cylinder, +) +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler + + +@node_utils.to_nodegroup( + "nodegroup_generate_leg_straight", singleton=False, type="GeometryNodeTree" +) +def nodegroup_generate_leg_straight(nw: NodeWrangler, **kwargs): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "Leg Height", 0.0000), + ("NodeSocketFloat", "Leg Diameter", 1.0000), + ("NodeSocketInt", "Resolution", 0), + ("NodeSocketInt", "N-gon", 32), + ("NodeSocketFloat", "Fillet Ratio", 0.0100), + ], + ) + + generateradiuscurve = nw.new_node( + nodegroup_generate_radius_curve(kwargs["Leg Curve Control Points"]).name, + input_kwargs={"Resolution": group_input.outputs["Resolution"]}, + ) + + ngoncylinder = nw.new_node( + nodegroup_n_gon_cylinder().name, + input_kwargs={ + "Radius Curve": generateradiuscurve, + "Height": group_input.outputs["Leg Height"], + "N-gon": group_input.outputs["N-gon"], + "Profile Width": group_input.outputs["Leg Diameter"], + "Aspect Ratio": 1.0000, + "Fillet Ratio": group_input.outputs["Fillet Ratio"], + "Resolution": group_input.outputs["Resolution"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": ngoncylinder.outputs["Mesh"]}, + attrs={"is_active_output": True}, + ) diff --git a/infinigen/assets/objects/tables/legs/wheeled.py b/infinigen/assets/objects/tables/legs/wheeled.py new file mode 100644 index 000000000..5360fe0aa --- /dev/null +++ b/infinigen/assets/objects/tables/legs/wheeled.py @@ -0,0 +1,462 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
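+
+# nodegroup_chair_wheel (below) models a single caster: a capped-cylinder wheel,
+# an arc-shaped fork and a mounting pole. nodegroup_wheeled_leg then instances
+# several casters around a two-part central column, giving the "wheeled" leg
+# style used by cocktail_table.py.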
+ +# Authors: Yiming Zuo + + +from infinigen.assets.objects.tables.table_top import nodegroup_capped_cylinder +from infinigen.assets.objects.tables.table_utils import ( + nodegroup_align_bottom_to_floor, + nodegroup_arc_top, + nodegroup_create_anchors, + nodegroup_create_legs_and_strechers, + nodegroup_n_gon_cylinder, +) +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler + + +@node_utils.to_nodegroup( + "nodegroup_chair_wheel", singleton=False, type="GeometryNodeTree" +) +def nodegroup_chair_wheel(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "Arc Sweep Angle", 240.0000), + ("NodeSocketFloat", "Wheel Width", 0.0000), + ("NodeSocketFloat", "Wheel Rotation", 0.5000), + ("NodeSocketFloat", "Pole Width", 0.0000), + ("NodeSocketFloat", "Pole Aspect Ratio", 0.6000), + ("NodeSocketFloat", "Pole Length", 3.0000), + ], + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"Y": group_input.outputs["Wheel Width"]} + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Wheel Width"], 1: -1.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Y": multiply}) + + curve_line = nw.new_node( + Nodes.CurveLine, input_kwargs={"Start": combine_xyz_1, "End": combine_xyz_2} + ) + + value = nw.new_node(Nodes.Value) + value.outputs[0].default_value = 0.0200 + + value_1 = nw.new_node(Nodes.Value) + value_1.outputs[0].default_value = 0.5000 + + cappedcylinder = nw.new_node( + nodegroup_capped_cylinder().name, + input_kwargs={ + "Thickness": value, + "Radius": value_1, + "Cap Relative Scale": 0.0100, + }, + ) + + multiply_1 = nw.new_node( + Nodes.Math, input_kwargs={0: value, 1: -1.0000}, attrs={"operation": "MULTIPLY"} + ) + + combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Y": multiply_1}) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": cappedcylinder, + "Translation": combine_xyz, + "Rotation": (-1.5708, 0.0000, 0.0000), + }, + ) + + position = nw.new_node(Nodes.InputPosition) + + align_euler_to_vector = nw.new_node( + Nodes.AlignEulerToVector, input_kwargs={"Vector": position}, attrs={"axis": "Y"} + ) + + instance_on_points = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={ + "Points": curve_line, + "Instance": transform, + "Rotation": align_euler_to_vector, + }, + ) + + add = nw.new_node(Nodes.Math, input_kwargs={0: value_1, 1: 0.0800}) + + arctop = nw.new_node( + nodegroup_arc_top().name, + input_kwargs={ + "Diameter": add, + "Sweep Angle": group_input.outputs["Arc Sweep Angle"], + }, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Wheel Width"], 1: 2.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + quadrilateral = nw.new_node( + "GeometryNodeCurvePrimitiveQuadrilateral", + input_kwargs={"Width": multiply_2, "Height": 0.0200}, + ) + + fillet_curve = nw.new_node( + "GeometryNodeFilletCurve", + input_kwargs={ + "Curve": quadrilateral, + "Count": 4, + "Radius": 0.0300, + "Limit Radius": True, + }, + attrs={"mode": "POLY"}, + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": arctop, + "Profile Curve": fillet_curve, + "Fill Caps": True, + }, + ) + + multiply_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: value_1, 1: 0.1000}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_4 = nw.new_node( + 
Nodes.Math, + input_kwargs={0: value_1, 1: 0.4000}, + attrs={"operation": "MULTIPLY"}, + ) + + cylinder = nw.new_node( + "GeometryNodeMeshCylinder", + input_kwargs={ + "Side Segments": 8, + "Fill Segments": 4, + "Radius": multiply_3, + "Depth": multiply_4, + }, + ) + + multiply_5 = nw.new_node( + Nodes.Math, + input_kwargs={0: value_1, 1: 0.4400}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_6 = nw.new_node( + Nodes.Math, + input_kwargs={0: value_1, 1: 0.4500}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_3 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": multiply_5, "Z": multiply_6} + ) + + transform_2 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": cylinder.outputs["Mesh"], + "Translation": combine_xyz_3, + }, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [instance_on_points, curve_to_mesh, transform_2]}, + ) + + multiply_7 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_5, 1: -1.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_4 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"X": multiply_7}) + + transform_6 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": join_geometry, "Translation": combine_xyz_4}, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Pole Length"], 1: 0.1500}, + attrs={"operation": "SUBTRACT"}, + ) + + multiply_add = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Pole Width"], 1: -0.3535, 2: -0.3000}, + attrs={"operation": "MULTIPLY_ADD"}, + ) + + combine_xyz_5 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": subtract, "Z": multiply_add} + ) + + radians = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Wheel Rotation"]}, + attrs={"operation": "RADIANS"}, + ) + + combine_xyz_6 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": radians}) + + transform_4 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": transform_6, + "Translation": combine_xyz_5, + "Rotation": combine_xyz_6, + }, + ) + + curve_line_1 = nw.new_node( + Nodes.CurveLine, + input_kwargs={ + "Start": (1.0000, 0.0000, -1.0000), + "End": (1.0000, 0.0000, 1.0000), + }, + ) + + ngoncylinder = nw.new_node( + nodegroup_n_gon_cylinder().name, + input_kwargs={ + "Radius Curve": curve_line_1, + "Height": group_input.outputs["Pole Length"], + "N-gon": 4, + "Profile Width": group_input.outputs["Pole Width"], + "Aspect Ratio": group_input.outputs["Pole Aspect Ratio"], + "Fillet Ratio": 0.1500, + "Resolution": 32, + }, + ) + + transform_3 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": ngoncylinder.outputs["Mesh"], + "Rotation": (0.0000, -1.5708, 0.0000), + }, + ) + + subdivision_surface_1 = nw.new_node( + Nodes.SubdivisionSurface, input_kwargs={"Mesh": transform_3, "Level": 0} + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [transform_4, subdivision_surface_1]}, + ) + + value_2 = nw.new_node(Nodes.Value) + value_2.outputs[0].default_value = 0.1500 + + transform_geometry = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": join_geometry_1, "Scale": value_2} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": transform_geometry}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_wheeled_leg", singleton=False, type="GeometryNodeTree" +) +def nodegroup_wheeled_leg(nw: NodeWrangler, **kwargs): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + 
Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "Joint Height", 0.0000), + ("NodeSocketFloat", "Leg Diameter", 0.0000), + ("NodeSocketFloat", "Top Height", 0.0000), + ("NodeSocketFloat", "Arc Sweep Angle", 240.0000), + ("NodeSocketFloat", "Wheel Width", 0.1300), + ("NodeSocketFloat", "Wheel Rotation", 0.5000), + ("NodeSocketFloat", "Pole Length", 1.8000), + ("NodeSocketInt", "Leg Number", 5), + ], + ) + + value_1 = nw.new_node(Nodes.Value) + value_1.outputs[0].default_value = 0.0010 + + createanchors = nw.new_node( + nodegroup_create_anchors().name, + input_kwargs={ + "Profile N-gon": group_input.outputs["Leg Number"], + "Profile Width": value_1, + "Profile Aspect Ratio": 1.0000, + }, + ) + + chair_wheel = nw.new_node( + nodegroup_chair_wheel().name, + input_kwargs={ + "Arc Sweep Angle": group_input.outputs["Arc Sweep Angle"], + "Wheel Width": group_input.outputs["Wheel Width"], + "Wheel Rotation": group_input.outputs["Wheel Rotation"], + "Pole Width": 0.5000, + "Pole Length": group_input.outputs["Pole Length"], + }, + ) + + transform_geometry = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": chair_wheel, "Rotation": (0.0000, 1.5708, 0.0000)}, + ) + + divide = nw.new_node( + Nodes.Math, input_kwargs={0: 2.0000, 1: value_1}, attrs={"operation": "DIVIDE"} + ) + + createlegsandstrechers = nw.new_node( + nodegroup_create_legs_and_strechers().name, + input_kwargs={ + "Anchors": createanchors, + "Keep Legs": True, + "Leg Instance": transform_geometry, + "Table Height": 0.0250, + "Leg Bottom Relative Scale": divide, + "Strecher Index Increment": 1, + "Strecher Relative Position": 1.0000, + "Leg Bottom Offset": 0.0250, + "Align Leg X rot": True, + }, + ) + + alignbottomtofloor = nw.new_node( + nodegroup_align_bottom_to_floor().name, + input_kwargs={"Geometry": createlegsandstrechers}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Leg Diameter"]}, + attrs={"operation": "MULTIPLY"}, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["Joint Height"], + 1: alignbottomtofloor.outputs["Offset"], + }, + attrs={"operation": "SUBTRACT"}, + ) + + cylinder = nw.new_node( + "GeometryNodeMeshCylinder", + input_kwargs={"Vertices": 64, "Radius": multiply, "Depth": subtract}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, input_kwargs={0: subtract}, attrs={"operation": "MULTIPLY"} + ) + + add = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_1, 1: alignbottomtofloor.outputs["Offset"]}, + ) + + combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": add}) + + transform_geometry_2 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": cylinder.outputs["Mesh"], + "Translation": combine_xyz_1, + }, + ) + + subtract_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply, 1: 0.0025}, + attrs={"operation": "SUBTRACT"}, + ) + + subtract_2 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["Top Height"], + 1: group_input.outputs["Joint Height"], + }, + attrs={"operation": "SUBTRACT"}, + ) + + cylinder_1 = nw.new_node( + "GeometryNodeMeshCylinder", + input_kwargs={"Vertices": 64, "Radius": subtract_1, "Depth": subtract_2}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, input_kwargs={1: subtract_2}, attrs={"operation": "MULTIPLY"} + ) + + subtract_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Top Height"], 1: multiply_2}, + attrs={"operation": "SUBTRACT"}, + ) + + combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": subtract_3}) + + 
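+    # cylinder_1 is the upper section of the central column; its depth runs from
+    # the joint height to the table top, so translating it by Top Height minus
+    # half that depth (the unconnected Math input defaults to 0.5) centers it in
+    # that span.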
transform_geometry_3 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": cylinder_1.outputs["Mesh"], + "Translation": combine_xyz_2, + }, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={ + "Geometry": [ + alignbottomtofloor.outputs["Geometry"], + transform_geometry_2, + transform_geometry_3, + ] + }, + ) + + # multiply_3 = nw.new_node(Nodes.Math, + # input_kwargs={0: group_input.outputs["Top Height"], 1: -1.0000}, + # attrs={'operation': 'MULTIPLY'}) + + # combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': multiply_3}) + + # transform_geometry_4 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': join_geometry, 'Translation': combine_xyz_3}) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": join_geometry}, + attrs={"is_active_output": True}, + ) diff --git a/infinigen/assets/objects/tables/lofting.py b/infinigen/assets/objects/tables/lofting.py new file mode 100644 index 000000000..d5e12f82d --- /dev/null +++ b/infinigen/assets/objects/tables/lofting.py @@ -0,0 +1,585 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Yiming Zuo + + +import bpy + +from infinigen.core import surface +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler + + +@node_utils.to_nodegroup( + "nodegroup_flip_index", singleton=False, type="GeometryNodeTree" +) +def nodegroup_flip_index(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + index = nw.new_node(Nodes.Index) + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketInt", "V Resolution", 0), + ("NodeSocketInt", "U Resolution", 0), + ], + ) + + modulo = nw.new_node( + Nodes.Math, + input_kwargs={0: index, 1: group_input.outputs["V Resolution"]}, + attrs={"operation": "MODULO"}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: modulo, 1: group_input.outputs["U Resolution"]}, + attrs={"operation": "MULTIPLY"}, + ) + + divide = nw.new_node( + Nodes.Math, + input_kwargs={0: index, 1: group_input.outputs["V Resolution"]}, + attrs={"operation": "DIVIDE"}, + ) + + floor = nw.new_node( + Nodes.Math, input_kwargs={0: divide}, attrs={"operation": "FLOOR"} + ) + + add = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: floor}) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Index": add}, attrs={"is_active_output": True} + ) + + +@node_utils.to_nodegroup( + "nodegroup_cylinder_side", singleton=False, type="GeometryNodeTree" +) +def nodegroup_cylinder_side(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketInt", "U Resolution", 32), + ("NodeSocketInt", "V Resolution", 0), + ], + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["V Resolution"], 1: 1.0000}, + attrs={"operation": "SUBTRACT"}, + ) + + cylinder = nw.new_node( + "GeometryNodeMeshCylinder", + input_kwargs={ + "Vertices": group_input.outputs["U Resolution"], + "Side Segments": subtract, + }, + ) + + store_named_attribute = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": cylinder.outputs["Mesh"], + "Name": "uv_map", + 3: cylinder.outputs["UV Map"], + }, + attrs={"domain": "CORNER", "data_type": "FLOAT_VECTOR"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + 
input_kwargs={ + "Geometry": store_named_attribute, + "Top": cylinder.outputs["Top"], + "Side": cylinder.outputs["Side"], + "Bottom": cylinder.outputs["Bottom"], + }, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_shifted_circle", singleton=False, type="GeometryNodeTree" +) +def nodegroup_shifted_circle(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketInt", "Resolution", 32), + ("NodeSocketFloatDistance", "Radius", 1.0000), + ("NodeSocketFloat", "Z", 0.0000), + ("NodeSocketFloat", "Rot Z", 0.0000), + ], + ) + + curve_circle_3 = nw.new_node( + Nodes.CurveCircle, + input_kwargs={ + "Resolution": group_input.outputs["Resolution"], + "Radius": group_input.outputs["Radius"], + }, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"Z": group_input.outputs["Z"]} + ) + + radians = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Rot Z"]}, + attrs={"operation": "RADIANS"}, + ) + + combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": radians}) + + transform_3 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": curve_circle_3.outputs["Curve"], + "Translation": combine_xyz, + "Rotation": combine_xyz_1, + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": transform_3}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_shifted_square", singleton=False, type="GeometryNodeTree" +) +def nodegroup_shifted_square(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketInt", "Resolution", 10), + ("NodeSocketFloatDistance", "Width", 1.0000), + ("NodeSocketFloat", "Z", 0.0000), + ("NodeSocketFloat", "Rot Z", 0.5000), + ], + ) + + quadrilateral = nw.new_node( + "GeometryNodeCurvePrimitiveQuadrilateral", + input_kwargs={ + "Width": group_input.outputs["Width"], + "Height": group_input.outputs["Width"], + }, + ) + + resample_curve = nw.new_node( + Nodes.ResampleCurve, + input_kwargs={ + "Curve": quadrilateral, + "Count": group_input.outputs["Resolution"], + }, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"Z": group_input.outputs["Z"]} + ) + + radians = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Rot Z"]}, + attrs={"operation": "RADIANS"}, + ) + + combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": radians}) + + transform_geometry = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": resample_curve, + "Translation": combine_xyz, + "Rotation": combine_xyz_1, + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Curve": transform_geometry}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup("nodegroup_lofting", singleton=False, type="GeometryNodeTree") +def nodegroup_lofting(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Profile Curves", None), + ("NodeSocketInt", "U Resolution", 32), + ("NodeSocketInt", "V Resolution", 32), + ("NodeSocketBool", "Use Nurb", False), + ], + ) + + cylinderside = nw.new_node( + nodegroup_cylinder_side().name, + input_kwargs={ + "U Resolution": group_input.outputs["U Resolution"], + "V Resolution": group_input, + }, + ) + + index = nw.new_node(Nodes.Index) + + 
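+    # Lofting sketch: one guide line per column of profile points is instanced and
+    # given a point per input profile, with positions fetched through the transposed
+    # index from nodegroup_flip_index (i -> (i % n_profiles) * U + i // n_profiles).
+    # The guides are smoothed as Catmull-Rom or NURBS splines ("Use Nurb"), resampled
+    # to V Resolution, and written onto the U x V cylinder grid created above.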
evaluate_on_domain = nw.new_node( + Nodes.EvaluateonDomain, + input_kwargs={1: index}, + attrs={"domain": "CURVE", "data_type": "INT"}, + ) + + equal = nw.new_node( + Nodes.Compare, + input_kwargs={2: evaluate_on_domain.outputs[1]}, + attrs={"data_type": "INT", "operation": "EQUAL"}, + ) + + curve_line = nw.new_node(Nodes.CurveLine) + + domain_size = nw.new_node( + Nodes.DomainSize, + input_kwargs={"Geometry": group_input}, + attrs={"component": "CURVE"}, + ) + + resample_curve = nw.new_node( + Nodes.ResampleCurve, + input_kwargs={ + "Curve": curve_line, + "Count": domain_size.outputs["Spline Count"], + }, + ) + + instance_on_points_1 = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={ + "Points": group_input, + "Selection": equal, + "Instance": resample_curve, + }, + ) + + realize_instances = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": instance_on_points_1} + ) + + position = nw.new_node(Nodes.InputPosition) + + flipindex = nw.new_node( + nodegroup_flip_index().name, + input_kwargs={ + "V Resolution": domain_size.outputs["Spline Count"], + "U Resolution": group_input.outputs["U Resolution"], + }, + ) + + sample_index_2 = nw.new_node( + Nodes.SampleIndex, + input_kwargs={"Geometry": group_input, 3: position, "Index": flipindex}, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": realize_instances, + "Position": sample_index_2.outputs[2], + }, + ) + + set_spline_type_1 = nw.new_node( + Nodes.SplineType, + input_kwargs={"Curve": set_position}, + attrs={"spline_type": "CATMULL_ROM"}, + ) + + set_spline_type = nw.new_node( + Nodes.SplineType, + input_kwargs={"Curve": set_position}, + attrs={"spline_type": "NURBS"}, + ) + + switch = nw.new_node( + Nodes.Switch, + input_kwargs={ + 1: group_input.outputs["Use Nurb"], + 14: set_spline_type_1, + 15: set_spline_type, + }, + ) + + resample_curve_1 = nw.new_node( + Nodes.ResampleCurve, + input_kwargs={"Curve": switch.outputs[6], "Count": group_input}, + ) + + position_1 = nw.new_node(Nodes.InputPosition) + + flipindex_1 = nw.new_node( + nodegroup_flip_index().name, + input_kwargs={ + "V Resolution": group_input.outputs["U Resolution"], + "U Resolution": group_input, + }, + ) + + sample_index_3 = nw.new_node( + Nodes.SampleIndex, + input_kwargs={ + "Geometry": resample_curve_1, + 3: position_1, + "Index": flipindex_1, + }, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + set_position_1 = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": cylinderside.outputs["Geometry"], + "Position": sample_index_3.outputs[2], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": set_position_1, + "Top": cylinderside.outputs["Top"], + "Side": cylinderside.outputs["Side"], + "Bottom": cylinderside.outputs["Bottom"], + }, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_warp_around_curve", singleton=False, type="GeometryNodeTree" +) +def nodegroup_warp_around_curve(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketGeometry", "Curve", None), + ("NodeSocketInt", "U Resolution", 32), + ("NodeSocketInt", "V Resolution", 32), + ("NodeSocketFloat", "Radius", 1.0000), + ], + ) + + resample_curve = nw.new_node( + Nodes.ResampleCurve, + input_kwargs={ + "Curve": group_input.outputs["Curve"], + "Count": group_input.outputs["V Resolution"], 
+ }, + ) + + position_1 = nw.new_node(Nodes.InputPosition) + + index = nw.new_node(Nodes.Index) + + divide = nw.new_node( + Nodes.Math, + input_kwargs={0: index, 1: group_input.outputs["U Resolution"]}, + attrs={"operation": "DIVIDE"}, + ) + + floor = nw.new_node( + Nodes.Math, input_kwargs={0: divide}, attrs={"operation": "FLOOR"} + ) + + sample_index_3 = nw.new_node( + Nodes.SampleIndex, + input_kwargs={"Geometry": resample_curve, 3: position_1, "Index": floor}, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + normal = nw.new_node(Nodes.InputNormal) + + sample_index_5 = nw.new_node( + Nodes.SampleIndex, + input_kwargs={"Geometry": resample_curve, 3: normal, "Index": floor}, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + position = nw.new_node(Nodes.InputPosition) + + separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": position}) + + scale = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: sample_index_5.outputs[2], "Scale": separate_xyz.outputs["X"]}, + attrs={"operation": "SCALE"}, + ) + + curve_tangent = nw.new_node(Nodes.CurveTangent) + + sample_index_4 = nw.new_node( + Nodes.SampleIndex, + input_kwargs={"Geometry": resample_curve, 3: curve_tangent, "Index": floor}, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + cross_product = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: sample_index_4.outputs[2], 1: sample_index_5.outputs[2]}, + attrs={"operation": "CROSS_PRODUCT"}, + ) + + scale_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: cross_product.outputs["Vector"], + "Scale": separate_xyz.outputs["Y"], + }, + attrs={"operation": "SCALE"}, + ) + + add = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: scale.outputs["Vector"], 1: scale_1.outputs["Vector"]}, + ) + + scale_2 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: add.outputs["Vector"], "Scale": group_input.outputs["Radius"]}, + attrs={"operation": "SCALE"}, + ) + + add_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: sample_index_3.outputs[2], 1: scale_2.outputs["Vector"]}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + "Position": add_1.outputs["Vector"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": set_position}, + attrs={"is_active_output": True}, + ) + + +def geometry_nodes(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + integer = nw.new_node(Nodes.Integer) + integer.integer = 32 + + shiftedsquare = nw.new_node( + nodegroup_shifted_square().name, input_kwargs={"Resolution": integer} + ) + + shiftedcircle = nw.new_node( + nodegroup_shifted_circle().name, + input_kwargs={"Resolution": integer, "Radius": 0.9200, "Z": 2.5600}, + ) + + transform_geometry = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": shiftedcircle, "Rotation": (0.0000, 0.0000, 0.7854)}, + ) + + shiftedsquare_1 = nw.new_node( + nodegroup_shifted_square().name, + input_kwargs={"Resolution": integer, "Z": 10.0000}, + ) + + divide = nw.new_node( + Nodes.Math, input_kwargs={0: integer, 1: 2.0000}, attrs={"operation": "DIVIDE"} + ) + + star = nw.new_node( + "GeometryNodeCurveStar", + input_kwargs={"Points": divide, "Inner Radius": 0.5000, "Outer Radius": 0.6600}, + ) + + transform_geometry_1 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": star.outputs["Curve"], + "Translation": (0.0000, 0.0000, 7.6000), + "Rotation": (0.0000, 0.0000, 0.7854), + }, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={ + "Geometry": 
[ + shiftedsquare, + transform_geometry, + shiftedsquare_1, + transform_geometry_1, + ] + }, + ) + + v_resolution = nw.new_node(Nodes.Integer, label="V Resolution") + v_resolution.integer = 64 + + lofting = nw.new_node( + nodegroup_lofting().name, + input_kwargs={ + "Profile Curves": join_geometry, + "U Resolution": integer, + "V Resolution": v_resolution, + }, + ) + + object_info = nw.new_node( + Nodes.ObjectInfo, input_kwargs={"Object": bpy.data.objects["BezierCurve"]} + ) + + warparoundcurve = nw.new_node( + nodegroup_warp_around_curve().name, + input_kwargs={ + "Geometry": lofting.outputs["Geometry"], + "Curve": object_info.outputs["Geometry"], + "U Resolution": integer, + "V Resolution": v_resolution, + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": warparoundcurve}, + attrs={"is_active_output": True}, + ) + + +def apply(obj, selection=None, **kwargs): + surface.add_geomod(obj, geometry_nodes, selection=selection, attributes=[]) + + +apply(bpy.context.active_object) diff --git a/infinigen/assets/objects/tables/strechers.py b/infinigen/assets/objects/tables/strechers.py new file mode 100644 index 000000000..021fc9948 --- /dev/null +++ b/infinigen/assets/objects/tables/strechers.py @@ -0,0 +1,48 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Yiming Zuo + + +from infinigen.assets.objects.tables.table_utils import nodegroup_n_gon_cylinder +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler + + +@node_utils.to_nodegroup("nodegroup_strecher", singleton=False, type="GeometryNodeTree") +def nodegroup_strecher(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + curve_line = nw.new_node( + Nodes.CurveLine, + input_kwargs={ + "Start": (1.0000, 0.0000, 1.0000), + "End": (1.0000, 0.0000, -1.0000), + }, + ) + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketInt", "N-gon", 32), + ("NodeSocketFloat", "Profile Width", 0.200), + ], + ) + + ngoncylinder = nw.new_node( + nodegroup_n_gon_cylinder().name, + input_kwargs={ + "Radius Curve": curve_line, + "Height": 1.0000, + "N-gon": group_input.outputs["N-gon"], + "Profile Width": group_input.outputs["Profile Width"], + "Aspect Ratio": 1.0000, + "Resolution": 64, + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": ngoncylinder.outputs["Mesh"]}, + attrs={"is_active_output": True}, + ) diff --git a/infinigen/assets/objects/tables/table_top.py b/infinigen/assets/objects/tables/table_top.py new file mode 100644 index 000000000..8cb002b0a --- /dev/null +++ b/infinigen/assets/objects/tables/table_top.py @@ -0,0 +1,339 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
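+
+# nodegroup_generate_table_top (below) sweeps a filleted vertical profile around an
+# n-gon outline to form the table surface and tags the top cap as a support
+# surface; TableTopFactory wraps it as a standalone asset.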
+ +# Authors: Yiming Zuo + + +import bpy + +from infinigen.assets.objects.tables.table_utils import ( + nodegroup_create_cap, + nodegroup_n_gon_cylinder, +) +from infinigen.core import surface +from infinigen.core import tags as t +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.tagging import tag_nodegroup +from infinigen.core.util.math import FixedSeed + + +@node_utils.to_nodegroup( + "nodegroup_capped_cylinder", singleton=False, type="GeometryNodeTree" +) +def nodegroup_capped_cylinder(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "Thickness", 0.5000), + ("NodeSocketFloat", "Radius", 0.2000), + ("NodeSocketFloatDistance", "Cap Flatness", 4.0000), + ("NodeSocketFloat", "Fillet Radius Vertical", 0.4000), + ("NodeSocketFloat", "Cap Relative Scale", 1.0000), + ("NodeSocketFloat", "Cap Relative Z Offset", 0.0000), + ("NodeSocketInt", "Resolution", 64), + ], + ) + + create_cap = nw.new_node( + nodegroup_create_cap().name, + input_kwargs={ + "Radius": group_input.outputs["Cap Flatness"], + "Resolution": group_input.outputs["Resolution"], + }, + label="CreateCap", + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Thickness"], 1: 2.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + add = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply, 1: group_input.outputs["Cap Relative Z Offset"]}, + ) + + combine_xyz_5 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": add}) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Radius"], 1: 0.5}, + attrs={"operation": "MULTIPLY"}, + ) + + add_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_1, 1: group_input.outputs["Cap Relative Scale"]}, + ) + + transform_5 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": create_cap, + "Translation": combine_xyz_5, + "Scale": add_1, + }, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Radius"], 1: 1.0}, + attrs={"operation": "MULTIPLY"}, + ) + + generatetabletop = nw.new_node( + nodegroup_generate_table_top().name, + input_kwargs={ + "Thickness": multiply, + "N-gon": group_input.outputs["Resolution"], + "Profile Width": multiply_2, + "Aspect Ratio": 1.0000, + "Fillet Ratio": 0.0000, + "Fillet Radius Vertical": group_input.outputs["Fillet Radius Vertical"], + }, + ) + + join_geometry_2 = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [transform_5, generatetabletop]} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": join_geometry_2}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_generate_table_top", singleton=False, type="GeometryNodeTree" +) +def nodegroup_generate_table_top(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + curve_line = nw.new_node( + Nodes.CurveLine, + input_kwargs={ + "Start": (1.0000, 0.0000, 1.0000), + "End": (1.0000, 0.0000, -1.0000), + }, + ) + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "Thickness", 0.5000), + ("NodeSocketInt", "N-gon", 0), + ("NodeSocketFloat", "Profile Width", 0.5000), + ("NodeSocketFloat", "Aspect Ratio", 0.5000), + ("NodeSocketFloat", "Fillet Ratio", 0.2000), + ("NodeSocketFloat", "Fillet Radius Vertical", 0.0000), + 
], + ) + + ngoncylinder = nw.new_node( + nodegroup_n_gon_cylinder().name, + input_kwargs={ + "Radius Curve": curve_line, + "Height": group_input.outputs["Thickness"], + "N-gon": group_input.outputs["N-gon"], + "Profile Width": group_input.outputs["Profile Width"], + "Aspect Ratio": group_input.outputs["Aspect Ratio"], + "Fillet Ratio": group_input.outputs["Fillet Ratio"], + "Profile Resolution": 512, + "Resolution": 10, + }, + ) + + arc = nw.new_node( + "GeometryNodeCurveArc", + input_kwargs={"Resolution": 4, "Radius": 0.7071, "Sweep Angle": 4.7124}, + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": arc.outputs["Curve"], + "Rotation": (0.0000, 0.0000, -0.7854), + }, + ) + + transform_2 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": transform, "Rotation": (0.0000, 1.5708, 0.0000)}, + ) + + transform_3 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": transform_2, "Translation": (0.0000, 0.5000, 0.0000)}, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": 1.0000, + "Y": group_input.outputs["Fillet Radius Vertical"], + "Z": 1.0000, + }, + ) + + transform_4 = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": transform_3, "Scale": combine_xyz} + ) + + fillet_curve = nw.new_node( + "GeometryNodeFilletCurve", + input_kwargs={ + "Curve": transform_4, + "Count": 8, + "Radius": group_input.outputs["Fillet Radius Vertical"], + "Limit Radius": True, + }, + attrs={"mode": "POLY"}, + ) + + transform_6 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": fillet_curve, + "Rotation": (1.5708, 1.5708, 0.0000), + "Scale": group_input.outputs["Thickness"], + }, + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": ngoncylinder.outputs["Profile Curve"], + "Profile Curve": transform_6, + }, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Thickness"], 1: -0.5000}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply}) + + transform_5 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": curve_to_mesh, "Translation": combine_xyz_1}, + ) + + index = nw.new_node(Nodes.Index) + + equal = nw.new_node( + Nodes.Compare, + input_kwargs={"A": index, "B": 0}, + attrs={"data_type": "INT", "operation": "EQUAL"}, + ) + + cap = tag_nodegroup( + nw, ngoncylinder.outputs["Caps"], t.Subpart.SupportSurface, selection=equal + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [transform_5, cap]} + ) + + flip_faces = nw.new_node(Nodes.FlipFaces, input_kwargs={"Mesh": join_geometry}) + + combine_xyz_2 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"Z": group_input.outputs["Thickness"]} + ) + + transform_1 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": flip_faces, "Translation": combine_xyz_2}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": transform_1, + "Curve": ngoncylinder.outputs["Profile Curve"], + }, + ) + + +def geometry_generate_table_top_wrapper(nw: NodeWrangler, **kwargs): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketInt", "Profile N-gon", kwargs["Profile N-gon"]), + ("NodeSocketFloat", "Profile Width", kwargs["Profile Width"]), + ("NodeSocketFloat", "Profile Aspect Ratio", kwargs["Profile Aspect Ratio"]), + ("NodeSocketFloat", "Profile Fillet Ratio", kwargs["Profile Fillet Ratio"]), + 
("NodeSocketFloat", "Thickness", kwargs["Thickness"]), + ( + "NodeSocketFloat", + "Vertical Fillet Ratio", + kwargs["Vertical Fillet Ratio"], + ), + ], + ) + + generatetabletop = nw.new_node( + nodegroup_generate_table_top().name, + input_kwargs={ + "Thickness": group_input.outputs["Thickness"], + "N-gon": group_input.outputs["Profile N-gon"], + "Profile Width": group_input.outputs["Profile Width"], + "Aspect Ratio": group_input.outputs["Profile Aspect Ratio"], + "Fillet Ratio": group_input.outputs["Profile Fillet Ratio"], + "Fillet Radius Vertical": group_input.outputs["Vertical Fillet Ratio"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": generatetabletop}, + attrs={"is_active_output": True}, + ) + + +class TableTopFactory(AssetFactory): + def __init__(self, factory_seed, coarse=False): + super(TableTopFactory, self).__init__(factory_seed, coarse=coarse) + + with FixedSeed(factory_seed): + self.params = self.sample_parameters() + + @staticmethod + def sample_parameters(): + # all in meters + return { + "Profile N-gon": 4, + "Profile Width": 1.0, + "Profile Aspect Ratio": 1.0, + "Profile Fillet Ratio": 0.2000, + "Thickness": 0.1000, + "Vertical Fillet Ratio": 0.2000, + } + + def create_asset(self, **params): + bpy.ops.mesh.primitive_plane_add( + size=2, + enter_editmode=False, + align="WORLD", + location=(0, 0, 0), + scale=(1, 1, 1), + ) + obj = bpy.context.active_object + + surface.add_geomod( + obj, + geometry_generate_table_top_wrapper, + apply=False, + input_kwargs=self.params, + ) + + return obj diff --git a/infinigen/assets/objects/tables/table_utils.py b/infinigen/assets/objects/tables/table_utils.py new file mode 100644 index 000000000..ea2a30c62 --- /dev/null +++ b/infinigen/assets/objects/tables/table_utils.py @@ -0,0 +1,1032 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
+ +# Authors: Yiming Zuo + + +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler + + +@node_utils.to_nodegroup( + "nodegroup_n_gon_profile", singleton=False, type="GeometryNodeTree" +) +def nodegroup_n_gon_profile(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketInt", "Profile N-gon", 4), + ("NodeSocketFloat", "Profile Width", 1.0000), + ("NodeSocketFloat", "Profile Aspect Ratio", 1.0000), + ("NodeSocketFloat", "Profile Fillet Ratio", 0.2000), + ], + ) + + value = nw.new_node(Nodes.Value) + value.outputs[0].default_value = 0.5000 + + curve_circle = nw.new_node( + Nodes.CurveCircle, + input_kwargs={ + "Resolution": group_input.outputs["Profile N-gon"], + "Radius": value, + }, + ) + + divide = nw.new_node( + Nodes.Math, + input_kwargs={0: 3.1416, 1: group_input.outputs["Profile N-gon"]}, + attrs={"operation": "DIVIDE"}, + ) + + combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": divide}) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": curve_circle.outputs["Curve"], + "Rotation": combine_xyz_1, + }, + ) + + transform_2 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": transform, "Rotation": (0.0000, 0.0000, -1.5708)}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["Profile Aspect Ratio"], + 1: group_input.outputs["Profile Width"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": group_input.outputs["Profile Width"], + "Y": multiply, + "Z": 1.0000, + }, + ) + + transform_1 = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": transform_2, "Scale": combine_xyz} + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["Profile Width"], + 1: group_input.outputs["Profile Fillet Ratio"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + fillet_curve_1 = nw.new_node( + "GeometryNodeFilletCurve", + input_kwargs={ + "Curve": transform_1, + "Count": 8, + "Radius": multiply_1, + "Limit Radius": True, + }, + attrs={"mode": "POLY"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Output": fillet_curve_1}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_n_gon_cylinder", singleton=False, type="GeometryNodeTree" +) +def nodegroup_n_gon_cylinder(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Radius Curve", None), + ("NodeSocketFloat", "Height", 0.5000), + ("NodeSocketInt", "N-gon", 0), + ("NodeSocketFloat", "Profile Width", 0.5000), + ("NodeSocketFloat", "Aspect Ratio", 0.5000), + ("NodeSocketFloat", "Fillet Ratio", 0.2000), + ("NodeSocketInt", "Profile Resolution", 64), + ("NodeSocketInt", "Resolution", 128), + ], + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Height"], 1: -1.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply}) + + curve_line = nw.new_node(Nodes.CurveLine, input_kwargs={"End": combine_xyz_1}) + + set_curve_tilt = nw.new_node( + Nodes.SetCurveTilt, input_kwargs={"Curve": curve_line, "Tilt": 3.1416} + ) + + resample_curve = nw.new_node( + Nodes.ResampleCurve, + input_kwargs={ + "Curve": set_curve_tilt, + "Count": 
group_input.outputs["Resolution"], + }, + ) + + spline_parameter_1 = nw.new_node(Nodes.SplineParameter) + + capture_attribute = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={ + "Geometry": resample_curve, + 2: spline_parameter_1.outputs["Factor"], + }, + ) + + ngonprofile = nw.new_node( + nodegroup_n_gon_profile().name, + input_kwargs={ + "Profile N-gon": group_input.outputs["N-gon"], + "Profile Width": group_input.outputs["Profile Width"], + "Profile Aspect Ratio": group_input.outputs["Aspect Ratio"], + "Profile Fillet Ratio": group_input.outputs["Fillet Ratio"], + }, + ) + + resample_curve_1 = nw.new_node( + Nodes.ResampleCurve, + input_kwargs={ + "Curve": ngonprofile, + "Count": group_input.outputs["Profile Resolution"], + }, + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": capture_attribute.outputs["Geometry"], + "Profile Curve": resample_curve_1, + "Fill Caps": True, + }, + ) + + position_1 = nw.new_node(Nodes.InputPosition) + + separate_xyz_2 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": position_1}) + + sample_curve = nw.new_node( + Nodes.SampleCurve, + input_kwargs={ + "Curves": group_input.outputs["Radius Curve"], + "Factor": capture_attribute.outputs[2], + }, + attrs={"use_all_curves": True}, + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": sample_curve.outputs["Position"]} + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": separate_xyz.outputs["X"], "Y": separate_xyz.outputs["Y"]}, + ) + + length = nw.new_node( + Nodes.VectorMath, input_kwargs={0: combine_xyz}, attrs={"operation": "LENGTH"} + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_2.outputs["X"], 1: length.outputs["Value"]}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_2.outputs["Y"], 1: length.outputs["Value"]}, + attrs={"operation": "MULTIPLY"}, + ) + + position = nw.new_node(Nodes.InputPosition) + + separate_xyz_1 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": position}) + + attribute_statistic = nw.new_node( + Nodes.AttributeStatistic, + input_kwargs={ + "Geometry": group_input.outputs["Radius Curve"], + 2: separate_xyz_1.outputs["Z"], + }, + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": separate_xyz.outputs["Z"], + 1: attribute_statistic.outputs["Min"], + 2: attribute_statistic.outputs["Max"], + 3: multiply, + 4: 0.0000, + }, + ) + + combine_xyz_2 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": multiply_1, + "Y": multiply_2, + "Z": map_range.outputs["Result"], + }, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={"Geometry": curve_to_mesh, "Position": combine_xyz_2}, + ) + + index = nw.new_node(Nodes.Index) + + domain_size = nw.new_node( + Nodes.DomainSize, input_kwargs={"Geometry": curve_to_mesh} + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: domain_size.outputs["Face Count"], 1: 2.0000}, + attrs={"operation": "SUBTRACT"}, + ) + + less_than = nw.new_node( + Nodes.Compare, + input_kwargs={2: index, 3: subtract}, + attrs={"operation": "LESS_THAN", "data_type": "INT"}, + ) + + delete_geometry = nw.new_node( + Nodes.DeleteGeometry, + input_kwargs={"Geometry": curve_to_mesh, "Selection": less_than}, + attrs={"domain": "FACE"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Mesh": set_position, + "Profile Curve": resample_curve_1, + "Caps": delete_geometry, + }, + 
attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_generate_radius_curve", singleton=False, type="GeometryNodeTree" +) +def nodegroup_generate_radius_curve(nw: NodeWrangler, curve_control_points): + # Code generated using version 2.6.4 of the node_transpiler + + curve_line = nw.new_node( + Nodes.CurveLine, + input_kwargs={ + "Start": (1.0000, 0.0000, 1.0000), + "End": (1.0000, 0.0000, -1.0000), + }, + ) + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketInt", "Resolution", 128)] + ) + + resample_curve = nw.new_node( + Nodes.ResampleCurve, + input_kwargs={"Curve": curve_line, "Count": group_input.outputs["Resolution"]}, + ) + + position = nw.new_node(Nodes.InputPosition) + + spline_parameter = nw.new_node(Nodes.SplineParameter) + + float_curve = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": spline_parameter.outputs["Factor"]} + ) + node_utils.assign_curve(float_curve.mapping.curves[0], curve_control_points) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": float_curve, "Y": 1.0000, "Z": 1.0000} + ) + + multiply = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: position, 1: combine_xyz_1}, + attrs={"operation": "MULTIPLY"}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": resample_curve, + "Position": multiply.outputs["Vector"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": set_position}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_create_anchors", singleton=False, type="GeometryNodeTree" +) +def nodegroup_create_anchors(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketInt", "Profile N-gon", 0), + ("NodeSocketFloat", "Profile Width", 0.5000), + ("NodeSocketFloat", "Profile Aspect Ratio", 0.5000), + ("NodeSocketFloat", "Profile Rotation", 0.0000), + ], + ) + + equal = nw.new_node( + Nodes.Compare, + input_kwargs={2: group_input.outputs["Profile N-gon"], 3: 1}, + attrs={"operation": "EQUAL", "data_type": "INT"}, + ) + + equal_1 = nw.new_node( + Nodes.Compare, + input_kwargs={2: group_input.outputs["Profile N-gon"], 3: 2}, + attrs={"operation": "EQUAL", "data_type": "INT"}, + ) + + ngonprofile = nw.new_node( + nodegroup_n_gon_profile().name, + input_kwargs={ + "Profile N-gon": group_input.outputs["Profile N-gon"], + "Profile Width": group_input.outputs["Profile Width"], + "Profile Aspect Ratio": group_input.outputs["Profile Aspect Ratio"], + "Profile Fillet Ratio": 0.0000, + }, + ) + + curve_to_points = nw.new_node( + Nodes.CurveToPoints, + input_kwargs={"Curve": ngonprofile}, + attrs={"mode": "EVALUATED"}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Profile Width"], 1: 0.3535}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={"X": multiply}) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Profile Width"], 1: -0.3535}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"X": multiply_1}) + + curve_line = nw.new_node( + Nodes.CurveLine, input_kwargs={"Start": combine_xyz, "End": combine_xyz_1} + ) + + curve_to_points_1 = nw.new_node( + Nodes.CurveToPoints, + input_kwargs={"Curve": curve_line}, + attrs={"mode": "EVALUATED"}, + ) + + switch_1 = nw.new_node( + Nodes.Switch, + input_kwargs={ + 1: 
equal_1, + 14: curve_to_points.outputs["Points"], + 15: curve_to_points_1.outputs["Points"], + }, + ) + + points = nw.new_node("GeometryNodePoints") + + switch = nw.new_node( + Nodes.Switch, input_kwargs={1: equal, 14: switch_1.outputs[6], 15: points} + ) + + set_point_radius = nw.new_node( + Nodes.SetPointRadius, input_kwargs={"Points": switch.outputs[6]} + ) + + combine_xyz_2 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"Z": group_input.outputs["Profile Rotation"]} + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": set_point_radius, "Rotation": combine_xyz_2}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": transform}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_create_legs_and_strechers", singleton=False, type="GeometryNodeTree" +) +def nodegroup_create_legs_and_strechers(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Anchors", None), + ("NodeSocketBool", "Keep Legs", False), + ("NodeSocketGeometry", "Leg Instance", None), + ("NodeSocketFloat", "Table Height", 0.0000), + ("NodeSocketFloat", "Leg Bottom Relative Scale", 0.0000), + ("NodeSocketFloat", "Leg Bottom Relative Rotation", 0.0000), + ("NodeSocketBool", "Keep Odd Strechers", True), + ("NodeSocketBool", "Keep Even Strechers", True), + ("NodeSocketGeometry", "Strecher Instance", None), + ("NodeSocketInt", "Strecher Index Increment", 0), + ("NodeSocketFloat", "Strecher Relative Position", 0.5000), + ("NodeSocketFloat", "Leg Bottom Offset", 0.0000), + ("NodeSocketBool", "Align Leg X rot", False), + ], + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"Z": group_input.outputs["Table Height"]} + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": group_input.outputs["Anchors"], + "Translation": combine_xyz, + }, + ) + + position = nw.new_node(Nodes.InputPosition) + + combine_xyz_3 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"Z": group_input.outputs["Leg Bottom Offset"]} + ) + + subtract = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: combine_xyz, 1: combine_xyz_3}, + attrs={"operation": "SUBTRACT"}, + ) + + subtract_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: position, 1: subtract.outputs["Vector"]}, + attrs={"operation": "SUBTRACT"}, + ) + + vector_rotate = nw.new_node( + Nodes.VectorRotate, + input_kwargs={ + "Vector": subtract_1.outputs["Vector"], + "Angle": group_input.outputs["Leg Bottom Relative Rotation"], + }, + attrs={"rotation_type": "Z_AXIS"}, + ) + + combine_xyz_4 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": group_input.outputs["Leg Bottom Relative Scale"], + "Y": group_input.outputs["Leg Bottom Relative Scale"], + "Z": 1.0000, + }, + ) + + multiply = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: vector_rotate, 1: combine_xyz_4}, + attrs={"operation": "MULTIPLY"}, + ) + + subtract_2 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: position, 1: multiply.outputs["Vector"]}, + attrs={"operation": "SUBTRACT"}, + ) + + align_euler_to_vector = nw.new_node( + Nodes.AlignEulerToVector, + input_kwargs={"Vector": subtract_2}, + attrs={"axis": "Z"}, + ) + + align_euler_to_vector_3 = nw.new_node( + Nodes.AlignEulerToVector, + input_kwargs={"Rotation": align_euler_to_vector, "Vector": position}, + attrs={"pivot_axis": "Z"}, + ) + + switch = nw.new_node( + Nodes.Switch, + input_kwargs={ + 0: 
group_input.outputs["Align Leg X rot"], + 8: align_euler_to_vector, + 9: align_euler_to_vector_3, + }, + attrs={"input_type": "VECTOR"}, + ) + + length = nw.new_node( + Nodes.VectorMath, input_kwargs={0: subtract_2}, attrs={"operation": "LENGTH"} + ) + + combine_xyz_2 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": 1.0000, "Y": 1.0000, "Z": length.outputs["Value"]}, + ) + + instance_on_points = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={ + "Points": transform, + "Instance": group_input.outputs["Leg Instance"], + "Rotation": switch.outputs[3], + "Scale": combine_xyz_2, + }, + ) + + realize_instances = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": instance_on_points} + ) + + switch_1 = nw.new_node( + Nodes.Switch, + input_kwargs={1: group_input.outputs["Keep Legs"], 15: realize_instances}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Strecher Relative Position"], 1: -1.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + scale = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: subtract_2, "Scale": multiply_1}, + attrs={"operation": "SCALE"}, + ) + + position_2 = nw.new_node(Nodes.InputPosition) + + add = nw.new_node( + Nodes.VectorMath, input_kwargs={0: scale.outputs["Vector"], 1: position_2} + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={"Geometry": transform, "Position": add.outputs["Vector"]}, + ) + + index = nw.new_node(Nodes.Index) + + modulo = nw.new_node( + Nodes.Math, input_kwargs={0: index, 1: 2.0000}, attrs={"operation": "MODULO"} + ) + + op_and = nw.new_node( + Nodes.BooleanMath, + input_kwargs={0: modulo, 1: group_input.outputs["Keep Odd Strechers"]}, + ) + + op_not = nw.new_node( + Nodes.BooleanMath, input_kwargs={0: modulo}, attrs={"operation": "NOT"} + ) + + op_and_1 = nw.new_node( + Nodes.BooleanMath, + input_kwargs={0: group_input.outputs["Keep Even Strechers"], 1: op_not}, + ) + + op_or = nw.new_node( + Nodes.BooleanMath, + input_kwargs={0: op_and, 1: op_and_1}, + attrs={"operation": "OR"}, + ) + + domain_size = nw.new_node( + Nodes.DomainSize, + input_kwargs={"Geometry": transform}, + attrs={"component": "POINTCLOUD"}, + ) + + divide = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: domain_size.outputs["Point Count"], + 1: group_input.outputs["Strecher Index Increment"], + }, + attrs={"operation": "DIVIDE"}, + ) + + equal = nw.new_node( + Nodes.Compare, input_kwargs={0: divide, 1: 2.0000}, attrs={"operation": "EQUAL"} + ) + + boolean = nw.new_node(Nodes.Boolean, attrs={"boolean": True}) + + index_1 = nw.new_node(Nodes.Index) + + divide_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: domain_size.outputs["Point Count"], 1: 2.0000}, + attrs={"operation": "DIVIDE"}, + ) + + less_than = nw.new_node( + Nodes.Compare, + input_kwargs={2: index_1, 3: divide_1}, + attrs={"operation": "LESS_THAN", "data_type": "INT"}, + ) + + switch_2 = nw.new_node( + Nodes.Switch, + input_kwargs={0: equal, 6: boolean, 7: less_than}, + attrs={"input_type": "BOOLEAN"}, + ) + + op_and_2 = nw.new_node( + Nodes.BooleanMath, input_kwargs={0: op_or, 1: switch_2.outputs[2]} + ) + + position_1 = nw.new_node(Nodes.InputPosition) + + add_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: index, 1: group_input.outputs["Strecher Index Increment"]}, + ) + + modulo_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: add_1, 1: domain_size.outputs["Point Count"]}, + attrs={"operation": "MODULO"}, + ) + + field_at_index = nw.new_node( + Nodes.FieldAtIndex, + input_kwargs={"Index": modulo_1, 3: position_1}, + 
attrs={"data_type": "FLOAT_VECTOR"}, + ) + + subtract_3 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: position_1, 1: field_at_index.outputs[2]}, + attrs={"operation": "SUBTRACT"}, + ) + + align_euler_to_vector_1 = nw.new_node( + Nodes.AlignEulerToVector, + input_kwargs={"Vector": subtract_3.outputs["Vector"]}, + attrs={"axis": "Z"}, + ) + + align_euler_to_vector_2 = nw.new_node( + Nodes.AlignEulerToVector, + input_kwargs={"Rotation": align_euler_to_vector_1}, + attrs={"pivot_axis": "Z"}, + ) + + length_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: subtract_3.outputs["Vector"]}, + attrs={"operation": "LENGTH"}, + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": 1.0000, "Y": 1.0000, "Z": length_1.outputs["Value"]}, + ) + + instance_on_points_1 = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={ + "Points": set_position, + "Selection": op_and_2, + "Instance": group_input.outputs["Strecher Instance"], + "Rotation": align_euler_to_vector_2, + "Scale": combine_xyz_1, + }, + ) + + realize_instances_1 = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": instance_on_points_1} + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [switch_1.outputs[6], realize_instances_1]}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": join_geometry}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_create_cap", singleton=False, type="GeometryNodeTree" +) +def nodegroup_create_cap(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloatDistance", "Radius", 1.0000), + ("NodeSocketInt", "Resolution", 64), + ], + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Radius"], 1: 257.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + uv_sphere = nw.new_node( + Nodes.MeshUVSphere, + input_kwargs={ + "Segments": group_input.outputs["Resolution"], + "Rings": multiply, + "Radius": group_input.outputs["Radius"], + }, + ) + + store_named_attribute = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": uv_sphere.outputs["Mesh"], + "Name": "uv_map", + 3: uv_sphere.outputs["UV Map"], + }, + attrs={"data_type": "FLOAT_VECTOR", "domain": "CORNER"}, + ) + + power = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Radius"], 1: 2.0000}, + attrs={"operation": "POWER"}, + ) + + subtract = nw.new_node( + Nodes.Math, input_kwargs={0: power, 1: 1.0000}, attrs={"operation": "SUBTRACT"} + ) + + sqrt = nw.new_node( + Nodes.Math, input_kwargs={0: subtract}, attrs={"operation": "SQRT"} + ) + + multiply_1 = nw.new_node( + Nodes.Math, input_kwargs={0: sqrt, 1: -1.0000}, attrs={"operation": "MULTIPLY"} + ) + + combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply_1}) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": store_named_attribute, "Translation": combine_xyz}, + ) + + position = nw.new_node(Nodes.InputPosition) + + separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": position}) + + less_than = nw.new_node( + Nodes.Compare, + input_kwargs={0: separate_xyz.outputs["Z"]}, + attrs={"operation": "LESS_THAN"}, + ) + + delete_geometry = nw.new_node( + Nodes.DeleteGeometry, + input_kwargs={"Geometry": transform, "Selection": less_than}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Mesh": delete_geometry}, + 
attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup("nodegroup_arc_top", singleton=False, type="GeometryNodeTree") +def nodegroup_arc_top(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloatDistance", "Diameter", 1.0000), + ("NodeSocketFloat", "Sweep Angle", 180.0000), + ], + ) + + divide = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Diameter"], 1: 2.0000}, + attrs={"operation": "DIVIDE"}, + ) + + multiply_add = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Sweep Angle"], 2: -90.0000}, + attrs={"operation": "MULTIPLY_ADD"}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_add, 1: -1.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + radians = nw.new_node( + Nodes.Math, input_kwargs={0: multiply}, attrs={"operation": "RADIANS"} + ) + + radians_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Sweep Angle"]}, + attrs={"operation": "RADIANS"}, + ) + + arc = nw.new_node( + "GeometryNodeCurveArc", + input_kwargs={ + "Resolution": 32, + "Radius": divide, + "Start Angle": radians, + "Sweep Angle": radians_1, + }, + ) + + transform_1 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": arc.outputs["Curve"], + "Rotation": (1.5708, 0.0000, 0.0000), + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": transform_1}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_align_bottom_to_floor", singleton=False, type="GeometryNodeTree" +) +def nodegroup_align_bottom_to_floor(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) + + bounding_box = nw.new_node( + Nodes.BoundingBox, input_kwargs={"Geometry": group_input.outputs["Geometry"]} + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": bounding_box.outputs["Min"]} + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["Z"], 1: -1.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply}) + + transform_geometry_1 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + "Translation": combine_xyz, + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": transform_geometry_1, "Offset": multiply}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup("nodegroup_bent", singleton=False, type="GeometryNodeTree") +def nodegroup_bent(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketFloat", "Amount", -0.1000), + ], + ) + + position = nw.new_node(Nodes.InputPosition) + + length = nw.new_node( + Nodes.VectorMath, input_kwargs={0: position}, attrs={"operation": "LENGTH"} + ) + + separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": position}) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: length.outputs["Value"], 1: separate_xyz.outputs["X"]}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply, 1: group_input.outputs["Amount"]}, + attrs={"operation": "MULTIPLY"}, + ) + + 
vector_rotate = nw.new_node( + Nodes.VectorRotate, input_kwargs={"Vector": position, "Angle": multiply_1} + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + "Position": vector_rotate, + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": set_position}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_merge_curve", singleton=False, type="GeometryNodeTree" +) +def nodegroup_merge_curve(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Curve", None)] + ) + + curve_to_mesh_1 = nw.new_node( + Nodes.CurveToMesh, input_kwargs={"Curve": group_input.outputs["Curve"]} + ) + + merge_by_distance = nw.new_node( + Nodes.MergeByDistance, input_kwargs={"Geometry": curve_to_mesh_1} + ) + + mesh_to_curve = nw.new_node( + Nodes.MeshToCurve, input_kwargs={"Mesh": merge_by_distance} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Curve": mesh_to_curve}, + attrs={"is_active_output": True}, + ) diff --git a/infinigen/assets/tableware/__init__.py b/infinigen/assets/objects/tableware/__init__.py similarity index 92% rename from infinigen/assets/tableware/__init__.py rename to infinigen/assets/objects/tableware/__init__.py index d1d3c46f8..66098264b 100644 --- a/infinigen/assets/tableware/__init__.py +++ b/infinigen/assets/objects/tableware/__init__.py @@ -2,21 +2,22 @@ # This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. # Authors: Lingjie Mei -from .spoon import SpoonFactory -from .knife import KnifeFactory -from .chopsticks import ChopsticksFactory -from .fork import ForkFactory, SpatulaFactory -from .pan import PanFactory -from .pot import PotFactory -from .cup import CupFactory -from .wineglass import WineglassFactory -from .plate import PlateFactory -from .bowl import BowlFactory -from .fruit_container import FruitContainerFactory + from .bottle import BottleFactory +from .bowl import BowlFactory from .can import CanFactory -from .jar import JarFactory +from .chopsticks import ChopsticksFactory +from .cup import CupFactory from .food_bag import FoodBagFactory from .food_box import FoodBoxFactory +from .fork import ForkFactory, SpatulaFactory +from .fruit_container import FruitContainerFactory +from .jar import JarFactory +from .knife import KnifeFactory from .lid import LidFactory -from .plant_container import PlantContainerFactory, LargePlantContainerFactory +from .pan import PanFactory +from .plant_container import LargePlantContainerFactory, PlantContainerFactory +from .plate import PlateFactory +from .pot import PotFactory +from .spoon import SpoonFactory +from .wineglass import WineglassFactory diff --git a/infinigen/assets/tableware/base.py b/infinigen/assets/objects/tableware/base.py similarity index 59% rename from infinigen/assets/tableware/base.py rename to infinigen/assets/objects/tableware/base.py index 8940bc8f7..5eb014251 100644 --- a/infinigen/assets/tableware/base.py +++ b/infinigen/assets/objects/tableware/base.py @@ -6,17 +6,15 @@ import numpy as np from numpy.random import uniform +from infinigen.assets.material_assignments import AssetList +from infinigen.assets.utils.decorate import read_co, write_attribute +from infinigen.assets.utils.misc import assign_material +from infinigen.core import surface from 
infinigen.core.nodes.node_info import Nodes from infinigen.core.nodes.node_wrangler import NodeWrangler - from infinigen.core.placement.factory import AssetFactory -from infinigen.core import surface -from infinigen.core.util.math import FixedSeed - from infinigen.core.util import blender as butil -from infinigen.assets.utils.decorate import read_co, write_attribute -from infinigen.assets.utils.misc import assign_material -from infinigen.assets.material_assignments import AssetList +from infinigen.core.util.math import FixedSeed class TablewareFactory(AssetFactory): @@ -26,16 +24,17 @@ class TablewareFactory(AssetFactory): def __init__(self, factory_seed, coarse=False): super().__init__(factory_seed, coarse) with FixedSeed(factory_seed): - self.thickness = .01 - material_assignments = AssetList['TablewareFactory'](fragile=self.is_fragile, - transparent=self.allow_transparent) + self.thickness = 0.01 + material_assignments = AssetList["TablewareFactory"]( + fragile=self.is_fragile, transparent=self.allow_transparent + ) - self.surface = material_assignments['surface'].assign_material() - self.inside_surface = material_assignments['inside'].assign_material() - self.guard_surface = material_assignments['guard'].assign_material() + self.surface = material_assignments["surface"].assign_material() + self.inside_surface = material_assignments["inside"].assign_material() + self.guard_surface = material_assignments["guard"].assign_material() - scratch_prob, edge_wear_prob = material_assignments['wear_tear_prob'] - self.scratch, self.edge_wear = material_assignments['wear_tear'] + scratch_prob, edge_wear_prob = material_assignments["wear_tear_prob"] + self.scratch, self.edge_wear = material_assignments["wear_tear"] self.scratch = None if uniform() > scratch_prob else self.scratch self.edge_wear = None if uniform() > edge_wear_prob else self.edge_wear @@ -43,9 +42,9 @@ def __init__(self, factory_seed, coarse=False): self.guard_depth = self.thickness self.has_guard = False self.has_inside = False - self.lower_thresh = uniform(.5, .8) - self.scale = 1. 
- self.metal_color = 'bw+natural' + self.lower_thresh = uniform(0.5, 0.8) + self.scale = 1.0 + self.metal_color = "bw+natural" def create_asset(self, **params) -> bpy.types.Object: raise NotImplementedError @@ -55,44 +54,48 @@ def add_guard(self, obj, selection): selection = False def geo_guard(nw: NodeWrangler): - geometry = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketGeometry', 'Geometry', None)]) + geometry = nw.new_node( + Nodes.GroupInput, + expose_input=[("NodeSocketGeometry", "Geometry", None)], + ) normal = nw.new_node(Nodes.InputNormal) x = nw.separate(nw.new_node(Nodes.InputPosition))[0] sel = surface.eval_argument(nw, selection, x=x, normal=normal) geometry, top, side = nw.new_node( Nodes.ExtrudeMesh, - input_args=[geometry, sel, None, self.guard_depth, - False] + input_args=[geometry, sel, None, self.guard_depth, False], ).outputs[:3] - guard = nw.boolean_math('OR', top, side) + guard = nw.boolean_math("OR", top, side) geometry = nw.new_node( Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': geometry, 'Name': 'guard', 'Value': guard}, - attrs={'domain': 'FACE'} + input_kwargs={"Geometry": geometry, "Name": "guard", "Value": guard}, + attrs={"domain": "FACE"}, ) - nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': geometry}) + nw.new_node(Nodes.GroupOutput, input_kwargs={"Geometry": geometry}) surface.add_geomod(obj, geo_guard, apply=True) @staticmethod def make_double_sided(selection): return lambda nw, x, normal: nw.boolean_math( - 'AND', + "AND", surface.eval_argument(nw, selection, x=x, normal=normal), nw.compare( - 'GREATER_THAN', - nw.math('ABSOLUTE', nw.separate(normal)[-1]), - .8 - ) + "GREATER_THAN", nw.math("ABSOLUTE", nw.separate(normal)[-1]), 0.8 + ), ) def finalize_assets(self, assets): assign_material(assets, []) self.surface.apply(assets, metal_color=self.metal_color) if self.has_inside: - self.inside_surface.apply(assets, selection='inside', clear=True, metal_color='bw+natural') + self.inside_surface.apply( + assets, selection="inside", clear=True, metal_color="bw+natural" + ) if self.has_guard: - self.guard_surface.apply(assets, selection='guard', metal_color=self.metal_color) + self.guard_surface.apply( + assets, selection="guard", metal_color=self.metal_color + ) if self.scratch: self.scratch.apply(assets) if self.edge_wear: @@ -100,17 +103,22 @@ def finalize_assets(self, assets): def solidify_with_inside(self, obj, thickness): max_z = np.max(read_co(obj)[:, -1]) - obj.vertex_groups.new(name='inside_') - butil.modify_mesh(obj, 'SOLIDIFY', thickness=thickness, offset=1, shell_vertex_group='inside_') - write_attribute(obj, 'inside_', 'inside', 'FACE') + obj.vertex_groups.new(name="inside_") + butil.modify_mesh( + obj, "SOLIDIFY", thickness=thickness, offset=1, shell_vertex_group="inside_" + ) + write_attribute(obj, "inside_", "inside", "FACE") def inside(nw: NodeWrangler): lower = nw.compare( - 'LESS_THAN', nw.separate(nw.new_node(Nodes.InputPosition))[-1], - max_z * self.lower_thresh + "LESS_THAN", + nw.separate(nw.new_node(Nodes.InputPosition))[-1], + max_z * self.lower_thresh, + ) + inside = nw.compare( + "GREATER_THAN", surface.eval_argument(nw, "inside"), 0.8 ) - inside = nw.compare('GREATER_THAN', surface.eval_argument(nw, 'inside'), .8) - return nw.boolean_math('AND', inside, lower) + return nw.boolean_math("AND", inside, lower) - write_attribute(obj, inside, 'lower_inside', 'FACE') - obj.vertex_groups.remove(obj.vertex_groups['inside_']) + write_attribute(obj, inside, "lower_inside", "FACE") + 
obj.vertex_groups.remove(obj.vertex_groups["inside_"]) diff --git a/infinigen/assets/objects/tableware/bottle.py b/infinigen/assets/objects/tableware/bottle.py new file mode 100644 index 000000000..3012e535b --- /dev/null +++ b/infinigen/assets/objects/tableware/bottle.py @@ -0,0 +1,240 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +import bmesh + +# Authors: Lingjie Mei +import bpy +import numpy as np +from numpy.random import uniform + +from infinigen.assets.material_assignments import AssetList +from infinigen.assets.materials import text +from infinigen.assets.utils.decorate import read_co, subdivide_edge_ring, subsurf +from infinigen.assets.utils.draw import spin +from infinigen.assets.utils.object import join_objects, new_cylinder +from infinigen.assets.utils.uv import wrap_front_back +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.util import blender as butil +from infinigen.core.util.math import FixedSeed + + +class BottleFactory(AssetFactory): + z_neck_offset = 0.05 + z_waist_offset = 0.15 + + def __init__(self, factory_seed, coarse=False): + super().__init__(factory_seed, coarse) + with FixedSeed(self.factory_seed): + self.z_length = uniform(0.15, 0.25) + self.x_length = self.z_length * uniform(0.15, 0.25) + self.x_cap = uniform(0.3, 0.35) + self.bottle_type = np.random.choice( + ["beer", "bordeaux", "champagne", "coke", "vintage"] + ) + self.bottle_width = uniform(0.002, 0.005) + self.z_waist = 0 + match self.bottle_type: + case "beer": + self.z_neck = uniform(0.5, 0.6) + self.z_cap = uniform(0.05, 0.08) + neck_size = uniform(0.06, 0.1) + neck_ratio = uniform(0.4, 0.5) + self.x_anchors = [ + 0, + 1, + 1, + (neck_ratio + 1) / 2 + (1 - neck_ratio) / 2 * self.x_cap, + neck_ratio + (1 - neck_ratio) * self.x_cap, + self.x_cap, + self.x_cap, + 0, + ] + self.z_anchors = [ + 0, + 0, + self.z_neck, + self.z_neck + uniform(0.6, 0.7) * neck_size, + self.z_neck + neck_size, + 1 - self.z_cap, + 1, + 1, + ] + self.is_vector = [0, 1, 1, 0, 1, 1, 1, 0] + case "bordeaux": + self.z_neck = uniform(0.6, 0.7) + self.z_cap = uniform(0.1, 0.15) + neck_size = uniform(0.1, 0.15) + self.x_anchors = ( + 0, + 1, + 1, + (1 + self.x_cap) / 2, + self.x_cap, + self.x_cap, + 0, + ) + self.z_anchors = [ + 0, + 0, + self.z_neck, + self.z_neck + uniform(0.6, 0.7) * neck_size, + self.z_neck + neck_size, + 1, + 1, + ] + self.is_vector = [0, 1, 1, 0, 1, 1, 0] + case "champagne": + self.z_neck = uniform(0.4, 0.5) + self.z_cap = uniform(0.05, 0.08) + self.x_anchors = [ + 0, + 1, + 1, + 1, + (1 + self.x_cap) / 2, + self.x_cap, + self.x_cap, + 0, + ] + self.z_anchors = [ + 0, + 0, + self.z_neck, + self.z_neck + uniform(0.08, 0.1), + self.z_neck + uniform(0.15, 0.18), + 1 - self.z_cap, + 1, + 1, + ] + self.is_vector = [0, 1, 1, 0, 0, 1, 1, 0] + case "coke": + self.z_waist = uniform(0.4, 0.5) + self.z_neck = self.z_waist + uniform(0.2, 0.25) + self.z_cap = uniform(0.05, 0.08) + self.x_anchors = [ + 0, + uniform(0.85, 0.95), + 1, + uniform(0.85, 0.95), + 1, + 1, + self.x_cap, + self.x_cap, + 0, + ] + self.z_anchors = [ + 0, + 0, + uniform(0.08, 0.12), + uniform(0.18, 0.25), + self.z_waist, + self.z_neck, + 1 - self.z_cap, + 1, + 1, + ] + self.is_vector = [0, 1, 0, 0, 1, 1, 1, 1, 0] + case "vintage": + self.z_waist = uniform(0.1, 0.15) + self.z_neck = uniform(0.7, 0.75) + self.z_cap = uniform(0.0, 0.08) + x_lower = uniform(0.85, 0.95) + self.x_anchors = [ + 0, + 
x_lower, + (x_lower + 1) / 2, + 1, + 1, + (self.x_cap + 1) / 2, + self.x_cap, + self.x_cap, + 0, + ] + self.z_anchors = [ + 0, + 0, + self.z_waist - uniform(0.1, 0.15), + self.z_waist, + self.z_neck, + self.z_neck + uniform(0.1, 0.2), + 1 - self.z_cap, + 1, + 1, + ] + self.is_vector = [0, 1, 0, 1, 1, 0, 1, 1, 0] + + material_assignments = AssetList["BottleFactory"]() + self.surface = material_assignments["surface"].assign_material() + self.wrap_surface = material_assignments["wrap_surface"].assign_material() + if self.wrap_surface == text.Text: + self.wrap_surface = text.Text(self.factory_seed, False) + + self.cap_surface = material_assignments["cap_surface"].assign_material() + scratch_prob, edge_wear_prob = material_assignments["wear_tear_prob"] + self.scratch, self.edge_wear = material_assignments["wear_tear"] + self.scratch = None if uniform() > scratch_prob else self.scratch + self.edge_wear = None if uniform() > edge_wear_prob else self.edge_wear + + self.texture_shared = uniform() < 0.2 + self.cap_subsurf = uniform() < 0.5 + + def create_asset(self, **params) -> bpy.types.Object: + bottle = self.make_bottle() + wrap = self.make_wrap(bottle) + cap = self.make_cap() + obj = join_objects([bottle, wrap, cap]) + + return obj + + def finalize_assets(self, assets): + if self.scratch: + self.scratch.apply(assets) + if self.edge_wear: + self.edge_wear.apply(assets) + + def make_bottle(self): + x_anchors = np.array(self.x_anchors) * self.x_length + z_anchors = np.array(self.z_anchors) * self.z_length + anchors = x_anchors, 0, z_anchors + obj = spin(anchors, np.nonzero(self.is_vector)[0]) + subsurf(obj, 1, True) + subsurf(obj, 1) + if self.bottle_width > 0: + butil.modify_mesh(obj, "SOLIDIFY", thickness=self.bottle_width) + self.surface.apply(obj, translucent=True) + return obj + + def make_wrap(self, bottle): + obj = new_cylinder(vertices=128) + with butil.ViewportMode(obj, "EDIT"): + bm = bmesh.from_edit_mesh(obj.data) + geom = [f for f in bm.faces if len(f.verts) > 4] + bmesh.ops.delete(bm, geom=geom, context="FACES_ONLY") + bmesh.update_edit_mesh(obj.data) + subdivide_edge_ring(obj, 16) + z_max = self.z_neck - uniform(0.02, self.z_neck_offset) * ( + self.z_neck - self.z_waist + ) + z_min = self.z_waist + uniform(0.02, self.z_waist_offset) * ( + self.z_neck - self.z_waist + ) + radius = np.max(read_co(bottle)[:, 0]) + 2e-3 + obj.scale = radius, radius, (z_max - z_min) * self.z_length + obj.location[-1] = z_min * self.z_length + butil.apply_transform(obj, True) + wrap_front_back(obj, self.wrap_surface, self.texture_shared) + return obj + + def make_cap(self): + obj = new_cylinder(vertices=128) + obj.scale = [ + (self.x_cap + 0.1) * self.x_length, + (self.x_cap + 0.1) * self.x_length, + (self.z_cap + 0.01) * self.z_length, + ] + obj.location[-1] = (1 - self.z_cap) * self.z_length + butil.apply_transform(obj, loc=True) + subsurf(obj, 1, self.cap_subsurf) + self.cap_surface.apply(obj) + return obj diff --git a/infinigen/assets/tableware/bowl.py b/infinigen/assets/objects/tableware/bowl.py similarity index 63% rename from infinigen/assets/tableware/bowl.py rename to infinigen/assets/objects/tableware/bowl.py index f0e3e1f94..2ff8655f2 100644 --- a/infinigen/assets/tableware/bowl.py +++ b/infinigen/assets/objects/tableware/bowl.py @@ -6,13 +6,13 @@ import numpy as np from numpy.random import uniform -from infinigen.assets.tableware.base import TablewareFactory -from infinigen.assets.utils.decorate import subsurf, set_shade_smooth +from infinigen.assets.objects.tableware.base import 
TablewareFactory +from infinigen.assets.utils.decorate import set_shade_smooth, subsurf from infinigen.assets.utils.draw import spin from infinigen.assets.utils.object import new_bbox -from infinigen.core.util.random import log_uniform -from infinigen.core.util.math import FixedSeed from infinigen.core.util import blender as butil +from infinigen.core.util.math import FixedSeed +from infinigen.core.util.random import log_uniform class BowlFactory(TablewareFactory): @@ -21,15 +21,15 @@ class BowlFactory(TablewareFactory): def __init__(self, factory_seed, coarse=False): super().__init__(factory_seed, coarse) with FixedSeed(factory_seed): - self.x_end = .5 - self.z_length = log_uniform(.4, .8) - self.z_bottom = log_uniform(.02, .05) - self.x_bottom = uniform(.2, .3) * self.x_end - self.x_mid = uniform(.8, .95) * self.x_end + self.x_end = 0.5 + self.z_length = log_uniform(0.4, 0.8) + self.z_bottom = log_uniform(0.02, 0.05) + self.x_bottom = uniform(0.2, 0.3) * self.x_end + self.x_mid = uniform(0.8, 0.95) * self.x_end self.has_guard = False - self.thickness = uniform(.01, .03) - self.has_inside = uniform(0, 1) < .5 - self.scale = log_uniform(.15, .4) + self.thickness = uniform(0.01, 0.03) + self.has_inside = uniform(0, 1) < 0.5 + self.scale = log_uniform(0.15, 0.4) self.edge_wear = None def create_placeholder(self, **kwargs) -> bpy.types.Object: @@ -37,13 +37,22 @@ def create_placeholder(self, **kwargs) -> bpy.types.Object: return new_bbox(-radius, radius, -radius, radius, 0, self.z_length * self.scale) def create_asset(self, **params) -> bpy.types.Object: - x_anchors = 0, self.x_bottom, self.x_bottom + 1e-3, self.x_bottom, self.x_mid, self.x_end + x_anchors = ( + 0, + self.x_bottom, + self.x_bottom + 1e-3, + self.x_bottom, + self.x_mid, + self.x_end, + ) z_anchors = 0, 0, 0, self.z_bottom, self.z_length / 2, self.z_length anchors = x_anchors, np.zeros_like(x_anchors), z_anchors obj = spin(anchors, [2, 3], 16, 64) subsurf(obj, 1) self.solidify_with_inside(obj, self.thickness) - butil.modify_mesh(obj, 'BEVEL', width=self.thickness / 2, segments=np.random.randint(2, 5)) + butil.modify_mesh( + obj, "BEVEL", width=self.thickness / 2, segments=np.random.randint(2, 5) + ) obj.scale = [self.scale] * 3 butil.apply_transform(obj) subsurf(obj, 1) diff --git a/infinigen/assets/tableware/can.py b/infinigen/assets/objects/tableware/can.py similarity index 59% rename from infinigen/assets/tableware/can.py rename to infinigen/assets/objects/tableware/can.py index c41d60528..5b53ba126 100644 --- a/infinigen/assets/tableware/can.py +++ b/infinigen/assets/objects/tableware/can.py @@ -1,58 +1,59 @@ # Copyright (c) Princeton University. # This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
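# --- Illustrative usage sketch (not part of the diff above) ---
# Hedged example of driving one of the tableware factories reorganized by this
# diff (here BowlFactory) by hand from Blender's Python; the seed is arbitrary.
# finalize_assets() is the TablewareFactory hook that assigns the sampled
# surfaces and optional wear.
from infinigen.assets.objects.tableware import BowlFactory

fac = BowlFactory(factory_seed=1)
bowl = fac.create_asset()      # revolved profile, solidified with an "inside" attribute
fac.finalize_assets(bowl)      # applies the surface / inside materials sampled in __init__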
+import bmesh + # Authors: Lingjie Mei import bpy -import bmesh import numpy as np import shapely from numpy.random import uniform from shapely import Point, affinity +from infinigen.assets.material_assignments import AssetList +from infinigen.assets.materials import text from infinigen.assets.utils.decorate import write_co from infinigen.assets.utils.object import join_objects, new_circle, new_cylinder from infinigen.assets.utils.uv import wrap_four_sides +from infinigen.core import surface from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler from infinigen.core.placement.factory import AssetFactory -from infinigen.core import surface -from infinigen.assets.materials import text +from infinigen.core.util import blender as butil from infinigen.core.util.math import FixedSeed from infinigen.core.util.random import log_uniform -from infinigen.core.util import blender as butil -from infinigen.assets.material_assignments import AssetList class CanFactory(AssetFactory): def __init__(self, factory_seed, coarse=False): super().__init__(factory_seed, coarse) with FixedSeed(self.factory_seed): - self.x_length = log_uniform(.05, .1) - self.z_length = self.x_length * log_uniform(.5, 2.5) - self.shape = np.random.choice(['circle', 'rectangle']) - self.skewness = uniform(1, 2.5) if uniform() < .5 else 1 - + self.x_length = log_uniform(0.05, 0.1) + self.z_length = self.x_length * log_uniform(0.5, 2.5) + self.shape = np.random.choice(["circle", "rectangle"]) + self.skewness = uniform(1, 2.5) if uniform() < 0.5 else 1 + material_assignments = AssetList["CanFactory"]() self.surface = material_assignments["surface"].assign_material() self.wrap_surface = material_assignments["wrap_surface"].assign_material() if self.wrap_surface == text.Text: self.wrap_surface = text.Text(self.factory_seed, False) - + scratch_prob, edge_wear_prob = material_assignments["wear_tear_prob"] self.scratch, self.edge_wear = material_assignments["wear_tear"] self.scratch = None if uniform() > scratch_prob else self.scratch self.edge_wear = None if uniform() > edge_wear_prob else self.edge_wear - - self.texture_shared = uniform() < .2 + + self.texture_shared = uniform() < 0.2 def create_asset(self, **params) -> bpy.types.Object: coords = self.make_coords() obj = new_circle(vertices=len(coords)) write_co(obj, np.array([[x, y, 0] for x, y in coords])) - with butil.ViewportMode(obj, 'EDIT'): - bpy.ops.mesh.select_mode(type='EDGE') - bpy.ops.mesh.select_all(action='SELECT') + with butil.ViewportMode(obj, "EDIT"): + bpy.ops.mesh.select_mode(type="EDGE") + bpy.ops.mesh.select_all(action="SELECT") bpy.ops.mesh.edge_face_add() - butil.modify_mesh(obj, 'SOLIDIFY', thickness=self.z_length) + butil.modify_mesh(obj, "SOLIDIFY", thickness=self.z_length) surface.add_geomod(obj, self.geo_cap, apply=True) self.surface.apply(obj) wrap = self.make_wrap(coords) @@ -61,36 +62,60 @@ def create_asset(self, **params) -> bpy.types.Object: @staticmethod def geo_cap(nw: NodeWrangler): - geometry = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketGeometry', 'Geometry', None)]) - selection = nw.compare('GREATER_THAN', - nw.math('ABSOLUTE', nw.separate(nw.new_node(Nodes.InputNormal))[-1]), 1 - 1e-3) - geometry, top = nw.new_node(Nodes.ExtrudeMesh, [geometry, selection, None, 0]).outputs[:2] - geometry = nw.new_node(Nodes.ScaleElements, - input_kwargs={'Geometry': geometry, 'Selection': top, 'Scale': uniform(.96, .98) - }) - geometry = nw.new_node(Nodes.ExtrudeMesh, [geometry, top, None, -uniform(.005, .01)]).outputs[0] - 
nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': geometry}) + geometry = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) + selection = nw.compare( + "GREATER_THAN", + nw.math("ABSOLUTE", nw.separate(nw.new_node(Nodes.InputNormal))[-1]), + 1 - 1e-3, + ) + geometry, top = nw.new_node( + Nodes.ExtrudeMesh, [geometry, selection, None, 0] + ).outputs[:2] + geometry = nw.new_node( + Nodes.ScaleElements, + input_kwargs={ + "Geometry": geometry, + "Selection": top, + "Scale": uniform(0.96, 0.98), + }, + ) + geometry = nw.new_node( + Nodes.ExtrudeMesh, [geometry, top, None, -uniform(0.005, 0.01)] + ).outputs[0] + nw.new_node(Nodes.GroupOutput, input_kwargs={"Geometry": geometry}) def make_coords(self): match self.shape: - case 'circle': + case "circle": p = Point(0, 0).buffer(self.x_length, quad_segs=64) case _: - side = self.x_length * uniform(.2, .8) - p = shapely.box(-side, -side, side, side).buffer(self.x_length - side, quad_segs=16) + side = self.x_length * uniform(0.2, 0.8) + p = shapely.box(-side, -side, side, side).buffer( + self.x_length - side, quad_segs=16 + ) p = affinity.scale(p, yfact=1 / self.skewness) - coords = p.boundary.segmentize(.01).coords[:][:-1] + coords = p.boundary.segmentize(0.01).coords[:][:-1] return coords def make_wrap(self, coords): obj = new_cylinder(vertices=len(coords)) - with butil.ViewportMode(obj, 'EDIT'): + with butil.ViewportMode(obj, "EDIT"): bm = bmesh.from_edit_mesh(obj.data) geom = [f for f in bm.faces if len(f.verts) > 4] - bmesh.ops.delete(bm, geom=geom, context='FACES_ONLY') + bmesh.ops.delete(bm, geom=geom, context="FACES_ONLY") bmesh.update_edit_mesh(obj.data) - lowest, highest = self.z_length * uniform(0, .1), self.z_length * uniform(.9, 1.) - write_co(obj, np.concatenate([np.array([[x, y, lowest], [x, y, highest]]) for x, y in coords])) + lowest, highest = ( + self.z_length * uniform(0, 0.1), + self.z_length * uniform(0.9, 1.0), + ) + write_co( + obj, + np.concatenate( + [np.array([[x, y, lowest], [x, y, highest]]) for x, y in coords] + ), + ) obj.scale = 1 + 1e-3, 1 + 1e-3, 1 butil.apply_transform(obj) wrap_four_sides(obj, self.wrap_surface, self.texture_shared) diff --git a/infinigen/assets/tableware/chopsticks.py b/infinigen/assets/objects/tableware/chopsticks.py similarity index 73% rename from infinigen/assets/tableware/chopsticks.py rename to infinigen/assets/objects/tableware/chopsticks.py index 0eef1c331..6c3600607 100644 --- a/infinigen/assets/tableware/chopsticks.py +++ b/infinigen/assets/objects/tableware/chopsticks.py @@ -6,43 +6,39 @@ import numpy as np from numpy.random import uniform -from infinigen.assets.tableware.base import TablewareFactory +from infinigen.assets.objects.tableware.base import TablewareFactory from infinigen.assets.utils.decorate import subsurf, write_co -from infinigen.core.util.random import log_uniform from infinigen.assets.utils.object import join_objects, new_grid -from infinigen.core.nodes.node_info import Nodes -from infinigen.core.nodes.node_wrangler import NodeWrangler -from infinigen.core import surface +from infinigen.core.util import blender as butil from infinigen.core.util.blender import deep_clone_obj from infinigen.core.util.math import FixedSeed - -from infinigen.core.util import blender as butil +from infinigen.core.util.random import log_uniform class ChopsticksFactory(TablewareFactory): def __init__(self, factory_seed, coarse=False): super().__init__(factory_seed, coarse) with FixedSeed(factory_seed): - self.y_length = uniform(.01, .02) - 
self.y_shrink = log_uniform(.2, .8) - self.is_square = uniform(0, 1) < .5 - self.has_guard = uniform(0, 1) < .4 - self.x_guard = uniform(.4, .9) - self.guard_depth = 0. + self.y_length = uniform(0.01, 0.02) + self.y_shrink = log_uniform(0.2, 0.8) + self.is_square = uniform(0, 1) < 0.5 + self.has_guard = uniform(0, 1) < 0.4 + self.x_guard = uniform(0.4, 0.9) + self.guard_depth = 0.0 self.pre_level = 2 - self.scale = log_uniform(.2, .4) + self.scale = log_uniform(0.2, 0.4) def create_asset(self, **params) -> bpy.types.Object: obj = self.make_single() - if uniform(0, 1) < .6: + if uniform(0, 1) < 0.6: obj = self.make_parallel(obj) else: obj = self.make_crossed(obj) return obj def make_parallel(self, obj): - distance = log_uniform(self.y_length, .04) - if uniform(0, 1) < .5: + distance = log_uniform(self.y_length, 0.04) + if uniform(0, 1) < 0.5: other = deep_clone_obj(obj) obj.location[1] = distance obj.rotation_euler[-1] = uniform(0, np.pi / 8) @@ -60,7 +56,7 @@ def make_parallel(self, obj): def make_crossed(self, obj): other = deep_clone_obj(obj) - other.location = uniform(-.1, .2), uniform(-.2, .2), self.y_length + other.location = uniform(-0.1, 0.2), uniform(-0.2, 0.2), self.y_length sign = np.sign(other.location[1]) other.rotation_euler[-1] = -sign * log_uniform(np.pi / 8, np.pi / 4) return join_objects([obj, other]) @@ -68,14 +64,14 @@ def make_crossed(self, obj): def make_single(self): n = int(1 / self.y_length) obj = new_grid(x_subdivisions=n - 1, y_subdivisions=1) - butil.modify_mesh(obj, 'SOLIDIFY', thickness=self.y_length * 2) + butil.modify_mesh(obj, "SOLIDIFY", thickness=self.y_length * 2) l = np.linspace(self.y_shrink, 1, n) * self.y_length x = np.concatenate([np.linspace(0, 1, n)] * 4) y = np.concatenate([-l, l, -l, l]) z = np.concatenate([l, l, -l, -l]) write_co(obj, np.stack([x, y, z], -1)) subsurf(obj, 2, self.is_square) - self.add_guard(obj, lambda nw, x: nw.compare('GREATER_THAN', x, self.x_guard)) + self.add_guard(obj, lambda nw, x: nw.compare("GREATER_THAN", x, self.x_guard)) obj.scale = [self.scale] * 3 butil.apply_transform(obj) return obj diff --git a/infinigen/assets/objects/tableware/cup.py b/infinigen/assets/objects/tableware/cup.py new file mode 100644 index 000000000..7cb3cef12 --- /dev/null +++ b/infinigen/assets/objects/tableware/cup.py @@ -0,0 +1,176 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
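# --- Illustrative usage sketch (not part of the diff above) ---
# The guard pattern used by several tableware factories: a selection lambda is
# passed to TablewareFactory.add_guard and evaluated as a geometry-nodes field
# via surface.eval_argument. ChopsticksFactory (above) restricts the guard to
# x > self.x_guard; the hand-driven call below is an assumption for the sketch.
from infinigen.assets.objects.tableware import ChopsticksFactory

fac = ChopsticksFactory(factory_seed=2)
sticks = fac.create_asset()    # one chopstick duplicated into a parallel or crossed pair
fac.finalize_assets(sticks)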
+ +# Authors: Lingjie Mei +import bpy +import numpy as np +from numpy.random import uniform + +from infinigen.assets.material_assignments import AssetList +from infinigen.assets.materials import text +from infinigen.assets.objects.tableware.base import TablewareFactory +from infinigen.assets.utils.decorate import ( + read_co, + remove_vertices, + subsurf, + write_attribute, +) +from infinigen.assets.utils.draw import spin +from infinigen.assets.utils.object import join_objects +from infinigen.assets.utils.uv import wrap_sides +from infinigen.core.util import blender as butil +from infinigen.core.util.blender import deep_clone_obj +from infinigen.core.util.math import FixedSeed +from infinigen.core.util.random import log_uniform + + +class CupFactory(TablewareFactory): + allow_transparent = True + + def __init__(self, factory_seed, coarse=False): + super().__init__(factory_seed, coarse) + with FixedSeed(factory_seed): + self.x_end = 0.25 + self.is_short = uniform(0, 1) < 0.5 + if self.is_short: + self.is_profile_straight = uniform(0, 1) < 0.2 + self.x_lowest = log_uniform(0.6, 0.9) + self.depth = log_uniform(0.25, 0.5) + self.has_guard = uniform(0, 1) < 0.8 + else: + self.is_profile_straight = True + self.x_lowest = log_uniform(0.9, 1.0) + self.depth = log_uniform(0.5, 1.0) + self.has_guard = False + if self.is_profile_straight: + self.handle_location = uniform(0.45, 0.65) + else: + self.handle_location = uniform(-0.1, 0.3) + self.handle_type = "shear" if uniform(0, 1) < 0.5 else "round" + self.handle_radius = self.depth * uniform(0.2, 0.4) + self.handle_inner_radius = self.handle_radius * log_uniform(0.2, 0.3) + self.handle_taper_x = uniform(0, 2) + self.handle_taper_y = uniform(0, 2) + self.x_lower_ratio = log_uniform(0.8, 1.0) + self.thickness = log_uniform(0.01, 0.04) + self.has_wrap = uniform() < 0.3 + self.has_wrap = True + self.wrap_margin = uniform(0.1, 0.2) + + material_assignments = AssetList["CupFactory"]() + self.surface = material_assignments["surface"].assign_material() + self.wrap_surface = material_assignments["wrap_surface"].assign_material() + if self.wrap_surface == text.Text: + self.wrap_surface = text.Text(self.factory_seed, False) + self.scratch = self.edge_wear = None + + self.has_inside = uniform(0, 1) < 0.5 + self.scale = log_uniform(0.15, 0.3) + + def create_asset(self, **params) -> bpy.types.Object: + if self.is_profile_straight: + x_anchors = 0, self.x_lowest * self.x_end, self.x_end + z_anchors = 0, 0, self.depth + else: + x_anchors = ( + 0, + self.x_lowest * self.x_end, + (self.x_lowest + self.x_lower_ratio * (1 - self.x_lowest)) * self.x_end, + self.x_end, + ) + z_anchors = 0, 0, self.depth * 0.5, self.depth + anchors = x_anchors, np.zeros_like(x_anchors), z_anchors + obj = spin(anchors, [1], 16) + subsurf(obj, 1) + butil.modify_mesh( + obj, + "BEVEL", + True, + offset_type="PERCENT", + width_pct=uniform(10, 50), + segments=8, + ) + if self.has_wrap: + wrap = self.make_wrap(obj) + else: + wrap = None + self.solidify_with_inside(obj, self.thickness) + handle_location = ( + x_anchors[-2] * (1 - self.handle_location) + + x_anchors[-1] * self.handle_location, + 0, + z_anchors[-2] * (1 - self.handle_location) + + z_anchors[-1] * self.handle_location, + ) + angle_low = np.arctan( + (x_anchors[-1] - x_anchors[-2]) / (z_anchors[-1] - z_anchors[-2]) + ) + angle_height = np.arctan( + (x_anchors[2] - x_anchors[1]) / (z_anchors[2] - z_anchors[1]) + ) + handle_angle = uniform(angle_low, angle_height + 1e-3) + if self.has_guard: + obj = self.add_handle(obj, handle_location, 
handle_angle) + if self.has_wrap: + butil.select_none() + obj = join_objects([obj, wrap]) + obj.scale = [self.scale] * 3 + butil.apply_transform(obj) + return obj + + def add_handle(self, obj, handle_location, handle_angle): + bpy.ops.mesh.primitive_torus_add( + location=handle_location, + major_radius=self.handle_radius, + minor_radius=self.handle_inner_radius, + ) + handle = bpy.context.active_object + handle.rotation_euler = np.pi / 2, handle_angle, 0 + butil.modify_mesh( + handle, + "SIMPLE_DEFORM", + deform_method="TAPER", + angle=self.handle_taper_x, + deform_axis="X", + ) + butil.modify_mesh( + handle, + "SIMPLE_DEFORM", + deform_method="TAPER", + angle=self.handle_taper_y, + deform_axis="Y", + ) + butil.modify_mesh(handle, "BOOLEAN", object=obj, operation="DIFFERENCE") + butil.select_none() + objs = butil.split_object(handle) + i = np.argmax([np.max(read_co(o)[:, 0]) for o in objs]) + handle = objs[i] + objs.remove(handle) + butil.delete(objs) + subsurf(handle, 1) + write_attribute(handle, lambda nw: 1, "guard", "FACE") + return join_objects([obj, handle]) + + def make_wrap(self, obj): + butil.select_none() + obj = deep_clone_obj(obj) + remove_vertices( + obj, + lambda x, y, z: (z / self.depth < self.wrap_margin) + | (z / self.depth > 1 - self.wrap_margin + uniform(0.0, 0.1)) + | (np.abs(np.arctan2(y, x)) < np.pi * self.wrap_margin), + ) + obj.scale = 1 + 1e-2, 1 + 1e-2, 1 + butil.apply_transform(obj) + write_attribute(obj, lambda nw: 1, "text", "FACE") + return obj + + def finalize_assets(self, assets): + super().finalize_assets(assets) + if self.has_wrap: + for obj in assets if isinstance(assets, list) else [assets]: + wrap_sides(obj, self.wrap_surface, "u", "v", "z", selection="text") + if self.scratch: + self.scratch.apply(assets) + if self.edge_wear: + self.edge_wear.apply(assets) diff --git a/infinigen/assets/tableware/food_bag.py b/infinigen/assets/objects/tableware/food_bag.py similarity index 70% rename from infinigen/assets/tableware/food_bag.py rename to infinigen/assets/objects/tableware/food_bag.py index 68e986d63..17f095fa7 100644 --- a/infinigen/assets/tableware/food_bag.py +++ b/infinigen/assets/objects/tableware/food_bag.py @@ -1,51 +1,60 @@ # Copyright (c) Princeton University. # This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
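+
+# FoodBagFactory models a sealed packet or bag: it curves the profile of a base
+# cylinder, welds the flattened ends shut, extrudes a seal strip at the top and
+# bottom, and builds UVs for a front/back label texture before a small
+# noise-based surface extension is applied.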
+import bmesh + # Authors: Lingjie Mei import bpy -import bmesh import numpy as np from numpy.random import uniform +from infinigen.assets.material_assignments import AssetList from infinigen.assets.materials import text -from infinigen.assets.utils.decorate import geo_extension, read_co, subdivide_edge_ring, subsurf, write_co +from infinigen.assets.utils.decorate import ( + geo_extension, + read_co, + subdivide_edge_ring, + subsurf, + write_co, +) from infinigen.assets.utils.object import new_base_cylinder from infinigen.assets.utils.uv import wrap_front_back from infinigen.core import surface from infinigen.core.placement.factory import AssetFactory -from infinigen.core.util.math import FixedSeed from infinigen.core.util import blender as butil +from infinigen.core.util.math import FixedSeed from infinigen.core.util.random import log_uniform -from infinigen.assets.material_assignments import AssetList class FoodBagFactory(AssetFactory): def __init__(self, factory_seed, coarse=False): super().__init__(factory_seed, coarse) with FixedSeed(self.factory_seed): - self.length = uniform(.1, .3) - self.is_packet = uniform() < .6 + self.length = uniform(0.1, 0.3) + self.is_packet = uniform() < 0.6 if self.is_packet: - self.width = self.length * log_uniform(.6, 1.) - self.depth = self.width * uniform(.5, .8) + self.width = self.length * log_uniform(0.6, 1.0) + self.depth = self.width * uniform(0.5, 0.8) self.curve_profile = uniform(2, 4) else: - self.width = self.length * log_uniform(.2, .4) - self.depth = self.width * uniform(.6, 1.) + self.width = self.length * log_uniform(0.2, 0.4) + self.depth = self.width * uniform(0.6, 1.0) self.curve_profile = uniform(4, 8) - self.extrude_length = uniform(.05, .1) + self.extrude_length = uniform(0.05, 0.1) material_assignments = AssetList["FoodBagFactory"]() self.surface = material_assignments["surface"].assign_material() if self.surface == text.Text: self.surface = self.surface(self.factory_seed) - self.texture_shared = uniform() < .2 + self.texture_shared = uniform() < 0.2 def create_asset(self, **params) -> bpy.types.Object: obj = self.make_base() self.add_seal(obj) self.build_uv(obj) subsurf(obj, 2) - surface.add_geomod(obj, geo_extension, input_kwargs={'musgrave_dimensions': '2D'}, apply=True) + surface.add_geomod( + obj, geo_extension, input_kwargs={"musgrave_dimensions": "2D"}, apply=True + ) return obj def make_base(self): @@ -56,22 +65,27 @@ def make_base(self): x, y, z = read_co(obj).T ratio = 1 - (2 * np.abs(z) / self.length) ** self.curve_profile write_co(obj, np.stack([x, ratio * y, z], -1)) - butil.modify_mesh(obj, 'WELD', merge_threshold=1e-3) + butil.modify_mesh(obj, "WELD", merge_threshold=1e-3) return obj def add_seal(self, obj): - with butil.ViewportMode(obj, 'EDIT'): + with butil.ViewportMode(obj, "EDIT"): bm = bmesh.from_edit_mesh(obj.data) for i in [-1, 1]: - bpy.ops.mesh.select_all(action='DESELECT') + bpy.ops.mesh.select_all(action="DESELECT") bm.verts.ensure_lookup_table() - indices = np.nonzero(read_co(obj)[:, -1] * i >= self.length / 2 - 1e-3)[0] + indices = np.nonzero(read_co(obj)[:, -1] * i >= self.length / 2 - 1e-3)[ + 0 + ] for idx in indices: bm.verts[idx].select_set(True) bm.select_flush(False) bmesh.update_edit_mesh(obj.data) bpy.ops.mesh.extrude_edges_move( - TRANSFORM_OT_translate={'value': (0, 0, self.extrude_length * self.length * i)}) + TRANSFORM_OT_translate={ + "value": (0, 0, self.extrude_length * self.length * i) + } + ) def build_uv(self, obj): if not self.is_packet: diff --git 
a/infinigen/assets/tableware/food_box.py b/infinigen/assets/objects/tableware/food_box.py similarity index 87% rename from infinigen/assets/tableware/food_box.py rename to infinigen/assets/objects/tableware/food_box.py index e0b645d7a..d0a282d0b 100644 --- a/infinigen/assets/tableware/food_box.py +++ b/infinigen/assets/objects/tableware/food_box.py @@ -10,20 +10,20 @@ from infinigen.assets.utils.object import new_cube from infinigen.assets.utils.uv import wrap_six_sides from infinigen.core.placement.factory import AssetFactory +from infinigen.core.util import blender as butil from infinigen.core.util.math import FixedSeed from infinigen.core.util.random import log_uniform -from infinigen.core.util import blender as butil class FoodBoxFactory(AssetFactory): def __init__(self, factory_seed, coarse=False): super().__init__(factory_seed, coarse) with FixedSeed(self.factory_seed): - dimensions = np.sort(log_uniform(.05, .3, 3)).tolist() + dimensions = np.sort(log_uniform(0.05, 0.3, 3)).tolist() self.dimensions = np.array([dimensions[1], dimensions[0], dimensions[2]]) self.surface = text.Text(self.factory_seed) - self.texture_shared = uniform() < .4 - + self.texture_shared = uniform() < 0.4 + def create_placeholder(self, **params): obj = new_cube() obj.scale = self.dimensions / 2 @@ -33,5 +33,5 @@ def create_placeholder(self, **params): def create_asset(self, placeholder, **params) -> bpy.types.Object: obj = butil.copy(placeholder) wrap_six_sides(obj, self.surface, self.texture_shared) - butil.modify_mesh(obj, 'BEVEL', width=.001) + butil.modify_mesh(obj, "BEVEL", width=0.001) return obj diff --git a/infinigen/assets/tableware/fork.py b/infinigen/assets/objects/tableware/fork.py similarity index 57% rename from infinigen/assets/tableware/fork.py rename to infinigen/assets/objects/tableware/fork.py index 86c3e949b..43f8ea862 100644 --- a/infinigen/assets/tableware/fork.py +++ b/infinigen/assets/objects/tableware/fork.py @@ -1,49 +1,76 @@ # Copyright (c) Princeton University. # This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. +import bmesh + # Authors: Lingjie Mei import bpy -import bmesh import numpy as np from numpy.random import uniform from infinigen.assets.utils.decorate import subsurf, write_co +from infinigen.assets.utils.object import new_grid +from infinigen.core.util import blender as butil +from infinigen.core.util.math import FixedSeed from infinigen.core.util.random import log_uniform + from .base import TablewareFactory -from infinigen.core.util.math import FixedSeed -from infinigen.core.util import blender as butil -from infinigen.assets.utils.object import new_grid class ForkFactory(TablewareFactory): - x_end = .15 + x_end = 0.15 is_fragile = True def __init__(self, factory_seed, coarse=False): super().__init__(factory_seed, coarse) with FixedSeed(factory_seed): - self.x_length = log_uniform(.4, .8) - self.x_tip = uniform(.15, .2) - self.y_length = log_uniform(.05, .08) - self.z_depth = log_uniform(.02, .04) - self.z_offset = uniform(.0, .05) - self.thickness = log_uniform(.008, .015) - self.has_guard = uniform(0, 1) < .4 - self.guard_type = 'round' if uniform(0, 1) < .6 else 'double' - self.n_cuts = np.random.randint(1, 3) if uniform(0, 1) < .3 else 3 - self.guard_depth = log_uniform(.2, 1.) 
* self.thickness - self.scale = log_uniform(.15, .25) + self.x_length = log_uniform(0.4, 0.8) + self.x_tip = uniform(0.15, 0.2) + self.y_length = log_uniform(0.05, 0.08) + self.z_depth = log_uniform(0.02, 0.04) + self.z_offset = uniform(0.0, 0.05) + self.thickness = log_uniform(0.008, 0.015) + self.has_guard = uniform(0, 1) < 0.4 + self.guard_type = "round" if uniform(0, 1) < 0.6 else "double" + self.n_cuts = np.random.randint(1, 3) if uniform(0, 1) < 0.3 else 3 + self.guard_depth = log_uniform(0.2, 1.0) * self.thickness + self.scale = log_uniform(0.15, 0.25) self.has_cut = True def create_asset(self, **params) -> bpy.types.Object: x_anchors = np.array( - [self.x_tip, uniform(-.04, -.02), -.08, -.12, -self.x_end, -self.x_end - self.x_length, - -self.x_end - self.x_length * log_uniform(1.2, 1.4)]) - y_anchors = np.array([self.y_length * log_uniform(.8, 1.), self.y_length * log_uniform(1., 1.2), - self.y_length * log_uniform(.6, 1.), self.y_length * log_uniform(.2, .4), - log_uniform(.01, .02), log_uniform(.02, .05), log_uniform(.01, .02)]) - z_anchors = np.array([0, -self.z_depth, -self.z_depth, 0, self.z_offset, self.z_offset + uniform(-.02, .04), - self.z_offset + uniform(-.02, 0)]) + [ + self.x_tip, + uniform(-0.04, -0.02), + -0.08, + -0.12, + -self.x_end, + -self.x_end - self.x_length, + -self.x_end - self.x_length * log_uniform(1.2, 1.4), + ] + ) + y_anchors = np.array( + [ + self.y_length * log_uniform(0.8, 1.0), + self.y_length * log_uniform(1.0, 1.2), + self.y_length * log_uniform(0.6, 1.0), + self.y_length * log_uniform(0.2, 0.4), + log_uniform(0.01, 0.02), + log_uniform(0.02, 0.05), + log_uniform(0.01, 0.02), + ] + ) + z_anchors = np.array( + [ + 0, + -self.z_depth, + -self.z_depth, + 0, + self.z_offset, + self.z_offset + uniform(-0.02, 0.04), + self.z_offset + uniform(-0.02, 0), + ] + ) n = 2 * (self.n_cuts + 1) obj = new_grid(x_subdivisions=len(x_anchors) - 1, y_subdivisions=n - 1) x = np.concatenate([x_anchors] * n) @@ -52,10 +79,13 @@ def create_asset(self, **params) -> bpy.types.Object: write_co(obj, np.stack([x, y, z], -1)) if self.has_cut: self.make_cuts(obj) - butil.modify_mesh(obj, 'SOLIDIFY', thickness=self.thickness) + butil.modify_mesh(obj, "SOLIDIFY", thickness=self.thickness) subsurf(obj, 1) - selection = lambda nw, x: nw.compare('LESS_THAN', x, -self.x_end) - if self.guard_type == 'double': + + def selection(nw, x): + return nw.compare("LESS_THAN", x, -self.x_end) + + if self.guard_type == "double": selection = self.make_double_sided(selection) self.add_guard(obj, selection) subsurf(obj, 1) @@ -64,7 +94,7 @@ def create_asset(self, **params) -> bpy.types.Object: return obj def make_cuts(self, obj): - with butil.ViewportMode(obj, 'EDIT'): + with butil.ViewportMode(obj, "EDIT"): bm = bmesh.from_edit_mesh(obj.data) front_verts = [] for v in bm.verts: @@ -85,5 +115,5 @@ class SpatulaFactory(ForkFactory): def __init__(self, factory_seed, coarse=False): super(SpatulaFactory, self).__init__(factory_seed, coarse) self.has_cut = False - self.z_depth = uniform(0, .05) - self.y_length = log_uniform(.08, .12) + self.z_depth = uniform(0, 0.05) + self.y_length = log_uniform(0.08, 0.12) diff --git a/infinigen/assets/tableware/fruit_container.py b/infinigen/assets/objects/tableware/fruit_container.py similarity index 60% rename from infinigen/assets/tableware/fruit_container.py rename to infinigen/assets/objects/tableware/fruit_container.py index 29b5638e1..0461976e8 100644 --- a/infinigen/assets/tableware/fruit_container.py +++ 
b/infinigen/assets/objects/tableware/fruit_container.py @@ -2,7 +2,7 @@ # This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. # Authors: Lingjie Mei -from collections.abc import Iterable, Sequence +from collections.abc import Iterable from functools import cached_property from statistics import mean @@ -10,34 +10,49 @@ import numpy as np from numpy.random import uniform -from infinigen.assets.fruits.general_fruit import FruitFactoryGeneralFruit -from infinigen.assets.tableware import BowlFactory, PotFactory +from infinigen.assets.objects.fruits.general_fruit import FruitFactoryGeneralFruit +from infinigen.assets.objects.tableware.bowl import BowlFactory +from infinigen.assets.objects.tableware.pot import PotFactory from infinigen.assets.utils.decorate import read_co, write_co from infinigen.assets.utils.misc import make_normalized_factory, subclasses from infinigen.core.placement.factory import AssetFactory, make_asset_collection from infinigen.core.placement.instance_scatter import scatter_instances +from infinigen.core.util import blender as butil from infinigen.core.util.math import FixedSeed -from infinigen.core.util import blender as butil class FruitCover: def __init__(self, factory_seed=0): with FixedSeed(factory_seed): - fruit_factory_fns = list(subclasses(FruitFactoryGeneralFruit).difference([FruitFactoryGeneralFruit])) - fruit_factory_fn = make_normalized_factory(np.random.choice(fruit_factory_fns)) - self.col = make_asset_collection(fruit_factory_fn(np.random.randint(1e5)), name='fruit', n=5) + fruit_factory_fns = list( + subclasses(FruitFactoryGeneralFruit).difference( + [FruitFactoryGeneralFruit] + ) + ) + fruit_factory_fn = make_normalized_factory( + np.random.choice(fruit_factory_fns) + ) + self.col = make_asset_collection( + fruit_factory_fn(np.random.randint(1e5)), name="fruit", n=5 + ) self.dimension = mean(mean(o.dimensions) for o in self.col.objects) - self.shrink_rate = max(self.dimension, 2.) 
+ self.shrink_rate = max(self.dimension, 2.0) def apply(self, obj, selection=None): for obj in obj if isinstance(obj, Iterable) else [obj]: - scale = uniform(.06, .08) / self.shrink_rate + scale = uniform(0.06, 0.08) / self.shrink_rate scatter_instances( - base_obj=obj, collection=self.col, density=1e3, - min_spacing=scale * self.dimension * uniform(.5, .7), scale=scale, - scale_rand=uniform(0.1, 0.3), selection=selection, - ground_offset=self.dimension * .2 * scale, apply_geo=True, realize=True - ) + base_obj=obj, + collection=self.col, + density=1e3, + min_spacing=scale * self.dimension * uniform(0.5, 0.7), + scale=scale, + scale_rand=uniform(0.1, 0.3), + selection=selection, + ground_offset=self.dimension * 0.2 * scale, + apply_geo=True, + realize=True, + ) class FruitContainerFactory(AssetFactory): @@ -57,8 +72,8 @@ def cover(self): def create_placeholder(self, **params): box = self.base_factory.create_placeholder(**params) co = read_co(box) - co[co[:, -1] > .02, -1] += .05 - co[co[:, -1] < .02, -1] -= .01 + co[co[:, -1] > 0.02, -1] += 0.05 + co[co[:, -1] < 0.02, -1] -= 0.01 write_co(box, co) butil.apply_transform(box) return box @@ -68,4 +83,4 @@ def create_asset(self, **params) -> bpy.types.Object: def finalize_assets(self, assets): self.base_factory.finalize_assets(assets) - self.cover.apply(assets, selection='lower_inside') + self.cover.apply(assets, selection="lower_inside") diff --git a/infinigen/assets/tableware/jar.py b/infinigen/assets/objects/tableware/jar.py similarity index 61% rename from infinigen/assets/tableware/jar.py rename to infinigen/assets/objects/tableware/jar.py index 79a1e9598..38fadfaf7 100644 --- a/infinigen/assets/tableware/jar.py +++ b/infinigen/assets/objects/tableware/jar.py @@ -1,30 +1,32 @@ # Copyright (c) Princeton University. # This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
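+
+# JarFactory starts from an n-sided cylinder, removes the top faces, bridges the
+# opening to a smaller circular neck, extrudes a short rim, solidifies the wall,
+# and adds a separate cylindrical cap tagged with a "cap" attribute so it can
+# take its own material.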
+import bmesh + # Authors: Lingjie Mei import bpy -import bmesh import numpy as np from numpy.random import uniform -from infinigen.assets.utils.decorate import read_co, subsurf, write_attribute +from infinigen.assets.material_assignments import AssetList +from infinigen.assets.utils.decorate import subsurf, write_attribute from infinigen.assets.utils.object import join_objects, new_circle, new_cylinder from infinigen.core.placement.factory import AssetFactory -from infinigen.core.util.math import FixedSeed from infinigen.core.util import blender as butil -from infinigen.assets.material_assignments import AssetList +from infinigen.core.util.math import FixedSeed + class JarFactory(AssetFactory): def __init__(self, factory_seed, coarse=False): super().__init__(factory_seed, coarse) with FixedSeed(self.factory_seed): - self.z_length = uniform(.15, .2) - self.x_length = uniform(.03, .06) - self.thickness = uniform(.002, .004) + self.z_length = uniform(0.15, 0.2) + self.x_length = uniform(0.03, 0.06) + self.thickness = uniform(0.002, 0.004) self.n_base = np.random.choice([4, 6, 64]) - self.x_cap = uniform(.6, .9) * np.cos(np.pi / self.n_base) - self.z_cap = uniform(.05, .08) - self.z_neck = uniform(.15, .2) + self.x_cap = uniform(0.6, 0.9) * np.cos(np.pi / self.n_base) + self.z_cap = uniform(0.05, 0.08) + self.z_neck = uniform(0.15, 0.2) material_assignments = AssetList["JarFactory"]() self.surface = material_assignments["surface"].assign_material() @@ -34,19 +36,19 @@ def __init__(self, factory_seed, coarse=False): self.scratch = None if uniform() > scratch_prob else self.scratch self.edge_wear = None if uniform() > edge_wear_prob else self.edge_wear - self.cap_subsurf = uniform() < .5 + self.cap_subsurf = uniform() < 0.5 def create_asset(self, **params) -> bpy.types.Object: obj = new_cylinder(vertices=self.n_base) obj.scale = self.x_length, self.x_length, self.z_length butil.apply_transform(obj, True) - with butil.ViewportMode(obj, 'EDIT'): + with butil.ViewportMode(obj, "EDIT"): bm = bmesh.from_edit_mesh(obj.data) - geom = [f for f in bm.faces if f.normal[-1] > .5] - bmesh.ops.delete(bm, geom=geom, context='FACES_KEEP_BOUNDARY') + geom = [f for f in bm.faces if f.normal[-1] > 0.5] + bmesh.ops.delete(bm, geom=geom, context="FACES_KEEP_BOUNDARY") bmesh.update_edit_mesh(obj.data) - bpy.ops.mesh.select_mode(type='EDGE') - bpy.ops.mesh.select_all(action='SELECT') + bpy.ops.mesh.select_mode(type="EDGE") + bpy.ops.mesh.select_all(action="SELECT") bpy.ops.mesh.region_to_loop() subsurf(obj, 2, True) top = new_circle(location=(0, 0, 0)) @@ -55,28 +57,36 @@ def create_asset(self, **params) -> bpy.types.Object: butil.apply_transform(top) butil.select_none() obj = join_objects([obj, top]) - with butil.ViewportMode(obj, 'EDIT'): - bpy.ops.mesh.select_mode(type='EDGE') - bpy.ops.mesh.bridge_edge_loops(number_cuts=5, profile_shape_factor=uniform(0, .1)) - bpy.ops.mesh.select_all(action='SELECT') + with butil.ViewportMode(obj, "EDIT"): + bpy.ops.mesh.select_mode(type="EDGE") + bpy.ops.mesh.bridge_edge_loops( + number_cuts=5, profile_shape_factor=uniform(0, 0.1) + ) + bpy.ops.mesh.select_all(action="SELECT") bpy.ops.mesh.region_to_loop() bpy.ops.mesh.extrude_edges_move( - TRANSFORM_OT_translate={'value': (0, 0, self.z_cap * self.z_length)}) + TRANSFORM_OT_translate={"value": (0, 0, self.z_cap * self.z_length)} + ) subsurf(obj, 1) - butil.modify_mesh(obj, 'SOLIDIFY', thickness=self.thickness) + butil.modify_mesh(obj, "SOLIDIFY", thickness=self.thickness) cap = new_cylinder(vertices=64) - cap.scale = 
*([self.x_cap * self.x_length + 1e-3] * 2), self.z_cap * self.z_length - cap.location[-1] = (1 + self.z_neck + self.z_cap * uniform(.5, .8)) * self.z_length + cap.scale = ( + *([self.x_cap * self.x_length + 1e-3] * 2), + self.z_cap * self.z_length, + ) + cap.location[-1] = ( + 1 + self.z_neck + self.z_cap * uniform(0.5, 0.8) + ) * self.z_length butil.apply_transform(cap, True) subsurf(obj, 1, self.cap_subsurf) - write_attribute(cap, 1, 'cap', 'FACE') + write_attribute(cap, 1, "cap", "FACE") obj = join_objects([obj, cap]) return obj def finalize_assets(self, assets): - self.surface.apply(assets, clear=uniform() < .5) - self.cap_surface.apply(assets, selection='cap') + self.surface.apply(assets, clear=uniform() < 0.5) + self.cap_surface.apply(assets, selection="cap") if self.scratch: self.scratch.apply(assets) if self.edge_wear: diff --git a/infinigen/assets/objects/tableware/knife.py b/infinigen/assets/objects/tableware/knife.py new file mode 100644 index 000000000..61cf0d04d --- /dev/null +++ b/infinigen/assets/objects/tableware/knife.py @@ -0,0 +1,133 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +import bmesh + +# Authors: Lingjie Mei +import bpy +import numpy as np +from numpy.random import uniform + +from infinigen.assets.utils.decorate import subsurf, write_co +from infinigen.assets.utils.object import new_grid +from infinigen.core.util import blender as butil +from infinigen.core.util.math import FixedSeed +from infinigen.core.util.random import log_uniform + +from .base import TablewareFactory + + +class KnifeFactory(TablewareFactory): + x_end = 0.5 + + def __init__(self, factory_seed, coarse=False): + super().__init__(factory_seed, coarse) + with FixedSeed(factory_seed): + self.x_length = log_uniform(0.4, 0.7) + self.has_guard = uniform(0, 1) < 0.7 + if self.has_guard: + self.y_length = log_uniform(0.1, 0.5) + self.y_guard = self.y_length * log_uniform(0.2, 0.4) + else: + self.y_length = log_uniform(0.1, 0.2) + self.y_guard = self.y_length * log_uniform(0.3, 0.5) + self.x_guard = uniform(0, 0.2) + self.has_tip = uniform(0, 1) < 0.7 + self.thickness = log_uniform(0.02, 0.03) + y_off_rand = uniform(0, 1) + self.y_offset = ( + 0.2 + if y_off_rand < 1 / 8 + else 0.5 + if y_off_rand < 1 / 4 + else uniform(0.2, 0.6) + ) + self.guard_type = "round" if uniform(0, 1) < 0.6 else "double" + self.guard_depth = log_uniform(0.2, 1.0) * self.thickness + self.scale = log_uniform(0.2, 0.3) + + def create_asset(self, **params) -> bpy.types.Object: + x_anchors = np.array( + [ + self.x_end, + uniform(0.5, 0.8) * self.x_end, + uniform(0.3, 0.4) * self.x_end, + 1e-3, + 0, + -1e-3, + -2e-3, + -self.x_end * self.x_length + 1e-3, + -self.x_end * self.x_length, + ] + ) + y_anchors = np.array( + [ + 1e-3, + self.y_length * log_uniform(0.75, 0.95), + self.y_length, + self.y_length, + self.y_length, + self.y_guard, + self.y_guard, + self.y_guard, + self.y_guard, + ] + ) + if not self.has_guard: + indices = [0, 1, 2, 4, 5, 7, 8] + x_anchors = x_anchors[indices] + y_anchors = y_anchors[indices] + if self.has_tip: + indices = [0] + list(range(len(x_anchors))) + x_anchors = x_anchors[indices] + x_anchors[0] += 1e-3 + y_anchors = y_anchors[indices] + y_anchors[1] += 3e-3 + + obj = new_grid(x_subdivisions=len(x_anchors) - 1, y_subdivisions=1) + x = np.concatenate([x_anchors] * 2) + y = np.concatenate([y_anchors, np.zeros_like(y_anchors)]) + y[0 :: len(y_anchors)] += self.y_offset * 
self.y_length + if self.has_tip: + y[1 :: len(y_anchors)] += self.y_offset * self.y_length + y[2 :: len(y_anchors)] += self.y_offset * (self.y_length - y_anchors[2]) + else: + y[1 :: len(y_anchors)] += self.y_offset * (self.y_length - y_anchors[1]) + z = np.concatenate([np.zeros_like(x_anchors)] * 2) + write_co(obj, np.stack([x, y, z], -1)) + butil.modify_mesh(obj, "SOLIDIFY", thickness=self.thickness) + self.make_knife_tip(obj) + subsurf(obj, 1) + + def selection(nw, x): + return nw.compare( + "LESS_THAN", x, -self.x_guard * self.x_length * self.x_end + ) + + if self.guard_type == "double": + selection = self.make_double_sided(selection) + self.add_guard(obj, selection) + subsurf(obj, 1) + obj.scale = [self.scale] * 3 + butil.apply_transform(obj) + return obj + + def make_knife_tip(self, obj): + with butil.ViewportMode(obj, "EDIT"): + bm = bmesh.from_edit_mesh(obj.data) + for e in bm.edges: + u, v = e.verts + x0, y0, z0 = u.co + x1, y1, z1 = v.co + if x0 >= 0 and x1 >= 0 and abs(x0 - x1) < 2e-4: + if ( + y0 > self.y_offset * self.y_length + and y1 > self.y_offset * self.y_length + ): + bmesh.ops.pointmerge( + bm, verts=[u, v], merge_co=(u.co + v.co) / 2 + ) + bmesh.update_edit_mesh(obj.data) + bpy.ops.mesh.select_mode(type="EDGE") + bpy.ops.mesh.select_loose(extend=False) + bpy.ops.mesh.delete(type="EDGE") diff --git a/infinigen/assets/tableware/lid.py b/infinigen/assets/objects/tableware/lid.py similarity index 59% rename from infinigen/assets/tableware/lid.py rename to infinigen/assets/objects/tableware/lid.py index 083c2249c..43687a6b6 100644 --- a/infinigen/assets/tableware/lid.py +++ b/infinigen/assets/objects/tableware/lid.py @@ -6,41 +6,43 @@ import numpy as np from numpy.random import uniform +from infinigen.assets.material_assignments import AssetList from infinigen.assets.utils.decorate import read_center, subsurf, write_co from infinigen.assets.utils.draw import spin from infinigen.assets.utils.object import join_objects, new_cylinder, new_line from infinigen.core.placement.factory import AssetFactory from infinigen.core.util import blender as butil from infinigen.core.util.math import FixedSeed -from infinigen.assets.material_assignments import AssetList class LidFactory(AssetFactory): def __init__(self, factory_seed, coarse=False): super(LidFactory, self).__init__(factory_seed, coarse) with FixedSeed(self.factory_seed): - self.x_length = uniform(.08, .15) - self.z_height = self.x_length * uniform(0, .5) - self.thickness = uniform(.003, .005) - self.is_glass = uniform() < .5 + self.x_length = uniform(0.08, 0.15) + self.z_height = self.x_length * uniform(0, 0.5) + self.thickness = uniform(0.003, 0.005) + self.is_glass = uniform() < 0.5 self.hardware_type = None self.rim_height = uniform(1, 2) * self.thickness - self.handle_type = np.random.choice(['handle', 'knob']) - if self.handle_type == 'knob': - self.handle_height = self.x_length * uniform(.1, .15) + self.handle_type = np.random.choice(["handle", "knob"]) + if self.handle_type == "knob": + self.handle_height = self.x_length * uniform(0.1, 0.15) else: - self.handle_height = self.x_length * uniform(.2, .25) - self.handle_radius = self.x_length * uniform(.15, .25) - self.handle_width = self.x_length * uniform(.25, .3) + self.handle_height = self.x_length * uniform(0.2, 0.25) + self.handle_radius = self.x_length * uniform(0.15, 0.25) + self.handle_width = self.x_length * uniform(0.25, 0.3) self.handle_subsurf_level = np.random.randint(0, 3) if self.is_glass: - material_assignments = AssetList['GlassLidFactory']() + 
material_assignments = AssetList["GlassLidFactory"]() else: material_assignments = AssetList["LidFactory"]() self.surface = material_assignments["surface"].assign_material() self.rim_surface = material_assignments["rim_surface"].assign_material() - self.handle_surface = material_assignments["handle_surface"].assign_material() + self.handle_surface = material_assignments[ + "handle_surface" + ].assign_material() scratch_prob, edge_wear_prob = material_assignments["wear_tear_prob"] self.scratch, self.edge_wear = material_assignments["wear_tear"] @@ -48,17 +50,19 @@ def __init__(self, factory_seed, coarse=False): self.edge_wear = None if uniform() > edge_wear_prob else self.edge_wear def create_asset(self, **params) -> bpy.types.Object: - x_anchors = 0, .01, self.x_length / 2, self.x_length - z_anchors = self.z_height, self.z_height, self.z_height * uniform(.7, .8), 0 + x_anchors = 0, 0.01, self.x_length / 2, self.x_length + z_anchors = self.z_height, self.z_height, self.z_height * uniform(0.7, 0.8), 0 obj = spin((x_anchors, 0, z_anchors)) - butil.modify_mesh(obj, 'SOLIDIFY', thickness=self.thickness, offset=0) - butil.modify_mesh(obj, 'BEVEL', width=self.thickness / 2, segments=4) - self.surface.apply(obj, clear=True if self.is_glass else None, metal_color='bw+natural') + butil.modify_mesh(obj, "SOLIDIFY", thickness=self.thickness, offset=0) + butil.modify_mesh(obj, "BEVEL", width=self.thickness / 2, segments=4) + self.surface.apply( + obj, clear=True if self.is_glass else None, metal_color="bw+natural" + ) parts = [obj] if self.is_glass: parts.append(self.add_rim()) match self.handle_type: - case 'handle': + case "handle": parts.append(self.add_handle(obj)) case _: parts.append(self.add_knob()) @@ -68,8 +72,9 @@ def create_asset(self, **params) -> bpy.types.Object: def add_rim(self): butil.select_none() bpy.ops.mesh.primitive_torus_add( - major_radius=self.x_length, minor_radius=self.thickness / 2, - major_segments=128 + major_radius=self.x_length, + minor_radius=self.thickness / 2, + major_segments=128, ) obj = bpy.context.active_object obj.scale[-1] = self.rim_height / self.thickness @@ -79,23 +84,34 @@ def add_rim(self): def add_handle(self, obj): center = read_center(obj) - i = np.argmin(np.abs(center[:, :2] - np.array([self.handle_width, 0])[np.newaxis, :]).sum(-1)) + i = np.argmin( + np.abs(center[:, :2] - np.array([self.handle_width, 0])[np.newaxis, :]).sum( + -1 + ) + ) z_offset = center[i, -1] obj = new_line(3) write_co( - obj, np.array( - [[-self.handle_width, 0, 0], [-self.handle_width, 0, self.handle_height], - [self.handle_width, 0, self.handle_height], [self.handle_width, 0, 0]] - ) + obj, + np.array( + [ + [-self.handle_width, 0, 0], + [-self.handle_width, 0, self.handle_height], + [self.handle_width, 0, self.handle_height], + [self.handle_width, 0, 0], + ] + ), ) subsurf(obj, self.handle_subsurf_level) butil.select_none() - with butil.ViewportMode(obj, 'EDIT'): - bpy.ops.mesh.select_mode(type='EDGE') - bpy.ops.mesh.select_all(action='SELECT') - bpy.ops.mesh.extrude_edges_move(TRANSFORM_OT_translate={'value': (0, self.thickness * 2, 0)}) - butil.modify_mesh(obj, 'SOLIDIFY', thickness=self.thickness, offset=0) - butil.modify_mesh(obj, 'BEVEL', width=self.thickness / 2, segments=4) + with butil.ViewportMode(obj, "EDIT"): + bpy.ops.mesh.select_mode(type="EDGE") + bpy.ops.mesh.select_all(action="SELECT") + bpy.ops.mesh.extrude_edges_move( + TRANSFORM_OT_translate={"value": (0, self.thickness * 2, 0)} + ) + butil.modify_mesh(obj, "SOLIDIFY", thickness=self.thickness, offset=0) + 
butil.modify_mesh(obj, "BEVEL", width=self.thickness / 2, segments=4) obj.location = 0, -self.thickness, z_offset butil.apply_transform(obj, True) self.handle_surface.apply(obj) @@ -106,12 +122,16 @@ def add_knob(self): obj.scale = *([self.thickness * uniform(1, 2)] * 2), self.handle_height obj.location[-1] = self.z_height butil.apply_transform(obj, True) - butil.modify_mesh(obj, 'BEVEL', width=self.thickness / 2, segments=4) + butil.modify_mesh(obj, "BEVEL", width=self.thickness / 2, segments=4) top = new_cylinder() - top.scale = self.handle_radius, self.handle_radius, self.thickness * uniform(1, 2) + top.scale = ( + self.handle_radius, + self.handle_radius, + self.thickness * uniform(1, 2), + ) top.location[-1] = self.z_height + self.handle_height butil.apply_transform(top, True) - butil.modify_mesh(top, 'BEVEL', width=self.thickness / 2, segments=4) + butil.modify_mesh(top, "BEVEL", width=self.thickness / 2, segments=4) obj = join_objects([obj, top]) self.handle_surface.apply(obj) return obj diff --git a/infinigen/assets/tableware/pan.py b/infinigen/assets/objects/tableware/pan.py similarity index 56% rename from infinigen/assets/tableware/pan.py rename to infinigen/assets/objects/tableware/pan.py index 5e9c53b1a..545687562 100644 --- a/infinigen/assets/tableware/pan.py +++ b/infinigen/assets/objects/tableware/pan.py @@ -2,58 +2,63 @@ # This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory # of this source tree. -# Authors: +# Authors: # - Lingjie Mei # - Karhan Kayan: fix cutter bug -import bpy import bmesh +import bpy import numpy as np from numpy.random import uniform -from infinigen.core.util.random import log_uniform -from .base import TablewareFactory -from infinigen.core.util.math import FixedSeed -from infinigen.core.util import blender as butil +from infinigen.assets.material_assignments import AssetList from infinigen.assets.utils.decorate import subsurf from infinigen.assets.utils.object import ( - join_objects, new_base_circle, new_base_cylinder, origin2lowest, + join_objects, + new_base_circle, + new_base_cylinder, + origin2lowest, ) -from infinigen.assets.material_assignments import AssetList -from ..utils.misc import assign_material +from infinigen.core.util import blender as butil +from infinigen.core.util.math import FixedSeed +from infinigen.core.util.random import log_uniform + +from .base import TablewareFactory class PanFactory(TablewareFactory): def __init__(self, factory_seed, coarse=False): super().__init__(factory_seed, coarse) with FixedSeed(factory_seed): - self.r_expand = 1 if uniform(0, 1) < .2 else log_uniform(1., 1.2) - self.depth = log_uniform(.3, .8) + self.r_expand = 1 if uniform(0, 1) < 0.2 else log_uniform(1.0, 1.2) + self.depth = log_uniform(0.3, 0.8) if self.r_expand == 1: - self.r_mid = log_uniform(1., 1.3) + self.r_mid = log_uniform(1.0, 1.3) else: - self.r_mid = 1 + (self.r_expand - 1) * (uniform(.5, .85) if uniform(0, 1) < .5 else .5) + self.r_mid = 1 + (self.r_expand - 1) * ( + uniform(0.5, 0.85) if uniform(0, 1) < 0.5 else 0.5 + ) self.has_handle = True - self.has_handle_hole = uniform() < .6 + self.has_handle_hole = uniform() < 0.6 self.pre_level = 2 - self.x_handle = log_uniform(1.2, 2.) 
- self.z_handle = self.x_handle * uniform(0, .2) - self.z_handle_mid = uniform(.6, .8) * self.z_handle - self.s_handle = log_uniform(.8, 1.2) - self.thickness = log_uniform(.04, .06) - self.has_guard = uniform(0, 1) < .8 - self.x_guard = self.r_expand + uniform(0, .2) * self.x_handle - self.guard_type = 'round' - self.guard_depth = log_uniform(1., 2.) * self.thickness - material_assignments = AssetList['PanFactory']() - self.surface = material_assignments['surface'].assign_material() - self.inside_surface = material_assignments['inside'].assign_material() + self.x_handle = log_uniform(1.2, 2.0) + self.z_handle = self.x_handle * uniform(0, 0.2) + self.z_handle_mid = uniform(0.6, 0.8) * self.z_handle + self.s_handle = log_uniform(0.8, 1.2) + self.thickness = log_uniform(0.04, 0.06) + self.has_guard = uniform(0, 1) < 0.8 + self.x_guard = self.r_expand + uniform(0, 0.2) * self.x_handle + self.guard_type = "round" + self.guard_depth = log_uniform(1.0, 2.0) * self.thickness + material_assignments = AssetList["PanFactory"]() + self.surface = material_assignments["surface"].assign_material() + self.inside_surface = material_assignments["inside"].assign_material() if self.surface == self.inside_surface: - self.has_inside = uniform(0, 1) < .5 + self.has_inside = uniform(0, 1) < 0.5 else: self.has_inside = True self.metal_color = None - self.scale = log_uniform(.1, .15) + self.scale = log_uniform(0.1, 0.15) self.scratch = self.edge_wear = None def create_asset(self, **params) -> bpy.types.Object: @@ -66,7 +71,9 @@ def create_asset(self, **params) -> bpy.types.Object: def make_base(self): n = 4 * int(log_uniform(4, 8)) base = new_base_circle(vertices=n) - middle = new_base_circle(vertices=n, ) + middle = new_base_circle( + vertices=n, + ) middle.location[-1] = self.depth / 2 middle.scale = [self.r_mid] * 3 upper = new_base_circle(vertices=n) @@ -74,22 +81,29 @@ def make_base(self): upper.scale = [self.r_expand] * 3 butil.apply_transform(upper, loc=True) obj = join_objects([base, middle, upper]) - with butil.ViewportMode(obj, 'EDIT'): + with butil.ViewportMode(obj, "EDIT"): bpy.ops.mesh.bridge_edge_loops() bm = bmesh.from_edit_mesh(obj.data) for v in bm.verts: v.select_set(np.abs(v.co[-1]) < 1e-3) bm.select_flush(False) bmesh.update_edit_mesh(obj.data) - with butil.ViewportMode(obj, 'EDIT'): - bpy.ops.mesh.fill_grid(use_interp_simple=True, offset=np.random.randint(n // 4)) - bpy.ops.mesh.quads_convert_to_tris(quad_method='BEAUTY', ngon_method='BEAUTY') + with butil.ViewportMode(obj, "EDIT"): + bpy.ops.mesh.fill_grid( + use_interp_simple=True, offset=np.random.randint(n // 4) + ) + bpy.ops.mesh.quads_convert_to_tris( + quad_method="BEAUTY", ngon_method="BEAUTY" + ) obj.rotation_euler[-1] = np.pi / n butil.apply_transform(obj) if self.has_handle: self.add_handle(obj) self.solidify_with_inside(obj, self.thickness) - selection = lambda nw, x: nw.compare('GREATER_THAN', x, self.x_guard) + + def selection(nw, x): + return nw.compare("GREATER_THAN", x, self.x_guard) + self.add_guard(obj, selection) subsurf(obj, 1, True) subsurf(obj, 3) @@ -98,7 +112,7 @@ def make_base(self): return obj def add_handle(self, obj): - with butil.ViewportMode(obj, 'EDIT'): + with butil.ViewportMode(obj, "EDIT"): bpy.ops.mesh.select_mode(type="EDGE") bm = bmesh.from_edit_mesh(obj.data) bm.edges.ensure_lookup_table() @@ -113,17 +127,27 @@ def add_handle(self, obj): bmesh.update_edit_mesh(obj.data) bpy.ops.mesh.extrude_edges_move( - TRANSFORM_OT_translate={'value': (self.x_handle * .5, 0, self.z_handle_mid)} + 
TRANSFORM_OT_translate={ + "value": (self.x_handle * 0.5, 0, self.z_handle_mid) + } ) bpy.ops.mesh.extrude_edges_move( - TRANSFORM_OT_translate={'value': (self.x_handle * .5, 0, (self.z_handle - self.z_handle_mid))} + TRANSFORM_OT_translate={ + "value": ( + self.x_handle * 0.5, + 0, + (self.z_handle - self.z_handle_mid), + ) + } ) bpy.ops.transform.resize(value=[self.s_handle] * 3) - bpy.ops.mesh.extrude_edges_move(TRANSFORM_OT_translate={'value': (1e-3, 0, 0)}) + bpy.ops.mesh.extrude_edges_move( + TRANSFORM_OT_translate={"value": (1e-3, 0, 0)} + ) def add_handle_hole(self, obj): cutter = new_base_cylinder() - cutter.scale = *([uniform(.06, .1)] * 2), 1 - cutter.location[0] = self.r_expand + uniform(.8, .9) * self.x_handle - butil.modify_mesh(obj, 'BOOLEAN', object=cutter, operation='DIFFERENCE') + cutter.scale = *([uniform(0.06, 0.1)] * 2), 1 + cutter.location[0] = self.r_expand + uniform(0.8, 0.9) * self.x_handle + butil.modify_mesh(obj, "BOOLEAN", object=cutter, operation="DIFFERENCE") butil.delete(cutter) diff --git a/infinigen/assets/tableware/plant_container.py b/infinigen/assets/objects/tableware/plant_container.py similarity index 70% rename from infinigen/assets/tableware/plant_container.py rename to infinigen/assets/objects/tableware/plant_container.py index f2eb58358..eec7662b3 100644 --- a/infinigen/assets/tableware/plant_container.py +++ b/infinigen/assets/objects/tableware/plant_container.py @@ -6,22 +6,29 @@ import numpy as np from numpy.random import uniform -from infinigen.assets.cactus import CactusFactory -from infinigen.assets.monocot import MonocotFactory -from infinigen.assets.mushroom import MushroomFactory -from infinigen.assets.small_plants import FernFactory, SnakePlantFactory, SpiderPlantFactory, SucculentFactory -from infinigen.assets.tableware import PotFactory +from infinigen.assets.material_assignments import AssetList +from infinigen.assets.objects.cactus import CactusFactory +from infinigen.assets.objects.monocot import MonocotFactory +from infinigen.assets.objects.mushroom import MushroomFactory +from infinigen.assets.objects.small_plants import ( + FernFactory, + SnakePlantFactory, + SpiderPlantFactory, + SucculentFactory, +) +from infinigen.assets.objects.tableware.pot import PotFactory from infinigen.assets.utils.decorate import ( - read_edge_center, read_edge_direction, remove_vertices, - select_edges, subsurf, + read_edge_center, + read_edge_direction, + remove_vertices, + select_edges, + subsurf, ) -from infinigen.assets.utils.object import center, join_objects, new_bbox, origin2lowest +from infinigen.assets.utils.object import join_objects, new_bbox, origin2lowest from infinigen.core.placement.factory import AssetFactory +from infinigen.core.util import blender as butil from infinigen.core.util.math import FixedSeed from infinigen.core.util.random import log_uniform -from infinigen.core.util import blender as butil -from infinigen.assets.material_assignments import AssetList -from infinigen.core.constraints.example_solver.room.constants import WALL_HEIGHT, WALL_THICKNESS class PlantPotFactory(PotFactory): @@ -29,30 +36,36 @@ def __init__(self, factory_seed, coarse=False): super(PlantPotFactory, self).__init__(factory_seed, coarse) with FixedSeed(self.factory_seed): self.has_handle = self.has_bar = self.has_guard = False - self.depth = log_uniform(.5, 1.) 
+ self.depth = log_uniform(0.5, 1.0) self.r_expand = uniform(1.1, 1.3) - alpha = uniform(.5, .8) + alpha = uniform(0.5, 0.8) self.r_mid = (self.r_expand - 1) * alpha + 1 material_assignments = AssetList["PlantContainerFactory"]() self.surface = material_assignments["surface"].assign_material() - self.scale = log_uniform(.08, .12) + self.scale = log_uniform(0.08, 0.12) class PlantContainerFactory(AssetFactory): - plant_factories = [CactusFactory, MushroomFactory, FernFactory, SucculentFactory, SpiderPlantFactory, - SnakePlantFactory] + plant_factories = [ + CactusFactory, + MushroomFactory, + FernFactory, + SucculentFactory, + SpiderPlantFactory, + SnakePlantFactory, + ] def __init__(self, factory_seed, coarse=False): super(PlantContainerFactory, self).__init__(factory_seed, coarse) with FixedSeed(self.factory_seed): self.base_factory = PlantPotFactory(self.factory_seed, coarse) - self.dirt_ratio = uniform(.7, .8) + self.dirt_ratio = uniform(0.7, 0.8) material_assignments = AssetList["PlantContainerFactory"]() self.dirt_surface = material_assignments["dirt_surface"].assign_material() fn = np.random.choice(self.plant_factories) self.plant_factory = fn(self.factory_seed) self.side_size = self.base_factory.scale * self.base_factory.r_expand - self.top_size = uniform(.4, .6) + self.top_size = uniform(0.4, 0.6) def create_placeholder(self, **kwargs) -> bpy.types.Object: return new_bbox( @@ -60,13 +73,13 @@ def create_placeholder(self, **kwargs) -> bpy.types.Object: self.side_size, -self.side_size, self.side_size, - -.02, - self.base_factory.depth * self.base_factory.scale + self.top_size + -0.02, + self.base_factory.depth * self.base_factory.scale + self.top_size, ) def create_asset(self, i, **params) -> bpy.types.Object: obj = self.base_factory.create_asset(i=i, **params) - horizontal = np.abs(read_edge_direction(obj)[:, -1]) < .1 + horizontal = np.abs(read_edge_direction(obj)[:, -1]) < 0.1 edge_center = read_edge_center(obj) z = edge_center[:, -1] @@ -76,34 +89,33 @@ def create_asset(self, i, **params) -> bpy.types.Object: selection = np.zeros_like(z).astype(bool) selection[idx] = True - with butil.ViewportMode(obj, 'EDIT'): + with butil.ViewportMode(obj, "EDIT"): bpy.ops.mesh.select_mode(type="EDGE") select_edges(obj, selection) bpy.ops.mesh.loop_multi_select(ring=False) bpy.ops.mesh.duplicate_move() - bpy.ops.mesh.separate(type='SELECTED') + bpy.ops.mesh.separate(type="SELECTED") dirt_ = bpy.context.selected_objects[-1] butil.select_none() self.base_factory.finalize_assets(obj) - with butil.ViewportMode(dirt_, 'EDIT'): - bpy.ops.mesh.select_all(action='SELECT') + with butil.ViewportMode(dirt_, "EDIT"): + bpy.ops.mesh.select_all(action="SELECT") bpy.ops.mesh.fill_grid() subsurf(dirt_, 3) self.dirt_surface.apply(dirt_) butil.apply_modifiers(dirt_) - remove_vertices(dirt_, lambda x, y, z: np.sqrt(x ** 2 + y ** 2) > radius * 0.92) - dirt_.location[-1] -= .02 + remove_vertices(dirt_, lambda x, y, z: np.sqrt(x**2 + y**2) > radius * 0.92) + dirt_.location[-1] -= 0.02 plant = self.plant_factory.spawn_asset(i=i, loc=(0, 0, 0), rot=(0, 0, 0)) origin2lowest(plant, approximate=True) self.plant_factory.finalize_assets(plant) scale = np.min( - np.array([self.side_size, self.side_size, self.top_size]) / np.max( - np.abs(np.array(plant.bound_box)), 0 - ) + np.array([self.side_size, self.side_size, self.top_size]) + / np.max(np.abs(np.array(plant.bound_box)), 0) ) plant.scale = [scale] * 3 plant.location[-1] = dirt_z @@ -118,9 +130,11 @@ class LargePlantContainerFactory(PlantContainerFactory): def 
__init__(self, factory_seed, coarse=False): super(LargePlantContainerFactory, self).__init__(factory_seed, coarse) with FixedSeed(self.factory_seed): - self.base_factory.depth = log_uniform(1., 1.5) - self.base_factory.scale = log_uniform(.15, .25) - self.side_size = self.base_factory.scale * uniform(1.5, 2.) * self.base_factory.r_expand + self.base_factory.depth = log_uniform(1.0, 1.5) + self.base_factory.scale = log_uniform(0.15, 0.25) + self.side_size = ( + self.base_factory.scale * uniform(1.5, 2.0) * self.base_factory.r_expand + ) self.top_size = uniform(1, 1.5) # if WALL_HEIGHT - 2*WALL_THICKNESS < 3: # self.top_size = uniform(1.5, WALL_HEIGHT - 2*WALL_THICKNESS) diff --git a/infinigen/assets/tableware/plate.py b/infinigen/assets/objects/tableware/plate.py similarity index 70% rename from infinigen/assets/tableware/plate.py rename to infinigen/assets/objects/tableware/plate.py index 3bcc8756c..294613c66 100644 --- a/infinigen/assets/tableware/plate.py +++ b/infinigen/assets/objects/tableware/plate.py @@ -6,12 +6,12 @@ import numpy as np from numpy.random import uniform -from infinigen.assets.tableware.base import TablewareFactory +from infinigen.assets.objects.tableware.base import TablewareFactory from infinigen.assets.utils.decorate import subsurf from infinigen.assets.utils.draw import spin -from infinigen.core.util.random import log_uniform -from infinigen.core.util.math import FixedSeed from infinigen.core.util import blender as butil +from infinigen.core.util.math import FixedSeed +from infinigen.core.util.random import log_uniform class PlateFactory(TablewareFactory): @@ -20,15 +20,15 @@ class PlateFactory(TablewareFactory): def __init__(self, factory_seed, coarse=False): super().__init__(factory_seed, coarse) with FixedSeed(factory_seed): - self.x_end = .5 - self.z_length = log_uniform(.05, .2) - self.x_mid = uniform(.3, 1.) 
* self.x_end - self.z_mid = uniform(.3, .8) * self.z_length + self.x_end = 0.5 + self.z_length = log_uniform(0.05, 0.2) + self.x_mid = uniform(0.3, 1.0) * self.x_end + self.z_mid = uniform(0.3, 0.8) * self.z_length self.has_guard = False self.pre_level = 1 - self.thickness = uniform(.01, .03) - self.has_inside = uniform(0, 1) < .2 - self.scale = log_uniform(.2, .4) + self.thickness = uniform(0.01, 0.03) + self.has_inside = uniform(0, 1) < 0.2 + self.scale = log_uniform(0.2, 0.4) self.scratch = self.edge_wear = None def create_asset(self, **params) -> bpy.types.Object: @@ -36,7 +36,9 @@ def create_asset(self, **params) -> bpy.types.Object: z_anchors = 0, 0, self.z_mid, self.z_length anchors = x_anchors, np.zeros_like(x_anchors), z_anchors obj = spin(anchors, [1, 2], 4, 16) - butil.modify_mesh(obj, 'SUBSURF', render_levels=self.pre_level, levels=self.pre_level) + butil.modify_mesh( + obj, "SUBSURF", render_levels=self.pre_level, levels=self.pre_level + ) self.solidify_with_inside(obj, self.thickness) subsurf(obj, 2) obj.scale = [self.scale] * 3 diff --git a/infinigen/assets/tableware/pot.py b/infinigen/assets/objects/tableware/pot.py similarity index 53% rename from infinigen/assets/tableware/pot.py rename to infinigen/assets/objects/tableware/pot.py index be7e89a3c..626513c62 100644 --- a/infinigen/assets/tableware/pot.py +++ b/infinigen/assets/objects/tableware/pot.py @@ -6,45 +6,50 @@ import numpy as np from numpy.random import uniform -from infinigen.assets.utils.decorate import read_co, write_attribute, subsurf +from infinigen.assets.utils.decorate import read_co, subsurf, write_attribute from infinigen.assets.utils.object import join_objects, new_bbox -from infinigen.core.util.random import log_uniform -from . import PanFactory -from infinigen.core.util.math import FixedSeed from infinigen.core.util import blender as butil +from infinigen.core.util.math import FixedSeed +from infinigen.core.util.random import log_uniform + +from .pan import PanFactory class PotFactory(PanFactory): def __init__(self, factory_seed, coarse=False): super().__init__(factory_seed, coarse) with FixedSeed(factory_seed): - self.depth = log_uniform(.6, 2.) + self.depth = log_uniform(0.6, 2.0) self.r_expand = 1 self.r_mid = 1 - self.has_bar = uniform(0, 1) < .5 + self.has_bar = uniform(0, 1) < 0.5 self.has_handle = not self.has_handle self.has_guard = not self.has_bar - self.bar_height = self.depth * uniform(.75, .85) - self.bar_radius = log_uniform(.2, .3) - self.bar_x = 1 + uniform(-self.bar_radius, self.bar_radius) * .05 - self.bar_inner_radius = log_uniform(.2, .4) * self.bar_radius - scale = log_uniform(.6, 1.5) - self.bar_scale = log_uniform(.6, 1.) * scale, 1 * scale, log_uniform(.6, 1.2) * scale - self.bar_taper = log_uniform(.3, .8) + self.bar_height = self.depth * uniform(0.75, 0.85) + self.bar_radius = log_uniform(0.2, 0.3) + self.bar_x = 1 + uniform(-self.bar_radius, self.bar_radius) * 0.05 + self.bar_inner_radius = log_uniform(0.2, 0.4) * self.bar_radius + scale = log_uniform(0.6, 1.5) + self.bar_scale = ( + log_uniform(0.6, 1.0) * scale, + 1 * scale, + log_uniform(0.6, 1.2) * scale, + ) + self.bar_taper = log_uniform(0.3, 0.8) self.bar_y_rotation = uniform(-np.pi / 6, 0) - self.bar_x_offset = self.bar_radius * uniform(-.1, .1) + self.bar_x_offset = self.bar_radius * uniform(-0.1, 0.1) - self.guard_type = 'round' - self.guard_depth = log_uniform(.5, 1.) 
* self.thickness - self.scale = log_uniform(.1, .15) + self.guard_type = "round" + self.guard_depth = log_uniform(0.5, 1.0) * self.thickness + self.scale = log_uniform(0.1, 0.15) def post_init(self): self.has_handle = not self.has_bar self.has_guard = not self.has_bar - self.bar_x = 1 + uniform(-self.bar_radius, self.bar_radius) * .05 - self.bar_inner_radius = log_uniform(.2, .4) * self.bar_radius - self.bar_x_offset = self.bar_radius * uniform(-.1, .1) + self.bar_x = 1 + uniform(-self.bar_radius, self.bar_radius) * 0.05 + self.bar_inner_radius = log_uniform(0.2, 0.4) * self.bar_radius + self.bar_x_offset = self.bar_radius * uniform(-0.1, 0.1) def create_asset(self, **params) -> bpy.types.Object: obj = self.make_base() @@ -56,16 +61,38 @@ def create_asset(self, **params) -> bpy.types.Object: def create_placeholder(self, **kwargs) -> bpy.types.Object: if self.has_bar: - radius_ = 1 + self.bar_x_offset + self.bar_radius + self.bar_inner_radius + self.thickness - obj = new_bbox(-radius_, radius_, -1 - self.thickness, 1 + self.thickness, 0, self.depth) + radius_ = ( + 1 + + self.bar_x_offset + + self.bar_radius + + self.bar_inner_radius + + self.thickness + ) + obj = new_bbox( + -radius_, + radius_, + -1 - self.thickness, + 1 + self.thickness, + 0, + self.depth, + ) elif self.has_handle: obj = new_bbox( - -1 - self.thickness, 1 + self.thickness + self.x_handle, -1 - self.thickness, 1 + self.thickness, 0, - self.depth + -1 - self.thickness, + 1 + self.thickness + self.x_handle, + -1 - self.thickness, + 1 + self.thickness, + 0, + self.depth, ) else: obj = new_bbox( - -1 - self.thickness, 1 + self.thickness, -1 - self.thickness, 1 + self.thickness, 0, self.depth + -1 - self.thickness, + 1 + self.thickness, + -1 - self.thickness, + 1 + self.thickness, + 0, + self.depth, ) obj.scale = (self.scale,) * 3 butil.apply_transform(obj) @@ -76,18 +103,22 @@ def add_bar(self, obj): for side in [-1, 1]: bpy.ops.mesh.primitive_torus_add( location=(side * (1 + self.bar_x_offset), 0, self.bar_height), - major_radius=self.bar_radius, minor_radius=self.bar_inner_radius + major_radius=self.bar_radius, + minor_radius=self.bar_inner_radius, ) bar = bpy.context.active_object bar.scale = self.bar_scale butil.modify_mesh( - bar, 'SIMPLE_DEFORM', deform_method='TAPER', angle=self.bar_taper, - deform_axis='X' + bar, + "SIMPLE_DEFORM", + deform_method="TAPER", + angle=self.bar_taper, + deform_axis="X", ) bar.rotation_euler = 0, self.bar_y_rotation, 0 if side == 1 else np.pi butil.apply_transform(bar) - butil.modify_mesh(bar, 'BOOLEAN', object=obj, operation='DIFFERENCE') + butil.modify_mesh(bar, "BOOLEAN", object=obj, operation="DIFFERENCE") butil.select_none() objs = butil.split_object(bar) i = np.argmax([np.max(read_co(o)[:, 0] * side) for o in objs]) diff --git a/infinigen/assets/objects/tableware/spoon.py b/infinigen/assets/objects/tableware/spoon.py new file mode 100644 index 000000000..fba137164 --- /dev/null +++ b/infinigen/assets/objects/tableware/spoon.py @@ -0,0 +1,88 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
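+
+# SpoonFactory lays out x/y/z anchor arrays that trace the bowl outline and the
+# handle, writes them into a grid mesh, pushes the bowl center down to give it
+# depth, then solidifies, optionally adds a guard, subdivides and rescales.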
+ +# Authors: Lingjie Mei +import bpy +import numpy as np +from numpy.random import uniform + +from infinigen.assets.utils.decorate import subsurf, write_co +from infinigen.assets.utils.object import new_grid +from infinigen.core.util import blender as butil +from infinigen.core.util.math import FixedSeed +from infinigen.core.util.random import log_uniform + +from .base import TablewareFactory + + +class SpoonFactory(TablewareFactory): + x_end = 0.15 + is_fragile = True + + def __init__(self, factory_seed, coarse=False): + super().__init__(factory_seed, coarse) + with FixedSeed(factory_seed): + self.x_length = log_uniform(0.2, 0.8) + self.y_length = log_uniform(0.06, 0.12) + self.z_depth = log_uniform(0.08, 0.25) + self.z_offset = uniform(0.0, 0.05) + self.thickness = log_uniform(0.008, 0.015) + self.has_guard = uniform(0, 1) < 0.4 + self.guard_type = "round" if uniform(0, 1) < 0.6 else "double" + self.guard_depth = log_uniform(0.2, 1.0) * self.thickness + self.scale = log_uniform(0.15, 0.25) + + def create_asset(self, **params) -> bpy.types.Object: + x_anchors = np.array( + [ + log_uniform(0.07, 0.25), + 0, + -0.08, + -0.12, + -self.x_end, + -self.x_end - self.x_length, + -self.x_end - self.x_length * log_uniform(1.2, 1.4), + ] + ) + y_anchors = np.array( + [ + self.y_length * log_uniform(0.1, 0.8), + self.y_length * log_uniform(1.0, 1.2), + self.y_length * log_uniform(0.6, 1.0), + self.y_length * log_uniform(0.2, 0.4), + log_uniform(0.01, 0.02), + log_uniform(0.02, 0.05), + log_uniform(0.01, 0.02), + ] + ) + z_anchors = np.array( + [ + 0, + 0, + 0, + 0, + self.z_offset, + self.z_offset + uniform(-0.02, 0.04), + self.z_offset + uniform(-0.02, 0), + ] + ) + obj = new_grid(x_subdivisions=len(x_anchors) - 1, y_subdivisions=2) + x = np.concatenate([x_anchors] * 3) + y = np.concatenate([y_anchors, np.zeros_like(y_anchors), -y_anchors]) + z = np.concatenate([z_anchors] * 3) + x[len(x_anchors)] += 0.02 + z[len(x_anchors) + 1] = -self.z_depth + write_co(obj, np.stack([x, y, z], -1)) + butil.modify_mesh(obj, "SOLIDIFY", thickness=self.thickness) + subsurf(obj, 1) + + def selection(nw, x): + return nw.compare("LESS_THAN", x, -self.x_end) + + if self.guard_type == "double": + selection = self.make_double_sided(selection) + self.add_guard(obj, selection) + subsurf(obj, 2) + obj.scale = [self.scale] * 3 + butil.apply_transform(obj) + return obj diff --git a/infinigen/assets/tableware/wineglass.py b/infinigen/assets/objects/tableware/wineglass.py similarity index 62% rename from infinigen/assets/tableware/wineglass.py rename to infinigen/assets/objects/tableware/wineglass.py index 463efae45..09967240f 100644 --- a/infinigen/assets/tableware/wineglass.py +++ b/infinigen/assets/objects/tableware/wineglass.py @@ -6,40 +6,46 @@ import numpy as np from numpy.random import uniform -from infinigen.assets.tableware.base import TablewareFactory from infinigen.assets.materials import glass +from infinigen.assets.objects.tableware.base import TablewareFactory from infinigen.assets.utils.decorate import subsurf from infinigen.assets.utils.draw import spin -from infinigen.core.util.random import log_uniform -from infinigen.core.util.math import FixedSeed from infinigen.core.util import blender as butil +from infinigen.core.util.math import FixedSeed +from infinigen.core.util.random import log_uniform class WineglassFactory(TablewareFactory): - def __init__(self, factory_seed, coarse=False): super().__init__(factory_seed, coarse) with FixedSeed(factory_seed): - self.x_end = .25 - self.z_length = log_uniform(.6, 2.) 
- self.z_cup = uniform(.3, .6) * self.z_length - self.z_mid = self.z_cup + uniform(.3, .5) * (self.z_length - self.z_cup) - self.x_neck = log_uniform(.01, .02) + self.x_end = 0.25 + self.z_length = log_uniform(0.6, 2.0) + self.z_cup = uniform(0.3, 0.6) * self.z_length + self.z_mid = self.z_cup + uniform(0.3, 0.5) * (self.z_length - self.z_cup) + self.x_neck = log_uniform(0.01, 0.02) self.x_top = self.x_end * log_uniform(1, 1.4) - self.x_mid = self.x_top * log_uniform(.9, 1.2) + self.x_mid = self.x_top * log_uniform(0.9, 1.2) self.has_guard = False - self.thickness = uniform(.01, .03) + self.thickness = uniform(0.01, 0.03) self.surface = glass - self.scale = log_uniform(.1, .3) + self.scale = log_uniform(0.1, 0.3) def create_asset(self, **params) -> bpy.types.Object: - z_bottom = self.z_length * log_uniform(.01, .05) - x_anchors = self.x_end, self.x_end / 2, self.x_neck, self.x_neck, self.x_mid, self.x_top + z_bottom = self.z_length * log_uniform(0.01, 0.05) + x_anchors = ( + self.x_end, + self.x_end / 2, + self.x_neck, + self.x_neck, + self.x_mid, + self.x_top, + ) z_anchors = 0, z_bottom / 2, z_bottom, self.z_cup, self.z_mid, self.z_length anchors = x_anchors, np.zeros_like(x_anchors), z_anchors obj = spin(anchors, [0, 1, 2, 3], 4, 16) subsurf(obj, 2) - butil.modify_mesh(obj, 'SOLIDIFY', thickness=self.thickness) + butil.modify_mesh(obj, "SOLIDIFY", thickness=self.thickness) subsurf(obj, 1) obj.scale = [self.scale] * 3 butil.apply_transform(obj) diff --git a/infinigen/assets/trees/.gitignore b/infinigen/assets/objects/trees/.gitignore similarity index 100% rename from infinigen/assets/trees/.gitignore rename to infinigen/assets/objects/trees/.gitignore diff --git a/infinigen/assets/trees/README.md b/infinigen/assets/objects/trees/README.md similarity index 100% rename from infinigen/assets/trees/README.md rename to infinigen/assets/objects/trees/README.md diff --git a/infinigen/assets/objects/trees/__init__.py b/infinigen/assets/objects/trees/__init__.py new file mode 100644 index 000000000..6d5dd0246 --- /dev/null +++ b/infinigen/assets/objects/trees/__init__.py @@ -0,0 +1,2 @@ +from .generate import BushFactory, TreeFactory, random_leaf_collection, random_season +from .tree_flower import TreeFlowerFactory diff --git a/infinigen/assets/objects/trees/branch.py b/infinigen/assets/objects/trees/branch.py new file mode 100644 index 000000000..f16c4d512 --- /dev/null +++ b/infinigen/assets/objects/trees/branch.py @@ -0,0 +1,756 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
+ +# Authors: Yiming Zuo + +import bpy +import numpy as np +from numpy.random import randint, uniform + +from infinigen.core import surface +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.util.math import FixedSeed + + +@node_utils.to_nodegroup( + "nodegroup_surface_bump", singleton=False, type="GeometryNodeTree" +) +def nodegroup_surface_bump(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketFloat", "Displacement", 0.0200), + ("NodeSocketFloat", "Scale", 50.0000), + ("NodeSocketFloat", "Seed", 0.0000), + ], + ) + + normal = nw.new_node(Nodes.InputNormal) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "W": group_input.outputs["Seed"], + "Scale": group_input.outputs["Scale"], + }, + attrs={"noise_dimensions": "4D"}, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: noise_texture.outputs["Fac"]}, + attrs={"operation": "SUBTRACT"}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract, 1: group_input.outputs["Displacement"]}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: normal, 1: multiply}, + attrs={"operation": "MULTIPLY"}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + "Offset": multiply_1.outputs["Vector"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": set_position}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_generate_anchor", singleton=False, type="GeometryNodeTree" +) +def nodegroup_generate_anchor(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Curve", None), + ("NodeSocketFloat", "curve parameter", 0.0000), + ("NodeSocketFloat", "trim_bottom", 0.2000), + ("NodeSocketFloat", "trim_top", 0.0000), + ("NodeSocketInt", "seed", 0), + ("NodeSocketFloat", "density", 0.5000), + ("NodeSocketFloat", "keep probablity", 0.0000), + ], + ) + + divide = nw.new_node( + Nodes.Math, + input_kwargs={0: 1.0000, 1: group_input.outputs["density"]}, + attrs={"operation": "DIVIDE"}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: divide, 1: group_input.outputs["keep probablity"]}, + attrs={"operation": "MULTIPLY"}, + ) + + minimum = nw.new_node( + Nodes.Math, input_kwargs={0: multiply}, attrs={"operation": "MINIMUM"} + ) + + curve_to_points_1 = nw.new_node( + Nodes.CurveToPoints, + input_kwargs={"Curve": group_input.outputs["Curve"], "Length": minimum}, + attrs={"mode": "LENGTH"}, + ) + + random_value_3 = nw.new_node( + Nodes.RandomValue, + input_kwargs={ + "Probability": group_input.outputs["keep probablity"], + "Seed": group_input.outputs["seed"], + }, + attrs={"data_type": "BOOLEAN"}, + ) + + greater_than = nw.new_node( + Nodes.Compare, + input_kwargs={ + 0: group_input.outputs["curve parameter"], + 1: group_input.outputs["trim_bottom"], + }, + ) + + less_than = nw.new_node( + Nodes.Compare, + input_kwargs={ + 0: group_input.outputs["curve parameter"], + 1: group_input.outputs["trim_top"], + }, + attrs={"operation": "LESS_THAN"}, + ) + + op_and = nw.new_node( + Nodes.BooleanMath, 
input_kwargs={0: greater_than, 1: less_than} + ) + + op_and_1 = nw.new_node( + Nodes.BooleanMath, input_kwargs={0: random_value_3.outputs[3], 1: op_and} + ) + + op_not = nw.new_node( + Nodes.BooleanMath, input_kwargs={0: op_and_1}, attrs={"operation": "NOT"} + ) + + delete_geometry = nw.new_node( + Nodes.DeleteGeometry, + input_kwargs={ + "Geometry": curve_to_points_1.outputs["Points"], + "Selection": op_not, + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Points": delete_geometry}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_create_instance", singleton=False, type="GeometryNodeTree" +) +def nodegroup_create_instance(nw: NodeWrangler): + # Code generated using version 2.6.4 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Points", None), + ("NodeSocketGeometry", "Instance", None), + ("NodeSocketBool", "Selection", True), + ("NodeSocketBool", "Pick Instance", False), + ("NodeSocketVector", "Tangent", (0.0000, 0.0000, 1.0000)), + ("NodeSocketFloat", "Rot x deg", 0.0000), + ("NodeSocketFloat", "Rot x range", 0.2000), + ("NodeSocketFloat", "Scale", 1.0000), + ("NodeSocketInt", "Seed", 0), + ], + ) + + random_value_1 = nw.new_node( + Nodes.RandomValue, input_kwargs={3: 6.2832, "Seed": group_input.outputs["Seed"]} + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"Z": random_value_1.outputs[1]} + ) + + align_euler_to_vector = nw.new_node( + Nodes.AlignEulerToVector, + input_kwargs={ + "Rotation": combine_xyz_1, + "Vector": group_input.outputs["Tangent"], + }, + attrs={"axis": "Y"}, + ) + + instance_on_points = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={ + "Points": group_input.outputs["Points"], + "Selection": group_input.outputs["Selection"], + "Instance": group_input.outputs["Instance"], + "Pick Instance": group_input.outputs["Pick Instance"], + "Rotation": align_euler_to_vector, + "Scale": group_input.outputs["Scale"], + }, + ) + + radians = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Rot x deg"]}, + attrs={"operation": "RADIANS"}, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: 1.0000, 1: group_input.outputs["Rot x range"]}, + attrs={"operation": "SUBTRACT"}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: radians, 1: subtract}, + attrs={"operation": "MULTIPLY"}, + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: 1.0000, 1: group_input.outputs["Rot x range"]} + ) + + multiply_1 = nw.new_node( + Nodes.Math, input_kwargs={0: radians, 1: add}, attrs={"operation": "MULTIPLY"} + ) + + random_value_2 = nw.new_node( + Nodes.RandomValue, + input_kwargs={2: multiply, 3: multiply_1, "Seed": group_input.outputs["Seed"]}, + ) + + combine_xyz_2 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": random_value_2.outputs[1]} + ) + + rotate_instances = nw.new_node( + Nodes.RotateInstances, + input_kwargs={"Instances": instance_on_points, "Rotation": combine_xyz_2}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Instances": rotate_instances}, + attrs={"is_active_output": True}, + ) + + +def generate_branch(nw: NodeWrangler, **kwargs): + # Code generated using version 2.6.4 of the node_transpiler + + curve_line = nw.new_node(Nodes.CurveLine) + + # group_input = nw.new_node(Nodes.GroupInput, + # expose_input=[('NodeSocketGeometry', 'Geometry', None), + # ('NodeSocketCollection', 'leaf collection', None), + # ('NodeSocketCollection', 'fruit 
collection', None), + # ('NodeSocketInt', 'resolution', 256), + # ('NodeSocketInt', 'seed', 0), + # ('NodeSocketFloat', 'main branch noise amount', 0.3000), + # ('NodeSocketFloat', 'main branch noise scale', 1.1000), + # ('NodeSocketFloatDistance', 'overall radius', 0.0200), + # ('NodeSocketFloat', 'twig density', 10.0000), + # ('NodeSocketFloat', 'twig rotation', 45.0000), + # ('NodeSocketFloat', 'twig scale', 5.0000), + # ('NodeSocketFloat', 'twig noise amount', 0.3000), + # ('NodeSocketFloat', 'leaf density', 15.0000), + # ('NodeSocketFloat', 'leaf scale', 0.3000), + # ('NodeSocketFloat', 'leaf rot', 45.0000), + # ('NodeSocketFloat', 'fruit density', 10.0000), + # ('NodeSocketFloat', 'fruit scale', 0.0500), + # ('NodeSocketFloat', 'fruit rot', 0.0000)]) + + resample_curve = nw.new_node( + Nodes.ResampleCurve, + input_kwargs={"Curve": curve_line, "Count": kwargs["resolution"]}, + ) + + spline_parameter = nw.new_node(Nodes.SplineParameter) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": spline_parameter.outputs["Factor"], "Y": kwargs["seed"]}, + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": combine_xyz, + "Scale": kwargs["main branch noise scale"], + }, + attrs={"noise_dimensions": "2D"}, + ) + + subtract = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: noise_texture.outputs["Color"], 1: (0.5000, 0.5000, 0.5000)}, + attrs={"operation": "SUBTRACT"}, + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": spline_parameter.outputs["Factor"], 2: 0.2000}, + ) + + scale = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: subtract.outputs["Vector"], + "Scale": map_range.outputs["Result"], + }, + attrs={"operation": "SCALE"}, + ) + + scale_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: scale.outputs["Vector"], + "Scale": kwargs["main branch noise amount"], + }, + attrs={"operation": "SCALE"}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={"Geometry": resample_curve, "Offset": scale_1.outputs["Vector"]}, + ) + + capture_attribute = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={"Geometry": set_position, 2: spline_parameter.outputs["Factor"]}, + ) + + add = nw.new_node(Nodes.Math, input_kwargs={0: kwargs["seed"], 1: 13.0000}) + + generateanchor = nw.new_node( + nodegroup_generate_anchor().name, + input_kwargs={ + "Curve": capture_attribute, + "curve parameter": capture_attribute.outputs[2], + "trim_top": 0.9000, + "seed": add, + "density": kwargs["fruit density"], + "keep probablity": 0.3000, + }, + ) + + collection_info_1 = nw.new_node( + Nodes.CollectionInfo, + input_kwargs={ + "Collection": kwargs["fruit collection"], + "Separate Children": True, + "Reset Children": True, + }, + ) + + createinstance = nw.new_node( + nodegroup_create_instance().name, + input_kwargs={ + "Points": generateanchor, + "Instance": collection_info_1, + "Pick Instance": True, + "Rot x deg": kwargs["fruit rot"], + "Scale": kwargs["fruit scale"], + "Seed": kwargs["seed"], + }, + ) + + keep_probablity = nw.new_node(Nodes.Value, label="keep probablity") + keep_probablity.outputs[0].default_value = 0.3000 + + divide = nw.new_node( + Nodes.Math, + input_kwargs={0: kwargs["twig density"], 1: keep_probablity}, + attrs={"operation": "DIVIDE"}, + ) + + curve_to_points = nw.new_node( + Nodes.CurveToPoints, input_kwargs={"Curve": capture_attribute, "Count": divide} + ) + + curve_line_1 = nw.new_node( + Nodes.CurveLine, input_kwargs={"End": (0.0000, 0.0000, 0.1000)} + ) + + divide_1 = 
nw.new_node( + Nodes.Math, + input_kwargs={0: kwargs["resolution"], 1: 2.0000}, + attrs={"operation": "DIVIDE"}, + ) + + resample_curve_2 = nw.new_node( + Nodes.ResampleCurve, input_kwargs={"Curve": curve_line_1, "Count": divide_1} + ) + + spline_parameter_1 = nw.new_node(Nodes.SplineParameter) + + capture_attribute_1 = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={ + "Geometry": resample_curve_2, + 2: spline_parameter_1.outputs["Factor"], + }, + ) + + add_1 = nw.new_node(Nodes.Math, input_kwargs={0: kwargs["seed"], 1: 37.0000}) + + random_value = nw.new_node( + Nodes.RandomValue, + input_kwargs={"Probability": keep_probablity, "Seed": add_1}, + attrs={"data_type": "BOOLEAN"}, + ) + + index = nw.new_node(Nodes.Index) + + multiply = nw.new_node( + Nodes.Math, input_kwargs={0: divide, 1: 0.0500}, attrs={"operation": "MULTIPLY"} + ) + + greater_equal = nw.new_node( + Nodes.Compare, + input_kwargs={2: index, 3: multiply}, + attrs={"data_type": "INT", "operation": "GREATER_EQUAL"}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, input_kwargs={0: divide, 1: 0.9000}, attrs={"operation": "MULTIPLY"} + ) + + less_equal = nw.new_node( + Nodes.Compare, + input_kwargs={2: index, 3: multiply_1}, + attrs={"data_type": "INT", "operation": "LESS_EQUAL"}, + ) + + op_and = nw.new_node( + Nodes.BooleanMath, input_kwargs={0: greater_equal, 1: less_equal} + ) + + op_and_1 = nw.new_node( + Nodes.BooleanMath, input_kwargs={0: random_value.outputs[3], 1: op_and} + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: kwargs["twig rotation"], 1: -1.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + map_range_2 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": capture_attribute.outputs[2], 3: 1.0000, 4: 0.1000}, + ) + + multiply_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: map_range_2.outputs["Result"], 1: kwargs["twig scale"]}, + attrs={"operation": "MULTIPLY"}, + ) + + createinstance_1 = nw.new_node( + nodegroup_create_instance().name, + input_kwargs={ + "Points": curve_to_points.outputs["Points"], + "Instance": capture_attribute_1.outputs["Geometry"], + "Selection": op_and_1, + "Tangent": curve_to_points.outputs["Tangent"], + "Rot x deg": multiply_2, + "Scale": multiply_3, + "Seed": kwargs["seed"], + }, + ) + + realize_instances = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": createinstance_1} + ) + + position = nw.new_node(Nodes.InputPosition) + + noise_texture_1 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={"Vector": position, "W": kwargs["seed"], "Scale": 1.5000}, + attrs={"noise_dimensions": "4D"}, + ) + + subtract_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: noise_texture_1.outputs["Color"], 1: (0.5000, 0.5000, 0.5000)}, + attrs={"operation": "SUBTRACT"}, + ) + + map_range_3 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": capture_attribute_1.outputs[2], 2: 0.2000}, + ) + + scale_2 = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: subtract_1.outputs["Vector"], + "Scale": map_range_3.outputs["Result"], + }, + attrs={"operation": "SCALE"}, + ) + + scale_3 = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: scale_2.outputs["Vector"], + "Scale": kwargs["twig noise amount"], + }, + attrs={"operation": "SCALE"}, + ) + + set_position_1 = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": realize_instances, + "Offset": scale_3.outputs["Vector"], + }, + ) + + curve_tangent = nw.new_node(Nodes.CurveTangent) + + capture_attribute_2 = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={"Geometry": set_position_1, 
1: curve_tangent}, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + add_2 = nw.new_node(Nodes.Math, input_kwargs={0: kwargs["seed"], 1: 17.0000}) + + generateanchor_1 = nw.new_node( + nodegroup_generate_anchor().name, + input_kwargs={ + "Curve": capture_attribute_2.outputs["Geometry"], + "curve parameter": capture_attribute_1.outputs[2], + "trim_top": 1.0000, + "seed": add_2, + "density": kwargs["leaf density"], + "keep probablity": 0.3000, + }, + ) + + collection_info = nw.new_node( + Nodes.CollectionInfo, + input_kwargs={ + "Collection": kwargs["leaf collection"], + "Separate Children": True, + "Reset Children": True, + }, + ) + + createinstance_2 = nw.new_node( + nodegroup_create_instance().name, + input_kwargs={ + "Points": generateanchor_1, + "Instance": collection_info, + "Pick Instance": True, + "Tangent": capture_attribute_2.outputs["Attribute"], + "Rot x deg": kwargs["leaf rot"], + "Scale": kwargs["leaf scale"], + "Seed": kwargs["seed"], + }, + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": capture_attribute.outputs[2], 3: 1.0000, 4: 0.4000}, + ) + + multiply_4 = nw.new_node( + Nodes.Math, + input_kwargs={0: map_range_1.outputs["Result"], 1: kwargs["overall radius"]}, + attrs={"operation": "MULTIPLY"}, + ) + + set_curve_radius = nw.new_node( + Nodes.SetCurveRadius, + input_kwargs={"Curve": capture_attribute, "Radius": multiply_4}, + ) + + multiply_5 = nw.new_node( + Nodes.Math, + input_kwargs={0: kwargs["resolution"], 1: kwargs["overall radius"]}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_6 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_5, 1: 6.2832}, + attrs={"operation": "MULTIPLY"}, + ) + + curve_circle = nw.new_node( + Nodes.CurveCircle, input_kwargs={"Resolution": multiply_6} + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": set_curve_radius, + "Profile Curve": curve_circle.outputs["Curve"], + "Fill Caps": True, + }, + ) + + map_range_4 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": capture_attribute_1.outputs[2], 3: 0.8000, 4: 0.1000}, + ) + + multiply_7 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: map_range_4.outputs["Result"], + 1: map_range_1.outputs["Result"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_8 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_7, 1: kwargs["overall radius"]}, + attrs={"operation": "MULTIPLY"}, + ) + + set_curve_radius_1 = nw.new_node( + Nodes.SetCurveRadius, + input_kwargs={ + "Curve": capture_attribute_2.outputs["Geometry"], + "Radius": multiply_8, + }, + ) + + divide_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_6, 1: 2.0000}, + attrs={"operation": "DIVIDE"}, + ) + + curve_circle_1 = nw.new_node( + Nodes.CurveCircle, input_kwargs={"Resolution": divide_2} + ) + + curve_to_mesh_1 = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": set_curve_radius_1, + "Profile Curve": curve_circle_1.outputs["Curve"], + "Fill Caps": True, + }, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [curve_to_mesh, curve_to_mesh_1]} + ) + + set_material = nw.new_node( + Nodes.SetMaterial, + input_kwargs={"Geometry": join_geometry, "Material": kwargs["material"]}, + ) + + surfacebump = nw.new_node( + nodegroup_surface_bump().name, + input_kwargs={"Geometry": set_material, "Displacement": 0.0050}, + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [createinstance, createinstance_2, surfacebump]}, + ) + + transform = nw.new_node( + Nodes.Transform, + 
input_kwargs={ + "Geometry": join_geometry_1, + "Rotation": (-1.5708, 0.0000, 0.0000), + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": transform}, + attrs={"is_active_output": True}, + ) + + +class BranchFactory(AssetFactory): + def __init__(self, factory_seed, twig_col, fruit_col, coarse=False): + super().__init__(factory_seed, coarse=coarse) + + self.avg_fruit_dim = np.cbrt( + np.mean([np.prod(list(o.dimensions)) for o in fruit_col.objects]) + ) + + with FixedSeed(factory_seed): + self.branch_params = self.sample_branch_params() + + self.branch_params["leaf collection"] = twig_col + self.branch_params["fruit collection"] = fruit_col + self.branch_params["material"] = twig_col.objects[0].active_material + + def sample_branch_params(self): + return { + "resolution": 256, + "main branch noise amount": uniform(0.2, 0.4), + "main branch noise scale": uniform(0.9, 1.3), + "overall radius": uniform(0.015, 0.025), + "twig density": uniform(5, 15), + "twig rotation": uniform(30, 60), + "twig scale": uniform(3, 7), + "twig noise amount": uniform(0.2, 0.4), + "leaf density": uniform(5, 25), + "leaf scale": uniform(0.25, 0.35), + "leaf rot": uniform(30, 60), + "fruit scale": uniform(0.15, 0.25), + "fruit rot": 0.0, + "fruit density": np.clip(uniform(1, 5) / self.avg_fruit_dim, 0.01, 50), + } + + def create_asset(self, **params): + bpy.ops.mesh.primitive_plane_add( + size=2, + enter_editmode=False, + align="WORLD", + location=(0, 0, 0), + scale=(1, 1, 1), + ) + obj = bpy.context.active_object + + phenome = self.branch_params.copy() + phenome["seed"] = randint(10000000) + + surface.add_geomod(obj, generate_branch, input_kwargs=phenome) + + return obj diff --git a/infinigen/assets/objects/trees/generate.py b/infinigen/assets/objects/trees/generate.py new file mode 100644 index 000000000..f0a0f95f6 --- /dev/null +++ b/infinigen/assets/objects/trees/generate.py @@ -0,0 +1,556 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Alexander Raistrick, Yiming Zuo, Alejandro Newell, Lingjie Mei + + +import logging + +import bpy +import gin +import numpy as np +from numpy.random import uniform + +from infinigen.assets.objects.cloud import CloudFactory +from infinigen.assets.objects.fruits import ( + apple, + blackberry, + coconutgreen, + compositional_fruit, + durian, + starfruit, + strawberry, +) +from infinigen.assets.objects.leaves import ( + leaf, + leaf_broadleaf, + leaf_ginko, + leaf_maple, + leaf_pine, + leaf_v2, +) +from infinigen.assets.objects.trees import branch, tree, treeconfigs +from infinigen.assets.utils.misc import toggle_hide, toggle_show +from infinigen.core import surface +from infinigen.core.placement import detail +from infinigen.core.placement.factory import AssetFactory, make_asset_collection +from infinigen.core.placement.split_in_view import split_inview +from infinigen.core.tagging import tag_object +from infinigen.core.util import blender as butil +from infinigen.core.util.blender import deep_clone_obj +from infinigen.core.util.math import FixedSeed + +from . 
+import tree_flower
+
+logger = logging.getLogger(__name__)
+
+
+@gin.configurable
+class GenericTreeFactory(AssetFactory):
+    scale = (
+        0.35  # trees are defined in weird units currently, need converting to meters
+    )
+
+    def __init__(
+        self,
+        factory_seed,
+        genome: tree.TreeParams,
+        child_col,
+        trunk_surface,
+        realize=False,
+        meshing_camera=None,
+        cam_meshing_max_dist=1e7,
+        coarse_mesh_placeholder=False,
+        adapt_mesh_method="remesh",
+        decimate_placeholder_levels=0,
+        min_dist=None,
+        coarse=False,
+    ):
+        super(GenericTreeFactory, self).__init__(factory_seed, coarse=coarse)
+
+        self.genome = genome
+        self.child_col = child_col
+        self.trunk_surface = trunk_surface
+        self.realize = realize
+
+        self.camera = meshing_camera
+        self.cam_meshing_max_dist = cam_meshing_max_dist
+        self.adapt_mesh_method = adapt_mesh_method
+        self.decimate_placeholder_levels = decimate_placeholder_levels
+        self.coarse_mesh_placeholder = coarse_mesh_placeholder
+
+        self.min_dist = min_dist
+
+    def create_placeholder(self, i, loc, rot):
+        logger.debug("generating tree skeleton")
+        skeleton_obj = tree.tree_skeleton(
+            self.genome.skeleton,
+            self.genome.trunk_spacecol,
+            self.genome.roots_spacecol,
+            init_pos=(0, 0, 0),
+            scale=self.scale,
+        )
+
+        if self.coarse_mesh_placeholder:
+            pholder = self._create_coarse_mesh(skeleton_obj)
+        else:
+            pholder = butil.spawn_cube(size=4)
+
+        butil.parent_to(skeleton_obj, pholder, no_inverse=True)
+        return pholder
+
+    def _create_coarse_mesh(self, skeleton_obj):
+        logger.debug("generating skinned mesh")
+        coarse_mesh = deep_clone_obj(skeleton_obj)
+        surface.add_geomod(
+            coarse_mesh,
+            tree.skin_tree,
+            input_kwargs={"params": self.genome.skinning},
+            apply=True,
+        )
+
+        if self.decimate_placeholder_levels > 0:
+            butil.modify_mesh(
+                coarse_mesh,
+                "DECIMATE",
+                decimate_type="UNSUBDIV",
+                iterations=self.decimate_placeholder_levels,
+            )
+
+        return coarse_mesh
+
+    def finalize_placeholders(self, placeholders):
+        if not self.coarse_mesh_placeholder:
+            return
+        with FixedSeed(self.factory_seed):
+            logger.debug(f"adding {self.trunk_surface} to {len(placeholders)=}")
+            self.trunk_surface.apply(placeholders)
+
+    def asset_parameters(self, distance: float, vis_distance: float) -> dict:
+        if self.min_dist is not None and distance < self.min_dist:
+            logger.warning(
+                f"{self} received {distance=} which violates {self.min_dist=}. 
Ignoring" + ) + distance = self.min_dist + return dict(face_size=detail.target_face_size(distance), distance=distance) + + def create_asset( + self, placeholder, face_size, distance, **kwargs + ) -> bpy.types.Object: + skeleton_obj = placeholder.children[0] + + if not self.coarse_mesh_placeholder: + skin_obj = self._create_coarse_mesh(skeleton_obj) + self.trunk_surface.apply(skin_obj) + butil.parent_to(skeleton_obj, skin_obj, no_inverse=True) + else: + skin_obj = butil.deep_clone_obj(placeholder) + + if self.child_col is not None: + assert self.genome.child_placement is not None + + max_needed_child_fs = ( + detail.target_face_size(self.min_dist, global_multiplier=1) + if self.min_dist is not None + else None + ) + + logger.debug(f"adding tree children using {self.child_col=}") + butil.select_none() + surface.add_geomod( + skeleton_obj, + tree.add_tree_children, + input_kwargs=dict( + child_col=self.child_col, + params=self.genome.child_placement, + realize=self.realize, + merge_dist=max_needed_child_fs, + ), + ) + + if self.camera is not None and distance < self.cam_meshing_max_dist: + assert self.adapt_mesh_method != "remesh" + + skin_obj_cleanup = skin_obj + skin_obj, outofview, vert_dists, _ = split_inview( + skin_obj, cam=self.camera, vis_margin=0.15 + ) + butil.parent_to(outofview, skin_obj, no_inverse=True, no_transform=True) + + butil.delete(skin_obj_cleanup) + face_size = detail.target_face_size(vert_dists.min()) + + skin_obj.hide_render = False + + if self.adapt_mesh_method == "remesh": + butil.modify_mesh( + skin_obj, "SUBSURF", levels=self.decimate_placeholder_levels + 1 + ) # one extra level to smooth things out or remesh is jaggedy + + with butil.DisableModifiers(skin_obj): + detail.adapt_mesh_resolution( + skin_obj, face_size, method=self.adapt_mesh_method, apply=True + ) + + butil.parent_to(skin_obj, placeholder, no_inverse=True, no_transform=True) + + if self.realize: + logger.debug("realizing tree children") + butil.apply_modifiers(skin_obj) + butil.apply_modifiers(skeleton_obj) + + butil.join_objects([skin_obj, skeleton_obj]) + assert len(skin_obj.children) == 0 + else: + butil.parent_to(skeleton_obj, skin_obj, no_inverse=True) + + tag_object(skin_obj, "tree") + butil.apply_modifiers(skin_obj) + + return skin_obj + + +@gin.configurable +def random_season(weights=None): + options = ["autumn", "summer", "spring", "winter"] + + if weights is not None: + weights = np.array([weights[k] for k in options]) + else: + weights = np.array([0.25, 0.3, 0.4, 0.1]) + return np.random.choice(options, p=weights / weights.sum()) + + +@gin.configurable +def random_species(season="summer", pine_chance=0.0): + tree_species_code = np.random.rand(32) + + if season is None: + season = random_season() + + if tree_species_code[-1] < pine_chance: + return treeconfigs.pine_tree(), "leaf_pine" + # elif tree_species_code < 0.2: + # tree_args = treeconfigs.palm_tree() + # elif tree_species_code < 0.3: + # tree_args = treeconfigs.baobab_tree() + else: + return treeconfigs.random_tree(tree_species_code, season), None + + +def random_tree_child_factory(seed, leaf_params, leaf_type, season, **kwargs): + if season is None: + season = random_season() + + fruit_scale = 0.2 + + if leaf_type is None: + return None, None + elif leaf_type == "leaf": + return leaf.LeafFactory(seed, leaf_params, **kwargs), surface.registry( + "greenery" + ) + elif leaf_type == "leaf_pine": + return leaf_pine.LeafFactoryPine(seed, season, **kwargs), None + elif leaf_type == "leaf_ginko": + return leaf_ginko.LeafFactoryGinko(seed, 
season, **kwargs), None + elif leaf_type == "leaf_maple": + return leaf_maple.LeafFactoryMaple(seed, season, **kwargs), None + elif leaf_type == "leaf_broadleaf": + return leaf_broadleaf.LeafFactoryBroadleaf(seed, season, **kwargs), None + elif leaf_type == "leaf_v2": + return leaf_v2.LeafFactoryV2(seed, **kwargs), None + elif leaf_type == "berry": + return leaf.BerryFactory(seed, leaf_params, **kwargs), None + elif leaf_type == "apple": + return apple.FruitFactoryApple(seed, scale=fruit_scale, **kwargs), None + elif leaf_type == "blackberry": + return blackberry.FruitFactoryBlackberry( + seed, scale=fruit_scale, **kwargs + ), None + elif leaf_type == "coconutgreen": + return coconutgreen.FruitFactoryCoconutgreen( + seed, scale=fruit_scale, **kwargs + ), None + elif leaf_type == "durian": + return durian.FruitFactoryDurian(seed, scale=fruit_scale, **kwargs), None + elif leaf_type == "starfruit": + return starfruit.FruitFactoryStarfruit(seed, scale=fruit_scale, **kwargs), None + elif leaf_type == "strawberry": + return strawberry.FruitFactoryStrawberry( + seed, scale=fruit_scale, **kwargs + ), None + elif leaf_type == "compositional_fruit": + return compositional_fruit.FruitFactoryCompositional( + seed, scale=fruit_scale, **kwargs + ), None + elif leaf_type == "flower": + return tree_flower.TreeFlowerFactory( + seed, rad=uniform(0.15, 0.25), **kwargs + ), None + elif leaf_type == "cloud": + return CloudFactory(seed), None + else: + raise ValueError(f"Unrecognized {leaf_type=}") + + +def make_leaf_collection( + seed, leaf_params, n_leaf, leaf_types, decimate_rate=0.0, season=None +): + logger.debug(f"Starting make_leaf_collection({seed=}, {n_leaf=} ...)") + + if season is None: + season = random_season() + + weights = [] + + if not isinstance(leaf_types, list): + leaf_types = [leaf_types] + + child_factories = [] + for leaf_type in leaf_types: + if leaf_type is not None: + leaf_factory, _ = random_tree_child_factory( + seed, leaf_params, leaf_type=leaf_type, season=season + ) + child_factories.append(leaf_factory) + weights.append(1.0) + + weights = np.array(weights) + weights /= np.sum(weights) # normalize to 1 + + col = make_asset_collection(child_factories, n_leaf, verbose=True, weights=weights) + # if leaf_surface is not None: + # leaf_surface.apply(list(col.objects)) + toggle_show(col) + for obj in col.objects: + if decimate_rate > 0: + butil.modify_mesh(obj, "DECIMATE", ratio=1.0 - decimate_rate, apply=True) + butil.apply_transform(obj, rot=True, scale=True) + butil.apply_modifiers(obj) + toggle_hide(col) + return col + + +def random_leaf_collection(season, n=5): + (_, _, leaf_params), leaf_type = random_species(season=season) + return make_leaf_collection( + np.random.randint(1e5), + leaf_params, + n_leaf=n, + leaf_types=leaf_type or "leaf_v2", + decimate_rate=0.97, + ) + + +def make_twig_collection( + seed, + twig_params, + leaf_params, + trunk_surface, + n_leaf, + n_twig, + leaf_types, + season=None, + twig_valid_dist=6, +): + logger.debug(f"Starting make_twig_collection({seed=}, {n_leaf=}, {n_twig=}...)") + + if season is None: + season = random_season() + + if leaf_types is not None: + child_col = make_leaf_collection( + seed, leaf_params, n_leaf, leaf_types, season=season, decimate_rate=0.97 + ) + else: + child_col = None + + twig_factory = GenericTreeFactory( + seed, twig_params, child_col, trunk_surface=trunk_surface, realize=True + ) + col = make_asset_collection( + twig_factory, n_twig, verbose=False, distance=twig_valid_dist + ) + + if child_col is not None: + 
child_col.hide_viewport = False + butil.delete(list(child_col.objects)) + return col + + +def make_branch_collection(seed, twig_col, fruit_col, n_branch, coarse=False): + logger.debug(f"Starting make_branch_collection({seed=}, ...)") + + branch_factory = branch.BranchFactory( + seed, twig_col=twig_col, fruit_col=fruit_col, coarse=coarse + ) + col = make_asset_collection(branch_factory, n_branch, verbose=False) + + return col + + +@gin.configurable +class TreeFactory(GenericTreeFactory): + n_leaf = 5 + n_twig = 2 + + @staticmethod + def get_leaf_type(season): + # return np.random.choice(['leaf', 'leaf_v2', 'flower', 'berry', 'leaf_ginko'], p=[0, 0.70, 0.15, 0, 0.15]) + # return + # return 'leaf_maple' + leaf_type = np.random.choice( + ["leaf", "leaf_v2", "leaf_broadleaf", "leaf_ginko", "leaf_maple"], + p=[0, 0.0, 0.70, 0.15, 0.15], + ) + flower_type = np.random.choice(["flower", "berry", None], p=[1.0, 0.0, 0.0]) + if season == "spring": + return [flower_type] + else: + return [leaf_type] + # return [leaf_type, flower_type] + # return ['leaf_broadleaf', 'leaf_maple', 'leaf_ginko', 'flower'] + + @staticmethod + def get_fruit_type(): + # return np.random.choice(['leaf', 'leaf_v2', 'flower', 'berry', 'leaf_ginko'], p=[0, 0.70, 0.15, 0, 0.15]) + # return + # return 'leaf_maple' + fruit_type = np.random.choice( + [ + "apple", + "blackberry", + "coconutgreen", + "durian", + "starfruit", + "strawberry", + "compositional_fruit", + ], + p=[0.2, 0.0, 0.2, 0.2, 0.2, 0.0, 0.2], + ) + + return fruit_type + + def __init__(self, seed, season=None, coarse=False, fruit_chance=1.0, **kwargs): + with FixedSeed(seed): + if season is None: + season = np.random.choice(["summer", "winter", "autumn", "spring"]) + + with FixedSeed(seed): + (tree_params, twig_params, leaf_params), leaf_type = random_species(season) + + leaf_type = leaf_type or self.get_leaf_type(season) + if not isinstance(leaf_type, list): + leaf_type = [leaf_type] + + trunk_surface = surface.registry("bark") + + if uniform() < fruit_chance: + fruit_type = self.get_fruit_type() + else: + fruit_type = None + + super(TreeFactory, self).__init__( + seed, + tree_params, + child_col=None, + trunk_surface=trunk_surface, + coarse=coarse, + **kwargs, + ) + + with FixedSeed(seed): + colname = f"assets:{self}.twigs" + use_cached = colname in bpy.data.collections + if use_cached == coarse: + logger.warning( + f"In {self}, encountered {use_cached=} yet {coarse=}, unexpected since twigs are typically generated only in coarse" + ) + + if colname not in bpy.data.collections: + twig_col = make_twig_collection( + seed, + twig_params, + leaf_params, + trunk_surface, + self.n_leaf, + self.n_twig, + leaf_type, + season=season, + ) + if fruit_type is not None: + fruit_col = make_leaf_collection( + seed, + leaf_params, + self.n_leaf, + fruit_type, + season=season, + decimate_rate=0.0, + ) + else: + fruit_col = butil.get_collection("Empty", reuse=True) + + self.child_col = make_branch_collection( + seed, twig_col, fruit_col, n_branch=self.n_twig + ) + self.child_col.name = colname + + assert ( + self.child_col.name == colname + ), f"Blender truncated {colname} to {self.child_col.name}" + else: + self.child_col = bpy.data.collections[colname] + + +@gin.configurable +class BushFactory(GenericTreeFactory): + n_leaf = 3 + n_twig = 3 + max_distance = 50 + + def __init__(self, seed, coarse=False, **kwargs): + with FixedSeed(seed): + shrub_shape = np.random.randint(2) + trunk_surface = surface.registry("bark") + tree_params, twig_params, leaf_params = treeconfigs.shrub( + 
shrub_shape=shrub_shape + ) + + super(BushFactory, self).__init__( + seed, + tree_params, + child_col=None, + trunk_surface=trunk_surface, + coarse=coarse, + **kwargs, + ) + + with FixedSeed(seed): + leaf_type = np.random.choice( + ["leaf", "leaf_v2", "flower", "berry"], p=[0.1, 0.4, 0.5, 0] + ) + + colname = f"assets:{self}.twigs" + use_cached = colname in bpy.data.collections + if use_cached == coarse: + logger.warning( + f"In {self}, encountered {use_cached=} yet {coarse=}, unexpected since twigs are typically generated only in coarse" + ) + + if colname not in bpy.data.collections: + self.child_col = make_twig_collection( + seed, + twig_params, + leaf_params, + trunk_surface, + self.n_leaf, + self.n_twig, + leaf_type, + ) + self.child_col.name = colname + assert ( + self.child_col.name == colname + ), f"Blender truncated {colname} to {self.child_col.name}" + else: + self.child_col = bpy.data.collections[colname] diff --git a/infinigen/assets/trees/tree.py b/infinigen/assets/objects/trees/tree.py similarity index 73% rename from infinigen/assets/trees/tree.py rename to infinigen/assets/objects/trees/tree.py index 13f033528..3ec735a29 100644 --- a/infinigen/assets/trees/tree.py +++ b/infinigen/assets/objects/trees/tree.py @@ -4,27 +4,25 @@ # Authors: Alejandro Newell -import pdb -from dataclasses import dataclass import warnings +from dataclasses import dataclass import bpy import numpy as np from scipy.interpolate import interp1d -from .utils import helper, mesh -from .utils import geometrynodes as gn -from infinigen.assets.leaves import leaf - +from infinigen.assets.utils.object import data2mesh, mesh2obj from infinigen.core.nodes.node_wrangler import Nodes from infinigen.core.util import blender as butil -from infinigen.assets.utils.object import data2mesh, mesh2obj + +from .utils import geometrynodes as gn +from .utils import helper, mesh C = bpy.context D = bpy.data -class TreeVertices(): +class TreeVertices: def __init__(self, vtxs=None, parent=None, level=None): """Define vertices and edges to outline tree geometry.""" if vtxs is None: @@ -106,16 +104,20 @@ def parse_tree_attributes(vtx): # if there is already a longer path connected to this parent, we create a dummy # copy of the parent node, and connect the current child to the dummy parent. - # This makes sure each point will have no more than one child. + # This makes sure each point will have no more than one child. new_p_id = n # p for parent. 
start from the last of the array for idx in range(n): children = np.array([v for v in edge_ref[idx] if v != parents[idx]]) if len(children) >= 2: child_depths = rev_depth[children] - deepest_child_idx = children[child_depths.argmax()] # we keep this untouched + deepest_child_idx = children[ + child_depths.argmax() + ] # we keep this untouched - children_idxs_to_deal = np.setdiff1d(children, np.array([deepest_child_idx])) + children_idxs_to_deal = np.setdiff1d( + children, np.array([deepest_child_idx]) + ) for child_idx_to_deal in children_idxs_to_deal: new_p_pos = vtx_pos[idx] # len-3 new_p_parent = parents[idx] @@ -135,7 +137,9 @@ def parse_tree_attributes(vtx): # new connection # note we don't connect the new node with its parent - edge_ref[new_p_id] = [child_idx_to_deal, ] + edge_ref[new_p_id] = [ + child_idx_to_deal, + ] # remove old connections edge_ref[child_idx_to_deal].remove(idx) @@ -174,21 +178,34 @@ def parse_tree_attributes(vtx): parent_loc[vertex_idx] = vtx_pos[parent_idx] self_loc[vertex_idx] = vtx_pos[vertex_idx] - parent_loc[0] = np.array([0, 0, -1], - dtype=float) # create a fake parent location for the root, to avoid zero-length + parent_loc[0] = np.array( + [0, 0, -1], dtype=float + ) # create a fake parent location for the root, to avoid zero-length # vector return { - 'parent_idx': parents, - 'depth': depth, - 'rev_depth': rev_depth, - 'stem_id': stem_id, - 'parent_skeleton_loc': parent_loc, - 'skeleton_loc': self_loc} - - -def rand_path(n_pts, sz=1, std=.3, momentum=.5, init_vec=[0, 0, 1], init_pt=[0, 0, 0], pull_dir=None, - pull_init=1, pull_factor=0, sz_decay=1, decay_mom=True): + "parent_idx": parents, + "depth": depth, + "rev_depth": rev_depth, + "stem_id": stem_id, + "parent_skeleton_loc": parent_loc, + "skeleton_loc": self_loc, + } + + +def rand_path( + n_pts, + sz=1, + std=0.3, + momentum=0.5, + init_vec=[0, 0, 1], + init_pt=[0, 0, 0], + pull_dir=None, + pull_init=1, + pull_factor=0, + sz_decay=1, + decay_mom=True, +): init_vec = np.array(init_vec, dtype=float) if pull_dir is not None: pull_dir = np.array(pull_dir, dtype=float) @@ -214,14 +231,23 @@ def rand_path(n_pts, sz=1, std=.3, momentum=.5, init_vec=[0, 0, 1], init_pt=[0, else: tmp_momentum = momentum delta = prev_delta * tmp_momentum + new_delta * (1 - tmp_momentum) - delta = (delta / np.linalg.norm(delta)) * sz * (sz_decay ** i) + delta = (delta / np.linalg.norm(delta)) * sz * (sz_decay**i) path[i] = path[i - 1] + delta return path -def get_spawn_pt(path, rng=[.5, 1], ang_min=np.pi / 6, ang_max=.9 * np.pi / 2, rnd_idx=None, ang_sign=None, - axis2=None, init_vec=None, z_bias=0): +def get_spawn_pt( + path, + rng=[0.5, 1], + ang_min=np.pi / 6, + ang_max=0.9 * np.pi / 2, + rnd_idx=None, + ang_sign=None, + axis2=None, + init_vec=None, + z_bias=0, +): n = len(path) if n == 1: return 0, path[0], init_vec @@ -245,8 +271,16 @@ def get_spawn_pt(path, rng=[.5, 1], ang_min=np.pi / 6, ang_max=.9 * np.pi / 2, r return rnd_idx, path[rnd_idx], init_vec -def recursive_path(tree, parent_idxs, level, path_kargs=None, spawn_kargs=None, n=1, symmetry=False, - children=None): +def recursive_path( + tree, + parent_idxs, + level, + path_kargs=None, + spawn_kargs=None, + n=1, + symmetry=False, + children=None, +): if path_kargs is None: return @@ -258,9 +292,11 @@ def recursive_path(tree, parent_idxs, level, path_kargs=None, spawn_kargs=None, curr_path = path_kargs(curr_idx) curr_spawn = spawn_kargs(curr_idx) if symmetry: - curr_spawn['ang_sign'] = 2 * (branch_idx % 2) - 1 + curr_spawn["ang_sign"] = 2 * (branch_idx % 2) - 1 - 
parent_idx, init_pt, init_vec = get_spawn_pt(tree.vtxs[parent_idxs], **curr_spawn) + parent_idx, init_pt, init_vec = get_spawn_pt( + tree.vtxs[parent_idxs], **curr_spawn + ) parent_idx = parent_idxs[parent_idx] path = rand_path(**curr_path, init_pt=init_pt, init_vec=init_vec) @@ -274,7 +310,9 @@ def recursive_path(tree, parent_idxs, level, path_kargs=None, spawn_kargs=None, recursive_path(tree, node_idxs, level + 1, **c) -def remove_matched_atts(atts, vtxs, dist_thr, curr_min, curr_match, idx_offset=0, prev_deltas=None): +def remove_matched_atts( + atts, vtxs, dist_thr, curr_min, curr_match, idx_offset=0, prev_deltas=None +): dists, deltas = helper.compute_dists(atts, vtxs) if prev_deltas is not None: deltas = np.append(prev_deltas, deltas, axis=1) @@ -297,8 +335,18 @@ def remove_matched_atts(atts, vtxs, dist_thr, curr_min, curr_match, idx_offset=0 return atts, deltas, curr_min, curr_match -def space_colonization(tree, atts, D=.1, d=10.0, s=.1, pull_dir=None, dir_rand=.1, mag_rand=.15, n_steps=200, - level=0): +def space_colonization( + tree, + atts, + D=0.1, + d=10.0, + s=0.1, + pull_dir=None, + dir_rand=0.1, + mag_rand=0.15, + n_steps=200, + level=0, +): # D: length of each growing step # d: init value for distance between attractors and points. safe to set to a very large value (e.g., 10) # s: if distance between an attractor and any point is less than s, we remove the attractor. should be @@ -312,10 +360,14 @@ def space_colonization(tree, atts, D=.1, d=10.0, s=.1, pull_dir=None, dir_rand=. curr_min = np.zeros(len(atts)) + d curr_match = -np.ones(len(atts)).astype(int) - atts, deltas, curr_min, curr_match = remove_matched_atts(atts, tree.vtxs, s, curr_min, curr_match) + atts, deltas, curr_min, curr_match = remove_matched_atts( + atts, tree.vtxs, s, curr_min, curr_match + ) if np.all(curr_match == -1): - warnings.warn('Space colonization attractor matching failed, all curr_match == -1') + warnings.warn( + "Space colonization attractor matching failed, all curr_match == -1" + ) return for i in range(n_steps): @@ -342,8 +394,9 @@ def space_colonization(tree, atts, D=.1, d=10.0, s=.1, pull_dir=None, dir_rand=. 
new_vtxs = np.stack(new_vtxs, 0) tree.append(new_vtxs, new_parents, level) - atts, deltas, curr_min, curr_match = remove_matched_atts(atts, new_vtxs, s, curr_min, curr_match, - idx_offset, deltas) + atts, deltas, curr_min, curr_match = remove_matched_atts( + atts, new_vtxs, s, curr_min, curr_match, idx_offset, deltas + ) if atts.shape[0] == 0: break @@ -358,7 +411,9 @@ class TreeParams: skinning: dict -def tree_skeleton(skeleton_params: dict, trunk_spacecol: dict, roots_spacecol: dict, init_pos, scale): +def tree_skeleton( + skeleton_params: dict, trunk_spacecol: dict, roots_spacecol: dict, init_pos, scale +): vtx = TreeVertices(np.array(init_pos).reshape(-1, 3)) recursive_path(vtx, vtx.get_idxs(), level=0, **skeleton_params) @@ -369,17 +424,18 @@ def tree_skeleton(skeleton_params: dict, trunk_spacecol: dict, roots_spacecol: d space_colonization(vtx, **roots_spacecol, level=-1) attributes = parse_tree_attributes(vtx) - obj = mesh.init_mesh('Tree', vtx.vtxs, vtx.get_edges()) - attributes['level'] = np.array(vtx.level) + obj = mesh.init_mesh("Tree", vtx.vtxs, vtx.get_edges()) + attributes["level"] = np.array(vtx.level) for att_name, att_val in attributes.items(): if att_val.ndim == 2: - obj.data.attributes.new(name=att_name, type='FLOAT_VECTOR', domain='POINT') - obj.data.attributes[att_name].data.foreach_set('vector', att_val.reshape( - -1) * scale) # vector value should be scaled together with the obj + obj.data.attributes.new(name=att_name, type="FLOAT_VECTOR", domain="POINT") + obj.data.attributes[att_name].data.foreach_set( + "vector", att_val.reshape(-1) * scale + ) # vector value should be scaled together with the obj else: - obj.data.attributes.new(name=att_name, type='INT', domain='POINT') - obj.data.attributes[att_name].data.foreach_set('value', att_val) + obj.data.attributes.new(name=att_name, type="INT", domain="POINT") + obj.data.attributes[att_name].data.foreach_set("value", att_val) obj.scale *= scale with butil.SelectObjects(obj): @@ -389,45 +445,64 @@ def tree_skeleton(skeleton_params: dict, trunk_spacecol: dict, roots_spacecol: d def skin_tree(nw, params, source_obj=None): - base_geo = nw.new_node(Nodes.GroupInput).outputs['Geometry'] - skin = nw.new_node(gn.set_tree_radius().name, input_kwargs={ - 'Geometry': base_geo, - 'Reverse depth': nw.expose_input('Reverse depth', attribute='rev_depth'), **params}) + base_geo = nw.new_node(Nodes.GroupInput).outputs["Geometry"] + skin = nw.new_node( + gn.set_tree_radius().name, + input_kwargs={ + "Geometry": base_geo, + "Reverse depth": nw.expose_input("Reverse depth", attribute="rev_depth"), + **params, + }, + ) - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': skin}) + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Geometry": skin}) def add_tree_children(nw, child_col, params, merge_dist=None, realize=False): - base_geo = nw.new_node(Nodes.GroupInput).outputs['Geometry'] + base_geo = nw.new_node(Nodes.GroupInput).outputs["Geometry"] - rev_depth = nw.expose_input('Reverse Depth', attribute='rev_depth') + rev_depth = nw.expose_input("Reverse Depth", attribute="rev_depth") - depth_range = params.pop('depth_range', None) + depth_range = params.pop("depth_range", None) if depth_range is not None: min, max = depth_range - lt = nw.new_node(Nodes.Math, [rev_depth, max + 0.01], attrs={'operation': 'LESS_THAN'}) - gt = nw.new_node(Nodes.Math, [rev_depth, min - 0.01], attrs={'operation': 'GREATER_THAN'}) - selection = nw.new_node(Nodes.BooleanMath, [lt, gt], attrs={'operation': 'AND'}) + lt = nw.new_node( 
+ Nodes.Math, [rev_depth, max + 0.01], attrs={"operation": "LESS_THAN"} + ) + gt = nw.new_node( + Nodes.Math, [rev_depth, min - 0.01], attrs={"operation": "GREATER_THAN"} + ) + selection = nw.new_node(Nodes.BooleanMath, [lt, gt], attrs={"operation": "AND"}) else: selection = None - children = nw.new_node(gn.coll_distribute(merge_dist=merge_dist).name, input_kwargs={ - 'Geometry': base_geo, - 'Collection': child_col, - 'Selection': selection, **params}) + children = nw.new_node( + gn.coll_distribute(merge_dist=merge_dist).name, + input_kwargs={ + "Geometry": base_geo, + "Collection": child_col, + "Selection": selection, + **params, + }, + ) if realize: children = nw.new_node(Nodes.RealizeInstances, [children]) - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': children}) + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Geometry": children}) class FineTreeVertices(TreeVertices): - def __init__(self, vtxs=None, parent=None, level=None, radius_fn=None, resolution=1): + def __init__( + self, vtxs=None, parent=None, level=None, radius_fn=None, resolution=1 + ): super(FineTreeVertices, self).__init__(vtxs, parent, level) self.resolution = resolution if radius_fn is None: - radius_fn = (lambda base_radius, size, resolution: [1] * size) + + def radius_fn(base_radius, size, resolution): + return [1] * size + self.radius_fn = radius_fn self.detailed_locations = [[0, 0, 0]] self.radius = [1] @@ -435,31 +510,43 @@ def __init__(self, vtxs=None, parent=None, level=None, radius_fn=None, resolutio def append(self, v, p, l=None): super(FineTreeVertices, self).append(v, p, l) - f = interp1d(np.arange(len(v) + 1), np.concatenate([self.vtxs[p[0]:p[0] + 1], v]), axis=0, - kind='quadratic') - self.detailed_locations.extend(f(np.linspace(0, len(v), len(v) * self.resolution + 1))[1:]) + f = interp1d( + np.arange(len(v) + 1), + np.concatenate([self.vtxs[p[0] : p[0] + 1], v]), + axis=0, + kind="quadratic", + ) + self.detailed_locations.extend( + f(np.linspace(0, len(v), len(v) * self.resolution + 1))[1:] + ) base_radius = self.radius[p[0] * self.resolution] self.radius.extend(self.radius_fn(base_radius, len(v), self.resolution)) self.detailed_parents.append(p[0] * self.resolution) self.detailed_parents.extend( - np.arange(0, len(v) * self.resolution - 1) + len(self.detailed_parents) - 1) + np.arange(0, len(v) * self.resolution - 1) + len(self.detailed_parents) - 1 + ) @property def edges(self): - edges = np.stack([np.arange(len(self.detailed_locations)), np.array(self.detailed_parents)], 1) + edges = np.stack( + [np.arange(len(self.detailed_locations)), np.array(self.detailed_parents)], + 1, + ) return edges[edges[:, 1] != -1] def fix_first(self): self.radius[0] = self.radius[1] -def build_radius_tree(radius_fn, branch_config, base_radius=.002, resolution=1, fix_first=False): +def build_radius_tree( + radius_fn, branch_config, base_radius=0.002, resolution=1, fix_first=False +): vtx = FineTreeVertices(np.zeros((1, 3)), radius_fn=radius_fn, resolution=resolution) recursive_path(vtx, vtx.get_idxs(), level=0, **branch_config) if fix_first: vtx.radius[0] = vtx.radius[1] - obj = mesh2obj(data2mesh(vtx.detailed_locations, vtx.edges, [], 'tree')) - vg_a = obj.vertex_groups.new(name='radius') + obj = mesh2obj(data2mesh(vtx.detailed_locations, vtx.edges, [], "tree")) + vg_a = obj.vertex_groups.new(name="radius") for i, r in enumerate(vtx.radius): - vg_a.add([i], base_radius * r, 'REPLACE') + vg_a.add([i], base_radius * r, "REPLACE") return obj diff --git 
a/infinigen/assets/objects/trees/tree_flower.py b/infinigen/assets/objects/trees/tree_flower.py new file mode 100644 index 000000000..504a18fee --- /dev/null +++ b/infinigen/assets/objects/trees/tree_flower.py @@ -0,0 +1,964 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: +# - Yiming Zuo - modifications +# - Alexander Raistrick - authored original flower.py + + +# Code generated using version v2.0.1 of the node_transpiler +import bpy +import numpy as np +from numpy.random import normal, uniform + +from infinigen.core import surface +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.tagging import tag_nodegroup, tag_object +from infinigen.core.util import blender as butil +from infinigen.core.util import color +from infinigen.core.util.math import FixedSeed, dict_lerp + + +@node_utils.to_nodegroup("nodegroup_polar_to_cart_old", singleton=True) +def nodegroup_polar_to_cart_old(nw): + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVector", "Addend", (0.0, 0.0, 0.0)), + ("NodeSocketFloat", "Value", 0.5), + ("NodeSocketVector", "Vector", (0.0, 0.0, 0.0)), + ], + ) + + cosine = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Value"]}, + attrs={"operation": "COSINE"}, + ) + + sine = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Value"]}, + attrs={"operation": "SINE"}, + ) + + combine_xyz_4 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Y": cosine, "Z": sine}) + + multiply_add = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: group_input.outputs["Vector"], + 1: combine_xyz_4, + 2: group_input.outputs["Addend"], + }, + attrs={"operation": "MULTIPLY_ADD"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Vector": multiply_add.outputs["Vector"]} + ) + + +@node_utils.to_nodegroup("nodegroup_follow_curve", singleton=True) +def nodegroup_follow_curve(nw): + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketGeometry", "Curve", None), + ("NodeSocketFloat", "Curve Min", 0.5), + ("NodeSocketFloat", "Curve Max", 1.0), + ], + ) + + position = nw.new_node(Nodes.InputPosition) + + capture_attribute = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={"Geometry": group_input.outputs["Geometry"], 1: position}, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, + input_kwargs={"Vector": capture_attribute.outputs["Attribute"]}, + ) + + attribute_statistic = nw.new_node( + Nodes.AttributeStatistic, + input_kwargs={ + "Geometry": capture_attribute.outputs["Geometry"], + 2: separate_xyz.outputs["Z"], + }, + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": separate_xyz.outputs["Z"], + 1: attribute_statistic.outputs["Min"], + 2: attribute_statistic.outputs["Max"], + 3: group_input.outputs["Curve Min"], + 4: group_input.outputs["Curve Max"], + }, + ) + + curve_length = nw.new_node( + Nodes.CurveLength, input_kwargs={"Curve": group_input.outputs["Curve"]} + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: map_range.outputs["Result"], 1: curve_length}, + attrs={"operation": "MULTIPLY"}, + ) + + sample_curve = nw.new_node( + Nodes.SampleCurve, + input_kwargs={"Curve": group_input.outputs["Curve"], 
"Length": multiply}, + attrs={"mode": "LENGTH"}, + ) + + cross_product = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: sample_curve.outputs["Tangent"], + 1: sample_curve.outputs["Normal"], + }, + attrs={"operation": "CROSS_PRODUCT"}, + ) + + scale = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: cross_product.outputs["Vector"], + "Scale": separate_xyz.outputs["X"], + }, + attrs={"operation": "SCALE"}, + ) + + scale_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: sample_curve.outputs["Normal"], + "Scale": separate_xyz.outputs["Y"], + }, + attrs={"operation": "SCALE"}, + ) + + add = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: scale.outputs["Vector"], 1: scale_1.outputs["Vector"]}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": capture_attribute.outputs["Geometry"], + "Position": sample_curve.outputs["Position"], + "Offset": add.outputs["Vector"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_position} + ) + + +@node_utils.to_nodegroup("nodegroup_norm_index", singleton=True) +def nodegroup_norm_index(nw): + index = nw.new_node(Nodes.Index) + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketInt", "Count", 0)] + ) + + divide = nw.new_node( + Nodes.Math, + input_kwargs={0: index, 1: group_input.outputs["Count"]}, + attrs={"operation": "DIVIDE"}, + ) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"T": divide}) + + +@node_utils.to_nodegroup("nodegroup_flower_petal", singleton=True) +def nodegroup_flower_petal(nw): + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketFloat", "Length", 0.2), + ("NodeSocketFloat", "Point", 1.0), + ("NodeSocketFloat", "Point height", 0.5), + ("NodeSocketFloat", "Bevel", 6.8), + ("NodeSocketFloat", "Base width", 0.2), + ("NodeSocketFloat", "Upper width", 0.3), + ("NodeSocketInt", "Resolution H", 8), + ("NodeSocketInt", "Resolution V", 4), + ("NodeSocketFloat", "Wrinkle", 0.1), + ("NodeSocketFloat", "Curl", 0.0), + ], + ) + + multiply_add = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Resolution H"], 1: 2.0, 2: 1.0}, + attrs={"operation": "MULTIPLY_ADD"}, + ) + + grid = nw.new_node( + Nodes.MeshGrid, + input_kwargs={ + "Vertices X": group_input.outputs["Resolution V"], + "Vertices Y": multiply_add, + }, + ) + + position = nw.new_node(Nodes.InputPosition) + + capture_attribute = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={"Geometry": grid, 1: position}, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, + input_kwargs={"Vector": capture_attribute.outputs["Attribute"]}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["X"], 1: 0.05}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": multiply, "Y": separate_xyz.outputs["Y"]} + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": combine_xyz, + "Scale": 7.9, + "Detail": 0.0, + "Distortion": 0.2, + }, + attrs={"noise_dimensions": "2D"}, + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: noise_texture.outputs["Fac"], 1: -0.5} + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: add, 1: group_input.outputs["Wrinkle"]}, + attrs={"operation": "MULTIPLY"}, + ) + + separate_xyz_1 = nw.new_node( + Nodes.SeparateXYZ, + input_kwargs={"Vector": 
capture_attribute.outputs["Attribute"]}, + ) + + add_1 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz_1.outputs["X"]}) + + absolute = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_1.outputs["Y"]}, + attrs={"operation": "ABSOLUTE"}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, input_kwargs={0: absolute, 1: 2.0}, attrs={"operation": "MULTIPLY"} + ) + + power = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_2, 1: group_input.outputs["Bevel"]}, + attrs={"operation": "POWER"}, + ) + + multiply_add_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: power, 1: -1.0, 2: 1.0}, + attrs={"operation": "MULTIPLY_ADD"}, + ) + + multiply_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: add_1, 1: multiply_add_1}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_add_2 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: multiply_3, + 1: group_input.outputs["Upper width"], + 2: group_input.outputs["Base width"], + }, + attrs={"operation": "MULTIPLY_ADD"}, + ) + + multiply_4 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_1.outputs["Y"], 1: multiply_add_2}, + attrs={"operation": "MULTIPLY"}, + ) + + power_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: absolute, 1: group_input.outputs["Point"]}, + attrs={"operation": "POWER"}, + ) + + multiply_add_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: power_1, 1: -1.0, 2: 1.0}, + attrs={"operation": "MULTIPLY_ADD"}, + ) + + multiply_5 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_add_3, 1: group_input.outputs["Point height"]}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_add_4 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Point height"], 1: -1.0, 2: 1.0}, + attrs={"operation": "MULTIPLY_ADD"}, + ) + + add_2 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_5, 1: multiply_add_4}) + + multiply_6 = nw.new_node( + Nodes.Math, + input_kwargs={0: add_2, 1: multiply_add_1}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_7 = nw.new_node( + Nodes.Math, + input_kwargs={0: add_1, 1: multiply_6}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": multiply_1, "Y": multiply_4, "Z": multiply_7}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": capture_attribute.outputs["Geometry"], + "Position": combine_xyz_1, + }, + ) + + multiply_8 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Length"]}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Y": multiply_8}) + + reroute = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": group_input.outputs["Curl"]} + ) + + group_1 = nw.new_node( + nodegroup_polar_to_cart_old().name, + input_kwargs={"Addend": combine_xyz_3, "Value": reroute, "Vector": multiply_8}, + ) + + quadratic_bezier = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Resolution": 8, + "Start": (0.0, 0.0, 0.0), + "Middle": combine_xyz_3, + "End": group_1, + }, + ) + + group = nw.new_node( + nodegroup_follow_curve().name, + input_kwargs={ + "Geometry": set_position, + "Curve": quadratic_bezier, + "Curve Min": 0.0, + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": tag_nodegroup(nw, group, "petal")} + ) + + +@node_utils.to_nodegroup("nodegroup_phyllo_points", singleton=True) +def nodegroup_phyllo_points(nw): + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketInt", "Count", 50), + ("NodeSocketFloat", "Min Radius", 0.0), + 
("NodeSocketFloat", "Max Radius", 2.0), + ("NodeSocketFloat", "Radius exp", 0.5), + ("NodeSocketFloat", "Min angle", -0.5236), + ("NodeSocketFloat", "Max angle", 0.7854), + ("NodeSocketFloat", "Min z", 0.0), + ("NodeSocketFloat", "Max z", 1.0), + ("NodeSocketFloat", "Clamp z", 1.0), + ("NodeSocketFloat", "Yaw offset", -1.5708), + ], + ) + + mesh_line = nw.new_node( + Nodes.MeshLine, input_kwargs={"Count": group_input.outputs["Count"]} + ) + + mesh_to_points = nw.new_node(Nodes.MeshToPoints, input_kwargs={"Mesh": mesh_line}) + + position = nw.new_node(Nodes.InputPosition) + + capture_attribute = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={"Geometry": mesh_to_points, 1: position}, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + index = nw.new_node(Nodes.Index) + + cosine = nw.new_node( + Nodes.Math, input_kwargs={0: index}, attrs={"operation": "COSINE"} + ) + + sine = nw.new_node(Nodes.Math, input_kwargs={0: index}, attrs={"operation": "SINE"}) + + combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={"X": cosine, "Y": sine}) + + divide = nw.new_node( + Nodes.Math, + input_kwargs={0: index, 1: group_input.outputs["Count"]}, + attrs={"operation": "DIVIDE"}, + ) + + power = nw.new_node( + Nodes.Math, + input_kwargs={0: divide, 1: group_input.outputs["Radius exp"]}, + attrs={"operation": "POWER"}, + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": power, + 3: group_input.outputs["Min Radius"], + 4: group_input.outputs["Max Radius"], + }, + ) + + multiply = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: combine_xyz, 1: map_range.outputs["Result"]}, + attrs={"operation": "MULTIPLY"}, + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": multiply.outputs["Vector"]} + ) + + map_range_2 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": divide, + 2: group_input.outputs["Clamp z"], + 3: group_input.outputs["Min z"], + 4: group_input.outputs["Max z"], + }, + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": separate_xyz.outputs["X"], + "Y": separate_xyz.outputs["Y"], + "Z": map_range_2.outputs["Result"], + }, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": capture_attribute.outputs["Geometry"], + "Position": combine_xyz_1, + }, + ) + + map_range_3 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": divide, + 3: group_input.outputs["Min angle"], + 4: group_input.outputs["Max angle"], + }, + ) + + random_value = nw.new_node(Nodes.RandomValue, input_kwargs={2: -0.1, 3: 0.1}) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: index, 1: group_input.outputs["Yaw offset"]} + ) + + combine_xyz_2 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": map_range_3.outputs["Result"], + "Y": random_value.outputs[1], + "Z": add, + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Points": set_position, "Rotation": combine_xyz_2}, + ) + + +@node_utils.to_nodegroup("nodegroup_plant_seed", singleton=True) +def nodegroup_plant_seed(nw): + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVector", "Dimensions", (0.0, 0.0, 0.0)), + ("NodeSocketIntUnsigned", "U", 4), + ("NodeSocketInt", "V", 8), + ], + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": group_input.outputs["Dimensions"]} + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": separate_xyz.outputs["X"]} + ) + + multiply_add = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: combine_xyz, 1: 
(0.5, 0.5, 0.5)}, + attrs={"operation": "MULTIPLY_ADD"}, + ) + + quadratic_bezier_1 = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Resolution": group_input.outputs["U"], + "Start": (0.0, 0.0, 0.0), + "Middle": multiply_add.outputs["Vector"], + "End": combine_xyz, + }, + ) + + group = nw.new_node( + nodegroup_norm_index().name, input_kwargs={"Count": group_input.outputs["U"]} + ) + + float_curve = nw.new_node(Nodes.FloatCurve, input_kwargs={"Value": group}) + node_utils.assign_curve( + float_curve.mapping.curves[0], [(0.0, 0.0), (0.3159, 0.4469), (1.0, 0.0156)] + ) + + map_range = nw.new_node(Nodes.MapRange, input_kwargs={"Value": float_curve, 4: 3.0}) + + set_curve_radius = nw.new_node( + Nodes.SetCurveRadius, + input_kwargs={ + "Curve": quadratic_bezier_1, + "Radius": map_range.outputs["Result"], + }, + ) + + curve_circle = nw.new_node( + Nodes.CurveCircle, + input_kwargs={ + "Resolution": group_input.outputs["V"], + "Radius": separate_xyz.outputs["Y"], + }, + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": set_curve_radius, + "Profile Curve": curve_circle.outputs["Curve"], + "Fill Caps": True, + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Mesh": tag_nodegroup(nw, curve_to_mesh, "seed")}, + ) + + +def shader_flower_center(nw): + ambient_occlusion = nw.new_node(Nodes.AmbientOcclusion) + + colorramp = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": ambient_occlusion.outputs["Color"]} + ) + colorramp.color_ramp.elements.new(1) + colorramp.color_ramp.elements[0].position = 0.4841 + colorramp.color_ramp.elements[0].color = (0.0127, 0.0075, 0.0026, 1.0) + colorramp.color_ramp.elements[1].position = 0.8591 + colorramp.color_ramp.elements[1].color = (0.0848, 0.0066, 0.0007, 1.0) + colorramp.color_ramp.elements[2].position = 1.0 + colorramp.color_ramp.elements[2].color = (1.0, 0.6228, 0.1069, 1.0) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, input_kwargs={"Base Color": colorramp.outputs["Color"]} + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf} + ) + + +def shader_petal(nw, petal_color_name): + translucent_color_change = uniform(0.1, 0.6) + specular = normal(0.6, 0.1) + roughness = normal(0.4, 0.05) + translucent_amt = normal(0.3, 0.05) + + petal_color = nw.new_node(Nodes.RGB) + petal_color.outputs[0].default_value = color.color_category(petal_color_name) + + translucent_color = nw.new_node( + Nodes.MixRGB, + [translucent_color_change, petal_color, color.color_category(petal_color_name)], + ) + + translucent_bsdf = nw.new_node( + Nodes.TranslucentBSDF, input_kwargs={"Color": translucent_color} + ) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": petal_color, + "Specular": specular, + "Roughness": roughness, + }, + ) + + mix_shader = nw.new_node( + Nodes.MixShader, + input_kwargs={"Fac": translucent_amt, 1: principled_bsdf, 2: translucent_bsdf}, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": mix_shader} + ) + + +def geo_flower(nw, petal_material, center_material): + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketFloat", "Center Rad", 0.0), + ("NodeSocketVector", "Petal Dims", (0.0, 0.0, 0.0)), + ("NodeSocketFloat", "Seed Size", 0.0), + ("NodeSocketFloat", "Min Petal Angle", 0.1), + ("NodeSocketFloat", "Max Petal Angle", 1.36), + ("NodeSocketFloat", "Wrinkle", 0.01), + ("NodeSocketFloat", 
"Curl", 13.89), + ], + ) + + uv_sphere = nw.new_node( + Nodes.MeshUVSphere, + input_kwargs={ + "Segments": 8, + "Rings": 8, + "Radius": group_input.outputs["Center Rad"], + }, + ) + + transform = nw.new_node( + Nodes.Transform, input_kwargs={"Geometry": uv_sphere, "Scale": (1.0, 1.0, 0.05)} + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Seed Size"], 1: 1.5}, + attrs={"operation": "MULTIPLY"}, + ) + + distribute_points_on_faces = nw.new_node( + Nodes.DistributePointsOnFaces, + input_kwargs={ + "Mesh": transform, + "Distance Min": multiply, + "Density Max": 50000.0, + }, + attrs={"distribute_method": "POISSON"}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Seed Size"], 1: 10.0}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": multiply_1, "Y": group_input.outputs["Seed Size"]}, + ) + + group_3 = nw.new_node( + nodegroup_plant_seed().name, + input_kwargs={"Dimensions": combine_xyz, "U": 6, "V": 6}, + ) + + musgrave_texture = nw.new_node( + Nodes.MusgraveTexture, + input_kwargs={"W": 13.8, "Scale": 2.41}, + attrs={"musgrave_dimensions": "4D"}, + ) + + map_range = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": musgrave_texture, 3: 0.34, 4: 1.21} + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": map_range.outputs["Result"], "Y": 1.0, "Z": 1.0}, + ) + + instance_on_points_1 = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={ + "Points": distribute_points_on_faces.outputs["Points"], + "Instance": group_3, + "Rotation": (0.0, -1.5708, 0.0541), + "Scale": combine_xyz_1, + }, + ) + + realize_instances = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": instance_on_points_1} + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [realize_instances, transform]} + ) + + set_material_1 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={"Geometry": join_geometry_1, "Material": center_material}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Center Rad"], 1: 6.2832}, + attrs={"operation": "MULTIPLY"}, + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": group_input.outputs["Petal Dims"]} + ) + + divide = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_2, 1: separate_xyz.outputs["Y"]}, + attrs={"operation": "DIVIDE"}, + ) + + multiply_3 = nw.new_node( + Nodes.Math, input_kwargs={0: divide, 1: 1.2}, attrs={"operation": "MULTIPLY"} + ) + + reroute_3 = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": group_input.outputs["Center Rad"]} + ) + + reroute_1 = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": group_input.outputs["Min Petal Angle"]} + ) + + reroute = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": group_input.outputs["Max Petal Angle"]} + ) + + group_1 = nw.new_node( + nodegroup_phyllo_points().name, + input_kwargs={ + "Count": multiply_3, + "Min Radius": reroute_3, + "Max Radius": reroute_3, + "Radius exp": 0.0, + "Min angle": reroute_1, + "Max angle": reroute, + "Max z": 0.0, + }, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["Z"], 1: separate_xyz.outputs["Y"]}, + attrs={"operation": "SUBTRACT", "use_clamp": True}, + ) + + reroute_2 = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": group_input.outputs["Wrinkle"]} + ) + + reroute_4 = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": group_input.outputs["Curl"]} + ) + + group = 
nw.new_node( + nodegroup_flower_petal().name, + input_kwargs={ + "Length": separate_xyz.outputs["X"], + "Point": 0.56, + "Point height": -0.1, + "Bevel": 1.83, + "Base width": separate_xyz.outputs["Y"], + "Upper width": subtract, + "Resolution H": 8, + "Resolution V": 16, + "Wrinkle": reroute_2, + "Curl": reroute_4, + }, + ) + + instance_on_points = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={ + "Points": group_1.outputs["Points"], + "Instance": group, + "Rotation": group_1.outputs["Rotation"], + }, + ) + + realize_instances_1 = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": instance_on_points} + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={"Scale": 3.73, "Detail": 5.41, "Distortion": -1.0}, + ) + + subtract_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: noise_texture.outputs["Color"], 1: (0.5, 0.5, 0.5)}, + attrs={"operation": "SUBTRACT"}, + ) + + value = nw.new_node(Nodes.Value) + value.outputs[0].default_value = 0.025 + + multiply_4 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: subtract_1.outputs["Vector"], 1: value}, + attrs={"operation": "MULTIPLY"}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": realize_instances_1, + "Offset": multiply_4.outputs["Vector"], + }, + ) + + set_material = nw.new_node( + Nodes.SetMaterial, + input_kwargs={"Geometry": set_position, "Material": petal_material}, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [set_material_1, set_material]} + ) + + set_shade_smooth = nw.new_node( + Nodes.SetShadeSmooth, + input_kwargs={"Geometry": join_geometry, "Shade Smooth": False}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_shade_smooth} + ) + + +class TreeFlowerFactory(AssetFactory): + def __init__(self, factory_seed, rad=uniform(0.15, 0.25), diversity_fac=0.25): + super(TreeFlowerFactory, self).__init__(factory_seed=factory_seed) + + self.rad = rad + self.diversity_fac = diversity_fac + + self.petal_color = np.random.choice( + ["pink", "white", "red", "yellowish"], p=[0.4, 0.2, 0.2, 0.2] + ) + + with FixedSeed(factory_seed): + self.petal_material = surface.shaderfunc_to_material( + shader_petal, self.petal_color + ) + self.center_material = surface.shaderfunc_to_material(shader_flower_center) + self.species_params = self.get_flower_params(self.rad) + + @staticmethod + def get_flower_params(overall_rad=0.05): + pct_inner = uniform(0.05, 0.4) + base_width = 2 * np.pi * overall_rad * pct_inner / normal(20, 5) + top_width = overall_rad * np.clip(normal(0.7, 0.3), base_width * 1.2, 100) + + min_angle, max_angle = np.deg2rad(np.sort(uniform(-20, 100, 2))) + + return { + "Center Rad": overall_rad * pct_inner, + "Petal Dims": np.array( + [overall_rad * (1 - pct_inner), base_width, top_width], dtype=np.float32 + ), + "Seed Size": uniform(0.005, 0.01), + "Min Petal Angle": min_angle, + "Max Petal Angle": max_angle, + "Wrinkle": uniform(0.003, 0.02), + "Curl": np.deg2rad(normal(30, 50)), + } + + def create_asset(self, **kwargs) -> bpy.types.Object: + vert = butil.spawn_vert("flower") + mod = surface.add_geomod( + vert, + geo_flower, + input_kwargs={ + "petal_material": self.petal_material, + "center_material": self.center_material, + }, + ) + + inst_params = self.get_flower_params(self.rad * normal(1, 0.05)) + params = dict_lerp(self.species_params, inst_params, 0.25) + butil.set_geomod_inputs(mod, params) + + butil.apply_modifiers(vert, mod) + + vert.rotation_euler.z = uniform(0, 360) + 
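For readers skimming this diff: `TreeFlowerFactory` (whose `create_asset` body concludes just below with `tag_object` and the `return`) follows Infinigen's usual `AssetFactory` pattern — species-level parameters are fixed once per `factory_seed` under `FixedSeed`, and `create_asset` re-samples per-instance parameters and blends them with `dict_lerp`. A minimal, hypothetical usage sketch (assuming the standard `spawn_asset` entry point on `AssetFactory` and a Blender Python session where `bpy` is importable; not part of the diff itself):

```python
# Hypothetical usage sketch, not part of the code added by this diff.
# Assumes the AssetFactory interface used elsewhere in Infinigen (spawn_asset)
# and that this runs inside Blender's bundled Python.
from infinigen.assets.objects.trees.tree_flower import TreeFlowerFactory

fac = TreeFlowerFactory(factory_seed=0, rad=0.2)  # species params fixed by factory_seed
flower = fac.spawn_asset(0)                       # per-instance params re-sampled in create_asset
print(flower.name, flower.dimensions)
```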
tag_object(vert, "flower") + return vert diff --git a/infinigen/assets/objects/trees/treeconfigs.py b/infinigen/assets/objects/trees/treeconfigs.py new file mode 100644 index 000000000..00129a860 --- /dev/null +++ b/infinigen/assets/objects/trees/treeconfigs.py @@ -0,0 +1,1328 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Alejandro Newell + + +import bpy +import numpy as np + +from .tree import TreeParams +from .utils import helper, mesh + +subsubtwig_config = { + "n": 2, + "symmetry": True, + "path_kargs": lambda idx: {"n_pts": 3, "std": 1, "momentum": 1, "sz": 0.4}, + "spawn_kargs": lambda idx: { + "rng": [0.2, 0.9], + "z_bias": 0.2, + "rnd_idx": 2 * idx + 2, + "ang_min": np.pi / 4, + "ang_max": np.pi / 4 + np.pi / 16, + "axis2": [0, 0, 1], + }, +} +subtwig_config = { + "n": 3, + "symmetry": True, + "path_kargs": lambda idx: { + "n_pts": 6, + "std": 1, + "momentum": 1, + "sz": 0.6 - 0.1 * idx, + }, + "spawn_kargs": lambda idx: { + "rng": [0.2, 0.9], + "z_bias": 0.1, + "rnd_idx": 2 * idx + 1, + "ang_min": np.pi / 4, + "ang_max": np.pi / 4 + np.pi / 16, + "axis2": [0, 0, 1], + }, + "children": [subsubtwig_config], +} +twig_config = { + "n": 1, + "decay": 0.8, + "valid_leaves": [-2, -1], + "path_kargs": lambda idx: {"n_pts": 7, "sz": 0.5, "std": 0.5, "momentum": 0.7}, + "spawn_kargs": lambda idx: {"init_vec": [0, 1, 0]}, + "children": [subtwig_config], +} + + +def random_pine_rot(): + theta = np.random.uniform(2 * np.pi) + return [np.sin(theta), 0.0, np.cos(theta)] + + +subsubtwig_config = { + "n": 20, + "symmetry": False, + "path_kargs": lambda idx: {"n_pts": 2, "std": 1, "momentum": 1, "sz": 0.2}, + "spawn_kargs": lambda idx: { + "rng": [0.2, 0.9], + "z_bias": 0.2, + "ang_min": np.pi / 4, + "ang_max": np.pi / 4 + np.pi / 16, + "axis2": random_pine_rot, + }, +} +subtwig_config = { + "n": 7, + "symmetry": False, + "path_kargs": lambda idx: { + "n_pts": 10, + "std": 0.3, + "momentum": 1, + "sz": 0.2 - 0.01 * idx, + }, + "spawn_kargs": lambda idx: { + "rng": [0.2, 0.9], + "z_bias": 0.1, + "ang_min": np.pi / 8, + "ang_max": np.pi / 8 + np.pi / 16, + "axis2": random_pine_rot, + }, + "children": [subsubtwig_config], +} +pinetwig_config = { + "n": 1, + "path_kargs": lambda idx: {"n_pts": 7, "sz": 0.5, "std": 0.2, "momentum": 0.7}, + "spawn_kargs": lambda idx: {"init_vec": [0, 1, 0]}, + "children": [subtwig_config], +} + + +subsubsubtwig_config = { + "n": 1, + "symmetry": True, + "path_kargs": lambda idx: {"n_pts": 2, "std": 1, "momentum": 1, "sz": 0.4}, + "spawn_kargs": lambda idx: { + "rng": [0.2, 0.9], + "z_bias": 0.2, + "rnd_idx": idx + 1, + "ang_min": np.pi / 8, + "ang_max": np.pi / 8 + np.pi / 32, + "axis2": [0, 0, 1], + }, +} +subsubtwig_config = { + "n": 3, + "symmetry": False, + "path_kargs": lambda idx: { + "n_pts": 3, + "std": 1, + "momentum": 1, + "sz": 0.6 - 0.1 * idx, + }, + "spawn_kargs": lambda idx: { + "rng": [0.1, 1.0], + "z_bias": 0.1, + "ang_min": np.pi / 4, + "ang_max": np.pi / 4 + np.pi / 16, + "axis2": [0, 0, 1], + }, + "children": [subsubsubtwig_config], +} +subtwig_config = { + "n": 8, + "symmetry": False, + "path_kargs": lambda idx: { + "n_pts": 7, + "std": 1, + "momentum": 1, + "sz": 0.6 - 0.1 * idx, + }, + "spawn_kargs": lambda idx: { + "rng": [0.2, 1.0], + "z_bias": 0.1, + "ang_min": np.pi / 4, + "ang_max": np.pi / 4 + np.pi / 16, + "axis2": [0, 0, 1], + }, + "children": [subsubtwig_config], +} +bambootwig_config = { + "n": 1, + 
"decay": 0.8, + "valid_leaves": [-2, -1], + "path_kargs": lambda idx: { + "n_pts": 15, + "sz": 1.0, + "std": 0.05, + "momentum": 0.7, + "pull_dir": [0, 0, -0.3], + "pull_factor": 0.5, + "pull_init": 0.0, + }, + "spawn_kargs": lambda idx: {"init_vec": [0, 1, 0]}, + "children": [subtwig_config], +} + + +subtwig_config = { + "n": 37, + "symmetry": True, + "path_kargs": lambda idx: {"n_pts": 2, "std": 1, "momentum": 1, "sz": 0.4}, + "spawn_kargs": lambda idx: { + "rng": [0.2, 0.9], + "z_bias": 0.2, + "rnd_idx": idx + 2, + "ang_min": 0.3 * np.pi, + "ang_max": 0.3 * np.pi + np.pi / 16, + "axis2": [0, 0, 1], + }, +} +palmtwig_config = { + "n": 1, + "decay": 0.8, + "valid_leaves": [-2, -1], + "path_kargs": lambda idx: { + "n_pts": 40, + "sz": 0.5, + "std": 0.05, + "momentum": 0.7, + "pull_dir": [0, 0, -0.3], + "pull_factor": 0.5, + "pull_init": 0.0, + }, + "spawn_kargs": lambda idx: {"init_vec": [0, 1, 0]}, + "children": [subtwig_config], +} + + +subtwig_config = { + "n": 3, + "symmetry": True, + "path_kargs": lambda idx: { + "n_pts": 3, + "std": 1, + "momentum": 1, + "sz": 0.6 - 0.1 * idx, + }, + "spawn_kargs": lambda idx: { + "rng": [0.2, 0.9], + "z_bias": 0.1, + "rnd_idx": 2 * idx + 1, + "ang_min": np.pi / 4, + "ang_max": np.pi / 4 + np.pi / 16, + "axis2": [0, 0, 1], + }, + "children": [], +} +shrubtwig_config = { + "n": 1, + "path_kargs": lambda idx: {"n_pts": 6, "sz": 0.5, "std": 0.5, "momentum": 0.7}, + "spawn_kargs": lambda idx: {"init_vec": [0, 1, 0]}, + "children": [subtwig_config], +} + + +def generate_twig_config(): + n_twig_pts = np.random.randint(10) + 5 + twig_len = np.random.uniform(3, 4) + twig_sz = twig_len / n_twig_pts + avail_idxs = np.arange(n_twig_pts) + start_idx = 1 + int(n_twig_pts * np.random.uniform(0, 0.3)) + sample_density = np.random.choice( + np.arange(np.ceil(np.sqrt(n_twig_pts)), dtype=int) + 1 + ) + avail_sub_idxs = avail_idxs[start_idx::sample_density] + + init_z = np.random.uniform(0, 0.3) + z_rnd_factor = np.random.uniform(0.01, 0.05) + + skip_subtwig = np.random.rand() < 0.3 + subsub_sz = np.random.uniform(0.02, 0.1) + subtwig_momentum = np.random.uniform(0, 1) + subtwig_std = np.random.rand() ** 2 + sz_decay = np.random.uniform(0.9, 1) + pull_factor = np.random.uniform(0, 0.3) + + if not skip_subtwig: + n_sub_pts = np.random.randint(10) + 5 + sub_sz = np.random.uniform(1, twig_len - 0.5) / n_sub_pts + idx_decay = (sub_sz * (np.random.rand() * 0.8 + 0.1)) / n_sub_pts + avail_idxs = np.arange(n_sub_pts) + start_idx = int(n_sub_pts * np.random.rand() * 0.5) + 1 + sample_density = np.random.choice([1, 2, 3]) + avail_idxs = avail_idxs[start_idx::sample_density] + + ang_offset = np.random.rand() * np.pi / 3 + ang_range = np.random.rand() * ang_offset + + subsubtwig_config = { + "n": len(avail_idxs), + "symmetry": True, + "path_kargs": lambda idx: { + "n_pts": 3, + "std": 1, + "momentum": 1, + "sz": subsub_sz, + "pull_dir": [0, 0, init_z + np.random.randn() * z_rnd_factor], + "pull_factor": pull_factor, + }, + "spawn_kargs": lambda idx: { + "rnd_idx": avail_idxs[idx], + "ang_min": np.pi / 4, + "ang_max": np.pi / 4 + np.pi / 16, + "axis2": [0, 0, 1], + }, + } + subtwig_config = { + "n": len(avail_sub_idxs), + "symmetry": True, + "path_kargs": lambda idx: { + "n_pts": n_sub_pts, + "std": subtwig_std, + "momentum": subtwig_momentum, + "sz": sub_sz - idx_decay * idx, + "sz_decay": sz_decay, + "pull_dir": [0, 0, init_z + np.random.randn() * z_rnd_factor], + "pull_factor": pull_factor, + }, + "spawn_kargs": lambda idx: { + "rng": [0.2, 0.9], + "rnd_idx": 
avail_sub_idxs[idx], + "ang_min": ang_offset, + "ang_max": ang_offset + ang_range, + "axis2": [0, 0, 1], + }, + "children": [subsubtwig_config], + } + + else: + subtwig_config = { + "n": len(avail_sub_idxs), + "symmetry": True, + "path_kargs": lambda idx: { + "n_pts": 3, + "std": 1, + "momentum": 1, + "sz": subsub_sz, + "pull_dir": [0, 0, init_z + np.random.randn() * z_rnd_factor], + "pull_factor": pull_factor, + }, + "spawn_kargs": lambda idx: { + "rnd_idx": avail_sub_idxs[idx], + "ang_min": np.pi / 4, + "ang_max": np.pi / 4 + np.pi / 16, + "axis2": [0, 0, 1], + }, + } + + twig_config = { + "n": 1, + "path_kargs": lambda idx: { + "n_pts": n_twig_pts, + "sz": twig_sz, + "std": 0.5, + "momentum": 0.5, + "pull_dir": [0, 0, init_z + np.random.randn() * z_rnd_factor], + "pull_factor": pull_factor, + }, + "spawn_kargs": lambda idx: {"init_vec": [0, 1, -init_z]}, + "children": [subtwig_config], + } + + return twig_config + + +def basic_tree(init_pos=np.array([[0, 0, 0]])): + def init_att_fn(nodes): + pt_offset = init_pos[0] + np.array([0, 0, 11]) + branch_pts = mesh.get_pts_from_shape( + bpy.ops.mesh.primitive_cube_add, + n=500, + scaling=[7, 7, 7], + pt_offset=pt_offset, + ) + return branch_pts + + def root_att_fn(nodes): + # Pass this into root_kargs to initialize a root system + pt_offset = init_pos[0] + np.array([0, 0, -3.5]) + branch_pts = mesh.get_pts_from_shape( + bpy.ops.mesh.primitive_cube_add, + n=500, + scaling=[5, 5, 4], + pt_offset=pt_offset, + ) + return branch_pts + + branch_config = { + "n": 5, + "spawn_kargs": lambda idx: {"rng": [0.5, 0.8]}, + "path_kargs": lambda idx: {"n_pts": 5, "sz": 0.4, "std": 1.4, "momentum": 0.4}, + "children": [], + } + tree_config = { + "n": 4, + "path_kargs": lambda idx: ( + {"n_pts": 15, "sz": 0.8, "std": 1, "momentum": 0.7} + if idx > 0 + else {"n_pts": 15, "sz": 1, "std": 0.1, "momentum": 0.7} + ), + "spawn_kargs": lambda idx: {"init_vec": [0, 0, 1]}, + "children": [branch_config], + } + + twig_kargs = { + "config": shrubtwig_config, + "radii_kargs": {"Max radius": 0.1}, + "leaf_kargs": { + "Density": 1, + "Min scale": 0.4, + "Max scale": 0.6, + "Multi inst": 2, + }, + } + tree_kargs = { + "config": tree_config, + "init_pos": init_pos, + "radii_kargs": {"Min radius": 0.04, "Exponent": 2}, + "leaf_kargs": {"Density": 1, "Min scale": 0.35, "Max scale": 0.45}, + "space_kargs": { + "atts": init_att_fn, + "D": 0.3, + "s": 0.4, + "d": 10, + "pull_dir": [0, 0, 0.5], + "n_steps": 20, + }, + "root_kargs": None, # {'atts': None, 'D': .2, 's': .3, 'd': 2, + #'dir_rand': .3, 'mag_rand': .2, + #'pull_dir': None, 'n_steps': 30}, + } + + return tree_kargs, twig_kargs + + +def palm_tree(init_pos=np.array([[0, 0, 0]])): + def tmp_att_fn(nodes): + # pt_offset = init_pos[0] + np.array([0,0,20]) + pt_offset = nodes[-1] + branch_pts = mesh.get_pts_from_shape( + bpy.ops.mesh.primitive_cube_add, + n=500, + scaling=[1, 1, 1], + pt_offset=pt_offset, + ) + return branch_pts + + # select a random horizontal angle + pull_angle = np.random.uniform(0.0, 2 * np.pi) + + tree_config = { + "n": 1, + "path_kargs": lambda idx: { + "n_pts": 20, + "sz": 0.8, + "std": 0.1, + "momentum": 0.95, + "pull_dir": [np.cos(pull_angle), np.sin(pull_angle), 0.0], + "pull_factor": np.random.uniform(0.0, 1.5), + "pull_init": 0.0, + }, + "spawn_kargs": lambda idx: {"init_vec": [0, 0, 1]}, + "children": [], + } + + leaf_kargs = {"leaf_width": 0.1, "alpha": 0.3, "use_wave": False} + twig_kargs = { + "config": palmtwig_config, + "radii_kargs": {"max_radius": 0.1, "merge_size": 0.2}, + "leaf_kargs": { + 
"max_density": 20, + "scale": 2.0, + "rot_x": (-0.5, -0.4), + "rot_z": (-0.1, 0.1), + }, + } + tree_kargs = { + "config": tree_config, + "D_": 0.3, + "s": 0.4, + "d": 10, + "init_pos": init_pos, + "pull_dir": [0, 0, 0.5], + "n_updates": 20, + "init_att_fn": tmp_att_fn, + "radii_kargs": { + "max_radius": 0.7, + "merge_size": 0.3, + "min_radius": 0.1, + "growth_amt": 1.01, + }, + "leaf_kargs": { + "max_density": 20, + "scale": 0.3, + "rot_x": (-1.0, 1.0), + "rot_z": (-0.1, 0.1), + }, + } + + return tree_kargs, twig_kargs, leaf_kargs + + +def baobab_tree(init_pos=np.array([[0, 0, 0]])): + def tmp_att_fn(nodes): + # pt_offset = init_pos[0] + np.array([0,0,20]) + pt_offset = nodes[-1] + branch_pts = mesh.get_pts_from_shape( + bpy.ops.mesh.primitive_cube_add, + n=50, + scaling=[7, 7, 1], + pt_offset=pt_offset, + ) + return branch_pts + + # select a random horizontal angle + pull_angle = np.random.uniform(0.0, 2 * np.pi) + + tree_config = { + "n": 1, + "path_kargs": lambda idx: { + "n_pts": 20, + "sz": 0.8, + "std": 0.1, + "momentum": 0.95, + }, + "spawn_kargs": lambda idx: {"init_vec": [0, 0, 1]}, + "children": [], + } + + leaf_kargs = {"leaf_width": 0.5, "alpha": 0.3, "use_wave": False} + twig_kargs = { + "config": shrubtwig_config, + "radii_kargs": {"max_radius": 0.1, "merge_size": 0.2}, + "leaf_kargs": { + "max_density": 20, + "scale": 0.5, + "rot_x": (-0.5, -0.4), + "rot_z": (-0.1, 0.1), + }, + } + tree_kargs = { + "config": tree_config, + "D_": 0.5, + "s": 0.6, + "d": 10, + "init_pos": init_pos, + "pull_dir": [0, 0, 0.5], + "n_updates": 20, + "init_att_fn": tmp_att_fn, + "radii_kargs": { + "max_radius": 2.0, + "merge_size": 0.3, + "min_radius": 0.1, + "growth_amt": 1.10, + }, + "leaf_kargs": { + "max_density": 30, + "scale": 0.7, + "rot_x": (0, 1.0), + "rot_z": (-1.0, 1.0), + }, + } + + return tree_kargs, twig_kargs, leaf_kargs + + +def bamboo_tree(init_pos=np.array([[0, 0, 0]])): + height = np.random.randint(25, 35) + + def tmp_att_fn(nodes): + # pt_offset = init_pos[0] + np.array([0,0,20]) + pt_offset = nodes[-1] + branch_pts = mesh.get_pts_from_shape( + bpy.ops.mesh.primitive_cube_add, n=50, scaling=[0.5, 0.5, 4] + ) + # rotate the points + rot_axis = (nodes[-1] - nodes[-2]) / np.linalg.norm((nodes[-1] - nodes[-2])) + rot_axis = (rot_axis + np.array([0, 0, 1])) / 2.0 + + branch_pts = np.array( + [helper.rodrigues_rot(pts, rot_axis, np.pi) for pts in branch_pts] + ) + + branch_pts += pt_offset + + return branch_pts + + # select a random horizontal angle + pull_angle = np.random.uniform(0.0, 2 * np.pi) + + tree_config = { + "n": 1, + "path_kargs": lambda idx: { + "n_pts": height, + "sz": 0.8, + "std": 0.1, + "momentum": 0.95, + "pull_dir": [np.cos(pull_angle), np.sin(pull_angle), 0.0], + "pull_factor": np.random.uniform(0.1, 0.6), + "pull_init": 0.0, + }, + "spawn_kargs": lambda idx: {"init_vec": [0, 0, 1]}, + "children": [], + } + + leaf_kargs = {"leaf_width": 0.1, "alpha": 0.3, "use_wave": False} + twig_kargs = { + "config": bambootwig_config, + "radii_kargs": {"max_radius": 0.1, "merge_size": 0.2}, + "leaf_kargs": { + "max_density": 20, + "scale": 1.5, + "rot_x": (-0.5, -0.4), + "rot_z": (-0.1, 0.1), + }, + } + tree_kargs = { + "config": tree_config, + "D_": 0.3, + "s": 0.4, + "d": 10, + "init_pos": init_pos, + "pull_dir": [0, 0, 0.5], + "n_updates": 20, + "init_att_fn": tmp_att_fn, + "radii_kargs": { + "max_radius": 0.3, + "merge_size": 0.1, + "min_radius": 0.2, + "growth_amt": 1.01, + }, + "leaf_kargs": { + "max_density": 20, + "scale": 0.3, + "rot_x": (-1.0, 1.0), + "rot_z": (-0.1, 
0.1), + }, + } + + return tree_kargs, twig_kargs, leaf_kargs + + +def shrub(init_pos=np.array([[0, 0, 0]]), shrub_shape=0): + scale = 0.2 + + def att_fn_ball(nodes): + pt_offset = init_pos[0] + np.array([0, 0, 7 * scale]) + branch_pts = mesh.get_pts_from_shape( + bpy.ops.mesh.primitive_uv_sphere_add, + n=2000, + scaling=[7 * scale, 7 * scale, 7 * scale], + pt_offset=pt_offset, + ) + return branch_pts + + def att_fn_cone(nodes): + pt_offset = init_pos[0] + np.array([0, 0, 9 * scale]) + branch_pts = mesh.get_pts_from_shape( + bpy.ops.mesh.primitive_cone_add, + n=2000, + scaling=[5 * scale, 5 * scale, 10 * scale], + pt_offset=pt_offset, + ) + return branch_pts + + def att_fn_cube(nodes): + pt_offset = init_pos[0] + np.array([0, 0, 9 * scale]) + branch_pts = mesh.get_pts_from_shape( + bpy.ops.mesh.primitive_cube_add, + n=2000, + scaling=[4 * scale, 4 * scale, 7 * scale], + pt_offset=pt_offset, + ) + return branch_pts + + if shrub_shape == 0: + tmp_att_fn = att_fn_ball + elif shrub_shape == 1: + tmp_att_fn = att_fn_cone + elif shrub_shape == 2: + tmp_att_fn = att_fn_cube + else: + raise NotImplementedError + + leaf_kargs = { + "leaf_width": np.random.rand() * 0.5 + 0.1, + "alpha": np.random.rand() * 0.3, + } + branch_config = { + "n": 5, + "spawn_kargs": lambda idx: {"rng": [0.5, 0.8]}, + "path_kargs": lambda idx: {"n_pts": 5, "sz": 0.4, "std": 1.4, "momentum": 0.4}, + "children": [], + } + tree_config = { + "n": 1, + "path_kargs": lambda idx: ( + {"n_pts": 3, "sz": 0.8, "std": 1, "momentum": 0.7} + if idx > 0 + else {"n_pts": 3, "sz": 1, "std": 0.1, "momentum": 0.7} + ), + "spawn_kargs": lambda idx: {"init_vec": [0, 0, 1]}, + "children": [branch_config], + } + + twig_kargs = TreeParams( + skeleton=shrubtwig_config, + trunk_spacecol=None, + roots_spacecol=None, + child_placement={ + "Density": 1, + "Min scale": 0.4, + "Max scale": 0.6, + "Multi inst": 2, + }, + skinning={"Max radius": 0.1}, + ) + + tree_kargs = TreeParams( + skeleton=tree_config, + trunk_spacecol={"atts": tmp_att_fn, "D": 0.3, "s": 0.4, "d": 10}, + roots_spacecol=None, + child_placement={ + "depth_range": (0, 2.7), + "Density": 0.7, + "Min scale": 1.2 * scale, + "Max scale": 1.4 * scale, + "Multi inst": 3, + "Pitch offset": 1.0, + "Pitch variance": 2.0, + "Yaw variance": 2.0, + }, + skinning={"Min radius": 0.005, "Max radius": 0.025, "Exponent": 2}, + ) + + return tree_kargs, twig_kargs, leaf_kargs + + # branch_config = {'n': 5, 'spawn_kargs': lambda idx: {'rng': [.5,.8]}, + # 'path_kargs': lambda idx: {'n_pts': 5, 'sz': .4, 'std': 1.4, 'momentum': .4}, + # 'children': []} + # twig_config = {'n': 4, + # 'path_kargs': lambda idx: ({'n_pts': 15, 'sz': .8, 'std': 1, 'momentum': .7} + # if idx > 0 else + # {'n_pts': 15, 'sz': 1, 'std': .1, 'momentum': .7}), + # 'spawn_kargs': lambda idx: {'init_vec': [0,0,1]}, + # 'children': [branch_config]} + + # twig_kargs = {'config': shrubtwig_config, + # 'radii_kargs': {'Max radius': .1}, + # 'leaf_kargs': {'Density': 1, 'Min scale': .4, 'Max scale': .6, 'Multi inst': 2} + # } + # tree_kargs = {'config': twig_config, 'init_pos': init_pos, + # 'radii_kargs': {'Min radius': .04, 'Exponent': 2}, + # 'leaf_kargs': {'Density': 1, 'Min scale': .35, 'Max scale': .45}, + # 'space_kargs': {'atts': init_att_fn, 'D': .3, 's': .4, 'd': 10, + # 'pull_dir': [0,0,.5], 'n_steps': 20}, + # 'root_kargs': {'atts': None, 'D': .2, 's': .3, 'd': 2, + # 'dir_rand': .3, 'mag_rand': .2, + # 'pull_dir': None, 'n_steps': 30}, + # } + + +def basic_stem(init_pos=np.array([[0, 0, 0]])): + branch_config = { + "n": 3, + 
"spawn_kargs": lambda idx: {"rng": [0.1 * (idx + 1), 0.1 * (idx + 2)]}, + "path_kargs": lambda idx: { + "n_pts": 20 - 2 * idx, + "sz": 0.5, + "std": 1.5, + "momentum": 0.7, + "decay_mom": False, + "pull_dir": [0, 0, 1], + "pull_factor": 1.5 + idx * 0.2, + }, + "children": [], + } + tree_config = { + "n": 1, + "path_kargs": lambda idx: ( + { + "n_pts": 30, + "sz": 0.5, + "std": 2, + "momentum": 0.8, + "decay_mom": False, + "pull_dir": [0, 0, 1], + "pull_factor": 2 + idx * 0.5, + } + ), + "spawn_kargs": lambda idx: { + "init_vec": [np.random.randn(), np.random.randn(), 1] + }, + "children": [branch_config], + } + + tree_kargs = { + "config": tree_config, + "init_pos": init_pos, + "radii_kargs": {"Min radius": 0.02, "Max radius": 0.1, "Exponent": 2}, + "leaf_kargs": {"Density": 0, "Min scale": 0.35, "Max scale": 0.45}, + "space_kargs": {}, + "root_kargs": {}, + } + + return tree_kargs, None, {} + + +def space_tree_wrap(cds, n_init=5): + def tmp_att_fn(nodes): + return cds + + tree_config = { + "n": 1, + "path_kargs": lambda idx: {"n_pts": 1, "sz": 0.8, "std": 1, "momentum": 0.7}, + "spawn_kargs": lambda idx: {"init_vec": [0, 0, 1]}, + } + + twig_kargs = { + "config": twig_config, + "radii_kargs": {"max_radius": 0.1, "merge_size": 0.2}, + "leaf_kargs": {"max_density": 5, "scale": 0.5}, + } + tree_kargs = { + "config": tree_config, + "D_": 0.15, + "s": 0.2, + "d": 0.5, + "dir_rand": 0.3, + "mag_rand": 0.2, + "pull_dir": [0, 0, 0], + "n_updates": 40, + "init_att_fn": tmp_att_fn, + "radii_kargs": { + "max_radius": 0.04, + "merge_size": 0.1, + "min_radius": 0.01, + "growth_amt": 1.02, + }, + "leaf_kargs": {}, + } + + rand_pts = np.random.choice(np.arange(len(cds)), n_init, replace=False) + tree_kargs["init_pos"] = cds[rand_pts] + + return tree_kargs, twig_kargs + + +def space_tree(obj, init_pos=np.array([[0, 0, 0]])): + def init_att_fn(nodes): + return mesh.sample_vtxs( + obj, n=1000, emit_from="VOLUME", seed=np.random.randint(100) + ) + + twig_kargs = { + "config": shrubtwig_config, + "radii_kargs": {"max_radius": 0.1, "merge_size": 0.2}, + "leaf_kargs": {"Density": 1, "Min scale": 0.4, "Max scale": 0.6}, + } + tree_kargs = { + "config": {"n": 0}, + "init_pos": init_pos, + "leaf_kargs": {"Density": 0}, + "radii_kargs": {"Min radius": 0.01, "Scaling": 0.05, "Exponent": 2}, + "space_kargs": { + "atts": init_att_fn, + "D": 0.1, + "s": 0.2, + "d": 10, + "dir_rand": 0.2, + "mag_rand": 0.2, + "pull_dir": [0, 0.5, 0], + "n_steps": 100, + }, + } + + return tree_kargs, twig_kargs + + +def pine_tree(init_pos=np.array([[0, 0, 0]])): + def tmp_att_fn(nodes): + tmp_v = nodes[nodes[:, 2] > 3] + atts = [tmp_v.copy() + np.random.randn(*tmp_v.shape) * 0.5 for _ in range(5)] + return np.concatenate(atts, 0)[::5] + + def root_att_fn(nodes): + # Pass this into root_kargs to initialize a root system + pt_offset = init_pos[0] + np.array([0, 0, -3.5]) + branch_pts = mesh.get_pts_from_shape( + bpy.ops.mesh.primitive_cube_add, + n=500, + scaling=[5, 5, 4], + pt_offset=pt_offset, + ) + return branch_pts + + per_layer = 4 + tree_ht = np.random.randint(20, 30) + max_sz = 0.8 + start_ht = int(tree_ht * np.random.uniform(0.1, 0.3)) + n = tree_ht - start_ht + + branch_config = { + "n": n * per_layer, + "path_kargs": lambda idx: { + "n_pts": np.random.randint( + np.floor(((n - idx // per_layer) / n) * 6), + np.ceil(((n - idx // per_layer) / n) * 8), + ) + + 3, + "std": 0.3, + "momentum": 0.9, + "sz": max_sz - (max_sz / tree_ht) * (idx // per_layer), + }, + "spawn_kargs": lambda idx: { + "rng": [0.5, 1], + "z_bias": 0.2, + 
"rnd_idx": (idx // per_layer) + start_ht, + "ang_min": np.pi / 2, + "ang_max": np.pi / 2 + np.pi / 16, + "axis2": [np.random.randn(), np.random.randn(), 0.5], + }, + "children": [], + } + pinetree_config = { + "n": 1, + "path_kargs": lambda idx: { + "n_pts": tree_ht + 1, + "sz": 0.8, + "std": 0.1, + "momentum": 0.7, + }, + "spawn_kargs": lambda idx: {"init_vec": [0, 0, 1]}, + "children": [branch_config], + } + + leaf_kargs = {"leaf_width": 0.05, "alpha": 0, "use_wave": False} + twig_kargs = TreeParams( + skeleton=pinetwig_config, + trunk_spacecol=None, + roots_spacecol=None, + skinning={ + "Min radius": 0.005, + "Max radius": 0.03, + "Exponent": 1.3, + "Scaling": 0.1, + "Profile res": 3, + }, + child_placement={ + "depth_range": (0, 5.0), + "Density": 1.0, + "Min scale": 0.7, + "Max scale": 0.9, + }, + ) + + tree_kargs = TreeParams( + skeleton=pinetree_config, + skinning={"Min radius": 0.02, "Exponent": 1.5, "Max radius": 0.2}, + trunk_spacecol={ + "atts": tmp_att_fn, + "D": 0.3, + "s": 0.4, + "d": 10, + "pull_dir": [0, 0, 0.5], + "n_steps": 20, + }, + roots_spacecol=None, # {'atts': None, 'D': .2, 's': .3, 'd': 2, + # 'dir_rand': .3, 'mag_rand': .2, + # 'pull_dir': None, 'n_steps': 30}, + child_placement={ + "depth_range": (0, 2.7), + "Density": 1.0, + "Min scale": 0.7, + "Max scale": 0.9, + }, + ) + + return tree_kargs, twig_kargs, leaf_kargs + + +def coral(): + def tmp_att_fn(nodes): + branch_pts = mesh.get_pts_from_shape( + bpy.ops.mesh.primitive_cube_add, + n=500, + scaling=[7, 7, 7], + pt_offset=[0, 0, 11], + ) + return branch_pts + + branch_config = { + "n": 5, + "spawn_kargs": lambda idx: {"rng": [0.5, 0.8]}, + "path_kargs": lambda idx: {"n_pts": 5, "sz": 0.4, "std": 1.4, "momentum": 0.4}, + "children": [], + } + tree_config = { + "n": 4, + "path_kargs": lambda idx: ( + {"n_pts": 15, "sz": 0.8, "std": 1, "momentum": 0.7} + if idx > 0 + else {"n_pts": 15, "sz": 1, "std": 0.1, "momentum": 0.7} + ), + "spawn_kargs": lambda idx: {"init_vec": [0, 0, 1]}, + "children": [branch_config], + } + + twig_kargs = { + "config": twig_config, + "radii_kargs": {"max_radius": 0.1, "merge_size": 0.2}, + "leaf_kargs": {"max_density": 20, "scale": 0.4}, + } + tree_kargs = { + "config": tree_config, + "D_": 0.3, + "s": 0.4, + "d": 10, + "pull_dir": [0, 0, 0.5], + "n_updates": 20, + "init_att_fn": tmp_att_fn, + "radii_kargs": { + "max_radius": 0.7, + "merge_size": 0.3, + "min_radius": 0.03, + "growth_amt": 1.01, + }, + "leaf_kargs": {"max_density": 5, "scale": 0.3}, + } + + return tree_kargs, twig_kargs + + +def parse_genome(tree_genome): + genome_keys = [ + "size", + "trunk_warp", + "n_trunks", + "branch_start", + "branch_angle", + "multi_branch", + "branch density", + "branch_len", + "branch_warp", + "pull_dir_vt", + "pull_dir_hz", + "outgrowth", + "branch_thickness", + "twig_density", + "twig_scale", + ] + return {k: tree_genome[k_idx] for k_idx, k in enumerate(genome_keys)} + + +def calc_height(x, min_ht=5, max_ht=30, bias=-0.05, uniform=0.5): + def map_fn(val): + return np.tan((val - 0.5 + bias) * np.pi * (1.1 - uniform)) + + rng = map_fn(0), map_fn(1) + y = map_fn(x) + y = (y - rng[0]) / (rng[1] - rng[0]) + y = y * (max_ht - min_ht) + min_ht + return y + + +def generate_tree_config(tree_genome=None, season="autumn"): + """ + Main latent params that we might want to control: + - overall size/"age" + - trunk straightness + - additional "trunks" + - starting height of branches + - outgoing branch angle (parallel to ground vs angled up vs angled proporitionally to height) + - branch density + - branch 
length (fn of height) + - branch straightness + - pull direction (up/down/to the side) + - outgrowth (space filling) / "density" + - branch thickness (ideally this behaves reasonably based on everything else) + """ + if tree_genome is None: + tree_genome = np.random.rand(32) + + cfg = parse_genome(tree_genome) + sz = calc_height(cfg["size"], min_ht=12) + n_tree_pts = int(sz) + n_trunks = int(10 ** (cfg["n_trunks"] ** 1.6)) + ex = np.exp((6 - (5 if n_trunks > 1 else 0)) * (cfg["trunk_warp"] - 0.1)) + trunk_std = ((1 - (ex / (1 + ex))) * 4) ** 2 + trunk_mtm = max(0.2, min(0.95, (1 / (trunk_std + 1)) + np.random.randn() * 0.2)) + radial_out = False # False # np.random.rand() < .3 + avail_idxs = np.arange(n_tree_pts) + start_idx = 1 + int(n_tree_pts * np.random.uniform(0.1, 0.7)) + sample_density = np.random.choice( + np.arange(np.ceil(np.sqrt(n_tree_pts)), dtype=int) + 1 + ) + avail_idxs = avail_idxs[start_idx::sample_density] + multi_branch = int(5 ** (cfg["multi_branch"] ** 1.6)) + avail_idxs = np.repeat(avail_idxs, multi_branch).flatten() + + n = len(avail_idxs) + + start_ht = sz * (start_idx / sz) + box_ht = (sz - start_ht) * 0.6 + + def tmp_att_fn(nodes): + branch_pts = mesh.get_pts_from_shape( + bpy.ops.mesh.primitive_cube_add, + n=500, + scaling=[sz / 2, sz / 2, box_ht], + pt_offset=[0, 0, start_ht + sz * 0.4], + ) + return branch_pts + + max_sz = 1 + + if radial_out: + start_ht = int(sz * 0.1) + per_layer = np.random.randint(3, 6) + branch_config = { + "n": n * per_layer, + "path_kargs": lambda idx: { + "n_pts": np.random.randint( + np.floor(((n - idx // per_layer) / n) * 6), + np.ceil(((n - idx // per_layer) / n) * 8), + ) + + 3, + "std": 0.3, + "momentum": 0.9, + "sz": max_sz - (max_sz / sz) * (idx // per_layer), + "pull_dir": [0, 0, np.random.rand()], + "pull_factor": np.random.rand(), + }, + "spawn_kargs": lambda idx: { + "rnd_idx": avail_idxs[idx // per_layer], + "ang_min": np.pi / 2, + "ang_max": np.pi / 2 + np.pi / 16, + "axis2": [np.random.randn(), np.random.randn(), 0.5], + }, + } + + else: + branch_config = { + "n": n, + "path_kargs": lambda idx: { + "n_pts": int(n_tree_pts * np.random.uniform(0.4, 0.6)), + "sz": 1, + "std": 1.4, + "momentum": 0.4, + "pull_dir": [0, 0, np.random.rand()], + "pull_factor": np.random.rand(), + }, + "spawn_kargs": lambda idx: {"rnd_idx": avail_idxs[idx]}, + } + + tree_config = { + "n": n_trunks, + "path_kargs": lambda idx: ( + { + "n_pts": n_tree_pts, + "sz": 1, + "std": trunk_std, + "momentum": trunk_mtm, + "pull_dir": [0, 0, 0], + } + ), + "spawn_kargs": lambda idx: {"init_vec": [0, 0, 1]}, + "children": [branch_config], + } + + tmp_D = 0.3 + 0.2 * (sz / 30) # .3 * sz / 8 + tmp_s = tmp_D * 1.3 + if n < 5: + n_updates = np.random.choice([2, 3, int(1 + sz // 2)]) + else: + n_updates = np.random.choice([2, 2, 2, 3, 4, 5]) + + max_radius = 0.2 + merge_size = 2.5 - cfg["branch_thickness"] + + if season == "winter": + twig_density = 0.0 if cfg["twig_density"] < 0.5 else 0.5 * cfg["twig_density"] + twig_inst = 1 + 0 * np.random.randint(3, 5) + else: + twig_density = 0.5 + 0.5 * cfg["twig_density"] + twig_inst = np.random.randint(1, 3) + + return TreeParams( + skeleton=tree_config, + skinning={"Max radius": max_radius, "Min radius": 0.02, "Exponent": merge_size}, + trunk_spacecol={ + "atts": tmp_att_fn, + "D": tmp_D, + "s": tmp_s, + "d": 10, + "pull_dir": [0, 0, np.random.randn() * 0.3], + "n_steps": n_updates, + }, + roots_spacecol=None, # {'atts': None, 'D': .05, 's': .1, 'd': 2, 'dir_rand': .05, 'mag_rand': .05, 'pull_dir': None, 'n_steps': 30}, + 
child_placement={ + "depth_range": (0, 5.0), + "Density": twig_density, + "Multi inst": twig_inst, + "Pitch variance": 1.0, + "Yaw variance": 10.0, + "Min scale": 1.1, + "Max scale": 1.3, + }, + ) + + +def random_tree(tree_genome=None, season="autumn"): + leaf_kargs = { + "leaf_width": np.random.rand() * 0.5 + 0.1, + "alpha": np.random.rand() * 0.3, + } + + if season == "winter": + leaf_density = np.random.uniform(0.0, 0.1) + leaf_inst = 1 + elif season == "spring": # flowers should be less dense + leaf_density = np.random.uniform(0.3, 0.7) + leaf_inst = 2 + else: + leaf_density = np.random.uniform(0.4, 1.0) + leaf_inst = 3 + + twig_kargs = TreeParams( + skeleton=generate_twig_config(), + skinning={"Max radius": 0.01, "Min radius": 0.005}, + trunk_spacecol=None, + roots_spacecol=None, + child_placement={ + "Density": leaf_density, + "Multi inst": leaf_inst, + "Min scale": 0.3, + "Max scale": 0.4, + }, + ) + tree_kargs = generate_tree_config(tree_genome, season=season) + return tree_kargs, twig_kargs, leaf_kargs + + +def generate_coral_config(tree_genome=None): + """ + Main latent params that we might want to control: + - overall size/"age" + - trunk straightness + - additional "trunks" + - starting height of branches + - outgoing branch angle (parallel to ground vs angled up vs angled proporitionally to height) + - branch density + - branch length (fn of height) + - branch straightness + - pull direction (up/down/to the side) + - outgrowth (space filling) / "density" + - branch thickness (ideally this behaves reasonably based on everything else) + """ + if tree_genome is None: + tree_genome = np.random.rand(32) + + cfg = parse_genome(tree_genome) + sz = calc_height(cfg["size"]) + n_tree_pts = int(sz) + n_trunks = np.random.randint(5, 20) # int(10 ** (cfg['n_trunks']**1.6)) + ex = np.exp((6 - (5 if n_trunks > 1 else 0)) * (cfg["trunk_warp"] - 0.1)) + trunk_std = ((1 - (ex / (1 + ex))) * 4) ** 2 + trunk_mtm = max(0.2, min(0.95, (1 / (trunk_std + 1)) + np.random.randn() * 0.2)) + radial_out = False # np.random.rand() < .3 + avail_idxs = np.arange(n_tree_pts) + start_idx = 1 + int(n_tree_pts * np.random.uniform(0, 0.7)) + sample_density = np.random.choice( + np.arange(np.ceil(np.sqrt(n_tree_pts)), dtype=int) + 1 + ) + avail_idxs = avail_idxs[start_idx::sample_density] + multi_branch = int(5 ** (cfg["multi_branch"] ** 1.6)) + avail_idxs = np.repeat(avail_idxs, multi_branch).flatten() + + n = 0 # len(avail_idxs) + + start_ht = sz * (start_idx / sz) + 1 + box_ht = (sz - start_ht) * 0.6 + + def tmp_att_fn(nodes): + branch_pts = mesh.get_pts_from_shape( + bpy.ops.mesh.primitive_cube_add, + n=500, + scaling=[sz / 2, sz / 2, box_ht], + pt_offset=[0, 0, start_ht + sz * 0.4], + ) + return branch_pts + + max_sz = 1 + + branch_config = { + "n": n, + "path_kargs": lambda idx: { + "n_pts": int(n_tree_pts * np.random.uniform(0.4, 0.6)), + "sz": 1, + "std": 0.4, + "momentum": 0.8, + "pull_dir": [0, 0, np.random.rand()], + "pull_factor": np.random.rand(), + }, + "spawn_kargs": lambda idx: {"rnd_idx": avail_idxs[idx]}, + } + + tree_config = { + "n": n_trunks, + "path_kargs": lambda idx: ( + { + "n_pts": n_tree_pts, + "sz": 1, + "std": trunk_std, + "momentum": trunk_mtm, + "pull_dir": [0, 0, 1], + } + ), + "spawn_kargs": lambda idx: {"init_vec": [0, 0, 1]}, + "children": [branch_config], + } + + tmp_D = 0.3 + 0.2 * (sz / 30) # .3 * sz / 8 + tmp_s = tmp_D * 1.3 + if n < 5: + n_updates = np.random.choice([2, 3, int(1 + sz // 2)]) + else: + n_updates = np.random.choice([2, 2, 2, 3, 4, 5]) + # print(sz, n_updates) 
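The docstrings of `generate_tree_config` and `generate_coral_config` enumerate the latent parameters, but the mapping from a raw genome vector to those parameters is easy to miss among the config dictionaries. A small illustrative sketch of that mapping, using only names defined in this file (values are random, not taken from any real scene; note the module imports `bpy`, so it must run inside Blender's Python):

```python
# Illustrative sketch of the genome -> named-parameter mapping described in the
# docstrings above; not part of the code added by this diff.
import numpy as np

from infinigen.assets.objects.trees.treeconfigs import calc_height, parse_genome

genome = np.random.rand(32)                      # only the first 15 entries receive named keys
cfg = parse_genome(genome)                       # {"size": ..., "trunk_warp": ..., "multi_branch": ..., ...}
height = calc_height(cfg["size"], min_ht=12)     # generate_tree_config maps the "size" gene to height this way
n_trunks = int(10 ** (cfg["n_trunks"] ** 1.6))   # 1 to ~9 trunks, skewed toward fewer
print(height, n_trunks)
```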
+ n_updates = 3 + max_radius = 0.3 # 00 + merge_size = np.random.uniform(0.2, 0.7) + growth_amt = 1.01 + + return { + "config": tree_config, + "D_": tmp_D, + "s": tmp_s, + "d": 10, + "pull_dir": [0, 0, np.random.randn() * 0.3], + # np.random.randint(15) + 3, + "init_att_fn": tmp_att_fn, + "n_updates": n_updates, + "radii_kargs": { + "max_radius": max_radius, + "merge_size": merge_size, + "min_radius": 0.2, + "growth_amt": growth_amt, + }, + "leaf_kargs": { + "max_density": 0 if np.random.rand() < 0.1 else np.random.uniform(5, 20), + "scale": np.random.uniform(0.5, 1), + }, + } + + +def random_coral(genome=None): + leaf_kargs = {} + twig_kargs = {} + tree_kargs = generate_coral_config(genome) + return tree_kargs, twig_kargs, leaf_kargs diff --git a/infinigen/assets/objects/trees/utils/geometrynodes.py b/infinigen/assets/objects/trees/utils/geometrynodes.py new file mode 100644 index 000000000..a3e8f714f --- /dev/null +++ b/infinigen/assets/objects/trees/utils/geometrynodes.py @@ -0,0 +1,1047 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Alejandro Newell + +import bpy +import numpy as np + +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes + +from . import helper, mesh +from .materials import new_link + +C = bpy.context +D = bpy.data + + +def add_node_modifier(obj): + # Add geometry node modifier + helper.set_active_obj(obj) + # bpy.ops.node.new_geometry_nodes_modifier() # Blender 3.2 + bpy.ops.object.modifier_add(type="NODES") # Blender 3.1 + return obj.modifiers[-1] + + +def setup_inps(ng, inp, nodes): + for k_idx, (k, node, attr) in enumerate(nodes): + new_link(ng, inp, k_idx, node, attr) + ng.inputs[k_idx].name = k + + +@node_utils.to_nodegroup("CollectionDistribute", singleton=False) +def coll_distribute(nw, merge_dist=None): + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketBool", "Selection", True), + ("NodeSocketCollection", "Collection", None), + ("NodeSocketInt", "Multi inst", 1), + ("NodeSocketFloat", "Density", 0.5), + ("NodeSocketFloat", "Min scale", 0.0), + ("NodeSocketFloat", "Max scale", 1.0), + ("NodeSocketFloat", "Pitch scaling", 0.2), + ("NodeSocketFloat", "Pitch offset", 0.0), + ("NodeSocketFloat", "Pitch variance", 0.4), + ("NodeSocketFloat", "Yaw variance", 0.4), + ("NodeSocketBool", "Realize Instance", False), + ], + ) + + mesh_to_curve = nw.new_node( + "GeometryNodeMeshToCurve", + input_kwargs={ + "Mesh": group_input.outputs["Geometry"], + "Selection": group_input.outputs["Selection"], + }, + ) + + curve_to_points = nw.new_node( + "GeometryNodeCurveToPoints", + input_kwargs={ + "Curve": mesh_to_curve, + "Count": group_input.outputs["Multi inst"], + }, + ) + + mesh_to_points = nw.new_node( + "GeometryNodeMeshToPoints", + input_kwargs={ + "Mesh": group_input.outputs["Geometry"], + "Selection": group_input.outputs["Selection"], + }, + ) + + position = nw.new_node(Nodes.InputPosition) + + transfer_attribute_index = nw.new_node( + Nodes.SampleNearest, input_kwargs={"Geometry": mesh_to_points} + ) + + transfer_attribute = nw.new_node( + Nodes.SampleIndex, + input_kwargs={ + "Geometry": mesh_to_points, + "Value": position, + "Index": transfer_attribute_index, + }, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": 
curve_to_points.outputs["Points"], + "Position": (transfer_attribute, "Value"), + }, + ) + + random_value = nw.new_node(Nodes.RandomValue) + + math = nw.new_node( + Nodes.Math, + input_kwargs={0: random_value.outputs[1], 1: group_input.outputs["Density"]}, + attrs={"operation": "LESS_THAN"}, + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": curve_to_points.outputs["Rotation"]} + ) + + math_1 = nw.new_node( + Nodes.Math, input_kwargs={0: separate_xyz.outputs["X"], 1: 1.5708} + ) + + math_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: math_1, 1: group_input.outputs["Pitch scaling"]}, + attrs={"operation": "MULTIPLY"}, + ) + + math_3 = nw.new_node( + Nodes.Math, input_kwargs={0: math_2, 1: group_input.outputs["Pitch offset"]} + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": math_3, "Z": separate_xyz.outputs["Z"]} + ) + + math_4 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Pitch variance"], 1: -1.0}, + attrs={"operation": "MULTIPLY"}, + ) + + random_value_1 = nw.new_node( + Nodes.RandomValue, + input_kwargs={2: math_4, 3: group_input.outputs["Pitch variance"]}, + ) + + math_5 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Yaw variance"], 1: -1.0}, + attrs={"operation": "MULTIPLY"}, + ) + + random_value_2 = nw.new_node( + Nodes.RandomValue, + input_kwargs={2: math_5, 3: group_input.outputs["Yaw variance"]}, + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": random_value_1.outputs[1], "Z": random_value_2.outputs[1]}, + ) + + vector_math = nw.new_node( + Nodes.VectorMath, input_kwargs={0: combine_xyz, 1: combine_xyz_1} + ) + + random_value_3 = nw.new_node( + Nodes.RandomValue, + input_kwargs={ + 2: group_input.outputs["Min scale"], + 3: group_input.outputs["Max scale"], + }, + ) + + geo = nw.new_node( + Nodes.CollectionInfo, + input_kwargs={ + "Collection": group_input.outputs["Collection"], + "Separate Children": True, + "Reset Children": True, + }, + ) + + if merge_dist is not None: + geo = nw.new_node(Nodes.MergeByDistance, [geo, None, merge_dist]) + + instance_on_points = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={ + "Points": set_position, + "Selection": math, + "Instance": geo, + "Pick Instance": True, + "Rotation": vector_math.outputs["Vector"], + "Scale": random_value_3.outputs[1], + }, + ) + + realize_instances = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": instance_on_points} + ) + + switch = nw.new_node( + Nodes.Switch, + input_kwargs={ + 1: group_input.outputs["Realize Instance"], + 14: instance_on_points, + 15: realize_instances, + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": switch.outputs[6]} + ) + + +@node_utils.to_nodegroup("PhylloDist", singleton=False) +def phyllotaxis_distribute(nw): + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketInt", "Count", 50), + ("NodeSocketFloat", "Max radius", 2.0), + ("NodeSocketFloat", "Radius exp", 0.5), + ("NodeSocketFloat", "Inner pct", 0.0), + ("NodeSocketFloat", "Min angle", -0.5236), + ("NodeSocketFloat", "Max angle", 0.7854), + ("NodeSocketFloat", "Min scale", 0.3), + ("NodeSocketFloat", "Max scale", 0.3), + ("NodeSocketFloat", "Min z", 0.0), + ("NodeSocketFloat", "Max z", 1.0), + ("NodeSocketFloat", "Clamp z", 1.0), + ("NodeSocketFloat", "Yaw offset", -np.pi / 2), + ], + ) + + mesh_line = nw.new_node( + "GeometryNodeMeshLine", input_kwargs={"Count": 
group_input.outputs["Count"]} + ) + + mesh_to_points = nw.new_node( + "GeometryNodeMeshToPoints", input_kwargs={"Mesh": mesh_line} + ) + + position = nw.new_node(Nodes.InputPosition) + + capture_attribute = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={"Geometry": mesh_to_points, 1: position}, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + index = nw.new_node("GeometryNodeInputIndex") + + value = nw.new_node(Nodes.Value) + value.outputs[0].default_value = 1.0 + + math = nw.new_node( + Nodes.Math, input_kwargs={0: index, 1: value}, attrs={"operation": "DIVIDE"} + ) + + math_1 = nw.new_node( + Nodes.Math, input_kwargs={0: math}, attrs={"operation": "FLOOR"} + ) + + math_6 = nw.new_node( + Nodes.Math, input_kwargs={0: math_1, 1: 2.3998}, attrs={"operation": "MULTIPLY"} + ) + + math_2 = nw.new_node( + Nodes.Math, input_kwargs={0: math}, attrs={"operation": "FRACT"} + ) + + math_5 = nw.new_node( + Nodes.Math, input_kwargs={0: math_2, 1: 6.2832}, attrs={"operation": "MULTIPLY"} + ) + + math_7 = nw.new_node(Nodes.Math, input_kwargs={0: math_6, 1: math_5}) + + math_8 = nw.new_node( + Nodes.Math, input_kwargs={0: math_7}, attrs={"operation": "COSINE"} + ) + + math_9 = nw.new_node( + Nodes.Math, input_kwargs={0: math_7}, attrs={"operation": "SINE"} + ) + + combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={"X": math_8, "Y": math_9}) + + math_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Count"], 1: value}, + attrs={"operation": "DIVIDE"}, + ) + + math_4 = nw.new_node( + Nodes.Math, input_kwargs={0: math_1, 1: math_3}, attrs={"operation": "DIVIDE"} + ) + + math_10 = nw.new_node( + Nodes.Math, + input_kwargs={0: math_4, 1: group_input.outputs["Radius exp"]}, + attrs={"operation": "POWER"}, + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": math_10, 3: group_input.outputs["Inner pct"]}, + ) + + math_11 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: map_range.outputs["Result"], + 1: group_input.outputs["Max radius"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": math_4, 3: 1.5708, 4: 1.5708} + ) + + math_12 = nw.new_node( + Nodes.Math, + input_kwargs={0: map_range_1.outputs["Result"]}, + attrs={"operation": "SINE"}, + ) + + math_13 = nw.new_node( + Nodes.Math, + input_kwargs={0: math_11, 1: math_12}, + attrs={"operation": "MULTIPLY"}, + ) + + vector_math = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: combine_xyz, 1: math_13}, + attrs={"operation": "MULTIPLY"}, + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": vector_math.outputs["Vector"]} + ) + + map_range_2 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": math_4, + 2: group_input.outputs["Clamp z"], + 3: group_input.outputs["Min z"], + 4: group_input.outputs["Max z"], + }, + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": separate_xyz.outputs["X"], + "Y": separate_xyz.outputs["Y"], + "Z": map_range_2.outputs["Result"], + }, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": capture_attribute.outputs["Geometry"], + "Position": combine_xyz_1, + }, + ) + + attribute_statistic = nw.new_node( + Nodes.AttributeStatistic, + input_kwargs={ + "Geometry": capture_attribute.outputs["Geometry"], + 2: map_range.outputs["Result"], + }, + ) + + map_range_3 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": map_range.outputs["Result"], + 1: attribute_statistic.outputs["Max"], + 2: 
attribute_statistic.outputs["Min"], + 3: group_input.outputs["Min angle"], + 4: group_input.outputs["Max angle"], + }, + ) + + random_value_1 = nw.new_node(Nodes.RandomValue, input_kwargs={2: -0.1, 3: 0.1}) + + math_14 = nw.new_node( + Nodes.Math, input_kwargs={0: math_7, 1: group_input.outputs["Yaw offset"]} + ) + + combine_xyz_2 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": map_range_3.outputs["Result"], + "Y": random_value_1.outputs[1], + "Z": math_14, + }, + ) + + random_value = nw.new_node( + Nodes.RandomValue, + input_kwargs={ + 2: group_input.outputs["Min scale"], + 3: group_input.outputs["Max scale"], + }, + ) + + instance_on_points = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={ + "Points": set_position, + "Instance": group_input.outputs["Geometry"], + "Rotation": combine_xyz_2, + "Scale": random_value.outputs[1], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Instances": instance_on_points} + ) + + +@node_utils.to_nodegroup("FollowCurve", singleton=False) +def follow_curve(nw): + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketGeometry", "Curve", None), + ("NodeSocketFloat", "Offset", 0.5), + ], + ) + + position = nw.new_node(Nodes.InputPosition) + + capture_attribute = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={"Geometry": group_input.outputs["Geometry"], 1: position}, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, + input_kwargs={"Vector": capture_attribute.outputs["Attribute"]}, + ) + + math = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["Z"], 1: group_input.outputs["Offset"]}, + ) + + sample_curve = nw.new_node( + "GeometryNodeSampleCurve", + input_kwargs={"Curve": group_input.outputs["Curve"], "Length": math}, + ) + + vector_math = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: sample_curve.outputs["Tangent"], + 1: sample_curve.outputs["Normal"], + }, + attrs={"operation": "CROSS_PRODUCT"}, + ) + + vector_math_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: vector_math.outputs["Vector"], + "Scale": separate_xyz.outputs["X"], + }, + attrs={"operation": "SCALE"}, + ) + + vector_math_2 = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: sample_curve.outputs["Normal"], + "Scale": separate_xyz.outputs["Y"], + }, + attrs={"operation": "SCALE"}, + ) + + vector_math_3 = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: vector_math_1.outputs["Vector"], + 1: vector_math_2.outputs["Vector"], + }, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": capture_attribute.outputs["Geometry"], + "Position": sample_curve.outputs["Position"], + "Offset": vector_math_3.outputs["Vector"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_position} + ) + + +@node_utils.to_nodegroup("SetTreeRadius", singleton=False, type="GeometryNodeTree") +def set_tree_radius(nw): + # Code generated using version 2.3.1 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketBool", "Selection", True), + ("NodeSocketFloat", "Reverse depth", 0.5), + ("NodeSocketFloat", "Scaling", 0.2), + ("NodeSocketFloat", "Exponent", 1.5), + ("NodeSocketFloat", "Min radius", 0.02), + ("NodeSocketFloat", "Max radius", 5.0), + ("NodeSocketInt", "Profile res", 20), + ("NodeSocketFloatDistance", "Merge dist", 0.001), + ], + ) + + 
mesh_to_curve = nw.new_node( + Nodes.MeshToCurve, + input_kwargs={ + "Mesh": group_input.outputs["Geometry"], + "Selection": group_input.outputs["Selection"], + }, + ) + + set_spline_type = nw.new_node( + Nodes.CurveSplineType, + input_kwargs={"Curve": mesh_to_curve}, + attrs={"spline_type": "BEZIER"}, + ) + + set_handle_type = nw.new_node( + Nodes.SetHandleType, input_kwargs={"Curve": set_spline_type} + ) + + position = nw.new_node(Nodes.InputPosition) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, input_kwargs={"Vector": position, "Scale": 1.0} + ) + + scale = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: noise_texture.outputs["Color"], "Scale": 0.02}, + attrs={"operation": "SCALE"}, + ) + + set_handle_positions = nw.new_node( + Nodes.SetHandlePositions, + input_kwargs={"Curve": set_handle_type, "Offset": scale.outputs["Vector"]}, + ) + + switch = nw.new_node( + Nodes.Switch, + input_kwargs={1: True, 14: mesh_to_curve, 15: set_handle_positions}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["Reverse depth"], + 1: group_input.outputs["Scaling"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, input_kwargs={0: multiply, 1: 0.1}, attrs={"operation": "MULTIPLY"} + ) + + power = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_1, 1: group_input.outputs["Exponent"]}, + attrs={"operation": "POWER"}, + ) + + maximum = nw.new_node( + Nodes.Math, + input_kwargs={0: power, 1: group_input.outputs["Min radius"]}, + attrs={"operation": "MAXIMUM"}, + ) + + minimum = nw.new_node( + Nodes.Math, + input_kwargs={0: maximum, 1: group_input.outputs["Max radius"]}, + attrs={"operation": "MINIMUM"}, + ) + + set_curve_radius = nw.new_node( + Nodes.SetCurveRadius, + input_kwargs={"Curve": switch.outputs[6], "Radius": minimum}, + ) + + curve_circle = nw.new_node( + Nodes.CurveCircle, + input_kwargs={"Resolution": group_input.outputs["Profile res"]}, + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": set_curve_radius, + "Profile Curve": curve_circle.outputs["Curve"], + "Fill Caps": True, + }, + ) + + set_shade_smooth = nw.new_node( + Nodes.SetShadeSmooth, + input_kwargs={"Geometry": curve_to_mesh, "Shade Smooth": False}, + ) + + merge_by_distance = nw.new_node( + Nodes.MergeByDistance, + input_kwargs={ + "Geometry": set_shade_smooth, + "Distance": group_input.outputs["Merge dist"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": merge_by_distance} + ) + + +@node_utils.to_material("BarkMat2", singleton=False) +def bark_shader_2(nw): + attribute = nw.new_node( + Nodes.Attribute, attrs={"attribute_name": "offset_barkgeo2"} + ) + + reroute = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": attribute.outputs["Color"]} + ) + + math = nw.new_node(Nodes.Math, input_kwargs={0: reroute, 1: 0.0}) + + colorramp_1 = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": math}) + for i in range(2): + colorramp_1.color_ramp.elements.new(0) + colorramp_1.color_ramp.elements[0].position = 0.0 + # colorramp_1.color_ramp.elements[0].color = (0.0025, 0.0019, 0.0017, 1.0) + colorramp_1.color_ramp.elements[0].color = (0.1004, 0.049, 0.0344, 1.0) + colorramp_1.color_ramp.elements[1].position = 0.163 + colorramp_1.color_ramp.elements[1].color = (0.1004, 0.049, 0.0344, 1.0) + colorramp_1.color_ramp.elements[2].position = 0.4529 + colorramp_1.color_ramp.elements[2].color = (0.1094, 0.0656, 0.054, 1.0) + colorramp_1.color_ramp.elements[3].position = 0.6268 + 
colorramp_1.color_ramp.elements[3].color = (0.0712, 0.0477, 0.0477, 1.0) + + math_1 = nw.new_node( + Nodes.Math, input_kwargs={0: 1.0, 1: reroute}, attrs={"operation": "SUBTRACT"} + ) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={"Base Color": colorramp_1.outputs["Color"], "Roughness": math_1}, + attrs={"subsurface_method": "BURLEY"}, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf} + ) + + +@node_utils.to_material("BarkMat1", singleton=False) +def bark_shader_1(nw): + texture_coordinate = nw.new_node(Nodes.TextureCoord) + + mapping = nw.new_node( + Nodes.Mapping, input_kwargs={"Vector": texture_coordinate.outputs["Object"]} + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={"Vector": mapping, "Detail": 16.0, "Roughness": 0.62}, + ) + + attribute = nw.new_node( + Nodes.Attribute, attrs={"attribute_name": "offset_barkgeo1"} + ) + + mix = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Color1": noise_texture.outputs["Fac"], + "Color2": attribute.outputs["Color"], + }, + attrs={"blend_type": "MULTIPLY"}, + ) + + colorramp = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": mix}) + colorramp.color_ramp.elements.new(1) + colorramp.color_ramp.elements[0].position = 0.0 + colorramp.color_ramp.elements[0].color = (0.0171, 0.005, 0.0, 1.0) + colorramp.color_ramp.elements[1].position = 0.4636 + colorramp.color_ramp.elements[1].color = (0.1132, 0.0653, 0.0471, 1.0) + colorramp.color_ramp.elements[2].position = 1.0 + colorramp.color_ramp.elements[2].color = (0.2243, 0.1341, 0.1001, 1.0) + + colorramp_2 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": noise_texture.outputs["Fac"]} + ) + colorramp_2.color_ramp.elements[0].position = 0.0 + colorramp_2.color_ramp.elements[0].color = (0.5173, 0.5173, 0.5173, 1.0) + colorramp_2.color_ramp.elements[1].position = 1.0 + colorramp_2.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": colorramp.outputs["Color"], + "Roughness": colorramp_2.outputs["Color"], + }, + attrs={"subsurface_method": "BURLEY"}, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf} + ) + + +@node_utils.to_nodegroup("BarkGeo2", singleton=False) +def bark_geo_2(nw): + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) + + position = nw.new_node(Nodes.InputPosition) + + vector = nw.new_node(Nodes.Vector) + vector.vector = (0.1, 0.1, 0.1) + + vector_math_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: position, 1: vector}, + attrs={"operation": "MULTIPLY"}, + ) + + value_2 = nw.new_node(Nodes.Value) + value_2.outputs[0].default_value = 0.38 + + value = nw.new_node(Nodes.Value) + value.outputs[0].default_value = 5.0 + + value_1 = nw.new_node(Nodes.Value) + value_1.outputs[0].default_value = 2.0 + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": vector_math_1.outputs["Vector"], + "Scale": value, + "Detail": value_1, + }, + ) + + mix = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": value_2, + "Color1": noise_texture.outputs["Color"], + "Color2": (0.0, 0.0, 0.0, 1.0), + }, + ) + + vector_math_2 = nw.new_node( + Nodes.VectorMath, input_kwargs={0: vector_math_1.outputs["Vector"], 1: mix} + ) + + value_4 = nw.new_node(Nodes.Value) + value_4.outputs[0].default_value = 0.0 + + value_3 = nw.new_node(Nodes.Value) + value_3.outputs[0].default_value 
= 20.0 + + voronoi_texture = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={ + "Vector": vector_math_2.outputs["Vector"], + "W": value_4, + "Scale": value_3, + }, + attrs={"voronoi_dimensions": "4D", "feature": "F2"}, + ) + + math_3 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: voronoi_texture.outputs["Distance"], + 1: voronoi_texture.outputs["Distance"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + voronoi_texture_1 = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={ + "Vector": vector_math_2.outputs["Vector"], + "W": value_4, + "Scale": value_3, + }, + attrs={"voronoi_dimensions": "4D"}, + ) + + math_4 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: voronoi_texture_1.outputs["Distance"], + 1: voronoi_texture_1.outputs["Distance"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + math_5 = nw.new_node( + Nodes.Math, input_kwargs={0: math_3, 1: math_4}, attrs={"operation": "SUBTRACT"} + ) + + value_5 = nw.new_node(Nodes.Value) + value_5.outputs[0].default_value = 0.6 + + math_7 = nw.new_node( + Nodes.Math, input_kwargs={0: math_5, 1: value_5}, attrs={"operation": "MINIMUM"} + ) + + math_6 = nw.new_node( + Nodes.Math, input_kwargs={0: math_5, 1: value_5}, attrs={"operation": "MAXIMUM"} + ) + + value_6 = nw.new_node(Nodes.Value) + value_6.outputs[0].default_value = 0.1 + + math_8 = nw.new_node( + Nodes.Math, + input_kwargs={0: math_6, 1: value_6}, + attrs={"operation": "MULTIPLY"}, + ) + + math_9 = nw.new_node( + Nodes.Math, input_kwargs={0: math_7, 1: math_8}, attrs={"operation": "SUBTRACT"} + ) + + normal = nw.new_node(Nodes.InputNormal) + + vector_math_3 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: math_9, 1: normal}, + attrs={"operation": "MULTIPLY"}, + ) + + face_area = nw.new_node("GeometryNodeInputMeshFaceArea") + + math_1 = nw.new_node( + Nodes.Math, input_kwargs={0: face_area}, attrs={"operation": "SQRT"} + ) + + value_7 = nw.new_node(Nodes.Value) + value_7.outputs[0].default_value = 2.0 + + math = nw.new_node( + Nodes.Math, + input_kwargs={0: math_1, 1: value_7}, + attrs={"operation": "MULTIPLY"}, + ) + + vector_math_4 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: vector_math_3.outputs["Vector"], 1: math}, + attrs={"operation": "MULTIPLY"}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + "Offset": vector_math_4.outputs["Vector"], + }, + ) + + capture_attribute = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={"Geometry": set_position, 1: math_7}, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": capture_attribute.outputs["Geometry"], + "offset_barkgeo2": capture_attribute.outputs["Attribute"], + }, + ) + + +@node_utils.to_nodegroup("BarkGeo1", singleton=False) +def bark_geo_1(nw): + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) + + position = nw.new_node(Nodes.InputPosition) + + value = nw.new_node(Nodes.Value) + value.outputs[0].default_value = 0.2 + + vector_math = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: position, 1: value}, + attrs={"operation": "MULTIPLY"}, + ) + + value_1 = nw.new_node(Nodes.Value) + value_1.outputs[0].default_value = 10.0 + + value_2 = nw.new_node(Nodes.Value) + value_2.outputs[0].default_value = 15.0 + + wave_texture = nw.new_node( + Nodes.WaveTexture, + input_kwargs={ + "Vector": vector_math.outputs["Vector"], + "Scale": value_1, + "Distortion": value_2, + }, + ) + + normal = 
nw.new_node(Nodes.InputNormal) + + vector_math_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: wave_texture.outputs["Color"], 1: normal}, + attrs={"operation": "MULTIPLY"}, + ) + + face_area = nw.new_node("GeometryNodeInputMeshFaceArea") + + math_1 = nw.new_node( + Nodes.Math, input_kwargs={0: face_area}, attrs={"operation": "SQRT"} + ) + + value_3 = nw.new_node(Nodes.Value) + value_3.outputs[0].default_value = 1.0 + + math = nw.new_node( + Nodes.Math, + input_kwargs={0: math_1, 1: value_3}, + attrs={"operation": "MULTIPLY"}, + ) + + vector_math_2 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: vector_math_1.outputs["Vector"], 1: math}, + attrs={"operation": "MULTIPLY"}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + "Offset": vector_math_2.outputs["Vector"], + }, + ) + + capture_attribute = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={"Geometry": set_position, 1: wave_texture.outputs["Color"]}, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": capture_attribute.outputs["Geometry"], + "offset_barkgeo1": capture_attribute.outputs["Attribute"], + }, + ) + + +""" +def create_berry(sphere): + # Create a sphere + phyllotaxis_distribute('berry', sphere, + min_radius_pct=0, max_radius=1, + sin_max=2.5, sin_clamp_max=.8, + z_max=.8, z_clamp=.7) +""" + + +def sample_points_and_normals(obj, max_density=3, surface_dist=1, max_points=10000): + # Need to instantiate point distribute + m = add_node_modifier(obj) + ng = m.node_group + inp = ng.nodes.get("Group Input") + out = ng.nodes.get("Group Output") + dist = ng.nodes.new(type="GeometryNodeDistributePointsOnFaces") + pos = ng.nodes.new("GeometryNodeInputPosition") + scale_factor = ng.nodes.new("ShaderNodeValue") + mult_normal = ng.nodes.new("ShaderNodeVectorMath") + add_pos = ng.nodes.new("ShaderNodeVectorMath") + set_pos = ng.nodes.new("GeometryNodeSetPosition") + to_vtx = ng.nodes.new("GeometryNodePointsToVertices") + + new_link(ng, inp, "Geometry", dist, "Mesh") + new_link(ng, dist, "Normal", mult_normal, 0) + new_link(ng, scale_factor, 0, mult_normal, 1) + new_link(ng, pos, 0, add_pos, 0) + new_link(ng, mult_normal, 0, add_pos, 1) + new_link(ng, dist, "Points", set_pos, "Geometry") + new_link(ng, add_pos, 0, set_pos, "Position") + new_link(ng, set_pos, "Geometry", to_vtx, "Points") + new_link(ng, to_vtx, "Mesh", out, "Geometry") + + mult_normal.operation = "MULTIPLY" + scale_factor.outputs[0].default_value = surface_dist + dist.distribute_method = "POISSON" + dist.inputs.get("Density Max").default_value = max_density + + # Get point coordinates + dgraph = C.evaluated_depsgraph_get() + obj_eval = obj.evaluated_get(dgraph) + vtx = mesh.vtx2cds(obj_eval.data.vertices, obj_eval.matrix_world) + + # Get normals + scale_factor.outputs[0].default_value = 1 + for l in ng.links: + if l.from_node == pos: + ng.links.remove(l) + + dgraph = C.evaluated_depsgraph_get() + obj_eval = obj.evaluated_get(dgraph) + normals = mesh.vtx2cds(obj_eval.data.vertices, np.eye(4)) + + obj.modifiers.remove(obj.modifiers[-1]) + D.node_groups.remove(ng) + + idxs = mesh.subsample_vertices(vtx, max_num=max_points) + return vtx[idxs], normals[idxs] diff --git a/infinigen/assets/objects/trees/utils/helper.py b/infinigen/assets/objects/trees/utils/helper.py new file mode 100644 index 000000000..af4dd5915 --- /dev/null +++ b/infinigen/assets/objects/trees/utils/helper.py @@ -0,0 +1,251 @@ +# Copyright (c) 
Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Alejandro Newell + + +import bpy +import numpy as np + +from infinigen.core.util.logging import Suppress + +C = bpy.context +D = bpy.data + + +def set_active_obj(obj): + if not C.active_object == obj: + try: + bpy.ops.object.mode_set(mode="OBJECT") + except Exception: # TODO narrow + pass + bpy.ops.object.select_all(action="DESELECT") + obj.select_set(True) + C.view_layer.objects.active = obj + bpy.ops.object.mode_set(mode="OBJECT") + + +def config_rendering( + resolution=(480, 640), + renderer="cycles", + render_samples=64, + render_exr=False, + thread_limit=8, +): + """Adjust rendering settings. + + Args: + resolution: Integer tuple for image resolution + renderer: Either 'cycles' or 'eevee' + render_samples: Integer that determines sample quality, rendering time + use_gpu: Whether to use the GPU for rendering + render_exr: Set true to output segmentation and depth ground truth + """ + + if renderer == "eevee": + C.scene.render.engine = "BLENDER_EEVEE" + C.scene.eevee.taa_render_samples = render_samples + + elif renderer == "cycles": + C.scene.render.engine = "CYCLES" + # C.scene.cycles.device = 'GPU' + C.scene.cycles.samples = render_samples + C.scene.cycles.use_denoising = True + # C.scene.cycles.denoiser = 'OPTIX' + + C.scene.render.resolution_x = resolution[1] + C.scene.render.resolution_y = resolution[0] + # C.scene.render.threads_mode = 'FIXED' + # C.scene.render.threads = thread_limit + + if render_exr: + C.scene.render.image_settings.file_format = "OPEN_EXR_MULTILAYER" + C.scene.render.image_settings.color_mode = "RGBA" + C.scene.render.image_settings.color_depth = "32" + C.window.view_layer.use_pass_object_index = True + C.window.view_layer.use_pass_material_index = True + C.window.view_layer.use_pass_z = True + + else: + C.scene.render.image_settings.color_mode = "RGB" + + +def create_collection(name, objs): + c_names = [] + for c_idx, c in enumerate(D.collections): + if c_idx > 0: + c_names += [c.name] + + name_ = name + count = 1 + while name_ in c_names: + name_ = f"{name}_{count}" + count += 1 + + bpy.ops.object.select_all(action="DESELECT") + for o in objs: + o.select_set(True) + + with Suppress(): + bpy.ops.object.move_to_collection( + collection_index=0, is_new=True, new_collection_name=name_ + ) + + return name_ + + +def traverse_tree(t): + # https://blender.stackexchange.com/questions/172559/python-how-to-move-collection-into-another-collection + yield t + for child in t.children: + yield from traverse_tree(child) + + +def parent_lookup(coll): + parent_lookup = {} + for coll in traverse_tree(coll): + for c in coll.children.keys(): + parent_lookup.setdefault(c, coll) + return parent_lookup + + +def collect_collections(name, colls): + # Get all collections of the scene and their parents in a dict + coll_scene = C.scene.collection + coll_parents = parent_lookup(coll_scene) + + # Create target collection + D.collections.new(name) + coll_target = D.collections[name] + coll_scene.children.link(coll_target) + + for coll in colls: + coll_parent = coll_parents.get(coll.name) + coll_parent.children.unlink(coll) + coll_target.children.link(coll) + + +def remove_collection(name): + collection = D.collections.get(name) + for obj in collection.objects: + D.objects.remove(obj, do_unlink=True) + D.collections.remove(collection) + + +def hide_collection(collection): + if isinstance(collection, str): + name = collection + 
collection = D.collections[name] + else: + name = collection.name + + vlayer = C.scene.view_layers[0] + vlayer.layer_collection.children[name].hide_viewport = True + collection.hide_render = True + + +def clear_collections(): + c_names = [] + for c_idx, c in enumerate(D.collections): + if c_idx > 0: + c_names += [c.name] + + for c_name in c_names: + remove_collection(c_name) + + +def run_cleanup(): + for d in [D.meshes, D.materials, D.images, D.particles]: + for d_ in d: + if d_.users == 0: + d.remove(d_) + for d in [D.textures, D.node_groups]: + for d_ in d: + d.remove(d_) + + +def reset_scene(add_camera=False, clear_materials=False, obj_to_keep_list=[]): + """Clear and reset scene.""" + set_active_obj(D.objects[0]) + + for obj in D.objects: + obj.hide_viewport = False + + # Delete everything + clear_collections() + # bpy.ops.object.select_all(action='SELECT') + for obj in bpy.context.scene.objects: + if obj.name not in obj_to_keep_list: + obj.select_set(True) + bpy.ops.object.delete(confirm=False) + run_cleanup() + + if add_camera: + # Initialize camera + v = min(1, max(0, (np.random.randn() * 0.3 + 0.5))) + v = 0 + camera_height = 0.5 + 3 * v # np.random.uniform(1,5) # + np.random.randn() * .2 + camera_pitch = np.pi * 0.45 # + np.random.randn() * np.pi * .1 + camera_pitch = min(max(camera_pitch, np.pi * 0.4), np.pi * 0.5) + camera_pitch = np.pi * 0.65 # (1-v) * np.pi * .6 + np.pi * .2 + + camera_pitch = np.pi * 0.5 + camera_height = 3 + + bpy.ops.object.camera_add( + location=(0, -6, camera_height), rotation=(camera_pitch, 0, 0) + ) + cam = D.objects[0] + C.scene.camera = cam + cam.data.lens = 20 + + if clear_materials: # Regardless of number of users + for m_idx in range(len(D.materials)): + D.materials.remove(D.materials[-1]) + + +# ============================================================================== +# Transformation utils +# ============================================================================== + + +def compute_dists(a, b): + deltas = a[:, None] - b[None] + d = np.linalg.norm(deltas, axis=-1) + return d, deltas + + +def get_cos_sin(angle, convert_to_rad=False): + if convert_to_rad: + angle = angle * np.pi / 180 + return np.cos(angle), np.sin(angle) + + +def rodrigues_rot(vec, axis, angle, convert_to_rad=False): + axis = axis / np.linalg.norm(axis) + cs, sn = get_cos_sin(angle, convert_to_rad) + return vec * cs + sn * np.cross(axis, vec) + axis * np.dot(axis, vec) * (1 - cs) + + +def get_T_mat(distance, angle, convert_to_rad=True): + T = np.identity(3) + T[0, 2] = distance + rot = np.identity(3) + cs, sn = get_cos_sin(angle, convert_to_rad) + rot[0, :2] = cs, -sn + rot[1, :2] = sn, cs + + return np.matmul(rot, T) + + +def valid_pos(d0=2, d1=10): + camera_pos = C.scene.camera.location + view_angle = C.scene.camera.rotation_euler[2] + tmp_ang = (C.scene.camera.data.angle / 2) * 0.9 + tmp_ang = np.random.rand() * 2 * tmp_ang - tmp_ang + tmp_ang += view_angle + tmp_dist = np.random.rand() * (d1 - d0) + d0 + root_pos = np.array([camera_pos[0], camera_pos[1]]) + v_dir = np.array([-np.sin(tmp_ang), np.cos(tmp_ang)]) + + return root_pos + tmp_dist * v_dir diff --git a/infinigen/assets/objects/trees/utils/materials.py b/infinigen/assets/objects/trees/utils/materials.py new file mode 100644 index 000000000..682a52e35 --- /dev/null +++ b/infinigen/assets/objects/trees/utils/materials.py @@ -0,0 +1,275 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
+ +# Authors: Alejandro Newell, Lingjie Mei + + +import colorsys + +import bpy +import numpy as np + +from infinigen.core.util.color import hsv2rgba + +from . import helper + +C = bpy.context +D = bpy.data + + +def get_materials(prefix=""): + return [m for m in D.materials if f"{prefix}Material" in m.name] + + +def new_material(prefix=""): + n_idx = len(get_materials(prefix)) + m = D.materials.new(f"{prefix}Material{n_idx:04d}") + m.use_nodes = True + + return m + + +def init_color_material( + color, + prefix="", + hsv_variance=[0, 0, 0], + roughness=0.8, + specular=0.05, + is_hsv=True, + is_emission=False, + emit_strength=1, +): + m = new_material(prefix) + nt = m.node_tree + color = np.array(color) + np.random.randn(3) * np.array(hsv_variance) + color = list(color.clip(0, 1)) + color = hsv2rgba(*color) + + if is_emission: + out_node = nt.nodes.get("Material Output") + nt.nodes.new("ShaderNodeEmission") + em = nt.nodes.get("Emission") + em.inputs.get("Strength").default_value = emit_strength + em.inputs.get("Color").default_value = color + new_link(nt, em, "Emission", out_node, "Surface") + + else: + bsdf_node = nt.nodes.get("Principled BSDF") + bsdf_node.inputs.get("Base Color").default_value = color + bsdf_node.inputs.get("Roughness").default_value = roughness + bsdf_node.inputs.get("Specular").default_value = specular + + return m + + +def assign_material(obj, m=None, prefix="", m_idx=0, slot_idx=0): + helper.set_active_obj(obj) + while len(obj.material_slots) < (slot_idx + 1): + bpy.ops.object.material_slot_add() + obj.active_material_index = slot_idx + + if m is not None: + obj.active_material = m + else: + obj.active_material = get_materials(prefix)[m_idx] + + +def uv_smart_project(obj): + helper.set_active_obj(obj) + bpy.ops.object.mode_set(mode="EDIT") + bpy.ops.mesh.select_all(action="SELECT") + bpy.ops.uv.smart_project() + bpy.ops.object.mode_set(mode="OBJECT") + + +def new_link(nt, node1, field1, node2, field2): + node_out = ( + node1.outputs[field1] if isinstance(field1, int) else node1.outputs.get(field1) + ) + node_inp = ( + node2.inputs[field2] if isinstance(field2, int) else node2.inputs.get(field2) + ) + nt.links.new(node_out, node_inp) + + +def create_leaf_material(src_hue, glow=False): + m = new_material("Leaf") + nt = m.node_tree + + if glow: + out_node = nt.nodes.get("Material Output") + nt.nodes.new("ShaderNodeEmission") + em = nt.nodes.get("Emission") + em.inputs.get("Strength").default_value = 1 + em.inputs.get("Color").default_value = ( + *colorsys.hsv_to_rgb(src_hue + np.random.randn() * 0.1, 1, 1), + 1, + ) + new_link(nt, em, "Emission", out_node, "Surface") + + else: + info_node = nt.nodes.new("ShaderNodeObjectInfo") + add_node = nt.nodes.new("ShaderNodeVectorMath") + mult_node = nt.nodes.new("ShaderNodeVectorMath") + add2_node = nt.nodes.new("ShaderNodeVectorMath") + noise_node = nt.nodes.new("ShaderNodeTexWhiteNoise") + sep_node = nt.nodes.new("ShaderNodeSeparateXYZ") + hsv_node = nt.nodes.new("ShaderNodeCombineHSV") + + sep_loc_node = nt.nodes.new("ShaderNodeSeparateXYZ") + loc_mult_node = nt.nodes.new("ShaderNodeMath") + loc_add_node = nt.nodes.new("ShaderNodeMath") + + bsdf_node = nt.nodes.get("Principled BSDF") + mult_node.operation = "MULTIPLY" + loc_mult_node.operation = "MULTIPLY" + + add_node.inputs[1].default_value += np.random.randn(3) + # mult_node.inputs[1].default_value = [.07,.2,.2] + # add2_node.inputs[1].default_value = [.22,.9,.1] + # loc_mult_node.inputs[1].default_value = 0 + mult_node.inputs[1].default_value = [0.05, 0.4, 0.4] + 
add2_node.inputs[1].default_value = [ + src_hue + np.random.randn() * 0.05, + 0.6, + 0.1, + ] + loc_mult_node.inputs[1].default_value = 0 # -.01 + # add2_node.inputs[1].default_value += np.random.randn(3) * .1 + + # Get HSV color (output of sep_node) + new_link(nt, info_node, "Random", add_node, 0) + new_link(nt, add_node, 0, noise_node, "Vector") + new_link(nt, noise_node, "Color", mult_node, 0) + new_link(nt, mult_node, 0, add2_node, 0) + new_link(nt, add2_node, 0, sep_node, 0) + + # Modify H based on Z + nt.links.new(info_node.outputs.get("Location"), sep_loc_node.inputs[0]) + nt.links.new(sep_loc_node.outputs.get("Z"), loc_mult_node.inputs[0]) + nt.links.new(loc_mult_node.outputs[0], loc_add_node.inputs[0]) + nt.links.new(sep_node.outputs[0], loc_add_node.inputs[1]) + + # Combine and assign color + nt.links.new(loc_add_node.outputs[0], hsv_node.inputs.get("H")) + nt.links.new(sep_node.outputs[1], hsv_node.inputs.get("S")) + nt.links.new(sep_node.outputs[2], hsv_node.inputs.get("V")) + nt.links.new(hsv_node.outputs[0], bsdf_node.inputs.get("Base Color")) + + +def get_tex_nodes(m): + """Returns Image Texture node, creates one if it doesn't exist.""" + nt = m.node_tree + m.cycles.displacement_method = "DISPLACEMENT" + + # Check whether the Image Texture node has been added + diff_img_node = nt.nodes.get("Image Texture") + rough_img_node = nt.nodes.get("Image Texture.001") + disp_img_node = nt.nodes.get("Image Texture.002") + + if diff_img_node is None: + # Create new node for linking images + nt.nodes.new("ShaderNodeTexImage") + nt.nodes.new("ShaderNodeTexImage") + nt.nodes.new("ShaderNodeTexImage") + nt.nodes.new("ShaderNodeMapRange") + diff_img_node = nt.nodes.get("Image Texture") + rough_img_node = nt.nodes.get("Image Texture.001") + rough_scaling_node = nt.nodes.get("Map Range") + disp_img_node = nt.nodes.get("Image Texture.002") + + # Link to main node + bsdf_node = nt.nodes.get("Principled BSDF") + nt.links.new( + diff_img_node.outputs.get("Color"), bsdf_node.inputs.get("Base Color") + ) + nt.links.new( + rough_img_node.outputs.get("Color"), rough_scaling_node.inputs.get("Value") + ) + nt.links.new( + rough_scaling_node.outputs.get("Result"), bsdf_node.inputs.get("Roughness") + ) + + # Set up nodes for mixing in color + disp_node = nt.nodes.new("ShaderNodeDisplacement") + disp_node.space = "WORLD" + disp_node.inputs.get("Scale").default_value = 0.05 + out_node = nt.nodes.get("Material Output") + nt.links.new(disp_img_node.outputs.get("Color"), disp_node.inputs.get("Height")) + nt.links.new( + disp_node.outputs.get("Displacement"), out_node.inputs.get("Displacement") + ) + + return diff_img_node, rough_img_node, disp_img_node + + +def setup_material(m, txt_paths, metal_prob=0.2, transm_prob=0.2, emit_prob=0): + """Initialize material given list of paths to diff, rough, disp images.""" + + # Load any images that haven't been loaded already + img_ref = [tpath.split("/")[-1] for tpath in txt_paths] + for img_idx, img in enumerate(img_ref): + if img not in D.images: + try: + D.images.load(txt_paths[img_idx]) + except FileNotFoundError: + pass + + # Initialize and update diff, rough, and disp shader nodes + txt_nodes = get_tex_nodes(m) + for n_idx, n in enumerate(txt_nodes): + try: + im = D.images.get(img_ref[n_idx]) + if n_idx > 0: + im.colorspace_settings.name = "Non-Color" + n.image = im + except KeyError: + pass + + nt = m.node_tree + bsdf = nt.nodes.get("Principled BSDF") + rough_scale = nt.nodes.get("Map Range") + + bsdf.inputs.get("Metallic").default_value = 0 + 
bsdf.inputs.get("Transmission").default_value = 0 + bsdf.inputs.get("IOR").default_value = 1.45 + rough_scale.inputs.get("To Max").default_value = 1 + + if np.random.rand() < metal_prob: + bsdf.inputs.get("Metallic").default_value = 1 + rough_scale.inputs.get("To Max").default_value = 0.5 + + elif np.random.rand() < transm_prob: + bsdf.inputs.get("Transmission").default_value = 1 + bsdf.inputs.get("IOR").default_value = 1.05 + np.random.rand() * 0.3 + rough_scale.inputs.get("To Max").default_value = 0.2 + + if np.random.rand() < emit_prob: + out_node = nt.nodes.get("Material Output") + + nt.nodes.new("ShaderNodeEmission") + nt.nodes.new("ShaderNodeTexNoise") + nt.nodes.new("ShaderNodeValToRGB") # ColorRamp + nt.nodes.new("ShaderNodeMixShader") + + em = nt.nodes.get("Emission") + em.inputs.get("Strength").default_value = 5 + em.inputs.get("Color").default_value = ( + *colorsys.hsv_to_rgb(np.random.rand(), 1, 1), + 1, + ) + + noise = nt.nodes.get("Noise Texture") + noise.inputs.get("Scale").default_value = np.random.uniform(1, 10) + noise.inputs.get("Distortion").default_value = np.random.uniform(3, 10) + + ramp = nt.nodes.get("ColorRamp") + ramp.color_ramp.elements[0].position = 0.4 + ramp.color_ramp.elements[1].position = 0.45 + new_link(nt, noise, "Color", ramp, "Fac") + + mix = nt.nodes.get("Mix Shader") + new_link(nt, ramp, "Color", mix, "Fac") + new_link(nt, bsdf, "BSDF", mix, "Shader") + new_link(nt, em, "Emission", mix, "Shader") + new_link(nt, mix, "Shader", out_node, "Surface") diff --git a/infinigen/assets/objects/trees/utils/mesh.py b/infinigen/assets/objects/trees/utils/mesh.py new file mode 100644 index 000000000..d4115961f --- /dev/null +++ b/infinigen/assets/objects/trees/utils/mesh.py @@ -0,0 +1,296 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Alejandro Newell + + +import bpy +import numpy as np +from mathutils import Vector + +from . 
import helper + +C = bpy.context +D = bpy.data + + +def init_mesh(name, verts=[], edges=[], faces=[], coll=None): + mesh = D.meshes.new(name) + obj = D.objects.new(mesh.name, mesh) + + if coll is None: + coll = bpy.context.scene.collection + else: + coll = D.collections[coll] + + coll.objects.link(obj) + helper.set_active_obj(obj) + + mesh.from_pydata(verts, edges, faces) + + return obj + + +def duplicate_obj(obj, name): + new_obj = obj.copy() + new_obj.name = name + new_obj.data = new_obj.data.copy() + + col = obj.users_collection[0] + col.objects.link(new_obj) + + helper.set_active_obj(new_obj) + return new_obj + + +def finalize_obj(obj): + helper.set_active_obj(obj) + bpy.ops.object.convert(target="MESH") + + +def init_vertex(pos): + bpy.ops.mesh.primitive_cube_add( + size=1, enter_editmode=True, align="WORLD", location=pos, scale=(1, 1, 1) + ) + bpy.ops.mesh.merge(type="COLLAPSE") + bpy.ops.object.editmode_toggle() + + return C.active_object + + +def get_all_vtx_pos(obj): + n_cds = len(obj.data.vertices) + all_cds = np.zeros(n_cds * 3) + obj.data.vertices.foreach_get("co", all_cds) + return all_cds.reshape(-1, 3) + + +def vtx2cds(vtxs, world_mat): + n_cds = len(vtxs) + all_cds = np.zeros(n_cds * 3) + vtxs.foreach_get("co", all_cds) + all_cds = all_cds.reshape(-1, 3) + all_cds = add_ones(all_cds.reshape(-1, 3)) + m_world = np.array(world_mat) + all_cds = np.matmul(m_world, all_cds.T).T[:, :3] + + return all_cds + + +def sample_vtxs(obj, emit_from="VOLUME", n=1000, seed=1): + # Make object current active object + bpy.ops.object.mode_set(mode="OBJECT") + C.view_layer.objects.active = obj + + # Add particle system modifier + bpy.ops.object.modifier_add(type="PARTICLE_SYSTEM") + p = D.particles[-1] + + # Adjust modifier settings + p.count = n + p.frame_end = 1 + p.emit_from = emit_from + p.distribution = "RAND" + p.use_modifier_stack = True + p.physics_type = "NO" + obj.particle_systems[-1].seed = seed + + # Get particle locations (relative to object) + obj_eval = obj.evaluated_get(C.evaluated_depsgraph_get()) + all_cds = np.zeros(n * 3) + obj_eval.particle_systems[-1].particles.foreach_get("location", all_cds) + + obj.modifiers.remove(obj.modifiers[-1]) + D.particles.remove(D.particles[-1]) + + return all_cds.reshape(-1, 3) + + +def get_pts_from_shape( + shape_fn, n=10, emit_from="VOLUME", loc=(0, 0, 0), scaling=1, pt_offset=0 +): + if isinstance(pt_offset, list): + pt_offset = np.array([pt_offset]) + if isinstance(scaling, list): + scaling = Vector(scaling) + shape_fn(location=loc) + obj = C.active_object + obj.scale *= scaling + pts = sample_vtxs(obj, n=n, emit_from=emit_from, seed=np.random.randint(100)) + pts += pt_offset + D.objects.remove(obj) + return pts + + +def select_vtx_by_pos(obj, pos): + bpy.ops.object.mode_set(mode="EDIT") + bpy.ops.mesh.select_mode(type="VERT") + bpy.ops.mesh.select_all(action="DESELECT") + bpy.ops.object.mode_set(mode="OBJECT") + n_cds = len(obj.data.vertices) + all_cds = np.zeros(n_cds * 3) + obj.data.vertices.foreach_get("co", all_cds) + idx = np.abs(all_cds.reshape(n_cds, 3) - pos).sum(1).argmin() + obj.data.vertices[idx].select = True + bpy.ops.object.mode_set(mode="EDIT") + + return idx + + +def select_vtx_by_idx(obj, idx, deselect=False): + if not isinstance(idx, list): + idx = [idx] + bpy.ops.object.mode_set(mode="EDIT") + bpy.ops.mesh.select_mode(type="VERT") + if deselect: + bpy.ops.mesh.select_all(action="DESELECT") + bpy.ops.object.mode_set(mode="OBJECT") + for i in idx: + obj.data.vertices[i].select = True + bpy.ops.object.mode_set(mode="EDIT") 
+ + return idx + + +def extrude_path(obj, path): + helper.set_active_obj(obj) + bpy.ops.object.mode_set(mode="EDIT") + src_idx = select_vtx_by_pos(obj, path[0]) + deltas = path[1:] - path[:-1] + start_idx = len(obj.data.vertices) + for i in range(len(deltas)): + bpy.ops.mesh.extrude_region_move(TRANSFORM_OT_translate={"value": deltas[i]}) + + return src_idx, start_idx + + +def get_vtx_obj(): + if "vtx" not in D.objects: + bpy.ops.object.mode_set(mode="OBJECT") + bpy.ops.mesh.primitive_cube_add( + size=2, + enter_editmode=False, + align="WORLD", + location=(0, 0, 0), + scale=(1, 1, 1), + ) + bpy.ops.object.editmode_toggle() + bpy.ops.mesh.merge(type="COLLAPSE") + bpy.ops.object.editmode_toggle() + obj = C.active_object + obj.name = "vtx" + + return D.objects["vtx"] + + +def subsample_vertices(v, max_num=500): + if len(v) > max_num: + rand_order = np.random.permutation(len(v)) + return np.sort(rand_order[:max_num]) + else: + return np.arange(len(v)) + + +def add_ones(x): + return np.concatenate([x, np.ones_like(x[:, :1])], 1) + + +def get_world_coords(obj, subset=None): + dgraph = C.evaluated_depsgraph_get() + obj_eval = obj.evaluated_get(dgraph) + + vts = obj_eval.data.vertices + all_cds = np.zeros(len(vts) * 3) + vts.foreach_get("co", all_cds) + all_cds = add_ones(all_cds.reshape(-1, 3)) + if subset is not None: + all_cds = all_cds[subset] + + m_world = np.array(obj_eval.matrix_world) + all_cds = np.matmul(m_world, all_cds.T).T[:, :3] + + return all_cds + + +def arr_world_to_camera_view(scene, obj, coord): + # Modified to support array operations from bpy_extras.object_utils.world_to_camera_view + cam_matrix = np.array(obj.matrix_world.normalized().inverted()) + co_local = np.matmul(cam_matrix, add_ones(coord).T).T[:, :3] + z = -co_local[:, 2] + + camera = obj.data + frame = [np.array(v) for v in camera.view_frame(scene=scene)[:3]] + if camera.type != "ORTHO": + frame = [(-v / v[2])[None, :] * z[:, None] for v in frame] + for i in range(len(frame)): + frame[i][z == 0][:, :2] = 0.5 + + min_x, max_x = frame[2][:, 0], frame[1][:, 0] + min_y, max_y = frame[1][:, 1], frame[0][:, 1] + + x = (co_local[:, 0] - min_x) / (max_x - min_x) + y = (co_local[:, 1] - min_y) / (max_y - min_y) + + return np.stack([x, y, z], 1) + + +def get_coords_clip(obj, f0, f1, subset=None): + all_cds = [] + for i in range(f0, f1): + C.scene.frame_set(i) + cds = get_world_coords(obj, subset) + all_cds += [cds] + + return np.stack(all_cds, 0) + + +def get_visible_vertices(cam, vertices, co2D=None, limit=0.02): + if co2D is None: + co2D = arr_world_to_camera_view(C.scene, cam, vertices) + + bpy.ops.mesh.primitive_cube_add() + bpy.ops.transform.resize(value=(0.01, 0.01, 0.01)) + cube = C.active_object + + in_frame = (co2D[:, 0] >= 0) & (co2D[:, 0] <= 1) + in_frame &= (co2D[:, 1] >= 0) & (co2D[:, 1] <= 1) + in_frame &= co2D[:, 2] > 0 + + is_visible = in_frame.copy() + + valid_idxs = np.arange(len(in_frame))[in_frame] + + for i in valid_idxs: + v = Vector(vertices[i]) + cube.location = v + depsgraph = C.evaluated_depsgraph_get() + + # Try a ray cast, in order to test the vertex visibility from the camera + location = C.scene.ray_cast( + depsgraph, cam.location, (v - cam.location).normalized() + ) + # If the ray hits something and if this hit is close to the vertex, we assume this is the vertex + if not (location[0] and (v - location[1]).length < limit): + is_visible[i] = False + + bpy.ops.object.select_all(action="DESELECT") + cube.select_set(True) + bpy.ops.object.delete(confirm=False) + + return co2D, is_visible, in_frame + 
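Taken together, the projection and ray-cast helpers above yield a per-vertex visibility mask: `arr_world_to_camera_view` maps world coordinates into normalized camera space, and `get_visible_vertices` keeps only the in-frame vertices whose ray cast from the camera lands near the vertex itself. A minimal usage sketch, separate from the patch content, assuming an active scene camera; the object name `"Tree"` is a placeholder:

```python
# Illustrative sketch only: chain the helpers defined above.
# "Tree" is a placeholder object name; an active scene camera is assumed.
import bpy

obj = bpy.data.objects["Tree"]
cam = bpy.context.scene.camera

verts = get_world_coords(obj)                                    # (N, 3) world-space vertices
co2d = arr_world_to_camera_view(bpy.context.scene, cam, verts)   # normalized x, y plus depth
co2d, is_visible, in_frame = get_visible_vertices(cam, verts, co2D=co2d)

print(f"{int(is_visible.sum())}/{len(verts)} vertices visible, "
      f"{int(in_frame.sum())} inside the camera frustum")
```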
+ +def sanity_check_viz(all_pts, is_visible, in_frame, frame_idx=0): + C.scene.frame_set(frame_idx) + for i in range(all_pts.shape[1]): + pt = all_pts[frame_idx, i] + vis = is_visible[frame_idx, i] + + bpy.ops.mesh.primitive_cube_add() + bpy.ops.transform.resize(value=(0.02, 0.02, 0.02)) + cube = C.active_object + cube.location = pt + bpy.ops.object.material_slot_add() + cube.material_slots[0].material = D.materials[2] if vis else D.materials[1] + if not in_frame[frame_idx, i]: + cube.material_slots[0].material = D.materials[0] diff --git a/infinigen/assets/tropic_plants/__init__.py b/infinigen/assets/objects/tropic_plants/__init__.py similarity index 100% rename from infinigen/assets/tropic_plants/__init__.py rename to infinigen/assets/objects/tropic_plants/__init__.py index 08d3ad05a..7d6fb2939 100644 --- a/infinigen/assets/tropic_plants/__init__.py +++ b/infinigen/assets/objects/tropic_plants/__init__.py @@ -1,5 +1,5 @@ +from .coconut_tree import CoconutTreeFactory from .leaf_banana_tree import LeafBananaTreeFactory, PlantBananaTreeFactory from .leaf_palm_plant import LeafPalmPlantFactory from .leaf_palm_tree import LeafPalmTreeFactory -from .coconut_tree import CoconutTreeFactory from .palm_tree import PalmTreeFactory diff --git a/infinigen/assets/objects/tropic_plants/coconut_tree.py b/infinigen/assets/objects/tropic_plants/coconut_tree.py new file mode 100644 index 000000000..22dfa36bd --- /dev/null +++ b/infinigen/assets/objects/tropic_plants/coconut_tree.py @@ -0,0 +1,1893 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Beining Han + + +import bpy +import numpy as np +from numpy.random import normal, randint, uniform + +from infinigen.assets.objects.fruits.coconutgreen import FruitFactoryCoconutgreen +from infinigen.assets.objects.tropic_plants.leaf_palm_tree import LeafPalmTreeFactory +from infinigen.core import surface +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.util import blender as butil +from infinigen.core.util.color import hsv2rgba + + +@node_utils.to_nodegroup( + "nodegroup_pedal_cross_contour_top", singleton=False, type="GeometryNodeTree" +) +def nodegroup_pedal_cross_contour_top(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + normal_2 = nw.new_node(Nodes.InputNormal) + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[("NodeSocketFloat", "Y", 0.0), ("NodeSocketFloat", "X", 0.0)], + ) + + combine_xyz_3 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": group_input.outputs["X"], "Y": group_input.outputs["Y"]}, + ) + + multiply = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: normal_2, 1: combine_xyz_3}, + attrs={"operation": "MULTIPLY"}, + ) + + index_1 = nw.new_node(Nodes.Index) + + greater_than = nw.new_node( + Nodes.Math, + input_kwargs={0: index_1, 1: 63.0}, + attrs={"operation": "GREATER_THAN"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Vector": multiply.outputs["Vector"], "Value": greater_than}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_pedal_cross_contour_bottom", singleton=False, type="GeometryNodeTree" +) +def nodegroup_pedal_cross_contour_bottom(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + normal = nw.new_node(Nodes.InputNormal) + + 
group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[("NodeSocketFloat", "Y", 0.0), ("NodeSocketFloat", "X", 0.0)], + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": group_input.outputs["X"], "Y": group_input.outputs["Y"]}, + ) + + multiply = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: normal, 1: combine_xyz}, + attrs={"operation": "MULTIPLY"}, + ) + + index = nw.new_node(Nodes.Index) + + less_than = nw.new_node( + Nodes.Math, input_kwargs={0: index, 1: 64.0}, attrs={"operation": "LESS_THAN"} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Vector": multiply.outputs["Vector"], "Value": less_than}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_trunk_radius_001", singleton=False, type="GeometryNodeTree" +) +def nodegroup_trunk_radius_001(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + random_value = nw.new_node(Nodes.RandomValue, input_kwargs={2: 0.01, 3: 0.05}) + + spline_parameter = nw.new_node(Nodes.SplineParameter) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": spline_parameter.outputs["Factor"], 3: 1.0, 4: 0.0}, + attrs={"clamp": False}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: spline_parameter.outputs["Factor"], 1: 10000.0}, + attrs={"operation": "MULTIPLY"}, + ) + + floor = nw.new_node( + Nodes.Math, input_kwargs={0: multiply}, attrs={"operation": "FLOOR"} + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply, 1: floor}, + attrs={"operation": "SUBTRACT"}, + ) + + float_curve = nw.new_node(Nodes.FloatCurve, input_kwargs={"Value": subtract}) + node_utils.assign_curve( + float_curve.mapping.curves[0], + [(0.0, 0.0156), (0.2545, 0.2), (0.5182, 0.0344), (0.7682, 0.2375), (1.0, 0.0)], + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: float_curve, 1: 1.0}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: map_range.outputs["Result"], 1: multiply_1}, + attrs={"operation": "MULTIPLY"}, + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: map_range.outputs["Result"], 1: multiply_2} + ) + + add_1 = nw.new_node(Nodes.Math, input_kwargs={0: random_value.outputs[1], 1: add}) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Value": add_1}) + + +@node_utils.to_nodegroup( + "nodegroup_coutour_cross_geometry", singleton=False, type="GeometryNodeTree" +) +def nodegroup_coutour_cross_geometry(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + curve_circle = nw.new_node( + Nodes.CurveCircle, input_kwargs={"Resolution": 128, "Radius": 0.05} + ) + + pedal_cross_coutour_x = nw.new_node(Nodes.Value, label="pedal_cross_coutour_x") + pedal_cross_coutour_x.outputs[0].default_value = 0.3 + + pedal_cross_contour_bottom = nw.new_node( + nodegroup_pedal_cross_contour_bottom().name, + input_kwargs={"X": pedal_cross_coutour_x}, + ) + + set_position_1 = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": curve_circle.outputs["Curve"], + "Selection": pedal_cross_contour_bottom.outputs["Value"], + "Offset": pedal_cross_contour_bottom.outputs["Vector"], + }, + ) + + pedal_cross_coutour_y = nw.new_node(Nodes.Value, label="pedal_cross_coutour_y") + pedal_cross_coutour_y.outputs[0].default_value = 0.3 + + pedal_cross_contour_top = nw.new_node( + nodegroup_pedal_cross_contour_top().name, + input_kwargs={"Y": pedal_cross_coutour_y, "X": pedal_cross_coutour_x}, + ) + + set_position_2 = nw.new_node( + Nodes.SetPosition, 
+ input_kwargs={ + "Geometry": set_position_1, + "Selection": pedal_cross_contour_top.outputs["Value"], + "Offset": pedal_cross_contour_top.outputs["Vector"], + }, + ) + + noise_texture_2 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={"W": 7.0, "Detail": 15.0}, + attrs={"noise_dimensions": "4D"}, + ) + + scale = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: noise_texture_2.outputs["Fac"], "Scale": 0.0}, + attrs={"operation": "SCALE"}, + ) + + set_position_5 = nw.new_node( + Nodes.SetPosition, + input_kwargs={"Geometry": set_position_2, "Offset": scale.outputs["Vector"]}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_position_5} + ) + + +@node_utils.to_nodegroup( + "nodegroup_pedal_z_contour", singleton=False, type="GeometryNodeTree" +) +def nodegroup_pedal_z_contour(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + spline_parameter = nw.new_node(Nodes.SplineParameter) + + float_curve = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": spline_parameter.outputs["Factor"]} + ) + node_utils.assign_curve( + float_curve.mapping.curves[0], + [ + (0.0, 0.4094), + (0.1773, 0.475), + (0.3795, 0.5062), + (0.5864, 0.5187), + (0.7202, 0.5084), + (0.8636, 0.4781), + (1.0, 0.375), + ], + ) + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketFloat", "Value", 0.5)] + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: float_curve, 1: group_input.outputs["Value"]}, + attrs={"operation": "MULTIPLY"}, + ) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Value": multiply}) + + +@node_utils.to_nodegroup( + "nodegroup_pedal_stem_curvature", singleton=False, type="GeometryNodeTree" +) +def nodegroup_pedal_stem_curvature(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + position_3 = nw.new_node(Nodes.InputPosition) + + spline_parameter_1 = nw.new_node(Nodes.SplineParameter) + + float_curve_1 = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": spline_parameter_1.outputs["Factor"]} + ) + node_utils.assign_curve( + float_curve_1.mapping.curves[0], + [(0.0, 0.0688), (0.2545, 0.2281), (0.5023, 0.2563), (0.9773, 0.2656)], + ) + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketFloat", "Value", 0.2)] + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: float_curve_1, 1: group_input.outputs["Value"]}, + attrs={"operation": "MULTIPLY"}, + ) + + vector_rotate = nw.new_node( + Nodes.VectorRotate, + input_kwargs={ + "Vector": position_3, + "Center": (0.0, 0.0, 0.2), + "Angle": multiply, + }, + attrs={"rotation_type": "X_AXIS"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Vector": vector_rotate} + ) + + +@node_utils.to_nodegroup( + "nodegroup_node_group_002", singleton=False, type="ShaderNodeTree" +) +def nodegroup_node_group_002(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + texture_coordinate = nw.new_node(Nodes.TextureCoord) + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketColor", "Color", (0.8, 0.8, 0.8, 1.0)), + ("NodeSocketFloat", "attribute", 0.0), + ("NodeSocketFloat", "voronoi scale", 50.0), + ("NodeSocketFloatFactor", "voronoi randomness", 1.0), + ("NodeSocketFloat", "seed", 0.0), + ("NodeSocketFloat", "noise scale", 10.0), + ("NodeSocketFloat", "noise amount", 1.4), + ("NodeSocketFloat", "hue min", 0.6), + ("NodeSocketFloat", "hue max", 1.085), + ], + ) + + add = nw.new_node( + Nodes.VectorMath, + 
input_kwargs={ + 0: texture_coordinate.outputs["Object"], + 1: group_input.outputs["seed"], + }, + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": add.outputs["Vector"], + "Scale": group_input.outputs["noise scale"], + "Detail": 1.0, + }, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: noise_texture.outputs["Fac"], + 1: group_input.outputs["noise amount"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + voronoi_texture = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={ + "W": group_input.outputs["attribute"], + "Scale": group_input.outputs["voronoi scale"], + "Randomness": group_input.outputs["voronoi randomness"], + }, + attrs={"voronoi_dimensions": "1D"}, + ) + + add_1 = nw.new_node( + Nodes.Math, input_kwargs={0: multiply, 1: voronoi_texture.outputs["Distance"]} + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": add_1, + 3: group_input.outputs["hue min"], + 4: group_input.outputs["hue max"], + }, + ) + + hue_saturation_value = nw.new_node( + "ShaderNodeHueSaturation", + input_kwargs={ + "Value": map_range.outputs["Result"], + "Color": group_input.outputs["Color"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Color": hue_saturation_value} + ) + + +@node_utils.to_nodegroup( + "nodegroup_coconutvein", singleton=False, type="GeometryNodeTree" +) +def nodegroup_coconutvein(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + index_2 = nw.new_node(Nodes.Index) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": index_2, 1: 400.0, 2: 0.0}, + attrs={"clamp": False}, + ) + + float_curve = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Factor": map_range.outputs["Result"]} + ) + node_utils.assign_curve( + float_curve.mapping.curves[0], + [(0.0, 0.0), (0.2455, 0.0), (0.5091, 0.0), (0.7636, 0.1625), (1.0, 0.4688)], + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={"Scale": 1.0}, + attrs={"noise_dimensions": "4D"}, + ) + + multiply = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: float_curve, 1: noise_texture.outputs["Color"]}, + attrs={"operation": "MULTIPLY"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Vector": multiply.outputs["Vector"]} + ) + + +@node_utils.to_nodegroup( + "nodegroup_tree_trunk_geometry_001", singleton=False, type="GeometryNodeTree" +) +def nodegroup_tree_trunk_geometry_001(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Curve", None)] + ) + + trunkradius_001 = nw.new_node(nodegroup_trunk_radius_001().name) + + set_curve_radius = nw.new_node( + Nodes.SetCurveRadius, + input_kwargs={"Curve": group_input.outputs["Curve"], "Radius": trunkradius_001}, + ) + + trunk_resolution = nw.new_node( + Nodes.Integer, label="TrunkResolution", attrs={"integer": 32} + ) + trunk_resolution.integer = 32 + + trunk_radius = nw.new_node(Nodes.Value, label="TrunkRadius") + trunk_radius.outputs[0].default_value = 0.02 + + curve_circle = nw.new_node( + Nodes.CurveCircle, + input_kwargs={"Resolution": trunk_resolution, "Radius": trunk_radius}, + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": set_curve_radius, + "Profile Curve": curve_circle.outputs["Curve"], + "Fill Caps": True, + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Mesh": curve_to_mesh, "Integer": trunk_resolution}, + ) + + 
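Each `@node_utils.to_nodegroup`-decorated function above registers a reusable geometry node group that other graphs instantiate by name with `nw.new_node(...)`, exactly as `nodegroup_trunk_radius_001` is used inside the trunk-geometry group. A minimal sketch, not part of the patch, of composing the trunk group onto a fresh curve; the curve endpoint and function name are placeholders, and `Nodes`/`NodeWrangler` are the imports already used in this file:

```python
# Illustrative sketch only: apply the trunk nodegroup defined above to a curve.
def example_trunk_geometry(nw: NodeWrangler):
    # Placeholder input curve; (0, 0, 5) is an arbitrary endpoint.
    curve_line = nw.new_node(Nodes.CurveLine, input_kwargs={"End": (0.0, 0.0, 5.0)})

    # Instantiate the registered nodegroup by name and feed it the curve.
    trunk = nw.new_node(
        nodegroup_tree_trunk_geometry_001().name,
        input_kwargs={"Curve": curve_line},
    )

    # Expose the generated trunk mesh as the graph output.
    nw.new_node(Nodes.GroupOutput, input_kwargs={"Geometry": trunk.outputs["Mesh"]})
```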
+@node_utils.to_nodegroup( + "nodegroup_truncated_leaf_selection", singleton=False, type="GeometryNodeTree" +) +def nodegroup_truncated_leaf_selection(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + index_3 = nw.new_node(Nodes.Index) + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketFloat", "Value", 0.5)] + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: 1600.0, 1: group_input.outputs["Value"]}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply, 1: uniform(0.92, 0.98)}, + attrs={"operation": "MULTIPLY"}, + ) + + greater_than = nw.new_node( + Nodes.Math, + input_kwargs={0: index_3, 1: multiply_1}, + attrs={"operation": "GREATER_THAN"}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply, 1: np.clip(normal(0.8, 0.1), 0.7, 0.9)}, + attrs={"operation": "MULTIPLY"}, + ) + + less_than = nw.new_node( + Nodes.Math, + input_kwargs={0: index_3, 1: multiply_2}, + attrs={"operation": "LESS_THAN"}, + ) + + op_or = nw.new_node( + Nodes.BooleanMath, + input_kwargs={0: greater_than, 1: less_than}, + attrs={"operation": "OR"}, + ) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Boolean": op_or}) + + +@node_utils.to_nodegroup( + "nodegroup_random_rotate", singleton=False, type="GeometryNodeTree" +) +def nodegroup_random_rotate(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + random_value_1 = nw.new_node(Nodes.RandomValue, input_kwargs={2: -0.2, 3: 0.2}) + + random_value_2 = nw.new_node( + Nodes.RandomValue, input_kwargs={2: -0.5, 3: 0.5, "Seed": 1} + ) + + random_value_3 = nw.new_node( + Nodes.RandomValue, input_kwargs={2: -0.2, 3: 0.2, "Seed": 3} + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": random_value_1.outputs[1], + "Y": random_value_2.outputs[1], + "Z": random_value_3.outputs[1], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Vector": combine_xyz_1} + ) + + +@node_utils.to_nodegroup( + "nodegroup_leaf_truncated_rotate", singleton=False, type="GeometryNodeTree" +) +def nodegroup_leaf_truncated_rotate(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + index_1 = nw.new_node(Nodes.Index) + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketFloat", "Value", 0.5)] + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["Value"], 1: 0.0} + ) + + modulo = nw.new_node( + Nodes.Math, input_kwargs={0: index_1, 1: add}, attrs={"operation": "MODULO"} + ) + + divide = nw.new_node( + Nodes.Math, input_kwargs={0: modulo, 1: add}, attrs={"operation": "DIVIDE"} + ) + + multiply = nw.new_node( + Nodes.Math, input_kwargs={0: divide, 1: 6.28}, attrs={"operation": "MULTIPLY"} + ) + + combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply}) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Vector": combine_xyz}) + + +@node_utils.to_nodegroup( + "nodegroup_truncated_leaf_stem", singleton=False, type="GeometryNodeTree" +) +def nodegroup_truncated_leaf_stem(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + curve_line = nw.new_node(Nodes.CurveLine, input_kwargs={"End": (0.0, 0.0, 0.15)}) + + integer = nw.new_node(Nodes.Integer, attrs={"integer": 64}) + integer.integer = 64 + + resample_curve_1 = nw.new_node( + Nodes.ResampleCurve, input_kwargs={"Curve": curve_line, "Count": integer} + ) + + 
pedal_stem_curvature_scale = nw.new_node( + Nodes.Value, label="pedal_stem_curvature_scale" + ) + pedal_stem_curvature_scale.outputs[0].default_value = 0.2 + + pedal_stem_curvature = nw.new_node( + nodegroup_pedal_stem_curvature().name, + input_kwargs={"Value": pedal_stem_curvature_scale}, + ) + + set_position_4 = nw.new_node( + Nodes.SetPosition, + input_kwargs={"Geometry": resample_curve_1, "Offset": pedal_stem_curvature}, + ) + + pedal_z_coutour_scale = nw.new_node(Nodes.Value, label="pedal_z_coutour_scale") + pedal_z_coutour_scale.outputs[0].default_value = uniform(0.2, 0.4) + + pedal_z_contour = nw.new_node( + nodegroup_pedal_z_contour().name, input_kwargs={"Value": pedal_z_coutour_scale} + ) + + set_curve_radius_1 = nw.new_node( + Nodes.SetCurveRadius, + input_kwargs={"Curve": set_position_4, "Radius": pedal_z_contour}, + ) + + coutour_cross_geometry = nw.new_node(nodegroup_coutour_cross_geometry().name) + + curve_to_mesh_1 = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": set_curve_radius_1, + "Profile Curve": coutour_cross_geometry, + "Fill Caps": True, + }, + ) + + set_material_2 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": curve_to_mesh_1, + "Material": surface.shaderfunc_to_material(shader_top_core), + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_material_2} + ) + + +@node_utils.to_nodegroup( + "nodegroup_trunk_radius", singleton=False, type="GeometryNodeTree" +) +def nodegroup_trunk_radius(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + random_value = nw.new_node(Nodes.RandomValue, input_kwargs={2: 0.01, 3: 0.05}) + + spline_parameter = nw.new_node(Nodes.SplineParameter) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": spline_parameter.outputs["Factor"], 3: 1.0, 4: 0.2}, + attrs={"clamp": False}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: spline_parameter.outputs["Factor"], 1: 10000.0}, + attrs={"operation": "MULTIPLY"}, + ) + + floor = nw.new_node( + Nodes.Math, input_kwargs={0: multiply}, attrs={"operation": "FLOOR"} + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply, 1: floor}, + attrs={"operation": "SUBTRACT"}, + ) + + float_curve = nw.new_node(Nodes.FloatCurve, input_kwargs={"Value": subtract}) + node_utils.assign_curve( + float_curve.mapping.curves[0], [(0.0, 0.0969), (0.5864, 0.1406), (1.0, 0.2906)] + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: float_curve, 1: uniform(0.1, 0.25)}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: map_range.outputs["Result"], 1: multiply_1}, + attrs={"operation": "MULTIPLY"}, + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: map_range.outputs["Result"], 1: multiply_2} + ) + + add_1 = nw.new_node(Nodes.Math, input_kwargs={0: random_value.outputs[1], 1: add}) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Value": add_1}) + + +@node_utils.to_nodegroup( + "nodegroup_tree_cracks", singleton=False, type="GeometryNodeTree" +) +def nodegroup_tree_cracks(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) + + spline_parameter = nw.new_node(Nodes.SplineParameter) + + capture_attribute = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + 2: spline_parameter.outputs["Length"], + 
}, + ) + + position = nw.new_node(Nodes.InputPosition) + + separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": position}) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: capture_attribute.outputs[2], 1: uniform(0.1, 0.25)}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": separate_xyz.outputs["X"], + "Y": separate_xyz.outputs["Y"], + "Z": multiply, + }, + ) + + voronoi_texture = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={"Vector": combine_xyz, "Scale": 400.0, "Randomness": 10.0}, + attrs={"voronoi_dimensions": "4D", "distance": "CHEBYCHEV"}, + ) + + colorramp = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": voronoi_texture.outputs["Distance"]} + ) + colorramp.color_ramp.elements[0].position = 0.6091 + colorramp.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) + colorramp.color_ramp.elements[1].position = 0.6818 + colorramp.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) + + normal = nw.new_node(Nodes.InputNormal) + + multiply_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: colorramp.outputs["Color"], 1: normal}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_2 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: multiply_1.outputs["Vector"], 1: (-0.01, -0.01, -0.01)}, + attrs={"operation": "MULTIPLY"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": capture_attribute.outputs["Geometry"], + "Vector": multiply_2.outputs["Vector"], + }, + ) + + +@node_utils.to_nodegroup( + "nodegroup_leaf_instance_selection_bottom_remove", + singleton=False, + type="GeometryNodeTree", +) +def nodegroup_leaf_instance_selection_bottom_remove(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + index_1 = nw.new_node(Nodes.Index) + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "Ring", 10.0), + ("NodeSocketFloat", "Segment", 0.5), + ], + ) + + divide = nw.new_node( + Nodes.Math, + input_kwargs={0: index_1, 1: group_input.outputs["Ring"]}, + attrs={"operation": "DIVIDE"}, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Segment"], 1: 4.0}, + attrs={"operation": "SUBTRACT"}, + ) + + greater_than = nw.new_node( + Nodes.Math, + input_kwargs={0: divide, 1: subtract}, + attrs={"operation": "GREATER_THAN"}, + ) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Value": greater_than}) + + +@node_utils.to_nodegroup( + "nodegroup_leaf_random_rotate", singleton=False, type="GeometryNodeTree" +) +def nodegroup_leaf_random_rotate(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + random_value_1 = nw.new_node(Nodes.RandomValue, input_kwargs={2: -0.2, 3: 0.2}) + + random_value_3 = nw.new_node(Nodes.RandomValue, input_kwargs={2: -0.2, 3: 0.2}) + + random_value_2 = nw.new_node(Nodes.RandomValue, input_kwargs={2: -0.2, 3: 0.2}) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": random_value_1.outputs[1], + "Y": random_value_3.outputs[1], + "Z": random_value_2.outputs[1], + }, + ) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Vector": combine_xyz}) + + +@node_utils.to_nodegroup( + "nodegroup_leaf_rotate_downward", singleton=False, type="GeometryNodeTree" +) +def nodegroup_leaf_rotate_downward(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + index = nw.new_node(Nodes.Index) + + group_input = nw.new_node( + Nodes.GroupInput, 
expose_input=[("NodeSocketFloat", "Value", 0.5)] + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["Value"], 1: 0.0} + ) + + modulo = nw.new_node( + Nodes.Math, input_kwargs={0: index, 1: add}, attrs={"operation": "MODULO"} + ) + + divide = nw.new_node( + Nodes.Math, input_kwargs={0: modulo, 1: add}, attrs={"operation": "DIVIDE"} + ) + + multiply = nw.new_node( + Nodes.Math, input_kwargs={0: divide, 1: 6.28}, attrs={"operation": "MULTIPLY"} + ) + + combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply}) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Vector": combine_xyz}) + + +def shader_coconut_green_shader(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + texture_coordinate_1 = nw.new_node(Nodes.TextureCoord) + + noise_texture_1 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": texture_coordinate_1.outputs["Object"], + "Scale": 1.0, + "Detail": 10.0, + "Roughness": 0.7, + }, + ) + + separate_rgb = nw.new_node( + Nodes.SeparateColor, input_kwargs={"Color": noise_texture_1.outputs["Color"]} + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": separate_rgb.outputs["Green"], + 1: 0.4, + 2: 0.7, + 3: 0.48, + 4: 0.52, + }, + attrs={"interpolation_type": "SMOOTHSTEP"}, + ) + + map_range_2 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": separate_rgb.outputs["Blue"], 1: 0.4, 2: 0.7, 3: 0.6}, + attrs={"interpolation_type": "SMOOTHSTEP"}, + ) + + attribute_1 = nw.new_node( + Nodes.Attribute, attrs={"attribute_name": "spline parameter"} + ) + + colorramp = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": attribute_1.outputs["Fac"]} + ) + colorramp.color_ramp.elements.new(0) + colorramp.color_ramp.elements[0].position = 0.0 + colorramp.color_ramp.elements[0].color = (0.0908, 0.2664, 0.013, 1.0) + colorramp.color_ramp.elements[1].position = 0.01 + colorramp.color_ramp.elements[1].color = (0.0908, 0.2664, 0.013, 1.0) + colorramp.color_ramp.elements[2].position = 1.0 + colorramp.color_ramp.elements[2].color = (0.2462, 0.4125, 0.0044, 1.0) + + hue_saturation_value_1 = nw.new_node( + "ShaderNodeHueSaturation", + input_kwargs={ + "Hue": map_range_1.outputs["Result"], + "Value": map_range_2.outputs["Result"], + "Color": colorramp.outputs["Color"], + }, + ) + + attribute_2 = nw.new_node( + Nodes.Attribute, attrs={"attribute_name": "cross section parameter"} + ) + + group = nw.new_node( + nodegroup_node_group_002().name, + input_kwargs={ + "Color": hue_saturation_value_1, + "attribute": attribute_2.outputs["Fac"], + "seed": 10.0, + }, + ) + + group_1 = nw.new_node( + nodegroup_node_group_002().name, + input_kwargs={ + "Color": group, + "attribute": attribute_1.outputs["Fac"], + "voronoi scale": 10.0, + "voronoi randomness": 0.6446, + "seed": -10.0, + "noise amount": 0.48, + "hue min": 1.32, + "hue max": 0.9, + }, + ) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={"Base Color": group_1, "Specular": 0.4773, "Roughness": 0.4455}, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf} + ) + + +@node_utils.to_nodegroup( + "nodegroup_coconut_vein_geometry", singleton=False, type="GeometryNodeTree" +) +def nodegroup_coconut_vein_geometry(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + quadratic_bezier = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Resolution": 400, + "Start": (0.0, 0.0, 0.0), + "Middle": (0.0, 0.2, 0.5), + "End": (0.0, 
0.0, 1.0), + }, + ) + + coconutvein = nw.new_node(nodegroup_coconutvein().name) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={"Geometry": quadratic_bezier, "Offset": coconutvein}, + ) + + treetrunkgeometry_001 = nw.new_node( + nodegroup_tree_trunk_geometry_001().name, input_kwargs={"Curve": set_position} + ) + + transform_1 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": treetrunkgeometry_001.outputs["Mesh"], + "Translation": (0.0, 0.0, -0.1), + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": transform_1} + ) + + +@node_utils.to_nodegroup( + "nodegroup_coconut_random_rotate", singleton=False, type="GeometryNodeTree" +) +def nodegroup_coconut_random_rotate(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + random_value_2 = nw.new_node(Nodes.RandomValue, input_kwargs={2: -0.2, 3: 0.2}) + + random_value_3 = nw.new_node(Nodes.RandomValue, input_kwargs={2: -0.2, 3: 0.2}) + + random_value_4 = nw.new_node(Nodes.RandomValue, input_kwargs={2: -0.2, 3: 0.2}) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": random_value_2.outputs[1], + "Y": random_value_3.outputs[1], + "Z": random_value_4.outputs[1], + }, + ) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Vector": combine_xyz}) + + +@node_utils.to_nodegroup( + "nodegroup_truncated_stem_geometry", singleton=False, type="GeometryNodeTree" +) +def nodegroup_truncated_stem_geometry(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Points", None), + ("NodeSocketFloat", "Value1", 0.5), + ("NodeSocketFloat", "Value2", 0.5), + ], + ) + + truncated_leaf_stem = nw.new_node(nodegroup_truncated_leaf_stem().name) + + normal_1 = nw.new_node(Nodes.InputNormal) + + align_euler_to_vector_1 = nw.new_node( + Nodes.AlignEulerToVector, input_kwargs={"Vector": normal_1}, attrs={"axis": "Z"} + ) + + instance_on_points_2 = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={ + "Points": group_input.outputs["Points"], + "Instance": truncated_leaf_stem, + "Rotation": align_euler_to_vector_1, + }, + ) + + leaf_truncated_rotate = nw.new_node( + nodegroup_leaf_truncated_rotate().name, + input_kwargs={"Value": group_input.outputs[2]}, + ) + + rotate_instances_2 = nw.new_node( + Nodes.RotateInstances, + input_kwargs={ + "Instances": instance_on_points_2, + "Rotation": leaf_truncated_rotate, + }, + ) + + rotate_instances_3 = nw.new_node( + Nodes.RotateInstances, + input_kwargs={ + "Instances": rotate_instances_2, + "Rotation": (-0.9599, 0.0, 1.5708), + }, + ) + + random_rotate = nw.new_node(nodegroup_random_rotate().name) + + rotate_instances_4 = nw.new_node( + Nodes.RotateInstances, + input_kwargs={"Instances": rotate_instances_3, "Rotation": random_rotate}, + ) + + random_value_5 = nw.new_node(Nodes.RandomValue, input_kwargs={2: 0.6}) + + scale_instances_4 = nw.new_node( + Nodes.ScaleInstances, + input_kwargs={ + "Instances": rotate_instances_4, + "Scale": random_value_5.outputs[1], + }, + ) + + index_2 = nw.new_node(Nodes.Index) + + modulo = nw.new_node( + Nodes.Math, + input_kwargs={0: index_2, 1: randint(8, 12)}, + attrs={"operation": "MODULO"}, + ) + + scale_instances_3 = nw.new_node( + Nodes.ScaleInstances, + input_kwargs={ + "Instances": scale_instances_4, + "Selection": modulo, + "Scale": (0.0, 0.0, 0.0), + }, + ) + + truncated_leaf_selection = nw.new_node( + 
nodegroup_truncated_leaf_selection().name, + input_kwargs={"Value": group_input.outputs["Value1"]}, + ) + + scale_instances_5 = nw.new_node( + Nodes.ScaleInstances, + input_kwargs={ + "Instances": scale_instances_3, + "Selection": truncated_leaf_selection, + "Scale": (0.0, 0.0, 0.0), + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Instances": scale_instances_5} + ) + + +@node_utils.to_nodegroup( + "nodegroup_tree_trunk_geometry", singleton=False, type="GeometryNodeTree" +) +def nodegroup_tree_trunk_geometry(nw: NodeWrangler, radius): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Curve", None)] + ) + + trunkradius = nw.new_node(nodegroup_trunk_radius().name) + + set_curve_radius = nw.new_node( + Nodes.SetCurveRadius, + input_kwargs={"Curve": group_input.outputs["Curve"], "Radius": trunkradius}, + ) + + treecracks = nw.new_node( + nodegroup_tree_cracks().name, input_kwargs={"Geometry": set_curve_radius} + ) + + trunk_resolution = nw.new_node( + Nodes.Integer, label="TrunkResolution", attrs={"integer": 32} + ) + trunk_resolution.integer = 32 + + trunk_radius = nw.new_node(Nodes.Value, label="TrunkRadius") + trunk_radius.outputs[0].default_value = radius + + curve_circle = nw.new_node( + Nodes.CurveCircle, + input_kwargs={"Resolution": trunk_resolution, "Radius": trunk_radius}, + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": treecracks.outputs["Geometry"], + "Profile Curve": curve_circle.outputs["Curve"], + "Fill Caps": True, + }, + ) + + subdivide_mesh = nw.new_node( + Nodes.SubdivideMesh, input_kwargs={"Mesh": curve_to_mesh, "Level": 5} + ) + + set_position_1 = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": subdivide_mesh, + "Offset": treecracks.outputs["Vector"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": set_position_1, + "Integer": trunk_resolution, + "Mesh": curve_to_mesh, + }, + ) + + +@node_utils.to_nodegroup( + "nodegroup_leaf_on_top", singleton=False, type="GeometryNodeTree" +) +def nodegroup_leaf_on_top(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Points", None), + ("NodeSocketFloat", "Value", 0.5), + ("NodeSocketFloat", "Ring", 10.0), + ("NodeSocketFloat", "Segment", 0.5), + ("NodeSocketGeometry", "Instance", None), + ], + ) + + normal = nw.new_node(Nodes.InputNormal) + + align_euler_to_vector = nw.new_node( + Nodes.AlignEulerToVector, input_kwargs={"Vector": normal}, attrs={"axis": "Z"} + ) + + instance_on_points_1 = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={ + "Points": group_input.outputs["Points"], + "Instance": group_input.outputs["Instance"], + "Rotation": align_euler_to_vector, + }, + ) + + leafrotatedownward = nw.new_node( + nodegroup_leaf_rotate_downward().name, + input_kwargs={"Value": group_input.outputs["Value"]}, + ) + + rotate_instances = nw.new_node( + Nodes.RotateInstances, + input_kwargs={ + "Instances": instance_on_points_1, + "Rotation": leafrotatedownward, + }, + ) + + leafrandomrotate = nw.new_node(nodegroup_leaf_random_rotate().name) + + rotate_instances_1 = nw.new_node( + Nodes.RotateInstances, + input_kwargs={"Instances": rotate_instances, "Rotation": leafrandomrotate}, + ) + + random_value_4 = nw.new_node(Nodes.RandomValue, input_kwargs={2: 0.9, 3: 1.2}) + + scale_instances_2 = 
nw.new_node( + Nodes.ScaleInstances, + input_kwargs={ + "Instances": rotate_instances_1, + "Scale": random_value_4.outputs[1], + }, + ) + + leafinstanceselectionbottomremove = nw.new_node( + nodegroup_leaf_instance_selection_bottom_remove().name, + input_kwargs={ + "Ring": group_input.outputs["Ring"], + "Segment": group_input.outputs["Segment"], + }, + ) + + scale_instances = nw.new_node( + Nodes.ScaleInstances, + input_kwargs={ + "Instances": scale_instances_2, + "Selection": leafinstanceselectionbottomremove, + "Scale": (0.0, 0.0, 0.0), + }, + ) + + random_value = nw.new_node( + Nodes.RandomValue, input_kwargs={5: 1}, attrs={"data_type": "INT"} + ) + + scale_instances_1 = nw.new_node( + Nodes.ScaleInstances, + input_kwargs={ + "Instances": scale_instances, + "Selection": random_value.outputs[2], + "Scale": (0.0, 0.0, 0.0), + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Instances": scale_instances_1} + ) + + +@node_utils.to_nodegroup( + "nodegroup_coconut_instance_on_points", singleton=False, type="GeometryNodeTree" +) +def nodegroup_coconut_instance_on_points(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + index_1 = nw.new_node(Nodes.Index) + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "Ring", 0.5), + ("NodeSocketFloat", "Segment", 0.5), + ], + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["Segment"], 1: 0.0} + ) + + divide = nw.new_node( + Nodes.Math, input_kwargs={0: index_1, 1: add}, attrs={"operation": "DIVIDE"} + ) + + add_1 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["Ring"], 1: 0.0} + ) + + subtract = nw.new_node( + Nodes.Math, input_kwargs={0: add_1, 1: 4.0}, attrs={"operation": "SUBTRACT"} + ) + + greater_than = nw.new_node( + Nodes.Math, + input_kwargs={0: divide, 1: subtract}, + attrs={"operation": "GREATER_THAN"}, + ) + + subtract_1 = nw.new_node( + Nodes.Math, input_kwargs={0: add_1, 1: 2.0}, attrs={"operation": "SUBTRACT"} + ) + + less_than = nw.new_node( + Nodes.Math, + input_kwargs={0: divide, 1: subtract_1}, + attrs={"operation": "LESS_THAN"}, + ) + + op_and = nw.new_node( + Nodes.BooleanMath, input_kwargs={0: greater_than, 1: less_than} + ) + + op_not = nw.new_node( + Nodes.BooleanMath, input_kwargs={0: op_and}, attrs={"operation": "NOT"} + ) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Boolean": op_not}) + + +@node_utils.to_nodegroup( + "nodegroup_coconut_group", singleton=False, type="GeometryNodeTree" +) +def nodegroup_coconut_group(nw: NodeWrangler, coconut): + # Code generated using version 2.4.3 of the node_transpiler + + uv_sphere_1 = nw.new_node( + Nodes.MeshUVSphere, input_kwargs={"Segments": 8, "Rings": 6, "Radius": 0.15} + ) + + object_info_2 = nw.new_node(Nodes.ObjectInfo, input_kwargs={"Object": coconut}) + + transform_2 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": object_info_2.outputs["Geometry"], + "Translation": (0.0, 0.0, -1.2), + }, + ) + + normal_1 = nw.new_node(Nodes.InputNormal) + + align_euler_to_vector_1 = nw.new_node( + Nodes.AlignEulerToVector, input_kwargs={"Vector": normal_1}, attrs={"axis": "Z"} + ) + + instance_on_points_3 = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={ + "Points": uv_sphere_1, + "Instance": transform_2, + "Rotation": align_euler_to_vector_1, + "Scale": (-1.0, -1.0, -1.0), + }, + ) + + coconut_random_rotate = nw.new_node(nodegroup_coconut_random_rotate().name) + + rotate_instances = nw.new_node( + 
Nodes.RotateInstances, + input_kwargs={ + "Instances": instance_on_points_3, + "Rotation": coconut_random_rotate, + }, + ) + + random_value_2 = nw.new_node(Nodes.RandomValue, input_kwargs={2: 0.15, 3: 0.4}) + + scale_instances_6 = nw.new_node( + Nodes.ScaleInstances, + input_kwargs={ + "Instances": rotate_instances, + "Scale": random_value_2.outputs[1], + }, + ) + + index = nw.new_node(Nodes.Index) + + less_than = nw.new_node( + Nodes.Math, input_kwargs={0: index, 1: 20.0}, attrs={"operation": "LESS_THAN"} + ) + + scale_instances_2 = nw.new_node( + Nodes.ScaleInstances, + input_kwargs={ + "Instances": scale_instances_6, + "Selection": less_than, + "Scale": (0.0, 0.0, 0.0), + }, + ) + + random_value_1 = nw.new_node( + Nodes.RandomValue, input_kwargs={5: 2, "Seed": 2}, attrs={"data_type": "INT"} + ) + + scale_instances_4 = nw.new_node( + Nodes.ScaleInstances, + input_kwargs={ + "Instances": scale_instances_2, + "Selection": random_value_1.outputs[2], + "Scale": (0.0, 0.0, 0.0), + }, + ) + + coconut_vein_geometry = nw.new_node(nodegroup_coconut_vein_geometry().name) + + normal_2 = nw.new_node(Nodes.InputNormal) + + align_euler_to_vector_2 = nw.new_node( + Nodes.AlignEulerToVector, input_kwargs={"Vector": normal_2}, attrs={"axis": "Z"} + ) + + instance_on_points_2 = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={ + "Points": uv_sphere_1, + "Instance": coconut_vein_geometry, + "Rotation": align_euler_to_vector_2, + }, + ) + + index_2 = nw.new_node(Nodes.Index) + + less_than_1 = nw.new_node( + Nodes.Math, input_kwargs={0: index_2, 1: 30.0}, attrs={"operation": "LESS_THAN"} + ) + + scale_instances_3 = nw.new_node( + Nodes.ScaleInstances, + input_kwargs={ + "Instances": instance_on_points_2, + "Selection": less_than_1, + "Scale": (0.0, 0.0, 0.0), + }, + ) + + random_value_5 = nw.new_node( + Nodes.RandomValue, input_kwargs={5: 1, "Seed": 4}, attrs={"data_type": "INT"} + ) + + scale_instances_5 = nw.new_node( + Nodes.ScaleInstances, + input_kwargs={ + "Instances": scale_instances_3, + "Selection": random_value_5.outputs[2], + "Scale": (0.0, 0.0, 0.0), + }, + ) + + set_material_2 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": scale_instances_5, + "Material": surface.shaderfunc_to_material(shader_coconut_green_shader), + }, + ) + + join_geometry_2 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [scale_instances_4, set_material_2]}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": join_geometry_2} + ) + + +def shader_top_core(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + texture_coordinate = nw.new_node(Nodes.TextureCoord) + + mapping = nw.new_node( + Nodes.Mapping, + input_kwargs={ + "Vector": texture_coordinate.outputs["Object"], + "Scale": (1.0, 1.0, 0.1), + }, + ) + + voronoi_texture = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={"Vector": mapping, "Scale": uniform(100, 400)}, + ) + + mapping_1 = nw.new_node( + Nodes.Mapping, input_kwargs={"Vector": texture_coordinate.outputs["Object"]} + ) + + wave_texture = nw.new_node( + Nodes.WaveTexture, + input_kwargs={ + "Vector": mapping_1, + "Scale": 2.0, + "Distortion": 5.0, + "Detail": 10.0, + }, + ) + + mix = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": 0.4, + "Color1": voronoi_texture.outputs["Distance"], + "Color2": wave_texture.outputs["Color"], + }, + ) + + d_hsv = (uniform(0.02, 0.05), uniform(0.3, 0.6), uniform(0.01, 0.05)) + b_hsv = d_hsv[:1] + (uniform(0.6, 0.9), uniform(0.3, 0.6)) + colorramp = 
nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": mix}) + colorramp.color_ramp.elements[0].position = 0.2409 + colorramp.color_ramp.elements[0].color = hsv2rgba(d_hsv) + colorramp.color_ramp.elements[1].position = 0.6045 + colorramp.color_ramp.elements[1].color = hsv2rgba(b_hsv) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": colorramp.outputs["Color"], + "Roughness": colorramp.outputs["Alpha"], + }, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf} + ) + + +def shader_trunk(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + texture_coordinate = nw.new_node(Nodes.TextureCoord) + + mapping = nw.new_node( + Nodes.Mapping, input_kwargs={"Vector": texture_coordinate.outputs["Object"]} + ) + + voronoi_texture_1 = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={"Vector": mapping, "Scale": 20.0}, + attrs={"voronoi_dimensions": "4D"}, + ) + + wave_texture = nw.new_node( + Nodes.WaveTexture, + input_kwargs={ + "Vector": mapping, + "Scale": uniform(1.0, 3.0), + "Distortion": 5.0, + "Detail Scale": 3.0, + }, + attrs={"bands_direction": "Z"}, + ) + + mix_1 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Color1": voronoi_texture_1.outputs["Distance"], + "Color2": wave_texture.outputs["Color"], + }, + ) + + d_hsv = ( + uniform(0.02, 0.05), + uniform(0.01, 0.05) if randint(0, 2) == 1 else uniform(0.5, 0.8), + uniform(0.03, 0.09), + ) + b_hsv = d_hsv[:-1] + (uniform(0.1, 0.3),) + colorramp = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": mix_1}) + colorramp.color_ramp.elements[0].position = 0.4682 + colorramp.color_ramp.elements[0].color = hsv2rgba(d_hsv) + colorramp.color_ramp.elements[1].position = 0.5591 + colorramp.color_ramp.elements[1].color = hsv2rgba(b_hsv) + + mapping_1 = nw.new_node( + Nodes.Mapping, + input_kwargs={ + "Vector": texture_coordinate.outputs["Object"], + "Scale": (10.0, 10.0, 0.2), + }, + ) + + voronoi_texture = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={"Vector": mapping_1, "Scale": 100.0, "Randomness": 10.0}, + attrs={"voronoi_dimensions": "4D", "distance": "CHEBYCHEV"}, + ) + + colorramp_1 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": voronoi_texture.outputs["Distance"]} + ) + colorramp_1.color_ramp.elements[0].position = 0.2818 + colorramp_1.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) + colorramp_1.color_ramp.elements[1].position = 0.3045 + colorramp_1.color_ramp.elements[1].color = (0.5284, 0.5034, 0.4327, 1.0) + + mix = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": uniform(0.1, 0.3), + "Color1": colorramp.outputs["Color"], + "Color2": colorramp_1.outputs["Color"], + }, + ) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": mix, + "Roughness": voronoi_texture.outputs["Distance"], + }, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf} + ) + + +def geometry_coconut_tree_nodes(nw: NodeWrangler, **kwargs): + # Code generated using version 2.4.3 of the node_transpiler + + leaf = kwargs["leaf"][0] + coconut = kwargs["coconut"][0] + radius = kwargs["trunk_radius"] + + trunk_height = nw.new_node(Nodes.Value, label="trunk_height") + trunk_height.outputs[0].default_value = 5.0 + + top_x, top_y = np.random.normal(0.0, 1.0), np.random.normal(0.0, 1.0) + combine_xyz_2 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": top_x, "Y": top_y, "Z": trunk_height} + ) + + quadratic_bezier = nw.new_node( + 
Nodes.QuadraticBezier, + input_kwargs={ + "Start": (0.0, 0.0, 0.0), + "Middle": ( + top_x / uniform(1.0, 2.0), + top_y / uniform(1.0, 2.0), + uniform(1.5, 3.0), + ), + "End": combine_xyz_2, + }, + ) + + resample_curve = nw.new_node( + Nodes.ResampleCurve, + input_kwargs={"Curve": quadratic_bezier, "Length": 0.02}, #'Count': 20000 + attrs={"mode": "LENGTH"}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, input_kwargs={"Geometry": resample_curve} + ) + + endpoint_selection = nw.new_node( + "GeometryNodeCurveEndpointSelection", input_kwargs={"Start Size": 0} + ) + + top_segment = nw.new_node(Nodes.Integer, label="TopSegment", attrs={"integer": 12}) + top_segment.integer = randint(8, 14) + + top_ring = nw.new_node(Nodes.Integer, label="TopRing", attrs={"integer": 8}) + top_ring.integer = randint(8, 11) + + uv_sphere = nw.new_node( + Nodes.MeshUVSphere, + input_kwargs={ + "Segments": top_segment, + "Rings": top_ring, + "Radius": uniform(0.15, 0.2), + }, + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": uv_sphere, "Scale": (1.0, 1.0, uniform(0.8, 2.0))}, + ) + + set_material_1 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": transform, + "Material": surface.shaderfunc_to_material(shader_top_core), + }, + ) + + coconut_group = nw.new_node(nodegroup_coconut_group(coconut=coconut).name) + + transform_1 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": coconut_group, "Scale": (-1.0, -1.0, -1.0)}, + ) + + normal = nw.new_node(Nodes.InputNormal) + + align_euler_to_vector = nw.new_node( + Nodes.AlignEulerToVector, input_kwargs={"Vector": normal}, attrs={"axis": "Z"} + ) + + value = nw.new_node(Nodes.Value) + value.outputs[0].default_value = 0.2 + + instance_on_points_1 = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={ + "Points": transform, + "Instance": transform_1, + "Rotation": align_euler_to_vector, + "Scale": value, + }, + ) + + random_value = nw.new_node( + Nodes.RandomValue, input_kwargs={5: randint(1, 3)}, attrs={"data_type": "INT"} + ) + + scale_instances = nw.new_node( + Nodes.ScaleInstances, + input_kwargs={ + "Instances": instance_on_points_1, + "Selection": random_value.outputs[2], + "Scale": (0.0, 0.0, 0.0), + }, + ) + + coconut_instance_on_points = nw.new_node( + nodegroup_coconut_instance_on_points().name, + input_kwargs={"Ring": top_ring, "Segment": top_segment}, + ) + + scale_instances_1 = nw.new_node( + Nodes.ScaleInstances, + input_kwargs={ + "Instances": scale_instances, + "Selection": coconut_instance_on_points, + "Scale": (0.0, 0.0, 0.0), + }, + ) + + object_info = nw.new_node(Nodes.ObjectInfo, input_kwargs={"Object": leaf}) + + leafontop = nw.new_node( + nodegroup_leaf_on_top().name, + input_kwargs={ + "Points": transform, + "Value": top_segment, + "Ring": top_segment, + "Segment": top_ring, + "Instance": object_info.outputs["Geometry"], + }, + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [set_material_1, scale_instances_1, leafontop]}, + ) + + instance_on_points = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={ + "Points": set_position, + "Selection": endpoint_selection, + "Instance": join_geometry_1, + }, + ) + + treetrunkgeometry = nw.new_node( + nodegroup_tree_trunk_geometry(radius=radius).name, + input_kwargs={"Curve": set_position}, + ) + + set_material = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": treetrunkgeometry.outputs["Geometry"], + "Material": surface.shaderfunc_to_material(shader_trunk), + }, + ) + + 
truncatedstemgeometry = nw.new_node( + nodegroup_truncated_stem_geometry().name, + input_kwargs={ + "Points": treetrunkgeometry.outputs["Mesh"], + 1: trunk_height, + 2: treetrunkgeometry.outputs["Integer"], + }, + ) + + geos = [instance_on_points, set_material] + if uniform(0.0, 1.0) < 0.3: + geos.append(truncatedstemgeometry) + join_geometry = nw.new_node(Nodes.JoinGeometry, input_kwargs={"Geometry": geos}) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": join_geometry} + ) + + +class CoconutTreeFactory(AssetFactory): + def __init__(self, factory_seed, coarse=False): + super(CoconutTreeFactory, self).__init__(factory_seed, coarse=coarse) + + def create_asset(self, params={}, **kwargs): + bpy.ops.mesh.primitive_plane_add( + size=1, + enter_editmode=False, + align="WORLD", + location=(0, 0, 0), + scale=(1, 1, 1), + ) + obj = bpy.context.active_object + + # Make the Leaf and Delete It Later + lf_seed = randint(0, 1000, size=(1,))[0] + leaf_model = LeafPalmTreeFactory(factory_seed=lf_seed) + p = {"leaf_x_curvature": uniform(0.3, 0.8)} + leaf = leaf_model.create_asset(p) + params["leaf"] = [leaf] + + co_seed = randint(0, 1000, size=(1,))[0] + coconut_model = FruitFactoryCoconutgreen(factory_seed=co_seed) + coconut = coconut_model.create_asset() + params["coconut"] = [coconut] + params["trunk_radius"] = uniform(0.2, 0.3) + + surface.add_geomod( + obj, + geometry_coconut_tree_nodes, + selection=None, + attributes=[], + input_kwargs=params, + ) + butil.delete([leaf, coconut]) + with butil.SelectObjects(obj): + bpy.ops.object.material_slot_remove() + bpy.ops.object.shade_flat() + + return obj + + +if __name__ == "__main__": + model = CoconutTreeFactory(0) + model.create_asset() diff --git a/infinigen/assets/objects/tropic_plants/leaf_banana_tree.py b/infinigen/assets/objects/tropic_plants/leaf_banana_tree.py new file mode 100644 index 000000000..30f0f397c --- /dev/null +++ b/infinigen/assets/objects/tropic_plants/leaf_banana_tree.py @@ -0,0 +1,1014 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
+ +# Authors: Beining Han + + +import bpy +import numpy as np +from numpy.random import normal, uniform + +from infinigen.assets.objects.tropic_plants.tropic_plant_utils import ( + nodegroup_nodegroup_leaf_gen, + nodegroup_nodegroup_leaf_rotate_x, + nodegroup_nodegroup_leaf_shader, + nodegroup_nodegroup_move_to_origin, + nodegroup_nodegroup_sub_vein, + shader_stem_material, +) +from infinigen.core import surface +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.tagging import tag_object +from infinigen.core.util.color import hsv2rgba + + +@node_utils.to_nodegroup( + "nodegroup_nodegroup_apply_wave", singleton=False, type="GeometryNodeTree" +) +def nodegroup_nodegroup_apply_wave( + nw: NodeWrangler, + leaf_h_wave_control_points, + leaf_w_wave_control_points, + leaf_edge_wave_control_points, +): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketFloat", "Wave Scale Y", 1.0), + ("NodeSocketFloat", "Wave Scale X", 1.0), + ("NodeSocketFloat", "X Modulated", 0.0), + ("NodeSocketFloat", "Width Scale", 0.0), + ("NodeSocketFloat", "Wave Scale E", 1.0), + ], + ) + + position = nw.new_node(Nodes.InputPosition) + + separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": position}) + + map_range_6 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": separate_xyz.outputs["Y"], 1: -0.6, 2: 0.6}, + ) + + float_curve_3 = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": map_range_6.outputs["Result"]} + ) + node_utils.assign_curve( + float_curve_3.mapping.curves[0], + [ + (0.0, 0.5), + (0.1, leaf_edge_wave_control_points[0] + 0.5), + (0.2, leaf_edge_wave_control_points[1] + 0.5), + (0.3, leaf_edge_wave_control_points[2] + 0.5), + (0.4, leaf_edge_wave_control_points[3] + 0.5), + (0.5, leaf_edge_wave_control_points[4] + 0.5), + (0.6, leaf_edge_wave_control_points[5] + 0.5), + (0.7, leaf_edge_wave_control_points[6] + 0.5), + (0.8, leaf_edge_wave_control_points[7] + 0.5), + (0.9, leaf_edge_wave_control_points[8] + 0.5), + (1.0, 0.5), + ], + ) + + map_range_7 = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": float_curve_3, 3: -1.0} + ) + + absolute = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["X"]}, + attrs={"operation": "ABSOLUTE"}, + ) + + map_range_4 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": absolute, 2: group_input.outputs["Width Scale"]}, + ) + + colorramp = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": map_range_4.outputs["Result"]} + ) + colorramp.color_ramp.elements[0].position = 0.015 + colorramp.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) + colorramp.color_ramp.elements[1].position = uniform(0.3, 0.5) + colorramp.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: map_range_7.outputs["Result"], 1: colorramp.outputs["Color"]}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply, 1: group_input.outputs["Wave Scale E"]}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply_1}) + + set_position_2 = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + "Offset": combine_xyz_3, + }, + ) + + position_1 = 
nw.new_node(Nodes.InputPosition) + + separate_xyz_1 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": position_1}) + + attribute_statistic = nw.new_node( + Nodes.AttributeStatistic, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + 2: separate_xyz_1.outputs["Y"], + }, + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": separate_xyz.outputs["Y"], + 1: attribute_statistic.outputs["Min"], + 2: attribute_statistic.outputs["Max"], + }, + ) + + float_curve = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": map_range.outputs["Result"]} + ) + node_utils.assign_curve( + float_curve.mapping.curves[0], + [ + (0.0, 0.5), + (0.2, leaf_h_wave_control_points[0] + 0.5), + (0.4, leaf_h_wave_control_points[1] + 0.5), + (0.6, leaf_h_wave_control_points[2] + 0.5), + (0.8, leaf_h_wave_control_points[3] + 0.5), + (1.0, leaf_h_wave_control_points[4] + 0.5), + ], + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": float_curve, 3: -1.0} + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: map_range_1.outputs["Result"], + 1: group_input.outputs["Wave Scale Y"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply_2}) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={"Geometry": set_position_2, "Offset": combine_xyz}, + ) + + attribute_statistic_1 = nw.new_node( + Nodes.AttributeStatistic, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + 2: group_input.outputs["X Modulated"], + }, + ) + + map_range_2 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": group_input.outputs["X Modulated"], + 1: attribute_statistic_1.outputs["Min"], + 2: attribute_statistic_1.outputs["Max"], + }, + ) + + float_curve_1 = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": map_range_2.outputs["Result"]} + ) + node_utils.assign_curve( + float_curve_1.mapping.curves[0], + [ + (0.0, leaf_w_wave_control_points[0] + 0.5 + normal(0.0, 0.02)), + (0.1, leaf_w_wave_control_points[1] + 0.5 + normal(0.0, 0.02)), + (0.25, leaf_w_wave_control_points[2] + 0.5 + normal(0.0, 0.02)), + (0.4, leaf_w_wave_control_points[3] + 0.5 + normal(0.0, 0.02)), + (0.5, 0.5), + (0.6, leaf_w_wave_control_points[3] + 0.5 + normal(0.0, 0.02)), + (0.75, leaf_w_wave_control_points[2] + 0.5 + normal(0.0, 0.02)), + (0.9, leaf_w_wave_control_points[1] + 0.5 + normal(0.0, 0.02)), + (1.0, leaf_w_wave_control_points[0] + 0.5 + normal(0.0, 0.02)), + ], + handles=[ + "AUTO", + "AUTO", + "AUTO", + "AUTO", + "VECTOR", + "AUTO", + "AUTO", + "AUTO", + "AUTO", + ], + ) + + map_range_3 = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": float_curve_1, 3: -1.0} + ) + + multiply_3 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: map_range_3.outputs["Result"], + 1: group_input.outputs["Wave Scale X"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply_3}) + + set_position_1 = nw.new_node( + Nodes.SetPosition, + input_kwargs={"Geometry": set_position, "Offset": combine_xyz_1}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_position_1} + ) + + +def shader_leaf_material(nw: NodeWrangler, stem_color_hsv): + # Code generated using version 2.4.3 of the node_transpiler + + attribute = nw.new_node(Nodes.Attribute, attrs={"attribute_name": "vein"}) + + texture_coordinate = nw.new_node(Nodes.TextureCoord) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ 
+ "Vector": texture_coordinate.outputs["Object"], + "Scale": 6.8, + "Detail": 10.0, + "Roughness": 0.7, + }, + ) + + separate_rgb = nw.new_node( + Nodes.SeparateRGB, input_kwargs={"Image": noise_texture.outputs["Color"]} + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": separate_rgb.outputs["G"], + 1: 0.4, + 2: 0.7, + 3: 0.48, + 4: 0.52, + }, + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": separate_rgb.outputs["B"], + 1: 0.4, + 2: 0.7, + 3: 0.8, + 4: 1.2, + }, + ) + + attribute_1 = nw.new_node( + Nodes.Attribute, attrs={"attribute_name": "subvein offset"} + ) + + map_range_2 = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": attribute_1.outputs["Color"], 2: -0.94} + ) + + main_leaf_hsv = (uniform(0.26, 0.37), uniform(0.8, 1.0), uniform(0.15, 0.55)) + hue_saturation_value = nw.new_node( + "ShaderNodeHueSaturation", + input_kwargs={"Value": 2.0, "Color": hsv2rgba(main_leaf_hsv)}, + ) + + main_leaf_hsv_2 = (main_leaf_hsv[0] + normal(0.0, 0.02),) + main_leaf_hsv[1:] + mix = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": map_range_2.outputs["Result"], + "Color1": hue_saturation_value, + "Color2": hsv2rgba(main_leaf_hsv_2), + }, + ) + + hue_saturation_value_1 = nw.new_node( + "ShaderNodeHueSaturation", + input_kwargs={ + "Hue": map_range.outputs["Result"], + "Value": map_range_1.outputs["Result"], + "Color": mix, + }, + ) + + mix_1 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": attribute.outputs["Color"], + "Color1": hsv2rgba(stem_color_hsv), + "Color2": hue_saturation_value_1, + }, + ) + + group = nw.new_node( + nodegroup_nodegroup_leaf_shader().name, input_kwargs={"Color": mix_1} + ) + + material_output = nw.new_node(Nodes.MaterialOutput, input_kwargs={"Surface": group}) + + +@node_utils.to_nodegroup( + "nodegroup_round_tropical_leaf", singleton=False, type="GeometryNodeTree" +) +def nodegroup_round_tropical_leaf( + nw: NodeWrangler, + jigsaw_depth, + leaf_h_wave_control_points, + leaf_w_wave_control_points, + leaf_edge_wave_control_points, + leaf_contour_control_points, +): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "To Max", -0.4), + ("NodeSocketGeometry", "Mesh", None), + ("NodeSocketFloat", "Wave Scale Y", 0.3), + ("NodeSocketFloat", "Wave Scale X", 0.5), + ("NodeSocketFloat", "Wave Scale E", 0.5), + ("NodeSocketFloat", "Leaf Width Scale", 0.0), + ], + ) + + subdivide_mesh = nw.new_node( + Nodes.SubdivideMesh, + input_kwargs={"Mesh": group_input.outputs["Mesh"], "Level": 10}, + ) + + subdivide_mesh_1 = nw.new_node( + Nodes.SubdivideMesh, input_kwargs={"Mesh": subdivide_mesh} + ) + + position = nw.new_node(Nodes.InputPosition) + + capture_attribute = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={"Geometry": subdivide_mesh_1, 1: position}, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + nodegroup_leaf_gen = nw.new_node( + nodegroup_nodegroup_leaf_gen(leaf_contour_control_points).name, + input_kwargs={ + "Mesh": capture_attribute.outputs["Geometry"], + "Displancement scale": 0.0, + "Vein Asymmetry": 0.3023, + "Vein Density": 0.0, + "Jigsaw Scale": uniform(5.0, 20.0), + "Jigsaw Depth": jigsaw_depth, + "Vein Angle": 0.3, + "Wave Displacement": 0.0, + "Midrib Length": 0.333, + "Stem Length": 0.6, + "Midrib Width": uniform(0.8, 1.4), + "Leaf Width Scale": group_input.outputs["Leaf Width Scale"], + }, + ) + + nodegroup_sub_vein = nw.new_node( + nodegroup_nodegroup_sub_vein().name, + 
input_kwargs={"X": 0.0, "Y": nodegroup_leaf_gen.outputs["Vein Coord"]}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: nodegroup_sub_vein.outputs["Value"], 1: 0.0005}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply}) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": nodegroup_leaf_gen.outputs["Mesh"], + "Offset": combine_xyz, + }, + ) + + capture_attribute_1 = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={ + "Geometry": set_position, + 2: nodegroup_sub_vein.outputs["Color Value"], + }, + ) + + capture_attribute_2 = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={ + "Geometry": capture_attribute_1.outputs["Geometry"], + 2: nodegroup_leaf_gen.outputs["Vein Value"], + }, + ) + + nodegroup_apply_wave = nw.new_node( + nodegroup_nodegroup_apply_wave( + leaf_h_wave_control_points, + leaf_w_wave_control_points, + leaf_edge_wave_control_points, + ).name, + input_kwargs={ + "Geometry": capture_attribute_2.outputs["Geometry"], + "Wave Scale Y": group_input.outputs["Wave Scale Y"], + "Wave Scale X": group_input.outputs["Wave Scale X"], + "X Modulated": nodegroup_leaf_gen.outputs["X Modulated"], + "Wave Scale E": group_input.outputs["Wave Scale E"], + }, + ) + + nodegroup_move_to_origin = nw.new_node( + nodegroup_nodegroup_move_to_origin().name, + input_kwargs={"Geometry": nodegroup_apply_wave}, + ) + + nodegroup_leaf_rotate_x = nw.new_node( + nodegroup_nodegroup_leaf_rotate_x().name, + input_kwargs={ + "Geometry": nodegroup_move_to_origin, + "To Max": group_input.outputs["To Max"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Attribute": nodegroup_leaf_gen.outputs["Attribute"], + "Coordinate": capture_attribute.outputs["Attribute"], + "subvein": capture_attribute_1.outputs[2], + "vein": capture_attribute_2.outputs[2], + "Geometry": nodegroup_leaf_rotate_x, + }, + ) + + +@node_utils.to_nodegroup( + "nodegroup_leaf_on_stem", singleton=False, type="GeometryNodeTree" +) +def nodegroup_leaf_on_stem(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Points", None), + ("NodeSocketGeometry", "Instance", None), + ("NodeSocketVectorXYZ", "Scale", (1.0, 1.0, 1.0)), + ], + ) + + endpoint_selection = nw.new_node( + "GeometryNodeCurveEndpointSelection", input_kwargs={"End Size": 0} + ) + + curve_tangent = nw.new_node(Nodes.CurveTangent) + + align_euler_to_vector = nw.new_node( + Nodes.AlignEulerToVector, + input_kwargs={"Vector": curve_tangent}, + attrs={"axis": "Z"}, + ) + + instance_on_points_1 = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={ + "Points": group_input.outputs["Points"], + "Selection": endpoint_selection, + "Instance": group_input.outputs["Instance"], + "Rotation": align_euler_to_vector, + "Scale": group_input.outputs["Scale"], + }, + ) + + rotate_instances = nw.new_node( + Nodes.RotateInstances, + input_kwargs={ + "Instances": instance_on_points_1, + "Rotation": (-1.5708, 0.0, 0.0), + }, + ) + + realize_instances_1 = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": rotate_instances} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": realize_instances_1} + ) + + +@node_utils.to_nodegroup( + "nodegroup_stem_curvature", singleton=False, type="GeometryNodeTree" +) +def nodegroup_stem_curvature(nw: NodeWrangler): + # Code generated using version 2.4.3 of 
the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Curve", None), + ("NodeSocketFloat", "To Min1", 0.2), + ("NodeSocketFloat", "To Min2", -0.2), + ], + ) + + resample_curve = nw.new_node( + Nodes.ResampleCurve, + input_kwargs={"Curve": group_input.outputs["Curve"], "Count": 100}, + ) + + position_2 = nw.new_node(Nodes.InputPosition) + + spline_parameter_1 = nw.new_node(Nodes.SplineParameter) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": spline_parameter_1.outputs["Factor"], + 3: group_input.outputs["To Min1"], + 4: 0.0, + }, + ) + + vector_rotate = nw.new_node( + Nodes.VectorRotate, + input_kwargs={ + "Vector": position_2, + "Center": (0.0, 0.0, 2.0), + "Angle": map_range_1.outputs["Result"], + }, + attrs={"rotation_type": "Y_AXIS"}, + ) + + set_position_1 = nw.new_node( + Nodes.SetPosition, + input_kwargs={"Geometry": resample_curve, "Position": vector_rotate}, + ) + + position_1 = nw.new_node(Nodes.InputPosition) + + spline_parameter_2 = nw.new_node(Nodes.SplineParameter) + + map_range_2 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": spline_parameter_2.outputs["Factor"], + 3: group_input.outputs[2], + 4: 0.0, + }, + ) + + vector_rotate_1 = nw.new_node( + Nodes.VectorRotate, + input_kwargs={"Vector": position_1, "Angle": map_range_2.outputs["Result"]}, + attrs={"rotation_type": "X_AXIS"}, + ) + + set_position_2 = nw.new_node( + Nodes.SetPosition, + input_kwargs={"Geometry": set_position_1, "Position": vector_rotate_1}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_position_2} + ) + + +@node_utils.to_nodegroup( + "nodegroup_stem_geometry", singleton=False, type="GeometryNodeTree" +) +def nodegroup_stem_geometry(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Curve", None)] + ) + + spline_parameter = nw.new_node(Nodes.SplineParameter) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": spline_parameter.outputs["Factor"], 3: 0.4, 4: 0.8}, + attrs={"interpolation_type": "SMOOTHSTEP"}, + ) + + set_curve_radius = nw.new_node( + Nodes.SetCurveRadius, + input_kwargs={ + "Curve": group_input.outputs["Curve"], + "Radius": map_range.outputs["Result"], + }, + ) + + curve_circle = nw.new_node(Nodes.CurveCircle, input_kwargs={"Radius": 0.02}) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": set_curve_radius, + "Profile Curve": curve_circle.outputs["Curve"], + "Fill Caps": True, + }, + ) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Mesh": curve_to_mesh}) + + +def geometry_leaf_nodes(nw: NodeWrangler, **kwargs): + leaf_x_curvature = nw.new_node(Nodes.Value, label="leaf_x_curvature") + leaf_x_curvature.outputs[0].default_value = -kwargs["leaf_x_curvature"] + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) + + wave_x_scale = nw.new_node(Nodes.Value, label="wave_x_scale") + wave_x_scale.outputs[0].default_value = kwargs["leaf_h_wave_scale"] + + wave_y_scale = nw.new_node(Nodes.Value, label="wave_y_scale") + wave_y_scale.outputs[0].default_value = kwargs["leaf_w_wave_scale"] + + wave_e_scale = nw.new_node(Nodes.Value, label="wave_e_scale") + wave_e_scale.outputs[0].default_value = kwargs["leaf_edge_wave_scale"] + + leaf_width_scale = nw.new_node(Nodes.Value, label="leaf_width_scale") + 
leaf_width_scale.outputs[0].default_value = kwargs["leaf_width"] + + leaf_h_wave_control_points = kwargs["leaf_h_wave_control_points"] + leaf_w_wave_control_points = kwargs["leaf_w_wave_control_points"] + leaf_edge_wave_control_points = kwargs["leaf_edge_wave_control_points"] + leaf_contour_control_points = kwargs["leaf_contour_control_points"] + leaf_jigsaw_depth = kwargs["leaf_jigsaw_depth"] + + round_tropical_leaf = nw.new_node( + nodegroup_round_tropical_leaf( + leaf_jigsaw_depth, + leaf_h_wave_control_points, + leaf_w_wave_control_points, + leaf_edge_wave_control_points, + leaf_contour_control_points, + ).name, + input_kwargs={ + "To Max": leaf_x_curvature, + "Mesh": group_input.outputs["Geometry"], + "Wave Scale Y": wave_x_scale, + "Wave Scale X": wave_y_scale, + "Leaf Width Scale": leaf_width_scale, + "Wave Scale E": wave_e_scale, + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": round_tropical_leaf.outputs["Geometry"], + "Attribute": round_tropical_leaf.outputs["Attribute"], + "Coordinate": round_tropical_leaf.outputs["Coordinate"], + "subvein offset": round_tropical_leaf.outputs["subvein"], + "vein": round_tropical_leaf.outputs["vein"], + }, + ) + + +def geometry_plant_nodes(nw: NodeWrangler, **kwargs): + # Code generated using version 2.4.3 of the node_transpiler + + curve_line_1 = nw.new_node( + Nodes.CurveLine, input_kwargs={"Start": (0.0, 0.0, 2.0), "End": (0.0, 0.0, 0.0)} + ) + + stem_y_curvature = nw.new_node(Nodes.Value, label="stem_y_curvature") + stem_y_curvature.outputs[0].default_value = uniform(-0.5, 0.5) + + stem_x_curvature = nw.new_node(Nodes.Value, label="stem_x_curvature") + stem_x_curvature.outputs[0].default_value = -kwargs["leaf_x_curvature"] + + stem_curvature = nw.new_node( + nodegroup_stem_curvature().name, + input_kwargs={"Curve": curve_line_1, 1: stem_y_curvature, 2: stem_x_curvature}, + ) + + stem_geometry = nw.new_node( + nodegroup_stem_geometry().name, input_kwargs={"Curve": stem_curvature} + ) + + set_material = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": stem_geometry, + "Material": surface.shaderfunc_to_material( + lambda x: shader_stem_material( + x, stem_color_hsv=kwargs["stem_color_hsv"] + ) + ), + }, + ) + + leaf_x_curvature = nw.new_node(Nodes.Value, label="leaf_x_curvature") + leaf_x_curvature.outputs[0].default_value = -kwargs["leaf_x_curvature"] + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) + + wave_x_scale = nw.new_node(Nodes.Value, label="wave_x_scale") + wave_x_scale.outputs[0].default_value = kwargs["leaf_h_wave_scale"] + + wave_y_scale = nw.new_node(Nodes.Value, label="wave_y_scale") + wave_y_scale.outputs[0].default_value = kwargs["leaf_w_wave_scale"] + + wave_e_scale = nw.new_node(Nodes.Value, label="wave_edge_scale") + wave_e_scale.outputs[0].default_value = kwargs["leaf_edge_wave_scale"] + + leaf_width_scale = nw.new_node(Nodes.Value, label="leaf_width_scale") + leaf_width_scale.outputs[0].default_value = kwargs["leaf_width"] + + leaf_h_wave_control_points = kwargs["leaf_h_wave_control_points"] + leaf_w_wave_control_points = kwargs["leaf_w_wave_control_points"] + leaf_edge_wave_control_points = kwargs["leaf_edge_wave_control_points"] + leaf_contour_control_points = kwargs["leaf_contour_control_points"] + leaf_jigsaw_depth = kwargs["leaf_jigsaw_depth"] + + round_tropical_leaf = nw.new_node( + nodegroup_round_tropical_leaf( + leaf_jigsaw_depth, + leaf_h_wave_control_points, + leaf_w_wave_control_points, + 
leaf_edge_wave_control_points, +            leaf_contour_control_points, +        ).name, +        input_kwargs={ +            "To Max": leaf_x_curvature, +            "Mesh": group_input.outputs["Geometry"], +            "Wave Scale Y": wave_x_scale, +            "Wave Scale X": wave_y_scale, +            "Leaf Width Scale": leaf_width_scale, +            "Wave Scale E": wave_e_scale, +        }, +    ) + +    leaf_scale = nw.new_node(Nodes.Value, label="leaf_scale") +    leaf_scale.outputs[0].default_value = normal(1.0, 0.3) + +    leaf_on_stem = nw.new_node( +        nodegroup_leaf_on_stem().name, +        input_kwargs={ +            "Points": stem_curvature, +            "Instance": round_tropical_leaf.outputs["Geometry"], +            "Scale": leaf_scale, +        }, +    ) + +    join_geometry = nw.new_node( +        Nodes.JoinGeometry, input_kwargs={"Geometry": [set_material, leaf_on_stem]} +    ) + +    transform = nw.new_node( +        Nodes.Transform, +        input_kwargs={ +            "Geometry": join_geometry, +            "Translation": kwargs["plant_translation"], +            "Rotation": (0.0, 0.0, kwargs["plant_z_rotate"]), +            "Scale": kwargs["plant_scale"], +        }, +    ) + +    group_output = nw.new_node( +        Nodes.GroupOutput, +        input_kwargs={ +            "Geometry": transform, +            "Attribute": round_tropical_leaf.outputs["Attribute"], +            "Coordinate": round_tropical_leaf.outputs["Coordinate"], +            "subvein offset": round_tropical_leaf.outputs["subvein"], +            "vein": round_tropical_leaf.outputs["vein"], +        }, +    ) + + +class LeafBananaTreeFactory(AssetFactory): +    def __init__(self, factory_seed, coarse=False): +        super(LeafBananaTreeFactory, self).__init__(factory_seed, coarse=coarse) + +    def get_leaf_contour(self, mode): +        if mode == "oval": +            return [0.13, 0.275, 0.35, 0.365, 0.32, 0.21] +        elif mode == "pear": +            return [0.30, 0.46, 0.46, 0.43, 0.37, 0.23] +        else: +            raise NotImplementedError + +    def get_h_wave_contour(self, mode): +        if mode == "flat": +            return [normal(0.0, 0.03) for _ in range(6)] +        elif mode == "s": +            return [ +                -0.1 + normal(0.0, 0.02), +                0.0 + normal(0.0, 0.02), +                0.08 + normal(0.0, 0.02), +                0.0 + normal(0.0, 0.02), +                -0.05 + normal(0.0, 0.01), +            ] +        elif mode == "w": +            return [ +                -0.08 + normal(0.0, 0.02), +                0.07 + normal(0.0, 0.02), +                -0.08 + normal(0.0, 0.02), +                0.08 + normal(0.0, 0.02), +                -0.05 + normal(0, 0.02), +            ] +        else: +            raise NotImplementedError + +    def get_w_wave_contour(self, mode): +        if mode == "fold": +            return [ +                -0.28 + normal(0.0, 0.02), +                -0.2 + normal(0.0, 0.02), +                -0.13 + normal(0.0, 0.01), +                -0.06 + normal(0.0, 0.01), +            ], uniform(0.1, 0.3) +        elif mode == "wing": +            return [ +                0.0 + normal(0.0, 0.02), +                0.06 + normal(0.0, 0.02), +                0.07 + normal(0.0, 0.01), +                0.04 + normal(0.0, 0.01), +            ], uniform(0.0, 0.3) +        else: +            raise NotImplementedError + +    def get_e_wave_contour(self, mode): +        if mode == "wavy": +            return [ +                -0.06 + normal(0.0, 0.01), +                0.06 + normal(0.0, 0.01), +                -0.06 + normal(0.0, 0.01), +                0.06 + normal(0.0, 0.01), +                -0.06 + normal(0.0, 0.01), +                0.06 + normal(0.0, 0.01), +                -0.06 + normal(0.0, 0.01), +                0.06 + normal(0.0, 0.01), +                -0.06 + normal(0.0, 0.01), +            ], 10 +        elif mode == "flat": +            return [0.0 for _ in range(9)], 0.0 +        else: +            raise NotImplementedError + +    def update_params(self, **params): +        if params.get("leaf_h_wave_control_points", None) is None: +            mode = np.random.choice(["flat", "w", "s"], p=[0.4, 0.3, 0.3]) +            params["leaf_h_wave_control_points"] = self.get_h_wave_contour(mode) + +        if params.get("leaf_w_wave_control_points", None) is None: +            mode = np.random.choice(["fold", "wing"], p=[0.2, 0.8]) +            params["leaf_w_wave_control_points"], params["leaf_w_wave_scale"] = ( +                self.get_w_wave_contour(mode) +            ) + +        if params.get("leaf_edge_wave_control_points", None) is 
None: + mode = np.random.choice(["wavy", "flat"], p=[1.0, 0.0]) # 0.6, 0.4 + params["leaf_edge_wave_control_points"], params["leaf_edge_wave_scale"] = ( + self.get_e_wave_contour(mode) + ) + + if params.get("leaf_contour_control_points", None) is None: + mode = np.random.choice(["oval", "pear"], p=[0.5, 0.5]) + params["leaf_contour_control_points"] = self.get_leaf_contour(mode) + + if params.get("leaf_jigsaw_depth", None) is None: + mode = np.random.choice([0, 1], p=[0.4, 0.6]) + params["leaf_jigsaw_depth"] = mode * uniform(0.8, 1.7) + + if params.get("leaf_width", None) is None: + params["leaf_width"] = uniform(0.5, 0.85) + + if params.get("leaf_h_wave_scale", None) is None: + params["leaf_h_wave_scale"] = uniform(0.02, 0.2) + + if params.get("leaf_w_wave_scale", None) is None: + params["leaf_w_wave_scale"] = uniform(0.05, 0.25) + + if params.get("leaf_x_curvature", None) is None: + params["leaf_x_curvature"] = uniform(0.0, 0.1) + + if params.get("stem_color_hsv", None) is None: + params["stem_color_hsv"] = ( + uniform(0.25, 0.32), + uniform(0.8, 1.0), + uniform(0.8, 1.0), + ) + + return params + + def create_asset(self, **params): + bpy.ops.mesh.primitive_plane_add( + size=2, + enter_editmode=False, + align="WORLD", + location=(0, 0, 0), + scale=(1, 1, 1), + ) + obj = bpy.context.active_object + + params = self.update_params(**params) + surface.add_geomod( + obj, + geometry_leaf_nodes, + apply=True, + attributes=["Attribute", "Coordinate", "subvein offset", "vein"], + input_kwargs=params, + ) + surface.add_material( + obj, + lambda x: shader_leaf_material(x, stem_color_hsv=params["stem_color_hsv"]), + selection=None, + ) + + tag_object(obj, "leaf_banana_tree") + return obj + + +class PlantBananaTreeFactory(AssetFactory): + def __init__(self, factory_seed, coarse=False): + super(PlantBananaTreeFactory, self).__init__(factory_seed, coarse=coarse) + self.leaf_tropical_factory = LeafBananaTreeFactory(factory_seed) + + def update_params(self, **params): + params = self.leaf_tropical_factory.update_params(**params) + # Add new params update + if params.get("plant_translation", None) is None: + params["plant_translation"] = (0.0, 0.0, 0.0) + if params.get("plant_z_rotate", None) is None: + params["plant_z_rotate"] = uniform(-0.4, 0.4) + if params.get("plant_scale", None) is None: + s = uniform(0.8, 1.5) + params["plant_scale"] = (s, s, s) + return params + + def create_asset(self, **params): + bpy.ops.mesh.primitive_plane_add( + size=2, + enter_editmode=False, + align="WORLD", + location=(0, 0, 0), + scale=(1, 1, 1), + ) + obj = bpy.context.active_object + + params = self.update_params(**params) + surface.add_geomod( + obj, + geometry_plant_nodes, + apply=True, + attributes=["Attribute", "Coordinate", "subvein offset", "vein"], + input_kwargs=params, + ) + surface.add_material( + obj, + lambda x: shader_leaf_material(x, stem_color_hsv=params["stem_color_hsv"]), + selection=None, + ) + + tag_object(obj, "leaf_banana_tree") + return obj + + +if __name__ == "__main__": + fac = LeafBananaTreeFactory(0) + fac.create_asset() diff --git a/infinigen/assets/objects/tropic_plants/leaf_palm_plant.py b/infinigen/assets/objects/tropic_plants/leaf_palm_plant.py new file mode 100644 index 000000000..78cb49f17 --- /dev/null +++ b/infinigen/assets/objects/tropic_plants/leaf_palm_plant.py @@ -0,0 +1,860 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
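For reference before moving on to `leaf_palm_plant.py`: the two factories completed just above (`LeafBananaTreeFactory` and `PlantBananaTreeFactory`) follow the `AssetFactory` pattern used throughout this diff, where `update_params` fills any parameter the caller leaves unset with a random draw and `create_asset` builds a plane, applies the geometry-node modifier, and assigns the leaf shader. The sketch below is illustrative only and not part of the committed files; it mirrors the banana-tree file's own `__main__` block, assumes the module lives at `infinigen/assets/objects/tropic_plants/leaf_banana_tree.py` alongside the other files in this diff, and assumes it runs inside Blender's Python environment with infinigen installed.

```python
# Illustrative usage sketch -- not part of this commit.
# Assumed module path: infinigen/assets/objects/tropic_plants/leaf_banana_tree.py
from infinigen.assets.objects.tropic_plants.leaf_banana_tree import (
    LeafBananaTreeFactory,
    PlantBananaTreeFactory,
)

# A single leaf: every unspecified parameter is drawn at random in update_params,
# and the resulting mesh is tagged "leaf_banana_tree".
leaf = LeafBananaTreeFactory(factory_seed=0).create_asset()

# A full plant: keyword arguments override the corresponding random draws.
plant = PlantBananaTreeFactory(factory_seed=1).create_asset(
    plant_z_rotate=0.0,
    plant_scale=(1.2, 1.2, 1.2),
)
```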
+ +# Authors: Beining Han + + +import bpy +import numpy as np +from numpy.random import normal, randint, uniform + +from infinigen.assets.objects.tropic_plants.tropic_plant_utils import ( + hsv2rgba, + nodegroup_nodegroup_leaf_gen, + nodegroup_nodegroup_leaf_rotate_x, + nodegroup_nodegroup_leaf_shader, + nodegroup_nodegroup_move_to_origin, + nodegroup_nodegroup_sub_vein, + shader_stem_material, +) +from infinigen.core import surface +from infinigen.core.nodes import Nodes, NodeWrangler, node_utils +from infinigen.core.placement.factory import AssetFactory + + +@node_utils.to_nodegroup( + "nodegroup_nodegroup_apply_wave", singleton=False, type="GeometryNodeTree" +) +def nodegroup_nodegroup_apply_wave(nw: NodeWrangler, leaf_h_wave_control_points): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketFloat", "Wave Scale Y", 1.0), + ("NodeSocketFloat", "Wave Scale X", 1.0), + ("NodeSocketFloat", "X Modulated", 0.0), + ("NodeSocketFloat", "Width Scale", 0.0), + ], + ) + + position = nw.new_node(Nodes.InputPosition) + + separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": position}) + + position_1 = nw.new_node(Nodes.InputPosition) + + separate_xyz_1 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": position_1}) + + attribute_statistic = nw.new_node( + Nodes.AttributeStatistic, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + 2: separate_xyz_1.outputs["Y"], + }, + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": separate_xyz.outputs["Y"], + 1: attribute_statistic.outputs["Min"], + 2: attribute_statistic.outputs["Max"], + }, + ) + + float_curve = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": map_range.outputs["Result"]} + ) + node_utils.assign_curve( + float_curve.mapping.curves[0], + [ + (0.0, 0.5), + (0.2, leaf_h_wave_control_points[0] + 0.5), + (0.4, leaf_h_wave_control_points[1] + 0.5), + (0.6, leaf_h_wave_control_points[2] + 0.5), + (0.8, leaf_h_wave_control_points[3] + 0.5), + (1.0, leaf_h_wave_control_points[4] + 0.5), + ], + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": float_curve, 3: -1.0} + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: map_range_1.outputs["Result"], + 1: group_input.outputs["Wave Scale Y"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply}) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + "Offset": combine_xyz, + }, + ) + + attribute_statistic_1 = nw.new_node( + Nodes.AttributeStatistic, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + 2: group_input.outputs["X Modulated"], + }, + ) + + map_range_2 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": group_input.outputs["X Modulated"], + 1: attribute_statistic_1.outputs["Min"], + 2: attribute_statistic_1.outputs["Max"], + }, + ) + + float_curve_1 = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": map_range_2.outputs["Result"]} + ) + node_utils.assign_curve( + float_curve_1.mapping.curves[0], + [ + (0.0, 0.1625), + (0.0955, 0.2844), + (0.2318, 0.3594), + (0.3727, 0.451), + (0.5045, 0.5094), + (0.6045, 0.4447), + (0.7886, 0.325), + (1.0, 0.1594), + ], + handles=["AUTO", "AUTO", "AUTO", "AUTO", "VECTOR", "AUTO", "AUTO", "AUTO"], + ) + + map_range_3 = nw.new_node( + Nodes.MapRange, 
input_kwargs={"Value": float_curve_1, 3: -1.0} + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: map_range_3.outputs["Result"], + 1: group_input.outputs["Wave Scale X"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply_1}) + + set_position_1 = nw.new_node( + Nodes.SetPosition, + input_kwargs={"Geometry": set_position, "Offset": combine_xyz_1}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_position_1} + ) + + +@node_utils.to_nodegroup( + "nodegroup_palm_leaf_assemble", singleton=False, type="GeometryNodeTree" +) +def nodegroup_palm_leaf_assemble(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Points", None), + ("NodeSocketGeometry", "Instance", None), + ("NodeSocketFloat", "Resolution", 0.0), + ], + ) + + index = nw.new_node(Nodes.Index) + + divide = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Resolution"], 1: 2.0}, + attrs={"operation": "DIVIDE"}, + ) + + less_than = nw.new_node( + Nodes.Math, input_kwargs={0: index, 1: divide}, attrs={"operation": "LESS_THAN"} + ) + + greater_than = nw.new_node( + Nodes.Math, input_kwargs={0: index, 1: 0.0}, attrs={"operation": "GREATER_THAN"} + ) + + op_and = nw.new_node( + Nodes.BooleanMath, input_kwargs={0: less_than, 1: greater_than} + ) + + curve_tangent = nw.new_node(Nodes.CurveTangent) + + align_euler_to_vector = nw.new_node( + Nodes.AlignEulerToVector, input_kwargs={"Vector": curve_tangent} + ) + + random_value = nw.new_node( + Nodes.RandomValue, input_kwargs={2: 0.9, 3: 1.1, "Seed": 2} + ) + + instance_on_points_1 = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={ + "Points": group_input.outputs["Points"], + "Selection": op_and, + "Instance": group_input.outputs["Instance"], + "Rotation": align_euler_to_vector, + "Scale": random_value.outputs[1], + }, + ) + + realize_instances_1 = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": instance_on_points_1} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": realize_instances_1} + ) + + +@node_utils.to_nodegroup( + "nodegroup_round_tropical_leaf", singleton=False, type="GeometryNodeTree" +) +def nodegroup_leaf_palm_instance(nw: NodeWrangler, leaf_h_wave_control_points): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "To Max", -0.4), + ("NodeSocketGeometry", "Mesh", None), + ("NodeSocketFloat", "Wave Scale Y", 0.3), + ("NodeSocketFloat", "Wave Scale X", 0.5), + ("NodeSocketFloat", "Leaf Width Scale", 0.0), + ], + ) + + subdivide_mesh = nw.new_node( + Nodes.SubdivideMesh, + input_kwargs={"Mesh": group_input.outputs["Mesh"], "Level": 10}, + ) + + subdivide_mesh_1 = nw.new_node( + Nodes.SubdivideMesh, input_kwargs={"Mesh": subdivide_mesh} + ) + + position = nw.new_node(Nodes.InputPosition) + + capture_attribute = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={"Geometry": subdivide_mesh_1, 1: position}, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + nodegroup_leaf_gen = nw.new_node( + nodegroup_nodegroup_leaf_gen().name, + input_kwargs={ + "Mesh": capture_attribute.outputs["Geometry"], + "Displancement scale": 0.0, + "Vein Asymmetry": 0.3023, + "Vein Density": 0.0, + "Jigsaw Scale": 10.0, + "Jigsaw Depth": 0.0, + "Vein Angle": 0.3, + "Wave Displacement": 0.0, + 
"Midrib Length": 0.3336, + "Midrib Width": 1.3, + "Stem Length": 0.6, + "Leaf Width Scale": group_input.outputs["Leaf Width Scale"], + }, + ) + + nodegroup_sub_vein = nw.new_node( + nodegroup_nodegroup_sub_vein().name, + input_kwargs={"X": nodegroup_leaf_gen.outputs["X Modulated"]}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: nodegroup_sub_vein.outputs["Value"], 1: 0.0005}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply}) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": nodegroup_leaf_gen.outputs["Mesh"], + "Offset": combine_xyz, + }, + ) + + capture_attribute_1 = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={ + "Geometry": set_position, + 2: nodegroup_sub_vein.outputs["Color Value"], + }, + ) + + capture_attribute_2 = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={ + "Geometry": capture_attribute_1.outputs["Geometry"], + 2: nodegroup_leaf_gen.outputs["Vein Value"], + }, + ) + + nodegroup_apply_wave = nw.new_node( + nodegroup_nodegroup_apply_wave(leaf_h_wave_control_points).name, + input_kwargs={ + "Geometry": capture_attribute_2.outputs["Geometry"], + "Wave Scale Y": group_input.outputs["Wave Scale Y"], + "Wave Scale X": group_input.outputs["Wave Scale X"], + "X Modulated": nodegroup_leaf_gen.outputs["X Modulated"], + "Width Scale": group_input.outputs["Leaf Width Scale"], + }, + ) + + nodegroup_move_to_origin = nw.new_node( + nodegroup_nodegroup_move_to_origin().name, + input_kwargs={"Geometry": nodegroup_apply_wave}, + ) + + nodegroup_leaf_rotate_x = nw.new_node( + nodegroup_nodegroup_leaf_rotate_x().name, + input_kwargs={ + "Geometry": nodegroup_move_to_origin, + "To Max": group_input.outputs["To Max"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Attribute": nodegroup_leaf_gen.outputs["Attribute"], + "Coordinate": capture_attribute.outputs["Attribute"], + "subvein": capture_attribute_1.outputs[2], + "vein": capture_attribute_2.outputs[2], + "Geometry": nodegroup_leaf_rotate_x, + }, + ) + + +@node_utils.to_nodegroup( + "nodegroup_palmleafsector", singleton=False, type="GeometryNodeTree" +) +def nodegroup_palmleafsector(nw: NodeWrangler, leaf_h_wave_control_points): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "To Max", -0.4), + ("NodeSocketGeometry", "Mesh", None), + ("NodeSocketFloat", "Wave Scale Y", 0.3), + ("NodeSocketFloat", "Wave Scale X", 0.5), + ("NodeSocketFloat", "Leaf Width Scale", 0.0), + ("NodeSocketInt", "Resolution1", 26), + ("NodeSocketFloat", "Resolution2", 0.0), + ], + ) + + round_tropical_leaf = nw.new_node( + nodegroup_leaf_palm_instance(leaf_h_wave_control_points).name, + input_kwargs={ + "To Max": group_input.outputs["To Max"], + "Mesh": group_input.outputs["Mesh"], + "Wave Scale Y": group_input.outputs["Wave Scale Y"], + "Wave Scale X": group_input.outputs["Wave Scale X"], + "Leaf Width Scale": group_input.outputs["Leaf Width Scale"], + }, + ) + + curve_circle = nw.new_node( + Nodes.CurveCircle, + input_kwargs={"Resolution": group_input.outputs["Resolution1"], "Radius": 0.01}, + ) + + palm_leaf_assemble = nw.new_node( + nodegroup_palm_leaf_assemble().name, + input_kwargs={ + "Points": curve_circle.outputs["Curve"], + "Instance": round_tropical_leaf.outputs["Geometry"], + "Resolution": group_input.outputs[6], + }, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, 
input_kwargs={"Geometry": palm_leaf_assemble} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Attribute": round_tropical_leaf.outputs["Attribute"], + "Coordinate": round_tropical_leaf.outputs["Coordinate"], + "subvein": round_tropical_leaf.outputs["subvein"], + "vein": round_tropical_leaf.outputs["vein"], + "Geometry": join_geometry, + }, + ) + + +@node_utils.to_nodegroup( + "nodegroup_leaf_on_stem", singleton=False, type="GeometryNodeTree" +) +def nodegroup_leaf_on_stem(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Points", None), + ("NodeSocketGeometry", "Instance", None), + ("NodeSocketVectorXYZ", "Scale", (1.0, 1.0, 1.0)), + ], + ) + + endpoint_selection = nw.new_node( + "GeometryNodeCurveEndpointSelection", input_kwargs={"End Size": 0} + ) + + curve_tangent = nw.new_node(Nodes.CurveTangent) + + align_euler_to_vector = nw.new_node( + Nodes.AlignEulerToVector, + input_kwargs={"Vector": curve_tangent}, + attrs={"axis": "Z"}, + ) + + instance_on_points_1 = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={ + "Points": group_input.outputs["Points"], + "Selection": endpoint_selection, + "Instance": group_input.outputs["Instance"], + "Rotation": align_euler_to_vector, + "Scale": group_input.outputs["Scale"], + }, + ) + + rotate_instances = nw.new_node( + Nodes.RotateInstances, + input_kwargs={ + "Instances": instance_on_points_1, + "Rotation": (1.5708, 0.0, 3.1416), + }, + ) + + realize_instances_1 = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": rotate_instances} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": realize_instances_1} + ) + + +@node_utils.to_nodegroup( + "nodegroup_stem_curvature", singleton=False, type="GeometryNodeTree" +) +def nodegroup_stem_curvature(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Curve", None), + ("NodeSocketFloat", "Y Stem Rotate", 0.2), + ("NodeSocketFloat", "X Stem Rotate", -0.2), + ], + ) + + resample_curve = nw.new_node( + Nodes.ResampleCurve, + input_kwargs={"Curve": group_input.outputs["Curve"], "Count": 100}, + ) + + position_2 = nw.new_node(Nodes.InputPosition) + + spline_parameter_1 = nw.new_node(Nodes.SplineParameter) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": spline_parameter_1.outputs["Factor"], + 3: group_input.outputs["Y Stem Rotate"], + 4: 0.0, + }, + ) + + vector_rotate = nw.new_node( + Nodes.VectorRotate, + input_kwargs={ + "Vector": position_2, + "Center": (0.0, 0.0, 2.0), + "Angle": map_range_1.outputs["Result"], + }, + attrs={"rotation_type": "Y_AXIS"}, + ) + + set_position_1 = nw.new_node( + Nodes.SetPosition, + input_kwargs={"Geometry": resample_curve, "Position": vector_rotate}, + ) + + position_1 = nw.new_node(Nodes.InputPosition) + + spline_parameter_2 = nw.new_node(Nodes.SplineParameter) + + map_range_2 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": spline_parameter_2.outputs["Factor"], + 3: group_input.outputs["X Stem Rotate"], + 4: 0.0, + }, + ) + + vector_rotate_1 = nw.new_node( + Nodes.VectorRotate, + input_kwargs={"Vector": position_1, "Angle": map_range_2.outputs["Result"]}, + attrs={"rotation_type": "X_AXIS"}, + ) + + set_position_2 = nw.new_node( + Nodes.SetPosition, + input_kwargs={"Geometry": set_position_1, "Position": 
vector_rotate_1}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_position_2} + ) + + +@node_utils.to_nodegroup( + "nodegroup_stem_geometry", singleton=False, type="GeometryNodeTree" +) +def nodegroup_stem_geometry(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Curve", None)] + ) + + spline_parameter = nw.new_node(Nodes.SplineParameter) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": spline_parameter.outputs["Factor"], 3: 0.4, 4: 0.8}, + attrs={"interpolation_type": "SMOOTHSTEP"}, + ) + + set_curve_radius = nw.new_node( + Nodes.SetCurveRadius, + input_kwargs={ + "Curve": group_input.outputs["Curve"], + "Radius": map_range.outputs["Result"], + }, + ) + + curve_circle = nw.new_node( + Nodes.CurveCircle, input_kwargs={"Radius": uniform(0.03, 0.06)} + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": set_curve_radius, + "Profile Curve": curve_circle.outputs["Curve"], + "Fill Caps": True, + }, + ) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Mesh": curve_to_mesh}) + + +def shader_leaf_material(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + attribute = nw.new_node(Nodes.Attribute, attrs={"attribute_name": "vein"}) + + texture_coordinate = nw.new_node(Nodes.TextureCoord) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": texture_coordinate.outputs["Object"], + "Scale": 6.8, + "Detail": 10.0, + "Roughness": 0.7, + }, + ) + + separate_rgb = nw.new_node( + Nodes.SeparateRGB, input_kwargs={"Image": noise_texture.outputs["Color"]} + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": separate_rgb.outputs["G"], + 1: 0.4, + 2: 0.7, + 3: 0.48, + 4: 0.52, + }, + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": separate_rgb.outputs["B"], + 1: 0.4, + 2: 0.7, + 3: 0.8, + 4: 1.2, + }, + ) + + attribute_1 = nw.new_node( + Nodes.Attribute, attrs={"attribute_name": "subvein offset"} + ) + + map_range_2 = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": attribute_1.outputs["Color"], 2: -0.94} + ) + + main_leaf_hsv = (uniform(0.3, 0.36), uniform(0.6, 0.7), uniform(0.2, 0.3)) + hue_saturation_value = nw.new_node( + "ShaderNodeHueSaturation", + input_kwargs={"Value": 2.0, "Color": hsv2rgba(main_leaf_hsv)}, + ) + + main_leaf_hsv_2 = (main_leaf_hsv[0] + normal(0.0, 0.005),) + main_leaf_hsv[1:] + mix = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": map_range_2.outputs["Result"], + "Color1": hue_saturation_value, + "Color2": hsv2rgba(main_leaf_hsv_2), + }, + ) + + hue_saturation_value_1 = nw.new_node( + "ShaderNodeHueSaturation", + input_kwargs={ + "Hue": map_range.outputs["Result"], + "Value": map_range_1.outputs["Result"], + "Color": mix, + }, + ) + + stem_color_hsv = main_leaf_hsv[:-1] + (main_leaf_hsv[-1] - uniform(0.05, 0.15),) + mix_1 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": attribute.outputs["Color"], + "Color1": hsv2rgba(stem_color_hsv), + "Color2": hue_saturation_value_1, + }, + ) + + group = nw.new_node( + nodegroup_nodegroup_leaf_shader().name, input_kwargs={"Color": mix_1} + ) + + material_output = nw.new_node(Nodes.MaterialOutput, input_kwargs={"Surface": group}) + + +def geometry_plant_nodes(nw: NodeWrangler, **kwargs): + # Code generated using version 2.4.3 of the node_transpiler + + curve_line_1 = nw.new_node( + 
Nodes.CurveLine, + input_kwargs={ + "Start": (0.0, 0.0, kwargs["plant_stem_length"]), + "End": (0.0, 0.0, 0.0), + }, + ) + + stem_y_curvature = nw.new_node(Nodes.Value, label="stem_y_curvature") + stem_y_curvature.outputs[0].default_value = kwargs["stem_y_curvature"] + + stem_x_curvature = nw.new_node(Nodes.Value, label="stem_x_curvature") + stem_x_curvature.outputs[0].default_value = kwargs["stem_x_curvature"] + + stem_curvature = nw.new_node( + nodegroup_stem_curvature().name, + input_kwargs={ + "Curve": curve_line_1, + "Y Stem Rotate": stem_y_curvature, + "X Stem Rotate": stem_x_curvature, + }, + ) + + stem_geometry = nw.new_node( + nodegroup_stem_geometry().name, input_kwargs={"Curve": stem_curvature} + ) + + set_material = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": stem_geometry, + "Material": surface.shaderfunc_to_material(shader_stem_material), + }, + ) + + leaf_x_curvature = nw.new_node(Nodes.Value, label="leaf_x_curvature") + leaf_x_curvature.outputs[0].default_value = kwargs["leaf_x_curvature"] + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) + + wave_x_scale = nw.new_node(Nodes.Value, label="wave_x_scale") + wave_x_scale.outputs[0].default_value = kwargs["leaf_h_wave_scale"] + + wave_y_scale = nw.new_node(Nodes.Value, label="wave_y_scale") + wave_y_scale.outputs[0].default_value = 0.0 + + leaf_width_scale = nw.new_node(Nodes.Value, label="leaf_width_scale") + leaf_width_scale.outputs[0].default_value = uniform(0.15, 0.2) + + integer = nw.new_node(Nodes.Integer, attrs={"integer": 24}) + integer.integer = randint(20, 30) + + palmleafsector = nw.new_node( + nodegroup_palmleafsector( + leaf_h_wave_control_points=kwargs["leaf_h_wave_control_points"] + ).name, + input_kwargs={ + "To Max": leaf_x_curvature, + "Mesh": group_input.outputs["Geometry"], + "Wave Scale Y": wave_x_scale, + "Wave Scale X": wave_y_scale, + "Leaf Width Scale": leaf_width_scale, + 5: integer, + 6: integer, + }, + ) + + leaf_scale = nw.new_node(Nodes.Value, label="leaf_scale") + leaf_scale.outputs[0].default_value = uniform(0.85, 1.25) + + leaf_on_stem = nw.new_node( + nodegroup_leaf_on_stem().name, + input_kwargs={ + "Points": stem_curvature, + "Instance": palmleafsector.outputs["Geometry"], + "Scale": leaf_scale, + }, + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [set_material, leaf_on_stem]} + ) + + combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={"X": leaf_x_curvature}) + + transform_1 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": join_geometry_1, "Rotation": combine_xyz}, + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": transform_1, + "Translation": kwargs["plant_translation"], + "Rotation": (0.0, 0.0, kwargs["plant_z_rotate"]), + "Scale": kwargs["plant_scale"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": transform, + "Attribute": palmleafsector.outputs["Attribute"], + "Coordinate": palmleafsector.outputs["Coordinate"], + "subvein offset": palmleafsector.outputs["subvein"], + "vein": palmleafsector.outputs["vein"], + }, + ) + + +class LeafPalmPlantFactory(AssetFactory): + def __init__(self, factory_seed, coarse=False): + super(LeafPalmPlantFactory, self).__init__(factory_seed, coarse=coarse) + + def get_h_wave_contour(self, mode): + if mode == "flat": + return [normal(0.0, 0.03) for _ in range(6)] + elif mode == "s": + return [ + -0.5 + normal(0.0, 0.01), + 0.0 + normal(0.0, 
0.01), + 0.05 + normal(0.0, 0.01), + 0.0 + normal(0.0, 0.01), + -0.05 + normal(0.0, 0.01), + ] + else: + raise NotImplementedError + + def update_params(self, params): + if params.get("leaf_h_wave_control_points", None) is None: + mode = np.random.choice(["flat", "s"], p=[0.7, 0.3]) + params["leaf_h_wave_control_points"] = self.get_h_wave_contour(mode) + if params.get("leaf_h_wave_scale", None) is None: + params["leaf_h_wave_scale"] = uniform(0.01, 0.15) + if params.get("leaf_x_curvature", None) is None: + params["leaf_x_curvature"] = uniform(0.0, 0.5) + if params.get("stem_x_curvature", None) is None: + params["stem_x_curvature"] = uniform(-0.1, 0.4) + if params.get("stem_y_curvature", None) is None: + params["stem_y_curvature"] = uniform(-0.15, 0.15) + if params.get("plant_translation", None) is None: + params["plant_translation"] = (0.0, 0.0, 0.0) + if params.get("plant_z_rotate", None) is None: + params["plant_z_rotate"] = uniform(-0.4, 0.4) + if params.get("plant_stem_length", None) is None: + params["plant_stem_length"] = uniform(1.5, 2.2) + if params.get("plant_scale", None) is None: + s = uniform(0.8, 1.3) + params["plant_scale"] = (s, s, s) + return params + + def create_asset(self, params={}, **kwargs): + bpy.ops.mesh.primitive_plane_add( + size=2, + enter_editmode=False, + align="WORLD", + location=(0, 0, 0), + scale=(1, 1, 1), + ) + obj = bpy.context.active_object + + params = self.update_params(params) + surface.add_geomod( + obj, + geometry_plant_nodes, + apply=False, + attributes=["Attribute", "Coordinate", "subvein offset", "vein"], + input_kwargs=params, + ) + surface.add_material(obj, shader_leaf_material, selection=None) + + return obj diff --git a/infinigen/assets/objects/tropic_plants/leaf_palm_tree.py b/infinigen/assets/objects/tropic_plants/leaf_palm_tree.py new file mode 100644 index 000000000..294323d81 --- /dev/null +++ b/infinigen/assets/objects/tropic_plants/leaf_palm_tree.py @@ -0,0 +1,987 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
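Similarly, before `leaf_palm_tree.py` begins: `LeafPalmPlantFactory.create_asset`, defined just above, takes an explicit `params` dict rather than keyword arguments, and `update_params` only draws random values for keys the caller leaves unset; `palm_tree.py` later in this diff imports the factory via the same path. A short illustrative sketch, not part of the committed files, assuming a Blender Python session with infinigen installed:

```python
# Illustrative usage sketch -- not part of this commit.
# The import path matches the one used by palm_tree.py later in this diff.
from infinigen.assets.objects.tropic_plants.leaf_palm_plant import LeafPalmPlantFactory

fac = LeafPalmPlantFactory(factory_seed=7)
leaf = fac.create_asset(
    params={
        "leaf_x_curvature": 0.3,    # fixed, instead of uniform(0.0, 0.5)
        "plant_stem_length": 2.0,   # fixed, instead of uniform(1.5, 2.2)
        "plant_z_rotate": 0.0,      # fixed, instead of uniform(-0.4, 0.4)
    }
)
```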
+ +# Authors: Beining Han + +import bpy +from numpy.random import normal, randint, uniform + +from infinigen.assets.objects.tropic_plants.tropic_plant_utils import ( + nodegroup_nodegroup_leaf_gen, + nodegroup_nodegroup_leaf_rotate_x, + nodegroup_nodegroup_leaf_shader, + nodegroup_nodegroup_move_to_origin, + nodegroup_nodegroup_sub_vein, + shader_stem_material, +) +from infinigen.core import surface +from infinigen.core.nodes import Nodes, NodeWrangler, node_utils +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.tagging import tag_object +from infinigen.core.util.color import hsv2rgba + + +@node_utils.to_nodegroup( + "nodegroup_nodegroup_apply_wave", singleton=False, type="GeometryNodeTree" +) +def nodegroup_nodegroup_apply_wave(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketFloat", "Wave Scale Y", 1.0), + ("NodeSocketFloat", "Wave Scale X", 1.0), + ("NodeSocketFloat", "X Modulated", 0.0), + ("NodeSocketFloat", "Width Scale", 0.0), + ], + ) + + position = nw.new_node(Nodes.InputPosition) + + separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": position}) + + position_1 = nw.new_node(Nodes.InputPosition) + + separate_xyz_1 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": position_1}) + + attribute_statistic = nw.new_node( + Nodes.AttributeStatistic, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + 2: separate_xyz_1.outputs["Y"], + }, + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": separate_xyz.outputs["Y"], + 1: attribute_statistic.outputs["Min"], + 2: attribute_statistic.outputs["Max"], + }, + ) + + float_curve = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": map_range.outputs["Result"]} + ) + node_utils.assign_curve( + float_curve.mapping.curves[0], + [ + (0.0, 0.4875), + (0.1091, 0.5), + (0.3275, 0.4921), + (0.7409, 0.5031), + (1.0, 0.5063), + ], + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": float_curve, 3: -1.0} + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: map_range_1.outputs["Result"], + 1: group_input.outputs["Wave Scale Y"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply}) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + "Offset": combine_xyz, + }, + ) + + attribute_statistic_1 = nw.new_node( + Nodes.AttributeStatistic, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + 2: group_input.outputs["X Modulated"], + }, + ) + + map_range_2 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": group_input.outputs["X Modulated"], + 1: attribute_statistic_1.outputs["Min"], + 2: attribute_statistic_1.outputs["Max"], + }, + ) + + float_curve_1 = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": map_range_2.outputs["Result"]} + ) + node_utils.assign_curve( + float_curve_1.mapping.curves[0], + [ + (0.0, 0.1625), + (0.0955, 0.2844), + (0.2318, 0.3594), + (0.3727, 0.451), + (0.5045, 0.5094), + (0.6045, 0.4447), + (0.7886, 0.325), + (1.0, 0.1594), + ], + handles=["AUTO", "AUTO", "AUTO", "AUTO", "VECTOR", "AUTO", "AUTO", "AUTO"], + ) + + map_range_3 = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": float_curve_1, 3: -1.0} + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: 
map_range_3.outputs["Result"], + 1: group_input.outputs["Wave Scale X"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply_1}) + + set_position_1 = nw.new_node( + Nodes.SetPosition, + input_kwargs={"Geometry": set_position, "Offset": combine_xyz_1}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_position_1} + ) + + +@node_utils.to_nodegroup( + "nodegroup_leaf_on_stem_selection", singleton=False, type="GeometryNodeTree" +) +def nodegroup_leaf_on_stem_selection(nw: NodeWrangler, gt, lt, th): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "Samples", 0.0), + ("NodeSocketFloat", "Random Value", 0.0), + ], + ) + + greater_than = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Random Value"], 1: gt}, + attrs={"operation": "GREATER_THAN"}, + ) + + less_than = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Random Value"], 1: lt}, + attrs={"operation": "LESS_THAN"}, + ) + + op_and = nw.new_node( + Nodes.BooleanMath, input_kwargs={0: greater_than, 1: less_than} + ) + + index = nw.new_node(Nodes.Index) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Samples"], 1: th * uniform(0.95, 1.05)}, + attrs={"operation": "MULTIPLY"}, + ) + + less_than_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: index, 1: multiply}, + attrs={"operation": "LESS_THAN"}, + ) + + op_and_1 = nw.new_node(Nodes.BooleanMath, input_kwargs={0: op_and, 1: less_than_1}) + + op_not = nw.new_node( + Nodes.BooleanMath, input_kwargs={0: op_and_1}, attrs={"operation": "NOT"} + ) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Boolean": op_not}) + + +@node_utils.to_nodegroup( + "nodegroup_leaf_on_stem_scale_up_down", singleton=False, type="GeometryNodeTree" +) +def nodegroup_leaf_on_stem_scale_up_down(nw: NodeWrangler, gap): + # Code generated using version 2.4.3 of the node_transpiler + + index_2 = nw.new_node(Nodes.Index) + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketFloat", "Samples", 0.0)] + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": index_2, 2: group_input.outputs["Samples"]}, + attrs={"clamp": False}, + ) + + float_curve_1 = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": map_range_1.outputs["Result"]} + ) + node_utils.assign_curve( + float_curve_1.mapping.curves[0], + [(0.0, 1.0 - gap), (0.3, 1.0 - gap / 2.0), (0.6, 1.0 - gap / 5.0), (1.0, 1.0)], + ) + + multiply = nw.new_node( + Nodes.Math, input_kwargs={0: float_curve_1}, attrs={"operation": "MULTIPLY"} + ) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Value": multiply}) + + +@node_utils.to_nodegroup( + "nodegroup_leaf_on_stem_rotation_up_down", singleton=False, type="GeometryNodeTree" +) +def nodegroup_leaf_on_stem_rotation_up_down(nw: NodeWrangler, scale, gap): + # Code generated using version 2.4.3 of the node_transpiler + + index_1 = nw.new_node(Nodes.Index) + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketInt", "Samples", 0)] + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": index_1, + 2: group_input.outputs["Samples"], + 3: 1.0, + 4: 0.0, + }, + attrs={"clamp": False}, + ) + + float_curve = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": map_range.outputs["Result"]} + ) + node_utils.assign_curve( + 
float_curve.mapping.curves[0], + [(0.0, 1.0 - gap), (0.7, 1.0 - gap / 2.0), (1.0, 1.0)], + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: float_curve, 1: scale}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply}) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Vector": combine_xyz}) + + +@node_utils.to_nodegroup( + "nodegroup_leaf_on_stem_rotation_in_out", singleton=False, type="GeometryNodeTree" +) +def nodegroup_leaf_on_stem_rotation_in_out(nw: NodeWrangler, in_out_scale=1.0): + # Code generated using version 2.4.3 of the node_transpiler + + index_1 = nw.new_node(Nodes.Index) + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketInt", "Samples", 0)] + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": index_1, + 2: group_input.outputs["Samples"], + 3: 1.0, + 4: 0.0, + }, + attrs={"clamp": False}, + ) + + float_curve = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": map_range.outputs["Result"]} + ) + node_utils.assign_curve( + float_curve.mapping.curves[0], [(0.0, 0.0), (0.5136, 0.2188), (1.0, 0.8813)] + ) + + add = nw.new_node(Nodes.Math, input_kwargs={0: float_curve, 1: -0.5}) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: add, 1: in_out_scale}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={"X": multiply}) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Vector": combine_xyz}) + + +@node_utils.to_nodegroup( + "nodegroup_round_tropical_leaf", singleton=False, type="GeometryNodeTree" +) +def nodegroup_palm_leaf_instance(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "To Max", -0.4), + ("NodeSocketGeometry", "Mesh", None), + ("NodeSocketFloat", "Wave Scale Y", 0.3), + ("NodeSocketFloat", "Wave Scale X", 0.5), + ("NodeSocketFloat", "Leaf Width Scale", 0.0), + ], + ) + + subdivide_mesh = nw.new_node( + Nodes.SubdivideMesh, + input_kwargs={"Mesh": group_input.outputs["Mesh"], "Level": 8}, + ) + + subdivide_mesh_1 = nw.new_node( + Nodes.SubdivideMesh, input_kwargs={"Mesh": subdivide_mesh} + ) + + position = nw.new_node(Nodes.InputPosition) + + capture_attribute = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={"Geometry": subdivide_mesh_1, 1: position}, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + nodegroup_leaf_gen = nw.new_node( + nodegroup_nodegroup_leaf_gen().name, + input_kwargs={ + "Mesh": capture_attribute.outputs["Geometry"], + "Displancement scale": 0.0, + "Vein Asymmetry": uniform(0.2, 0.4), + "Vein Density": 0.0, + "Jigsaw Scale": 10.0, + "Jigsaw Depth": 0.0, + "Vein Angle": 0.3, + "Wave Displacement": 0.0, + "Midrib Length": 0.3336, + "Midrib Width": uniform(0.9, 1.5), + "Stem Length": uniform(0.55, 0.65), + "Leaf Width Scale": group_input.outputs["Leaf Width Scale"], + }, + ) + + nodegroup_sub_vein = nw.new_node( + nodegroup_nodegroup_sub_vein().name, + input_kwargs={"X": nodegroup_leaf_gen.outputs["X Modulated"]}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: nodegroup_sub_vein.outputs["Value"], 1: 0.0005}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply}) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": nodegroup_leaf_gen.outputs["Mesh"], + "Offset": combine_xyz, + }, + ) + + 
capture_attribute_1 = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={ + "Geometry": set_position, + 2: nodegroup_sub_vein.outputs["Color Value"], + }, + ) + + capture_attribute_2 = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={ + "Geometry": capture_attribute_1.outputs["Geometry"], + 2: nodegroup_leaf_gen.outputs["Vein Value"], + }, + ) + + nodegroup_apply_wave = nw.new_node( + nodegroup_nodegroup_apply_wave().name, + input_kwargs={ + "Geometry": capture_attribute_2.outputs["Geometry"], + "Wave Scale Y": group_input.outputs["Wave Scale Y"], + "Wave Scale X": group_input.outputs["Wave Scale X"], + "X Modulated": nodegroup_leaf_gen.outputs["X Modulated"], + "Width Scale": group_input.outputs["Leaf Width Scale"], + }, + ) + + nodegroup_move_to_origin = nw.new_node( + nodegroup_nodegroup_move_to_origin().name, + input_kwargs={"Geometry": nodegroup_apply_wave}, + ) + + nodegroup_leaf_rotate_x = nw.new_node( + nodegroup_nodegroup_leaf_rotate_x().name, + input_kwargs={ + "Geometry": nodegroup_move_to_origin, + "To Max": group_input.outputs["To Max"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Attribute": nodegroup_leaf_gen.outputs["Attribute"], + "Coordinate": capture_attribute.outputs["Attribute"], + "subvein": capture_attribute_1.outputs[2], + "vein": capture_attribute_2.outputs[2], + "Geometry": nodegroup_leaf_rotate_x, + }, + ) + + +@node_utils.to_nodegroup( + "nodegroup_leaf_on_stem", singleton=False, type="GeometryNodeTree" +) +def nodegroup_leaf_on_stem(nw: NodeWrangler, versions): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Points", None), + ("NodeSocketGeometry", "Instance", None), + ("NodeSocketVectorXYZ", "Scale", (1.0, 1.0, 1.0)), + ("NodeSocketInt", "Samples", 0), + ], + ) + + rotation_scale, rotation_gap = uniform(0.6, 1.2), uniform(0.2, 0.6) + scale_gap = uniform(0.2, 0.5) + in_out_scale = normal(0.0, 0.7) + leaves = [] + for L in [-1, 1]: + curve_tangent_1 = nw.new_node(Nodes.CurveTangent) + + align_euler_to_vector_1 = nw.new_node( + Nodes.AlignEulerToVector, + input_kwargs={"Vector": curve_tangent_1}, + attrs={"pivot_axis": "Y"}, + ) + + instance_on_points_2 = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={ + "Points": group_input.outputs["Points"], + "Instance": group_input.outputs["Instance"], + "Rotation": align_euler_to_vector_1, + }, + ) + + scale_instances_4 = nw.new_node( + Nodes.ScaleInstances, + input_kwargs={"Instances": instance_on_points_2, "Scale": (1.0, L, 1.0)}, + ) + + index_1 = nw.new_node(Nodes.Index) + + random_value_4 = nw.new_node( + Nodes.RandomValue, input_kwargs={"ID": index_1, "Seed": L + 1} + ) + + leaf_on_stem_selection_1 = nw.new_node( + nodegroup_leaf_on_stem_selection(0, 0, 0).name, + input_kwargs={ + "Samples": group_input.outputs["Samples"], + "Random Value": random_value_4.outputs[1], + }, + ) + + value_1 = nw.new_node(Nodes.Value) + value_1.outputs[0].default_value = 1.0 + + scale_instances_3 = nw.new_node( + Nodes.ScaleInstances, + input_kwargs={ + "Instances": scale_instances_4, + "Selection": leaf_on_stem_selection_1, + "Scale": value_1, + }, + ) + + join_geometry_2 = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": scale_instances_3} + ) + + leaf_on_stem_rotation_up_down = nw.new_node( + nodegroup_leaf_on_stem_rotation_up_down( + rotation_scale * L, rotation_gap + ).name, + input_kwargs={"Samples": group_input.outputs["Samples"]}, + ) + + rotate_instances_6 
= nw.new_node( + Nodes.RotateInstances, + input_kwargs={ + "Instances": join_geometry_2, + "Rotation": leaf_on_stem_rotation_up_down, + }, + ) + + leaf_on_stem_rotation_in_out_001 = nw.new_node( + nodegroup_leaf_on_stem_rotation_in_out(in_out_scale=in_out_scale).name, + input_kwargs={"Samples": group_input.outputs["Samples"]}, + ) + + rotate_instances_7 = nw.new_node( + Nodes.RotateInstances, + input_kwargs={ + "Instances": rotate_instances_6, + "Rotation": leaf_on_stem_rotation_in_out_001, + }, + ) + + leaf_on_stem_scale_up_down_1 = nw.new_node( + nodegroup_leaf_on_stem_scale_up_down(scale_gap).name, + input_kwargs={"Samples": group_input.outputs["Samples"]}, + ) + + scale_instances_9 = nw.new_node( + Nodes.ScaleInstances, + input_kwargs={ + "Instances": rotate_instances_7, + "Scale": leaf_on_stem_scale_up_down_1, + }, + ) + leaves.append(scale_instances_9) + join_geometry = nw.new_node(Nodes.JoinGeometry, input_kwargs={"Geometry": leaves}) + + random_value_1 = nw.new_node(Nodes.RandomValue, input_kwargs={2: -0.3, 3: 0.3}) + + random_value_3 = nw.new_node(Nodes.RandomValue, input_kwargs={2: -0.3, 3: 0.3}) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": random_value_1.outputs[1], "Y": random_value_3.outputs[1]}, + ) + + rotate_instances = nw.new_node( + Nodes.RotateInstances, + input_kwargs={"Instances": join_geometry, "Rotation": combine_xyz}, + ) + + random_value_2 = nw.new_node(Nodes.RandomValue, input_kwargs={2: 0.7}) + + scale_instances_6 = nw.new_node( + Nodes.ScaleInstances, + input_kwargs={ + "Instances": rotate_instances, + "Scale": random_value_2.outputs[1], + }, + ) + + realize_instances = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": scale_instances_6} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": realize_instances} + ) + + +@node_utils.to_nodegroup( + "nodegroup_stem_curvature", singleton=False, type="GeometryNodeTree" +) +def nodegroup_stem_curvature(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Curve", None), + ("NodeSocketFloat", "Y Stem Rotate", 0.2), + ("NodeSocketFloat", "Stem Count", 0.0), + ("NodeSocketFloat", "X Stem Rotate", -0.2), + ], + ) + + resample_curve = nw.new_node( + Nodes.ResampleCurve, + input_kwargs={ + "Curve": group_input.outputs["Curve"], + "Count": group_input.outputs["Stem Count"], + }, + ) + + position_2 = nw.new_node(Nodes.InputPosition) + + spline_parameter_1 = nw.new_node(Nodes.SplineParameter) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": spline_parameter_1.outputs["Factor"], + 3: group_input.outputs["Y Stem Rotate"], + 4: 0.0, + }, + ) + + vector_rotate = nw.new_node( + Nodes.VectorRotate, + input_kwargs={ + "Vector": position_2, + "Center": (0.0, 0.0, 2.0), + "Angle": map_range_1.outputs["Result"], + }, + attrs={"rotation_type": "Y_AXIS"}, + ) + + set_position_1 = nw.new_node( + Nodes.SetPosition, + input_kwargs={"Geometry": resample_curve, "Position": vector_rotate}, + ) + + position_1 = nw.new_node(Nodes.InputPosition) + + spline_parameter_2 = nw.new_node(Nodes.SplineParameter) + + map_range_2 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": spline_parameter_2.outputs["Factor"], + 3: group_input.outputs["X Stem Rotate"], + 4: 0.0, + }, + ) + + vector_rotate_1 = nw.new_node( + Nodes.VectorRotate, + input_kwargs={"Vector": position_1, "Angle": map_range_2.outputs["Result"]}, + 
attrs={"rotation_type": "X_AXIS"}, + ) + + set_position_2 = nw.new_node( + Nodes.SetPosition, + input_kwargs={"Geometry": set_position_1, "Position": vector_rotate_1}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_position_2} + ) + + +@node_utils.to_nodegroup( + "nodegroup_stem_geometry", singleton=False, type="GeometryNodeTree" +) +def nodegroup_stem_geometry(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Curve", None)] + ) + + spline_parameter = nw.new_node(Nodes.SplineParameter) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": spline_parameter.outputs["Factor"], + 3: uniform(0.1, 0.3), + 4: 0.8, + }, + attrs={"interpolation_type": "SMOOTHSTEP"}, + ) + + set_curve_radius = nw.new_node( + Nodes.SetCurveRadius, + input_kwargs={ + "Curve": group_input.outputs["Curve"], + "Radius": map_range.outputs["Result"], + }, + ) + + curve_circle = nw.new_node( + Nodes.CurveCircle, input_kwargs={"Radius": uniform(0.03, 0.06)} + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": set_curve_radius, + "Profile Curve": curve_circle.outputs["Curve"], + "Fill Caps": True, + }, + ) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Mesh": curve_to_mesh}) + + +def shader_leaf_material(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + attribute = nw.new_node(Nodes.Attribute, attrs={"attribute_name": "vein"}) + + texture_coordinate = nw.new_node(Nodes.TextureCoord) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": texture_coordinate.outputs["Object"], + "Scale": 6.8, + "Detail": 10.0, + "Roughness": 0.7, + }, + ) + + separate_rgb = nw.new_node( + Nodes.SeparateRGB, input_kwargs={"Image": noise_texture.outputs["Color"]} + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": separate_rgb.outputs["G"], + 1: 0.4, + 2: 0.7, + 3: 0.48, + 4: 0.52, + }, + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": separate_rgb.outputs["B"], + 1: 0.4, + 2: 0.7, + 3: 0.8, + 4: 1.2, + }, + ) + + attribute_1 = nw.new_node( + Nodes.Attribute, attrs={"attribute_name": "subvein offset"} + ) + + map_range_2 = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": attribute_1.outputs["Color"], 2: -0.94} + ) + + main_leaf_hsv = (uniform(0.3, 0.36), uniform(0.8, 1.0), uniform(0.25, 0.45)) + hue_saturation_value = nw.new_node( + "ShaderNodeHueSaturation", + input_kwargs={"Value": 2.0, "Color": hsv2rgba(main_leaf_hsv)}, + ) + + main_leaf_hsv_2 = (main_leaf_hsv[0] + normal(0.0, 0.005),) + main_leaf_hsv[1:] + mix = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": map_range_2.outputs["Result"], + "Color1": hue_saturation_value, + "Color2": hsv2rgba(main_leaf_hsv_2), + }, + ) + + hue_saturation_value_1 = nw.new_node( + "ShaderNodeHueSaturation", + input_kwargs={ + "Hue": map_range.outputs["Result"], + "Value": map_range_1.outputs["Result"], + "Color": mix, + }, + ) + + stem_color_hsv = main_leaf_hsv[:-1] + (main_leaf_hsv[-1] - uniform(0.05, 0.15),) + mix_1 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": attribute.outputs["Color"], + "Color1": hsv2rgba(stem_color_hsv), + "Color2": hue_saturation_value_1, + }, + ) + + group = nw.new_node( + nodegroup_nodegroup_leaf_shader().name, input_kwargs={"Color": mix_1} + ) + + material_output = nw.new_node(Nodes.MaterialOutput, 
input_kwargs={"Surface": group}) + + +def geometry_palm_tree_leaf_nodes(nw: NodeWrangler, **kwargs): + # Code generated using version 2.4.3 of the node_transpiler + + curve_line_1 = nw.new_node( + Nodes.CurveLine, input_kwargs={"Start": (0.0, 0.0, 2.0), "End": (0.0, 0.0, 0.0)} + ) + + leaf_x_curvature = nw.new_node(Nodes.Value, label="leaf_x_curvature") + leaf_x_curvature.outputs[0].default_value = kwargs["leaf_x_curvature"] + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: leaf_x_curvature, 1: kwargs["leaf_instance_curvature_ratio"]}, + attrs={"operation": "MULTIPLY"}, + ) + + integer_1 = nw.new_node(Nodes.Integer, attrs={"integer": 50}) + integer_1.integer = kwargs["num_leaf_samples"] + + stem_x_curvature = nw.new_node(Nodes.Value, label="stem_x_curvature") + stem_x_curvature.outputs[0].default_value = normal(0.0, 0.15) + + stem_curvature = nw.new_node( + nodegroup_stem_curvature().name, + input_kwargs={ + "Curve": curve_line_1, + "Y Stem Rotate": leaf_x_curvature, + "Stem Count": integer_1, + "X Stem Rotate": stem_x_curvature, + }, + ) + + stem_geometry = nw.new_node( + nodegroup_stem_geometry().name, input_kwargs={"Curve": stem_curvature} + ) + + set_material = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": stem_geometry, + "Material": surface.shaderfunc_to_material(shader_stem_material), + }, + ) + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) + + wave_x_scale = nw.new_node(Nodes.Value, label="wave_x_scale") + wave_x_scale.outputs[0].default_value = 0.0 + + wave_y_scale = nw.new_node(Nodes.Value, label="wave_y_scale") + wave_y_scale.outputs[0].default_value = 0.0 + + leaf_width_scale = nw.new_node(Nodes.Value, label="leaf_width_scale") + leaf_width_scale.outputs[0].default_value = kwargs["leaf_instance_width"] + + palm_leaf_instance = nw.new_node( + nodegroup_palm_leaf_instance().name, + input_kwargs={ + "To Max": multiply, + "Mesh": group_input.outputs["Geometry"], + "Wave Scale Y": wave_x_scale, + "Wave Scale X": wave_y_scale, + "Leaf Width Scale": leaf_width_scale, + }, + ) + + leaf_scale = nw.new_node(Nodes.Value, label="leaf_scale") + leaf_scale.outputs[0].default_value = uniform(0.5, 0.7) + + leaf_on_stem = nw.new_node( + nodegroup_leaf_on_stem(kwargs["versions"]).name, + input_kwargs={ + "Points": stem_curvature, + "Instance": palm_leaf_instance.outputs["Geometry"], + "Scale": leaf_scale, + "Samples": integer_1, + }, + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [set_material, leaf_on_stem]} + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": join_geometry_1, + "Translation": kwargs["plant_translation"], + "Rotation": (0.0, 0.0, kwargs["plant_z_rotate"]), + "Scale": kwargs["plant_scale"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": transform, + "Attribute": palm_leaf_instance.outputs["Attribute"], + "Coordinate": palm_leaf_instance.outputs["Coordinate"], + "subvein offset": palm_leaf_instance.outputs["subvein"], + "vein": palm_leaf_instance.outputs["vein"], + }, + ) + + +class LeafPalmTreeFactory(AssetFactory): + def __init__(self, factory_seed, coarse=False): + super(LeafPalmTreeFactory, self).__init__(factory_seed, coarse=coarse) + + def update_params(self, params): + if params.get("leaf_x_curvature", None) is None: + params["leaf_x_curvature"] = uniform(0.0, 0.8) + if params.get("leaf_instance_curvature_ratio", None) is None: + 
params["leaf_instance_curvature_ratio"] = uniform(0.3, 0.6) + if params.get("leaf_instance_width", None) is None: + params["leaf_instance_width"] = uniform(0.07, 0.15) + if params.get("num_leaf_samples", None) is None: + params["num_leaf_samples"] = int( + randint(6, 10) / params["leaf_instance_width"] + ) + if params.get("plant_translation", None) is None: + params["plant_translation"] = (0.0, 0.0, 0.0) + if params.get("plant_z_rotate", None) is None: + params["plant_z_rotate"] = uniform(-0.4, 0.4) + if params.get("versions", None) is None: + params["versions"] = 3 + if params.get("plant_scale", None) is None: + s = uniform(0.8, 1.5) + params["plant_scale"] = (s, s, s) + return params + + def create_asset(self, params={}, **kwargs): + bpy.ops.mesh.primitive_plane_add( + size=2, + enter_editmode=False, + align="WORLD", + location=(0, 0, 0), + scale=(1, 1, 1), + ) + obj = bpy.context.active_object + + params = self.update_params(params) + surface.add_geomod( + obj, + geometry_palm_tree_leaf_nodes, + apply=True, + attributes=["Attribute", "Coordinate", "subvein offset", "vein"], + input_kwargs=params, + ) + surface.add_material(obj, shader_leaf_material, selection=None) + + tag_object(obj, "leaf_palm_tree") + return obj + + +if __name__ == "__main__": + fac = LeafPalmTreeFactory(0) + fac.create_asset() diff --git a/infinigen/assets/objects/tropic_plants/palm_tree.py b/infinigen/assets/objects/tropic_plants/palm_tree.py new file mode 100644 index 000000000..a2d717426 --- /dev/null +++ b/infinigen/assets/objects/tropic_plants/palm_tree.py @@ -0,0 +1,1436 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Beining Han + + +import bpy +import gin +import numpy as np +from numpy.random import normal, randint, uniform + +from infinigen.assets.objects.tropic_plants.leaf_palm_plant import LeafPalmPlantFactory +from infinigen.core import surface +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.util import blender as butil +from infinigen.core.util.color import hsv2rgba + + +@node_utils.to_nodegroup( + "nodegroup_pedal_cross_contour_top", singleton=False, type="GeometryNodeTree" +) +def nodegroup_pedal_cross_contour_top(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + normal_2 = nw.new_node(Nodes.InputNormal) + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[("NodeSocketFloat", "Y", 0.0), ("NodeSocketFloat", "X", 0.0)], + ) + + combine_xyz_3 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": group_input.outputs["X"], "Y": group_input.outputs["Y"]}, + ) + + multiply = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: normal_2, 1: combine_xyz_3}, + attrs={"operation": "MULTIPLY"}, + ) + + index_1 = nw.new_node(Nodes.Index) + + greater_than = nw.new_node( + Nodes.Math, + input_kwargs={0: index_1, 1: 63.0}, + attrs={"operation": "GREATER_THAN"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Vector": multiply.outputs["Vector"], "Value": greater_than}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_pedal_cross_contour_bottom", singleton=False, type="GeometryNodeTree" +) +def nodegroup_pedal_cross_contour_bottom(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + normal = nw.new_node(Nodes.InputNormal) + + 
group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[("NodeSocketFloat", "Y", 0.0), ("NodeSocketFloat", "X", 0.0)], + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": group_input.outputs["X"], "Y": group_input.outputs["Y"]}, + ) + + multiply = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: normal, 1: combine_xyz}, + attrs={"operation": "MULTIPLY"}, + ) + + index = nw.new_node(Nodes.Index) + + less_than = nw.new_node( + Nodes.Math, input_kwargs={0: index, 1: 64.0}, attrs={"operation": "LESS_THAN"} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Vector": multiply.outputs["Vector"], "Value": less_than}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_trunk_radius_001", singleton=False, type="GeometryNodeTree" +) +def nodegroup_trunk_radius_001(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + random_value = nw.new_node(Nodes.RandomValue, input_kwargs={2: 0.01, 3: 0.05}) + + spline_parameter = nw.new_node(Nodes.SplineParameter) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": spline_parameter.outputs["Factor"], 3: 1.0, 4: 0.0}, + attrs={"clamp": False}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: spline_parameter.outputs["Factor"], 1: 10000.0}, + attrs={"operation": "MULTIPLY"}, + ) + + floor = nw.new_node( + Nodes.Math, input_kwargs={0: multiply}, attrs={"operation": "FLOOR"} + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply, 1: floor}, + attrs={"operation": "SUBTRACT"}, + ) + + float_curve = nw.new_node(Nodes.FloatCurve, input_kwargs={"Value": subtract}) + node_utils.assign_curve( + float_curve.mapping.curves[0], + [(0.0, 0.0156), (0.2545, 0.2), (0.5182, 0.0344), (0.7682, 0.2375), (1.0, 0.0)], + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: float_curve, 1: 1.0}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: map_range.outputs["Result"], 1: multiply_1}, + attrs={"operation": "MULTIPLY"}, + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: map_range.outputs["Result"], 1: multiply_2} + ) + + add_1 = nw.new_node(Nodes.Math, input_kwargs={0: random_value.outputs[1], 1: add}) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Value": add_1}) + + +@node_utils.to_nodegroup( + "nodegroup_coutour_cross_geometry", singleton=False, type="GeometryNodeTree" +) +def nodegroup_coutour_cross_geometry(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + curve_circle = nw.new_node( + Nodes.CurveCircle, input_kwargs={"Resolution": 128, "Radius": 0.05} + ) + + pedal_cross_coutour_x = nw.new_node(Nodes.Value, label="pedal_cross_coutour_x") + pedal_cross_coutour_x.outputs[0].default_value = 0.3 + + pedal_cross_contour_bottom = nw.new_node( + nodegroup_pedal_cross_contour_bottom().name, + input_kwargs={"X": pedal_cross_coutour_x}, + ) + + set_position_1 = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": curve_circle.outputs["Curve"], + "Selection": pedal_cross_contour_bottom.outputs["Value"], + "Offset": pedal_cross_contour_bottom.outputs["Vector"], + }, + ) + + pedal_cross_coutour_y = nw.new_node(Nodes.Value, label="pedal_cross_coutour_y") + pedal_cross_coutour_y.outputs[0].default_value = 0.3 + + pedal_cross_contour_top = nw.new_node( + nodegroup_pedal_cross_contour_top().name, + input_kwargs={"Y": pedal_cross_coutour_y, "X": pedal_cross_coutour_x}, + ) + + set_position_2 = nw.new_node( + Nodes.SetPosition, 
+ input_kwargs={ + "Geometry": set_position_1, + "Selection": pedal_cross_contour_top.outputs["Value"], + "Offset": pedal_cross_contour_top.outputs["Vector"], + }, + ) + + noise_texture_2 = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={"W": 7.0, "Detail": 15.0}, + attrs={"noise_dimensions": "4D"}, + ) + + scale = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: noise_texture_2.outputs["Fac"], "Scale": 0.0}, + attrs={"operation": "SCALE"}, + ) + + set_position_5 = nw.new_node( + Nodes.SetPosition, + input_kwargs={"Geometry": set_position_2, "Offset": scale.outputs["Vector"]}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_position_5} + ) + + +@node_utils.to_nodegroup( + "nodegroup_pedal_z_contour", singleton=False, type="GeometryNodeTree" +) +def nodegroup_pedal_z_contour(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + spline_parameter = nw.new_node(Nodes.SplineParameter) + + float_curve = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": spline_parameter.outputs["Factor"]} + ) + node_utils.assign_curve( + float_curve.mapping.curves[0], + [ + (0.0, 0.4094), + (0.1773, 0.475), + (0.3795, 0.5062), + (0.5864, 0.5187), + (0.7202, 0.5084), + (0.8636, 0.4781), + (1.0, 0.375), + ], + ) + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketFloat", "Value", 0.5)] + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: float_curve, 1: group_input.outputs["Value"]}, + attrs={"operation": "MULTIPLY"}, + ) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Value": multiply}) + + +@node_utils.to_nodegroup( + "nodegroup_pedal_stem_curvature", singleton=False, type="GeometryNodeTree" +) +def nodegroup_pedal_stem_curvature(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + position_3 = nw.new_node(Nodes.InputPosition) + + spline_parameter_1 = nw.new_node(Nodes.SplineParameter) + + float_curve_1 = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": spline_parameter_1.outputs["Factor"]} + ) + node_utils.assign_curve( + float_curve_1.mapping.curves[0], + [(0.0, 0.0688), (0.2545, 0.2281), (0.5023, 0.2563), (0.9773, 0.2656)], + ) + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketFloat", "Value", 0.2)] + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: float_curve_1, 1: group_input.outputs["Value"]}, + attrs={"operation": "MULTIPLY"}, + ) + + vector_rotate = nw.new_node( + Nodes.VectorRotate, + input_kwargs={ + "Vector": position_3, + "Center": (0.0, 0.0, 0.2), + "Angle": multiply, + }, + attrs={"rotation_type": "X_AXIS"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Vector": vector_rotate} + ) + + +@node_utils.to_nodegroup( + "nodegroup_node_group_002", singleton=False, type="ShaderNodeTree" +) +def nodegroup_node_group_002(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + texture_coordinate = nw.new_node(Nodes.TextureCoord) + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketColor", "Color", (0.8, 0.8, 0.8, 1.0)), + ("NodeSocketFloat", "attribute", 0.0), + ("NodeSocketFloat", "voronoi scale", 50.0), + ("NodeSocketFloatFactor", "voronoi randomness", 1.0), + ("NodeSocketFloat", "seed", 0.0), + ("NodeSocketFloat", "noise scale", 10.0), + ("NodeSocketFloat", "noise amount", 1.4), + ("NodeSocketFloat", "hue min", 0.6), + ("NodeSocketFloat", "hue max", 1.085), + ], + ) + + add = nw.new_node( + Nodes.VectorMath, + 
input_kwargs={ + 0: texture_coordinate.outputs["Object"], + 1: group_input.outputs["seed"], + }, + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Vector": add.outputs["Vector"], + "Scale": group_input.outputs["noise scale"], + "Detail": 1.0, + }, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: noise_texture.outputs["Fac"], + 1: group_input.outputs["noise amount"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + voronoi_texture = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={ + "W": group_input.outputs["attribute"], + "Scale": group_input.outputs["voronoi scale"], + "Randomness": group_input.outputs["voronoi randomness"], + }, + attrs={"voronoi_dimensions": "1D"}, + ) + + add_1 = nw.new_node( + Nodes.Math, input_kwargs={0: multiply, 1: voronoi_texture.outputs["Distance"]} + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": add_1, + 3: group_input.outputs["hue min"], + 4: group_input.outputs["hue max"], + }, + ) + + hue_saturation_value = nw.new_node( + "ShaderNodeHueSaturation", + input_kwargs={ + "Value": map_range.outputs["Result"], + "Color": group_input.outputs["Color"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Color": hue_saturation_value} + ) + + +@node_utils.to_nodegroup( + "nodegroup_tree_trunk_geometry_001", singleton=False, type="GeometryNodeTree" +) +def nodegroup_tree_trunk_geometry_001(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Curve", None)] + ) + + trunkradius_001 = nw.new_node(nodegroup_trunk_radius_001().name) + + set_curve_radius = nw.new_node( + Nodes.SetCurveRadius, + input_kwargs={"Curve": group_input.outputs["Curve"], "Radius": trunkradius_001}, + ) + + trunk_resolution = nw.new_node( + Nodes.Integer, label="TrunkResolution", attrs={"integer": 32} + ) + trunk_resolution.integer = 32 + + trunk_radius = nw.new_node(Nodes.Value, label="TrunkRadius") + trunk_radius.outputs[0].default_value = 0.02 + + curve_circle = nw.new_node( + Nodes.CurveCircle, + input_kwargs={"Resolution": trunk_resolution, "Radius": trunk_radius}, + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": set_curve_radius, + "Profile Curve": curve_circle.outputs["Curve"], + "Fill Caps": True, + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Mesh": curve_to_mesh, "Integer": trunk_resolution}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_truncated_leaf_selection", singleton=False, type="GeometryNodeTree" +) +def nodegroup_truncated_leaf_selection(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + index_3 = nw.new_node(Nodes.Index) + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketFloat", "Value", 0.5)] + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: 1600.0, 1: group_input.outputs["Value"]}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply, 1: uniform(0.98, 0.99)}, + attrs={"operation": "MULTIPLY"}, + ) + + greater_than = nw.new_node( + Nodes.Math, + input_kwargs={0: index_3, 1: multiply_1}, + attrs={"operation": "GREATER_THAN"}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply, 1: np.clip(normal(0.65, 0.2), 0.7, 0.5)}, + attrs={"operation": "MULTIPLY"}, + ) + + less_than = nw.new_node( + Nodes.Math, + input_kwargs={0: index_3, 1: 
multiply_2}, + attrs={"operation": "LESS_THAN"}, + ) + + op_or = nw.new_node( + Nodes.BooleanMath, + input_kwargs={0: greater_than, 1: less_than}, + attrs={"operation": "OR"}, + ) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Boolean": op_or}) + + +@node_utils.to_nodegroup( + "nodegroup_random_rotate", singleton=False, type="GeometryNodeTree" +) +def nodegroup_random_rotate(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + random_value_1 = nw.new_node(Nodes.RandomValue, input_kwargs={2: -0.2, 3: 0.2}) + + random_value_2 = nw.new_node( + Nodes.RandomValue, input_kwargs={2: -0.5, 3: 0.5, "Seed": 1} + ) + + random_value_3 = nw.new_node( + Nodes.RandomValue, input_kwargs={2: -0.2, 3: 0.2, "Seed": 3} + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": random_value_1.outputs[1], + "Y": random_value_2.outputs[1], + "Z": random_value_3.outputs[1], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Vector": combine_xyz_1} + ) + + +@node_utils.to_nodegroup( + "nodegroup_leaf_truncated_rotate", singleton=False, type="GeometryNodeTree" +) +def nodegroup_leaf_truncated_rotate(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + index_1 = nw.new_node(Nodes.Index) + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketFloat", "Value", 0.5)] + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["Value"], 1: 0.0} + ) + + modulo = nw.new_node( + Nodes.Math, input_kwargs={0: index_1, 1: add}, attrs={"operation": "MODULO"} + ) + + divide = nw.new_node( + Nodes.Math, input_kwargs={0: modulo, 1: add}, attrs={"operation": "DIVIDE"} + ) + + multiply = nw.new_node( + Nodes.Math, input_kwargs={0: divide, 1: 6.28}, attrs={"operation": "MULTIPLY"} + ) + + combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply}) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Vector": combine_xyz}) + + +@node_utils.to_nodegroup( + "nodegroup_truncated_leaf_stem", singleton=False, type="GeometryNodeTree" +) +def nodegroup_truncated_leaf_stem(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + curve_line = nw.new_node(Nodes.CurveLine, input_kwargs={"End": (0.0, 0.0, 0.15)}) + + integer = nw.new_node(Nodes.Integer, attrs={"integer": 64}) + integer.integer = 64 + + resample_curve_1 = nw.new_node( + Nodes.ResampleCurve, input_kwargs={"Curve": curve_line, "Count": integer} + ) + + pedal_stem_curvature_scale = nw.new_node( + Nodes.Value, label="pedal_stem_curvature_scale" + ) + pedal_stem_curvature_scale.outputs[0].default_value = 0.2 + + pedal_stem_curvature = nw.new_node( + nodegroup_pedal_stem_curvature().name, + input_kwargs={"Value": pedal_stem_curvature_scale}, + ) + + set_position_4 = nw.new_node( + Nodes.SetPosition, + input_kwargs={"Geometry": resample_curve_1, "Offset": pedal_stem_curvature}, + ) + + pedal_z_coutour_scale = nw.new_node(Nodes.Value, label="pedal_z_coutour_scale") + pedal_z_coutour_scale.outputs[0].default_value = uniform(0.2, 0.4) + + pedal_z_contour = nw.new_node( + nodegroup_pedal_z_contour().name, input_kwargs={"Value": pedal_z_coutour_scale} + ) + + set_curve_radius_1 = nw.new_node( + Nodes.SetCurveRadius, + input_kwargs={"Curve": set_position_4, "Radius": pedal_z_contour}, + ) + + coutour_cross_geometry = nw.new_node(nodegroup_coutour_cross_geometry().name) + + curve_to_mesh_1 = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": set_curve_radius_1, + 
"Profile Curve": coutour_cross_geometry, + "Fill Caps": True, + }, + ) + + set_material_2 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": curve_to_mesh_1, + "Material": surface.shaderfunc_to_material(shader_top_core), + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_material_2} + ) + + +@node_utils.to_nodegroup( + "nodegroup_trunk_radius", singleton=False, type="GeometryNodeTree" +) +def nodegroup_trunk_radius(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + random_value = nw.new_node(Nodes.RandomValue, input_kwargs={2: 0.01, 3: 0.05}) + + spline_parameter = nw.new_node(Nodes.SplineParameter) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": spline_parameter.outputs["Factor"], 3: 1.0, 4: 0.2}, + attrs={"clamp": False}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: spline_parameter.outputs["Factor"], 1: 10000.0}, + attrs={"operation": "MULTIPLY"}, + ) + + floor = nw.new_node( + Nodes.Math, input_kwargs={0: multiply}, attrs={"operation": "FLOOR"} + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply, 1: floor}, + attrs={"operation": "SUBTRACT"}, + ) + + float_curve = nw.new_node(Nodes.FloatCurve, input_kwargs={"Value": subtract}) + node_utils.assign_curve( + float_curve.mapping.curves[0], [(0.0, 0.0969), (0.5864, 0.1406), (1.0, 0.2906)] + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: float_curve, 1: uniform(0.1, 0.25)}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: map_range.outputs["Result"], 1: multiply_1}, + attrs={"operation": "MULTIPLY"}, + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: map_range.outputs["Result"], 1: multiply_2} + ) + + add_1 = nw.new_node(Nodes.Math, input_kwargs={0: random_value.outputs[1], 1: add}) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Value": add_1}) + + +@node_utils.to_nodegroup( + "nodegroup_tree_cracks", singleton=False, type="GeometryNodeTree" +) +def nodegroup_tree_cracks(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) + + spline_parameter = nw.new_node(Nodes.SplineParameter) + + capture_attribute = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + 2: spline_parameter.outputs["Length"], + }, + ) + + position = nw.new_node(Nodes.InputPosition) + + separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": position}) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: capture_attribute.outputs[2], 1: uniform(0.1, 0.25)}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": separate_xyz.outputs["X"], + "Y": separate_xyz.outputs["Y"], + "Z": multiply, + }, + ) + + voronoi_texture = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={"Vector": combine_xyz, "Scale": 400.0, "Randomness": 10.0}, + attrs={"voronoi_dimensions": "4D", "distance": "CHEBYCHEV"}, + ) + + colorramp = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": voronoi_texture.outputs["Distance"]} + ) + colorramp.color_ramp.elements[0].position = 0.6091 + colorramp.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) + colorramp.color_ramp.elements[1].position = 0.6818 + colorramp.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) + + normal = 
nw.new_node(Nodes.InputNormal) + + multiply_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: colorramp.outputs["Color"], 1: normal}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_2 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: multiply_1.outputs["Vector"], 1: (-0.01, -0.01, -0.01)}, + attrs={"operation": "MULTIPLY"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": capture_attribute.outputs["Geometry"], + "Vector": multiply_2.outputs["Vector"], + }, + ) + + +@node_utils.to_nodegroup( + "nodegroup_leaf_instance_selection_bottom_remove", + singleton=False, + type="GeometryNodeTree", +) +def nodegroup_leaf_instance_selection_bottom_remove(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + index_1 = nw.new_node(Nodes.Index) + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "Ring", 10.0), + ("NodeSocketFloat", "Segment", 0.5), + ], + ) + + divide = nw.new_node( + Nodes.Math, + input_kwargs={0: index_1, 1: group_input.outputs["Ring"]}, + attrs={"operation": "DIVIDE"}, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Segment"], 1: 4.0}, + attrs={"operation": "SUBTRACT"}, + ) + + greater_than = nw.new_node( + Nodes.Math, + input_kwargs={0: divide, 1: subtract}, + attrs={"operation": "GREATER_THAN"}, + ) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Value": greater_than}) + + +@node_utils.to_nodegroup( + "nodegroup_leaf_random_rotate", singleton=False, type="GeometryNodeTree" +) +def nodegroup_leaf_random_rotate(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + random_value_1 = nw.new_node(Nodes.RandomValue, input_kwargs={2: -0.4, 3: 0.4}) + + random_value_3 = nw.new_node(Nodes.RandomValue, input_kwargs={2: -0.4, 3: 0.4}) + + random_value_2 = nw.new_node(Nodes.RandomValue, input_kwargs={2: -0.6, 3: 0.6}) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": random_value_1.outputs[1], + "Y": random_value_3.outputs[1], + "Z": random_value_2.outputs[1], + }, + ) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Vector": combine_xyz}) + + +@node_utils.to_nodegroup( + "nodegroup_leaf_rotate_downward", singleton=False, type="GeometryNodeTree" +) +def nodegroup_leaf_rotate_downward(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + index = nw.new_node(Nodes.Index) + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketFloat", "Value", 0.5)] + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["Value"], 1: 0.0} + ) + + modulo = nw.new_node( + Nodes.Math, input_kwargs={0: index, 1: add}, attrs={"operation": "MODULO"} + ) + + divide = nw.new_node( + Nodes.Math, input_kwargs={0: modulo, 1: add}, attrs={"operation": "DIVIDE"} + ) + + multiply = nw.new_node( + Nodes.Math, input_kwargs={0: divide, 1: 6.28}, attrs={"operation": "MULTIPLY"} + ) + + add2 = nw.new_node( + Nodes.Math, input_kwargs={0: multiply.outputs["Value"], 1: -1.57} + ) + + combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": add2}) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Vector": combine_xyz}) + + +@node_utils.to_nodegroup( + "nodegroup_truncated_stem_geometry", singleton=False, type="GeometryNodeTree" +) +def nodegroup_truncated_stem_geometry(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + 
expose_input=[ + ("NodeSocketGeometry", "Points", None), + ("NodeSocketFloat", "Value1", 0.5), + ("NodeSocketFloat", "Value2", 0.5), + ], + ) + + truncated_leaf_stem = nw.new_node(nodegroup_truncated_leaf_stem().name) + + normal_1 = nw.new_node(Nodes.InputNormal) + + align_euler_to_vector_1 = nw.new_node( + Nodes.AlignEulerToVector, input_kwargs={"Vector": normal_1}, attrs={"axis": "Z"} + ) + + instance_on_points_2 = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={ + "Points": group_input.outputs["Points"], + "Instance": truncated_leaf_stem, + "Rotation": align_euler_to_vector_1, + }, + ) + + leaf_truncated_rotate = nw.new_node( + nodegroup_leaf_truncated_rotate().name, + input_kwargs={"Value": group_input.outputs[2]}, + ) + + rotate_instances_2 = nw.new_node( + Nodes.RotateInstances, + input_kwargs={ + "Instances": instance_on_points_2, + "Rotation": leaf_truncated_rotate, + }, + ) + + rotate_instances_3 = nw.new_node( + Nodes.RotateInstances, + input_kwargs={ + "Instances": rotate_instances_2, + "Rotation": (-0.9599, 0.0, 1.5708), + }, + ) + + random_rotate = nw.new_node(nodegroup_random_rotate().name) + + rotate_instances_4 = nw.new_node( + Nodes.RotateInstances, + input_kwargs={"Instances": rotate_instances_3, "Rotation": random_rotate}, + ) + + random_value_5 = nw.new_node(Nodes.RandomValue, input_kwargs={2: 0.6}) + + scale_instances_4 = nw.new_node( + Nodes.ScaleInstances, + input_kwargs={ + "Instances": rotate_instances_4, + "Scale": random_value_5.outputs[1], + }, + ) + + index_2 = nw.new_node(Nodes.Index) + + modulo = nw.new_node( + Nodes.Math, + input_kwargs={0: index_2, 1: randint(6, 10)}, + attrs={"operation": "MODULO"}, + ) + + scale_instances_3 = nw.new_node( + Nodes.ScaleInstances, + input_kwargs={ + "Instances": scale_instances_4, + "Selection": modulo, + "Scale": (0.0, 0.0, 0.0), + }, + ) + + truncated_leaf_selection = nw.new_node( + nodegroup_truncated_leaf_selection().name, + input_kwargs={"Value": group_input.outputs["Value1"]}, + ) + + scale_instances_5 = nw.new_node( + Nodes.ScaleInstances, + input_kwargs={ + "Instances": scale_instances_3, + "Selection": truncated_leaf_selection, + "Scale": (0.0, 0.0, 0.0), + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Instances": scale_instances_5} + ) + + +@node_utils.to_nodegroup( + "nodegroup_tree_trunk_geometry", singleton=False, type="GeometryNodeTree" +) +def nodegroup_tree_trunk_geometry(nw: NodeWrangler, radius): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Curve", None)] + ) + + trunkradius = nw.new_node(nodegroup_trunk_radius().name) + + set_curve_radius = nw.new_node( + Nodes.SetCurveRadius, + input_kwargs={"Curve": group_input.outputs["Curve"], "Radius": trunkradius}, + ) + + treecracks = nw.new_node( + nodegroup_tree_cracks().name, input_kwargs={"Geometry": set_curve_radius} + ) + + trunk_resolution = nw.new_node( + Nodes.Integer, label="TrunkResolution", attrs={"integer": 32} + ) + trunk_resolution.integer = 32 + + trunk_radius = nw.new_node(Nodes.Value, label="TrunkRadius") + trunk_radius.outputs[0].default_value = radius + + curve_circle = nw.new_node( + Nodes.CurveCircle, + input_kwargs={"Resolution": trunk_resolution, "Radius": trunk_radius}, + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": treecracks.outputs["Geometry"], + "Profile Curve": curve_circle.outputs["Curve"], + "Fill Caps": True, + }, + ) + + subdivide_mesh = nw.new_node( + 
Nodes.SubdivideMesh, input_kwargs={"Mesh": curve_to_mesh, "Level": 2} + ) + + set_position_1 = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": subdivide_mesh, + "Offset": treecracks.outputs["Vector"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": set_position_1, + "Integer": trunk_resolution, + "Mesh": curve_to_mesh, + }, + ) + + +@node_utils.to_nodegroup( + "nodegroup_leaf_on_top", singleton=False, type="GeometryNodeTree" +) +def nodegroup_leaf_on_top(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Points", None), + ("NodeSocketFloat", "Value", 0.5), + ("NodeSocketFloat", "Ring", 10.0), + ("NodeSocketFloat", "Segment", 0.5), + ("NodeSocketGeometry", "Instance", None), + ], + ) + + normal = nw.new_node(Nodes.InputNormal) + + align_euler_to_vector = nw.new_node( + Nodes.AlignEulerToVector, input_kwargs={"Vector": normal}, attrs={"axis": "Z"} + ) + + instance_on_points_1 = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={ + "Points": group_input.outputs["Points"], + "Instance": group_input.outputs["Instance"], + "Rotation": align_euler_to_vector, + }, + ) + + leafrotatedownward = nw.new_node( + nodegroup_leaf_rotate_downward().name, + input_kwargs={"Value": group_input.outputs["Value"]}, + ) + + rotate_instances = nw.new_node( + Nodes.RotateInstances, + input_kwargs={ + "Instances": instance_on_points_1, + "Rotation": leafrotatedownward, + }, + ) + + leafrandomrotate = nw.new_node(nodegroup_leaf_random_rotate().name) + + rotate_instances_1 = nw.new_node( + Nodes.RotateInstances, + input_kwargs={"Instances": rotate_instances, "Rotation": leafrandomrotate}, + ) + + random_value_4 = nw.new_node(Nodes.RandomValue, input_kwargs={2: 0.5, 3: 1.0}) + + scale_instances_2 = nw.new_node( + Nodes.ScaleInstances, + input_kwargs={ + "Instances": rotate_instances_1, + "Scale": random_value_4.outputs[1], + }, + ) + + leafinstanceselectionbottomremove = nw.new_node( + nodegroup_leaf_instance_selection_bottom_remove().name, + input_kwargs={ + "Ring": group_input.outputs["Ring"], + "Segment": group_input.outputs["Segment"], + }, + ) + + scale_instances = nw.new_node( + Nodes.ScaleInstances, + input_kwargs={ + "Instances": scale_instances_2, + "Selection": leafinstanceselectionbottomremove, + "Scale": (0.0, 0.0, 0.0), + }, + ) + + random_value = nw.new_node( + Nodes.RandomValue, input_kwargs={5: 1}, attrs={"data_type": "INT"} + ) + + scale_instances_1 = nw.new_node( + Nodes.ScaleInstances, + input_kwargs={ + "Instances": scale_instances, + "Selection": random_value.outputs[2], + "Scale": (0.0, 0.0, 0.0), + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Instances": scale_instances_1} + ) + + +def shader_top_core(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + texture_coordinate = nw.new_node(Nodes.TextureCoord) + + mapping = nw.new_node( + Nodes.Mapping, + input_kwargs={ + "Vector": texture_coordinate.outputs["Object"], + "Scale": (1.0, 1.0, 0.1), + }, + ) + + voronoi_texture = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={"Vector": mapping, "Scale": uniform(100, 400)}, + ) + + mapping_1 = nw.new_node( + Nodes.Mapping, input_kwargs={"Vector": texture_coordinate.outputs["Object"]} + ) + + wave_texture = nw.new_node( + Nodes.WaveTexture, + input_kwargs={ + "Vector": mapping_1, + "Scale": 2.0, + "Distortion": 5.0, + "Detail": 10.0, + }, + ) + + mix = 
nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": 0.4, + "Color1": voronoi_texture.outputs["Distance"], + "Color2": wave_texture.outputs["Color"], + }, + ) + + d_hsv = (uniform(0.02, 0.05), uniform(0.3, 0.6), uniform(0.01, 0.05)) + b_hsv = d_hsv[:1] + (uniform(0.6, 0.9), uniform(0.3, 0.6)) + colorramp = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": mix}) + colorramp.color_ramp.elements[0].position = 0.2409 + colorramp.color_ramp.elements[0].color = hsv2rgba(d_hsv) + colorramp.color_ramp.elements[1].position = 0.6045 + colorramp.color_ramp.elements[1].color = hsv2rgba(b_hsv) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": colorramp.outputs["Color"], + "Roughness": colorramp.outputs["Alpha"], + }, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf} + ) + + +def shader_trunk(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + texture_coordinate = nw.new_node(Nodes.TextureCoord) + + mapping = nw.new_node( + Nodes.Mapping, input_kwargs={"Vector": texture_coordinate.outputs["Object"]} + ) + + voronoi_texture_1 = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={"Vector": mapping, "Scale": 20.0}, + attrs={"voronoi_dimensions": "4D"}, + ) + + wave_texture = nw.new_node( + Nodes.WaveTexture, + input_kwargs={ + "Vector": mapping, + "Scale": uniform(1.0, 3.0), + "Distortion": 5.0, + "Detail Scale": 3.0, + }, + attrs={"bands_direction": "Z"}, + ) + + mix_1 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Color1": voronoi_texture_1.outputs["Distance"], + "Color2": wave_texture.outputs["Color"], + }, + ) + + d_hsv = ( + uniform(0.02, 0.05), + uniform(0.01, 0.05) if randint(0, 2) == 1 else uniform(0.5, 0.8), + uniform(0.03, 0.09), + ) + b_hsv = d_hsv[:-1] + (uniform(0.1, 0.3),) + colorramp = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": mix_1}) + colorramp.color_ramp.elements[0].position = 0.4682 + colorramp.color_ramp.elements[0].color = hsv2rgba(d_hsv) + colorramp.color_ramp.elements[1].position = 0.5591 + colorramp.color_ramp.elements[1].color = hsv2rgba(b_hsv) + + mapping_1 = nw.new_node( + Nodes.Mapping, + input_kwargs={ + "Vector": texture_coordinate.outputs["Object"], + "Scale": (10.0, 10.0, 0.2), + }, + ) + + voronoi_texture = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={"Vector": mapping_1, "Scale": 100.0, "Randomness": 10.0}, + attrs={"voronoi_dimensions": "4D", "distance": "CHEBYCHEV"}, + ) + + colorramp_1 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": voronoi_texture.outputs["Distance"]} + ) + colorramp_1.color_ramp.elements[0].position = 0.2818 + colorramp_1.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) + colorramp_1.color_ramp.elements[1].position = 0.3045 + colorramp_1.color_ramp.elements[1].color = (0.5284, 0.5034, 0.4327, 1.0) + + mix = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": uniform(0.1, 0.3), + "Color1": colorramp.outputs["Color"], + "Color2": colorramp_1.outputs["Color"], + }, + ) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": mix, + "Roughness": voronoi_texture.outputs["Distance"], + "Specular": 0, + }, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf} + ) + + +@gin.configurable +def geometry_palm_tree_nodes(nw: NodeWrangler, truncatedstem_chance=0.4, **kwargs): + # Code generated using version 2.4.3 of the node_transpiler + + leaf = kwargs["leaf"][0] + radius = kwargs["trunk_radius"] + + trunk_height = 
nw.new_node(Nodes.Value, label="trunk_height") + trunk_height.outputs[0].default_value = 5.0 + + top_x, top_y = np.random.normal(0.0, 0.5), np.random.normal(0.0, 0.5) + combine_xyz_2 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": top_x, "Y": top_y, "Z": trunk_height} + ) + + quadratic_bezier = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Start": (0.0, 0.0, 0.0), + "Middle": ( + top_x / uniform(1.0, 2.0), + top_y / uniform(1.0, 2.0), + uniform(1.5, 3.0), + ), + "End": combine_xyz_2, + }, + ) + + resample_curve = nw.new_node( + Nodes.ResampleCurve, + input_kwargs={"Curve": quadratic_bezier, "Length": 0.02}, + attrs={"mode": "LENGTH"}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, input_kwargs={"Geometry": resample_curve} + ) + + endpoint_selection = nw.new_node( + "GeometryNodeCurveEndpointSelection", input_kwargs={"Start Size": 0} + ) + + top_segment = nw.new_node(Nodes.Integer, label="TopSegment", attrs={"integer": 12}) + top_segment.integer = randint(8, 14) + + top_ring = nw.new_node(Nodes.Integer, label="TopRing", attrs={"integer": 8}) + top_ring.integer = randint(10, 15) + + uv_sphere = nw.new_node( + Nodes.MeshUVSphere, + input_kwargs={ + "Segments": top_segment, + "Rings": top_ring, + "Radius": uniform(0.15, 0.2), + }, + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": uv_sphere, "Scale": (1.0, 1.0, uniform(0.8, 2.0))}, + ) + + set_material_1 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": transform, + "Material": surface.shaderfunc_to_material(shader_trunk), + }, + ) + + value = nw.new_node(Nodes.Value) + value.outputs[0].default_value = 0.2 + + object_info = nw.new_node(Nodes.ObjectInfo, input_kwargs={"Object": leaf}) + + leafontop = nw.new_node( + nodegroup_leaf_on_top().name, + input_kwargs={ + "Points": transform, + "Value": top_segment, + "Ring": top_segment, + "Segment": top_ring, + "Instance": object_info.outputs["Geometry"], + }, + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [set_material_1, leafontop]} + ) + + instance_on_points = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={ + "Points": set_position, + "Selection": endpoint_selection, + "Instance": join_geometry_1, + }, + ) + + treetrunkgeometry = nw.new_node( + nodegroup_tree_trunk_geometry(radius=radius).name, + input_kwargs={"Curve": set_position}, + ) + + set_material = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": treetrunkgeometry.outputs["Geometry"], + "Material": surface.shaderfunc_to_material(shader_trunk), + }, + ) + + truncatedstemgeometry = nw.new_node( + nodegroup_truncated_stem_geometry().name, + input_kwargs={ + "Points": treetrunkgeometry.outputs["Mesh"], + 1: trunk_height, + 2: treetrunkgeometry.outputs["Integer"], + }, + ) + + geos = [instance_on_points, set_material] + if uniform(0.0, 1.0) < truncatedstem_chance: + geos.append(truncatedstemgeometry) + join_geometry = nw.new_node(Nodes.JoinGeometry, input_kwargs={"Geometry": geos}) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": join_geometry} + ) + + +class PalmTreeFactory(AssetFactory): + def __init__(self, factory_seed, coarse=False): + super(PalmTreeFactory, self).__init__(factory_seed, coarse=coarse) + + def create_asset(self, params={}, **kwargs): + bpy.ops.mesh.primitive_plane_add( + size=1, + enter_editmode=False, + align="WORLD", + location=(0, 0, 0), + scale=(1, 1, 1), + ) + obj = bpy.context.active_object + + # Make the Leaf and Delete It Later + lf_seed = randint(0, 
1000, size=(1,))[0] + leaf_model = LeafPalmPlantFactory(factory_seed=lf_seed) + p = { + "leaf_x_curvature": uniform(0.1, 0.3), + "plant_z_rotate": uniform(0.0, 0.02), + "stem_x_curvature": 0.0, + "stem_y_curvature": uniform(-0.1, 0.1), + "plant_stem_length": uniform(0.5, 1.2), + } + leaf = leaf_model.create_asset(p) + params["leaf"] = [leaf] + params["trunk_radius"] = uniform(0.2, 0.3) + + surface.add_geomod( + obj, + geometry_palm_tree_nodes, + selection=None, + attributes=[], + input_kwargs=params, + ) + butil.delete([leaf]) + with butil.SelectObjects(obj): + bpy.ops.object.material_slot_remove() + bpy.ops.object.shade_flat() + obj.scale = (2, 2, 2) + return obj diff --git a/infinigen/assets/objects/tropic_plants/tropic_plant_utils.py b/infinigen/assets/objects/tropic_plants/tropic_plant_utils.py new file mode 100644 index 000000000..50713010e --- /dev/null +++ b/infinigen/assets/objects/tropic_plants/tropic_plant_utils.py @@ -0,0 +1,1246 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Beining Han + + +from numpy.random import uniform + +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.util.color import hsv2rgba + + +@node_utils.to_nodegroup( + "nodegroup_node_group", singleton=False, type="GeometryNodeTree" +) +def nodegroup_node_group(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "Coord", 0.0), + ("NodeSocketFloat", "Shape", 0.5), + ("NodeSocketFloat", "Density", 0.5), + ("NodeSocketFloat", "Random Scale Seed", 0.5), + ], + ) + + vein = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={ + "W": group_input.outputs["Coord"], + "Scale": group_input.outputs["Density"], + "Randomness": 0.2, + }, + label="Vein", + attrs={"voronoi_dimensions": "1D"}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["Density"], + 1: group_input.outputs["Random Scale Seed"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + vein_1 = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={"W": group_input.outputs["Coord"], "Scale": multiply}, + label="Vein", + attrs={"voronoi_dimensions": "1D"}, + ) + + add = nw.new_node(Nodes.Math, input_kwargs={0: vein_1.outputs["Distance"], 1: 0.35}) + + round = nw.new_node(Nodes.Math, input_kwargs={0: add}, attrs={"operation": "ROUND"}) + + add_1 = nw.new_node( + Nodes.Math, input_kwargs={0: vein.outputs["Distance"], 1: round} + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": add_1, 2: 0.02, 3: 0.95, 4: 0.0} + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["Shape"], + 1: map_range_1.outputs["Result"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + map_range_2 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": multiply_1, 1: 0.001, 2: 0.005, 3: 1.0, 4: 0.0}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Result": map_range_2.outputs["Result"]} + ) + + +@node_utils.to_nodegroup( + "nodegroup_nodegroup_vein_coord_001", singleton=False, type="GeometryNodeTree" +) +def nodegroup_nodegroup_vein_coord_001(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "X Modulated", 0.5), + 
("NodeSocketFloat", "Y", 0.5), + ("NodeSocketFloat", "Vein Asymmetry", 0.0), + ("NodeSocketFloat", "Vein Angle", 2.0), + ("NodeSocketFloat", "Leaf Shape", 0.0), + ], + ) + + sign = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["X Modulated"]}, + attrs={"operation": "SIGN"}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Vein Asymmetry"], 1: sign}, + attrs={"operation": "MULTIPLY"}, + ) + + map_range = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": group_input.outputs["Y"], 1: -1.0} + ) + + vein_shape = nw.new_node( + Nodes.FloatCurve, + input_kwargs={"Value": group_input.outputs["X Modulated"]}, + label="Vein Shape", + ) + node_utils.assign_curve( + vein_shape.mapping.curves[0], + [(0.0, 0.0), (0.0182, 0.05), (0.3364, 0.2386), (0.7227, 0.75), (1.0, 1.0)], + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": vein_shape, 4: 1.9} + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: map_range_1.outputs["Result"], + 1: group_input.outputs["Vein Angle"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: map_range.outputs["Result"], 1: multiply_1}, + attrs={"operation": "MULTIPLY"}, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_2, 1: group_input.outputs["Y"]}, + attrs={"operation": "SUBTRACT"}, + ) + + add = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: subtract}) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Vein Coord": add}) + + +@node_utils.to_nodegroup( + "nodegroup_nodegroup_shape_with_jigsaw", singleton=False, type="GeometryNodeTree" +) +def nodegroup_nodegroup_shape_with_jigsaw(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "Midrib Value", 1.0), + ("NodeSocketFloat", "Vein Coord", 0.0), + ("NodeSocketFloat", "Leaf Shape", 0.5), + ("NodeSocketFloat", "Jigsaw Scale", 18.0), + ("NodeSocketFloat", "Jigsaw Depth", 0.5), + ], + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": group_input.outputs["Midrib Value"], 3: 1.0, 4: 0.0}, + ) + + jigsaw = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={ + "W": group_input.outputs["Vein Coord"], + "Scale": group_input.outputs["Jigsaw Scale"], + }, + label="Jigsaw", + attrs={"voronoi_dimensions": "1D"}, + ) + + colorramp = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": jigsaw.outputs["Distance"]} + ) + colorramp.color_ramp.elements[0].position = 0.4795 + colorramp.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) + colorramp.color_ramp.elements[1].position = 0.5545 + colorramp.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Jigsaw Depth"], 1: 0.0}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_add = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: colorramp.outputs["Color"], + 1: multiply, + 2: group_input.outputs["Leaf Shape"], + }, + attrs={"operation": "MULTIPLY_ADD"}, + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": multiply_add, 1: 0.001, 2: 0.002, 3: 1.0, 4: 0.0}, + ) + + maximum = nw.new_node( + Nodes.Math, + input_kwargs={0: map_range.outputs["Result"], 1: map_range_1.outputs["Result"]}, + attrs={"operation": "MAXIMUM"}, + ) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Value": maximum}) + + +@node_utils.to_nodegroup( + 
"nodegroup_nodegroup_vein_coord_003", singleton=False, type="GeometryNodeTree" +) +def nodegroup_nodegroup_vein_coord_003(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "X Modulated", 0.5), + ("NodeSocketFloat", "Y", 0.5), + ("NodeSocketFloat", "Vein Asymmetry", 0.0), + ("NodeSocketFloat", "Vein Angle", 2.0), + ("NodeSocketFloat", "Leaf Shape", 0.0), + ], + ) + + sign = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["X Modulated"]}, + attrs={"operation": "SIGN"}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Vein Asymmetry"], 1: sign}, + attrs={"operation": "MULTIPLY"}, + ) + + map_range = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": group_input.outputs["Y"], 1: -1.0} + ) + + absolute = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["X Modulated"]}, + attrs={"operation": "ABSOLUTE", "use_clamp": True}, + ) + + divide = nw.new_node( + Nodes.Math, + input_kwargs={0: absolute, 1: group_input.outputs["Leaf Shape"]}, + attrs={"operation": "DIVIDE", "use_clamp": True}, + ) + + vein_shape = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": divide}, label="Vein Shape" + ) + node_utils.assign_curve( + vein_shape.mapping.curves[0], + [ + (0.0, 0.0), + (0.0182, 0.05), + (0.2909, 0.2199), + (0.4182, 0.3063), + (0.7045, 0.3), + (1.0, 0.8562), + ], + handles=["AUTO", "AUTO", "AUTO", "VECTOR", "AUTO", "AUTO"], + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": vein_shape, 4: 1.9} + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: map_range_1.outputs["Result"], + 1: group_input.outputs["Vein Angle"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: map_range.outputs["Result"], 1: multiply_1}, + attrs={"operation": "MULTIPLY"}, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_2, 1: group_input.outputs["Y"]}, + attrs={"operation": "SUBTRACT"}, + ) + + add = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: subtract}) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Vein Coord": add}) + + +@node_utils.to_nodegroup( + "nodegroup_nodegroup_vein_coord", singleton=False, type="GeometryNodeTree" +) +def nodegroup_nodegroup_vein_coord(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "X Modulated", 0.5), + ("NodeSocketFloat", "Y", 0.5), + ("NodeSocketFloat", "Vein Asymmetry", 0.0), + ("NodeSocketFloat", "Vein Angle", 2.0), + ("NodeSocketFloat", "Leaf Shape", 0.0), + ], + ) + + sign = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["X Modulated"]}, + attrs={"operation": "SIGN"}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Vein Asymmetry"], 1: sign}, + attrs={"operation": "MULTIPLY"}, + ) + + map_range = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": group_input.outputs["Y"], 1: -1.0} + ) + + absolute = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["X Modulated"]}, + attrs={"operation": "ABSOLUTE", "use_clamp": True}, + ) + + divide = nw.new_node( + Nodes.Math, + input_kwargs={0: absolute, 1: group_input.outputs["Leaf Shape"]}, + attrs={"operation": "DIVIDE", "use_clamp": True}, + ) + + vein_shape = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": 
divide}, label="Vein Shape" + ) + node_utils.assign_curve( + vein_shape.mapping.curves[0], + [ + (0.0, 0.0), + (0.0182, 0.05), + (0.3364, 0.2386), + (0.6045, 0.4812), + (0.7, 0.725), + (0.8273, 0.8437), + (1.0, 1.0), + ], + handles=["AUTO", "AUTO", "AUTO", "VECTOR", "AUTO", "AUTO", "AUTO"], + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": vein_shape, 4: 1.9} + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: map_range_1.outputs["Result"], + 1: group_input.outputs["Vein Angle"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: map_range.outputs["Result"], 1: multiply_1}, + attrs={"operation": "MULTIPLY"}, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_2, 1: group_input.outputs["Y"]}, + attrs={"operation": "SUBTRACT"}, + ) + + add = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: subtract}) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Vein Coord": add}) + + +@node_utils.to_nodegroup( + "nodegroup_nodegroup_vein_coord_002", singleton=False, type="GeometryNodeTree" +) +def nodegroup_nodegroup_vein_coord_002(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "X Modulated", 0.5), + ("NodeSocketFloat", "Y", 0.5), + ("NodeSocketFloat", "Vein Asymmetry", 0.0), + ("NodeSocketFloat", "Vein Angle", 2.0), + ("NodeSocketFloat", "Leaf Shape", 0.0), + ], + ) + + sign = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["X Modulated"]}, + attrs={"operation": "SIGN"}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Vein Asymmetry"], 1: sign}, + attrs={"operation": "MULTIPLY"}, + ) + + map_range = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": group_input.outputs["Y"], 1: -1.0} + ) + + absolute = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["X Modulated"]}, + attrs={"operation": "ABSOLUTE", "use_clamp": True}, + ) + + divide = nw.new_node( + Nodes.Math, + input_kwargs={0: absolute, 1: group_input.outputs["Leaf Shape"]}, + attrs={"operation": "DIVIDE", "use_clamp": True}, + ) + + vein_shape = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": divide}, label="Vein Shape" + ) + node_utils.assign_curve( + vein_shape.mapping.curves[0], + [(0.0, 0.0), (0.0182, 0.05), (0.3364, 0.2386), (0.8091, 0.7312), (1.0, 0.9937)], + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": vein_shape, 4: 1.9} + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: map_range_1.outputs["Result"], + 1: group_input.outputs["Vein Angle"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: map_range.outputs["Result"], 1: multiply_1}, + attrs={"operation": "MULTIPLY"}, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_2, 1: group_input.outputs["Y"]}, + attrs={"operation": "SUBTRACT"}, + ) + + add = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: subtract}) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Vein Coord": add}) + + +@node_utils.to_nodegroup( + "nodegroup_nodegroup_shape", singleton=False, type="GeometryNodeTree" +) +def nodegroup_nodegroup_shape(nw: NodeWrangler, leaf_contour_control_points=None): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + 
("NodeSocketFloat", "X Modulated", 0.0), + ("NodeSocketFloat", "Y", 0.0), + ("NodeSocketFloat", "scale", 0.0), + ], + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": group_input.outputs["X Modulated"], + "Y": group_input.outputs["Y"], + }, + ) + + clamp = nw.new_node( + Nodes.Clamp, + input_kwargs={"Value": group_input.outputs["Y"], "Min": -0.6, "Max": 0.6}, + ) + + combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Y": clamp}) + + subtract = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: combine_xyz, 1: combine_xyz_1}, + attrs={"operation": "SUBTRACT"}, + ) + + length = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: subtract.outputs["Vector"]}, + attrs={"operation": "LENGTH"}, + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": group_input.outputs["Y"], 1: -0.6, 2: 0.6}, + ) + + leaf_shape = nw.new_node( + Nodes.FloatCurve, + input_kwargs={"Value": map_range.outputs["Result"]}, + label="Leaf shape", + ) + + if leaf_contour_control_points is not None: + node_utils.assign_curve( + leaf_shape.mapping.curves[0], + [ + (0.0, 0.0), + (0.1, leaf_contour_control_points[0]), + (0.25, leaf_contour_control_points[1]), + (0.4, leaf_contour_control_points[2]), + (0.55, leaf_contour_control_points[3]), + (0.7, leaf_contour_control_points[4]), + (0.85, leaf_contour_control_points[5]), + (1.0, 0.0), + ], + ) + else: + node_utils.assign_curve( + leaf_shape.mapping.curves[0], + [ + (0.0, 0.0), + (0.15, 0.25), + (0.3818, 0.35), + (0.6273, 0.3625), + (0.7802, 0.2957), + (0.8955, 0.2), + (1.0, 0.0), + ], + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: leaf_shape, 1: group_input.outputs["scale"]}, + attrs={"operation": "MULTIPLY"}, + ) + + subtract_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: length.outputs["Value"], 1: multiply}, + attrs={"operation": "SUBTRACT"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Leaf Shape": subtract_1, "Value": multiply} + ) + + +@node_utils.to_nodegroup( + "nodegroup_nodegroup_leaf_gen", singleton=False, type="GeometryNodeTree" +) +def nodegroup_nodegroup_leaf_gen(nw: NodeWrangler, leaf_contour_control_points=None): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Mesh", None), + ("NodeSocketFloat", "Displancement scale", 0.5), + ("NodeSocketFloat", "Vein Asymmetry", 0.0), + ("NodeSocketFloat", "Vein Density", 6.0), + ("NodeSocketFloat", "Jigsaw Scale", 18.0), + ("NodeSocketFloat", "Jigsaw Depth", 0.07), + ("NodeSocketFloat", "Vein Angle", 1.0), + ("NodeSocketFloat", "Sub-vein Displacement", 0.5), + ("NodeSocketFloat", "Sub-vein Scale", 50.0), + ("NodeSocketFloat", "Wave Displacement", 0.1), + ("NodeSocketFloat", "Midrib Length", 0.4), + ("NodeSocketFloat", "Midrib Width", 1.0), + ("NodeSocketFloat", "Stem Length", 0.8), + ("NodeSocketFloat", "Leaf Width Scale", 0.0), + ], + ) + + position = nw.new_node(Nodes.InputPosition) + + separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": position}) + + nodegroup_midrib = nw.new_node( + nodegroup_nodegroup_midrib().name, + input_kwargs={ + "X": separate_xyz.outputs["X"], + "Y": separate_xyz.outputs["Y"], + "Midrib Length": group_input.outputs["Midrib Length"], + "Midrib Width": group_input.outputs["Midrib Width"], + "Stem Length": group_input.outputs["Stem Length"], + }, + ) + + nodegroup_shape = nw.new_node( + nodegroup_nodegroup_shape(leaf_contour_control_points).name, + input_kwargs={ + "X 
Modulated": nodegroup_midrib.outputs["X Modulated"], + "Y": separate_xyz.outputs["Y"], + "scale": group_input.outputs["Leaf Width Scale"], + }, + ) + + nodegroup_vein_coord_002 = nw.new_node( + nodegroup_nodegroup_vein_coord_002().name, + input_kwargs={ + "X Modulated": nodegroup_midrib.outputs["X Modulated"], + "Y": separate_xyz.outputs["Y"], + "Vein Asymmetry": group_input.outputs["Vein Asymmetry"], + "Vein Angle": group_input.outputs["Vein Angle"], + "Leaf Shape": nodegroup_shape.outputs["Value"], + }, + ) + + nodegroup_vein_coord = nw.new_node( + nodegroup_nodegroup_vein_coord().name, + input_kwargs={ + "X Modulated": nodegroup_midrib.outputs["X Modulated"], + "Y": separate_xyz.outputs["Y"], + "Vein Asymmetry": group_input.outputs["Vein Asymmetry"], + "Vein Angle": group_input.outputs["Vein Angle"], + "Leaf Shape": nodegroup_shape.outputs["Value"], + }, + ) + + nodegroup_vein_coord_003 = nw.new_node( + nodegroup_nodegroup_vein_coord_003().name, + input_kwargs={ + "X Modulated": nodegroup_midrib.outputs["X Modulated"], + "Y": separate_xyz.outputs["Y"], + "Vein Asymmetry": group_input.outputs["Vein Asymmetry"], + "Vein Angle": group_input.outputs["Vein Angle"], + "Leaf Shape": nodegroup_shape.outputs["Value"], + }, + ) + + nodegroup_apply_vein_midrib = nw.new_node( + nodegroup_nodegroup_apply_vein_midrib().name, + input_kwargs={ + "Midrib Value": nodegroup_midrib.outputs["Midrib Value"], + "Leaf Shape": nodegroup_shape.outputs["Leaf Shape"], + "Vein Density": group_input.outputs["Vein Density"], + "Vein Coord - main": nodegroup_vein_coord_002, + "Vein Coord - 1": nodegroup_vein_coord, + "Vein Coord - 2": nodegroup_vein_coord_003, + }, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["Displancement scale"], + 1: nodegroup_apply_vein_midrib, + }, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply}) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={"Geometry": group_input.outputs["Mesh"], "Offset": combine_xyz}, + ) + + nodegroup_shape_with_jigsaw = nw.new_node( + nodegroup_nodegroup_shape_with_jigsaw().name, + input_kwargs={ + "Midrib Value": nodegroup_midrib.outputs["Midrib Value"], + "Vein Coord": nodegroup_vein_coord_002, + "Leaf Shape": nodegroup_shape.outputs["Leaf Shape"], + "Jigsaw Scale": group_input.outputs["Jigsaw Scale"], + "Jigsaw Depth": group_input.outputs["Jigsaw Depth"], + }, + ) + + less_than = nw.new_node( + Nodes.Compare, + input_kwargs={0: nodegroup_shape_with_jigsaw, 1: 0.5}, + attrs={"operation": "LESS_THAN"}, + ) + + delete_geometry = nw.new_node( + Nodes.DeleteGeometry, + input_kwargs={"Geometry": set_position, "Selection": less_than}, + ) + + capture_attribute = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={"Geometry": delete_geometry, 2: nodegroup_apply_vein_midrib}, + ) + + position_1 = nw.new_node(Nodes.InputPosition) + + separate_xyz_1 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": position_1}) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": separate_xyz_1.outputs["Y"], 1: -0.6, 2: 0.6}, + ) + + float_curve_1 = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": map_range_1.outputs["Result"]} + ) + node_utils.assign_curve( + float_curve_1.mapping.curves[0], [(0.0, 0.0), (0.5182, 1.0), (1.0, 1.0)] + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": nodegroup_shape.outputs["Leaf Shape"], 2: -1.0}, + ) + + float_curve = nw.new_node( + Nodes.FloatCurve, 
input_kwargs={"Value": map_range.outputs["Result"]} + ) + node_utils.assign_curve( + float_curve.mapping.curves[0], + [(0.0045, 0.0063), (0.0409, 0.0375), (0.4182, 0.05), (1.0, 0.0)], + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: float_curve_1, 1: float_curve}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_1, 1: 0.7}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply_2}) + + set_position_1 = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": capture_attribute.outputs["Geometry"], + "Offset": combine_xyz_1, + }, + ) + + nodegroup_vein_coord_001 = nw.new_node( + nodegroup_nodegroup_vein_coord_001().name, + input_kwargs={ + "X Modulated": nodegroup_midrib.outputs["X Modulated"], + "Y": separate_xyz.outputs["Y"], + "Vein Asymmetry": group_input.outputs["Vein Asymmetry"], + "Vein Angle": group_input.outputs["Vein Angle"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Mesh": set_position_1, + "Attribute": capture_attribute.outputs[2], + "X Modulated": nodegroup_midrib.outputs["X Modulated"], + "Vein Coord": nodegroup_vein_coord_001, + "Vein Value": nodegroup_apply_vein_midrib, + }, + ) + + +@node_utils.to_nodegroup( + "nodegroup_nodegroup_midrib", singleton=False, type="GeometryNodeTree" +) +def nodegroup_nodegroup_midrib(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "X", 0.5), + ("NodeSocketFloat", "Y", -0.6), + ("NodeSocketFloat", "Midrib Length", 0.4), + ("NodeSocketFloat", "Midrib Width", 1.0), + ("NodeSocketFloat", "Stem Length", 0.8), + ], + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": group_input.outputs["Y"], 1: -0.6, 2: 0.6}, + ) + + stem_shape = nw.new_node( + Nodes.FloatCurve, + input_kwargs={"Value": map_range.outputs["Result"]}, + label="Stem shape", + ) + node_utils.assign_curve( + stem_shape.mapping.curves[0], + [ + (0.0, 0.5), + (0.25, 0.4828), + (0.5, 0.4938), + (0.75, 0.503), + (0.8773, 0.5125), + (1.0, 0.5), + ], + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": stem_shape, 3: -1.0} + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: map_range_1.outputs["Result"], 1: group_input.outputs["X"]}, + attrs={"operation": "SUBTRACT"}, + ) + + noise_texture = nw.new_node(Nodes.NoiseTexture) + + map_range_5 = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": noise_texture.outputs["Fac"], 3: -1.0} + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: map_range_5.outputs["Result"], 1: 0.01}, + attrs={"operation": "MULTIPLY"}, + ) + + map_range_2 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": group_input.outputs["Y"], + 1: -70.0, + 2: group_input.outputs["Midrib Length"], + 3: group_input.outputs["Midrib Width"], + 4: 0.0, + }, + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: multiply, 1: map_range_2.outputs["Result"]} + ) + + absolute = nw.new_node( + Nodes.Math, input_kwargs={0: subtract}, attrs={"operation": "ABSOLUTE"} + ) + + subtract_1 = nw.new_node( + Nodes.Math, input_kwargs={0: add, 1: absolute}, attrs={"operation": "SUBTRACT"} + ) + + absolute_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Y"]}, + attrs={"operation": "ABSOLUTE"}, + ) + + map_range_3 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": absolute_1, + 2: 
group_input.outputs["Stem Length"], + 3: 1.0, + 4: 0.0, + }, + ) + + smooth_min = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract_1, 1: map_range_3.outputs["Result"], 2: 0.06}, + attrs={"operation": "SMOOTH_MIN"}, + ) + + divide = nw.new_node( + Nodes.Math, + input_kwargs={0: 1.0, 1: smooth_min}, + attrs={"operation": "DIVIDE", "use_clamp": True}, + ) + + map_range_4 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": divide, 1: 0.001, 2: 0.03, 3: 1.0, 4: 0.0}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "X Modulated": subtract, + "Midrib Value": map_range_4.outputs["Result"], + }, + ) + + +@node_utils.to_nodegroup( + "nodegroup_nodegroup_apply_vein_midrib", singleton=False, type="GeometryNodeTree" +) +def nodegroup_nodegroup_apply_vein_midrib(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "Midrib Value", 0.5), + ("NodeSocketFloat", "Leaf Shape", 1.0), + ("NodeSocketFloat", "Vein Density", 6.0), + ("NodeSocketFloat", "Vein Coord - main", 0.0), + ("NodeSocketFloat", "Vein Coord - 1", 0.0), + ("NodeSocketFloat", "Vein Coord - 2", 0.0), + ], + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": group_input.outputs["Leaf Shape"], + 1: -0.3, + 2: 0.05, + 3: 0.015, + 4: 0.0, + }, + ) + + nodegroup = nw.new_node( + nodegroup_node_group().name, + input_kwargs={ + "Coord": group_input.outputs["Vein Coord - 2"], + "Shape": map_range.outputs["Result"], + "Density": group_input.outputs["Vein Density"], + "Random Scale Seed": 3.57, + }, + ) + + nodegroup_1 = nw.new_node( + nodegroup_node_group().name, + input_kwargs={ + "Coord": group_input.outputs["Vein Coord - 1"], + "Shape": map_range.outputs["Result"], + "Density": group_input.outputs["Vein Density"], + "Random Scale Seed": 1.08, + }, + ) + + vein = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={ + "W": group_input.outputs["Vein Coord - main"], + "Scale": group_input.outputs["Vein Density"], + "Randomness": 0.2, + }, + label="Vein", + attrs={"voronoi_dimensions": "1D"}, + ) + + position = nw.new_node(Nodes.InputPosition) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, input_kwargs={"Vector": position, "Scale": 20.0} + ) + + map_range_3 = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": noise_texture.outputs["Fac"], 3: -1.0} + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: map_range_3.outputs["Result"], 1: 0.02}, + attrs={"operation": "MULTIPLY"}, + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: vein.outputs["Distance"], 1: multiply} + ) + + map_range_4 = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": add, 2: 0.03, 3: 1.0, 4: 0.0} + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: map_range.outputs["Result"], 1: map_range_4.outputs["Result"]}, + attrs={"operation": "MULTIPLY"}, + ) + + map_range_5 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": multiply_1, 1: 0.001, 2: 0.01, 3: 1.0, 4: 0.0}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: nodegroup_1, 1: map_range_5.outputs["Result"]}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: nodegroup, 1: multiply_2}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_4 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Midrib Value"], 1: multiply_3}, + attrs={"operation": "MULTIPLY"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, 
input_kwargs={"Vein Value": multiply_4} + ) + + +@node_utils.to_nodegroup( + "nodegroup_nodegroup_move_to_origin", singleton=False, type="GeometryNodeTree" +) +def nodegroup_nodegroup_move_to_origin(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) + + position = nw.new_node(Nodes.InputPosition) + + separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": position}) + + attribute_statistic = nw.new_node( + Nodes.AttributeStatistic, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + 2: separate_xyz.outputs["Y"], + }, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: 0.0, 1: attribute_statistic.outputs["Min"]}, + attrs={"operation": "SUBTRACT"}, + ) + + combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Y": subtract}) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + "Offset": combine_xyz, + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_position} + ) + + +@node_utils.to_nodegroup( + "nodegroup_nodegroup_leaf_rotate_x", singleton=False, type="GeometryNodeTree" +) +def nodegroup_nodegroup_leaf_rotate_x(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketFloat", "To Max", -0.4), + ], + ) + + position_1 = nw.new_node(Nodes.InputPosition) + + separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": position_1}) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": separate_xyz.outputs["Y"], + 4: group_input.outputs["To Max"], + }, + attrs={"clamp": False}, + ) + + vector_rotate = nw.new_node( + Nodes.VectorRotate, + input_kwargs={"Vector": position_1, "Angle": map_range.outputs["Result"]}, + attrs={"rotation_type": "X_AXIS"}, + ) + + set_position_1 = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + "Position": vector_rotate, + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_position_1} + ) + + +@node_utils.to_nodegroup( + "nodegroup_nodegroup_sub_vein", singleton=False, type="GeometryNodeTree" +) +def nodegroup_nodegroup_sub_vein(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[("NodeSocketFloat", "X", 0.5), ("NodeSocketFloat", "Y", 0.0)], + ) + + absolute = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["X"]}, + attrs={"operation": "ABSOLUTE"}, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": absolute, "Y": group_input.outputs["Y"]} + ) + + voronoi_texture = nw.new_node( + Nodes.VoronoiTexture, input_kwargs={"Vector": combine_xyz, "Scale": 30.0} + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": voronoi_texture.outputs["Distance"], 2: 0.1, 4: 2.0}, + attrs={"clamp": False}, + ) + + voronoi_texture_1 = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={"Vector": combine_xyz, "Scale": 150.0}, + attrs={"feature": "DISTANCE_TO_EDGE"}, + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": voronoi_texture_1.outputs["Distance"], 2: 0.1}, + ) + + add = nw.new_node( + Nodes.Math, + input_kwargs={0: map_range.outputs["Result"], 1: 
map_range_1.outputs["Result"]}, + ) + + multiply = nw.new_node( + Nodes.Math, input_kwargs={0: add, 1: -1.0}, attrs={"operation": "MULTIPLY"} + ) + + map_range_3 = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": map_range_1.outputs["Result"], 4: -1.0} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Value": multiply, "Color Value": map_range_3.outputs["Result"]}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_nodegroup_leaf_shader", singleton=False, type="ShaderNodeTree" +) +def nodegroup_nodegroup_leaf_shader(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[("NodeSocketColor", "Color", (0.8, 0.8, 0.8, 1.0))], + ) + + diffuse_bsdf = nw.new_node( + Nodes.DiffuseBSDF, input_kwargs={"Color": group_input.outputs["Color"]} + ) + + glossy_bsdf = nw.new_node( + "ShaderNodeBsdfGlossy", + input_kwargs={"Color": group_input.outputs["Color"], "Roughness": 0.3}, + ) + + mix_shader = nw.new_node( + Nodes.MixShader, input_kwargs={"Fac": 0.2, 1: diffuse_bsdf, 2: glossy_bsdf} + ) + + translucent_bsdf = nw.new_node( + Nodes.TranslucentBSDF, input_kwargs={"Color": group_input.outputs["Color"]} + ) + + mix_shader_1 = nw.new_node( + Nodes.MixShader, input_kwargs={"Fac": 0.3, 1: mix_shader, 2: translucent_bsdf} + ) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Shader": mix_shader_1}) + + +def shader_stem_material(nw: NodeWrangler, stem_color_hsv=None): + # Code generated using version 2.4.3 of the node_transpiler + + if stem_color_hsv is None: + stem_color_hsv = (uniform(0.25, 0.32), uniform(0.6, 0.9), uniform(0.2, 0.6)) + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, input_kwargs={"Base Color": hsv2rgba(stem_color_hsv)} + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, input_kwargs={"Surface": principled_bsdf} + ) diff --git a/infinigen/assets/underwater/__init__.py b/infinigen/assets/objects/underwater/__init__.py similarity index 51% rename from infinigen/assets/underwater/__init__.py rename to infinigen/assets/objects/underwater/__init__.py index ba27ddac7..a2208f8a4 100644 --- a/infinigen/assets/underwater/__init__.py +++ b/infinigen/assets/objects/underwater/__init__.py @@ -1,2 +1,2 @@ from .seaweed import SeaweedFactory -from .urchin import UrchinFactory \ No newline at end of file +from .urchin import UrchinFactory diff --git a/infinigen/assets/objects/underwater/seaweed.py b/infinigen/assets/objects/underwater/seaweed.py new file mode 100644 index 000000000..24de8f78f --- /dev/null +++ b/infinigen/assets/objects/underwater/seaweed.py @@ -0,0 +1,181 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this +# source tree. 
+ +# Authors: Lingjie Mei + + +import bpy +import numpy as np +from numpy.random import uniform + +import infinigen.core.util.blender as butil +from infinigen.assets.objects.creatures.util.animation.driver_repeated import ( + repeated_driver, +) +from infinigen.assets.utils.decorate import read_co, subsurface2face_size, write_co +from infinigen.assets.utils.draw import make_circular_interp +from infinigen.assets.utils.mesh import polygon_angles +from infinigen.assets.utils.misc import assign_material +from infinigen.assets.utils.object import data2mesh, mesh2obj +from infinigen.core import surface +from infinigen.core.nodes.node_utils import build_color_ramp +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.tagging import tag_object +from infinigen.core.util.color import hsv2rgba +from infinigen.core.util.math import FixedSeed +from infinigen.core.util.random import log_uniform +from infinigen.infinigen_gpl.extras.diff_growth import build_diff_growth + + +class SeaweedFactory(AssetFactory): + def __init__(self, factory_seed, coarse=False): + super().__init__(factory_seed, coarse) + with FixedSeed(factory_seed): + self.base_hue = ( + uniform(0.0, 0.1) if uniform(0, 1) < 0.5 else uniform(0.3, 0.4) + ) + self.material = surface.shaderfunc_to_material( + self.shader_seaweed, self.base_hue + ) + self.freq = 1 / log_uniform(200, 500) + + def create_asset(self, face_size=0.01, **params): + growth_vec = 0, 0, uniform(3.0, 6.0) + inhibit_shell = uniform(0.6, 0.8) + max_polygons = int(log_uniform(2e3, 1e4)) + fac_noise = uniform(1.5, 2.5) + repulsion_radius = log_uniform(1.0, 1.5) + obj = self.differential_growth_make( + fac_noise=fac_noise, + inhibit_shell=inhibit_shell, + repulsion_radius=repulsion_radius, + growth_vec=growth_vec, + dt=0.25, + max_polygons=max_polygons, + ) + + obj.scale = [2 / max(obj.dimensions)] * 3 + obj.scale[-1] *= uniform(1.5, 2) + obj.location[-1] -= 0.02 + butil.apply_transform(obj, loc=True) + f_scale = make_circular_interp(2, 5, 5, log_uniform) + x, y, z = read_co(obj).T + scale = f_scale(np.arctan2(y, x) + np.pi) + co = np.stack([scale * x, scale * y, z], -1) + write_co(obj, co) + subsurface2face_size(obj, face_size / 2) + butil.modify_mesh(obj, "TRIANGULATE") + butil.modify_mesh(obj, "SMOOTH", factor=uniform(-0.8, 0.8)) + texture_type = np.random.choice(["STUCCI", "MARBLE"]) + texture = bpy.data.textures.new(name="seaweed", type=texture_type) + texture.noise_scale = log_uniform(0.05, 0.2) + butil.modify_mesh( + obj, "DISPLACE", True, strength=uniform(0.0, 0.03), texture=texture + ) + assign_material(obj, self.material) + self.animate_bend(obj) + tag_object(obj, "seaweed") + return obj + + def animate_bend(self, obj): + obj, mod = butil.modify_mesh( + obj, + "SIMPLE_DEFORM", + False, + deform_method="BEND", + deform_axis="Y", + return_mod=True, + ) + driver = mod.driver_add("angle").driver + start_angle = uniform(-np.pi / 4, 0) + driver.expression = repeated_driver( + start_angle, start_angle + uniform(np.pi * 0.2, np.pi * 0.8), self.freq + ) + + @staticmethod + def differential_growth_make(**kwargs): + n_base = np.random.randint(5, 7) + angles = polygon_angles(n_base) + vertices = np.block( + [[np.cos(angles), 0], [np.sin(angles), 0], [np.zeros(n_base + 1)]] + ).T + faces = np.stack( + [np.arange(n_base), np.roll(np.arange(n_base), 1), np.full(n_base, n_base)] + ).T + obj = mesh2obj(data2mesh(vertices, [], faces, "diff_growth")) + + boundary = 
obj.vertex_groups.new(name="Boundary") + boundary.add(list(range(n_base)), 1.0, "REPLACE") + build_diff_growth(obj, boundary.index, **kwargs) + return obj + + @staticmethod + def geo_seaweed_waves(nw: NodeWrangler): + translation_scale = uniform(0.0, 0.25) + expand_scale = uniform(0.2, 0.3) + geometry = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) + x, y, z = nw.separate(nw.new_node(Nodes.InputPosition)) + angle = np.random.uniform(0, 2 * np.pi) + displacement = nw.scale( + nw.add( + nw.scale( + nw.combine(np.cos(angle), np.sin(angle), 0), + nw.scalar_multiply(nw.musgrave(10), translation_scale), + ), + nw.scale(nw.combine(x, y, 0), expand_scale), + ), + z, + ) + geometry = nw.new_node( + Nodes.SetPosition, + input_kwargs={"Geometry": geometry, "Offset": displacement}, + ) + nw.new_node(Nodes.GroupOutput, input_kwargs={"Geometry": geometry}) + + @staticmethod + def shader_seaweed(nw: NodeWrangler, base_hue=0.3): + h_perturb = uniform(-0.1, 0.1) + s_perturb = uniform(-0.1, -0.0) + v_perturb = log_uniform(1.0, 2) + + def map_perturb(h, s, v): + return hsv2rgba(h + h_perturb, s + s_perturb, v / v_perturb) + + subsurface_ratio = 0.01 + roughness = 0.8 + mix_ratio = uniform(0.2, 0.4) + specular = 0.2 + + color_1 = map_perturb(base_hue, uniform(0.6, 0.8), 0.25) + color_2 = map_perturb(base_hue - uniform(0.05, 0.1), uniform(0.6, 0.8), 0.15) + cr = build_color_ramp( + nw, + nw.musgrave(uniform(5, 10)), + [0, 0.3, 0.7, 1.0], + [color_1, color_1, color_2, color_2], + ) + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": cr, + "Subsurface": subsurface_ratio, + "Subsurface Radius": (0.01, 0.01, 0.01), + "Subsurface Color": map_perturb(base_hue, 0.6, 0.2), + "Roughness": roughness, + "Specular": specular, + }, + ) + + translucent_bsdf = nw.new_node( + Nodes.TransparentBSDF, input_kwargs={"Color": cr} + ) + + mix_shader = nw.new_node( + Nodes.MixShader, [mix_ratio, principled_bsdf, translucent_bsdf] + ) + return mix_shader diff --git a/infinigen/assets/objects/underwater/urchin.py b/infinigen/assets/objects/underwater/urchin.py new file mode 100644 index 000000000..f03691e20 --- /dev/null +++ b/infinigen/assets/objects/underwater/urchin.py @@ -0,0 +1,187 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
+ +# Authors: Lingjie Mei + + +import bpy +from numpy.random import uniform + +import infinigen.core.util.blender as butil +from infinigen.assets.objects.creatures.util.animation.driver_repeated import ( + repeated_driver, +) +from infinigen.assets.utils.decorate import geo_extension +from infinigen.assets.utils.misc import assign_material +from infinigen.assets.utils.object import new_icosphere, separate_loose +from infinigen.core import surface +from infinigen.core.nodes.node_info import Nodes +from infinigen.core.nodes.node_wrangler import NodeWrangler +from infinigen.core.placement.detail import adapt_mesh_resolution +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.tagging import tag_object +from infinigen.core.util.color import hsv2rgba +from infinigen.core.util.math import FixedSeed +from infinigen.core.util.random import log_uniform + + +class UrchinFactory(AssetFactory): + def __init__(self, factory_seed, coarse=False): + super().__init__(factory_seed, coarse) + with FixedSeed(factory_seed): + self.base_hue = uniform(-0.25, 0.15) % 1 + self.materials = [ + surface.shaderfunc_to_material(shader, self.base_hue) + for shader in [self.shader_spikes, self.shader_girdle, self.shader_base] + ] + self.freq = 1 / log_uniform(100, 200) + + def create_asset(self, placeholder, face_size=0.01, **params): + obj = new_icosphere(subdivisions=4) + surface.add_geomod(obj, geo_extension, apply=True) + obj.scale[-1] = uniform(0.8, 1.0) + butil.apply_transform(obj) + butil.modify_mesh( + obj, "BEVEL", offset_type="PERCENT", width_pct=25, angle_limit=0 + ) + surface.add_geomod( + obj, + self.geo_extrude, + apply=True, + attributes=["spike", "girdle"], + domains=["FACE"] * 2, + ) + levels = 1 + butil.modify_mesh( + obj, "SUBSURF", apply=True, levels=levels, render_levels=levels + ) + obj.scale = [2 / max(obj.dimensions)] * 3 + obj.scale[-1] *= log_uniform(0.6, 1.2) + butil.apply_transform(obj) + adapt_mesh_resolution(obj, face_size, method="subdiv_by_area") + obj = separate_loose(obj) + butil.modify_mesh( + obj, + "DISPLACE", + texture=bpy.data.textures.new(name="urchin", type="STUCCI"), + strength=0.005, + mid_level=0, + ) + surface.add_geomod( + obj, + self.geo_material_index, + apply=True, + input_attributes=[None, "spike", "girdle"], + ) + assign_material(obj, self.materials) + self.animate_stretch(obj) + tag_object(obj, "urchin") + return obj + + def animate_stretch(self, obj): + obj, mod = butil.modify_mesh( + obj, + "SIMPLE_DEFORM", + False, + return_mod=True, + deform_method="STRETCH", + deform_axis="Z", + ) + driver = mod.driver_add("factor").driver + driver.expression = repeated_driver(-0.1, 0.1, self.freq) + + @staticmethod + def geo_extrude(nw: NodeWrangler): + face_prob = 0.98 + girdle_height = 0.1 + extrude_height = log_uniform(1.0, 5.0) + perturb = 0.1 + girdle_size = uniform(0.6, 1) + geometry = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) + face_vertices = nw.new_node(Nodes.FaceNeighbors) + selection = nw.boolean_math( + "AND", + nw.compare("GREATER_EQUAL", face_vertices, 5), + nw.bernoulli(face_prob), + ) + geometry, top, _ = nw.new_node( + Nodes.ExtrudeMesh, [geometry, selection, None, girdle_height] + ).outputs + geometry, top, girdle = nw.new_node( + Nodes.ExtrudeMesh, [geometry, top, None, 1e-3] + ).outputs + geometry = nw.new_node(Nodes.ScaleElements, [geometry, top, girdle_size]) + geometry, top, _ = nw.new_node( + Nodes.ExtrudeMesh, [geometry, top, None, -girdle_height] + ).outputs + direction = 
nw.scale( + nw.add( + nw.new_node(Nodes.InputNormal), + nw.uniform([-perturb] * 3, [perturb] * 3), + ), + nw.uniform(0.5 * extrude_height, extrude_height), + ) + geometry, top, side = nw.new_node( + Nodes.ExtrudeMesh, [geometry, top, direction] + ).outputs + geometry = nw.new_node(Nodes.ScaleElements, [geometry, top, 0.2]) + spike = nw.boolean_math("OR", top, side) + nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": geometry, "Spike": spike, "Girdle": girdle}, + ) + + @staticmethod + def shader_spikes(nw: NodeWrangler, base_hue): + transmission = uniform(0.95, 0.99) + subsurface = uniform(0.1, 0.2) + roughness = uniform(0.5, 0.8) + color = hsv2rgba(base_hue, uniform(0.5, 1.0), log_uniform(0.05, 1.0)) + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": color, + "Roughness": roughness, + "Subsurface": subsurface, + "Subsurface Color": color, + "Transmission": transmission, + }, + ) + return principled_bsdf + + @staticmethod + def shader_girdle(nw: NodeWrangler, base_hue): + roughness = uniform(0.5, 0.8) + color = hsv2rgba(base_hue, uniform(0.4, 0.5), log_uniform(0.02, 0.1)) + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={"Base Color": color, "Roughness": roughness}, + ) + return principled_bsdf + + @staticmethod + def shader_base(nw: NodeWrangler, base_hue): + roughness = uniform(0.5, 0.8) + color = hsv2rgba(base_hue, uniform(0.8, 1.0), log_uniform(0.01, 0.02)) + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={"Base Color": color, "Roughness": roughness}, + ) + return principled_bsdf + + @staticmethod + def geo_material_index(nw: NodeWrangler): + geometry, spike, girdle = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketFloat", "Spike", None), + ("NodeSocketFloat", "Girdle", None), + ], + ).outputs[:-1] + geometry = nw.new_node(Nodes.SetMaterialIndex, [geometry, None, 2]) + geometry = nw.new_node(Nodes.SetMaterialIndex, [geometry, spike, 0]) + geometry = nw.new_node(Nodes.SetMaterialIndex, [geometry, girdle, 1]) + nw.new_node(Nodes.GroupOutput, input_kwargs={"Geometry": geometry}) diff --git a/infinigen/assets/wall_decorations/__init__.py b/infinigen/assets/objects/wall_decorations/__init__.py similarity index 85% rename from infinigen/assets/wall_decorations/__init__.py rename to infinigen/assets/objects/wall_decorations/__init__.py index e437b4f72..e266a6e3c 100644 --- a/infinigen/assets/wall_decorations/__init__.py +++ b/infinigen/assets/objects/wall_decorations/__init__.py @@ -3,6 +3,6 @@ # Authors: Lingjie Mei from .balloon import BalloonFactory -from .wall_art import WallArtFactory, MirrorFactory -from .wall_shelf import WallShelfFactory from .range_hood import RangeHoodFactory +from .wall_art import MirrorFactory, WallArtFactory +from .wall_shelf import WallShelfFactory diff --git a/infinigen/assets/wall_decorations/balloon.py b/infinigen/assets/objects/wall_decorations/balloon.py similarity index 68% rename from infinigen/assets/wall_decorations/balloon.py rename to infinigen/assets/objects/wall_decorations/balloon.py index bb8cc032f..ce8363e19 100644 --- a/infinigen/assets/wall_decorations/balloon.py +++ b/infinigen/assets/objects/wall_decorations/balloon.py @@ -3,49 +3,51 @@ # Authors: Lingjie Mei import bpy - import numpy as np from numpy.random import uniform +from infinigen.assets.material_assignments import AssetList from infinigen.assets.scatters import clothes from infinigen.assets.utils.decorate import 
subdivide_edge_ring, subsurf from infinigen.assets.utils.draw import remesh_fill from infinigen.assets.utils.misc import generate_text from infinigen.assets.utils.object import new_bbox from infinigen.core.placement.factory import AssetFactory - from infinigen.core.util import blender as butil from infinigen.core.util.math import FixedSeed -from infinigen.assets.material_assignments import AssetList class BalloonFactory(AssetFactory): - alpha = .8 + alpha = 0.8 def __init__(self, factory_seed, coarse=False): super(BalloonFactory, self).__init__(factory_seed, coarse) with FixedSeed(self.factory_seed): - self.thickness = uniform(.06, .1) - material_assignments = AssetList['BalloonFactory']() - self.surface = material_assignments['surface'].assign_material() - self.rel_scale = uniform(.2, .3) * 4 - self.displace = uniform(.02, .04) + self.thickness = uniform(0.06, 0.1) + material_assignments = AssetList["BalloonFactory"]() + self.surface = material_assignments["surface"].assign_material() + self.rel_scale = uniform(0.2, 0.3) * 4 + self.displace = uniform(0.02, 0.04) def create_placeholder(self, **kwargs) -> bpy.types.Object: bpy.ops.object.text_add() obj = bpy.context.active_object - with butil.ViewportMode(obj, 'EDIT'): - for _ in 'Text': - bpy.ops.font.delete(type='PREVIOUS_OR_SELECTION') + with butil.ViewportMode(obj, "EDIT"): + for _ in "Text": + bpy.ops.font.delete(type="PREVIOUS_OR_SELECTION") text = generate_text().upper() bpy.ops.font.text_insert(text=text) with butil.SelectObjects(obj): - bpy.ops.object.convert(target='MESH') + bpy.ops.object.convert(target="MESH") obj = bpy.context.active_object parent = new_bbox( - -self.thickness / 2, self.thickness / 2, 0, self.rel_scale * len(text) * self.alpha, - 0, self.rel_scale * self.alpha + -self.thickness / 2, + self.thickness / 2, + 0, + self.rel_scale * len(text) * self.alpha, + 0, + self.rel_scale * self.alpha, ) obj.parent = parent return parent @@ -53,8 +55,8 @@ def create_placeholder(self, **kwargs) -> bpy.types.Object: def create_asset(self, i, placeholder, **params) -> bpy.types.Object: obj = placeholder.children[0] obj.parent = None - remesh_fill(obj, .02) - butil.modify_mesh(obj, 'SOLIDIFY', thickness=self.thickness, offset=.5) + remesh_fill(obj, 0.02) + butil.modify_mesh(obj, "SOLIDIFY", thickness=self.thickness, offset=0.5) subdivide_edge_ring(obj, 8, (0, 0, 1)) clothes.cloth_sim( @@ -63,15 +65,15 @@ def create_asset(self, i, placeholder, **params) -> bpy.types.Object: gravity=0, use_pressure=True, uniform_pressure_force=uniform(10, 20), - vertex_group_mass='pin' + vertex_group_mass="pin", ) subsurf(obj, 1) obj.scale = [self.rel_scale] * 3 obj.rotation_euler = np.pi / 2, 0, np.pi / 2 butil.apply_transform(obj, True) - butil.modify_mesh(obj, 'DISPLACE', strength=self.displace) - butil.modify_mesh(obj, 'SMOOTH', iterations=5) + butil.modify_mesh(obj, "DISPLACE", strength=self.displace) + butil.modify_mesh(obj, "SMOOTH", iterations=5) return obj def finalize_assets(self, assets): diff --git a/infinigen/assets/objects/wall_decorations/range_hood.py b/infinigen/assets/objects/wall_decorations/range_hood.py new file mode 100644 index 000000000..6a4111e84 --- /dev/null +++ b/infinigen/assets/objects/wall_decorations/range_hood.py @@ -0,0 +1,303 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
+ +# Authors: Yiming Zuo + +import bpy +import numpy as np +from numpy.random import uniform + +import infinigen.core.util.blender as butil +from infinigen.assets.material_assignments import AssetList +from infinigen.assets.objects.table_decorations.utils import nodegroup_lofting_poly +from infinigen.assets.objects.tables.table_utils import nodegroup_n_gon_profile +from infinigen.core import surface +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.util.math import FixedSeed + + +class RangeHoodFactory(AssetFactory): + def __init__(self, factory_seed, coarse=False, dimensions=None): + super(RangeHoodFactory, self).__init__(factory_seed, coarse=coarse) + + self.dimensions = dimensions + + with FixedSeed(factory_seed): + self.params = self.sample_parameters(dimensions) + self.surface, self.scratch, self.edge_wear = self.get_material_params() + + def get_material_params(self): + material_assignments = AssetList["RangeHoodFactory"]() + surface = material_assignments["surface"].assign_material() + + scratch_prob, edge_wear_prob = material_assignments["wear_tear_prob"] + scratch, edge_wear = material_assignments["wear_tear"] + + is_scratch = np.random.uniform() < scratch_prob + is_edge_wear = np.random.uniform() < edge_wear_prob + if not is_scratch: + scratch = None + + if not is_edge_wear: + edge_wear = None + + return surface, scratch, edge_wear + + @staticmethod + def sample_parameters(dimensions): + # all in meters + if dimensions is None: + x = 0.55 + y = 0.75 + z = 1.0 + dimensions = (x, y, z) + + x, y, z = dimensions + + height_1 = uniform(0.05, 0.07) + height_2 = uniform(0.1, 0.3) + scale_2 = uniform(0.25, 0.4) + + parameters = { + "Height_total": z, + "Width": y, + "Depth": x, + "Height_1": height_1, + "Scale_2": scale_2, + "Height_2": height_2, + } + + return parameters + + def create_asset(self, **params): + bpy.ops.mesh.primitive_plane_add( + size=2, + enter_editmode=False, + align="WORLD", + location=(0, 0, 0), + scale=(1, 1, 1), + ) + obj = bpy.context.active_object + + surface.add_geomod( + obj, geometry_generate_hood, apply=True, input_kwargs=self.params + ) + butil.modify_mesh(obj, "SOLIDIFY", apply=True, thickness=0.002) + butil.modify_mesh(obj, "SUBSURF", apply=True, levels=1, render_levels=1) + + return obj + + def finalize_assets(self, assets): + self.surface.apply(assets) + if self.scratch: + self.scratch.apply(assets) + if self.edge_wear: + self.edge_wear.apply(assets) + + +def geometry_generate_hood(nw: NodeWrangler, **kwargs): + # Code generated using version 2.6.4 of the node_transpiler + + generatetabletop = nw.new_node( + geometry_range_hood().name, + input_kwargs={ + "Resolution": 64, + "Height_total": kwargs["Height_total"], + "Width": kwargs["Width"], + "Depth": kwargs["Depth"], + "Height_1": kwargs["Height_1"], + "Scale_2": kwargs["Scale_2"], + "Height_2": kwargs["Height_2"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": generatetabletop}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "geometry_range_hood", singleton=False, type="GeometryNodeTree" +) +def geometry_range_hood(nw: NodeWrangler): + # Code generated using version 2.6.5 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketInt", "Resolution", 128), + ("NodeSocketFloat", "Height_total", 0.0000), + ("NodeSocketFloat", "Width", 0.0000), + 
("NodeSocketFloat", "Depth", 0.0000), + ("NodeSocketFloat", "Profile Fillet Ratio", 0.0100), + ("NodeSocketFloat", "Height_1", 0.0000), + ("NodeSocketFloat", "Scale_2", 0.0000), + ("NodeSocketFloat", "Height_2", 0.3000), + ], + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Width"], 1: 1.4140}, + attrs={"operation": "MULTIPLY"}, + ) + + divide = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Depth"], 1: group_input.outputs["Width"]}, + attrs={"operation": "DIVIDE"}, + ) + + ngonprofile = nw.new_node( + nodegroup_n_gon_profile().name, + input_kwargs={ + "Profile Width": multiply, + "Profile Aspect Ratio": divide, + "Profile Fillet Ratio": group_input.outputs["Profile Fillet Ratio"], + }, + ) + + resample_curve = nw.new_node( + Nodes.ResampleCurve, + input_kwargs={"Curve": ngonprofile, "Count": group_input.outputs["Resolution"]}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Depth"]}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Y": multiply_1}) + + transform_geometry = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": resample_curve, "Translation": combine_xyz}, + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"Z": group_input.outputs["Height_1"]} + ) + + transform_geometry_1 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": transform_geometry, "Translation": combine_xyz_1}, + ) + + combine_xyz_2 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"Z": group_input.outputs["Height_2"]} + ) + + transform_geometry_2 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": transform_geometry, + "Translation": combine_xyz_2, + "Scale": group_input.outputs["Scale_2"], + }, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["Height_total"], + 1: group_input.outputs["Height_2"], + }, + attrs={"operation": "SUBTRACT"}, + ) + + combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": subtract}) + + transform_geometry_3 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": transform_geometry_2, "Translation": combine_xyz_3}, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={ + "Geometry": [ + transform_geometry_3, + transform_geometry_2, + transform_geometry_1, + transform_geometry, + ] + }, + ) + + lofting_poly = nw.new_node( + nodegroup_lofting_poly().name, + input_kwargs={ + "Profile Curves": join_geometry, + "U Resolution": group_input.outputs["Resolution"], + "V Resolution": group_input.outputs["Resolution"], + }, + ) + + delete_geometry = nw.new_node( + Nodes.DeleteGeometry, + input_kwargs={ + "Geometry": lofting_poly.outputs["Geometry"], + "Selection": lofting_poly.outputs["Top"], + }, + ) + + grid = nw.new_node( + Nodes.MeshGrid, + input_kwargs={ + "Size X": group_input.outputs["Width"], + "Size Y": group_input.outputs["Depth"], + "Vertices X": group_input.outputs["Resolution"], + "Vertices Y": group_input.outputs["Resolution"], + }, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Depth"]}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_4 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Y": multiply_2}) + + transform_geometry_4 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": grid.outputs["Mesh"], + "Translation": combine_xyz_4, + "Rotation": (-0.0698, 0.0000, 0.0000), + "Scale": (0.9800, 0.9800, 1.0000), + }, + ) + + transform_geometry_5 = nw.new_node( + 
Nodes.Transform, + input_kwargs={ + "Geometry": transform_geometry_4, + "Rotation": (0.1047, 0.0000, 0.0000), + "Scale": (0.9500, 0.9700, 1.0000), + }, + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [delete_geometry, transform_geometry_5]}, + ) + + transform_geometry_6 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": join_geometry_1, + "Rotation": (0.0, 0.0000, -np.pi / 2), + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": transform_geometry_6}, + attrs={"is_active_output": True}, + ) diff --git a/infinigen/assets/objects/wall_decorations/skirting_board.py b/infinigen/assets/objects/wall_decorations/skirting_board.py new file mode 100644 index 000000000..563197421 --- /dev/null +++ b/infinigen/assets/objects/wall_decorations/skirting_board.py @@ -0,0 +1,349 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Yiming Zuo, Lingjie Mei, Alexander Raistrick + +import logging + +import bpy +import numpy as np +import shapely +from numpy.random import randint, uniform +from shapely.geometry import Polygon +from shapely.ops import unary_union + +import infinigen.core.util.blender as butil +from infinigen.assets.materials.plastics import plastic_rough +from infinigen.assets.utils.decorate import ( + read_co, +) +from infinigen.assets.utils.draw import bezier_curve +from infinigen.assets.utils.object import join_objects, new_plane +from infinigen.assets.utils.shapes import obj2polygon +from infinigen.core import surface, tagging +from infinigen.core import tags as t +from infinigen.core.constraints.example_solver.room.constants import ( + DOOR_WIDTH, + WALL_HEIGHT, + WALL_THICKNESS, +) +from infinigen.core.constraints.example_solver.room.types import get_room_level +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.util.color import color_category +from infinigen.core.util.math import FixedSeed + +logger = logging.getLogger(__name__) + + +@node_utils.to_nodegroup( + "nodegroup_make_skirting_board_001", singleton=False, type="GeometryNodeTree" +) +def nodegroup_make_skirting_board(nw: NodeWrangler, control_points): + # Code generated using version 2.6.5 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketCollection", "Parent", None), + ("NodeSocketFloat", "Thickness", 0.0300), + ("NodeSocketFloat", "Height", 0.1500), + ("NodeSocketFloatDistance", "Resolution", 0.0050), + ("NodeSocketBool", "Is Ceiling", False), + ], + ) + + collection_info = nw.new_node( + Nodes.CollectionInfo, input_kwargs={"Collection": group_input.outputs["Parent"]} + ) + + mesh = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": collection_info} + ) + + quadrilateral = nw.new_node( + "GeometryNodeCurvePrimitiveQuadrilateral", + input_kwargs={ + "Width": group_input.outputs["Thickness"], + "Height": group_input.outputs["Height"], + }, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Thickness"]}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Height"], 1: -0.5000}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": multiply, "Y": multiply_1} + ) + + transform_geometry = nw.new_node( + 
Nodes.Transform, + input_kwargs={"Geometry": quadrilateral, "Translation": combine_xyz}, + ) + + resample_curve_1 = nw.new_node( + Nodes.ResampleCurve, + input_kwargs={ + "Curve": transform_geometry, + "Count": 220, + "Length": group_input.outputs["Resolution"], + }, + attrs={"mode": "LENGTH"}, + ) + + position = nw.new_node(Nodes.InputPosition) + + separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": position}) + + greater_than = nw.new_node( + Nodes.Compare, input_kwargs={0: separate_xyz.outputs["X"]} + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Height"], 1: -1.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": separate_xyz.outputs["Y"], 1: multiply_2, 2: 0.0000}, + ) + + float_curve = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": map_range.outputs["Result"]} + ) + node_utils.assign_curve(float_curve.mapping.curves[0], control_points) + + multiply_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: float_curve, 1: group_input.outputs["Thickness"]}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": multiply_3, "Y": separate_xyz.outputs["Y"]} + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": resample_curve_1, + "Selection": greater_than, + "Position": combine_xyz_1, + }, + ) + + switch = nw.new_node( + Nodes.Switch, + input_kwargs={ + 0: group_input.outputs["Is Ceiling"], + 8: (-1.0000, 1.0000, 1.0000), + 9: (-1.0000, -1.0000, -1.0000), + }, + attrs={"input_type": "VECTOR"}, + ) + + transform_geometry_1 = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": set_position, "Scale": switch.outputs[3]}, + ) + + curve_to_mesh_1 = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": mesh, + "Profile Curve": transform_geometry_1, + "Fill Caps": True, + }, + ) + + set_shade_smooth = nw.new_node( + Nodes.SetShadeSmooth, + input_kwargs={"Geometry": curve_to_mesh_1, "Shade Smooth": False}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": set_shade_smooth}, + attrs={"is_active_output": True}, + ) + + +def apply_skirtingboard( + nw: NodeWrangler, contour, is_ceiling=False, seed=None, thickness=0.02 +): + # Code generated using version 2.6.5 of the node_transpiler + + # TODO: randomize style / size / materials + if seed is None: + seed = randint(0, 10000) + with FixedSeed(seed): + thickness = uniform(0.02, 0.05) + height = uniform(0.08, 0.15) + color = color_category("white") + roughness = uniform(0.5, 1.0) + n_peaks = randint(1, 4) + start_y = uniform(0.0, 0.5) + mid_x = uniform(0.2, 0.8) + peak_xs = np.sort(uniform(0.0, mid_x, size=n_peaks)) + peak_ys = np.sort(uniform(start_y, 1.0, size=n_peaks)) + control_points = [(0.0000, start_y)] + control_points += [(x, y) for x, y in zip(peak_xs, peak_ys)] + control_points += [(mid_x, 1.0000), (1.0000, 1.0000)] + + makeskirtingboard = nw.new_node( + nodegroup_make_skirting_board(control_points=control_points).name, + input_kwargs={ + "Parent": contour, + "Resolution": 0.0010, + "Thickness": thickness, + "Height": height, + "Is Ceiling": is_ceiling, + }, + ) + + makeskirtingboard = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": makeskirtingboard, + "Material": surface.shaderfunc_to_material( + plastic_rough.shader_rough_plastic, + base_color=color, + roughness=roughness, + ), + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + 
input_kwargs={"Geometry": makeskirtingboard}, + attrs={"is_active_output": True}, + ) + + +def make_skirtingboard_contour(objs: list[bpy.types.Object], tag: t.Subpart): + # make the outline curve + + assert len(objs) > 0 + + objs = [ + tagging.extract_tagged_faces(o, {tag, t.Subpart.Visible}, nonempty=True) + for o in list(objs) + ] + + all_polys = [] + all_zs = [] + for floor_pieces in objs: + all_polys.append(obj2polygon(floor_pieces)) + all_zs.append(read_co(floor_pieces)[:, -1] + floor_pieces.location[-1]) + + floor_z = np.mean(np.concatenate(all_zs)) + + boundary = ( + unary_union(all_polys) + .buffer(0.05, join_style="mitre") + .buffer(-0.05, join_style="mitre") + ) + + if isinstance(boundary, Polygon): + boundaries = [boundary] + else: + boundaries = boundary.geoms + + contours = [] + + for b in boundaries: + lr = b.exterior + o = linear_ring2curve(lr) + contours.append(o) + o.location[-1] += floor_z + butil.apply_transform(o, True) + for lr in b.interiors: + o = linear_ring2curve(lr, True) + contours.append(o) + o.location[-1] += floor_z + butil.apply_transform(o, True) + butil.delete(objs) + return contours + + +def make_skirting_board(objs, tag, joined=True): + if joined: + seqs = list( + [o for o in objs if get_room_level(o.name.split(".")[0]) == i] for i in [0] + ) + else: + seqs = [[o] for o in objs] + + for s in seqs: + logger.debug(f"make_skirting_board for {len(objs)=} {tag=}") + + try: + contours = make_skirtingboard_contour(s, tag) + except shapely.errors.GEOSException as e: + logger.warning( + f"make_skirting_board({objs=}, {tag=}) failed with {e}, skipping" + ) + return + + obj = new_plane() + obj.name = "skirtingboard_" + tag.value + + col = butil.put_in_collection(contours, "contour") + kwargs = { + "contour": col, + "seed": np.random.randint(1e7), + "is_ceiling": tag == t.Subpart.Ceiling, + } + surface.add_geomod(obj, apply_skirtingboard, apply=True, input_kwargs=kwargs) + + portal_cutters = butil.get_collection("placeholders:portal_cutters").objects + for p in portal_cutters: + if ( + p.name.startswith("entrance") + and int(p.location[-1] / WALL_HEIGHT - 1 / 2) == 0 + ): + p.location[-1] -= WALL_HEIGHT / 2 + butil.modify_mesh( + obj, + "BOOLEAN", + object=p, + operation="DIFFERENCE", + use_self=True, + use_hole_tolerant=True, + ) + p.location[-1] += WALL_HEIGHT / 2 + butil.delete_collection(col) + col = butil.get_collection("skirting") + butil.put_in_collection(obj, col) + + +def linear_ring2curve(ring, reversed=False): + coords = ring.coords + if shapely.is_ccw(ring) == reversed: + coords = coords[::-1] + coords = np.array(coords) + lengths = np.linalg.norm(coords[:-1] - coords[1:], axis=-1) + invalid = np.sort( + np.nonzero( + (np.abs(lengths - WALL_THICKNESS) < 0.02) + | (np.abs(lengths - DOOR_WIDTH) < 0.02) + )[0] + ) + ranges = -1, *invalid, len(coords) + curves = [] + for l, r in zip(ranges[:-1], ranges[1:]): + x, y = np.array(coords[l + 1 : r + 1]).T + if len(x) > 1: + curves.append(bezier_curve((x, y, 0), list(np.arange(len(x))), 1, False)) + return join_objects(curves) diff --git a/infinigen/assets/wall_decorations/wall_art.py b/infinigen/assets/objects/wall_decorations/wall_art.py similarity index 58% rename from infinigen/assets/wall_decorations/wall_art.py rename to infinigen/assets/objects/wall_decorations/wall_art.py index 8e2cc7497..c8210955c 100644 --- a/infinigen/assets/wall_decorations/wall_art.py +++ b/infinigen/assets/objects/wall_decorations/wall_art.py @@ -6,52 +6,50 @@ import numpy as np from numpy.random import uniform +from 
infinigen.assets.material_assignments import AssetList from infinigen.assets.materials.art import Art -from infinigen.assets.utils.object import join_objects, new_plane, new_bbox +from infinigen.assets.utils.object import join_objects, new_bbox, new_plane from infinigen.assets.utils.uv import wrap_sides from infinigen.core.placement.factory import AssetFactory +from infinigen.core.util import blender as butil from infinigen.core.util.blender import deep_clone_obj from infinigen.core.util.math import FixedSeed from infinigen.core.util.random import log_uniform -from infinigen.core.util import blender as butil -from infinigen.assets.material_assignments import AssetList class WallArtFactory(AssetFactory): - def __init__(self, factory_seed, coarse=False): super(WallArtFactory, self).__init__(factory_seed, coarse) with FixedSeed(self.factory_seed): - self.width = log_uniform(.4, 2) - self.height = log_uniform(.4, 2) - self.thickness = uniform(.02, .05) - self.depth = uniform(.01, .02) + self.width = log_uniform(0.4, 2) + self.height = log_uniform(0.4, 2) + self.thickness = uniform(0.02, 0.05) + self.depth = uniform(0.01, 0.02) self.frame_bevel_segments = np.random.choice([0, 1, 4]) self.frame_bevel_width = uniform(self.depth / 4, self.depth / 2) - self.material_assignments = AssetList['WallArtFactory']() + self.material_assignments = AssetList["WallArtFactory"]() self.assign_materials() def assign_materials(self): - # self.surface = Art(self.factory_seed) + # self.surface = Art(self.factory_seed) assignments = self.material_assignments - self.surface = assignments['surface'].assign_material() + self.surface = assignments["surface"].assign_material() if self.surface == Art: self.surface = self.surface(self.factory_seed) - self.frame_surface = assignments['frame'].assign_material() - is_scratch = uniform() < assignments['wear_tear_prob'][0] - is_edge_wear = uniform() < assignments['wear_tear_prob'][1] - self.scratch = assignments['wear_tear'][0] if is_scratch else None - self.edge_wear = assignments['wear_tear'][1] if is_edge_wear else None - + self.frame_surface = assignments["frame"].assign_material() + is_scratch = uniform() < assignments["wear_tear_prob"][0] + is_edge_wear = uniform() < assignments["wear_tear_prob"][1] + self.scratch = assignments["wear_tear"][0] if is_scratch else None + self.edge_wear = assignments["wear_tear"][1] if is_edge_wear else None def create_placeholder(self, **params): return new_bbox( - -0.01, - 0.15, - -self.width / 2 - self.thickness, + -0.01, + 0.15, + -self.width / 2 - self.thickness, self.width / 2 + self.thickness, - -self.height / 2 - self.thickness, - self.height / 2 + self.thickness, + -self.height / 2 - self.thickness, + self.height / 2 + self.thickness, ) def create_asset(self, placeholder, **params) -> bpy.types.Object: @@ -61,30 +59,36 @@ def create_asset(self, placeholder, **params) -> bpy.types.Object: butil.apply_transform(obj, True) frame = deep_clone_obj(obj) - wrap_sides(obj, self.surface, 'x', 'y', 'z') + wrap_sides(obj, self.surface, "x", "y", "z") butil.select_none() - with butil.ViewportMode(frame, 'EDIT'): - bpy.ops.mesh.select_all(action='SELECT') - bpy.ops.mesh.delete(type='ONLY_FACE') - butil.modify_mesh(frame, 'SOLIDIFY', thickness=self.thickness, offset=1) - with butil.ViewportMode(frame, 'EDIT'): - bpy.ops.mesh.select_all(action='SELECT') + with butil.ViewportMode(frame, "EDIT"): + bpy.ops.mesh.select_all(action="SELECT") + bpy.ops.mesh.delete(type="ONLY_FACE") + butil.modify_mesh(frame, "SOLIDIFY", thickness=self.thickness, 
offset=1) + with butil.ViewportMode(frame, "EDIT"): + bpy.ops.mesh.select_all(action="SELECT") bpy.ops.mesh.bridge_edge_loops() - butil.modify_mesh(frame, 'SOLIDIFY', thickness=self.depth, offset=1) + butil.modify_mesh(frame, "SOLIDIFY", thickness=self.depth, offset=1) if self.frame_bevel_segments > 0: - butil.modify_mesh(frame, 'BEVEL', width=self.frame_bevel_width, segments=self.frame_bevel_segments) + butil.modify_mesh( + frame, + "BEVEL", + width=self.frame_bevel_width, + segments=self.frame_bevel_segments, + ) self.frame_surface.apply(frame) obj = join_objects([obj, frame]) return obj - + def finalize_assets(self, assets): if self.scratch: self.scratch.apply(assets) if self.edge_wear: self.edge_wear.apply(assets) + class MirrorFactory(WallArtFactory): def __init__(self, factory_seed, coarse=False): super(MirrorFactory, self).__init__(factory_seed, coarse) - self.material_assignments = AssetList['MirrorFactory']() + self.material_assignments = AssetList["MirrorFactory"]() self.assign_materials() diff --git a/infinigen/assets/objects/wall_decorations/wall_shelf.py b/infinigen/assets/objects/wall_decorations/wall_shelf.py new file mode 100644 index 000000000..b5e4d7de6 --- /dev/null +++ b/infinigen/assets/objects/wall_decorations/wall_shelf.py @@ -0,0 +1,180 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Lingjie Mei +import bpy +import numpy as np +import shapely +import shapely.affinity +from numpy.random import uniform + +from infinigen.assets.materials import metal, plastic +from infinigen.assets.materials.woods import wood +from infinigen.assets.utils.decorate import ( + read_edge_center, + read_edge_direction, + select_edges, +) +from infinigen.assets.utils.object import join_objects, new_bbox, new_bbox_2d +from infinigen.assets.utils.shapes import polygon2obj +from infinigen.core import tagging as t +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.surface import write_attr_data +from infinigen.core.tags import Subpart +from infinigen.core.util import blender as butil +from infinigen.core.util.blender import deep_clone_obj +from infinigen.core.util.random import log_uniform +from infinigen.core.util.random import random_general as rg + + +class WallShelfFactory(AssetFactory): + support_sides_ = ( + "weighted_choice", + (0.5, "none"), + (1, "bottom"), + (1, "top"), + (1.5, "both"), + ) + support_margins = "weighted_choice", (2, 0), (1, ("uniform", 0.0, 0.2)) + support_ratios = "weighted_choice", (2, 1), (1, ("uniform", 0.5, 0.9)) + support_alphas = ( + "weighted_choice", + (1, 1), + ( + 1, + ( + "weighted_choice", + (1, ("log_uniform", 0.4, 0.7)), + (2, ("log_uniform", 1.5, 3)), + (1, 10), + ), + ), + ) + support_joins = "mitre", "round", "bevel" + plate_bevels = "weighted_choice", (1, "none"), (1, "front"), (1, "side") + + plate_surfaces = "weighted_choice", (2, wood), (1, metal) + support_surfaces = "weighted_choice", (2, metal), (1, wood), (2, plastic) + + def __init__(self, factory_seed, coarse=False): + super(WallShelfFactory, self).__init__(factory_seed, coarse) + self.support_side = rg(self.support_sides_) + self.support_margin = rg(self.support_margins) + if self.support_margin == 0: + n_support = np.random.choice([2, 3, 4], p=[0.7, 0.2, 0.1]) + else: + n_support = np.random.choice([2, 3], p=[0.8, 0.2]) + self.support_locs = np.linspace( + -0.5 + self.support_margin, 0.5 - self.support_margin, n_support + ) 
+ self.length = log_uniform(0.3, 0.8) + self.width = log_uniform(0.1, 0.2) + match self.support_side: + case "none": + self.thickness = log_uniform(0.03, 0.08) + case _: + self.thickness = log_uniform(0.01, 0.05) + self.support_width = log_uniform(0.01, 0.015) + self.support_thickness = self.support_width * log_uniform(0.4, 1.0) + self.support_length = self.width * uniform(0.7, 1.1) + self.plate_bevel = rg(self.plate_bevels) + self.support_join = np.random.choice(self.support_joins) + self.plate_surface = rg(self.plate_surfaces) + self.support_surface = rg(self.support_surfaces) + + def create_placeholder(self, **kwargs) -> bpy.types.Object: + box = new_bbox( + 0, + self.width, + -self.length / 2, + self.length / 2, + -self.support_length, + self.support_length, + ) + plane = new_bbox_2d( + 0, self.width, -self.length / 2, self.length / 2, self.thickness / 2 + ) + write_attr_data( + plane, + f"{t.PREFIX}{Subpart.SupportSurface.value}", + np.ones(1).astype(bool), + "INT", + "FACE", + ) + return join_objects([box, plane]) + + def create_asset(self, **params) -> bpy.types.Object: + obj = self.make_plate() + self.plate_surface.apply(obj) + if self.support_side != "none": + support = self.make_support() + supports = [support] + [ + deep_clone_obj(support) for _ in range(len(self.support_locs) - 1) + ] + for s, l in zip(supports, self.support_locs): + s.location[1] = self.length * l + self.support_surface.apply(supports) + obj = join_objects([obj] + supports) + return obj + + def make_plate(self): + obj = new_bbox( + 0, + self.width, + -self.length / 2, + self.length / 2, + -self.thickness / 2, + self.thickness / 2, + ) + c = read_edge_center(obj) + d = read_edge_direction(obj) + front = (np.abs(d[:, 1]) > 0.5) & (c[:, 0] > 0.1) + side = np.abs(d[:, 0]) > 0.5 + match self.plate_bevel: + case "front": + selection = front + case "side": + selection = front + side + case _: + selection = np.zeros_like(front) + with butil.ViewportMode(obj, "EDIT"): + select_edges(obj, selection) + bpy.ops.mesh.bevel( + offset=uniform(0.3, 0.5) * self.thickness, + segments=np.random.randint(4, 9), + ) + return obj + + def make_support_contour(self): + l = shapely.LineString(np.array([(1, 0), (0, 0), (0, 1)]) * self.support_length) + theta = np.linspace(0, np.pi / 2, 31) + alpha = rg(self.support_alphas) + r = 1 / ((np.cos(theta) + 1e-6) ** alpha + (np.sin(theta) + 1e-6) ** alpha) ** ( + 1 / alpha + ) + xy = r[:, np.newaxis] * np.stack([np.cos(theta), np.sin(theta)], -1) + d = shapely.LineString(xy * self.support_length * rg(self.support_ratios)) + return shapely.union(l, d) + + def make_support(self): + lines = [] + if self.support_side in ["top", "both"]: + lines.append(self.make_support_contour()) + if self.support_side in ["bottom", "both"]: + lines.append( + shapely.affinity.scale(self.make_support_contour(), 1, -1, 1, (0, 0, 0)) + ) + + contour = shapely.union_all(lines).buffer( + self.support_thickness / 2, join_style=self.support_join + ) + obj = polygon2obj(contour) + obj.rotation_euler[0] = np.pi / 2 + obj.location = self.support_thickness / 2, -self.support_width / 2, 0 + butil.apply_transform(obj, True) + with butil.ViewportMode(obj, "EDIT"): + bpy.ops.mesh.select_all(action="SELECT") + bpy.ops.mesh.extrude_region_move( + TRANSFORM_OT_translate={"value": (0, self.support_width, 0)} + ) + return obj diff --git a/infinigen/assets/windows/__init__.py b/infinigen/assets/objects/windows/__init__.py similarity index 85% rename from infinigen/assets/windows/__init__.py rename to 
infinigen/assets/objects/windows/__init__.py index 4aed7852b..34b6cf7a4 100644 --- a/infinigen/assets/windows/__init__.py +++ b/infinigen/assets/objects/windows/__init__.py @@ -3,4 +3,4 @@ # Authors: Hongyu Wen -from .window import WindowFactory \ No newline at end of file +from .window import WindowFactory diff --git a/infinigen/assets/objects/windows/window.py b/infinigen/assets/objects/windows/window.py new file mode 100644 index 000000000..5647c2967 --- /dev/null +++ b/infinigen/assets/objects/windows/window.py @@ -0,0 +1,1621 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: +# - Hongyu Wen: primary author +# - Alexander Raistrick: update window glass + +import random + +import bpy +import numpy as np +from numpy.random import randint as RI +from numpy.random import uniform +from numpy.random import uniform as U + +from infinigen.assets.materials import metal_shader_list, wood_shader_list +from infinigen.assets.utils.autobevel import BevelSharp +from infinigen.core import surface +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.util import blender as butil +from infinigen.core.util.blender import deep_clone_obj +from infinigen.core.util.color import color_category +from infinigen.core.util.math import FixedSeed, clip_gaussian + + +def shader_window_glass(nw: NodeWrangler): + """Non-refractive glass shader, since windows consist of a one-sided mesh currently and would not properly + refract-then un-refract the light + """ + + roughness = clip_gaussian(0, 0.015, 0, 0.03, 0.03) + transmission = uniform(0.05, 0.12) + + # non-refractive glass + transparent_bsdf = nw.new_node(Nodes.TransparentBSDF) + shader = nw.new_node(Nodes.GlossyBSDF, input_kwargs={"Roughness": roughness}) + shader = nw.new_node( + Nodes.MixShader, + input_kwargs={"Fac": transmission, 1: transparent_bsdf, 2: shader}, + ) + + # complete pass-through for non-camera rays, for render efficiency + light_path = nw.new_node(Nodes.LightPath) + shader = nw.new_node( + Nodes.MixShader, + input_kwargs={ + "Fac": light_path.outputs["Is Camera Ray"], + 1: transparent_bsdf, + 2: shader, + }, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, + input_kwargs={"Surface": shader}, + attrs={"is_active_output": True}, + ) + + +class WindowFactory(AssetFactory): + def __init__(self, factory_seed, coarse=False, curtain=None, shutter=None): + super(WindowFactory, self).__init__(factory_seed, coarse=coarse) + + with FixedSeed(factory_seed): + self.params = self.sample_parameters() + self.beveler = BevelSharp() + self.curtain = curtain + self.shutter = shutter + + @staticmethod + def sample_parameters(): + frame_width = U(0.05, 0.1) + sub_frame_width = U(0.01, frame_width) + sub_frame_h_amount = RI(1, 2) + sub_frame_v_amount = RI(1, 2) + glass_thickness = U(0.01, 0.03) + + shutter_panel_radius = U(0.001, 0.003) + shutter_width = U(0.03, 0.05) + shutter_thickness = U(0.003, 0.007) + shutter_rotation = U(0, 1) + shutter_inverval = shutter_width + U(0.001, 0.003) + + curtain_frame_depth = U(0.05, 0.1) + curtain_depth = U(0.03, curtain_frame_depth) + curtain_frame_radius = U(0.01, 0.02) + + shader_frame_material_choice = random.choice(wood_shader_list) + shader_curtain_frame_material_choice = random.choice(metal_shader_list) + shader_curtain_material_choice = 
shader_curtain_material + + params = { + "FrameWidth": frame_width, + "SubFrameWidth": sub_frame_width, + "SubPanelHAmount": sub_frame_h_amount, + "SubPanelVAmount": sub_frame_v_amount, + "GlassThickness": glass_thickness, + "CurtainFrameDepth": curtain_frame_depth, + "CurtainDepth": curtain_depth, + "CurtainFrameRadius": curtain_frame_radius, + "ShutterPanelRadius": shutter_panel_radius, + "ShutterWidth": shutter_width, + "ShutterThickness": shutter_thickness, + "ShutterRotation": shutter_rotation, + "ShutterInterval": shutter_inverval, + "FrameMaterial": surface.shaderfunc_to_material( + shader_frame_material_choice, vertical=True + ), + "CurtainFrameMaterial": surface.shaderfunc_to_material( + shader_curtain_frame_material_choice + ), + "CurtainMaterial": surface.shaderfunc_to_material( + shader_curtain_material_choice + ), + "Material": surface.shaderfunc_to_material(shader_window_glass), + } + return params + + def sample_asset_params( + self, dimensions=None, open=None, curtain=None, shutter=None + ): + if dimensions is None: + width = U(1, 4) + height = U(1, 4) + frame_thickness = U(0.05, 0.15) + else: + width, height, frame_thickness = dimensions + + panel_h_amount = RI(1, 2) + v_ = width / height * panel_h_amount + panel_v_amount = int(uniform(v_ * 1.6, v_ * 2.5)) + + if open is None: + open = U(0, 1) < 0.5 + + if shutter is None: + shutter = U(0, 1) < 0.5 + + if curtain is None: + curtain = U(0, 1) < 0.5 + if curtain: + open = False + sub_frame_thickness = U(0.01, frame_thickness) + + open = False # keep windows closed on generation, let articulation module handle this later on + open_type = RI(0, 3) + open_offset = 0 + oe_offset = 0 + if open_type == 0: + if frame_thickness < sub_frame_thickness * 2: + open_type = RI(1, 2) + else: + oe_offset = U( + sub_frame_thickness / 2, + (frame_thickness - 2 * sub_frame_thickness) / 2, + ) + if open: + open_offset = U(0, width / panel_h_amount) + else: + open_offset = 0 + open_h_angle = U(0, 0.3) if open_type == 1 and open else 0 + open_v_angle = -U(0, 0.3) if open_type == 2 and open else 0 + + curtain_interval_number = int(width / U(0.08, 0.2)) + curtain_mid_l = -U(0, width / 2) + curtain_mid_r = U(0, width / 2) + return { + **self.params, + "Width": width, + "Height": height, + "FrameThickness": frame_thickness, + "PanelHAmount": panel_h_amount, + "PanelVAmount": panel_v_amount, + "SubFrameThickness": sub_frame_thickness, + "OpenHAngle": open_h_angle, + "OpenVAngle": open_v_angle, + "OpenOffset": open_offset, + "OEOffset": oe_offset, + "Curtain": curtain, + "CurtainIntervalNumber": curtain_interval_number, + "CurtainMidL": curtain_mid_l, + "CurtainMidR": curtain_mid_r, + "Shutter": shutter, + } + + def create_asset(self, dimensions=None, open=None, realized=True, **params): + obj = butil.spawn_cube() + + butil.modify_mesh( + obj, + "NODES", + node_group=nodegroup_window_geometry(), + ng_inputs=self.sample_asset_params( + dimensions, open, self.curtain, self.shutter + ), + apply=realized, + ) + + obj.rotation_euler[0] = np.pi / 2 + butil.apply_transform(obj, True) + obj_ = deep_clone_obj(obj) + self.beveler(obj) + if max(obj.dimensions) > 8: + butil.delete(obj) + obj = obj_ + else: + butil.delete(obj_) + + bpy.ops.object.light_add( + type="AREA", radius=1, align="WORLD", location=(0, 0, 0), scale=(1, 1, 1) + ) + portal = bpy.context.active_object + + w, _, h = obj.dimensions + portal.scale = (w, h, 1) + portal.data.cycles.is_portal = True + portal.rotation_euler = (-np.pi / 2, 0, 0) + butil.parent_to(portal, obj, no_inverse=True) + 
portal.hide_viewport = True + + return obj + + +@node_utils.to_nodegroup( + "nodegroup_window_geometry", singleton=True, type="GeometryNodeTree" +) +def nodegroup_window_geometry(nw: NodeWrangler): + # Code generated using version 2.6.5 of the node_transpiler + + group_input_1 = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloatDistance", "Width", 2.0000), + ("NodeSocketFloatDistance", "Height", 2.0000), + ("NodeSocketFloatDistance", "FrameWidth", 0.1000), + ("NodeSocketFloatDistance", "FrameThickness", 0.1000), + ("NodeSocketInt", "PanelHAmount", 0), + ("NodeSocketInt", "PanelVAmount", 0), + ("NodeSocketFloatDistance", "SubFrameWidth", 0.0500), + ("NodeSocketFloatDistance", "SubFrameThickness", 0.0500), + ("NodeSocketInt", "SubPanelHAmount", 3), + ("NodeSocketInt", "SubPanelVAmount", 2), + ("NodeSocketFloat", "GlassThickness", 0.0100), + ("NodeSocketFloat", "OpenHAngle", 0.5000), + ("NodeSocketFloat", "OpenVAngle", 0.5000), + ("NodeSocketFloat", "OpenOffset", 0.5000), + ("NodeSocketFloat", "OEOffset", 0.0500), + ("NodeSocketBool", "Curtain", False), + ("NodeSocketFloat", "CurtainFrameDepth", 0.5000), + ("NodeSocketFloat", "CurtainDepth", 0.0300), + ("NodeSocketFloat", "CurtainIntervalNumber", 20.0000), + ("NodeSocketFloatDistance", "CurtainFrameRadius", 0.0100), + ("NodeSocketFloat", "CurtainMidL", -0.5000), + ("NodeSocketFloat", "CurtainMidR", 0.5000), + ("NodeSocketBool", "Shutter", True), + ("NodeSocketFloatDistance", "ShutterPanelRadius", 0.0050), + ("NodeSocketFloatDistance", "ShutterWidth", 0.0500), + ("NodeSocketFloatDistance", "ShutterThickness", 0.0050), + ("NodeSocketFloat", "ShutterRotation", 0.0000), + ("NodeSocketFloat", "ShutterInterval", 0.0500), + ("NodeSocketMaterial", "FrameMaterial", None), + ("NodeSocketMaterial", "CurtainFrameMaterial", None), + ("NodeSocketMaterial", "CurtainMaterial", None), + ("NodeSocketMaterial", "Material", None), + ], + ) + + windowpanel = nw.new_node( + nodegroup_window_panel().name, + input_kwargs={ + "Width": group_input_1.outputs["Width"], + "Height": group_input_1.outputs["Height"], + "FrameWidth": group_input_1.outputs["FrameWidth"], + "FrameThickness": group_input_1.outputs["FrameThickness"], + "PanelWidth": group_input_1.outputs["FrameWidth"], + "PanelThickness": group_input_1.outputs["FrameThickness"], + "PanelHAmount": group_input_1.outputs["PanelHAmount"], + "PanelVAmount": group_input_1.outputs["PanelVAmount"], + "FrameMaterial": group_input_1.outputs["FrameMaterial"], + "Material": group_input_1.outputs["Material"], + }, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input_1.outputs["FrameWidth"], + 1: group_input_1.outputs["PanelVAmount"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input_1.outputs["Width"], 1: multiply}, + attrs={"operation": "SUBTRACT"}, + ) + + divide = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract, 1: group_input_1.outputs["PanelVAmount"]}, + attrs={"operation": "DIVIDE"}, + ) + + subtract_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: divide, 1: group_input_1.outputs["SubFrameWidth"]}, + attrs={"operation": "SUBTRACT"}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input_1.outputs["FrameWidth"], + 1: group_input_1.outputs["PanelHAmount"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + subtract_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input_1.outputs["Height"], 1: multiply_1}, + attrs={"operation": "SUBTRACT"}, + ) + + divide_1 = nw.new_node( 
+ Nodes.Math, + input_kwargs={0: subtract_2, 1: group_input_1.outputs["PanelHAmount"]}, + attrs={"operation": "DIVIDE"}, + ) + + subtract_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: divide_1, 1: group_input_1.outputs["SubFrameWidth"]}, + attrs={"operation": "SUBTRACT"}, + ) + + windowpanel_1 = nw.new_node( + nodegroup_window_panel().name, + input_kwargs={ + "Width": subtract_1, + "Height": subtract_3, + "FrameWidth": group_input_1.outputs["SubFrameWidth"], + "FrameThickness": group_input_1.outputs["SubFrameThickness"], + "PanelWidth": group_input_1.outputs["SubFrameWidth"], + "PanelThickness": group_input_1.outputs["SubFrameThickness"], + "PanelHAmount": group_input_1.outputs["SubPanelHAmount"], + "PanelVAmount": group_input_1.outputs["SubPanelVAmount"], + "WithGlass": True, + "GlassThickness": group_input_1.outputs["GlassThickness"], + "FrameMaterial": group_input_1.outputs["FrameMaterial"], + "Material": group_input_1.outputs["Material"], + }, + ) + + windowshutter = nw.new_node( + nodegroup_window_shutter().name, + input_kwargs={ + "Width": subtract_1, + "Height": subtract_3, + "FrameWidth": group_input_1.outputs["FrameWidth"], + "FrameThickness": group_input_1.outputs["FrameThickness"], + "PanelWidth": group_input_1.outputs["ShutterPanelRadius"], + "PanelThickness": group_input_1.outputs["ShutterPanelRadius"], + "ShutterWidth": group_input_1.outputs["ShutterWidth"], + "ShutterThickness": group_input_1.outputs["ShutterThickness"], + "ShutterInterval": group_input_1.outputs["ShutterInterval"], + "ShutterRotation": group_input_1.outputs["ShutterRotation"], + "FrameMaterial": group_input_1.outputs["FrameMaterial"], + }, + ) + + switch = nw.new_node( + Nodes.Switch, + input_kwargs={ + 1: group_input_1.outputs["Shutter"], + 14: windowpanel_1, + 15: windowshutter, + }, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input_1.outputs["Width"], 1: -0.5000}, + attrs={"operation": "MULTIPLY"}, + ) + + divide_2 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input_1.outputs["Width"], + 1: group_input_1.outputs["PanelVAmount"], + }, + attrs={"operation": "DIVIDE"}, + ) + + multiply_3 = nw.new_node( + Nodes.Math, input_kwargs={0: divide_2}, attrs={"operation": "MULTIPLY"} + ) + + add = nw.new_node(Nodes.Math, input_kwargs={0: multiply_2, 1: multiply_3}) + + multiply_4 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input_1.outputs["Height"], 1: -0.5000}, + attrs={"operation": "MULTIPLY"}, + ) + + divide_3 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input_1.outputs["Height"], + 1: group_input_1.outputs["PanelHAmount"], + }, + attrs={"operation": "DIVIDE"}, + ) + + multiply_5 = nw.new_node( + Nodes.Math, input_kwargs={0: divide_3}, attrs={"operation": "MULTIPLY"} + ) + + add_1 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_4, 1: multiply_5}) + + combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={"X": add, "Y": add_1}) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": switch.outputs[6], "Translation": combine_xyz}, + ) + + geometry_to_instance = nw.new_node( + "GeometryNodeGeometryToInstance", input_kwargs={"Geometry": transform} + ) + + multiply_6 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input_1.outputs["PanelHAmount"], + 1: group_input_1.outputs["PanelVAmount"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + duplicate_elements = nw.new_node( + Nodes.DuplicateElements, + input_kwargs={"Geometry": geometry_to_instance, "Amount": multiply_6}, + attrs={"domain": "INSTANCE"}, + ) + + reroute = 
nw.new_node( + Nodes.Reroute, input_kwargs={"Input": group_input_1.outputs["PanelHAmount"]} + ) + + divide_4 = nw.new_node( + Nodes.Math, + input_kwargs={0: duplicate_elements.outputs["Duplicate Index"], 1: reroute}, + attrs={"operation": "DIVIDE"}, + ) + + floor = nw.new_node( + Nodes.Math, input_kwargs={0: divide_4}, attrs={"operation": "FLOOR"} + ) + + add_2 = nw.new_node( + Nodes.Math, input_kwargs={0: divide, 1: group_input_1.outputs["FrameWidth"]} + ) + + multiply_7 = nw.new_node( + Nodes.Math, input_kwargs={0: floor, 1: add_2}, attrs={"operation": "MULTIPLY"} + ) + + modulo = nw.new_node( + Nodes.Math, + input_kwargs={0: duplicate_elements.outputs["Duplicate Index"], 1: reroute}, + attrs={"operation": "MODULO"}, + ) + + add_3 = nw.new_node( + Nodes.Math, input_kwargs={0: divide_1, 1: group_input_1.outputs["FrameWidth"]} + ) + + multiply_8 = nw.new_node( + Nodes.Math, input_kwargs={0: modulo, 1: add_3}, attrs={"operation": "MULTIPLY"} + ) + + power = nw.new_node( + Nodes.Math, input_kwargs={0: -1.0000, 1: floor}, attrs={"operation": "POWER"} + ) + + multiply_9 = nw.new_node( + Nodes.Math, + input_kwargs={0: power, 1: group_input_1.outputs["OEOffset"]}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": multiply_7, "Y": multiply_8, "Z": multiply_9}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": duplicate_elements.outputs["Geometry"], + "Offset": combine_xyz_1, + }, + ) + + power_1 = nw.new_node( + Nodes.Math, input_kwargs={0: -1.0000, 1: floor}, attrs={"operation": "POWER"} + ) + + multiply_10 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input_1.outputs["OpenVAngle"], 1: power_1}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Y": multiply_10}) + + modulo_1 = nw.new_node( + Nodes.Math, input_kwargs={0: floor, 1: 2.0000}, attrs={"operation": "MODULO"} + ) + + multiply_11 = nw.new_node( + Nodes.Math, + input_kwargs={0: divide, 1: modulo_1}, + attrs={"operation": "MULTIPLY"}, + ) + + add_4 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_2, 1: multiply_11}) + + modulo_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_8, 1: 2.0000}, + attrs={"operation": "MODULO"}, + ) + + multiply_12 = nw.new_node( + Nodes.Math, + input_kwargs={0: divide_1, 1: modulo_2}, + attrs={"operation": "MULTIPLY"}, + ) + + add_5 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_4, 1: multiply_12}) + + combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"X": add_4, "Y": add_5}) + + rotate_instances = nw.new_node( + Nodes.RotateInstances, + input_kwargs={ + "Instances": set_position, + "Rotation": combine_xyz_3, + "Pivot Point": combine_xyz_2, + }, + ) + + multiply_13 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input_1.outputs["OpenHAngle"]}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_5 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"X": multiply_13}) + + multiply_14 = nw.new_node( + Nodes.Math, input_kwargs={0: add_3, 1: -0.5000}, attrs={"operation": "MULTIPLY"} + ) + + combine_xyz_6 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Y": multiply_14}) + + rotate_instances_1 = nw.new_node( + Nodes.RotateInstances, + input_kwargs={ + "Instances": rotate_instances, + "Rotation": combine_xyz_5, + "Pivot Point": combine_xyz_6, + }, + ) + + power_2 = nw.new_node( + Nodes.Math, input_kwargs={0: -1.0000, 1: floor}, attrs={"operation": "POWER"} + ) + + multiply_15 = nw.new_node( + Nodes.Math, + input_kwargs={0: 
power_2, 1: group_input_1.outputs["OpenOffset"]}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_4 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"X": multiply_15}) + + set_position_1 = nw.new_node( + Nodes.SetPosition, + input_kwargs={"Geometry": rotate_instances_1, "Offset": combine_xyz_4}, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [windowpanel, set_position_1]} + ) + + multiply_16 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input_1.outputs["Width"]}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_17 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_16, 1: -1.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_18 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input_1.outputs["CurtainFrameDepth"], 1: -1.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + curtain = nw.new_node( + nodegroup_curtain().name, + input_kwargs={ + "Width": group_input_1.outputs["Width"], + "Depth": group_input_1.outputs["CurtainDepth"], + "Height": group_input_1.outputs["Height"], + "IntervalNumber": group_input_1.outputs["CurtainIntervalNumber"], + "Radius": group_input_1.outputs["CurtainFrameRadius"], + "L1": multiply_17, + "R1": group_input_1.outputs["CurtainMidL"], + "L2": group_input_1.outputs["CurtainMidR"], + "R2": multiply_16, + "FrameDepth": multiply_18, + "CurtainFrameMaterial": group_input_1.outputs["CurtainFrameMaterial"], + "CurtainMaterial": group_input_1.outputs["CurtainMaterial"], + }, + ) + + multiply_19 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input_1.outputs["FrameThickness"]}, + attrs={"operation": "MULTIPLY"}, + ) + + add_6 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input_1.outputs["CurtainFrameDepth"], 1: multiply_19}, + ) + + combine_xyz_7 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": add_6}) + + transform_geometry = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": curtain, "Translation": combine_xyz_7}, + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [transform_geometry, join_geometry]}, + ) + + switch_1 = nw.new_node( + Nodes.Switch, + input_kwargs={ + 1: group_input_1.outputs["Curtain"], + 14: join_geometry, + 15: join_geometry_1, + }, + ) + + realize_instances = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": switch_1.outputs[6]} + ) + + bounding_box = nw.new_node( + Nodes.BoundingBox, input_kwargs={"Geometry": realize_instances} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": realize_instances, + "Bounding Box": bounding_box.outputs["Bounding Box"], + }, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup("nodegroup_line_seq", singleton=False, type="GeometryNodeTree") +def nodegroup_line_seq(nw: NodeWrangler): + # Code generated using version 2.6.5 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "Width", -1.0000), + ("NodeSocketFloat", "Height", 0.5000), + ("NodeSocketFloat", "Amount", 0.5000), + ], + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Width"]}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Height"], 1: -0.5000}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": multiply, "Y": multiply_1} + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Width"], 1: -0.5000}, + 
attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": multiply_2, "Y": multiply_1} + ) + + curve_line = nw.new_node( + Nodes.CurveLine, input_kwargs={"Start": combine_xyz, "End": combine_xyz_1} + ) + + geometry_to_instance = nw.new_node( + "GeometryNodeGeometryToInstance", input_kwargs={"Geometry": curve_line} + ) + + duplicate_elements = nw.new_node( + Nodes.DuplicateElements, + input_kwargs={ + "Geometry": geometry_to_instance, + "Amount": group_input.outputs["Amount"], + }, + attrs={"domain": "INSTANCE"}, + ) + + add = nw.new_node( + Nodes.Math, + input_kwargs={0: duplicate_elements.outputs["Duplicate Index"], 1: 1.0000}, + ) + + add_1 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["Amount"], 1: 1.0000} + ) + + divide = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Height"], 1: add_1}, + attrs={"operation": "DIVIDE"}, + ) + + multiply_3 = nw.new_node( + Nodes.Math, input_kwargs={0: add, 1: divide}, attrs={"operation": "MULTIPLY"} + ) + + combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Y": multiply_3}) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": duplicate_elements.outputs["Geometry"], + "Offset": combine_xyz_2, + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Curve": set_position}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup("nodegroup_curtain", singleton=False, type="GeometryNodeTree") +def nodegroup_curtain(nw: NodeWrangler): + # Code generated using version 2.6.5 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "Width", 0.5000), + ("NodeSocketFloat", "Depth", 0.1000), + ("NodeSocketFloatDistance", "Height", 0.1000), + ("NodeSocketFloat", "IntervalNumber", 0.5000), + ("NodeSocketFloatDistance", "Radius", 1.0000), + ("NodeSocketFloat", "L1", 0.5000), + ("NodeSocketFloat", "R1", 0.0000), + ("NodeSocketFloat", "L2", 0.0000), + ("NodeSocketFloat", "R2", 0.5000), + ("NodeSocketFloat", "FrameDepth", 0.0000), + ("NodeSocketMaterial", "CurtainFrameMaterial", None), + ("NodeSocketMaterial", "CurtainMaterial", None), + ], + ) + + reroute_1 = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": group_input.outputs["Radius"]} + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: reroute_1, 1: 2.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + ico_sphere = nw.new_node( + Nodes.MeshIcoSphere, input_kwargs={"Radius": multiply, "Subdivisions": 4} + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Width"]}, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_1, 1: -1.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={"X": multiply_2}) + + combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"X": multiply_1}) + + curve_line = nw.new_node( + Nodes.CurveLine, input_kwargs={"Start": combine_xyz, "End": combine_xyz_1} + ) + + sample_curve_1 = nw.new_node( + Nodes.SampleCurve, input_kwargs={"Curves": curve_line, "Factor": 1.0000} + ) + + set_position_2 = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": ico_sphere.outputs["Mesh"], + "Offset": sample_curve_1.outputs["Position"], + }, + ) + + combine_xyz_9 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": multiply_1, "Z": group_input.outputs["FrameDepth"]}, + ) + + curve_line_4 = nw.new_node( + 
Nodes.CurveLine, input_kwargs={"Start": combine_xyz_1, "End": combine_xyz_9} + ) + + combine_xyz_8 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": multiply_2, "Z": group_input.outputs["FrameDepth"]}, + ) + + curve_line_3 = nw.new_node( + Nodes.CurveLine, input_kwargs={"Start": combine_xyz, "End": combine_xyz_8} + ) + + join_geometry_3 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [curve_line, curve_line_4, curve_line_3]}, + ) + + curve_circle = nw.new_node( + Nodes.CurveCircle, input_kwargs={"Radius": group_input.outputs["Radius"]} + ) + + curve_to_mesh_1 = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": join_geometry_3, + "Profile Curve": curve_circle.outputs["Curve"], + "Fill Caps": True, + }, + ) + + ico_sphere_1 = nw.new_node( + Nodes.MeshIcoSphere, input_kwargs={"Radius": multiply, "Subdivisions": 4} + ) + + sample_curve = nw.new_node(Nodes.SampleCurve, input_kwargs={"Curves": curve_line}) + + set_position_3 = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": ico_sphere_1.outputs["Mesh"], + "Offset": sample_curve.outputs["Position"], + }, + ) + + join_geometry_2 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [set_position_2, curve_to_mesh_1, set_position_3]}, + ) + + multiply_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Height"], 1: -0.4700}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Y": multiply_3}) + + set_position_1 = nw.new_node( + Nodes.SetPosition, + input_kwargs={"Geometry": join_geometry_2, "Offset": combine_xyz_3}, + ) + + set_material_1 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": set_position_1, + "Material": group_input.outputs["CurtainFrameMaterial"], + }, + ) + + combine_xyz_4 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": group_input.outputs["L1"]} + ) + + combine_xyz_5 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": group_input.outputs["R1"]} + ) + + curve_line_1 = nw.new_node( + Nodes.CurveLine, input_kwargs={"Start": combine_xyz_4, "End": combine_xyz_5} + ) + + resample_curve = nw.new_node( + Nodes.ResampleCurve, input_kwargs={"Curve": curve_line_1, "Count": 200} + ) + + combine_xyz_6 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": group_input.outputs["L2"]} + ) + + combine_xyz_7 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": group_input.outputs["R2"]} + ) + + curve_line_2 = nw.new_node( + Nodes.CurveLine, input_kwargs={"Start": combine_xyz_6, "End": combine_xyz_7} + ) + + resample_curve_1 = nw.new_node( + Nodes.ResampleCurve, input_kwargs={"Curve": curve_line_2, "Count": 200} + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [resample_curve, resample_curve_1]}, + ) + + spline_parameter_1 = nw.new_node(Nodes.SplineParameter) + + capture_attribute = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={ + "Geometry": join_geometry_1, + 2: spline_parameter_1.outputs["Factor"], + }, + ) + + spline_parameter = nw.new_node(Nodes.SplineParameter) + + multiply_4 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["IntervalNumber"], 1: 6.2800}, + attrs={"operation": "MULTIPLY"}, + ) + + divide = nw.new_node( + Nodes.Math, + input_kwargs={0: multiply_4, 1: group_input.outputs["Width"]}, + attrs={"operation": "DIVIDE"}, + ) + + multiply_5 = nw.new_node( + Nodes.Math, + input_kwargs={0: spline_parameter.outputs["Length"], 1: divide}, + attrs={"operation": "MULTIPLY"}, + ) + + add = nw.new_node(Nodes.Math, 
input_kwargs={0: multiply_5, 1: 1.6800}) + + sine = nw.new_node(Nodes.Math, input_kwargs={0: add}, attrs={"operation": "SINE"}) + + multiply_6 = nw.new_node( + Nodes.Math, + input_kwargs={0: sine, 1: group_input.outputs["Depth"]}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Z": multiply_6}) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": capture_attribute.outputs["Geometry"], + "Offset": combine_xyz_2, + }, + ) + + reroute = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": group_input.outputs["Height"]} + ) + + quadrilateral = nw.new_node( + "GeometryNodeCurvePrimitiveQuadrilateral", + input_kwargs={"Width": reroute, "Height": 0.0020}, + ) + + position = nw.new_node(Nodes.InputPosition) + + separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": position}) + + divide_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz.outputs["X"], 1: reroute}, + attrs={"operation": "DIVIDE"}, + ) + + capture_attribute_1 = nw.new_node( + Nodes.CaptureAttribute, input_kwargs={"Geometry": quadrilateral, 2: divide_1} + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": set_position, + "Profile Curve": capture_attribute_1.outputs["Geometry"], + }, + ) + + combine_xyz_12 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": capture_attribute_1.outputs[2], + "Y": capture_attribute.outputs[2], + }, + ) + + store_named_attribute = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={"Geometry": curve_to_mesh, "Name": "UVMap", 3: combine_xyz_12}, + attrs={"domain": "CORNER", "data_type": "FLOAT2"}, + ) + + set_material = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": store_named_attribute, + "Material": group_input.outputs["CurtainMaterial"], + }, + ) + + multiply_7 = nw.new_node( + Nodes.Math, + input_kwargs={0: reroute_1, 1: 1.3000}, + attrs={"operation": "MULTIPLY"}, + ) + + curve_circle_1 = nw.new_node(Nodes.CurveCircle, input_kwargs={"Radius": multiply_7}) + + curve_to_mesh_2 = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": curve_line, + "Profile Curve": curve_circle_1.outputs["Curve"], + }, + ) + + add_1 = nw.new_node( + Nodes.Math, input_kwargs={0: multiply_3, 1: group_input.outputs["Radius"]} + ) + + combine_xyz_10 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Y": add_1}) + + set_position_4 = nw.new_node( + Nodes.SetPosition, + input_kwargs={"Geometry": curve_to_mesh_2, "Offset": combine_xyz_10}, + ) + + difference = nw.new_node( + Nodes.MeshBoolean, + input_kwargs={"Mesh 1": set_material, "Mesh 2": set_position_4}, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [set_material_1, difference.outputs["Mesh"]]}, + ) + + set_shade_smooth = nw.new_node( + Nodes.SetShadeSmooth, + input_kwargs={"Geometry": join_geometry, "Shade Smooth": False}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": set_shade_smooth}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_window_shutter", singleton=False, type="GeometryNodeTree" +) +def nodegroup_window_shutter(nw: NodeWrangler): + # Code generated using version 2.6.5 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloatDistance", "Width", 2.0000), + ("NodeSocketFloatDistance", "Height", 2.0000), + ("NodeSocketFloatDistance", "FrameWidth", 0.1000), + ("NodeSocketFloatDistance", "FrameThickness", 0.1000), + 
("NodeSocketFloatDistance", "PanelWidth", 0.1000), + ("NodeSocketFloatDistance", "PanelThickness", 0.1000), + ("NodeSocketFloatDistance", "ShutterWidth", 0.1000), + ("NodeSocketFloatDistance", "ShutterThickness", 0.1000), + ("NodeSocketFloat", "ShutterInterval", 0.5000), + ("NodeSocketFloat", "ShutterRotation", 0.0000), + ("NodeSocketMaterial", "FrameMaterial", None), + ], + ) + + quadrilateral = nw.new_node( + "GeometryNodeCurvePrimitiveQuadrilateral", + input_kwargs={ + "Width": group_input.outputs["Width"], + "Height": group_input.outputs["Height"], + }, + ) + + sqrt = nw.new_node( + Nodes.Math, input_kwargs={0: 2.0000}, attrs={"operation": "SQRT"} + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["FrameWidth"], 1: sqrt}, + attrs={"operation": "MULTIPLY"}, + ) + + quadrilateral_1 = nw.new_node( + "GeometryNodeCurvePrimitiveQuadrilateral", + input_kwargs={ + "Width": multiply, + "Height": group_input.outputs["FrameThickness"], + }, + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={"Curve": quadrilateral, "Profile Curve": quadrilateral_1}, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["Width"], + 1: group_input.outputs["FrameWidth"], + }, + attrs={"operation": "SUBTRACT"}, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": subtract, + "Y": group_input.outputs["ShutterWidth"], + "Z": group_input.outputs["ShutterThickness"], + }, + ) + + cube = nw.new_node(Nodes.MeshCube, input_kwargs={"Size": combine_xyz}) + + geometry_to_instance = nw.new_node( + "GeometryNodeGeometryToInstance", + input_kwargs={"Geometry": cube.outputs["Mesh"]}, + ) + + subtract_1 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["Height"], + 1: group_input.outputs["FrameWidth"], + }, + attrs={"operation": "SUBTRACT"}, + ) + + divide = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract_1, 1: group_input.outputs["ShutterInterval"]}, + attrs={"operation": "DIVIDE"}, + ) + + floor = nw.new_node( + Nodes.Math, input_kwargs={0: divide}, attrs={"operation": "FLOOR"} + ) + + shutter_number = nw.new_node( + Nodes.Math, + input_kwargs={0: floor, 1: 1.0000}, + label="ShutterNumber", + attrs={"operation": "SUBTRACT"}, + ) + + duplicate_elements = nw.new_node( + Nodes.DuplicateElements, + input_kwargs={"Geometry": geometry_to_instance, "Amount": shutter_number}, + attrs={"domain": "INSTANCE"}, + ) + + shutter_true_interval = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract_1, 1: floor}, + label="ShutterTrueInterval", + attrs={"operation": "DIVIDE"}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: duplicate_elements.outputs["Duplicate Index"], + 1: shutter_true_interval, + }, + attrs={"operation": "MULTIPLY"}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract_1, 1: -0.5000}, + attrs={"operation": "MULTIPLY"}, + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: multiply_2, 1: shutter_true_interval} + ) + + add_1 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_1, 1: add}) + + combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Y": add_1}) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": duplicate_elements.outputs["Geometry"], + "Offset": combine_xyz_1, + }, + ) + + reroute = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": group_input.outputs["ShutterRotation"]} + ) + + combine_xyz_5 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"X": reroute}) + + rotate_instances = 
nw.new_node( + Nodes.RotateInstances, + input_kwargs={"Instances": set_position, "Rotation": combine_xyz_5}, + ) + + multiply_3 = nw.new_node( + Nodes.Math, + input_kwargs={0: shutter_true_interval, 1: 2.0000}, + attrs={"operation": "MULTIPLY"}, + ) + + subtract_2 = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract_1, 1: multiply_3}, + attrs={"operation": "SUBTRACT"}, + ) + + combine_xyz_2 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": group_input.outputs["PanelWidth"], + "Y": subtract_2, + "Z": group_input.outputs["PanelThickness"], + }, + ) + + cube_1 = nw.new_node(Nodes.MeshCube, input_kwargs={"Size": combine_xyz_2}) + + multiply_4 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["ShutterWidth"]}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"Y": multiply_4}) + + curve_line = nw.new_node(Nodes.CurveLine, input_kwargs={"End": combine_xyz_3}) + + geometry_to_instance_1 = nw.new_node( + "GeometryNodeGeometryToInstance", input_kwargs={"Geometry": curve_line} + ) + + combine_xyz_4 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"X": reroute}) + + rotate_instances_1 = nw.new_node( + Nodes.RotateInstances, + input_kwargs={"Instances": geometry_to_instance_1, "Rotation": combine_xyz_4}, + ) + + realize_instances = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": rotate_instances_1} + ) + + sample_curve = nw.new_node( + Nodes.SampleCurve, input_kwargs={"Curves": realize_instances, "Factor": 1.0000} + ) + + set_position_1 = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": cube_1.outputs["Mesh"], + "Offset": sample_curve.outputs["Position"], + }, + ) + + join_geometry_2 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [curve_to_mesh, rotate_instances, set_position_1]}, + ) + + set_material = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": join_geometry_2, + "Material": group_input.outputs["FrameMaterial"], + }, + ) + + set_shade_smooth = nw.new_node( + Nodes.SetShadeSmooth, + input_kwargs={"Geometry": set_material, "Shade Smooth": False}, + ) + + realize_instances_1 = nw.new_node( + Nodes.RealizeInstances, input_kwargs={"Geometry": set_shade_smooth} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": realize_instances_1}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_window_panel", singleton=False, type="GeometryNodeTree" +) +def nodegroup_window_panel(nw: NodeWrangler): + # Code generated using version 2.6.5 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloatDistance", "Width", 2.0000), + ("NodeSocketFloatDistance", "Height", 2.0000), + ("NodeSocketFloatDistance", "FrameWidth", 0.1000), + ("NodeSocketFloatDistance", "FrameThickness", 0.1000), + ("NodeSocketFloatDistance", "PanelWidth", 0.1000), + ("NodeSocketFloatDistance", "PanelThickness", 0.1000), + ("NodeSocketInt", "PanelHAmount", 0), + ("NodeSocketInt", "PanelVAmount", 0), + ("NodeSocketBool", "WithGlass", False), + ("NodeSocketFloat", "GlassThickness", 0.0000), + ("NodeSocketMaterial", "FrameMaterial", None), + ("NodeSocketMaterial", "Material", None), + ], + ) + + quadrilateral = nw.new_node( + "GeometryNodeCurvePrimitiveQuadrilateral", + input_kwargs={ + "Width": group_input.outputs["Width"], + "Height": group_input.outputs["Height"], + }, + ) + + sqrt = nw.new_node( + Nodes.Math, input_kwargs={0: 2.0000}, attrs={"operation": "SQRT"} + ) + + multiply = 
nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["FrameWidth"], 1: sqrt}, + attrs={"operation": "MULTIPLY"}, + ) + + quadrilateral_1 = nw.new_node( + "GeometryNodeCurvePrimitiveQuadrilateral", + input_kwargs={ + "Width": multiply, + "Height": group_input.outputs["FrameThickness"], + }, + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={"Curve": quadrilateral, "Profile Curve": quadrilateral_1}, + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["PanelHAmount"], 1: -1.0000} + ) + + lineseq = nw.new_node( + nodegroup_line_seq().name, + input_kwargs={ + "Width": group_input.outputs["Width"], + "Height": group_input.outputs["Height"], + "Amount": add, + }, + ) + + reroute = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": group_input.outputs["PanelWidth"]} + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["PanelThickness"], 1: 0.0010}, + attrs={"operation": "SUBTRACT"}, + ) + + quadrilateral_2 = nw.new_node( + "GeometryNodeCurvePrimitiveQuadrilateral", + input_kwargs={"Width": reroute, "Height": subtract}, + ) + + curve_to_mesh_1 = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={"Curve": lineseq, "Profile Curve": quadrilateral_2}, + ) + + add_1 = nw.new_node( + Nodes.Math, input_kwargs={0: group_input.outputs["PanelVAmount"], 1: -1.0000} + ) + + lineseq_1 = nw.new_node( + nodegroup_line_seq().name, + input_kwargs={ + "Width": group_input.outputs["Height"], + "Height": group_input.outputs["Width"], + "Amount": add_1, + }, + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": lineseq_1, "Rotation": (0.0000, 0.0000, 1.5708)}, + ) + + subtract_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract, 1: 0.0010}, + attrs={"operation": "SUBTRACT"}, + ) + + quadrilateral_3 = nw.new_node( + "GeometryNodeCurvePrimitiveQuadrilateral", + input_kwargs={"Width": reroute, "Height": subtract_1}, + ) + + curve_to_mesh_2 = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={"Curve": transform, "Profile Curve": quadrilateral_3}, + ) + + join_geometry_3 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [curve_to_mesh_1, curve_to_mesh_2]}, + ) + + join_geometry_2 = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [curve_to_mesh, join_geometry_3]} + ) + + set_material_1 = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": join_geometry_2, + "Material": group_input.outputs["FrameMaterial"], + }, + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": group_input.outputs["Width"], + "Y": group_input.outputs["Height"], + "Z": group_input.outputs["GlassThickness"], + }, + ) + + cube = nw.new_node(Nodes.MeshCube, input_kwargs={"Size": combine_xyz}) + + store_named_attribute = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": cube.outputs["Mesh"], + "Name": "uv_map", + 3: cube.outputs["UV Map"], + }, + attrs={"domain": "CORNER", "data_type": "FLOAT_VECTOR"}, + ) + + set_material = nw.new_node( + Nodes.SetMaterial, + input_kwargs={ + "Geometry": store_named_attribute, + "Material": group_input.outputs["Material"], + }, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [set_material, set_material_1]} + ) + + switch = nw.new_node( + Nodes.Switch, + input_kwargs={ + 1: group_input.outputs["WithGlass"], + 14: set_material_1, + 15: join_geometry, + }, + ) + + set_shade_smooth = nw.new_node( + Nodes.SetShadeSmooth, + input_kwargs={"Geometry": switch.outputs[6], "Shade Smooth": 
False}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": set_shade_smooth}, + attrs={"is_active_output": True}, + ) + + +def shader_curtain_material(nw: NodeWrangler): + # Code generated using version 2.6.5 of the node_transpiler + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": color_category("textile"), + "Transmission": np.random.uniform(0, 1), + "Transmission Roughness": 1.0, + }, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, + input_kwargs={"Surface": principled_bsdf}, + attrs={"is_active_output": True}, + ) + + +def shader_curtain_frame_material(nw: NodeWrangler): + # Code generated using version 2.6.5 of the node_transpiler + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={"Base Color": (0.1840, 0.0000, 0.8000, 1.0000)}, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, + input_kwargs={"Surface": principled_bsdf}, + attrs={"is_active_output": True}, + ) + + +def shader_frame_material(nw: NodeWrangler): + # Code generated using version 2.6.5 of the node_transpiler + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={"Base Color": (0.8000, 0.5033, 0.0057, 1.0000)}, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, + input_kwargs={"Surface": principled_bsdf}, + attrs={"is_active_output": True}, + ) + + +def shader_glass_material(nw: NodeWrangler): + # Code generated using version 2.6.5 of the node_transpiler + + principled_bsdf = nw.new_node( + Nodes.PrincipledBSDF, + input_kwargs={ + "Base Color": (0.0094, 0.0055, 0.8000, 1.0000), + "Roughness": 0.0000, + }, + ) + + material_output = nw.new_node( + Nodes.MaterialOutput, + input_kwargs={"Surface": principled_bsdf}, + attrs={"is_active_output": True}, + ) diff --git a/infinigen/assets/organizer/basket.py b/infinigen/assets/organizer/basket.py deleted file mode 100644 index 5e711c113..000000000 --- a/infinigen/assets/organizer/basket.py +++ /dev/null @@ -1,306 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
- -# Authors: Beining Han - -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core import surface -from infinigen.core.placement.factory import AssetFactory -import numpy as np -from infinigen.core.util import blender as butil -from infinigen.core import tagging - -import bpy -from infinigen.assets.shelves.utils import nodegroup_tagged_cube -from infinigen.assets.materials.plastics.plastic_rough import shader_rough_plastic - - -@node_utils.to_nodegroup('nodegroup_holes', singleton=False, type='GeometryNodeTree') -def nodegroup_holes(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Value1', 0.5000), - ('NodeSocketFloat', 'Value2', 0.5000), - ('NodeSocketFloat', 'Value3', 0.5000), - ('NodeSocketFloat', 'Value4', 0.5000), - ('NodeSocketFloat', 'Value5', 0.5000), - ('NodeSocketFloat', 'Value6', 0.5000)]) - - add = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Value3"], 1: 0.0000}) - - subtract = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Value1"], 1: add}, attrs={'operation': 'SUBTRACT'}) - - add_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Value6"], 1: 0.0000}) - - subtract_1 = nw.new_node(Nodes.Math, input_kwargs={0: add_1, 1: add}, attrs={'operation': 'SUBTRACT'}) - - add_2 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Value4"], 1: 0.0000}) - - add_3 = nw.new_node(Nodes.Math, input_kwargs={0: add_2, 1: group_input.outputs["Value2"]}) - - divide = nw.new_node(Nodes.Math, input_kwargs={0: subtract, 1: add_3}, attrs={'operation': 'DIVIDE'}) - - divide_1 = nw.new_node(Nodes.Math, input_kwargs={0: subtract_1, 1: add_3}, attrs={'operation': 'DIVIDE'}) - - grid = nw.new_node(Nodes.MeshGrid, - input_kwargs={'Size X': subtract, 'Size Y': subtract_1, 'Vertices X': divide, 'Vertices Y': divide_1}) - - store_named_attribute = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': grid.outputs["Mesh"], 'Name': 'uv_map', 3: grid.outputs["UV Map"]}, - attrs={'domain': 'CORNER', 'data_type': 'FLOAT_VECTOR'}) - - transform_1 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': store_named_attribute, 'Rotation': (0.0000, 1.5708, 0.0000)}) - - add_4 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Value5"], 1: 0.0000}) - - add_5 = nw.new_node(Nodes.Math, input_kwargs={0: add_4, 1: 0.1}) - - combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': add_5, 'Y': add_2, 'Z': add_2}) - - cube_2 = nw.new_node(Nodes.MeshCube, input_kwargs={'Size': combine_xyz_3}) - - store_named_attribute_1 = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': cube_2.outputs["Mesh"], 'Name': 'uv_map', 3: cube_2.outputs["UV Map"]}, - attrs={'domain': 'CORNER', 'data_type': 'FLOAT_VECTOR'}) - - instance_on_points = nw.new_node(Nodes.InstanceOnPoints, input_kwargs={'Points': transform_1, 'Instance': store_named_attribute_1}) - - subtract_2 = nw.new_node(Nodes.Math, input_kwargs={0: add_4, 1: add}, attrs={'operation': 'SUBTRACT'}) - - divide_2 = nw.new_node(Nodes.Math, input_kwargs={0: subtract_2, 1: add_3}, attrs={'operation': 'DIVIDE'}) - - grid_1 = nw.new_node(Nodes.MeshGrid, - input_kwargs={'Size X': subtract_2, 'Size Y': subtract, 'Vertices X': divide_2, 'Vertices Y': divide}) - - store_named_attribute_2 = nw.new_node(Nodes.StoreNamedAttribute, - 
input_kwargs={'Geometry': grid_1.outputs["Mesh"], 'Name': 'uv_map', 3: grid_1.outputs["UV Map"]}, - attrs={'domain': 'CORNER', 'data_type': 'FLOAT_VECTOR'}) - - transform_2 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': store_named_attribute_2, 'Rotation': (1.5708, 0.0000, 0.0000)}) - - add_6 = nw.new_node(Nodes.Math, input_kwargs={0: add_1, 1: 0.1}) - - combine_xyz_4 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': add_2, 'Y': add_6, 'Z': add_2}) - - cube_3 = nw.new_node(Nodes.MeshCube, input_kwargs={'Size': combine_xyz_4}) - - store_named_attribute_3 = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': cube_3.outputs["Mesh"], 'Name': 'uv_map', 3: cube_3.outputs["UV Map"]}, - attrs={'domain': 'CORNER', 'data_type': 'FLOAT_VECTOR'}) - - instance_on_points_1 = nw.new_node(Nodes.InstanceOnPoints, input_kwargs={'Points': transform_2, 'Instance': store_named_attribute_3}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Instances1': instance_on_points, 'Instances2': instance_on_points_1}, - attrs={'is_active_output': True}) - - -@node_utils.to_nodegroup('nodegroup_handle_hole', singleton=False, type='GeometryNodeTree') -def nodegroup_handle_hole(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'X', 0.0000), - ('NodeSocketFloat', 'Z', 0.0000), - ('NodeSocketFloat', 'Value', 0.5000), - ('NodeSocketFloat', 'Value2', 0.5000), - ('NodeSocketInt', 'Level', 0)]) - - combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': group_input.outputs["X"], 'Y': 1.0000, 'Z': group_input.outputs["Z"]}) - - cube_2 = nw.new_node(Nodes.MeshCube, input_kwargs={'Size': combine_xyz_3}) - - store_named_attribute = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': cube_2.outputs["Mesh"], 'Name': 'uv_map', 3: cube_2.outputs["UV Map"]}, - attrs={'domain': 'CORNER', 'data_type': 'FLOAT_VECTOR'}) - - subdivide_mesh_2 = nw.new_node(Nodes.SubdivideMesh, input_kwargs={'Mesh': store_named_attribute}) - - subdivision_surface_2 = nw.new_node(Nodes.SubdivisionSurface, - input_kwargs={'Mesh': subdivide_mesh_2, 'Level': group_input.outputs["Level"]}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Value"]}, attrs={'operation': 'MULTIPLY'}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: multiply, 1: group_input.outputs["Value2"]}, - attrs={'operation': 'SUBTRACT'}) - - combine_xyz_4 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': subtract}) - - transform_1 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': subdivision_surface_2, 'Translation': combine_xyz_4}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': transform_1}, attrs={'is_active_output': True}) - - -def geometry_nodes(nw: NodeWrangler, **kwargs): - # Code generated using version 2.6.4 of the node_transpiler - - depth = nw.new_node(Nodes.Value, label='depth') - depth.outputs[0].default_value = kwargs['depth'] - - width = nw.new_node(Nodes.Value, label='width') - width.outputs[0].default_value = kwargs['width'] - - height = nw.new_node(Nodes.Value, label='height') - height.outputs[0].default_value = kwargs['height'] - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': depth, 'Y': width, 'Z': height}) - - cube = nw.new_node(Nodes.MeshCube, input_kwargs={'Size': combine_xyz}) - - store_named_attribute = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': cube.outputs["Mesh"], 'Name': 
'uv_map', - 3: cube.outputs["UV Map"]}, - attrs={'domain': 'CORNER', 'data_type': 'FLOAT_VECTOR'}) - - subdivide_mesh = nw.new_node(Nodes.SubdivideMesh, input_kwargs={'Mesh': store_named_attribute, 'Level': 2}) - - sub_level = nw.new_node(Nodes.Integer, label='sub_level') - sub_level.integer = kwargs['frame_sub_level'] - - subdivision_surface = nw.new_node(Nodes.SubdivisionSurface, - input_kwargs={'Mesh': subdivide_mesh, 'Level': sub_level}) - - differences = [] - - if kwargs['has_handle']: - hole_depth = nw.new_node(Nodes.Value, label='hole_depth') - hole_depth.outputs[0].default_value = kwargs['handle_depth'] - - hole_height = nw.new_node(Nodes.Value, label='hole_height') - hole_height.outputs[0].default_value = kwargs['handle_height'] - - hole_dist = nw.new_node(Nodes.Value, label='hole_dist') - hole_dist.outputs[0].default_value = kwargs['handle_dist_to_top'] - - handle_level = nw.new_node(Nodes.Integer, label='handle_level') - handle_level.integer = kwargs['handle_sub_level'] - handle_hole = nw.new_node(nodegroup_handle_hole().name, - input_kwargs={'X': hole_depth, 'Z': hole_height, 'Value': height, 'Value2': hole_dist, - 'Level': handle_level}) - differences.append(handle_hole) - - thickness = nw.new_node(Nodes.Value, label='thickness') - thickness.outputs[0].default_value = kwargs['thickness'] - - subtract = nw.new_node(Nodes.Math, input_kwargs={0: depth, 1: thickness}, attrs={'operation': 'SUBTRACT'}) - - subtract_1 = nw.new_node(Nodes.Math, input_kwargs={0: width, 1: thickness}, attrs={'operation': 'SUBTRACT'}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': subtract, 'Y': subtract_1, 'Z': height}) - - cube_1 = nw.new_node(Nodes.MeshCube, input_kwargs={'Size': combine_xyz_1}) - - store_named_attribute_1 = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': cube_1.outputs["Mesh"], 'Name': 'uv_map', - 3: cube_1.outputs["UV Map"]}, - attrs={'domain': 'CORNER', 'data_type': 'FLOAT_VECTOR'}) - - subdivide_mesh_1 = nw.new_node(Nodes.SubdivideMesh, input_kwargs={'Mesh': store_named_attribute_1, 'Level': 2}) - - subdivision_surface_1 = nw.new_node(Nodes.SubdivisionSurface, - input_kwargs={'Mesh': subdivide_mesh_1, 'Level': sub_level}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: thickness, 2: 0.2500}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': multiply}) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': subdivision_surface_1, 'Translation': combine_xyz_2}) - - if kwargs['has_holes']: - gap_size = nw.new_node(Nodes.Value, label='gap_size') - gap_size.outputs[0].default_value = kwargs['hole_gap_size'] - - hole_edge_gap = nw.new_node(Nodes.Value, label='hole_edge_gap') - hole_edge_gap.outputs[0].default_value = kwargs['hole_edge_gap'] - - hole_size = nw.new_node(Nodes.Value, label='hole_size') - hole_size.outputs[0].default_value = kwargs['hole_size'] - holes = nw.new_node(nodegroup_holes().name, - input_kwargs={'Value1': height, 'Value2': gap_size, 'Value3': hole_edge_gap, - 'Value4': hole_size, 'Value5': depth, 'Value6': width}) - differences.extend([holes.outputs["Instances1"], holes.outputs["Instances2"]]) - - difference = nw.new_node(Nodes.MeshBoolean, - input_kwargs={'Mesh 1': subdivision_surface, 'Mesh 2': [transform] + differences}) - - realize_instances = nw.new_node(Nodes.RealizeInstances, input_kwargs={'Geometry': difference.outputs["Mesh"]}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: height}, attrs={'operation': 'MULTIPLY'}) - - 
combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': multiply_1}) - - transform_geometry = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': realize_instances, 'Translation': combine_xyz_3}) - - set_material = nw.new_node(Nodes.SetMaterial, input_kwargs={'Geometry': transform_geometry, - 'Material': surface.shaderfunc_to_material(shader_rough_plastic)}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': set_material}, - attrs={'is_active_output': True}) - - -class BasketBaseFactory(AssetFactory): - def __init__(self, factory_seed, params={}, coarse=False): - super(BasketBaseFactory, self).__init__(factory_seed, coarse=coarse) - self.params = params - - def sample_params(self): - return self.params.copy() - - def get_asset_params(self, i=0): - params = self.sample_params() - if params.get('depth', None) is None: - params['depth'] = uniform(0.15, 0.4) - if params.get('width', None) is None: - params['width'] = uniform(0.2, 0.6) - if params.get('height', None) is None: - params['height'] = uniform(0.06, 0.24) - if params.get('frame_sub_level', None) is None: - params['frame_sub_level'] = np.random.choice([0, 3], p=[0.5, 0.5]) - if params.get('thickness', None) is None: - params['thickness'] = uniform(0.001, 0.005) - - if params.get('has_handle', None) is None: - params['has_handle'] = np.random.choice([True, False], p=[0.8, 0.2]) - if params.get('handle_sub_level', None) is None: - params['handle_sub_level'] = np.random.choice([0, 1, 2], p=[0.2, 0.4, 0.4]) - if params.get('handle_depth', None) is None: - params['handle_depth'] = params['depth'] * uniform(0.2, 0.4) - if params.get('handle_height', None) is None: - params['handle_height'] = params['height'] * uniform(0.1, 0.25) - if params.get('handle_dist_to_top', None) is None: - params['handle_dist_to_top'] = (params['handle_height'] * 0.5 + - params['height'] * uniform(0.08, 0.15)) - - if params.get('has_holes', None) is None: - if params['height'] < 0.12: - params['has_holes'] = False - else: - params['has_holes'] = np.random.choice([True, False], p=[0.5, 0.5]) - if params.get('hole_size', None) is None: - params['hole_size'] = uniform(0.005, 0.01) - if params.get('hole_gap_size', None) is None: - params['hole_gap_size'] = params['hole_size'] * uniform(0.8, 1.1) - if params.get('hole_edge_gap', None) is None: - params['hole_edge_gap'] = uniform(0.04, 0.06) - - return params - - def create_asset(self, i=0, **params): - bpy.ops.mesh.primitive_plane_add( - size=1, enter_editmode=False, align='WORLD', location=(0, 0, 0), scale=(1, 1, 1)) - obj = bpy.context.active_object - - obj_params = self.get_asset_params(i) - surface.add_geomod(obj, geometry_nodes, attributes=[], apply=True, input_kwargs=obj_params) - tagging.tag_system.relabel_obj(obj) - - return obj diff --git a/infinigen/assets/organizer/hook.py b/infinigen/assets/organizer/hook.py deleted file mode 100644 index 2403dd132..000000000 --- a/infinigen/assets/organizer/hook.py +++ /dev/null @@ -1,387 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
- -# Authors: Beining Han - -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core import surface -from infinigen.core.placement.factory import AssetFactory -import numpy as np -from infinigen.core.util import blender as butil -from infinigen.core import tagging - -import bpy -from infinigen.assets.materials import shader_rough_plastic, shader_brushed_metal -from infinigen.assets.materials.plastics.plastic_rough import shader_rough_plastic - - -def hook_geometry_nodes(nw: NodeWrangler, **kwargs): - # Code generated using version 2.6.5 of the node_transpiler - - hook_num = nw.new_node(Nodes.Integer, label='hook_num') - hook_num.integer = kwargs["num_hook"] - - add = nw.new_node(Nodes.Math, input_kwargs={0: hook_num, 1: -1.0000}) - - hook_gap = nw.new_node(Nodes.Value, label='hook_gap') - hook_gap.outputs[0].default_value = kwargs["hook_gap"] - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: hook_gap, 1: add}, attrs={'operation': 'MULTIPLY'}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: multiply}, attrs={'operation': 'MULTIPLY'}) - - multiply_2 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_1, 1: -1.0000}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply_2}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply_1}) - - mesh_line = nw.new_node(Nodes.MeshLine, - input_kwargs={'Count': add, 'Start Location': combine_xyz_2, 'Offset': combine_xyz_1}, - attrs={'mode': 'END_POINTS'}) - - bezier_segment = nw.new_node(Nodes.CurveBezierSegment, - input_kwargs={'Start': (0.0000, 0.0000, 0.0000), - 'Start Handle': (0.0000, 0.0000, kwargs["init_handle"]), - 'End Handle': kwargs["curve_handle"], - 'End': kwargs["curve_end_point"]}) - - curve_line = nw.new_node(Nodes.CurveLine) - - join_geometry_3 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [bezier_segment, curve_line]}) - - spline_parameter = nw.new_node(Nodes.SplineParameter) - - float_curve = nw.new_node(Nodes.FloatCurve, input_kwargs={'Factor': spline_parameter.outputs["Factor"]}) - node_utils.assign_curve(float_curve.mapping.curves[0], [(0.0000, 0.8), (0.5, 0.8), (1.0000, 0.8)]) - - raduis = nw.new_node(Nodes.Value, label='raduis') - raduis.outputs[0].default_value = kwargs['hook_radius'] - - multiply_3 = nw.new_node(Nodes.Math, input_kwargs={0: float_curve, 1: raduis}, attrs={'operation': 'MULTIPLY'}) - - set_curve_radius = nw.new_node(Nodes.SetCurveRadius, input_kwargs={'Curve': join_geometry_3, 'Radius': multiply_3}) - - curve_circle = nw.new_node(Nodes.CurveCircle, - input_kwargs={'Resolution': kwargs['hook_resolution'], - 'Point 1': (1.0000, 0.0000, 0.0000), - 'Point 3': (-1.0000, 0.0000, 0.0000)}, - attrs={'mode': 'POINTS'}) - - hook_reshape = nw.new_node(Nodes.Vector, label='hook_reshape') - hook_reshape.vector = (1.0000, 1.0000, 1.0000) - - transform_geometry_2 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': curve_circle.outputs["Curve"], 'Scale': hook_reshape}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': set_curve_radius, 'Profile Curve': transform_geometry_2, - 'Fill Caps': True}) - - hook_size = nw.new_node(Nodes.Value, label='hook_size') - hook_size.outputs[0].default_value = kwargs['hook_size'] - - transform_geometry = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': curve_to_mesh, 'Scale': hook_size}) - - realize_instances_1 = 
nw.new_node(Nodes.RealizeInstances, input_kwargs={'Geometry': transform_geometry}) - - merge_by_distance_1 = nw.new_node(Nodes.MergeByDistance, input_kwargs={'Geometry': realize_instances_1}) - - instance_on_points = nw.new_node(Nodes.InstanceOnPoints, - input_kwargs={'Points': mesh_line, 'Instance': merge_by_distance_1}) - - scale_instances = nw.new_node(Nodes.ScaleInstances, input_kwargs={'Instances': instance_on_points}) - - set_material = nw.new_node(Nodes.SetMaterial, input_kwargs={'Geometry': scale_instances, - 'Material': surface.shaderfunc_to_material(shader_brushed_metal)}) - - board_side_gap = nw.new_node(Nodes.Value, label='board_side_gap') - board_side_gap.outputs[0].default_value = kwargs['board_side_gap'] - - add_1 = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: board_side_gap}) - - board_thickness = nw.new_node(Nodes.Value, label='board_thickness') - board_thickness.outputs[0].default_value = kwargs['board_thickness'] - - board_height = nw.new_node(Nodes.Value, label='board_height') - board_height.outputs[0].default_value = kwargs['board_height'] - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': add_1, 'Y': board_thickness, 'Z': board_height}) - - cube = nw.new_node(Nodes.MeshCube, input_kwargs={'Size': combine_xyz}) - - multiply_4 = nw.new_node(Nodes.Math, input_kwargs={0: board_thickness, 1: -0.5000}, attrs={'operation': 'MULTIPLY'}) - - multiply_5 = nw.new_node(Nodes.Math, input_kwargs={0: board_height}, attrs={'operation': 'MULTIPLY'}) - - subtract = nw.new_node(Nodes.Math, input_kwargs={0: hook_size, 1: multiply_5}, attrs={'operation': 'SUBTRACT'}) - - combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': multiply_4, 'Z': subtract}) - - transform_geometry_1 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': cube.outputs["Mesh"], 'Translation': combine_xyz_3}) - - set_material_1 = nw.new_node(Nodes.SetMaterial, input_kwargs={'Geometry': transform_geometry_1, - 'Material': surface.shaderfunc_to_material(shader_rough_plastic)}) - - join_geometry_2 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [set_material, set_material_1]}) - - realize_instances = nw.new_node(Nodes.RealizeInstances, input_kwargs={'Geometry': join_geometry_2}) - - triangulate = nw.new_node('GeometryNodeTriangulate', input_kwargs={'Mesh': realize_instances}) - - transform_geometry_3 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': triangulate, 'Rotation': (0.0000, 0.0000, -1.5708)}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': transform_geometry_3}, - attrs={'is_active_output': True}) - - -def spatula_geometry_nodes(nw: NodeWrangler, **kwargs): - # Code generated using version 2.6.5 of the node_transpiler - - handle_length = nw.new_node(Nodes.Value, label='handle_length') - handle_length.outputs[0].default_value = kwargs['handle_length'] - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': handle_length}) - - mesh_line = nw.new_node(Nodes.MeshLine, input_kwargs={'Count': 64, 'Offset': combine_xyz}, attrs={'mode': 'END_POINTS'}) - - mesh_to_curve = nw.new_node(Nodes.MeshToCurve, input_kwargs={'Mesh': mesh_line}) - - handle_radius = nw.new_node(Nodes.Value, label='handle_radius') - handle_radius.outputs[0].default_value = kwargs['handle_radius'] - - spline_parameter = nw.new_node(Nodes.SplineParameter) - - float_curve = nw.new_node(Nodes.FloatCurve, input_kwargs={'Value': spline_parameter.outputs["Factor"]}) - node_utils.assign_curve(float_curve.mapping.curves[0], kwargs['handle_control_points']) - 
- multiply = nw.new_node(Nodes.Math, input_kwargs={0: handle_radius, 1: float_curve}, attrs={'operation': 'MULTIPLY'}) - - set_curve_radius = nw.new_node(Nodes.SetCurveRadius, input_kwargs={'Curve': mesh_to_curve, 'Radius': multiply}) - - curve_circle = nw.new_node(Nodes.CurveCircle) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': set_curve_radius, 'Profile Curve': curve_circle.outputs["Curve"], 'Fill Caps': True}) - - transform_geometry = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': curve_to_mesh, - 'Scale': (kwargs['handle_ratio'], 1.0, 1.0)}) - - hole_radius = nw.new_node(Nodes.Value, label='hole_radius') - hole_radius.outputs[0].default_value = kwargs['hole_radius'] - - cylinder = nw.new_node('GeometryNodeMeshCylinder', input_kwargs={'Radius': hole_radius, 'Depth': 0.1000}) - - hole_place_ratio = nw.new_node(Nodes.Value, label='hole_placement') - hole_place_ratio.outputs[0].default_value = kwargs['hole_placement'] - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: handle_length, 1: hole_place_ratio}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': multiply_1}) - - transform_geometry_1 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': cylinder.outputs["Mesh"], 'Translation': combine_xyz_1, - 'Rotation': (0.0000, 1.5708, 0.0000), 'Scale': (kwargs['hole_ratio'], 1.0000, 1.0000)}) - - difference = nw.new_node(Nodes.MeshBoolean, input_kwargs={'Mesh 1': transform_geometry, 'Mesh 2': transform_geometry_1}) - - cube = nw.new_node(Nodes.MeshCube, - input_kwargs={'Size': (kwargs['plate_thickness'], kwargs['plate_width'], kwargs['plate_length']), - 'Vertices X': 4, 'Vertices Y': 4, 'Vertices Z': 4}) - - transform_geometry_3 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': cube.outputs["Mesh"], - 'Translation': (0.0000, 0.0000, -kwargs['plate_length'] / 2.)}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [difference.outputs["Mesh"], transform_geometry_3]}) - - realize_instances = nw.new_node(Nodes.RealizeInstances, input_kwargs={'Geometry': join_geometry}) - - triangulate = nw.new_node('GeometryNodeTriangulate', input_kwargs={'Mesh': realize_instances}) - - multiply_2 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_1, 1: -1.0000}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': multiply_2}) - - transform_geometry_2 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': triangulate, 'Translation': combine_xyz_2}) - - set_material = nw.new_node(Nodes.SetMaterial, input_kwargs={'Geometry': transform_geometry_2, - 'Material': surface.shaderfunc_to_material(shader_rough_plastic)}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': set_material}, - attrs={'is_active_output': True}) - - -class HookBaseFactory(AssetFactory): - def __init__(self, factory_seed, params={}, coarse=False): - super(HookBaseFactory, self).__init__(factory_seed, coarse=coarse) - self.params = params - - def sample_params(self): - return self.params.copy() - - def get_hang_points(self, params): - # compute the lowest point in the bezier curve - x = params['init_handle'] - y = params['curve_handle'][2] - params['init_handle'] - z = params['curve_end_point'][2] - params['curve_handle'][2] - - t1 = (x - y + np.sqrt(y ** 2 - x * z)) / (x + z - 2 * y) - t2 = (x - y - np.sqrt(y ** 2 - x * z)) / (x + z - 2 * y) - - t = 0 - if t1 >= 0 and t1 <= 1: - t = max(t1, t) - if t2 >= 0 and t2 <= 1: - t = max(t2, t) 
- if t == 0: - t = 0.5 - - # get x, z coordinate - alpha1 = 3 * ((1 - t) ** 2) * t - alpha2 = 3 * (1 - t) * (t ** 2) - alpha3 = t ** 3 - - z = alpha1 * params['init_handle'] + alpha2 * params['curve_handle'][-1] + alpha3 * params['curve_end_point'][-1] - x = alpha2 * params['curve_handle'][-2] + alpha3 * params['curve_end_point'][-2] - - ys = [] - total_length = params['board_side_gap'] + (params['num_hook'] - 1) * params['hook_gap'] - for i in range(params['num_hook']): - y = - total_length / 2. + params['board_side_gap'] / 2. + i * params['hook_gap'] - ys.append(y) - - hang_points = [] - for y in ys: - hang_points.append((x * params['hook_size'], y, z * params['hook_size'])) - - return hang_points - - def get_asset_params(self, i=0): - params = self.sample_params() - if params.get('num_hook', None) is None: - params['num_hook'] = randint(3, 6) - if params.get('hook_size', None) is None: - params['hook_size'] = uniform(0.05, 0.1) - if params.get('hook_radius', None) is None: - params['hook_radius'] = uniform(0.002, 0.004) / params['hook_size'] - else: - params['hook_radius'] = params['hook_radius'] / params['hook_size'] - - if params.get('hook_resolution', None) is None: - params['hook_resolution'] = np.random.choice([4, 32], p=[0.5, 0.5]) - - if params.get("hook_gap", None) is None: - params["hook_gap"] = uniform(0.04, 0.08) - if params.get('board_height', None) is None: - params['board_height'] = params['hook_size'] + uniform(-0.02, 0.01) - if params.get('board_thickness', None) is None: - params['board_thickness'] = uniform(0.005, 0.015) - if params.get('board_side_gap', None) is None: - params['board_side_gap'] = uniform(0.03, 0.05) - - params['init_handle'] = uniform(-0.15, -0.25) - params["curve_handle"] = (0, uniform(0.15, 0.35), uniform(-0.15, -0.35)) - params["curve_end_point"] = (0, uniform(0.35, 0.55), uniform(-0.05, 0.15)) - - return params - - def create_asset(self, i=0, **params): - bpy.ops.mesh.primitive_plane_add( - size=1, enter_editmode=False, align='WORLD', location=(0, 0, 0), scale=(1, 1, 1)) - obj = bpy.context.active_object - - obj_params = self.get_asset_params(i) - surface.add_geomod(obj, hook_geometry_nodes, attributes=[], apply=True, input_kwargs=obj_params) - tagging.tag_system.relabel_obj(obj) - - hang_points = self.get_hang_points(obj_params) - - return obj, hang_points - - -class SpatulaBaseFactory(AssetFactory): - def __init__(self, factory_seed, params={}, coarse=False): - super(SpatulaBaseFactory, self).__init__(factory_seed, coarse=coarse) - self.params = params - - def sample_params(self): - return self.params.copy() - - def get_asset_params(self, i=0): - params = self.sample_params() - - if params.get('hole_radius', None) is None: - params['hole_radius'] = uniform(0.003, 0.008) - if params.get('hole_placement', None) is None: - params['hole_placement'] = uniform(0.75, 0.9) - if params.get('hole_ratio', None) is None: - params['hole_ratio'] = uniform(0.8, 2.0) - - if params.get('handle_length', None) is None: - params['handle_length'] = uniform(0.15, 0.25) - - if params.get("handle_ratio", None) is None: - params["handle_ratio"] = uniform(0.1, 0.4) - if params.get("handle_control_points", None) is None: - params["handle_control_points"] = [(0, 0.5), (0.5, uniform(0.45, 0.65)), (1.0, uniform(0.4, 0.6))] - if params.get("handle_radius", None) is None: - params["handle_radius"] = (params['hole_radius'] / params["handle_control_points"][0][1]) / uniform(0.6, 0.8) - - if params.get('plate_thickness', None) is None: - params['plate_thickness'] = uniform(0.005, 
0.01) - if params.get('plate_width', None) is None: - params['plate_width'] = uniform(0.04, 0.06) - if params.get('plate_length', None) is None: - params['plate_length'] = uniform(0.05, 0.08) - - return params - - def create_asset(self, i=0, **params): - bpy.ops.mesh.primitive_plane_add( - size=1, enter_editmode=False, align='WORLD', location=(0, 0, 0), scale=(1, 1, 1)) - obj = bpy.context.active_object - - obj_params = self.get_asset_params(i) - surface.add_geomod(obj, spatula_geometry_nodes, attributes=[], apply=True, input_kwargs=obj_params) - tagging.tag_system.relabel_obj(obj) - - return obj - - -class SpatulaOnHookBaseFactory(AssetFactory): - def __init__(self, factory_seed, params={}, coarse=False): - super(SpatulaOnHookBaseFactory, self).__init__(factory_seed, coarse=coarse) - self.params = params - - self.hook_fac = HookBaseFactory(factory_seed, params=params) - self.spatula_fac = SpatulaBaseFactory(factory_seed, params=params) - - def get_asset_params(self, i): - if self.params.get('hook_radius', None) is None: - r = uniform(0.002, 0.0035) - self.hook_fac.params['hook_radius'] = r - self.spatula_fac.params['hole_radius'] = r / uniform(0.3, 0.6) - - def create_asset(self, i, **params): - - self.get_asset_params(i) - hook, hang_points = self.hook_fac.create_asset(i) - spatula = self.spatula_fac.create_asset(i) - - spatula.location = hang_points[0] - butil.apply_transform(spatula, loc=True) - - return hook - - - - - - - diff --git a/infinigen/assets/organizer/plate_rack.py b/infinigen/assets/organizer/plate_rack.py deleted file mode 100644 index 3894741fa..000000000 --- a/infinigen/assets/organizer/plate_rack.py +++ /dev/null @@ -1,335 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
- -# Authors: Beining Han - -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core import surface -from infinigen.core.placement.factory import AssetFactory -import numpy as np -from infinigen.core.util import blender as butil -from infinigen.core import tagging - -import bpy -from infinigen.assets.shelves.utils import nodegroup_tagged_cube -from infinigen.assets.materials import shader_wood -from infinigen.assets.materials.plastics.plastic_rough import shader_rough_plastic - - -@node_utils.to_nodegroup('nodegroup_plate_rack_connect', singleton=False, type='GeometryNodeTree') -def nodegroup_plate_rack_connect(nw: NodeWrangler): - # Code generated using version 2.6.5 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloatDistance', 'Radius', 1.0000), - ('NodeSocketFloat', 'Value1', 0.5000), - ('NodeSocketFloat', 'Value', 0.5000)]) - - multiply_add = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Value1"], 1: 2.0000, 2: -0.0020}, - attrs={'operation': 'MULTIPLY_ADD'}) - - cylinder = nw.new_node('GeometryNodeMeshCylinder', - input_kwargs={'Radius': group_input.outputs["Radius"], 'Depth': multiply_add}) - - store_named_attribute = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': cylinder.outputs["Mesh"], 'Name': 'uv_map', - 3: cylinder.outputs["UV Map"]}, - attrs={'data_type': 'FLOAT_VECTOR', 'domain': 'CORNER'}) - - multiply_add_1 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Value"], 2: -uniform(0.02, 0.045)}, - attrs={'operation': 'MULTIPLY_ADD'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply_add_1}) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': store_named_attribute, 'Translation': combine_xyz, - 'Rotation': (1.5708, 0.0000, 0.0000)}) - - transform_2 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': transform, 'Scale': (-1.0000, 1.0000, 1.0000)}) - - join_geometry_2 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [transform_2, transform]}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': join_geometry_2}, - attrs={'is_active_output': True}) - - -@node_utils.to_nodegroup('nodegroup_rack_cyn', singleton=False, type='GeometryNodeTree') -def nodegroup_rack_cyn(nw: NodeWrangler): - # Code generated using version 2.6.5 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloatDistance', 'Radius', 1.0000), - ('NodeSocketFloat', 'Value', 0.5000)]) - - add = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Value"], 1: 0.0000}) - - cylinder = nw.new_node('GeometryNodeMeshCylinder', - input_kwargs={'Radius': group_input.outputs["Radius"], 'Depth': add}) - - store_named_attribute = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': cylinder.outputs["Mesh"], 'Name': 'uv_map', - 3: cylinder.outputs["UV Map"]}, - attrs={'data_type': 'FLOAT_VECTOR', 'domain': 'CORNER'}) - - multiply_add = nw.new_node(Nodes.Math, input_kwargs={0: add, 2: 0.0010}, attrs={'operation': 'MULTIPLY_ADD'}) - - combine_xyz_4 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': multiply_add}) - - transform_2 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': store_named_attribute, 'Translation': combine_xyz_4}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': transform_2}, - 
attrs={'is_active_output': True}) - - -@node_utils.to_nodegroup('nodegroup_rack_base', singleton=False, type='GeometryNodeTree') -def nodegroup_rack_base(nw: NodeWrangler): - # Code generated using version 2.6.5 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Instance', None), - ('NodeSocketFloat', 'Value1', 0.5000), - ('NodeSocketFloat', 'Value2', 0.5000), - ('NodeSocketFloat', 'Value3', 0.5000), - ('NodeSocketInt', 'Count', 10)]) - - add = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Value1"], 1: 0.0000}) - - add_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Value2"], 1: 0.0000}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': add, 'Y': add_1, 'Z': add_1}) - - cube = nw.new_node(Nodes.MeshCube, input_kwargs={'Size': combine_xyz}) - - store_named_attribute = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': cube.outputs["Mesh"], 'Name': 'uv_map', - 3: cube.outputs["UV Map"]}, - attrs={'data_type': 'FLOAT_VECTOR', 'domain': 'CORNER'}) - - add_2 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Value3"], 1: 0.0000}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': add_2}) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': store_named_attribute, 'Translation': combine_xyz_1}) - - multiply_add = nw.new_node(Nodes.Math, input_kwargs={0: add, 2: -0.0150}, attrs={'operation': 'MULTIPLY_ADD'}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply_add, 'Y': add_2}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: multiply_add, 1: -1.0000}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply, 'Y': add_2}) - - mesh_line = nw.new_node(Nodes.MeshLine, - input_kwargs={'Count': group_input.outputs["Count"], 'Start Location': combine_xyz_2, - 'Offset': combine_xyz_3}, - attrs={'mode': 'END_POINTS'}) - - instance_on_points = nw.new_node(Nodes.InstanceOnPoints, - input_kwargs={'Points': mesh_line, 'Instance': group_input.outputs["Instance"]}) - - realize_instances = nw.new_node(Nodes.RealizeInstances, input_kwargs={'Geometry': instance_on_points}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Base': transform, 'Racks': realize_instances}, - attrs={'is_active_output': True}) - - -def rack_geometry_nodes(nw: NodeWrangler, **kwargs): - # Code generated using version 2.6.5 of the node_transpiler - - rack_radius = nw.new_node(Nodes.Value, label='rack_radius') - rack_radius.outputs[0].default_value = kwargs['rack_radius'] - - rack_height = nw.new_node(Nodes.Value, label='rack_height') - rack_height.outputs[0].default_value = kwargs['rack_height'] - - rack_cyn = nw.new_node(nodegroup_rack_cyn().name, input_kwargs={'Radius': rack_radius, 'Value': rack_height}) - - base_length = nw.new_node(Nodes.Value, label='base_length') - base_length.outputs[0].default_value = kwargs['base_length'] - - base_width = nw.new_node(Nodes.Value, label='base_width') - base_width.outputs[0].default_value = kwargs['base_width'] - - base_gap = nw.new_node(Nodes.Value, label='base_gap') - base_gap.outputs[0].default_value = kwargs['base_gap'] - - integer = nw.new_node(Nodes.Integer) - integer.integer = kwargs['num_rack'] - - rack_base = nw.new_node(nodegroup_rack_base().name, - input_kwargs={'Instance': rack_cyn, 'Value1': base_length, 'Value2': base_width, - 'Value3': base_gap, 'Count': integer}) - - join_geometry = 
nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [rack_base.outputs["Base"], rack_base.outputs["Racks"]]}) - - transform_1 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': join_geometry, 'Scale': (1.0000, -1.0000, 1.0000)}) - - plate_rack_connect = nw.new_node(nodegroup_plate_rack_connect().name, - input_kwargs={'Radius': rack_radius, 'Value1': base_gap, 'Value': base_length}) - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [transform_1, join_geometry, plate_rack_connect]}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: base_width}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': multiply}) - - transform = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': join_geometry_1, 'Translation': combine_xyz}) - - realize_instances = nw.new_node(Nodes.RealizeInstances, input_kwargs={'Geometry': transform}) - - triangulate = nw.new_node('GeometryNodeTriangulate', input_kwargs={'Mesh': realize_instances}) - - set_material = nw.new_node(Nodes.SetMaterial, input_kwargs={'Geometry': triangulate, - 'Material': surface.shaderfunc_to_material(shader_wood)}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': set_material}, - attrs={'is_active_output': True}) - - -def plate_geometry_nodes(nw: NodeWrangler, **kwargs): - # Code generated using version 2.6.5 of the node_transpiler - - radius = nw.new_node(Nodes.Value, label='radius') - radius.outputs[0].default_value = kwargs['radius'] - - thickness = nw.new_node(Nodes.Value, label='thickness') - thickness.outputs[0].default_value = kwargs['thickness'] - - cylinder = nw.new_node('GeometryNodeMeshCylinder', - input_kwargs={'Vertices': 64, 'Radius': radius, 'Depth': thickness}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': radius}) - - transform_geometry = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': cylinder.outputs["Mesh"], 'Translation': combine_xyz, - 'Rotation': (0.0000, 1.5708, 0.0000)}) - - triangulate = nw.new_node('GeometryNodeTriangulate', input_kwargs={'Mesh': transform_geometry}) - - set_material = nw.new_node(Nodes.SetMaterial, input_kwargs={'Geometry': triangulate, - 'Material': surface.shaderfunc_to_material(shader_rough_plastic)}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': set_material}, - attrs={'is_active_output': True}) - - -class PlateRackBaseFactory(AssetFactory): - def __init__(self, factory_seed, params={}, coarse=False): - super(PlateRackBaseFactory, self).__init__(factory_seed, coarse=coarse) - self.params = params - - def sample_params(self): - return self.params.copy() - - def get_place_points(self, params): - # compute the lowest point in the bezier curve - xs = [] - for i in range(params['num_rack']-1): - l = params['base_length'] - d = (l - 0.03) / (params['num_rack']-1) - x = - l / 2. 
+ 0.015 + (i + 0.5) * d - xs.append(x) - - y = 0 - z = params['base_width'] - - place_points = [] - for x in xs: - place_points.append((x, y, z)) - - return place_points - - def get_asset_params(self, i=0): - params = self.sample_params() - if params.get('num_rack', None) is None: - params['num_rack'] = randint(3, 7) - if params.get('rack_radius', None) is None: - params['rack_radius'] = uniform(0.0025,0.006) - if params.get('rack_height', None) is None: - params['rack_height'] = uniform(0.08, 0.15) - if params.get('base_length', None) is None: - params['base_length'] = (params['num_rack'] - 1) * uniform(0.03, 0.06) + 0.03 - if params.get('base_gap', None) is None: - params['base_gap'] = uniform(0.05, 0.08) - if params.get('base_width', None) is None: - params['base_width'] = uniform(0.015, 0.03) - - return params - - def create_asset(self, i=0, **params): - bpy.ops.mesh.primitive_plane_add( - size=1, enter_editmode=False, align='WORLD', location=(0, 0, 0), scale=(1, 1, 1)) - obj = bpy.context.active_object - - obj_params = self.get_asset_params(i) - surface.add_geomod(obj, rack_geometry_nodes, attributes=[], apply=True, input_kwargs=obj_params) - tagging.tag_system.relabel_obj(obj) - - place_points = self.get_place_points(obj_params) - - return obj, place_points - - -class PlateBaseFactory(AssetFactory): - def __init__(self, factory_seed, params={}, coarse=False): - super(PlateBaseFactory, self).__init__(factory_seed, coarse=coarse) - self.params = params - - def sample_params(self): - return self.params.copy() - - def get_asset_params(self, i=0): - params = self.sample_params() - if params.get('radius', None) is None: - params['radius'] = uniform(0.15, 0.25) - if params.get('thickness', None) is None: - params['thickness'] = uniform(0.01, 0.025) - - return params - - def create_asset(self, i=0, **params): - bpy.ops.mesh.primitive_plane_add( - size=1, enter_editmode=False, align='WORLD', location=(0, 0, 0), scale=(1, 1, 1)) - obj = bpy.context.active_object - - obj_params = self.get_asset_params(i) - surface.add_geomod(obj, plate_geometry_nodes, attributes=[], apply=True, input_kwargs=obj_params) - tagging.tag_system.relabel_obj(obj) - - return obj - - -class PlateOnRackBaseFactory(AssetFactory): - def __init__(self, factory_seed, params={}, coarse=False): - super(PlateOnRackBaseFactory, self).__init__(factory_seed, coarse=coarse) - self.params = params - - self.rack_fac = PlateRackBaseFactory(factory_seed, params=params) - self.plate_fac = PlateBaseFactory(factory_seed, params=params) - - def get_asset_params(self, i): - if self.params.get('base_gap', None) is None: - d = uniform(0.05, 0.08) - self.rack_fac.params['base_gap'] = d - self.plate_fac.params['radius'] = d + uniform(0.025, 0.06) - - def create_asset(self, i, **params): - - self.get_asset_params(i) - rack, place_points = self.rack_fac.create_asset(i) - plate = self.plate_fac.create_asset(i) - - plate.location = place_points[0] - butil.apply_transform(plate, loc=True) - - return plate diff --git a/infinigen/assets/rocks/boulder.py b/infinigen/assets/rocks/boulder.py deleted file mode 100644 index d05d585a4..000000000 --- a/infinigen/assets/rocks/boulder.py +++ /dev/null @@ -1,150 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
- -# Authors: Lingjie Mei - - - -import logging -from functools import reduce - -import bpy -import numpy as np -import trimesh.convex -from numpy.random import uniform -import gin - -from infinigen.assets.scatters import ivy -from infinigen.core.util import blender as butil -from infinigen.core.util.math import FixedSeed -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core import surface -from infinigen.assets.utils.object import trimesh2obj -from infinigen.assets.utils.decorate import geo_extension, write_attribute -from infinigen.core.util.random import log_uniform -from infinigen.core.placement.factory import AssetFactory -from infinigen.core.placement.detail import remesh_with_attrs -from infinigen.core.tagging import tag_object, tag_nodegroup - -from infinigen.core.util.blender import deep_clone_obj -from infinigen.core.placement.split_in_view import split_inview -from infinigen.core.placement import detail - -logger = logging.getLogger(__name__) - -class BoulderFactory(AssetFactory): - - config_mappings = {'boulder': [True, False], 'slab': [False, True]} - - def __init__( - self, factory_seed, - meshing_camera=None, - adapt_mesh_method='remesh', - cam_meshing_max_dist=1e7, - coarse=False, do_voronoi=True - ): - super(BoulderFactory, self).__init__(factory_seed, coarse) - - self.camera = meshing_camera - self.cam_meshing_max_dist = cam_meshing_max_dist - self.adapt_mesh_method = adapt_mesh_method - - self.octree_depth = 3 - self.do_voronoi = do_voronoi - self.weights = [.8, .2] - self.configs = ['boulder', 'slab'] - with FixedSeed(factory_seed): - self.rock_surface = surface.registry('rock_collection') - method = np.random.choice(self.configs, p=self.weights) - self.has_horizontal_cut, self.is_slab = self.config_mappings[method] - - @gin.configurable - def create_placeholder(self, boulder_scale = 1, **kwargs) -> bpy.types.Object: - butil.select_none() - - vertices = np.random.uniform(-1, 1, (32, 3)) - obj = trimesh2obj(trimesh.convex.convex_hull(vertices)) - surface.add_geomod(obj, self.geo_extrusion, apply=True) - butil.modify_mesh(obj, 'SUBSURF', render_levels=2, levels=2, subdivision_type='SIMPLE') - - obj.location[-1] += obj.dimensions[-1] * .2 - butil.apply_transform(obj, loc=True) - if self.is_slab: - obj.scale = *log_uniform(.5, 2., 2), log_uniform(.1, .15) - else: - obj.scale = *log_uniform(.4, 1.2, 2), log_uniform(.4, .8) - - obj.scale *= boulder_scale - butil.apply_transform(obj) - obj.rotation_euler[0] = uniform(-np.pi / 24, np.pi / 24) - butil.apply_transform(obj) - obj.rotation_euler[-1] = uniform(0, np.pi * 2) - butil.apply_transform(obj) - - with butil.SelectObjects(obj): - bpy.ops.geometry.attribute_convert(mode='VERTEX_GROUP') - - butil.modify_mesh(obj, 'BEVEL', limit_method='VGROUP', vertex_group='top', invert_vertex_group=True, - offset_type='PERCENT', width_pct=10) - butil.modify_mesh(obj, 'REMESH', apply=True, mode='SHARP', octree_depth=self.octree_depth) - surface.add_geomod(obj, geo_extension, apply=True) - - if self.do_voronoi: - voronoi_texture = bpy.data.textures.new(name='boulder', type='VORONOI') - voronoi_texture.noise_scale = log_uniform(.2, .5) - voronoi_texture.distance_metric = 'DISTANCE' - butil.modify_mesh(obj, 'DISPLACE', texture=voronoi_texture, strength=.01, mid_level=0) - - voronoi_texture = bpy.data.textures.new(name='boulder', type='VORONOI') - voronoi_texture.noise_scale = log_uniform(.05, .1) - voronoi_texture.distance_metric = 'DISTANCE' - butil.modify_mesh(obj, 'DISPLACE', texture=voronoi_texture, 
strength=.01, mid_level=0) - - return obj - - def finalize_placeholders(self, placeholders): - with FixedSeed(self.factory_seed): - self.rock_surface.apply(placeholders, is_rock=True) - - @staticmethod - def geo_extrusion(nw: NodeWrangler, extrude_scale=1): - geometry = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketGeometry', 'Geometry', None)]) - face_area = nw.new_node(Nodes.InputMeshFaceArea) - - tops = [] - extrude_configs = [(uniform(.2, .3), .8, .4), (.6, .2, .6)] - top_facing = nw.compare_direction('LESS_THAN', nw.new_node(Nodes.InputNormal), (0, 0, 1), np.pi * 2 / 3) - for prob, extrude, scale in extrude_configs: - extrude = extrude * extrude_scale - face_area_stats = nw.new_node(Nodes.AttributeStatistic, [geometry, None, face_area], - attrs={'domain': 'FACE'}).outputs - selection = reduce(lambda *xs: nw.boolean_math('AND', *xs), [top_facing, nw.bernoulli(prob), - nw.compare('GREATER_THAN', face_area, face_area_stats['Mean'])]) - geometry, top, side = nw.new_node(Nodes.ExtrudeMesh, [geometry, selection, None, - nw.uniform(extrude * .5, extrude)]).outputs - geometry = nw.new_node(Nodes.ScaleElements, [geometry, top, nw.uniform(scale * .5, scale)]) - tops.append(top) - - geometry = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': geometry, 'Name': 'top', 'Value': reduce(lambda *xs: nw.boolean_math('OR', *xs), tops)}) - nw.new_node(Nodes.GroupOutput, input_kwargs={"Geometry": geometry}) - - def create_asset(self, i, placeholder, face_size=0.01, distance=0, **params): - - if self.camera is not None and distance < self.cam_meshing_max_dist: - assert self.adapt_mesh_method != 'remesh' - skin_obj, outofview, vert_dists, _ = split_inview(placeholder, cam=self.camera, vis_margin=0.15) - butil.parent_to(outofview, skin_obj, no_inverse=True, no_transform=True) - face_size = detail.target_face_size(vert_dists.min()) - else: - skin_obj = deep_clone_obj(placeholder, keep_modifiers=True, keep_materials=True) - - butil.parent_to(skin_obj, placeholder, no_inverse=True, no_transform=True) - - with butil.DisableModifiers(skin_obj): - detail.adapt_mesh_resolution(skin_obj, face_size, method=self.adapt_mesh_method, apply=True) - - butil.apply_modifiers(skin_obj) - tag_object(skin_obj, 'boulder') - - return skin_obj \ No newline at end of file diff --git a/infinigen/assets/scatters/__init__.py b/infinigen/assets/scatters/__init__.py index 5d6ed9ce3..d330f638e 100644 --- a/infinigen/assets/scatters/__init__.py +++ b/infinigen/assets/scatters/__init__.py @@ -1,2 +1,2 @@ +from .lichen import LichenFactory from .moss import MossFactory -from .lichen import LichenFactory \ No newline at end of file diff --git a/infinigen/assets/scatters/chopped_trees.py b/infinigen/assets/scatters/chopped_trees.py index d5340dc5f..54261a51f 100644 --- a/infinigen/assets/scatters/chopped_trees.py +++ b/infinigen/assets/scatters/chopped_trees.py @@ -4,34 +4,30 @@ # Authors: Alexander Raistrick -import pdb import logging import bpy import mathutils - import numpy as np -from numpy.random import uniform, normal -from tqdm import tqdm, trange - -from infinigen.core.util import blender as butil -from infinigen.core.util.math import rotate_match_directions, randomspacing -from infinigen.assets.creatures.util.geometry.metaballs import plusx_cylinder_unwrap +from numpy.random import normal, uniform -from infinigen.core.nodes import node_utils -from infinigen.core.placement.instance_scatter import scatter_instances -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from 
infinigen.assets.objects.trees.generate import GenericTreeFactory, random_species from infinigen.core import surface -from infinigen.assets.materials import wood - -from infinigen.core.placement.detail import remesh_with_attrs, target_face_size, scatter_res_distance - -from infinigen.assets.trees.generate import GenericTreeFactory, random_species +from infinigen.core.nodes.node_wrangler import Nodes +from infinigen.core.placement.detail import ( + remesh_with_attrs, + scatter_res_distance, + target_face_size, +) +from infinigen.core.placement.instance_scatter import scatter_instances +from infinigen.core.util import blender as butil +from infinigen.core.util.math import randomspacing, rotate_match_directions logger = logging.getLogger(__name__) + def approx_settle_transform(obj, samples=200): - assert obj.type == 'MESH' + assert obj.type == "MESH" if len(obj.data.vertices) < 3 or len(obj.data.polygons) == 0: return @@ -41,39 +37,41 @@ def approx_settle_transform(obj, samples=200): # sample random planes and find the normal of the biggest one verts = np.empty((len(obj.data.vertices), 3)) - obj.data.vertices.foreach_get('co', verts.reshape(-1)) - verts = np.stack([verts[np.random.choice(np.arange(len(verts)), samples)] for _ in range(3)], axis=0) + obj.data.vertices.foreach_get("co", verts.reshape(-1)) + verts = np.stack( + [verts[np.random.choice(np.arange(len(verts)), samples)] for _ in range(3)], + axis=0, + ) ups = np.cross(verts[0] - verts[1], verts[0] - verts[2], axis=-1) best = np.linalg.norm(ups, axis=-1).argmax() # rotate according to that axis - rot_mat = rotate_match_directions(ups[best].reshape(1, 3), np.array([0, 0, 1]).reshape(1, 3))[0] + rot_mat = rotate_match_directions( + ups[best].reshape(1, 3), np.array([0, 0, 1]).reshape(1, 3) + )[0] obj.rotation_euler = mathutils.Matrix(rot_mat).to_euler() with butil.SelectObjects(obj): - bpy.ops.object.origin_set(type='ORIGIN_GEOMETRY', center='MEDIAN') + bpy.ops.object.origin_set(type="ORIGIN_GEOMETRY", center="MEDIAN") bpy.ops.object.transform_apply(location=False, rotation=True, scale=True) - + return obj -def chop_object( - obj, n, cutter_size, - max_tilt=15, thickness=0.03 -): - assert obj.type == 'MESH' +def chop_object(obj, n, cutter_size, max_tilt=15, thickness=0.03): + assert obj.type == "MESH" bbox = np.array([obj.matrix_world @ mathutils.Vector(v) for v in obj.bound_box]) def cutter(t): butil.select_none() z = butil.lerp(bbox[:, -1].min(), bbox[:, -1].max(), t) - loc = (*bbox[:,:-1].mean(axis=0), z) + loc = (*bbox[:, :-1].mean(axis=0), z) bpy.ops.mesh.primitive_plane_add(size=cutter_size, location=loc) cut = bpy.context.active_object - cut.name = f'cutter({t:.2f})' + cut.name = f"cutter({t:.2f})" - butil.modify_mesh(cut, 'SOLIDIFY', thickness=thickness) + butil.modify_mesh(cut, "SOLIDIFY", thickness=thickness) butil.recalc_normals(cut, inside=False) if uniform() < 0.95: @@ -81,92 +79,114 @@ def cutter(t): else: # vertical chopper to break things up cut.location += mathutils.Vector(normal(0, 0.5, 3)) - cut.rotation_euler = np.deg2rad((uniform([-max_tilt, 50, 0], [max_tilt, 80, 360]))) + cut.rotation_euler = np.deg2rad( + (uniform([-max_tilt, 50, 0], [max_tilt, 80, 360])) + ) return cut - cutters = [cutter(t) for t in randomspacing(0.05, 0.85, n, margin=uniform(0.1, 0.4))] - chopped = butil.boolean([obj] + cutters, mode='DIFFERENCE', verbose=True) + cutters = [ + cutter(t) for t in randomspacing(0.05, 0.85, n, margin=uniform(0.1, 0.4)) + ] + chopped = butil.boolean([obj] + cutters, mode="DIFFERENCE", verbose=True) 
butil.delete(cutters) - chopped_list = butil.split_object(chopped, mode='LOOSE') + chopped_list = butil.split_object(chopped, mode="LOOSE") for obj in chopped_list: bpy.context.view_layer.objects.active = obj bpy.context.object.active_material_index = len(obj.material_slots) - 1 - bpy.ops.object.material_slot_remove() # remove the default white mat - + bpy.ops.object.material_slot_remove() # remove the default white mat + return chopped_list -def chopped_tree_collection(species_seed, n, boolean_res_mult=5): +def chopped_tree_collection(species_seed, n, boolean_res_mult=5): objs = [] - (genome, _, _), _ = random_species(season='winter') - factory = GenericTreeFactory(species_seed, genome, realize=True, - child_col=None, trunk_surface=surface.NoApply, - decimate_placeholder_levels=0, - coarse_mesh_placeholder=True) - trees = [factory.spawn_placeholder(i,(0,0,0),(0,0,0)) for i in range(n)] - - bark = surface.registry('bark') + (genome, _, _), _ = random_species(season="winter") + factory = GenericTreeFactory( + species_seed, + genome, + realize=True, + child_col=None, + trunk_surface=surface.NoApply, + decimate_placeholder_levels=0, + coarse_mesh_placeholder=True, + ) + trees = [factory.spawn_placeholder(i, (0, 0, 0), (0, 0, 0)) for i in range(n)] + + bark = surface.registry("bark") face_size = target_face_size(scatter_res_distance()) - attr_name = 'original_surface' + attr_name = "original_surface" for t in trees: butil.delete(list(t.children)) - remesh_with_attrs(t, face_size=boolean_res_mult*face_size) # lower res for efficiency - surface.write_attribute(trees, lambda nw: 1, attr_name, data_type='FLOAT', apply=True) + remesh_with_attrs( + t, face_size=boolean_res_mult * face_size + ) # lower res for efficiency + surface.write_attribute( + trees, lambda nw: 1, attr_name, data_type="FLOAT", apply=True + ) for i, tree in enumerate(trees): - n_chops = np.random.randint(3, 6) cutter_size = max(tree.dimensions[:-1]) chopped = chop_object(tree, n=n_chops, cutter_size=cutter_size) for j, o in enumerate(chopped): - if ( - len(o.data.vertices) < 10 or - max(o.dimensions) < 0.1 or - max(o.dimensions) > cutter_size * 0.8 + len(o.data.vertices) < 10 + or max(o.dimensions) < 0.1 + or max(o.dimensions) > cutter_size * 0.8 ): - logger.debug(f'filtering {i, j} with {len(o.data.vertices)=}, {o.dimensions=}') + logger.debug( + f"filtering {i, j} with {len(o.data.vertices)=}, {o.dimensions=}" + ) butil.delete(o) chopped[j] = None continue - o.name = f'chopped_tree({species_seed}, {i}, {j})' + o.name = f"chopped_tree({species_seed}, {i}, {j})" chopped[j] = remesh_with_attrs(o, face_size=face_size) chopped = [o for o in chopped if o is not None] - def selection(nw): - orig = nw.new_node(Nodes.NamedAttribute, [attr_name], attrs=dict(data_type='FLOAT')) - return nw.compare('GREATER_THAN', orig, 0.9999) # some interp will happen for some reason, clamp it + orig = nw.new_node( + Nodes.NamedAttribute, [attr_name], attrs=dict(data_type="FLOAT") + ) + return nw.compare( + "GREATER_THAN", orig, 0.9999 + ) # some interp will happen for some reason, clamp it + bark.apply(chopped, selection=selection) for o in chopped: butil.apply_modifiers(o) approx_settle_transform(o) - o.location = (0,0,0) + o.location = (0, 0, 0) o.parent = None objs += chopped - - return butil.group_in_collection(objs, 'assets:chopped_tree', reuse=False) -def apply(obj, species_seed=None, selection=None, n_trees=1, **kwargs): + return butil.group_in_collection(objs, "assets:chopped_tree", reuse=False) + +def apply(obj, species_seed=None, 
selection=None, n_trees=1, **kwargs): assert obj is not None if species_seed is None: species_seed = np.random.randint(1e6) - + col = chopped_tree_collection(species_seed, n=n_trees) col.hide_viewport = True scatter_obj = scatter_instances( - base_obj=obj, collection=col, - scale=1, scale_rand=0.5, scale_rand_axi=0.15, - ground_offset=0.1, density=0.7, - selection=selection) + base_obj=obj, + collection=col, + scale=1, + scale_rand=0.5, + scale_rand_axi=0.15, + ground_offset=0.1, + density=0.7, + selection=selection, + ) return scatter_obj, col diff --git a/infinigen/assets/scatters/clothes.py b/infinigen/assets/scatters/clothes.py index 123197206..cf4966756 100644 --- a/infinigen/assets/scatters/clothes.py +++ b/infinigen/assets/scatters/clothes.py @@ -9,7 +9,7 @@ import numpy as np from numpy.random import uniform -from infinigen.assets.creatures.util.cloth_sim import bake_cloth +from infinigen.assets.objects.creatures.util.cloth_sim import bake_cloth from infinigen.assets.utils.decorate import read_co, subsurf from infinigen.core.placement.factory import make_asset_collection from infinigen.core.util import blender as butil @@ -17,16 +17,20 @@ def cloth_sim(clothes, obj=None, end_frame=50, **kwargs): - with butil.ViewportMode(clothes, mode='OBJECT'), butil.SelectObjects(clothes), butil.Suppress(): + with ( + butil.ViewportMode(clothes, mode="OBJECT"), + butil.SelectObjects(clothes), + butil.Suppress(), + ): bpy.ops.ptcache.free_bake_all() if obj is None: obj = [] for o in obj if isinstance(obj, Iterable) else [obj]: - butil.modify_mesh(o, 'COLLISION', apply=False) - o.collision.damping_factor = .9 - o.collision.cloth_friction = 10. - o.collision.friction_factor = 1. - o.collision.stickiness = .9 + butil.modify_mesh(o, "COLLISION", apply=False) + o.collision.damping_factor = 0.9 + o.collision.cloth_friction = 10.0 + o.collision.friction_factor = 1.0 + o.collision.stickiness = 0.9 frame = bpy.context.scene.frame_current butil.select_none() with butil.Suppress(): @@ -42,37 +46,46 @@ def cloth_sim(clothes, obj=None, end_frame=50, **kwargs): class ClothesCover: - def __init__(self, bbox=(.3, .7, .3, .7), factory_fn=None, width=None, size=None): + def __init__( + self, bbox=(0.3, 0.7, 0.3, 0.7), factory_fn=None, width=None, size=None + ): from infinigen.assets.clothes import blanket, pants, shirt + probs = np.array([2, 1, 1]) if factory_fn is None: factory_fn = np.random.choice( [blanket.BlanketFactory, shirt.ShirtFactory, pants.PantsFactory], - p=probs / probs.sum() + p=probs / probs.sum(), ) self.factory = factory_fn(np.random.randint(1e5)) if width is not None: self.factory.width = width if size is not None: self.factory.size = size - self.col = make_asset_collection(self.factory, name='clothes', centered=True, n=3, verbose=False) + self.col = make_asset_collection( + self.factory, name="clothes", centered=True, n=3, verbose=False + ) self.bbox = bbox - self.z_offset = .2 + self.z_offset = 0.2 def apply(self, obj, selection=None, **kwargs): for obj in obj if isinstance(obj, list) else [obj]: x, y, z = read_co(obj).T - clothes = deep_clone_obj(np.random.choice(self.col.objects), keep_materials=True) + clothes = deep_clone_obj( + np.random.choice(self.col.objects), keep_materials=True + ) clothes.parent = obj - clothes.location = uniform(self.bbox[0], self.bbox[1]) * (np.max(x) - np.min(x)) + np.min( - x - ), uniform(self.bbox[2], self.bbox[3]) * (np.max(y) - np.min(y)) + np.min(y), np.max( - z - ) + self.z_offset - np.min(read_co(clothes)[:, -1]) + clothes.location = ( + 
uniform(self.bbox[0], self.bbox[1]) * (np.max(x) - np.min(x)) + + np.min(x), + uniform(self.bbox[2], self.bbox[3]) * (np.max(y) - np.min(y)) + + np.min(y), + np.max(z) + self.z_offset - np.min(read_co(clothes)[:, -1]), + ) clothes.rotation_euler[-1] = uniform(0, np.pi * 2) - cloth_sim(clothes, obj, mass=.05, tension_stiffness=2, distance_min=5e-3) + cloth_sim(clothes, obj, mass=0.05, tension_stiffness=2, distance_min=5e-3) subsurf(clothes, 2) def apply(obj, selection=None, **kwargs): - ClothesCover().apply(obj, selection, **kwargs) \ No newline at end of file + ClothesCover().apply(obj, selection, **kwargs) diff --git a/infinigen/assets/scatters/coral_reef.py b/infinigen/assets/scatters/coral_reef.py index 6a6f9a47f..99406c3cf 100644 --- a/infinigen/assets/scatters/coral_reef.py +++ b/infinigen/assets/scatters/coral_reef.py @@ -7,47 +7,61 @@ import numpy as np from numpy.random import uniform as U -from infinigen.core.placement.factory import AssetFactory, make_asset_collection -from infinigen.assets.corals.generate import CoralFactory, TableCoralFactory - +from infinigen.assets.objects.corals.generate import CoralFactory, TableCoralFactory +from infinigen.core.placement.factory import make_asset_collection from infinigen.core.placement.instance_scatter import scatter_instances -def apply(obj, scale=1, density=5., n=12, selection=None, horizontal=False, **kwargs): +def apply(obj, scale=1, density=5.0, n=12, selection=None, horizontal=False, **kwargs): if horizontal: return apply_horizontal(obj, scale, density, n, selection) else: return apply_all(obj, scale, density, n, selection) -def apply_all(obj, scale=1, density=5., n=12, selection=None): +def apply_all(obj, scale=1, density=5.0, n=12, selection=None): n_species = np.random.randint(5, 10) factories = [CoralFactory(np.random.randint(1e7)) for i in range(n_species)] - corals = make_asset_collection(factories, name='coral', weights=U(0.8, 1, len(factories)), n=n) + corals = make_asset_collection( + factories, name="coral", weights=U(0.8, 1, len(factories)), n=n + ) scatter_obj = scatter_instances( - base_obj=obj, collection=corals, - density=density, min_spacing=scale*0.7, - scale=scale, scale_rand=0.5, scale_rand_axi=U(0, 0.2), - selection=selection) + base_obj=obj, + collection=corals, + density=density, + min_spacing=scale * 0.7, + scale=scale, + scale_rand=0.5, + scale_rand_axi=U(0, 0.2), + selection=selection, + ) return scatter_obj, corals -def apply_horizontal(obj, scale=1, density=5., n=4, selection=None): +def apply_horizontal(obj, scale=1, density=5.0, n=4, selection=None): n_species = np.random.randint(2, 3) factories = [TableCoralFactory(np.random.randint(1e5)) for _ in range(n_species)] - corals = make_asset_collection(factories, name='coral', - weights=np.random.uniform(0.8, 1, len(factories)), n=n, - verbose=True) + corals = make_asset_collection( + factories, + name="coral", + weights=np.random.uniform(0.8, 1, len(factories)), + n=n, + verbose=True, + ) r = np.deg2rad(10) scatter_obj = scatter_instances( - base_obj=obj, collection=corals, - density=density, min_spacing=scale * 0.5, - scale=1.5, scale_rand=U(0.2, 0.8), scale_rand_axi=U(0, 0.3), - normal=(0, 0, 1), - rotation_offset=lambda nw: nw.uniform(3*(-r,), 3*(r,)), - selection=selection + base_obj=obj, + collection=corals, + density=density, + min_spacing=scale * 0.5, + scale=1.5, + scale_rand=U(0.2, 0.8), + scale_rand_axi=U(0, 0.3), + normal=(0, 0, 1), + rotation_offset=lambda nw: nw.uniform(3 * (-r,), 3 * (r,)), + selection=selection, ) return scatter_obj, corals 
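A brief usage sketch for the reformatted coral scatter above (illustrative only; `ground` is a placeholder for any existing terrain mesh object, and the call simply mirrors the `make_asset_collection` / `scatter_instances` pattern visible in the hunk):

```python
from infinigen.assets.scatters import coral_reef

# `ground` is a hypothetical, already-created terrain mesh (a bpy object in the scene).
# coral_reef.apply() builds a coral asset collection via make_asset_collection(...)
# and instances it over the mesh with scatter_instances(...), returning both.
scatter_obj, corals = coral_reef.apply(ground, scale=1, density=5.0, n=12)
```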
diff --git a/infinigen/assets/scatters/decorative_plants.py b/infinigen/assets/scatters/decorative_plants.py index 05d63db8c..3c3861d04 100644 --- a/infinigen/assets/scatters/decorative_plants.py +++ b/infinigen/assets/scatters/decorative_plants.py @@ -7,32 +7,29 @@ import numpy as np from numpy.random import uniform as U - -from infinigen.core.placement.factory import AssetFactory, make_asset_collection +from infinigen.assets.objects.small_plants import succulent +from infinigen.assets.scatters.utils.wind import wind +from infinigen.core.placement.factory import make_asset_collection from infinigen.core.placement.instance_scatter import scatter_instances -from infinigen.core.placement import detail -from infinigen.core.nodes import node_utils -from infinigen.assets.small_plants import succulent - -from infinigen.assets.scatters.utils.wind import wind def apply(obj, n=4, selection=None, **kwargs): - - fac_class = np.random.choice([ - succulent.SucculentFactory - ]) + fac_class = np.random.choice([succulent.SucculentFactory]) monocots = make_asset_collection( - fac_class(np.random.randint(1e5)), - n=n, verbose=True, **kwargs) + fac_class(np.random.randint(1e5)), n=n, verbose=True, **kwargs + ) scatter_obj = scatter_instances( - base_obj=obj, collection=monocots, - vol_density=U(0.05, 2), min_spacing=0.1, + base_obj=obj, + collection=monocots, + vol_density=U(0.05, 2), + min_spacing=0.1, normal_fac=0.5, - scale=U(0.3, 1), scale_rand=U(0.5, 0.95), + scale=U(0.3, 1), + scale_rand=U(0.5, 0.95), rotation_offset=wind(strength=10), taper_density=True, - selection=selection) + selection=selection, + ) return scatter_obj, monocots diff --git a/infinigen/assets/scatters/fern.py b/infinigen/assets/scatters/fern.py index 74e89c53d..aafa8fd96 100644 --- a/infinigen/assets/scatters/fern.py +++ b/infinigen/assets/scatters/fern.py @@ -5,24 +5,27 @@ import numpy as np -from infinigen.core.placement.instance_scatter import scatter_instances -from infinigen.core.placement.factory import AssetFactory, make_asset_collection - -from infinigen.assets.small_plants.fern import FernFactory -from infinigen.core.util.random import random_general as rg +from infinigen.assets.objects.small_plants.fern import FernFactory from infinigen.assets.scatters.utils.wind import wind - -def apply(obj, selection=None, density=('uniform', 1, 6), **kwargs): +from infinigen.core.placement.factory import make_asset_collection +from infinigen.core.placement.instance_scatter import scatter_instances +from infinigen.core.util.random import random_general as rg - fern_col = make_asset_collection(FernFactory(np.random.randint(1e5)), n=2, verbose=True) +def apply(obj, selection=None, density=("uniform", 1, 6), **kwargs): + fern_col = make_asset_collection( + FernFactory(np.random.randint(1e5)), n=2, verbose=True + ) scatter_obj = scatter_instances( - base_obj=obj, collection=fern_col, - scale=0.7, scale_rand=0.7, scale_rand_axi=0.3, + base_obj=obj, + collection=fern_col, + scale=0.7, + scale_rand=0.7, + scale_rand_axi=0.3, vol_density=rg(density), - normal_fac=0.3, + normal_fac=0.3, rotation_offset=wind(strength=10), - selection=selection + selection=selection, ) - return scatter_obj, fern_col \ No newline at end of file + return scatter_obj, fern_col diff --git a/infinigen/assets/scatters/flowerplant.py b/infinigen/assets/scatters/flowerplant.py index e2ecc3e74..b2fa057f7 100644 --- a/infinigen/assets/scatters/flowerplant.py +++ b/infinigen/assets/scatters/flowerplant.py @@ -5,33 +5,32 @@ import numpy as np -from numpy.random import 
uniform, normal -from mathutils import Vector -from infinigen.core.util import blender as butil +from infinigen.assets.objects.grassland.flowerplant import FlowerPlantFactory +from infinigen.assets.scatters.utils.wind import wind +from infinigen.core.placement.factory import make_asset_collection from infinigen.core.placement.instance_scatter import scatter_instances -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core import surface -from infinigen.core.nodes import node_utils -from infinigen.core.placement.factory import AssetFactory, make_asset_collection -from infinigen.assets.grassland.flowerplant import FlowerPlantFactory -from infinigen.assets.materials import simple_greenery -from infinigen.assets.scatters.utils.wind import wind def apply(obj, selection=None, density=1.0): + flowerplant_col = make_asset_collection( + FlowerPlantFactory(np.random.randint(1e5)), n=12, verbose=True + ) - flowerplant_col = make_asset_collection(FlowerPlantFactory(np.random.randint(1e5)), n=12, verbose=True) - avg_vol = np.mean([np.prod(list(o.dimensions)) for o in flowerplant_col.objects]) density = np.clip(density / avg_vol, 0, 200) scatter_obj = scatter_instances( - base_obj=obj, collection=flowerplant_col, - scale=1.5, scale_rand=0.7, scale_rand_axi=0.2, + base_obj=obj, + collection=flowerplant_col, + scale=1.5, + scale_rand=0.7, + scale_rand_axi=0.2, density=float(density), - ground_offset=0, normal_fac=0.3, + ground_offset=0, + normal_fac=0.3, rotation_offset=wind(strength=20), - selection=selection, taper_scale=True + selection=selection, + taper_scale=True, ) - return scatter_obj, flowerplant_col \ No newline at end of file + return scatter_obj, flowerplant_col diff --git a/infinigen/assets/scatters/grass.py b/infinigen/assets/scatters/grass.py index 07bf68fb3..587aefb5b 100644 --- a/infinigen/assets/scatters/grass.py +++ b/infinigen/assets/scatters/grass.py @@ -4,35 +4,27 @@ # Authors: Alex Raistrick -from math import prod - -import bpy - import numpy as np -from numpy.random import uniform as U from mathutils import Vector +from numpy.random import uniform as U -from infinigen.core.util import blender as butil - - -from infinigen.core.placement.factory import AssetFactory, make_asset_collection -from infinigen.core.placement.instance_scatter import scatter_instances +from infinigen.assets.objects.grassland.grass_tuft import GrassTuftFactory +from infinigen.assets.scatters.utils.wind import wind from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core import surface - - -from infinigen.assets.materials import simple_greenery, grass_blade_texture - -from infinigen.assets.grassland.grass_tuft import GrassTuftFactory +from infinigen.core.placement.factory import make_asset_collection +from infinigen.core.placement.instance_scatter import scatter_instances -from infinigen.assets.scatters.utils.wind import wind def scale_grass(nw: NodeWrangler): - random_scaling = nw.new_node(Nodes.RandomValue, input_kwargs={0: Vector((1.,1.,1.)), 1: Vector((1.2,1.2,2.))}, attrs={"data_type":'FLOAT_VECTOR'}) - return nw.multiply(random_scaling, Vector((2.5,2.5,2.5))) + random_scaling = nw.new_node( + Nodes.RandomValue, + input_kwargs={0: Vector((1.0, 1.0, 1.0)), 1: Vector((1.2, 1.2, 2.0))}, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + return nw.multiply(random_scaling, Vector((2.5, 2.5, 2.5))) -def apply(obj, selection=None, **kwargs): +def apply(obj, selection=None, **kwargs): n_fac = 1 facs = [GrassTuftFactory(np.random.randint(1e7)) for _ in 
range(n_fac)] grass_col = make_asset_collection(facs, n=10) @@ -40,12 +32,15 @@ def apply(obj, selection=None, **kwargs): scatter_obj = scatter_instances( base_obj=obj, collection=grass_col, - scale=U(1, 3), scale_rand=U(0.7, 1), scale_rand_axi=0.1, + scale=U(1, 3), + scale_rand=U(0.7, 1), + scale_rand_axi=0.1, vol_density=U(0.5, 5), - ground_offset=0, normal_fac=U(0, 0.5), + ground_offset=0, + normal_fac=U(0, 0.5), rotation_offset=wind(strength=10), selection=selection, - taper_scale=True + taper_scale=True, ) return scatter_obj, grass_col diff --git a/infinigen/assets/scatters/ground_leaves.py b/infinigen/assets/scatters/ground_leaves.py index 35f00b5bd..cb3d8a90c 100644 --- a/infinigen/assets/scatters/ground_leaves.py +++ b/infinigen/assets/scatters/ground_leaves.py @@ -5,20 +5,20 @@ from numpy.random import uniform as U -from mathutils import Vector +from infinigen.assets.objects.trees.generate import random_leaf_collection from infinigen.core.placement.instance_scatter import scatter_instances -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.assets.trees.generate import random_leaf_collection def apply(obj, selection=None, density=70, season=None, **kwargs): - leaf_col=random_leaf_collection(season=season) + leaf_col = random_leaf_collection(season=season) return scatter_instances( base_obj=obj, collection=leaf_col, - scale=0.3, scale_rand=U(0, 0.9), - density=density, + scale=0.3, + scale_rand=U(0, 0.9), + density=density, ground_offset=0.05, selection=selection, - taper_density=True) + taper_density=True, + ) diff --git a/infinigen/assets/scatters/ground_mushroom.py b/infinigen/assets/scatters/ground_mushroom.py index fc7c6265c..959c11da3 100644 --- a/infinigen/assets/scatters/ground_mushroom.py +++ b/infinigen/assets/scatters/ground_mushroom.py @@ -7,27 +7,35 @@ import numpy as np from numpy.random import uniform as U -from infinigen.assets.mushroom import MushroomFactory -from infinigen.core.nodes.node_wrangler import NodeWrangler -from infinigen.core.placement.factory import AssetFactory, make_asset_collection +from infinigen.assets.objects.mushroom import MushroomFactory +from infinigen.core.placement.factory import make_asset_collection from infinigen.core.placement.instance_scatter import scatter_instances + class Mushrooms: - def __init__(self, n=10): - self.n_species = np.random.randint(2, 3) - self.factories = [MushroomFactory(np.random.randint(1e5)) for i in range(self.n_species)] + self.factories = [ + MushroomFactory(np.random.randint(1e5)) for i in range(self.n_species) + ] self.col = make_asset_collection( - self.factories, name='mushroom', n=n, verbose=True, - weights=np.random.uniform(0.5, 1, len(self.factories))) - - def apply(self, obj, scale=0.3, density=1., selection=None): - + self.factories, + name="mushroom", + n=n, + verbose=True, + weights=np.random.uniform(0.5, 1, len(self.factories)), + ) + + def apply(self, obj, scale=0.3, density=1.0, selection=None): scatter_obj = scatter_instances( - base_obj=obj, collection=self.col, - density=density, min_spacing=scale, - scale=scale, scale_rand=U(0.5, 0.9), - selection=selection, taper_scale=True) + base_obj=obj, + collection=self.col, + density=density, + min_spacing=scale, + scale=scale, + scale_rand=U(0.5, 0.9), + selection=selection, + taper_scale=True, + ) return scatter_obj diff --git a/infinigen/assets/scatters/ground_twigs.py b/infinigen/assets/scatters/ground_twigs.py index 825ea305a..97e2e0cd0 100644 --- a/infinigen/assets/scatters/ground_twigs.py +++ 
b/infinigen/assets/scatters/ground_twigs.py @@ -4,41 +4,44 @@ # Authors: Alexander Raistrick -from random import random -import bpy - import numpy as np from numpy.random import uniform as U -from mathutils import Vector - -from infinigen.core.util import blender as butil -from infinigen.core.util.blender import deep_clone_obj -from infinigen.core.nodes import node_utils -from infinigen.core.placement.instance_scatter import scatter_instances -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.assets.objects.trees.generate import make_twig_collection, random_species from infinigen.core import surface +from infinigen.core.placement.instance_scatter import scatter_instances -from infinigen.assets.trees.generate import make_twig_collection, random_species +from ..utils.misc import toggle_hide, toggle_show from .chopped_trees import approx_settle_transform -from ..utils.misc import toggle_show, toggle_hide def apply(obj, selection=None, n_leaf=0, n_twig=10, **kwargs): + (_, twig_params, leaf_params), _ = random_species(season="winter") + twigs = make_twig_collection( + np.random.randint(1e5), + twig_params, + leaf_params, + n_leaf=n_leaf, + n_twig=n_twig, + leaf_types=None, + trunk_surface=surface.registry("bark"), + ) - (_, twig_params, leaf_params), _ = random_species(season='winter') - twigs = make_twig_collection(np.random.randint(1e5), twig_params, leaf_params, - n_leaf=n_leaf, n_twig=n_twig, leaf_types=None, trunk_surface=surface.registry('bark')) - toggle_show(twigs) for o in twigs.objects: approx_settle_transform(o, samples=40) toggle_hide(twigs) scatter_obj = scatter_instances( - base_obj=obj, collection=twigs, - scale=U(0.15, 0.3), scale_rand=U(0, 0.3), scale_rand_axi=U(0, 0.2), - density=10, ground_offset=0.05, - selection=selection, taper_density=True) - + base_obj=obj, + collection=twigs, + scale=U(0.15, 0.3), + scale_rand=U(0, 0.3), + scale_rand_axi=U(0, 0.2), + density=10, + ground_offset=0.05, + selection=selection, + taper_density=True, + ) + return scatter_obj, twigs diff --git a/infinigen/assets/scatters/ivy.py b/infinigen/assets/scatters/ivy.py index 9a7def7d0..8230173c1 100644 --- a/infinigen/assets/scatters/ivy.py +++ b/infinigen/assets/scatters/ivy.py @@ -5,116 +5,185 @@ # Authors: Lingjie Mei -from collections.abc import Iterable - import numpy as np from numpy.random import uniform -from infinigen.assets.leaves.leaf_maple import LeafFactoryMaple -from infinigen.assets.trees.generate import random_season - +from infinigen.assets.materials.simple_brownish import shader_simple_brown +from infinigen.assets.objects.leaves.leaf_maple import LeafFactoryMaple +from infinigen.assets.objects.trees.generate import random_season from infinigen.assets.utils.mesh import fix_tree from infinigen.assets.utils.misc import assign_material from infinigen.assets.utils.nodegroup import geo_base_selection, geo_radius from infinigen.assets.utils.shortest_path import geo_shortest_path +from infinigen.core import surface from infinigen.core.nodes.node_info import Nodes -from infinigen.core.placement.factory import AssetFactory, make_asset_collection from infinigen.core.nodes.node_wrangler import NodeWrangler -from infinigen.core import surface +from infinigen.core.placement.factory import make_asset_collection from infinigen.core.surface import shaderfunc_to_material -from infinigen.assets.materials.simple_brownish import shader_simple_brown +from infinigen.core.tagging import tag_object from infinigen.core.util import blender as butil -from 
infinigen.core.tagging import tag_object, tag_nodegroup def geo_leaf(nw: NodeWrangler, leaves): - leaf_up_prob = uniform(.0, .2) - geometry = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketGeometry', 'Geometry', None)]) - normal = nw.new_node(Nodes.NamedAttribute, ['custom_normal'], attrs={'data_type': 'FLOAT_VECTOR'}) - tangent = nw.new_node(Nodes.NamedAttribute, ['tangent'], attrs={'data_type': 'FLOAT_VECTOR'}) - cotangent = nw.vector_math('CROSS_PRODUCT', tangent, normal) - switch = nw.compare('LESS_THAN', nw.separate(cotangent)[-1], 0) - cotangent = nw.scale(nw.switch(nw.bernoulli(leaf_up_prob), -1, 1), - nw.scale(nw.switch(switch, 1, -1), cotangent)) + leaf_up_prob = uniform(0.0, 0.2) + geometry = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) + normal = nw.new_node( + Nodes.NamedAttribute, ["custom_normal"], attrs={"data_type": "FLOAT_VECTOR"} + ) + tangent = nw.new_node( + Nodes.NamedAttribute, ["tangent"], attrs={"data_type": "FLOAT_VECTOR"} + ) + cotangent = nw.vector_math("CROSS_PRODUCT", tangent, normal) + switch = nw.compare("LESS_THAN", nw.separate(cotangent)[-1], 0) + cotangent = nw.scale( + nw.switch(nw.bernoulli(leaf_up_prob), -1, 1), + nw.scale(nw.switch(switch, 1, -1), cotangent), + ) perturb = np.pi / 6 - points, _, rotation = nw.new_node(Nodes.DistributePointsOnFaces, - input_kwargs={'Mesh': geometry, 'Density': uniform(500, 1000)}).outputs[ - :3] - rotation = nw.new_node(Nodes.AlignEulerToVector, [rotation, 1., normal], attrs={'axis': 'Z'}) + points, _, rotation = nw.new_node( + Nodes.DistributePointsOnFaces, + input_kwargs={"Mesh": geometry, "Density": uniform(500, 1000)}, + ).outputs[:3] + rotation = nw.new_node( + Nodes.AlignEulerToVector, [rotation, 1.0, normal], attrs={"axis": "Z"} + ) # Leaves have primary axes Y - rotation = nw.new_node(Nodes.AlignEulerToVector, [rotation, 1., cotangent], - attrs={'axis': 'Y', 'pivot_axis': 'Z'}) + rotation = nw.new_node( + Nodes.AlignEulerToVector, + [rotation, 1.0, cotangent], + attrs={"axis": "Y", "pivot_axis": "Z"}, + ) rotation = nw.add(rotation, nw.uniform([-perturb] * 3, [perturb] * 3)) leaves = nw.new_node(Nodes.CollectionInfo, [leaves, True, True]) - instances = nw.new_node(Nodes.InstanceOnPoints, input_kwargs={ - 'Points': points, - 'Instance': leaves, - 'Pick Instance': True, - 'Rotation': rotation, - 'Scale': nw.uniform(.6, 1.) 
- }) + instances = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={ + "Points": points, + "Instance": leaves, + "Pick Instance": True, + "Rotation": rotation, + "Scale": nw.uniform(0.6, 1.0), + }, + ) instances = nw.new_node(Nodes.RealizeInstances, [instances]) geometry = nw.new_node(Nodes.JoinGeometry, [[geometry, instances]]) - nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': geometry}) + nw.new_node(Nodes.GroupOutput, input_kwargs={"Geometry": geometry}) class LeafFactoryIvy(LeafFactoryMaple): - - def __init__(self, factory_seed, season='spring', coarse=False): + def __init__(self, factory_seed, season="spring", coarse=False): super().__init__(factory_seed, season, coarse) def create_asset(self, face_size, **params): obj = super().create_asset(face_size=face_size, **params) - obj.scale = [.2] * 3 + obj.scale = [0.2] * 3 butil.apply_transform(obj) - butil.modify_mesh(obj, 'WELD', merge_threshold=face_size / 2, mode='CONNECTED') - tag_object(obj, 'leaf_ivy') + butil.modify_mesh(obj, "WELD", merge_threshold=face_size / 2, mode="CONNECTED") + tag_object(obj, "leaf_ivy") return obj class Ivy: - def __init__(self): self.factory = LeafFactoryIvy(np.random.randint(0, 1e5), random_season()) self.col = make_asset_collection(self.factory, 5) def apply(self, obj, selection=None): - - scatter_obj = butil.spawn_vert('scatter:' + 'ivy') - surface.add_geomod(scatter_obj, geo_base_selection, apply=True, input_args=[obj, selection, .05]) - - end_index = lambda nw: nw.compare('EQUAL', nw.new_node(Nodes.Index), - np.random.randint(len(scatter_obj.data.vertices))) - weight = lambda nw: nw.scalar_multiply(nw.uniform(.8, 1), nw.scalar_sub(2, nw.math('ABSOLUTE', nw.dot( - nw.vector_math('NORMALIZE', nw.sub(*nw.new_node(Nodes.InputEdgeVertices).outputs[2:])), - (0, 0, 1))))) - surface.add_geomod(scatter_obj, geo_shortest_path, apply=True, - input_args=[end_index, weight, uniform(.1, .15), uniform(.1, .15)]) + scatter_obj = butil.spawn_vert("scatter:" + "ivy") + surface.add_geomod( + scatter_obj, + geo_base_selection, + apply=True, + input_args=[obj, selection, 0.05], + ) + + def end_index(nw): + return nw.compare( + "EQUAL", + nw.new_node(Nodes.Index), + np.random.randint(len(scatter_obj.data.vertices)), + ) + + def weight(nw): + return nw.scalar_multiply( + nw.uniform(0.8, 1), + nw.scalar_sub( + 2, + nw.math( + "ABSOLUTE", + nw.dot( + nw.vector_math( + "NORMALIZE", + nw.sub( + *nw.new_node(Nodes.InputEdgeVertices).outputs[2:] + ), + ), + (0, 0, 1), + ), + ), + ), + ) + + surface.add_geomod( + scatter_obj, + geo_shortest_path, + apply=True, + input_args=[end_index, weight, uniform(0.1, 0.15), uniform(0.1, 0.15)], + ) fix_tree(scatter_obj) - surface.add_geomod(scatter_obj, geo_radius, apply=True, input_args=[.005, 12]) + surface.add_geomod(scatter_obj, geo_radius, apply=True, input_args=[0.005, 12]) assign_material(scatter_obj, shaderfunc_to_material(shader_simple_brown)) surface.add_geomod(scatter_obj, geo_leaf, apply=True, input_args=[self.col]) return scatter_obj + def apply(obj, selection=None): factory = LeafFactoryIvy(np.random.randint(0, 1e5), random_season()) col = make_asset_collection(factory, 5) - scatter_obj = butil.spawn_vert('scatter:' + 'ivy') - surface.add_geomod(scatter_obj, geo_base_selection, apply=True, input_args=[obj, selection, .05]) - - end_index = lambda nw: nw.compare('EQUAL', nw.new_node(Nodes.Index), - np.random.randint(len(scatter_obj.data.vertices))) - weight = lambda nw: nw.scalar_multiply(nw.uniform(.8, 1), nw.scalar_sub(2, nw.math('ABSOLUTE', nw.dot( - 
nw.vector_math('NORMALIZE', nw.sub(*nw.new_node(Nodes.InputEdgeVertices).outputs[2:])), - (0, 0, 1))))) - surface.add_geomod(scatter_obj, geo_shortest_path, apply=True, - input_args=[end_index, weight, uniform(.1, .15), uniform(.1, .15)]) + scatter_obj = butil.spawn_vert("scatter:" + "ivy") + surface.add_geomod( + scatter_obj, geo_base_selection, apply=True, input_args=[obj, selection, 0.05] + ) + + def end_index(nw): + return nw.compare( + "EQUAL", + nw.new_node(Nodes.Index), + np.random.randint(len(scatter_obj.data.vertices)), + ) + + def weight(nw): + return nw.scalar_multiply( + nw.uniform(0.8, 1), + nw.scalar_sub( + 2, + nw.math( + "ABSOLUTE", + nw.dot( + nw.vector_math( + "NORMALIZE", + nw.sub(*nw.new_node(Nodes.InputEdgeVertices).outputs[2:]), + ), + (0, 0, 1), + ), + ), + ), + ) + + surface.add_geomod( + scatter_obj, + geo_shortest_path, + apply=True, + input_args=[end_index, weight, uniform(0.1, 0.15), uniform(0.1, 0.15)], + ) fix_tree(scatter_obj) - surface.add_geomod(scatter_obj, geo_radius, apply=True, input_args=[.005, 12]) + surface.add_geomod(scatter_obj, geo_radius, apply=True, input_args=[0.005, 12]) assign_material(scatter_obj, shaderfunc_to_material(shader_simple_brown)) surface.add_geomod(scatter_obj, geo_leaf, apply=True, input_args=[col]) diff --git a/infinigen/assets/scatters/jellyfish.py b/infinigen/assets/scatters/jellyfish.py index 91381fa76..0e4d14210 100644 --- a/infinigen/assets/scatters/jellyfish.py +++ b/infinigen/assets/scatters/jellyfish.py @@ -7,29 +7,38 @@ import numpy as np from numpy.random import uniform as U -from infinigen.assets.creatures.jellyfish import JellyfishFactory +from infinigen.assets.objects.creatures.jellyfish import JellyfishFactory from infinigen.core.nodes.node_wrangler import NodeWrangler -from infinigen.core.placement.factory import AssetFactory, make_asset_collection +from infinigen.core.placement.factory import make_asset_collection from infinigen.core.placement.instance_scatter import scatter_instances -def apply(obj, scale=1, density=1., n=6, selection=None): +def apply(obj, scale=1, density=1.0, n=6, selection=None): n_species = np.random.randint(2, 3) factories = list(JellyfishFactory(np.random.randint(1e5)) for i in range(n_species)) - jellyfish = make_asset_collection(factories, name='jellyfish', - weights=np.random.uniform(0.5, 1, len(factories)), n=n, - verbose=True) + jellyfish = make_asset_collection( + factories, + name="jellyfish", + weights=np.random.uniform(0.5, 1, len(factories)), + n=n, + verbose=True, + ) def ground_offset(nw: NodeWrangler): return nw.uniform(4 * scale, 8 * scale) r = np.pi / 3 scatter_obj = scatter_instances( - base_obj=obj, collection=jellyfish, - density=density, min_spacing=scale * 4, - scale=scale, scale_rand=U(0.2, 0.9), - ground_offset=ground_offset, selection=selection, + base_obj=obj, + collection=jellyfish, + density=density, + min_spacing=scale * 4, + scale=scale, + scale_rand=U(0.2, 0.9), + ground_offset=ground_offset, + selection=selection, normal_fac=0.0, - rotation_offset=lambda nw: nw.uniform((-r, 0, 0), (r, 0, 0)), reset_children=False, + rotation_offset=lambda nw: nw.uniform((-r, 0, 0), (r, 0, 0)), + reset_children=False, ) return scatter_obj, jellyfish diff --git a/infinigen/assets/scatters/lichen.py b/infinigen/assets/scatters/lichen.py index aff9f28ef..72d98c6a4 100644 --- a/infinigen/assets/scatters/lichen.py +++ b/infinigen/assets/scatters/lichen.py @@ -4,51 +4,42 @@ # Authors: Lingjie Mei -from functools import reduce - -import bpy -import colorsys import numpy as np -from 
numpy.random import uniform, normal as N +from numpy.random import normal as N -from infinigen.assets.utils.misc import assign_material -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.placement.factory import AssetFactory, make_asset_collection +from infinigen.assets.objects.particles import LichenFactory +from infinigen.core.placement.factory import make_asset_collection from infinigen.core.placement.instance_scatter import scatter_instances -from infinigen.core import surface -from infinigen.core.placement.factory import AssetFactory -from infinigen.infinigen_gpl.extras.diff_growth import build_diff_growth -from infinigen.assets.utils.object import data2mesh -from infinigen.assets.utils.mesh import polygon_angles -from infinigen.core.util import blender as butil -from infinigen.core.tagging import tag_object, tag_nodegroup -from infinigen.assets.debris import LichenFactory class Lichen: - def __init__(self): self.fac = LichenFactory(np.random.randint(1e5)) - self.col = make_asset_collection(self.fac, name='lichen', n=5) + self.col = make_asset_collection(self.fac, name="lichen", n=5) def apply(self, obj, selection=None): - scatter_obj = scatter_instances( - base_obj=obj, collection=self.col, - density=5e3, min_spacing=.08, - scale=1, scale_rand=N(0.5, 0.07), - selection=selection + base_obj=obj, + collection=self.col, + density=5e3, + min_spacing=0.08, + scale=1, + scale_rand=N(0.5, 0.07), + selection=selection, ) return scatter_obj def apply(obj, selection=None): fac = LichenFactory(np.random.randint(1e5)) - col = make_asset_collection(fac, name='lichen', n=5) + col = make_asset_collection(fac, name="lichen", n=5) scatter_obj = scatter_instances( - base_obj=obj, collection=col, - density=5e3, min_spacing=.08, - scale=1, scale_rand=N(0.5, 0.07), - selection=selection + base_obj=obj, + collection=col, + density=5e3, + min_spacing=0.08, + scale=1, + scale_rand=N(0.5, 0.07), + selection=selection, ) return scatter_obj diff --git a/infinigen/assets/scatters/mollusk.py b/infinigen/assets/scatters/mollusk.py index d806cbf37..d56ad3134 100644 --- a/infinigen/assets/scatters/mollusk.py +++ b/infinigen/assets/scatters/mollusk.py @@ -6,30 +6,41 @@ import numpy as np -from infinigen.assets.mollusk import MolluskFactory -from infinigen.assets.utils.misc import CountInstance, toggle_hide -from infinigen.core.placement.factory import AssetFactory, make_asset_collection -from infinigen.core.util import blender as butil -from infinigen.core.nodes import node_utils +from infinigen.assets.objects.mollusk import MolluskFactory +from infinigen.assets.utils.misc import CountInstance +from infinigen.core.placement.factory import make_asset_collection from infinigen.core.placement.instance_scatter import scatter_instances -from infinigen.core import surface -def apply(obj, scale=0.4, density=1., n=10, selection=None): - with CountInstance('mollusk'): +def apply(obj, scale=0.4, density=1.0, n=10, selection=None): + with CountInstance("mollusk"): n_species = np.random.randint(4, 6) - factories = list(MolluskFactory(np.random.randint(1e5)) for _ in range(n_species)) - mollusk = make_asset_collection(factories, name='mollusk', - weights=np.random.uniform(0.5, 1, len(factories)), n=n, - verbose=True) + factories = list( + MolluskFactory(np.random.randint(1e5)) for _ in range(n_species) + ) + mollusk = make_asset_collection( + factories, + name="mollusk", + weights=np.random.uniform(0.5, 1, len(factories)), + n=n, + verbose=True, + ) def scaling(nw): - return nw.uniform([.4 * 
scale] * 3, [.8 * scale] * 3, data_type='FLOAT_VECTOR') - - scatter_obj = scatter_instances('mollusk', - base_obj=obj, collection=mollusk, - density=density, scaling=scaling, - min_spacing=scale, normal=(0,0,1), - selection=selection, taper_density=True) + return nw.uniform( + [0.4 * scale] * 3, [0.8 * scale] * 3, data_type="FLOAT_VECTOR" + ) + + scatter_obj = scatter_instances( + "mollusk", + base_obj=obj, + collection=mollusk, + density=density, + scaling=scaling, + min_spacing=scale, + normal=(0, 0, 1), + selection=selection, + taper_density=True, + ) return scatter_obj, mollusk diff --git a/infinigen/assets/scatters/monocot.py b/infinigen/assets/scatters/monocots.py similarity index 57% rename from infinigen/assets/scatters/monocot.py rename to infinigen/assets/scatters/monocots.py index 78ee7ebbd..9496a26bd 100644 --- a/infinigen/assets/scatters/monocot.py +++ b/infinigen/assets/scatters/monocots.py @@ -7,24 +7,27 @@ import numpy as np from numpy.random import uniform as U -from infinigen.assets.monocot.generate import MonocotFactory -from infinigen.core.nodes.node_wrangler import NodeWrangler -from infinigen.core.placement.instance_scatter import scatter_instances -from infinigen.core.placement.factory import AssetFactory, make_asset_collection +from infinigen.assets.objects.monocot.generate import MonocotFactory from infinigen.assets.scatters.utils.wind import wind +from infinigen.core.placement.factory import make_asset_collection +from infinigen.core.placement.instance_scatter import scatter_instances + def apply(obj, n=4, grass=None, selection=None, **kwargs): - monocots = make_asset_collection( - MonocotFactory(np.random.randint(1e5), grass=grass), - n=n, verbose=True, **kwargs) + MonocotFactory(np.random.randint(1e5), grass=grass), n=n, verbose=True, **kwargs + ) scatter_obj = scatter_instances( - base_obj=obj, collection=monocots, - vol_density=U(0.2, 4), min_spacing=0.1, + base_obj=obj, + collection=monocots, + vol_density=U(0.2, 4), + min_spacing=0.1, ground_offset=(0, 0, -0.05), - scale=U(0.05, 0.4), scale_rand=U(0.5, 0.95), + scale=U(0.05, 0.4), + scale_rand=U(0.5, 0.95), rotation_offset=wind(strength=20), normal_fac=0.3, - selection=selection) + selection=selection, + ) return scatter_obj, monocots diff --git a/infinigen/assets/scatters/moss.py b/infinigen/assets/scatters/moss.py index 2e39692cf..75b6e086a 100644 --- a/infinigen/assets/scatters/moss.py +++ b/infinigen/assets/scatters/moss.py @@ -6,35 +6,47 @@ import numpy as np from numpy.random import uniform as U -from infinigen.core.placement.instance_scatter import scatter_instances +from infinigen.assets.objects.particles import MossFactory from infinigen.assets.utils.misc import assign_material -from infinigen.core.placement.factory import make_asset_collection -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler from infinigen.core import surface +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.placement.factory import make_asset_collection from infinigen.core.placement.instance_scatter import scatter_instances -from infinigen.assets.debris import MossFactory class MossCover: - def __init__(self): - self.col = make_asset_collection(MossFactory(np.random.randint(1e5)), name='moss', n=3) - base_hue = U(.24, .28) + self.col = make_asset_collection( + MossFactory(np.random.randint(1e5)), name="moss", n=3 + ) + base_hue = U(0.24, 0.28) for o in self.col.objects: - assign_material(o, surface.shaderfunc_to_material(MossFactory.shader_moss, - (base_hue + U(-.02, .02)) 
% 1)) + assign_material( + o, + surface.shaderfunc_to_material( + MossFactory.shader_moss, (base_hue + U(-0.02, 0.02)) % 1 + ), + ) def apply(self, obj, selection=None): - def instance_index(nw: NodeWrangler, n): - return nw.math('MODULO', - nw.new_node(Nodes.FloatToInt, [nw.scalar_multiply(nw.musgrave(10), 2 * n)]), n) + return nw.math( + "MODULO", + nw.new_node( + Nodes.FloatToInt, [nw.scalar_multiply(nw.musgrave(10), 2 * n)] + ), + n, + ) scatter_obj = scatter_instances( - base_obj=obj, collection=self.col, - density=2e4, min_spacing=.005, - scale=1, scale_rand=U(0.3, 0.7), + base_obj=obj, + collection=self.col, + density=2e4, + min_spacing=0.005, + scale=1, + scale_rand=U(0.3, 0.7), selection=selection, - instance_index=instance_index) + instance_index=instance_index, + ) return scatter_obj diff --git a/infinigen/assets/scatters/mushroom.py b/infinigen/assets/scatters/mushroom.py index 8b03640c2..29edf0cfa 100644 --- a/infinigen/assets/scatters/mushroom.py +++ b/infinigen/assets/scatters/mushroom.py @@ -6,25 +6,26 @@ from collections.abc import Iterable -import bpy import bmesh +import bpy import numpy as np from mathutils import Matrix from numpy.random import uniform -from infinigen.assets.mushroom import MushroomFactory -from infinigen.core.util import blender as butil -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.assets.objects.mushroom import MushroomFactory from infinigen.core import surface +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.util import blender as butil -def geo_skeleton(nw: NodeWrangler, base_obj, selection, threshold=.05): - geometry = nw.new_node(Nodes.ObjectInfo, [base_obj], attrs={'transform_space': 'RELATIVE'}).outputs[ - 'Geometry'] +def geo_skeleton(nw: NodeWrangler, base_obj, selection, threshold=0.05): + geometry = nw.new_node( + Nodes.ObjectInfo, [base_obj], attrs={"transform_space": "RELATIVE"} + ).outputs["Geometry"] selection = surface.eval_argument(nw, selection) geometry = nw.new_node(Nodes.SeparateGeometry, [geometry, selection]) geometry = nw.new_node(Nodes.MergeByDistance, [geometry, None, threshold]) - nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': geometry}) + nw.new_node(Nodes.GroupOutput, input_kwargs={"Geometry": geometry}) def apply(objs, selection=None, **kwargs): @@ -37,32 +38,50 @@ def apply(objs, selection=None, **kwargs): objs = [objs] if len(objs) == 0: return - selections = selection if isinstance(selection, Iterable) else [selection] * len(objs) + selections = ( + selection if isinstance(selection, Iterable) else [selection] * len(objs) + ) - for obj, selection in zip(objs,selections): - temp_obj = butil.spawn_vert('temp') - surface.add_geomod(temp_obj, geo_skeleton, apply=True, input_args=[obj, selection]) - with butil.ViewportMode(temp_obj, 'EDIT'): + for obj, selection in zip(objs, selections): + temp_obj = butil.spawn_vert("temp") + surface.add_geomod( + temp_obj, geo_skeleton, apply=True, input_args=[obj, selection] + ) + with butil.ViewportMode(temp_obj, "EDIT"): bm = bmesh.from_edit_mesh(temp_obj.data) bm.verts.ensure_lookup_table() selected = np.random.choice(bm.verts, np.random.randint(2, 5)) rotations, start_locs, directions = [], [], [] for v in selected: - normal_ratio = uniform(.4, .6) + normal_ratio = uniform(0.4, 0.6) v: bmesh.types.BMVert for e in v.link_edges: obj = e.other_vert(v) if len(e.link_faces) == 2: direction = np.array(obj.co - v.co) direction = direction / np.linalg.norm(direction) - normal = np.mean(np.array([f.normal 
for f in e.link_faces]), - 0) * normal_ratio + np.array( - [0, 0, 1 - normal_ratio]) + direction * uniform(.2, .5) + normal = ( + np.mean(np.array([f.normal for f in e.link_faces]), 0) + * normal_ratio + + np.array([0, 0, 1 - normal_ratio]) + + direction * uniform(0.2, 0.5) + ) normal = normal / np.linalg.norm(normal) perp_direction = direction - np.dot(direction, normal) * normal perp_direction = perp_direction / np.linalg.norm(perp_direction) - rotation = np.array(Matrix(np.stack([perp_direction, np.cross(normal, perp_direction), - normal])).transposed().to_euler()) + rotation = np.array( + Matrix( + np.stack( + [ + perp_direction, + np.cross(normal, perp_direction), + normal, + ] + ) + ) + .transposed() + .to_euler() + ) rotations.append(rotation) start_locs.append(np.array(v.co)) directions.append(direction) @@ -72,10 +91,11 @@ def apply(objs, selection=None, **kwargs): mushrooms, keypoints = mushroom_keypoints[factory_index] indices = np.random.randint(0, len(mushrooms), len(rotations)) augmented = [keypoints[i] for i in indices] - locations, rotations, scales = factories[factory_index].find_closest(augmented, rotations, start_locs, - directions) + locations, rotations, scales = factories[factory_index].find_closest( + augmented, rotations, start_locs, directions + ) - scatter_obj = butil.spawn_vert('asset:mushroom') + scatter_obj = butil.spawn_vert("asset:mushroom") for i, l, r, s in zip(indices, locations, rotations, scales): with butil.SelectObjects(mushrooms[i]): bpy.ops.object.duplicate(linked=True) @@ -86,7 +106,9 @@ def apply(objs, selection=None, **kwargs): objs.parent = scatter_obj scattered_objects.append(scatter_obj) - col = butil.group_in_collection(base_mushrooms, name=f'assets:base_mushroom', reuse=False) + col = butil.group_in_collection( + base_mushrooms, name="assets:base_mushroom", reuse=False + ) col.hide_viewport = True col.hide_render = True return scattered_objects, col diff --git a/infinigen/assets/scatters/pebbles.py b/infinigen/assets/scatters/pebbles.py index 0cc8f4670..3aefd2518 100644 --- a/infinigen/assets/scatters/pebbles.py +++ b/infinigen/assets/scatters/pebbles.py @@ -4,30 +4,31 @@ # Authors: Alexander Raistrick -import bpy -from mathutils import Vector import numpy as np from numpy.random import uniform as U -from infinigen.core.nodes.node_wrangler import Nodes +from infinigen.assets.objects.rocks.blender_rock import BlenderRockFactory +from infinigen.core import surface +from infinigen.core.placement.factory import make_asset_collection from infinigen.core.placement.instance_scatter import scatter_instances -from infinigen.core.placement.factory import AssetFactory, make_asset_collection -from infinigen.core import surface -from infinigen.core.util import blender as butil -from infinigen.assets.rocks.blender_rock import BlenderRockFactory def apply(obj, n=5, detail=3, selection=None, **kwargs): - fac = BlenderRockFactory(np.random.randint(1e5), detail=detail) rocks = make_asset_collection(fac, n=n) - surface.registry('rock_collection').apply(list(rocks.objects)) + surface.registry("rock_collection").apply(list(rocks.objects)) scatter_obj = scatter_instances( - base_obj=obj, collection=rocks, - vol_density=U(0.05, 0.4), ground_offset=0.03, - scale=U(0.05, 1), scale_rand=U(0.75, 0.95), scale_rand_axi=U(0.4, 0.6), - selection=selection, taper_density=True) + base_obj=obj, + collection=rocks, + vol_density=U(0.05, 0.4), + ground_offset=0.03, + scale=U(0.05, 1), + scale_rand=U(0.75, 0.95), + scale_rand_axi=U(0.4, 0.6), + selection=selection, + 
taper_density=True, + ) return scatter_obj, rocks diff --git a/infinigen/assets/scatters/pine_needle.py b/infinigen/assets/scatters/pine_needle.py index 1fed0c2c5..80efd578b 100644 --- a/infinigen/assets/scatters/pine_needle.py +++ b/infinigen/assets/scatters/pine_needle.py @@ -4,32 +4,33 @@ # Authors: Lingjie Mei -import colorsys - -import bpy -import mathutils import numpy as np from numpy.random import uniform as U +from infinigen.assets.objects.particles import PineNeedleFactory from infinigen.core.placement.factory import make_asset_collection from infinigen.core.placement.instance_scatter import scatter_instances -from infinigen.assets.debris import PineNeedleFactory def apply(obj, scale=1, density=2e3, n=3, selection=None): n_species = np.random.randint(2, 3) factories = [PineNeedleFactory(np.random.randint(1e5)) for i in range(n_species)] - pine_needle = make_asset_collection(factories, - weights=U(0.5, 1, len(factories)), n=n, - verbose=True) - + pine_needle = make_asset_collection( + factories, weights=U(0.5, 1, len(factories)), n=n, verbose=True + ) + d = np.deg2rad(U(5, 15)) scatter_obj = scatter_instances( - base_obj=obj, collection=pine_needle, - vol_density=U(0.01, 0.03), rotation_offset=lambda nw: nw.uniform((-d,)*3, (d,)*3), + base_obj=obj, + collection=pine_needle, + vol_density=U(0.01, 0.03), + rotation_offset=lambda nw: nw.uniform((-d,) * 3, (d,) * 3), ground_offset=lambda nw: nw.uniform(0, 0.015), - scale=U(2, 3), scale_rand=U(0.4, 0.8), scale_rand_axi=U(0.3, 0.7), - selection=selection, taper_density=True + scale=U(2, 3), + scale_rand=U(0.4, 0.8), + scale_rand_axi=U(0.3, 0.7), + selection=selection, + taper_density=True, ) return scatter_obj, pine_needle diff --git a/infinigen/assets/scatters/pinecone.py b/infinigen/assets/scatters/pinecone.py index 9b0d1b19b..9de858d5c 100644 --- a/infinigen/assets/scatters/pinecone.py +++ b/infinigen/assets/scatters/pinecone.py @@ -7,30 +7,35 @@ import numpy as np from numpy.random import uniform as U -from infinigen.assets.monocot.pinecone import PineconeFactory -from infinigen.core.nodes.node_wrangler import NodeWrangler -from infinigen.core.placement.factory import AssetFactory, make_asset_collection +from infinigen.assets.objects.monocot.pinecone import PineconeFactory +from infinigen.assets.scatters.chopped_trees import approx_settle_transform +from infinigen.core.placement.factory import make_asset_collection from infinigen.core.placement.instance_scatter import scatter_instances -from infinigen.assets.scatters.chopped_trees import approx_settle_transform def apply(obj, n=5, selection=None): n_species = np.random.randint(2, 3) factories = [PineconeFactory(np.random.randint(1e5)) for i in range(n_species)] pinecones = make_asset_collection( - factories, n=n, verbose=True, - weights=np.random.uniform(0.5, 1, len(factories))) - + factories, n=n, verbose=True, weights=np.random.uniform(0.5, 1, len(factories)) + ) + for o in pinecones.objects: approx_settle_transform(o, samples=30) d = np.deg2rad(90) ar = np.deg2rad(20) scatter_obj = scatter_instances( - base_obj=obj, collection=pinecones, - vol_density=U(0.05, 0.25), min_spacing=0.05, - rotation_offset=lambda nw: nw.uniform((d-ar, -ar, -ar), (d+ar, ar, ar)), - scale=U(0.05, 0.8), scale_rand=U(0.2, 0.8), scale_rand_axi=U(0, 0.1), - selection=selection, taper_density=True) + base_obj=obj, + collection=pinecones, + vol_density=U(0.05, 0.25), + min_spacing=0.05, + rotation_offset=lambda nw: nw.uniform((d - ar, -ar, -ar), (d + ar, ar, ar)), + scale=U(0.05, 0.8), + scale_rand=U(0.2, 
0.8), + scale_rand_axi=U(0, 0.1), + selection=selection, + taper_density=True, + ) return scatter_obj, pinecones diff --git a/infinigen/assets/scatters/seashells.py b/infinigen/assets/scatters/seashells.py index 4f9a51943..d9945ccc0 100644 --- a/infinigen/assets/scatters/seashells.py +++ b/infinigen/assets/scatters/seashells.py @@ -5,32 +5,41 @@ import numpy as np -from numpy.random import uniform as U, uniform +from numpy.random import uniform +from numpy.random import uniform as U -from infinigen.assets.mollusk import MolluskFactory -from infinigen.core.placement.factory import AssetFactory, make_asset_collection +from infinigen.assets.objects.mollusk import MolluskFactory +from infinigen.core.placement.factory import make_asset_collection from infinigen.core.placement.instance_scatter import scatter_instances -from infinigen.assets.scatters.chopped_trees import approx_settle_transform - from infinigen.core.util.random import random_general as rg -def apply(obj, density=('uniform', 0.2, 1.), n=10, selection=None): + +def apply(obj, density=("uniform", 0.2, 1.0), n=10, selection=None): n_species = np.random.randint(4, 6) factories = list(MolluskFactory(np.random.randint(1e5)) for _ in range(n_species)) mollusk = make_asset_collection( - factories, name='mollusk', verbose=True, - weights=np.random.uniform(0.5, 1, len(factories)), n=n, face_size=.02) + factories, + name="mollusk", + verbose=True, + weights=np.random.uniform(0.5, 1, len(factories)), + n=n, + face_size=0.02, + ) - #for o in mollusk.objects: + # for o in mollusk.objects: # approx_settle_transform(o, samples=30) - scale = uniform(.3, .5) + scale = uniform(0.3, 0.5) scatter_obj = scatter_instances( - base_obj=obj, collection=mollusk, + base_obj=obj, + collection=mollusk, vol_density=rg(density), - scale=scale, scale_rand=U(0.5, 0.9), scale_rand_axi=U(0.1, 0.5), - selection=selection, taper_density=True, - ground_offset=lambda nw: nw.uniform(0, scale) + scale=scale, + scale_rand=U(0.5, 0.9), + scale_rand_axi=U(0.1, 0.5), + selection=selection, + taper_density=True, + ground_offset=lambda nw: nw.uniform(0, scale), ) return scatter_obj, mollusk diff --git a/infinigen/assets/scatters/seaweed.py b/infinigen/assets/scatters/seaweed.py index 265649a62..ec8c2fade 100644 --- a/infinigen/assets/scatters/seaweed.py +++ b/infinigen/assets/scatters/seaweed.py @@ -7,24 +7,33 @@ import numpy as np from numpy.random import uniform as U -from infinigen.assets.underwater.seaweed import SeaweedFactory -from infinigen.core.nodes.node_wrangler import NodeWrangler +from infinigen.assets.objects.underwater.seaweed import SeaweedFactory +from infinigen.core.placement.factory import make_asset_collection from infinigen.core.placement.instance_scatter import scatter_instances -from infinigen.core.placement.factory import AssetFactory, make_asset_collection -def apply(obj, scale=1, density=1., n=5, selection=None, **kwargs): +def apply(obj, scale=1, density=1.0, n=5, selection=None, **kwargs): n_species = np.random.randint(2, 5) factories = [SeaweedFactory(np.random.randint(1e5)) for i in range(n_species)] - seaweeds = make_asset_collection(factories, name='seaweed', - weights=np.random.uniform(0.5, 1, len(factories)), n=n, - verbose=True, **kwargs) + seaweeds = make_asset_collection( + factories, + name="seaweed", + weights=np.random.uniform(0.5, 1, len(factories)), + n=n, + verbose=True, + **kwargs, + ) scatter_obj = scatter_instances( - base_obj=obj, collection=seaweeds, - vol_density=U(2, 10), min_spacing=0.02, - scale=U(0.2, 1), scale_rand=U(0.1, 
0.9), scale_rand_axi=U(0, 0.2), + base_obj=obj, + collection=seaweeds, + vol_density=U(2, 10), + min_spacing=0.02, + scale=U(0.2, 1), + scale_rand=U(0.1, 0.9), + scale_rand_axi=U(0, 0.2), normal_fac=0.3, - selection=selection) - + selection=selection, + ) + return scatter_obj, seaweeds diff --git a/infinigen/assets/scatters/slime_mold.py b/infinigen/assets/scatters/slime_mold.py index cafbf825a..c4342192a 100644 --- a/infinigen/assets/scatters/slime_mold.py +++ b/infinigen/assets/scatters/slime_mold.py @@ -4,7 +4,6 @@ # Authors: Lingjie Mei -import colorsys import numpy as np from numpy.random import uniform @@ -12,48 +11,78 @@ from infinigen.assets.utils.misc import assign_material from infinigen.assets.utils.nodegroup import geo_base_selection, geo_radius from infinigen.assets.utils.shortest_path import geo_shortest_path +from infinigen.core import surface from infinigen.core.nodes.node_info import Nodes +from infinigen.core.nodes.node_utils import build_color_ramp from infinigen.core.nodes.node_wrangler import NodeWrangler -from infinigen.core import surface -from infinigen.core.util.color import hsv2rgba from infinigen.core.surface import shaderfunc_to_material from infinigen.core.util import blender as butil -from infinigen.core.nodes.node_utils import build_color_ramp +from infinigen.core.util.color import hsv2rgba def shader_mold(nw: NodeWrangler, base_hue): - bright_color = hsv2rgba((base_hue + uniform(-.04, .04)) % 1, uniform(.8, 1.), .8) - dark_color = hsv2rgba(base_hue, uniform(.4, .6), .2) + bright_color = hsv2rgba( + (base_hue + uniform(-0.04, 0.04)) % 1, uniform(0.8, 1.0), 0.8 + ) + dark_color = hsv2rgba(base_hue, uniform(0.4, 0.6), 0.2) - color = build_color_ramp(nw, nw.musgrave(10), [.0, .3, .7, 1.], - [dark_color, dark_color, bright_color, bright_color]) - roughness = .8 - bsdf = nw.new_node(Nodes.PrincipledBSDF, input_kwargs={'Base Color': color, 'Roughness': roughness}) + color = build_color_ramp( + nw, + nw.musgrave(10), + [0.0, 0.3, 0.7, 1.0], + [dark_color, dark_color, bright_color, bright_color], + ) + roughness = 0.8 + bsdf = nw.new_node( + Nodes.PrincipledBSDF, input_kwargs={"Base Color": color, "Roughness": roughness} + ) return bsdf class SlimeMold: - def __init__(self): pass def apply(self, obj, selection=None): - scatter_obj = butil.spawn_vert('scatter:' + 'slime_mold') - surface.add_geomod(scatter_obj, geo_base_selection, apply=True, input_args=[obj, selection]) + scatter_obj = butil.spawn_vert("scatter:" + "slime_mold") + surface.add_geomod( + scatter_obj, geo_base_selection, apply=True, input_args=[obj, selection] + ) if len(scatter_obj.data.vertices) < 5: butil.delete(scatter_obj) return - end_index = lambda nw: nw.build_index_case(np.random.randint(0, len(scatter_obj.data.vertices), 40)) - weight = lambda nw: nw.build_float_curve(nw.new_node(Nodes.InputEdgeAngle).outputs['Signed Angle'], - [(0, .25), (.2, .4)]) + def end_index(nw): + return nw.build_index_case( + np.random.randint(0, len(scatter_obj.data.vertices), 40) + ) + + def weight(nw): + return nw.build_float_curve( + nw.new_node(Nodes.InputEdgeAngle).outputs["Signed Angle"], + [(0, 0.25), (0.2, 0.4)], + ) - surface.add_geomod(scatter_obj, geo_shortest_path, apply=True, input_args=[end_index, weight, .1, .02]) + surface.add_geomod( + scatter_obj, + geo_shortest_path, + apply=True, + input_args=[end_index, weight, 0.1, 0.02], + ) treeify(scatter_obj) - surface.add_geomod(scatter_obj, geo_radius, apply=True, input_args=[ - lambda nw: nw.build_float_curve(nw.new_node(Nodes.NamedAttribute, 
['spline_parameter']), - [(0, .008), (1, .015)]), 6]) - base_hue = uniform(.02, .16) + surface.add_geomod( + scatter_obj, + geo_radius, + apply=True, + input_args=[ + lambda nw: nw.build_float_curve( + nw.new_node(Nodes.NamedAttribute, ["spline_parameter"]), + [(0, 0.008), (1, 0.015)], + ), + 6, + ], + ) + base_hue = uniform(0.02, 0.16) assign_material(scatter_obj, shaderfunc_to_material(shader_mold, base_hue)) return scatter_obj diff --git a/infinigen/assets/scatters/snow_layer.py b/infinigen/assets/scatters/snow_layer.py index 7f70018f2..64ad02d21 100644 --- a/infinigen/assets/scatters/snow_layer.py +++ b/infinigen/assets/scatters/snow_layer.py @@ -5,21 +5,15 @@ import bpy -import mathutils -from numpy.random import uniform, normal - -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core import surface +from infinigen.core.tagging import tag_object from infinigen.core.util import blender as butil -from infinigen.core.nodes import node_utils -from infinigen.core.tagging import tag_object, tag_nodegroup class Snowlayer: def __init__(self): pass - + def apply(self, obj, **kwargs): bpy.context.scene.snow.height = 0.1 with butil.SelectObjects(obj): diff --git a/infinigen/assets/scatters/urchin.py b/infinigen/assets/scatters/urchin.py index 54fd536a9..5e1a30a47 100644 --- a/infinigen/assets/scatters/urchin.py +++ b/infinigen/assets/scatters/urchin.py @@ -7,28 +7,36 @@ import numpy as np from numpy.random import uniform as U -from infinigen.assets.underwater.urchin import UrchinFactory +from infinigen.assets.objects.underwater.urchin import UrchinFactory from infinigen.core.nodes.node_wrangler import NodeWrangler -from infinigen.core.placement.factory import AssetFactory, make_asset_collection +from infinigen.core.placement.factory import make_asset_collection from infinigen.core.placement.instance_scatter import scatter_instances def apply(obj, n=5, selection=None): n_species = np.random.randint(2, 3) factories = list(UrchinFactory(np.random.randint(1e5)) for i in range(n_species)) - urchin = make_asset_collection(factories, name='urchin', - weights=np.random.uniform(0.5, 1, len(factories)), n=n, - verbose=True) - + urchin = make_asset_collection( + factories, + name="urchin", + weights=np.random.uniform(0.5, 1, len(factories)), + n=n, + verbose=True, + ) + scale = U(0.1, 0.8) def ground_offset(nw: NodeWrangler): - return nw.uniform(.4 * scale, .8 * scale) + return nw.uniform(0.4 * scale, 0.8 * scale) scatter_obj = scatter_instances( - base_obj=obj, collection=urchin, - vol_density=U(0.5, 2), ground_offset=ground_offset, - scale=scale, scale_rand=U(0.2, 0.4), - selection=selection) + base_obj=obj, + collection=urchin, + vol_density=U(0.5, 2), + ground_offset=ground_offset, + scale=scale, + scale_rand=U(0.2, 0.4), + selection=selection, + ) return scatter_obj, urchin diff --git a/infinigen/assets/scatters/utils/cluster.py b/infinigen/assets/scatters/utils/cluster.py index 759fb2959..4ed8783ba 100644 --- a/infinigen/assets/scatters/utils/cluster.py +++ b/infinigen/assets/scatters/utils/cluster.py @@ -7,71 +7,138 @@ import numpy as np from numpy.random import uniform +from infinigen.core import surface from infinigen.core.nodes.node_info import Nodes from infinigen.core.nodes.node_wrangler import NodeWrangler -from infinigen.core.placement.instance_scatter import bucketed_instance, camera_cull_points -from infinigen.core import surface +from infinigen.core.placement.instance_scatter import ( + bucketed_instance, + camera_cull_points, +) def select_points(nw: 
NodeWrangler, geometry, density, selection, radius, min_distance): keypoint_density = density / 5 - keypoints = nw.new_node(Nodes.DistributePointsOnFaces, - input_kwargs={'Mesh': geometry, 'Selection': selection, 'Density': keypoint_density - }).outputs['Points'] - distance = nw.new_node(Nodes.Proximity, [keypoints], attrs={'target_element': 'POINTS'}).outputs['Distance'] - selection = nw.boolean_math('AND', nw.compare('LESS_THAN', distance, radius), selection) - points, normal = nw.new_node(Nodes.DistributePointsOnFaces, - input_kwargs={'Mesh': geometry, 'Selection': selection, 'Density': density - }).outputs[:2] + keypoints = nw.new_node( + Nodes.DistributePointsOnFaces, + input_kwargs={ + "Mesh": geometry, + "Selection": selection, + "Density": keypoint_density, + }, + ).outputs["Points"] + distance = nw.new_node( + Nodes.Proximity, [keypoints], attrs={"target_element": "POINTS"} + ).outputs["Distance"] + selection = nw.boolean_math( + "AND", nw.compare("LESS_THAN", distance, radius), selection + ) + points, normal = nw.new_node( + Nodes.DistributePointsOnFaces, + input_kwargs={"Mesh": geometry, "Selection": selection, "Density": density}, + ).outputs[:2] if min_distance > 0: - points = nw.new_node(Nodes.MergeByDistance, input_kwargs={'Geometry': points, 'Distance': min_distance}) + points = nw.new_node( + Nodes.MergeByDistance, + input_kwargs={"Geometry": points, "Distance": min_distance}, + ) return points, distance, normal -def instance_rotation(nw: NodeWrangler, normal, delta_normal=.1, z_rotation='musgrave'): - perturbed_normal = nw.new_node(Nodes.VectorRotate, input_kwargs={ - 'Vector': normal, - 'Rotation': nw.uniform([-delta_normal] * 3, [delta_normal] * 3) - }, attrs={'rotation_type': 'EULER_XYZ'}) - if z_rotation == 'musgrave': +def instance_rotation( + nw: NodeWrangler, normal, delta_normal=0.1, z_rotation="musgrave" +): + perturbed_normal = nw.new_node( + Nodes.VectorRotate, + input_kwargs={ + "Vector": normal, + "Rotation": nw.uniform([-delta_normal] * 3, [delta_normal] * 3), + }, + attrs={"rotation_type": "EULER_XYZ"}, + ) + if z_rotation == "musgrave": z_rotation = nw.scalar_multiply(nw.new_node(Nodes.MusgraveTexture), 2 * np.pi) - elif z_rotation == 'random': + elif z_rotation == "random": z_rotation = nw.uniform(0, 2 * np.pi) else: z_rotation = uniform(0, 2 * np.pi) - rotation = nw.new_node(Nodes.RotateEuler, input_kwargs={ - 'Rotation': nw.new_node(Nodes.AlignEulerToVector, input_kwargs={'Vector': perturbed_normal}, - attrs={'axis': 'Z'}), - 'Axis': perturbed_normal, - 'Angle': z_rotation - }, attrs={'type': 'AXIS_ANGLE'}) + rotation = nw.new_node( + Nodes.RotateEuler, + input_kwargs={ + "Rotation": nw.new_node( + Nodes.AlignEulerToVector, + input_kwargs={"Vector": perturbed_normal}, + attrs={"axis": "Z"}, + ), + "Axis": perturbed_normal, + "Angle": z_rotation, + }, + attrs={"type": "AXIS_ANGLE"}, + ) return rotation -def cluster_scatter(nw: NodeWrangler, base_obj, collection, density, instance_index=None, radius=.02, - min_distance=0., buckets=((10000, 0.0)), scaling=(1, 1, 1), normal=None, - selection=True, ground_offset=0, realize_instances=False, material=None, perturb_normal=.1, - z_rotation='musgrave', transform_space='ORIGINAL', reset_children=True): - geometry = nw.new_node(Nodes.ObjectInfo, [base_obj], attrs={'transform_space': transform_space}).outputs[ - 'Geometry'] +def cluster_scatter( + nw: NodeWrangler, + base_obj, + collection, + density, + instance_index=None, + radius=0.02, + min_distance=0.0, + buckets=((10000, 0.0)), + scaling=(1, 1, 1), + 
normal=None, + selection=True, + ground_offset=0, + realize_instances=False, + material=None, + perturb_normal=0.1, + z_rotation="musgrave", + transform_space="ORIGINAL", + reset_children=True, +): + geometry = nw.new_node( + Nodes.ObjectInfo, [base_obj], attrs={"transform_space": transform_space} + ).outputs["Geometry"] selection = surface.eval_argument(nw, selection, geometry=geometry) - points, distance, default_normal = select_points(nw, geometry, density, selection, radius, min_distance) + points, distance, default_normal = select_points( + nw, geometry, density, selection, radius, min_distance + ) if normal is None: normal = default_normal visible, vis_distance = camera_cull_points(nw) scale = surface.eval_argument(nw, scaling, distance=distance) rotation = instance_rotation(nw, normal, perturb_normal, z_rotation) - instanced = bucketed_instance(nw, points, collection, vis_distance, buckets, visible, scale, rotation, - instance_index, reset_children) + instanced = bucketed_instance( + nw, + points, + collection, + vis_distance, + buckets, + visible, + scale, + rotation, + instance_index, + reset_children, + ) if ground_offset != 0: - instanced = nw.new_node(Nodes.TranslateInstances, [instanced], input_kwargs={ - "Translation": nw.combine(0, 0, surface.eval_argument(nw, ground_offset)), - "Local Space": True - }) + instanced = nw.new_node( + Nodes.TranslateInstances, + [instanced], + input_kwargs={ + "Translation": nw.combine( + 0, 0, surface.eval_argument(nw, ground_offset) + ), + "Local Space": True, + }, + ) if realize_instances: instanced = nw.new_node(Nodes.RealizeInstances, [instanced]) if material is not None: - instanced = nw.new_node(Nodes.SetMaterial, input_kwargs={"Geometry": instanced, "Material": material}) - nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': instanced}) + instanced = nw.new_node( + Nodes.SetMaterial, + input_kwargs={"Geometry": instanced, "Material": material}, + ) + nw.new_node(Nodes.GroupOutput, input_kwargs={"Geometry": instanced}) diff --git a/infinigen/assets/scatters/utils/selection.py b/infinigen/assets/scatters/utils/selection.py index 67622b2c8..0da06a84a 100644 --- a/infinigen/assets/scatters/utils/selection.py +++ b/infinigen/assets/scatters/utils/selection.py @@ -9,21 +9,36 @@ from infinigen.core.nodes.node_info import Nodes from infinigen.core.nodes.node_wrangler import NodeWrangler -from infinigen.core.util.math import FixedSeed, int_hash -def scatter_lower(nw: NodeWrangler, height_range=(.5, 2), fill_range=(.0, .8), noise_scale=.4): +def scatter_lower( + nw: NodeWrangler, height_range=(0.5, 2), fill_range=(0.0, 0.8), noise_scale=0.4 +): height = uniform(*height_range) middle = height * uniform(*fill_range) - lower = nw.bernoulli(nw.build_float_curve(nw.separate(nw.new_node(Nodes.InputPosition))[-1], - [(0, 1), (middle, 1), (height, 0)])) - compare = nw.compare('GREATER_THAN', lower, - nw.new_node(Nodes.NoiseTexture, input_kwargs={'Scale': noise_scale}), ) + lower = nw.bernoulli( + nw.build_float_curve( + nw.separate(nw.new_node(Nodes.InputPosition))[-1], + [(0, 1), (middle, 1), (height, 0)], + ) + ) + compare = nw.compare( + "GREATER_THAN", + lower, + nw.new_node(Nodes.NoiseTexture, input_kwargs={"Scale": noise_scale}), + ) return compare -def scatter_upward(nw: NodeWrangler, normal_thresh=np.pi * .75, noise_scale=.4, noise_thresh=.3): - compare = nw.compare('GREATER_THAN', nw.new_node(Nodes.NoiseTexture, input_kwargs={'Scale': noise_scale}), - noise_thresh) - upward = nw.compare_direction('LESS_THAN', nw.new_node(Nodes.InputNormal), 
(0, 0, 1), normal_thresh) - return nw.boolean_math('AND', compare, upward) +def scatter_upward( + nw: NodeWrangler, normal_thresh=np.pi * 0.75, noise_scale=0.4, noise_thresh=0.3 +): + compare = nw.compare( + "GREATER_THAN", + nw.new_node(Nodes.NoiseTexture, input_kwargs={"Scale": noise_scale}), + noise_thresh, + ) + upward = nw.compare_direction( + "LESS_THAN", nw.new_node(Nodes.InputNormal), (0, 0, 1), normal_thresh + ) + return nw.boolean_math("AND", compare, upward) diff --git a/infinigen/assets/scatters/utils/wind.py b/infinigen/assets/scatters/utils/wind.py index 2dc550abb..94fcfcada 100644 --- a/infinigen/assets/scatters/utils/wind.py +++ b/infinigen/assets/scatters/utils/wind.py @@ -1,63 +1,82 @@ - # Copyright (c) Princeton University. # This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. # Authors: Yiming Zuo -from math import prod -from functools import partial - -import numpy as np -from numpy.random import uniform as U, normal as N, randint from mathutils import Vector +from numpy.random import normal as N from infinigen.core.nodes.node_wrangler import Nodes -def wind_rotation(nw, speed=1.0, direction=None, scale=1.0, strength=30): +def wind_rotation(nw, speed=1.0, direction=None, scale=1.0, strength=30): if direction is None: direction = Vector([N(0, 1), N(0, 1), 0]) - normalize_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: direction}, - attrs={'operation': 'NORMALIZE'}) - vector_rotate_1 = nw.new_node(Nodes.VectorRotate, - input_kwargs={'Vector': normalize_1.outputs["Vector"], 'Angle': 1.5708}) + normalize_1 = nw.new_node( + Nodes.VectorMath, input_kwargs={0: direction}, attrs={"operation": "NORMALIZE"} + ) + vector_rotate_1 = nw.new_node( + Nodes.VectorRotate, + input_kwargs={"Vector": normalize_1.outputs["Vector"], "Angle": 1.5708}, + ) position_2 = nw.new_node(Nodes.InputPosition) - scene_time = nw.new_node('GeometryNodeInputSceneTime') - t = nw.new_node(Nodes.Math, + scene_time = nw.new_node("GeometryNodeInputSceneTime") + t = nw.new_node( + Nodes.Math, input_kwargs={0: scene_time.outputs["Seconds"], 1: speed}, - attrs={'operation': 'MULTIPLY'}) - t = nw.new_node(Nodes.VectorMath, - input_kwargs={0: normalize_1.outputs["Vector"], 'Scale': t}, - attrs={'operation': 'SCALE'}) - t = nw.new_node(Nodes.VectorMath, - input_kwargs={0: position_2, 1: t.outputs["Vector"]}) - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': t.outputs["Vector"], 'Scale': scale}) - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: strength, 1: -0.2}, - attrs={'operation': 'MULTIPLY'}) - map_range_2 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': noise_texture.outputs["Fac"], 3: multiply_1, 4: strength}) - multiply_2 = nw.new_node(Nodes.Math, - input_kwargs={0: strength, 1: -0.2}, - attrs={'operation': 'MULTIPLY'}) - multiply_3 = nw.new_node(Nodes.Math, - input_kwargs={0: strength, 1: 0.2}, - attrs={'operation': 'MULTIPLY'}) - random_value_2 = nw.new_node(Nodes.RandomValue, - input_kwargs={2: multiply_2, 3: multiply_3, 'Seed': 1}) - add_1 = nw.new_node(Nodes.Math, - input_kwargs={0: map_range_2.outputs["Result"], 1: random_value_2.outputs[1]}) - deg2rad = nw.new_node(Nodes.Math, - input_kwargs={0: add_1, 1: 0.0175}, - attrs={'operation': 'MULTIPLY'}) - rotation = nw.new_node(Nodes.RotateEuler, - input_kwargs={'Rotation': Vector((0,0,0)), 'Axis': vector_rotate_1, 'Angle': deg2rad}, - attrs={'type': 'AXIS_ANGLE'}) + attrs={"operation": "MULTIPLY"}, + ) + t = nw.new_node( + 
Nodes.VectorMath, + input_kwargs={0: normalize_1.outputs["Vector"], "Scale": t}, + attrs={"operation": "SCALE"}, + ) + t = nw.new_node( + Nodes.VectorMath, input_kwargs={0: position_2, 1: t.outputs["Vector"]} + ) + noise_texture = nw.new_node( + Nodes.NoiseTexture, input_kwargs={"Vector": t.outputs["Vector"], "Scale": scale} + ) + multiply_1 = nw.new_node( + Nodes.Math, input_kwargs={0: strength, 1: -0.2}, attrs={"operation": "MULTIPLY"} + ) + map_range_2 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": noise_texture.outputs["Fac"], + 3: multiply_1, + 4: strength, + }, + ) + multiply_2 = nw.new_node( + Nodes.Math, input_kwargs={0: strength, 1: -0.2}, attrs={"operation": "MULTIPLY"} + ) + multiply_3 = nw.new_node( + Nodes.Math, input_kwargs={0: strength, 1: 0.2}, attrs={"operation": "MULTIPLY"} + ) + random_value_2 = nw.new_node( + Nodes.RandomValue, input_kwargs={2: multiply_2, 3: multiply_3, "Seed": 1} + ) + add_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: map_range_2.outputs["Result"], 1: random_value_2.outputs[1]}, + ) + deg2rad = nw.new_node( + Nodes.Math, input_kwargs={0: add_1, 1: 0.0175}, attrs={"operation": "MULTIPLY"} + ) + rotation = nw.new_node( + Nodes.RotateEuler, + input_kwargs={ + "Rotation": Vector((0, 0, 0)), + "Axis": vector_rotate_1, + "Angle": deg2rad, + }, + attrs={"type": "AXIS_ANGLE"}, + ) return rotation + def wind(*args, **kwargs): return lambda nw: wind_rotation(nw, *args, **kwargs) diff --git a/infinigen/assets/seating/bedframe.py b/infinigen/assets/seating/bedframe.py deleted file mode 100644 index d7029a2b3..000000000 --- a/infinigen/assets/seating/bedframe.py +++ /dev/null @@ -1,179 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Lingjie Mei -import bpy -import numpy as np -from numpy.random import uniform - -from infinigen.assets.seating.chairs.chair import ChairFactory -from infinigen.assets.seating.mattress import make_coiled -from infinigen.assets.utils.decorate import ( - subdivide_edge_ring, remove_faces, read_normal, read_co, write_co, - remove_vertices, select_faces, write_attribute, -) -from infinigen.assets.utils.object import new_grid, join_objects -from infinigen.core import surface -from infinigen.core.util.blender import deep_clone_obj -from infinigen.core.util.math import FixedSeed -from infinigen.core.util.random import log_uniform -from infinigen.core.util import blender as butil -from infinigen.core.util.random import random_general as rg -from infinigen.assets.material_assignments import AssetList - - -class BedFrameFactory(ChairFactory): - scale = 1. 
- leg_decor_types = 'weighted_choice', (2, 'coiled'), (2, 'pad'), (1, 'plain'), (2, 'legs') - back_types = 'weighted_choice', (3, 'coiled'), (3, 'pad'), (2, 'whole'), (1, 'horizontal-bar'), (1, 'vertical-bar') - - def __init__(self, factory_seed, coarse=False): - super().__init__(factory_seed, coarse) - with FixedSeed(self.factory_seed): - self.width = log_uniform(1.4, 2.4) - self.size = uniform(2, 2.4) - self.thickness = uniform(.05, .12) - self.has_all_legs = uniform() < .2 - self.leg_thickness = uniform(.08, .12) - self.leg_height = uniform(.2, .6) - self.leg_decor_type = rg(self.leg_decor_types) - self.leg_decor_wrapped = uniform() < .5 - self.back_height = uniform(.5, 1.3) - self.seat_back = 1 - self.seat_subdivisions_x = np.random.randint(1, 4) - self.seat_subdivisions_y = int(log_uniform(4, 10)) - self.has_arm = False - self.leg_type = 'vertical' - self.leg_x_offset = 0 - self.leg_y_offset = 0, 0 - self.back_x_offset = 0 - self.back_y_offset = 0 - - materials = AssetList['BedFrameFactory']() - self.surface = materials['surface'].assign_material() - self.limb_surface = materials['limb_surface'].assign_material() - - scratch_prob, edge_wear_prob = materials['wear_tear_prob'] - self.scratch, self.edge_wear = materials['wear_tear'] - self.scratch = None if uniform() > scratch_prob else self.scratch - self.edge_wear = None if uniform() > edge_wear_prob else self.edge_wear - - self.clothes_scatter = surface.NoApply - self.dot_distance = log_uniform(.16, .2) - self.dot_size = uniform(.005, .02) - self.dot_depth = uniform(.04, .08) - self.panel_distance = uniform(.3, .5) - self.panel_margin = uniform(.01, .02) - self.post_init() - - def make_seat(self): - obj = new_grid(x_subdivisions=self.seat_subdivisions_x, y_subdivisions=self.seat_subdivisions_y) - obj.scale = (self.width - self.leg_thickness) / 2, (self.size - self.leg_thickness) / 2, 1 - butil.apply_transform(obj, True) - with butil.ViewportMode(obj, 'EDIT'): - bpy.ops.mesh.select_all(action='SELECT') - bpy.ops.mesh.delete(type='ONLY_FACE') - bpy.ops.mesh.select_mode(type='EDGE') - bpy.ops.mesh.select_all(action='SELECT') - bpy.ops.mesh.extrude_edges_move(TRANSFORM_OT_translate={'value': (0, 0, self.thickness)}) - butil.modify_mesh(obj, 'SOLIDIFY', thickness=self.leg_thickness - 1e-3, offset=0, solidify_mode='NON_MANIFOLD') - obj.location = 0, -self.size / 2, -self.thickness / 2 - butil.apply_transform(obj, True) - butil.modify_mesh(obj, 'BEVEL', width=self.bevel_width, segments=8) - return obj - - def make_legs(self): - legs = super().make_legs() - if self.has_all_legs: - leg_starts = np.array( - [[-1, -.5, 0], [0, -1, 0], [0, 0, 0], [1, -.5, 0]] - ) * np.array( - [[self.width / 2, self.size, 0]] - ) - leg_ends = leg_starts.copy() - leg_ends[0, 0] -= self.leg_x_offset - leg_ends[3, 0] += self.leg_x_offset - leg_ends[2, 1] += self.leg_y_offset[0] - leg_ends[1, 1] -= self.leg_y_offset[1] - leg_ends[:, -1] = -self.leg_height - legs += self.make_limb(leg_ends, leg_starts) - return legs - - def make_leg_decors(self, legs): - if self.leg_decor_type == 'none': - return super().make_leg_decors(legs) - obj = join_objects([deep_clone_obj(_) for _ in legs]) - x, y, z = read_co(obj).T - z = np.maximum(z, -self.leg_height * uniform(.7, .9)) - write_co(obj, np.stack([x, y, z], -1)) - with butil.ViewportMode(obj, 'EDIT'): - bpy.ops.mesh.select_all(action='SELECT') - bpy.ops.mesh.convex_hull() - bpy.ops.mesh.normals_make_consistent(inside=False) - remove_faces(obj, np.abs(read_normal(obj)[:, -1]) > .5) - if self.leg_decor_wrapped: - x, y, z = 
read_co(obj).T - x[x < 0] -= self.leg_thickness / 2 + 1e-3 - x[x > 0] += self.leg_thickness / 2 + 1e-3 - y[y < -self.size / 2] -= self.leg_thickness / 2 + 1e-3 - y[y > -self.size / 2] += self.leg_thickness / 2 + 1e-3 - write_co(obj, np.stack([x, y, z], -1)) - match self.leg_decor_type: - case 'coiled': - self.divide(obj, self.dot_distance) - make_coiled(obj, self.dot_distance, self.dot_depth, self.dot_size) - case 'pad': - self.divide(obj, self.panel_distance) - with butil.ViewportMode(obj, 'EDIT'): - bpy.ops.mesh.select_all(action='SELECT') - bpy.ops.mesh.inset(thickness=self.panel_margin, depth=self.panel_margin, use_individual=True) - butil.modify_mesh(obj, 'BEVEL', segments=4) - write_attribute(obj, 1, 'panel', 'FACE') - return [obj] - - def divide(self, obj, distance): - for i, size in enumerate(obj.dimensions): - axis = np.zeros(3) - axis[i] = 1 - distance = distance if i != 2 else distance * uniform(.5, 1.) - subdivide_edge_ring(obj, int(np.ceil(size / distance)), axis) - - def make_back_decors(self, backs, finalize=True): - decors = super().make_back_decors(backs) - match self.back_type: - case 'coiled': - obj = self.make_back(backs) - self.divide(obj, self.dot_distance) - make_coiled(obj, self.dot_distance, self.dot_depth, self.dot_size) - obj.scale = (1 - 1e-3,) * 3 - write_attribute(obj, 1, 'panel', 'FACE') - with butil.ViewportMode(decors[0], 'EDIT'): - bpy.ops.mesh.select_all(action='SELECT') - bpy.ops.mesh.bisect(plane_co=(0, 0, self.back_height), plane_no=(0, 0, 1), clear_inner=True) - return [obj] + decors - case 'pad': - obj = self.make_back(backs) - self.divide(obj, self.panel_distance) - with butil.ViewportMode(obj, 'EDIT'): - select_faces(obj, np.abs(read_normal(obj)[:, 1]) > .5) - bpy.ops.mesh.inset(thickness=self.panel_margin, depth=self.panel_margin, use_individual=True) - butil.modify_mesh(obj, 'BEVEL', segments=4) - write_attribute(obj, 1, 'panel', 'FACE') - obj.scale = (1 - 1e-3,) * 3 - with butil.ViewportMode(decors[0], 'EDIT'): - bpy.ops.mesh.select_all(action='SELECT') - bpy.ops.mesh.bisect(plane_co=(0, 0, self.back_height), plane_no=(0, 0, 1), clear_inner=True) - return [obj] + decors - case _: - return decors - - def make_back(self, backs): - obj = join_objects([deep_clone_obj(b) for b in backs]) - with butil.ViewportMode(obj, 'EDIT'): - bpy.ops.mesh.select_all(action='SELECT') - bpy.ops.mesh.convex_hull() - butil.modify_mesh(obj, 'SOLIDIFY', thickness=np.minimum(self.thickness, self.leg_thickness), offset=0) - with butil.ViewportMode(obj, 'EDIT'): - bpy.ops.mesh.select_all(action='SELECT') - bpy.ops.mesh.normals_make_consistent(inside=False) - return obj diff --git a/infinigen/assets/seating/chairs/bar_chair.py b/infinigen/assets/seating/chairs/bar_chair.py deleted file mode 100644 index 733cb79ac..000000000 --- a/infinigen/assets/seating/chairs/bar_chair.py +++ /dev/null @@ -1,173 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
- -# Authors: Yiming Zuo - - -import bpy -from numpy.random import uniform, choice -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core import surface - -from infinigen.core.util.math import FixedSeed -from infinigen.core.placement.factory import AssetFactory -from infinigen.core import tagging, tags as t - -from infinigen.assets.seating.chairs.seats.round_seats import generate_round_seats - -from infinigen.assets.tables.cocktail_table import geometry_create_legs -from infinigen.assets.material_assignments import AssetList - -def geometry_assemble_chair(nw: NodeWrangler, **kwargs): - # Code generated using version 2.6.4 of the node_transpiler - generateseat = nw.new_node(generate_round_seats(thickness=kwargs['Top Thickness'], - radius=kwargs['Top Profile Width'], - seat_material=kwargs['SeatMaterial']).name) - - seat_instance = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': generateseat, - 'Translation': (0.0000, 0.0000, kwargs['Top Height'])}) - - legs = nw.new_node(geometry_create_legs(**kwargs).name) - - join_geometry = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [seat_instance, legs]}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': join_geometry}, attrs={'is_active_output': True}) - -class BarChairFactory(AssetFactory): - def __init__(self, factory_seed, coarse=False, dimensions=None): - super(BarChairFactory, self).__init__(factory_seed, coarse=coarse) - - self.dimensions = dimensions - - with FixedSeed(factory_seed): - self.params, leg_style = self.sample_parameters(dimensions) - self.material_params, self.scratch, self.edge_wear = self.get_material_params(leg_style) - - self.params.update(self.material_params) - - def get_material_params(self, leg_style): - material_assignments = AssetList['BarChairFactory'](leg_style=leg_style) - - params = { - "SeatMaterial": material_assignments['seat'].assign_material(), - "LegMaterial": material_assignments['leg'].assign_material(), - } - wrapped_params = { - k: surface.shaderfunc_to_material(v) for k, v in params.items() - } - - scratch_prob, edge_wear_prob = material_assignments['wear_tear_prob'] - scratch, edge_wear = material_assignments['wear_tear'] - - is_scratch = uniform() < scratch_prob - is_edge_wear = uniform() < edge_wear_prob - if not is_scratch: - scratch = None - - if not is_edge_wear: - edge_wear = None - - return wrapped_params, scratch, edge_wear - - @staticmethod - def sample_parameters(dimensions): - # all in meters - if dimensions is None: - x = uniform(0.35, 0.45) - z = uniform(0.7, 1) - dimensions = (x, x, z) - - x, y, z = dimensions - - top_thickness = uniform(0.06, 0.10) - - leg_style = choice(['straight', 'single_stand', 'wheeled']) - - parameters = { - 'Top Profile Width': x, - 'Top Thickness': top_thickness, - 'Height': z, - 'Top Height': z - top_thickness, - 'Leg Style': leg_style, - 'Leg NGon': choice([4, 32]), - 'Leg Placement Top Relative Scale': 0.7, - 'Leg Placement Bottom Relative Scale': uniform(1.1, 1.3), - 'Leg Height': 1.0, - } - - if leg_style == "single_stand": - leg_number = 1 - leg_diameter = uniform(0.7*x, 0.9*x) - - leg_curve_ctrl_pts = [(0.0, uniform(0.1, 0.2)), - (0.5, uniform(0.1, 0.2)), (0.9, uniform(0.2, 0.3)), (1.0, 1.0)] - - parameters.update({ - 'Leg Number': leg_number, - 'Leg Diameter': leg_diameter, - 'Leg Curve Control Points': leg_curve_ctrl_pts, - # 'Leg Material': choice(['metal', 'wood']) - }) - - elif leg_style == "straight": - leg_diameter = uniform(0.04, 0.06) - leg_number = choice([3, 4]) - - 
leg_curve_ctrl_pts = [(0.0, 1.0), (0.4, uniform(0.85, 0.95)), (1.0, uniform(0.4, 0.6))] - - parameters.update({ - 'Leg Number': leg_number, - 'Leg Diameter': leg_diameter, - 'Leg Curve Control Points': leg_curve_ctrl_pts, - # 'Leg Material': choice(['metal', 'wood']), - 'Strecher Relative Pos': uniform(0.6, 0.9), - 'Strecher Increament': choice([0, 1, 2]) - }) - - elif leg_style == "wheeled": - leg_diameter = uniform(0.03, 0.05) - leg_number = 1 - pole_number = choice([4, 5]) - joint_height = uniform(0.5, 0.8) * (z - top_thickness) - wheel_arc_sweep_angle = uniform(120, 240) - wheel_width = uniform(0.11, 0.15) - wheel_rot = uniform(0, 360) - pole_length = uniform(1.6, 2.0) - - parameters.update({ - 'Leg Number': leg_number, - 'Leg Pole Number': pole_number, - 'Leg Diameter': leg_diameter, - 'Leg Joint Height': joint_height, - 'Leg Wheel Arc Sweep Angle': wheel_arc_sweep_angle, - 'Leg Wheel Width': wheel_width, - 'Leg Wheel Rot': wheel_rot, - 'Leg Pole Length': pole_length, - # 'Leg Material': choice(['metal']) - }) - - else: - raise NotImplementedError - - - - return parameters, leg_style - - def create_asset(self, **params): - - bpy.ops.mesh.primitive_plane_add( - size=2, enter_editmode=False, align='WORLD', location=(0, 0, 0), scale=(1, 1, 1)) - obj = bpy.context.active_object - - surface.add_geomod(obj, geometry_assemble_chair, apply=True, input_kwargs=self.params) - tagging.tag_system.relabel_obj(obj) - - return obj - - def finalize_assets(self, assets): - if self.scratch: - self.scratch.apply(assets) - if self.edge_wear: - self.edge_wear.apply(assets) - diff --git a/infinigen/assets/seating/chairs/chair.py b/infinigen/assets/seating/chairs/chair.py deleted file mode 100644 index ed37dafdf..000000000 --- a/infinigen/assets/seating/chairs/chair.py +++ /dev/null @@ -1,364 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Lingjie Mei -import bpy -import numpy as np -from numpy.random import uniform - -from infinigen.assets.utils.decorate import ( - read_co, read_edge_center, read_edge_direction, remove_edges, - remove_vertices, select_edges, solidify, subsurf, write_attribute, write_co, -) -from infinigen.assets.utils.draw import align_bezier, bezier_curve -from infinigen.assets.utils.nodegroup import geo_radius -from infinigen.assets.utils.object import join_objects, new_bbox -from infinigen.core import surface -from infinigen.core.placement.factory import AssetFactory -from infinigen.core.surface import NoApply -from infinigen.core.util import blender as butil -from infinigen.core.util.blender import deep_clone_obj -from infinigen.core.util.math import FixedSeed, normalize -from infinigen.core.util.random import log_uniform -from infinigen.assets.material_assignments import AssetList - -from infinigen.core.util.random import random_general as rg - - -class ChairFactory(AssetFactory): - back_types = 'weighted_choice', (1, 'whole'), (1, 'partial'), (1, 'horizontal-bar'), (1, 'vertical-bar') - - def __init__(self, factory_seed, coarse=False): - super().__init__(factory_seed, coarse) - with FixedSeed(self.factory_seed): - self.width = uniform(.4, .5) - self.size = uniform(.38, .45) - self.thickness = uniform(.04, .08) - self.bevel_width = self.thickness * (.1 if uniform() < .4 else .5) - self.seat_back = uniform(.7, 1.) if uniform() < .75 else 1. 
- self.seat_mid = uniform(.7, .8) - self.seat_mid_x = uniform(self.seat_back + self.seat_mid * (1 - self.seat_back), 1) - self.seat_mid_z = uniform(0, .5) - self.seat_front = uniform(1., 1.2) - self.is_seat_round = uniform() < .6 - self.is_seat_subsurf = uniform() < .5 - - self.leg_thickness = uniform(.04, .06) - self.limb_profile = uniform(1.5, 2.5) - self.leg_height = uniform(.45, .5) - self.back_height = uniform(.4, .5) - self.is_leg_round = uniform() < .5 - self.leg_type = np.random.choice(['vertical', 'straight', 'up-curved', 'down-curved']) - - self.leg_x_offset = 0 - self.leg_y_offset = 0, 0 - self.back_x_offset = 0 - self.back_y_offset = 0 - - self.has_leg_x_bar = uniform() < .6 - self.has_leg_y_bar = uniform() < .6 - self.leg_offset_bar = uniform(.2, .4), uniform(.6, .8) - - self.has_arm = uniform() < 0.7 - self.arm_thickness = uniform(.04, .06) - self.arm_height = self.arm_thickness * uniform(.6, 1) - self.arm_y = uniform(.8, 1) * self.size - self.arm_z = uniform(.3, .6) * self.back_height - self.arm_mid = np.array([uniform(-.03, .03), uniform(-.03, .09), uniform(-.09, .03)]) - self.arm_profile = log_uniform(.1, 3, 2) - - self.back_thickness = uniform(.04, .05) - self.back_type = rg(self.back_types) - self.back_profile = [(0, 1)] - self.back_vertical_cuts = np.random.randint(1, 4) - self.back_partial_scale = uniform(1, 1.4) - - materials = AssetList['ChairFactory']() - self.limb_surface = materials['limb'].assign_material() - self.surface = materials['surface'].assign_material() - if uniform() < .3: - self.panel_surface = self.surface - else: - self.panel_surface = materials['panel'].assign_material() - - scratch_prob, edge_wear_prob = materials['wear_tear_prob'] - self.scratch, self.edge_wear = materials['wear_tear'] - is_scratch = uniform() < scratch_prob - is_edge_wear = uniform() < edge_wear_prob - if not is_scratch: - self.scratch = None - if not is_edge_wear: - self.edge_wear = None - - #from infinigen.assets.clothes import blanket - #from infinigen.assets.scatters.clothes import ClothesCover - #self.clothes_scatter = ClothesCover(factory_fn=blanket.BlanketFactory, width=log_uniform(.8, 1.2), - # size=uniform(.8, 1.2)) if uniform() < .3 else NoApply() - self.clothes_scatter = NoApply() - self.post_init() - - def post_init(self): - with FixedSeed(self.factory_seed): - if self.leg_type == 'vertical': - self.leg_x_offset = 0 - self.leg_y_offset = 0, 0 - self.back_x_offset = 0 - self.back_y_offset = 0 - else: - self.leg_x_offset = self.width * uniform(.05, .2) - self.leg_y_offset = self.size * uniform(.05, .2, 2) - self.back_x_offset = self.width * uniform(-.1, .15) - self.back_y_offset = self.size * uniform(.1, .25) - - match self.back_type: - case 'partial': - self.back_profile = (uniform(.4, .8), 1), - case 'horizontal-bar': - n_cuts = np.random.randint(2, 4) - locs = uniform(1, 2, n_cuts).cumsum() - locs = locs / locs[-1] - ratio = uniform(.5, .75) - locs = np.array([(p + ratio * (l - p), l) for p, l in zip([0, *locs[:-1]], locs)]) - lowest = uniform(0, .4) - self.back_profile = locs * (1 - lowest) + lowest - case 'vertical-bar': - self.back_profile = (uniform(.8, .9), 1), - case _: - self.back_profile = [(0, 1)] - - def create_placeholder(self, **kwargs) -> bpy.types.Object: - obj = new_bbox( - -self.width / 2 - max(self.leg_x_offset, self.back_x_offset), - self.width / 2 + max(self.leg_x_offset, self.back_x_offset), - -self.size - self.leg_y_offset[1] - self.leg_thickness * .5, - max(self.leg_y_offset[0], self.back_y_offset), - -self.leg_height, - self.back_height * 1.2 - 
) - obj.rotation_euler.z += np.pi / 2 - butil.apply_transform(obj) - return obj - - def create_asset(self, **params) -> bpy.types.Object: - - obj = self.make_seat() - legs = self.make_legs() - backs = self.make_backs() - - parts = [obj] + legs + backs - parts.extend(self.make_leg_decors(legs)) - if self.has_arm: - parts.extend(self.make_arms(obj, backs)) - parts.extend(self.make_back_decors(backs)) - - for obj in legs: - self.solidify(obj, 2) - for obj in backs: - self.solidify(obj, 2, self.back_thickness) - - obj = join_objects(parts) - obj.rotation_euler.z += np.pi / 2 - butil.apply_transform(obj) - - with FixedSeed(self.factory_seed): - # TODO: wasteful to create unique materials for each individual asset - self.surface.apply(obj) - self.panel_surface.apply(obj, selection='panel') - self.limb_surface.apply(obj, selection='limb') - - return obj - - def finalize_assets(self, assets): - if self.scratch: - self.scratch.apply(assets) - if self.edge_wear: - self.edge_wear.apply(assets) - - def make_seat(self): - x_anchors = np.array( - [0, -self.seat_back, -self.seat_mid_x, -1, 0, 1, self.seat_mid_x, self.seat_back, - 0] - ) * self.width / 2 - y_anchors = np.array([0, 0, -self.seat_mid, -1, -self.seat_front, -1, -self.seat_mid, 0, 0]) * self.size - z_anchors = np.array([0, 0, self.seat_mid_z, 0, 0, 0, self.seat_mid_z, 0, 0]) * self.thickness - vector_locations = [1, 7] if self.is_seat_round else [1, 3, 5, 7] - obj = bezier_curve((x_anchors, y_anchors, z_anchors), vector_locations, 8) - with butil.ViewportMode(obj, 'EDIT'): - bpy.ops.mesh.select_all(action='SELECT') - bpy.ops.mesh.fill_grid(use_interp_simple=True) - butil.modify_mesh(obj, 'SOLIDIFY', thickness=self.thickness, offset=0) - subsurf(obj, 1, not self.is_seat_subsurf) - butil.modify_mesh(obj, 'BEVEL', width=self.bevel_width, segments=8) - return obj - - def make_legs(self): - leg_starts = np.array( - [[-self.seat_back, 0, 0], [-1, -1, 0], [1, -1, 0], [self.seat_back, 0, 0]] - ) * np.array( - [[self.width / 2, self.size, 0]] - ) - leg_ends = leg_starts.copy() - leg_ends[[0, 1], 0] -= self.leg_x_offset - leg_ends[[2, 3], 0] += self.leg_x_offset - leg_ends[[0, 3], 1] += self.leg_y_offset[0] - leg_ends[[1, 2], 1] -= self.leg_y_offset[1] - leg_ends[:, -1] = -self.leg_height - return self.make_limb(leg_ends, leg_starts) - - def make_limb(self, leg_ends, leg_starts): - limbs = [] - for leg_start, leg_end in zip(leg_starts, leg_ends): - match self.leg_type: - case 'up-curved': - axes = [(0, 0, 1), None] - scale = [self.limb_profile, 1] - case 'down-curved': - axes = [None, (0, 0, 1)] - scale = [1, self.limb_profile] - case _: - axes = None - scale = None - limb = align_bezier(np.stack([leg_start, leg_end], -1), axes, scale, resolution=64) - limb.location = np.array( - [1 if leg_start[0] < 0 else -1, 1 if leg_start[1] < -self.size / 2 else -1, - 0] - ) * self.leg_thickness / 2 - butil.apply_transform(limb, True) - limbs.append(limb) - return limbs - - def make_backs(self): - back_starts = np.array([[-self.seat_back, 0, 0], [self.seat_back, 0, 0]]) * self.width / 2 - back_ends = back_starts.copy() - back_ends[:, 0] += np.array([self.back_x_offset, -self.back_x_offset]) - back_ends[:, 1] = self.back_y_offset - back_ends[:, 2] = self.back_height - return self.make_limb(back_starts, back_ends) - - def make_leg_decors(self, legs): - decors = [] - if self.has_leg_x_bar: - z_height = -self.leg_height * uniform(*self.leg_offset_bar) - locs = [] - for leg in legs: - co = read_co(leg) - locs.append(co[np.argmin(np.abs(co[:, -1] - z_height))]) - 
decors.append(self.solidify(bezier_curve(np.stack([locs[0], locs[3]], -1)), 0)) - decors.append(self.solidify(bezier_curve(np.stack([locs[1], locs[2]], -1)), 0)) - if self.has_leg_y_bar: - z_height = -self.leg_height * uniform(*self.leg_offset_bar) - locs = [] - for leg in legs: - co = read_co(leg) - locs.append(co[np.argmin(np.abs(co[:, -1] - z_height))]) - decors.append(self.solidify(bezier_curve(np.stack([locs[0], locs[1]], -1)), 1)) - decors.append(self.solidify(bezier_curve(np.stack([locs[2], locs[3]], -1)), 1)) - for d in decors: - write_attribute(d, 1, 'limb', 'FACE') - return decors - - def make_back_decors(self, backs, finalize=True): - obj = join_objects([deep_clone_obj(b) for b in backs]) - x, y, z = read_co(obj).T - x += np.where(x > 0, self.back_thickness / 2, -self.back_thickness / 2) - write_co(obj, np.stack([x, y, z], -1)) - smoothness = uniform(0, 1) - profile_shape_factor = uniform(0, .4) - with butil.ViewportMode(obj, 'EDIT'): - bpy.ops.mesh.select_mode(type='EDGE') - center = read_edge_center(obj) - for z_min, z_max in self.back_profile: - select_edges( - obj, (z_min * self.back_height <= center[:, -1]) & ( - center[:, -1] <= z_max * self.back_height) - ) - bpy.ops.mesh.bridge_edge_loops( - number_cuts=32, interpolation='LINEAR', smoothness=smoothness, - profile_shape_factor=profile_shape_factor - ) - bpy.ops.mesh.select_loose() - bpy.ops.mesh.delete() - butil.modify_mesh(obj, 'SOLIDIFY', thickness=np.minimum(self.thickness, self.back_thickness), offset=0) - if finalize: - butil.modify_mesh(obj, 'BEVEL', width=self.bevel_width, segments=8) - parts = [obj] - if self.back_type == 'vertical-bar': - other = join_objects([deep_clone_obj(b) for b in backs]) - with butil.ViewportMode(other, 'EDIT'): - bpy.ops.mesh.select_mode(type='EDGE') - bpy.ops.mesh.select_all(action='SELECT') - bpy.ops.mesh.bridge_edge_loops( - number_cuts=self.back_vertical_cuts, interpolation='LINEAR', - smoothness=smoothness, profile_shape_factor=profile_shape_factor - ) - bpy.ops.mesh.select_all(action='INVERT') - bpy.ops.mesh.delete() - bpy.ops.mesh.select_all(action='SELECT') - bpy.ops.mesh.delete(type='ONLY_FACE') - remove_edges(other, np.abs(read_edge_direction(other)[:, -1]) < .5) - remove_vertices(other, lambda x, y, z: z < -self.thickness / 2) - remove_vertices( - other, lambda x, y, z: z > ( - self.back_profile[0][0] + self.back_profile[0][1]) * self.back_height / 2 - ) - parts.append(self.solidify(other, 2, self.back_thickness)) - elif self.back_type == 'partial': - co = read_co(obj) - co[:, 1] *= self.back_partial_scale - write_co(obj, co) - for p in parts: - write_attribute(p, 1, 'panel', 'FACE') - return parts - - def make_arms(self, base, backs): - co = read_co(base) - end = co[np.argmin(co[:, 0] - (np.abs(co[:, 1] + self.arm_y) < .02))] - end[0] += self.arm_thickness / 4 - end_ = end.copy() - end_[0] = -end[0] - arms = [] - co = read_co(backs[0]) - start = co[np.argmin(co[:, 0] - (np.abs(co[:, -1] - self.arm_z) < .02))] - start[0] -= self.arm_thickness / 4 - start_ = start.copy() - start_[0] = -start[0] - for start, end in zip([start, start_], [end, end_]): - mid = np.array( - [end[0] + self.arm_mid[0] * (-1 if end[0] > 0 else 1), end[1] + self.arm_mid[1], - start[2] + self.arm_mid[2]] - ) - arm = align_bezier( - np.stack([start, mid, end], -1), - np.array([[end[0] - start[0], end[1] - start[1], 0], [0, 1 / np.sqrt(2), 1 / np.sqrt(2)], [0, 0, 1]]), - [1, *self.arm_profile, 1] - ) - if self.is_leg_round: - surface.add_geomod( - arm, geo_radius, apply=True, input_args=[self.arm_thickness / 
2, 32], - input_kwargs={'to_align_tilt': False} - ) - else: - with butil.ViewportMode(arm, 'EDIT'): - bpy.ops.mesh.select_all(action='SELECT') - bpy.ops.mesh.extrude_edges_move( - TRANSFORM_OT_translate={ - 'value': (self.arm_thickness if end[0] < 0 else -self.arm_thickness, 0, 0) - } - ) - butil.modify_mesh(arm, 'SOLIDIFY', thickness=self.arm_height, offset=0) - write_attribute(arm, 1, 'limb', 'FACE') - arms.append(arm) - return arms - - def solidify(self, obj, axis, thickness=None): - if thickness is None: - thickness = self.leg_thickness - if self.is_leg_round: - solidify(obj, axis, thickness) - butil.modify_mesh(obj, 'BEVEL', width=self.bevel_width, segments=8) - else: - surface.add_geomod(obj, geo_radius, apply=True, input_args=[thickness / 2, 32]) - write_attribute(obj, 1, 'limb', 'FACE') - return obj - - diff --git a/infinigen/assets/seating/chairs/office_chair.py b/infinigen/assets/seating/chairs/office_chair.py deleted file mode 100644 index 7a4f3ba5a..000000000 --- a/infinigen/assets/seating/chairs/office_chair.py +++ /dev/null @@ -1,206 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Yiming Zuo - -import bpy - -import numpy as np -from numpy.random import uniform, choice - -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface, tagging - -from infinigen.core.util.math import FixedSeed -from infinigen.core.util import blender as butil -from infinigen.core.placement.factory import AssetFactory - -from infinigen.assets.seating.chairs.seats.curvy_seats import generate_curvy_seats - -from infinigen.assets.tables.cocktail_table import geometry_create_legs -from infinigen.assets.material_assignments import AssetList - -def geometry_assemble_chair(nw: NodeWrangler, **kwargs): - # Code generated using version 2.6.4 of the node_transpiler - - generateseat = nw.new_node(generate_curvy_seats().name, - input_kwargs={ - 'Width': kwargs['Top Profile Width'], - 'Front Relative Width': kwargs['Top Front Relative Width'], - 'Front Bent': kwargs['Top Front Bent'], - 'Seat Bent': kwargs['Top Seat Bent'], - 'Mid Bent': kwargs['Top Mid Bent'], - 'Mid Relative Width': kwargs['Top Mid Relative Width'], - 'Back Bent': kwargs['Top Back Bent'], - 'Back Relative Width': kwargs['Top Back Relative Width'], - 'Mid Pos': kwargs['Top Mid Pos'], - 'Seat Height': kwargs['Top Thickness'], - }) - - seat_instance = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': generateseat, - 'Translation': (0.0000, 0.0000, kwargs['Top Height'])}) - - seat_instance = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': seat_instance, 'Material': kwargs['TopMaterial']}) - - legs = nw.new_node(geometry_create_legs(**kwargs).name) - - join_geometry = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [seat_instance, legs]}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': join_geometry}, attrs={'is_active_output': True}) - -class OfficeChairFactory(AssetFactory): - def __init__(self, factory_seed, coarse=False, dimensions=None): - super(OfficeChairFactory, self).__init__(factory_seed, coarse=coarse) - - self.dimensions = dimensions - - with FixedSeed(factory_seed): - self.params, leg_style = self.sample_parameters(dimensions) - self.material_params, self.scratch, self.edge_wear = 
self.get_material_params(leg_style) - self.params.update(self.material_params) - - def get_material_params(self, leg_style): - material_assignments = AssetList['OfficeChairFactory'](leg_style) - params = { - "TopMaterial": material_assignments['top'].assign_material(), - "LegMaterial": material_assignments['leg'].assign_material(), - } - wrapped_params = { - k: surface.shaderfunc_to_material(v) for k, v in params.items() - } - - scratch_prob, edge_wear_prob = material_assignments['wear_tear_prob'] - scratch, edge_wear = material_assignments['wear_tear'] - - is_scratch = uniform() < scratch_prob - is_edge_wear = uniform() < edge_wear_prob - if not is_scratch: - scratch = None - - if not is_edge_wear: - edge_wear = None - - return wrapped_params, scratch, edge_wear - - @staticmethod - def sample_parameters(dimensions): - # all in meters - - if dimensions is None: - x = uniform(0.5, 0.6) - z = uniform(1.0, 1.4) - dimensions = ( - x, x, z - ) - - x, y, z = dimensions - - top_thickness = uniform(0.5, 0.7) - - # straight has the bug that seat and legs are disjoint, so disable for now. - - # leg_style = choice(['straight', 'single_stand', 'wheeled']) - leg_style = choice(['single_stand', 'wheeled']) - - parameters = { - 'Top Profile Width': x, - 'Top Thickness': top_thickness, - 'Top Front Relative Width': uniform(0.5, 0.8), - 'Top Front Bent': uniform(-1.5, -0.4), - 'Top Seat Bent': uniform(-1.5, -0.4), - 'Top Mid Bent': uniform(-2.4, -0.5), - 'Top Mid Relative Width': uniform(0.5, 0.9), - 'Top Back Bent': uniform(-1, -0.1), - 'Top Back Relative Width': uniform(0.6, 0.9), - 'Top Mid Pos': uniform(0.4, 0.6), - # 'Top Material': choice(['leather', 'wood', 'plastic', 'glass']), - 'Height': z, - 'Top Height': z - top_thickness, - 'Leg Style': leg_style, - 'Leg NGon': choice([4, 32]), - 'Leg Placement Top Relative Scale': 0.7, - 'Leg Placement Bottom Relative Scale': uniform(1.1, 1.3), - 'Leg Height': 1.0, - } - - if leg_style == "single_stand": - leg_number = 1 - leg_diameter = uniform(0.7*x, 0.9*x) - - leg_curve_ctrl_pts = [(0.0, uniform(0.1, 0.2)), - (0.5, uniform(0.1, 0.2)), (0.9, uniform(0.2, 0.3)), (1.0, 1.0)] - - parameters.update({ - 'Leg Number': leg_number, - 'Leg Diameter': leg_diameter, - 'Leg Curve Control Points': leg_curve_ctrl_pts, - # 'Leg Material': choice(['metal', 'wood']) - }) - - elif leg_style == "straight": - leg_diameter = uniform(0.04, 0.06) - leg_number = 4 - - leg_curve_ctrl_pts = [(0.0, 1.0), (0.4, uniform(0.85, 0.95)), (1.0, uniform(0.4, 0.6))] - - parameters.update({ - 'Leg Number': leg_number, - 'Leg Diameter': leg_diameter, - 'Leg Curve Control Points': leg_curve_ctrl_pts, - # 'Leg Material': choice(['metal', 'wood']), - 'Strecher Relative Pos': uniform(0.2, 0.6), - 'Strecher Increament': choice([0, 1, 2]) - }) - - elif leg_style == "wheeled": - leg_diameter = uniform(0.03, 0.05) - leg_number = 1 - pole_number = choice([4, 5]) - joint_height = uniform(0.5, 0.8) * (z - top_thickness) - wheel_arc_sweep_angle = uniform(120, 240) - wheel_width = uniform(0.11, 0.15) - wheel_rot = uniform(0, 360) - pole_length = uniform(1.6, 2.0) - - parameters.update({ - 'Leg Number': leg_number, - 'Leg Pole Number': pole_number, - 'Leg Diameter': leg_diameter, - 'Leg Joint Height': joint_height, - 'Leg Wheel Arc Sweep Angle': wheel_arc_sweep_angle, - 'Leg Wheel Width': wheel_width, - 'Leg Wheel Rot': wheel_rot, - 'Leg Pole Length': pole_length, - # 'Leg Material': choice(['metal']) - }) - - else: - raise NotImplementedError - - return parameters, leg_style - - def create_asset(self, 
**params): - - bpy.ops.mesh.primitive_plane_add( - size=2, enter_editmode=False, align='WORLD', location=(0, 0, 0), scale=(1, 1, 1)) - obj = bpy.context.active_object - - - surface.add_geomod(obj, geometry_assemble_chair, apply=True, input_kwargs=self.params) - tagging.tag_system.relabel_obj(obj) - - obj.rotation_euler.z += np.pi / 2 - butil.apply_transform(obj) - - return obj - - def finalize_assets(self, assets): - if self.scratch: - self.scratch.apply(assets) - if self.edge_wear: - self.edge_wear.apply(assets) - diff --git a/infinigen/assets/seating/chairs/seats/curvy_seats.py b/infinigen/assets/seating/chairs/seats/curvy_seats.py deleted file mode 100644 index 128d5d1ff..000000000 --- a/infinigen/assets/seating/chairs/seats/curvy_seats.py +++ /dev/null @@ -1,150 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Yiming Zuo - -import bpy -import bpy -import mathutils -import numpy as np -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface - -from infinigen.assets.tables.table_utils import nodegroup_bent -from infinigen.assets.table_decorations.utils import nodegroup_lofting, nodegroup_warp_around_curve - -# TODO: set material automatically - -@node_utils.to_nodegroup('generate_curvy_seats', singleton=False, type='GeometryNodeTree') -def generate_curvy_seats(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketInt', 'U Resolution', 256), - ('NodeSocketInt', 'V Resolution', 128), - ('NodeSocketFloat', 'Width', 0.5000), - ('NodeSocketFloat', 'Thickness', 0.0300), - ('NodeSocketFloat', 'Front Relative Width', 0.5000), - ('NodeSocketFloat', 'Front Bent', -0.3800), - ('NodeSocketFloat', 'Seat Bent', -0.5600), - ('NodeSocketFloat', 'Mid Relative Width', 0.5000), - ('NodeSocketFloat', 'Mid Bent', -0.7000), - ('NodeSocketFloat', 'Back Relative Width', 0.5000), - ('NodeSocketFloat', 'Back Bent', -0.2000), - ('NodeSocketFloat', 'Top Relative Width', 0.5000), - ('NodeSocketFloat', 'Top Bent', -0.2000), - ('NodeSocketFloat', 'Seat Height', 0.6000), - ('NodeSocketFloat', 'Mid Pos', 0.5000), - ('NodeSocketMaterial', 'SeatMaterial', None)]) - - curve_circle_1 = nw.new_node(Nodes.CurveCircle, input_kwargs={'Resolution': group_input.outputs["U Resolution"], 'Radius': 0.5000}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': group_input.outputs["Width"], 'Y': group_input.outputs["Thickness"], 'Z': 1.0000}) - - transform_geometry_1 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': curve_circle_1.outputs["Curve"], 'Translation': (0.0000, 0.0000, 0.5000), 'Scale': combine_xyz}) - - bent = nw.new_node(nodegroup_bent().name, - input_kwargs={'Geometry': transform_geometry_1, 'Amount': group_input.outputs["Seat Bent"]}) - - curve_circle_2 = nw.new_node(Nodes.CurveCircle, input_kwargs={'Resolution': group_input.outputs["U Resolution"], 'Radius': 0.5000}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Width"], 1: group_input.outputs["Mid Relative Width"]}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply, 'Y': group_input.outputs["Thickness"], 'Z': 
1.0000}) - - transform_geometry_2 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': curve_circle_2.outputs["Curve"], 'Translation': (0.0000, 0.0000, 1.0000), 'Scale': combine_xyz_2}) - - bent_1 = nw.new_node(nodegroup_bent().name, - input_kwargs={'Geometry': transform_geometry_2, 'Amount': group_input.outputs["Mid Bent"]}) - - curve_circle_3 = nw.new_node(Nodes.CurveCircle, input_kwargs={'Resolution': group_input.outputs["U Resolution"], 'Radius': 0.5000}) - - transform_geometry_3 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': curve_circle_3.outputs["Curve"], 'Scale': (0.0000, 0.0050, 1.0000)}) - - curve_circle = nw.new_node(Nodes.CurveCircle, input_kwargs={'Resolution': group_input.outputs["U Resolution"], 'Radius': 0.5000}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Width"], 1: group_input.outputs["Front Relative Width"]}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply_1, 'Y': 0.0050, 'Z': 1.0000}) - - transform_geometry = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': curve_circle.outputs["Curve"], 'Translation': (0.0000, 0.0000, 0.0600), 'Scale': combine_xyz_1}) - - bent_2 = nw.new_node(nodegroup_bent().name, - input_kwargs={'Geometry': transform_geometry, 'Amount': group_input.outputs["Front Bent"]}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [bent_1, bent, bent_2, transform_geometry_3]}) - - curve_circle_4 = nw.new_node(Nodes.CurveCircle, input_kwargs={'Resolution': group_input.outputs["U Resolution"], 'Radius': 0.5000}) - - multiply_2 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Width"], 1: group_input.outputs["Back Relative Width"]}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply_2, 'Y': group_input.outputs["Thickness"], 'Z': 1.0000}) - - transform_geometry_4 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': curve_circle_4.outputs["Curve"], 'Translation': (0.0000, 0.0000, 1.5000), 'Scale': combine_xyz_3}) - - bent_3 = nw.new_node(nodegroup_bent().name, - input_kwargs={'Geometry': transform_geometry_4, 'Amount': group_input.outputs["Back Bent"]}) - - curve_circle_5 = nw.new_node(Nodes.CurveCircle, input_kwargs={'Resolution': group_input.outputs["U Resolution"], 'Radius': 0.5000}) - - multiply_3 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Width"], 1: group_input.outputs["Top Relative Width"]}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz_4 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply_3, 'Y': 0.0050, 'Z': 1.0000}) - - transform_geometry_5 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': curve_circle_5.outputs["Curve"], 'Translation': (0.0000, 0.0000, 2.0200), 'Scale': combine_xyz_4}) - - bent_4 = nw.new_node(nodegroup_bent().name, - input_kwargs={'Geometry': transform_geometry_5, 'Amount': group_input.outputs["Top Bent"]}) - - curve_circle_6 = nw.new_node(Nodes.CurveCircle, input_kwargs={'Resolution': group_input.outputs["U Resolution"], 'Radius': 0.5000}) - - transform_geometry_6 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': curve_circle_6.outputs["Curve"], 'Translation': (0.0000, 0.0000, 2.1000), 'Scale': (0.0000, 0.0050, 1.0000)}) - - join_geometry_2 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [transform_geometry_6, bent_4, bent_3]}) - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [join_geometry_2, 
join_geometry]}) - - lofting_001 = nw.new_node(nodegroup_lofting().name, - input_kwargs={'Profile Curves': join_geometry_1, 'U Resolution': group_input.outputs["U Resolution"], 'V Resolution': group_input.outputs["V Resolution"]}) - - multiply_4 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Width"], 1: -0.5000}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_6 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': multiply_4, 'Z': 0.0300}) - - combine_xyz_7 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': group_input.outputs["Mid Pos"], 'Z': -0.0500}) - - multiply_5 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Width"]}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_5 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': multiply_5, 'Z': group_input.outputs["Seat Height"]}) - - bezier_segment = nw.new_node(Nodes.CurveBezierSegment, - input_kwargs={'Resolution': 128, 'Start': combine_xyz_6, 'Start Handle': combine_xyz_7, 'End Handle': (0.0000, 0.1000, 0.1000), 'End': combine_xyz_5}) - - warparoundcurvealt = nw.new_node(nodegroup_warp_around_curve().name, - input_kwargs={'Geometry': lofting_001.outputs["Geometry"], 'Curve': bezier_segment}) - - # material_func =np.random.choice([plastic.shader_rough_plastic, metal.get_shader(), wood_new.shader_wood, leather.shader_leather]) - - warparoundcurvealt = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': warparoundcurvealt, 'Material': group_input.outputs["SeatMaterial"]}) - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': warparoundcurvealt}, attrs={'is_active_output': True}) - diff --git a/infinigen/assets/seating/chairs/seats/round_seats.py b/infinigen/assets/seating/chairs/seats/round_seats.py deleted file mode 100644 index 99959d77f..000000000 --- a/infinigen/assets/seating/chairs/seats/round_seats.py +++ /dev/null @@ -1,41 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
- -# Authors: Yiming Zuo - - -import bpy -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface - -from infinigen.assets.tables.table_top import nodegroup_capped_cylinder -from infinigen.assets.materials.leather_and_fabrics.leather import shader_leather - -@node_utils.to_nodegroup('generate_round_seats', singleton=False, type='GeometryNodeTree') -def generate_round_seats(nw: NodeWrangler, thickness=None, radius=None, cap_radius=None, bevel_factor=None, seat_material=None): - # Code generated using version 2.6.4 of the node_transpiler - if thickness is None: - thickness = uniform(0.05, 0.12) - if radius is None: - radius = uniform(0.35, 0.45) - if cap_radius is None: - cap_radius = uniform(2.0, 3.2) - if bevel_factor is None: - bevel_factor = uniform(0.01, 0.04) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: thickness, 1: 1.0}, attrs={'operation': 'MULTIPLY'}) - - divide = nw.new_node(Nodes.Math, input_kwargs={0: bevel_factor, 1: thickness}, attrs={'operation': 'DIVIDE'}) - - cappedcylinder = nw.new_node(nodegroup_capped_cylinder().name, - input_kwargs={'Thickness': multiply, 'Radius': radius, 'Cap Flatness': cap_radius, 'Fillet Radius Vertical': divide, 'Cap Relative Scale': 0.0140, 'Cap Relative Z Offset': -0.0020, 'Resolution': 128}) - - seat = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': cappedcylinder, 'Material': seat_material}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': seat}, attrs={'is_active_output': True}) \ No newline at end of file diff --git a/infinigen/assets/seating/sofa.py b/infinigen/assets/seating/sofa.py deleted file mode 100644 index 0f33fb326..000000000 --- a/infinigen/assets/seating/sofa.py +++ /dev/null @@ -1,743 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
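
`nodegroup_array_fill_line`, defined just below at the top of the sofa file, spaces instances (seat cushions) evenly between a line start and end, with each endpoint pulled inward by half of the instance's length along the fill axis (Y) so the outer cushions do not overhang. A small numpy sketch of that placement math only, as an illustration with hypothetical numbers rather than the node group itself:

```python
# Sketch of the placement math in nodegroup_array_fill_line: inset both ends by
# half the instance's Y dimension, then place `count` centres evenly between
# the inset endpoints. Illustrative only.
import numpy as np


def fill_line_centres(line_start, line_end, instance_dims, count):
    line_start = np.asarray(line_start, dtype=float)
    line_end = np.asarray(line_end, dtype=float)
    half = np.asarray(instance_dims, dtype=float) * np.array([0.0, 0.5, 0.0])
    first = line_end - half    # mirrors Line End + dims * (0, -0.5, 0)
    last = line_start + half   # mirrors Line Start - dims * (0, -0.5, 0)
    return np.linspace(first, last, count)


# e.g. four 0.6 m cushions along a 2.4 m seat line (hypothetical numbers)
print(fill_line_centres((0, -1.2, 0), (0, 1.2, 0), (0.6, 0.6, 0.4), 4))
```
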
- -# Authors: Alexander Raistrick, Stamatis Alexandropolous, Yiming Zuo - -import bpy -import bpy -import mathutils -import random - -import numpy as np -from numpy.random import uniform, normal, randint, choice - -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core import surface - -from infinigen.core.placement.factory import AssetFactory -from infinigen.core.util import blender as butil -from infinigen.core.util.math import FixedSeed -from infinigen.core import tagging, tags as t - -from infinigen.core.util.random import log_uniform, clip_gaussian - -from infinigen.assets.material_assignments import AssetList - -@node_utils.to_nodegroup('nodegroup_array_fill_line', singleton=False, type='GeometryNodeTree') -def nodegroup_array_fill_line(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'Line Start', (0.0000, 0.0000, 0.0000)), - ('NodeSocketVector', 'Line End', (0.0000, 0.0000, 0.0000)), - ('NodeSocketVector', 'Instance Dimensions', (0.0000, 0.0000, 0.0000)), - ('NodeSocketInt', 'Count', 10), - ('NodeSocketGeometry', 'Instance', None)]) - - multiply = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["Instance Dimensions"], 1: (0.0000, -0.5000, 0.0000)}, - attrs={'operation': 'MULTIPLY'}) - - add = nw.new_node(Nodes.VectorMath, input_kwargs={0: group_input.outputs["Line End"], 1: multiply.outputs["Vector"]}) - - subtract = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["Line Start"], 1: multiply.outputs["Vector"]}, - attrs={'operation': 'SUBTRACT'}) - - mesh_line = nw.new_node(Nodes.MeshLine, - input_kwargs={'Count': group_input.outputs["Count"], 'Start Location': add.outputs["Vector"], 'Offset': subtract.outputs["Vector"]}, - attrs={'mode': 'END_POINTS'}) - - instance_on_points_1 = nw.new_node(Nodes.InstanceOnPoints, - input_kwargs={'Points': mesh_line, 'Instance': group_input.outputs["Instance"]}) - - realize_instances_1 = nw.new_node(Nodes.RealizeInstances, input_kwargs={'Geometry': instance_on_points_1}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': realize_instances_1}, attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_corner_cube', singleton=False, type='GeometryNodeTree') -def nodegroup_corner_cube(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVectorTranslation', 'Location', (0.0000, 0.0000, 0.0000)), - ('NodeSocketVectorTranslation', 'CenteringLoc', (0.5000, 0.5000, 0.0000)), - ('NodeSocketVectorTranslation', 'Dimensions', (1.0000, 1.0000, 1.0000)), - ('NodeSocketFloat', 'SupportingEdgeFac', 0.0000), - ('NodeSocketInt', 'Vertices X', 4), - ('NodeSocketInt', 'Vertices Y', 4), - ('NodeSocketInt', 'Vertices Z', 4)]) - - cube = nw.new_node(Nodes.MeshCube, - input_kwargs={'Size': group_input.outputs["Dimensions"], 'Vertices X': group_input.outputs["Vertices X"], 'Vertices Y': group_input.outputs["Vertices Y"], 'Vertices Z': group_input.outputs["Vertices Z"]}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Vector': group_input.outputs["CenteringLoc"], 9: (0.5000, 0.5000, 0.5000), 10: (-0.5000, -0.5000, -0.5000)}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - multiply_add = nw.new_node(Nodes.VectorMath, - input_kwargs={0: map_range.outputs["Vector"], 1: 
group_input.outputs["Dimensions"], 2: group_input.outputs["Location"]}, - attrs={'operation': 'MULTIPLY_ADD'}) - - transform_geometry = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': cube.outputs["Mesh"], 'Translation': multiply_add.outputs["Vector"]}) - - store_named_attribute = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': transform_geometry, 'Name': 'UVMap', 3: cube.outputs["UV Map"]}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': store_named_attribute}, attrs={'is_active_output': True}) - -ARM_TYPE_SQUARE = 0 -ARM_TYPE_ROUND = 1 -ARM_TYPE_ANGULAR = 2 - -@node_utils.to_nodegroup('nodegroup_sofa_geometry', singleton=False, type='GeometryNodeTree') -def nodegroup_sofa_geometry(nw: NodeWrangler): - # Code generated using version 2.6.5 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketVector', 'Dimensions', (0.0000, 0.9000, 2.5000)), - ('NodeSocketVector', 'Arm Dimensions', (0.0000, 0.0000, 0.0000)), - ('NodeSocketVector', 'Back Dimensions', (0.0000, 0.0000, 0.0000)), - ('NodeSocketVector', 'Seat Dimensions', (0.0000, 0.0000, 0.0000)), - ('NodeSocketVector', 'Foot Dimensions', (0.0000, 0.0000, 0.0000)), - ('NodeSocketFloat', 'Baseboard Height', 0.1300), - ('NodeSocketFloat', 'Backrest Width', 0.1100), - ('NodeSocketFloat', 'Seat Margin', 0.9700), - ('NodeSocketFloat', 'Backrest Angle', -0.2000), - ('NodeSocketFloatFactor', 'arm_width', 0.7000), - ('NodeSocketInt', 'Arm Type', 0), - ('NodeSocketFloatFactor', 'Arm_height', 0.7318), - ('NodeSocketFloatAngle', 'arms_angle', 0.8727), - ('NodeSocketBool', 'Footrest', False), - ('NodeSocketInt', 'Count', 4), - ('NodeSocketFloat', 'Scaling footrest', 1.5000), - ('NodeSocketInt', 'Reflection', 0), - ('NodeSocketBool', 'leg_type', False), - ('NodeSocketFloat', 'leg_dimensions', 0.5000), - ('NodeSocketFloat', 'leg_z', 1.0000), - ('NodeSocketInt', 'leg_faces', 20), - ('NodeSocketBool', 'Subdivide', True)]) - - multiply = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["Dimensions"], 1: (0.0000, 0.5000, 0.0000)}, - attrs={'operation': 'MULTIPLY'}) - - reroute = nw.new_node(Nodes.Reroute, input_kwargs={'Input': group_input.outputs["Arm Dimensions"]}) - - arm_cube = nw.new_node(nodegroup_corner_cube().name, - input_kwargs={'Location': multiply.outputs["Vector"], 'CenteringLoc': (0.0000, 1.0000, 0.0000), 'Dimensions': reroute, 'Vertices Z': 10}, - label='ArmCube') - - reroute_1 = nw.new_node(Nodes.Reroute, input_kwargs={'Input': arm_cube}) - - position = nw.new_node(Nodes.InputPosition) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': position}) - - separate_xyz_1 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': reroute}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': separate_xyz.outputs["Z"], 1: -0.1000, 2: separate_xyz_1.outputs["Z"], 3: -0.1000, 4: 0.2000}) - - float_curve = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Factor': group_input.outputs["arm_width"], 'Value': map_range.outputs["Result"]}) - node_utils.assign_curve(float_curve.mapping.curves[0], [(0.0092, 0.7688), (0.1011, 0.5937), (0.1494, 0.4062), (0.3954, 0.0781), (1.0000, 0.2187)]) - - separate_xyz_2 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': multiply.outputs["Vector"]}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz.outputs["Y"], 1: separate_xyz_2.outputs["Y"]}, - attrs={'operation': 
'SUBTRACT'}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: float_curve, 1: subtract}, attrs={'operation': 'MULTIPLY'}) - - position_1 = nw.new_node(Nodes.InputPosition) - - separate_xyz_14 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': position_1}) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': separate_xyz_14.outputs["X"], 1: -1.0000, 2: 0.6000, 3: 2.1000, 4: -1.1000}) - - float_curve_1 = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Factor': group_input.outputs["Arm_height"], 'Value': map_range_1.outputs["Result"]}) - node_utils.assign_curve(float_curve_1.mapping.curves[0], [(0.1341, 0.2094), (0.7386, 1.0000), (0.9682, 0.0781), (1.0000, 0.0000)]) - - separate_xyz_15 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': (-2.9000, 3.3000, 0.0000)}) - - subtract_1 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_14.outputs["Z"], 1: separate_xyz_15.outputs["Z"]}, - attrs={'operation': 'SUBTRACT'}) - - multiply_2 = nw.new_node(Nodes.Math, input_kwargs={0: float_curve_1, 1: subtract_1}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': multiply_1, 'Z': multiply_2}) - - vector_rotate = nw.new_node(Nodes.VectorRotate, - input_kwargs={'Vector': combine_xyz, 'Axis': (1.0000, 0.0000, 0.0000), 'Angle': group_input.outputs["arms_angle"]}) - - set_position = nw.new_node(Nodes.SetPosition, input_kwargs={'Geometry': reroute_1, 'Offset': vector_rotate}) - - multiply_3 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["Dimensions"], 1: (0.0000, 0.5000, 0.0000)}, - attrs={'operation': 'MULTIPLY'}) - - separate_xyz_3 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': group_input.outputs["Arm Dimensions"]}) - - subtract_2 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_3.outputs["Z"], 1: separate_xyz_3.outputs["Y"]}, - attrs={'operation': 'SUBTRACT'}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': separate_xyz_3.outputs["X"], 'Y': separate_xyz_3.outputs["Y"], 'Z': subtract_2}) - - reroute_2 = nw.new_node(Nodes.Reroute, input_kwargs={'Input': combine_xyz_1}) - - arm_cube_1 = nw.new_node(nodegroup_corner_cube().name, - input_kwargs={'Location': multiply_3.outputs["Vector"], 'CenteringLoc': (0.0000, 1.0000, 0.0000), 'Dimensions': reroute_2}, - label='ArmCube') - - separate_xyz_4 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': reroute_2}) - - multiply_4 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz_4.outputs["X"], 1: 1.0001}, attrs={'operation': 'MULTIPLY'}) - - reroute_3 = nw.new_node(Nodes.Reroute, input_kwargs={'Input': multiply_4}) - - arm_cylinder = nw.new_node('GeometryNodeMeshCylinder', - input_kwargs={'Side Segments': 4, 'Radius': separate_xyz_4.outputs["Y"], 'Depth': reroute_3}, - attrs={'fill_type': 'TRIANGLE_FAN'}) - - arm_cylinder = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': arm_cylinder.outputs["Mesh"], 'Name': 'UVMap', 3: arm_cylinder.outputs["UV Map"]}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - divide = nw.new_node(Nodes.Math, input_kwargs={0: reroute_3, 1: 2.0000}, attrs={'operation': 'DIVIDE'}) - - separate_xyz_5 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': multiply_3.outputs["Vector"]}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': divide, 'Y': separate_xyz_5.outputs["Y"], 'Z': separate_xyz_4.outputs["Z"]}) - - arm_cylinder = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': arm_cylinder, 'Translation': combine_xyz_2, 'Rotation': (0.0000, 
1.5708, 0.0000)}) - - roundtop = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [arm_cube_1, arm_cylinder]}) - - square_or_round = nw.new_node( - Nodes.Switch, - input_kwargs={ - 'Switch': nw.compare('EQUAL', group_input.outputs['Arm Type'], ARM_TYPE_SQUARE), - 'False': roundtop, - 'True': arm_cube_1, - } - ) - - angular_or_squareround = nw.new_node(Nodes.Switch, - input_kwargs={ - 'Switch': nw.compare('EQUAL', group_input.outputs['Arm Type'], ARM_TYPE_ANGULAR), - 'False': square_or_round, - 'True': set_position - } - ) - - transform_geometry_1 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': angular_or_squareround, 'Scale': (1.0000, -1.0000, 1.0000)}) - - flip_faces = nw.new_node(Nodes.FlipFaces, input_kwargs={'Mesh': transform_geometry_1}) - - join_geometry_2 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [flip_faces, angular_or_squareround]}) - - separate_xyz_6 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': group_input.outputs["Back Dimensions"]}) - - separate_xyz_7 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': group_input.outputs["Arm Dimensions"]}) - - separate_xyz_8 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': group_input.outputs["Dimensions"]}) - - multiply_add = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_7.outputs["Y"], 1: -2.0000, 2: separate_xyz_8.outputs["Y"]}, - attrs={'operation': 'MULTIPLY_ADD'}) - - combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': separate_xyz_6.outputs["X"], 'Y': multiply_add, 'Z': separate_xyz_6.outputs["Z"]}) - - back_board = nw.new_node(nodegroup_corner_cube().name, - input_kwargs={'CenteringLoc': (0.0000, 0.5000, -1.0000), 'Dimensions': combine_xyz_3, 'Vertices X': 2, 'Vertices Y': 2, 'Vertices Z': 2}, - label='BackBoard') - - join_geometry_3 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [join_geometry_2, back_board]}) - - multiply_5 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: combine_xyz_3, 1: (1.0000, 0.0000, 0.0000)}, - attrs={'operation': 'MULTIPLY'}) - - multiply_add_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["Arm Dimensions"], 1: (0.0000, -2.0000, 0.0000), 2: group_input.outputs["Dimensions"]}, - attrs={'operation': 'MULTIPLY_ADD'}) - - multiply_add_2 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["Back Dimensions"], 1: (-1.0000, 0.0000, 0.0000), 2: multiply_add_1.outputs["Vector"]}, - attrs={'operation': 'MULTIPLY_ADD'}) - - separate_xyz_9 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': multiply_add_2.outputs["Vector"]}) - - combine_xyz_4 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': separate_xyz_9.outputs["X"], 'Y': separate_xyz_9.outputs["Y"], 'Z': group_input.outputs["Baseboard Height"]}) - - base_board = nw.new_node(nodegroup_corner_cube().name, - input_kwargs={'Location': multiply_5.outputs["Vector"], 'CenteringLoc': (0.0000, 0.5000, -1.0000), 'Dimensions': combine_xyz_4, 'Vertices X': 2, 'Vertices Y': 2, 'Vertices Z': 2}, - label='BaseBoard') - - reroute_13 = nw.new_node(Nodes.Reroute, input_kwargs={'Input': group_input.outputs["Count"]}) - - equal = nw.new_node(Nodes.Compare, input_kwargs={2: reroute_13, 3: 4}, attrs={'operation': 'EQUAL', 'data_type': 'INT'}) - - reroute_5 = nw.new_node(Nodes.Reroute, input_kwargs={'Input': separate_xyz_9.outputs["Y"]}) - - separate_xyz_10 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': group_input.outputs["Seat Dimensions"]}) - - divide_1 = nw.new_node(Nodes.Math, input_kwargs={0: reroute_5, 1: 
separate_xyz_10.outputs["Y"]}, attrs={'operation': 'DIVIDE'}) - - ceil = nw.new_node(Nodes.Math, input_kwargs={0: divide_1}, attrs={'operation': 'CEIL'}) - - combine_xyz_14 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': 1.0000, 'Y': ceil, 'Z': 1.0000}) - - divide_2 = nw.new_node(Nodes.VectorMath, input_kwargs={0: combine_xyz_4, 1: combine_xyz_14}, attrs={'operation': 'DIVIDE'}) - - reroute_12 = nw.new_node(Nodes.Reroute, input_kwargs={'Input': divide_2.outputs["Vector"]}) - - base_board_1 = nw.new_node(nodegroup_corner_cube().name, - input_kwargs={'Location': multiply_5.outputs["Vector"], 'CenteringLoc': (0.0000, 0.5000, -1.0000), 'Dimensions': reroute_12, 'Vertices X': 2, 'Vertices Y': 2, 'Vertices Z': 2}, - label='BaseBoard') - - equal_1 = nw.new_node(Nodes.Compare, - input_kwargs={0: 4.0000, 2: reroute_13, 3: 4}, - attrs={'operation': 'EQUAL', 'data_type': 'INT'}) - - switch_8 = nw.new_node(Nodes.Switch, - input_kwargs={0: equal_1, 8: divide_2.outputs["Vector"], 9: combine_xyz_4}, - attrs={'input_type': 'VECTOR'}) - - separate_xyz_16 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': switch_8.outputs[3]}) - - multiply_6 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz_16.outputs["Y"], 1: 0.7000}, attrs={'operation': 'MULTIPLY'}) - - grid_1 = nw.new_node(Nodes.MeshGrid, input_kwargs={'Size Y': multiply_6, 'Vertices X': 1, 'Vertices Y': 2}) - - combine_xyz_18 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': 0.1000, 'Y': separate_xyz_16.outputs["Y"], 'Z': separate_xyz_16.outputs["Z"]}) - - subtract_3 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: switch_8.outputs[3], 1: combine_xyz_18}, - attrs={'operation': 'SUBTRACT'}) - - multiply_7 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["Back Dimensions"], 1: (1.0000, 0.0000, 0.0000)}, - attrs={'operation': 'MULTIPLY'}) - - add = nw.new_node(Nodes.VectorMath, input_kwargs={0: subtract_3.outputs["Vector"], 1: multiply_7.outputs["Vector"]}) - - transform_geometry_10 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': grid_1.outputs["Mesh"], 'Translation': add.outputs["Vector"], 'Scale': (1.0000, 1.0000, 0.9000)}) - - cone = nw.new_node('GeometryNodeMeshCone', - input_kwargs={'Vertices': group_input.outputs["leg_faces"], 'Side Segments': 4, 'Radius Top': 0.0100, 'Radius Bottom': 0.0250, 'Depth': 0.0700}) - - reroute_9 = nw.new_node(Nodes.Reroute, input_kwargs={'Input': group_input.outputs["leg_dimensions"]}) - - combine_xyz_17 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': reroute_9, 'Y': reroute_9, 'Z': group_input.outputs["leg_z"]}) - - transform_geometry_9 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': cone.outputs["Mesh"], 'Translation': (0.0000, 0.0000, 0.0100), 'Rotation': (0.0000, 3.1416, 0.0000), 'Scale': combine_xyz_17}) - - foot_cube = nw.new_node(nodegroup_corner_cube().name, - input_kwargs={'CenteringLoc': (0.5000, 0.5000, 0.9000), 'Dimensions': group_input.outputs["Foot Dimensions"]}, - label='FootCube') - - transform_geometry_12 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': foot_cube, 'Scale': (0.5000, 0.8000, 0.8000)}) - - switch_6 = nw.new_node(Nodes.Switch, - input_kwargs={1: group_input.outputs["leg_type"], 14: transform_geometry_9, 15: transform_geometry_12}) - - transform_geometry_8 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': switch_6.outputs[6]}) - - instance_on_points_1 = nw.new_node(Nodes.InstanceOnPoints, - input_kwargs={'Points': transform_geometry_10, 'Instance': transform_geometry_8, 'Scale': (1.0000, 1.0000, 1.2000)}) - - 
realize_instances_1 = nw.new_node(Nodes.RealizeInstances, input_kwargs={'Geometry': instance_on_points_1}) - - join_geometry_10 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [base_board_1, realize_instances_1]}) - - subtract_4 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: combine_xyz_14, 1: (1.0000, 1.0000, 1.0000)}, - attrs={'operation': 'SUBTRACT'}) - - multiply_8 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: subtract_4.outputs["Vector"], 1: (0.0000, 0.5000, 0.0000)}, - attrs={'operation': 'MULTIPLY'}) - - multiply_9 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: divide_2.outputs["Vector"], 1: multiply_8.outputs["Vector"]}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz_16 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': 1.0000, 'Y': group_input.outputs["Reflection"], 'Z': 1.0000}) - - multiply_10 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: multiply_9.outputs["Vector"], 1: combine_xyz_16}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz_12 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': group_input.outputs["Scaling footrest"], 'Y': 1.0000, 'Z': 1.0000}) - - transform_geometry_5 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': join_geometry_10, 'Translation': multiply_10.outputs["Vector"], 'Scale': combine_xyz_12}) - - switch_2 = nw.new_node(Nodes.Switch, input_kwargs={1: group_input.outputs["Footrest"], 15: transform_geometry_5}) - - combine_xyz_19 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': group_input.outputs["Scaling footrest"], 'Y': 1.3000, 'Z': 1.0000}) - - transform_geometry_11 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': realize_instances_1, 'Scale': combine_xyz_19}) - - base_board_2 = nw.new_node(nodegroup_corner_cube().name, - input_kwargs={'Location': multiply_5.outputs["Vector"], 'CenteringLoc': (0.0000, 0.5000, -1.0000), 'Dimensions': combine_xyz_4, 'Vertices X': 3, 'Vertices Y': 3, 'Vertices Z': 3}, - label='BaseBoard') - - combine_xyz_13 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': group_input.outputs["Scaling footrest"], 'Y': 1.0000, 'Z': 1.0000}) - - transform_geometry_6 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': base_board_2, 'Scale': combine_xyz_13}) - - join_geometry_11 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [transform_geometry_11, transform_geometry_6]}) - - switch_4 = nw.new_node(Nodes.Switch, input_kwargs={1: group_input.outputs["Footrest"], 15: join_geometry_11}) - - switch_5 = nw.new_node(Nodes.Switch, input_kwargs={1: equal, 14: switch_2.outputs[6], 15: switch_4.outputs[6]}) - - join_geometry_4 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [join_geometry_3, base_board, switch_5.outputs[6]]}) - - grid = nw.new_node(Nodes.MeshGrid, input_kwargs={'Vertices X': 2, 'Vertices Y': 2}) - - multiply_11 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["Dimensions"], 1: (0.5000, 0.0000, 0.0000)}, - attrs={'operation': 'MULTIPLY'}) - - multiply_12 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["Dimensions"], 1: (1.0000, 1.0000, 0.0000)}, - attrs={'operation': 'MULTIPLY'}) - - multiply_13 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["Foot Dimensions"], 1: (2.5000, 2.5000, 0.0000)}, - attrs={'operation': 'MULTIPLY'}) - - subtract_5 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: multiply_12.outputs["Vector"], 1: multiply_13.outputs["Vector"]}, - attrs={'operation': 'SUBTRACT'}) - - transform_geometry_2 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': 
grid.outputs["Mesh"], 'Translation': multiply_11.outputs["Vector"], 'Scale': subtract_5.outputs["Vector"]}) - - instance_on_points = nw.new_node(Nodes.InstanceOnPoints, - input_kwargs={'Points': transform_geometry_2, 'Instance': transform_geometry_8}) - - realize_instances = nw.new_node(Nodes.RealizeInstances, input_kwargs={'Geometry': instance_on_points}) - - join_geometry_5 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [join_geometry_4, realize_instances]}) - - reroute_10 = nw.new_node(Nodes.Reroute, input_kwargs={'Input': group_input.outputs["Count"]}) - - equal_2 = nw.new_node(Nodes.Compare, - input_kwargs={1: 4.0000, 2: reroute_10, 3: 4}, - attrs={'operation': 'EQUAL', 'data_type': 'INT'}) - - reroute_4 = nw.new_node(Nodes.Reroute, input_kwargs={'Input': combine_xyz_4}) - - multiply_14 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: reroute_4, 1: (0.0000, -0.5000, 1.0000)}, - attrs={'operation': 'MULTIPLY'}) - - multiply_15 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: reroute_4, 1: (0.0000, 0.5000, 1.0000)}, - attrs={'operation': 'MULTIPLY'}) - - equal_3 = nw.new_node(Nodes.Compare, - input_kwargs={1: 4.0000, 2: reroute_10, 3: 4}, - attrs={'operation': 'EQUAL', 'data_type': 'INT'}) - - reroute_11 = nw.new_node(Nodes.Reroute, input_kwargs={'Input': group_input.outputs["Reflection"]}) - - switch_7 = nw.new_node(Nodes.Switch, input_kwargs={0: equal_3, 4: reroute_11, 5: 1}, attrs={'input_type': 'INT'}) - - combine_xyz_15 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': 1.0000, 'Y': switch_7.outputs[1], 'Z': 1.1000}) - - multiply_16 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: multiply_15.outputs["Vector"], 1: combine_xyz_15}, - attrs={'operation': 'MULTIPLY'}) - - divide_3 = nw.new_node(Nodes.Math, input_kwargs={0: reroute_5, 1: ceil}, attrs={'operation': 'DIVIDE'}) - - combine_xyz_5 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': separate_xyz_10.outputs["X"], 'Y': divide_3, 'Z': separate_xyz_10.outputs["Z"]}) - - reroute_6 = nw.new_node(Nodes.Reroute, input_kwargs={'Input': combine_xyz_5}) - - multiply_17 = nw.new_node(Nodes.VectorMath, input_kwargs={0: reroute_6, 1: combine_xyz_15}, attrs={'operation': 'MULTIPLY'}) - - multiply_18 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: combine_xyz_5, 1: (1.0000, 1.0300, 1.0000)}, - attrs={'operation': 'MULTIPLY'}) - - seat_cushion = nw.new_node(nodegroup_corner_cube().name, - input_kwargs={'CenteringLoc': (0.0000, 0.5000, 0.0000), 'Dimensions': multiply_18.outputs["Vector"], 'Vertices X': 2, 'Vertices Y': 2, 'Vertices Z': 2}, - label='SeatCushion') - - upwards_part = nw.new_node(Nodes.Compare, input_kwargs={'A': nw.new_node(Nodes.Index), 'B': 2}, attrs={'data_type': 'INT', 'operation': 'EQUAL'}) - seat_cushion = tagging.tag_nodegroup(nw, seat_cushion, t.Subpart.SupportSurface, selection=upwards_part) - - index = nw.new_node(Nodes.Index) - - equal_4 = nw.new_node(Nodes.Compare, input_kwargs={2: index, 3: 1}, attrs={'operation': 'EQUAL', 'data_type': 'INT'}) - - store_named_attribute_1 = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': seat_cushion, 'Selection': equal_4, 'Name': 'TAG_support', 6: True}, - attrs={'data_type': 'BOOLEAN', 'domain': 'FACE'}) - - value = nw.new_node(Nodes.Value) - value.outputs[0].default_value = 1.0000 - - store_named_attribute_2 = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': store_named_attribute_1, 'Selection': value, 'Name': 'TAG_cushion', 6: True}, - attrs={'data_type': 'BOOLEAN', 'domain': 'FACE'}) - - combine_xyz_6 = 
nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': group_input.outputs["Seat Margin"], 'Y': group_input.outputs["Seat Margin"], 'Z': 1.0000}) - - transform_geometry_3 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': store_named_attribute_2, 'Scale': combine_xyz_6}) - - combine_xyz_11 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': group_input.outputs["Scaling footrest"], 'Y': 1.0000, 'Z': 1.1000}) - - transform_geometry_7 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': transform_geometry_3, 'Scale': combine_xyz_11}) - - nodegroup_array_fill_line_002 = nw.new_node(nodegroup_array_fill_line().name, - input_kwargs={'Line Start': multiply_14.outputs["Vector"], 'Line End': multiply_16.outputs["Vector"], 'Instance Dimensions': multiply_17.outputs["Vector"], 'Count': reroute_10, 'Instance': transform_geometry_7}) - - separate_xyz_17 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': multiply_16.outputs["Vector"]}) - - combine_xyz_21 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': separate_xyz_17.outputs["Z"]}) - - reroute_14 = nw.new_node(Nodes.Reroute, input_kwargs={'Input': ceil}) - - combine_xyz_20 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': 1.0000, 'Y': reroute_14, 'Z': 1.0000}) - - transform_geometry_13 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': transform_geometry_7, 'Scale': combine_xyz_20}) - - nodegroup_array_fill_line_002_1 = nw.new_node(nodegroup_array_fill_line().name, - input_kwargs={'Line End': combine_xyz_21, 'Count': 1, 'Instance': transform_geometry_13}) - - switch_9 = nw.new_node(Nodes.Switch, - input_kwargs={1: equal_2, 14: nodegroup_array_fill_line_002, 15: nodegroup_array_fill_line_002_1}) - - switch_3 = nw.new_node(Nodes.Switch, input_kwargs={1: group_input.outputs["Footrest"], 15: switch_9.outputs[6]}) - - nodegroup_array_fill_line_002_2 = nw.new_node(nodegroup_array_fill_line().name, - input_kwargs={'Line Start': multiply_14.outputs["Vector"], 'Line End': multiply_15.outputs["Vector"], 'Instance Dimensions': reroute_6, 'Count': reroute_14, 'Instance': transform_geometry_3}) - - join_geometry_9 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [switch_3.outputs[6], nodegroup_array_fill_line_002_2]}) - - subdivide_mesh = nw.new_node(Nodes.SubdivideMesh, input_kwargs={'Mesh': join_geometry_9, 'Level': 2}) - - separate_xyz_11 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': group_input.outputs["Seat Dimensions"]}) - - combine_xyz_7 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': group_input.outputs["Backrest Width"], 'Z': separate_xyz_11.outputs["Z"]}) - - add_1 = nw.new_node(Nodes.VectorMath, input_kwargs={0: multiply_14.outputs["Vector"], 1: combine_xyz_7}) - - add_2 = nw.new_node(Nodes.VectorMath, input_kwargs={0: multiply_15.outputs["Vector"], 1: combine_xyz_7}) - - separate_xyz_12 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': group_input.outputs["Dimensions"]}) - - subtract_6 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_12.outputs["Z"], 1: separate_xyz_11.outputs["Z"]}, - attrs={'operation': 'SUBTRACT'}) - - subtract_7 = nw.new_node(Nodes.Math, - input_kwargs={0: subtract_6, 1: group_input.outputs["Baseboard Height"]}, - attrs={'operation': 'SUBTRACT'}) - - combine_xyz_8 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': subtract_7, 'Y': divide_3, 'Z': group_input.outputs["Backrest Width"]}) - - seat_cushion_1 = nw.new_node(nodegroup_corner_cube().name, - input_kwargs={'CenteringLoc': (0.1000, 0.5000, 1.0000), 'Dimensions': combine_xyz_8, 'Vertices X': 2, 'Vertices Y': 2, 
'Vertices Z': 2}, - label='SeatCushion') - - extrude_mesh = nw.new_node(Nodes.ExtrudeMesh, input_kwargs={'Mesh': seat_cushion_1, 'Offset Scale': 0.0300}) - - scale_elements = nw.new_node(Nodes.ScaleElements, - input_kwargs={'Geometry': extrude_mesh.outputs["Mesh"], 'Selection': extrude_mesh.outputs["Top"], 'Scale': 0.6000}) - - subdivision_surface_1 = nw.new_node(Nodes.SubdivisionSurface, input_kwargs={'Mesh': scale_elements}) - - random_value = nw.new_node(Nodes.RandomValue, attrs={'data_type': 'FLOAT_VECTOR'}) - - store_named_attribute_3 = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': subdivision_surface_1, 'Name': 'UVMap', 3: random_value.outputs["Value"]}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - multiply_19 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Backrest Width"], 1: -1.0000}, - attrs={'operation': 'MULTIPLY'}) - - separate_xyz_13 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': group_input.outputs["Back Dimensions"]}) - - add_3 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz_13.outputs["X"], 1: 0.1000}) - - add_4 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_19, 1: add_3}) - - combine_xyz_9 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': add_4}) - - add_5 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Backrest Angle"], 1: -1.5708}) - - combine_xyz_10 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': add_5}) - - transform_geometry_4 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': store_named_attribute_3, 'Translation': combine_xyz_9, 'Rotation': combine_xyz_10, 'Scale': combine_xyz_6}) - - nodegroup_array_fill_line_003 = nw.new_node(nodegroup_array_fill_line().name, - input_kwargs={'Line Start': add_1.outputs["Vector"], 'Line End': add_2.outputs["Vector"], 'Instance Dimensions': reroute_6, 'Count': ceil, 'Instance': transform_geometry_4}) - - join_geometry_6 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [subdivide_mesh, nodegroup_array_fill_line_003]}) - - join_geometry_7 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [join_geometry_5, realize_instances, join_geometry_6]}) - - subdivide_mesh_1 = nw.new_node(Nodes.SubdivideMesh, input_kwargs={'Mesh': join_geometry_5, 'Level': 2}) - - join_geometry_8 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [subdivide_mesh_1, realize_instances, join_geometry_6]}) - - subdivision_surface_2 = nw.new_node(Nodes.SubdivisionSurface, input_kwargs={'Mesh': join_geometry_8, 'Level': 1}) - - switch_1 = nw.new_node(Nodes.Switch, input_kwargs={1: True, 14: join_geometry_7, 15: subdivision_surface_2}) - switch = nw.new_node(Nodes.Switch, input_kwargs={ - 1: group_input.outputs['Subdivide'], - 14: join_geometry_7, - 15: subdivision_surface_2 - }) - - bounding_box = nw.new_node(nodegroup_corner_cube().name, - input_kwargs={'CenteringLoc': (0.0000, 0.5000, -1.0000), 'Dimensions': group_input.outputs["Dimensions"], 'Vertices X': 2, 'Vertices Y': 2, 'Vertices Z': 2}, - label='BoundingBox') - - reroute_7 = nw.new_node(Nodes.Reroute, input_kwargs={'Input': bounding_box}) - - reroute_8 = nw.new_node(Nodes.Reroute, input_kwargs={'Input': reroute_7}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': switch_1.outputs[6], 'BoundingBox': reroute_8}, - attrs={'is_active_output': True}) - - - -def sofa_parameter_distribution(dimensions=None): - - if dimensions is None: - dimensions = ( - uniform(0.95, 1.1), - clip_gaussian(1.75, 0.75, 0.9, 3), - uniform(0.69, 0.97) - ) - - return { - 'Dimensions': 
dimensions, - 'Arm Dimensions': ( - uniform(1, 1), - uniform(0.06, 0.15), - uniform(0.5, 0.75), - ), - 'Back Dimensions': ( - uniform(0.15, 0.25), - 0.0000, - uniform(0.5, 0.75) - ), - 'Seat Dimensions': ( - dimensions[0], - uniform(0.7, 1), - uniform(0.15, 0.3) - ), - 'Foot Dimensions': ( - uniform(0.07, 0.25), - 0.06, - 0.06 - ), - 'Baseboard Height': uniform(0.05, 0.09), - 'Backrest Width': uniform(0.1, 0.2), - 'Seat Margin': uniform(0.9700, 1), - 'Backrest Angle': uniform(-0.15, -0.5), - - 'Arm Type': np.random.choice( - [ARM_TYPE_SQUARE, ARM_TYPE_ROUND, ARM_TYPE_ANGULAR], - p=[0.4, 0.2, 0.4] - ), - 'arm_width': uniform(0.6, 0.9), - 'Arm_height': uniform(0.7,1.0), - 'arms_angle': uniform(0.0, 1.08), - 'Footrest': True if uniform() > 0.5 and dimensions[1] > 2 else False, - 'Count': 1 if uniform()>0.2 else 4, - 'Scaling footrest': uniform(1.3, 1.6), - 'Reflection':1 if uniform()>0.5 else -1, - 'leg_type': True if uniform()>0.5 else False, - 'leg_dimensions': uniform(0.4,0.9), - 'leg_z':uniform(1.1, 2.5), - 'leg_faces':uniform(4,25) - } - - -class SofaFactory(AssetFactory): - def __init__(self, factory_seed): - from infinigen.assets.clothes import blanket - super().__init__(factory_seed) - with FixedSeed(factory_seed): - self.params = sofa_parameter_distribution() - #from infinigen.assets.scatters.clothes import ClothesCover - #self.clothes_scatter = ClothesCover(factory_fn=blanket.BlanketFactory, width=log_uniform(1, 1.5), - # size=uniform(.8, 1.2)) if uniform() < .3 else NoApply() - materials = AssetList['SofaFactory']() - self.sofa_fabric = materials['sofa_fabric'].assign_material() - - def create_placeholder(self, **_): - obj = butil.spawn_vert() - butil.modify_mesh( - obj, - 'NODES', - node_group=nodegroup_sofa_geometry(), - ng_inputs={**self.params, }, - apply=True - ) - tagging.tag_system.relabel_obj(obj) - surface.add_material(obj, self.sofa_fabric) - return obj - - def create_asset(self, i, placeholder, face_size, **_): - - hipoly = butil.copy(placeholder, keep_materials=True) - - butil.modify_mesh(hipoly, 'SUBSURF', levels=1, apply=True) - - with butil.SelectObjects(hipoly): - bpy.ops.object.shade_smooth() - - return hipoly - -class ArmChairFactory(SofaFactory): - - def __init__(self, factory_seed): - super().__init__(factory_seed) - with FixedSeed(factory_seed): - dimensions = ( - uniform(0.8, 1), - uniform(0.9, 1.1), - uniform(0.69, 0.97) - ) - self.params = sofa_parameter_distribution(dimensions=dimensions) \ No newline at end of file diff --git a/infinigen/assets/shelves/cabinet.py b/infinigen/assets/shelves/cabinet.py deleted file mode 100644 index 517816530..000000000 --- a/infinigen/assets/shelves/cabinet.py +++ /dev/null @@ -1,1003 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
- -# Authors: Beining Han - -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core import surface -from infinigen.core.placement.factory import AssetFactory -import numpy as np -from infinigen.core.util import blender as butil -from infinigen.core import tagging, tags as t - -import bpy -from infinigen.assets.shelves.utils import nodegroup_tagged_cube, blender_rotate -from infinigen.assets.shelves.large_shelf import LargeShelfBaseFactory, LargeShelfFactory, LargeShelfIkeaFactory -from infinigen.assets.materials.shelf_shaders import get_shelf_material -from infinigen.core.util.math import FixedSeed - - -@node_utils.to_nodegroup('nodegroup_node_group', singleton=False, type='GeometryNodeTree') -def nodegroup_node_group(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - cube = nw.new_node(Nodes.MeshCube, input_kwargs={'Size': (0.0120, 0.00060, 0.0400)}) - - cylinder = nw.new_node('GeometryNodeMeshCylinder', - input_kwargs={'Vertices': 64, 'Radius': 0.0100, 'Depth': 0.00050}) - - transform = nw.new_node(Nodes.Transform, input_kwargs={ - 'Geometry': cylinder.outputs["Mesh"], - 'Translation': (0.0050, 0.0000, 0.0000), - 'Rotation': (1.5708, 0.0000, 0.0000) - }) - - cube_1 = nw.new_node(Nodes.MeshCube, input_kwargs={'Size': (0.0200, 0.0006, 0.0120)}) - - transform_1 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': cube_1, 'Translation': (0.0080, 0.0000, 0.0000)}) - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [cube, transform, transform_1]}) - - group_input = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketFloat', 'attach_height', 0.1000), - ('NodeSocketFloat', 'door_width', 0.5000)]) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["door_width"]}, - attrs={'operation': 'MULTIPLY'}) - - subtract = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: 0.0181}, attrs={'operation': 'SUBTRACT'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': subtract, 'Z': group_input.outputs["attach_height"]}) - - transform_2 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': join_geometry_1, 'Translation': combine_xyz}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': transform_2}, - attrs={'is_active_output': True}) - - -@node_utils.to_nodegroup('nodegroup_knob_handle', singleton=False, type='GeometryNodeTree') -def nodegroup_knob_handle(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketFloatDistance', 'Radius', 0.0100), - ('NodeSocketFloat', 'thickness_1', 0.5000), ('NodeSocketFloat', 'thickness_2', 0.5000), - ('NodeSocketFloat', 'length', 0.5000), ('NodeSocketFloat', 'knob_mid_height', 0.0000), - ('NodeSocketFloat', 'edge_width', 0.5000), ('NodeSocketFloat', 'door_width', 0.5000)]) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["thickness_2"], 1: group_input.outputs["thickness_1"] - }) - - add_1 = nw.new_node(Nodes.Math, input_kwargs={0: add, 1: group_input.outputs["length"]}) - - cylinder = nw.new_node('GeometryNodeMeshCylinder', - input_kwargs={'Vertices': 64, 'Radius': group_input.outputs["Radius"], 'Depth': add_1 - }) - - subtract = nw.new_node(Nodes.Math, input_kwargs={ - 0: group_input.outputs["door_width"], - 1: group_input.outputs["edge_width"] - }, attrs={'operation': 'SUBTRACT'}) 
- - multiply = nw.new_node(Nodes.Math, input_kwargs={0: subtract, 1: -0.5000}, attrs={'operation': 'MULTIPLY'}) - - add_2 = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: -0.005}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: add_1}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_6 = nw.new_node(Nodes.CombineXYZ, input_kwargs={ - 'X': add_2, - 'Y': multiply_1, - 'Z': group_input.outputs["knob_mid_height"] - }) - - transform_6 = nw.new_node(Nodes.Transform, input_kwargs={ - 'Geometry': cylinder.outputs["Mesh"], - 'Translation': combine_xyz_6, - 'Rotation': (1.5708, 0.0000, 0.0000) - }) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': transform_6}, - attrs={'is_active_output': True}) - - -@node_utils.to_nodegroup('nodegroup_mid_board', singleton=False, type='GeometryNodeTree') -def nodegroup_mid_board(nw: NodeWrangler, **kwargs): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketFloat', 'height', 0.5000), - ('NodeSocketFloat', 'thickness', 0.5000), ('NodeSocketFloat', 'width', 0.5000)]) - - add = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["width"], 1: -0.0001}) - - add_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["thickness"], 1: 0.0000}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["height"]}, - attrs={'operation': 'MULTIPLY'}) - - multiply_k = nw.new_node(Nodes.Math, input_kwargs={0: add_1, 1: 0.5000}, attrs={'operation': 'MULTIPLY'}) - - add_k = nw.new_node(Nodes.Math, input_kwargs={0: multiply_k, 1: 0.004}) - - add_2 = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: -0.0001}) - - combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': add, 'Y': add_1, 'Z': add_2}) - - cube = nw.new_node(Nodes.MeshCube, - input_kwargs={'Size': combine_xyz_3, 'Vertices X': 5, 'Vertices Y': 5, 'Vertices Z': 5}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: multiply}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_4 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': add_k, 'Z': multiply_1}) - - transform_4 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': cube, 'Translation': combine_xyz_4}) - - set_material = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': transform_4, 'Material': kwargs['material'][0]}) - - combine_xyz_7 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': add, 'Y': add_1, 'Z': add_2}) - - cube_1 = nw.new_node(Nodes.MeshCube, - input_kwargs={'Size': combine_xyz_7, 'Vertices X': 5, 'Vertices Y': 5, 'Vertices Z': 5 - }) - - multiply_2 = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: 1.5000}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_8 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': add_k, 'Z': multiply_2}) - - transform_7 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': cube_1, 'Translation': combine_xyz_8}) - - set_material_1 = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': transform_7, 'Material': kwargs['material'][1]}) - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [set_material, set_material_1]}) - - realize_instances = nw.new_node(Nodes.RealizeInstances, input_kwargs={'Geometry': join_geometry_1}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': realize_instances, 'mid_height': multiply}, - attrs={'is_active_output': True}) - - -@node_utils.to_nodegroup('nodegroup_mid_board_001', singleton=False, type='GeometryNodeTree') -def 
nodegroup_mid_board_001(nw: NodeWrangler, **kwargs): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketFloat', 'height', 0.5000), - ('NodeSocketFloat', 'thickness', 0.5000), ('NodeSocketFloat', 'width', 0.5000)]) - - add = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["width"], 1: -0.0001}) - - add_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["thickness"], 1: 0.0000}) - - multiply_k = nw.new_node(Nodes.Math, input_kwargs={0: add_1, 1: 0.5000}, attrs={'operation': 'MULTIPLY'}) - - add_k = nw.new_node(Nodes.Math, input_kwargs={0: multiply_k, 1: 0.004}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["height"], 1: 1.0000}, - attrs={'operation': 'MULTIPLY'}) - - add_2 = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: -0.0001}) - - combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': add, 'Y': add_1, 'Z': add_2}) - - cube = nw.new_node(Nodes.MeshCube, - input_kwargs={'Size': combine_xyz_3, 'Vertices X': 5, 'Vertices Y': 5, 'Vertices Z': 5}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: multiply}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_4 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': add_k, 'Z': multiply_1}) - - transform_4 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': cube, 'Translation': combine_xyz_4}) - - set_material = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': transform_4, 'Material': kwargs['material'][0]}) - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': set_material}) - - realize_instances = nw.new_node(Nodes.RealizeInstances, input_kwargs={'Geometry': join_geometry_1}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': realize_instances, 'mid_height': multiply}, - attrs={'is_active_output': True}) - - -@node_utils.to_nodegroup('nodegroup_double_rampled_edge', singleton=False, type='GeometryNodeTree') -def nodegroup_double_rampled_edge(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketFloat', 'height', 0.5000), - ('NodeSocketFloat', 'thickness_2', 0.5000), ('NodeSocketFloat', 'width', 0.5000), - ('NodeSocketFloat', 'thickness_1', 0.5000), ('NodeSocketFloat', 'ramp_angle', 0.5000)]) - - add = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["height"], 1: 0.0000}) - - combine_xyz_10 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': add}) - - curve_line = nw.new_node(Nodes.CurveLine, input_kwargs={'End': combine_xyz_10}) - - curve_circle = nw.new_node(Nodes.CurveCircle, input_kwargs={'Resolution': 3, 'Radius': 0.0100}) - - endpoint_selection = nw.new_node(Nodes.EndpointSelection, input_kwargs={'End Size': 0}) - - add_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["width"], 1: 0.0000}) - - add_2 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["ramp_angle"], 1: 0.0000}) - - tangent = nw.new_node(Nodes.Math, input_kwargs={0: add_2}, attrs={'operation': 'TANGENT'}) - - add_3 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["thickness_2"], 1: 0.0000}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: tangent, 1: add_3}, attrs={'operation': 'MULTIPLY'}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: 2.0000, 1: multiply}, attrs={'operation': 'MULTIPLY'}) - - subtract = nw.new_node(Nodes.Math, input_kwargs={0: add_1, 1: multiply_1}, 
attrs={'operation': 'SUBTRACT'}) - - multiply_2 = nw.new_node(Nodes.Math, input_kwargs={0: subtract}, attrs={'operation': 'MULTIPLY'}) - - multiply_3 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_2, 1: -1.0000}, - attrs={'operation': 'MULTIPLY'}) - - add_4 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["thickness_1"], 1: 0.0000}) - - combine_xyz_7 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply_3, 'Y': add_4}) - - set_position = nw.new_node(Nodes.SetPosition, input_kwargs={ - 'Geometry': curve_circle.outputs["Curve"], - 'Selection': endpoint_selection, - 'Position': combine_xyz_7 - }) - - endpoint_selection_1 = nw.new_node(Nodes.EndpointSelection, input_kwargs={'Start Size': 0}) - - add_5 = nw.new_node(Nodes.Math, input_kwargs={0: add_4, 1: add_3}) - - combine_xyz_8 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply_3, 'Y': add_5}) - - set_position_1 = nw.new_node(Nodes.SetPosition, input_kwargs={ - 'Geometry': set_position, - 'Selection': endpoint_selection_1, - 'Position': combine_xyz_8 - }) - - index = nw.new_node(Nodes.Index) - - less_than = nw.new_node(Nodes.Math, input_kwargs={0: index, 1: 1.0100}, attrs={'operation': 'LESS_THAN'}) - - greater_than = nw.new_node(Nodes.Math, input_kwargs={0: index, 1: 0.9900}, - attrs={'operation': 'GREATER_THAN'}) - - op_and = nw.new_node(Nodes.BooleanMath, input_kwargs={0: less_than, 1: greater_than}) - - multiply_4 = nw.new_node(Nodes.Math, input_kwargs={0: add_1}, attrs={'operation': 'MULTIPLY'}) - - multiply_5 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_4, 1: -1.0000}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz_9 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply_5, 'Y': add_4}) - - set_position_2 = nw.new_node(Nodes.SetPosition, input_kwargs={ - 'Geometry': set_position_1, - 'Selection': op_and, - 'Position': combine_xyz_9 - }) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, input_kwargs={ - 'Curve': curve_line, - 'Profile Curve': set_position_2, - 'Fill Caps': True - }) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': add_1, 'Y': add_4, 'Z': add}) - - cube = nw.new_node(Nodes.MeshCube, input_kwargs={'Size': combine_xyz}) - - multiply_6 = nw.new_node(Nodes.Math, input_kwargs={0: add_4}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': multiply_6}) - - transform = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': cube, 'Translation': combine_xyz_2}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': subtract, 'Y': add_3, 'Z': add}) - - cube_1 = nw.new_node(Nodes.MeshCube, input_kwargs={'Size': combine_xyz_1}) - - multiply_7 = nw.new_node(Nodes.Math, input_kwargs={0: add_3}, attrs={'operation': 'MULTIPLY'}) - - add_6 = nw.new_node(Nodes.Math, input_kwargs={0: add_4, 1: multiply_7}) - - combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': add_6}) - - transform_1 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': cube_1, 'Translation': combine_xyz_3}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [transform, transform_1]}) - - multiply_8 = nw.new_node(Nodes.Math, input_kwargs={0: add}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_11 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': multiply_8}) - - transform_4 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': join_geometry, 'Translation': combine_xyz_11}) - - combine_xyz_12 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': add}) - - curve_line_1 = 
nw.new_node(Nodes.CurveLine, input_kwargs={'End': combine_xyz_12}) - - transform_2 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': set_position_2, 'Scale': (-1.0000, 1.0000, 1.0000)}) - - curve_to_mesh_1 = nw.new_node(Nodes.CurveToMesh, input_kwargs={ - 'Curve': curve_line_1, - 'Profile Curve': transform_2, - 'Fill Caps': True - }) - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [curve_to_mesh, transform_4, curve_to_mesh_1]}) - - merge_by_distance = nw.new_node(Nodes.MergeByDistance, - input_kwargs={'Geometry': join_geometry_1, 'Distance': 0.0001}) - - realize_instances = nw.new_node(Nodes.RealizeInstances, input_kwargs={'Geometry': merge_by_distance}) - - subdivide_mesh = nw.new_node(Nodes.SubdivideMesh, input_kwargs={'Mesh': realize_instances, 'Level': 4}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': subdivide_mesh}, - attrs={'is_active_output': True}) - - -@node_utils.to_nodegroup('nodegroup_ramped_edge', singleton=False, type='GeometryNodeTree') -def nodegroup_ramped_edge(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketFloat', 'height', 0.5000), - ('NodeSocketFloat', 'thickness_2', 0.5000), ('NodeSocketFloat', 'width', 0.5000), - ('NodeSocketFloat', 'thickness_1', 0.5000), ('NodeSocketFloat', 'ramp_angle', 0.5000)]) - - add = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["height"], 1: 0.0000}) - - combine_xyz_10 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': add}) - - curve_line = nw.new_node(Nodes.CurveLine, input_kwargs={'End': combine_xyz_10}) - - curve_circle = nw.new_node(Nodes.CurveCircle, input_kwargs={'Resolution': 3, 'Radius': 0.0100}) - - endpoint_selection = nw.new_node(Nodes.EndpointSelection, input_kwargs={'End Size': 0}) - - add_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["width"], 1: 0.0000}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: add_1}, attrs={'operation': 'MULTIPLY'}) - - add_2 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["ramp_angle"], 1: 0.0000}) - - tangent = nw.new_node(Nodes.Math, input_kwargs={0: add_2}, attrs={'operation': 'TANGENT'}) - - add_3 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["thickness_2"], 1: 0.0000}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: tangent, 1: add_3}, attrs={'operation': 'MULTIPLY'}) - - subtract = nw.new_node(Nodes.Math, input_kwargs={0: add_1, 1: multiply_1}, attrs={'operation': 'SUBTRACT'}) - - subtract_1 = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: subtract}, - attrs={'operation': 'SUBTRACT'}) - - add_4 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["thickness_1"], 1: 0.0000}) - - combine_xyz_7 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': subtract_1, 'Y': add_4}) - - set_position = nw.new_node(Nodes.SetPosition, input_kwargs={ - 'Geometry': curve_circle.outputs["Curve"], - 'Selection': endpoint_selection, - 'Position': combine_xyz_7 - }) - - endpoint_selection_1 = nw.new_node(Nodes.EndpointSelection, input_kwargs={'Start Size': 0}) - - add_5 = nw.new_node(Nodes.Math, input_kwargs={0: add_4, 1: add_3}) - - combine_xyz_8 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': subtract_1, 'Y': add_5}) - - set_position_1 = nw.new_node(Nodes.SetPosition, input_kwargs={ - 'Geometry': set_position, - 'Selection': endpoint_selection_1, - 'Position': combine_xyz_8 - }) - - index = nw.new_node(Nodes.Index) - - less_than = 
nw.new_node(Nodes.Math, input_kwargs={0: index, 1: 1.0100}, attrs={'operation': 'LESS_THAN'}) - - greater_than = nw.new_node(Nodes.Math, input_kwargs={0: index, 1: 0.9900}, - attrs={'operation': 'GREATER_THAN'}) - - op_and = nw.new_node(Nodes.BooleanMath, input_kwargs={0: less_than, 1: greater_than}) - - multiply_2 = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: -1.0000}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz_9 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply_2, 'Y': add_4}) - - set_position_2 = nw.new_node(Nodes.SetPosition, input_kwargs={ - 'Geometry': set_position_1, - 'Selection': op_and, - 'Position': combine_xyz_9 - }) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, input_kwargs={ - 'Curve': curve_line, - 'Profile Curve': set_position_2, - 'Fill Caps': True - }) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': add_1, 'Y': add_4, 'Z': add}) - - cube = nw.new_node(Nodes.MeshCube, input_kwargs={'Size': combine_xyz}) - - multiply_3 = nw.new_node(Nodes.Math, input_kwargs={0: add_4}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': multiply_3}) - - transform = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': cube, 'Translation': combine_xyz_2}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': subtract, 'Y': add_3, 'Z': add}) - - cube_1 = nw.new_node(Nodes.MeshCube, input_kwargs={'Size': combine_xyz_1}) - - multiply_4 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_1}, attrs={'operation': 'MULTIPLY'}) - - multiply_5 = nw.new_node(Nodes.Math, input_kwargs={0: add_3}, attrs={'operation': 'MULTIPLY'}) - - add_6 = nw.new_node(Nodes.Math, input_kwargs={0: add_4, 1: multiply_5}) - - combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply_4, 'Y': add_6}) - - transform_1 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': cube_1, 'Translation': combine_xyz_3}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [transform, transform_1]}) - - multiply_6 = nw.new_node(Nodes.Math, input_kwargs={0: add}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_11 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': multiply_6}) - - transform_4 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': join_geometry, 'Translation': combine_xyz_11}) - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [curve_to_mesh, transform_4]}) - - merge_by_distance = nw.new_node(Nodes.MergeByDistance, - input_kwargs={'Geometry': join_geometry_1, 'Distance': 0.0001}) - - realize_instances = nw.new_node(Nodes.RealizeInstances, input_kwargs={'Geometry': merge_by_distance}) - - subdivide_mesh = nw.new_node(Nodes.SubdivideMesh, input_kwargs={'Mesh': realize_instances, 'Level': 4}) - - multiply_7 = nw.new_node(Nodes.Math, input_kwargs={0: add_1, 1: -0.5000}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_4 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply_7}) - - transform_2 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': subdivide_mesh, 'Translation': combine_xyz_4}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': transform_2}, - attrs={'is_active_output': True}) - - -@node_utils.to_nodegroup('nodegroup_panel_edge_frame', singleton=False, type='GeometryNodeTree') -def nodegroup_panel_edge_frame(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketGeometry', 
'vertical_edge', None), - ('NodeSocketFloat', 'door_width', 0.5000), ('NodeSocketFloat', 'door_height', 0.0000), - ('NodeSocketGeometry', 'horizontal_edge', None)]) - - multiply_add = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["door_width"], 2: 0.0010}, - attrs={'operation': 'MULTIPLY_ADD'}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: multiply_add, 1: -1.0000}, - attrs={'operation': 'MULTIPLY'}) - - transform_7 = nw.new_node(Nodes.Transform, input_kwargs={ - 'Geometry': group_input.outputs["horizontal_edge"], - 'Translation': (0.0000, -0.0001, 0.0000), - 'Scale': (0.9999, 1.0000, 1.0000) - }) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_add, 1: 1.0000}, - attrs={'operation': 'MULTIPLY'}) - - add = nw.new_node(Nodes.Math, input_kwargs={0: multiply_1, 1: -0.0001}) - - add_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["door_height"], 1: 0.0001}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': add, 'Z': add_1}) - - transform_3 = nw.new_node(Nodes.Transform, input_kwargs={ - 'Geometry': transform_7, - 'Translation': combine_xyz_2, - 'Rotation': (0.0000, -1.5708, 0.0000) - }) - - add_2 = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: 0.0001}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': add_2}) - - transform_2 = nw.new_node(Nodes.Transform, input_kwargs={ - 'Geometry': transform_7, - 'Translation': combine_xyz_1, - 'Rotation': (0.0000, 1.5708, 0.0000) - }) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply_add}) - - transform = nw.new_node(Nodes.Transform, input_kwargs={ - 'Geometry': group_input.outputs["vertical_edge"], - 'Translation': combine_xyz - }) - - transform_1 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': transform, 'Scale': (-1.0000, 1.0000, 1.0000)}) - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [transform_3, transform_2, transform_1, transform]}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Value': multiply, 'Geometry': join_geometry_1}, - attrs={'is_active_output': True}) - - -def geometry_door_nodes(nw: NodeWrangler, **kwargs): - # Code generated using version 2.6.4 of the node_transpiler - - door_height = nw.new_node(Nodes.Value, label='door_height') - door_height.outputs[0].default_value = kwargs['door_height'] - - door_edge_thickness_2 = nw.new_node(Nodes.Value, label='door_edge_thickness_2') - door_edge_thickness_2.outputs[0].default_value = kwargs['edge_thickness_2'] - - door_edge_width = nw.new_node(Nodes.Value, label='door_edge_width') - door_edge_width.outputs[0].default_value = kwargs['edge_width'] - - door_edge_thickness_1 = nw.new_node(Nodes.Value, label='door_edge_thickness_1') - door_edge_thickness_1.outputs[0].default_value = kwargs['edge_thickness_1'] - - door_edge_ramp_angle = nw.new_node(Nodes.Value, label='door_edge_ramp_angle') - door_edge_ramp_angle.outputs[0].default_value = kwargs['edge_ramp_angle'] - - ramped_edge = nw.new_node(nodegroup_ramped_edge().name, input_kwargs={ - 'height': door_height, - 'thickness_2': door_edge_thickness_2, - 'width': door_edge_width, - 'thickness_1': door_edge_thickness_1, - 'ramp_angle': door_edge_ramp_angle - }) - - door_width = nw.new_node(Nodes.Value, label='door_width') - door_width.outputs[0].default_value = kwargs['door_width'] - - ramped_edge_1 = nw.new_node(nodegroup_ramped_edge().name, input_kwargs={ - 'height': door_width, - 'thickness_2': door_edge_thickness_2, - 'width': door_edge_width, - 
'thickness_1': door_edge_thickness_1, - 'ramp_angle': door_edge_ramp_angle - }) - - panel_edge_frame = nw.new_node(nodegroup_panel_edge_frame().name, input_kwargs={ - 'vertical_edge': ramped_edge, - 'door_width': door_width, - 'door_height': door_height, - 'horizontal_edge': ramped_edge_1 - }) - - add = nw.new_node(Nodes.Math, input_kwargs={0: panel_edge_frame.outputs["Value"], 1: 0.0001}) - - mid_board_thickness = nw.new_node(Nodes.Value, label='mid_board_thickness') - mid_board_thickness.outputs[0].default_value = kwargs['board_thickness'] - - if kwargs['has_mid_ramp']: - mid_board = nw.new_node(nodegroup_mid_board(material=kwargs['board_material']).name, input_kwargs={ - 'height': door_height, - 'thickness': mid_board_thickness, - 'width': door_width - }) - else: - mid_board = nw.new_node(nodegroup_mid_board_001(material=kwargs['board_material']).name, input_kwargs={ - 'height': door_height, - 'thickness': mid_board_thickness, - 'width': door_width - }) - - combine_xyz_5 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': add, 'Y': -0.0001, 'Z': mid_board.outputs["mid_height"]}) - - frame = [panel_edge_frame.outputs["Geometry"]] - if kwargs['has_mid_ramp']: - double_rampled_edge = nw.new_node(nodegroup_double_rampled_edge().name, input_kwargs={ - 'height': door_width, - 'thickness_2': door_edge_thickness_2, - 'width': door_edge_width, - 'thickness_1': door_edge_thickness_1, - 'ramp_angle': door_edge_ramp_angle - }) - - transform_5 = nw.new_node(Nodes.Transform, input_kwargs={ - 'Geometry': double_rampled_edge, - 'Translation': combine_xyz_5, - 'Rotation': (0.0000, 1.5708, 0.0000) - }) - frame.append(transform_5) - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': frame}) - - set_material_2 = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': join_geometry_1, 'Material': kwargs['frame_material'] - }) - - knob_raduis = nw.new_node(Nodes.Value, label='knob_raduis') - knob_raduis.outputs[0].default_value = kwargs['knob_R'] - - know_length = nw.new_node(Nodes.Value, label='know_length') - know_length.outputs[0].default_value = kwargs['knob_length'] - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: door_height}, attrs={'operation': 'MULTIPLY'}) - - knob_handle = nw.new_node(nodegroup_knob_handle().name, input_kwargs={ - 'Radius': knob_raduis, - 'thickness_1': door_edge_thickness_1, - 'thickness_2': door_edge_thickness_2, - 'length': know_length, - 'knob_mid_height': multiply, - 'edge_width': door_edge_width, - 'door_width': door_width - }) - - set_material_3 = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': knob_handle, 'Material': kwargs['frame_material']}) - - attach_gadgets = [] - - for h in kwargs['attach_height']: - attach_height = nw.new_node(Nodes.Value, label='attach_height') - attach_height.outputs[0].default_value = h - - attach = nw.new_node(nodegroup_node_group().name, - input_kwargs={'attach_height': attach_height, 'door_width': door_width}) - - set_material_1 = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': attach, 'Material': get_shelf_material('metal')}) - attach_gadgets.append(set_material_1) - - geos = [set_material_2, set_material_3, mid_board.outputs["Geometry"]] + attach_gadgets - join_geometry = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': geos}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: door_width, 1: -0.5000}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply}) - - transform = nw.new_node(Nodes.Transform, - 
input_kwargs={'Geometry': join_geometry, 'Translation': combine_xyz}) - - realize_instances_1 = nw.new_node(Nodes.RealizeInstances, input_kwargs={'Geometry': transform}) - - triangulate = nw.new_node('GeometryNodeTriangulate', input_kwargs={'Mesh': realize_instances_1}) - - transform_1 = nw.new_node(Nodes.Transform, input_kwargs={ - 'Geometry': triangulate, - 'Scale': (-1.0 if kwargs['door_left_hinge'] else 1.0, 1.0000, 1.0000) - }) - - transform_2 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': transform_1, 'Rotation': (0.0000, 0.0000, -1.5708)}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': transform_2}, - attrs={'is_active_output': True}) - - -def geometry_cabinet_nodes(nw: NodeWrangler, **kwargs): - # Code generated using version 2.6.4 of the node_transpiler - right_door_info = nw.new_node(Nodes.ObjectInfo, input_kwargs={'Object': kwargs['door'][0]}) - left_door_info = nw.new_node(Nodes.ObjectInfo, input_kwargs={'Object': kwargs['door'][1]}) - shelf_info = nw.new_node(Nodes.ObjectInfo, input_kwargs={'Object': kwargs['shelf']}) - - doors = [] - transform_r = nw.new_node(Nodes.Transform, input_kwargs={ - 'Geometry': right_door_info.outputs['Geometry'], - 'Translation': kwargs['door_hinge_pos'][0], - 'Rotation': (0, 0, kwargs['door_open_angle']) - }) - doors.append(transform_r) - if len(kwargs['door_hinge_pos']) > 1: - transform_l = nw.new_node(Nodes.Transform, input_kwargs={ - 'Geometry': left_door_info.outputs['Geometry'], - 'Translation': kwargs['door_hinge_pos'][1], - 'Rotation': (0, 0, kwargs['door_open_angle']) - }) - doors.append(transform_l) - - attaches = [] - for pos in kwargs['attach_pos']: - cube = nw.new_node(Nodes.MeshCube, input_kwargs={'Size': (0.0006, 0.0200, 0.04500)}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': -0.0100}) - - transform = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': cube, 'Translation': combine_xyz}) - - cube_1 = nw.new_node(Nodes.MeshCube, input_kwargs={'Size': (0.0005, 0.0340, 0.0200)}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [transform, cube_1]}) - - transform_1 = nw.new_node(Nodes.Transform, input_kwargs={ - 'Geometry': join_geometry, - 'Translation': (0.0000, -0.0170, 0.0000) - }) - - transform_2 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': transform_1, 'Rotation': (0.0000, 0.0000, -1.5708)}) - - transform_3 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': transform_2, 'Translation': pos}) - - attaches.append(transform_3) - - join_geometry_a = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': attaches}) - - set_material = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': join_geometry_a, 'Material': get_shelf_material('metal')}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, input_kwargs={ - 'Geometry': [shelf_info.outputs['Geometry']] + doors + [set_material] - }) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': join_geometry}, - attrs={'is_active_output': True}) - - -class CabinetDoorBaseFactory(AssetFactory): - def __init__(self, factory_seed, params={}, coarse=False): - super(CabinetDoorBaseFactory, self).__init__(factory_seed, coarse=coarse) - self.params = {} - - def get_asset_params(self, i=0): - params = self.params.copy() - if params.get('door_height', None) is None: - params['door_height'] = uniform(0.7, 2.2) - if params.get('door_width', None) is None: - params['door_width'] = uniform(0.3, 0.4) - if params.get('edge_thickness_1', None) is None: - 
params['edge_thickness_1'] = uniform(0.01, 0.02) - if params.get('edge_width', None) is None: - params['edge_width'] = uniform(0.03, 0.05) - if params.get('edge_thickness_2', None) is None: - params['edge_thickness_2'] = uniform(0.005, 0.01) - if params.get('edge_ramp_angle', None) is None: - params['edge_ramp_angle'] = uniform(0.6, 0.8) - params['board_thickness'] = params['edge_thickness_1'] - 0.005 - if params.get('knob_R', None) is None: - params['knob_R'] = uniform(0.003, 0.006) - if params.get('knob_length', None) is None: - params['knob_length'] = uniform(0.018, 0.035) - if params.get('attach_height', None) is None: - gap = uniform(0.05, 0.15) - params['attach_height'] = [gap, params['door_height'] - gap] - if params.get('has_mid_ramp', None) is None: - params['has_mid_ramp'] = np.random.choice([True, False], p=[0.6, 0.4]) - if params.get('door_left_hinge', None) is None: - params['door_left_hinge'] = False - - if params.get('frame_material', None) is None: - params['frame_material'] = np.random.choice(['white', 'black_wood', 'wood'], p=[0.5, 0.2, 0.3]) - if params.get('board_material', None) is None: - if params['has_mid_ramp']: - lower_mat = np.random.choice([params['frame_material'], 'glass'], p=[0.7, 0.3]) - upper_mat = np.random.choice([lower_mat, 'glass'], p=[0.6, 0.4]) - params['board_material'] = [lower_mat, upper_mat] - else: - params['board_material'] = [params['frame_material']] - - params = self.get_material_func(params) - return params - - def get_material_func(self, params, randomness=True): - params['frame_material'] = get_shelf_material(params['frame_material']) - materials = [] - if not isinstance(params['board_material'], list): - params['board_material'] = [params['board_material']] - for mat in params['board_material']: - materials.append(get_shelf_material(mat)) - params['board_material'] = materials - return params - - def create_asset(self, i=0, **params): - bpy.ops.mesh.primitive_plane_add(size=1, enter_editmode=False, align='WORLD', location=(0, 0, 0), - scale=(1, 1, 1)) - obj = bpy.context.active_object - - obj_params = self.get_asset_params(i) - surface.add_geomod(obj, geometry_door_nodes, apply=True, attributes=[], input_kwargs=obj_params) - - if params.get('ret_params', False): - return obj, obj_params - - return obj - - -class CabinetDoorIkeaFactory(CabinetDoorBaseFactory): - def __init__(self, factory_seed, params={}, coarse=False): - super(CabinetDoorIkeaFactory, self).__init__(factory_seed, coarse=coarse) - self.params = { - 'edge_thickness_1': 0.012, - 'edge_thickness_2': 0.008, - 'board_thickness': 0.006, - 'edge_width': 0.02, - 'edge_ramp_angle': 0.5, - 'knob_R': 0.004, - 'knob_length': 0.03, - 'has_mid_ramp': False, - 'attach_height': 0.08 - } - - def get_asset_params(self, i=0): - params = self.params.copy() - if params.get('door_height', None) is None: - params['door_height'] = uniform(0.7, 2.2) - if params.get('door_width', None) is None: - params['door_width'] = uniform(0.3, 0.4) - if params.get('door_left_hinge', None) is None: - params['door_left_hinge'] = False - - params['attach_height'] = [params['door_height'] - params['attach_height'], params['attach_height']] - params = self.get_material_func(params) - return params - - -class CabinetBaseFactory(AssetFactory): - def __init__(self, factory_seed, params={}, coarse=False): - super(CabinetBaseFactory, self).__init__(factory_seed, coarse=coarse) - self.shelf_params = {} - self.door_params = {} - self.mat_params = {} - self.shelf_fac = LargeShelfBaseFactory(factory_seed) - self.door_fac = 
CabinetDoorBaseFactory(factory_seed) - - def sample_params(self): - # Update fac params - pass - - def get_material_params(self): - with FixedSeed(self.factory_seed): - params = self.mat_params.copy() - if params.get('frame_material', None) is None: - params['frame_material'] = np.random.choice(['white', 'black_wood', 'wood'], p=[0.5, 0.2, 0.3]) - return params - - def get_shelf_params(self, i=0): - params = self.shelf_params.copy() - if params.get('shelf_cell_width', None) is None: - params['shelf_cell_width'] = [ - np.random.choice([0.76, 0.36], p=[0.5, 0.5]) * np.clip(normal(1., 0.1), 0.75, 1.25)] - if params.get('shelf_cell_height', None) is None: - num_v_cells = randint(3, 7) - shelf_cell_height = [] - for i in range(num_v_cells): - shelf_cell_height.append(0.3 * np.clip(normal(1., 0.06), 0.75, 1.25)) - params['shelf_cell_height'] = shelf_cell_height - if params.get('frame_material', None) is None: - params['frame_material'] = self.mat_params['frame_material'] - - return params - - def get_door_params(self, i=0): - params = self.door_params.copy() - - # get door params - shelf_width = self.shelf_params['shelf_width'] + self.shelf_params['side_board_thickness'] * 2 - if params.get('door_width', None) is None: - if shelf_width < 0.55: - params['door_width'] = shelf_width - params['num_door'] = 1 - else: - params['door_width'] = shelf_width / 2. - 0.0005 - params['num_door'] = 2 - if params.get('door_height', None) is None: - params['door_height'] = (self.shelf_params['division_board_z_translation'][-1] - - self.shelf_params['division_board_z_translation'][0] + self.shelf_params[ - 'division_board_thickness']) - if len(self.shelf_params['division_board_z_translation']) > 5 and np.random.choice([True, False], - p=[0.5, 0.5]): - params['door_height'] = (self.shelf_params['division_board_z_translation'][3] - - self.shelf_params['division_board_z_translation'][0] + - self.shelf_params['division_board_thickness']) - if params.get('frame_material', None) is None: - params['frame_material'] = self.mat_params['frame_material'] - - return params - - def get_cabinet_params(self, i=0): - params = dict() - - shelf_width = self.shelf_params['shelf_width'] + self.shelf_params['side_board_thickness'] * 2 - if self.door_params['num_door'] == 1: - params['door_hinge_pos'] = [(self.shelf_params['shelf_depth'] / 2. + 0.0025, -shelf_width / 2., - self.shelf_params['bottom_board_height'])] - params['door_open_angle'] = 0 - params['attach_pos'] = [( - self.shelf_params['shelf_depth'] / 2., -self.shelf_params['shelf_width'] / 2., - self.shelf_params['bottom_board_height'] + z) for z in self.door_params['attach_height']] - elif self.door_params['num_door'] == 2: - params['door_hinge_pos'] = [(self.shelf_params['shelf_depth'] / 2. + 0.008, -shelf_width / 2., - self.shelf_params['bottom_board_height']), ( - self.shelf_params['shelf_depth'] / 2. 
+ 0.008, shelf_width / 2., - self.shelf_params['bottom_board_height'])] - params['door_open_angle'] = 0 - params['attach_pos'] = [( - self.shelf_params['shelf_depth'] / 2., -self.shelf_params['shelf_width'] / 2., - self.shelf_params['bottom_board_height'] + z) for z in self.door_params['attach_height']] + [( - self.shelf_params['shelf_depth'] / 2., self.shelf_params['shelf_width'] / 2., - self.shelf_params['bottom_board_height'] + z) for z in self.door_params['attach_height']] - else: - raise NotImplementedError - - return params - - def get_cabinet_components(self, i): - # update material params - self.sample_params() - self.mat_params = self.get_material_params() - - # create shelf - shelf_params = self.get_shelf_params(i=i) - self.shelf_fac.params = shelf_params - shelf, shelf_params = self.shelf_fac.create_asset(i=i, ret_params=True) - shelf.name = 'cabinet_frame' - self.shelf_params = shelf_params - - # create doors - door_params = self.get_door_params(i=i) - self.door_fac.params = door_params - self.door_fac.params['door_left_hinge'] = False - right_door, door_obj_params = self.door_fac.create_asset(i=i, ret_params=True) - right_door.name = 'cabinet_right_door' - self.door_fac.params = door_obj_params - self.door_fac.params['door_left_hinge'] = True - left_door, _ = self.door_fac.create_asset(i=i, ret_params=True) - left_door.name = 'cabinet_left_door' - self.door_params = door_obj_params - - return shelf, right_door, left_door - - def create_asset(self, i=0, **params): - bpy.ops.mesh.primitive_plane_add(size=1, enter_editmode=False, align='WORLD', location=(0, 0, 0), - scale=(1, 1, 1)) - obj = bpy.context.active_object - - shelf, right_door, left_door = self.get_cabinet_components(i=i) - - # create cabinet - cabinet_params = self.get_cabinet_params(i=i) - surface.add_geomod(obj, geometry_cabinet_nodes, attributes=[], input_kwargs={ - 'door': [right_door, left_door], - 'shelf': shelf, - 'door_hinge_pos': cabinet_params['door_hinge_pos'], - 'door_open_angle': cabinet_params['door_open_angle'], - 'attach_pos': cabinet_params['attach_pos'] - }) - butil.delete([shelf, left_door, right_door]) - return obj - - -class CabinetFactory(CabinetBaseFactory): - def sample_params(self): - params = dict() - params['Dimensions'] = (uniform(0.25, 0.35), uniform(0.3, 0.7), uniform(0.9, 1.8)) - - params['bottom_board_height'] = 0.083 - params['shelf_depth'] = params['Dimensions'][0] - 0.01 - num_h = int((params['Dimensions'][2] - 0.083) / 0.3) - params['shelf_cell_height'] = [(params['Dimensions'][2] - 0.083) / num_h for _ in range(num_h)] - params['shelf_cell_width'] = [params['Dimensions'][1]] - self.shelf_params = self.shelf_fac.sample_params() - diff --git a/infinigen/assets/shelves/cell_shelf.py b/infinigen/assets/shelves/cell_shelf.py deleted file mode 100644 index 781e36496..000000000 --- a/infinigen/assets/shelves/cell_shelf.py +++ /dev/null @@ -1,900 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
- -# Authors: Beining Han - -from numpy.random import uniform, normal, randint -import numpy as np -import bpy - -from infinigen.assets.materials import metal -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core import surface -from infinigen.core.placement.factory import AssetFactory - -from infinigen.core.util import blender as butil, math as mu -from infinigen.core import tagging, tags as t - -from infinigen.assets.shelves.utils import nodegroup_tagged_cube -from infinigen.assets.materials.shelf_shaders import ( - shader_shelves_white, shader_shelves_white_sampler, - shader_shelves_black_wood, shader_shelves_black_wood_sampler, - shader_shelves_wood, shader_shelves_wood_sampler, - shader_shelves_white_metallic, shader_shelves_white_metallic_sampler, - shader_shelves_black_metallic, shader_shelves_black_metallic_sampler) - -from infinigen.assets.utils.object import new_bbox -from infinigen.core.util.math import FixedSeed - -@node_utils.to_nodegroup('nodegroup_screw_head', singleton=False, type='GeometryNodeTree') -def nodegroup_screw_head(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - cylinder = nw.new_node('GeometryNodeMeshCylinder', input_kwargs={'Radius': 0.0050, 'Depth': 0.0010}) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Z', 0.5000), - ('NodeSocketFloat', 'leg', 0.5000), - ('NodeSocketFloat', 'X', 0.5000), - ('NodeSocketFloat', 'external', 0.5000), - ('NodeSocketFloat', 'depth', 0.5000)]) - - add = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["external"], 1: 0.0000}) - - subtract = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["X"], 1: add}, - attrs={'operation': 'SUBTRACT'}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: subtract}, attrs={'operation': 'MULTIPLY'}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: add}, attrs={'operation': 'MULTIPLY'}) - - add_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Z"], 1: group_input.outputs["leg"]}) - - multiply_2 = nw.new_node(Nodes.Math, input_kwargs={0: add, 1: 2.0000}, attrs={'operation': 'MULTIPLY'}) - - add_2 = nw.new_node(Nodes.Math, input_kwargs={0: add_1, 1: multiply_2}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply, 'Y': multiply_1, 'Z': add_2}) - - transform_2 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': cylinder.outputs["Mesh"], 'Translation': combine_xyz}) - - subtract_1 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["depth"], 1: multiply_1}, - attrs={'operation': 'SUBTRACT'}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply, 'Y': subtract_1, 'Z': add_2}) - - transform_3 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': cylinder.outputs["Mesh"], 'Translation': combine_xyz_1}) - - multiply_3 = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: -1.0000}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply_3, 'Y': subtract_1, 'Z': add_2}) - - transform_4 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': cylinder.outputs["Mesh"], 'Translation': combine_xyz_2}) - - combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply_3, 'Y': multiply_1, 'Z': add_2}) - - transform_5 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': cylinder.outputs["Mesh"], 'Translation': combine_xyz_3}) - - join_geometry_3 = 
nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [transform_2, transform_3, transform_4, transform_5]}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': join_geometry_3}, - attrs={'is_active_output': True}) - - -@node_utils.to_nodegroup('nodegroup_base_frame', singleton=False, type='GeometryNodeTree') -def nodegroup_base_frame(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'leg_height', 0.5000), - ('NodeSocketFloat', 'leg_size', 0.5000), - ('NodeSocketFloat', 'depth', 0.5000), - ('NodeSocketFloat', 'bottom_x', 0.5000)]) - - add = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["leg_size"], 1: 0.0000}) - - add_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["leg_height"], 1: 0.0000}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': add, 'Y': add, 'Z': add_1}) - - cube = nw.new_node(Nodes.MeshCube, - input_kwargs={'Size': combine_xyz, 'Vertices X': 5, 'Vertices Y': 5, 'Vertices Z': 5}) - - add_2 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["bottom_x"], 1: 0.0000}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: add_2}, attrs={'operation': 'MULTIPLY'}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: add}, attrs={'operation': 'MULTIPLY'}) - - subtract = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: multiply_1}, attrs={'operation': 'SUBTRACT'}) - - multiply_2 = nw.new_node(Nodes.Math, input_kwargs={0: add_1}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': subtract, 'Y': multiply_1, 'Z': multiply_2}) - - transform_2 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': cube, 'Translation': combine_xyz_1}) - - multiply_3 = nw.new_node(Nodes.Math, input_kwargs={0: subtract, 1: -1.0000}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply_3, 'Y': multiply_1, 'Z': multiply_2}) - - transform_3 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': cube, 'Translation': combine_xyz_2}) - - add_3 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["depth"], 1: 0.0000}) - - add_4 = nw.new_node(Nodes.Math, input_kwargs={0: add_3, 1: 0.0000}) - - subtract_1 = nw.new_node(Nodes.Math, input_kwargs={0: add_4, 1: multiply_1}, attrs={'operation': 'SUBTRACT'}) - - combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': subtract, 'Y': subtract_1, 'Z': multiply_2}) - - transform_4 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': cube, 'Translation': combine_xyz_3}) - - combine_xyz_4 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply_3, 'Y': subtract_1, 'Z': multiply_2}) - - transform_5 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': cube, 'Translation': combine_xyz_4}) - - multiply_4 = nw.new_node(Nodes.Math, input_kwargs={0: add, 1: 2.0000}, attrs={'operation': 'MULTIPLY'}) - - subtract_2 = nw.new_node(Nodes.Math, input_kwargs={0: add_2, 1: multiply_4}, attrs={'operation': 'SUBTRACT'}) - - combine_xyz_5 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': subtract_2, 'Y': add, 'Z': add}) - - cube_1 = nw.new_node(Nodes.MeshCube, - input_kwargs={'Size': combine_xyz_5, 'Vertices X': 5, 'Vertices Y': 5, 'Vertices Z': 5}) - - subtract_3 = nw.new_node(Nodes.Math, input_kwargs={0: add_1, 1: multiply_1}, attrs={'operation': 'SUBTRACT'}) - - combine_xyz_6 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': multiply_1, 
'Z': subtract_3}) - - transform_6 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': cube_1, 'Translation': combine_xyz_6}) - - add_5 = nw.new_node(Nodes.Math, input_kwargs={0: add_3, 1: 0.0000}) - - subtract_4 = nw.new_node(Nodes.Math, input_kwargs={0: add_5, 1: multiply_1}, attrs={'operation': 'SUBTRACT'}) - - combine_xyz_7 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': subtract_4, 'Z': subtract_3}) - - transform_7 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': cube_1, 'Translation': combine_xyz_7}) - - subtract_5 = nw.new_node(Nodes.Math, input_kwargs={0: add_3, 1: multiply_4}, attrs={'operation': 'SUBTRACT'}) - - combine_xyz_8 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': add, 'Y': subtract_5, 'Z': add}) - - cube_2 = nw.new_node(Nodes.MeshCube, - input_kwargs={'Size': combine_xyz_8, 'Vertices X': 5, 'Vertices Y': 5, 'Vertices Z': 5}) - - subtract_6 = nw.new_node(Nodes.Math, input_kwargs={0: add_2, 1: add}, attrs={'operation': 'SUBTRACT'}) - - multiply_5 = nw.new_node(Nodes.Math, input_kwargs={0: subtract_6}, attrs={'operation': 'MULTIPLY'}) - - multiply_6 = nw.new_node(Nodes.Math, input_kwargs={0: subtract_5}, attrs={'operation': 'MULTIPLY'}) - - add_6 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_6, 1: add}) - - subtract_7 = nw.new_node(Nodes.Math, input_kwargs={0: add_1, 1: multiply_1}, attrs={'operation': 'SUBTRACT'}) - - combine_xyz_9 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply_5, 'Y': add_6, 'Z': subtract_7}) - - transform_8 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': cube_2, 'Translation': combine_xyz_9}) - - multiply_7 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_5, 1: -1.0000}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_10 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply_7, 'Y': add_6, 'Z': subtract_7}) - - transform_9 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': cube_2, 'Translation': combine_xyz_10}) - - join_geometry_3 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={ - 'Geometry': [transform_2, transform_3, transform_4, transform_5, transform_6, - transform_7, transform_8, transform_9]}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': join_geometry_3}, - attrs={'is_active_output': True}) - - -@node_utils.to_nodegroup('nodegroup_back_board', singleton=False, type='GeometryNodeTree') -def nodegroup_back_board(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'X', 0.0000), - ('NodeSocketFloat', 'Z', 0.5000), - ('NodeSocketFloat', 'leg', 0.5000), - ('NodeSocketFloat', 'external', 0.5000)]) - - add = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Z"], 1: 0.0000}) - - combine_xyz_4 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': group_input.outputs["X"], 'Y': 0.01, 'Z': add}) - - cube = nw.new_node(Nodes.MeshCube, - input_kwargs={'Size': combine_xyz_4, 'Vertices X': 5, 'Vertices Y': 5, 'Vertices Z': 5}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: add}, attrs={'operation': 'MULTIPLY'}) - - add_1 = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: group_input.outputs["leg"]}) - - add_2 = nw.new_node(Nodes.Math, input_kwargs={0: add_1, 1: group_input.outputs["external"]}) - - combine_xyz_5 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': add_2}) - - transform_6 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': cube, 'Translation': combine_xyz_5}) - - group_output = nw.new_node(Nodes.GroupOutput, 
input_kwargs={'Geometry': transform_6}, - attrs={'is_active_output': True}) - - -@node_utils.to_nodegroup('nodegroup_attach_gadget', singleton=False, type='GeometryNodeTree') -def nodegroup_attach_gadget(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'z', 0.5000), - ('NodeSocketFloat', 'base_leg', 0.5000), - ('NodeSocketFloat', 'x', 0.5000), - ('NodeSocketFloat', 'thickness', 0.5000), - ('NodeSocketFloat', 'size', 0.5000)]) - - add = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["size"], 1: 0.0000}) - - combine_xyz_4 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': add, 'Y': 0.0010, 'Z': add}) - - cube = nw.new_node(Nodes.MeshCube, input_kwargs={'Size': combine_xyz_4}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["x"]}, attrs={'operation': 'MULTIPLY'}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: multiply, 1: group_input.outputs["thickness"]}, - attrs={'operation': 'SUBTRACT'}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: add}, attrs={'operation': 'MULTIPLY'}) - - subtract_1 = nw.new_node(Nodes.Math, input_kwargs={0: subtract, 1: multiply_1}, attrs={'operation': 'SUBTRACT'}) - - multiply_2 = nw.new_node(Nodes.Math, input_kwargs={0: subtract_1, 1: -1.0000}, attrs={'operation': 'MULTIPLY'}) - - add_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["base_leg"], 1: group_input.outputs["z"]}) - - add_2 = nw.new_node(Nodes.Math, input_kwargs={0: add_1 , 1: group_input.outputs["thickness"]}) - - add_3 = nw.new_node(Nodes.Math, input_kwargs={0: add_2, 1: -0.02}) - - subtract_2 = nw.new_node(Nodes.Math, input_kwargs={0: add_3, 1: multiply_1}, attrs={'operation': 'SUBTRACT'}) - - combine_xyz_5 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply_2, 'Z': subtract_2}) - - transform_6 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': cube, 'Translation': combine_xyz_5}) - - combine_xyz_6 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': subtract_1, 'Z': subtract_2}) - - transform_7 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': cube, 'Translation': combine_xyz_6}) - - join_geometry_5 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [transform_6, transform_7]}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': join_geometry_5}, - attrs={'is_active_output': True}) - - -@node_utils.to_nodegroup('nodegroup_h_division_placement', singleton=False, type='GeometryNodeTree') -def nodegroup_h_division_placement(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'depth', 0.5000), - ('NodeSocketFloat', 'cell_size', 0.5000), - ('NodeSocketFloat', 'leg_height', 0.5000), - ('NodeSocketFloat', 'division_board_thickness', 0.5000), - ('NodeSocketFloat', 'external_board_thickness', 0.5000), - ('NodeSocketFloat', 'index', 0.5000)]) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["depth"]}, attrs={'operation': 'MULTIPLY'}) - - add = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["index"], 1: 0.0000}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: add, 1: group_input.outputs["cell_size"]}, - attrs={'operation': 'MULTIPLY'}) - - add_1 = nw.new_node(Nodes.Math, input_kwargs={0: add, 1: -1.0000}) - - add_2 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["external_board_thickness"], 1: 
0.0000}) - - multiply_2 = nw.new_node(Nodes.Math, input_kwargs={0: add_1, 1: add_2}, attrs={'operation': 'MULTIPLY'}) - - add_3 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_1, 1: multiply_2}) - - add_4 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["division_board_thickness"], - 1: group_input.outputs["leg_height"]}) - - multiply_3 = nw.new_node(Nodes.Math, input_kwargs={0: add_2}, attrs={'operation': 'MULTIPLY'}) - - add_5 = nw.new_node(Nodes.Math, input_kwargs={0: add_4, 1: multiply_3}) - - add_6 = nw.new_node(Nodes.Math, input_kwargs={0: add_3, 1: add_5}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': multiply, 'Z': add_6}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Vector': combine_xyz}, - attrs={'is_active_output': True}) - - -@node_utils.to_nodegroup('nodegroup_h_division_board', singleton=False, type='GeometryNodeTree') -def nodegroup_h_division_board(nw: NodeWrangler, tag_support=False): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'cell_size', 0.5000), - ('NodeSocketFloat', 'horizontal_cell_num', 0.5000), - ('NodeSocketFloat', 'division_board_thickness', 0.5000), - ('NodeSocketFloat', 'depth', 0.0000)]) - - add = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["horizontal_cell_num"], 1: 0.0000}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: add, 1: group_input.outputs["cell_size"]}, - attrs={'operation': 'MULTIPLY'}) - - add_1 = nw.new_node(Nodes.Math, input_kwargs={0: add, 1: -1.0000}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: add_1, 1: group_input.outputs["division_board_thickness"]}, - attrs={'operation': 'MULTIPLY'}) - - add_2 = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: multiply_1}) - - add_3 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["division_board_thickness"], 1: 0.0000}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': add_2, 'Y': group_input.outputs["depth"], 'Z': add_3}) - if tag_support: - cube = nw.new_node(nodegroup_tagged_cube().name, input_kwargs={'Size': combine_xyz}) - else: - cube = nw.new_node(Nodes.MeshCube, - input_kwargs={'Size': combine_xyz, 'Vertices X': 5, 'Vertices Y': 5, 'Vertices Z': 5}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Mesh': cube}, attrs={'is_active_output': True}) - - -@node_utils.to_nodegroup('nodegroup_v_division_board_placement', singleton=False, type='GeometryNodeTree') -def nodegroup_v_division_board_placement(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'depth', 0.5000), - ('NodeSocketFloat', 'base_leg', 0.5000), - ('NodeSocketFloat', 'external_thickness', 0.5000), - ('NodeSocketFloat', 'side_z', 0.5000), - ('NodeSocketFloat', 'index', 0.5000), - ('NodeSocketFloat', 'h_cell_num', 0.5000), - ('NodeSocketFloat', 'division_thickness', 0.5000), - ('NodeSocketFloat', 'cell_size', 0.5000)]) - - add = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["h_cell_num"], 1: 0.0000}) - - add_1 = nw.new_node(Nodes.Math, input_kwargs={0: add, 1: -1.0000}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={1: add_1}, attrs={'operation': 'MULTIPLY'}) - - add_2 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["index"], 1: 0.0000}) - - subtract = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: add_2}, attrs={'operation': 
'SUBTRACT'}) - - add_3 = nw.new_node(Nodes.Math, input_kwargs={0: subtract}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: add_3, 1: group_input.outputs["division_thickness"]}, - attrs={'operation': 'MULTIPLY'}) - - multiply_2 = nw.new_node(Nodes.Math, input_kwargs={0: add}, attrs={'operation': 'MULTIPLY'}) - - subtract_1 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_2, 1: add_2}, attrs={'operation': 'SUBTRACT'}) - - multiply_3 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["cell_size"], 1: subtract_1}, - attrs={'operation': 'MULTIPLY'}) - - add_4 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_1, 1: multiply_3}) - - multiply_4 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["depth"]}, - attrs={'operation': 'MULTIPLY'}) - - add_5 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["base_leg"], 1: group_input.outputs["external_thickness"]}) - - multiply_5 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["side_z"]}, - attrs={'operation': 'MULTIPLY'}) - - add_6 = nw.new_node(Nodes.Math, input_kwargs={0: add_5, 1: multiply_5}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': add_4, 'Y': multiply_4, 'Z': add_6}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Vector': combine_xyz_1}, - attrs={'is_active_output': True}) - - -@node_utils.to_nodegroup('nodegroup_v_division_board', singleton=False, type='GeometryNodeTree') -def nodegroup_v_division_board(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'division_board_thickness', 0.0000), - ('NodeSocketFloat', 'depth', 0.0000), - ('NodeSocketFloat', 'cell_size', 0.5000), - ('NodeSocketFloat', 'vertical_cell_num', 0.5000)]) - - add = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["vertical_cell_num"], 1: 0.0000}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["cell_size"], 1: add}, - attrs={'operation': 'MULTIPLY'}) - - subtract = nw.new_node(Nodes.Math, input_kwargs={0: add, 1: 1.0000}, attrs={'operation': 'SUBTRACT'}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: subtract, 1: group_input.outputs["division_board_thickness"]}, - attrs={'operation': 'MULTIPLY'}) - - add_1 = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: multiply_1}) - - add_200 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["depth"], 1: -0.001}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': group_input.outputs["division_board_thickness"], - 'Y': add_200, 'Z': add_1}) - - cube = nw.new_node(Nodes.MeshCube, - input_kwargs={'Size': combine_xyz, 'Vertices X': 5, 'Vertices Y': 5, 'Vertices Z': 5}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Mesh': cube, 'Value': add_1}, - attrs={'is_active_output': True}) - - -@node_utils.to_nodegroup('nodegroup_top_bottom_board', singleton=False, type='GeometryNodeTree') -def nodegroup_top_bottom_board(nw: NodeWrangler, tag_support=False): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'base_leg_height', 0.5000), - ('NodeSocketFloat', 'horizontal_cell_num', 0.5000), - ('NodeSocketFloat', 'vertical_cell_num', 0.5000), - ('NodeSocketFloat', 'cell_size', 0.5000), - ('NodeSocketFloat', 'depth', 0.5000), - ('NodeSocketFloat', 'division_board_thickness', 0.5000), - ('NodeSocketFloat', 
'external_board_thickness', 0.5000)]) - - add = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["external_board_thickness"], 1: 0.0000}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: add, 1: 2.0000}, attrs={'operation': 'MULTIPLY'}) - - add_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["division_board_thickness"], 1: 0.0000}) - - add_2 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["horizontal_cell_num"], 1: 0.0000}) - - add_3 = nw.new_node(Nodes.Math, input_kwargs={0: add_2, 1: -1.0000}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: add_1, 1: add_3}, attrs={'operation': 'MULTIPLY'}) - - add_4 = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: multiply_1}) - - add_5 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["cell_size"], 1: 0.0000}) - - multiply_2 = nw.new_node(Nodes.Math, input_kwargs={0: add_5, 1: add_2}, attrs={'operation': 'MULTIPLY'}) - - add_6 = nw.new_node(Nodes.Math, input_kwargs={0: add_4, 1: multiply_2}) - - add_7 = nw.new_node(Nodes.Math, input_kwargs={0: add_6, 1: 0.0020}) - - add_8 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["depth"], 1: 0.0000}) - - add_9 = nw.new_node(Nodes.Math, input_kwargs={0: add_8, 1: 0.0000}) - - combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': add_7, 'Y': add_9, 'Z': add}) - - if tag_support: - cube_1 = nw.new_node(nodegroup_tagged_cube().name, input_kwargs={'Size': combine_xyz_3}) - else: - cube_1 = nw.new_node(Nodes.MeshCube, - input_kwargs={'Size': combine_xyz_3, 'Vertices X': 5, 'Vertices Y': 5, 'Vertices Z': 5}) - - multiply_3 = nw.new_node(Nodes.Math, input_kwargs={0: add_8}, attrs={'operation': 'MULTIPLY'}) - - multiply_4 = nw.new_node(Nodes.Math, input_kwargs={0: add}, attrs={'operation': 'MULTIPLY'}) - - add_10 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_4, 1: group_input.outputs["base_leg_height"]}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': multiply_3, 'Z': add_10}) - - transform_2 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': cube_1, 'Translation': combine_xyz}) - - add_11 = nw.new_node(Nodes.Math, input_kwargs={0: add_10, 1: add}) - - add_12 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["vertical_cell_num"], 1: 0.0000}) - - multiply_5 = nw.new_node(Nodes.Math, input_kwargs={0: add_12, 1: add_5}, attrs={'operation': 'MULTIPLY'}) - - add_13 = nw.new_node(Nodes.Math, input_kwargs={0: add_11, 1: multiply_5}) - - add_14 = nw.new_node(Nodes.Math, input_kwargs={0: add_12, 1: -1.0000}) - - multiply_6 = nw.new_node(Nodes.Math, input_kwargs={0: add_1, 1: add_14}, attrs={'operation': 'MULTIPLY'}) - - add_15 = nw.new_node(Nodes.Math, input_kwargs={0: add_13, 1: multiply_6}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': multiply_3, 'Z': add_15}) - - transform = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': cube_1, 'Translation': combine_xyz_1}) - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [transform_2, transform]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': join_geometry_1, 'x': add_7}, - attrs={'is_active_output': True}) - - -@node_utils.to_nodegroup('nodegroup_side_board', singleton=False, type='GeometryNodeTree') -def nodegroup_side_board(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'base_leg_height', 0.5000), - ('NodeSocketFloat', 
'horizontal_cell_num', 0.5000), - ('NodeSocketFloat', 'vertical_cell_num', 0.5000), - ('NodeSocketFloat', 'cell_size', 0.5000), - ('NodeSocketFloat', 'depth', 0.5000), - ('NodeSocketFloat', 'division_thickness', 0.5000), - ('NodeSocketFloat', 'external_thickness', 0.5000)]) - - add = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["external_thickness"], 1: 0.0000}) - - add_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["depth"], 1: 0.0000}) - - add_2 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["vertical_cell_num"], 1: 0.0000}) - - subtract = nw.new_node(Nodes.Math, input_kwargs={0: add_2, 1: 1.0000}, attrs={'operation': 'SUBTRACT'}) - - add_3 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["division_thickness"], 1: 0.0000}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: subtract, 1: add_3}, attrs={'operation': 'MULTIPLY'}) - - add_4 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["cell_size"], 1: 0.0000}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: add_2, 1: add_4}, attrs={'operation': 'MULTIPLY'}) - - add_5 = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: multiply_1}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': add, 'Y': add_1, 'Z': add_5}) - - cube = nw.new_node(Nodes.MeshCube, - input_kwargs={'Size': combine_xyz, 'Vertices X': 5, 'Vertices Y': 5, 'Vertices Z': 5}) - - multiply_2 = nw.new_node(Nodes.Math, - input_kwargs={0: add_4, 1: group_input.outputs["horizontal_cell_num"]}, - attrs={'operation': 'MULTIPLY'}) - - subtract_1 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["horizontal_cell_num"], 1: 1.0000}, - attrs={'operation': 'SUBTRACT'}) - - multiply_3 = nw.new_node(Nodes.Math, input_kwargs={0: add_3, 1: subtract_1}, attrs={'operation': 'MULTIPLY'}) - - add_6 = nw.new_node(Nodes.Math, input_kwargs={0: add, 1: multiply_3}) - - add_7 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_2, 1: add_6}) - - multiply_4 = nw.new_node(Nodes.Math, input_kwargs={1: add_7}, attrs={'operation': 'MULTIPLY'}) - - multiply_5 = nw.new_node(Nodes.Math, input_kwargs={0: add_1}, attrs={'operation': 'MULTIPLY'}) - - multiply_6 = nw.new_node(Nodes.Math, input_kwargs={0: add_5}, attrs={'operation': 'MULTIPLY'}) - - add_8 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_6, 1: group_input.outputs["base_leg_height"]}) - - add_9 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["external_thickness"], 1: add_8}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply_4, 'Y': multiply_5, 'Z': add_9}) - - transform = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': cube, 'Translation': combine_xyz_1}) - - multiply_7 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_4, 1: -1.0000}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply_7, 'Y': multiply_5, 'Z': add_9}) - - transform_1 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': cube, 'Translation': combine_xyz_2}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [transform, transform_1]}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': join_geometry}, - attrs={'is_active_output': True}) - - -def geometry_nodes(nw: NodeWrangler, **kwargs): - # Code generated using version 2.6.4 of the node_transpiler - - base_leg_height = nw.new_node(Nodes.Value, label='base_leg_height') - base_leg_height.outputs[0].default_value = kwargs['base_leg_height'] - - 
horizontal_cell_num = nw.new_node(Nodes.Integer, label='horizontal_cell_num') - horizontal_cell_num.integer = kwargs['horizontal_cell_num'] - - vertical_cell_num = nw.new_node(Nodes.Integer, label='vertical_cell_num') - vertical_cell_num.integer = kwargs['vertical_cell_num'] - - cell_size = nw.new_node(Nodes.Value, label='cell_size') - cell_size.outputs[0].default_value = kwargs['cell_size'] - - depth = nw.new_node(Nodes.Value, label='depth') - depth.outputs[0].default_value = kwargs['depth'] - - division_board_thickness = nw.new_node(Nodes.Value, label='division_board_thickness') - division_board_thickness.outputs[0].default_value = kwargs['division_board_thickness'] - - external_board_thickness = nw.new_node(Nodes.Value, label='external_board_thickness') - external_board_thickness.outputs[0].default_value = kwargs['external_board_thickness'] - - sideboard = nw.new_node(nodegroup_side_board().name, - input_kwargs={'base_leg_height': base_leg_height, - 'horizontal_cell_num': horizontal_cell_num, - 'vertical_cell_num': vertical_cell_num, 'cell_size': cell_size, - 'depth': depth, 'division_thickness': division_board_thickness, - 'external_thickness': external_board_thickness}) - - topbottomboard = nw.new_node(nodegroup_top_bottom_board(tag_support=kwargs.get('tag_support', False)).name, - input_kwargs={'base_leg_height': base_leg_height, - 'horizontal_cell_num': horizontal_cell_num, - 'vertical_cell_num': vertical_cell_num, 'cell_size': cell_size, - 'depth': depth, 'division_board_thickness': division_board_thickness, - 'external_board_thickness': external_board_thickness}) - - vdivisionboard = nw.new_node(nodegroup_v_division_board().name, - input_kwargs={'division_board_thickness': division_board_thickness, 'depth': depth, - 'cell_size': cell_size, 'vertical_cell_num': vertical_cell_num}) - - all_components = [sideboard, topbottomboard.outputs["Geometry"]] - - v_division_boards = [] - for i in range(1, kwargs['horizontal_cell_num']): - v_division_index = nw.new_node(Nodes.Integer, label='VDivisionIndex') - v_division_index.integer = i - - vdivisionboardplacement = nw.new_node(nodegroup_v_division_board_placement().name, - input_kwargs={'depth': depth, 'base_leg': base_leg_height, - 'external_thickness': external_board_thickness, - 'side_z': vdivisionboard.outputs["Value"], - 'index': v_division_index, 'h_cell_num': horizontal_cell_num, - 'division_thickness': division_board_thickness, - 'cell_size': cell_size}) - - transform_1 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': vdivisionboard.outputs["Mesh"], - 'Translation': vdivisionboardplacement}) - v_division_boards.append(transform_1) - - if len(v_division_boards) > 0: - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': v_division_boards}) - all_components.append(join_geometry_1) - - hdivisionboard = nw.new_node(nodegroup_h_division_board(tag_support=kwargs.get('tag_support', False)).name, - input_kwargs={'cell_size': cell_size, 'horizontal_cell_num': horizontal_cell_num, - 'division_board_thickness': division_board_thickness, 'depth': depth}) - - h_division_boards = [] - for j in range(1, kwargs['vertical_cell_num']): - h_division_index = nw.new_node(Nodes.Integer, label='HDivisionIndex') - h_division_index.integer = j - - hdivisionplacement = nw.new_node(nodegroup_h_division_placement().name, - input_kwargs={'depth': depth, 'cell_size': cell_size, - 'leg_height': base_leg_height, - 'division_board_thickness': external_board_thickness, - 'external_board_thickness': division_board_thickness, - 
'index': h_division_index}) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': hdivisionboard, 'Translation': hdivisionplacement}) - h_division_boards.append(transform) - - if len(h_division_boards) > 0: - join_geometry = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': h_division_boards}) - all_components.append(join_geometry) - - if kwargs['has_backboard']: - backboard = nw.new_node(nodegroup_back_board().name, - input_kwargs={'X': topbottomboard.outputs["x"], 'Z': vdivisionboard.outputs["Value"], - 'leg': base_leg_height, 'external': external_board_thickness}) - all_components.append(backboard) - else: - attach_square_size = nw.new_node(Nodes.Value, label='attach_square_size') - attach_square_size.outputs[0].default_value = kwargs['attachment_size'] - - attachgadget = nw.new_node(nodegroup_attach_gadget().name, - input_kwargs={'z': vdivisionboard.outputs["Value"], 'base_leg': base_leg_height, - 'x': topbottomboard.outputs["x"], - 'thickness': external_board_thickness, - 'size': attach_square_size}) - all_components.append(attachgadget) - - join_geometry_4 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': all_components}) - - realize_instances = nw.new_node(Nodes.RealizeInstances, input_kwargs={'Geometry': join_geometry_4}) - - set_material_1 = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': realize_instances, - 'Material': surface.shaderfunc_to_material(kwargs['wood_material'])}) - - base_leg_size = nw.new_node(Nodes.Value, label='base_leg_size') - base_leg_size.outputs[0].default_value = kwargs['base_leg_size'] - - merge_components = [set_material_1] - if kwargs['has_base_frame']: - baseframe = nw.new_node(nodegroup_base_frame().name, - input_kwargs={'leg_height': base_leg_height, 'leg_size': base_leg_size, 'depth': depth, - 'bottom_x': topbottomboard.outputs["x"]}) - - realize_instances_1 = nw.new_node(Nodes.RealizeInstances, input_kwargs={'Geometry': baseframe}) - - set_material = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': realize_instances_1, - 'Material': surface.shaderfunc_to_material(kwargs['base_material'])}) - merge_components.append(set_material) - - screwhead = nw.new_node(nodegroup_screw_head().name, - input_kwargs={'Z': vdivisionboard.outputs["Value"], 'leg': base_leg_height, - 'X': topbottomboard.outputs["x"], 'external': external_board_thickness, - 'depth': depth}) - - realize_instances_2 = nw.new_node(Nodes.RealizeInstances, input_kwargs={'Geometry': screwhead}) - - set_material_2 = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': realize_instances_2, - 'Material': surface.shaderfunc_to_material(metal.get_shader())}) - merge_components.append(set_material_2) - - join_geometry_2 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': merge_components}) - - triangulate = nw.new_node('GeometryNodeTriangulate', input_kwargs={'Mesh': join_geometry_2}) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': triangulate, 'Rotation': (0.0000, 0.0000, -1.5708)}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': transform}, - attrs={'is_active_output': True}) - - -class CellShelfBaseFactory(AssetFactory): - def __init__(self, factory_seed, coarse=False): - super(CellShelfBaseFactory, self).__init__(factory_seed, coarse=coarse) - with FixedSeed(factory_seed): - self.params = self.sample_params() - self.params = self.get_asset_params(self.params) - - def get_asset_params(self, params): - - if params is None: - params = {} - - if params.get('depth', None) is 
None: - params['depth'] = np.clip(normal(0.39, 0.05), 0.29, 0.49) - if params.get('cell_size', None) is None: - params['cell_size'] = np.clip(normal(0.335, 0.03), 0.26, 0.40) - if params.get('vertical_cell_num', None) is None: - params['vertical_cell_num'] = randint(1, 7) - if params.get('horizontal_cell_num', None) is None: - params['horizontal_cell_num'] = randint(1, 7) - if params.get('division_board_thickness', None) is None: - params['division_board_thickness'] = np.clip(normal(0.015, 0.005), 0.008, 0.022) - if params.get('external_board_thickness', None) is None: - params['external_board_thickness'] = np.clip(normal(0.04, 0.005), 0.028, 0.052) - if params.get('has_backboard', None) is None: - params['has_backboard'] = False - if params.get('has_base_frame', None) is None: - params['has_base_frame'] = np.random.choice([True, False], p=[0.4, 0.6]) - if params['has_base_frame']: - if params.get('base_leg_height', None) is None: - params['base_leg_height'] = np.clip(normal(0.174, 0.03), 0.1, 0.25) - if params.get('base_leg_size', None) is None: - params['base_leg_size'] = np.clip(normal(0.035, 0.007), 0.02, 0.05) - if params.get('base_material', None) is None: - params['base_material'] = np.random.choice(['black', 'white'], p=[0.4, 0.6]) - else: - params['base_leg_height'] = 0.0 - params['base_leg_size'] = 0.0 - params['base_material'] = 'white' - if params.get('attachment_size', None) is None: - params['attachment_size'] = np.clip(normal(0.05, 0.02), 0.02, 0.1) - if params.get('wood_material', None) is None: - params['wood_material'] = np.random.choice(['black_wood', 'white', 'wood'], p=[0.3, 0.2, 0.5]) - params['tag_support'] = True - params = self.get_material_func(params, randomness=True) - return params - - def get_material_func(self, params, randomness=True): - if params['wood_material'] == 'white': - if randomness: - params['wood_material'] = lambda x: shader_shelves_white(x, **shader_shelves_white_sampler()) - else: - params['wood_material'] = shader_shelves_white - elif params['wood_material'] == 'black_wood': - if randomness: - params['wood_material'] = lambda x: shader_shelves_black_wood(x, **shader_shelves_black_wood_sampler()) - else: - params['wood_material'] = shader_shelves_black_wood - elif params['wood_material'] == 'wood': - if randomness: - params['wood_material'] = lambda x: shader_shelves_wood(x, **shader_shelves_wood_sampler()) - else: - params['wood_material'] = shader_shelves_wood - else: - raise NotImplementedError - - if params['base_material'] == 'white': - if randomness: - params['base_material'] = lambda x: shader_shelves_white_metallic(x, **shader_shelves_white_metallic_sampler()) - else: - params['base_material'] = shader_shelves_white_metallic - elif params['base_material'] == 'black': - if randomness: - params['base_material'] = lambda x: shader_shelves_black_metallic(x, **shader_shelves_black_metallic_sampler()) - else: - params['base_material'] = shader_shelves_black_metallic - else: - raise NotImplementedError - - return params - - def create_asset(self, i=0, **params): - bpy.ops.mesh.primitive_plane_add( - size=1, enter_editmode=False, align='WORLD', location=(0, 0, 0), scale=(1, 1, 1)) - obj = bpy.context.active_object - - obj_params = self.params - surface.add_geomod(obj, geometry_nodes, attributes=[], input_kwargs=obj_params, apply=True) - tagging.tag_system.relabel_obj(obj) - - return obj - - -class CellShelfFactory(CellShelfBaseFactory): - - def sample_params(self): - params = dict() - params['Dimensions'] = (uniform(0.3, 0.45), - uniform(2 * 
0.35, 6 * 0.35), - uniform(1 * 0.35, 6 * 0.35)) - h_cell_num = int(params['Dimensions'][1] / 0.35) - params['cell_size'] = params['Dimensions'][1] / h_cell_num - params['horizontal_cell_num'] = h_cell_num - params['vertical_cell_num'] = max(int(params['Dimensions'][2] / params['cell_size']), 1) - params['depth'] = params['Dimensions'][0] - params['has_base_frame'] = False - params['Dimensions'] = list(params['Dimensions']) - params['Dimensions'][2] = params['vertical_cell_num'] * params['cell_size'] - return params - - def create_placeholder(self, **kwargs) -> bpy.types.Object: - x,y,z = self.params['Dimensions'][0], self.params['Dimensions'][1], self.params['Dimensions'][2] - return new_bbox(0, x, -y/2 * 1.1, y/2 * 1.1, 0, z + (self.params['vertical_cell_num'] - 1) * self.params['division_board_thickness'] + 2 * self.params['external_board_thickness'] ) - -class TVStandFactory(CellShelfFactory): - - def sample_params(self): # TODO HACK copied code just following the pattern to get this working - params = dict() - params['Dimensions'] = ( - uniform(0.3, 0.45), - uniform(2 * 0.35, 6 * 0.35), - uniform(0.3, 0.5) - ) - h_cell_num = int(params['Dimensions'][1] / 0.35) - params['cell_size'] = params['Dimensions'][1] / h_cell_num - params['horizontal_cell_num'] = h_cell_num - params['vertical_cell_num'] = max(int(params['Dimensions'][2] / params['cell_size']), 1) - params['depth'] = params['Dimensions'][0] - params['has_base_frame'] = False - params['Dimensions'] = list(params['Dimensions']) - params['Dimensions'][2] = params['vertical_cell_num'] * params['cell_size'] - return params \ No newline at end of file diff --git a/infinigen/assets/shelves/doors.py b/infinigen/assets/shelves/doors.py deleted file mode 100644 index 78dc111c6..000000000 --- a/infinigen/assets/shelves/doors.py +++ /dev/null @@ -1,739 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
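
Both `CellShelfFactory` and `TVStandFactory` removed above derive their grid from a sampled bounding box: the width is split into roughly 0.35 m cells, the cell size is recomputed from the rounded count, and the height is then snapped to a whole number of rows. A small self-contained restatement of that arithmetic follows; the helper name and the example dimensions are illustrative, not from the original file.

```python
# Standalone restatement of the cell-grid arithmetic from sample_params above.
# The 0.35 m nominal cell size comes from the deleted code; the input
# dimensions below are made-up example values, not sampled ones.
def derive_cell_grid(dimensions):
    depth, width, height = dimensions
    h_cell_num = int(width / 0.35)                 # original samples width in [0.7, 2.1] m, so >= 1
    cell_size = width / h_cell_num                 # actual cell width after rounding the count
    v_cell_num = max(int(height / cell_size), 1)   # at least one row
    snapped_height = v_cell_num * cell_size        # Dimensions[2] is snapped to whole rows
    return h_cell_num, v_cell_num, cell_size, (depth, width, snapped_height)

h, v, cell, dims = derive_cell_grid((0.4, 1.4, 1.05))  # e.g. 4 cells wide, 3 rows tall
```

This snapping is also why `create_placeholder` above sizes its bounding box from `vertical_cell_num`, `division_board_thickness`, and `external_board_thickness` rather than from the raw sampled height.
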
- -# Authors: Lingjie Mei - -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core import surface -from infinigen.core.placement.factory import AssetFactory -import numpy as np -from infinigen.core.util import blender as butil -from infinigen.core import tagging, tags as t -import bpy - -from infinigen.assets.materials.shelf_shaders import ( - shader_shelves_white, shader_shelves_white_sampler, - shader_shelves_black_wood, shader_shelves_black_wood_sampler, - shader_shelves_wood, shader_shelves_wood_sampler, - shader_glass) - -@node_utils.to_nodegroup('nodegroup_node_group', singleton=False, type='GeometryNodeTree') -def nodegroup_node_group(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - cube = nw.new_node(Nodes.MeshCube, input_kwargs={'Size': (0.0120, 0.00060, 0.0400)}) - - cylinder = nw.new_node('GeometryNodeMeshCylinder', input_kwargs={'Vertices': 64, 'Radius': 0.0100, 'Depth': 0.00050}) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': cylinder.outputs["Mesh"], 'Translation': (0.0050, 0.0000, 0.0000), - 'Rotation': (1.5708, 0.0000, 0.0000)}) - - cube_1 = nw.new_node(Nodes.MeshCube, input_kwargs={'Size': (0.0200, 0.0006, 0.0120)}) - - transform_1 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': cube_1, 'Translation': (0.0080, 0.0000, 0.0000)}) - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [cube, transform, transform_1]}) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'attach_height', 0.1000), - ('NodeSocketFloat', 'door_width', 0.5000)]) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["door_width"]}, - attrs={'operation': 'MULTIPLY'}) - - subtract = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: 0.0181}, attrs={'operation': 'SUBTRACT'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': subtract, 'Z': group_input.outputs["attach_height"]}) - - transform_2 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': join_geometry_1, 'Translation': combine_xyz}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': transform_2}, - attrs={'is_active_output': True}) - - -@node_utils.to_nodegroup('nodegroup_knob_handle', singleton=False, type='GeometryNodeTree') -def nodegroup_knob_handle(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloatDistance', 'Radius', 0.0100), - ('NodeSocketFloat', 'thickness_1', 0.5000), - ('NodeSocketFloat', 'thickness_2', 0.5000), - ('NodeSocketFloat', 'length', 0.5000), - ('NodeSocketFloat', 'knob_mid_height', 0.0000), - ('NodeSocketFloat', 'edge_width', 0.5000), - ('NodeSocketFloat', 'door_width', 0.5000)]) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["thickness_2"], 1: group_input.outputs["thickness_1"]}) - - add_1 = nw.new_node(Nodes.Math, input_kwargs={0: add, 1: group_input.outputs["length"]}) - - cylinder = nw.new_node('GeometryNodeMeshCylinder', - input_kwargs={'Vertices': 64, 'Radius': group_input.outputs["Radius"], 'Depth': add_1}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["door_width"], 1: group_input.outputs["edge_width"]}, - attrs={'operation': 'SUBTRACT'}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: subtract, 1: -0.5000}, 
attrs={'operation': 'MULTIPLY'}) - - add_2 = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: -0.005}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: add_1}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_6 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': add_2, 'Y': multiply_1, 'Z': group_input.outputs["knob_mid_height"]}) - - transform_6 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': cylinder.outputs["Mesh"], 'Translation': combine_xyz_6, - 'Rotation': (1.5708, 0.0000, 0.0000)}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': transform_6}, - attrs={'is_active_output': True}) - - -@node_utils.to_nodegroup('nodegroup_mid_board', singleton=False, type='GeometryNodeTree') -def nodegroup_mid_board(nw: NodeWrangler, **kwargs): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'height', 0.5000), - ('NodeSocketFloat', 'thickness', 0.5000), - ('NodeSocketFloat', 'width', 0.5000)]) - - add = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["width"], 1: -0.0001}) - - add_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["thickness"], 1: 0.0000}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["height"]}, attrs={'operation': 'MULTIPLY'}) - - multiply_k = nw.new_node(Nodes.Math, input_kwargs={0: add_1, 1: 0.5000}, attrs={'operation': 'MULTIPLY'}) - - add_k = nw.new_node(Nodes.Math, input_kwargs={0: multiply_k, 1: 0.004}) - - add_2 = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: -0.0001}) - - combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': add, 'Y': add_1, 'Z': add_2}) - - cube = nw.new_node(Nodes.MeshCube, - input_kwargs={'Size': combine_xyz_3, 'Vertices X': 5, 'Vertices Y': 5, 'Vertices Z': 5}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: multiply}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_4 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': add_k, 'Z': multiply_1}) - - transform_4 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': cube, 'Translation': combine_xyz_4}) - - set_material = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': transform_4, - 'Material': surface.shaderfunc_to_material(kwargs['material'][0])}) - - combine_xyz_7 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': add, 'Y': add_1, 'Z': add_2}) - - cube_1 = nw.new_node(Nodes.MeshCube, - input_kwargs={'Size': combine_xyz_7, 'Vertices X': 5, 'Vertices Y': 5, 'Vertices Z': 5}) - - multiply_2 = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: 1.5000}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_8 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': add_k, 'Z': multiply_2}) - - transform_7 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': cube_1, 'Translation': combine_xyz_8}) - - set_material_1 = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': transform_7, - 'Material': surface.shaderfunc_to_material(kwargs['material'][1])}) - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [set_material, set_material_1]}) - - realize_instances = nw.new_node(Nodes.RealizeInstances, input_kwargs={'Geometry': join_geometry_1}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': realize_instances, 'mid_height': multiply}, - attrs={'is_active_output': True}) - - -@node_utils.to_nodegroup('nodegroup_mid_board_001', singleton=False, type='GeometryNodeTree') -def nodegroup_mid_board_001(nw: NodeWrangler, 
**kwargs): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'height', 0.5000), - ('NodeSocketFloat', 'thickness', 0.5000), - ('NodeSocketFloat', 'width', 0.5000)]) - - add = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["width"], 1: -0.0001}) - - add_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["thickness"], 1: 0.0000}) - - multiply_k = nw.new_node(Nodes.Math, input_kwargs={0: add_1, 1: 0.5000}, attrs={'operation': 'MULTIPLY'}) - - add_k = nw.new_node(Nodes.Math, input_kwargs={0: multiply_k, 1: 0.004}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["height"], 1: 1.0000}, - attrs={'operation': 'MULTIPLY'}) - - add_2 = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: -0.0001}) - - combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': add, 'Y': add_1, 'Z': add_2}) - - cube = nw.new_node(Nodes.MeshCube, - input_kwargs={'Size': combine_xyz_3, 'Vertices X': 5, 'Vertices Y': 5, 'Vertices Z': 5}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: multiply}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_4 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': add_k, 'Z': multiply_1}) - - transform_4 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': cube, 'Translation': combine_xyz_4}) - - set_material = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': transform_4, - 'Material': surface.shaderfunc_to_material(kwargs['material'][0])}) - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': set_material}) - - realize_instances = nw.new_node(Nodes.RealizeInstances, input_kwargs={'Geometry': join_geometry_1}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': realize_instances, 'mid_height': multiply}, - attrs={'is_active_output': True}) - - -@node_utils.to_nodegroup('nodegroup_double_rampled_edge', singleton=False, type='GeometryNodeTree') -def nodegroup_double_rampled_edge(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'height', 0.5000), - ('NodeSocketFloat', 'thickness_2', 0.5000), - ('NodeSocketFloat', 'width', 0.5000), - ('NodeSocketFloat', 'thickness_1', 0.5000), - ('NodeSocketFloat', 'ramp_angle', 0.5000)]) - - add = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["height"], 1: 0.0000}) - - combine_xyz_10 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': add}) - - curve_line = nw.new_node(Nodes.CurveLine, input_kwargs={'End': combine_xyz_10}) - - curve_circle = nw.new_node(Nodes.CurveCircle, input_kwargs={'Resolution': 3, 'Radius': 0.0100}) - - endpoint_selection = nw.new_node(Nodes.EndpointSelection, input_kwargs={'End Size': 0}) - - add_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["width"], 1: 0.0000}) - - add_2 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["ramp_angle"], 1: 0.0000}) - - tangent = nw.new_node(Nodes.Math, input_kwargs={0: add_2}, attrs={'operation': 'TANGENT'}) - - add_3 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["thickness_2"], 1: 0.0000}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: tangent, 1: add_3}, attrs={'operation': 'MULTIPLY'}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: 2.0000, 1: multiply}, attrs={'operation': 'MULTIPLY'}) - - subtract = nw.new_node(Nodes.Math, input_kwargs={0: add_1, 1: multiply_1}, 
attrs={'operation': 'SUBTRACT'}) - - multiply_2 = nw.new_node(Nodes.Math, input_kwargs={0: subtract}, attrs={'operation': 'MULTIPLY'}) - - multiply_3 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_2, 1: -1.0000}, attrs={'operation': 'MULTIPLY'}) - - add_4 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["thickness_1"], 1: 0.0000}) - - combine_xyz_7 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply_3, 'Y': add_4}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': curve_circle.outputs["Curve"], 'Selection': endpoint_selection, - 'Position': combine_xyz_7}) - - endpoint_selection_1 = nw.new_node(Nodes.EndpointSelection, input_kwargs={'Start Size': 0}) - - add_5 = nw.new_node(Nodes.Math, input_kwargs={0: add_4, 1: add_3}) - - combine_xyz_8 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply_3, 'Y': add_5}) - - set_position_1 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': set_position, 'Selection': endpoint_selection_1, - 'Position': combine_xyz_8}) - - index = nw.new_node(Nodes.Index) - - less_than = nw.new_node(Nodes.Math, input_kwargs={0: index, 1: 1.0100}, attrs={'operation': 'LESS_THAN'}) - - greater_than = nw.new_node(Nodes.Math, input_kwargs={0: index, 1: 0.9900}, attrs={'operation': 'GREATER_THAN'}) - - op_and = nw.new_node(Nodes.BooleanMath, input_kwargs={0: less_than, 1: greater_than}) - - multiply_4 = nw.new_node(Nodes.Math, input_kwargs={0: add_1}, attrs={'operation': 'MULTIPLY'}) - - multiply_5 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_4, 1: -1.0000}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_9 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply_5, 'Y': add_4}) - - set_position_2 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': set_position_1, 'Selection': op_and, - 'Position': combine_xyz_9}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': curve_line, 'Profile Curve': set_position_2, 'Fill Caps': True}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': add_1, 'Y': add_4, 'Z': add}) - - cube = nw.new_node(Nodes.MeshCube, input_kwargs={'Size': combine_xyz}) - - multiply_6 = nw.new_node(Nodes.Math, input_kwargs={0: add_4}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': multiply_6}) - - transform = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': cube, 'Translation': combine_xyz_2}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': subtract, 'Y': add_3, 'Z': add}) - - cube_1 = nw.new_node(Nodes.MeshCube, input_kwargs={'Size': combine_xyz_1}) - - multiply_7 = nw.new_node(Nodes.Math, input_kwargs={0: add_3}, attrs={'operation': 'MULTIPLY'}) - - add_6 = nw.new_node(Nodes.Math, input_kwargs={0: add_4, 1: multiply_7}) - - combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': add_6}) - - transform_1 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': cube_1, 'Translation': combine_xyz_3}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [transform, transform_1]}) - - multiply_8 = nw.new_node(Nodes.Math, input_kwargs={0: add}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_11 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': multiply_8}) - - transform_4 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': join_geometry, 'Translation': combine_xyz_11}) - - combine_xyz_12 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': add}) - - curve_line_1 = nw.new_node(Nodes.CurveLine, input_kwargs={'End': 
combine_xyz_12}) - - transform_2 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': set_position_2, 'Scale': (-1.0000, 1.0000, 1.0000)}) - - curve_to_mesh_1 = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': curve_line_1, 'Profile Curve': transform_2, 'Fill Caps': True}) - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [curve_to_mesh, transform_4, curve_to_mesh_1]}) - - merge_by_distance = nw.new_node(Nodes.MergeByDistance, - input_kwargs={'Geometry': join_geometry_1, 'Distance': 0.0001}) - - realize_instances = nw.new_node(Nodes.RealizeInstances, input_kwargs={'Geometry': merge_by_distance}) - - subdivide_mesh = nw.new_node(Nodes.SubdivideMesh, input_kwargs={'Mesh': realize_instances, 'Level': 4}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': subdivide_mesh}, - attrs={'is_active_output': True}) - - -@node_utils.to_nodegroup('nodegroup_ramped_edge', singleton=False, type='GeometryNodeTree') -def nodegroup_ramped_edge(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'height', 0.5000), - ('NodeSocketFloat', 'thickness_2', 0.5000), - ('NodeSocketFloat', 'width', 0.5000), - ('NodeSocketFloat', 'thickness_1', 0.5000), - ('NodeSocketFloat', 'ramp_angle', 0.5000)]) - - add = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["height"], 1: 0.0000}) - - combine_xyz_10 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': add}) - - curve_line = nw.new_node(Nodes.CurveLine, input_kwargs={'End': combine_xyz_10}) - - curve_circle = nw.new_node(Nodes.CurveCircle, input_kwargs={'Resolution': 3, 'Radius': 0.0100}) - - endpoint_selection = nw.new_node(Nodes.EndpointSelection, input_kwargs={'End Size': 0}) - - add_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["width"], 1: 0.0000}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: add_1}, attrs={'operation': 'MULTIPLY'}) - - add_2 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["ramp_angle"], 1: 0.0000}) - - tangent = nw.new_node(Nodes.Math, input_kwargs={0: add_2}, attrs={'operation': 'TANGENT'}) - - add_3 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["thickness_2"], 1: 0.0000}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: tangent, 1: add_3}, attrs={'operation': 'MULTIPLY'}) - - subtract = nw.new_node(Nodes.Math, input_kwargs={0: add_1, 1: multiply_1}, attrs={'operation': 'SUBTRACT'}) - - subtract_1 = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: subtract}, attrs={'operation': 'SUBTRACT'}) - - add_4 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["thickness_1"], 1: 0.0000}) - - combine_xyz_7 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': subtract_1, 'Y': add_4}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': curve_circle.outputs["Curve"], 'Selection': endpoint_selection, - 'Position': combine_xyz_7}) - - endpoint_selection_1 = nw.new_node(Nodes.EndpointSelection, input_kwargs={'Start Size': 0}) - - add_5 = nw.new_node(Nodes.Math, input_kwargs={0: add_4, 1: add_3}) - - combine_xyz_8 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': subtract_1, 'Y': add_5}) - - set_position_1 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': set_position, 'Selection': endpoint_selection_1, - 'Position': combine_xyz_8}) - - index = nw.new_node(Nodes.Index) - - less_than = nw.new_node(Nodes.Math, input_kwargs={0: index, 1: 1.0100}, 
attrs={'operation': 'LESS_THAN'}) - - greater_than = nw.new_node(Nodes.Math, input_kwargs={0: index, 1: 0.9900}, attrs={'operation': 'GREATER_THAN'}) - - op_and = nw.new_node(Nodes.BooleanMath, input_kwargs={0: less_than, 1: greater_than}) - - multiply_2 = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: -1.0000}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_9 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply_2, 'Y': add_4}) - - set_position_2 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': set_position_1, 'Selection': op_and, - 'Position': combine_xyz_9}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': curve_line, 'Profile Curve': set_position_2, 'Fill Caps': True}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': add_1, 'Y': add_4, 'Z': add}) - - cube = nw.new_node(Nodes.MeshCube, input_kwargs={'Size': combine_xyz}) - - multiply_3 = nw.new_node(Nodes.Math, input_kwargs={0: add_4}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': multiply_3}) - - transform = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': cube, 'Translation': combine_xyz_2}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': subtract, 'Y': add_3, 'Z': add}) - - cube_1 = nw.new_node(Nodes.MeshCube, input_kwargs={'Size': combine_xyz_1}) - - multiply_4 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_1}, attrs={'operation': 'MULTIPLY'}) - - multiply_5 = nw.new_node(Nodes.Math, input_kwargs={0: add_3}, attrs={'operation': 'MULTIPLY'}) - - add_6 = nw.new_node(Nodes.Math, input_kwargs={0: add_4, 1: multiply_5}) - - combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply_4, 'Y': add_6}) - - transform_1 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': cube_1, 'Translation': combine_xyz_3}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [transform, transform_1]}) - - multiply_6 = nw.new_node(Nodes.Math, input_kwargs={0: add}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_11 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': multiply_6}) - - transform_4 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': join_geometry, 'Translation': combine_xyz_11}) - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [curve_to_mesh, transform_4]}) - - merge_by_distance = nw.new_node(Nodes.MergeByDistance, - input_kwargs={'Geometry': join_geometry_1, 'Distance': 0.0001}) - - realize_instances = nw.new_node(Nodes.RealizeInstances, input_kwargs={'Geometry': merge_by_distance}) - - subdivide_mesh = nw.new_node(Nodes.SubdivideMesh, input_kwargs={'Mesh': realize_instances, 'Level': 4}) - - multiply_7 = nw.new_node(Nodes.Math, input_kwargs={0: add_1, 1: -0.5000}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_4 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply_7}) - - transform_2 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': subdivide_mesh, 'Translation': combine_xyz_4}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': transform_2}, - attrs={'is_active_output': True}) - - -@node_utils.to_nodegroup('nodegroup_panel_edge_frame', singleton=False, type='GeometryNodeTree') -def nodegroup_panel_edge_frame(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'vertical_edge', None), - ('NodeSocketFloat', 'door_width', 0.5000), - ('NodeSocketFloat', 
'door_height', 0.0000), - ('NodeSocketGeometry', 'horizontal_edge', None)]) - - multiply_add = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["door_width"], 2: 0.0010}, - attrs={'operation': 'MULTIPLY_ADD'}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: multiply_add, 1: -1.0000}, attrs={'operation': 'MULTIPLY'}) - - transform_7 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': group_input.outputs["horizontal_edge"], - 'Translation': (0.0000, -0.0001, 0.0000), - 'Scale': (0.9999, 1.0000, 1.0000)}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_add, 1: 1.0000}, attrs={'operation': 'MULTIPLY'}) - - add = nw.new_node(Nodes.Math, input_kwargs={0: multiply_1, 1: -0.0001}) - - add_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["door_height"], 1: 0.0001}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': add, 'Z': add_1}) - - transform_3 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': transform_7, 'Translation': combine_xyz_2, - 'Rotation': (0.0000, -1.5708, 0.0000)}) - - add_2 = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: 0.0001}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': add_2}) - - transform_2 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': transform_7, 'Translation': combine_xyz_1, - 'Rotation': (0.0000, 1.5708, 0.0000)}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply_add}) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': group_input.outputs["vertical_edge"], 'Translation': combine_xyz}) - - transform_1 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': transform, 'Scale': (-1.0000, 1.0000, 1.0000)}) - - # transform_1 = nw.new_node(Nodes.FlipFaces, input_kwargs={'Mesh': transform_1}) - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [transform_3, transform_2, transform_1, transform]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Value': multiply, 'Geometry': join_geometry_1}, - attrs={'is_active_output': True}) - - -def geometry_door_nodes(nw: NodeWrangler, **kwargs): - # Code generated using version 2.6.4 of the node_transpiler - - door_height = nw.new_node(Nodes.Value, label='door_height') - door_height.outputs[0].default_value = kwargs['door_height'] - - door_edge_thickness_2 = nw.new_node(Nodes.Value, label='door_edge_thickness_2') - door_edge_thickness_2.outputs[0].default_value = kwargs['edge_thickness_2'] - - door_edge_width = nw.new_node(Nodes.Value, label='door_edge_width') - door_edge_width.outputs[0].default_value = kwargs['edge_width'] - - door_edge_thickness_1 = nw.new_node(Nodes.Value, label='door_edge_thickness_1') - door_edge_thickness_1.outputs[0].default_value = kwargs['edge_thickness_1'] - - door_edge_ramp_angle = nw.new_node(Nodes.Value, label='door_edge_ramp_angle') - door_edge_ramp_angle.outputs[0].default_value = kwargs['edge_ramp_angle'] - - ramped_edge = nw.new_node(nodegroup_ramped_edge().name, - input_kwargs={'height': door_height, 'thickness_2': door_edge_thickness_2, - 'width': door_edge_width, 'thickness_1': door_edge_thickness_1, - 'ramp_angle': door_edge_ramp_angle}) - - door_width = nw.new_node(Nodes.Value, label='door_width') - door_width.outputs[0].default_value = kwargs['door_width'] - - ramped_edge_1 = nw.new_node(nodegroup_ramped_edge().name, - input_kwargs={'height': door_width, 'thickness_2': door_edge_thickness_2, - 'width': door_edge_width, 'thickness_1': door_edge_thickness_1, - 'ramp_angle': 
door_edge_ramp_angle}) - - panel_edge_frame = nw.new_node(nodegroup_panel_edge_frame().name, - input_kwargs={'vertical_edge': ramped_edge, 'door_width': door_width, - 'door_height': door_height, 'horizontal_edge': ramped_edge_1}) - - add = nw.new_node(Nodes.Math, input_kwargs={0: panel_edge_frame.outputs["Value"], 1: 0.0001}) - - mid_board_thickness = nw.new_node(Nodes.Value, label='mid_board_thickness') - mid_board_thickness.outputs[0].default_value = kwargs['board_thickness'] - - if kwargs['has_mid_ramp']: - mid_board = nw.new_node(nodegroup_mid_board(material=kwargs['panel_material']).name, - input_kwargs={'height': door_height, 'thickness': mid_board_thickness, - 'width': door_width}) - else: - mid_board = nw.new_node(nodegroup_mid_board_001(material=kwargs['panel_material']).name, - input_kwargs={'height': door_height, 'thickness': mid_board_thickness, - 'width': door_width}) - - combine_xyz_5 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': add, 'Y': -0.0001, 'Z': mid_board.outputs["mid_height"]}) - - frame = [panel_edge_frame.outputs["Geometry"]] - if kwargs['has_mid_ramp']: - double_rampled_edge = nw.new_node(nodegroup_double_rampled_edge().name, - input_kwargs={'height': door_width, 'thickness_2': door_edge_thickness_2, - 'width': door_edge_width, 'thickness_1': door_edge_thickness_1, - 'ramp_angle': door_edge_ramp_angle}) - - transform_5 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': double_rampled_edge, 'Translation': combine_xyz_5, - 'Rotation': (0.0000, 1.5708, 0.0000)}) - frame.append(transform_5) - - knob_raduis = nw.new_node(Nodes.Value, label='knob_raduis') - knob_raduis.outputs[0].default_value = kwargs['knob_R'] - - know_length = nw.new_node(Nodes.Value, label='know_length') - know_length.outputs[0].default_value = kwargs['knob_length'] - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: door_height}, attrs={'operation': 'MULTIPLY'}) - - knob_handle = nw.new_node(nodegroup_knob_handle().name, - input_kwargs={'Radius': knob_raduis, 'thickness_1': door_edge_thickness_1, - 'thickness_2': door_edge_thickness_2, 'length': know_length, - 'knob_mid_height': multiply, - 'edge_width': door_edge_width, 'door_width': door_width}) - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': frame + [knob_handle]}) - - set_material_3 = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': join_geometry_1, - 'Material': surface.shaderfunc_to_material(kwargs['frame_material'])}) - - geos = [set_material_3, mid_board.outputs["Geometry"]] - join_geometry = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': geos}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: door_width, 1: -0.5000}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply}) - - transform = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': join_geometry, 'Translation': combine_xyz}) - - realize_instances_1 = nw.new_node(Nodes.RealizeInstances, input_kwargs={'Geometry': transform}) - - triangulate = nw.new_node('GeometryNodeTriangulate', input_kwargs={'Mesh': realize_instances_1}) - - transform_1 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': triangulate, - 'Scale': (-1.0 if kwargs['door_left_hinge'] else 1.0, 1.0000, 1.0000)}) - - if kwargs['door_left_hinge']: - transform_1 = nw.new_node(Nodes.FlipFaces, input_kwargs={'Mesh': transform_1}) - - transform_2 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': transform_1, 'Rotation': (0.0000, 0.0000, -1.5708)}) - - group_output = 
nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': transform_2}, - attrs={'is_active_output': True}) - - -class CabinetDoorBaseFactory(AssetFactory): - def __init__(self, factory_seed, params={}, coarse=False): - super(CabinetDoorBaseFactory, self).__init__(factory_seed, coarse=coarse) - self.params = {} - - def get_asset_params(self, i=0): - params = self.params.copy() - if params.get('door_height', None) is None: - params['door_height'] = uniform(0.7, 2.2) - if params.get('door_width', None) is None: - params['door_width'] = uniform(0.3, 0.4) - if params.get('edge_thickness_1', None) is None: - params['edge_thickness_1'] = uniform(0.01, 0.018) - if params.get('edge_width', None) is None: - params['edge_width'] = uniform(0.03, 0.05) - if params.get('edge_thickness_2', None) is None: - params['edge_thickness_2'] = uniform(0.005, 0.008) - if params.get('edge_ramp_angle', None) is None: - params['edge_ramp_angle'] = uniform(0.6, 0.8) - params['board_thickness'] = params['edge_thickness_1'] - 0.005 - if params.get('knob_R', None) is None: - params['knob_R'] = uniform(0.003, 0.006) - if params.get('knob_length', None) is None: - params['knob_length'] = uniform(0.018, 0.035) - if params.get('attach_height', None) is None: - gap = uniform(0.05, 0.15) - params['attach_height'] = [gap, params['door_height'] - gap] - if params.get('has_mid_ramp', None) is None: - params['has_mid_ramp'] = np.random.choice([True, False], p=[0.6, 0.4]) - if params.get('door_left_hinge', None) is None: - params['door_left_hinge'] = False - - if params.get('frame_material', None) is None: - params['frame_material'] = np.random.choice(['white', 'black_wood', 'wood'], p=[0.5, 0.2, 0.3]) - if params.get('panel_material', None) is None: - if params['has_mid_ramp']: - lower_mat = np.random.choice([params['frame_material'], 'glass'], p=[0.7, 0.3]) - upper_mat = np.random.choice([lower_mat, 'glass'], p=[0.6, 0.4]) - params['panel_material'] = [lower_mat, upper_mat] - else: - params['panel_material'] = [params['frame_material']] - - params = self.get_material_func(params) - return params - - def get_material_func(self, params, randomness=True): - white_wood_params = shader_shelves_white_sampler() - black_wood_params = shader_shelves_black_wood_sampler() - normal_wood_params = shader_shelves_wood_sampler() - if params['frame_material'] == 'white': - if randomness: - params['frame_material'] = lambda x: shader_shelves_white(x, **white_wood_params) - else: - params['frame_material'] = shader_shelves_white - elif params['frame_material'] == 'black_wood': - if randomness: - params['frame_material'] = lambda x: shader_shelves_black_wood(x, **black_wood_params, z_axis_texture=True) - else: - params['frame_material'] = lambda x: shader_shelves_black_wood(x, z_axis_texture=True) - elif params['frame_material'] == 'wood': - if randomness: - params['frame_material'] = lambda x: shader_shelves_wood(x, **normal_wood_params, z_axis_texture=True) - else: - params['frame_material'] = lambda x: shader_shelves_wood(x, z_axis_texture=True) - - materials = [] - if not isinstance(params['panel_material'], list): - params['panel_material'] = [params['board_material']] - for mat in params['panel_material']: - if mat == 'white': - if randomness: - mat = lambda x: shader_shelves_white(x, **white_wood_params) - else: - mat = shader_shelves_white - elif mat == 'black_wood': - if randomness: - mat = lambda x: shader_shelves_black_wood(x, **black_wood_params, z_axis_texture=True) - else: - mat = lambda x: shader_shelves_black_wood(x, 
z_axis_texture=True) - elif mat == 'wood': - if randomness: - mat = lambda x: shader_shelves_wood(x, **normal_wood_params, z_axis_texture=True) - else: - mat = lambda x: shader_shelves_wood(x, z_axis_texture=True) - elif mat == 'glass': - if randomness: - mat = lambda x: shader_glass(x) - else: - mat = shader_glass - materials.append(mat) - params['panel_material'] = materials - return params - - def create_asset(self, i=0, **params): - bpy.ops.mesh.primitive_plane_add( - size=1, enter_editmode=False, align='WORLD', location=(0, 0, 0), scale=(1, 1, 1)) - obj = bpy.context.active_object - - obj_params = self.get_asset_params(i) - surface.add_geomod(obj, geometry_door_nodes, apply=True, attributes=[], input_kwargs=obj_params) - tagging.tag_system.relabel_obj(obj) - - if params.get('ret_params', False): - return obj, obj_params - - return obj - diff --git a/infinigen/assets/shelves/drawers.py b/infinigen/assets/shelves/drawers.py deleted file mode 100644 index 7393f9819..000000000 --- a/infinigen/assets/shelves/drawers.py +++ /dev/null @@ -1,416 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Beining Han - -from numpy.random import uniform, normal, randint - -from infinigen.assets.materials import metal -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core import surface -from infinigen.core.placement.factory import AssetFactory -import numpy as np -from infinigen.core.util import blender as butil -from infinigen.core.tagging import tag_object, tag_nodegroup - -import bpy -from infinigen.assets.materials.shelf_shaders import ( - shader_shelves_white, shader_shelves_white_sampler, - shader_shelves_black_wood, shader_shelves_black_wood_sampler, - shader_shelves_wood, shader_shelves_wood_sampler, - shader_glass) - - -@node_utils.to_nodegroup('nodegroup_board_rail', singleton=False, type='GeometryNodeTree') -def nodegroup_board_rail(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - cylinder_1 = nw.new_node('GeometryNodeMeshCylinder', - input_kwargs={'Vertices': 64, 'Radius': 0.0040, 'Depth': 0.0050}) - - store_named_attribute_2 = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': cylinder_1.outputs["Mesh"], 'Name': 'uv_map', - 3: cylinder_1.outputs["UV Map"]}, - attrs={'data_type': 'FLOAT_VECTOR', 'domain': 'CORNER'}) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'width', 0.0000), - ('NodeSocketFloat', 'thickness', 0.5000), - ('NodeSocketFloat', 'depth', 0.5000)]) - - add = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["depth"], 1: 0.0000}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: add, 1: -0.5000}, attrs={'operation': 'MULTIPLY'}) - - add_1 = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: 0.0200}) - - combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': add_1}) - - transform_5 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': store_named_attribute_2, 'Translation': combine_xyz_3, - 'Rotation': (0.0000, 1.5708, 0.0000)}) - - subtract = nw.new_node(Nodes.Math, input_kwargs={0: add, 1: 0.0300}, attrs={'operation': 'SUBTRACT'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': 0.0020, 'Y': subtract, 'Z': group_input.outputs["width"]}) - - cube = nw.new_node(Nodes.MeshCube, input_kwargs={'Size': combine_xyz}) - - 
store_named_attribute = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': cube.outputs["Mesh"], 'Name': 'uv_map', - 3: cube.outputs["UV Map"]}, - attrs={'data_type': 'FLOAT_VECTOR', 'domain': 'CORNER'}) - - transform = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': store_named_attribute}) - - cylinder = nw.new_node('GeometryNodeMeshCylinder', - input_kwargs={'Vertices': 64, 'Radius': 0.0030, 'Depth': subtract}) - - store_named_attribute_1 = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': cylinder.outputs["Mesh"], 'Name': 'uv_map', - 3: cylinder.outputs["UV Map"]}, - attrs={'data_type': 'FLOAT_VECTOR', 'domain': 'CORNER'}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["width"]}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': multiply_1}) - - transform_1 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': store_named_attribute_1, 'Translation': combine_xyz_1, - 'Rotation': (1.5708, 0.0000, 0.0000)}) - - transform_2 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': transform_1, 'Scale': (1.0000, 1.0000, -1.0000)}) - - join_geometry_2 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [transform_2, transform_1]}) - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [transform_5, transform, join_geometry_2]}) - - multiply_2 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["thickness"]}, - attrs={'operation': 'MULTIPLY'}) - - add_2 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_2, 1: 0.0030}) - - multiply_3 = nw.new_node(Nodes.Math, input_kwargs={0: add, 1: -0.5000}, attrs={'operation': 'MULTIPLY'}) - - add_3 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_1, 1: 0.0200}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': add_2, 'Y': multiply_3, 'Z': add_3}) - - transform_3 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': join_geometry_1, 'Translation': combine_xyz_2}) - - transform_4 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': transform_3, 'Scale': (-1.0000, 1.0000, 1.0000)}) - - join_geometry_3 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [transform_4, transform_3]}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': join_geometry_3}, - attrs={'is_active_output': True}) - - -@node_utils.to_nodegroup('nodegroup_kallax_drawer_frame', singleton=False, type='GeometryNodeTree') -def nodegroup_kallax_drawer_frame(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'depth', 0.5000), - ('NodeSocketFloat', 'height', 0.5000), - ('NodeSocketFloat', 'thickness', 0.5000), - ('NodeSocketFloat', 'width', 0.5000)]) - - add = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["thickness"], 1: 0.0000}) - - add_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["depth"], 1: 0.0000}) - - add_2 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["height"], 1: 0.0000}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': add, 'Y': add_1, 'Z': add_2}) - - cube = nw.new_node(Nodes.MeshCube, - input_kwargs={'Size': combine_xyz, 'Vertices X': 4, 'Vertices Y': 4, 'Vertices Z': 4}) - - store_named_attribute_1 = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': cube.outputs["Mesh"], 'Name': 'uv_map', - 3: cube.outputs["UV Map"]}, - attrs={'data_type': 
'FLOAT_VECTOR', 'domain': 'CORNER'}) - - add_3 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["width"], 1: 0.0000}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: add_3}, attrs={'operation': 'MULTIPLY'}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: add_1, 1: -0.5000}, attrs={'operation': 'MULTIPLY'}) - - add_4 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_1, 1: -0.0001}) - - multiply_add = nw.new_node(Nodes.Math, input_kwargs={0: add_2, 2: 0.0100}, attrs={'operation': 'MULTIPLY_ADD'}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply, 'Y': add_4, 'Z': multiply_add}) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': store_named_attribute_1, 'Translation': combine_xyz_1}) - - transform_1 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': transform, 'Scale': (-1.0000, 1.0000, 1.0000)}) - - add_5 = nw.new_node(Nodes.Math, input_kwargs={0: add, 1: -0.0001}) - - add_6 = nw.new_node(Nodes.Math, input_kwargs={0: add_3, 1: add_5}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': add_6, 'Y': add_1, 'Z': add}) - - cube_1 = nw.new_node(Nodes.MeshCube, - input_kwargs={'Size': combine_xyz_2, 'Vertices X': 4, 'Vertices Y': 4, 'Vertices Z': 4}) - - store_named_attribute_2 = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': cube_1.outputs["Mesh"], 'Name': 'uv_map', - 3: cube_1.outputs["UV Map"]}, - attrs={'data_type': 'FLOAT_VECTOR', 'domain': 'CORNER'}) - - multiply_add_1 = nw.new_node(Nodes.Math, input_kwargs={0: add_1, 1: -0.5000, 2: -0.0001}, - attrs={'operation': 'MULTIPLY_ADD'}) - - combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': multiply_add_1, 'Z': 0.0100}) - - transform_2 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': store_named_attribute_2, 'Translation': combine_xyz_3}) - - combine_xyz_4 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': add_3, 'Y': add, 'Z': add_2}) - - cube_2 = nw.new_node(Nodes.MeshCube, - input_kwargs={'Size': combine_xyz_4, 'Vertices X': 4, 'Vertices Y': 4, 'Vertices Z': 4}) - - store_named_attribute = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': cube_2.outputs["Mesh"], 'Name': 'uv_map', - 3: cube_2.outputs["UV Map"]}, - attrs={'data_type': 'FLOAT_VECTOR', 'domain': 'CORNER'}) - - multiply_2 = nw.new_node(Nodes.Math, input_kwargs={0: add}, attrs={'operation': 'MULTIPLY'}) - - multiply_add_2 = nw.new_node(Nodes.Math, input_kwargs={0: add_1, 1: -1.0000, 2: multiply_2}, - attrs={'operation': 'MULTIPLY_ADD'}) - - multiply_add_3 = nw.new_node(Nodes.Math, input_kwargs={0: add_2, 2: 0.0100}, attrs={'operation': 'MULTIPLY_ADD'}) - - combine_xyz_5 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': multiply_add_2, 'Z': multiply_add_3}) - - transform_3 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': store_named_attribute, 'Translation': combine_xyz_5}) - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [transform_1, transform, transform_2, transform_3]}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': join_geometry_1}, - attrs={'is_active_output': True}) - - -@node_utils.to_nodegroup('nodegroup_door_knob', singleton=False, type='GeometryNodeTree') -def nodegroup_door_knob(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloatDistance', 'Radius', 0.0040), - ('NodeSocketFloat', 'length', 0.5000), - ('NodeSocketFloat', 'z', 
0.5000)]) - - add = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["length"], 1: 0.0000}) - - cylinder = nw.new_node('GeometryNodeMeshCylinder', - input_kwargs={'Vertices': 64, 'Radius': group_input.outputs["Radius"], 'Depth': add}) - - store_named_attribute = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': cylinder.outputs["Mesh"], 'Name': 'uv_map', - 3: cylinder.outputs["UV Map"]}, - attrs={'data_type': 'FLOAT_VECTOR', 'domain': 'CORNER'}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: add}, attrs={'operation': 'MULTIPLY'}) - - add_1 = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: 0.0001}) - - add_2 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["z"], 1: 0.0000}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: add_2}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': add_1, 'Z': multiply_1}) - - transform_1 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': store_named_attribute, 'Translation': combine_xyz_2, - 'Rotation': (1.5708, 0.0000, 0.0000)}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': transform_1}, - attrs={'is_active_output': True}) - - -@node_utils.to_nodegroup('nodegroup_drawer_door_board', singleton=False, type='GeometryNodeTree') -def nodegroup_drawer_door_board(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'thickness', 0.5000), - ('NodeSocketFloat', 'width', 0.5000), - ('NodeSocketFloat', 'height', 0.5000)]) - - add = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["width"], 1: 0.0000}) - - add_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["thickness"], 1: 0.0000}) - - add_2 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["height"], 1: 0.0000}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': add, 'Y': add_1, 'Z': add_2}) - - cube = nw.new_node(Nodes.MeshCube, - input_kwargs={'Size': combine_xyz, 'Vertices X': 5, 'Vertices Y': 5, 'Vertices Z': 5}) - - store_named_attribute = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': cube.outputs["Mesh"], 'Name': 'uv_map', - 3: cube.outputs["UV Map"]}, - attrs={'data_type': 'FLOAT_VECTOR', 'domain': 'CORNER'}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: add_1, 1: -0.5000}, attrs={'operation': 'MULTIPLY'}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: add_2}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': multiply, 'Z': multiply_1}) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': store_named_attribute, 'Translation': combine_xyz_1}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': transform}, - attrs={'is_active_output': True}) - - -def geometry_nodes(nw: NodeWrangler, **kwargs): - # Code generated using version 2.6.4 of the node_transpiler - - door_thickness = nw.new_node(Nodes.Value, label='door_thickness') - door_thickness.outputs[0].default_value = kwargs['drawer_board_thickness'] - - drawer_board_width = nw.new_node(Nodes.Value, label='drawer_board_width') - drawer_board_width.outputs[0].default_value = kwargs['drawer_board_width'] - - drawer_board_height = nw.new_node(Nodes.Value, label='drawer_board_height') - drawer_board_height.outputs[0].default_value = kwargs['drawer_board_height'] - - drawer_door_board = 
nw.new_node(nodegroup_drawer_door_board().name, - input_kwargs={'thickness': door_thickness, 'width': drawer_board_width, - 'height': drawer_board_height}) - - knob_radius = nw.new_node(Nodes.Value, label='knob_radius') - knob_radius.outputs[0].default_value = kwargs['knob_radius'] - - knob_length = nw.new_node(Nodes.Value, label='knob_length') - knob_length.outputs[0].default_value = kwargs['knob_length'] - - door_knob = nw.new_node(nodegroup_door_knob().name, - input_kwargs={'Radius': knob_radius, 'length': knob_length, 'z': drawer_board_height}) - - drawer_depth = nw.new_node(Nodes.Value, label='drawer_depth') - drawer_depth.outputs[0].default_value = kwargs['drawer_depth'] - kwargs['drawer_board_thickness'] - - drawer_side_height = nw.new_node(Nodes.Value, label='drawer_side_height') - drawer_side_height.outputs[0].default_value = kwargs['drawer_side_height'] - - drawer_width = nw.new_node(Nodes.Value, label='drawer_width') - drawer_width.outputs[0].default_value = kwargs['drawer_width'] - - kallax_drawer_frame = nw.new_node(nodegroup_kallax_drawer_frame().name, - input_kwargs={'depth': drawer_depth, 'height': drawer_side_height, - 'thickness': door_thickness, 'width': drawer_width}) - - side_tilt_width = nw.new_node(Nodes.Value, label='side_tilt_width') - side_tilt_width.outputs[0].default_value = kwargs['side_tilt_width'] - - join_geometry = nw.new_node(Nodes.JoinGeometry, - input_kwargs={ - 'Geometry': [door_knob, drawer_door_board, kallax_drawer_frame]}) - - set_material_2 = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': join_geometry, - 'Material': surface.shaderfunc_to_material(kwargs['frame_material'])}) - - realize_instances = nw.new_node(Nodes.RealizeInstances, input_kwargs={'Geometry': set_material_2}) - - triangulate = nw.new_node('GeometryNodeTriangulate', input_kwargs={'Mesh': realize_instances}) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': triangulate, 'Rotation': (0.0000, 0.0000, -1.5708)}) - - group_output_1 = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': transform}, - attrs={'is_active_output': True}) - - -class CabinetDrawerBaseFactory(AssetFactory): - def __init__(self, factory_seed, params={}, coarse=False): - super(CabinetDrawerBaseFactory, self).__init__(factory_seed, coarse=coarse) - self.params = {} - - def get_asset_params(self, i=0): - params = self.params.copy() - if params.get('drawer_board_thickness', None) is None: - params['drawer_board_thickness'] = uniform(0.005, 0.01) - if params.get('drawer_board_width', None) is None: - params['drawer_board_width'] = uniform(0.3, 0.7) - if params.get('drawer_board_height', None) is None: - params['drawer_board_height'] = uniform(0.25, 0.4) - if params.get('drawer_depth', None) is None: - params['drawer_depth'] = uniform(0.3, 0.4) - if params.get('drawer_side_height', None) is None: - params['drawer_side_height'] = uniform(0.05, 0.2) - if params.get('drawer_width', None) is None: - params['drawer_width'] = params['drawer_board_width'] - uniform(0.015, 0.025) - if params.get('side_tilt_width', None) is None: - params['side_tilt_width'] = uniform(0.02, 0.03) - if params.get('knob_radius', None) is None: - params['knob_radius'] = uniform(0.003, 0.006) - if params.get('knob_length', None) is None: - params['knob_length'] = uniform(0.018, 0.035) - - if params.get('frame_material', None) is None: - params['frame_material'] = np.random.choice(['white', 'black_wood', 'wood'], p=[0.5, 0.2, 0.3]) - if params.get('knob_material', None) is None: - params['knob_material'] = 
np.random.choice([params['frame_material'], 'metal'], p=[0.5, 0.5]) - - params = self.get_material_func(params) - return params - - def get_material_func(self, params, randomness=True): - white_wood_params = shader_shelves_white_sampler() - black_wood_params = shader_shelves_black_wood_sampler() - normal_wood_params = shader_shelves_wood_sampler() - if params['frame_material'] == 'white': - if randomness: - params['frame_material'] = lambda x: shader_shelves_white(x, **white_wood_params) - else: - params['frame_material'] = shader_shelves_white - elif params['frame_material'] == 'black_wood': - if randomness: - params['frame_material'] = lambda x: shader_shelves_black_wood(x, **black_wood_params, z_axis_texture=True) - else: - params['frame_material'] = lambda x: shader_shelves_black_wood(x, z_axis_texture=True) - elif params['frame_material'] == 'wood': - if randomness: - params['frame_material'] = lambda x: shader_shelves_wood(x, **normal_wood_params, z_axis_texture=True) - else: - params['frame_material'] = lambda x: shader_shelves_wood(x, z_axis_texture=True) - - if params['knob_material'] == 'metal': - params['knob_material'] = metal.get_shader() - else: - params['knob_material'] = params['frame_material'] - - return params - - def create_asset(self, i=0, **params): - bpy.ops.mesh.primitive_plane_add( - size=1, enter_editmode=False, align='WORLD', location=(0, 0, 0), scale=(1, 1, 1)) - obj = bpy.context.active_object - - obj_params = self.get_asset_params(i) - surface.add_geomod(obj, geometry_nodes, apply=True, attributes=[], input_kwargs=obj_params) - - if params.get('ret_params', False): - return obj, obj_params - - return obj diff --git a/infinigen/assets/shelves/kitchen_cabinet.py b/infinigen/assets/shelves/kitchen_cabinet.py deleted file mode 100644 index c9e9d1335..000000000 --- a/infinigen/assets/shelves/kitchen_cabinet.py +++ /dev/null @@ -1,328 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
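The two files removed above (`infinigen/assets/shelves/doors.py` and `infinigen/assets/shelves/drawers.py`) define `CabinetDoorBaseFactory` and `CabinetDrawerBaseFactory`, which assemble door and drawer meshes from `@node_utils.to_nodegroup` geometry-node groups and fill any unspecified dimensions or materials in `get_asset_params`. As a minimal usage sketch only (not part of the diff; it assumes Blender's Python with `bpy` available and uses the pre-deletion import path that this change removes):

```python
# Sketch, not part of the diff: exercising the drawer factory deleted above.
# Assumes Blender's Python (bpy) with infinigen installed; the module path is the
# old infinigen.assets.shelves location that this diff deletes.
from infinigen.assets.shelves.drawers import CabinetDrawerBaseFactory

fac = CabinetDrawerBaseFactory(factory_seed=0)
drawer = fac.create_asset(i=0)                           # geometry-node drawer mesh as a bpy object
drawer2, used = fac.create_asset(i=0, ret_params=True)   # also returns the sampled parameter dict
```

In the deleted code, callers pin specific dimensions by assigning to the factory's `.params` attribute before calling `create_asset` (as `KitchenCabinetBaseFactory` does below); unset entries are re-sampled on every call.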
- -# Authors: Beining Han - -from numpy.random import uniform, normal, randint - -from infinigen.assets.materials.shelf_shaders import get_shelf_material -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core import surface -from infinigen.core.placement.factory import AssetFactory -import numpy as np -from infinigen.core.util import blender as butil -from infinigen.core import tagging, tags as t - -from infinigen.core.util.math import FixedSeed - -import bpy -from infinigen.assets.shelves.utils import nodegroup_tagged_cube, blender_rotate -from infinigen.assets.shelves.large_shelf import LargeShelfBaseFactory, LargeShelfFactory -from infinigen.assets.shelves.doors import CabinetDoorBaseFactory -from infinigen.assets.shelves.drawers import CabinetDrawerBaseFactory -from infinigen.assets.materials.shelf_shaders import ( - shader_shelves_white, shader_shelves_white_sampler, - shader_shelves_black_wood, shader_shelves_black_wood_sampler, - shader_shelves_wood, shader_shelves_wood_sampler -) -from infinigen.assets.utils.object import new_bbox - - -def geometry_nodes(nw: NodeWrangler, **kwargs): - # Code generated using version 2.6.4 of the node_transpiler - cabinets = [] - for i, component in enumerate(kwargs['components']): - frame_info = nw.new_node(Nodes.ObjectInfo, input_kwargs={'Object': component[0]}) - - attachments = [] - if component[1] == 'door': - right_door_info = nw.new_node(Nodes.ObjectInfo, input_kwargs={'Object': component[2][0]}) - left_door_info = nw.new_node(Nodes.ObjectInfo, input_kwargs={'Object':component[2][1]}) - - transform_r = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': right_door_info.outputs['Geometry'], - 'Translation': component[2][2]['door_hinge_pos'][0], - 'Rotation': (0, 0, component[2][2]['door_open_angle'])}) - attachments.append(transform_r) - if len(component[2][2]['door_hinge_pos']) > 1: - transform_l = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': left_door_info.outputs['Geometry'], - 'Translation': component[2][2]['door_hinge_pos'][1], - 'Rotation': (0, 0, component[2][2]['door_open_angle'])}) - attachments.append(transform_l) - elif component[1] == 'drawer': - - for j, drawer in enumerate(component[2]): - drawer_info = nw.new_node(Nodes.ObjectInfo, input_kwargs={'Object': drawer[0]}) - transform = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': drawer_info.outputs['Geometry'], - 'Translation': drawer[1]['drawer_hinge_pos']}) - attachments.append(transform) - else: - continue - - join_geometry = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': attachments}) - #[frame_info.outputs['Geometry']]}) - - transform = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': join_geometry, - 'Translation': (0, kwargs['y_translations'][i], 0)}) - cabinets.append(transform) - - try: - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': cabinets}) - except TypeError: - import pdb; pdb.set_trace() - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': join_geometry_1}, attrs={'is_active_output': True}) - - -class KitchenCabinetBaseFactory(AssetFactory): - def __init__(self, factory_seed, params={}, coarse=False): - super(KitchenCabinetBaseFactory, self).__init__(factory_seed, coarse=coarse) - self.frame_params = {} - self.material_params = {} - self.cabinet_widths = [] - self.frame_fac = LargeShelfBaseFactory(factory_seed) - self.door_fac = CabinetDoorBaseFactory(factory_seed) - self.drawer_fac = 
CabinetDrawerBaseFactory(factory_seed) - self.drawer_only = False - with FixedSeed(factory_seed): - self.params = self.sample_params() - - def sample_params(self): - pass - - def get_material_params(self): - with FixedSeed(self.factory_seed): - params = self.material_params.copy() - if params.get('frame_material', None) is None: - with FixedSeed(self.factory_seed): - params['frame_material'] = np.random.choice(['white', 'black_wood', 'wood'], p=[0.4, 0.3, 0.3]) - params['board_material'] = params['frame_material'] - return self.get_material_func(params, randomness=True) - - def get_material_func(self, params, randomness=True): - with FixedSeed(self.factory_seed): - white_wood_params = shader_shelves_white_sampler() - black_wood_params = shader_shelves_black_wood_sampler() - normal_wood_params = shader_shelves_wood_sampler() - if params['frame_material'] == 'white': - if randomness: - params['frame_material'] = lambda x: shader_shelves_white(x, **white_wood_params) - else: - params['frame_material'] = shader_shelves_white - elif params['frame_material'] == 'black_wood': - if randomness: - params['frame_material'] = lambda x: shader_shelves_black_wood(x, **black_wood_params, z_axis_texture=True) - else: - params['frame_material'] = lambda x: shader_shelves_black_wood(x, z_axis_texture=True) - elif params['frame_material'] == 'wood': - if randomness: - params['frame_material'] = lambda x: shader_shelves_wood(x, **normal_wood_params, z_axis_texture=True) - else: - params['frame_material'] = lambda x: shader_shelves_wood(x, z_axis_texture=True) - - if params['board_material'] == 'white': - if randomness: - params['board_material'] = lambda x: shader_shelves_white(x, **white_wood_params) - else: - params['board_material'] = shader_shelves_white - elif params['board_material'] == 'black_wood': - if randomness: - params['board_material'] = lambda x: shader_shelves_black_wood(x, **black_wood_params) - else: - params['board_material'] = shader_shelves_black_wood - elif params['board_material'] == 'wood': - if randomness: - params['board_material'] = lambda x: shader_shelves_wood(x, **normal_wood_params) - else: - params['board_material'] = shader_shelves_wood - - params['panel_meterial'] = params['frame_material'] - params['knob_material'] = params['frame_material'] - return params - - def get_frame_params(self, width, i=0): - params = self.frame_params.copy() - params['shelf_cell_width'] = [width] - params.update(self.material_params.copy()) - return params - - def get_attach_params(self, attach_type, i=0): - param_sets = [] - if attach_type == 'none': - pass - elif attach_type == 'door': - params = dict() - shelf_width = self.frame_params['shelf_width'] + self.frame_params['side_board_thickness'] * 2 - if shelf_width <= 0.6: - params['door_width'] = shelf_width - params['has_mid_ramp'] = False - params['edge_thickness_1'] = 0.01 - params['door_hinge_pos'] = [(self.frame_params['shelf_depth'] / 2. + 0.0025, -shelf_width / 2., - self.frame_params['bottom_board_height'])] - params['door_open_angle'] = 0 - else: - params['door_width'] = shelf_width / 2. - 0.0005 - params['has_mid_ramp'] = False - params['edge_thickness_1'] = 0.01 - params['door_hinge_pos'] = [(self.frame_params['shelf_depth'] / 2. + 0.008, -shelf_width / 2., - self.frame_params['bottom_board_height']), - (self.frame_params['shelf_depth'] / 2. 
+ 0.008, shelf_width / 2., - self.frame_params['bottom_board_height'])] - params['door_open_angle'] = 0 - - params['door_height'] = (self.frame_params['division_board_z_translation'][-1] - - self.frame_params['division_board_z_translation'][0] + - self.frame_params['division_board_thickness']) - params.update(self.material_params.copy()) - param_sets.append(params) - elif attach_type == 'drawer': - for i, h in enumerate(self.frame_params['shelf_cell_height']): - params = dict() - drawer_h = (self.frame_params['division_board_z_translation'][i+1] - - self.frame_params['division_board_z_translation'][i] - - self.frame_params['division_board_thickness']) - drawer_depth = self.frame_params['shelf_depth'] - params['drawer_board_width'] = self.frame_params['shelf_width'] - params['drawer_board_height'] = drawer_h - params['drawer_depth'] = drawer_depth - params['drawer_hinge_pos'] = (self.frame_params['shelf_depth'] / 2., 0, - (self.frame_params['division_board_thickness'] / 2. + - self.frame_params['division_board_z_translation'][i])) - params.update(self.material_params.copy()) - param_sets.append(params) - else: - raise NotImplementedError - - return param_sets - - def get_cabinet_params(self, i=0): - x_translations = [] - accum_w, thickness = 0, self.frame_params.get('side_board_thickness', 0.005) # instructed by Beining - for w in self.cabinet_widths: - accum_w += thickness + w / 2. - x_translations.append(accum_w) - accum_w += thickness + w / 2. + 0.0005 - return x_translations - - def create_cabinet_components(self, i, drawer_only=False): - # update material params - self.material_params = self.get_material_params() - - components = [] - for k, w in enumerate(self.cabinet_widths): - # create frame - frame_params = self.get_frame_params(w, i=i) - self.frame_fac.params = frame_params - frame, frame_params = self.frame_fac.create_asset(i=i, ret_params=True) - frame.name = f'cabinet_frame_{k}' - self.frame_params = frame_params - - # create attach - if drawer_only: - attach_type = np.random.choice(['drawer', 'door'], p=[0.5, 0.5]) - else: - attach_type = np.random.choice(['drawer', 'door', 'none'], p=[0.4, 0.4, 0.2]) - - attach_params = self.get_attach_params(attach_type, i=i) - if attach_type == 'door': - self.door_fac.params = attach_params[0] - self.door_fac.params['door_left_hinge'] = False - right_door, door_obj_params = self.door_fac.create_asset(i=i, ret_params=True) - right_door.name = f'cabinet_right_door_{k}' - self.door_fac.params = door_obj_params - self.door_fac.params['door_left_hinge'] = True - left_door, _ = self.door_fac.create_asset(i=i, ret_params=True) - left_door.name = f'cabinet_left_door_{k}' - components.append([frame, 'door', [right_door, left_door, attach_params[0]]]) - - elif attach_type == 'drawer': - drawers = [] - for j, p in enumerate(attach_params): - self.drawer_fac.params = p - drawer = self.drawer_fac.create_asset(i=i) - drawer.name = f'drawer_{k}_layer{j}' - drawers.append([drawer, p]) - components.append([frame, 'drawer', drawers]) - - elif attach_type == 'none': - components.append([frame, 'none']) - - else: - raise NotImplementedError - - return components - - def create_asset(self, i=0, **params): - components = self.create_cabinet_components(i=i, drawer_only=self.drawer_only) - cabinet_params = self.get_cabinet_params(i=i) - join_objs = [] - - contain_attach = False - for com in components: - if com[1] == 'none': - continue - else: - contain_attach = True - - if contain_attach: - bpy.ops.mesh.primitive_plane_add( - size=1, enter_editmode=False, 
align='WORLD', location=(0, 0, 0), scale=(1, 1, 1)) - obj = bpy.context.active_object - surface.add_geomod(obj, geometry_nodes, attributes=[], input_kwargs={ - 'components': components, - 'y_translations': cabinet_params - }, apply=True) - - join_objs += [obj] - - for i, c in enumerate(components): - if c[1] == 'door': - butil.delete(c[2][:-1]) - elif c[1] == 'drawer': - butil.delete([x[0] for x in c[2]]) - c[0].location = (0, cabinet_params[i], 0) - butil.apply_transform(c[0], loc=True) - join_objs.append(c[0]) - - #butil.delete(c[:1]) - obj = butil.join_objects(join_objs) - tagging.tag_system.relabel_obj(obj) - - return obj - - -class KitchenCabinetFactory(KitchenCabinetBaseFactory): - def __init__(self, factory_seed, params={}, coarse=False, dimensions=None, drawer_only=False): - self.dimensions = dimensions - super().__init__(factory_seed, params, coarse) - self.drawer_only = drawer_only - - def sample_params(self): - params = dict() - if self.dimensions is None: - dimensions = ( - uniform(0.25, 0.35), - uniform(1.0, 4.0), - uniform(0.5, 1.3)) - self.dimensions = dimensions - else: - dimensions = self.dimensions - params['Dimensions'] = dimensions - - params['bottom_board_height'] = 0.06 - params['shelf_depth'] = params['Dimensions'][0] - 0.01 - num_h = int((params['Dimensions'][2] - 0.06) / 0.3) - params['shelf_cell_height'] = [(params['Dimensions'][2] - 0.06) / num_h for _ in range(num_h)] - - self.frame_params = params - - n_cells= max(int(params['Dimensions'][1] / 0.45),1) - intervals = np.random.uniform(0.55, 1.0, size=(n_cells,)) - intervals = intervals / intervals.sum() * params['Dimensions'][1] - self.cabinet_widths = intervals.tolist() - - def create_placeholder(self, **kwargs) -> bpy.types.Object: - x,y,z = self.dimensions - return new_bbox(-x/2 * 1.2, x/2 * 1.2, 0, y * 1.1, 0, (z + 0.06)) diff --git a/infinigen/assets/shelves/kitchen_space.py b/infinigen/assets/shelves/kitchen_space.py deleted file mode 100644 index 4f4136ca5..000000000 --- a/infinigen/assets/shelves/kitchen_space.py +++ /dev/null @@ -1,220 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
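The `kitchen_cabinet.py` removal above ends with `KitchenCabinetFactory`, which chains the shelf, door and drawer factories into a multi-bay cabinet and exposes a bounding-box placeholder. A hedged sketch of how it is driven (again assuming Blender's Python and the pre-deletion import path):

```python
# Sketch, not part of the diff: the kitchen cabinet factory deleted above.
# Assumes bpy is available; the dimensions tuple is (depth, width, height), matching
# the ranges sampled in KitchenCabinetFactory.sample_params.
from infinigen.assets.shelves.kitchen_cabinet import KitchenCabinetFactory

fac = KitchenCabinetFactory(factory_seed=0, dimensions=(0.3, 2.0, 0.9), drawer_only=True)
placeholder = fac.create_placeholder()  # padded axis-aligned bbox built with new_bbox
cabinet = fac.create_asset(i=0)         # frames + doors/drawers joined into one tagged object
```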
- -# Authors: Yiming Zuo, Stamatis Alexandropoulos - -import bpy -import bpy -import mathutils -from mathutils import Vector -from numpy.random import uniform, normal, randint, choice - -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface -from infinigen.core import tagging, tags as t - -from infinigen.assets.utils.object import new_bbox - -from infinigen.core.util.math import FixedSeed -from infinigen.core.placement.factory import AssetFactory - -from infinigen.assets.shelves.kitchen_cabinet import KitchenCabinetFactory -from infinigen.assets.table_decorations.sink import SinkFactory -from infinigen.assets.wall_decorations.range_hood import RangeHoodFactory - -from infinigen.core.util import blender as butil - -from infinigen.assets.tables.table_top import nodegroup_generate_table_top -from infinigen.assets.materials.table_materials import shader_marble -from infinigen.core.constraints.example_solver.room.constants import WALL_HEIGHT, WALL_THICKNESS - -def nodegroup_tag_cube(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketGeometry', 'Geometry', None)]) - - index = nw.new_node(Nodes.Index) - - equal = nw.new_node(Nodes.Compare, input_kwargs={2: index, 3: 5}, attrs={'data_type': 'INT', 'operation': 'EQUAL'}) - - cube = tagging.tag_nodegroup(nw, group_input.outputs['Geometry'], t.Subpart.SupportSurface, selection=equal) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': cube}, attrs={'is_active_output': True}) - -def geometry_nodes_add_cabinet_top(nw: NodeWrangler): - # Code generated using version 2.6.5 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketGeometry', 'Geometry', None)]) - - value = nw.new_node(Nodes.Value) - value.outputs[0].default_value = 0.0500 - - bounding_box = nw.new_node(Nodes.BoundingBox, input_kwargs={'Geometry': group_input.outputs["Geometry"]}) - - separate_xyz_1 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': bounding_box.outputs["Max"]}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': bounding_box.outputs["Min"]}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_1.outputs["X"], 1: separate_xyz.outputs["X"]}, - attrs={'operation': 'SUBTRACT'}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: subtract, 1: 1.4140}, attrs={'operation': 'MULTIPLY'}) - - subtract_1 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_1.outputs["Y"], 1: separate_xyz.outputs["Y"]}, - attrs={'operation': 'SUBTRACT'}) - - divide = nw.new_node(Nodes.Math, input_kwargs={0: subtract_1, 1: subtract}, attrs={'operation': 'DIVIDE'}) - - generatetabletop = nw.new_node(nodegroup_generate_table_top().name, - input_kwargs={'Thickness': value, 'N-gon': 4, 'Profile Width': multiply, 'Aspect Ratio': divide, 'Fillet Ratio': 0.0100, 'Fillet Radius Vertical': 0.0100}) - - set_material = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': generatetabletop, 'Material': surface.shaderfunc_to_material(shader_marble)}) - - add = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz.outputs["Y"], 1: separate_xyz_1.outputs["Y"]}) - - divide_1 = nw.new_node(Nodes.Math, input_kwargs={0: add, 1: 2.0000}, attrs={'operation': 'DIVIDE'}) - - separate_xyz_2 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': 
bounding_box.outputs["Max"]}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': divide_1, 'Z': separate_xyz_2.outputs["Z"]}) - - transform_geometry = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': set_material, 'Translation': combine_xyz}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [group_input.outputs["Geometry"], transform_geometry]}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': join_geometry}, attrs={'is_active_output': True}) - -def geometry_node_to_tagged_bbox(nw: NodeWrangler): - # Code generated using version 2.6.5 of the node_transpiler - group_input = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketGeometry', 'Geometry', None)]) - - bounding_box = nw.new_node(Nodes.BoundingBox, input_kwargs={'Geometry': group_input.outputs["Geometry"]}) - - transform_geometry = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': bounding_box, 'Scale': (0.9700, 0.9700, 1.000)}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': transform_geometry}, attrs={'is_active_output': True}) - -def geometry_node_to_bbox(nw: NodeWrangler): - # Code generated using version 2.6.5 of the node_transpiler - group_input = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketGeometry', 'Geometry', None)]) - - bounding_box = nw.new_node(Nodes.BoundingBox, input_kwargs={'Geometry': group_input.outputs["Geometry"]}) - - transform_geometry = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': bounding_box, 'Scale': (0.9700, 0.9700, 1.000)}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': transform_geometry}, attrs={'is_active_output': True}) - -class KitchenSpaceFactory(AssetFactory): - def __init__( - self, - factory_seed, - coarse=False, - dimensions=None, - island=False - ): - super(KitchenSpaceFactory, self).__init__(factory_seed, coarse=coarse) - - with FixedSeed(factory_seed): - - if dimensions is None: - dimensions = Vector(( - uniform(0.7, 1), - uniform(1.7, 5), - uniform(2.3, WALL_HEIGHT - WALL_THICKNESS) - )) - - self.island = island - if self.island: - dimensions.x *= uniform(1.5, 2) - - self.dimensions = dimensions - - self.params = self.sample_parameters(dimensions) - - - def sample_parameters(self, dimensions): - self.cabinet_bottom_height = uniform(0.8, 1.0) - self.cabinet_top_height = uniform(0.8, 1.0) - - def create_placeholder(self, **kwargs) -> bpy.types.Object: - x, y, z = self.dimensions - box = new_bbox(-x/2 * 1.08, x/2 * 1.08, 0, y, 0, self.cabinet_bottom_height + 0.095) - surface.add_geomod(box, nodegroup_tag_cube, apply=True) - - if not self.island: - box_top = new_bbox(-x/2, x*0.16, 0, y, z - self.cabinet_top_height - 0.1, z) - box = butil.join_objects([box, box_top]) - - return box - - def create_asset(self, **params): - x, y, z = self.dimensions - parts = [] - - - cabinet_bottom_height = self.cabinet_bottom_height - cabinet_top_height = self.cabinet_top_height - - cabinet_bottom_factory = KitchenCabinetFactory(self.factory_seed, dimensions=(x, y-0.15, cabinet_bottom_height), drawer_only=True) - cabinet_bottom = cabinet_bottom_factory(i=0) - parts.append(cabinet_bottom) - - surface.add_geomod(cabinet_bottom, geometry_nodes_add_cabinet_top, apply=True) - - if not self.island: - # top - top_mid_width = uniform(1.0, 1.3) - cabinet_top_width = (y - top_mid_width) / 2.0 - 0.05 - - cabinet_top_factory = KitchenCabinetFactory(self.factory_seed, dimensions=(x / 2.0, cabinet_top_width, cabinet_top_height), drawer_only=False) - 
cabinet_top_left = cabinet_top_factory(i=0) - cabinet_top_right = cabinet_top_factory(i=1) - - cabinet_top_left.location = (-x/4.0, 0.0, z-cabinet_top_height) - cabinet_top_right.location = (-x/4.0, y - cabinet_top_width, z-cabinet_top_height) - - # hood / cab - # mid_style = choice(['range_hood', 'cabinet']) - # mid_style = 'range_hood' - mid_style = choice(['cabinet']) - if mid_style == 'range_hood': - range_hood_factory = RangeHoodFactory(self.factory_seed, dimensions=(x*0.66, top_mid_width + 0.15, cabinet_top_height)) - top_mid = range_hood_factory(i=0) - top_mid.location = (-x*0.5, y/2.0, z-cabinet_top_height+0.05) - - elif mid_style == 'cabinet': - cabinet_top_mid_factory = KitchenCabinetFactory(self.factory_seed, dimensions=(x*0.66, top_mid_width, cabinet_top_height * 0.8), drawer_only=False) - top_mid = cabinet_top_mid_factory(i=0) - top_mid.location = (-x/6.0, y/2.0 - top_mid_width / 2.0, z-(cabinet_top_height * 0.8)) - - else: - raise NotImplementedError - - # parts += [sink, cabinet_top_left, cabinet_top_right, top_mid] - parts += [cabinet_top_left, cabinet_top_right, top_mid] - - kitchen_space = butil.join_objects(parts)#[cabinet_bottom, sink, cabinet_top_left, cabinet_top_right, top_mid]) - - if not self.island: - kitchen_space.dimensions = self.dimensions - butil.apply_transform(kitchen_space) - - tagging.tag_system.relabel_obj(kitchen_space) - - return kitchen_space - -class KitchenIslandFactory(KitchenSpaceFactory): - - def __init__(self, factory_seed): - - super(KitchenIslandFactory, self).__init__( - factory_seed=factory_seed, - island=True, - ) \ No newline at end of file diff --git a/infinigen/assets/shelves/large_shelf.py b/infinigen/assets/shelves/large_shelf.py deleted file mode 100644 index 2ba1950ae..000000000 --- a/infinigen/assets/shelves/large_shelf.py +++ /dev/null @@ -1,616 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
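`kitchen_space.py`, deleted just above, layers these pieces: `KitchenSpaceFactory` builds a bottom `KitchenCabinetFactory` run, adds a marble counter top via `geometry_nodes_add_cabinet_top`, and, when not an island, places upper cabinets; `KitchenIslandFactory` is the `island=True` variant. A usage sketch under the same assumptions as the earlier examples:

```python
# Sketch, not part of the diff: the kitchen-space composites deleted above.
# Assumes bpy; note that create_asset takes no index argument in the deleted code.
from infinigen.assets.shelves.kitchen_space import KitchenSpaceFactory, KitchenIslandFactory

counter = KitchenSpaceFactory(factory_seed=0).create_asset()   # bottom cabinets + counter top + upper cabinets
island = KitchenIslandFactory(factory_seed=1).create_asset()   # island variant: wider base, no upper row
```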
- -# Authors: Beining Han - -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core import surface -from infinigen.core.placement.factory import AssetFactory -import numpy as np -from infinigen.core.util import blender as butil -from infinigen.core import tagging, tags as t - -import bpy -from infinigen.assets.shelves.utils import nodegroup_tagged_cube -from infinigen.assets.materials.shelf_shaders import ( - shader_shelves_white, shader_shelves_white_sampler, - shader_shelves_black_wood, shader_shelves_black_wood_sampler, - shader_shelves_wood, shader_shelves_wood_sampler, - shader_shelves_white_metallic, shader_shelves_white_metallic_sampler, - shader_shelves_black_metallic, shader_shelves_black_metallic_sampler) - - -@node_utils.to_nodegroup('nodegroup_screw_head', singleton=False, type='GeometryNodeTree') -def nodegroup_screw_head(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloatDistance', 'Depth', 0.0050), - ('NodeSocketFloatDistance', 'Radius', 1.0000), - ('NodeSocketFloat', 'division_thickness', 0.5000), - ('NodeSocketFloat', 'width', 0.5000), - ('NodeSocketFloat', 'depth', 0.5000), - ('NodeSocketFloat', 'screw_width_gap', 0.5000), - ('NodeSocketFloat', 'screw_depth_gap', 0.0000)]) - - cylinder = nw.new_node('GeometryNodeMeshCylinder', - input_kwargs={'Radius': group_input.outputs["Radius"], - 'Depth': group_input.outputs["Depth"]}, - attrs={'fill_type': 'TRIANGLE_FAN'}) - - transform = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': cylinder.outputs["Mesh"]}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["width"]}, attrs={'operation': 'MULTIPLY'}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: multiply, 1: group_input.outputs["screw_width_gap"]}, - attrs={'operation': 'SUBTRACT'}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["depth"]}, - attrs={'operation': 'MULTIPLY'}) - - add = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["screw_width_gap"], 1: 0.0000}) - - subtract_1 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_1, 1: add}, attrs={'operation': 'SUBTRACT'}) - - multiply_2 = nw.new_node(Nodes.Math, input_kwargs={0: subtract_1, 1: -1.0000}, attrs={'operation': 'MULTIPLY'}) - - multiply_3 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["division_thickness"], 1: -0.5000}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': subtract, 'Y': multiply_2, 'Z': multiply_3}) - - transform_1 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': transform, 'Translation': combine_xyz}) - - combine_xyz_4 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': subtract, 'Y': subtract_1, 'Z': multiply_3}) - - transform_6 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': transform, 'Translation': combine_xyz_4}) - - join_geometry_2 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [transform_1, transform_6]}) - - transform_4 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': join_geometry_2, 'Scale': (-1.0000, 1.0000, 1.0000)}) - - join_geometry_3 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [transform_4, join_geometry_2]}) - - realize_instances = nw.new_node(Nodes.RealizeInstances, input_kwargs={'Geometry': join_geometry_3}) - - group_output = 
nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': realize_instances}, - attrs={'is_active_output': True}) - - -@node_utils.to_nodegroup('nodegroup_attachment', singleton=False, type='GeometryNodeTree') -def nodegroup_attachment(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'attach_thickness', 0.0000), - ('NodeSocketFloat', 'attach_length', 0.0000), - ('NodeSocketFloat', 'attach_z_translation', 0.0000), - ('NodeSocketFloat', 'depth', 0.5000), - ('NodeSocketFloat', 'width', 0.5000), - ('NodeSocketFloat', 'attach_gap', 0.5000), - ('NodeSocketFloat', 'attach_width', 0.5000)]) - - add = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["attach_width"], 1: 0.0000}) - - add_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["attach_length"], 1: 0.0000}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': add, 'Y': add_1, 'Z': group_input.outputs["attach_thickness"]}) - - cube = nw.new_node(Nodes.MeshCube, - input_kwargs={'Size': combine_xyz, 'Vertices X': 5, 'Vertices Y': 5, 'Vertices Z': 5}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["width"]}, attrs={'operation': 'MULTIPLY'}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: multiply, 1: group_input.outputs["attach_gap"]}, - attrs={'operation': 'SUBTRACT'}) - - subtract_1 = nw.new_node(Nodes.Math, input_kwargs={0: subtract, 1: add}, attrs={'operation': 'SUBTRACT'}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: add_1}, attrs={'operation': 'MULTIPLY'}) - - multiply_2 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["depth"], 1: -0.5000}, - attrs={'operation': 'MULTIPLY'}) - - add_2 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_1, 1: multiply_2}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': subtract_1, 'Y': add_2, - 'Z': group_input.outputs["attach_z_translation"]}) - - transform = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': cube, 'Translation': combine_xyz_1}) - - transform_1 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': transform, 'Scale': (-1.0000, 1.0000, 1.0000)}) - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [transform_1, transform]}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': join_geometry_1}, - attrs={'is_active_output': True}) - - -@node_utils.to_nodegroup('nodegroup_division_board', singleton=False, type='GeometryNodeTree') -def nodegroup_division_board(nw: NodeWrangler, material, tag_support=False): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'thickness', 0.0000), - ('NodeSocketFloat', 'width', 0.0000), - ('NodeSocketFloat', 'depth', 0.0000), - ('NodeSocketFloat', 'z_translation', 0.0000), - ('NodeSocketFloat', 'x_translation', 0.0000), - ('NodeSocketFloat', 'screw_depth', 0.0000), - ('NodeSocketFloat', 'screw_radius', 0.0000), - ('NodeSocketFloat', 'screw_width_gap', 0.0000), - ('NodeSocketFloat', 'screw_depth_gap', 0.0000)]) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': group_input.outputs["width"], 'Y': group_input.outputs["depth"], - 'Z': group_input.outputs["thickness"]}) - - if tag_support: - cube = nw.new_node(nodegroup_tagged_cube().name, input_kwargs={'Size': combine_xyz}) - else: - cube = nw.new_node(Nodes.MeshCube, - input_kwargs={'Size': combine_xyz, 
'Vertices X': 10, 'Vertices Y': 10, 'Vertices Z': 10}) - - screw_head = nw.new_node(nodegroup_screw_head().name, - input_kwargs={'Depth': group_input.outputs["screw_depth"], - 'Radius': group_input.outputs["screw_radius"], - 'division_thickness': group_input.outputs["thickness"], - 'width': group_input.outputs["width"], 'depth': group_input.outputs["depth"], - 'screw_width_gap': group_input.outputs["screw_width_gap"], - 'screw_depth_gap': group_input.outputs["screw_depth_gap"]}) - - - join_geometry = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [cube, screw_head]}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': group_input.outputs["x_translation"], - 'Z': group_input.outputs["z_translation"]}) - - transform = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': join_geometry, 'Translation': combine_xyz_1}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': transform}, - attrs={'is_active_output': True}) - - -@node_utils.to_nodegroup('nodegroup_bottom_board', singleton=False, type='GeometryNodeTree') -def nodegroup_bottom_board(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'thickness', 0.0000), - ('NodeSocketFloat', 'depth', 0.5000), - ('NodeSocketFloat', 'y_gap', 0.5000), - ('NodeSocketFloat', 'x_translation', 0.0000), - ('NodeSocketFloat', 'height', 0.5000), - ('NodeSocketFloat', 'width', 0.0000)]) - - add = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["height"], 1: 0.0000}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': group_input.outputs["width"], 'Y': group_input.outputs["thickness"], - 'Z': add}) - - cube = nw.new_node(Nodes.MeshCube, - input_kwargs={'Size': combine_xyz, 'Vertices X': 10, 'Vertices Y': 10, 'Vertices Z': 10}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["depth"]}, attrs={'operation': 'MULTIPLY'}) - - subtract = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: group_input.outputs["y_gap"]}, - attrs={'operation': 'SUBTRACT'}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: add}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': group_input.outputs["x_translation"], 'Y': subtract, - 'Z': multiply_1}) - - transform = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': cube, 'Translation': combine_xyz_1}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': transform}, - attrs={'is_active_output': True}) - - -@node_utils.to_nodegroup('nodegroup_back_board', singleton=False, type='GeometryNodeTree') -def nodegroup_back_board(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'width', 0.0000), - ('NodeSocketFloat', 'thickness', 0.5000), - ('NodeSocketFloat', 'height', 0.5000), - ('NodeSocketFloat', 'depth', 0.5000)]) - - add = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["thickness"], 1: 0.0000}) - - add_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["height"], 1: 0.0000}) - - combine_xyz_4 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': group_input.outputs["width"], 'Y': add, 'Z': add_1}) - - cube_2 = nw.new_node(Nodes.MeshCube, - input_kwargs={'Size': combine_xyz_4, 'Vertices X': 10, 'Vertices Y': 10, 'Vertices Z': 10}) - - add_2 = nw.new_node(Nodes.Math, input_kwargs={0: 
group_input.outputs["depth"], 1: 0.0000}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: add, 1: -0.5000}, attrs={'operation': 'MULTIPLY'}) - - multiply_add = nw.new_node(Nodes.Math, input_kwargs={0: add_2, 1: -0.5000, 2: multiply}, - attrs={'operation': 'MULTIPLY_ADD'}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: add_1}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_5 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': multiply_add, 'Z': multiply_1}) - - transform_5 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': cube_2, 'Translation': combine_xyz_5}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': transform_5}, - attrs={'is_active_output': True}) - - -@node_utils.to_nodegroup('nodegroup_side_board', singleton=False, type='GeometryNodeTree') -def nodegroup_side_board(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'board_thickness', 0.5000), - ('NodeSocketFloat', 'depth', 0.5000), - ('NodeSocketFloat', 'height', 0.5000), - ('NodeSocketFloat', 'x_translation', 0.0000)]) - - add = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["board_thickness"], 1: 0.0000}) - - add_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["depth"], 1: 0.0000}) - - add_2 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["height"], 1: 0.0000}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': add, 'Y': add_1, 'Z': add_2}) - - cube = nw.new_node(Nodes.MeshCube, - input_kwargs={'Size': combine_xyz, 'Vertices X': 10, 'Vertices Y': 10, 'Vertices Z': 10}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: add_2, 1: 0.5000}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': group_input.outputs["x_translation"], 'Z': multiply}) - - transform = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': cube, 'Translation': combine_xyz_1}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': transform}, - attrs={'is_active_output': True}) - - -def geometry_nodes(nw: NodeWrangler, **kwargs): - # Code generated using version 2.6.4 of the node_transpiler - - side_board_thickness = nw.new_node(Nodes.Value, label='side_board_thickness') - side_board_thickness.outputs[0].default_value = kwargs['side_board_thickness'] - - shelf_depth = nw.new_node(Nodes.Value, label='shelf_depth') - shelf_depth.outputs[0].default_value = kwargs['shelf_depth'] - - add = nw.new_node(Nodes.Math, input_kwargs={0: shelf_depth, 1: 0.0040}) - - shelf_height = nw.new_node(Nodes.Value, label='shelf_height') - shelf_height.outputs[0].default_value = kwargs['shelf_height'] - - add_1 = nw.new_node(Nodes.Math, input_kwargs={0: shelf_height, 1: 0.0020}) - add_2 = nw.new_node(Nodes.Math, input_kwargs={0: shelf_height, 1: -0.0010}) - side_boards = [] - - for x in kwargs['side_board_x_translation']: - side_board_x_translation = nw.new_node(Nodes.Value, label='side_board_x_translation') - side_board_x_translation.outputs[0].default_value = x - - side_board = nw.new_node(nodegroup_side_board().name, - input_kwargs={'board_thickness': side_board_thickness, - 'depth': add, 'height': add_1, - 'x_translation': side_board_x_translation}) - side_boards.append(side_board) - - shelf_width = nw.new_node(Nodes.Value, label='shelf_width') - shelf_width.outputs[0].default_value = kwargs['shelf_width'] - - backboard_thickness = nw.new_node(Nodes.Value, 
label='backboard_thickness') - backboard_thickness.outputs[0].default_value = kwargs['backboard_thickness'] - - add_side = nw.new_node(Nodes.Math, input_kwargs={0: shelf_width, 1: kwargs['side_board_thickness'] * 2}) - back_board = nw.new_node(nodegroup_back_board().name, - input_kwargs={'width': add_side, 'thickness': backboard_thickness, - 'height': add_2, 'depth': shelf_depth}) - - bottom_board_y_gap = nw.new_node(Nodes.Value, label='bottom_board_y_gap') - bottom_board_y_gap.outputs[0].default_value = kwargs['bottom_board_y_gap'] - - bottom_board_height = nw.new_node(Nodes.Value, label='bottom_board_height') - bottom_board_height.outputs[0].default_value = kwargs['bottom_board_height'] - - bottom_boards = [] - for i in range(len(kwargs['shelf_cell_width'])): - - bottom_gap_x_translation = nw.new_node(Nodes.Value, label='bottom_gap_x_translation') - bottom_gap_x_translation.outputs[0].default_value = kwargs['bottom_gap_x_translation'][i] - - shelf_cell_width = nw.new_node(Nodes.Value, label='shelf_cell_width') - shelf_cell_width.outputs[0].default_value = kwargs['shelf_cell_width'][i] - - bottomboard = nw.new_node(nodegroup_bottom_board().name, - input_kwargs={'thickness': side_board_thickness, 'depth': shelf_depth, - 'y_gap': bottom_board_y_gap, 'x_translation': bottom_gap_x_translation, - 'height': bottom_board_height, 'width': shelf_cell_width}) - - bottom_boards.append(bottomboard) - - join_geometry = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [back_board] + side_boards + bottom_boards}) - - realize_instances = nw.new_node(Nodes.RealizeInstances, input_kwargs={'Geometry': join_geometry}) - - set_material = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': realize_instances, - 'Material': surface.shaderfunc_to_material(kwargs['frame_material'])}) - - division_board_thickness = nw.new_node(Nodes.Value, label='division_board_thickness') - division_board_thickness.outputs[0].default_value = kwargs['division_board_thickness'] - - division_boards = [] - for i in range(len(kwargs['shelf_cell_width'])): - for j in range(len(kwargs['division_board_z_translation'])): - - division_board_z_translation = nw.new_node(Nodes.Value, label='division_board_z_translation') - division_board_z_translation.outputs[0].default_value = kwargs['division_board_z_translation'][j] - - division_board_x_translation = nw.new_node(Nodes.Value, label='division_board_x_translation') - division_board_x_translation.outputs[0].default_value = kwargs['division_board_x_translation'][i] - - shelf_cell_width = nw.new_node(Nodes.Value, label='shelf_cell_width') - shelf_cell_width.outputs[0].default_value = kwargs['shelf_cell_width'][i] - - screw_depth_head = nw.new_node(Nodes.Value, label='screw_depth_head') - screw_depth_head.outputs[0].default_value = kwargs['screw_depth_head'] - - screw_head_radius = nw.new_node(Nodes.Value, label='screw_head_radius') - screw_head_radius.outputs[0].default_value = kwargs['screw_head_radius'] - - screw_width_gap = nw.new_node(Nodes.Value, label='screw_width_gap') - screw_width_gap.outputs[0].default_value = kwargs['screw_width_gap'] - - screw_depth_gap = nw.new_node(Nodes.Value, label='screw_depth_gap') - screw_depth_gap.outputs[0].default_value = kwargs['screw_depth_gap'] - - division_board = nw.new_node(nodegroup_division_board(material=kwargs['board_material'], - tag_support=kwargs.get('tag_support', False)).name, - input_kwargs={'thickness': division_board_thickness, - 'width': shelf_cell_width, - 'depth': shelf_depth, - 'z_translation': 
division_board_z_translation, - 'x_translation': division_board_x_translation, - 'screw_depth': screw_depth_head, - 'screw_radius': screw_head_radius, - 'screw_width_gap': screw_width_gap, - 'screw_depth_gap': screw_depth_gap}) - division_boards.append(division_board) - - attach_thickness = nw.new_node(Nodes.Value, label='attach_thickness') - attach_thickness.outputs[0].default_value = kwargs['attach_thickness'] - - attach_length = nw.new_node(Nodes.Value, label='attach_length') - attach_length.outputs[0].default_value = kwargs['attach_length'] - - attach_z_translation = nw.new_node(Nodes.Value, label='attach_z_translation') - attach_z_translation.outputs[0].default_value = kwargs['attach_z_translation'] - - attach_gap = nw.new_node(Nodes.Value, label='attach_gap') - attach_gap.outputs[0].default_value = kwargs['attach_gap'] - - attach_width = nw.new_node(Nodes.Value, label='attach_width') - attach_width.outputs[0].default_value = kwargs['attach_width'] - - join_geometry_k = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': division_boards}) - - set_material_1 = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': join_geometry_k, - 'Material': surface.shaderfunc_to_material(kwargs['board_material'])}) - - join_geometry_3 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [set_material, set_material_1]}) - - realize_instances_3 = nw.new_node(Nodes.RealizeInstances, input_kwargs={'Geometry': join_geometry_3}) - - triangulate = nw.new_node('GeometryNodeTriangulate', input_kwargs={'Mesh': realize_instances_3}) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': triangulate, 'Rotation': (0.0000, 0.0000, -1.5708)}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': transform}, - attrs={'is_active_output': True}) - - -class LargeShelfBaseFactory(AssetFactory): - def __init__(self, factory_seed, params={}, coarse=False): - super(LargeShelfBaseFactory, self).__init__(factory_seed, coarse=coarse) - self.params = {} - - def sample_params(self): - return self.params.copy() - - def get_asset_params(self, i=0): - params = self.sample_params() - if params.get('shelf_depth', None) is None: - params['shelf_depth'] = np.clip(normal(0.26, 0.03), 0.18, 0.36) - if params.get('side_board_thickness', None) is None: - params['side_board_thickness'] = np.clip(normal(0.02, 0.002), 0.015, 0.025) - if params.get('back_board_thickness', None) is None: - params['backboard_thickness'] = 0.01 - if params.get('bottom_board_y_gap', None) is None: - params['bottom_board_y_gap'] = uniform(0.01, 0.05) - if params.get('bottom_board_height', None) is None: - params['bottom_board_height'] = (np.clip(normal(0.083, 0.01), 0.05, 0.11) * - np.random.choice([1., 0.], p=[0.8, 0.2])) - if params.get('division_board_thickness', None) is None: - params['division_board_thickness'] = np.clip(normal(0.02, 0.002), 0.015, 0.025) - if params.get('screw_depth_head', None) is None: - params['screw_depth_head'] = uniform(0.001, 0.004) - if params.get('screw_head_radius', None) is None: - params['screw_head_radius'] = uniform(0.001, 0.004) - if params.get('screw_width_gap', None) is None: - params['screw_width_gap'] = uniform(0.0, 0.02) - if params.get('screw_depth_gap', None) is None: - params['screw_depth_gap'] = uniform(0.025, 0.06) - if params.get('attach_length', None) is None: - params['attach_length'] = uniform(0.05, 0.1) - if params.get('attach_width', None) is None: - params['attach_width'] = uniform(0.01, 0.025) - if params.get('attach_thickness', None) is None: - 
params['attach_thickness'] = uniform(0.002, 0.005) - if params.get('attach_gap', None) is None: - params['attach_gap'] = uniform(0.0, 0.05) - if params.get('shelf_cell_width', None) is None: - num_h_cells = randint(1, 4) - shelf_cell_width = [] - for i in range(num_h_cells): - shelf_cell_width.append(np.random.choice([0.76, 0.36], p=[0.5, 0.5]) * - np.clip(normal(1., 0.1), 0.75, 1.25)) - params['shelf_cell_width'] = shelf_cell_width - if params.get('shelf_cell_height', None) is None: - num_v_cells = randint(3, 8) - shelf_cell_height = [] - for i in range(num_v_cells): - shelf_cell_height.append(0.3 * np.clip(normal(1., 0.1), 0.75, 1.25)) - params['shelf_cell_height'] = shelf_cell_height - - params = self.update_translation_params(params) - if params.get('frame_material', None) is None: - params['frame_material'] = np.random.choice(['white', 'black_wood', 'wood'], p=[0.4, 0.3, 0.3]) - if params.get('board_material', None) is None: - params['board_material'] = params['frame_material'] - - params = self.get_material_func(params) - params['tag_support'] = True - return params - - def get_material_func(self, params, randomness=True): - white_wood_params = shader_shelves_white_sampler() - black_wood_params = shader_shelves_black_wood_sampler() - normal_wood_params = shader_shelves_wood_sampler() - if params['frame_material'] == 'white': - if randomness: - params['frame_material'] = lambda x: shader_shelves_white(x, **white_wood_params) - else: - params['frame_material'] = shader_shelves_white - elif params['frame_material'] == 'black_wood': - if randomness: - params['frame_material'] = lambda x: shader_shelves_black_wood(x, **black_wood_params, z_axis_texture=True) - else: - params['frame_material'] = lambda x: shader_shelves_black_wood(x, z_axis_texture=True) - elif params['frame_material'] == 'wood': - if randomness: - params['frame_material'] = lambda x: shader_shelves_wood(x, **normal_wood_params, z_axis_texture=True) - else: - params['frame_material'] = lambda x: shader_shelves_wood(x, z_axis_texture=True) - - if params['board_material'] == 'white': - if randomness: - params['board_material'] = lambda x: shader_shelves_white(x, **white_wood_params) - else: - params['board_material'] = shader_shelves_white - elif params['board_material'] == 'black_wood': - if randomness: - params['board_material'] = lambda x: shader_shelves_black_wood(x, **black_wood_params) - else: - params['board_material'] = shader_shelves_black_wood - elif params['board_material'] == 'wood': - if randomness: - params['board_material'] = lambda x: shader_shelves_wood(x, **normal_wood_params) - else: - params['board_material'] = shader_shelves_wood - - return params - - def update_translation_params(self, params): - cell_widths = params['shelf_cell_width'] - cell_heights = params['shelf_cell_height'] - side_thickness = params['side_board_thickness'] - div_thickness = params['division_board_thickness'] - - # get shelf_width and shelf_height - width = (len(cell_widths) - 1) * side_thickness * 2 + (len(cell_widths) - 1) * 0.001 - height = (len(cell_heights) + 1) * div_thickness + params['bottom_board_height'] - for w in cell_widths: - width += w - for h in cell_heights: - height += h - - params['shelf_width'] = width - params['shelf_height'] = height - params['attach_z_translation'] = height - div_thickness - - # get side_board_x_translation - dist = - (width + side_thickness) / 2. 
- side_board_x_translation = [dist] - - for w in cell_widths: - dist += side_thickness + w - side_board_x_translation.append(dist) - dist += side_thickness + 0.001 - side_board_x_translation.append(dist) - side_board_x_translation = side_board_x_translation[:-1] - - # get division_board_z_translation - dist = params['bottom_board_height'] + div_thickness / 2. - division_board_z_translation = [dist] - for h in cell_heights: - dist += h + div_thickness - division_board_z_translation.append(dist) - - # get division_board_x_translation - division_board_x_translation = [] - for i in range(len(cell_widths)): - division_board_x_translation.append((side_board_x_translation[2 * i] + side_board_x_translation[2 * i+1]) / 2.) - - params['side_board_x_translation'] = side_board_x_translation - params['division_board_x_translation'] = division_board_x_translation - params['division_board_z_translation'] = division_board_z_translation - params['bottom_gap_x_translation'] = division_board_x_translation - - return params - - def create_asset(self, i=0, **params): - bpy.ops.mesh.primitive_plane_add( - size=1, enter_editmode=False, align='WORLD', location=(0, 0, 0), scale=(1, 1, 1)) - obj = bpy.context.active_object - - obj_params = self.get_asset_params(i) - surface.add_geomod(obj, geometry_nodes, attributes=[], apply=True, input_kwargs=obj_params) - - if params.get('ret_params', False): - return obj, obj_params - - tagging.tag_system.relabel_obj(obj) - - return obj - - -class LargeShelfFactory(LargeShelfBaseFactory): - def sample_params(self): - params = dict() - params['Dimensions'] = ( - uniform(0.25, 0.35), - uniform(0.3, 2.0), - uniform(0.9, 2.0) - ) - - params['bottom_board_height'] = 0.083 - params['shelf_depth'] = params['Dimensions'][0] - 0.01 - num_h = int((params['Dimensions'][2] - 0.083) / 0.3) - params['shelf_cell_height'] = [(params['Dimensions'][2] - 0.083) / num_h for _ in range(num_h)] - num_v = max(int(params['Dimensions'][1] / 0.5), 1) - params['shelf_cell_width'] = [params['Dimensions'][1] / num_v for _ in range(num_v)] - return params diff --git a/infinigen/assets/shelves/simple_bookcase.py b/infinigen/assets/shelves/simple_bookcase.py deleted file mode 100644 index 6ea9796a9..000000000 --- a/infinigen/assets/shelves/simple_bookcase.py +++ /dev/null @@ -1,518 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
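
The shelf factories deleted above all follow the same `AssetFactory` pattern visible in this diff: construct with a `factory_seed` (plus optional `params`), then call `create_asset`, which adds a plane, applies the geometry-node modifier via `surface.add_geomod`, and returns a tagged Blender object. A minimal usage sketch, assuming Infinigen's bundled Blender/`bpy` runtime and the `infinigen.assets.shelves` import path that these removed files occupied (the path may differ in later layouts):

```python
# Sketch only: assumes a running Blender/bpy session with Infinigen installed.
# The import path below is the location being deleted in this diff and is used
# purely for illustration; it may not exist after the asset reorganization.
from infinigen.assets.shelves.large_shelf import LargeShelfFactory

factory = LargeShelfFactory(factory_seed=0)

# create_asset() adds a plane, applies the geometry_nodes(**params) modifier,
# relabels tags, and returns the resulting Blender object.
shelf = factory.create_asset(i=0)
print(shelf.name, tuple(shelf.dimensions))
```
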
- -# Authors: Beining Han - -from numpy.random import uniform, normal, randint - -from infinigen.assets.materials.shelf_shaders import get_shelf_material -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core import surface -from infinigen.core.placement.factory import AssetFactory -import numpy as np -from infinigen.core.util import blender as butil -from infinigen.core import tagging, tags as t - -import bpy -from infinigen.assets.shelves.utils import nodegroup_tagged_cube - - -@node_utils.to_nodegroup('nodegroup_attach_gadget', singleton=False, type='GeometryNodeTree') -def nodegroup_attach_gadget(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketFloat', 'division_thickness', 0.5000), - ('NodeSocketFloat', 'height', 0.5000), ('NodeSocketFloat', 'attach_thickness', 0.5000), - ('NodeSocketFloat', 'attach_width', 0.5000), ('NodeSocketFloat', 'attach_back_len', 0.5000), - ('NodeSocketFloat', 'attach_top_len', 0.5000), ('NodeSocketFloat', 'depth', 0.5000)]) - - add = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["attach_width"], 1: 0.0000}) - - add_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["attach_top_len"], 1: 0.0000}) - - add_2 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["attach_thickness"], 1: 0.0000}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': add, 'Y': add_1, 'Z': add_2}) - - cube = nw.new_node(Nodes.MeshCube, - input_kwargs={'Size': combine_xyz, 'Vertices X': 5, 'Vertices Y': 5, 'Vertices Z': 5}) - - add_3 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["depth"], 1: 0.0000}) - - subtract = nw.new_node(Nodes.Math, input_kwargs={0: add_3, 1: add_1}, attrs={'operation': 'SUBTRACT'}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: subtract, 1: -0.5000}, attrs={'operation': 'MULTIPLY'}) - - subtract_1 = nw.new_node(Nodes.Math, input_kwargs={ - 0: group_input.outputs["height"], - 1: group_input.outputs["division_thickness"] - }, attrs={'operation': 'SUBTRACT'}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': multiply, 'Z': subtract_1}) - - transform = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': cube, 'Translation': combine_xyz_2}) - - add_4 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["attach_back_len"], 1: 0.0000}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': add, 'Y': add_2, 'Z': add_4}) - - cube_1 = nw.new_node(Nodes.MeshCube, input_kwargs={ - 'Size': combine_xyz_1, - 'Vertices X': 5, - 'Vertices Y': 5, - 'Vertices Z': 5 - }) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: add_3, 1: -0.5000}, attrs={'operation': 'MULTIPLY'}) - - multiply_2 = nw.new_node(Nodes.Math, input_kwargs={0: add_4}, attrs={'operation': 'MULTIPLY'}) - - subtract_2 = nw.new_node(Nodes.Math, input_kwargs={0: subtract_1, 1: multiply_2}, - attrs={'operation': 'SUBTRACT'}) - - combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': multiply_1, 'Z': subtract_2}) - - transform_1 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': cube_1, 'Translation': combine_xyz_3}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'attach1': transform, 'attach2': transform_1}, - attrs={'is_active_output': True}) - - -@node_utils.to_nodegroup('nodegroup_screw_head', singleton=False, type='GeometryNodeTree') -def nodegroup_screw_head(nw: 
NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketFloatDistance', 'Depth', 0.0050), - ('NodeSocketFloatDistance', 'Radius', 1.0000), ('NodeSocketFloat', 'bottom_gap', 0.5000), - ('NodeSocketFloat', 'division_thickness', 0.5000), ('NodeSocketFloat', 'width', 0.5000), - ('NodeSocketFloat', 'height', 0.5000), ('NodeSocketFloat', 'depth', 0.5000), - ('NodeSocketFloat', 'screw_gap', 0.5000)]) - - cylinder = nw.new_node('GeometryNodeMeshCylinder', input_kwargs={ - 'Radius': group_input.outputs["Radius"], - 'Depth': group_input.outputs["Depth"] - }, attrs={'fill_type': 'TRIANGLE_FAN'}) - - transform = nw.new_node(Nodes.Transform, input_kwargs={ - 'Geometry': cylinder.outputs["Mesh"], - 'Rotation': (0.0000, 1.5708, 0.0000) - }) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["width"]}, - attrs={'operation': 'MULTIPLY'}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["depth"]}, - attrs={'operation': 'MULTIPLY'}) - - add = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["screw_gap"], 1: 0.0000}) - - subtract = nw.new_node(Nodes.Math, input_kwargs={0: multiply_1, 1: add}, attrs={'operation': 'SUBTRACT'}) - - multiply_2 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["division_thickness"]}, - attrs={'operation': 'MULTIPLY'}) - - subtract_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["height"], 1: multiply_2}, - attrs={'operation': 'SUBTRACT'}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply, 'Y': subtract, 'Z': subtract_1}) - - transform_2 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': transform, 'Translation': combine_xyz_1}) - - add_1 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_2, 1: group_input.outputs["bottom_gap"]}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply, 'Y': subtract, 'Z': add_1}) - - transform_1 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': transform, 'Translation': combine_xyz}) - - multiply_3 = nw.new_node(Nodes.Math, input_kwargs={0: subtract, 1: -1.0000}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': multiply, 'Y': multiply_3, 'Z': subtract_1}) - - transform_3 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': transform, 'Translation': combine_xyz_2}) - - add_2 = nw.new_node(Nodes.Math, input_kwargs={0: subtract_1, 1: add_1}) - - multiply_4 = nw.new_node(Nodes.Math, input_kwargs={0: add_2}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply, 'Z': multiply_4}) - - transform_5 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': transform, 'Translation': combine_xyz_3}) - - combine_xyz_4 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply, 'Y': multiply_3, 'Z': add_1}) - - transform_6 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': transform, 'Translation': combine_xyz_4}) - - join_geometry_2 = nw.new_node(Nodes.JoinGeometry, input_kwargs={ - 'Geometry': [transform_2, transform_1, transform_3, transform_5, transform_6] - }) - - transform_4 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': join_geometry_2, 'Scale': (-1.0000, 1.0000, 1.0000)}) - - join_geometry_3 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [transform_4, join_geometry_2]}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': 
join_geometry_3}, - attrs={'is_active_output': True}) - - -@node_utils.to_nodegroup('nodegroup_back_board', singleton=False, type='GeometryNodeTree') -def nodegroup_back_board(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketFloat', 'width', 0.0000), - ('NodeSocketFloat', 'thickness', 0.5000), ('NodeSocketFloat', 'height', 0.5000), - ('NodeSocketFloat', 'depth', 0.5000)]) - - add = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["thickness"], 1: 0.0000}) - - add_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["height"], 1: 0.0000}) - - combine_xyz_4 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': group_input.outputs["width"], 'Y': add, 'Z': add_1}) - - cube_2 = nw.new_node(Nodes.MeshCube, input_kwargs={ - 'Size': combine_xyz_4, - 'Vertices X': 10, - 'Vertices Y': 10, - 'Vertices Z': 10 - }) - - add_2 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["depth"], 1: 0.0000}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: add, 1: -0.5000}, attrs={'operation': 'MULTIPLY'}) - - multiply_add = nw.new_node(Nodes.Math, input_kwargs={0: add_2, 1: -0.5000, 2: multiply}, - attrs={'operation': 'MULTIPLY_ADD'}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: add_1}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_5 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': multiply_add, 'Z': multiply_1}) - - transform_5 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': cube_2, 'Translation': combine_xyz_5}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': transform_5}, - attrs={'is_active_output': True}) - - -@node_utils.to_nodegroup('nodegroup_division_board', singleton=False, type='GeometryNodeTree') -def nodegroup_division_board(nw: NodeWrangler, tag_support=False): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketFloat', 'board_thickness', 0.0000), - ('NodeSocketFloat', 'depth', 0.5000), ('NodeSocketFloat', 'width', 0.5000), - ('NodeSocketFloat', 'side_thickness', 0.5000)]) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["side_thickness"], 1: 2.0000}, - attrs={'operation': 'MULTIPLY'}) - - subtract = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["width"], 1: multiply}, - attrs={'operation': 'SUBTRACT'}) - - add = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["depth"], 1: 0.0000}) - - combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={ - 'X': subtract, - 'Y': add, - 'Z': group_input.outputs["board_thickness"] - }) - - if tag_support: - cube_1 = nw.new_node(nodegroup_tagged_cube().name, input_kwargs={'Size': combine_xyz_3}) - else: - cube_1 = nw.new_node(Nodes.MeshCube, input_kwargs={ - 'Size': combine_xyz_3, - 'Vertices X': 10, - 'Vertices Y': 10, - 'Vertices Z': 10 - }) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Mesh': cube_1}, - attrs={'is_active_output': True}) - - -@node_utils.to_nodegroup('nodegroup_division_boards', singleton=False, type='GeometryNodeTree') -def nodegroup_division_boards(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketFloat', 'thickness', 0.5000), - ('NodeSocketFloat', 'height', 0.5000), ('NodeSocketFloat', 'gap', 0.5000), - ('NodeSocketGeometry', 'Geometry', None)]) - - realize_instances_1 = 
nw.new_node(Nodes.RealizeInstances, - input_kwargs={'Geometry': group_input.outputs["Geometry"]}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["thickness"]}, - attrs={'operation': 'MULTIPLY'}) - - add = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["gap"], 1: multiply}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': add}) - - transform_2 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': realize_instances_1, 'Translation': combine_xyz_1}) - - subtract = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["height"], 1: multiply}, - attrs={'operation': 'SUBTRACT'}) - - add_1 = nw.new_node(Nodes.Math, input_kwargs={0: subtract, 1: add}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: add_1}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': multiply_1}) - - transform_3 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': realize_instances_1, 'Translation': combine_xyz_2}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': subtract}) - - transform_4 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': realize_instances_1, 'Translation': combine_xyz}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={ - 'board1': transform_2, - 'board2': transform_3, - 'board3': transform_4 - }, attrs={'is_active_output': True}) - - -@node_utils.to_nodegroup('nodegroup_side_board', singleton=False, type='GeometryNodeTree') -def nodegroup_side_board(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketFloat', 'board_thickness', 0.5000), - ('NodeSocketFloat', 'depth', 0.5000), ('NodeSocketFloat', 'height', 0.5000), - ('NodeSocketFloat', 'width', 0.5000)]) - - add = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["board_thickness"], 1: 0.0000}) - - add_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["depth"], 1: 0.0000}) - - add_2 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["height"], 1: 0.0000}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': add, 'Y': add_1, 'Z': add_2}) - - cube = nw.new_node(Nodes.MeshCube, - input_kwargs={'Size': combine_xyz, 'Vertices X': 10, 'Vertices Y': 10, 'Vertices Z': 10}) - - add_3 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["width"], 1: 0.0000}) - - subtract = nw.new_node(Nodes.Math, input_kwargs={0: add_3, 1: add}, attrs={'operation': 'SUBTRACT'}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: subtract, 1: -0.5000}, attrs={'operation': 'MULTIPLY'}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: add_2, 1: 0.5000}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply, 'Z': multiply_1}) - - transform = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': cube, 'Translation': combine_xyz_1}) - - multiply_2 = nw.new_node(Nodes.Math, input_kwargs={0: subtract, 1: 0.5000}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply_2, 'Z': multiply_1}) - - transform_1 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': cube, 'Translation': combine_xyz_2}) - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [transform, transform_1]}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': join_geometry_1}, - attrs={'is_active_output': 
True}) - - -def geometry_nodes(nw: NodeWrangler, **kwargs): - # Code generated using version 2.6.4 of the node_transpiler - - side_board_thickness = nw.new_node(Nodes.Value, label='side_board_thickness') - side_board_thickness.outputs[0].default_value = kwargs['side_board_thickness'] - - shelf_depth = nw.new_node(Nodes.Value, label='shelf_depth') - shelf_depth.outputs[0].default_value = kwargs['depth'] - - shelf_height = nw.new_node(Nodes.Value, label='shelf_height') - shelf_height.outputs[0].default_value = kwargs['height'] - - shelf_width = nw.new_node(Nodes.Value, label='shelf_width') - shelf_width.outputs[0].default_value = kwargs['width'] - - side_board = nw.new_node(nodegroup_side_board().name, input_kwargs={ - 'board_thickness': side_board_thickness, - 'depth': shelf_depth, - 'height': shelf_height, - 'width': shelf_width - }) - - division_board_thickness = nw.new_node(Nodes.Value, label='division_board_thickness') - division_board_thickness.outputs[0].default_value = kwargs['division_board_thickness'] - - bottom_gap = nw.new_node(Nodes.Value, label='bottom_gap') - bottom_gap.outputs[0].default_value = kwargs['bottom_gap'] - - division_board = nw.new_node(nodegroup_division_board(tag_support=kwargs['tag_support']).name, - input_kwargs={ - 'board_thickness': division_board_thickness, - 'depth': shelf_depth, - 'width': shelf_width, - 'side_thickness': side_board_thickness - }) - - division_boards = nw.new_node(nodegroup_division_boards().name, input_kwargs={ - 'thickness': division_board_thickness, - 'height': shelf_height, - 'gap': bottom_gap, - 'Geometry': division_board - }) - - backboard_thickness = nw.new_node(Nodes.Value, label='backboard_thickness') - backboard_thickness.outputs[0].default_value = kwargs['backboard_thickness'] - - back_board = nw.new_node(nodegroup_back_board().name, input_kwargs={ - 'width': shelf_width, - 'thickness': backboard_thickness, - 'height': shelf_height, - 'depth': shelf_depth - }) - - join_geometry = nw.new_node(Nodes.JoinGeometry, input_kwargs={ - 'Geometry': [side_board, division_boards.outputs["board1"], division_boards.outputs["board2"], - back_board, division_boards.outputs["board3"]] - }) - - realize_instances = nw.new_node(Nodes.RealizeInstances, input_kwargs={'Geometry': join_geometry}) - - set_material = nw.new_node(Nodes.SetMaterial, input_kwargs={ - 'Geometry': realize_instances, - 'Material': kwargs['frame_material'] - }) - - screw_depth_head = nw.new_node(Nodes.Value, label='screw_depth_head') - screw_depth_head.outputs[0].default_value = kwargs['screw_head_depth'] - - screw_head_radius = nw.new_node(Nodes.Value, label='screw_head_radius') - screw_head_radius.outputs[0].default_value = kwargs['screw_head_radius'] - - screw_head_gap = nw.new_node(Nodes.Value, label='screw_head_gap') - screw_head_gap.outputs[0].default_value = kwargs['screw_head_dist'] - - screw_head = nw.new_node(nodegroup_screw_head().name, input_kwargs={ - 'Depth': screw_depth_head, - 'Radius': screw_head_radius, - 'bottom_gap': bottom_gap, - 'division_thickness': division_board_thickness, - 'width': shelf_width, - 'height': shelf_height, - 'depth': shelf_depth, - 'screw_gap': screw_head_gap - }) - - realize_instances_1 = nw.new_node(Nodes.RealizeInstances, input_kwargs={'Geometry': screw_head}) - - set_material_1 = nw.new_node(Nodes.SetMaterial, input_kwargs={ - 'Geometry': realize_instances_1, - 'Material': kwargs['metal_material'] - }) - - attach_thickness = nw.new_node(Nodes.Value, label='attach_thickness') - attach_thickness.outputs[0].default_value = 
kwargs['attach_thickness'] - - attach_width = nw.new_node(Nodes.Value, label='attach_width') - attach_width.outputs[0].default_value = kwargs['attach_width'] - - attach_back_length = nw.new_node(Nodes.Value, label='attach_back_length') - attach_back_length.outputs[0].default_value = kwargs['attach_back_length'] - - attach_top_length = nw.new_node(Nodes.Value, label='attach_top_length') - attach_top_length.outputs[0].default_value = kwargs['attach_top_length'] - - attach_gadget = nw.new_node(nodegroup_attach_gadget().name, input_kwargs={ - 'division_thickness': division_board_thickness, - 'height': shelf_height, - 'attach_thickness': attach_thickness, - 'attach_width': attach_width, - 'attach_back_len': attach_back_length, - 'attach_top_len': attach_top_length, - 'depth': shelf_depth - }) - - join_geometry_2 = nw.new_node(Nodes.JoinGeometry, input_kwargs={ - 'Geometry': [attach_gadget.outputs["attach1"], attach_gadget.outputs["attach2"]] - }) - - realize_instances_2 = nw.new_node(Nodes.RealizeInstances, input_kwargs={'Geometry': join_geometry_2}) - - set_material_2 = nw.new_node(Nodes.SetMaterial, input_kwargs={ - 'Geometry': realize_instances_2, - 'Material': kwargs['metal_material'] - }) - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [set_material, set_material_1, set_material_2]}) - - realize_instances_3 = nw.new_node(Nodes.RealizeInstances, input_kwargs={'Geometry': join_geometry_1}) - - triangulate = nw.new_node('GeometryNodeTriangulate', input_kwargs={'Mesh': realize_instances_3}) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': triangulate, 'Rotation': (0.0000, 0.0000, -1.5708)}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': transform}, - attrs={'is_active_output': True}) - - -class SimpleBookcaseBaseFactory(AssetFactory): - def __init__(self, factory_seed, params={}, coarse=False): - super(SimpleBookcaseBaseFactory, self).__init__(factory_seed, coarse=coarse) - self.params = params - - def sample_params(self): - return self.params.copy() - - def get_asset_params(self, i=0): - params = self.sample_params() - if params.get('depth', None) is None: - params['depth'] = np.clip(normal(0.3, 0.05), 0.15, 0.45) - if params.get('width', None) is None: - params['width'] = np.clip(normal(0.5, 0.1), 0.25, 0.75) - if params.get('height', None) is None: - params['height'] = np.clip(normal(0.8, 0.1), 0.5, 1.0) - params['side_board_thickness'] = uniform(0.005, 0.03) - params['division_board_thickness'] = np.clip(normal(0.015, 0.005), 0.005, 0.025) - params['bottom_gap'] = np.clip(normal(0.14, 0.05), 0.0, 0.2) - params['backboard_thickness'] = uniform(0.01, 0.02) - params['screw_head_depth'] = uniform(0.002, 0.008) - params['screw_head_radius'] = uniform(0.003, 0.008) - params['screw_head_dist'] = uniform(0.03, 0.1) - params['attach_thickness'] = uniform(0.002, 0.005) - params['attach_width'] = uniform(0.01, 0.04) - params['attach_top_length'] = uniform(0.03, 0.1) - params['attach_back_length'] = uniform(0.02, 0.05) - params['frame_material'] = get_shelf_material('white') - params['metal_material'] = get_shelf_material('metal') - params['tag_support'] = True - return params - - def create_asset(self, i=0, **params): - bpy.ops.mesh.primitive_plane_add(size=1, enter_editmode=False, align='WORLD', location=(0, 0, 0), - scale=(1, 1, 1)) - obj = bpy.context.active_object - - obj_params = self.get_asset_params(i) - surface.add_geomod(obj, geometry_nodes, apply=True, attributes=[], input_kwargs=obj_params) - 
tagging.tag_system.relabel_obj(obj) - - return obj - - -class SimpleBookcaseFactory(SimpleBookcaseBaseFactory): - def sample_params(self): - params = dict() - params['Dimensions'] = (uniform(0.25, 0.4), uniform(0.5, 0.7), uniform(0.7, 0.9)) - params['depth'] = params['Dimensions'][0] - 0.015 - params['width'] = params['Dimensions'][1] - params['height'] = params['Dimensions'][2] - return params diff --git a/infinigen/assets/shelves/simple_desk.py b/infinigen/assets/shelves/simple_desk.py deleted file mode 100644 index 440ae9fe5..000000000 --- a/infinigen/assets/shelves/simple_desk.py +++ /dev/null @@ -1,267 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Beining Han - -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core import surface -from infinigen.core.placement.factory import AssetFactory -import numpy as np -from infinigen.core.util import blender as butil -from infinigen.core import tagging, tags as t - -import bpy -from infinigen.assets.shelves.utils import nodegroup_tagged_cube -from infinigen.assets.materials.shelf_shaders import ( - shader_shelves_white, shader_shelves_white_sampler, - shader_shelves_black_wood, shader_shelves_black_wood_sampler, - shader_shelves_white_metallic, shader_shelves_white_metallic_sampler, - shader_shelves_black_metallic, shader_shelves_black_metallic_sampler) - - -@node_utils.to_nodegroup('nodegroup_table_legs', singleton=False, type='GeometryNodeTree') -def nodegroup_table_legs(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'thickness', 0.5000), - ('NodeSocketFloat', 'height', 0.5000), - ('NodeSocketFloatDistance', 'radius', 0.0200), - ('NodeSocketFloat', 'width', 0.5000), - ('NodeSocketFloat', 'depth', 0.5000), - ('NodeSocketFloat', 'dist', 0.5000)]) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["height"], 1: group_input.outputs["thickness"]}, - attrs={'operation': 'SUBTRACT'}) - - cylinder = nw.new_node('GeometryNodeMeshCylinder', - input_kwargs={'Radius': group_input.outputs["radius"], 'Depth': subtract, 'Vertices': 128}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["width"]}, attrs={'operation': 'MULTIPLY'}) - - add = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["dist"], 1: 0.0000}) - - subtract_1 = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: add}, attrs={'operation': 'SUBTRACT'}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={1: group_input.outputs["depth"]}, - attrs={'operation': 'MULTIPLY'}) - - subtract_2 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_1, 1: add}, attrs={'operation': 'SUBTRACT'}) - - multiply_2 = nw.new_node(Nodes.Math, input_kwargs={0: subtract}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': subtract_1, 'Y': subtract_2, 'Z': multiply_2}) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': cylinder.outputs["Mesh"], 'Translation': combine_xyz_2}) - - multiply_3 = nw.new_node(Nodes.Math, input_kwargs={0: subtract_1, 1: -1.0000}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply_3, 'Y': subtract_2, 'Z': 
multiply_2}) - - transform_2 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': cylinder.outputs["Mesh"], 'Translation': combine_xyz_3}) - - multiply_4 = nw.new_node(Nodes.Math, input_kwargs={0: subtract_2, 1: -1.0000}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_4 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': subtract_1, 'Y': multiply_4, 'Z': multiply_2}) - - transform_3 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': cylinder.outputs["Mesh"], 'Translation': combine_xyz_4}) - - combine_xyz_5 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply_3, 'Y': multiply_4, 'Z': multiply_2}) - - transform_4 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': cylinder.outputs["Mesh"], 'Translation': combine_xyz_5}) - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [transform, transform_2, transform_3, transform_4]}) - - realize_instances_1 = nw.new_node(Nodes.RealizeInstances, input_kwargs={'Geometry': join_geometry_1}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': realize_instances_1}, - attrs={'is_active_output': True}) - - -@node_utils.to_nodegroup('nodegroup_table_top', singleton=False, type='GeometryNodeTree') -def nodegroup_table_top(nw: NodeWrangler, tag_support=True): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'depth', 0.0000), - ('NodeSocketFloat', 'width', 0.0000), - ('NodeSocketFloat', 'height', 0.5000), - ('NodeSocketFloat', 'thickness', 0.5000)]) - - add = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["thickness"], 1: 0.0000}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': group_input.outputs["width"], 'Y': group_input.outputs["depth"], - 'Z': add}) - - if tag_support: - cube = nw.new_node(nodegroup_tagged_cube().name, input_kwargs={'Size': combine_xyz}) - - else: - cube = nw.new_node(Nodes.MeshCube, - input_kwargs={'Size': combine_xyz, 'Vertices X': 10, 'Vertices Y': 10, 'Vertices Z': 10}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: add}, attrs={'operation': 'MULTIPLY'}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["height"], 1: multiply}, - attrs={'operation': 'SUBTRACT'}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': subtract}) - - transform_1 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': cube, 'Translation': combine_xyz_1}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': transform_1}, - attrs={'is_active_output': True}) - - -def geometry_nodes(nw: NodeWrangler, **kwargs): - # Code generated using version 2.6.4 of the node_transpiler - - table_depth = nw.new_node(Nodes.Value, label='table_depth') - table_depth.outputs[0].default_value = kwargs['depth'] - - table_width = nw.new_node(Nodes.Value, label='table_width') - table_width.outputs[0].default_value = kwargs['width'] - - table_height = nw.new_node(Nodes.Value, label='table_height') - table_height.outputs[0].default_value = kwargs['height'] - - top_thickness = nw.new_node(Nodes.Value, label='top_thickness') - top_thickness.outputs[0].default_value = kwargs['thickness'] - - table_top = nw.new_node(nodegroup_table_top(tag_support=True).name, - input_kwargs={'depth': table_depth, 'width': table_width, 'height': table_height, - 'thickness': top_thickness}) - - set_material = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': table_top, - 'Material': 
surface.shaderfunc_to_material(kwargs['top_material'])}) - - leg_radius = nw.new_node(Nodes.Value, label='leg_radius') - leg_radius.outputs[0].default_value = kwargs['leg_radius'] - - leg_center_to_edge = nw.new_node(Nodes.Value, label='leg_center_to_edge') - leg_center_to_edge.outputs[0].default_value = kwargs['leg_dist'] - - table_legs = nw.new_node(nodegroup_table_legs().name, - input_kwargs={'thickness': top_thickness, 'height': table_height, 'radius': leg_radius, - 'width': table_width, 'depth': table_depth, 'dist': leg_center_to_edge}) - - set_material_1 = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': table_legs, - 'Material': surface.shaderfunc_to_material(kwargs['leg_material'])}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [set_material, set_material_1]}) - - realize_instances = nw.new_node(Nodes.RealizeInstances, input_kwargs={'Geometry': join_geometry}) - - triangulate = nw.new_node('GeometryNodeTriangulate', input_kwargs={'Mesh': realize_instances}) - - transform = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': triangulate, 'Rotation': (0.0000, 0.0000, 1.5708)}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': transform}, - attrs={'is_active_output': True}) - - -class SimpleDeskBaseFactory(AssetFactory): - - def __init__(self, factory_seed, params={}, coarse=False): - super(SimpleDeskBaseFactory, self).__init__(factory_seed, coarse=coarse) - self.params = params - - def sample_params(self): - return self.params.copy() - - def get_asset_params(self, i=0): - params = self.sample_params() - if params.get('depth', None) is None: - params['depth'] = np.clip(normal(0.6, 0.05), 0.45, 0.7) - if params.get('width', None) is None: - params['width'] = np.clip(normal(1.0, 0.1), 0.7, 1.3) - if params.get('height', None) is None: - params['height'] = np.clip(normal(0.73, 0.05), 0.6, 0.83) - if params.get('top_material', None) is None: - params['top_material'] = np.random.choice(['white', 'black_wood']) - if params.get('leg_material', None) is None: - params['leg_material'] = np.random.choice(['white', 'black']) - if params.get('leg_radius', None) is None: - params['leg_radius'] = uniform(0.01, 0.025) - if params.get('leg_dist', None) is None: - params['leg_dist'] = uniform(0.035, 0.07) - if params.get('thickness', None) is None: - params['thickness'] = uniform(0.01, 0.03) - - params = self.get_material_func(params) - return params - - def get_material_func(self, params, randomness=True): - if params['top_material'] == 'white': - if randomness: - params['top_material'] = lambda x: shader_shelves_white(x, **shader_shelves_white_sampler()) - else: - params['top_material'] = shader_shelves_white - elif params['top_material'] == 'black_wood': - if randomness: - params['top_material'] = lambda x: shader_shelves_black_wood(x, **shader_shelves_black_wood_sampler()) - else: - params['top_material'] = shader_shelves_black_wood - else: - raise NotImplementedError - - if params['leg_material'] == 'white': - if randomness: - params['leg_material'] = lambda x: shader_shelves_white_metallic(x, **shader_shelves_white_metallic_sampler()) - else: - params['leg_material'] = shader_shelves_white_metallic - elif params['leg_material'] == 'black': - if randomness: - params['leg_material'] = lambda x: shader_shelves_black_metallic(x, **shader_shelves_black_metallic_sampler()) - else: - params['leg_material'] = shader_shelves_black_metallic - else: - raise NotImplementedError - - return params - - def create_asset(self, i=0, 
**params): - bpy.ops.mesh.primitive_plane_add( - size=1, enter_editmode=False, align='WORLD', location=(0, 0, 0), scale=(1, 1, 1)) - obj = bpy.context.active_object - - obj_params = self.get_asset_params(i) - surface.add_geomod(obj, geometry_nodes, attributes=[], apply=True, input_kwargs=obj_params) - tagging.tag_system.relabel_obj(obj) - - return obj - - -class SimpleDeskFactory(SimpleDeskBaseFactory): - def sample_params(self): - params = dict() - params['Dimensions'] = (uniform(0.5, 0.75), - uniform(0.8, 2), - uniform(0.6, 0.8)) - params['depth'] = params['Dimensions'][0] - params['width'] = params['Dimensions'][1] - params['height'] = params['Dimensions'][2] - return params - -class SidetableDeskFactory(SimpleDeskBaseFactory): - def sample_params(self): - params = dict() - w = 0.55 * normal(1, 0.1) - params['Dimensions'] = (w, w, w * normal(1, 0.05)) - params['depth'] = params['Dimensions'][0] - params['width'] = params['Dimensions'][1] - params['height'] = params['Dimensions'][2] - return params - diff --git a/infinigen/assets/shelves/single_cabinet.py b/infinigen/assets/shelves/single_cabinet.py deleted file mode 100644 index 14dc77257..000000000 --- a/infinigen/assets/shelves/single_cabinet.py +++ /dev/null @@ -1,235 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Beining Han - -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core import surface -from infinigen.core.placement.factory import AssetFactory -import numpy as np -from infinigen.core.util import blender as butil -from infinigen.core import tagging, tags as t - -import bpy -from infinigen.assets.shelves.utils import nodegroup_tagged_cube, blender_rotate -from infinigen.assets.shelves.large_shelf import LargeShelfBaseFactory -from infinigen.assets.shelves.doors import CabinetDoorBaseFactory - -from infinigen.core.util.math import FixedSeed -from infinigen.assets.utils.object import new_bbox - -def geometry_cabinet_nodes(nw: NodeWrangler, **kwargs): - # Code generated using version 2.6.4 of the node_transpiler - right_door_info = nw.new_node(Nodes.ObjectInfo, input_kwargs={'Object': kwargs['door'][0]}) - left_door_info = nw.new_node(Nodes.ObjectInfo, input_kwargs={'Object': kwargs['door'][1]}) - shelf_info = nw.new_node(Nodes.ObjectInfo, input_kwargs={'Object': kwargs['shelf']}) - - doors = [] - transform_r = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': right_door_info.outputs['Geometry'], - 'Translation': kwargs['door_hinge_pos'][0], - 'Rotation': (0, 0, kwargs['door_open_angle'])}) - doors.append(transform_r) - if len(kwargs['door_hinge_pos']) > 1: - transform_l = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': left_door_info.outputs['Geometry'], - 'Translation': kwargs['door_hinge_pos'][1], - 'Rotation': (0, 0, kwargs['door_open_angle'])}) - doors.append(transform_l) - - attaches = [] - for pos in kwargs['attach_pos']: - cube = nw.new_node(Nodes.MeshCube, input_kwargs={'Size': (0.0006, 0.0200, 0.04500)}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': -0.0100}) - - transform = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': cube, 'Translation': combine_xyz}) - - cube_1 = nw.new_node(Nodes.MeshCube, input_kwargs={'Size': (0.0005, 0.0340, 0.0200)}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, 
input_kwargs={'Geometry': [transform, cube_1]}) - - transform_1 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': join_geometry, 'Translation': (0.0000, -0.0170, 0.0000)}) - - transform_2 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': transform_1, 'Rotation': (0.0000, 0.0000, -1.5708)}) - - transform_3 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': transform_2, 'Translation': pos}) - - attaches.append(transform_3) - - join_geometry_a = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': attaches}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': doors + [join_geometry_a]}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': join_geometry}, - attrs={'is_active_output': True}) - - -class SingleCabinetBaseFactory(AssetFactory): - def __init__(self, factory_seed, params={}, coarse=False): - super(SingleCabinetBaseFactory, self).__init__(factory_seed, coarse=coarse) - self.shelf_params = {} - self.door_params = {} - self.mat_params = {} - self.shelf_fac = LargeShelfBaseFactory(factory_seed) - self.door_fac = CabinetDoorBaseFactory(factory_seed) - with FixedSeed(factory_seed): - self.params = self.sample_params() - - def sample_params(self): - # Update fac params - pass - - def get_material_params(self): - with FixedSeed(self.factory_seed): - params = self.mat_params.copy() - if params.get('frame_material', None) is None: - params['frame_material'] = np.random.choice(['white', 'black_wood', 'wood'], p=[0.5, 0.2, 0.3]) - return params - - def get_shelf_params(self, i=0): - params = self.shelf_params.copy() - if params.get('shelf_cell_width', None) is None: - params['shelf_cell_width'] = [np.random.choice([0.76, 0.36], p=[0.5, 0.5]) * - np.clip(normal(1., 0.1), 0.75, 1.25)] - if params.get('shelf_cell_height', None) is None: - num_v_cells = randint(3, 7) - shelf_cell_height = [] - for i in range(num_v_cells): - shelf_cell_height.append(0.3 * np.clip(normal(1., 0.06), 0.75, 1.25)) - params['shelf_cell_height'] = shelf_cell_height - if params.get('frame_material', None) is None: - params['frame_material'] = self.mat_params['frame_material'] - - return params - - def get_door_params(self, i=0): - params = self.door_params.copy() - - # get door params - shelf_width = self.shelf_params['shelf_width'] + self.shelf_params['side_board_thickness'] * 2 - if params.get('door_width', None) is None: - if shelf_width < 0.55: - params['door_width'] = shelf_width - params['num_door'] = 1 - else: - params['door_width'] = shelf_width / 2. - 0.0005 - params['num_door'] = 2 - if params.get('door_height', None) is None: - params['door_height'] = (self.shelf_params['division_board_z_translation'][-1] - - self.shelf_params['division_board_z_translation'][0] + - self.shelf_params['division_board_thickness']) - if len(self.shelf_params['division_board_z_translation']) > 5 and \ - np.random.choice([True, False], p=[0.5, 0.5]): - params['door_height'] = (self.shelf_params['division_board_z_translation'][3] - - self.shelf_params['division_board_z_translation'][0] + - self.shelf_params['division_board_thickness']) - if params.get('frame_material', None) is None: - params['frame_material'] = self.mat_params['frame_material'] - - return params - - def get_cabinet_params(self, i=0): - params = dict() - - shelf_width = self.shelf_params['shelf_width'] + self.shelf_params['side_board_thickness'] * 2 - if self.door_params['num_door'] == 1: - params['door_hinge_pos'] = [(self.shelf_params['shelf_depth'] / 2. 
+ 0.0025, -shelf_width / 2., - self.shelf_params['bottom_board_height'])] - params['door_open_angle'] = 0 - params['attach_pos'] = [ - (self.shelf_params['shelf_depth'] / 2., -self.shelf_params['shelf_width'] / 2., - self.shelf_params['bottom_board_height'] + z) for z in self.door_params['attach_height'] - ] - elif self.door_params['num_door'] == 2: - params['door_hinge_pos'] = [(self.shelf_params['shelf_depth'] / 2. + 0.008, -shelf_width / 2., - self.shelf_params['bottom_board_height']), - (self.shelf_params['shelf_depth'] / 2. + 0.008, shelf_width / 2., - self.shelf_params['bottom_board_height'])] - params['door_open_angle'] = 0 - params['attach_pos'] = [ - (self.shelf_params['shelf_depth'] / 2., -self.shelf_params['shelf_width'] / 2., - self.shelf_params['bottom_board_height'] + z) for z in self.door_params['attach_height'] - ] + [ - (self.shelf_params['shelf_depth'] / 2., self.shelf_params['shelf_width'] / 2., - self.shelf_params['bottom_board_height'] + z) for z in self.door_params['attach_height'] - ] - else: - raise NotImplementedError - - return params - - def get_cabinet_components(self, i): - # update material params - self.mat_params = self.get_material_params() - - # create shelf - shelf_params = self.get_shelf_params(i=i) - self.shelf_fac.params = shelf_params - shelf, shelf_params = self.shelf_fac.create_asset(i=i, ret_params=True) - shelf.name = 'cabinet_frame' - self.shelf_params = shelf_params - - # create doors - door_params = self.get_door_params(i=i) - self.door_fac.params = door_params - self.door_fac.params['door_left_hinge'] = False - right_door, door_obj_params = self.door_fac.create_asset(i=i, ret_params=True) - right_door.name = 'cabinet_right_door' - self.door_fac.params = door_obj_params - self.door_fac.params['door_left_hinge'] = True - left_door, _ = self.door_fac.create_asset(i=i, ret_params=True) - left_door.name = 'cabinet_left_door' - self.door_params = door_obj_params - - return shelf, right_door, left_door - - def create_asset(self, i=0, **params): - bpy.ops.mesh.primitive_plane_add( - size=1, enter_editmode=False, align='WORLD', location=(0, 0, 0), scale=(1, 1, 1)) - obj = bpy.context.active_object - - shelf, right_door, left_door = self.get_cabinet_components(i=i) - - # create cabinet - cabinet_params = self.get_cabinet_params(i=i) - surface.add_geomod(obj, geometry_cabinet_nodes, attributes=[], apply=True, input_kwargs={ - 'door': [right_door, left_door], - 'shelf': shelf, - 'door_hinge_pos': cabinet_params['door_hinge_pos'], - 'door_open_angle': cabinet_params['door_open_angle'], - 'attach_pos': cabinet_params['attach_pos'] - }) - butil.delete([left_door, right_door]) - obj = butil.join_objects([shelf, obj]) - - tagging.tag_system.relabel_obj(obj) - return obj - - -class SingleCabinetFactory(SingleCabinetBaseFactory): - def sample_params(self): - params = dict() - params['Dimensions'] = ( - uniform(0.25, 0.35), - uniform(0.3, 0.7), - uniform(0.9, 1.8) - ) - - params['bottom_board_height'] = 0.083 - params['shelf_depth'] = params['Dimensions'][0] - 0.01 - num_h = int((params['Dimensions'][2] - 0.083) / 0.3) - params['shelf_cell_height'] = [(params['Dimensions'][2] - 0.083) / num_h for _ in range(num_h)] - params['shelf_cell_width'] = [params['Dimensions'][1]] - self.shelf_params = params - self.dims = params['Dimensions'] - - def create_placeholder(self, **kwargs) -> bpy.types.Object: - x,y,z = self.dims - return new_bbox(-x/2 * 1.2, x/2 * 1.2, -y/2 * 1.2, y/2 * 1.2, 0, (z + 0.083) * 1.02) diff --git a/infinigen/assets/shelves/triangle_shelf.py 
b/infinigen/assets/shelves/triangle_shelf.py deleted file mode 100644 index 3eb60ef98..000000000 --- a/infinigen/assets/shelves/triangle_shelf.py +++ /dev/null @@ -1,870 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Beining Han - -from numpy.random import uniform, normal, randint - -from infinigen.assets.materials.shelf_shaders import get_shelf_material -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core import surface -from infinigen.core.placement.factory import AssetFactory -import numpy as np -from infinigen.core.util import blender as butil -from infinigen.core import tagging, tags as t - -import bpy - -from infinigen.assets.shelves.utils import nodegroup_tagged_cube - - -@node_utils.to_nodegroup('nodegroup_table_profile', singleton=False, type='GeometryNodeTree') -def nodegroup_table_profile(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketInt', 'Profile N-gon', 4), - ('NodeSocketFloat', 'Profile Width', 1.0000), ('NodeSocketFloat', 'Profile Aspect Ratio', 1.0000), - ('NodeSocketFloat', 'Profile Fillet Ratio', 0.2000)]) - - value = nw.new_node(Nodes.Value) - value.outputs[0].default_value = 0.7071 - - curve_circle = nw.new_node(Nodes.CurveCircle, input_kwargs={ - 'Resolution': group_input.outputs["Profile N-gon"], - 'Radius': value - }) - - divide = nw.new_node(Nodes.Math, input_kwargs={0: 3.1416, 1: group_input.outputs["Profile N-gon"]}, - attrs={'operation': 'DIVIDE'}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': divide}) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': curve_circle.outputs["Curve"], 'Rotation': combine_xyz_1}) - - transform_2 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': transform, 'Rotation': (0.0000, 0.0000, -1.5708)}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={ - 0: group_input.outputs["Profile Aspect Ratio"], - 1: group_input.outputs["Profile Width"] - }, attrs={'operation': 'MULTIPLY'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={ - 'X': group_input.outputs["Profile Width"], - 'Y': multiply, - 'Z': 1.0000 - }) - - transform_1 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': transform_2, 'Scale': combine_xyz}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={ - 0: group_input.outputs["Profile Width"], - 1: group_input.outputs["Profile Fillet Ratio"] - }, attrs={'operation': 'MULTIPLY'}) - - fillet_curve_1 = nw.new_node('GeometryNodeFilletCurve', input_kwargs={ - 'Curve': transform_1, - 'Count': 4, - 'Radius': multiply_1, - 'Limit Radius': True - }, attrs={'mode': 'POLY'}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Output': fillet_curve_1}, - attrs={'is_active_output': True}) - - -@node_utils.to_nodegroup('nodegroup_curve_to_board', singleton=False, type='GeometryNodeTree') -def nodegroup_curve_to_board(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketGeometry', 'Profile Curve', None), - ('NodeSocketGeometry', 'Shape Curve', None), ('NodeSocketFloat', 'Height', 0.5000)]) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Height"], 1: -1.0000}, - attrs={'operation': 'MULTIPLY'}) - 
- combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': multiply}) - - curve_line = nw.new_node(Nodes.CurveLine, input_kwargs={'End': combine_xyz_1}) - - set_curve_tilt = nw.new_node(Nodes.SetCurveTilt, input_kwargs={'Curve': curve_line, 'Tilt': 3.1416}) - - resample_curve = nw.new_node(Nodes.ResampleCurve, - input_kwargs={'Curve': set_curve_tilt, 'Count': 128, 'Length': 0.0500}) - - spline_parameter_1 = nw.new_node(Nodes.SplineParameter) - - capture_attribute = nw.new_node(Nodes.CaptureAttribute, input_kwargs={ - 'Geometry': resample_curve, - 2: spline_parameter_1.outputs["Factor"] - }) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, input_kwargs={ - 'Curve': capture_attribute.outputs["Geometry"], - 'Profile Curve': group_input.outputs["Shape Curve"], - 'Fill Caps': True - }) - - position_1 = nw.new_node(Nodes.InputPosition) - - separate_xyz_2 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': position_1}) - - sample_curve = nw.new_node(Nodes.SampleCurve, input_kwargs={ - 'Curve': group_input.outputs["Profile Curve"], - 'Factor': capture_attribute.outputs[2] - }, attrs={'mode': 'FACTOR'}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': sample_curve.outputs["Position"]}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': separate_xyz.outputs["X"], 'Y': separate_xyz.outputs["Y"]}) - - length = nw.new_node(Nodes.VectorMath, input_kwargs={0: combine_xyz}, attrs={'operation': 'LENGTH'}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_2.outputs["X"], 1: length.outputs["Value"]}, - attrs={'operation': 'MULTIPLY'}) - - multiply_2 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_2.outputs["Y"], 1: length.outputs["Value"]}, - attrs={'operation': 'MULTIPLY'}) - - position = nw.new_node(Nodes.InputPosition) - - separate_xyz_1 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': position}) - - attribute_statistic = nw.new_node(Nodes.AttributeStatistic, input_kwargs={ - 'Geometry': group_input.outputs["Profile Curve"], - 2: separate_xyz_1.outputs["Z"] - }) - - map_range = nw.new_node(Nodes.MapRange, input_kwargs={ - 'Value': separate_xyz.outputs["Z"], - 1: attribute_statistic.outputs["Min"], - 2: attribute_statistic.outputs["Max"], - 3: multiply, - 4: 0.0000 - }) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={ - 'X': multiply_1, - 'Y': multiply_2, - 'Z': map_range.outputs["Result"] - }) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': curve_to_mesh, 'Position': combine_xyz_2}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Mesh': set_position}, - attrs={'is_active_output': True}) - - -@node_utils.to_nodegroup('nodegroup_leg_straight', singleton=False, type='GeometryNodeTree') -def nodegroup_leg_straight(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketGeometry', 'Profile Curve', None), - ('NodeSocketFloat', 'Height', 0.5000), ('NodeSocketInt', 'N-gon', 0), - ('NodeSocketFloat', 'Profile Width', 0.5000), ('NodeSocketFloat', 'Aspect Ratio', 0.5000), - ('NodeSocketFloat', 'Fillet Ratio', 0.2000), ('NodeSocketInt', 'Resolution', 128)]) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Height"], 1: -1.0000}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': multiply}) - - curve_line = nw.new_node(Nodes.CurveLine, input_kwargs={'End': combine_xyz_1}) - - 
set_curve_tilt = nw.new_node(Nodes.SetCurveTilt, input_kwargs={'Curve': curve_line, 'Tilt': 3.1416}) - - resample_curve = nw.new_node(Nodes.ResampleCurve, input_kwargs={ - 'Curve': set_curve_tilt, - 'Count': group_input.outputs["Resolution"], - 'Length': 0.0500 - }) - - spline_parameter_1 = nw.new_node(Nodes.SplineParameter) - - capture_attribute = nw.new_node(Nodes.CaptureAttribute, input_kwargs={ - 'Geometry': resample_curve, - 2: spline_parameter_1.outputs["Factor"] - }) - - tableprofile = nw.new_node(nodegroup_table_profile().name, input_kwargs={ - 'Profile N-gon': group_input.outputs["N-gon"], - 'Profile Width': group_input.outputs["Profile Width"], - 'Profile Aspect Ratio': group_input.outputs["Aspect Ratio"], - 'Profile Fillet Ratio': group_input.outputs["Fillet Ratio"] - }) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, input_kwargs={ - 'Curve': capture_attribute.outputs["Geometry"], - 'Profile Curve': tableprofile, - 'Fill Caps': True - }) - - position_1 = nw.new_node(Nodes.InputPosition) - - separate_xyz_2 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': position_1}) - - sample_curve = nw.new_node(Nodes.SampleCurve, input_kwargs={ - 'Curve': group_input.outputs["Profile Curve"], - 'Factor': capture_attribute.outputs[2] - }, attrs={'mode': 'FACTOR'}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': sample_curve.outputs["Position"]}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': separate_xyz.outputs["X"], 'Y': separate_xyz.outputs["Y"]}) - - length = nw.new_node(Nodes.VectorMath, input_kwargs={0: combine_xyz}, attrs={'operation': 'LENGTH'}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_2.outputs["X"], 1: length.outputs["Value"]}, - attrs={'operation': 'MULTIPLY'}) - - multiply_2 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_2.outputs["Y"], 1: length.outputs["Value"]}, - attrs={'operation': 'MULTIPLY'}) - - position = nw.new_node(Nodes.InputPosition) - - separate_xyz_1 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': position}) - - attribute_statistic = nw.new_node(Nodes.AttributeStatistic, input_kwargs={ - 'Geometry': group_input.outputs["Profile Curve"], - 2: separate_xyz_1.outputs["Z"] - }) - - map_range = nw.new_node(Nodes.MapRange, input_kwargs={ - 'Value': separate_xyz.outputs["Z"], - 1: attribute_statistic.outputs["Min"], - 2: attribute_statistic.outputs["Max"], - 3: multiply, - 4: 0.0000 - }) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={ - 'X': multiply_1, - 'Y': multiply_2, - 'Z': map_range.outputs["Result"] - }) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': curve_to_mesh, 'Position': combine_xyz_2}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Mesh': set_position, 'Profile Curve': tableprofile}, - attrs={'is_active_output': True}) - - -@node_utils.to_nodegroup('nodegroup_curve_board', singleton=False, type='GeometryNodeTree') -def nodegroup_curve_board(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - curve_line = nw.new_node(Nodes.CurveLine, - input_kwargs={'Start': (1.0000, 0.0000, -1.0000), 'End': (1.0000, 0.0000, 1.0000)}) - - group_input = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketFloat', 'Thickness', 0.5000), - ('NodeSocketFloat', 'Fillet Radius Vertical', 0.0000), ('NodeSocketFloat', 'width', 0.0000), - ('NodeSocketFloat', 'extrude_length', 0.0000)]) - - combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': 
group_input.outputs["width"]}) - - curve_line_1 = nw.new_node(Nodes.CurveLine, input_kwargs={'End': combine_xyz_3}) - - combine_xyz_4 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': group_input.outputs["width"]}) - - curve_line_2 = nw.new_node(Nodes.CurveLine, input_kwargs={'End': combine_xyz_4}) - - combine_xyz_6 = nw.new_node(Nodes.CombineXYZ, input_kwargs={ - 'X': group_input.outputs["width"], - 'Y': group_input.outputs["extrude_length"] - }) - - curve_line_3 = nw.new_node(Nodes.CurveLine, input_kwargs={'Start': combine_xyz_3, 'End': combine_xyz_6}) - - combine_xyz_5 = nw.new_node(Nodes.CombineXYZ, input_kwargs={ - 'X': group_input.outputs["extrude_length"], - 'Y': group_input.outputs["width"] - }) - - curve_line_4 = nw.new_node(Nodes.CurveLine, input_kwargs={'Start': combine_xyz_4, 'End': combine_xyz_5}) - - curve_line_5 = nw.new_node(Nodes.CurveLine, input_kwargs={'Start': combine_xyz_6, 'End': combine_xyz_5}) - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, input_kwargs={ - 'Geometry': [curve_line_1, curve_line_2, curve_line_3, curve_line_4, curve_line_5] - }) - - curve_to_mesh_1 = nw.new_node(Nodes.CurveToMesh, input_kwargs={'Curve': join_geometry_1}) - - merge_by_distance_1 = nw.new_node(Nodes.MergeByDistance, input_kwargs={'Geometry': curve_to_mesh_1}) - - mesh_to_curve = nw.new_node(Nodes.MeshToCurve, input_kwargs={'Mesh': merge_by_distance_1}) - - curve_to_board = nw.new_node(nodegroup_curve_to_board().name, input_kwargs={ - 'Profile Curve': curve_line, - 'Shape Curve': mesh_to_curve, - 'Height': group_input.outputs["Thickness"] - }) - - arc = nw.new_node('GeometryNodeCurveArc', - input_kwargs={'Resolution': 4, 'Radius': 0.7071, 'Sweep Angle': 4.7124}) - - transform = nw.new_node(Nodes.Transform, input_kwargs={ - 'Geometry': arc.outputs["Curve"], - 'Rotation': (0.0000, 0.0000, -0.7854) - }) - - transform_2 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': transform, 'Rotation': (0.0000, 1.5708, 0.0000)}) - - transform_3 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': transform_2, 'Translation': (0.0000, 0.5000, 0.0000)}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': 1.0000, 'Y': group_input, 'Z': 1.0000}) - - transform_4 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': transform_3, 'Scale': combine_xyz}) - - fillet_curve = nw.new_node('GeometryNodeFilletCurve', input_kwargs={ - 'Curve': transform_4, - 'Count': 8, - 'Radius': group_input, - 'Limit Radius': True - }, attrs={'mode': 'POLY'}) - - transform_6 = nw.new_node(Nodes.Transform, input_kwargs={ - 'Geometry': fillet_curve, - 'Rotation': (1.5708, 1.5708, 0.0000), - 'Scale': group_input.outputs["Thickness"] - }) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, input_kwargs={'Profile Curve': transform_6}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Thickness"], 1: -0.5000}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': multiply}) - - transform_5 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': curve_to_mesh, 'Translation': combine_xyz_1}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [curve_to_board.outputs["Mesh"], transform_5]}) - - merge_by_distance = nw.new_node(Nodes.MergeByDistance, input_kwargs={'Geometry': join_geometry}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': group_input.outputs["Thickness"]}) - - transform_1 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': merge_by_distance, 
'Translation': combine_xyz_2}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': transform_1}, - attrs={'is_active_output': True}) - - -@node_utils.to_nodegroup('nodegroup_side_leg', singleton=False, type='GeometryNodeTree') -def nodegroup_side_leg(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - curve_line = nw.new_node(Nodes.CurveLine, - input_kwargs={'Start': (1.0000, 0.0000, -1.0000), 'End': (1.0000, 0.0000, 1.0000)}) - - group_input = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketFloat', 'Thickness', 0.5000), - ('NodeSocketInt', 'N-gon', 0), ('NodeSocketFloat', 'Profile Width', 0.5000), - ('NodeSocketFloat', 'Aspect Ratio', 0.5000), ('NodeSocketFloat', 'Fillet Ratio', 0.2000), - ('NodeSocketFloat', 'Fillet Radius Vertical', 0.0000)]) - - legstraight = nw.new_node(nodegroup_leg_straight().name, input_kwargs={ - 'Profile Curve': curve_line, - 'Height': group_input.outputs["Thickness"], - 'N-gon': group_input.outputs["N-gon"], - 'Profile Width': group_input.outputs["Profile Width"], - 'Aspect Ratio': group_input.outputs["Aspect Ratio"], - 'Fillet Ratio': group_input.outputs["Fillet Ratio"] - }) - - arc = nw.new_node('GeometryNodeCurveArc', - input_kwargs={'Resolution': 4, 'Radius': 0.7071, 'Sweep Angle': 4.7124}) - - transform = nw.new_node(Nodes.Transform, input_kwargs={ - 'Geometry': arc.outputs["Curve"], - 'Rotation': (0.0000, 0.0000, -0.7854) - }) - - transform_2 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': transform, 'Rotation': (0.0000, 1.5708, 0.0000)}) - - transform_3 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': transform_2, 'Translation': (0.0000, 0.5000, 0.0000)}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': 1.0000, 'Y': group_input, 'Z': 1.0000}) - - transform_4 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': transform_3, 'Scale': combine_xyz}) - - fillet_curve = nw.new_node('GeometryNodeFilletCurve', input_kwargs={ - 'Curve': transform_4, - 'Count': 8, - 'Radius': group_input, - 'Limit Radius': True - }, attrs={'mode': 'POLY'}) - - transform_6 = nw.new_node(Nodes.Transform, input_kwargs={ - 'Geometry': fillet_curve, - 'Rotation': (1.5708, 1.5708, 0.0000), - 'Scale': group_input.outputs["Thickness"] - }) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, input_kwargs={ - 'Curve': legstraight.outputs["Profile Curve"], - 'Profile Curve': transform_6 - }) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Thickness"], 1: -0.5000}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': multiply}) - - transform_5 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': curve_to_mesh, 'Translation': combine_xyz_1}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [transform_5, legstraight.outputs["Mesh"]]}) - - merge_by_distance = nw.new_node(Nodes.MergeByDistance, input_kwargs={'Geometry': join_geometry}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': group_input.outputs["Thickness"]}) - - transform_1 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': merge_by_distance, 'Translation': combine_xyz_2}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': transform_1}, - attrs={'is_active_output': True}) - - -@node_utils.to_nodegroup('nodegroup_side_boards', singleton=False, type='GeometryNodeTree') -def nodegroup_side_boards(nw: NodeWrangler): - # Code generated using version 2.6.4 of the 
node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Y', 0.0000), ('NodeSocketFloat', 'Z', 0.0000), - ('NodeSocketFloat', 'x1', 0.5000), ('NodeSocketFloat', 'x2', 0.5000), - ('NodeSocketFloat', 'x3', 0.0010), ('NodeSocketFloat', 'x4', 0.5000), - ('NodeSocketFloat', 'x5', 0.5000)]) - - add = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["x5"], 1: 0.0000}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={ - 'X': add, - 'Y': group_input.outputs["Y"], - 'Z': group_input.outputs["Z"] - }) - - cube = nw.new_node(Nodes.MeshCube, - input_kwargs={'Size': combine_xyz, 'Vertices X': 5, 'Vertices Y': 5, 'Vertices Z': 5}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: add}, attrs={'operation': 'MULTIPLY'}) - - add_1 = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: group_input.outputs["x3"]}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["x1"]}, - attrs={'operation': 'MULTIPLY'}) - - subtract = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["x2"], 1: multiply_1}, - attrs={'operation': 'SUBTRACT'}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': add_1, 'Z': subtract}) - - transform = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': cube, 'Translation': combine_xyz_1}) - - subtract_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["x4"], 1: multiply_1}, - attrs={'operation': 'SUBTRACT'}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': add_1, 'Z': subtract_1}) - - transform_1 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': cube, 'Translation': combine_xyz_2}) - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [transform, transform_1]}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': join_geometry_1}, - attrs={'is_active_output': True}) - - -@node_utils.to_nodegroup('nodegroup_shelf_boards', singleton=False, type='GeometryNodeTree') -def nodegroup_shelf_boards(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketFloat', 'Thickness', 0.0100), - ('NodeSocketFloat', 'Bottom_z', 0.0000), ('NodeSocketFloat', 'Mid_z', 0.0000), - ('NodeSocketFloat', 'Top_z', 0.0000), ('NodeSocketFloat', 'Board_width', 0.3000), - ('NodeSocketFloat', 'Leg_gap', 0.5000), ('NodeSocketFloat', 'extrude_length', 0.5000)]) - - curve_board = nw.new_node(nodegroup_curve_board().name, input_kwargs={ - 'Thickness': group_input.outputs["Thickness"], - 'Fillet Radius Vertical': 0.0100, - 'width': group_input.outputs["Board_width"], - 'extrude_length': group_input.outputs["extrude_length"] - }) - - add = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Leg_gap"], 1: 0.0000}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': add, 'Z': group_input.outputs["Bottom_z"]}) - - transform_1 = nw.new_node(Nodes.Transform, input_kwargs={ - 'Geometry': curve_board, - 'Translation': combine_xyz_1, - 'Rotation': (0.0000, 0.0000, -1.5708) - }) - - combine_xyz_4 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': add, 'Z': group_input.outputs["Mid_z"]}) - - transform_5 = nw.new_node(Nodes.Transform, input_kwargs={ - 'Geometry': curve_board, - 'Translation': combine_xyz_4, - 'Rotation': (0.0000, 0.0000, -1.5708) - }) - - combine_xyz_5 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': add, 'Z': group_input.outputs["Top_z"]}) - - transform_6 = 
nw.new_node(Nodes.Transform, input_kwargs={ - 'Geometry': curve_board, - 'Translation': combine_xyz_5, - 'Rotation': (0.0000, 0.0000, -1.5708) - }) - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [transform_1, transform_5, transform_6]}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': join_geometry_1}, - attrs={'is_active_output': True}) - - -@node_utils.to_nodegroup('nodegroup_screw_head', singleton=False, type='GeometryNodeTree') -def nodegroup_screw_head(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - cylinder = nw.new_node('GeometryNodeMeshCylinder', input_kwargs={'Radius': 0.004, 'Depth': 0.0030}) - - transform = nw.new_node(Nodes.Transform, input_kwargs={ - 'Geometry': cylinder.outputs["Mesh"], - 'Rotation': (1.5708, 0.0000, 0.0000) - }) - - group_input = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketFloat', 'leg_width', 0.5000), - ('NodeSocketFloat', 'board_thickness', 0.5000), ('NodeSocketFloat', 'board_height', 0.5000), - ('NodeSocketFloat', 'leg_gap', 0.5000), ('NodeSocketFloat', 'board_width', 0.5000), - ('NodeSocketFloat', 'leg_depth', 0.0000)]) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["leg_width"]}, - attrs={'operation': 'MULTIPLY'}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["leg_depth"]}, - attrs={'operation': 'MULTIPLY'}) - - subtract = nw.new_node(Nodes.Math, input_kwargs={0: 0.0000, 1: multiply_1}, attrs={'operation': 'SUBTRACT'}) - - multiply_2 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["board_thickness"]}, - attrs={'operation': 'MULTIPLY'}) - - add = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["board_height"], 1: multiply_2}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply, 'Y': subtract, 'Z': add}) - - transform_1 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': transform, 'Translation': combine_xyz}) - - add_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["board_width"], 1: 0.0000}) - - divide1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["leg_depth"], 1: 0.5}, - attrs={'operation': 'MULTIPLY'}) - - add_2 = nw.new_node(Nodes.Math, input_kwargs={0: add_1, 1: divide1}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply, 'Y': add_2, 'Z': add}) - - transform_2 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': transform, 'Translation': combine_xyz_1}) - - multiply_3 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["leg_gap"], 1: 2.0000}, - attrs={'operation': 'MULTIPLY'}) - - add_3 = nw.new_node(Nodes.Math, input_kwargs={0: add_1, 1: multiply_3}) - - subtract_1 = nw.new_node(Nodes.Math, input_kwargs={0: add_3, 1: multiply}, attrs={'operation': 'SUBTRACT'}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': subtract_1, 'Y': subtract, 'Z': add}) - - transform_3 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': transform, 'Translation': combine_xyz_2}) - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [transform_1, transform_2, transform_3]}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': join_geometry_1}, - attrs={'is_active_output': True}) - - -@node_utils.to_nodegroup('nodegroup_shelf_legs', singleton=False, type='GeometryNodeTree') -def nodegroup_shelf_legs(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = 
nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketFloat', 'leg_gap', 0.5000), - ('NodeSocketFloat', 'leg_curve_ratio', 0.5000), ('NodeSocketFloat', 'leg_width', 0.5000), - ('NodeSocketFloat', 'leg_length', 0.5000), ('NodeSocketFloat', 'board_width', 0.5000), - ('NodeSocketFloat', 'leg_depth', 0.0000)]) - - add = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["leg_width"], 1: 0.0000}) - - add_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["leg_length"], 1: 0.0000}) - - divide = nw.new_node(Nodes.Math, input_kwargs={ - 0: group_input.outputs["leg_depth"], - 1: group_input.outputs["leg_length"] - }, attrs={'operation': 'DIVIDE'}) - - add_2 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["leg_curve_ratio"], 1: 0.0000}) - - side_leg = nw.new_node(nodegroup_side_leg().name, input_kwargs={ - 'Thickness': add, - 'N-gon': 4, - 'Profile Width': add_1, - 'Aspect Ratio': divide, - 'Fillet Ratio': add_2, - 'Fillet Radius Vertical': add_2 - }) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: add_1}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': multiply}) - - transform = nw.new_node(Nodes.Transform, input_kwargs={ - 'Geometry': side_leg, - 'Translation': combine_xyz, - 'Rotation': (0.0000, 1.5708, 0.0000) - }) - - add_3 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["board_width"], 1: 0.0000}) - - subtract = nw.new_node(Nodes.Math, input_kwargs={0: add_3, 1: add}, attrs={'operation': 'SUBTRACT'}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["leg_gap"], 1: 2.0000}, - attrs={'operation': 'MULTIPLY'}) - - add_4 = nw.new_node(Nodes.Math, input_kwargs={0: subtract, 1: multiply_1}) - - combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': add_4}) - - transform_4 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': transform, 'Translation': combine_xyz_3}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': add_3}) - - transform_2 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': transform, 'Translation': combine_xyz_2}) - - transform_3 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': transform}) - - join_geometry_2 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [transform_4, transform_2, transform_3]}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': join_geometry_2}, - attrs={'is_active_output': True}) - - -def geometry_nodes(nw: NodeWrangler, **kwargs): - # Code generated using version 2.6.4 of the node_transpiler - - leg_gap = nw.new_node(Nodes.Value, label='leg_gap') - leg_gap.outputs[0].default_value = kwargs['leg_board_gap'] - - curvature_ratio = nw.new_node(Nodes.Value, label='curvature_ratio') - curvature_ratio.outputs[0].default_value = kwargs['leg_curvature_ratio'] - - leg_width = nw.new_node(Nodes.Value, label='leg_width') - leg_width.outputs[0].default_value = kwargs['leg_width'] - - leg_length = nw.new_node(Nodes.Value, label='leg_length') - leg_length.outputs[0].default_value = kwargs['leg_length'] - - leg_depth = nw.new_node(Nodes.Value, label='leg_depth') - leg_depth.outputs[0].default_value = kwargs['leg_depth'] - - board_width = nw.new_node(Nodes.Value, label='board_width') - board_width.outputs[0].default_value = kwargs['board_width'] - - shelf_legs = nw.new_node(nodegroup_shelf_legs().name, input_kwargs={ - 'leg_gap': leg_gap, - 'leg_curve_ratio': curvature_ratio, - 'leg_width': leg_width, - 'leg_length': leg_length, - 'board_width': 
board_width, - 'leg_depth': leg_depth - }) - - set_material = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': shelf_legs, 'Material': kwargs['leg_material']}) - - board_thickness = nw.new_node(Nodes.Value, label='board_thickness') - board_thickness.outputs[0].default_value = kwargs['board_thickness'] - - board_extrude_length = nw.new_node(Nodes.Value, label='board_extrude_length') - board_extrude_length.outputs[0].default_value = kwargs['board_extrude_length'] - - bottom_layer_height = nw.new_node(Nodes.Value, label='bottom_layer_height') - bottom_layer_height.outputs[0].default_value = kwargs['bottom_layer_height'] - - mid_layer_height = nw.new_node(Nodes.Value, label='mid_layer_height') - mid_layer_height.outputs[0].default_value = kwargs['mid_layer_height'] - - top_layer_height = nw.new_node(Nodes.Value, label='top_layer_height') - top_layer_height.outputs[0].default_value = kwargs['top_layer_height'] - - screwhead1 = nw.new_node(nodegroup_screw_head().name, input_kwargs={ - 'leg_width': leg_width, - 'board_thickness': board_thickness, - 'board_height': bottom_layer_height, - 'leg_gap': leg_gap, - 'board_width': board_width, - 'leg_depth': leg_depth - }) - - screwhead2 = nw.new_node(nodegroup_screw_head().name, input_kwargs={ - 'leg_width': leg_width, - 'board_thickness': board_thickness, - 'board_height': mid_layer_height, - 'leg_gap': leg_gap, - 'board_width': board_width, - 'leg_depth': leg_depth - }) - - screwhead3 = nw.new_node(nodegroup_screw_head().name, input_kwargs={ - 'leg_width': leg_width, - 'board_thickness': board_thickness, - 'board_height': top_layer_height, - 'leg_gap': leg_gap, - 'board_width': board_width, - 'leg_depth': leg_depth - }) - - join_geometry2 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [screwhead1, screwhead2, screwhead3]}) - - set_material_2 = nw.new_node(Nodes.SetMaterial, input_kwargs={ - 'Geometry': join_geometry2, - 'Material': get_shelf_material('metal') - }) - - shelf_boards = nw.new_node(nodegroup_shelf_boards().name, input_kwargs={ - 'Thickness': board_thickness, - 'Bottom_z': bottom_layer_height, - 'Mid_z': mid_layer_height, - 'Top_z': top_layer_height, - 'Board_width': board_width, - 'Leg_gap': leg_gap, - 'extrude_length': board_extrude_length - }) - - set_material_1 = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': shelf_boards, 'Material': kwargs['board_material']}) - - side_board_height = nw.new_node(Nodes.Value, label='side_board_height') - side_board_height.outputs[0].default_value = kwargs['side_board_height'] - - side_boards = nw.new_node(nodegroup_side_boards().name, input_kwargs={ - 'Y': leg_depth, - 'Z': side_board_height, - 'x1': side_board_height, - 'x2': bottom_layer_height, - 'x3': leg_gap, - 'x4': top_layer_height, - 'x5': board_width - }) - - set_material_3 = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': side_boards, 'Material': kwargs['leg_material']}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, input_kwargs={ - 'Geometry': [set_material, set_material_2, set_material_1, set_material_3] - }) - - realize_instances = nw.new_node(Nodes.RealizeInstances, input_kwargs={'Geometry': join_geometry}) - - transform4 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': realize_instances, 'Scale': (-1, 1, 1)}) - - triangulate = nw.new_node('GeometryNodeTriangulate', input_kwargs={'Mesh': transform4}) - - transform5 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': triangulate, 'Rotation': (0.0000, 0.0000, -1.5708)}) - - group_output = nw.new_node(Nodes.GroupOutput, 
input_kwargs={'Geometry': transform5}, - attrs={'is_active_output': True}) - - -class TriangleShelfBaseFactory(AssetFactory): - def __init__(self, factory_seed, params={}, coarse=False): - super(TriangleShelfBaseFactory, self).__init__(factory_seed, coarse=coarse) - self.params = {} - - def sample_params(self): - return self.params.copy() - - def get_asset_params(self, i=0): - params = self.sample_params() - if params.get('leg_board_gap', None) is None: - params['leg_board_gap'] = uniform(0.002, 0.005) - if params.get('leg_width', None) is None: - params['leg_width'] = uniform(0.01, 0.03) - if params.get('leg_depth', None) is None: - params['leg_depth'] = uniform(0.01, 0.02) - if params.get('leg_length', None) is None: - params['leg_length'] = np.clip(normal(0.6, 0.05), 0.45, 0.75) - if params.get('leg_curvature_ratio', None) is None: - params['leg_curvature_ratio'] = uniform(0.0, 0.02) - if params.get('board_thickness', None) is None: - params['board_thickness'] = uniform(0.01, 0.025) - if params.get('board_width', None) is None: - params['board_width'] = np.clip(normal(0.3, 0.03), 0.2, 0.4) - if params.get('board_extrude_length', None) is None: - params['board_extrude_length'] = uniform(0.03, 0.07) - if params.get('side_board_height', None) is None: - params['side_board_height'] = uniform(0.02, 0.04) - if params.get('bottom_layer_height', None) is None: - params['bottom_layer_height'] = uniform(0.05, 0.1) - if params.get('shelf_layer_height', None) is None: - params['top_layer_height'] = params['leg_length'] - uniform(0.02, 0.07) - if params.get('board_material', None) is None: - params['board_material'] = np.random.choice(['black_wood', 'wood', 'white'], p=[0.2, 0.6, 0.2]) - if params.get('leg_material', None) is None: - params['leg_material'] = np.random.choice(['black_wood', 'wood', 'white'], p=[0.2, 0.6, 0.2]) - params['mid_layer_height'] = (params['top_layer_height'] + params['bottom_layer_height']) / 2. - - params = self.get_material_func(params) - return params - - def get_material_func(self, params, randomness=True): - params['board_material'] = get_shelf_material(params['board_material']) - params['leg_material'] = get_shelf_material(params['leg_material'], z_axis_texture=True) - return params - - def create_asset(self, i=0, **params): - bpy.ops.mesh.primitive_plane_add(size=1, enter_editmode=False, align='WORLD', location=(0, 0, 0), - scale=(1, 1, 1)) - obj = bpy.context.active_object - - obj_params = self.get_asset_params(i) - surface.add_geomod(obj, geometry_nodes, attributes=[], input_kwargs=obj_params, apply=True) - tagging.tag_system.relabel_obj(obj) - - return obj - - -class TriangleShelfFactory(TriangleShelfBaseFactory): - def sample_params(self): - params = dict() - params['Dimensions'] = (uniform(0.25, 0.35), uniform(0.25, 0.35), uniform(0.5, 0.7)) - params['leg_length'] = params['Dimensions'][2] - params['board_width'] = params['Dimensions'][0] - return params - diff --git a/infinigen/assets/shelves/utils.py b/infinigen/assets/shelves/utils.py deleted file mode 100644 index be55e31e6..000000000 --- a/infinigen/assets/shelves/utils.py +++ /dev/null @@ -1,61 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
- -# Authors: Beining Han - -import bpy -import numpy as np -from infinigen.core.util import blender as butil -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler, geometry_node_group_empty_new -from infinigen.core.nodes import node_utils -from infinigen.core import tagging, tags as t - -from infinigen.assets.utils.extract_nodegroup_parts import extract_nodegroup_geo - - -def get_nodegroup_assets(func, params): - bpy.ops.mesh.primitive_plane_add( - size=1, enter_editmode=False, align='WORLD', location=(0, 0, 0), scale=(1, 1, 1)) - obj = bpy.context.active_object - - with butil.TemporaryObject(obj) as base_obj: - node_group_func = func(**params) - geo_outputs = [o for o in node_group_func.outputs if o.bl_socket_idname == 'NodeSocketGeometry'] - results = {o.name: extract_nodegroup_geo(base_obj, node_group_func, o.name, - ng_params={}) for o in geo_outputs} - - return results - -@node_utils.to_nodegroup('nodegroup_tagged_cube', singleton=False, type='GeometryNodeTree') -def nodegroup_tagged_cube(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketVectorTranslation', 'Size', (1.0000, 1.0000, 1.0000))]) - - cube = nw.new_node(Nodes.MeshCube, input_kwargs={'Size': group_input.outputs["Size"]}) - - index = nw.new_node(Nodes.Index) - - equal = nw.new_node(Nodes.Compare, input_kwargs={2: index, 3: 2}, attrs={'data_type': 'INT', 'operation': 'EQUAL'}) - - cube = tagging.tag_nodegroup(nw, cube, t.Subpart.SupportSurface, selection=equal) - - #subdivide_mesh = nw.new_node(Nodes.SubdivideMesh, input_kwargs={'Mesh': cube, 'Level': 2}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Mesh': cube}, attrs={'is_active_output': True}) - - - -def blender_rotate(vec): - if isinstance(vec, tuple): - vec = list(vec) - if isinstance(vec, list): - vec = np.array(vec, dtype=np.float32) - if len(vec.shape) == 1: - vec = np.expand_dims(vec, axis=-1) - if vec.shape[0] == 3: - new_vec = np.array([[1, 0, 0], [0, 0, 1], [0, -1, 0]], dtype=np.float32) @ vec - return new_vec.squeeze() - if vec.shape[0] == 4: - new_vec = np.array([[1, 0, 0, 0], [0, 0, 1, 0], [0, -1, 0, 0], [0, 0, 0, 1]], dtype=np.float32) @ vec - return new_vec.squeeze() diff --git a/infinigen/assets/small_plants/fern.py b/infinigen/assets/small_plants/fern.py deleted file mode 100644 index b824a2560..000000000 --- a/infinigen/assets/small_plants/fern.py +++ /dev/null @@ -1,762 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
- -# Authors: Beining Han -# Acknowledgement: This file draws inspiration from https://www.youtube.com/watch?v=MGxNuS_-bpo by Bad Normals - - -import bpy -import mathutils - -import gin -import numpy as np -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler - -from infinigen.assets.small_plants import leaf_general as Leaf - -from infinigen.core.nodes import node_utils -from infinigen.core.placement.factory import AssetFactory -from infinigen.core.util import blender as butil -from infinigen.core import surface -from infinigen.assets.materials import simple_greenery - -from infinigen.core.tagging import tag_object, tag_nodegroup - -def random_pinnae_level2_curvature(): - z_max_curvature = uniform(0.3, 0.45, (1,))[0] - y_curvature_noise = np.clip(np.abs(normal(0., 0.2, (1,))), a_min=0.0, a_max=0.3)[0] - y_curvature_k = uniform(-0.04, 0.2, (1,))[0] - z_curvature, y_curvature = [0.25], [0.5] - for k in range(1, 6): - z_curvature.append(0.25 + z_max_curvature * k / 5.) - y_curvature.append(0.5 + y_curvature_k + y_curvature_noise * k / 5.) - x_curvature = [0.0 for _ in range(6)] - return x_curvature, y_curvature, z_curvature - - -@node_utils.to_nodegroup('nodegroup_pinnae_level1_yaxis_rotation', singleton=False, type='GeometryNodeTree') -def nodegroup_pinnae_level1_yaxis_rotation(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'From Max', 1.0), - ('NodeSocketFloat', 'Value', 1.0)]) - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': group_input.outputs["Value"], 2: group_input.outputs["From Max"]}) - curvature = np.clip(normal(0, 0.3, 1), a_min=-0.4, a_max=0.4) - float_curve = nw.new_node(Nodes.FloatCurve, input_kwargs={'Value': map_range.outputs["Result"]}) - node_utils.assign_curve(float_curve.mapping.curves[0], - [(0.0, 0.5), (0.1, curvature / 5. + 0.5), (0.25, curvature / 2.5 + 0.5), - (0.45, curvature / 1.5 + 0.5), (0.6, curvature / 1.2 + 0.5), (1.0, curvature + 0.5)]) - add = nw.new_node(Nodes.Math, input_kwargs={0: float_curve, 1: -0.5}, attrs={'operation': 'ADD'}) - multiply = nw.new_node(Nodes.Math, input_kwargs={0: add, 1: 1.0}, attrs={'operation': 'MULTIPLY'}) - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Value': multiply}) - - -@node_utils.to_nodegroup('nodegroup_pinnae_level1_zaxis_rotation', singleton=False, type='GeometryNodeTree') -def nodegroup_pinnae_level1_zaxis_rotation(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'From Max', 1.0), ('NodeSocketFloat', 'Value', 1.0)]) - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': group_input.outputs["Value"], 2: group_input.outputs["From Max"]}) - curvature = normal(0, 0.2, 1) - float_curve = nw.new_node(Nodes.FloatCurve, input_kwargs={'Value': map_range.outputs["Result"]}) - node_utils.assign_curve(float_curve.mapping.curves[0], - [(0.0, 0.5), (0.1, curvature / 5. 
+ 0.5), - (0.25, curvature / 2.5 + 0.5), (0.45, curvature / 1.5 + 0.5), - (0.6, curvature / 1.2 + 0.5), (1.0, curvature + 0.5)]) - add = nw.new_node(Nodes.Math, input_kwargs={0: float_curve, 1: -0.5}, attrs={'operation': 'ADD'}) - multiply = nw.new_node(Nodes.Math, input_kwargs={0: add, 1: 1.0}, attrs={'operation': 'MULTIPLY'}) - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Value': multiply}) - - -@node_utils.to_nodegroup('nodegroup_pinnae_level1_gravity_rotation', singleton=False, type='GeometryNodeTree') -def nodegroup_pinnae_level1_gravity_rotation(nw: NodeWrangler, gravity_rotation=1.): - # Code generated using version 2.4.3 of the node_transpiler - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'From Max', 1.0), ('NodeSocketFloat', 'Value', 1.0)]) - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': group_input.outputs["Value"], 2: group_input.outputs["From Max"]}) - curvature = uniform(0.25, 0.42, size=(1,))[0] * gravity_rotation - float_curve = nw.new_node(Nodes.FloatCurve, input_kwargs={'Value': map_range.outputs["Result"]}) - node_utils.assign_curve(float_curve.mapping.curves[0], - [(0.0, 0.5), (0.1, curvature / 5. + 0.5), - (0.25, curvature / 2.5 + 0.5), (0.45, curvature / 1.67 + 0.5), - (0.6, curvature / 1.25 + 0.5), (1.0, curvature + 0.5)]) - add = nw.new_node(Nodes.Math, input_kwargs={0: float_curve, 1: -0.5}, attrs={'operation': 'ADD'}) - multiply = nw.new_node(Nodes.Math, input_kwargs={0: add, 1: 1}, attrs={'operation': 'MULTIPLY'}) - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Value': multiply}) - - -@node_utils.to_nodegroup('nodegroup_pinnae_level1_xaxis_rotation', singleton=False, type='GeometryNodeTree') -def nodegroup_pinnae_level1_xaxis_rotation(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'From Max', 1.0000), - ('NodeSocketFloat', 'Value1', 1.0000), - ('NodeSocketFloat', 'Value2', 1.0000)]) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': group_input.outputs["Value1"], 2: group_input.outputs["From Max"]}, - attrs={'clamp': False}) - - float_curve = nw.new_node(Nodes.FloatCurve, input_kwargs={'Value': map_range_1.outputs["Result"]}) - node_utils.assign_curve(float_curve.mapping.curves[0], - [(0.0000, 0.0000), (0.2000, 0.2563), (0.4843, 0.4089), (0.7882, 0.3441), (1.0000, 0.0000)]) - - map_range = nw.new_node(Nodes.MapRange, input_kwargs={'Value': group_input.outputs['Value2'], 3: -1.5000, 4: 0.0000}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: float_curve, 1: map_range.outputs["Result"]}, - attrs={'operation': 'MULTIPLY'}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Value': multiply}, attrs={'is_active_output': True}) - - -@node_utils.to_nodegroup('nodegroup_pinnae_level1_stein', singleton=False, type='GeometryNodeTree') -def nodegroup_pinnae_level1_stein(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Mesh', None), - ('NodeSocketFloat', 'Value1', 0.5), - ('NodeSocketFloat', 'Value2', 0.5)]) - mesh_to_curve = nw.new_node(Nodes.MeshToCurve, input_kwargs={'Mesh': group_input.outputs["Mesh"]}) - multiply = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs['Value2'], 1: 0.01}, - attrs={'operation': 'MULTIPLY'}) - set_curve_radius = nw.new_node(Nodes.SetCurveRadius, input_kwargs={'Curve': 
mesh_to_curve, 'Radius': multiply}) - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Value1"], 1: 15.0}, - attrs={'operation': 'MULTIPLY'}) - curve_circle = nw.new_node(Nodes.CurveCircle, input_kwargs={'Radius': multiply_1, 'Resolution': 10}) - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': set_curve_radius, - 'Profile Curve': curve_circle.outputs["Curve"]}) - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Mesh': curve_to_mesh}) - - -@node_utils.to_nodegroup('nodegroup_pinnae_level1_scale', singleton=False, type='GeometryNodeTree') -def nodegroup_pinnae_level1_scale(nw: NodeWrangler, pinnae_contour): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Value1', 1.0), - ('NodeSocketFloat', 'Value2', 1.0)]) - - pinnae_contour_float_curve = nw.new_node(Nodes.FloatCurve, input_kwargs={'Value': group_input.outputs["Value1"]}, - label='PinnaeContourFloatCurve') - node_utils.assign_curve(pinnae_contour_float_curve.mapping.curves[0], - [(0.0, pinnae_contour[0]), (0.2, pinnae_contour[1]), (0.4, pinnae_contour[2]), - (0.55, pinnae_contour[3]), (0.7, pinnae_contour[4]), (0.8, pinnae_contour[5]), - (0.9, pinnae_contour[6]), (1.0, pinnae_contour[7])]) - map_range = nw.new_node(Nodes.MapRange, input_kwargs={'Value': group_input.outputs['Value2'], 3: 1.0, 4: 3.0}) - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: pinnae_contour_float_curve, 1: map_range.outputs["Result"]}, - attrs={'operation': 'MULTIPLY'}) - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Value': multiply}) - - -@node_utils.to_nodegroup('nodegroup_pinnae_level1_instance_rotation', singleton=False, type='GeometryNodeTree') -def nodegroup_pinnae_level1_instance_rotation(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Value1', 0.5), - ('NodeSocketFloat', 'Value2', 1.0)]) - map_range_8 = nw.new_node(Nodes.MapRange, input_kwargs={'Value': group_input.outputs['Value2'], 3: 2, 4: 3.1}) - add = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Value1"], 1: map_range_8.outputs["Result"]}) - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': add}) - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Vector': combine_xyz}) - - -@node_utils.to_nodegroup('nodegroup_pinnae_level1_rotation', singleton=False, type='GeometryNodeTree') -def nodegroup_pinnae_level1_rotation(nw: NodeWrangler, gravity_rotation=1): - # Code generated using version 2.4.3 of the node_transpiler - - position = nw.new_node(Nodes.InputPosition) - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketFloat', 'Value1', 1.0), - ('NodeSocketFloat', 'Value2', 0.5)]) - bounding_box = nw.new_node(Nodes.BoundingBox, input_kwargs={'Geometry': group_input.outputs["Geometry"]}) - multiply = nw.new_node(Nodes.VectorMath, input_kwargs={0: bounding_box.outputs["Max"], 1: (0.0, 0.0, 1.0)}, - attrs={'operation': 'MULTIPLY'}) - add = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs['Value2'], 1: 0.0}) - pinnae_index = nw.new_node(Nodes.Index, label='PinnaeIndex') - pinnaelevel1xaxisrotation = nw.new_node(nodegroup_pinnae_level1_xaxis_rotation().name, - input_kwargs={'From Max': add, 1: pinnae_index, - 2: group_input.outputs["Value1"]}) - vector_rotate = nw.new_node(Nodes.VectorRotate, - 
input_kwargs={'Vector': position, 'Center': (0, 0, 0), - 'Angle': pinnaelevel1xaxisrotation}, - attrs={'rotation_type': 'X_AXIS'}) - pinnaelevel1gravityrotation = nw.new_node(nodegroup_pinnae_level1_gravity_rotation(gravity_rotation=gravity_rotation).name, - input_kwargs={'From Max': add, 'Value': pinnae_index}) - vector_rotate_1 = nw.new_node(Nodes.VectorRotate, - input_kwargs={'Vector': vector_rotate, 'Center': (0, 0, 0), - 'Angle': pinnaelevel1gravityrotation}, - attrs={'rotation_type': 'X_AXIS'}) - pinnaelevel1zaxisrotation = nw.new_node(nodegroup_pinnae_level1_zaxis_rotation().name, - input_kwargs={'From Max': add, 'Value': pinnae_index}) - vector_rotate_2 = nw.new_node(Nodes.VectorRotate, - input_kwargs={'Vector': vector_rotate_1, 'Center': multiply.outputs["Vector"], - 'Angle': pinnaelevel1zaxisrotation}, - attrs={'rotation_type': 'Z_AXIS'}) - pinnaelevel1yaxisrotation = nw.new_node(nodegroup_pinnae_level1_yaxis_rotation().name, - input_kwargs={'From Max': add, 'Value': pinnae_index}) - vector_rotate_3 = nw.new_node(Nodes.VectorRotate, - input_kwargs={'Vector': vector_rotate_2, 'Center': multiply.outputs["Vector"], - 'Angle': pinnaelevel1yaxisrotation}, - attrs={'rotation_type': 'Y_AXIS'}) - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Vector': vector_rotate_3, 'Value': pinnaelevel1xaxisrotation}) - - -@node_utils.to_nodegroup('nodegroup_pinnae_level1_instance_position', singleton=False, type='GeometryNodeTree') -def nodegroup_pinnae_level1_instance_position(nw: NodeWrangler, pinnae_contour): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Value1', 1.0), - ('NodeSocketFloat', 'From Max', 1.0), - ('NodeSocketFloat', 'Value2', 1.0)]) - - map_range_3 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': group_input.outputs["Value1"], 2: group_input.outputs["From Max"], - 3: 1.0, 4: 0.0}) - - float_curve_2 = nw.new_node(Nodes.FloatCurve, input_kwargs={'Value': map_range_3.outputs["Result"]}) - node_utils.assign_curve(float_curve_2.mapping.curves[0], - [(0.0, pinnae_contour[0]), (0.2, pinnae_contour[1]), (0.4, pinnae_contour[2]), - (0.55, pinnae_contour[3]), (0.7, pinnae_contour[4]), (0.8, pinnae_contour[5]), - (0.9, pinnae_contour[6]), (1.0, pinnae_contour[7])]) - accumulate_field_1 = nw.new_node(Nodes.AccumulateField, input_kwargs={1: float_curve_2}) - # pinnae scale w.r.t fern age - map_range_5 = nw.new_node(Nodes.MapRange, input_kwargs={'Value': group_input.outputs['Value2'], 3: 0.3, 4: 4.5}) - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: accumulate_field_1.outputs[4], 1: map_range_5.outputs["Result"]}, - attrs={'operation': 'MULTIPLY'}) - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': multiply}) - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Vector': combine_xyz_1, 'Result': map_range_3.outputs["Result"]}) - - -@node_utils.to_nodegroup('nodegroup_pinnae_level2_rotation', singleton=False, type='GeometryNodeTree') -def nodegroup_pinnae_level2_rotation(nw: NodeWrangler, z_axis_rotate, y_axis_rotate, x_axis_rotate): - # Code generated using version 2.4.3 of the node_transpiler - - position_1 = nw.new_node(Nodes.InputPosition) - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketFloat', 'Value1', 1.0), - ('NodeSocketFloat', 'Value2', 0.5), - ('NodeSocketFloat', 'Value3', 0.5)]) - add = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs['Value2'], 1: 
0.0}) - add_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs['Value3'], 1: 0.0}) - map_range_2 = nw.new_node(Nodes.MapRange, input_kwargs={'Value': add, 'From Max': add_1}) - float_curve_1 = nw.new_node(Nodes.FloatCurve, input_kwargs={'Value': map_range_2.outputs["Result"]}) - node_utils.assign_curve(float_curve_1.mapping.curves[0], - [(0.0, z_axis_rotate[0]), (0.1, z_axis_rotate[1]), (0.25, z_axis_rotate[2]), - (0.45, z_axis_rotate[3]), (0.6, z_axis_rotate[4]), (1.0, z_axis_rotate[5])]) - add_2 = nw.new_node(Nodes.Math, input_kwargs={0: float_curve_1, 1: -0.25}) - - # pinna z-axis curvature w.r.t the fern age - map_range_7 = nw.new_node(Nodes.MapRange, input_kwargs={'Value': group_input.outputs['Value1'], 3: 1.2, 4: 0.0}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: add_2, 1: map_range_7.outputs["Result"]}, - attrs={'operation': 'MULTIPLY'}) - vector_rotate_1 = nw.new_node(Nodes.VectorRotate, - input_kwargs={'Vector': position_1, 'Center': (0, 0, 0), - 'Angle': multiply_1}, attrs={'rotation_type': 'Z_AXIS'}) - map_range = nw.new_node(Nodes.MapRange, input_kwargs={'Value': add, 2: add_1}) - float_curve = nw.new_node(Nodes.FloatCurve, input_kwargs={'Value': map_range.outputs["Result"]}) - node_utils.assign_curve(float_curve.mapping.curves[0], - [(0.0, y_axis_rotate[0]), (0.1, y_axis_rotate[1]), (0.25, y_axis_rotate[2]), - (0.45, y_axis_rotate[3]), (0.6, y_axis_rotate[4]), (1.0, y_axis_rotate[5])]) - - add_3 = nw.new_node(Nodes.Math, input_kwargs={0: float_curve, 1: -0.5}) - multiply_2 = nw.new_node(Nodes.Math, input_kwargs={0: add_3, 1: 1.0}, attrs={'operation': 'MULTIPLY'}) - vector_rotate = nw.new_node(Nodes.VectorRotate, - input_kwargs={'Vector': vector_rotate_1, 'Angle': multiply_2}, - attrs={'rotation_type': 'Y_AXIS'}) - map_range_1 = nw.new_node(Nodes.MapRange, input_kwargs={'Value': add, 2: add_1}) - float_curve_2 = nw.new_node(Nodes.FloatCurve, input_kwargs={'Value': map_range_1.outputs["Result"]}) - node_utils.assign_curve(float_curve_2.mapping.curves[0], - [(0.0, x_axis_rotate[0]), (0.1, x_axis_rotate[1]), (0.25, x_axis_rotate[2]), - (0.45, x_axis_rotate[3]), (0.6, x_axis_rotate[4]), (1.0, x_axis_rotate[5])]) - multiply_3 = nw.new_node(Nodes.Math, input_kwargs={0: float_curve_2, 1: 1.0}, attrs={'operation': 'MULTIPLY'}) - vector_rotate_2 = nw.new_node(Nodes.VectorRotate, input_kwargs={'Vector': vector_rotate, 'Angle': multiply_3}, - attrs={'rotation_type': 'X_AXIS'}) - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Vector': vector_rotate_2}) - - -@node_utils.to_nodegroup('nodegroup_pinnae_level2_set_point', singleton=False, type='GeometryNodeTree') -def nodegroup_pinnae_level2_set_point(nw: NodeWrangler, pinna_contour): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Value1', 1.0), - ('NodeSocketFloat', 'From Max', 1.0), - ('NodeSocketFloat', 'Value2', 1.0)]) - map_range_4 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': group_input.outputs["Value1"], 2: group_input.outputs["From Max"], - 3: 1.0, 4: 0.0}) - float_curve = nw.new_node(Nodes.FloatCurve, input_kwargs={'Value': map_range_4.outputs["Result"]}) - node_utils.assign_curve(float_curve.mapping.curves[0], [(0.0, pinna_contour[0]), (0.38, pinna_contour[1]), - (0.55, pinna_contour[2]), (0.75, pinna_contour[3]), - (0.9, pinna_contour[4]), (1.0, pinna_contour[5])]) - accumulate_field_2 = nw.new_node(Nodes.AccumulateField, input_kwargs={1: float_curve}) - - # pinna scale w.r.t fern age 
- map_range_6 = nw.new_node(Nodes.MapRange, input_kwargs={'Value': group_input.outputs['Value2'], 3: 0.5, 4: 2.0}) - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: accumulate_field_2.outputs[4], 1: map_range_6.outputs["Result"]}, - attrs={'operation': 'MULTIPLY'}) - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply}) - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Vector': combine_xyz_2, 'Value': float_curve, - 'Result': map_range_4.outputs["Result"]}) - - -@node_utils.to_nodegroup('nodegroup_pinnae_level2_instance_on_points', singleton=False, type='GeometryNodeTree') -def nodegroup_pinnae_level2_instance_on_points(nw: NodeWrangler, leaf, pinna_contour): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Points', None), - ('NodeSocketFloat', 'Value1', 1.0), - ('NodeSocketFloat', 'Value2', 0.5), - ('NodeSocketFloat', 'Value3', 1.0)]) - index = nw.new_node(Nodes.Index) - object_info_2 = nw.new_node(Nodes.ObjectInfo, input_kwargs={'Object': leaf}) - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': object_info_2.outputs["Geometry"], 'Scale': (1.2, -1.0, 1.0)}) - transform_2 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': object_info_2.outputs["Geometry"], 'Scale': (1.2, 1.0, 1.0)}) - join_geometry = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [transform, transform_2]}) - add = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs['Value2'], 1: -0.3}) - combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': 1.57, 'Z': add}) - float_curve_6 = nw.new_node(Nodes.FloatCurve, input_kwargs={'Value': group_input.outputs["Value1"]}) - node_utils.assign_curve(float_curve_6.mapping.curves[0], [(0.0, pinna_contour[0]), (0.38, pinna_contour[1]), - (0.55, pinna_contour[2]), (0.75, pinna_contour[3]), - (0.9, pinna_contour[4]), (1.0, pinna_contour[5])]) - # pinna leaf size w.r.t the fern age - map_range = nw.new_node(Nodes.MapRange, input_kwargs={'Value': group_input.outputs['Value3'], 3: 6, 4: 8}) - multiply = nw.new_node(Nodes.VectorMath, input_kwargs={0: float_curve_6, 1: map_range.outputs["Result"]}, - attrs={'operation': 'MULTIPLY'}) - instance_on_points_2 = nw.new_node(Nodes.InstanceOnPoints, - input_kwargs={'Points': group_input.outputs["Points"], 'Selection': index, - 'Instance': join_geometry, 'Rotation': combine_xyz_3, - 'Scale': multiply.outputs["Vector"]}) - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Instances': instance_on_points_2}) - - -@node_utils.to_nodegroup('nodegroup_pinnae_level2_stein', singleton=False, type='GeometryNodeTree') -def nodegroup_pinnae_level2_stein(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Value1', 0.5), - ('NodeSocketFloat', 'Value2', 0.5), - ('NodeSocketGeometry', 'Mesh', None)]) - mesh_to_curve_1 = nw.new_node(Nodes.MeshToCurve, input_kwargs={'Mesh': group_input.outputs["Mesh"]}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Value1"], 1: 0.1}, - attrs={'operation': 'MULTIPLY'}) - set_curve_radius_1 = nw.new_node(Nodes.SetCurveRadius, input_kwargs={'Curve': mesh_to_curve_1, 'Radius': multiply}) - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs['Value2'], 1: 0.5}, - attrs={'operation': 'MULTIPLY'}) - curve_circle_1 = nw.new_node(Nodes.CurveCircle, input_kwargs={'Radius': 
multiply_1, 'Resolution': 10}) - curve_to_mesh_1 = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': set_curve_radius_1, - 'Profile Curve': curve_circle_1.outputs["Curve"]}) - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Mesh': curve_to_mesh_1}) - - -@node_utils.to_nodegroup('nodegroup_pinnae', singleton=False, type='GeometryNodeTree') -def geometry_pinnae_nodes(nw: NodeWrangler, leaf, leaf_num_param=18, age_param=0.4, pinna_num_param=40, - version_num_param=4, gravity_rotation=1): - # Code generated using version 2.4.3 of the node_transpiler - - # Define Input Node - leaf_index = nw.new_node(Nodes.Index, label='LeafIndex') - pinna_index = nw.new_node(Nodes.Index, label='PinnaIndex') - pinna_num = nw.new_node(Nodes.Integer, label='PinnaNum', attrs={'integer': 10}) - pinna_num.integer = pinna_num_param - age = nw.new_node(Nodes.Value, label='Age') - age.outputs[0].default_value = age_param - - mesh_lines_left, selections_left = [], [] - mesh_lines_right, selections_right = [], [] - - # Generate Random Pinnae Contour, Two Modes: Linear+Noise, StepwiseLinear+Noise - mode_random_bit = randint(0, 2, size=(1,))[0] - if mode_random_bit: - pinnae_contour = [0, 0.2, 0.6, 1.4, 3.0, 4.0, 5.0, 6.0] - for i in range(8): - pinnae_contour[i] = (pinnae_contour[i] + normal(0, 0.04 * i, (1,))[0]) / 6. - else: - pinnae_contour = [0, 0.2, 0.6, 1.4, 3.0, 4.0, 5.0, 4.2] - for i in range(8): - pinnae_contour[i] = (pinnae_contour[i] + normal(0, 0.04 * i, (1,))[0]) / 6. - - # Common Components - pinnaelevel1instanceposition = nw.new_node(nodegroup_pinnae_level1_instance_position(pinnae_contour).name, - input_kwargs={0: pinna_index, 'From Max': pinna_num, 2: age}) - left_noise, right_noise = nw.new_node(Nodes.WhiteNoiseTexture), nw.new_node(Nodes.WhiteNoiseTexture) - pinnaelevel1scale = nw.new_node(nodegroup_pinnae_level1_scale(pinnae_contour).name, - input_kwargs={0: pinnaelevel1instanceposition.outputs["Result"], 1: age}) - - # Left & Right Instance Point Selections for each Version - random_bit = randint(2, size=(1,))[0] - for i in range(version_num_param): - index = nw.new_node(Nodes.Index) - greater_equal = nw.new_node(Nodes.Compare, - input_kwargs={0: left_noise.outputs["Value"], 1: i / version_num_param}, - attrs={'operation': 'GREATER_EQUAL'}) - less_equal = nw.new_node(Nodes.Compare, - input_kwargs={0: left_noise.outputs["Value"], 1: (i+1) / version_num_param}, - attrs={'operation': 'LESS_EQUAL'}) - op_and = nw.new_node(Nodes.BooleanMath, input_kwargs={0: greater_equal, 1: less_equal}) - - greater_than = nw.new_node(Nodes.Math, input_kwargs={0: index, 1: 2.0}, - attrs={'operation': 'GREATER_THAN'}) - modulo = nw.new_node(Nodes.Math, input_kwargs={0: index, 1: 2.0}, - attrs={'operation': 'MODULO'}) - if random_bit: - modulo = nw.new_node(Nodes.Math, input_kwargs={0: 1, 1: modulo}, attrs={'operation': 'SUBTRACT'}) - op_and_1 = nw.new_node(Nodes.BooleanMath, input_kwargs={0: greater_than, 1: modulo}) - op_and_2 = nw.new_node(Nodes.BooleanMath, input_kwargs={0: op_and, 1: op_and_1}) - selections_left.append(op_and_2) - - random_bit = randint(2, size=(1,))[0] - for i in range(version_num_param): - greater_equal = nw.new_node(Nodes.Compare, - input_kwargs={0: right_noise.outputs["Value"], 1: i / version_num_param}, - attrs={'operation': 'GREATER_EQUAL'}) - less_equal = nw.new_node(Nodes.Compare, - input_kwargs={0: right_noise.outputs["Value"], 1: (i+1) / version_num_param}, - attrs={'operation': 'LESS_EQUAL'}) - op_and = nw.new_node(Nodes.BooleanMath, input_kwargs={0: greater_equal, 1: 
less_equal}) - index = nw.new_node(Nodes.Index) - greater_than = nw.new_node(Nodes.Math, input_kwargs={0: index, 1: 2.0}, - attrs={'operation': 'GREATER_THAN'}) - modulo = nw.new_node(Nodes.Math, input_kwargs={0: index, 1: 2.0}, - attrs={'operation': 'MODULO'}) - if random_bit: - modulo = nw.new_node(Nodes.Math, input_kwargs={0: 1, 1: modulo}, attrs={'operation': 'SUBTRACT'}) - op_and_1 = nw.new_node(Nodes.BooleanMath, input_kwargs={0: greater_than, 1: modulo}) - op_and_2 = nw.new_node(Nodes.BooleanMath, input_kwargs={0: op_and, 1: op_and_1}) - selections_right.append(op_and_2) - - # Each Pinna Version - rotation, pinnaelevel1rotation = True, None - for i in range(version_num_param): - # Define the Pinna Contour of each Version - pinna_contour = [] - k = uniform(0.5, 0.58, size=(1,))[0] - for j in range(6): - pinna_contour.append(k * np.clip(j * (1. + normal(0, 0.1, (1,))[0]) / 5. + 0.08, 0, 0.7)) - # Define the Num Leaf of each Version - integer_2 = nw.new_node(Nodes.Integer, attrs={'integer': 10}) - integer_2.integer = leaf_num_param + randint(-1, 2, (1,))[0] - - mesh_line_pinna = nw.new_node(Nodes.MeshLine, input_kwargs={'Count': pinna_num, 'Offset': (0.0, 0.0, 0.0)}) - set_position_pinna = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': mesh_line_pinna, - 'Position': pinnaelevel1instanceposition.outputs["Vector"]}) - if rotation: - pinnaelevel1rotation = nw.new_node(nodegroup_pinnae_level1_rotation(gravity_rotation=gravity_rotation).name, - input_kwargs={'Geometry': set_position_pinna, 1: age, 2: pinna_num}) - rotation = False - pinnaelevel1instancerotation = nw.new_node(nodegroup_pinnae_level1_instance_rotation().name, - input_kwargs={0: pinnaelevel1rotation.outputs["Value"], 1: age}) - set_rotation_pinna = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': set_position_pinna, - 'Position': pinnaelevel1rotation.outputs["Vector"]}) - mesh_line_leaf = nw.new_node(Nodes.MeshLine, input_kwargs={'Count': integer_2, 'Offset': (0.0, 0.0, 0.0)}) - pinnaelevel2setpoint = nw.new_node(nodegroup_pinnae_level2_set_point(pinna_contour=pinna_contour).name, - input_kwargs={0: leaf_index, 'From Max': integer_2, 2: age}) - set_position_leaf = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': mesh_line_leaf, - 'Position': pinnaelevel2setpoint.outputs["Vector"]}) - - x_curvature, y_curvature, z_curvature = random_pinnae_level2_curvature() - pinnaelevel2rotation = nw.new_node(nodegroup_pinnae_level2_rotation(z_axis_rotate=z_curvature, - y_axis_rotate=y_curvature, - x_axis_rotate=x_curvature).name, - input_kwargs={'Geometry': set_position_leaf, 1: age, - 2: leaf_index, 3: integer_2}) - set_rotation_leaf = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': set_position_leaf, 'Position': pinnaelevel2rotation}) - pinna_on_pinnae = nw.new_node(Nodes.InstanceOnPoints, - input_kwargs={'Points': set_rotation_pinna, - 'Selection': selections_left[i], - 'Instance': set_rotation_leaf, - 'Rotation': pinnaelevel1instancerotation, - 'Scale': pinnaelevel1scale}) - rotate_instances = nw.new_node(Nodes.RotateInstances, - input_kwargs={'Instances': pinna_on_pinnae, - 'Rotation': (-0.1571, 0.0, 0.0)}) - scale_instances = nw.new_node(Nodes.ScaleInstances, - input_kwargs={'Instances': rotate_instances, 'Scale': (-1.0, 1.0, 1.0)}) - pinnaelevel2stein = nw.new_node(nodegroup_pinnae_level2_stein().name, - input_kwargs={0: pinnaelevel2setpoint.outputs["Result"], - 'Mesh': scale_instances}) - pinnaelevel2instanceonpoints = nw.new_node( - nodegroup_pinnae_level2_instance_on_points(leaf=leaf, 
pinna_contour=pinna_contour).name, - input_kwargs={'Points': scale_instances, 1: pinnaelevel2setpoint.outputs["Result"], 2: 0.0, 3: age}) - join_geometry = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [pinnaelevel2stein, pinnaelevel2instanceonpoints]}) - - mesh_lines_left.append(join_geometry) - if i == version_num_param - 1: - pinnaelevel1stein = nw.new_node(nodegroup_pinnae_level1_stein().name, - input_kwargs={'Mesh': set_rotation_pinna, 1: age, - 2: pinnaelevel1instanceposition.outputs["Result"]}) - mesh_lines_left.append(pinnaelevel1stein) - - for i in range(version_num_param): - # Define the Pinna Contour of each Version - pinna_contour = [] - k = uniform(0.5, 0.58, size=(1,))[0] - for j in range(6): - pinna_contour.append(k * np.clip(j * (1. + normal(0, 0.1, (1,))[0]) / 5. + 0.08, 0, 0.7)) - # Define the Num Leaf of each Version - integer_2 = nw.new_node(Nodes.Integer, attrs={'integer': 10}) - integer_2.integer = leaf_num_param + randint(-1, 2, (1,))[0] - - mesh_line_pinna = nw.new_node(Nodes.MeshLine, input_kwargs={'Count': pinna_num, 'Offset': (0.0, 0.0, 0.0)}) - set_position_pinna = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': mesh_line_pinna, - 'Position': pinnaelevel1instanceposition.outputs["Vector"]}) - pinnaelevel1instancerotation = nw.new_node(nodegroup_pinnae_level1_instance_rotation().name, - input_kwargs={0: pinnaelevel1rotation.outputs["Value"], 1: age}) - set_rotation_pinna = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': set_position_pinna, - 'Position': pinnaelevel1rotation.outputs["Vector"]}) - mesh_line_leaf = nw.new_node(Nodes.MeshLine, - input_kwargs={'Count': integer_2, 'Offset': (0.0, 0.0, 0.0)}) - - pinnaelevel2setpoint = nw.new_node(nodegroup_pinnae_level2_set_point(pinna_contour=pinna_contour).name, - input_kwargs={0: leaf_index, 'From Max': integer_2, 2: age}) - - set_position_leaf = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': mesh_line_leaf, - 'Position': pinnaelevel2setpoint.outputs["Vector"]}) - x_curvature, y_curvature, z_curvature = random_pinnae_level2_curvature() - pinnaelevel2rotation = nw.new_node(nodegroup_pinnae_level2_rotation(z_axis_rotate=z_curvature, - y_axis_rotate=y_curvature, - x_axis_rotate=x_curvature).name, - input_kwargs={'Geometry': set_position_leaf, 1: age, 2: leaf_index, - 3: integer_2}) - set_rotation_leaf = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': set_position_leaf, 'Position': pinnaelevel2rotation}) - pinna_on_pinnae = nw.new_node(Nodes.InstanceOnPoints, - input_kwargs={'Points': set_rotation_pinna, 'Selection': selections_right[i], - 'Instance': set_rotation_leaf, 'Scale': pinnaelevel1scale, - 'Rotation': pinnaelevel1instancerotation}) - rotate_instances = nw.new_node(Nodes.RotateInstances, input_kwargs={'Instances': pinna_on_pinnae, - 'Rotation': (-0.1571, 0.0, 0.0)}) - scale_instances = nw.new_node(Nodes.ScaleInstances, - input_kwargs={'Instances': rotate_instances, 'Scale': (1.0, 1.0, 1.0)}) - pinnaelevel2stein = nw.new_node(nodegroup_pinnae_level2_stein().name, - input_kwargs={0: pinnaelevel2setpoint.outputs["Result"], - 'Mesh': scale_instances}) - pinnaelevel2instanceonpoints = nw.new_node( - nodegroup_pinnae_level2_instance_on_points(leaf=leaf, pinna_contour=pinna_contour).name, - input_kwargs={'Points': scale_instances, 1: pinnaelevel2setpoint.outputs["Result"], 2: 0.0, 3: age}) - join_geometry = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [pinnaelevel2stein, pinnaelevel2instanceonpoints]}) - mesh_lines_right.append(join_geometry) - 
- join_geometry_whole = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': mesh_lines_left + mesh_lines_right}) - realize_instances = nw.new_node(Nodes.RealizeInstances, input_kwargs={'Geometry': join_geometry_whole}) - noise_texture = nw.new_node(Nodes.NoiseTexture, input_kwargs={'Scale': 0.4, 'Roughness': 0.2}) - set_positions = nw.new_node(Nodes.SetPosition, input_kwargs={'Geometry': realize_instances, - 'Offset': noise_texture.outputs["Color"]}) - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': set_positions}) - - -def check_vicinity(rotation, pinnae_rs): - for r in pinnae_rs: - if abs(rotation[1] - r[1]) < 0.1 and abs(rotation[2] - r[2]) < 0.15: - return True - return False - - -def geo_fern(nw: NodeWrangler, **kwargs): - pinnaes = [] - # Two modes: Random Like and Flatten Like - fern_mode = kwargs["fern_mode"] - pinnae_num = kwargs["pinnae_num"] - scale = kwargs["scale"] - version_num = kwargs["version_num"] - leaf = kwargs["leaf"] - if fern_mode == "young_and_grownup": - rotates = [] # Horizontal grownup pinnae - # Generate non-overlapping pinnae orientations - for i in range(pinnae_num): - flip_bit = randint(0, 3, (1,))[0] - if flip_bit: - rotate_z = uniform(2.74, 3.54, (1,))[0] - else: - rotate_z = uniform(-0.4, 0.4, (1,))[0] - rotate_x = uniform(0.8, 1.1, (1,))[0] - rotate_z2 = uniform(0, 6.28, (1,))[0] - if flip_bit: - gravity_dir = 1 - else: - gravity_dir = -1 - rotate = (rotate_z, rotate_x, rotate_z2, gravity_dir) - if check_vicinity(rotate, rotates): - continue - else: - rotates.append(rotate) - # Generate pinnae - for r in rotates: - random_age = uniform(0.7, 0.95, (1,))[0] - random_leaf_num = randint(15, 25, (1,))[0] - random_pinna_num = randint(60, 80, (1,))[0] - shape = nw.new_node(geometry_pinnae_nodes(leaf, leaf_num_param=random_leaf_num, age_param=random_age, - pinna_num_param=random_pinna_num, - version_num_param=version_num, - gravity_rotation=r[3]).name) - z_transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': shape, 'Rotation': (0., 0., r[0])}) - x_transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': z_transform, 'Rotation': (-r[1], 0., 0.)}) - z2_transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': x_transform, 'Rotation': (0., 0., r[2])}) - pinnaes.append(z2_transform) - - # Verticle young pinnae - young_num = randint(0, 5, size=(1,))[0] - for i in range(young_num): - random_age = uniform(0.2, 0.5, (1,))[0] - random_leaf_num = randint(14, 20, (1,))[0] - random_pinna_num = randint(60, 100, (1,))[0] - rotate_z = uniform(0, 6.28, (1,)) - rotate_x = uniform(0, 0.4, (1,)) - rotate_z2 = uniform(0, 6.28, (1,)) - shape = nw.new_node(geometry_pinnae_nodes(leaf, leaf_num_param=random_leaf_num, age_param=random_age, - pinna_num_param=random_pinna_num, - version_num_param=version_num, gravity_rotation=0).name) - z_transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': shape, 'Rotation': (0., 0., rotate_z[0])}) - x_transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': z_transform, 'Rotation': (-rotate_x[0], 0., 0.)}) - z2_transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': x_transform, 'Rotation': (0., 0., rotate_z2[0])}) - pinnaes.append(z2_transform) - elif fern_mode == 'all_grownup': - # Random grownup pinnae - rotates = [] - for i in range(pinnae_num): - rotate_z = normal(3.14, 0.2, (1,))[0] - rotate_x = uniform(0.5, 1.1, (1,))[0] - rotate_z2 = uniform(0, 6.28, (1,))[0] - rotate = (rotate_z, rotate_x, rotate_z2, 1) - if check_vicinity(rotate, 
rotates): - continue - else: - rotates.append(rotate) - - for r in rotates: - random_age = uniform(0.7, 0.9, (1,))[0] - random_leaf_num = randint(16, 25, (1,))[0] - random_pinna_num = randint(60, 80, (1,))[0] - shape = nw.new_node(geometry_pinnae_nodes(leaf, leaf_num_param=random_leaf_num, age_param=random_age, - pinna_num_param=random_pinna_num, - version_num_param=version_num, - gravity_rotation=r[3]).name) - z_transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': shape, 'Rotation': (0., 0., r[0])}) - x_transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': z_transform, 'Rotation': (-r[1], 0., 0.)}) - z2_transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': x_transform, 'Rotation': (0., 0., r[2])}) - pinnaes.append(z2_transform) - elif fern_mode == 'single_pinnae': - shape = nw.new_node(geometry_pinnae_nodes(leaf, - leaf_num_param=20, - age_param=kwargs["age"], - pinna_num_param=60, - version_num_param=version_num).name) - pinnaes.append(shape) - else: - raise NotImplementedError - - join_geometry = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': pinnaes}) - geometry = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': join_geometry, 'Scale': (scale, scale, scale)}) - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': geometry}) - -@gin.register -class FernFactory(AssetFactory): - def __init__(self, factory_seed, coarse=False): - super(FernFactory, self).__init__(factory_seed, coarse=coarse) - - def create_asset(self, **params): - bpy.ops.mesh.primitive_plane_add( - size=1, enter_editmode=False, align='WORLD', location=(0, 0, 0), scale=(1, 1, 1)) - obj = bpy.context.active_object - - if "fern_mode" not in params: - - type_bit = randint(0, 2, (1, ))[0] - if type_bit: - params["fern_mode"] = "young_and_grownup" - else: - params["fern_mode"] = "all_grownup" - - if "scale" not in params: - params["scale"] = 0.02 - - if "version_num" not in params: - params["version_num"] = 5 - - if "pinnae_num" not in params: - params["pinnae_num"] = randint(12, 30, size=(1,))[0] - - # Make the Leaf and Delete It Later - lf_seed = randint(0, 1000, size=(1,))[0] - leaf_model = Leaf.LeafFactory(genome={"leaf_width": 0.4, "width_rand": 0.04}, factory_seed=lf_seed) - leaf = leaf_model.create_asset(material=False) - params["leaf"] = leaf - - surface.add_geomod(obj, geo_fern, apply=True, attributes=[], input_kwargs=params) - butil.delete([leaf]) - with butil.SelectObjects(obj): - bpy.ops.object.material_slot_remove() - bpy.ops.object.shade_flat() - - simple_greenery.apply(obj) - - return obj - - def debug_asset(self, **params): - bpy.ops.mesh.primitive_plane_add( - size=1, enter_editmode=False, align='WORLD', location=(0, 0, 0), scale=(1, 1, 1)) - obj = bpy.context.active_object - params["fern_mode"] = "single_pinnae" - params["scale"] = 1.0 - params["version_num"] = 5 - params["pinnae_num"] = 1 - params["age"] = uniform(0.5, 0.9) - - leaf_model = Leaf.LeafFactory(genome={"leaf_width": 0.4, "width_rand": 0.04}, factory_seed=0) - leaf = leaf_model.create_asset(material=False) - params["leaf"] = leaf - surface.add_geomod(obj, geo_fern, apply=True, attributes=[], input_kwargs=params) - - bpy.ops.object.convert(target='MESH') - butil.delete([leaf]) - tag_object(obj, 'fern') - return obj - - - -# if __name__ == '__main__': -# fern = FernFactory(0) -# obj = fern.debug_asset() -# simple_greenery.apply([obj]) \ No newline at end of file diff --git a/infinigen/assets/small_plants/num_leaf_grass.py 
b/infinigen/assets/small_plants/num_leaf_grass.py deleted file mode 100644 index 567faabfe..000000000 --- a/infinigen/assets/small_plants/num_leaf_grass.py +++ /dev/null @@ -1,193 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Beining Han - - -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core import surface -from infinigen.core.placement.factory import AssetFactory -from infinigen.core.util import blender as butil -from infinigen.assets.small_plants.leaf_general import LeafFactory -from infinigen.assets.small_plants.leaf_heart import LeafHeartFactory -from infinigen.assets.materials import simple_greenery -import numpy as np -from infinigen.core.tagging import tag_object, tag_nodegroup - -@node_utils.to_nodegroup('nodegroup_leafon_stem', singleton=False, type='GeometryNodeTree') -def nodegroup_leaf_on_stem(nw: NodeWrangler, z_rotation=(0, 0, 0,), leaf_scale=1.0, leaf=None): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Points', None)]) - - endpoint_selection = nw.new_node('GeometryNodeCurveEndpointSelection', - input_kwargs={'Start Size': 0}) - - object_info = nw.new_node(Nodes.ObjectInfo, - input_kwargs={'Object': leaf}) - - curve_tangent = nw.new_node(Nodes.CurveTangent) - - align_euler_to_vector = nw.new_node(Nodes.AlignEulerToVector, - input_kwargs={'Vector': curve_tangent}, - attrs={'axis': 'Z'}) - - value = nw.new_node(Nodes.Value) - value.outputs[0].default_value = leaf_scale - - instance_on_points = nw.new_node(Nodes.InstanceOnPoints, - input_kwargs={'Points': group_input.outputs["Points"], - 'Selection': endpoint_selection, - 'Instance': object_info.outputs["Geometry"], - 'Rotation': align_euler_to_vector, 'Scale': value}) - - vector_1 = nw.new_node(Nodes.Vector) - vector_1.vector = z_rotation - - rotate_instances = nw.new_node(Nodes.RotateInstances, - input_kwargs={'Instances': instance_on_points, 'Rotation': vector_1}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Instances': rotate_instances}) - - -@node_utils.to_nodegroup('nodegroup_stem_geometry', singleton=False, type='GeometryNodeTree') -def nodegroup_stem_geometry(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Curve', None)]) - - spline_parameter = nw.new_node(Nodes.SplineParameter) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': spline_parameter.outputs["Factor"], 3: 1.0, 4: 0.4}) - - set_curve_radius = nw.new_node(Nodes.SetCurveRadius, - input_kwargs={'Curve': group_input.outputs["Curve"], - 'Radius': map_range.outputs["Result"]}) - - curve_circle = nw.new_node(Nodes.CurveCircle, - input_kwargs={'Resolution': 12, 'Radius': 0.03}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': set_curve_radius, 'Profile Curve': curve_circle.outputs["Curve"], - 'Fill Caps': True}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Mesh': tag_nodegroup(nw, curve_to_mesh, 'stem')}) - - -def geo_face_colors(nw: NodeWrangler, **kwargs): - # Code generated using version 2.4.3 of the node_transpiler - - rotation_scale = 
kwargs["stem_rotation"] - leaf_num = kwargs["leaf_num"] - leaf = kwargs["leaf"] - mid_z = uniform(0.35, 0.65, size=(1,))[0] - mid_x = normal(0., rotation_scale, size=(1,))[0] - mid_y = normal(0., rotation_scale, size=(1,))[0] - vector_2 = nw.new_node(Nodes.Vector) - vector_2.vector = (mid_x, mid_y, mid_z) - - top_x = normal(0., rotation_scale, size=(1,))[0] - top_y = normal(0., rotation_scale, size=(1,))[0] - vector = nw.new_node(Nodes.Vector) - vector.vector = (top_x, top_y, 1.0) - - quadratic_bezier = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Resolution': 25, 'Start': (0.0, 0.0, 0.0), 'Middle': vector_2, - 'End': vector}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Scale': 1.0, 'Roughness': 0.2}) - - add = nw.new_node(Nodes.VectorMath, - input_kwargs={0: noise_texture.outputs["Fac"], 1: (-0.5, -0.5, -0.5)}) - - spline_parameter_1 = nw.new_node(Nodes.SplineParameter) - - multiply = nw.new_node(Nodes.VectorMath, - input_kwargs={0: add.outputs["Vector"], 1: spline_parameter_1.outputs["Factor"]}, - attrs={'operation': 'MULTIPLY'}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': quadratic_bezier, 'Offset': multiply.outputs["Vector"]}) - - stemgeometry = nw.new_node(nodegroup_stem_geometry().name, - input_kwargs={'Curve': set_position}) - - leaf_scale = uniform(0.15, 0.35, size=(1,))[0] * kwargs["leaf_scale"] - leaves = [] - rotation = 0 - for _ in range(leaf_num): - leaves.append(nw.new_node(nodegroup_leaf_on_stem(z_rotation=(0, 0, rotation), leaf_scale=leaf_scale, leaf=leaf).name, - input_kwargs={'Points': set_position})) - rotation += 6.28 / leaf_num - - join_geometry = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': leaves + [stemgeometry]}) - - realize_instances = nw.new_node(Nodes.RealizeInstances, - input_kwargs={'Geometry': join_geometry}) - - colored = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': realize_instances, - 'Material': surface.shaderfunc_to_material(simple_greenery.shader_simple_greenery)}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': colored}) - - -class NumLeafGrassFactory(AssetFactory): - def __init__(self, factory_seed, coarse=False): - super(NumLeafGrassFactory, self).__init__(factory_seed, coarse=coarse) - self.leaf_num = [2, 3, 4] - self.leaf_model = [LeafFactory, LeafHeartFactory] - - def create_asset(self, **params): - bpy.ops.mesh.primitive_plane_add( - size=1, enter_editmode=False, align='WORLD', location=(0, 0, 0), scale=(1, 1, 1)) - obj = bpy.context.active_object - - lf_seed = randint(0, 1000, size=(1,))[0] - leaf_num = np.random.choice(self.leaf_num, size=(1,), p=[0.2, 0.4, 0.4])[0] - z_offset = normal(0, 0.05, size=(1,))[0] - if leaf_num == 2: - leaf_model = LeafFactory(genome={"leaf_width": 0.95, "width_rand": 0.1, - "z_scaling": z_offset}, factory_seed=lf_seed) - leaf = leaf_model.create_asset() - params["leaf_scale"] = 2.0 - elif leaf_num == 3: - leaf_model = LeafHeartFactory(genome={"leaf_width": 1.1, "width_rand": 0.05, - "z_scaling": z_offset}, factory_seed=lf_seed) - leaf = leaf_model.create_asset() - params["leaf_scale"] = 1.0 - else: - leaf_model = LeafHeartFactory(genome={"leaf_width": 0.85, "width_rand": 0.05, - "z_scaling": z_offset}, factory_seed=lf_seed) - leaf = leaf_model.create_asset() - params["leaf_scale"] = 1.0 - - params["leaf"] = leaf - params["leaf_num"] = leaf_num - params["stem_rotation"] = 0.15 - - surface.add_geomod(obj, geo_face_colors, apply=True, attributes=[], input_kwargs=params) - butil.delete([leaf]) - 
with butil.SelectObjects(obj): - bpy.ops.object.material_slot_remove() - bpy.ops.object.shade_flat() - - tag_object(obj, 'num_leaf_grass') - return obj - - -# if __name__ == '__main__': -# grass = NumLeafGrassFactory(0) -# obj = grass.create_asset() \ No newline at end of file diff --git a/infinigen/assets/small_plants/snake_plant.py b/infinigen/assets/small_plants/snake_plant.py deleted file mode 100644 index b620e5a8c..000000000 --- a/infinigen/assets/small_plants/snake_plant.py +++ /dev/null @@ -1,280 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Beining Han -# Acknowledgements: This file draws inspiration from https://blenderartists.org/t/extrude-face-along-curve-with-geometry-nodes/1432653/3 - -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core import surface -from infinigen.assets.materials import snake_plant -from infinigen.core.placement.factory import AssetFactory -import numpy as np -from infinigen.core.util import blender as butil -from infinigen.core.tagging import tag_object, tag_nodegroup - -@node_utils.to_nodegroup('nodegroup_pedal_thickness', singleton=False, type='GeometryNodeTree') -def nodegroup_pedal_thickness(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Value', 1.0)]) - - map_range_3 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value':group_input.outputs["Value"], 3: 0.2, 4: 0.04}) - - thickness = nw.new_node(Nodes.Value) - thickness.outputs[0].default_value = uniform(0.1, 0.35) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: map_range_3.outputs["Result"], 1: thickness}, - attrs={'operation': 'MULTIPLY'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Value': multiply}) - - -@node_utils.to_nodegroup('nodegroup_z_pedal_rotation', singleton=False, type='GeometryNodeTree') -def nodegroup_z_pedal_rotation(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - position_1 = nw.new_node(Nodes.InputPosition) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Value', 1.0)]) - - - float_curve = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': group_input.outputs["Value"]}) - node_utils.assign_curve(float_curve.mapping.curves[0], - [(0.0, 0.0), (0.25, 0.25 + uniform(-0.1, 0.1)), - (0.50, 0.5 + uniform(-0.15, 0.15)), - (0.75, 0.5 + uniform(0.25, 0.25)), - (1.0, 1.0)]) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: float_curve, 1: uniform(0.8, 2.0)}, - attrs={'operation': 'MULTIPLY'}) - - vector_rotate_1 = nw.new_node(Nodes.VectorRotate, - input_kwargs={'Vector': position_1, 'Angle': multiply}, - attrs={'rotation_type': 'Z_AXIS'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Vector': vector_rotate_1}) - - -@node_utils.to_nodegroup('nodegroup_x_pedal_rotation', singleton=False, type='GeometryNodeTree') -def nodegroup_x_pedal_rotation(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - position_1 = nw.new_node(Nodes.InputPosition) - - spline_parameter_1 = nw.new_node(Nodes.SplineParameter) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: 0.5, 1: spline_parameter_1.outputs["Factor"]}, - 
attrs={'operation': 'MULTIPLY'}) - - vector_rotate = nw.new_node(Nodes.VectorRotate, - input_kwargs={'Vector': position_1, 'Angle': multiply}, - attrs={'rotation_type': 'X_AXIS'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Vector': vector_rotate}) - - -@node_utils.to_nodegroup('nodegroup_setup', singleton=False, type='GeometryNodeTree') -def nodegroup_setup(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - quadratic_bezier = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Resolution': 25, 'Start': (0.0, 0.0, 0.0), 'Middle': (0.0, 0.0, 1.0), - 'End': (uniform(-0.2, 0.2), uniform(0.2, 0.2), 2.0)}) - - x_pedal_rotation = nw.new_node(nodegroup_x_pedal_rotation().name) - - set_position = nw.new_node(Nodes.SetPosition, input_kwargs={'Geometry': quadratic_bezier, 'Offset': x_pedal_rotation}) - - spline_parameter = nw.new_node(Nodes.SplineParameter) - - capture_attribute_1 = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': set_position, - 2: spline_parameter.outputs["Factor"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Spline': capture_attribute_1.outputs[2], - 'Geometry': capture_attribute_1.outputs["Geometry"]}) - - -@node_utils.to_nodegroup('nodegroup_edge_extrusion', singleton=False, type='GeometryNodeTree') -def nodegroup_edge_extrusion(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Value', 1.0), - ('NodeSocketGeometry', 'Geometry', None)]) - - init_width = uniform(0.15, 0.3) - - normal = nw.new_node(Nodes.InputNormal) - - capture_attribute = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': group_input.outputs['Geometry'], 1: normal}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - float_curve = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': group_input.outputs["Value"]}) - node_utils.assign_curve(float_curve.mapping.curves[0], - [(0.0, init_width), (0.25, init_width + uniform(0.0, 0.1)), - (0.50, init_width + uniform(0.02, 0.18)), (0.75, init_width + uniform(0.02, 0.1)), - (1.0, 0.0)]) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': float_curve}) - - set_position_1 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': capture_attribute.outputs["Geometry"], - 'Offset': combine_xyz}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': set_position_1}) - - extrude_mesh = nw.new_node(Nodes.ExtrudeMesh, - input_kwargs={'Mesh': curve_to_mesh, 'Offset': capture_attribute.outputs["Attribute"], - 'Offset Scale': float_curve}, - attrs={'mode': 'EDGES'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Mesh': extrude_mesh.outputs["Mesh"]}) - - -@node_utils.to_nodegroup('nodegroup_face_extrusion', singleton=False, type='GeometryNodeTree') -def nodegroup_face_extrusion(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketFloat', 'Value', 1.0)]) - - z_pedal_rotation = nw.new_node(nodegroup_z_pedal_rotation().name, - input_kwargs={'Value': group_input.outputs["Value"]}) - - set_position_2 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 'Offset': z_pedal_rotation}) - - pedal_thickness = nw.new_node(nodegroup_pedal_thickness().name, - input_kwargs={'Value': group_input.outputs["Value"]}) - - extrude_mesh_2 = 
nw.new_node(Nodes.ExtrudeMesh, - input_kwargs={'Mesh': set_position_2, 'Offset Scale': pedal_thickness, - 'Individual': False}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': extrude_mesh_2}) - - -@node_utils.to_nodegroup('nodegroup_single_pedal', singleton=False, type='GeometryNodeTree') -def nodegroup_single_pedal_nodes(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - setup = nw.new_node(nodegroup_setup().name) - - edge_extrusion = nw.new_node(nodegroup_edge_extrusion().name, - input_kwargs={'Value': setup.outputs["Spline"], - 'Geometry': setup.outputs["Geometry"]}) - - face_extrusion = nw.new_node(nodegroup_face_extrusion().name, - input_kwargs={'Geometry': edge_extrusion, 'Value': setup.outputs["Spline"]}) - - subdivision_surface = nw.new_node(Nodes.SubdivisionSurface, - input_kwargs={'Mesh': face_extrusion, 'Level': 2}) - - set_shade_smooth = nw.new_node(Nodes.SetShadeSmooth, - input_kwargs={'Geometry': subdivision_surface}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': set_shade_smooth}) - - -def check_vicinity(param, pedal_params): - for p in pedal_params: - r1 = max(param[0] * np.sin(param[1]), 0.2) - r2 = max(p[0] * np.sin(p[1]), 0.2) - dist = np.linalg.norm([param[2] - p[2], param[3] - p[3]]) - if r1 + r2 > dist: - return True - return False - - -def geometry_snake_plant_nodes(nw: NodeWrangler, **kwargs): - num_pedals = kwargs['num_pedals'] - pedals = [] - pedal_params = [] - c = 0 - while c < 50 and len(pedal_params) < num_pedals: - c += 1 - scale = uniform(0.7, 1.0) - x_rotation = normal(0, 0.15) - x, y = uniform(-0.7, 0.7), uniform(-0.7, 0.7) - param = (scale, x_rotation, x, y) - if check_vicinity(param, pedal_params): - continue - else: - pedal_params.append(param) - - for param in pedal_params: - scale = param[0] - z_rotation = uniform(0, 6.28) - x_rotation = param[1] - z2_rotation = uniform(0, 6.28) - x, y = param[2], param[3] - pedal = nw.new_node(nodegroup_single_pedal_nodes().name) - s_transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': pedal, 'Scale': (scale, scale, scale), - 'Rotation': (0., 0., z_rotation)}) - x_transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': s_transform, 'Rotation': (x_rotation, 0., 0.)}) - z_transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': x_transform, 'Rotation': (0., 0., z2_rotation), - 'Translation': (x, y, 0)}) - pedals.append(z_transform) - pedals = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': pedals}) - - set_material = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': pedals, - 'Material': surface.shaderfunc_to_material(snake_plant.shader_snake_plant)}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_material}) - - -class SnakePlantFactory(AssetFactory): - def __init__(self, factory_seed, coarse=False): - super(SnakePlantFactory, self).__init__(factory_seed, coarse=coarse) - - def create_asset(self, **params): - bpy.ops.mesh.primitive_plane_add(size=1, enter_editmode=False, align='WORLD', location=(0, 0, 0), scale=(1, 1, 1)) - obj = bpy.context.active_object - - pedal_num = randint(4, 8) - params["num_pedals"] = pedal_num - - surface.add_geomod(obj, geometry_snake_plant_nodes, apply=True, input_kwargs=params) - - # convert to appropriate units - TODO replace this - butil.apply_modifiers(obj) - obj.scale = (0.2, 0.2, 0.2) - butil.apply_transform(obj, scale=True) - - butil.purge_empty_materials(obj) - - tag_object(obj, 'snake_plant') - 
return obj - - -if __name__ == '__main__': - grass = SnakePlantFactory(0) - obj = grass.create_asset() \ No newline at end of file diff --git a/infinigen/assets/small_plants/spider_plant.py b/infinigen/assets/small_plants/spider_plant.py deleted file mode 100644 index 3fc59c306..000000000 --- a/infinigen/assets/small_plants/spider_plant.py +++ /dev/null @@ -1,301 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Beining Han -# Acknowledgements: This file draws inspiration from https://blenderartists.org/t/extrude-face-along-curve-with-geometry-nodes/1432653/3 - -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface -from infinigen.core.placement.factory import AssetFactory -from infinigen.assets.materials import spider_plant -import numpy as np - -from infinigen.core.util import blender as butil -from infinigen.core.tagging import tag_object, tag_nodegroup - -@node_utils.to_nodegroup('nodegroup_set_leaf_countour', singleton=False, type='GeometryNodeTree') -def nodegroup_set_leaf_countour(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Value', 1.0)]) - - float_curve_2 = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': group_input.outputs["Value"]}) - k = uniform(0, 0.05) - node_utils.assign_curve(float_curve_2.mapping.curves[0], - [(0.0, 0.1), (0.2, 0.1 + k / 1.5), (0.4, 0.1 + k / 1.5), - (0.6, 0.1), (0.8, 0.1 - k), (1.0, 0.0)], - handles=['AUTO', 'AUTO', 'AUTO', 'AUTO', 'AUTO', 'VECTOR']) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: float_curve_2, 1: uniform(0.8, 1.3)}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': multiply}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Vector': combine_xyz_2, 'Value': multiply}) - - -@node_utils.to_nodegroup('nodegroup_leaf_z_rotation', singleton=False, type='GeometryNodeTree') -def nodegroup_leaf_z_rotation(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - position_8 = nw.new_node(Nodes.InputPosition) - - spline_parameter_1 = nw.new_node(Nodes.SplineParameter) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': spline_parameter_1.outputs["Factor"], 4: np.abs(normal(0, 0.6))}) - - vector_rotate_6 = nw.new_node(Nodes.VectorRotate, - input_kwargs={'Vector': position_8, 'Center': (0.0, 0.0, 0.5), - 'Angle': map_range_1.outputs["Result"]}, - attrs={'rotation_type': 'Z_AXIS'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Vector': vector_rotate_6}) - - -@node_utils.to_nodegroup('nodegroup_leaf_x_rotation', singleton=False, type='GeometryNodeTree') -def nodegroup_leaf_x_rotation(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - position_5 = nw.new_node(Nodes.InputPosition) - - spline_parameter = nw.new_node(Nodes.SplineParameter) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': spline_parameter.outputs["Factor"], 4: np.abs(normal(0, 1.2))}) - - vector_rotate_4 = nw.new_node(Nodes.VectorRotate, - input_kwargs={'Vector': position_5, 'Center': (0.0, 0.0, 
0.5), - 'Angle': map_range.outputs["Result"]}, - attrs={'rotation_type': 'X_AXIS'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Vector': vector_rotate_4}) - - -@node_utils.to_nodegroup('nodegroup_leaf_rotate_on_base', singleton=False, type='GeometryNodeTree') -def nodegroup_leaf_rotate_on_base(nw: NodeWrangler, x_R=0.): - # Code generated using version 2.4.3 of the node_transpiler - - random_value_2 = nw.new_node(Nodes.RandomValue, - input_kwargs={2: -0.3, 3: 0.3}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: x_R, 1: random_value_2.outputs[1]}) - - random_value_3 = nw.new_node(Nodes.RandomValue, - input_kwargs={2: -0.6, 3: 0.6}) - - noise_texture_1 = nw.new_node(Nodes.NoiseTexture) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': noise_texture_1.outputs["Fac"], 3: -0.5, 4: 0.5}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': add, 'Y': random_value_3.outputs[1], - 'Z': map_range_1.outputs["Result"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Vector': combine_xyz_1}) - - -@node_utils.to_nodegroup('nodegroup_leaf_scale_align', singleton=False, type='GeometryNodeTree') -def nodegroup_leaf_scale_align(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - normal = nw.new_node(Nodes.InputNormal) - - align_euler_to_vector = nw.new_node(Nodes.AlignEulerToVector, - input_kwargs={'Vector': normal}, - attrs={'axis': 'Y'}) - - noise_texture = nw.new_node(Nodes.NoiseTexture) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': noise_texture.outputs["Fac"], 3: 0.6, 4: 1.1}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Rotation': align_euler_to_vector, 'Result': map_range.outputs["Result"]}) - - -@node_utils.to_nodegroup('nodegroup_leaf_geometry', singleton=False, type='GeometryNodeTree') -def nodegroup_leaf_geometry(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - quadratic_bezier = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Resolution': 100, 'Start': (0.0, 0.0, 0.0), 'Middle': (0.0, 0.0, 0.5), - 'End': (0.0, 0.0, 1.0)}) - - leaf_x_rotation = nw.new_node(nodegroup_leaf_x_rotation().name) - - set_position_7 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': quadratic_bezier, 'Offset': leaf_x_rotation}) - - leaf_z_rotation = nw.new_node(nodegroup_leaf_z_rotation().name) - - set_position_2 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': set_position_7, 'Offset': leaf_z_rotation}) - - spline_parameter_3 = nw.new_node(Nodes.SplineParameter) - - capture_attribute_3 = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': set_position_2, - 2: spline_parameter_3.outputs["Factor"]}) - - normal_1 = nw.new_node(Nodes.InputNormal) - - capture_attribute_2 = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': capture_attribute_3.outputs["Geometry"], 1: normal_1}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - set_leaf_countour = nw.new_node(nodegroup_set_leaf_countour().name, - input_kwargs={'Value': capture_attribute_3.outputs[2]}) - - set_position_8 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': capture_attribute_2.outputs["Geometry"], - 'Offset': set_leaf_countour.outputs["Vector"]}) - - curve_to_mesh_2 = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': set_position_8, 'Fill Caps': True}) - - extrude_mesh_3 = nw.new_node(Nodes.ExtrudeMesh, - input_kwargs={'Mesh': curve_to_mesh_2, - 'Offset': capture_attribute_2.outputs["Attribute"], - 'Offset 
Scale': set_leaf_countour.outputs["Value"]}, - attrs={'mode': 'EDGES'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Mesh': extrude_mesh_3}) - - -def geometry_spider_plant_nodes(nw: NodeWrangler, **kwargs): - # Code generated using version 2.4.3 of the node_transpiler - num_leaf_versions = kwargs["num_leaf_versions"] - num_plant_bases = kwargs["num_plant_bases"] - base_radius = kwargs["base_radius"] - leaf_x_R = kwargs["leaf_x_R"] - leaf_x_S = kwargs["leaf_x_S"] - - leaves, bases = [], [] - for _ in range(num_leaf_versions): - leaf = nw.new_node(nodegroup_leaf_geometry().name) - leaves.append(leaf) - - geometry_to_instance = nw.new_node('GeometryNodeGeometryToInstance', - input_kwargs={'Geometry': leaves}) - - for i in range(num_plant_bases): - curve_circle = nw.new_node(Nodes.CurveCircle, - input_kwargs={'Radius': base_radius[i]}) - - resample_curve = nw.new_node(Nodes.ResampleCurve, - input_kwargs={'Curve': curve_circle.outputs["Curve"], 'Count': randint(20, 40)}) - - random_value = nw.new_node(Nodes.RandomValue, - input_kwargs={2: -0.3 * base_radius[i], 3: 0.3 * base_radius[i]}) - - random_value_1 = nw.new_node(Nodes.RandomValue, - input_kwargs={2: -0.3 * base_radius[i], 3: 0.3 * base_radius[i]}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': random_value.outputs[1], 'Y': random_value_1.outputs[1]}) - - set_position_3 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': resample_curve, 'Offset': combine_xyz}) - - subdivision_surface = nw.new_node(Nodes.SubdivisionSurface, - input_kwargs={'Mesh': geometry_to_instance}) - - leaf_scale_align = nw.new_node(nodegroup_leaf_scale_align().name) - - instance_on_points = nw.new_node(Nodes.InstanceOnPoints, - input_kwargs={'Points': set_position_3, 'Instance': subdivision_surface, - 'Pick Instance': True, - 'Rotation': leaf_scale_align.outputs["Rotation"], - 'Scale': leaf_scale_align.outputs["Result"]}) - - value = nw.new_node(Nodes.Value) - value.outputs[0].default_value = leaf_x_S[i] - - scale_instances = nw.new_node(Nodes.ScaleInstances, - input_kwargs={'Instances': instance_on_points, 'Scale': value}) - - leaf_rotate_on_base = nw.new_node(nodegroup_leaf_rotate_on_base(x_R=leaf_x_R[i]).name) - - rotate_instances = nw.new_node(Nodes.RotateInstances, - input_kwargs={'Instances': scale_instances, 'Rotation': leaf_rotate_on_base}) - - realize_instances = nw.new_node(Nodes.RealizeInstances, - input_kwargs={'Geometry': rotate_instances}) - bases.append(realize_instances) - - join_geometry = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': bases}) - - set_shade_smooth = nw.new_node(Nodes.SetShadeSmooth, - input_kwargs={'Geometry': join_geometry}) - - set_material = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': set_shade_smooth, - 'Material': surface.shaderfunc_to_material(spider_plant.shader_spider_plant)}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_material}) - - -class SpiderPlantFactory(AssetFactory): - def __init__(self, factory_seed, coarse=False): - super(SpiderPlantFactory, self).__init__(factory_seed, coarse=coarse) - - def get_params(self): - params = {} - params["num_leaf_versions"] = randint(4, 8) - num_bases = randint(5, 12) - params["num_plant_bases"] = num_bases - base_radius, leaf_x_R, leaf_x_S = [], [], [] - init_base_radius = uniform(0.10, 0.20) - diff_base_radius = init_base_radius - 0.04 - init_x_R, diff_x_R = uniform(1.2, 1.5), uniform(0.7, 1.1) - init_x_S, diff_x_S = uniform(1.4, 2.0), uniform(0.2, 0.6) - for i in 
range(params["num_plant_bases"]): - base_radius.append(init_base_radius - (i * diff_base_radius) / num_bases) - leaf_x_R.append(init_x_R - (i * diff_x_R) / num_bases) - leaf_x_S.append(init_x_S - (i * diff_x_S) / num_bases) - params["base_radius"] = base_radius - params["leaf_x_R"] = leaf_x_R - params["leaf_x_S"] = leaf_x_S - - return params - - def create_asset(self, **params): - bpy.ops.mesh.primitive_plane_add( - size=1, enter_editmode=False, align='WORLD', location=(0, 0, 0), scale=(1, 1, 1)) - obj = bpy.context.active_object - - params = self.get_params() - - surface.add_geomod(obj, geometry_spider_plant_nodes, apply=True, input_kwargs=params) - surface.add_material(obj, spider_plant.shader_spider_plant, selection=None) - - # convert to appropriate units - TODO replace this - butil.apply_modifiers(obj) - obj.scale = (0.1, 0.1, 0.1) - butil.apply_transform(obj, scale=True) - - tag_object(obj, 'spider_plant') - return obj - - -if __name__ == '__main__': - fac = SpiderPlantFactory(0) - fac.create_asset() diff --git a/infinigen/assets/small_plants/succulent.py b/infinigen/assets/small_plants/succulent.py deleted file mode 100644 index 6677108f1..000000000 --- a/infinigen/assets/small_plants/succulent.py +++ /dev/null @@ -1,530 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Beining Han - - -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface -from infinigen.assets.materials import succulent -from infinigen.core.placement.factory import AssetFactory -import numpy as np - -from infinigen.core.util import blender as butil -from infinigen.core.tagging import tag_object, tag_nodegroup - -@node_utils.to_nodegroup('nodegroup_pedal_cross_contour_top', singleton=False, type='GeometryNodeTree') -def nodegroup_pedal_cross_contour_top(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - normal_2 = nw.new_node(Nodes.InputNormal) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Y', 0.0), - ('NodeSocketFloat', 'X', 0.0)]) - - combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': group_input.outputs["X"], 'Y': group_input.outputs["Y"]}) - - multiply = nw.new_node(Nodes.VectorMath, - input_kwargs={0: normal_2, 1: combine_xyz_3}, - attrs={'operation': 'MULTIPLY'}) - - index_1 = nw.new_node(Nodes.Index) - - greater_than = nw.new_node(Nodes.Math, - input_kwargs={0: index_1, 1: 63.0}, - attrs={'operation': 'GREATER_THAN'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Vector': multiply.outputs["Vector"], 'Value': greater_than}) - - -@node_utils.to_nodegroup('nodegroup_pedal_cross_contour_bottom', singleton=False, type='GeometryNodeTree') -def nodegroup_pedal_cross_contour_bottom(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - normal = nw.new_node(Nodes.InputNormal) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Y', 0.0), - ('NodeSocketFloat', 'X', 0.0)]) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': group_input.outputs["X"], 'Y': group_input.outputs["Y"]}) - - multiply = nw.new_node(Nodes.VectorMath, - input_kwargs={0: normal, 1: combine_xyz}, 
- attrs={'operation': 'MULTIPLY'}) - - index = nw.new_node(Nodes.Index) - - less_than = nw.new_node(Nodes.Math, - input_kwargs={0: index, 1: 64.0}, - attrs={'operation': 'LESS_THAN'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Vector': multiply.outputs["Vector"], 'Value': less_than}) - - -@node_utils.to_nodegroup('nodegroup_pedal_cross_contour', singleton=False, type='GeometryNodeTree') -def nodegroup_pedal_cross_contour(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - curve_circle = nw.new_node(Nodes.CurveCircle, - input_kwargs={'Resolution': 128, 'Radius': 0.05}) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Y_bottom', 0.0), - ('NodeSocketFloat', 'X', 0.0), - ('NodeSocketFloat', 'Y_top', 0.0)]) - - pedal_cross_contour_bottom = nw.new_node(nodegroup_pedal_cross_contour_bottom().name, - input_kwargs={'Y': group_input.outputs["Y_bottom"], - 'X': group_input.outputs["X"]}) - - set_position_1 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': curve_circle.outputs["Curve"], - 'Selection': pedal_cross_contour_bottom.outputs["Value"], - 'Offset': pedal_cross_contour_bottom.outputs["Vector"]}) - - pedal_cross_contour_top = nw.new_node(nodegroup_pedal_cross_contour_top().name, - input_kwargs={'Y': group_input.outputs["Y_top"], - 'X': group_input.outputs["X"]}) - - set_position_2 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': set_position_1, - 'Selection': pedal_cross_contour_top.outputs["Value"], - 'Offset': pedal_cross_contour_top.outputs["Vector"]}) - - noise_texture_2 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'W': 7.0, 'Detail': 15.0}, - attrs={'noise_dimensions': '4D'}) - - scale = nw.new_node(Nodes.VectorMath, - input_kwargs={0: noise_texture_2.outputs["Fac"], 'Scale': uniform(0.00, 0.02)}, - attrs={'operation': 'SCALE'}) - - set_position_5 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': set_position_2, 'Offset': scale.outputs["Vector"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_position_5}) - - -@node_utils.to_nodegroup('nodegroup_pedal_z_contour', singleton=False, type='GeometryNodeTree') -def nodegroup_pedal_z_contour(nw: NodeWrangler, curve_param=[]): - # Code generated using version 2.4.3 of the node_transpiler - - spline_parameter = nw.new_node(Nodes.SplineParameter) - - float_curve = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': spline_parameter.outputs["Factor"]}) - node_utils.assign_curve(float_curve.mapping.curves[0], - [(0.0, curve_param[0]), (0.2, curve_param[1] * (1. + normal(0, 0.04))), - (0.4, curve_param[2] * (1. + normal(0, 0.1))), (0.6, curve_param[3] * (1. + normal(0, 0.03))), - (0.8, curve_param[4] * (1. + normal(0, 0.06))), (0.9, curve_param[5] * (1. 
+ normal(0, 0.04))), - (1.0, 0.0)]) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Value', 0.5)]) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: float_curve, 1: group_input.outputs["Value"]}, - attrs={'operation': 'MULTIPLY'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Value': multiply}) - - -@node_utils.to_nodegroup('nodegroup_pedal_stem_curvature', singleton=False, type='GeometryNodeTree') -def nodegroup_pedal_stem_curvature(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - position_3 = nw.new_node(Nodes.InputPosition) - - spline_parameter_1 = nw.new_node(Nodes.SplineParameter) - - float_curve_1 = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': spline_parameter_1.outputs["Factor"]}) - k = uniform(0.0, 0.3) - node_utils.assign_curve(float_curve_1.mapping.curves[0], - [(0.0, 0.0), (0.2, 0.2 - k / 2.5), (0.4, 0.4 - k / 1.1), (0.6, 0.6 - k), - (0.8, 0.8 - k / 1.5), (1.0, 1.0 - k / 3.)]) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Value', 0.2)]) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: float_curve_1, 1: group_input.outputs["Value"]}, - attrs={'operation': 'MULTIPLY'}) - - vector_rotate = nw.new_node(Nodes.VectorRotate, - input_kwargs={'Vector': position_3, 'Center': (0.0, 0.0, 0.2), 'Angle': multiply}, - attrs={'rotation_type': 'X_AXIS'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Vector': vector_rotate}) - - -@node_utils.to_nodegroup('nodegroup_pedal_rotation_on_base_circle', singleton=False, type='GeometryNodeTree') -def nodegroup_pedal_rotation_on_base_circle(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - random_value_1 = nw.new_node(Nodes.RandomValue, - input_kwargs={2: -0.1, 3: 0.1}) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Value1', -1.3), - ('NodeSocketFloat', 'Value2', -1.57)]) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: random_value_1.outputs[1], 1: group_input.outputs["Value1"]}) - - random_value_2 = nw.new_node(Nodes.RandomValue, - input_kwargs={2: -0.3, 3: 0.3}) - - add_1 = nw.new_node(Nodes.Math, - input_kwargs={0: random_value_2.outputs[1], 1: group_input.outputs["Value2"]}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': add, 'Z': add_1}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Vector': combine_xyz_2}) - - -@node_utils.to_nodegroup('nodegroup_base_perturbation', singleton=False, type='GeometryNodeTree') -def nodegroup_base_perturbation(nw: NodeWrangler, R=1.0): - # Code generated using version 2.4.3 of the node_transpiler - - random_value_4 = nw.new_node(Nodes.RandomValue, - input_kwargs={2: -0.8 * R, 3: 0.8 * R}) - - random_value = nw.new_node(Nodes.RandomValue, - input_kwargs={2: -0.8 * R, 3: 0.8 * R}) - - random_value_1 = nw.new_node(Nodes.RandomValue, - input_kwargs={2: -0.2 * R, 3: 0.2 * R}) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Value', 0.5)]) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: random_value_1.outputs[1], 1: group_input.outputs["Value"]}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': random_value_4.outputs[1], 'Y': random_value.outputs[1], 'Z': add}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Vector': combine_xyz_1}) - - -@node_utils.to_nodegroup('nodegroup_pedal_geometry', singleton=False, type='GeometryNodeTree') -def 
nodegroup_pedal_geometry(nw: NodeWrangler, curve_param=[]): - # Code generated using version 2.4.3 of the node_transpiler - - curve_line = nw.new_node(Nodes.CurveLine, - input_kwargs={'End': (0.0, 0.0, 0.2)}) - - integer = nw.new_node(Nodes.Integer, - attrs={'integer': 64}) - integer.integer = 64 - - resample_curve = nw.new_node(Nodes.ResampleCurve, - input_kwargs={'Curve': curve_line, 'Count': integer}) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Y_bottom', 0.0), - ('NodeSocketFloat', 'X', 0.0), - ('NodeSocketFloat', 'Y_top', 0.0), - ('NodeSocketFloat', 'pedal_stem', 0.2), - ('NodeSocketFloat', 'pedal_z', 0.5)]) - - pedal_stem_curvature = nw.new_node(nodegroup_pedal_stem_curvature().name, - input_kwargs={'Value': group_input.outputs["pedal_stem"]}) - - set_position_4 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': resample_curve, 'Offset': pedal_stem_curvature}) - - pedal_z_contour = nw.new_node(nodegroup_pedal_z_contour(curve_param=curve_param).name, - input_kwargs={'Value': group_input.outputs["pedal_z"]}) - - set_curve_radius = nw.new_node(Nodes.SetCurveRadius, - input_kwargs={'Curve': set_position_4, 'Radius': pedal_z_contour}) - - pedal_cross_contour = nw.new_node(nodegroup_pedal_cross_contour().name, - input_kwargs={'Y_bottom': group_input.outputs["Y_bottom"], - 'X': group_input.outputs["X"], - 'Y_top': group_input.outputs["Y_top"]}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': set_curve_radius, 'Profile Curve': pedal_cross_contour, - 'Fill Caps': True}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Mesh': curve_to_mesh}) - - -@node_utils.to_nodegroup('nodegroup_pedal_on_base', singleton=False, type='GeometryNodeTree') -def nodegroup_pedal_on_base(nw: NodeWrangler, R=1.0): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloatDistance', 'Radius', 0.1), - ('NodeSocketFloat', 'x_R', -1.3), - ('NodeSocketFloat', 'z_R', -1.57), - ('NodeSocketInt', 'Resolution', 10), - ('NodeSocketGeometry', 'Instance', None), - ('NodeSocketVectorXYZ', 'Scale', (1.0, 1.0, 1.0)), - ('NodeSocketFloat', 'base_z', 0.5)]) - - curve_circle_1 = nw.new_node(Nodes.CurveCircle, - input_kwargs={'Resolution': group_input.outputs["Resolution"], - 'Radius': group_input.outputs["Radius"]}) - - base_perturbation = nw.new_node(nodegroup_base_perturbation(R=R).name, - input_kwargs={'Value': group_input.outputs["base_z"]}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': curve_circle_1.outputs["Curve"], 'Offset': base_perturbation}) - - normal_1 = nw.new_node(Nodes.InputNormal) - - align_euler_to_vector_1 = nw.new_node(Nodes.AlignEulerToVector, - input_kwargs={'Vector': normal_1}, - attrs={'pivot_axis': 'Z'}) - - random_value_3 = nw.new_node(Nodes.RandomValue, - input_kwargs={2: 0.7, 3: 1.2}) - - instance_on_points_1 = nw.new_node(Nodes.InstanceOnPoints, - input_kwargs={'Points': set_position, - 'Instance': group_input.outputs["Instance"], - 'Rotation': align_euler_to_vector_1, - 'Scale': random_value_3.outputs[1]}) - - realize_instances_1 = nw.new_node(Nodes.RealizeInstances, - input_kwargs={'Geometry': instance_on_points_1}) - - pedal_rotation_on_base_circle = nw.new_node(nodegroup_pedal_rotation_on_base_circle().name, - input_kwargs={0: group_input.outputs["x_R"], - 1: group_input.outputs["z_R"]}) - - rotate_instances_1 = nw.new_node(Nodes.RotateInstances, - input_kwargs={'Instances': 
realize_instances_1, - 'Rotation': pedal_rotation_on_base_circle}) - - scale_instances = nw.new_node(Nodes.ScaleInstances, - input_kwargs={'Instances': rotate_instances_1, 'Scale': group_input.outputs["Scale"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Instances': scale_instances}) - - -def geometry_succulent_nodes(nw: NodeWrangler, **kwargs): - # Code generated using version 2.4.3 of the node_transpiler - pedal_bases = [] - - pedal_cross_coutour_y_bottom = nw.new_node(Nodes.Value, - label='pedal_cross_coutour_y_bottom') - pedal_cross_coutour_y_bottom.outputs[0].default_value = kwargs["cross_y_bottom"] - - pedal_cross_coutour_x = nw.new_node(Nodes.Value, - label='pedal_cross_coutour_x') - pedal_cross_coutour_x.outputs[0].default_value = kwargs["cross_x"] - - pedal_cross_coutour_y_top = nw.new_node(Nodes.Value, - label='pedal_cross_coutour_y_top') - pedal_cross_coutour_y_top.outputs[0].default_value = kwargs["cross_y_top"] - pedal_stem_curvature_scale = nw.new_node(Nodes.Value, - label='pedal_stem_curvature_scale') - pedal_stem_curvature_scale.outputs[0].default_value = np.abs(normal(0, 1.0)) - - pedal_z_coutour_scale = nw.new_node(Nodes.Value, - label='pedal_z_coutour_scale') - pedal_z_coutour_scale.outputs[0].default_value = uniform(0.4, 0.9) - material = kwargs["material"] - - for i in range(kwargs["num_bases"]): - pedal_geometry = nw.new_node(nodegroup_pedal_geometry(curve_param=kwargs["pedal_curve_param"]).name, - input_kwargs={'Y_bottom': pedal_cross_coutour_y_bottom, - 'X': pedal_cross_coutour_x, - 'Y_top': pedal_cross_coutour_y_top, - 'pedal_stem': pedal_stem_curvature_scale, - 'pedal_z': pedal_z_coutour_scale}) - - base_circle_radius = nw.new_node(Nodes.Value, - label='base_circle_radius') - base_circle_radius.outputs[0].default_value = kwargs["base_radius"][i] - - pedal_x_rotation = nw.new_node(Nodes.Value, - label='pedal_x_rotation') - pedal_x_rotation.outputs[0].default_value = kwargs["pedal_x_R"][i] - - base_z_rotation = nw.new_node(Nodes.Value, - label='base_z_rotation') - base_z_rotation.outputs[0].default_value = -1.57 + normal(0, 0.3) - - base_pedal_num = nw.new_node(Nodes.Integer, - label='base_pedal_num', - attrs={'integer': 10}) - base_pedal_num.integer = kwargs["base_pedal_num"][i] - - pedal_scale = nw.new_node(Nodes.Value, - label='pedal_scale') - pedal_scale.outputs[0].default_value = kwargs["base_pedal_scale"][i] - - base_z = nw.new_node(Nodes.Value, - label='base_z') - base_z.outputs[0].default_value = kwargs["base_z"][i] - - pedal_on_base = nw.new_node(nodegroup_pedal_on_base(R=kwargs["base_radius"][i]).name, - input_kwargs={'Radius': base_circle_radius, 'x_R': pedal_x_rotation, - 'z_R': base_z_rotation, - 'Resolution': base_pedal_num, 'Instance': pedal_geometry, - 'Scale': pedal_scale, 'base_z': base_z}) - pedal_bases.append(pedal_on_base) - - join_geometry = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': pedal_bases}) - - set_shade_smooth_1 = nw.new_node(Nodes.SetShadeSmooth, - input_kwargs={'Geometry': join_geometry}) - - set_material = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': set_shade_smooth_1, - 'Material': surface.shaderfunc_to_material(material)}) - - realized = nw.new_node(Nodes.RealizeInstances, [set_material]) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': realized}) - - -class SucculentFactory(AssetFactory): - def __init__(self, factory_seed, coarse=False): - super(SucculentFactory, self).__init__(factory_seed, coarse=coarse) - self.mode = 
np.random.choice(["thin_pedal", "thick_pedal"], p=[0.65, 0.35]) - - def get_params(self, mode): - if mode == 'thin_pedal': - params = {} - params["cross_y_bottom"] = uniform(0.08, 0.25) - params["cross_y_top"] = uniform(-0.04, 0.02) - params["cross_x"] = uniform(0.3, 0.6) - # get geometry params on each base - num_bases = randint(5, 8) - params["num_bases"] = num_bases - base_radius, pedal_x_R, base_pedal_num, base_pedal_scale, base_z = [], [], [], [], [] - init_base_radius, diff_base_radius = uniform(0.09, 0.11), 0.1 - init_x_R, diff_x_R = uniform(-1.2, -1.35), uniform(-0.7, -1.1) - init_pedal_num = randint(num_bases, 15) - diff_pedal_scale = uniform(0.5, 0.9) - for i in range(num_bases): - base_radius.append(init_base_radius - (i * diff_base_radius) / num_bases) - pedal_x_R.append(init_x_R - (i * diff_x_R) / num_bases) - base_pedal_num.append(init_pedal_num - i + randint(0, 2)) - base_pedal_scale.append(1. - (i * diff_pedal_scale) / num_bases) - base_z.append(0. + i * uniform(0.005, 0.008)) - params["base_radius"] = base_radius - params["pedal_x_R"] = pedal_x_R - params["base_pedal_num"] = base_pedal_num - params["base_pedal_scale"] = base_pedal_scale - params["base_z"] = base_z - - contour_bit = randint(0, 3) - material_bit = randint(0, 3) - - if contour_bit == 0: - params["pedal_curve_param"] = [0.08, 0.4, 0.46, 0.36, 0.17, 0.05] - elif contour_bit == 1: - params["pedal_curve_param"] = [0.22, 0.37, 0.50, 0.49, 0.30, 0.08] - elif contour_bit == 2: - params["pedal_curve_param"] = [0.21, 0.26, 0.31, 0.36, 0.29, 0.16] - else: - raise NotImplemented - - if material_bit == 0: - params["material"] = succulent.shader_green_transition_succulent - elif material_bit == 1: - params["material"] = succulent.shader_pink_transition_succulent - elif material_bit == 2: - params["material"] = succulent.shader_green_succulent - else: - raise NotImplemented - - return params - - elif mode == 'thick_pedal': - params = {} - params["cross_y_bottom"] = uniform(0.22, 0.30) - params["cross_y_top"] = uniform(0.08, 0.15) - params["cross_x"] = uniform(0.14, 0.16) - # get geometry params on each base - num_bases = randint(3, 6) - params["num_bases"] = num_bases - base_radius, pedal_x_R, base_pedal_num, base_pedal_scale, base_z = [], [], [], [], [] - init_base_radius, diff_base_radius = uniform(0.12, 0.14), 0.11 - init_x_R, diff_x_R = uniform(-1.3, -1.4), uniform(-0.1, -1.2) - init_pedal_num = randint(num_bases, 12) - diff_pedal_scale = uniform(0.6, 0.9) - for i in range(num_bases): - base_radius.append(init_base_radius - (i * diff_base_radius) / num_bases) - pedal_x_R.append(init_x_R - (i * diff_x_R) / num_bases) - base_pedal_num.append(init_pedal_num - i + randint(0, 2)) - base_pedal_scale.append(1. - (i * diff_pedal_scale) / num_bases) - base_z.append(0. 
+ i * uniform(0.005, 0.006))
-            params["base_radius"] = base_radius
-            params["pedal_x_R"] = pedal_x_R
-            params["base_pedal_num"] = base_pedal_num
-            params["base_pedal_scale"] = base_pedal_scale
-            params["base_z"] = base_z
-
-            contour_bit = randint(0, 2)
-            material_bit = randint(0, 2)
-
-            if contour_bit == 0:
-                params["pedal_curve_param"] = [0.10, 0.36, 0.44, 0.45, 0.30, 0.24]
-            elif contour_bit == 1:
-                params["pedal_curve_param"] = [0.16, 0.35, 0.48, 0.42, 0.30, 0.18]
-            else:
-                raise NotImplemented
-
-            if material_bit == 0:
-                params["material"] = succulent.shader_yellow_succulent
-            elif material_bit == 1:
-                params["material"] = succulent.shader_whitish_green_succulent
-            else:
-                raise NotImplemented
-
-            return params
-        else:
-            raise NotImplemented
-
-    def create_asset(self, **params):
-        bpy.ops.mesh.primitive_plane_add(
-            size=1, enter_editmode=False, align='WORLD', location=(0, 0, 0), scale=(1, 1, 1))
-        obj = bpy.context.active_object
-
-        params = self.get_params(self.mode)
-
-        surface.add_geomod(obj, geometry_succulent_nodes, apply=True, attributes=[], input_kwargs=params)
-
-        obj.scale = (0.2, 0.2, 0.2)
-        obj.location.z += 0.01
-        butil.apply_transform(obj, loc=True, scale=True)
-
-        tag_object(obj, 'succulent')
-
-        return obj
-
-
-if __name__ == '__main__':
-    fac = SucculentFactory(0)
-    fac.create_asset()
\ No newline at end of file
diff --git a/infinigen/assets/table_decorations/sink.py b/infinigen/assets/table_decorations/sink.py
deleted file mode 100644
index 343554bb2..000000000
--- a/infinigen/assets/table_decorations/sink.py
+++ /dev/null
@@ -1,785 +0,0 @@
-# Copyright (c) Princeton University.
-# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree.
-
-# Authors:
-# - Hongyu Wen: sink geometry
-# - Meenal Parakh: material assignment
-# - Stamatis Alexandropoulos: taps
-# - Alexander Raistrick: placeholder, optimize detail, redo cutter
-
-import random
-
-import bpy
-import mathutils
-
-import numpy as np
-from numpy.random import uniform as U, normal as N, randint as RI
-
-from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler
-from infinigen.core.nodes import node_utils
-from infinigen.core.util.color import color_category
-from infinigen.core import surface
-from infinigen.core.util import blender as butil
-
-from infinigen.assets.utils import bbox_from_mesh
-from infinigen.assets.utils.extract_nodegroup_parts import extract_nodegroup_geo
-
-from infinigen.core.util.math import FixedSeed
-from infinigen.core import tagging, tags as t
-from infinigen.core.placement.factory import AssetFactory
-from infinigen.assets.material_assignments import AssetList
-
-
-class SinkFactory(AssetFactory):
-    def __init__(self, factory_seed, coarse=False, dimensions=[1., 1., 1.], upper_height=None):
-        super(SinkFactory, self).__init__(factory_seed, coarse=coarse)
-
-        self.dimensions = dimensions
-        self.factory_seed = factory_seed
-        with FixedSeed(factory_seed):
-            self.params = self.sample_parameters(dimensions, upper_height=upper_height)
-            self.material_params, self.scratch, self.edge_wear = self.get_material_params()
-            self.params.update(self.material_params)
-
-        self.tap_factory = TapFactory(factory_seed)
-
-    def get_material_params(self):
-        material_assignments = AssetList['SinkFactory']()
-        params = {
-            "Sink": material_assignments['sink'].assign_material(),
-            "Tap": material_assignments['tap'].assign_material(),
-        }
-        wrapped_params = {
-            k: surface.shaderfunc_to_material(v) for k, v in params.items()
-        }
-
-
scratch_prob, edge_wear_prob = material_assignments['wear_tear_prob'] - scratch, edge_wear = material_assignments['wear_tear'] - - is_scratch = U() < scratch_prob - is_edge_wear = U() < edge_wear_prob - if not is_scratch: - scratch = None - - if not is_edge_wear: - edge_wear = None - - return wrapped_params, scratch, edge_wear - - @staticmethod - def sample_parameters(dimensions, upper_height, use_default=False, open=False): - width = U(0.4, 1.0) - depth = U(0.4, 0.5) - curvature = U(1.0, 1.0) - if upper_height is None: - upper_height = U(0.2, 0.4) - lower_height = U(0.00, 0.01) - hole_radius = U(0.02, 0.05) - margin = U(0.02, 0.05) - watertap_margin = U(0.1, 0.12) - - params = { - 'Width': width, - 'Depth': depth, - 'Curvature': curvature, - 'Upper Height': upper_height, - 'Lower Height': lower_height, - 'HoleRadius': hole_radius, - 'Margin': margin, - 'WaterTapMargin': watertap_margin, - 'ProtrudeAboveCounter': U(0.01, 0.025), - } - return params - - def _extract_geo_results(self): - - params = self.params.copy() - params.pop('ProtrudeAboveCounter') - - with butil.TemporaryObject(butil.spawn_vert()) as temp: - obj = extract_nodegroup_geo( - temp, nodegroup_sink_geometry(), 'Geometry', ng_params=params - ) - cutter = extract_nodegroup_geo( - temp, nodegroup_sink_geometry(), 'Cutter', ng_params=params - ) - - return obj, cutter - - def create_placeholder(self, i, **kwargs) -> bpy.types.Object: - - obj, cutter = self._extract_geo_results() - butil.delete(cutter) - - min_corner, max_corner = butil.bounds(obj) - min_corner[-1] = max_corner[-1] - self.params['ProtrudeAboveCounter'] - top_slice_placeholder = bbox_from_mesh.box_from_corners(min_corner, max_corner) - - butil.delete(obj) - - return top_slice_placeholder - - def create_asset(self,i, placeholder, state=None, **params): - - obj, cutter = self._extract_geo_results() - tagging.tag_system.relabel_obj(obj) - - cutter.parent = obj - cutter.name = repr(self) + f'.spawn_placeholder({i}).cutter' - cutter.hide_render = True - - tap_loc = (-self.params['Depth'] / 2, 0, self.params['Upper Height']) - tap = self.tap_factory.spawn_asset(i, loc=tap_loc, rot=(0,0,0)) - tap.parent = obj - - return obj - - def finalize_assets(self, assets): - if self.scratch: - self.scratch.apply(assets) - if self.edge_wear: - self.edge_wear.apply(assets) - -class TapFactory(AssetFactory): - - def __init__(self, factory_seed): - super().__init__(factory_seed) - with FixedSeed(factory_seed): - self.params, self.scratch, self.edge_wear = self.get_material_params() - - - @staticmethod - def tap_parameters(): - params = { - 'base_width' : U(0.570,0.630), - 'tap_head': U(0.7,1.1), - 'roation_z': U(5.5,7.0), - 'tap_height': U(0.5,1), - 'base_radius': U(0.0,0.3), - 'Switch': True if U()>0.5 else False, - 'Y': U(-0.5, -0.06), - 'hand_type': True if U()>0.2 else False, - 'hands_length_x': U(0.750,1.25), - 'hands_length_Y': U(0.950, 1.550), - 'one_side': True if U()>0.5 else False, - 'different_type': True if U()>0.8 else False - } - return params - - - def get_material_params(self): - material_assignments = AssetList['TapFactory']() - tap_material = material_assignments['tap'].assign_material() - - wrapped_params = { - 'Tap': surface.shaderfunc_to_material(tap_material) - } - - scratch_prob, edge_wear_prob = material_assignments['wear_tear_prob'] - scratch, edge_wear = material_assignments['wear_tear'] - - is_scratch = U() < scratch_prob - is_edge_wear = U() < edge_wear_prob - if not is_scratch: - scratch = None - - if not is_edge_wear: - edge_wear = None - - return 
wrapped_params, scratch, edge_wear - - def create_asset(self, **_): - obj = butil.spawn_cube() - butil.modify_mesh(obj, 'NODES', node_group=nodegroup_water_tap(), ng_inputs=self.params, apply=True) - obj.scale = (0.4,)*3 - obj.rotation_euler.z += np.pi - butil.apply_transform(obj) - return obj - - def finalize_assets(self, assets): - if self.scratch: - self.scratch.apply(assets) - if self.edge_wear: - self.edge_wear.apply(assets) - - -@node_utils.to_nodegroup('nodegroup_handle', singleton=False, type='GeometryNodeTree') -def nodegroup_handle(nw: NodeWrangler): - # Code generated using version 2.6.5 of the node_transpiler - - bezier_segment = nw.new_node(Nodes.CurveBezierSegment, input_kwargs={ - 'Start': (0.0000, 0.0000, 0.0000), - 'Start Handle': (0.0000, 0.0000, 0.7000), - 'End Handle': (0.2000, 0.0000, 0.7000), - 'End': (1.0000, 0.0000, 0.9000) - }) - - spline_parameter = nw.new_node(Nodes.SplineParameter) - - float_curve = nw.new_node(Nodes.FloatCurve, input_kwargs={'Value': spline_parameter.outputs["Factor"]}) - node_utils.assign_curve(float_curve.mapping.curves[0], [(0.0000, 0.9750), (1.0000, 0.1625)]) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: float_curve, 1: 1.3000}, - attrs={'operation': 'MULTIPLY'}) - - set_curve_radius = nw.new_node(Nodes.SetCurveRadius, - input_kwargs={'Curve': bezier_segment, 'Radius': multiply}) - - curve_circle = nw.new_node(Nodes.CurveCircle, input_kwargs={'Radius': 0.2000}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, input_kwargs={ - 'Curve': set_curve_radius, - 'Profile Curve': curve_circle.outputs["Curve"], - 'Fill Caps': True - }) - - position = nw.new_node(Nodes.InputPosition) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': position}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': separate_xyz.outputs["X"], 1: 0.2000, 3: 1.0000, 4: 2.5000}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz.outputs["Y"], 1: map_range.outputs["Result"]}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={ - 'X': separate_xyz.outputs["X"], - 'Y': multiply_1, - 'Z': separate_xyz.outputs["Z"] - }) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': curve_to_mesh, 'Position': combine_xyz}) - - subdivision_surface = nw.new_node(Nodes.SubdivisionSurface, input_kwargs={'Mesh': set_position, 'Level': 2}) - - set_shade_smooth = nw.new_node(Nodes.SetShadeSmooth, input_kwargs={'Geometry': subdivision_surface}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': set_shade_smooth}, - attrs={'is_active_output': True}) - - -@node_utils.to_nodegroup('nodegroup_water_tap', singleton=False, type='GeometryNodeTree') -def nodegroup_water_tap(nw: NodeWrangler): - # Code generated using version 2.6.5 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloatDistance', 'base_width', U(0.2,0.3)), - ('NodeSocketFloat', 'tap_head', U(0.7,1.1)), - ('NodeSocketFloat', 'roation_z',U(5.5,7.0)), - ('NodeSocketFloat', 'tap_height', U(0.5,1)), - ('NodeSocketFloatDistance', 'base_radius', U(0.0,0.1)), - ('NodeSocketBool', 'Switch',True if U()>0.5 else False), - ('NodeSocketFloat', 'Y', U(-0.5, -0.06)), - ('NodeSocketBool', 'hand_type', True if U()>0.2 else False), - ('NodeSocketFloat', 'hands_length_x', U(0.750,1.25)), - ('NodeSocketFloat', 'hands_length_Y', U(0.950, 1.550)), - ('NodeSocketBool', 'one_side', True if U()>0.5 else False), - ('NodeSocketBool', 'different_type', 
True if U()>0.8 else False), - ('NodeSocketBool', 'length_one_side', True if U()>0.8 else False)]) - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketMaterial', 'Tap', None)]) - curve_circle = nw.new_node(Nodes.CurveCircle, input_kwargs={'Radius': 0.0500}) - - fill_curve_1 = nw.new_node(Nodes.FillCurve, input_kwargs={'Curve': curve_circle.outputs["Curve"]}) - - extrude_mesh_1 = nw.new_node(Nodes.ExtrudeMesh, input_kwargs={'Mesh': fill_curve_1, 'Offset Scale': 0.1500}) - - quadrilateral = nw.new_node('GeometryNodeCurvePrimitiveQuadrilateral', - input_kwargs={'Width': 0.2000, 'Height': 0.7000}) - - fillet_curve = nw.new_node('GeometryNodeFilletCurve', - input_kwargs={'Curve': quadrilateral, 'Count': 19, 'Radius': 0.1000}, - attrs={'mode': 'POLY'}) - - fill_curve = nw.new_node(Nodes.FillCurve, input_kwargs={'Curve': fillet_curve}) - - extrude_mesh = nw.new_node(Nodes.ExtrudeMesh, input_kwargs={'Mesh': fill_curve, 'Offset Scale': 0.0500}) - - curve_line = nw.new_node(Nodes.CurveLine, input_kwargs={'End': (0.0000, 0.0000, 0.6000)}) - - curve_circle_1 = nw.new_node(Nodes.CurveCircle, input_kwargs={'Radius': 0.0300}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': curve_line, 'Profile Curve': curve_circle_1.outputs["Curve"]}) - - curve_circle_2 = nw.new_node(Nodes.CurveCircle, input_kwargs={'Radius': 0.2000}) - - transform_geometry = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': curve_circle_2.outputs["Curve"], 'Translation': (0.0000, 0.2000, 0.0000)}) - - transform_geometry_1 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': transform_geometry, 'Rotation': (-1.5708, 1.5708, 0.0000), 'Scale': (1.0000, 0.7000, 1.0000)}) - - combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': 0.2000, 'Y': group_input.outputs["Y"]}) - - bezier_segment = nw.new_node(Nodes.CurveBezierSegment, - input_kwargs={'Resolution': 177, 'Start': (0.0000, 0.0000, 0.0000), 'Start Handle': (0.0000, 1.2000, 0.0000), 'End Handle': combine_xyz_3, 'End': (-0.0500, 0.1000, 0.0000)}) - - trim_curve = nw.new_node(Nodes.TrimCurve, input_kwargs={'Curve': bezier_segment, 3: 0.6625, 5: 3.0000}) - - transform_geometry_6 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': trim_curve, 'Rotation': (1.5708, 0.0000, 2.5220), 'Scale': (5.2000, 0.5000, 7.8000)}) - - curve_circle_3 = nw.new_node(Nodes.CurveCircle, input_kwargs={'Radius': 0.0300}) - - curve_to_mesh_2 = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': transform_geometry_6, 'Profile Curve': curve_circle_3.outputs["Curve"]}) - - switch = nw.new_node(Nodes.Switch, - input_kwargs={1: group_input.outputs["Switch"], 14: transform_geometry_1, 15: curve_to_mesh_2}) - - curve_to_mesh_1 = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': switch.outputs[6], 'Profile Curve': curve_circle_1.outputs["Curve"]}) - - position = nw.new_node(Nodes.InputPosition) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': position}) - - greater_than = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz.outputs["Z"], 1: -0.0100}, attrs={'operation': 'GREATER_THAN'}) - - switch_1 = nw.new_node(Nodes.Switch, - input_kwargs={0: group_input.outputs["Switch"], 2: greater_than, 3: 1.0000}, - attrs={'input_type': 'FLOAT'}) - - separate_geometry = nw.new_node(Nodes.SeparateGeometry, - input_kwargs={'Geometry': curve_to_mesh_1, 'Selection': switch_1.outputs["Output"]}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': 1.0000, 'Y': 1.0000, 'Z': group_input.outputs["tap_head"]}) - - 
switch_2 = nw.new_node(Nodes.Switch, - input_kwargs={0: group_input.outputs["Switch"], 8: combine_xyz, 9: (1.0000, 1.0000, 1.0000)}, - attrs={'input_type': 'VECTOR'}) - - transform_geometry_2 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': separate_geometry.outputs["Selection"], 'Translation': (0.0000, 0.0000, 0.6000), 'Scale': switch_2.outputs[3]}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [curve_to_mesh, transform_geometry_2]}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': group_input.outputs["roation_z"]}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': 1.0000, 'Y': 1.0000, 'Z': group_input.outputs["tap_height"]}) - - transform_geometry_5 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': join_geometry, 'Rotation': combine_xyz_1, 'Scale': combine_xyz_2}) - - handle = nw.new_node(nodegroup_handle().name) - - transform_geometry_4 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': handle, 'Translation': (0.0000, -0.2000, 0.0000), 'Rotation': (0.0000, 0.0000, 3.6652), 'Scale': (0.3000, 0.3000, 0.3000)}) - - transform_geometry_3 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': handle, 'Translation': (0.0000, 0.2000, 0.0000), 'Rotation': (0.0000, 0.0000, 2.6180), 'Scale': (0.3000, 0.3000, 0.3000)}) - - join_geometry_2 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [transform_geometry_4, transform_geometry_3]}) - - cylinder = nw.new_node('GeometryNodeMeshCylinder', - input_kwargs={'Vertices': 41, 'Side Segments': 39, 'Radius': 0.0300, 'Depth': 0.1000}) - - transform_geometry_7 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': cylinder.outputs["Mesh"], 'Translation': (0.0000, 0.0500, 0.1000), 'Rotation': (1.5708, 0.0000, 0.0000)}) - - switch_5 = nw.new_node(Nodes.Switch, input_kwargs={1: group_input.outputs["one_side"], 14: transform_geometry_7}) - - transform_geometry_8 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': cylinder.outputs["Mesh"], 'Translation': (0.0000, -0.0500, 0.1000), 'Rotation': (1.5708, 0.0000, 0.0000)}) - - join_geometry_3 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [switch_5.outputs[6], transform_geometry_8]}) - - cylinder_1 = nw.new_node('GeometryNodeMeshCylinder', - input_kwargs={'Vertices': 41, 'Side Segments': 39, 'Radius': 0.0050, 'Depth': 0.1000}) - - transform_geometry_9 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': cylinder_1.outputs["Mesh"], 'Translation': (0.0000, 0.0800, 0.1500), 'Scale': (1.0000, 1.0000, 1.1000)}) - - switch_4 = nw.new_node(Nodes.Switch, input_kwargs={1: group_input.outputs["one_side"], 14: transform_geometry_9}) - - transform_geometry_10 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': cylinder_1.outputs["Mesh"], 'Translation': (0.0000, -0.0800, 0.1500), 'Rotation': (0.0000, 0.0000, 0.0855), 'Scale': (1.0000, 1.0000, 1.1000)}) - - transform_geometry_17 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': transform_geometry_10, 'Translation': (0.0000, -0.0100, -0.0050), 'Scale': (4.1000, 1.0000, 1.0000)}) - - switch_8 = nw.new_node(Nodes.Switch, - input_kwargs={1: group_input.outputs["length_one_side"], 14: transform_geometry_10, 15: transform_geometry_17}) - - switch_7 = nw.new_node(Nodes.Switch, - input_kwargs={1: group_input.outputs["one_side"], 14: transform_geometry_10, 15: switch_8.outputs[6]}) - - join_geometry_4 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [switch_4.outputs[6], switch_7.outputs[6]]}) - - join_geometry_5 = 
nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [join_geometry_3, join_geometry_4]}) - - combine_xyz_4 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': group_input.outputs["hands_length_x"], 'Y': group_input.outputs["hands_length_Y"], 'Z': 1.0000}) - - transform_geometry_11 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': join_geometry_5, 'Scale': combine_xyz_4}) - - switch_3 = nw.new_node(Nodes.Switch, - input_kwargs={1: group_input.outputs["hand_type"], 14: join_geometry_2, 15: transform_geometry_11}) - - curve_circle = nw.new_node(Nodes.CurveCircle, input_kwargs={'Radius': 0.0500}) - - fill_curve = nw.new_node(Nodes.FillCurve, input_kwargs={'Curve': curve_circle.outputs["Curve"]}) - - extrude_mesh = nw.new_node(Nodes.ExtrudeMesh, input_kwargs={'Mesh': fill_curve, 'Offset Scale': 0.1500}) - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [transform_geometry_5, switch_3.outputs[6], extrude_mesh.outputs["Mesh"]]}) - - bezier_segment_1 = nw.new_node(Nodes.CurveBezierSegment, - input_kwargs={'Resolution': 54, 'Start': (0.0000, 0.0000, 0.0000), 'Start Handle': (0.0000, 0.0000, 0.7000), 'End Handle': (0.2000, 0.0000, 0.7000), 'End': (1.0000, 0.0000, 0.9000)}) - - spline_parameter = nw.new_node(Nodes.SplineParameter) - - float_curve = nw.new_node(Nodes.FloatCurve, input_kwargs={'Value': spline_parameter.outputs["Factor"]}) - node_utils.assign_curve(float_curve.mapping.curves[0], [(0.0000, 0.9750), (0.6295, 0.4125), (1.0000, 0.1625)]) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: float_curve, 1: 1.3000}, attrs={'operation': 'MULTIPLY'}) - - set_curve_radius = nw.new_node(Nodes.SetCurveRadius, input_kwargs={'Curve': bezier_segment_1, 'Radius': multiply}) - - curve_circle_4 = nw.new_node(Nodes.CurveCircle, input_kwargs={'Radius': 0.1000}) - - curve_to_mesh_3 = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': set_curve_radius, 'Profile Curve': curve_circle_4.outputs["Curve"], 'Fill Caps': True}) - - position_1 = nw.new_node(Nodes.InputPosition) - - separate_xyz_1 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': position_1}) - - map_range = nw.new_node(Nodes.MapRange, input_kwargs={'Value': separate_xyz_1.outputs["X"], 1: 0.2000, 3: 1.0000, 4: 2.5000}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_1.outputs["Y"], 1: map_range.outputs["Result"]}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz_5 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': separate_xyz_1.outputs["X"], 'Y': multiply_1, 'Z': separate_xyz_1.outputs["Z"]}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': curve_to_mesh_3, 'Position': combine_xyz_5, 'Offset': (0.0000, 0.0000, 0.0000)}) - - subdivision_surface = nw.new_node(Nodes.SubdivisionSurface, input_kwargs={'Mesh': set_position, 'Level': 1}) - - set_shade_smooth = nw.new_node(Nodes.SetShadeSmooth, input_kwargs={'Geometry': subdivision_surface}) - - transform_geometry_12 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': set_shade_smooth, 'Translation': (0.0000, 0.0000, 0.1000), 'Rotation': (0.0000, 0.0000, 0.6807), 'Scale': (0.4000, 0.4000, 0.3000)}) - - curve_circle_5 = nw.new_node(Nodes.CurveCircle, input_kwargs={'Resolution': 307, 'Radius': 0.0550}) - - fill_curve_2 = nw.new_node(Nodes.FillCurve, input_kwargs={'Curve': curve_circle_5.outputs["Curve"]}) - - extrude_mesh_2 = nw.new_node(Nodes.ExtrudeMesh, input_kwargs={'Mesh': fill_curve_2, 'Offset Scale': 0.1500}) - - cylinder_2 = nw.new_node('GeometryNodeMeshCylinder', 
input_kwargs={'Vertices': 100, 'Radius': 0.0100, 'Depth': 0.7000}) - - set_position_1 = nw.new_node(Nodes.SetPosition, input_kwargs={'Geometry': cylinder_2.outputs["Mesh"]}) - - transform_geometry_13 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': set_position_1, 'Translation': (0.3000, 0.0000, 0.2500), 'Rotation': (0.0000, -2.0420, 0.0000), 'Scale': (1.7000, 3.1000, 1.0000)}) - - cylinder_3 = nw.new_node('GeometryNodeMeshCylinder', input_kwargs={'Vertices': 318, 'Radius': 0.0200, 'Depth': 0.0300}) - - transform_geometry_14 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': cylinder_3.outputs["Mesh"], 'Translation': (0.5950, 0.0000, 0.3800)}) - - join_geometry_7 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [transform_geometry_13, transform_geometry_14]}) - - transform_geometry_15 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': join_geometry_7, 'Scale': (0.9000, 1.0000, 1.0000)}) - - join_geometry_8 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [transform_geometry_12, extrude_mesh_2.outputs["Mesh"], transform_geometry_15]}) - - transform_geometry_16 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': join_geometry_8, 'Rotation': (0.0000, 0.0000, 3.1416)}) - - switch_6 = nw.new_node(Nodes.Switch, - input_kwargs={1: group_input.outputs["different_type"], 14: join_geometry_1, 15: transform_geometry_16}) - - quadrilateral = nw.new_node('GeometryNodeCurvePrimitiveQuadrilateral', - input_kwargs={'Width': group_input.outputs["base_width"], 'Height': 0.7000}) - - fillet_curve = nw.new_node(Nodes.FilletCurve, - input_kwargs={'Curve': quadrilateral, 'Count': 19, 'Radius': group_input.outputs["base_radius"]}, - attrs={'mode': 'POLY'}) - - fill_curve_1 = nw.new_node(Nodes.FillCurve, input_kwargs={'Curve': fillet_curve}) - - extrude_mesh_1 = nw.new_node(Nodes.ExtrudeMesh, input_kwargs={'Mesh': fill_curve_1, 'Offset Scale': 0.0500}) - - join_geometry_6 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [switch_6.outputs[6], extrude_mesh_1.outputs["Mesh"]]}) - - set_material = nw.new_node(Nodes.SetMaterial, input_kwargs={ - 'Geometry': join_geometry_6, - 'Material': group_input.outputs["Tap"] - }) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': set_material}, attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_sink_geometry', singleton=False, type='GeometryNodeTree') -def nodegroup_sink_geometry(nw: NodeWrangler): - # Code generated using version 2.6.5 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketFloatDistance', 'Width', 2.0000), - ('NodeSocketFloatDistance', 'Depth', 2.0000), ('NodeSocketFloat', 'Curvature', 0.9500), - ('NodeSocketFloat', 'Upper Height', 1.0000), ('NodeSocketFloat', 'Lower Height', -0.0500), - ('NodeSocketFloatDistance', 'HoleRadius', 0.1000), ('NodeSocketFloat', 'Margin', 0.5000), - ('NodeSocketFloat', 'WaterTapMargin', 0.5000), - ('NodeSocketMaterial', 'Tap', None), - ('NodeSocketMaterial', 'Sink', None),]) - - reroute_3 = nw.new_node(Nodes.Reroute, input_kwargs={'Input': group_input.outputs["Depth"]}) - - reroute_2 = nw.new_node(Nodes.Reroute, input_kwargs={'Input': group_input.outputs["Width"]}) - - quadrilateral = nw.new_node('GeometryNodeCurvePrimitiveQuadrilateral', - input_kwargs={'Width': reroute_3, 'Height': reroute_2}) - - minimum = nw.new_node(Nodes.Math, input_kwargs={0: reroute_3, 1: reroute_2}, attrs={'operation': 'MINIMUM'}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: minimum, 1: 0.1000}, 
attrs={'operation': 'MULTIPLY'}) - - # inside of sink curve - sink_interior_border = nw.new_node('GeometryNodeFilletCurve', - input_kwargs={'Curve': quadrilateral, 'Count': 50, 'Radius': multiply}, - attrs={'mode': 'POLY'}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={ - 'X': group_input.outputs["Curvature"], - 'Y': group_input.outputs["Curvature"] - }) - - transform_1 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': sink_interior_border, 'Scale': combine_xyz_1}) - - curve_circle = nw.new_node(Nodes.CurveCircle, input_kwargs={'Radius': group_input.outputs["HoleRadius"]}) - - join_geometry_4 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [transform_1, curve_circle.outputs["Curve"]]}) - - fill_curve_1 = nw.new_node(Nodes.FillCurve, input_kwargs={'Curve': join_geometry_4}) - - #fill_curve_1 = tagging.tag_nodegroup(nw, fill_curve_1, t.Subpart.SupportSurface) - - reroute = nw.new_node(Nodes.Reroute, input_kwargs={'Input': group_input.outputs["Lower Height"]}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': reroute}) - - transform_2 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': fill_curve_1, 'Translation': combine_xyz_2}) - - extrude_mesh_2 = nw.new_node(Nodes.ExtrudeMesh, input_kwargs={ - 'Mesh': transform_2, - 'Offset Scale': -0.0100, - 'Individual': False - }) - - transform_5 = nw.new_node(Nodes.Transform, input_kwargs={ - 'Geometry': curve_circle.outputs["Curve"], - 'Scale': (0.7000, 0.7000, 1.0000) - }) - - join_geometry_6 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [curve_circle.outputs["Curve"], transform_5]}) - - fill_curve_4 = nw.new_node(Nodes.FillCurve, input_kwargs={'Curve': join_geometry_6}) - - add = nw.new_node(Nodes.Math, input_kwargs={0: reroute, 1: -0.0100}) - - combine_xyz_4 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': add}) - - transform_6 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': fill_curve_4, 'Translation': combine_xyz_4}) - - extrude_mesh_4 = nw.new_node(Nodes.ExtrudeMesh, input_kwargs={ - 'Mesh': transform_6, - 'Offset Scale': group_input.outputs["Lower Height"], - 'Individual': False - }) - - add_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Lower Height"], 1: -0.0100}) - - combine_xyz_6 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': add_1}) - - curve_line = nw.new_node(Nodes.CurveLine, input_kwargs={'End': combine_xyz_6}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, input_kwargs={ - 'Curve': curve_line, - 'Profile Curve': curve_circle.outputs["Curve"] - }) - - transform_7 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': curve_to_mesh, 'Translation': combine_xyz_2}) - - join_geometry_5 = nw.new_node(Nodes.JoinGeometry, input_kwargs={ - 'Geometry': [extrude_mesh_2.outputs["Mesh"], transform_2, extrude_mesh_4.outputs["Mesh"], transform_7] - }) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': sink_interior_border, 'Scale': (0.9900, 0.9900, 1.0000)}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [transform, sink_interior_border]}) - - fill_curve = nw.new_node(Nodes.FillCurve, input_kwargs={'Curve': join_geometry}) - - extrude_mesh_1 = nw.new_node(Nodes.ExtrudeMesh, input_kwargs={ - 'Mesh': fill_curve, - 'Offset Scale': group_input.outputs["Lower Height"] - }) - - position = nw.new_node(Nodes.InputPosition) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': position}) - - less_than = nw.new_node(Nodes.Math, input_kwargs={0: 
separate_xyz.outputs["Z"], 1: 0.0000}, - attrs={'operation': 'LESS_THAN'}) - - position_1 = nw.new_node(Nodes.InputPosition) - - separate_xyz_1 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': position_1}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_1.outputs["X"], 1: group_input.outputs["Curvature"]}, - attrs={'operation': 'MULTIPLY'}) - - multiply_2 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_1.outputs["Y"], 1: group_input.outputs["Curvature"]}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': multiply_1, 'Y': multiply_2, 'Z': separate_xyz_1.outputs["Z"]}) - - set_position = nw.new_node(Nodes.SetPosition, input_kwargs={ - 'Geometry': extrude_mesh_1.outputs["Mesh"], - 'Selection': less_than, - 'Position': combine_xyz - }) - - add_2 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Width"], 1: group_input.outputs["Margin"]}) - - add_3 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Depth"], 1: group_input.outputs["Margin"]}) - - add_4 = nw.new_node(Nodes.Math, input_kwargs={0: add_3, 1: group_input.outputs["WaterTapMargin"]}) - - quadrilateral_1 = nw.new_node('GeometryNodeCurvePrimitiveQuadrilateral', - input_kwargs={'Width': add_4, 'Height': add_2}) - - multiply_3 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["WaterTapMargin"], 1: -0.5000}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz_7 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply_3}) - - transform_8 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': quadrilateral_1, 'Translation': combine_xyz_7}) - - fillet_curve_1 = nw.new_node('GeometryNodeFilletCurve', - input_kwargs={'Curve': transform_8, 'Count': 10, 'Radius': multiply}, - attrs={'mode': 'POLY'}) - - join_geometry_2 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [sink_interior_border, fillet_curve_1]}) - - fill_curve_2 = nw.new_node(Nodes.FillCurve, input_kwargs={'Curve': join_geometry_2}) - - multiply_4 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Lower Height"], 1: -1.0000}, - attrs={'operation': 'MULTIPLY'}) - - add_5 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Upper Height"], 1: multiply_4}) - - extrude_mesh_3 = nw.new_node(Nodes.ExtrudeMesh, input_kwargs={'Mesh': fill_curve_2, 'Offset Scale': add_5}) - - reroute_1 = nw.new_node(Nodes.Reroute, input_kwargs={'Input': group_input.outputs["Lower Height"]}) - - combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': reroute_1}) - - transform_3 = nw.new_node(Nodes.Transform, input_kwargs={ - 'Geometry': extrude_mesh_3.outputs["Mesh"], - 'Translation': combine_xyz_3 - }) - - join_geometry_3 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': transform_3}) - - #watertap = nw.new_node(nodegroup_water_tap().name, input_kwargs={'Tap': group_input.outputs['Tap']}) - - add_6 = nw.new_node(Nodes.Math, input_kwargs={ - 0: group_input.outputs["Depth"], - 1: group_input.outputs["WaterTapMargin"] - }) - - multiply_5 = nw.new_node(Nodes.Math, input_kwargs={0: add_6, 1: -0.5000}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_8 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': multiply_5, 'Z': group_input.outputs["Upper Height"]}) - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, input_kwargs={ - 'Geometry': [join_geometry_5, set_position, join_geometry_3]#, transform_geometry] - }) - - set_material = nw.new_node(Nodes.SetMaterial, input_kwargs={ - 'Geometry': join_geometry_1, - 
'Material': group_input.outputs["Sink"]
-    })
-
-    add_7 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["WaterTapMargin"], 1: group_input.outputs["Margin"]})
-
-    divide = nw.new_node(Nodes.Math, input_kwargs={0: add_7, 1: 2.5600}, attrs={'operation': 'DIVIDE'})
-
-    combine_xyz_8 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': divide})
-
-    set_position_1 = nw.new_node(Nodes.SetPosition, input_kwargs={'Geometry': set_material, 'Offset': combine_xyz_8})
-
-    # region CREATE CUTTER (manually added by araistrick post-fact)
-
-    sink_interior_border_simplified = nw.new_node('GeometryNodeFilletCurve',
-        input_kwargs={
-            'Curve': quadrilateral, 'Count': 3, 'Radius': multiply
-        },
-        attrs={'mode': 'POLY'}
-    )
-
-    scaled_sink_interior_border = nw.new_node(Nodes.Transform, input_kwargs={
-        'Geometry': sink_interior_border_simplified,
-        'Scale': (1.01, 1.01, 1) #scale it up just a little to avoid zclip
-    })
-
-    fill_interior = nw.new_node(
-        Nodes.FillCurve,
-        input_kwargs={'Curve': scaled_sink_interior_border},
-        attrs={'mode': 'NGONS'}
-    )
-
-    extrude_amt = nw.scalar_add(
-        group_input.outputs["Lower Height"],
-        group_input.outputs["Upper Height"],
-        0.05
-    )
-    extrude = nw.new_node(Nodes.ExtrudeMesh, input_kwargs={
-        'Mesh': fill_interior,
-        'Offset Scale': extrude_amt
-    })
-
-    # same translation as set_position_1, to keep it in sync
-    setpos_move_cutter = nw.new_node(Nodes.SetPosition, input_kwargs={'Geometry': extrude, 'Offset': combine_xyz_8})
-
-    # endregion
-
-    group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={
-        'Geometry': set_position_1,
-        'Cutter': setpos_move_cutter
-    })
-
-
-
-def geometry_node_to_bbox(nw: NodeWrangler):
-    # Code generated using version 2.6.5 of the node_transpiler
-    group_input = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketGeometry', 'Geometry', None)])
-
-    bounding_box = nw.new_node(Nodes.BoundingBox, input_kwargs={'Geometry': group_input.outputs["Geometry"]})
-
-    transform_geometry = nw.new_node(Nodes.Transform,
-        input_kwargs={'Geometry': bounding_box, 'Scale': (0.100, 0.100, 0.1000)})
-
-    group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': transform_geometry}, attrs={'is_active_output': True})
\ No newline at end of file
diff --git a/infinigen/assets/table_decorations/utils.py b/infinigen/assets/table_decorations/utils.py
deleted file mode 100644
index 87ce885a7..000000000
--- a/infinigen/assets/table_decorations/utils.py
+++ /dev/null
@@ -1,324 +0,0 @@
-# Copyright (c) Princeton University.
-# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree.
- -# Authors: Yiming Zuo - - -import bpy -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface - -@node_utils.to_nodegroup('nodegroup_star_profile', singleton=False, type='GeometryNodeTree') -def nodegroup_star_profile(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketInt', 'Resolution', 64), - ('NodeSocketInt', 'Points', 64), - ('NodeSocketFloatDistance', 'Inner Radius', 0.9000)]) - - star = nw.new_node('GeometryNodeCurveStar', - input_kwargs={'Points': group_input.outputs["Points"], 'Inner Radius': group_input.outputs["Inner Radius"], 'Outer Radius': 1.0000}) - - resample_curve = nw.new_node(Nodes.ResampleCurve, - input_kwargs={'Curve': star.outputs["Curve"], 'Count': group_input.outputs["Resolution"]}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Curve': resample_curve}, attrs={'is_active_output': True}) - - -@node_utils.to_nodegroup('nodegroup_flip_index', singleton=False, type='GeometryNodeTree') -def nodegroup_flip_index(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - index = nw.new_node(Nodes.Index) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketInt', 'V Resolution', 0), - ('NodeSocketInt', 'U Resolution', 0)]) - - modulo = nw.new_node(Nodes.Math, - input_kwargs={0: index, 1: group_input.outputs["V Resolution"]}, - attrs={'operation': 'MODULO'}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: modulo, 1: group_input.outputs["U Resolution"]}, - attrs={'operation': 'MULTIPLY'}) - - divide = nw.new_node(Nodes.Math, - input_kwargs={0: index, 1: group_input.outputs["V Resolution"]}, - attrs={'operation': 'DIVIDE'}) - - floor = nw.new_node(Nodes.Math, input_kwargs={0: divide}, attrs={'operation': 'FLOOR'}) - - add = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: floor}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Index': add}, attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_cylinder_side', singleton=False, type='GeometryNodeTree') -def nodegroup_cylinder_side(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketInt', 'U Resolution', 32), - ('NodeSocketInt', 'V Resolution', 0)]) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["V Resolution"], 1: 1.0000}, - attrs={'operation': 'SUBTRACT'}) - - cylinder = nw.new_node('GeometryNodeMeshCylinder', - input_kwargs={'Vertices': group_input.outputs["U Resolution"], 'Side Segments': subtract}) - - store_named_attribute = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': cylinder.outputs["Mesh"], 'Name': 'uv_map', 3: cylinder.outputs["UV Map"]}, - attrs={'data_type': 'FLOAT_VECTOR', 'domain': 'CORNER'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': store_named_attribute, 'Top': cylinder.outputs["Top"], 'Side': cylinder.outputs["Side"], 'Bottom': cylinder.outputs["Bottom"]}, - attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_shifted_circle', singleton=False, type='GeometryNodeTree') -def nodegroup_shifted_circle(nw: NodeWrangler): - # Code generated using version 2.6.4 of 
the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketInt', 'Resolution', 32), - ('NodeSocketFloatDistance', 'Radius', 1.0000), - ('NodeSocketFloat', 'Z', 0.0000), - ('NodeSocketFloat', 'Rot Z', 0.0000)]) - - curve_circle_3 = nw.new_node(Nodes.CurveCircle, - input_kwargs={'Resolution': group_input.outputs["Resolution"], 'Radius': group_input.outputs["Radius"]}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': group_input.outputs["Z"]}) - - radians = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Rot Z"]}, attrs={'operation': 'RADIANS'}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': radians}) - - transform_3 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': curve_circle_3.outputs["Curve"], 'Translation': combine_xyz, 'Rotation': combine_xyz_1}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': transform_3}, attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_lofting', singleton=False, type='GeometryNodeTree') -def nodegroup_lofting(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Profile Curves', None), - ('NodeSocketInt', 'U Resolution', 32), - ('NodeSocketInt', 'V Resolution', 32), - ('NodeSocketBool', 'Use Nurb', False)]) - - cylinderside = nw.new_node(nodegroup_cylinder_side().name, - input_kwargs={'U Resolution': group_input.outputs["U Resolution"], 'V Resolution': group_input.outputs["V Resolution"]}) - - index = nw.new_node(Nodes.Index) - - evaluate_on_domain = nw.new_node(Nodes.EvaluateonDomain, input_kwargs={1: index}, attrs={'data_type': 'INT', 'domain': 'CURVE'}) - - equal = nw.new_node(Nodes.Compare, - input_kwargs={2: evaluate_on_domain.outputs[1]}, - attrs={'data_type': 'INT', 'operation': 'EQUAL'}) - - curve_line = nw.new_node(Nodes.CurveLine) - - domain_size = nw.new_node(Nodes.DomainSize, input_kwargs={'Geometry': group_input.outputs["Profile Curves"]}, attrs={'component': 'CURVE'}) - - resample_curve = nw.new_node(Nodes.ResampleCurve, input_kwargs={'Curve': curve_line, 'Count': domain_size.outputs["Spline Count"]}) - - instance_on_points_1 = nw.new_node(Nodes.InstanceOnPoints, - input_kwargs={'Points': group_input.outputs["Profile Curves"], 'Selection': equal, 'Instance': resample_curve}) - - realize_instances = nw.new_node(Nodes.RealizeInstances, input_kwargs={'Geometry': instance_on_points_1}) - - position = nw.new_node(Nodes.InputPosition) - - flipindex = nw.new_node(nodegroup_flip_index().name, - input_kwargs={'V Resolution': domain_size.outputs["Spline Count"], 'U Resolution': group_input.outputs["U Resolution"]}) - - sample_index_2 = nw.new_node(Nodes.SampleIndex, - input_kwargs={'Geometry': group_input.outputs["Profile Curves"], 3: position, 'Index': flipindex}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - set_position = nw.new_node(Nodes.SetPosition, input_kwargs={'Geometry': realize_instances, 'Position': sample_index_2.outputs[2]}) - - set_spline_type_1 = nw.new_node(Nodes.SplineType, input_kwargs={'Curve': set_position}, attrs={'spline_type': 'CATMULL_ROM'}) - - set_spline_type = nw.new_node(Nodes.SplineType, input_kwargs={'Curve': set_position}, attrs={'spline_type': 'NURBS'}) - - switch = nw.new_node(Nodes.Switch, - input_kwargs={1: group_input.outputs["Use Nurb"], 14: set_spline_type_1, 15: set_spline_type}) - - resample_curve_1 = nw.new_node(Nodes.ResampleCurve, input_kwargs={'Curve': 
switch.outputs[6], 'Count': group_input.outputs["V Resolution"]}) - - position_1 = nw.new_node(Nodes.InputPosition) - - flipindex_1 = nw.new_node(nodegroup_flip_index().name, - input_kwargs={'V Resolution': group_input.outputs["U Resolution"], 'U Resolution': group_input.outputs["V Resolution"]}) - - sample_index_3 = nw.new_node(Nodes.SampleIndex, - input_kwargs={'Geometry': resample_curve_1, 3: position_1, 'Index': flipindex_1}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - set_position_1 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': cylinderside.outputs["Geometry"], 'Position': sample_index_3.outputs[2]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_position_1, 'Top': cylinderside.outputs["Top"], 'Side': cylinderside.outputs["Side"], 'Bottom': cylinderside.outputs["Bottom"]}, - attrs={'is_active_output': True}) - - -@node_utils.to_nodegroup('nodegroup_lofting_poly', singleton=False, type='GeometryNodeTree') -def nodegroup_lofting_poly(nw: NodeWrangler): - # Code generated using version 2.6.5 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Profile Curves', None), - ('NodeSocketInt', 'U Resolution', 32), - ('NodeSocketInt', 'V Resolution', 32), - ('NodeSocketBool', 'Use Nurb', False)]) - - reroute_2 = nw.new_node(Nodes.Reroute, input_kwargs={'Input': group_input.outputs["V Resolution"]}) - - cylinderside_001 = nw.new_node(nodegroup_cylinder_side().name, - input_kwargs={'U Resolution': group_input.outputs["U Resolution"], 'V Resolution': reroute_2}) - - index = nw.new_node(Nodes.Index) - - evaluate_on_domain = nw.new_node(Nodes.EvaluateonDomain, input_kwargs={1: index}, attrs={'domain': 'CURVE', 'data_type': 'INT'}) - - equal = nw.new_node(Nodes.Compare, - input_kwargs={2: evaluate_on_domain.outputs[1]}, - attrs={'operation': 'EQUAL', 'data_type': 'INT'}) - - curve_line = nw.new_node(Nodes.CurveLine) - - domain_size = nw.new_node(Nodes.DomainSize, - input_kwargs={'Geometry': group_input.outputs["Profile Curves"]}, - attrs={'component': 'CURVE'}) - - resample_curve = nw.new_node(Nodes.ResampleCurve, input_kwargs={'Curve': curve_line, 'Count': domain_size.outputs["Spline Count"]}) - - instance_on_points_1 = nw.new_node(Nodes.InstanceOnPoints, - input_kwargs={'Points': group_input.outputs["Profile Curves"], 'Selection': equal, 'Instance': resample_curve}) - - realize_instances = nw.new_node(Nodes.RealizeInstances, input_kwargs={'Geometry': instance_on_points_1}) - - position = nw.new_node(Nodes.InputPosition) - - flipindex_001 = nw.new_node(nodegroup_flip_index().name, - input_kwargs={'V Resolution': domain_size.outputs["Spline Count"], 'U Resolution': group_input.outputs["U Resolution"]}) - - sample_index_2 = nw.new_node(Nodes.SampleIndex, - input_kwargs={'Geometry': group_input.outputs["Profile Curves"], 3: position, 'Index': flipindex_001}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - set_position = nw.new_node(Nodes.SetPosition, input_kwargs={'Geometry': realize_instances, 'Position': sample_index_2.outputs[2]}) - - set_spline_type_1 = nw.new_node(Nodes.SplineType, input_kwargs={'Curve': set_position}) - - set_spline_type = nw.new_node(Nodes.SplineType, input_kwargs={'Curve': set_position}, attrs={'spline_type': 'NURBS'}) - - switch = nw.new_node(Nodes.Switch, - input_kwargs={1: group_input.outputs["Use Nurb"], 14: set_spline_type_1, 15: set_spline_type}) - - resample_curve_1 = nw.new_node(Nodes.ResampleCurve, input_kwargs={'Curve': switch.outputs[6], 'Count': reroute_2}) - - 
position_1 = nw.new_node(Nodes.InputPosition) - - flipindex_001_1 = nw.new_node(nodegroup_flip_index().name, - input_kwargs={'V Resolution': group_input.outputs["U Resolution"], 'U Resolution': reroute_2}) - - sample_index_3 = nw.new_node(Nodes.SampleIndex, - input_kwargs={'Geometry': resample_curve_1, 3: position_1, 'Index': flipindex_001_1}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - set_position_1 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': cylinderside_001.outputs["Geometry"], 'Position': sample_index_3.outputs[2]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_position_1, 'Top': cylinderside_001.outputs["Top"], 'Side': cylinderside_001.outputs["Side"], 'Bottom': cylinderside_001.outputs["Bottom"]}, - attrs={'is_active_output': True}) - - -@node_utils.to_nodegroup('nodegroup_warp_around_curve', singleton=False, type='GeometryNodeTree') -def nodegroup_warp_around_curve(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketGeometry', 'Curve', None), - ('NodeSocketInt', 'Curve Resolution', 1024)]) - - add = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Curve Resolution"], 1: 1.0000}) - - resample_curve = nw.new_node(Nodes.ResampleCurve, input_kwargs={'Curve': group_input.outputs["Curve"], 'Count': add}) - - position_1 = nw.new_node(Nodes.InputPosition) - - position_2 = nw.new_node(Nodes.InputPosition) - - separate_xyz_3 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': position_2}) - - bounding_box = nw.new_node(Nodes.BoundingBox, input_kwargs={'Geometry': group_input.outputs["Geometry"]}) - - separate_xyz_1 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': bounding_box.outputs["Min"]}) - - separate_xyz_2 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': bounding_box.outputs["Max"]}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': separate_xyz_3.outputs["Z"], 1: separate_xyz_1.outputs["Z"], 2: separate_xyz_2.outputs["Z"]}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Curve Resolution"], 1: map_range.outputs["Result"]}, - attrs={'operation': 'MULTIPLY'}) - - round = nw.new_node(Nodes.Math, input_kwargs={0: multiply}, attrs={'operation': 'ROUND'}) - - sample_index_3 = nw.new_node(Nodes.SampleIndex, - input_kwargs={'Geometry': resample_curve, 3: position_1, 'Index': round}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - normal = nw.new_node(Nodes.InputNormal) - - sample_index_5 = nw.new_node(Nodes.SampleIndex, - input_kwargs={'Geometry': resample_curve, 3: normal, 'Index': round}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - position = nw.new_node(Nodes.InputPosition) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': position}) - - scale = nw.new_node(Nodes.VectorMath, - input_kwargs={0: sample_index_5.outputs[2], 'Scale': separate_xyz.outputs["X"]}, - attrs={'operation': 'SCALE'}) - - curve_tangent = nw.new_node(Nodes.CurveTangent) - - sample_index_4 = nw.new_node(Nodes.SampleIndex, - input_kwargs={'Geometry': resample_curve, 3: curve_tangent, 'Index': round}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - cross_product = nw.new_node(Nodes.VectorMath, - input_kwargs={0: sample_index_4.outputs[2], 1: sample_index_5.outputs[2]}, - attrs={'operation': 'CROSS_PRODUCT'}) - - scale_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: cross_product.outputs["Vector"], 'Scale': 
separate_xyz.outputs["Y"]}, - attrs={'operation': 'SCALE'}) - - add_1 = nw.new_node(Nodes.VectorMath, input_kwargs={0: scale.outputs["Vector"], 1: scale_1.outputs["Vector"]}) - - add_2 = nw.new_node(Nodes.VectorMath, input_kwargs={0: sample_index_3.outputs[2], 1: add_1.outputs["Vector"]}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 'Position': add_2.outputs["Vector"]}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': set_position}, attrs={'is_active_output': True}) diff --git a/infinigen/assets/table_decorations/vase.py b/infinigen/assets/table_decorations/vase.py deleted file mode 100644 index 437153ed5..000000000 --- a/infinigen/assets/table_decorations/vase.py +++ /dev/null @@ -1,301 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Yiming Zuo - -import bpy -import bpy -import mathutils -from numpy.random import uniform, normal, randint, choice, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface - -import infinigen.core.util.blender as butil - -from infinigen.core.util.math import FixedSeed -from infinigen.core.placement.factory import AssetFactory - -from infinigen.assets.table_decorations.utils import nodegroup_lofting, nodegroup_star_profile -from infinigen.assets.material_assignments import AssetList - -class VaseFactory(AssetFactory): - def __init__(self, factory_seed, coarse=False, dimensions=None): - super(VaseFactory, self).__init__(factory_seed, coarse=coarse) - - if dimensions is None: - z = uniform(0.17, 0.5) - x = z * uniform(0.3, 0.6) - dimensions = (x, x, z) - self.dimensions = dimensions - - with FixedSeed(factory_seed): - self.params = self.sample_parameters(dimensions) - self.material_params, self.scratch, self.edge_wear = self.get_material_params() - - self.params.update(self.material_params) - - def get_material_params(self): - material_assignments = AssetList['VaseFactory']() - params = { - 'Material': material_assignments['surface'].assign_material(), - } - wrapped_params = { - k: surface.shaderfunc_to_material(v) for k, v in params.items() - } - - scratch_prob, edge_wear_prob = material_assignments['wear_tear_prob'] - scratch, edge_wear = material_assignments['wear_tear'] - - is_scratch = uniform() < scratch_prob - is_edge_wear = uniform() < edge_wear_prob - if not is_scratch: - scratch = None - - if not is_edge_wear: - edge_wear = None - - return wrapped_params, scratch, edge_wear - - @staticmethod - def sample_parameters(dimensions): - # all in meters - if dimensions is None: - z = uniform(0.25, 0.40) - x = uniform(0.2, 0.4) * z - dimensions = (x, x, z) - - x, y, z = dimensions - - U_resolution = 64 - V_resolution = 64 - - neck_scale = uniform(0.2, 0.8) - - parameters = { - 'Profile Inner Radius': choice([1.0, uniform(0.8, 1.0)]), - 'Profile Star Points': randint(16, U_resolution // 2 + 1), - 'U_resolution': U_resolution, - 'V_resolution': V_resolution, - 'Height': z, - 'Diameter': x, - 'Top Scale': neck_scale * uniform(0.8, 1.2), - 'Neck Mid Position': uniform(0.7, 0.95), - 'Neck Position': 0.5 * neck_scale + 0.5 + uniform(-0.05, 0.05), - 'Neck Scale': neck_scale, - 'Shoulder Position': uniform(0.3, 0.7), - 'Shoulder Thickness': uniform(0.1, 0.25), - 'Foot Scale': 
uniform(0.4, 0.6), - 'Foot Height': uniform(0.01, 0.1), - 'Material': choice(['glass', 'ceramic']) - } - - return parameters - - def create_asset(self, **params): - bpy.ops.mesh.primitive_plane_add(size=2, enter_editmode=False, align='WORLD', location=(0, 0, 0), - scale=(1, 1, 1)) - obj = bpy.context.active_object - - surface.add_geomod(obj, geometry_vases, apply=True, input_kwargs=self.params) - butil.modify_mesh(obj, 'SOLIDIFY', apply=True, thickness=.002) - butil.modify_mesh(obj, 'SUBSURF', apply=True, levels=2, render_levels=2) - - return obj - - def finalize_assets(self, assets): - if self.scratch: - self.scratch.apply(assets) - if self.edge_wear: - self.edge_wear.apply(assets) - - -@node_utils.to_nodegroup('nodegroup_vase_profile', singleton=False, type='GeometryNodeTree') -def nodegroup_vase_profile(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketGeometry', 'Profile Curve', None), - ('NodeSocketFloat', 'Height', 0.0000), ('NodeSocketFloat', 'Diameter', 0.0000), - ('NodeSocketFloat', 'Top Scale', 0.0000), ('NodeSocketFloat', 'Neck Mid Position', 0.0000), - ('NodeSocketFloat', 'Neck Position', 0.5000), ('NodeSocketFloat', 'Neck Scale', 0.0000), - ('NodeSocketFloat', 'Shoulder Position', 0.0000), ('NodeSocketFloat', 'Shoulder Thickness', 0.0000), - ('NodeSocketFloat', 'Foot Scale', 0.0000), ('NodeSocketFloat', 'Foot Height', 0.0000)]) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': group_input.outputs["Height"]}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={ - 0: group_input.outputs["Top Scale"], - 1: group_input.outputs["Diameter"] - }, attrs={'operation': 'MULTIPLY'}) - - neck_top = nw.new_node(Nodes.Transform, input_kwargs={ - 'Geometry': group_input.outputs["Profile Curve"], - 'Translation': combine_xyz_1, - 'Scale': multiply - }) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={ - 0: group_input.outputs["Height"], - 1: group_input.outputs["Neck Position"] - }, attrs={'operation': 'MULTIPLY'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': multiply_1}) - - multiply_2 = nw.new_node(Nodes.Math, input_kwargs={ - 0: group_input.outputs["Diameter"], - 1: group_input.outputs["Neck Scale"] - }, attrs={'operation': 'MULTIPLY'}) - - neck = nw.new_node(Nodes.Transform, input_kwargs={ - 'Geometry': group_input.outputs["Profile Curve"], - 'Translation': combine_xyz, - 'Scale': multiply_2 - }) - - subtract = nw.new_node(Nodes.Math, input_kwargs={0: 1.0000, 1: group_input.outputs["Neck Position"]}, - attrs={'use_clamp': True, 'operation': 'SUBTRACT'}) - - multiply_add = nw.new_node(Nodes.Math, input_kwargs={ - 0: subtract, - 1: group_input.outputs["Neck Mid Position"], - 2: group_input.outputs["Neck Position"] - }, attrs={'operation': 'MULTIPLY_ADD'}) - - multiply_3 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_add, 1: group_input.outputs["Height"]}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': multiply_3}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Neck Scale"], 1: group_input.outputs["Top Scale"]}) - - divide = nw.new_node(Nodes.Math, input_kwargs={0: add, 1: 2.0000}, attrs={'operation': 'DIVIDE'}) - - multiply_4 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Diameter"], 1: divide}, - attrs={'operation': 'MULTIPLY'}) - - neck_middle = nw.new_node(Nodes.Transform, input_kwargs={ - 'Geometry': group_input.outputs["Profile 
Curve"], - 'Translation': combine_xyz_2, - 'Scale': multiply_4 - }) - - neck_geometry = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [neck, neck_middle, neck_top]}) - - map_range = nw.new_node(Nodes.MapRange, input_kwargs={ - 'Value': group_input.outputs["Shoulder Position"], - 3: group_input.outputs["Foot Height"], - 4: group_input.outputs["Neck Position"] - }) - - subtract_1 = nw.new_node(Nodes.Math, input_kwargs={ - 0: group_input.outputs["Neck Position"], - 1: group_input.outputs["Foot Height"] - }, attrs={'operation': 'SUBTRACT'}) - - multiply_5 = nw.new_node(Nodes.Math, - input_kwargs={0: subtract_1, 1: group_input.outputs["Shoulder Thickness"]}, - attrs={'operation': 'MULTIPLY'}) - - add_1 = nw.new_node(Nodes.Math, input_kwargs={0: map_range.outputs["Result"], 1: multiply_5}) - - minimum = nw.new_node(Nodes.Math, input_kwargs={0: add_1, 1: group_input.outputs["Neck Position"]}, - attrs={'operation': 'MINIMUM'}) - - multiply_6 = nw.new_node(Nodes.Math, input_kwargs={0: minimum, 1: group_input.outputs["Height"]}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': multiply_6}) - - body_top = nw.new_node(Nodes.Transform, input_kwargs={ - 'Geometry': group_input.outputs["Profile Curve"], - 'Translation': combine_xyz_3, - 'Scale': group_input.outputs["Diameter"] - }) - - subtract_2 = nw.new_node(Nodes.Math, input_kwargs={0: map_range.outputs["Result"], 1: multiply_5}, - attrs={'operation': 'SUBTRACT'}) - - maximum = nw.new_node(Nodes.Math, input_kwargs={0: subtract_2, 1: group_input.outputs["Foot Height"]}, - attrs={'operation': 'MAXIMUM'}) - - multiply_7 = nw.new_node(Nodes.Math, input_kwargs={0: maximum, 1: group_input.outputs["Height"]}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz_5 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': multiply_7}) - - body_bottom = nw.new_node(Nodes.Transform, input_kwargs={ - 'Geometry': group_input.outputs["Profile Curve"], - 'Translation': combine_xyz_5, - 'Scale': group_input.outputs["Diameter"] - }) - - body_geometry = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [body_bottom, body_top]}) - - multiply_8 = nw.new_node(Nodes.Math, input_kwargs={ - 0: group_input.outputs["Foot Height"], - 1: group_input.outputs["Height"] - }, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_4 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': multiply_8}) - - multiply_9 = nw.new_node(Nodes.Math, input_kwargs={ - 0: group_input.outputs["Diameter"], - 1: group_input.outputs["Foot Scale"] - }, attrs={'operation': 'MULTIPLY'}) - - foot_top = nw.new_node(Nodes.Transform, input_kwargs={ - 'Geometry': group_input, - 'Translation': combine_xyz_4, - 'Scale': multiply_9 - }) - - foot_bottom = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': group_input, 'Scale': multiply_9}) - - foot_geometry = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [foot_bottom, foot_top]}) - - join_geometry_2 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [foot_geometry, body_geometry, neck_geometry]}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': join_geometry_2}, - attrs={'is_active_output': True}) - - -def geometry_vases(nw: NodeWrangler, **kwargs): - # Code generated using version 2.6.4 of the node_transpiler - starprofile = nw.new_node(nodegroup_star_profile().name, input_kwargs={ - 'Resolution': kwargs['U_resolution'], - 'Points': kwargs['Profile Star Points'], - 'Inner Radius': kwargs['Profile Inner Radius'] - }) - - vaseprofile = 
nw.new_node(nodegroup_vase_profile().name, input_kwargs={ - 'Profile Curve': starprofile.outputs["Curve"], - 'Height': kwargs['Height'], - 'Diameter': kwargs['Diameter'], - 'Top Scale': kwargs['Top Scale'], - 'Neck Mid Position': kwargs['Neck Mid Position'], - 'Neck Position': kwargs['Neck Position'], - 'Neck Scale': kwargs['Neck Scale'], - 'Shoulder Position': kwargs['Shoulder Position'], - 'Shoulder Thickness': kwargs['Shoulder Thickness'], - 'Foot Scale': kwargs['Foot Scale'], - 'Foot Height': kwargs['Foot Height'] - }) - - lofting = nw.new_node(nodegroup_lofting().name, - input_kwargs={'Profile Curves': vaseprofile, 'U Resolution': 64, 'V Resolution': 64}) - - delete_geometry = nw.new_node(Nodes.DeleteGeometry, input_kwargs={ - 'Geometry': lofting.outputs["Geometry"], - 'Selection': lofting.outputs["Top"] - }) - - set_material = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': delete_geometry, 'Material': kwargs['Material']}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': set_material}, - attrs={'is_active_output': True}) - - diff --git a/infinigen/assets/tables/cocktail_table.py b/infinigen/assets/tables/cocktail_table.py deleted file mode 100644 index ec7e30f7d..000000000 --- a/infinigen/assets/tables/cocktail_table.py +++ /dev/null @@ -1,269 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: -# - Yiming Zuo: primary author -# - Alexander Raistrick: implement placeholder - -import bpy -import bpy -import mathutils -from numpy.random import uniform, normal, randint, choice - -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.surface import NoApply -from infinigen.core.util.color import color_category -from infinigen.core import surface -from infinigen.core import tagging, tags as t - -from infinigen.core.util.math import FixedSeed -from infinigen.core.placement.factory import AssetFactory - -from infinigen.assets.tables.table_utils import nodegroup_create_anchors, nodegroup_create_legs_and_strechers -from infinigen.assets.tables.table_top import nodegroup_generate_table_top - -from infinigen.assets.tables.legs.single_stand import nodegroup_generate_single_stand -from infinigen.assets.tables.legs.straight import nodegroup_generate_leg_straight -from infinigen.assets.tables.legs.wheeled import nodegroup_wheeled_leg - -from infinigen.assets.tables.strechers import nodegroup_strecher - -from infinigen.core.util.random import log_uniform -from infinigen.assets.material_assignments import AssetList - -@node_utils.to_nodegroup('geometry_create_legs', singleton=False, type='GeometryNodeTree') -def geometry_create_legs(nw: NodeWrangler, **kwargs): - - createanchors = nw.new_node(nodegroup_create_anchors().name, - input_kwargs={'Profile N-gon': kwargs['Leg Number'], 'Profile Width': kwargs['Leg Placement Top Relative Scale']*kwargs['Top Profile Width'], 'Profile Aspect Ratio': 1.0000}) - - if kwargs['Leg Style'] == "single_stand": - leg = nw.new_node(nodegroup_generate_single_stand(**kwargs).name, - input_kwargs={'Leg Height': kwargs['Leg Height'], - 'Leg Diameter': kwargs['Leg Diameter'], - 'Resolution': 64}) - - leg = nw.new_node(nodegroup_create_legs_and_strechers().name, - input_kwargs={'Anchors': createanchors, - 'Keep Legs': True, - 'Leg Instance': leg, - 'Table Height': kwargs['Top Height'], - 'Leg Bottom Relative Scale': 
kwargs['Leg Placement Bottom Relative Scale'], - 'Align Leg X rot': True - }) - - elif kwargs['Leg Style'] == "straight": - leg = nw.new_node(nodegroup_generate_leg_straight(**kwargs).name, - input_kwargs={'Leg Height': kwargs['Leg Height'], - 'Leg Diameter': kwargs['Leg Diameter'], - 'Resolution': 32, - 'N-gon': kwargs['Leg NGon'], - 'Fillet Ratio': 0.1}) - - strecher = nw.new_node(nodegroup_strecher().name, - input_kwargs={'Profile Width': kwargs['Leg Diameter'] * 0.5}) - - leg = nw.new_node(nodegroup_create_legs_and_strechers().name, - input_kwargs={ - 'Anchors': createanchors, - 'Keep Legs': True, - 'Leg Instance': leg, - 'Table Height': kwargs['Top Height'], - 'Strecher Instance': strecher, - 'Strecher Index Increment': kwargs['Strecher Increament'], - 'Strecher Relative Position': kwargs['Strecher Relative Pos'], - 'Leg Bottom Relative Scale': kwargs['Leg Placement Bottom Relative Scale'], - 'Align Leg X rot': True - }) - - elif kwargs['Leg Style'] == "wheeled": - leg = nw.new_node(nodegroup_wheeled_leg(**kwargs).name, - input_kwargs={ - 'Joint Height': kwargs['Leg Joint Height'], - 'Leg Diameter': kwargs['Leg Diameter'], - 'Top Height': kwargs['Top Height'], - 'Wheel Width': kwargs['Leg Wheel Width'], - 'Wheel Rotation': kwargs['Leg Wheel Rot'], - 'Pole Length': kwargs['Leg Pole Length'], - 'Leg Number': kwargs['Leg Pole Number'], - }) - - else: - raise NotImplementedError - - leg = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': leg, 'Material': kwargs['LegMaterial']}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': leg}, attrs={'is_active_output': True}) - -def geometry_assemble_table(nw: NodeWrangler, **kwargs): - # Code generated using version 2.6.4 of the node_transpiler - - generatetabletop = nw.new_node(nodegroup_generate_table_top().name, - input_kwargs={ - 'Thickness': kwargs['Top Thickness'], - 'N-gon': kwargs['Top Profile N-gon'], - 'Profile Width': kwargs['Top Profile Width'], - 'Aspect Ratio': kwargs['Top Profile Aspect Ratio'], - 'Fillet Ratio': kwargs['Top Profile Fillet Ratio'], - 'Fillet Radius Vertical': kwargs['Top Vertical Fillet Ratio'], - }) - - tabletop_instance = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': generatetabletop, - 'Translation': (0.0000, 0.0000, kwargs['Top Height'])}) - - tabletop_instance = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': tabletop_instance, 'Material': kwargs['TopMaterial']}) - - legs = nw.new_node(geometry_create_legs(**kwargs).name) - - join_geometry = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [tabletop_instance, legs]}) - - resample_curve = nw.new_node(Nodes.ResampleCurve, input_kwargs={'Curve': generatetabletop.outputs["Curve"]}) - fill_curve = nw.new_node(Nodes.FillCurve, input_kwargs={'Curve': resample_curve}) - - voff = kwargs['Top Height'] + kwargs['Top Thickness'] - extrude_mesh = nw.new_node(Nodes.ExtrudeMesh, input_kwargs={'Mesh': fill_curve, 'Offset Scale': -voff, 'Individual': False}) - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [extrude_mesh.outputs["Mesh"], fill_curve]}) - transform_geometry_1 = nw.new_node( - Nodes.Transform, input_kwargs={ - 'Geometry': join_geometry_1, 'Translation': (0, 0, voff) - } - ) - switch = nw.new_node(Nodes.Switch, input_kwargs={1: kwargs['is_placeholder'], 14: join_geometry, 15: transform_geometry_1}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': switch}, attrs={'is_active_output': True}) - - -class TableCocktailFactory(AssetFactory): - def 
__init__(self, factory_seed, coarse=False, dimensions=None): - super(TableCocktailFactory, self).__init__(factory_seed, coarse=coarse) - - self.dimensions = dimensions - - with FixedSeed(factory_seed): - self.params = self.sample_parameters(dimensions) - from infinigen.assets.clothes import blanket - from infinigen.assets.scatters.clothes import ClothesCover - # self.clothes_scatter = ClothesCover(factory_fn=blanket.BlanketFactory, width=log_uniform(.8, 1.2), - # size=uniform(.8, 1.2)) if uniform() < .3 else NoApply() - self.clothes_scatter = NoApply() - self.material_params, self.scratch, self.edge_wear = self.get_material_params() - - self.params.update(self.material_params) - - def get_material_params(self): - material_assignments = AssetList['TableCocktailFactory']() - params = { - "TopMaterial": material_assignments['top'].assign_material(), - "LegMaterial": material_assignments['leg'].assign_material(), - } - wrapped_params = { - k: surface.shaderfunc_to_material(v) for k, v in params.items() - } - - scratch_prob, edge_wear_prob = material_assignments['wear_tear_prob'] - scratch, edge_wear = material_assignments['wear_tear'] - - is_scratch = uniform() < scratch_prob - is_edge_wear = uniform() < edge_wear_prob - if not is_scratch: - scratch = None - - if not is_edge_wear: - edge_wear = None - - return wrapped_params, scratch, edge_wear - - @staticmethod - def sample_parameters(dimensions): - # all in meters - if dimensions is None: - x = uniform(0.5, 0.8) - z = uniform(1.0, 1.5) - dimensions = ( - x, x, z - ) - - x, y, z = dimensions - - NGon = choice([4, 32]) - if NGon >= 32: - round_table = True - else: - round_table = False - - leg_style = choice(['straight', 'single_stand']) - if leg_style == "single_stand": - leg_number = 1 - leg_diameter = uniform(0.7*x, 0.9*x) - - leg_curve_ctrl_pts = [(0.0, uniform(0.1, 0.2)), - (0.5, uniform(0.1, 0.2)), (0.9, uniform(0.2, 0.3)), (1.0, 1.0)] - - elif leg_style == "straight": - leg_diameter = uniform(0.05, 0.07) - - if round_table: - leg_number = choice([3, 4]) - else: - leg_number = NGon - - leg_curve_ctrl_pts = [(0.0, 1.0), (0.4, uniform(0.85, 0.95)), (1.0, uniform(0.4, 0.6))] - - else: - raise NotImplementedError - - top_thickness = uniform(0.02, 0.05) - - parameters = { - 'Top Profile N-gon': 32 if round_table else 4, - 'Top Profile Width': x if round_table else 1.414 * x, - 'Top Profile Aspect Ratio': 1.0, - 'Top Profile Fillet Ratio': 0.499 if round_table else uniform(0.0, 0.05), - 'Top Thickness': top_thickness, - 'Top Vertical Fillet Ratio': uniform(0.1, 0.3), - # 'Top Material': choice(['marble', 'tiled_wood', 'plastic', 'glass']), - 'Height': z, - 'Top Height': z - top_thickness, - 'Leg Number': leg_number, - 'Leg Style': leg_style, - 'Leg NGon': choice([4, 32]), - 'Leg Placement Top Relative Scale': 0.7, - 'Leg Placement Bottom Relative Scale': uniform(1.1, 1.3), - 'Leg Height': 1.0, - 'Leg Diameter': leg_diameter, - 'Leg Curve Control Points': leg_curve_ctrl_pts, - # 'Leg Material': choice(['metal', 'wood', 'glass']), - 'Strecher Relative Pos': uniform(0.2, 0.6), - 'Strecher Increament': choice([0, 1, 2]) - } - - return parameters - - def _execute_geonodes(self, is_placeholder): - - bpy.ops.mesh.primitive_plane_add( - size=2, enter_editmode=False, align='WORLD', location=(0, 0, 0), scale=(1, 1, 1)) - obj = bpy.context.active_object - - kwargs = {**self.params, 'is_placeholder': is_placeholder} - surface.add_geomod(obj, geometry_assemble_table, apply=True, input_kwargs=kwargs) - tagging.tag_system.relabel_obj(obj) - - return obj - - 
def create_placeholder(self, **kwargs) -> bpy.types.Object: - return self._execute_geonodes(is_placeholder=True) - - def create_asset(self, **_): - return self._execute_geonodes(is_placeholder=False) - - def finalize_assets(self, assets): - self.clothes_scatter.apply(assets) - if self.scratch: - self.scratch.apply(assets) - if self.edge_wear: - self.edge_wear.apply(assets) diff --git a/infinigen/assets/tables/dining_table.py b/infinigen/assets/tables/dining_table.py deleted file mode 100644 index 9c956b499..000000000 --- a/infinigen/assets/tables/dining_table.py +++ /dev/null @@ -1,312 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Yiming Zuo - -from collections.abc import Iterable - -import bpy -import bpy -import mathutils -import numpy as np -from numpy.random import uniform, normal, randint, choice - -# from infinigen.assets.materials import metal, metal_shader_list -# from infinigen.assets.materials.leather_and_fabrics import fabric -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.surface import NoApply -from infinigen.core.util.color import color_category -from infinigen.core import surface -from infinigen.core import tagging, tags as t - -from infinigen.core.util.math import FixedSeed -from infinigen.core.placement.factory import AssetFactory - -from infinigen.assets.tables.table_utils import nodegroup_create_anchors, nodegroup_create_legs_and_strechers -from infinigen.assets.tables.table_top import nodegroup_generate_table_top - -from infinigen.assets.tables.legs.single_stand import nodegroup_generate_single_stand -from infinigen.assets.tables.legs.straight import nodegroup_generate_leg_straight -from infinigen.assets.tables.legs.square import nodegroup_generate_leg_square - -from infinigen.assets.tables.strechers import nodegroup_strecher - -from infinigen.core.util.random import log_uniform -from infinigen.assets.material_assignments import AssetList - - -@node_utils.to_nodegroup('geometry_create_legs', singleton=False, type='GeometryNodeTree') -def geometry_create_legs(nw: NodeWrangler, **kwargs): - createanchors = nw.new_node(nodegroup_create_anchors().name, input_kwargs={ - 'Profile N-gon': kwargs['Leg Number'], - 'Profile Width': kwargs['Leg Placement Top Relative Scale'] * kwargs['Top Profile Width'], - 'Profile Aspect Ratio': kwargs['Top Profile Aspect Ratio'] - }) - - if kwargs['Leg Style'] == "single_stand": - leg = nw.new_node(nodegroup_generate_single_stand(**kwargs).name, input_kwargs={ - 'Leg Height': kwargs['Leg Height'], - 'Leg Diameter': kwargs['Leg Diameter'], - 'Resolution': 64 - }) - - leg = nw.new_node(nodegroup_create_legs_and_strechers().name, input_kwargs={ - 'Anchors': createanchors, - 'Keep Legs': True, - 'Leg Instance': leg, - 'Table Height': kwargs['Top Height'], - 'Leg Bottom Relative Scale': kwargs['Leg Placement Bottom Relative Scale'], - 'Align Leg X rot': True - }) - - elif kwargs['Leg Style'] == "straight": - leg = nw.new_node(nodegroup_generate_leg_straight(**kwargs).name, input_kwargs={ - 'Leg Height': kwargs['Leg Height'], - 'Leg Diameter': kwargs['Leg Diameter'], - 'Resolution': 32, - 'N-gon': kwargs['Leg NGon'], - 'Fillet Ratio': 0.1 - }) - - strecher = nw.new_node(nodegroup_strecher().name, - input_kwargs={'Profile Width': kwargs['Leg Diameter'] * 0.5}) - - leg = 
nw.new_node(nodegroup_create_legs_and_strechers().name, input_kwargs={ - 'Anchors': createanchors, - 'Keep Legs': True, - 'Leg Instance': leg, - 'Table Height': kwargs['Top Height'], - 'Strecher Instance': strecher, - 'Strecher Index Increment': kwargs['Strecher Increament'], - 'Strecher Relative Position': kwargs['Strecher Relative Pos'], - 'Leg Bottom Relative Scale': kwargs['Leg Placement Bottom Relative Scale'], - 'Align Leg X rot': True - }) - - elif kwargs['Leg Style'] == "square": - leg = nw.new_node(nodegroup_generate_leg_square(**kwargs).name, input_kwargs={ - 'Height': kwargs['Leg Height'], - 'Width': 0.707 * kwargs['Leg Placement Top Relative Scale'] * kwargs['Top Profile Width'] * kwargs[ - 'Top Profile Aspect Ratio'], - 'Has Bottom Connector': (kwargs['Strecher Increament'] > 0), - 'Profile Width': kwargs['Leg Diameter'] - }) - - leg = nw.new_node(nodegroup_create_legs_and_strechers().name, input_kwargs={ - 'Anchors': createanchors, - 'Keep Legs': True, - 'Leg Instance': leg, - 'Table Height': kwargs['Top Height'], - 'Leg Bottom Relative Scale': kwargs['Leg Placement Bottom Relative Scale'], - 'Align Leg X rot': True - }) - - else: - raise NotImplementedError - - leg = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': leg, 'Material': kwargs['LegMaterial']}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': leg}, - attrs={'is_active_output': True}) - - -def geometry_assemble_table(nw: NodeWrangler, **kwargs): - # Code generated using version 2.6.4 of the node_transpiler - - generatetabletop = nw.new_node(nodegroup_generate_table_top().name, input_kwargs={ - 'Thickness': kwargs['Top Thickness'], - 'N-gon': kwargs['Top Profile N-gon'], - 'Profile Width': kwargs['Top Profile Width'], - 'Aspect Ratio': kwargs['Top Profile Aspect Ratio'], - 'Fillet Ratio': kwargs['Top Profile Fillet Ratio'], - 'Fillet Radius Vertical': kwargs['Top Vertical Fillet Ratio'] - }) - - tabletop_instance = nw.new_node(Nodes.Transform, input_kwargs={ - 'Geometry': generatetabletop, - 'Translation': (0.0000, 0.0000, kwargs['Top Height']) - }) - - tabletop_instance = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': tabletop_instance, 'Material': kwargs['TopMaterial']}) - - legs = nw.new_node(geometry_create_legs(**kwargs).name) - - join_geometry = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [tabletop_instance, legs]}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': join_geometry}, - attrs={'is_active_output': True}) - - -class TableDiningFactory(AssetFactory): - def __init__(self, factory_seed, coarse=False, dimensions=None): - super(TableDiningFactory, self).__init__(factory_seed, coarse=coarse) - - self.dimensions = dimensions - - with FixedSeed(factory_seed): - self.params = self.sample_parameters(dimensions) - from infinigen.assets.clothes import blanket - - from infinigen.assets.scatters.clothes import ClothesCover - # self.clothes_scatter = ClothesCover(factory_fn=blanket.BlanketFactory, width=log_uniform(.8, 1.2), - # size=uniform(.8, 1.2)) if uniform() < .3 else NoApply() - self.clothes_scatter = NoApply() - self.material_params, self.scratch, self.edge_wear = self.get_material_params() - - self.params.update(self.material_params) - - def get_material_params(self): - material_assignments = AssetList['TableDiningFactory']() - params = { - "TopMaterial": material_assignments['top'].assign_material(), - "LegMaterial": material_assignments['leg'].assign_material(), - } - wrapped_params = { - k: 
surface.shaderfunc_to_material(v) for k, v in params.items() - } - - scratch_prob, edge_wear_prob = material_assignments['wear_tear_prob'] - scratch, edge_wear = material_assignments['wear_tear'] - - is_scratch = uniform() < scratch_prob - is_edge_wear = uniform() < edge_wear_prob - if not is_scratch: - scratch = None - - if not is_edge_wear: - edge_wear = None - - return wrapped_params, scratch, edge_wear - - - @staticmethod - def sample_parameters(dimensions): - - if dimensions is None: - - width = uniform(0.91, 1.16) - - if uniform() < 0.7: - # oblong - length = uniform(1.4, 2.8) - else: - # approx square - length = width * normal(1, 0.1) - - dimensions = ( - length, - width, - uniform(0.65, 0.85) - ) - - # all in meters - x, y, z = dimensions - - NGon = 4 - - leg_style = choice(['straight', 'single_stand', 'square'], p=[0.5, 0.1, 0.4]) - # leg_style = choice(['straight']) - - if leg_style == "single_stand": - leg_number = 2 - leg_diameter = uniform(0.22 * x, 0.28 * x) - - leg_curve_ctrl_pts = [(0.0, uniform(0.1, 0.2)), (0.5, uniform(0.1, 0.2)), (0.9, uniform(0.2, 0.3)), - (1.0, 1.0)] - - top_scale = uniform(0.6, 0.7) - bottom_scale = 1.0 - - elif leg_style == "square": - leg_number = 2 - leg_diameter = uniform(0.07, 0.10) - - leg_curve_ctrl_pts = None - - top_scale = 0.8 - bottom_scale = 1.0 - - elif leg_style == "straight": - leg_diameter = uniform(0.05, 0.07) - - leg_number = 4 - - leg_curve_ctrl_pts = [(0.0, 1.0), (0.4, uniform(0.85, 0.95)), (1.0, uniform(0.4, 0.6))] - - top_scale = 0.8 - bottom_scale = uniform(1.0, 1.2) - - else: - raise NotImplementedError - - top_thickness = uniform(0.03, 0.06) - - parameters = { - 'Top Profile N-gon': NGon, - 'Top Profile Width': 1.414 * x, - 'Top Profile Aspect Ratio': y / x, - 'Top Profile Fillet Ratio': uniform(0.0, 0.02), - 'Top Thickness': top_thickness, - 'Top Vertical Fillet Ratio': uniform(0.1, 0.3), - # 'Top Material': choice(['marble', 'tiled_wood', 'metal', 'fabric'], p=[.3, .3, .2, .2]), - 'Height': z, - 'Top Height': z - top_thickness, - 'Leg Number': leg_number, - 'Leg Style': leg_style, - 'Leg NGon': 4, - 'Leg Placement Top Relative Scale': top_scale, - 'Leg Placement Bottom Relative Scale': bottom_scale, - 'Leg Height': 1.0, - 'Leg Diameter': leg_diameter, - 'Leg Curve Control Points': leg_curve_ctrl_pts, - # 'Leg Material': choice(['metal', 'wood', 'glass', 'plastic']), - 'Strecher Relative Pos': uniform(0.2, 0.6), - 'Strecher Increament': choice([0, 1, 2]) - } - - return parameters - - def create_asset(self, **params): - bpy.ops.mesh.primitive_plane_add(size=2, enter_editmode=False, align='WORLD', location=(0, 0, 0), - scale=(1, 1, 1)) - obj = bpy.context.active_object - - # surface.add_geomod(obj, geometry_assemble_table, apply=False, input_kwargs=self.params) - surface.add_geomod(obj, geometry_assemble_table, apply=True, input_kwargs=self.params) - tagging.tag_system.relabel_obj(obj) - assert tagging.tagged_face_mask(obj, {t.Subpart.SupportSurface}).sum() != 0 - - return obj - - def finalize_assets(self, assets): - if self.scratch: - self.scratch.apply(assets) - if self.edge_wear: - self.edge_wear.apply(assets) - - #def finalize_assets(self, assets): - # self.clothes_scatter.apply(assets) - -class SideTableFactory(TableDiningFactory): - - def __init__(self, factory_seed, coarse=False, dimensions=None): - if dimensions is None: - w = 0.55 * normal(1, 0.05) - h = 0.95 * w * normal(1, 0.05) - dimensions = (w, w, h) - super().__init__(factory_seed, coarse=coarse, dimensions=dimensions) - -class 
CoffeeTableFactory(TableDiningFactory): - - def __init__(self, factory_seed, coarse=False, dimensions=None): - if dimensions is None: - dimensions = ( - uniform(1, 1.5), - uniform(0.6, 0.9), - uniform(0.4, 0.5) - ) - super().__init__(factory_seed, coarse=coarse, dimensions=dimensions) - diff --git a/infinigen/assets/tables/legs/single_stand.py b/infinigen/assets/tables/legs/single_stand.py deleted file mode 100644 index 1425e9190..000000000 --- a/infinigen/assets/tables/legs/single_stand.py +++ /dev/null @@ -1,34 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Yiming Zuo - - -import bpy -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface - -from infinigen.assets.tables.table_utils import nodegroup_n_gon_cylinder, nodegroup_generate_radius_curve - -@node_utils.to_nodegroup('nodegroup_generate_single_stand', singleton=False, type='GeometryNodeTree') -def nodegroup_generate_single_stand(nw: NodeWrangler, **kwargs): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Leg Height', 0.0000), - ('NodeSocketFloat', 'Leg Diameter', 1.0000), - ('NodeSocketInt', 'Resolution', 64)]) - - generateradiuscurve = nw.new_node(nodegroup_generate_radius_curve(kwargs['Leg Curve Control Points']).name, input_kwargs={'Resolution': group_input.outputs["Resolution"]}) - - ngoncylinder = nw.new_node(nodegroup_n_gon_cylinder().name, - input_kwargs={'Radius Curve': generateradiuscurve, 'Height': group_input.outputs["Leg Height"], 'N-gon': group_input.outputs["Resolution"], 'Profile Width': group_input.outputs["Leg Diameter"], 'Aspect Ratio': 1.0000, 'Fillet Ratio': 0.0000, 'Resolution': group_input.outputs["Resolution"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': ngoncylinder.outputs["Mesh"]}, - attrs={'is_active_output': True}) \ No newline at end of file diff --git a/infinigen/assets/tables/legs/square.py b/infinigen/assets/tables/legs/square.py deleted file mode 100644 index 7d99d728e..000000000 --- a/infinigen/assets/tables/legs/square.py +++ /dev/null @@ -1,74 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
- -# Authors: Yiming Zuo - - -import bpy -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface - -from infinigen.assets.tables.table_utils import nodegroup_n_gon_profile, nodegroup_merge_curve - -@node_utils.to_nodegroup('nodegroup_generate_leg_square', singleton=False, type='GeometryNodeTree') -def nodegroup_generate_leg_square(nw: NodeWrangler, **kwargs): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[ - ('NodeSocketFloat', 'Width', 0.0000), - ('NodeSocketFloat', 'Height', 0.0000), - ('NodeSocketFloatDistance', 'Fillet Radius', 0.0300), - ('NodeSocketBool', 'Has Bottom Connector', True), - ('NodeSocketInt', 'Profile N-gon', 4), - ('NodeSocketFloatDistance', 'Profile Width', 0.1000), - ('NodeSocketFloatDistance', 'Profile Aspect Ratio', 0.5000), - ('NodeSocketFloat', 'Profile Fillet Ratio', 0.1000)]) - - add = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Has Bottom Connector"], 1: 4.0000}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': group_input.outputs["Has Bottom Connector"], 3: 4.7124, 4: 6.2832}) - - arc = nw.new_node('GeometryNodeCurveArc', - input_kwargs={'Resolution': add, 'Radius': 0.7071, 'Sweep Angle': map_range.outputs["Result"]}) - - mergecurve = nw.new_node(nodegroup_merge_curve().name, input_kwargs={'Curve': arc.outputs["Curve"]}) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': group_input.outputs["Has Bottom Connector"], 3: 1.5708, 4: 3.1416}) - - set_curve_tilt = nw.new_node(Nodes.SetCurveTilt, input_kwargs={'Curve': mergecurve, 'Tilt': map_range_1.outputs["Result"]}) - - transform = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': set_curve_tilt, 'Rotation': (0.0000, 0.0000, -0.7854)}) - - transform_1 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': transform, 'Translation': (0.0000, 0.0000, -0.5000), 'Rotation': (1.5708, 0.0000, 0.0000)}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': group_input.outputs["Width"], 'Y': 1.0000, 'Z': group_input.outputs["Height"]}) - - transform_2 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': transform_1, 'Scale': combine_xyz}) - - set_curve_radius = nw.new_node(Nodes.SetCurveRadius, input_kwargs={'Curve': transform_2, 'Radius': 1.0000}) - - fillet_curve = nw.new_node(Nodes.FilletCurve, - input_kwargs={'Curve': set_curve_radius, 'Count': 8, 'Radius': group_input.outputs["Fillet Radius"], 'Limit Radius': True}, - attrs={'mode': 'POLY'}) - - ngonprofile = nw.new_node(nodegroup_n_gon_profile().name, - input_kwargs={'Profile N-gon': group_input.outputs["Profile N-gon"], 'Profile Width': group_input.outputs["Profile Width"], 'Profile Aspect Ratio': group_input.outputs["Profile Aspect Ratio"], 'Profile Fillet Ratio': group_input.outputs["Profile Fillet Ratio"]}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': fillet_curve, 'Profile Curve': ngonprofile, 'Fill Caps': True}) - - transform_3 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': curve_to_mesh, 'Rotation': (0.0000, 0.0000, 1.5708)}) - - set_shade_smooth = nw.new_node(Nodes.SetShadeSmooth, input_kwargs={'Geometry': transform_3, 'Shade Smooth': False}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': 
set_shade_smooth}, attrs={'is_active_output': True}) \ No newline at end of file diff --git a/infinigen/assets/tables/legs/straight.py b/infinigen/assets/tables/legs/straight.py deleted file mode 100644 index 0f619d1e5..000000000 --- a/infinigen/assets/tables/legs/straight.py +++ /dev/null @@ -1,36 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Yiming Zuo - - -import bpy -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface - -from infinigen.assets.tables.table_utils import nodegroup_n_gon_cylinder, nodegroup_generate_radius_curve - -@node_utils.to_nodegroup('nodegroup_generate_leg_straight', singleton=False, type='GeometryNodeTree') -def nodegroup_generate_leg_straight(nw: NodeWrangler, **kwargs): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Leg Height', 0.0000), - ('NodeSocketFloat', 'Leg Diameter', 1.0000), - ('NodeSocketInt', 'Resolution', 0), - ('NodeSocketInt', 'N-gon', 32), - ('NodeSocketFloat', 'Fillet Ratio', 0.0100)]) - - generateradiuscurve = nw.new_node(nodegroup_generate_radius_curve(kwargs['Leg Curve Control Points']).name, input_kwargs={'Resolution': group_input.outputs["Resolution"]}) - - ngoncylinder = nw.new_node(nodegroup_n_gon_cylinder().name, - input_kwargs={'Radius Curve': generateradiuscurve, 'Height': group_input.outputs["Leg Height"], 'N-gon': group_input.outputs["N-gon"], 'Profile Width': group_input.outputs["Leg Diameter"], 'Aspect Ratio': 1.0000, 'Fillet Ratio': group_input.outputs["Fillet Ratio"], 'Resolution': group_input.outputs["Resolution"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': ngoncylinder.outputs["Mesh"]}, - attrs={'is_active_output': True}) \ No newline at end of file diff --git a/infinigen/assets/tables/legs/wheeled.py b/infinigen/assets/tables/legs/wheeled.py deleted file mode 100644 index 874914234..000000000 --- a/infinigen/assets/tables/legs/wheeled.py +++ /dev/null @@ -1,217 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
- -# Authors: Yiming Zuo - - -import bpy -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface - -from infinigen.assets.tables.table_top import nodegroup_capped_cylinder -from infinigen.assets.tables.table_utils import nodegroup_arc_top, nodegroup_n_gon_cylinder, nodegroup_align_bottom_to_floor, nodegroup_create_anchors, nodegroup_create_legs_and_strechers - -@node_utils.to_nodegroup('nodegroup_chair_wheel', singleton=False, type='GeometryNodeTree') -def nodegroup_chair_wheel(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Arc Sweep Angle', 240.0000), - ('NodeSocketFloat', 'Wheel Width', 0.0000), - ('NodeSocketFloat', 'Wheel Rotation', 0.5000), - ('NodeSocketFloat', 'Pole Width', 0.0000), - ('NodeSocketFloat', 'Pole Aspect Ratio', 0.6000), - ('NodeSocketFloat', 'Pole Length', 3.0000)]) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': group_input.outputs["Wheel Width"]}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Wheel Width"], 1: -1.0000}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': multiply}) - - curve_line = nw.new_node(Nodes.CurveLine, input_kwargs={'Start': combine_xyz_1, 'End': combine_xyz_2}) - - value = nw.new_node(Nodes.Value) - value.outputs[0].default_value = 0.0200 - - value_1 = nw.new_node(Nodes.Value) - value_1.outputs[0].default_value = 0.5000 - - cappedcylinder = nw.new_node(nodegroup_capped_cylinder().name, - input_kwargs={'Thickness': value, 'Radius': value_1, 'Cap Relative Scale': 0.0100}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: value, 1: -1.0000}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': multiply_1}) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': cappedcylinder, 'Translation': combine_xyz, 'Rotation': (-1.5708, 0.0000, 0.0000)}) - - position = nw.new_node(Nodes.InputPosition) - - align_euler_to_vector = nw.new_node(Nodes.AlignEulerToVector, input_kwargs={'Vector': position}, attrs={'axis': 'Y'}) - - instance_on_points = nw.new_node(Nodes.InstanceOnPoints, - input_kwargs={'Points': curve_line, 'Instance': transform, 'Rotation': align_euler_to_vector}) - - add = nw.new_node(Nodes.Math, input_kwargs={0: value_1, 1: 0.0800}) - - arctop = nw.new_node(nodegroup_arc_top().name, - input_kwargs={'Diameter': add, 'Sweep Angle': group_input.outputs["Arc Sweep Angle"]}) - - multiply_2 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Wheel Width"], 1: 2.0000}, - attrs={'operation': 'MULTIPLY'}) - - quadrilateral = nw.new_node('GeometryNodeCurvePrimitiveQuadrilateral', input_kwargs={'Width': multiply_2, 'Height': 0.0200}) - - fillet_curve = nw.new_node('GeometryNodeFilletCurve', - input_kwargs={'Curve': quadrilateral, 'Count': 4, 'Radius': 0.0300, 'Limit Radius': True}, - attrs={'mode': 'POLY'}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, input_kwargs={'Curve': arctop, 'Profile Curve': fillet_curve, 'Fill Caps': True}) - - multiply_3 = nw.new_node(Nodes.Math, input_kwargs={0: value_1, 1: 0.1000}, attrs={'operation': 'MULTIPLY'}) - - multiply_4 = nw.new_node(Nodes.Math, input_kwargs={0: value_1, 1: 
0.4000}, attrs={'operation': 'MULTIPLY'}) - - cylinder = nw.new_node('GeometryNodeMeshCylinder', - input_kwargs={'Side Segments': 8, 'Fill Segments': 4, 'Radius': multiply_3, 'Depth': multiply_4}) - - multiply_5 = nw.new_node(Nodes.Math, input_kwargs={0: value_1, 1: 0.4400}, attrs={'operation': 'MULTIPLY'}) - - multiply_6 = nw.new_node(Nodes.Math, input_kwargs={0: value_1, 1: 0.4500}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply_5, 'Z': multiply_6}) - - transform_2 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': cylinder.outputs["Mesh"], 'Translation': combine_xyz_3}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [instance_on_points, curve_to_mesh, transform_2]}) - - multiply_7 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_5, 1: -1.0000}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_4 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply_7}) - - transform_6 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': join_geometry, 'Translation': combine_xyz_4}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Pole Length"], 1: 0.1500}, - attrs={'operation': 'SUBTRACT'}) - - multiply_add = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Pole Width"], 1: -0.3535, 2: -0.3000}, - attrs={'operation': 'MULTIPLY_ADD'}) - - combine_xyz_5 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': subtract, 'Z': multiply_add}) - - radians = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Wheel Rotation"]}, attrs={'operation': 'RADIANS'}) - - combine_xyz_6 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': radians}) - - transform_4 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': transform_6, 'Translation': combine_xyz_5, 'Rotation': combine_xyz_6}) - - curve_line_1 = nw.new_node(Nodes.CurveLine, input_kwargs={'Start': (1.0000, 0.0000, -1.0000), 'End': (1.0000, 0.0000, 1.0000)}) - - ngoncylinder = nw.new_node(nodegroup_n_gon_cylinder().name, - input_kwargs={'Radius Curve': curve_line_1, 'Height': group_input.outputs["Pole Length"], 'N-gon': 4, 'Profile Width': group_input.outputs["Pole Width"], 'Aspect Ratio': group_input.outputs["Pole Aspect Ratio"], 'Fillet Ratio': 0.1500, 'Resolution': 32}) - - transform_3 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': ngoncylinder.outputs["Mesh"], 'Rotation': (0.0000, -1.5708, 0.0000)}) - - subdivision_surface_1 = nw.new_node(Nodes.SubdivisionSurface, input_kwargs={'Mesh': transform_3, 'Level': 0}) - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [transform_4, subdivision_surface_1]}) - - value_2 = nw.new_node(Nodes.Value) - value_2.outputs[0].default_value = 0.1500 - - transform_geometry = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': join_geometry_1, 'Scale': value_2}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': transform_geometry}, attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_wheeled_leg', singleton=False, type='GeometryNodeTree') -def nodegroup_wheeled_leg(nw: NodeWrangler, **kwargs): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Joint Height', 0.0000), - ('NodeSocketFloat', 'Leg Diameter', 0.0000), - ('NodeSocketFloat', 'Top Height', 0.0000), - ('NodeSocketFloat', 'Arc Sweep Angle', 240.0000), - ('NodeSocketFloat', 'Wheel Width', 0.1300), - ('NodeSocketFloat', 'Wheel 
Rotation', 0.5000), - ('NodeSocketFloat', 'Pole Length', 1.8000), - ('NodeSocketInt', 'Leg Number', 5)]) - - value_1 = nw.new_node(Nodes.Value) - value_1.outputs[0].default_value = 0.0010 - - createanchors = nw.new_node(nodegroup_create_anchors().name, - input_kwargs={'Profile N-gon': group_input.outputs["Leg Number"], 'Profile Width': value_1, 'Profile Aspect Ratio': 1.0000}) - - chair_wheel = nw.new_node(nodegroup_chair_wheel().name, - input_kwargs={'Arc Sweep Angle': group_input.outputs["Arc Sweep Angle"], 'Wheel Width': group_input.outputs["Wheel Width"], 'Wheel Rotation': group_input.outputs["Wheel Rotation"], 'Pole Width': 0.5000, 'Pole Length': group_input.outputs["Pole Length"]}) - - transform_geometry = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': chair_wheel, 'Rotation': (0.0000, 1.5708, 0.0000)}) - - divide = nw.new_node(Nodes.Math, input_kwargs={0: 2.0000, 1: value_1}, attrs={'operation': 'DIVIDE'}) - - createlegsandstrechers = nw.new_node(nodegroup_create_legs_and_strechers().name, - input_kwargs={'Anchors': createanchors, 'Keep Legs': True, 'Leg Instance': transform_geometry, 'Table Height': 0.0250, 'Leg Bottom Relative Scale': divide, 'Strecher Index Increment': 1, 'Strecher Relative Position': 1.0000, 'Leg Bottom Offset': 0.0250, 'Align Leg X rot': True}) - - alignbottomtofloor = nw.new_node(nodegroup_align_bottom_to_floor().name, input_kwargs={'Geometry': createlegsandstrechers}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Leg Diameter"]}, attrs={'operation': 'MULTIPLY'}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Joint Height"], 1: alignbottomtofloor.outputs["Offset"]}, - attrs={'operation': 'SUBTRACT'}) - - cylinder = nw.new_node('GeometryNodeMeshCylinder', input_kwargs={'Vertices': 64, 'Radius': multiply, 'Depth': subtract}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: subtract}, attrs={'operation': 'MULTIPLY'}) - - add = nw.new_node(Nodes.Math, input_kwargs={0: multiply_1, 1: alignbottomtofloor.outputs["Offset"]}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': add}) - - transform_geometry_2 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': cylinder.outputs["Mesh"], 'Translation': combine_xyz_1}) - - subtract_1 = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: 0.0025}, attrs={'operation': 'SUBTRACT'}) - - subtract_2 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Top Height"], 1: group_input.outputs["Joint Height"]}, - attrs={'operation': 'SUBTRACT'}) - - cylinder_1 = nw.new_node('GeometryNodeMeshCylinder', input_kwargs={'Vertices': 64, 'Radius': subtract_1, 'Depth': subtract_2}) - - multiply_2 = nw.new_node(Nodes.Math, input_kwargs={1: subtract_2}, attrs={'operation': 'MULTIPLY'}) - - subtract_3 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Top Height"], 1: multiply_2}, - attrs={'operation': 'SUBTRACT'}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': subtract_3}) - - transform_geometry_3 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': cylinder_1.outputs["Mesh"], 'Translation': combine_xyz_2}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [alignbottomtofloor.outputs["Geometry"], transform_geometry_2, transform_geometry_3]}) - - # multiply_3 = nw.new_node(Nodes.Math, - # input_kwargs={0: group_input.outputs["Top Height"], 1: -1.0000}, - # attrs={'operation': 'MULTIPLY'}) - - # combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': 
multiply_3}) - - # transform_geometry_4 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': join_geometry, 'Translation': combine_xyz_3}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': join_geometry}, attrs={'is_active_output': True}) \ No newline at end of file diff --git a/infinigen/assets/tables/lofting.py b/infinigen/assets/tables/lofting.py deleted file mode 100644 index 7156e4944..000000000 --- a/infinigen/assets/tables/lofting.py +++ /dev/null @@ -1,298 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Yiming Zuo - - -import bpy -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface - - - -@node_utils.to_nodegroup('nodegroup_flip_index', singleton=False, type='GeometryNodeTree') -def nodegroup_flip_index(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - index = nw.new_node(Nodes.Index) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketInt', 'V Resolution', 0), - ('NodeSocketInt', 'U Resolution', 0)]) - - modulo = nw.new_node(Nodes.Math, - input_kwargs={0: index, 1: group_input.outputs["V Resolution"]}, - attrs={'operation': 'MODULO'}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: modulo, 1: group_input.outputs["U Resolution"]}, - attrs={'operation': 'MULTIPLY'}) - - divide = nw.new_node(Nodes.Math, - input_kwargs={0: index, 1: group_input.outputs["V Resolution"]}, - attrs={'operation': 'DIVIDE'}) - - floor = nw.new_node(Nodes.Math, input_kwargs={0: divide}, attrs={'operation': 'FLOOR'}) - - add = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: floor}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Index': add}, attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_cylinder_side', singleton=False, type='GeometryNodeTree') -def nodegroup_cylinder_side(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketInt', 'U Resolution', 32), - ('NodeSocketInt', 'V Resolution', 0)]) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["V Resolution"], 1: 1.0000}, - attrs={'operation': 'SUBTRACT'}) - - cylinder = nw.new_node('GeometryNodeMeshCylinder', - input_kwargs={'Vertices': group_input.outputs["U Resolution"], 'Side Segments': subtract}) - - store_named_attribute = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': cylinder.outputs["Mesh"], 'Name': 'uv_map', 3: cylinder.outputs["UV Map"]}, - attrs={'domain': 'CORNER', 'data_type': 'FLOAT_VECTOR'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': store_named_attribute, 'Top': cylinder.outputs["Top"], 'Side': cylinder.outputs["Side"], 'Bottom': cylinder.outputs["Bottom"]}, - attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_shifted_circle', singleton=False, type='GeometryNodeTree') -def nodegroup_shifted_circle(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketInt', 'Resolution', 32), - ('NodeSocketFloatDistance', 'Radius', 1.0000), - 
('NodeSocketFloat', 'Z', 0.0000), - ('NodeSocketFloat', 'Rot Z', 0.0000)]) - - curve_circle_3 = nw.new_node(Nodes.CurveCircle, - input_kwargs={'Resolution': group_input.outputs["Resolution"], 'Radius': group_input.outputs["Radius"]}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': group_input.outputs["Z"]}) - - radians = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Rot Z"]}, attrs={'operation': 'RADIANS'}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': radians}) - - transform_3 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': curve_circle_3.outputs["Curve"], 'Translation': combine_xyz, 'Rotation': combine_xyz_1}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': transform_3}, attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_shifted_square', singleton=False, type='GeometryNodeTree') -def nodegroup_shifted_square(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketInt', 'Resolution', 10), - ('NodeSocketFloatDistance', 'Width', 1.0000), - ('NodeSocketFloat', 'Z', 0.0000), - ('NodeSocketFloat', 'Rot Z', 0.5000)]) - - quadrilateral = nw.new_node('GeometryNodeCurvePrimitiveQuadrilateral', - input_kwargs={'Width': group_input.outputs["Width"], 'Height': group_input.outputs["Width"]}) - - resample_curve = nw.new_node(Nodes.ResampleCurve, input_kwargs={'Curve': quadrilateral, 'Count': group_input.outputs["Resolution"]}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': group_input.outputs["Z"]}) - - radians = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Rot Z"]}, attrs={'operation': 'RADIANS'}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': radians}) - - transform_geometry = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': resample_curve, 'Translation': combine_xyz, 'Rotation': combine_xyz_1}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Curve': transform_geometry}, attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_lofting', singleton=False, type='GeometryNodeTree') -def nodegroup_lofting(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Profile Curves', None), - ('NodeSocketInt', 'U Resolution', 32), - ('NodeSocketInt', 'V Resolution', 32), - ('NodeSocketBool', 'Use Nurb', False)]) - - cylinderside = nw.new_node(nodegroup_cylinder_side().name, - input_kwargs={'U Resolution': group_input.outputs["U Resolution"], 'V Resolution': group_input}) - - index = nw.new_node(Nodes.Index) - - evaluate_on_domain = nw.new_node(Nodes.EvaluateonDomain, input_kwargs={1: index}, attrs={'domain': 'CURVE', 'data_type': 'INT'}) - - equal = nw.new_node(Nodes.Compare, - input_kwargs={2: evaluate_on_domain.outputs[1]}, - attrs={'data_type': 'INT', 'operation': 'EQUAL'}) - - curve_line = nw.new_node(Nodes.CurveLine) - - domain_size = nw.new_node(Nodes.DomainSize, input_kwargs={'Geometry': group_input}, attrs={'component': 'CURVE'}) - - resample_curve = nw.new_node(Nodes.ResampleCurve, input_kwargs={'Curve': curve_line, 'Count': domain_size.outputs["Spline Count"]}) - - instance_on_points_1 = nw.new_node(Nodes.InstanceOnPoints, - input_kwargs={'Points': group_input, 'Selection': equal, 'Instance': resample_curve}) - - realize_instances = nw.new_node(Nodes.RealizeInstances, 
input_kwargs={'Geometry': instance_on_points_1}) - - position = nw.new_node(Nodes.InputPosition) - - flipindex = nw.new_node(nodegroup_flip_index().name, - input_kwargs={'V Resolution': domain_size.outputs["Spline Count"], 'U Resolution': group_input.outputs["U Resolution"]}) - - sample_index_2 = nw.new_node(Nodes.SampleIndex, - input_kwargs={'Geometry': group_input, 3: position, 'Index': flipindex}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - set_position = nw.new_node(Nodes.SetPosition, input_kwargs={'Geometry': realize_instances, 'Position': sample_index_2.outputs[2]}) - - set_spline_type_1 = nw.new_node(Nodes.SplineType, input_kwargs={'Curve': set_position}, attrs={'spline_type': 'CATMULL_ROM'}) - - set_spline_type = nw.new_node(Nodes.SplineType, input_kwargs={'Curve': set_position}, attrs={'spline_type': 'NURBS'}) - - switch = nw.new_node(Nodes.Switch, - input_kwargs={1: group_input.outputs["Use Nurb"], 14: set_spline_type_1, 15: set_spline_type}) - - resample_curve_1 = nw.new_node(Nodes.ResampleCurve, input_kwargs={'Curve': switch.outputs[6], 'Count': group_input}) - - position_1 = nw.new_node(Nodes.InputPosition) - - flipindex_1 = nw.new_node(nodegroup_flip_index().name, - input_kwargs={'V Resolution': group_input.outputs["U Resolution"], 'U Resolution': group_input}) - - sample_index_3 = nw.new_node(Nodes.SampleIndex, - input_kwargs={'Geometry': resample_curve_1, 3: position_1, 'Index': flipindex_1}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - set_position_1 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': cylinderside.outputs["Geometry"], 'Position': sample_index_3.outputs[2]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_position_1, 'Top': cylinderside.outputs["Top"], 'Side': cylinderside.outputs["Side"], 'Bottom': cylinderside.outputs["Bottom"]}, - attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_warp_around_curve', singleton=False, type='GeometryNodeTree') -def nodegroup_warp_around_curve(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketGeometry', 'Curve', None), - ('NodeSocketInt', 'U Resolution', 32), - ('NodeSocketInt', 'V Resolution', 32), - ('NodeSocketFloat', 'Radius', 1.0000)]) - - resample_curve = nw.new_node(Nodes.ResampleCurve, - input_kwargs={'Curve': group_input.outputs["Curve"], 'Count': group_input.outputs["V Resolution"]}) - - position_1 = nw.new_node(Nodes.InputPosition) - - index = nw.new_node(Nodes.Index) - - divide = nw.new_node(Nodes.Math, - input_kwargs={0: index, 1: group_input.outputs["U Resolution"]}, - attrs={'operation': 'DIVIDE'}) - - floor = nw.new_node(Nodes.Math, input_kwargs={0: divide}, attrs={'operation': 'FLOOR'}) - - sample_index_3 = nw.new_node(Nodes.SampleIndex, - input_kwargs={'Geometry': resample_curve, 3: position_1, 'Index': floor}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - normal = nw.new_node(Nodes.InputNormal) - - sample_index_5 = nw.new_node(Nodes.SampleIndex, - input_kwargs={'Geometry': resample_curve, 3: normal, 'Index': floor}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - position = nw.new_node(Nodes.InputPosition) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': position}) - - scale = nw.new_node(Nodes.VectorMath, - input_kwargs={0: sample_index_5.outputs[2], 'Scale': separate_xyz.outputs["X"]}, - attrs={'operation': 'SCALE'}) - - curve_tangent = nw.new_node(Nodes.CurveTangent) - - 
sample_index_4 = nw.new_node(Nodes.SampleIndex, - input_kwargs={'Geometry': resample_curve, 3: curve_tangent, 'Index': floor}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - cross_product = nw.new_node(Nodes.VectorMath, - input_kwargs={0: sample_index_4.outputs[2], 1: sample_index_5.outputs[2]}, - attrs={'operation': 'CROSS_PRODUCT'}) - - scale_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: cross_product.outputs["Vector"], 'Scale': separate_xyz.outputs["Y"]}, - attrs={'operation': 'SCALE'}) - - add = nw.new_node(Nodes.VectorMath, input_kwargs={0: scale.outputs["Vector"], 1: scale_1.outputs["Vector"]}) - - scale_2 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: add.outputs["Vector"], 'Scale': group_input.outputs["Radius"]}, - attrs={'operation': 'SCALE'}) - - add_1 = nw.new_node(Nodes.VectorMath, input_kwargs={0: sample_index_3.outputs[2], 1: scale_2.outputs["Vector"]}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 'Position': add_1.outputs["Vector"]}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': set_position}, attrs={'is_active_output': True}) - -def geometry_nodes(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - integer = nw.new_node(Nodes.Integer) - integer.integer = 32 - - shiftedsquare = nw.new_node(nodegroup_shifted_square().name, input_kwargs={'Resolution': integer}) - - shiftedcircle = nw.new_node(nodegroup_shifted_circle().name, input_kwargs={'Resolution': integer, 'Radius': 0.9200, 'Z': 2.5600}) - - transform_geometry = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': shiftedcircle, 'Rotation': (0.0000, 0.0000, 0.7854)}) - - shiftedsquare_1 = nw.new_node(nodegroup_shifted_square().name, input_kwargs={'Resolution': integer, 'Z': 10.0000}) - - divide = nw.new_node(Nodes.Math, input_kwargs={0: integer, 1: 2.0000}, attrs={'operation': 'DIVIDE'}) - - star = nw.new_node('GeometryNodeCurveStar', - input_kwargs={'Points': divide, 'Inner Radius': 0.5000, 'Outer Radius': 0.6600}) - - transform_geometry_1 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': star.outputs["Curve"], 'Translation': (0.0000, 0.0000, 7.6000), 'Rotation': (0.0000, 0.0000, 0.7854)}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [shiftedsquare, transform_geometry, shiftedsquare_1, transform_geometry_1]}) - - v_resolution = nw.new_node(Nodes.Integer, label='V Resolution') - v_resolution.integer = 64 - - lofting = nw.new_node(nodegroup_lofting().name, - input_kwargs={'Profile Curves': join_geometry, 'U Resolution': integer, 'V Resolution': v_resolution}) - - object_info = nw.new_node(Nodes.ObjectInfo, input_kwargs={'Object': bpy.data.objects['BezierCurve']}) - - warparoundcurve = nw.new_node(nodegroup_warp_around_curve().name, - input_kwargs={'Geometry': lofting.outputs["Geometry"], 'Curve': object_info.outputs["Geometry"], 'U Resolution': integer, 'V Resolution': v_resolution}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': warparoundcurve}, attrs={'is_active_output': True}) - - - -def apply(obj, selection=None, **kwargs): - surface.add_geomod(obj, geometry_nodes, selection=selection, attributes=[]) -apply(bpy.context.active_object) \ No newline at end of file diff --git a/infinigen/assets/tables/strechers.py b/infinigen/assets/tables/strechers.py deleted file mode 100644 index aa20b8987..000000000 --- a/infinigen/assets/tables/strechers.py +++ /dev/null @@ -1,33 +0,0 @@ -# Copyright (c) Princeton University. 
-# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Yiming Zuo - - -import bpy -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface - -from infinigen.assets.tables.table_utils import nodegroup_n_gon_cylinder - -@node_utils.to_nodegroup('nodegroup_strecher', singleton=False, type='GeometryNodeTree') -def nodegroup_strecher(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - curve_line = nw.new_node(Nodes.CurveLine, input_kwargs={'Start': (1.0000, 0.0000, 1.0000), 'End': (1.0000, 0.0000, -1.0000)}) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketInt', 'N-gon', 32), - ('NodeSocketFloat', 'Profile Width', 0.200)]) - - ngoncylinder = nw.new_node(nodegroup_n_gon_cylinder().name, - input_kwargs={'Radius Curve': curve_line, 'Height': 1.0000, 'N-gon': group_input.outputs["N-gon"], 'Profile Width': group_input.outputs["Profile Width"], 'Aspect Ratio': 1.0000, 'Resolution': 64}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': ngoncylinder.outputs["Mesh"]}, - attrs={'is_active_output': True}) \ No newline at end of file diff --git a/infinigen/assets/tables/table_top.py b/infinigen/assets/tables/table_top.py deleted file mode 100644 index 6aa594807..000000000 --- a/infinigen/assets/tables/table_top.py +++ /dev/null @@ -1,174 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
- -# Authors: Yiming Zuo - - -import bpy -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface - -from infinigen.core.util.math import FixedSeed -from infinigen.core.placement.factory import AssetFactory - -from infinigen.assets.tables.table_utils import nodegroup_n_gon_cylinder, nodegroup_create_cap -from infinigen.core.tagging import tag_nodegroup -from infinigen.core import tags as t - -@node_utils.to_nodegroup('nodegroup_capped_cylinder', singleton=False, type='GeometryNodeTree') -def nodegroup_capped_cylinder(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Thickness', 0.5000), - ('NodeSocketFloat', 'Radius', 0.2000), - ('NodeSocketFloatDistance', 'Cap Flatness', 4.0000), - ('NodeSocketFloat', 'Fillet Radius Vertical', 0.4000), - ('NodeSocketFloat', 'Cap Relative Scale', 1.0000), - ('NodeSocketFloat', 'Cap Relative Z Offset', 0.0000), - ('NodeSocketInt', 'Resolution', 64)]) - - create_cap = nw.new_node(nodegroup_create_cap().name, - input_kwargs={'Radius': group_input.outputs["Cap Flatness"], 'Resolution': group_input.outputs["Resolution"]}, - label='CreateCap') - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Thickness"], 1: 2.0000}, - attrs={'operation': 'MULTIPLY'}) - - add = nw.new_node(Nodes.Math, input_kwargs={0: multiply, 1: group_input.outputs["Cap Relative Z Offset"]}) - - combine_xyz_5 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': add}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Radius"], 1: 0.5}, attrs={'operation': 'MULTIPLY'}) - - add_1 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_1, 1: group_input.outputs["Cap Relative Scale"]}) - - transform_5 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': create_cap, 'Translation': combine_xyz_5, 'Scale': add_1}) - - multiply_2 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Radius"], 1: 1.0}, attrs={'operation': 'MULTIPLY'}) - - generatetabletop = nw.new_node(nodegroup_generate_table_top().name, - input_kwargs={'Thickness': multiply, 'N-gon': group_input.outputs["Resolution"], 'Profile Width': multiply_2, 'Aspect Ratio': 1.0000, 'Fillet Ratio': 0.0000, 'Fillet Radius Vertical': group_input.outputs["Fillet Radius Vertical"]}) - - join_geometry_2 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [transform_5, generatetabletop]}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': join_geometry_2}, attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_generate_table_top', singleton=False, type='GeometryNodeTree') -def nodegroup_generate_table_top(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - curve_line = nw.new_node(Nodes.CurveLine, input_kwargs={'Start': (1.0000, 0.0000, 1.0000), 'End': (1.0000, 0.0000, -1.0000)}) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Thickness', 0.5000), - ('NodeSocketInt', 'N-gon', 0), - ('NodeSocketFloat', 'Profile Width', 0.5000), - ('NodeSocketFloat', 'Aspect Ratio', 0.5000), - ('NodeSocketFloat', 'Fillet Ratio', 0.2000), - ('NodeSocketFloat', 'Fillet Radius Vertical', 0.0000)]) - - ngoncylinder = 
nw.new_node(nodegroup_n_gon_cylinder().name, - input_kwargs={'Radius Curve': curve_line, 'Height': group_input.outputs["Thickness"], 'N-gon': group_input.outputs["N-gon"], 'Profile Width': group_input.outputs["Profile Width"], 'Aspect Ratio': group_input.outputs["Aspect Ratio"], 'Fillet Ratio': group_input.outputs["Fillet Ratio"], 'Profile Resolution': 512, 'Resolution': 10}) - - arc = nw.new_node('GeometryNodeCurveArc', input_kwargs={'Resolution': 4, 'Radius': 0.7071, 'Sweep Angle': 4.7124}) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': arc.outputs["Curve"], 'Rotation': (0.0000, 0.0000, -0.7854)}) - - transform_2 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': transform, 'Rotation': (0.0000, 1.5708, 0.0000)}) - - transform_3 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': transform_2, 'Translation': (0.0000, 0.5000, 0.0000)}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': 1.0000, 'Y': group_input.outputs["Fillet Radius Vertical"], 'Z': 1.0000}) - - transform_4 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': transform_3, 'Scale': combine_xyz}) - - fillet_curve = nw.new_node('GeometryNodeFilletCurve', - input_kwargs={'Curve': transform_4, 'Count': 8, 'Radius': group_input.outputs["Fillet Radius Vertical"], 'Limit Radius': True}, - attrs={'mode': 'POLY'}) - - transform_6 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': fillet_curve, 'Rotation': (1.5708, 1.5708, 0.0000), 'Scale': group_input.outputs["Thickness"]}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': ngoncylinder.outputs["Profile Curve"], 'Profile Curve': transform_6}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Thickness"], 1: -0.5000}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': multiply}) - - transform_5 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': curve_to_mesh, 'Translation': combine_xyz_1}) - - index = nw.new_node(Nodes.Index) - - equal = nw.new_node(Nodes.Compare, input_kwargs={'A': index, 'B': 0}, attrs={'data_type': 'INT', 'operation': 'EQUAL'}) - - cap = tag_nodegroup(nw, ngoncylinder.outputs["Caps"], t.Subpart.SupportSurface, selection=equal) - - join_geometry = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [transform_5, cap]}) - - flip_faces = nw.new_node(Nodes.FlipFaces, input_kwargs={'Mesh': join_geometry}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': group_input.outputs["Thickness"]}) - - transform_1 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': flip_faces, 'Translation': combine_xyz_2}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={ - 'Geometry': transform_1, - 'Curve': ngoncylinder.outputs["Profile Curve"], - }) - -def geometry_generate_table_top_wrapper(nw: NodeWrangler, **kwargs): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketInt', 'Profile N-gon', kwargs['Profile N-gon']), - ('NodeSocketFloat', 'Profile Width', kwargs['Profile Width']), - ('NodeSocketFloat', 'Profile Aspect Ratio', kwargs['Profile Aspect Ratio']), - ('NodeSocketFloat', 'Profile Fillet Ratio', kwargs['Profile Fillet Ratio']), - ('NodeSocketFloat', 'Thickness', kwargs['Thickness']), - ('NodeSocketFloat', 'Vertical Fillet Ratio', kwargs['Vertical Fillet Ratio'])] - ) - - generatetabletop = nw.new_node(nodegroup_generate_table_top().name, - input_kwargs={'Thickness': 
group_input.outputs["Thickness"], 'N-gon': group_input.outputs["Profile N-gon"], 'Profile Width': group_input.outputs["Profile Width"], 'Aspect Ratio': group_input.outputs["Profile Aspect Ratio"], 'Fillet Ratio': group_input.outputs["Profile Fillet Ratio"], 'Fillet Radius Vertical': group_input.outputs["Vertical Fillet Ratio"]}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': generatetabletop}, attrs={'is_active_output': True}) - -class TableTopFactory(AssetFactory): - def __init__(self, factory_seed, coarse=False): - super(TableTopFactory, self).__init__(factory_seed, coarse=coarse) - - with FixedSeed(factory_seed): - self.params = self.sample_parameters() - - @staticmethod - def sample_parameters(): - # all in meters - return { - 'Profile N-gon': 4, - 'Profile Width': 1.0, - 'Profile Aspect Ratio': 1.0, - 'Profile Fillet Ratio': 0.2000, - 'Thickness': 0.1000, - 'Vertical Fillet Ratio': 0.2000 - } - - def create_asset(self, **params): - - bpy.ops.mesh.primitive_plane_add( - size=2, enter_editmode=False, align='WORLD', location=(0, 0, 0), scale=(1, 1, 1)) - obj = bpy.context.active_object - - surface.add_geomod(obj, geometry_generate_table_top_wrapper, apply=False, input_kwargs=self.params) - - return obj \ No newline at end of file diff --git a/infinigen/assets/tables/table_utils.py b/infinigen/assets/tables/table_utils.py deleted file mode 100644 index 94703f79e..000000000 --- a/infinigen/assets/tables/table_utils.py +++ /dev/null @@ -1,514 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Yiming Zuo - - -import bpy -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface - -@node_utils.to_nodegroup('nodegroup_n_gon_profile', singleton=False, type='GeometryNodeTree') -def nodegroup_n_gon_profile(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketInt', 'Profile N-gon', 4), - ('NodeSocketFloat', 'Profile Width', 1.0000), - ('NodeSocketFloat', 'Profile Aspect Ratio', 1.0000), - ('NodeSocketFloat', 'Profile Fillet Ratio', 0.2000)]) - - value = nw.new_node(Nodes.Value) - value.outputs[0].default_value = 0.5000 - - curve_circle = nw.new_node(Nodes.CurveCircle, input_kwargs={'Resolution': group_input.outputs["Profile N-gon"], 'Radius': value}) - - divide = nw.new_node(Nodes.Math, - input_kwargs={0: 3.1416, 1: group_input.outputs["Profile N-gon"]}, - attrs={'operation': 'DIVIDE'}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': divide}) - - transform = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': curve_circle.outputs["Curve"], 'Rotation': combine_xyz_1}) - - transform_2 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': transform, 'Rotation': (0.0000, 0.0000, -1.5708)}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Profile Aspect Ratio"], 1: group_input.outputs["Profile Width"]}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': group_input.outputs["Profile Width"], 'Y': multiply, 'Z': 1.0000}) - - transform_1 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': transform_2, 
'Scale': combine_xyz}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Profile Width"], 1: group_input.outputs["Profile Fillet Ratio"]}, - attrs={'operation': 'MULTIPLY'}) - - fillet_curve_1 = nw.new_node('GeometryNodeFilletCurve', - input_kwargs={'Curve': transform_1, 'Count': 8, 'Radius': multiply_1, 'Limit Radius': True}, - attrs={'mode': 'POLY'}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Output': fillet_curve_1}, attrs={'is_active_output': True}) - - -@node_utils.to_nodegroup('nodegroup_n_gon_cylinder', singleton=False, type='GeometryNodeTree') -def nodegroup_n_gon_cylinder(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Radius Curve', None), - ('NodeSocketFloat', 'Height', 0.5000), - ('NodeSocketInt', 'N-gon', 0), - ('NodeSocketFloat', 'Profile Width', 0.5000), - ('NodeSocketFloat', 'Aspect Ratio', 0.5000), - ('NodeSocketFloat', 'Fillet Ratio', 0.2000), - ('NodeSocketInt', 'Profile Resolution', 64), - ('NodeSocketInt', 'Resolution', 128)]) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Height"], 1: -1.0000}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': multiply}) - - curve_line = nw.new_node(Nodes.CurveLine, input_kwargs={'End': combine_xyz_1}) - - set_curve_tilt = nw.new_node(Nodes.SetCurveTilt, input_kwargs={'Curve': curve_line, 'Tilt': 3.1416}) - - resample_curve = nw.new_node(Nodes.ResampleCurve, - input_kwargs={'Curve': set_curve_tilt, 'Count': group_input.outputs["Resolution"]}) - - spline_parameter_1 = nw.new_node(Nodes.SplineParameter) - - capture_attribute = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': resample_curve, 2: spline_parameter_1.outputs["Factor"]}) - - ngonprofile = nw.new_node(nodegroup_n_gon_profile().name, - input_kwargs={'Profile N-gon': group_input.outputs["N-gon"], 'Profile Width': group_input.outputs["Profile Width"], 'Profile Aspect Ratio': group_input.outputs["Aspect Ratio"], 'Profile Fillet Ratio': group_input.outputs["Fillet Ratio"]}) - - resample_curve_1 = nw.new_node(Nodes.ResampleCurve, - input_kwargs={'Curve': ngonprofile, 'Count': group_input.outputs["Profile Resolution"]}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': capture_attribute.outputs["Geometry"], 'Profile Curve': resample_curve_1, 'Fill Caps': True}) - - position_1 = nw.new_node(Nodes.InputPosition) - - separate_xyz_2 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': position_1}) - - sample_curve = nw.new_node(Nodes.SampleCurve, - input_kwargs={'Curves': group_input.outputs["Radius Curve"], 'Factor': capture_attribute.outputs[2]}, - attrs={'use_all_curves': True}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': sample_curve.outputs["Position"]}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': separate_xyz.outputs["X"], 'Y': separate_xyz.outputs["Y"]}) - - length = nw.new_node(Nodes.VectorMath, input_kwargs={0: combine_xyz}, attrs={'operation': 'LENGTH'}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_2.outputs["X"], 1: length.outputs["Value"]}, - attrs={'operation': 'MULTIPLY'}) - - multiply_2 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_2.outputs["Y"], 1: length.outputs["Value"]}, - attrs={'operation': 'MULTIPLY'}) - - position = nw.new_node(Nodes.InputPosition) - - separate_xyz_1 = 
nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': position}) - - attribute_statistic = nw.new_node(Nodes.AttributeStatistic, - input_kwargs={'Geometry': group_input.outputs["Radius Curve"], 2: separate_xyz_1.outputs["Z"]}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': separate_xyz.outputs["Z"], 1: attribute_statistic.outputs["Min"], 2: attribute_statistic.outputs["Max"], 3: multiply, 4: 0.0000}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply_1, 'Y': multiply_2, 'Z': map_range.outputs["Result"]}) - - set_position = nw.new_node(Nodes.SetPosition, input_kwargs={'Geometry': curve_to_mesh, 'Position': combine_xyz_2}) - - index = nw.new_node(Nodes.Index) - - domain_size = nw.new_node(Nodes.DomainSize, input_kwargs={'Geometry': curve_to_mesh}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: domain_size.outputs["Face Count"], 1: 2.0000}, - attrs={'operation': 'SUBTRACT'}) - - less_than = nw.new_node(Nodes.Compare, - input_kwargs={2: index, 3: subtract}, - attrs={'operation': 'LESS_THAN', 'data_type': 'INT'}) - - delete_geometry = nw.new_node(Nodes.DeleteGeometry, - input_kwargs={'Geometry': curve_to_mesh, 'Selection': less_than}, - attrs={'domain': 'FACE'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Mesh': set_position, 'Profile Curve': resample_curve_1, 'Caps': delete_geometry}, - attrs={'is_active_output': True}) - - -@node_utils.to_nodegroup('nodegroup_generate_radius_curve', singleton=False, type='GeometryNodeTree') -def nodegroup_generate_radius_curve(nw: NodeWrangler, curve_control_points): - # Code generated using version 2.6.4 of the node_transpiler - - curve_line = nw.new_node(Nodes.CurveLine, input_kwargs={'Start': (1.0000, 0.0000, 1.0000), 'End': (1.0000, 0.0000, -1.0000)}) - - group_input = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketInt', 'Resolution', 128)]) - - resample_curve = nw.new_node(Nodes.ResampleCurve, input_kwargs={'Curve': curve_line, 'Count': group_input.outputs["Resolution"]}) - - position = nw.new_node(Nodes.InputPosition) - - spline_parameter = nw.new_node(Nodes.SplineParameter) - - float_curve = nw.new_node(Nodes.FloatCurve, input_kwargs={'Value': spline_parameter.outputs["Factor"]}) - node_utils.assign_curve(float_curve.mapping.curves[0], curve_control_points) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': float_curve, 'Y': 1.0000, 'Z': 1.0000}) - - multiply = nw.new_node(Nodes.VectorMath, input_kwargs={0: position, 1: combine_xyz_1}, attrs={'operation': 'MULTIPLY'}) - - set_position = nw.new_node(Nodes.SetPosition, input_kwargs={'Geometry': resample_curve, 'Position': multiply.outputs["Vector"]}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': set_position}, attrs={'is_active_output': True}) - - -@node_utils.to_nodegroup('nodegroup_create_anchors', singleton=False, type='GeometryNodeTree') -def nodegroup_create_anchors(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketInt', 'Profile N-gon', 0), - ('NodeSocketFloat', 'Profile Width', 0.5000), - ('NodeSocketFloat', 'Profile Aspect Ratio', 0.5000), - ('NodeSocketFloat', 'Profile Rotation', 0.0000)]) - - equal = nw.new_node(Nodes.Compare, - input_kwargs={2: group_input.outputs["Profile N-gon"], 3: 1}, - attrs={'operation': 'EQUAL', 'data_type': 'INT'}) - - equal_1 = nw.new_node(Nodes.Compare, - input_kwargs={2: group_input.outputs["Profile N-gon"], 3: 2}, - 
attrs={'operation': 'EQUAL', 'data_type': 'INT'}) - - ngonprofile = nw.new_node(nodegroup_n_gon_profile().name, - input_kwargs={'Profile N-gon': group_input.outputs["Profile N-gon"], 'Profile Width': group_input.outputs["Profile Width"], 'Profile Aspect Ratio': group_input.outputs["Profile Aspect Ratio"], 'Profile Fillet Ratio': 0.0000}) - - curve_to_points = nw.new_node(Nodes.CurveToPoints, input_kwargs={'Curve': ngonprofile}, attrs={'mode': 'EVALUATED'}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Profile Width"], 1: 0.3535}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Profile Width"], 1: -0.3535}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply_1}) - - curve_line = nw.new_node(Nodes.CurveLine, input_kwargs={'Start': combine_xyz, 'End': combine_xyz_1}) - - curve_to_points_1 = nw.new_node(Nodes.CurveToPoints, input_kwargs={'Curve': curve_line}, attrs={'mode': 'EVALUATED'}) - - switch_1 = nw.new_node(Nodes.Switch, - input_kwargs={1: equal_1, 14: curve_to_points.outputs["Points"], 15: curve_to_points_1.outputs["Points"]}) - - points = nw.new_node('GeometryNodePoints') - - switch = nw.new_node(Nodes.Switch, input_kwargs={1: equal, 14: switch_1.outputs[6], 15: points}) - - set_point_radius = nw.new_node(Nodes.SetPointRadius, input_kwargs={'Points': switch.outputs[6]}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': group_input.outputs["Profile Rotation"]}) - - transform = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': set_point_radius, 'Rotation': combine_xyz_2}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': transform}, attrs={'is_active_output': True}) - - -@node_utils.to_nodegroup('nodegroup_create_legs_and_strechers', singleton=False, type='GeometryNodeTree') -def nodegroup_create_legs_and_strechers(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Anchors', None), - ('NodeSocketBool', 'Keep Legs', False), - ('NodeSocketGeometry', 'Leg Instance', None), - ('NodeSocketFloat', 'Table Height', 0.0000), - ('NodeSocketFloat', 'Leg Bottom Relative Scale', 0.0000), - ('NodeSocketFloat', 'Leg Bottom Relative Rotation', 0.0000), - ('NodeSocketBool', 'Keep Odd Strechers', True), - ('NodeSocketBool', 'Keep Even Strechers', True), - ('NodeSocketGeometry', 'Strecher Instance', None), - ('NodeSocketInt', 'Strecher Index Increment', 0), - ('NodeSocketFloat', 'Strecher Relative Position', 0.5000), - ('NodeSocketFloat', 'Leg Bottom Offset', 0.0000), - ('NodeSocketBool', 'Align Leg X rot', False)]) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': group_input.outputs["Table Height"]}) - - transform = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': group_input.outputs["Anchors"], 'Translation': combine_xyz}) - - position = nw.new_node(Nodes.InputPosition) - - combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': group_input.outputs["Leg Bottom Offset"]}) - - subtract = nw.new_node(Nodes.VectorMath, input_kwargs={0: combine_xyz, 1: combine_xyz_3}, attrs={'operation': 'SUBTRACT'}) - - subtract_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: position, 1: subtract.outputs["Vector"]}, - attrs={'operation': 'SUBTRACT'}) - - vector_rotate = 
nw.new_node(Nodes.VectorRotate, - input_kwargs={'Vector': subtract_1.outputs["Vector"], 'Angle': group_input.outputs["Leg Bottom Relative Rotation"]}, - attrs={'rotation_type': 'Z_AXIS'}) - - combine_xyz_4 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': group_input.outputs["Leg Bottom Relative Scale"], 'Y': group_input.outputs["Leg Bottom Relative Scale"], 'Z': 1.0000}) - - multiply = nw.new_node(Nodes.VectorMath, input_kwargs={0: vector_rotate, 1: combine_xyz_4}, attrs={'operation': 'MULTIPLY'}) - - subtract_2 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: position, 1: multiply.outputs["Vector"]}, - attrs={'operation': 'SUBTRACT'}) - - align_euler_to_vector = nw.new_node(Nodes.AlignEulerToVector, input_kwargs={'Vector': subtract_2}, attrs={'axis': 'Z'}) - - align_euler_to_vector_3 = nw.new_node(Nodes.AlignEulerToVector, - input_kwargs={'Rotation': align_euler_to_vector, 'Vector': position}, - attrs={'pivot_axis': 'Z'}) - - switch = nw.new_node(Nodes.Switch, - input_kwargs={0: group_input.outputs["Align Leg X rot"], 8: align_euler_to_vector, 9: align_euler_to_vector_3}, - attrs={'input_type': 'VECTOR'}) - - length = nw.new_node(Nodes.VectorMath, input_kwargs={0: subtract_2}, attrs={'operation': 'LENGTH'}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': 1.0000, 'Y': 1.0000, 'Z': length.outputs["Value"]}) - - instance_on_points = nw.new_node(Nodes.InstanceOnPoints, - input_kwargs={'Points': transform, 'Instance': group_input.outputs["Leg Instance"], 'Rotation': switch.outputs[3], 'Scale': combine_xyz_2}) - - realize_instances = nw.new_node(Nodes.RealizeInstances, input_kwargs={'Geometry': instance_on_points}) - - switch_1 = nw.new_node(Nodes.Switch, input_kwargs={1: group_input.outputs["Keep Legs"], 15: realize_instances}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Strecher Relative Position"], 1: -1.0000}, - attrs={'operation': 'MULTIPLY'}) - - scale = nw.new_node(Nodes.VectorMath, input_kwargs={0: subtract_2, 'Scale': multiply_1}, attrs={'operation': 'SCALE'}) - - position_2 = nw.new_node(Nodes.InputPosition) - - add = nw.new_node(Nodes.VectorMath, input_kwargs={0: scale.outputs["Vector"], 1: position_2}) - - set_position = nw.new_node(Nodes.SetPosition, input_kwargs={'Geometry': transform, 'Position': add.outputs["Vector"]}) - - index = nw.new_node(Nodes.Index) - - modulo = nw.new_node(Nodes.Math, input_kwargs={0: index, 1: 2.0000}, attrs={'operation': 'MODULO'}) - - op_and = nw.new_node(Nodes.BooleanMath, input_kwargs={0: modulo, 1: group_input.outputs["Keep Odd Strechers"]}) - - op_not = nw.new_node(Nodes.BooleanMath, input_kwargs={0: modulo}, attrs={'operation': 'NOT'}) - - op_and_1 = nw.new_node(Nodes.BooleanMath, input_kwargs={0: group_input.outputs["Keep Even Strechers"], 1: op_not}) - - op_or = nw.new_node(Nodes.BooleanMath, input_kwargs={0: op_and, 1: op_and_1}, attrs={'operation': 'OR'}) - - domain_size = nw.new_node(Nodes.DomainSize, input_kwargs={'Geometry': transform}, attrs={'component': 'POINTCLOUD'}) - - divide = nw.new_node(Nodes.Math, - input_kwargs={0: domain_size.outputs["Point Count"], 1: group_input.outputs["Strecher Index Increment"]}, - attrs={'operation': 'DIVIDE'}) - - equal = nw.new_node(Nodes.Compare, input_kwargs={0: divide, 1: 2.0000}, attrs={'operation': 'EQUAL'}) - - boolean = nw.new_node(Nodes.Boolean, attrs={'boolean': True}) - - index_1 = nw.new_node(Nodes.Index) - - divide_1 = nw.new_node(Nodes.Math, - input_kwargs={0: domain_size.outputs["Point Count"], 1: 2.0000}, - attrs={'operation': 
'DIVIDE'}) - - less_than = nw.new_node(Nodes.Compare, - input_kwargs={2: index_1, 3: divide_1}, - attrs={'operation': 'LESS_THAN', 'data_type': 'INT'}) - - switch_2 = nw.new_node(Nodes.Switch, input_kwargs={0: equal, 6: boolean, 7: less_than}, attrs={'input_type': 'BOOLEAN'}) - - op_and_2 = nw.new_node(Nodes.BooleanMath, input_kwargs={0: op_or, 1: switch_2.outputs[2]}) - - position_1 = nw.new_node(Nodes.InputPosition) - - add_1 = nw.new_node(Nodes.Math, input_kwargs={0: index, 1: group_input.outputs["Strecher Index Increment"]}) - - modulo_1 = nw.new_node(Nodes.Math, - input_kwargs={0: add_1, 1: domain_size.outputs["Point Count"]}, - attrs={'operation': 'MODULO'}) - - field_at_index = nw.new_node(Nodes.FieldAtIndex, input_kwargs={'Index': modulo_1, 3: position_1}, attrs={'data_type': 'FLOAT_VECTOR'}) - - subtract_3 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: position_1, 1: field_at_index.outputs[2]}, - attrs={'operation': 'SUBTRACT'}) - - align_euler_to_vector_1 = nw.new_node(Nodes.AlignEulerToVector, input_kwargs={'Vector': subtract_3.outputs["Vector"]}, attrs={'axis': 'Z'}) - - align_euler_to_vector_2 = nw.new_node(Nodes.AlignEulerToVector, input_kwargs={'Rotation': align_euler_to_vector_1}, attrs={'pivot_axis': 'Z'}) - - length_1 = nw.new_node(Nodes.VectorMath, input_kwargs={0: subtract_3.outputs["Vector"]}, attrs={'operation': 'LENGTH'}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': 1.0000, 'Y': 1.0000, 'Z': length_1.outputs["Value"]}) - - instance_on_points_1 = nw.new_node(Nodes.InstanceOnPoints, - input_kwargs={'Points': set_position, 'Selection': op_and_2, 'Instance': group_input.outputs["Strecher Instance"], 'Rotation': align_euler_to_vector_2, 'Scale': combine_xyz_1}) - - realize_instances_1 = nw.new_node(Nodes.RealizeInstances, input_kwargs={'Geometry': instance_on_points_1}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [switch_1.outputs[6], realize_instances_1]}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': join_geometry}, attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_create_cap', singleton=False, type='GeometryNodeTree') -def nodegroup_create_cap(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloatDistance', 'Radius', 1.0000), - ('NodeSocketInt', 'Resolution', 64)]) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Radius"], 1: 257.0000}, - attrs={'operation': 'MULTIPLY'}) - - uv_sphere = nw.new_node(Nodes.MeshUVSphere, - input_kwargs={'Segments': group_input.outputs["Resolution"], 'Rings': multiply, 'Radius': group_input.outputs["Radius"]}) - - store_named_attribute = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': uv_sphere.outputs["Mesh"], 'Name': 'uv_map', 3: uv_sphere.outputs["UV Map"]}, - attrs={'data_type': 'FLOAT_VECTOR', 'domain': 'CORNER'}) - - power = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Radius"], 1: 2.0000}, attrs={'operation': 'POWER'}) - - subtract = nw.new_node(Nodes.Math, input_kwargs={0: power, 1: 1.0000}, attrs={'operation': 'SUBTRACT'}) - - sqrt = nw.new_node(Nodes.Math, input_kwargs={0: subtract}, attrs={'operation': 'SQRT'}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: sqrt, 1: -1.0000}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': multiply_1}) - - transform = nw.new_node(Nodes.Transform, 
input_kwargs={'Geometry': store_named_attribute, 'Translation': combine_xyz}) - - position = nw.new_node(Nodes.InputPosition) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': position}) - - less_than = nw.new_node(Nodes.Compare, input_kwargs={0: separate_xyz.outputs["Z"]}, attrs={'operation': 'LESS_THAN'}) - - delete_geometry = nw.new_node(Nodes.DeleteGeometry, input_kwargs={'Geometry': transform, 'Selection': less_than}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Mesh': delete_geometry}, attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_arc_top', singleton=False, type='GeometryNodeTree') -def nodegroup_arc_top(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloatDistance', 'Diameter', 1.0000), - ('NodeSocketFloat', 'Sweep Angle', 180.0000)]) - - divide = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Diameter"], 1: 2.0000}, attrs={'operation': 'DIVIDE'}) - - multiply_add = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Sweep Angle"], 2: -90.0000}, - attrs={'operation': 'MULTIPLY_ADD'}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: multiply_add, 1: -1.0000}, attrs={'operation': 'MULTIPLY'}) - - radians = nw.new_node(Nodes.Math, input_kwargs={0: multiply}, attrs={'operation': 'RADIANS'}) - - radians_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Sweep Angle"]}, attrs={'operation': 'RADIANS'}) - - arc = nw.new_node('GeometryNodeCurveArc', - input_kwargs={'Resolution': 32, 'Radius': divide, 'Start Angle': radians, 'Sweep Angle': radians_1}) - - transform_1 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': arc.outputs["Curve"], 'Rotation': (1.5708, 0.0000, 0.0000)}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': transform_1}, attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_align_bottom_to_floor', singleton=False, type='GeometryNodeTree') -def nodegroup_align_bottom_to_floor(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketGeometry', 'Geometry', None)]) - - bounding_box = nw.new_node(Nodes.BoundingBox, input_kwargs={'Geometry': group_input.outputs["Geometry"]}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': bounding_box.outputs["Min"]}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz.outputs["Z"], 1: -1.0000}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': multiply}) - - transform_geometry_1 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 'Translation': combine_xyz}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': transform_geometry_1, 'Offset': multiply}, - attrs={'is_active_output': True}) - - -@node_utils.to_nodegroup('nodegroup_bent', singleton=False, type='GeometryNodeTree') -def nodegroup_bent(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketFloat', 'Amount', -0.1000)]) - - position = nw.new_node(Nodes.InputPosition) - - length = nw.new_node(Nodes.VectorMath, input_kwargs={0: position}, attrs={'operation': 'LENGTH'}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, 
input_kwargs={'Vector': position}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: length.outputs["Value"], 1: separate_xyz.outputs["X"]}, - attrs={'operation': 'MULTIPLY'}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: multiply, 1: group_input.outputs["Amount"]}, - attrs={'operation': 'MULTIPLY'}) - - vector_rotate = nw.new_node(Nodes.VectorRotate, input_kwargs={'Vector': position, 'Angle': multiply_1}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 'Position': vector_rotate}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': set_position}, attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_merge_curve', singleton=False, type='GeometryNodeTree') -def nodegroup_merge_curve(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketGeometry', 'Curve', None)]) - - curve_to_mesh_1 = nw.new_node(Nodes.CurveToMesh, input_kwargs={'Curve': group_input.outputs["Curve"]}) - - merge_by_distance = nw.new_node(Nodes.MergeByDistance, input_kwargs={'Geometry': curve_to_mesh_1}) - - mesh_to_curve = nw.new_node(Nodes.MeshToCurve, input_kwargs={'Mesh': merge_by_distance}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Curve': mesh_to_curve}, attrs={'is_active_output': True}) \ No newline at end of file diff --git a/infinigen/assets/tableware/bottle.py b/infinigen/assets/tableware/bottle.py deleted file mode 100644 index 7868fa4d4..000000000 --- a/infinigen/assets/tableware/bottle.py +++ /dev/null @@ -1,146 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
- -# Authors: Lingjie Mei -import bpy -import bmesh -import numpy as np -from numpy.random import uniform - -from infinigen.assets.utils.decorate import read_co, subdivide_edge_ring, subsurf -from infinigen.assets.utils.draw import spin -from infinigen.assets.utils.object import join_objects, new_cylinder -from infinigen.assets.utils.uv import wrap_front_back -from infinigen.core.placement.factory import AssetFactory -from infinigen.assets.materials import text -from infinigen.core.util.math import FixedSeed -from infinigen.core.util import blender as butil -from infinigen.assets.material_assignments import AssetList - - -class BottleFactory(AssetFactory): - z_neck_offset = .05 - z_waist_offset = .15 - - def __init__(self, factory_seed, coarse=False): - super().__init__(factory_seed, coarse) - with FixedSeed(self.factory_seed): - self.z_length = uniform(.15, .25) - self.x_length = self.z_length * uniform(.15, .25) - self.x_cap = uniform(.3, .35) - self.bottle_type = np.random.choice(['beer', 'bordeaux', 'champagne', 'coke', 'vintage']) - self.bottle_width = uniform(.002, .005) - self.z_waist = 0 - match self.bottle_type: - case 'beer': - self.z_neck = uniform(.5, .6) - self.z_cap = uniform(.05, .08) - neck_size = uniform(.06, .1) - neck_ratio = uniform(.4, .5) - self.x_anchors = [0, 1, 1, (neck_ratio + 1) / 2 + (1 - neck_ratio) / 2 * self.x_cap, - neck_ratio + (1 - neck_ratio) * self.x_cap, self.x_cap, self.x_cap, 0] - self.z_anchors = [0, 0, self.z_neck, self.z_neck + uniform(.6, .7) * neck_size, - self.z_neck + neck_size, 1 - self.z_cap, 1, 1] - self.is_vector = [0, 1, 1, 0, 1, 1, 1, 0] - case 'bordeaux': - self.z_neck = uniform(.6, .7) - self.z_cap = uniform(.1, .15) - neck_size = uniform(.1, .15) - self.x_anchors = 0, 1, 1, (1 + self.x_cap) / 2, self.x_cap, self.x_cap, 0 - self.z_anchors = [0, 0, self.z_neck, self.z_neck + uniform(.6, .7) * neck_size, - self.z_neck + neck_size, 1, 1] - self.is_vector = [0, 1, 1, 0, 1, 1, 0] - case 'champagne': - self.z_neck = uniform(.4, .5) - self.z_cap = uniform(.05, .08) - self.x_anchors = [0, 1, 1, 1, (1 + self.x_cap) / 2, self.x_cap, self.x_cap, 0] - self.z_anchors = [0, 0, self.z_neck, self.z_neck + uniform(.08, .1), - self.z_neck + uniform(.15, .18), 1 - self.z_cap, 1, 1] - self.is_vector = [0, 1, 1, 0, 0, 1, 1, 0] - case 'coke': - self.z_waist = uniform(.4, .5) - self.z_neck = self.z_waist + uniform(.2, .25) - self.z_cap = uniform(.05, .08) - self.x_anchors = [0, uniform(.85, .95), 1, uniform(.85, .95), 1, 1, self.x_cap, self.x_cap, - 0] - self.z_anchors = [0, 0, uniform(.08, .12), uniform(.18, .25), self.z_waist, self.z_neck, - 1 - self.z_cap, 1, 1] - self.is_vector = [0, 1, 0, 0, 1, 1, 1, 1, 0] - case 'vintage': - self.z_waist = uniform(.1, .15) - self.z_neck = uniform(.7, .75) - self.z_cap = uniform(.0, .08) - x_lower = uniform(.85, .95) - self.x_anchors = [0, x_lower, (x_lower + 1) / 2, 1, 1, (self.x_cap + 1) / 2, self.x_cap, - self.x_cap, 0] - self.z_anchors = [0, 0, self.z_waist - uniform(.1, .15), self.z_waist, self.z_neck, - self.z_neck + uniform(.1, .2), 1 - self.z_cap, 1, 1] - self.is_vector = [0, 1, 0, 1, 1, 0, 1, 1, 0] - - material_assignments = AssetList['BottleFactory']() - self.surface = material_assignments['surface'].assign_material() - self.wrap_surface = material_assignments['wrap_surface'].assign_material() - if self.wrap_surface == text.Text: - self.wrap_surface = text.Text(self.factory_seed, False) - - self.cap_surface = material_assignments['cap_surface'].assign_material() - scratch_prob, edge_wear_prob = 
material_assignments['wear_tear_prob'] - self.scratch, self.edge_wear = material_assignments['wear_tear'] - self.scratch = None if uniform() > scratch_prob else self.scratch - self.edge_wear = None if uniform() > edge_wear_prob else self.edge_wear - - self.texture_shared = uniform() < .2 - self.cap_subsurf = uniform() < .5 - - def create_asset(self, **params) -> bpy.types.Object: - bottle = self.make_bottle() - wrap = self.make_wrap(bottle) - cap = self.make_cap() - obj = join_objects([bottle, wrap, cap]) - - return obj - - def finalize_assets(self, assets): - if self.scratch: - self.scratch.apply(assets) - if self.edge_wear: - self.edge_wear.apply(assets) - - def make_bottle(self): - x_anchors = np.array(self.x_anchors) * self.x_length - z_anchors = np.array(self.z_anchors) * self.z_length - anchors = x_anchors, 0, z_anchors - obj = spin(anchors, np.nonzero(self.is_vector)[0]) - subsurf(obj, 1, True) - subsurf(obj, 1) - if self.bottle_width > 0: - butil.modify_mesh(obj, 'SOLIDIFY', thickness=self.bottle_width) - self.surface.apply(obj, translucent=True) - return obj - - def make_wrap(self, bottle): - obj = new_cylinder(vertices=128) - with butil.ViewportMode(obj, 'EDIT'): - bm = bmesh.from_edit_mesh(obj.data) - geom = [f for f in bm.faces if len(f.verts) > 4] - bmesh.ops.delete(bm, geom=geom, context='FACES_ONLY') - bmesh.update_edit_mesh(obj.data) - subdivide_edge_ring(obj, 16) - z_max = self.z_neck - uniform(.02, self.z_neck_offset) * (self.z_neck - self.z_waist) - z_min = self.z_waist + uniform(.02, self.z_waist_offset) * (self.z_neck - self.z_waist) - radius = np.max(read_co(bottle)[:, 0]) + 2e-3 - obj.scale = radius, radius, (z_max - z_min) * self.z_length - obj.location[-1] = z_min * self.z_length - butil.apply_transform(obj, True) - wrap_front_back(obj, self.wrap_surface, self.texture_shared) - return obj - - def make_cap(self): - obj = new_cylinder(vertices=128) - obj.scale = [(self.x_cap + .1) * self.x_length, (self.x_cap + .1) * self.x_length, - (self.z_cap + .01) * self.z_length] - obj.location[-1] = (1 - self.z_cap) * self.z_length - butil.apply_transform(obj, loc=True) - subsurf(obj, 1, self.cap_subsurf) - self.cap_surface.apply(obj) - return obj diff --git a/infinigen/assets/tableware/cup.py b/infinigen/assets/tableware/cup.py deleted file mode 100644 index 7a6cc1ea9..000000000 --- a/infinigen/assets/tableware/cup.py +++ /dev/null @@ -1,136 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
- -# Authors: Lingjie Mei -import bpy -import numpy as np -from numpy.random import uniform - -from infinigen.assets.tableware.base import TablewareFactory -from infinigen.assets.materials import text -from infinigen.assets.utils.decorate import read_co, remove_vertices, subsurf, write_attribute -from infinigen.assets.utils.object import join_objects -from infinigen.assets.utils.draw import spin -from infinigen.assets.utils.uv import wrap_sides -from infinigen.core.util.blender import deep_clone_obj -from infinigen.core.util.random import log_uniform -from infinigen.core.util.math import FixedSeed -from infinigen.core.util import blender as butil -from infinigen.assets.material_assignments import AssetList - - -class CupFactory(TablewareFactory): - allow_transparent = True - - def __init__(self, factory_seed, coarse=False): - super().__init__(factory_seed, coarse) - with FixedSeed(factory_seed): - self.x_end = .25 - self.is_short = uniform(0, 1) < .5 - if self.is_short: - self.is_profile_straight = uniform(0, 1) < .2 - self.x_lowest = log_uniform(.6, .9) - self.depth = log_uniform(.25, .5) - self.has_guard = uniform(0, 1) < .8 - else: - self.is_profile_straight = True - self.x_lowest = log_uniform(.9, 1.) - self.depth = log_uniform(.5, 1.) - self.has_guard = False - if self.is_profile_straight: - self.handle_location = uniform(.45, .65) - else: - self.handle_location = uniform(-.1, .3) - self.handle_type = 'shear' if uniform(0, 1) < .5 else 'round' - self.handle_radius = self.depth * uniform(.2, .4) - self.handle_inner_radius = self.handle_radius * log_uniform(.2, .3) - self.handle_taper_x = uniform(0, 2) - self.handle_taper_y = uniform(0, 2) - self.x_lower_ratio = log_uniform(.8, 1.) - self.thickness = log_uniform(.01, .04) - self.has_wrap = uniform() < .3 - self.has_wrap = True - self.wrap_margin = uniform(.1, .2) - - material_assignments = AssetList['CupFactory']() - self.surface = material_assignments['surface'].assign_material() - self.wrap_surface = material_assignments['wrap_surface'].assign_material() - if self.wrap_surface == text.Text: - self.wrap_surface = text.Text(self.factory_seed, False) - self.scratch = self.edge_wear = None - - self.has_inside = uniform(0, 1) < .5 - self.scale = log_uniform(.15, .3) - - def create_asset(self, **params) -> bpy.types.Object: - if self.is_profile_straight: - x_anchors = 0, self.x_lowest * self.x_end, self.x_end - z_anchors = 0, 0, self.depth - else: - x_anchors = 0, self.x_lowest * self.x_end, (self.x_lowest + self.x_lower_ratio * ( - 1 - self.x_lowest)) * self.x_end, self.x_end - z_anchors = 0, 0, self.depth * .5, self.depth - anchors = x_anchors, np.zeros_like(x_anchors), z_anchors - obj = spin(anchors, [1], 16) - subsurf(obj, 1) - butil.modify_mesh(obj, 'BEVEL', True, offset_type='PERCENT', width_pct=uniform(10, 50), segments=8) - if self.has_wrap: - wrap = self.make_wrap(obj) - else: - wrap = None - self.solidify_with_inside(obj, self.thickness) - handle_location = x_anchors[-2] * (1 - self.handle_location) + x_anchors[-1] * self.handle_location, \ - 0, \ - z_anchors[-2] * (1 - self.handle_location) + z_anchors[-1] * self.handle_location - angle_low = np.arctan((x_anchors[-1] - x_anchors[-2]) / (z_anchors[-1] - z_anchors[-2])) - angle_height = np.arctan((x_anchors[2] - x_anchors[1]) / (z_anchors[2] - z_anchors[1])) - handle_angle = uniform(angle_low, angle_height + 1e-3) - if self.has_guard: - obj = self.add_handle(obj, handle_location, handle_angle) - if self.has_wrap: - butil.select_none() - obj = join_objects([obj, wrap]) - obj.scale = 
[self.scale] * 3 - butil.apply_transform(obj) - return obj - - def add_handle(self, obj, handle_location, handle_angle): - bpy.ops.mesh.primitive_torus_add(location=handle_location, major_radius=self.handle_radius, - minor_radius=self.handle_inner_radius) - handle = bpy.context.active_object - handle.rotation_euler = np.pi / 2, handle_angle, 0 - butil.modify_mesh(handle, 'SIMPLE_DEFORM', deform_method='TAPER', angle=self.handle_taper_x, - deform_axis='X') - butil.modify_mesh(handle, 'SIMPLE_DEFORM', deform_method='TAPER', angle=self.handle_taper_y, - deform_axis='Y') - butil.modify_mesh(handle, 'BOOLEAN', object=obj, operation='DIFFERENCE') - butil.select_none() - objs = butil.split_object(handle) - i = np.argmax([np.max(read_co(o)[:, 0]) for o in objs]) - handle = objs[i] - objs.remove(handle) - butil.delete(objs) - subsurf(handle, 1) - write_attribute(handle, lambda nw: 1, "guard", "FACE") - return join_objects([obj, handle]) - - def make_wrap(self, obj): - butil.select_none() - obj = deep_clone_obj(obj) - remove_vertices(obj, lambda x, y, z: (z / self.depth < self.wrap_margin) | ( - z / self.depth > 1 - self.wrap_margin + uniform(.0, .1)) | ( - np.abs(np.arctan2(y, x)) < np.pi * self.wrap_margin)) - obj.scale = 1 + 1e-2, 1 + 1e-2, 1 - butil.apply_transform(obj) - write_attribute(obj, lambda nw: 1, "text", "FACE") - return obj - - def finalize_assets(self, assets): - super().finalize_assets(assets) - if self.has_wrap: - for obj in assets if isinstance(assets, list) else [assets]: - wrap_sides(obj, self.wrap_surface, 'u', 'v', 'z', selection='text') - if self.scratch: - self.scratch.apply(assets) - if self.edge_wear: - self.edge_wear.apply(assets) \ No newline at end of file diff --git a/infinigen/assets/tableware/knife.py b/infinigen/assets/tableware/knife.py deleted file mode 100644 index 39a753c35..000000000 --- a/infinigen/assets/tableware/knife.py +++ /dev/null @@ -1,95 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Lingjie Mei -import bpy -import bmesh -import numpy as np -from numpy.random import uniform - -from infinigen.assets.utils.decorate import read_co, subsurf, write_co -from infinigen.core.util.random import log_uniform -from .base import TablewareFactory -from infinigen.core.util.math import FixedSeed -from infinigen.core.util import blender as butil -from infinigen.assets.utils.object import new_grid - - -class KnifeFactory(TablewareFactory): - x_end = .5 - - def __init__(self, factory_seed, coarse=False): - super().__init__(factory_seed, coarse) - with FixedSeed(factory_seed): - self.x_length = log_uniform(.4, .7) - self.has_guard = uniform(0, 1) < .7 - if self.has_guard: - self.y_length = log_uniform(.1, .5) - self.y_guard = self.y_length * log_uniform(.2, .4) - else: - self.y_length = log_uniform(.1, .2) - self.y_guard = self.y_length * log_uniform(.3, .5) - self.x_guard = uniform(0, .2) - self.has_tip = uniform(0, 1) < .7 - self.thickness = log_uniform(.02, .03) - y_off_rand = uniform(0, 1) - self.y_offset = .2 if y_off_rand < 1 / 8 else .5 if y_off_rand < 1 / 4 else uniform(.2, .6) - self.guard_type = 'round' if uniform(0, 1) < .6 else 'double' - self.guard_depth = log_uniform(.2, 1.) 
* self.thickness - self.scale = log_uniform(.2, .3) - - def create_asset(self, **params) -> bpy.types.Object: - x_anchors = np.array( - [self.x_end, uniform(.5, .8) * self.x_end, uniform(.3, .4) * self.x_end, 1e-3, 0, -1e-3, -2e-3, - -self.x_end * self.x_length + 1e-3, -self.x_end * self.x_length]) - y_anchors = np.array( - [1e-3, self.y_length * log_uniform(.75, .95), self.y_length, self.y_length, self.y_length, - self.y_guard, self.y_guard, self.y_guard, self.y_guard]) - if not self.has_guard: - indices = [0, 1, 2, 4, 5, 7, 8] - x_anchors = x_anchors[indices] - y_anchors = y_anchors[indices] - if self.has_tip: - indices = [0] + list(range(len(x_anchors))) - x_anchors = x_anchors[indices] - x_anchors[0] += 1e-3 - y_anchors = y_anchors[indices] - y_anchors[1] += 3e-3 - - obj = new_grid(x_subdivisions=len(x_anchors) - 1, y_subdivisions=1) - x = np.concatenate([x_anchors] * 2) - y = np.concatenate([y_anchors, np.zeros_like(y_anchors)]) - y[0::len(y_anchors)] += self.y_offset * self.y_length - if self.has_tip: - y[1::len(y_anchors)] += self.y_offset * self.y_length - y[2::len(y_anchors)] += self.y_offset * (self.y_length - y_anchors[2]) - else: - y[1::len(y_anchors)] += self.y_offset * (self.y_length - y_anchors[1]) - z = np.concatenate([np.zeros_like(x_anchors)] * 2) - write_co(obj, np.stack([x, y, z], -1)) - butil.modify_mesh(obj, 'SOLIDIFY', thickness=self.thickness) - self.make_knife_tip(obj) - subsurf(obj, 1) - selection = lambda nw, x: nw.compare('LESS_THAN', x, -self.x_guard * self.x_length * self.x_end) - if self.guard_type == 'double': - selection = self.make_double_sided(selection) - self.add_guard(obj, selection) - subsurf(obj, 1) - obj.scale = [self.scale] * 3 - butil.apply_transform(obj) - return obj - - def make_knife_tip(self, obj): - with butil.ViewportMode(obj, 'EDIT'): - bm = bmesh.from_edit_mesh(obj.data) - for e in bm.edges: - u, v = e.verts - x0, y0, z0 = u.co - x1, y1, z1 = v.co - if x0 >= 0 and x1 >= 0 and abs(x0 - x1) < 2e-4: - if y0 > self.y_offset * self.y_length and y1 > self.y_offset * self.y_length: - bmesh.ops.pointmerge(bm, verts=[u, v], merge_co=(u.co + v.co) / 2) - bmesh.update_edit_mesh(obj.data) - bpy.ops.mesh.select_mode(type="EDGE") - bpy.ops.mesh.select_loose(extend=False) - bpy.ops.mesh.delete(type='EDGE') diff --git a/infinigen/assets/tableware/spoon.py b/infinigen/assets/tableware/spoon.py deleted file mode 100644 index 5570a1733..000000000 --- a/infinigen/assets/tableware/spoon.py +++ /dev/null @@ -1,58 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
- -# Authors: Lingjie Mei -import bpy -import numpy as np -from numpy.random import uniform - -from infinigen.assets.utils.decorate import subsurf, write_co -from infinigen.core.util.random import log_uniform -from .base import TablewareFactory -from infinigen.core.util.math import FixedSeed -from infinigen.core.util import blender as butil -from infinigen.assets.utils.object import new_grid - - -class SpoonFactory(TablewareFactory): - x_end = .15 - is_fragile = True - - def __init__(self, factory_seed, coarse=False): - super().__init__(factory_seed, coarse) - with FixedSeed(factory_seed): - self.x_length = log_uniform(.2, .8) - self.y_length = log_uniform(.06, .12) - self.z_depth = log_uniform(.08, .25) - self.z_offset = uniform(.0, .05) - self.thickness = log_uniform(.008, .015) - self.has_guard = uniform(0, 1) < .4 - self.guard_type = 'round' if uniform(0, 1) < .6 else 'double' - self.guard_depth = log_uniform(.2, 1.) * self.thickness - self.scale = log_uniform(.15, .25) - - def create_asset(self, **params) -> bpy.types.Object: - x_anchors = np.array([log_uniform(.07, .25), 0, -.08, -.12, -self.x_end, -self.x_end - self.x_length, - -self.x_end - self.x_length * log_uniform(1.2, 1.4)]) - y_anchors = np.array([self.y_length * log_uniform(.1, .8), self.y_length * log_uniform(1., 1.2), - self.y_length * log_uniform(.6, 1.), self.y_length * log_uniform(.2, .4), - log_uniform(.01, .02), log_uniform(.02, .05), log_uniform(.01, .02)]) - z_anchors = np.array( - [0, 0, 0, 0, self.z_offset, self.z_offset + uniform(-.02, .04), self.z_offset + uniform(-.02, 0)]) - obj = new_grid(x_subdivisions=len(x_anchors) - 1, y_subdivisions=2) - x = np.concatenate([x_anchors] * 3) - y = np.concatenate([y_anchors, np.zeros_like(y_anchors), -y_anchors]) - z = np.concatenate([z_anchors] * 3) - x[len(x_anchors)] += .02 - z[len(x_anchors) + 1] = -self.z_depth - write_co(obj, np.stack([x, y, z], -1)) - butil.modify_mesh(obj, 'SOLIDIFY', thickness=self.thickness) - subsurf(obj, 1) - selection = lambda nw, x: nw.compare('LESS_THAN', x, -self.x_end) - if self.guard_type == 'double': - selection = self.make_double_sided(selection) - self.add_guard(obj, selection) - subsurf(obj, 2) - obj.scale = [self.scale] * 3 - butil.apply_transform(obj) - return obj diff --git a/infinigen/assets/trees/__init__.py b/infinigen/assets/trees/__init__.py deleted file mode 100644 index a45582997..000000000 --- a/infinigen/assets/trees/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -from .generate import BushFactory, TreeFactory, random_season, random_leaf_collection -from .tree_flower import TreeFlowerFactory diff --git a/infinigen/assets/trees/branch.py b/infinigen/assets/trees/branch.py deleted file mode 100644 index b68eb8b9c..000000000 --- a/infinigen/assets/trees/branch.py +++ /dev/null @@ -1,397 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
- -# Authors: Yiming Zuo - -import bpy -import mathutils -import numpy as np -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface - -from infinigen.core.util.math import FixedSeed -from infinigen.core.placement.factory import AssetFactory -from infinigen.core.util import blender as butil -from infinigen.core.tagging import tag_object, tag_nodegroup - -@node_utils.to_nodegroup('nodegroup_surface_bump', singleton=False, type='GeometryNodeTree') -def nodegroup_surface_bump(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketFloat', 'Displacement', 0.0200), - ('NodeSocketFloat', 'Scale', 50.0000), - ('NodeSocketFloat', 'Seed', 0.0000)]) - - normal = nw.new_node(Nodes.InputNormal) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'W': group_input.outputs["Seed"], 'Scale': group_input.outputs["Scale"]}, - attrs={'noise_dimensions': '4D'}) - - subtract = nw.new_node(Nodes.Math, input_kwargs={0: noise_texture.outputs["Fac"]}, attrs={'operation': 'SUBTRACT'}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: subtract, 1: group_input.outputs["Displacement"]}, - attrs={'operation': 'MULTIPLY'}) - - multiply_1 = nw.new_node(Nodes.VectorMath, input_kwargs={0: normal, 1: multiply}, attrs={'operation': 'MULTIPLY'}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 'Offset': multiply_1.outputs["Vector"]}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': set_position}, attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_generate_anchor', singleton=False, type='GeometryNodeTree') -def nodegroup_generate_anchor(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Curve', None), - ('NodeSocketFloat', 'curve parameter', 0.0000), - ('NodeSocketFloat', 'trim_bottom', 0.2000), - ('NodeSocketFloat', 'trim_top', 0.0000), - ('NodeSocketInt', 'seed', 0), - ('NodeSocketFloat', 'density', 0.5000), - ('NodeSocketFloat', 'keep probablity', 0.0000)]) - - divide = nw.new_node(Nodes.Math, input_kwargs={0: 1.0000, 1: group_input.outputs["density"]}, attrs={'operation': 'DIVIDE'}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: divide, 1: group_input.outputs["keep probablity"]}, attrs={'operation': 'MULTIPLY'}) - - minimum = nw.new_node(Nodes.Math, input_kwargs={0: multiply}, attrs={'operation': 'MINIMUM'}) - - curve_to_points_1 = nw.new_node(Nodes.CurveToPoints, - input_kwargs={'Curve': group_input.outputs["Curve"], 'Length': minimum}, - attrs={'mode': 'LENGTH'}) - - random_value_3 = nw.new_node(Nodes.RandomValue, - input_kwargs={'Probability': group_input.outputs["keep probablity"], 'Seed': group_input.outputs["seed"]}, - attrs={'data_type': 'BOOLEAN'}) - - greater_than = nw.new_node(Nodes.Compare, - input_kwargs={0: group_input.outputs["curve parameter"], 1: group_input.outputs["trim_bottom"]}) - - less_than = nw.new_node(Nodes.Compare, - input_kwargs={0: group_input.outputs["curve parameter"], 1: group_input.outputs["trim_top"]}, - attrs={'operation': 'LESS_THAN'}) - - op_and = nw.new_node(Nodes.BooleanMath, input_kwargs={0: 
greater_than, 1: less_than}) - - op_and_1 = nw.new_node(Nodes.BooleanMath, input_kwargs={0: random_value_3.outputs[3], 1: op_and}) - - op_not = nw.new_node(Nodes.BooleanMath, input_kwargs={0: op_and_1}, attrs={'operation': 'NOT'}) - - delete_geometry = nw.new_node(Nodes.DeleteGeometry, - input_kwargs={'Geometry': curve_to_points_1.outputs["Points"], 'Selection': op_not}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Points': delete_geometry}, attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('nodegroup_create_instance', singleton=False, type='GeometryNodeTree') -def nodegroup_create_instance(nw: NodeWrangler): - # Code generated using version 2.6.4 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Points', None), - ('NodeSocketGeometry', 'Instance', None), - ('NodeSocketBool', 'Selection', True), - ('NodeSocketBool', 'Pick Instance', False), - ('NodeSocketVector', 'Tangent', (0.0000, 0.0000, 1.0000)), - ('NodeSocketFloat', 'Rot x deg', 0.0000), - ('NodeSocketFloat', 'Rot x range', 0.2000), - ('NodeSocketFloat', 'Scale', 1.0000), - ('NodeSocketInt', 'Seed', 0)]) - - random_value_1 = nw.new_node(Nodes.RandomValue, input_kwargs={3: 6.2832, 'Seed': group_input.outputs["Seed"]}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': random_value_1.outputs[1]}) - - align_euler_to_vector = nw.new_node(Nodes.AlignEulerToVector, - input_kwargs={'Rotation': combine_xyz_1, 'Vector': group_input.outputs["Tangent"]}, - attrs={'axis': 'Y'}) - - instance_on_points = nw.new_node(Nodes.InstanceOnPoints, - input_kwargs={'Points': group_input.outputs["Points"], 'Selection': group_input.outputs["Selection"], 'Instance': group_input.outputs["Instance"], 'Pick Instance': group_input.outputs["Pick Instance"], 'Rotation': align_euler_to_vector, 'Scale': group_input.outputs["Scale"]}) - - radians = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Rot x deg"]}, attrs={'operation': 'RADIANS'}) - - subtract = nw.new_node(Nodes.Math, input_kwargs={0: 1.0000, 1: group_input.outputs["Rot x range"]}, attrs={'operation': 'SUBTRACT'}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: radians, 1: subtract}, attrs={'operation': 'MULTIPLY'}) - - add = nw.new_node(Nodes.Math, input_kwargs={0: 1.0000, 1: group_input.outputs["Rot x range"]}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: radians, 1: add}, attrs={'operation': 'MULTIPLY'}) - - random_value_2 = nw.new_node(Nodes.RandomValue, input_kwargs={2: multiply, 3: multiply_1, 'Seed': group_input.outputs["Seed"]}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': random_value_2.outputs[1]}) - - rotate_instances = nw.new_node(Nodes.RotateInstances, input_kwargs={'Instances': instance_on_points, 'Rotation': combine_xyz_2}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Instances': rotate_instances}, attrs={'is_active_output': True}) - -def generate_branch(nw: NodeWrangler, **kwargs): - # Code generated using version 2.6.4 of the node_transpiler - - curve_line = nw.new_node(Nodes.CurveLine) - - # group_input = nw.new_node(Nodes.GroupInput, - # expose_input=[('NodeSocketGeometry', 'Geometry', None), - # ('NodeSocketCollection', 'leaf collection', None), - # ('NodeSocketCollection', 'fruit collection', None), - # ('NodeSocketInt', 'resolution', 256), - # ('NodeSocketInt', 'seed', 0), - # ('NodeSocketFloat', 'main branch noise amount', 0.3000), - # ('NodeSocketFloat', 'main branch noise scale', 1.1000), - # 
('NodeSocketFloatDistance', 'overall radius', 0.0200), - # ('NodeSocketFloat', 'twig density', 10.0000), - # ('NodeSocketFloat', 'twig rotation', 45.0000), - # ('NodeSocketFloat', 'twig scale', 5.0000), - # ('NodeSocketFloat', 'twig noise amount', 0.3000), - # ('NodeSocketFloat', 'leaf density', 15.0000), - # ('NodeSocketFloat', 'leaf scale', 0.3000), - # ('NodeSocketFloat', 'leaf rot', 45.0000), - # ('NodeSocketFloat', 'fruit density', 10.0000), - # ('NodeSocketFloat', 'fruit scale', 0.0500), - # ('NodeSocketFloat', 'fruit rot', 0.0000)]) - - resample_curve = nw.new_node(Nodes.ResampleCurve, input_kwargs={'Curve': curve_line, 'Count': kwargs["resolution"]}) - - spline_parameter = nw.new_node(Nodes.SplineParameter) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': spline_parameter.outputs["Factor"], 'Y': kwargs["seed"]}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': combine_xyz, 'Scale': kwargs["main branch noise scale"]}, - attrs={'noise_dimensions': '2D'}) - - subtract = nw.new_node(Nodes.VectorMath, - input_kwargs={0: noise_texture.outputs["Color"], 1: (0.5000, 0.5000, 0.5000)}, - attrs={'operation': 'SUBTRACT'}) - - map_range = nw.new_node(Nodes.MapRange, input_kwargs={'Value': spline_parameter.outputs["Factor"], 2: 0.2000}) - - scale = nw.new_node(Nodes.VectorMath, - input_kwargs={0: subtract.outputs["Vector"], 'Scale': map_range.outputs["Result"]}, - attrs={'operation': 'SCALE'}) - - scale_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: scale.outputs["Vector"], 'Scale': kwargs["main branch noise amount"]}, - attrs={'operation': 'SCALE'}) - - set_position = nw.new_node(Nodes.SetPosition, input_kwargs={'Geometry': resample_curve, 'Offset': scale_1.outputs["Vector"]}) - - capture_attribute = nw.new_node(Nodes.CaptureAttribute, input_kwargs={'Geometry': set_position, 2: spline_parameter.outputs["Factor"]}) - - add = nw.new_node(Nodes.Math, input_kwargs={0: kwargs["seed"], 1: 13.0000}) - - generateanchor = nw.new_node(nodegroup_generate_anchor().name, - input_kwargs={'Curve': capture_attribute, 'curve parameter': capture_attribute.outputs[2], 'trim_top': 0.9000, 'seed': add, 'density': kwargs["fruit density"], 'keep probablity': 0.3000}) - - collection_info_1 = nw.new_node(Nodes.CollectionInfo, - input_kwargs={'Collection': kwargs["fruit collection"], 'Separate Children': True, 'Reset Children': True}) - - createinstance = nw.new_node(nodegroup_create_instance().name, - input_kwargs={'Points': generateanchor, 'Instance': collection_info_1, 'Pick Instance': True, 'Rot x deg': kwargs["fruit rot"], 'Scale': kwargs["fruit scale"], 'Seed': kwargs["seed"]}) - - keep_probablity = nw.new_node(Nodes.Value, label='keep probablity') - keep_probablity.outputs[0].default_value = 0.3000 - - divide = nw.new_node(Nodes.Math, - input_kwargs={0: kwargs["twig density"], 1: keep_probablity}, - attrs={'operation': 'DIVIDE'}) - - curve_to_points = nw.new_node(Nodes.CurveToPoints, input_kwargs={'Curve': capture_attribute, 'Count': divide}) - - curve_line_1 = nw.new_node(Nodes.CurveLine, input_kwargs={'End': (0.0000, 0.0000, 0.1000)}) - - divide_1 = nw.new_node(Nodes.Math, - input_kwargs={0: kwargs["resolution"], 1: 2.0000}, - attrs={'operation': 'DIVIDE'}) - - resample_curve_2 = nw.new_node(Nodes.ResampleCurve, input_kwargs={'Curve': curve_line_1, 'Count': divide_1}) - - spline_parameter_1 = nw.new_node(Nodes.SplineParameter) - - capture_attribute_1 = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': resample_curve_2, 2: 
spline_parameter_1.outputs["Factor"]}) - - add_1 = nw.new_node(Nodes.Math, input_kwargs={0: kwargs["seed"], 1: 37.0000}) - - random_value = nw.new_node(Nodes.RandomValue, - input_kwargs={'Probability': keep_probablity, 'Seed': add_1}, - attrs={'data_type': 'BOOLEAN'}) - - index = nw.new_node(Nodes.Index) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: divide, 1: 0.0500}, attrs={'operation': 'MULTIPLY'}) - - greater_equal = nw.new_node(Nodes.Compare, - input_kwargs={2: index, 3: multiply}, - attrs={'data_type': 'INT', 'operation': 'GREATER_EQUAL'}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: divide, 1: 0.9000}, attrs={'operation': 'MULTIPLY'}) - - less_equal = nw.new_node(Nodes.Compare, - input_kwargs={2: index, 3: multiply_1}, - attrs={'data_type': 'INT', 'operation': 'LESS_EQUAL'}) - - op_and = nw.new_node(Nodes.BooleanMath, input_kwargs={0: greater_equal, 1: less_equal}) - - op_and_1 = nw.new_node(Nodes.BooleanMath, input_kwargs={0: random_value.outputs[3], 1: op_and}) - - multiply_2 = nw.new_node(Nodes.Math, - input_kwargs={0: kwargs["twig rotation"], 1: -1.0000}, - attrs={'operation': 'MULTIPLY'}) - - map_range_2 = nw.new_node(Nodes.MapRange, input_kwargs={'Value': capture_attribute.outputs[2], 3: 1.0000, 4: 0.1000}) - - multiply_3 = nw.new_node(Nodes.Math, - input_kwargs={0: map_range_2.outputs["Result"], 1: kwargs["twig scale"]}, - attrs={'operation': 'MULTIPLY'}) - - createinstance_1 = nw.new_node(nodegroup_create_instance().name, - input_kwargs={'Points': curve_to_points.outputs["Points"], 'Instance': capture_attribute_1.outputs["Geometry"], 'Selection': op_and_1, 'Tangent': curve_to_points.outputs["Tangent"], 'Rot x deg': multiply_2, 'Scale': multiply_3, 'Seed': kwargs["seed"]}) - - realize_instances = nw.new_node(Nodes.RealizeInstances, input_kwargs={'Geometry': createinstance_1}) - - position = nw.new_node(Nodes.InputPosition) - - noise_texture_1 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': position, 'W': kwargs["seed"], 'Scale': 1.5000}, - attrs={'noise_dimensions': '4D'}) - - subtract_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: noise_texture_1.outputs["Color"], 1: (0.5000, 0.5000, 0.5000)}, - attrs={'operation': 'SUBTRACT'}) - - map_range_3 = nw.new_node(Nodes.MapRange, input_kwargs={'Value': capture_attribute_1.outputs[2], 2: 0.2000}) - - scale_2 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: subtract_1.outputs["Vector"], 'Scale': map_range_3.outputs["Result"]}, - attrs={'operation': 'SCALE'}) - - scale_3 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: scale_2.outputs["Vector"], 'Scale': kwargs["twig noise amount"]}, - attrs={'operation': 'SCALE'}) - - set_position_1 = nw.new_node(Nodes.SetPosition, input_kwargs={'Geometry': realize_instances, 'Offset': scale_3.outputs["Vector"]}) - - curve_tangent = nw.new_node(Nodes.CurveTangent) - - capture_attribute_2 = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': set_position_1, 1: curve_tangent}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - add_2 = nw.new_node(Nodes.Math, input_kwargs={0: kwargs["seed"], 1: 17.0000}) - - generateanchor_1 = nw.new_node(nodegroup_generate_anchor().name, - input_kwargs={'Curve': capture_attribute_2.outputs["Geometry"], 'curve parameter': capture_attribute_1.outputs[2], 'trim_top': 1.0000, 'seed': add_2, 'density': kwargs["leaf density"], 'keep probablity': 0.3000}) - - collection_info = nw.new_node(Nodes.CollectionInfo, - input_kwargs={'Collection': kwargs["leaf collection"], 'Separate Children': True, 'Reset Children': True}) - - 
createinstance_2 = nw.new_node(nodegroup_create_instance().name, - input_kwargs={'Points': generateanchor_1, 'Instance': collection_info, 'Pick Instance': True, 'Tangent': capture_attribute_2.outputs["Attribute"], 'Rot x deg': kwargs["leaf rot"], 'Scale': kwargs["leaf scale"], 'Seed': kwargs["seed"]}) - - map_range_1 = nw.new_node(Nodes.MapRange, input_kwargs={'Value': capture_attribute.outputs[2], 3: 1.0000, 4: 0.4000}) - - multiply_4 = nw.new_node(Nodes.Math, - input_kwargs={0: map_range_1.outputs["Result"], 1: kwargs["overall radius"]}, - attrs={'operation': 'MULTIPLY'}) - - set_curve_radius = nw.new_node(Nodes.SetCurveRadius, input_kwargs={'Curve': capture_attribute, 'Radius': multiply_4}) - - multiply_5 = nw.new_node(Nodes.Math, - input_kwargs={0: kwargs["resolution"], 1: kwargs["overall radius"]}, - attrs={'operation': 'MULTIPLY'}) - - multiply_6 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_5, 1: 6.2832}, attrs={'operation': 'MULTIPLY'}) - - curve_circle = nw.new_node(Nodes.CurveCircle, input_kwargs={'Resolution': multiply_6}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': set_curve_radius, 'Profile Curve': curve_circle.outputs["Curve"], 'Fill Caps': True}) - - map_range_4 = nw.new_node(Nodes.MapRange, input_kwargs={'Value': capture_attribute_1.outputs[2], 3: 0.8000, 4: 0.1000}) - - multiply_7 = nw.new_node(Nodes.Math, - input_kwargs={0: map_range_4.outputs["Result"], 1: map_range_1.outputs["Result"]}, - attrs={'operation': 'MULTIPLY'}) - - multiply_8 = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_7, 1: kwargs["overall radius"]}, - attrs={'operation': 'MULTIPLY'}) - - set_curve_radius_1 = nw.new_node(Nodes.SetCurveRadius, - input_kwargs={'Curve': capture_attribute_2.outputs["Geometry"], 'Radius': multiply_8}) - - divide_2 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_6, 1: 2.0000}, attrs={'operation': 'DIVIDE'}) - - curve_circle_1 = nw.new_node(Nodes.CurveCircle, input_kwargs={'Resolution': divide_2}) - - curve_to_mesh_1 = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': set_curve_radius_1, 'Profile Curve': curve_circle_1.outputs["Curve"], 'Fill Caps': True}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [curve_to_mesh, curve_to_mesh_1]}) - - set_material = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': join_geometry, 'Material': kwargs['material']}) - - surfacebump = nw.new_node(nodegroup_surface_bump().name, input_kwargs={'Geometry': set_material, 'Displacement': 0.0050}) - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [createinstance, createinstance_2, surfacebump]}) - - transform = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': join_geometry_1, 'Rotation': (-1.5708, 0.0000, 0.0000)}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': transform}, attrs={'is_active_output': True}) - -class BranchFactory(AssetFactory): - def __init__(self, factory_seed, twig_col, fruit_col, coarse=False): - super().__init__(factory_seed, coarse=coarse) - - self.avg_fruit_dim = np.cbrt(np.mean([np.prod(list(o.dimensions)) for o in fruit_col.objects])) - - with FixedSeed(factory_seed): - self.branch_params = self.sample_branch_params() - - self.branch_params['leaf collection'] = twig_col - self.branch_params['fruit collection'] = fruit_col - self.branch_params['material'] = twig_col.objects[0].active_material - - def sample_branch_params(self): - return { - 'resolution': 256, - 'main branch noise amount': uniform(0.2, 0.4), - 'main branch 
noise scale': uniform(0.9, 1.3), - 'overall radius': uniform(0.015, 0.025), - 'twig density': uniform(5, 15), - 'twig rotation': uniform(30, 60), - 'twig scale': uniform(3, 7), - 'twig noise amount': uniform(0.2, 0.4), - 'leaf density': uniform(5, 25), - 'leaf scale': uniform(0.25, 0.35), - 'leaf rot': uniform(30, 60), - 'fruit scale': uniform(0.15, 0.25), - 'fruit rot': 0.0, - 'fruit density': np.clip(uniform(1, 5) / self.avg_fruit_dim, 0.01, 50) - } - - def create_asset(self, **params): - - bpy.ops.mesh.primitive_plane_add( - size=2, enter_editmode=False, align='WORLD', location=(0, 0, 0), scale=(1, 1, 1)) - obj = bpy.context.active_object - - phenome = self.branch_params.copy() - phenome['seed'] = randint(10000000) - - surface.add_geomod(obj, generate_branch, input_kwargs=phenome) - - return obj \ No newline at end of file diff --git a/infinigen/assets/trees/generate.py b/infinigen/assets/trees/generate.py deleted file mode 100644 index 7102914f0..000000000 --- a/infinigen/assets/trees/generate.py +++ /dev/null @@ -1,431 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Alexander Raistrick, Yiming Zuo, Alejandro Newell, Lingjie Mei - - -import pdb -import logging - -import gin -import numpy as np -from numpy.random import uniform, normal - -import bpy - -from infinigen.assets.trees import tree, treeconfigs, branch -from infinigen.assets.leaves import leaf, leaf_v2, leaf_pine, leaf_ginko, leaf_broadleaf, leaf_maple -from infinigen.assets.fruits import apple, blackberry, coconutgreen, durian, starfruit, strawberry, compositional_fruit -from infinigen.core.nodes.node_info import Nodes -from infinigen.core.nodes.node_wrangler import NodeWrangler -from . 
import tree_flower - -from infinigen.core.util import blender as butil -from infinigen.core.util.math import FixedSeed -from infinigen.core.util.blender import deep_clone_obj -from infinigen.core.util import camera as camera_util - -from infinigen.core.placement.factory import AssetFactory, make_asset_collection -from infinigen.core.placement import detail -from infinigen.core.placement.split_in_view import split_inview - -from infinigen.core import surface - -from infinigen.assets.weather.cloud.generate import CloudFactory -from infinigen.assets.utils.decorate import write_attribute - -from infinigen.core.tagging import tag_object, tag_nodegroup -from ..utils.misc import toggle_show, toggle_hide - -logger = logging.getLogger(__name__) - -@gin.configurable -class GenericTreeFactory(AssetFactory): - - scale = 0.35 # trees are defined in weird units currently, need converting to meters - - def __init__( - self, - factory_seed, - genome: tree.TreeParams, - child_col, - trunk_surface, - realize=False, - meshing_camera=None, - cam_meshing_max_dist=1e7, - coarse_mesh_placeholder=False, - adapt_mesh_method='remesh', - decimate_placeholder_levels=0, - min_dist=None, - coarse=False - ): - - super(GenericTreeFactory, self).__init__(factory_seed, coarse=coarse) - - self.genome = genome - self.child_col = child_col - self.trunk_surface = trunk_surface - self.realize = realize - - self.camera = meshing_camera - self.cam_meshing_max_dist = cam_meshing_max_dist - self.adapt_mesh_method = adapt_mesh_method - self.decimate_placeholder_levels = decimate_placeholder_levels - self.coarse_mesh_placeholder = coarse_mesh_placeholder - - self.min_dist = min_dist - - def create_placeholder(self, i, loc, rot): - - logger.debug(f'generating tree skeleton') - skeleton_obj = tree.tree_skeleton( - self.genome.skeleton, self.genome.trunk_spacecol, self.genome.roots_spacecol, init_pos=(0, 0, 0), scale=self.scale) - - if self.coarse_mesh_placeholder: - pholder = self._create_coarse_mesh(skeleton_obj) - else: - pholder = butil.spawn_cube(size=4) - - butil.parent_to(skeleton_obj, pholder, no_inverse=True) - return pholder - - - def _create_coarse_mesh(self, skeleton_obj): - logger.debug('generating skinned mesh') - coarse_mesh = deep_clone_obj(skeleton_obj) - surface.add_geomod(coarse_mesh, tree.skin_tree, input_kwargs={'params': self.genome.skinning}, apply=True) - - if self.decimate_placeholder_levels > 0: - butil.modify_mesh(coarse_mesh, 'DECIMATE', decimate_type='UNSUBDIV', iterations=self.decimate_placeholder_levels) - - return coarse_mesh - - def finalize_placeholders(self, placeholders): - if not self.coarse_mesh_placeholder: - return - with FixedSeed(self.factory_seed): - logger.debug(f'adding {self.trunk_surface} to {len(placeholders)=}') - self.trunk_surface.apply(placeholders) - - def asset_parameters(self, distance: float, vis_distance: float) -> dict: - if self.min_dist is not None and distance < self.min_dist: - logger.warn(f'{self} recieved {distance=} which violates {self.min_dist=}. 
Ignoring') - distance = self.min_dist - return dict(face_size=detail.target_face_size(distance), distance=distance) - - def create_asset(self, placeholder, face_size, distance, **kwargs) -> bpy.types.Object: - - skeleton_obj = placeholder.children[0] - - if not self.coarse_mesh_placeholder: - skin_obj = self._create_coarse_mesh(skeleton_obj) - self.trunk_surface.apply(skin_obj) - butil.parent_to(skeleton_obj, skin_obj, no_inverse=True) - else: - skin_obj = butil.deep_clone_obj(placeholder) - - if self.child_col is not None: - assert self.genome.child_placement is not None - - max_needed_child_fs = ( - detail.target_face_size(self.min_dist, global_multiplier=1) - if self.min_dist is not None - else None - ) - - logger.debug(f'adding tree children using {self.child_col=}') - butil.select_none() - surface.add_geomod(skeleton_obj, tree.add_tree_children, input_kwargs=dict( - child_col=self.child_col, params=self.genome.child_placement, - realize=self.realize, merge_dist=max_needed_child_fs - )) - - if self.camera is not None and distance < self.cam_meshing_max_dist: - - assert self.adapt_mesh_method != 'remesh' - - skin_obj_cleanup = skin_obj - skin_obj, outofview, vert_dists, _ = split_inview(skin_obj, cam=self.camera, vis_margin=0.15) - butil.parent_to(outofview, skin_obj, no_inverse=True, no_transform=True) - - butil.delete(skin_obj_cleanup) - face_size = detail.target_face_size(vert_dists.min()) - - skin_obj.hide_render = False - - if self.adapt_mesh_method == 'remesh': - butil.modify_mesh(skin_obj, 'SUBSURF', levels=self.decimate_placeholder_levels + 1) # one extra level to smooth things out or remesh is jaggedy - - with butil.DisableModifiers(skin_obj): - detail.adapt_mesh_resolution(skin_obj, face_size, method=self.adapt_mesh_method, apply=True) - - butil.parent_to(skin_obj, placeholder, no_inverse=True, no_transform=True) - - if self.realize: - - logger.debug(f'realizing tree children') - butil.apply_modifiers(skin_obj) - butil.apply_modifiers(skeleton_obj) - - butil.join_objects([skin_obj, skeleton_obj]) - assert len(skin_obj.children) == 0 - else: - butil.parent_to(skeleton_obj, skin_obj, no_inverse=True) - - tag_object(skin_obj, 'tree') - butil.apply_modifiers(skin_obj) - - return skin_obj - - -@gin.configurable -def random_season(weights=None): - options = ['autumn', 'summer', 'spring', 'winter'] - - if weights is not None: - weights = np.array([weights[k] for k in options]) - else: - weights = np.array([0.25, 0.3, 0.4, 0.1]) - return np.random.choice(options, p=weights/weights.sum()) - -@gin.configurable -def random_species(season='summer', pine_chance=0.): - tree_species_code = np.random.rand(32) - - if season is None: - season = random_season() - - if tree_species_code[-1] < pine_chance: - return treeconfigs.pine_tree(), 'leaf_pine' - # elif tree_species_code < 0.2: - # tree_args = treeconfigs.palm_tree() - # elif tree_species_code < 0.3: - # tree_args = treeconfigs.baobab_tree() - else: - return treeconfigs.random_tree(tree_species_code, season), None - -def random_tree_child_factory(seed, leaf_params, leaf_type, season, **kwargs): - - if season is None: - season = random_season() - - fruit_scale = 0.2 - - if leaf_type is None: - return None, None - elif leaf_type == 'leaf': - return leaf.LeafFactory(seed, leaf_params, **kwargs), surface.registry('greenery') - elif leaf_type == 'leaf_pine': - return leaf_pine.LeafFactoryPine(seed, season, **kwargs), None - elif leaf_type == 'leaf_ginko': - return leaf_ginko.LeafFactoryGinko(seed, season, **kwargs), None - elif leaf_type == 
'leaf_maple': - return leaf_maple.LeafFactoryMaple(seed, season, **kwargs), None - elif leaf_type == 'leaf_broadleaf': - return leaf_broadleaf.LeafFactoryBroadleaf(seed, season, **kwargs), None - elif leaf_type == 'leaf_v2': - return leaf_v2.LeafFactoryV2(seed, **kwargs), None - elif leaf_type == 'berry': - return leaf.BerryFactory(seed, leaf_params, **kwargs), None - elif leaf_type == 'apple': - return apple.FruitFactoryApple(seed, scale=fruit_scale, **kwargs), None - elif leaf_type == 'blackberry': - return blackberry.FruitFactoryBlackberry(seed, scale=fruit_scale, **kwargs), None - elif leaf_type == 'coconutgreen': - return coconutgreen.FruitFactoryCoconutgreen(seed, scale=fruit_scale, **kwargs), None - elif leaf_type == 'durian': - return durian.FruitFactoryDurian(seed, scale=fruit_scale, **kwargs), None - elif leaf_type == 'starfruit': - return starfruit.FruitFactoryStarfruit(seed, scale=fruit_scale, **kwargs), None - elif leaf_type == 'strawberry': - return strawberry.FruitFactoryStrawberry(seed, scale=fruit_scale, **kwargs), None - elif leaf_type == 'compositional_fruit': - return compositional_fruit.FruitFactoryCompositional(seed, scale=fruit_scale, **kwargs), None - elif leaf_type == 'flower': - return tree_flower.TreeFlowerFactory(seed, rad=uniform(0.15, 0.25), **kwargs), None - elif leaf_type == 'cloud': - return CloudFactory(seed), None - else: - raise ValueError(f'Unrecognized {leaf_type=}') - -def make_leaf_collection(seed, - leaf_params, n_leaf, leaf_types, decimate_rate=0.0, - season=None): - - logger.debug(f'Starting make_leaf_collection({seed=}, {n_leaf=} ...)') - - if season is None: - season = random_season() - - weights = [] - - if not isinstance(leaf_types, list): - leaf_types = [leaf_types] - - child_factories = [] - for leaf_type in leaf_types: - if leaf_type is not None: - leaf_factory, _ = random_tree_child_factory(seed, leaf_params, leaf_type=leaf_type, season=season) - child_factories.append(leaf_factory) - weights.append(1.0) - - weights = np.array(weights) - weights /= np.sum(weights) # normalize to 1 - - col = make_asset_collection(child_factories, n_leaf, verbose=True, weights=weights) - # if leaf_surface is not None: - # leaf_surface.apply(list(col.objects)) - toggle_show(col) - for obj in col.objects: - if decimate_rate > 0: - butil.modify_mesh(obj, 'DECIMATE', ratio=1.0-decimate_rate, apply=True) - butil.apply_transform(obj, rot=True, scale=True) - butil.apply_modifiers(obj) - toggle_hide(col) - return col - -def random_leaf_collection(season, n=5): - (_, _, leaf_params), leaf_type = random_species(season=season) - return make_leaf_collection(np.random.randint(1e5), leaf_params, n_leaf=n, leaf_types=leaf_type or 'leaf_v2', decimate_rate=0.97) - -def make_twig_collection( - seed, - twig_params, leaf_params, - trunk_surface, - n_leaf, n_twig, - leaf_types, - season=None, - twig_valid_dist=6 -): - - logger.debug(f'Starting make_twig_collection({seed=}, {n_leaf=}, {n_twig=}...)') - - if season is None: - season = random_season() - - if leaf_types is not None: - child_col = make_leaf_collection(seed, leaf_params, n_leaf, leaf_types, season=season, decimate_rate=0.97) - else: - child_col = None - - twig_factory = GenericTreeFactory(seed, twig_params, child_col, trunk_surface=trunk_surface, realize=True) - col = make_asset_collection(twig_factory, n_twig, verbose=False, distance=twig_valid_dist) - - if child_col is not None: - child_col.hide_viewport = False - butil.delete(list(child_col.objects)) - return col - -def make_branch_collection(seed, twig_col, 
fruit_col, n_branch, coarse=False): - - logger.debug(f'Starting make_branch_collection({seed=}, ...)') - - branch_factory = branch.BranchFactory(seed, twig_col=twig_col, fruit_col=fruit_col, coarse=coarse) - col = make_asset_collection(branch_factory, n_branch, verbose=False) - - return col - -@gin.configurable -class TreeFactory(GenericTreeFactory): - - n_leaf = 5 - n_twig = 2 - - @staticmethod - def get_leaf_type(season): - # return np.random.choice(['leaf', 'leaf_v2', 'flower', 'berry', 'leaf_ginko'], p=[0, 0.70, 0.15, 0, 0.15]) - # return - # return 'leaf_maple' - leaf_type = np.random.choice(['leaf', 'leaf_v2', 'leaf_broadleaf', 'leaf_ginko', 'leaf_maple'], p=[0, 0.0, 0.70, 0.15, 0.15]) - flower_type = np.random.choice(['flower', 'berry', None], p=[1.0, 0.0, 0.0]) - if season == "spring": - return [flower_type] - else: - return [leaf_type] - # return [leaf_type, flower_type] - # return ['leaf_broadleaf', 'leaf_maple', 'leaf_ginko', 'flower'] - - @staticmethod - def get_fruit_type(): - # return np.random.choice(['leaf', 'leaf_v2', 'flower', 'berry', 'leaf_ginko'], p=[0, 0.70, 0.15, 0, 0.15]) - # return - # return 'leaf_maple' - fruit_type = np.random.choice(['apple', 'blackberry', 'coconutgreen', - 'durian', 'starfruit', 'strawberry', 'compositional_fruit'], - p=[0.2, 0.0, 0.2, 0.2, 0.2, 0.0, 0.2]) - - return fruit_type - - def __init__(self, seed, season=None, coarse=False, fruit_chance=1.0, **kwargs): - - with FixedSeed(seed): - if season is None: - season = np.random.choice(['summer', 'winter', 'autumn', 'spring']) - - with FixedSeed(seed): - (tree_params, twig_params, leaf_params), leaf_type = random_species(season) - - leaf_type = leaf_type or self.get_leaf_type(season) - if not isinstance(leaf_type, list): - leaf_type = [leaf_type] - - trunk_surface = surface.registry('bark') - - if uniform() < fruit_chance: - fruit_type = self.get_fruit_type() - else: - fruit_type = None - - super(TreeFactory, self).__init__(seed, tree_params, child_col=None, trunk_surface=trunk_surface, coarse=coarse, **kwargs) - - with FixedSeed(seed): - colname = f'assets:{self}.twigs' - use_cached = colname in bpy.data.collections - if use_cached == coarse: - logger.warning(f'In {self}, encountered {use_cached=} yet {coarse=}, unexpected since twigs are typically generated only in coarse') - - if colname not in bpy.data.collections: - twig_col = make_twig_collection(seed, twig_params, leaf_params, trunk_surface, self.n_leaf, self.n_twig, leaf_type, season=season) - if fruit_type is not None: - fruit_col = make_leaf_collection(seed, leaf_params, self.n_leaf, fruit_type, season=season, decimate_rate=0.0) - else: - fruit_col = butil.get_collection('Empty', reuse=True) - - self.child_col = make_branch_collection(seed, twig_col, fruit_col, n_branch=self.n_twig) - self.child_col.name = colname - - assert self.child_col.name == colname, f'Blender truncated {colname} to {self.child_col.name}' - else: - self.child_col = bpy.data.collections[colname] - -@gin.configurable -class BushFactory(GenericTreeFactory): - - n_leaf = 3 - n_twig = 3 - max_distance = 50 - - def __init__(self, seed, coarse=False, **kwargs): - - with FixedSeed(seed): - shrub_shape = np.random.randint(2) - trunk_surface = surface.registry('bark') - tree_params, twig_params, leaf_params = treeconfigs.shrub(shrub_shape=shrub_shape) - - super(BushFactory, self).__init__(seed, tree_params, child_col=None, trunk_surface=trunk_surface, coarse=coarse, **kwargs) - - with FixedSeed(seed): - - leaf_type = np.random.choice(['leaf', 'leaf_v2', 'flower', 
'berry'], p=[0.1, 0.4, 0.5, 0]) - - colname = f'assets:{self}.twigs' - use_cached = colname in bpy.data.collections - if use_cached == coarse: - logger.warning(f'In {self}, encountered {use_cached=} yet {coarse=}, unexpected since twigs are typically generated only in coarse') - - if colname not in bpy.data.collections: - self.child_col = make_twig_collection(seed, twig_params, leaf_params, trunk_surface, self.n_leaf, self.n_twig, leaf_type) - self.child_col.name = colname - assert self.child_col.name == colname, f'Blender truncated {colname} to {self.child_col.name}' - else: - self.child_col = bpy.data.collections[colname] \ No newline at end of file diff --git a/infinigen/assets/trees/tree_flower.py b/infinigen/assets/trees/tree_flower.py deleted file mode 100644 index aee0973ee..000000000 --- a/infinigen/assets/trees/tree_flower.py +++ /dev/null @@ -1,602 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: -# - Yiming Zuo - modifications -# - Alexander Raistrick - authored original flower.py - - -# Code generated using version v2.0.1 of the node_transpiler -import bpy -import mathutils -from numpy.random import uniform, normal -import numpy as np - -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core import surface - -from infinigen.core.placement.factory import AssetFactory -from infinigen.core.util import blender as butil, color -from infinigen.core.util.math import FixedSeed, dict_lerp -from infinigen.core.tagging import tag_object, tag_nodegroup - -@node_utils.to_nodegroup('nodegroup_polar_to_cart_old', singleton=True) -def nodegroup_polar_to_cart_old(nw): - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'Addend', (0.0, 0.0, 0.0)), - ('NodeSocketFloat', 'Value', 0.5), - ('NodeSocketVector', 'Vector', (0.0, 0.0, 0.0))]) - - cosine = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Value"]}, - attrs={'operation': 'COSINE'}) - - sine = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Value"]}, - attrs={'operation': 'SINE'}) - - combine_xyz_4 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'Y': cosine, 'Z': sine}) - - multiply_add = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["Vector"], 1: combine_xyz_4, 2: group_input.outputs["Addend"]}, - attrs={'operation': 'MULTIPLY_ADD'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Vector': multiply_add.outputs["Vector"]}) - -@node_utils.to_nodegroup('nodegroup_follow_curve', singleton=True) -def nodegroup_follow_curve(nw): - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketGeometry', 'Curve', None), - ('NodeSocketFloat', 'Curve Min', 0.5), - ('NodeSocketFloat', 'Curve Max', 1.0)]) - - position = nw.new_node(Nodes.InputPosition) - - capture_attribute = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 1: position}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': capture_attribute.outputs["Attribute"]}) - - attribute_statistic = nw.new_node(Nodes.AttributeStatistic, - input_kwargs={'Geometry': capture_attribute.outputs["Geometry"], 2: separate_xyz.outputs["Z"]}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': 
separate_xyz.outputs["Z"], 1: attribute_statistic.outputs["Min"], 2: attribute_statistic.outputs["Max"], 3: group_input.outputs["Curve Min"], 4: group_input.outputs["Curve Max"]}) - - curve_length = nw.new_node(Nodes.CurveLength, - input_kwargs={'Curve': group_input.outputs["Curve"]}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: map_range.outputs["Result"], 1: curve_length}, - attrs={'operation': 'MULTIPLY'}) - - sample_curve = nw.new_node(Nodes.SampleCurve, - input_kwargs={'Curve': group_input.outputs["Curve"], 'Length': multiply}, - attrs={'mode': 'LENGTH'}) - - cross_product = nw.new_node(Nodes.VectorMath, - input_kwargs={0: sample_curve.outputs["Tangent"], 1: sample_curve.outputs["Normal"]}, - attrs={'operation': 'CROSS_PRODUCT'}) - - scale = nw.new_node(Nodes.VectorMath, - input_kwargs={0: cross_product.outputs["Vector"], 'Scale': separate_xyz.outputs["X"]}, - attrs={'operation': 'SCALE'}) - - scale_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: sample_curve.outputs["Normal"], 'Scale': separate_xyz.outputs["Y"]}, - attrs={'operation': 'SCALE'}) - - add = nw.new_node(Nodes.VectorMath, - input_kwargs={0: scale.outputs["Vector"], 1: scale_1.outputs["Vector"]}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': capture_attribute.outputs["Geometry"], 'Position': sample_curve.outputs["Position"], 'Offset': add.outputs["Vector"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_position}) - -@node_utils.to_nodegroup('nodegroup_norm_index', singleton=True) -def nodegroup_norm_index(nw): - index = nw.new_node(Nodes.Index) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketInt', 'Count', 0)]) - - divide = nw.new_node(Nodes.Math, - input_kwargs={0: index, 1: group_input.outputs["Count"]}, - attrs={'operation': 'DIVIDE'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'T': divide}) - -@node_utils.to_nodegroup('nodegroup_flower_petal', singleton=True) -def nodegroup_flower_petal(nw): - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketFloat', 'Length', 0.2), - ('NodeSocketFloat', 'Point', 1.0), - ('NodeSocketFloat', 'Point height', 0.5), - ('NodeSocketFloat', 'Bevel', 6.8), - ('NodeSocketFloat', 'Base width', 0.2), - ('NodeSocketFloat', 'Upper width', 0.3), - ('NodeSocketInt', 'Resolution H', 8), - ('NodeSocketInt', 'Resolution V', 4), - ('NodeSocketFloat', 'Wrinkle', 0.1), - ('NodeSocketFloat', 'Curl', 0.0)]) - - multiply_add = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Resolution H"], 1: 2.0, 2: 1.0}, - attrs={'operation': 'MULTIPLY_ADD'}) - - grid = nw.new_node(Nodes.MeshGrid, - input_kwargs={'Vertices X': group_input.outputs["Resolution V"], 'Vertices Y': multiply_add}) - - position = nw.new_node(Nodes.InputPosition) - - capture_attribute = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': grid, 1: position}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': capture_attribute.outputs["Attribute"]}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz.outputs["X"], 1: 0.05}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': multiply, 'Y': separate_xyz.outputs["Y"]}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': combine_xyz, 'Scale': 7.9, 'Detail': 0.0, 'Distortion': 0.2}, - attrs={'noise_dimensions': '2D'}) 
- - add = nw.new_node(Nodes.Math, - input_kwargs={0: noise_texture.outputs["Fac"], 1: -0.5}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: add, 1: group_input.outputs["Wrinkle"]}, - attrs={'operation': 'MULTIPLY'}) - - separate_xyz_1 = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': capture_attribute.outputs["Attribute"]}) - - add_1 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_1.outputs["X"]}) - - absolute = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_1.outputs["Y"]}, - attrs={'operation': 'ABSOLUTE'}) - - multiply_2 = nw.new_node(Nodes.Math, - input_kwargs={0: absolute, 1: 2.0}, - attrs={'operation': 'MULTIPLY'}) - - power = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_2, 1: group_input.outputs["Bevel"]}, - attrs={'operation': 'POWER'}) - - multiply_add_1 = nw.new_node(Nodes.Math, - input_kwargs={0: power, 1: -1.0, 2: 1.0}, - attrs={'operation': 'MULTIPLY_ADD'}) - - multiply_3 = nw.new_node(Nodes.Math, - input_kwargs={0: add_1, 1: multiply_add_1}, - attrs={'operation': 'MULTIPLY'}) - - multiply_add_2 = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_3, 1: group_input.outputs["Upper width"], 2: group_input.outputs["Base width"]}, - attrs={'operation': 'MULTIPLY_ADD'}) - - multiply_4 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz_1.outputs["Y"], 1: multiply_add_2}, - attrs={'operation': 'MULTIPLY'}) - - power_1 = nw.new_node(Nodes.Math, - input_kwargs={0: absolute, 1: group_input.outputs["Point"]}, - attrs={'operation': 'POWER'}) - - multiply_add_3 = nw.new_node(Nodes.Math, - input_kwargs={0: power_1, 1: -1.0, 2: 1.0}, - attrs={'operation': 'MULTIPLY_ADD'}) - - multiply_5 = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_add_3, 1: group_input.outputs["Point height"]}, - attrs={'operation': 'MULTIPLY'}) - - multiply_add_4 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Point height"], 1: -1.0, 2: 1.0}, - attrs={'operation': 'MULTIPLY_ADD'}) - - add_2 = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_5, 1: multiply_add_4}) - - multiply_6 = nw.new_node(Nodes.Math, - input_kwargs={0: add_2, 1: multiply_add_1}, - attrs={'operation': 'MULTIPLY'}) - - multiply_7 = nw.new_node(Nodes.Math, - input_kwargs={0: add_1, 1: multiply_6}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': multiply_1, 'Y': multiply_4, 'Z': multiply_7}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': capture_attribute.outputs["Geometry"], 'Position': combine_xyz_1}) - - multiply_8 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Length"]}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'Y': multiply_8}) - - reroute = nw.new_node(Nodes.Reroute, - input_kwargs={'Input': group_input.outputs["Curl"]}) - - group_1 = nw.new_node(nodegroup_polar_to_cart_old().name, - input_kwargs={'Addend': combine_xyz_3, 'Value': reroute, 'Vector': multiply_8}) - - quadratic_bezier = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Resolution': 8, 'Start': (0.0, 0.0, 0.0), 'Middle': combine_xyz_3, 'End': group_1}) - - group = nw.new_node(nodegroup_follow_curve().name, - input_kwargs={'Geometry': set_position, 'Curve': quadratic_bezier, 'Curve Min': 0.0}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': tag_nodegroup(nw, group, 'petal')}) - -@node_utils.to_nodegroup('nodegroup_phyllo_points', singleton=True) -def nodegroup_phyllo_points(nw): - group_input = 
nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketInt', 'Count', 50), - ('NodeSocketFloat', 'Min Radius', 0.0), - ('NodeSocketFloat', 'Max Radius', 2.0), - ('NodeSocketFloat', 'Radius exp', 0.5), - ('NodeSocketFloat', 'Min angle', -0.5236), - ('NodeSocketFloat', 'Max angle', 0.7854), - ('NodeSocketFloat', 'Min z', 0.0), - ('NodeSocketFloat', 'Max z', 1.0), - ('NodeSocketFloat', 'Clamp z', 1.0), - ('NodeSocketFloat', 'Yaw offset', -1.5708)]) - - mesh_line = nw.new_node(Nodes.MeshLine, - input_kwargs={'Count': group_input.outputs["Count"]}) - - mesh_to_points = nw.new_node(Nodes.MeshToPoints, - input_kwargs={'Mesh': mesh_line}) - - position = nw.new_node(Nodes.InputPosition) - - capture_attribute = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': mesh_to_points, 1: position}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - index = nw.new_node(Nodes.Index) - - cosine = nw.new_node(Nodes.Math, - input_kwargs={0: index}, - attrs={'operation': 'COSINE'}) - - sine = nw.new_node(Nodes.Math, - input_kwargs={0: index}, - attrs={'operation': 'SINE'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': cosine, 'Y': sine}) - - divide = nw.new_node(Nodes.Math, - input_kwargs={0: index, 1: group_input.outputs["Count"]}, - attrs={'operation': 'DIVIDE'}) - - power = nw.new_node(Nodes.Math, - input_kwargs={0: divide, 1: group_input.outputs["Radius exp"]}, - attrs={'operation': 'POWER'}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': power, 3: group_input.outputs["Min Radius"], 4: group_input.outputs["Max Radius"]}) - - multiply = nw.new_node(Nodes.VectorMath, - input_kwargs={0: combine_xyz, 1: map_range.outputs["Result"]}, - attrs={'operation': 'MULTIPLY'}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': multiply.outputs["Vector"]}) - - map_range_2 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': divide, 2: group_input.outputs["Clamp z"], 3: group_input.outputs["Min z"], 4: group_input.outputs["Max z"]}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': separate_xyz.outputs["X"], 'Y': separate_xyz.outputs["Y"], 'Z': map_range_2.outputs["Result"]}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': capture_attribute.outputs["Geometry"], 'Position': combine_xyz_1}) - - map_range_3 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': divide, 3: group_input.outputs["Min angle"], 4: group_input.outputs["Max angle"]}) - - random_value = nw.new_node(Nodes.RandomValue, - input_kwargs={2: -0.1, 3: 0.1}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: index, 1: group_input.outputs["Yaw offset"]}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': map_range_3.outputs["Result"], 'Y': random_value.outputs[1], 'Z': add}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Points': set_position, 'Rotation': combine_xyz_2}) - -@node_utils.to_nodegroup('nodegroup_plant_seed', singleton=True) -def nodegroup_plant_seed(nw): - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'Dimensions', (0.0, 0.0, 0.0)), - ('NodeSocketIntUnsigned', 'U', 4), - ('NodeSocketInt', 'V', 8)]) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': group_input.outputs["Dimensions"]}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': separate_xyz.outputs["X"]}) - - multiply_add = nw.new_node(Nodes.VectorMath, - input_kwargs={0: combine_xyz, 1: (0.5, 0.5, 0.5)}, - attrs={'operation': 'MULTIPLY_ADD'}) - 
- quadratic_bezier_1 = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Resolution': group_input.outputs["U"], 'Start': (0.0, 0.0, 0.0), 'Middle': multiply_add.outputs["Vector"], 'End': combine_xyz}) - - group = nw.new_node(nodegroup_norm_index().name, - input_kwargs={'Count': group_input.outputs["U"]}) - - float_curve = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': group}) - node_utils.assign_curve(float_curve.mapping.curves[0], [(0.0, 0.0), (0.3159, 0.4469), (1.0, 0.0156)]) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': float_curve, 4: 3.0}) - - set_curve_radius = nw.new_node(Nodes.SetCurveRadius, - input_kwargs={'Curve': quadratic_bezier_1, 'Radius': map_range.outputs["Result"]}) - - curve_circle = nw.new_node(Nodes.CurveCircle, - input_kwargs={'Resolution': group_input.outputs["V"], 'Radius': separate_xyz.outputs["Y"]}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': set_curve_radius, 'Profile Curve': curve_circle.outputs["Curve"], 'Fill Caps': True}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Mesh': tag_nodegroup(nw, curve_to_mesh, 'seed')}) - -def shader_flower_center(nw): - ambient_occlusion = nw.new_node(Nodes.AmbientOcclusion) - - colorramp = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': ambient_occlusion.outputs["Color"]}) - colorramp.color_ramp.elements.new(1) - colorramp.color_ramp.elements[0].position = 0.4841 - colorramp.color_ramp.elements[0].color = (0.0127, 0.0075, 0.0026, 1.0) - colorramp.color_ramp.elements[1].position = 0.8591 - colorramp.color_ramp.elements[1].color = (0.0848, 0.0066, 0.0007, 1.0) - colorramp.color_ramp.elements[2].position = 1.0 - colorramp.color_ramp.elements[2].color = (1.0, 0.6228, 0.1069, 1.0) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': colorramp.outputs["Color"]}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': principled_bsdf}) - -def shader_petal(nw, petal_color_name): - - translucent_color_change = uniform(0.1, 0.6) - specular = normal(0.6, 0.1) - roughness = normal(0.4, 0.05) - translucent_amt = normal(0.3, 0.05) - - petal_color = nw.new_node(Nodes.RGB) - petal_color.outputs[0].default_value = color.color_category(petal_color_name) - - translucent_color = nw.new_node(Nodes.MixRGB, [translucent_color_change, petal_color, color.color_category(petal_color_name)]) - - translucent_bsdf = nw.new_node(Nodes.TranslucentBSDF, - input_kwargs={'Color': translucent_color}) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': petal_color, 'Specular': specular, 'Roughness': roughness }) - - mix_shader = nw.new_node(Nodes.MixShader, - input_kwargs={'Fac': translucent_amt, 1: principled_bsdf, 2: translucent_bsdf}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': mix_shader}) - -def geo_flower(nw, petal_material, center_material): - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketFloat', 'Center Rad', 0.0), - ('NodeSocketVector', 'Petal Dims', (0.0, 0.0, 0.0)), - ('NodeSocketFloat', 'Seed Size', 0.0), - ('NodeSocketFloat', 'Min Petal Angle', 0.1), - ('NodeSocketFloat', 'Max Petal Angle', 1.36), - ('NodeSocketFloat', 'Wrinkle', 0.01), - ('NodeSocketFloat', 'Curl', 13.89)]) - - uv_sphere = nw.new_node(Nodes.MeshUVSphere, - input_kwargs={'Segments': 8, 'Rings': 8, 'Radius': group_input.outputs["Center Rad"]}) - - transform = nw.new_node(Nodes.Transform, - 
input_kwargs={'Geometry': uv_sphere, 'Scale': (1.0, 1.0, 0.05)}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Seed Size"], 1: 1.5}, - attrs={'operation': 'MULTIPLY'}) - - distribute_points_on_faces = nw.new_node(Nodes.DistributePointsOnFaces, - input_kwargs={'Mesh': transform, 'Distance Min': multiply, 'Density Max': 50000.0}, - attrs={'distribute_method': 'POISSON'}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Seed Size"], 1: 10.0}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': multiply_1, 'Y': group_input.outputs["Seed Size"]}) - - group_3 = nw.new_node(nodegroup_plant_seed().name, - input_kwargs={'Dimensions': combine_xyz, 'U': 6, 'V': 6}) - - musgrave_texture = nw.new_node(Nodes.MusgraveTexture, - input_kwargs={'W': 13.8, 'Scale': 2.41}, - attrs={'musgrave_dimensions': '4D'}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': musgrave_texture, 3: 0.34, 4: 1.21}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': map_range.outputs["Result"], 'Y': 1.0, 'Z': 1.0}) - - instance_on_points_1 = nw.new_node(Nodes.InstanceOnPoints, - input_kwargs={'Points': distribute_points_on_faces.outputs["Points"], 'Instance': group_3, 'Rotation': (0.0, -1.5708, 0.0541), 'Scale': combine_xyz_1}) - - realize_instances = nw.new_node(Nodes.RealizeInstances, - input_kwargs={'Geometry': instance_on_points_1}) - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [realize_instances, transform]}) - - set_material_1 = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': join_geometry_1, 'Material': center_material}) - - multiply_2 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Center Rad"], 1: 6.2832}, - attrs={'operation': 'MULTIPLY'}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': group_input.outputs["Petal Dims"]}) - - divide = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_2, 1: separate_xyz.outputs["Y"]}, - attrs={'operation': 'DIVIDE'}) - - multiply_3 = nw.new_node(Nodes.Math, - input_kwargs={0: divide, 1: 1.2}, - attrs={'operation': 'MULTIPLY'}) - - reroute_3 = nw.new_node(Nodes.Reroute, - input_kwargs={'Input': group_input.outputs["Center Rad"]}) - - reroute_1 = nw.new_node(Nodes.Reroute, - input_kwargs={'Input': group_input.outputs["Min Petal Angle"]}) - - reroute = nw.new_node(Nodes.Reroute, - input_kwargs={'Input': group_input.outputs["Max Petal Angle"]}) - - group_1 = nw.new_node(nodegroup_phyllo_points().name, - input_kwargs={'Count': multiply_3, 'Min Radius': reroute_3, 'Max Radius': reroute_3, 'Radius exp': 0.0, 'Min angle': reroute_1, 'Max angle': reroute, 'Max z': 0.0}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz.outputs["Z"], 1: separate_xyz.outputs["Y"]}, - attrs={'operation': 'SUBTRACT', 'use_clamp': True}) - - reroute_2 = nw.new_node(Nodes.Reroute, - input_kwargs={'Input': group_input.outputs["Wrinkle"]}) - - reroute_4 = nw.new_node(Nodes.Reroute, - input_kwargs={'Input': group_input.outputs["Curl"]}) - - group = nw.new_node(nodegroup_flower_petal().name, - input_kwargs={'Length': separate_xyz.outputs["X"], 'Point': 0.56, 'Point height': -0.1, 'Bevel': 1.83, 'Base width': separate_xyz.outputs["Y"], 'Upper width': subtract, - 'Resolution H': 8, 'Resolution V': 16, 'Wrinkle': reroute_2, 'Curl': reroute_4}) - - instance_on_points = nw.new_node(Nodes.InstanceOnPoints, - input_kwargs={'Points': group_1.outputs["Points"], 
'Instance': group, 'Rotation': group_1.outputs["Rotation"]}) - - realize_instances_1 = nw.new_node(Nodes.RealizeInstances, - input_kwargs={'Geometry': instance_on_points}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Scale': 3.73, 'Detail': 5.41, 'Distortion': -1.0}) - - subtract_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: noise_texture.outputs["Color"], 1: (0.5, 0.5, 0.5)}, - attrs={'operation': 'SUBTRACT'}) - - value = nw.new_node(Nodes.Value) - value.outputs[0].default_value = 0.025 - - multiply_4 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: subtract_1.outputs["Vector"], 1: value}, - attrs={'operation': 'MULTIPLY'}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': realize_instances_1, 'Offset': multiply_4.outputs["Vector"]}) - - set_material = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': set_position, 'Material': petal_material}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [set_material_1, set_material]}) - - set_shade_smooth = nw.new_node(Nodes.SetShadeSmooth, - input_kwargs={'Geometry': join_geometry, 'Shade Smooth': False}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_shade_smooth}) - -class TreeFlowerFactory(AssetFactory): - - def __init__(self, factory_seed, rad=uniform(0.15, 0.25), diversity_fac=0.25): - super(TreeFlowerFactory, self).__init__(factory_seed=factory_seed) - - self.rad = rad - self.diversity_fac = diversity_fac - - self.petal_color = np.random.choice(['pink', 'white', 'red', 'yellowish'], p=[0.4, 0.2, 0.2, 0.2]) - - with FixedSeed(factory_seed): - self.petal_material = surface.shaderfunc_to_material(shader_petal, self.petal_color) - self.center_material = surface.shaderfunc_to_material(shader_flower_center) - self.species_params = self.get_flower_params(self.rad) - - @staticmethod - def get_flower_params(overall_rad=0.05): - pct_inner = uniform(0.05, 0.4) - base_width = 2 * np.pi * overall_rad * pct_inner / normal(20, 5) - top_width = overall_rad * np.clip(normal(0.7, 0.3), base_width * 1.2, 100) - - min_angle, max_angle = np.deg2rad(np.sort(uniform(-20, 100, 2))) - - return { - 'Center Rad': overall_rad * pct_inner, - 'Petal Dims': np.array([overall_rad * (1 - pct_inner), base_width, top_width], dtype=np.float32), - 'Seed Size': uniform(0.005, 0.01), - 'Min Petal Angle': min_angle, - 'Max Petal Angle': max_angle, - 'Wrinkle': uniform(0.003, 0.02), - 'Curl': np.deg2rad(normal(30, 50)) - } - - def create_asset(self, **kwargs) -> bpy.types.Object: - - vert = butil.spawn_vert('flower') - mod = surface.add_geomod(vert, geo_flower, - input_kwargs={'petal_material': self.petal_material, 'center_material': self.center_material}) - - inst_params = self.get_flower_params(self.rad * normal(1, 0.05)) - params = dict_lerp(self.species_params, inst_params, 0.25) - surface.set_geomod_inputs(mod, params) - - butil.apply_modifiers(vert, mod) - - vert.rotation_euler.z = uniform(0, 360) - tag_object(vert, 'flower') - return vert \ No newline at end of file diff --git a/infinigen/assets/trees/treeconfigs.py b/infinigen/assets/trees/treeconfigs.py deleted file mode 100644 index c464ae53e..000000000 --- a/infinigen/assets/trees/treeconfigs.py +++ /dev/null @@ -1,776 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
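(Editor's note on the deletion below: `treeconfigs.py` encoded tree, shrub, and coral shapes as nested dictionaries in which each level supplies `n` (how many child paths to spawn), `path_kargs` and `spawn_kargs` callables keyed by child index, and an optional `children` list of further configs of the same shape. The following is a minimal sketch of that recursive schema only; the numeric values and the `count_paths` helper are hypothetical, added purely to illustrate how the nesting composes, and are not taken from the removed file.)

```python
import numpy as np

# Toy two-level config in the same shape as the deleted treeconfigs.py entries.
# All numbers are illustrative, not values from the removed module.
leaf_level = {
    "n": 3,                      # spawn three child paths
    "symmetry": True,
    "path_kargs": lambda idx: {"n_pts": 4, "std": 1.0, "momentum": 1.0, "sz": 0.4},
    "spawn_kargs": lambda idx: {"rng": [0.2, 0.9], "ang_min": np.pi / 4, "ang_max": np.pi / 3},
}
trunk_level = {
    "n": 1,
    "path_kargs": lambda idx: {"n_pts": 7, "sz": 0.5, "std": 0.5, "momentum": 0.7},
    "spawn_kargs": lambda idx: {"init_vec": [0, 1, 0]},
    "children": [leaf_level],    # recursion: children are configs of the same shape
}

def count_paths(cfg):
    """Hypothetical helper: walk a nested config and count the paths it would spawn."""
    total = cfg["n"]
    for child in cfg.get("children", []):
        total += cfg["n"] * count_paths(child)
    return total

print(count_paths(trunk_level))  # 1 trunk + 1 * 3 children -> 4
```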
- -# Authors: Alejandro Newell - - -from logging import root -import numpy as np - -import bpy -from .utils import mesh, helper -from .tree import TreeParams - -subsubtwig_config = {'n': 2, 'symmetry': True, - 'path_kargs': lambda idx: {'n_pts': 3, 'std': 1, 'momentum': 1, 'sz': .4}, - 'spawn_kargs': lambda idx: {'rng': [.2, .9], 'z_bias': .2, 'rnd_idx': 2*idx+2, - 'ang_min': np.pi/4, 'ang_max': np.pi/4 + np.pi/16, 'axis2': [0, 0, 1]}} -subtwig_config = {'n': 3, 'symmetry': True, - 'path_kargs': lambda idx: {'n_pts': 6, 'std': 1, 'momentum': 1, 'sz': .6 - .1 * idx}, - 'spawn_kargs': lambda idx: {'rng': [.2, .9], 'z_bias': .1, 'rnd_idx': 2*idx+1, - 'ang_min': np.pi/4, 'ang_max': np.pi/4 + np.pi/16, 'axis2': [0, 0, 1]}, - 'children': [subsubtwig_config]} -twig_config = {'n': 1, 'decay': .8, 'valid_leaves': [-2, -1], - 'path_kargs': lambda idx: {'n_pts': 7, 'sz': .5, 'std': .5, 'momentum': .7}, - 'spawn_kargs': lambda idx: {'init_vec': [0, 1, 0]}, - 'children': [subtwig_config]} - - -def random_pine_rot(): - theta = np.random.uniform(2*np.pi) - return [np.sin(theta), 0.0, np.cos(theta)] - - -subsubtwig_config = {'n': 20, 'symmetry': False, - 'path_kargs': lambda idx: {'n_pts': 2, 'std': 1, 'momentum': 1, 'sz': .2}, - 'spawn_kargs': lambda idx: {'rng': [.2, .9], 'z_bias': .2, - 'ang_min': np.pi/4, 'ang_max': np.pi/4 + np.pi/16, 'axis2': random_pine_rot}} -subtwig_config = {'n': 7, 'symmetry': False, - 'path_kargs': lambda idx: {'n_pts': 10, 'std': .3, 'momentum': 1, 'sz': .2 - .01 * idx}, - 'spawn_kargs': lambda idx: {'rng': [.2, .9], 'z_bias': .1, - 'ang_min': np.pi/8, 'ang_max': np.pi/8 + np.pi/16, 'axis2': random_pine_rot}, - 'children': [subsubtwig_config]} -pinetwig_config = {'n': 1, - 'path_kargs': lambda idx: {'n_pts': 7, 'sz': .5, 'std': .2, 'momentum': .7}, - 'spawn_kargs': lambda idx: {'init_vec': [0, 1, 0]}, - 'children': [subtwig_config]} - - -subsubsubtwig_config = {'n': 1, 'symmetry': True, - 'path_kargs': lambda idx: {'n_pts': 2, 'std': 1, 'momentum': 1, 'sz': .4}, - 'spawn_kargs': lambda idx: {'rng': [.2, .9], 'z_bias': .2, 'rnd_idx': idx+1, - 'ang_min': np.pi/8, 'ang_max': np.pi/8 + np.pi/32, 'axis2': [0, 0, 1]}} -subsubtwig_config = {'n': 3, 'symmetry': False, - 'path_kargs': lambda idx: {'n_pts': 3, 'std': 1, 'momentum': 1, 'sz': .6 - .1 * idx}, - 'spawn_kargs': lambda idx: {'rng': [0.1, 1.0], 'z_bias': .1, - 'ang_min': np.pi/4, 'ang_max': np.pi/4 + np.pi/16, 'axis2': [0, 0, 1]}, - 'children': [subsubsubtwig_config]} -subtwig_config = {'n': 8, 'symmetry': False, - 'path_kargs': lambda idx: {'n_pts': 7, 'std': 1, 'momentum': 1, 'sz': .6 - .1 * idx}, - 'spawn_kargs': lambda idx: {'rng': [0.2, 1.0], 'z_bias': .1, - 'ang_min': np.pi/4, 'ang_max': np.pi/4 + np.pi/16, 'axis2': [0, 0, 1]}, - 'children': [subsubtwig_config]} -bambootwig_config = {'n': 1, 'decay': .8, 'valid_leaves': [-2, -1], - 'path_kargs': lambda idx: {'n_pts': 15, 'sz': 1.0, 'std': .05, 'momentum': .7, 'pull_dir': [0, 0, -0.3], 'pull_factor': 0.5, 'pull_init': 0.0}, - 'spawn_kargs': lambda idx: {'init_vec': [0, 1, 0]}, - 'children': [subtwig_config]} - - -subtwig_config = {'n': 37, 'symmetry': True, - 'path_kargs': lambda idx: {'n_pts': 2, 'std': 1, 'momentum': 1, 'sz': .4}, - 'spawn_kargs': lambda idx: {'rng': [.2, .9], 'z_bias': .2, 'rnd_idx': idx+2, - 'ang_min': 0.3*np.pi, 'ang_max': 0.3*np.pi + np.pi/16, 'axis2': [0, 0, 1]}} -palmtwig_config = {'n': 1, 'decay': .8, 'valid_leaves': [-2, -1], - 'path_kargs': lambda idx: {'n_pts': 40, 'sz': .5, 'std': .05, 'momentum': .7, 'pull_dir': [0, 0, -0.3], 'pull_factor': 
0.5, 'pull_init': 0.0}, - 'spawn_kargs': lambda idx: {'init_vec': [0, 1, 0]}, - 'children': [subtwig_config]} - - -subtwig_config = {'n': 3, 'symmetry': True, - 'path_kargs': lambda idx: {'n_pts': 3, 'std': 1, 'momentum': 1, 'sz': .6 - .1 * idx}, - 'spawn_kargs': lambda idx: {'rng': [.2, .9], 'z_bias': .1, 'rnd_idx': 2*idx+1, - 'ang_min': np.pi/4, 'ang_max': np.pi/4 + np.pi/16, 'axis2': [0, 0, 1]}, - 'children': []} -shrubtwig_config = {'n': 1, - 'path_kargs': lambda idx: {'n_pts': 6, 'sz': .5, 'std': .5, 'momentum': .7}, - 'spawn_kargs': lambda idx: {'init_vec': [0, 1, 0]}, - 'children': [subtwig_config]} - - -def generate_twig_config(): - n_twig_pts = np.random.randint(10) + 5 - twig_len = np.random.uniform(3, 4) - twig_sz = twig_len / n_twig_pts - avail_idxs = np.arange(n_twig_pts) - start_idx = 1 + int(n_twig_pts * np.random.uniform(0, .3)) - sample_density = np.random.choice( - np.arange(np.ceil(np.sqrt(n_twig_pts)), dtype=int) + 1) - avail_sub_idxs = avail_idxs[start_idx::sample_density] - - init_z = np.random.uniform(0, .3) - z_rnd_factor = np.random.uniform(0.01, .05) - - skip_subtwig = np.random.rand() < .3 - subsub_sz = np.random.uniform(.02, .1) - subtwig_momentum = np.random.uniform(0, 1) - subtwig_std = np.random.rand() ** 2 - sz_decay = np.random.uniform(.9, 1) - pull_factor = np.random.uniform(0, .3) - - if not skip_subtwig: - n_sub_pts = np.random.randint(10) + 5 - sub_sz = np.random.uniform(1, twig_len-.5) / n_sub_pts - idx_decay = (sub_sz * (np.random.rand() * .8 + .1)) / n_sub_pts - avail_idxs = np.arange(n_sub_pts) - start_idx = int(n_sub_pts * np.random.rand() * .5) + 1 - sample_density = np.random.choice([1, 2, 3]) - avail_idxs = avail_idxs[start_idx::sample_density] - - ang_offset = np.random.rand() * np.pi / 3 - ang_range = np.random.rand() * ang_offset - - subsubtwig_config = {'n': len(avail_idxs), 'symmetry': True, - 'path_kargs': lambda idx: {'n_pts': 3, 'std': 1, 'momentum': 1, 'sz': subsub_sz, - 'pull_dir': [0, 0, init_z + np.random.randn() * z_rnd_factor], - 'pull_factor': pull_factor}, - 'spawn_kargs': lambda idx: {'rnd_idx': avail_idxs[idx], - 'ang_min': np.pi/4, 'ang_max': np.pi/4 + np.pi/16, 'axis2': [0, 0, 1]}} - subtwig_config = {'n': len(avail_sub_idxs), 'symmetry': True, - 'path_kargs': lambda idx: {'n_pts': n_sub_pts, - 'std': subtwig_std, 'momentum': subtwig_momentum, - 'sz': sub_sz - idx_decay * idx, 'sz_decay': sz_decay, - 'pull_dir': [0, 0, init_z + np.random.randn() * z_rnd_factor], - 'pull_factor': pull_factor}, - 'spawn_kargs': lambda idx: {'rng': [.2, .9], 'rnd_idx': avail_sub_idxs[idx], - 'ang_min': ang_offset, 'ang_max': ang_offset + ang_range, 'axis2': [0, 0, 1]}, - 'children': [subsubtwig_config] - } - - else: - subtwig_config = {'n': len(avail_sub_idxs), 'symmetry': True, - 'path_kargs': lambda idx: {'n_pts': 3, 'std': 1, 'momentum': 1, 'sz': subsub_sz, - 'pull_dir': [0, 0, init_z + np.random.randn() * z_rnd_factor], - 'pull_factor': pull_factor}, - 'spawn_kargs': lambda idx: {'rnd_idx': avail_sub_idxs[idx], - 'ang_min': np.pi/4, 'ang_max': np.pi/4 + np.pi/16, 'axis2': [0, 0, 1]}} - - twig_config = {'n': 1, - 'path_kargs': lambda idx: {'n_pts': n_twig_pts, 'sz': twig_sz, 'std': .5, 'momentum': .5, - 'pull_dir': [0, 0, init_z + np.random.randn() * z_rnd_factor], - 'pull_factor': pull_factor}, - 'spawn_kargs': lambda idx: {'init_vec': [0, 1, -init_z]}, - 'children': [subtwig_config]} - - return twig_config - - -def basic_tree(init_pos=np.array([[0, 0, 0]])): - def init_att_fn(nodes): - pt_offset = init_pos[0] + np.array([0, 0, 11]) - 
branch_pts = mesh.get_pts_from_shape(bpy.ops.mesh.primitive_cube_add, n=500, - scaling=[7, 7, 7], pt_offset=pt_offset) - return branch_pts - - def root_att_fn(nodes): - # Pass this into root_kargs to initialize a root system - pt_offset = init_pos[0] + np.array([0, 0, -3.5]) - branch_pts = mesh.get_pts_from_shape(bpy.ops.mesh.primitive_cube_add, n=500, - scaling=[5, 5, 4], pt_offset=pt_offset) - return branch_pts - - branch_config = {'n': 5, 'spawn_kargs': lambda idx: {'rng': [.5, .8]}, - 'path_kargs': lambda idx: {'n_pts': 5, 'sz': .4, 'std': 1.4, 'momentum': .4}, - 'children': []} - tree_config = {'n': 4, - 'path_kargs': lambda idx: ({'n_pts': 15, 'sz': .8, 'std': 1, 'momentum': .7} - if idx > 0 else - {'n_pts': 15, 'sz': 1, 'std': .1, 'momentum': .7}), - 'spawn_kargs': lambda idx: {'init_vec': [0, 0, 1]}, - 'children': [branch_config]} - - twig_kargs = {'config': shrubtwig_config, - 'radii_kargs': {'Max radius': .1}, - 'leaf_kargs': {'Density': 1, 'Min scale': .4, 'Max scale': .6, 'Multi inst': 2} - } - tree_kargs = {'config': tree_config, 'init_pos': init_pos, - 'radii_kargs': {'Min radius': .04, 'Exponent': 2}, - 'leaf_kargs': {'Density': 1, 'Min scale': .35, 'Max scale': .45}, - 'space_kargs': {'atts': init_att_fn, 'D': .3, 's': .4, 'd': 10, - 'pull_dir': [0, 0, .5], 'n_steps': 20}, - 'root_kargs': None #{'atts': None, 'D': .2, 's': .3, 'd': 2, - #'dir_rand': .3, 'mag_rand': .2, - #'pull_dir': None, 'n_steps': 30}, - } - - return tree_kargs, twig_kargs - - -def palm_tree(init_pos=np.array([[0, 0, 0]])): - def tmp_att_fn(nodes): - # pt_offset = init_pos[0] + np.array([0,0,20]) - pt_offset = nodes[-1] - branch_pts = mesh.get_pts_from_shape(bpy.ops.mesh.primitive_cube_add, n=500, - scaling=[1, 1, 1], pt_offset=pt_offset) - return branch_pts - - # select a random horizontal angle - pull_angle = np.random.uniform(0.0, 2*np.pi) - - tree_config = {'n': 1, - 'path_kargs': lambda idx: {'n_pts': 20, 'sz': .8, 'std': 0.1, 'momentum': 0.95, 'pull_dir': [np.cos(pull_angle), np.sin(pull_angle), 0.0], 'pull_factor': np.random.uniform(0., 1.5), 'pull_init': 0.0}, - 'spawn_kargs': lambda idx: {'init_vec': [0, 0, 1]}, - 'children': []} - - leaf_kargs = {'leaf_width': .1, 'alpha': 0.3, 'use_wave': False} - twig_kargs = {'config': palmtwig_config, - 'radii_kargs': {'max_radius': .1, 'merge_size': .2}, - 'leaf_kargs': {'max_density': 20, 'scale': 2.0, 'rot_x': (-0.5, -0.4), 'rot_z': (-0.1, 0.1)} - } - tree_kargs = {'config': tree_config, 'D_': .3, 's': .4, 'd': 10, 'init_pos': init_pos, - 'pull_dir': [0, 0, .5], 'n_updates': 20, 'init_att_fn': tmp_att_fn, - 'radii_kargs': {'max_radius': 0.7, 'merge_size': .3, 'min_radius': 0.1, 'growth_amt': 1.01}, - 'leaf_kargs': {'max_density': 20, 'scale': .3, 'rot_x': (-1.0, 1.0), 'rot_z': (-0.1, 0.1)} - } - - return tree_kargs, twig_kargs, leaf_kargs - - -def baobab_tree(init_pos=np.array([[0, 0, 0]])): - def tmp_att_fn(nodes): - # pt_offset = init_pos[0] + np.array([0,0,20]) - pt_offset = nodes[-1] - branch_pts = mesh.get_pts_from_shape(bpy.ops.mesh.primitive_cube_add, n=50, - scaling=[7, 7, 1], pt_offset=pt_offset) - return branch_pts - - # select a random horizontal angle - pull_angle = np.random.uniform(0.0, 2*np.pi) - - tree_config = {'n': 1, - 'path_kargs': lambda idx: {'n_pts': 20, 'sz': .8, 'std': 0.1, 'momentum': 0.95}, - 'spawn_kargs': lambda idx: {'init_vec': [0, 0, 1]}, - 'children': []} - - leaf_kargs = {'leaf_width': .5, 'alpha': 0.3, 'use_wave': False} - twig_kargs = {'config': shrubtwig_config, - 'radii_kargs': {'max_radius': .1, 'merge_size': .2}, - 
'leaf_kargs': {'max_density': 20, 'scale': 0.5, 'rot_x': (-0.5, -0.4), 'rot_z': (-0.1, 0.1)} - } - tree_kargs = {'config': tree_config, 'D_': .5, 's': .6, 'd': 10, 'init_pos': init_pos, - 'pull_dir': [0, 0, .5], 'n_updates': 20, 'init_att_fn': tmp_att_fn, - 'radii_kargs': {'max_radius': 2.0, 'merge_size': .3, 'min_radius': 0.1, 'growth_amt': 1.10}, - 'leaf_kargs': {'max_density': 30, 'scale': 0.7, 'rot_x': (0, 1.0), 'rot_z': (-1.0, 1.0)} - } - - return tree_kargs, twig_kargs, leaf_kargs - - -def bamboo_tree(init_pos=np.array([[0, 0, 0]])): - height = np.random.randint(25, 35) - - def tmp_att_fn(nodes): - # pt_offset = init_pos[0] + np.array([0,0,20]) - pt_offset = nodes[-1] - branch_pts = mesh.get_pts_from_shape(bpy.ops.mesh.primitive_cube_add, n=50, - scaling=[0.5, 0.5, 4]) - # rotate the points - rot_axis = (nodes[-1] - nodes[-2]) / \ - np.linalg.norm((nodes[-1] - nodes[-2])) - rot_axis = (rot_axis + np.array([0, 0, 1])) / 2. - - branch_pts = np.array([helper.rodrigues_rot( - pts, rot_axis, np.pi) for pts in branch_pts]) - - branch_pts += pt_offset - - return branch_pts - - # select a random horizontal angle - pull_angle = np.random.uniform(0.0, 2*np.pi) - - tree_config = { - 'n': 1, - 'path_kargs': lambda idx: { - 'n_pts': height, 'sz': .8, 'std': 0.1, 'momentum': 0.95, - 'pull_dir': [np.cos(pull_angle), np.sin(pull_angle), 0.0], - 'pull_factor': np.random.uniform(0.1, 0.6), 'pull_init': 0.0}, - 'spawn_kargs': lambda idx: {'init_vec': [0, 0, 1]}, - 'children': [] - } - - leaf_kargs = {'leaf_width': .1, 'alpha': 0.3, 'use_wave': False} - twig_kargs = {'config': bambootwig_config, - 'radii_kargs': {'max_radius': .1, 'merge_size': .2}, - 'leaf_kargs': {'max_density': 20, 'scale': 1.5, 'rot_x': (-0.5, -0.4), 'rot_z': (-0.1, 0.1)} - } - tree_kargs = {'config': tree_config, - 'D_': .3, 's': .4, 'd': 10, 'init_pos': init_pos, - 'pull_dir': [0, 0, .5], 'n_updates': 20, 'init_att_fn': tmp_att_fn, - 'radii_kargs': {'max_radius': 0.3, 'merge_size': .1, 'min_radius': 0.2, 'growth_amt': 1.01}, - 'leaf_kargs': {'max_density': 20, 'scale': .3, 'rot_x': (-1.0, 1.0), 'rot_z': (-0.1, 0.1)} - } - - return tree_kargs, twig_kargs, leaf_kargs - - -def shrub(init_pos=np.array([[0, 0, 0]]), shrub_shape=0): - scale = 0.2 - - - - def att_fn_ball(nodes): - pt_offset = init_pos[0] + np.array([0, 0, 7*scale]) - branch_pts = mesh.get_pts_from_shape(bpy.ops.mesh.primitive_uv_sphere_add, n=2000, - scaling=[7*scale, 7*scale, 7*scale], pt_offset=pt_offset) - return branch_pts - - def att_fn_cone(nodes): - pt_offset = init_pos[0] + np.array([0, 0, 9*scale]) - branch_pts = mesh.get_pts_from_shape(bpy.ops.mesh.primitive_cone_add, n=2000, - scaling=[5*scale, 5*scale, 10*scale], pt_offset=pt_offset) - return branch_pts - - def att_fn_cube(nodes): - pt_offset = init_pos[0] + np.array([0, 0, 9*scale]) - branch_pts = mesh.get_pts_from_shape(bpy.ops.mesh.primitive_cube_add, n=2000, - scaling=[4*scale, 4*scale, 7*scale], pt_offset=pt_offset) - return branch_pts - - if shrub_shape == 0: - tmp_att_fn = att_fn_ball - elif shrub_shape == 1: - tmp_att_fn = att_fn_cone - elif shrub_shape == 2: - tmp_att_fn = att_fn_cube - else: - raise NotImplementedError - - leaf_kargs = {'leaf_width': np.random.rand() * .5 + .1, - 'alpha': np.random.rand() * .3} - branch_config = {'n': 5, 'spawn_kargs': lambda idx: {'rng': [.5, .8]}, - 'path_kargs': lambda idx: {'n_pts': 5, 'sz': .4, 'std': 1.4, 'momentum': .4}, - 'children': []} - tree_config = {'n': 1, - 'path_kargs': lambda idx: ({'n_pts': 3, 'sz': .8, 'std': 1, 'momentum': .7} - if idx > 0 else - 
{'n_pts': 3, 'sz': 1, 'std': .1, 'momentum': .7}), - 'spawn_kargs': lambda idx: {'init_vec': [0, 0, 1]}, - 'children': [branch_config]} - - twig_kargs = TreeParams( - skeleton=shrubtwig_config, - trunk_spacecol=None, - roots_spacecol=None, - child_placement={'Density': 1, 'Min scale': .4, 'Max scale': .6, 'Multi inst': 2}, - skinning={'Max radius': .1} - ) - - tree_kargs = TreeParams( - skeleton=tree_config, - trunk_spacecol={'atts': tmp_att_fn, 'D': .3, 's': .4, 'd': 10}, - roots_spacecol=None, - child_placement={'depth_range': (0, 2.7), 'Density': 0.7, 'Min scale': 1.2*scale, 'Max scale': 1.4*scale, 'Multi inst': 3, 'Pitch offset': 1., 'Pitch variance': 2., 'Yaw variance': 2.}, - skinning={'Min radius': 0.005, 'Max radius': 0.025, 'Exponent': 2} - ) - - return tree_kargs, twig_kargs, leaf_kargs - - # branch_config = {'n': 5, 'spawn_kargs': lambda idx: {'rng': [.5,.8]}, - # 'path_kargs': lambda idx: {'n_pts': 5, 'sz': .4, 'std': 1.4, 'momentum': .4}, - # 'children': []} - # twig_config = {'n': 4, - # 'path_kargs': lambda idx: ({'n_pts': 15, 'sz': .8, 'std': 1, 'momentum': .7} - # if idx > 0 else - # {'n_pts': 15, 'sz': 1, 'std': .1, 'momentum': .7}), - # 'spawn_kargs': lambda idx: {'init_vec': [0,0,1]}, - # 'children': [branch_config]} - - # twig_kargs = {'config': shrubtwig_config, - # 'radii_kargs': {'Max radius': .1}, - # 'leaf_kargs': {'Density': 1, 'Min scale': .4, 'Max scale': .6, 'Multi inst': 2} - # } - # tree_kargs = {'config': twig_config, 'init_pos': init_pos, - # 'radii_kargs': {'Min radius': .04, 'Exponent': 2}, - # 'leaf_kargs': {'Density': 1, 'Min scale': .35, 'Max scale': .45}, - # 'space_kargs': {'atts': init_att_fn, 'D': .3, 's': .4, 'd': 10, - # 'pull_dir': [0,0,.5], 'n_steps': 20}, - # 'root_kargs': {'atts': None, 'D': .2, 's': .3, 'd': 2, - # 'dir_rand': .3, 'mag_rand': .2, - # 'pull_dir': None, 'n_steps': 30}, - # } - - -def basic_stem(init_pos=np.array([[0, 0, 0]])): - branch_config = {'n': 3, 'spawn_kargs': lambda idx: {'rng': [.1 * (idx + 1), .1 * (idx + 2)]}, - 'path_kargs': lambda idx: {'n_pts': 20 - 2 * idx, 'sz': .5, 'std': 1.5, - 'momentum': .7, 'decay_mom': False, - 'pull_dir': [0, 0, 1], 'pull_factor': 1.5 + idx * .2}, - 'children': []} - tree_config = {'n': 1, - 'path_kargs': lambda idx: ({'n_pts': 30, 'sz': .5, 'std': 2, - 'momentum': .8, 'decay_mom': False, - 'pull_dir': [0, 0, 1], 'pull_factor': 2 + idx * .5}), - 'spawn_kargs': lambda idx: {'init_vec': [np.random.randn(), np.random.randn(), 1]}, - 'children': [branch_config]} - - tree_kargs = {'config': tree_config, 'init_pos': init_pos, - 'radii_kargs': {'Min radius': .02, 'Max radius': .1, 'Exponent': 2}, - 'leaf_kargs': {'Density': 0, 'Min scale': .35, 'Max scale': .45}, - 'space_kargs': {}, 'root_kargs': {}, - } - - return tree_kargs, None, {} - - -def space_tree_wrap(cds, n_init=5): - def tmp_att_fn(nodes): - return cds - - tree_config = {'n': 1, - 'path_kargs': lambda idx: {'n_pts': 1, 'sz': .8, 'std': 1, 'momentum': .7}, - 'spawn_kargs': lambda idx: {'init_vec': [0, 0, 1]}} - - twig_kargs = {'config': twig_config, - 'radii_kargs': {'max_radius': .1, 'merge_size': .2}, - 'leaf_kargs': {'max_density': 5, 'scale': .5}} - tree_kargs = {'config': tree_config, 'D_': .15, 's': .2, 'd': 0.5, 'dir_rand': .3, 'mag_rand': .2, - 'pull_dir': [0, 0, 0], 'n_updates': 40, 'init_att_fn': tmp_att_fn, - 'radii_kargs': {'max_radius': .04, 'merge_size': 0.1, 'min_radius': .01, 'growth_amt': 1.02}, - 'leaf_kargs': {}} - - rand_pts = np.random.choice(np.arange(len(cds)), n_init, replace=False) - tree_kargs['init_pos'] 
= cds[rand_pts] - - return tree_kargs, twig_kargs - - -def space_tree(obj, init_pos=np.array([[0, 0, 0]])): - def init_att_fn(nodes): - return mesh.sample_vtxs(obj, n=1000, emit_from="VOLUME", seed=np.random.randint(100)) - - twig_kargs = {'config': shrubtwig_config, - 'radii_kargs': {'max_radius': .1, 'merge_size': .2}, - 'leaf_kargs': {'Density': 1, 'Min scale': .4, 'Max scale': .6}} - tree_kargs = {'config': {'n': 0}, 'init_pos': init_pos, - 'leaf_kargs': {'Density': 0}, - 'radii_kargs': {'Min radius': .01, 'Scaling': .05, 'Exponent': 2}, - 'space_kargs': {'atts': init_att_fn, 'D': .1, 's': .2, 'd': 10, - 'dir_rand': .2, 'mag_rand': .2, - 'pull_dir': [0, .5, 0], 'n_steps': 100}, - } - - return tree_kargs, twig_kargs - - -def pine_tree(init_pos=np.array([[0, 0, 0]])): - - def tmp_att_fn(nodes): - tmp_v = nodes[nodes[:, 2] > 3] - atts = [tmp_v.copy() + np.random.randn(*tmp_v.shape) - * .5 for _ in range(5)] - return np.concatenate(atts, 0)[::5] - - def root_att_fn(nodes): - # Pass this into root_kargs to initialize a root system - pt_offset = init_pos[0] + np.array([0, 0, -3.5]) - branch_pts = mesh.get_pts_from_shape(bpy.ops.mesh.primitive_cube_add, n=500, - scaling=[5, 5, 4], pt_offset=pt_offset) - return branch_pts - - per_layer = 4 - tree_ht = np.random.randint(20, 30) - max_sz = .8 - start_ht = int(tree_ht * np.random.uniform(0.1, 0.3)) - n = tree_ht - start_ht - - branch_config = {'n': n * per_layer, - 'path_kargs': lambda idx: {'n_pts': np.random.randint(np.floor(((n - idx // per_layer) / n) * 6), - np.ceil(((n - idx // per_layer) / n) * 8)) + 3, - 'std': .3, 'momentum': .9, 'sz': max_sz - (max_sz / tree_ht) * (idx // per_layer)}, - 'spawn_kargs': lambda idx: {'rng': [.5, 1], 'z_bias': .2, 'rnd_idx': (idx // per_layer)+start_ht, - 'ang_min': np.pi/2, 'ang_max': np.pi/2 + np.pi/16, - 'axis2': [np.random.randn(), np.random.randn(), .5]}, - 'children': [] - } - pinetree_config = {'n': 1, - 'path_kargs': lambda idx: {'n_pts': tree_ht + 1, 'sz': .8, 'std': 0.1, 'momentum': .7}, - 'spawn_kargs': lambda idx: {'init_vec': [0, 0, 1]}, - 'children': [branch_config]} - - leaf_kargs = {'leaf_width': .05, 'alpha': 0, 'use_wave': False} - twig_kargs = TreeParams( - skeleton=pinetwig_config, - trunk_spacecol=None, roots_spacecol=None, - skinning={'Min radius': .005, 'Max radius': 0.03, 'Exponent': 1.3, 'Scaling': 0.1, 'Profile res': 3}, - child_placement={'depth_range': (0, 5.0), 'Density': 1.0, 'Min scale': .7, 'Max scale': .9}, - ) - - tree_kargs = TreeParams( - skeleton=pinetree_config, - skinning={'Min radius': 0.02, 'Exponent': 1.5, 'Max radius': 0.2}, - trunk_spacecol={'atts': tmp_att_fn, 'D': .3, 's': .4, 'd': 10, - 'pull_dir': [0, 0, .5], 'n_steps': 20}, - roots_spacecol=None,#{'atts': None, 'D': .2, 's': .3, 'd': 2, - # 'dir_rand': .3, 'mag_rand': .2, - # 'pull_dir': None, 'n_steps': 30}, - child_placement={'depth_range': (0, 2.7), 'Density': 1.0, 'Min scale': .7, 'Max scale': .9} - ) - - return tree_kargs, twig_kargs, leaf_kargs - -def coral(): - def tmp_att_fn(nodes): - branch_pts = mesh.get_pts_from_shape(bpy.ops.mesh.primitive_cube_add, n=500, - scaling=[7, 7, 7], pt_offset=[0, 0, 11]) - return branch_pts - - branch_config = {'n': 5, 'spawn_kargs': lambda idx: {'rng': [.5, .8]}, - 'path_kargs': lambda idx: {'n_pts': 5, 'sz': .4, 'std': 1.4, 'momentum': .4}, - 'children': []} - tree_config = {'n': 4, - 'path_kargs': lambda idx: ({'n_pts': 15, 'sz': .8, 'std': 1, 'momentum': .7} - if idx > 0 else - {'n_pts': 15, 'sz': 1, 'std': .1, 'momentum': .7}), - 'spawn_kargs': lambda idx: 
{'init_vec': [0, 0, 1]}, - 'children': [branch_config]} - - twig_kargs = {'config': twig_config, - 'radii_kargs': {'max_radius': .1, 'merge_size': .2}, - 'leaf_kargs': {'max_density': 20, 'scale': .4}} - tree_kargs = {'config': tree_config, 'D_': .3, 's': .4, 'd': 10, - 'pull_dir': [0, 0, .5], 'n_updates': 20, 'init_att_fn': tmp_att_fn, - 'radii_kargs': {'max_radius': .7, 'merge_size': .3, 'min_radius': .03, 'growth_amt': 1.01}, - 'leaf_kargs': {'max_density': 5, 'scale': .3}} - - return tree_kargs, twig_kargs - - -def parse_genome(tree_genome): - genome_keys = ['size', 'trunk_warp', 'n_trunks', - 'branch_start', 'branch_angle', 'multi_branch', - 'branch density', 'branch_len', - 'branch_warp', 'pull_dir_vt', - 'pull_dir_hz', 'outgrowth', 'branch_thickness', - 'twig_density', 'twig_scale'] - return {k: tree_genome[k_idx] for k_idx, k in enumerate(genome_keys)} - - -def calc_height(x, min_ht=5, max_ht=30, bias=-.05, uniform=.5): - def map_fn(val): return np.tan((val-.5+bias)*np.pi*(1.1-uniform)) - rng = map_fn(0), map_fn(1) - y = map_fn(x) - y = (y - rng[0]) / (rng[1] - rng[0]) - y = y * (max_ht - min_ht) + min_ht - return y - - -def generate_tree_config(tree_genome=None, season='autumn'): - """ - Main latent params that we might want to control: - - overall size/"age" - - trunk straightness - - additional "trunks" - - starting height of branches - - outgoing branch angle (parallel to ground vs angled up vs angled proporitionally to height) - - branch density - - branch length (fn of height) - - branch straightness - - pull direction (up/down/to the side) - - outgrowth (space filling) / "density" - - branch thickness (ideally this behaves reasonably based on everything else) - """ - if tree_genome is None: - tree_genome = np.random.rand(32) - - cfg = parse_genome(tree_genome) - sz = calc_height(cfg['size'], min_ht=12) - n_tree_pts = int(sz) - n_trunks = int(10 ** (cfg['n_trunks']**1.6)) - ex = np.exp((6 - (5 if n_trunks > 1 else 0)) * (cfg['trunk_warp']-.1)) - trunk_std = ((1 - (ex / (1 + ex)))*4) ** 2 - trunk_mtm = max(.2, min(.95, (1 / (trunk_std + 1)) + - np.random.randn() * .2)) - radial_out = False # False # np.random.rand() < .3 - avail_idxs = np.arange(n_tree_pts) - start_idx = 1 + int(n_tree_pts * np.random.uniform(.1, .7)) - sample_density = np.random.choice( - np.arange(np.ceil(np.sqrt(n_tree_pts)), dtype=int) + 1) - avail_idxs = avail_idxs[start_idx::sample_density] - multi_branch = int(5 ** (cfg['multi_branch']**1.6)) - avail_idxs = np.repeat(avail_idxs, multi_branch).flatten() - - n = len(avail_idxs) - - start_ht = sz * (start_idx / sz) - box_ht = (sz - start_ht) * .6 - - def tmp_att_fn(nodes): - branch_pts = mesh.get_pts_from_shape(bpy.ops.mesh.primitive_cube_add, n=500, - scaling=[sz/2, sz/2, box_ht], pt_offset=[0, 0, start_ht + sz * .4]) - return branch_pts - - max_sz = 1 - - if radial_out: - start_ht = int(sz * .1) - per_layer = np.random.randint(3, 6) - branch_config = {'n': n * per_layer, - 'path_kargs': lambda idx: {'n_pts': np.random.randint(np.floor(((n - idx // per_layer) / n) * 6), - np.ceil(((n - idx // per_layer) / n) * 8)) + 3, - 'std': .3, 'momentum': .9, 'sz': max_sz - (max_sz / sz) * (idx // per_layer), - 'pull_dir': [0, 0, np.random.rand()], - 'pull_factor': np.random.rand()}, - 'spawn_kargs': lambda idx: {'rnd_idx': avail_idxs[idx // per_layer], - 'ang_min': np.pi/2, 'ang_max': np.pi/2 + np.pi/16, - 'axis2': [np.random.randn(), np.random.randn(), .5]}} - - else: - branch_config = {'n': n, - 'path_kargs': lambda idx: {'n_pts': 
int(n_tree_pts*np.random.uniform(.4, .6)), - 'sz': 1, 'std': 1.4, 'momentum': .4, - 'pull_dir': [0, 0, np.random.rand()], - 'pull_factor': np.random.rand()}, - 'spawn_kargs': lambda idx: {'rnd_idx': avail_idxs[idx]}} - - tree_config = {'n': n_trunks, - 'path_kargs': lambda idx: ({'n_pts': n_tree_pts, 'sz': 1, - 'std': trunk_std, 'momentum': trunk_mtm, - 'pull_dir': [0, 0, 0]}), - 'spawn_kargs': lambda idx: {'init_vec': [0, 0, 1]}, - 'children': [branch_config] - } - - tmp_D = .3 + .2 * (sz / 30) # .3 * sz / 8 - tmp_s = tmp_D * 1.3 - if n < 5: - n_updates = np.random.choice([2, 3, int(1 + sz // 2)]) - else: - n_updates = np.random.choice([2, 2, 2, 3, 4, 5]) - - max_radius = 0.2 - merge_size = 2.5 - cfg['branch_thickness'] - - if season == "winter": - twig_density = 0.0 if cfg['twig_density'] < 0.5 else 0.5 * cfg['twig_density'] - twig_inst = 1 + 0 * np.random.randint(3, 5) - else: - twig_density = 0.5 + 0.5 * cfg['twig_density'] - twig_inst = np.random.randint(1, 3) - - return TreeParams( - skeleton=tree_config, - skinning={'Max radius': max_radius, 'Min radius': .02, 'Exponent': merge_size}, - trunk_spacecol={'atts': tmp_att_fn, 'D': tmp_D, 's': tmp_s, 'd': 10, - 'pull_dir': [0, 0, np.random.randn() * .3], 'n_steps': n_updates}, - roots_spacecol=None, #{'atts': None, 'D': .05, 's': .1, 'd': 2, 'dir_rand': .05, 'mag_rand': .05, 'pull_dir': None, 'n_steps': 30}, - child_placement={'depth_range': (0, 5.0), 'Density': twig_density, 'Multi inst': twig_inst, - 'Pitch variance': 1.0, 'Yaw variance': 10.0, 'Min scale': 1.1, 'Max scale': 1.3} - ) - - -def random_tree(tree_genome=None, season='autumn'): - leaf_kargs = {'leaf_width': np.random.rand() * .5 + .1, - 'alpha': np.random.rand() * .3} - - if season == "winter": - leaf_density = np.random.uniform(.0, 0.1) - leaf_inst = 1 - elif season == "spring": # flowers should be less dense - leaf_density = np.random.uniform(.3, 0.7) - leaf_inst = 2 - else: - leaf_density = np.random.uniform(.4, 1.0) - leaf_inst = 3 - - - twig_kargs = TreeParams( - skeleton=generate_twig_config(), - skinning={'Max radius': 0.01, 'Min radius': 0.005}, - trunk_spacecol=None, - roots_spacecol=None, - child_placement={'Density': leaf_density, 'Multi inst': leaf_inst, - 'Min scale': .3, 'Max scale': .4} - ) - tree_kargs = generate_tree_config(tree_genome, season=season) - return tree_kargs, twig_kargs, leaf_kargs - - -def generate_coral_config(tree_genome=None): - """ - Main latent params that we might want to control: - - overall size/"age" - - trunk straightness - - additional "trunks" - - starting height of branches - - outgoing branch angle (parallel to ground vs angled up vs angled proporitionally to height) - - branch density - - branch length (fn of height) - - branch straightness - - pull direction (up/down/to the side) - - outgrowth (space filling) / "density" - - branch thickness (ideally this behaves reasonably based on everything else) - """ - if tree_genome is None: - tree_genome = np.random.rand(32) - - cfg = parse_genome(tree_genome) - sz = calc_height(cfg['size']) - n_tree_pts = int(sz) - n_trunks = np.random.randint(5, 20) # int(10 ** (cfg['n_trunks']**1.6)) - ex = np.exp((6 - (5 if n_trunks > 1 else 0)) * (cfg['trunk_warp']-.1)) - trunk_std = ((1 - (ex / (1 + ex)))*4) ** 2 - trunk_mtm = max(.2, min(.95, (1 / (trunk_std + 1)) + - np.random.randn() * .2)) - radial_out = False # np.random.rand() < .3 - avail_idxs = np.arange(n_tree_pts) - start_idx = 1 + int(n_tree_pts * np.random.uniform(0, .7)) - sample_density = np.random.choice( - 
np.arange(np.ceil(np.sqrt(n_tree_pts)), dtype=int) + 1) - avail_idxs = avail_idxs[start_idx::sample_density] - multi_branch = int(5 ** (cfg['multi_branch']**1.6)) - avail_idxs = np.repeat(avail_idxs, multi_branch).flatten() - - n = 0 # len(avail_idxs) - - start_ht = sz * (start_idx / sz) + 1 - box_ht = (sz - start_ht) * .6 - - def tmp_att_fn(nodes): - branch_pts = mesh.get_pts_from_shape(bpy.ops.mesh.primitive_cube_add, n=500, - scaling=[sz/2, sz/2, box_ht], pt_offset=[0, 0, start_ht + sz * .4]) - return branch_pts - - max_sz = 1 - - branch_config = {'n': n, - 'path_kargs': lambda idx: {'n_pts': int(n_tree_pts*np.random.uniform(.4, .6)), - 'sz': 1, 'std': .4, 'momentum': .8, - 'pull_dir': [0, 0, np.random.rand()], - 'pull_factor': np.random.rand()}, - 'spawn_kargs': lambda idx: {'rnd_idx': avail_idxs[idx]}} - - tree_config = {'n': n_trunks, - 'path_kargs': lambda idx: ({'n_pts': n_tree_pts, 'sz': 1, - 'std': trunk_std, 'momentum': trunk_mtm, - 'pull_dir': [0, 0, 1]}), - 'spawn_kargs': lambda idx: {'init_vec': [0, 0, 1]}, - 'children': [branch_config] - } - - tmp_D = .3 + .2 * (sz / 30) # .3 * sz / 8 - tmp_s = tmp_D * 1.3 - if n < 5: - n_updates = np.random.choice([2, 3, int(1 + sz // 2)]) - else: - n_updates = np.random.choice([2, 2, 2, 3, 4, 5]) - # print(sz, n_updates) - n_updates = 3 - max_radius = .3 # 00 - merge_size = np.random.uniform(.2, .7) - growth_amt = 1.01 - - return {'config': tree_config, 'D_': tmp_D, 's': tmp_s, 'd': 10, - 'pull_dir': [0, 0, np.random.randn() * .3], - # np.random.randint(15) + 3, - 'init_att_fn': tmp_att_fn, 'n_updates': n_updates, - 'radii_kargs': {'max_radius': max_radius, 'merge_size': merge_size, 'min_radius': .2, 'growth_amt': growth_amt}, - 'leaf_kargs': {'max_density': 0 if np.random.rand() < .1 else np.random.uniform(5, 20), - 'scale': np.random.uniform(.5, 1)}, - } - - -def random_coral(genome=None): - leaf_kargs = {} - twig_kargs = {} - tree_kargs = generate_coral_config(genome) - return tree_kargs, twig_kargs, leaf_kargs diff --git a/infinigen/assets/trees/utils/geometrynodes.py b/infinigen/assets/trees/utils/geometrynodes.py deleted file mode 100644 index 34f67732e..000000000 --- a/infinigen/assets/trees/utils/geometrynodes.py +++ /dev/null @@ -1,706 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Alejandro Newell - -import bpy -import numpy as np - -from . 
import helper, mesh -from .materials import new_link - -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils - -C = bpy.context -D = bpy.data - -def add_node_modifier(obj): - # Add geometry node modifier - helper.set_active_obj(obj) - # bpy.ops.node.new_geometry_nodes_modifier() # Blender 3.2 - bpy.ops.object.modifier_add(type='NODES') # Blender 3.1 - return obj.modifiers[-1] - - -def setup_inps(ng, inp, nodes): - for k_idx, (k, node, attr) in enumerate(nodes): - new_link(ng, inp, k_idx, node, attr) - ng.inputs[k_idx].name = k - - -@node_utils.to_nodegroup('CollectionDistribute', singleton=False) -def coll_distribute(nw, merge_dist=None): - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketBool', 'Selection', True), - ('NodeSocketCollection', 'Collection', None), - ('NodeSocketInt', 'Multi inst', 1), - ('NodeSocketFloat', 'Density', 0.5), - ('NodeSocketFloat', 'Min scale', 0.0), - ('NodeSocketFloat', 'Max scale', 1.0), - ('NodeSocketFloat', 'Pitch scaling', 0.2), - ('NodeSocketFloat', 'Pitch offset', 0.0), - ('NodeSocketFloat', 'Pitch variance', 0.4), - ('NodeSocketFloat', 'Yaw variance', 0.4), - ('NodeSocketBool', 'Realize Instance', False)]) - - mesh_to_curve = nw.new_node('GeometryNodeMeshToCurve', - input_kwargs={'Mesh': group_input.outputs["Geometry"], 'Selection': group_input.outputs["Selection"]}) - - curve_to_points = nw.new_node('GeometryNodeCurveToPoints', - input_kwargs={'Curve': mesh_to_curve, 'Count': group_input.outputs["Multi inst"]}) - - mesh_to_points = nw.new_node('GeometryNodeMeshToPoints', - input_kwargs={'Mesh': group_input.outputs["Geometry"], 'Selection': group_input.outputs["Selection"]}) - - position = nw.new_node(Nodes.InputPosition) - - transfer_attribute_index = nw.new_node(Nodes.SampleNearest, - input_kwargs={'Geometry': mesh_to_points}) - - transfer_attribute = nw.new_node(Nodes.SampleIndex, - input_kwargs={'Geometry': mesh_to_points, 'Value': position, 'Index': transfer_attribute_index}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': curve_to_points.outputs["Points"], 'Position': (transfer_attribute, "Value")}) - - random_value = nw.new_node(Nodes.RandomValue) - - math = nw.new_node(Nodes.Math, - input_kwargs={0: random_value.outputs[1], 1: group_input.outputs["Density"]}, - attrs={'operation': 'LESS_THAN'}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': curve_to_points.outputs["Rotation"]}) - - math_1 = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz.outputs["X"], 1: 1.5708}) - - math_2 = nw.new_node(Nodes.Math, - input_kwargs={0: math_1, 1: group_input.outputs["Pitch scaling"]}, - attrs={'operation': 'MULTIPLY'}) - - math_3 = nw.new_node(Nodes.Math, - input_kwargs={0: math_2, 1: group_input.outputs["Pitch offset"]}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': math_3, 'Z': separate_xyz.outputs["Z"]}) - - math_4 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Pitch variance"], 1: -1.0}, - attrs={'operation': 'MULTIPLY'}) - - random_value_1 = nw.new_node(Nodes.RandomValue, - input_kwargs={2: math_4, 3: group_input.outputs["Pitch variance"]}) - - math_5 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Yaw variance"], 1: -1.0}, - attrs={'operation': 'MULTIPLY'}) - - random_value_2 = nw.new_node(Nodes.RandomValue, - input_kwargs={2: math_5, 3: group_input.outputs["Yaw 
variance"]}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': random_value_1.outputs[1], 'Z': random_value_2.outputs[1]}) - - vector_math = nw.new_node(Nodes.VectorMath, - input_kwargs={0: combine_xyz, 1: combine_xyz_1}) - - random_value_3 = nw.new_node(Nodes.RandomValue, - input_kwargs={2: group_input.outputs["Min scale"], 3: group_input.outputs["Max scale"]}) - - geo = nw.new_node(Nodes.CollectionInfo, - input_kwargs={'Collection': group_input.outputs["Collection"], 'Separate Children': True, 'Reset Children': True}) - - if merge_dist is not None: - geo = nw.new_node(Nodes.MergeByDistance, [geo, None, merge_dist]) - - instance_on_points = nw.new_node(Nodes.InstanceOnPoints, - input_kwargs={'Points': set_position, 'Selection': math, 'Instance': geo, 'Pick Instance': True, 'Rotation': vector_math.outputs["Vector"], 'Scale': random_value_3.outputs[1]}) - - realize_instances = nw.new_node(Nodes.RealizeInstances, - input_kwargs={'Geometry': instance_on_points}) - - switch = nw.new_node(Nodes.Switch, - input_kwargs={1: group_input.outputs["Realize Instance"], 14: instance_on_points, 15: realize_instances}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': switch.outputs[6]}) - - -@node_utils.to_nodegroup('PhylloDist', singleton=False) -def phyllotaxis_distribute(nw): - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketInt', 'Count', 50), - ('NodeSocketFloat', 'Max radius', 2.0), - ('NodeSocketFloat', 'Radius exp', 0.5), - ('NodeSocketFloat', 'Inner pct', 0.0), - ('NodeSocketFloat', 'Min angle', -0.5236), - ('NodeSocketFloat', 'Max angle', 0.7854), - ('NodeSocketFloat', 'Min scale', 0.3), - ('NodeSocketFloat', 'Max scale', 0.3), - ('NodeSocketFloat', 'Min z', 0.0), - ('NodeSocketFloat', 'Max z', 1.0), - ('NodeSocketFloat', 'Clamp z', 1.0), - ('NodeSocketFloat', 'Yaw offset', -np.pi / 2)]) - - mesh_line = nw.new_node('GeometryNodeMeshLine', - input_kwargs={'Count': group_input.outputs["Count"]}) - - mesh_to_points = nw.new_node('GeometryNodeMeshToPoints', - input_kwargs={'Mesh': mesh_line}) - - position = nw.new_node(Nodes.InputPosition) - - capture_attribute = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': mesh_to_points, 1: position}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - index = nw.new_node('GeometryNodeInputIndex') - - value = nw.new_node(Nodes.Value) - value.outputs[0].default_value = 1.0 - - math = nw.new_node(Nodes.Math, - input_kwargs={0: index, 1: value}, - attrs={'operation': 'DIVIDE'}) - - math_1 = nw.new_node(Nodes.Math, - input_kwargs={0: math}, - attrs={'operation': 'FLOOR'}) - - math_6 = nw.new_node(Nodes.Math, - input_kwargs={0: math_1, 1: 2.3998}, - attrs={'operation': 'MULTIPLY'}) - - math_2 = nw.new_node(Nodes.Math, - input_kwargs={0: math}, - attrs={'operation': 'FRACT'}) - - math_5 = nw.new_node(Nodes.Math, - input_kwargs={0: math_2, 1: 6.2832}, - attrs={'operation': 'MULTIPLY'}) - - math_7 = nw.new_node(Nodes.Math, - input_kwargs={0: math_6, 1: math_5}) - - math_8 = nw.new_node(Nodes.Math, - input_kwargs={0: math_7}, - attrs={'operation': 'COSINE'}) - - math_9 = nw.new_node(Nodes.Math, - input_kwargs={0: math_7}, - attrs={'operation': 'SINE'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': math_8, 'Y': math_9}) - - math_3 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Count"], 1: value}, - attrs={'operation': 'DIVIDE'}) - - math_4 = nw.new_node(Nodes.Math, - input_kwargs={0: math_1, 1: math_3}, - 
attrs={'operation': 'DIVIDE'}) - - math_10 = nw.new_node(Nodes.Math, - input_kwargs={0: math_4, 1: group_input.outputs["Radius exp"]}, - attrs={'operation': 'POWER'}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': math_10, 3: group_input.outputs["Inner pct"]}) - - math_11 = nw.new_node(Nodes.Math, - input_kwargs={0: map_range.outputs["Result"], 1: group_input.outputs["Max radius"]}, - attrs={'operation': 'MULTIPLY'}) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': math_4, 3: 1.5708, 4: 1.5708}) - - math_12 = nw.new_node(Nodes.Math, - input_kwargs={0: map_range_1.outputs["Result"]}, - attrs={'operation': 'SINE'}) - - math_13 = nw.new_node(Nodes.Math, - input_kwargs={0: math_11, 1: math_12}, - attrs={'operation': 'MULTIPLY'}) - - vector_math = nw.new_node(Nodes.VectorMath, - input_kwargs={0: combine_xyz, 1: math_13}, - attrs={'operation': 'MULTIPLY'}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': vector_math.outputs["Vector"]}) - - map_range_2 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': math_4, 2: group_input.outputs["Clamp z"], 3: group_input.outputs["Min z"], 4: group_input.outputs["Max z"]}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': separate_xyz.outputs["X"], 'Y': separate_xyz.outputs["Y"], 'Z': map_range_2.outputs["Result"]}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': capture_attribute.outputs["Geometry"], 'Position': combine_xyz_1}) - - attribute_statistic = nw.new_node(Nodes.AttributeStatistic, - input_kwargs={'Geometry': capture_attribute.outputs["Geometry"], 2: map_range.outputs["Result"]}) - - map_range_3 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': map_range.outputs["Result"], 1: attribute_statistic.outputs["Max"], 2: attribute_statistic.outputs["Min"], 3: group_input.outputs["Min angle"], 4: group_input.outputs["Max angle"]}) - - random_value_1 = nw.new_node(Nodes.RandomValue, - input_kwargs={2: -0.1, 3: 0.1}) - - math_14 = nw.new_node(Nodes.Math, - input_kwargs={0: math_7, 1: group_input.outputs["Yaw offset"]}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': map_range_3.outputs["Result"], 'Y': random_value_1.outputs[1], 'Z': math_14}) - - random_value = nw.new_node(Nodes.RandomValue, - input_kwargs={2: group_input.outputs["Min scale"], 3: group_input.outputs["Max scale"]}) - - instance_on_points = nw.new_node(Nodes.InstanceOnPoints, - input_kwargs={'Points': set_position, 'Instance': group_input.outputs["Geometry"], 'Rotation': combine_xyz_2, 'Scale': random_value.outputs[1]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Instances': instance_on_points}) - - -@node_utils.to_nodegroup('FollowCurve', singleton=False) -def follow_curve(nw): - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketGeometry', 'Curve', None), - ('NodeSocketFloat', 'Offset', 0.5)]) - - position = nw.new_node(Nodes.InputPosition) - - capture_attribute = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 1: position}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': capture_attribute.outputs["Attribute"]}) - - math = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz.outputs["Z"], 1: group_input.outputs["Offset"]}) - - sample_curve = nw.new_node('GeometryNodeSampleCurve', - input_kwargs={'Curve': group_input.outputs["Curve"], 
'Length': math}) - - vector_math = nw.new_node(Nodes.VectorMath, - input_kwargs={0: sample_curve.outputs["Tangent"], 1: sample_curve.outputs["Normal"]}, - attrs={'operation': 'CROSS_PRODUCT'}) - - vector_math_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: vector_math.outputs["Vector"], 'Scale': separate_xyz.outputs["X"]}, - attrs={'operation': 'SCALE'}) - - vector_math_2 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: sample_curve.outputs["Normal"], 'Scale': separate_xyz.outputs["Y"]}, - attrs={'operation': 'SCALE'}) - - vector_math_3 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: vector_math_1.outputs["Vector"], 1: vector_math_2.outputs["Vector"]}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': capture_attribute.outputs["Geometry"], 'Position': sample_curve.outputs["Position"], 'Offset': vector_math_3.outputs["Vector"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_position}) - - -@node_utils.to_nodegroup('SetTreeRadius', singleton=False, type='GeometryNodeTree') -def set_tree_radius(nw): - # Code generated using version 2.3.1 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketBool', 'Selection', True), - ('NodeSocketFloat', 'Reverse depth', 0.5), - ('NodeSocketFloat', 'Scaling', 0.2), - ('NodeSocketFloat', 'Exponent', 1.5), - ('NodeSocketFloat', 'Min radius', 0.02), - ('NodeSocketFloat', 'Max radius', 5.0), - ('NodeSocketInt', 'Profile res', 20), - ('NodeSocketFloatDistance', 'Merge dist', 0.001)]) - - mesh_to_curve = nw.new_node(Nodes.MeshToCurve, - input_kwargs={'Mesh': group_input.outputs["Geometry"], 'Selection': group_input.outputs["Selection"]}) - - set_spline_type = nw.new_node(Nodes.CurveSplineType, - input_kwargs={'Curve': mesh_to_curve}, - attrs={'spline_type': 'BEZIER'}) - - set_handle_type = nw.new_node(Nodes.SetHandleType, - input_kwargs={'Curve': set_spline_type}) - - position = nw.new_node(Nodes.InputPosition) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': position, 'Scale': 1.0}) - - scale = nw.new_node(Nodes.VectorMath, - input_kwargs={0: noise_texture.outputs["Color"], 'Scale': 0.02}, - attrs={'operation': 'SCALE'}) - - set_handle_positions = nw.new_node(Nodes.SetHandlePositions, - input_kwargs={'Curve': set_handle_type, 'Offset': scale.outputs["Vector"]}) - - switch = nw.new_node(Nodes.Switch, - input_kwargs={1: True, 14: mesh_to_curve, 15: set_handle_positions}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Reverse depth"], 1: group_input.outputs["Scaling"]}, - attrs={'operation': 'MULTIPLY'}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: multiply, 1: 0.1}, - attrs={'operation': 'MULTIPLY'}) - - power = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_1, 1: group_input.outputs["Exponent"]}, - attrs={'operation': 'POWER'}) - - maximum = nw.new_node(Nodes.Math, - input_kwargs={0: power, 1: group_input.outputs["Min radius"]}, - attrs={'operation': 'MAXIMUM'}) - - minimum = nw.new_node(Nodes.Math, - input_kwargs={0: maximum, 1: group_input.outputs["Max radius"]}, - attrs={'operation': 'MINIMUM'}) - - set_curve_radius = nw.new_node(Nodes.SetCurveRadius, - input_kwargs={'Curve': switch.outputs[6], 'Radius': minimum}) - - curve_circle = nw.new_node(Nodes.CurveCircle, - input_kwargs={'Resolution': group_input.outputs["Profile res"]}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': set_curve_radius, 
'Profile Curve': curve_circle.outputs["Curve"], 'Fill Caps': True}) - - set_shade_smooth = nw.new_node(Nodes.SetShadeSmooth, - input_kwargs={'Geometry': curve_to_mesh, 'Shade Smooth': False}) - - merge_by_distance = nw.new_node(Nodes.MergeByDistance, - input_kwargs={'Geometry': set_shade_smooth, 'Distance': group_input.outputs["Merge dist"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': merge_by_distance}) - - -@node_utils.to_material('BarkMat2', singleton=False) -def bark_shader_2(nw): - attribute = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'offset_barkgeo2'}) - - reroute = nw.new_node(Nodes.Reroute, - input_kwargs={'Input': attribute.outputs["Color"]}) - - math = nw.new_node(Nodes.Math, - input_kwargs={0: reroute, 1: 0.0}) - - colorramp_1 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': math}) - for i in range(2): - colorramp_1.color_ramp.elements.new(0) - colorramp_1.color_ramp.elements[0].position = 0.0 - # colorramp_1.color_ramp.elements[0].color = (0.0025, 0.0019, 0.0017, 1.0) - colorramp_1.color_ramp.elements[0].color = (0.1004, 0.049, 0.0344, 1.0) - colorramp_1.color_ramp.elements[1].position = 0.163 - colorramp_1.color_ramp.elements[1].color = (0.1004, 0.049, 0.0344, 1.0) - colorramp_1.color_ramp.elements[2].position = 0.4529 - colorramp_1.color_ramp.elements[2].color = (0.1094, 0.0656, 0.054, 1.0) - colorramp_1.color_ramp.elements[3].position = 0.6268 - colorramp_1.color_ramp.elements[3].color = (0.0712, 0.0477, 0.0477, 1.0) - - math_1 = nw.new_node(Nodes.Math, - input_kwargs={0: 1.0, 1: reroute}, - attrs={'operation': 'SUBTRACT'}) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': colorramp_1.outputs["Color"], 'Roughness': math_1}, - attrs={'subsurface_method': 'BURLEY'}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': principled_bsdf}) - - -@node_utils.to_material('BarkMat1', singleton=False) -def bark_shader_1(nw): - - texture_coordinate = nw.new_node(Nodes.TextureCoord) - - mapping = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': texture_coordinate.outputs["Object"]}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': mapping, 'Detail': 16.0, 'Roughness': 0.62}) - - attribute = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'offset_barkgeo1'}) - - mix = nw.new_node(Nodes.MixRGB, - input_kwargs={'Color1': noise_texture.outputs["Fac"], 'Color2': attribute.outputs["Color"]}, - attrs={'blend_type': 'MULTIPLY'}) - - colorramp = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': mix}) - colorramp.color_ramp.elements.new(1) - colorramp.color_ramp.elements[0].position = 0.0 - colorramp.color_ramp.elements[0].color = (0.0171, 0.005, 0.0, 1.0) - colorramp.color_ramp.elements[1].position = 0.4636 - colorramp.color_ramp.elements[1].color = (0.1132, 0.0653, 0.0471, 1.0) - colorramp.color_ramp.elements[2].position = 1.0 - colorramp.color_ramp.elements[2].color = (0.2243, 0.1341, 0.1001, 1.0) - - colorramp_2 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': noise_texture.outputs["Fac"]}) - colorramp_2.color_ramp.elements[0].position = 0.0 - colorramp_2.color_ramp.elements[0].color = (0.5173, 0.5173, 0.5173, 1.0) - colorramp_2.color_ramp.elements[1].position = 1.0 - colorramp_2.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': colorramp.outputs["Color"], 'Roughness': colorramp_2.outputs["Color"]}, - attrs={'subsurface_method': 'BURLEY'}) - 
- material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': principled_bsdf}) - - -@node_utils.to_nodegroup('BarkGeo2', singleton=False) -def bark_geo_2(nw): - - group_input = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketGeometry', 'Geometry', None)]) - - position = nw.new_node(Nodes.InputPosition) - - vector = nw.new_node(Nodes.Vector) - vector.vector = (0.1, 0.1, 0.1) - - vector_math_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: position, 1: vector}, - attrs={'operation': 'MULTIPLY'}) - - value_2 = nw.new_node(Nodes.Value) - value_2.outputs[0].default_value = 0.38 - - value = nw.new_node(Nodes.Value) - value.outputs[0].default_value = 5.0 - - value_1 = nw.new_node(Nodes.Value) - value_1.outputs[0].default_value = 2.0 - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': vector_math_1.outputs["Vector"], 'Scale': value, 'Detail': value_1}) - - mix = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': value_2, 'Color1': noise_texture.outputs["Color"], 'Color2': (0.0, 0.0, 0.0, 1.0)}) - - vector_math_2 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: vector_math_1.outputs["Vector"], 1: mix}) - - value_4 = nw.new_node(Nodes.Value) - value_4.outputs[0].default_value = 0.0 - - value_3 = nw.new_node(Nodes.Value) - value_3.outputs[0].default_value = 20.0 - - voronoi_texture = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': vector_math_2.outputs["Vector"], 'W': value_4, 'Scale': value_3}, - attrs={'voronoi_dimensions': '4D', 'feature': 'F2'}) - - math_3 = nw.new_node(Nodes.Math, - input_kwargs={0: voronoi_texture.outputs["Distance"], 1: voronoi_texture.outputs["Distance"]}, - attrs={'operation': 'MULTIPLY'}) - - voronoi_texture_1 = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': vector_math_2.outputs["Vector"], 'W': value_4, 'Scale': value_3}, - attrs={'voronoi_dimensions': '4D'}) - - math_4 = nw.new_node(Nodes.Math, - input_kwargs={0: voronoi_texture_1.outputs["Distance"], 1: voronoi_texture_1.outputs["Distance"]}, - attrs={'operation': 'MULTIPLY'}) - - math_5 = nw.new_node(Nodes.Math, - input_kwargs={0: math_3, 1: math_4}, - attrs={'operation': 'SUBTRACT'}) - - value_5 = nw.new_node(Nodes.Value) - value_5.outputs[0].default_value = 0.6 - - math_7 = nw.new_node(Nodes.Math, - input_kwargs={0: math_5, 1: value_5}, - attrs={'operation': 'MINIMUM'}) - - math_6 = nw.new_node(Nodes.Math, - input_kwargs={0: math_5, 1: value_5}, - attrs={'operation': 'MAXIMUM'}) - - value_6 = nw.new_node(Nodes.Value) - value_6.outputs[0].default_value = 0.1 - - math_8 = nw.new_node(Nodes.Math, - input_kwargs={0: math_6, 1: value_6}, - attrs={'operation': 'MULTIPLY'}) - - math_9 = nw.new_node(Nodes.Math, - input_kwargs={0: math_7, 1: math_8}, - attrs={'operation': 'SUBTRACT'}) - - normal = nw.new_node(Nodes.InputNormal) - - vector_math_3 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: math_9, 1: normal}, - attrs={'operation': 'MULTIPLY'}) - - face_area = nw.new_node('GeometryNodeInputMeshFaceArea') - - math_1 = nw.new_node(Nodes.Math, - input_kwargs={0: face_area}, - attrs={'operation': 'SQRT'}) - - value_7 = nw.new_node(Nodes.Value) - value_7.outputs[0].default_value = 2.0 - - math = nw.new_node(Nodes.Math, - input_kwargs={0: math_1, 1: value_7}, - attrs={'operation': 'MULTIPLY'}) - - vector_math_4 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: vector_math_3.outputs["Vector"], 1: math}, - attrs={'operation': 'MULTIPLY'}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 
'Offset': vector_math_4.outputs["Vector"]}) - - capture_attribute = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': set_position, 1: math_7}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': capture_attribute.outputs["Geometry"], 'offset_barkgeo2': capture_attribute.outputs["Attribute"]}) - - -@node_utils.to_nodegroup('BarkGeo1', singleton=False) -def bark_geo_1(nw): - - group_input = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketGeometry', 'Geometry', None)]) - - position = nw.new_node(Nodes.InputPosition) - - value = nw.new_node(Nodes.Value) - value.outputs[0].default_value = 0.2 - - vector_math = nw.new_node(Nodes.VectorMath, - input_kwargs={0: position, 1: value}, - attrs={'operation': 'MULTIPLY'}) - - value_1 = nw.new_node(Nodes.Value) - value_1.outputs[0].default_value = 10.0 - - value_2 = nw.new_node(Nodes.Value) - value_2.outputs[0].default_value = 15.0 - - wave_texture = nw.new_node(Nodes.WaveTexture, - input_kwargs={'Vector': vector_math.outputs["Vector"], 'Scale': value_1, 'Distortion': value_2}) - - normal = nw.new_node(Nodes.InputNormal) - - vector_math_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: wave_texture.outputs["Color"], 1: normal}, - attrs={'operation': 'MULTIPLY'}) - - face_area = nw.new_node('GeometryNodeInputMeshFaceArea') - - math_1 = nw.new_node(Nodes.Math, - input_kwargs={0: face_area}, - attrs={'operation': 'SQRT'}) - - value_3 = nw.new_node(Nodes.Value) - value_3.outputs[0].default_value = 1.0 - - math = nw.new_node(Nodes.Math, - input_kwargs={0: math_1, 1: value_3}, - attrs={'operation': 'MULTIPLY'}) - - vector_math_2 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: vector_math_1.outputs["Vector"], 1: math}, - attrs={'operation': 'MULTIPLY'}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 'Offset': vector_math_2.outputs["Vector"]}) - - capture_attribute = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': set_position, 1: wave_texture.outputs["Color"]}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': capture_attribute.outputs["Geometry"], 'offset_barkgeo1': capture_attribute.outputs["Attribute"]}) - - -''' -def create_berry(sphere): - # Create a sphere - phyllotaxis_distribute('berry', sphere, - min_radius_pct=0, max_radius=1, - sin_max=2.5, sin_clamp_max=.8, - z_max=.8, z_clamp=.7) -''' - - -def sample_points_and_normals(obj, max_density=3, - surface_dist=1, max_points=10000): - # Need to instantiate point distribute - m = add_node_modifier(obj) - ng = m.node_group - inp = ng.nodes.get('Group Input') - out = ng.nodes.get('Group Output') - dist = ng.nodes.new(type='GeometryNodeDistributePointsOnFaces') - pos = ng.nodes.new('GeometryNodeInputPosition') - scale_factor = ng.nodes.new('ShaderNodeValue') - mult_normal = ng.nodes.new('ShaderNodeVectorMath') - add_pos = ng.nodes.new('ShaderNodeVectorMath') - set_pos = ng.nodes.new('GeometryNodeSetPosition') - to_vtx = ng.nodes.new('GeometryNodePointsToVertices') - - new_link(ng, inp, 'Geometry', dist, 'Mesh') - new_link(ng, dist, 'Normal', mult_normal, 0) - new_link(ng, scale_factor, 0, mult_normal, 1) - new_link(ng, pos, 0, add_pos, 0) - new_link(ng, mult_normal, 0, add_pos, 1) - new_link(ng, dist, 'Points', set_pos, 'Geometry') - new_link(ng, add_pos, 0, set_pos, 'Position') - new_link(ng, set_pos, 'Geometry', to_vtx, 'Points') - new_link(ng, to_vtx, 'Mesh', out, 
'Geometry') - - mult_normal.operation = 'MULTIPLY' - scale_factor.outputs[0].default_value = surface_dist - dist.distribute_method = 'POISSON' - dist.inputs.get('Density Max').default_value = max_density - - # Get point coordinates - dgraph = C.evaluated_depsgraph_get() - obj_eval = obj.evaluated_get(dgraph) - vtx = mesh.vtx2cds(obj_eval.data.vertices, obj_eval.matrix_world) - - # Get normals - scale_factor.outputs[0].default_value = 1 - for l in ng.links: - if l.from_node == pos: - ng.links.remove(l) - - dgraph = C.evaluated_depsgraph_get() - obj_eval = obj.evaluated_get(dgraph) - normals = mesh.vtx2cds(obj_eval.data.vertices, np.eye(4)) - - obj.modifiers.remove(obj.modifiers[-1]) - D.node_groups.remove(ng) - - idxs = mesh.subsample_vertices(vtx, max_num=max_points) - return vtx[idxs], normals[idxs] - diff --git a/infinigen/assets/trees/utils/helper.py b/infinigen/assets/trees/utils/helper.py deleted file mode 100644 index 55a724a4c..000000000 --- a/infinigen/assets/trees/utils/helper.py +++ /dev/null @@ -1,242 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Alejandro Newell - - -import bpy -import numpy as np - -from infinigen.core.util.logging import Suppress - -C = bpy.context -D = bpy.data - - -def set_active_obj(obj): - if not C.active_object == obj: - try: - bpy.ops.object.mode_set(mode='OBJECT') - except: - pass - bpy.ops.object.select_all(action='DESELECT') - obj.select_set(True) - C.view_layer.objects.active = obj - bpy.ops.object.mode_set(mode='OBJECT') - - -def config_rendering(resolution=(480, 640), renderer='cycles', render_samples=64, - render_exr=False, thread_limit=8): - """Adjust rendering settings. 
- - Args: - resolution: Integer tuple for image resolution - renderer: Either 'cycles' or 'eevee' - render_samples: Integer that determines sample quality and rendering time - render_exr: Set true to output segmentation and depth ground truth - thread_limit: Maximum number of render threads - """ - - if renderer == 'eevee': - C.scene.render.engine = 'BLENDER_EEVEE' - C.scene.eevee.taa_render_samples = render_samples - - elif renderer == 'cycles': - C.scene.render.engine = 'CYCLES' - # C.scene.cycles.device = 'GPU' - C.scene.cycles.samples = render_samples - C.scene.cycles.use_denoising = True - # C.scene.cycles.denoiser = 'OPTIX' - - C.scene.render.resolution_x = resolution[1] - C.scene.render.resolution_y = resolution[0] - # C.scene.render.threads_mode = 'FIXED' - # C.scene.render.threads = thread_limit - - if render_exr: - C.scene.render.image_settings.file_format = 'OPEN_EXR_MULTILAYER' - C.scene.render.image_settings.color_mode = 'RGBA' - C.scene.render.image_settings.color_depth = '32' - C.window.view_layer.use_pass_object_index = True - C.window.view_layer.use_pass_material_index = True - C.window.view_layer.use_pass_z = True - - else: - C.scene.render.image_settings.color_mode = 'RGB' - - -def create_collection(name, objs): - c_names = [] - for c_idx, c in enumerate(D.collections): - if c_idx > 0: - c_names += [c.name] - - name_ = name - count = 1 - while name_ in c_names: - name_ = f'{name}_{count}' - count += 1 - - bpy.ops.object.select_all(action='DESELECT') - for o in objs: - o.select_set(True) - - with Suppress(): - bpy.ops.object.move_to_collection(collection_index=0, is_new=True, new_collection_name=name_) - - return name_ - - -def traverse_tree(t): - # https://blender.stackexchange.com/questions/172559/python-how-to-move-collection-into-another-collection - yield t - for child in t.children: - yield from traverse_tree(child) - - -def parent_lookup(coll): - parent_lookup = {} - for coll in traverse_tree(coll): - for c in coll.children.keys(): - parent_lookup.setdefault(c, coll) - return parent_lookup - - -def collect_collections(name, colls): - # Get all collections of the scene and their parents in a dict - coll_scene = C.scene.collection - coll_parents = parent_lookup(coll_scene) - - # Create target collection - D.collections.new(name) - coll_target = D.collections[name] - coll_scene.children.link(coll_target) - - for coll in colls: - coll_parent = coll_parents.get(coll.name) - coll_parent.children.unlink(coll) - coll_target.children.link(coll) - - -def remove_collection(name): - collection = D.collections.get(name) - for obj in collection.objects: - D.objects.remove(obj, do_unlink=True) - D.collections.remove(collection) - - -def hide_collection(collection): - if isinstance(collection, str): - name = collection - collection = D.collections[name] - else: - name = collection.name - - vlayer = C.scene.view_layers[0] - vlayer.layer_collection.children[name].hide_viewport = True - collection.hide_render = True - - -def clear_collections(): - c_names = [] - for c_idx, c in enumerate(D.collections): - if c_idx > 0: - c_names += [c.name] - - for c_name in c_names: - remove_collection(c_name) - - -def run_cleanup(): - for d in [D.meshes, D.materials, D.images, D.particles]: - for d_ in d: - if d_.users == 0: - d.remove(d_) - for d in [D.textures, D.node_groups]: - for d_ in d: - d.remove(d_) - - -def reset_scene(add_camera=False, clear_materials=False, obj_to_keep_list=[]): - """Clear and reset scene.""" - set_active_obj(D.objects[0]) - - for obj in D.objects: - obj.hide_viewport =
False - - # Delete everything - clear_collections() - # bpy.ops.object.select_all(action='SELECT') - for obj in bpy.context.scene.objects: - if obj.name not in obj_to_keep_list: - obj.select_set(True) - bpy.ops.object.delete(confirm=False) - run_cleanup() - - if add_camera: - # Initialize camera - v = min(1,max(0,(np.random.randn() * .3 + .5))) - v = 0 - camera_height = .5 + 3 * v # np.random.uniform(1,5) # + np.random.randn() * .2 - camera_pitch = np.pi * .45 # + np.random.randn() * np.pi * .1 - camera_pitch = min(max(camera_pitch, np.pi * .4), np.pi * .5) - camera_pitch = np.pi * .65 # (1-v) * np.pi * .6 + np.pi * .2 - - camera_pitch = np.pi * 0.5 - camera_height = 3 - - bpy.ops.object.camera_add(location=(0, -6, camera_height), rotation=(camera_pitch, 0, 0)) - cam = D.objects[0] - C.scene.camera = cam - cam.data.lens = 20 - - if clear_materials: # Regardless of number of users - for m_idx in range(len(D.materials)): - D.materials.remove(D.materials[-1]) - - -# ============================================================================== -# Transformation utils -# ============================================================================== - - -def compute_dists(a, b): - deltas = a[:,None] - b[None] - d = np.linalg.norm(deltas, axis=-1) - return d, deltas - - -def get_cos_sin(angle, convert_to_rad=False): - if convert_to_rad: - angle = angle * np.pi / 180 - return np.cos(angle), np.sin(angle) - - -def rodrigues_rot(vec, axis, angle, convert_to_rad=False): - axis = axis / np.linalg.norm(axis) - cs, sn = get_cos_sin(angle, convert_to_rad) - return vec * cs + sn * np.cross(axis, vec) + axis * np.dot(axis, vec) * (1 - cs) - - -def get_T_mat(distance, angle, convert_to_rad=True): - T = np.identity(3) - T[0,2] = distance - rot = np.identity(3) - cs, sn = get_cos_sin(angle, convert_to_rad) - rot[0,:2] = cs, -sn - rot[1,:2] = sn, cs - - return np.matmul(rot, T) - - -def valid_pos(d0=2, d1=10): - camera_pos = C.scene.camera.location - view_angle = C.scene.camera.rotation_euler[2] - tmp_ang = (C.scene.camera.data.angle / 2) * .9 - tmp_ang = np.random.rand() * 2 * tmp_ang - tmp_ang - tmp_ang += view_angle - tmp_dist = np.random.rand() * (d1 - d0) + d0 - root_pos = np.array([camera_pos[0], camera_pos[1]]) - v_dir = np.array([-np.sin(tmp_ang), np.cos(tmp_ang)]) - - return root_pos + tmp_dist * v_dir diff --git a/infinigen/assets/trees/utils/materials.py b/infinigen/assets/trees/utils/materials.py deleted file mode 100644 index 3ada36d39..000000000 --- a/infinigen/assets/trees/utils/materials.py +++ /dev/null @@ -1,252 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Alejandro Newell, Lingjie Mei - - -import numpy as np -import os -import sys -import colorsys - -import bpy - -from infinigen.core.util.color import hsv2rgba -from . 
import helper - -C = bpy.context -D = bpy.data - - -def get_materials(prefix=''): - return [m for m in D.materials if f'{prefix}Material' in m.name] - - -def new_material(prefix=''): - n_idx = len(get_materials(prefix)) - m = D.materials.new(f'{prefix}Material{n_idx:04d}') - m.use_nodes = True - - return m - - -def init_color_material(color, prefix='', hsv_variance=[0,0,0], - roughness=.8, specular=.05, is_hsv=True, - is_emission=False, emit_strength=1): - m = new_material(prefix) - nt = m.node_tree - color = np.array(color) + np.random.randn(3) * np.array(hsv_variance) - color = list(color.clip(0,1)) - color = (hsv2rgba(*color)) - - if is_emission: - out_node = nt.nodes.get('Material Output') - nt.nodes.new('ShaderNodeEmission') - em = nt.nodes.get('Emission') - em.inputs.get('Strength').default_value = emit_strength - em.inputs.get('Color').default_value = color - new_link(nt, em, 'Emission', out_node, 'Surface') - - else: - bsdf_node = nt.nodes.get('Principled BSDF') - bsdf_node.inputs.get('Base Color').default_value = color - bsdf_node.inputs.get('Roughness').default_value = roughness - bsdf_node.inputs.get('Specular').default_value = specular - - return m - - -def assign_material(obj, m=None, prefix='', m_idx=0, slot_idx=0): - helper.set_active_obj(obj) - while len(obj.material_slots) < (slot_idx+1): - bpy.ops.object.material_slot_add() - obj.active_material_index = slot_idx - - if m is not None: - obj.active_material = m - else: - obj.active_material = get_materials(prefix)[m_idx] - - -def uv_smart_project(obj): - helper.set_active_obj(obj) - bpy.ops.object.mode_set(mode='EDIT') - bpy.ops.mesh.select_all(action='SELECT') - bpy.ops.uv.smart_project() - bpy.ops.object.mode_set(mode='OBJECT') - - -def new_link(nt, node1, field1, node2, field2): - node_out = node1.outputs[field1] if isinstance(field1, int) else node1.outputs.get(field1) - node_inp = node2.inputs[field2] if isinstance(field2, int) else node2.inputs.get(field2) - nt.links.new(node_out, node_inp) - - -def create_leaf_material(src_hue, glow=False): - m = new_material('Leaf') - nt = m.node_tree - - if glow: - out_node = nt.nodes.get('Material Output') - nt.nodes.new('ShaderNodeEmission') - em = nt.nodes.get('Emission') - em.inputs.get('Strength').default_value = 1 - em.inputs.get('Color').default_value = (*colorsys.hsv_to_rgb(src_hue + np.random.randn() * .1, 1, 1), 1) - new_link(nt, em, 'Emission', out_node, 'Surface') - - else: - info_node = nt.nodes.new('ShaderNodeObjectInfo') - add_node = nt.nodes.new('ShaderNodeVectorMath') - mult_node = nt.nodes.new('ShaderNodeVectorMath') - add2_node = nt.nodes.new('ShaderNodeVectorMath') - noise_node = nt.nodes.new('ShaderNodeTexWhiteNoise') - sep_node = nt.nodes.new('ShaderNodeSeparateXYZ') - hsv_node = nt.nodes.new('ShaderNodeCombineHSV') - - sep_loc_node = nt.nodes.new('ShaderNodeSeparateXYZ') - loc_mult_node = nt.nodes.new('ShaderNodeMath') - loc_add_node = nt.nodes.new('ShaderNodeMath') - - bsdf_node = nt.nodes.get('Principled BSDF') - mult_node.operation = 'MULTIPLY' - loc_mult_node.operation = 'MULTIPLY' - - add_node.inputs[1].default_value += np.random.randn(3) - # mult_node.inputs[1].default_value = [.07,.2,.2] - # add2_node.inputs[1].default_value = [.22,.9,.1] - # loc_mult_node.inputs[1].default_value = 0 - mult_node.inputs[1].default_value = [.05,.4,.4] - add2_node.inputs[1].default_value = [src_hue + np.random.randn() * .05,.6,.1] - loc_mult_node.inputs[1].default_value = 0 #-.01 - # add2_node.inputs[1].default_value += np.random.randn(3) * .1 - - # Get HSV color (output 
of sep_node) - new_link(nt, info_node, 'Random', add_node, 0) - new_link(nt, add_node, 0, noise_node, 'Vector') - new_link(nt, noise_node, 'Color', mult_node, 0) - new_link(nt, mult_node, 0, add2_node, 0) - new_link(nt, add2_node, 0, sep_node, 0) - - # Modify H based on Z - nt.links.new(info_node.outputs.get('Location'), sep_loc_node.inputs[0]) - nt.links.new(sep_loc_node.outputs.get('Z'), loc_mult_node.inputs[0]) - nt.links.new(loc_mult_node.outputs[0], loc_add_node.inputs[0]) - nt.links.new(sep_node.outputs[0], loc_add_node.inputs[1]) - - # Combine and assign color - nt.links.new(loc_add_node.outputs[0], hsv_node.inputs.get('H')) - nt.links.new(sep_node.outputs[1], hsv_node.inputs.get('S')) - nt.links.new(sep_node.outputs[2], hsv_node.inputs.get('V')) - nt.links.new(hsv_node.outputs[0], bsdf_node.inputs.get('Base Color')) - - -def get_tex_nodes(m): - """Returns Image Texture node, creates one if it doesn't exist.""" - nt = m.node_tree - m.cycles.displacement_method = 'DISPLACEMENT' - - # Check whether the Image Texture node has been added - diff_img_node = nt.nodes.get('Image Texture') - rough_img_node = nt.nodes.get('Image Texture.001') - disp_img_node = nt.nodes.get('Image Texture.002') - - if diff_img_node is None: - # Create new node for linking images - nt.nodes.new('ShaderNodeTexImage') - nt.nodes.new('ShaderNodeTexImage') - nt.nodes.new('ShaderNodeTexImage') - nt.nodes.new('ShaderNodeMapRange') - diff_img_node = nt.nodes.get('Image Texture') - rough_img_node = nt.nodes.get('Image Texture.001') - rough_scaling_node = nt.nodes.get('Map Range') - disp_img_node = nt.nodes.get('Image Texture.002') - - # Link to main node - bsdf_node = nt.nodes.get('Principled BSDF') - nt.links.new(diff_img_node.outputs.get('Color'), - bsdf_node.inputs.get('Base Color')) - nt.links.new(rough_img_node.outputs.get('Color'), - rough_scaling_node.inputs.get('Value')) - nt.links.new(rough_scaling_node.outputs.get('Result'), - bsdf_node.inputs.get('Roughness')) - - # Set up nodes for mixing in color - disp_node = nt.nodes.new('ShaderNodeDisplacement') - disp_node.space = 'WORLD' - disp_node.inputs.get('Scale').default_value = 0.05 - out_node = nt.nodes.get('Material Output') - nt.links.new(disp_img_node.outputs.get('Color'), - disp_node.inputs.get('Height')) - nt.links.new(disp_node.outputs.get('Displacement'), - out_node.inputs.get('Displacement')) - - return diff_img_node, rough_img_node, disp_img_node - - -def setup_material(m, txt_paths, metal_prob=.2, transm_prob=.2, emit_prob=0): - """Initialize material given list of paths to diff, rough, disp images.""" - - # Load any images that haven't been loaded already - img_ref = [tpath.split('/')[-1] for tpath in txt_paths] - for img_idx, img in enumerate(img_ref): - if not img in D.images: - try: - D.images.load(txt_paths[img_idx]) - except: - pass - - # Initialize and update diff, rough, and disp shader nodes - txt_nodes = get_tex_nodes(m) - for n_idx, n in enumerate(txt_nodes): - try: - im = D.images.get(img_ref[n_idx]) - if n_idx > 0: - im.colorspace_settings.name = 'Non-Color' - n.image = im - except: - pass - - nt = m.node_tree - bsdf = nt.nodes.get('Principled BSDF') - rough_scale = nt.nodes.get('Map Range') - - bsdf.inputs.get('Metallic').default_value = 0 - bsdf.inputs.get('Transmission').default_value = 0 - bsdf.inputs.get('IOR').default_value = 1.45 - rough_scale.inputs.get('To Max').default_value = 1 - - if np.random.rand() < metal_prob: - bsdf.inputs.get('Metallic').default_value = 1 - rough_scale.inputs.get('To Max').default_value = .5 - - elif 
np.random.rand() < transm_prob: - bsdf.inputs.get('Transmission').default_value = 1 - bsdf.inputs.get('IOR').default_value = 1.05 + np.random.rand() * .3 - rough_scale.inputs.get('To Max').default_value = .2 - - if np.random.rand() < emit_prob: - out_node = nt.nodes.get('Material Output') - - nt.nodes.new('ShaderNodeEmission') - nt.nodes.new('ShaderNodeTexNoise') - nt.nodes.new('ShaderNodeValToRGB') # ColorRamp - nt.nodes.new('ShaderNodeMixShader') - - em = nt.nodes.get('Emission') - em.inputs.get('Strength').default_value = 5 - em.inputs.get('Color').default_value = (*colorsys.hsv_to_rgb(np.random.rand(), 1, 1), 1) - - noise = nt.nodes.get('Noise Texture') - noise.inputs.get('Scale').default_value = np.random.uniform(1,10) - noise.inputs.get('Distortion').default_value = np.random.uniform(3,10) - - ramp = nt.nodes.get('ColorRamp') - ramp.color_ramp.elements[0].position = .4 - ramp.color_ramp.elements[1].position = .45 - new_link(nt, noise, 'Color', ramp, 'Fac') - - mix = nt.nodes.get('Mix Shader') - new_link(nt, ramp, 'Color', mix, 'Fac') - new_link(nt, bsdf, 'BSDF', mix, 'Shader') - new_link(nt, em, 'Emission', mix, 'Shader') - new_link(nt, mix, 'Shader', out_node, 'Surface') diff --git a/infinigen/assets/trees/utils/mesh.py b/infinigen/assets/trees/utils/mesh.py deleted file mode 100644 index 2217af26f..000000000 --- a/infinigen/assets/trees/utils/mesh.py +++ /dev/null @@ -1,287 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Alejandro Newell - - -import bpy -import numpy as np - -from . import helper - -from mathutils import Vector, Matrix - -C = bpy.context -D = bpy.data - - -def init_mesh(name, verts=[], edges=[], faces=[], coll=None): - mesh = D.meshes.new(name) - obj = D.objects.new(mesh.name, mesh) - - if coll is None: - coll = bpy.context.scene.collection - else: - coll = D.collections[coll] - - coll.objects.link(obj) - helper.set_active_obj(obj) - - mesh.from_pydata(verts, edges, faces) - - return obj - - -def duplicate_obj(obj, name): - new_obj = obj.copy() - new_obj.name = name - new_obj.data = new_obj.data.copy() - - col = obj.users_collection[0] - col.objects.link(new_obj) - - helper.set_active_obj(new_obj) - return new_obj - - -def finalize_obj(obj): - helper.set_active_obj(obj) - bpy.ops.object.convert(target='MESH') - - -def init_vertex(pos): - bpy.ops.mesh.primitive_cube_add(size=1, enter_editmode=True, align='WORLD', - location=pos, scale=(1, 1, 1)) - bpy.ops.mesh.merge(type='COLLAPSE') - bpy.ops.object.editmode_toggle() - - return C.active_object - - -def get_all_vtx_pos(obj): - n_cds = len(obj.data.vertices) - all_cds = np.zeros(n_cds * 3) - obj.data.vertices.foreach_get('co', all_cds) - return all_cds.reshape(-1, 3) - - -def vtx2cds(vtxs, world_mat): - n_cds = len(vtxs) - all_cds = np.zeros(n_cds * 3) - vtxs.foreach_get('co', all_cds) - all_cds = all_cds.reshape(-1,3) - all_cds = add_ones(all_cds.reshape(-1, 3)) - m_world = np.array(world_mat) - all_cds = np.matmul(m_world, all_cds.T).T[:,:3] - - return all_cds - - -def sample_vtxs(obj, emit_from='VOLUME', n=1000, seed=1): - # Make object current active object - bpy.ops.object.mode_set(mode='OBJECT') - C.view_layer.objects.active = obj - - # Add particle system modifier - bpy.ops.object.modifier_add(type='PARTICLE_SYSTEM') - p = D.particles[-1] - - # Adjust modifier settings - p.count = n - p.frame_end = 1 - p.emit_from = emit_from - p.distribution = 'RAND' - 
p.use_modifier_stack = True - p.physics_type = 'NO' - obj.particle_systems[-1].seed = seed - - # Get particle locations (relative to object) - obj_eval = obj.evaluated_get(C.evaluated_depsgraph_get()) - all_cds = np.zeros(n * 3) - obj_eval.particle_systems[-1].particles.foreach_get('location', all_cds) - - obj.modifiers.remove(obj.modifiers[-1]) - D.particles.remove(D.particles[-1]) - - return all_cds.reshape(-1,3) - - -def get_pts_from_shape(shape_fn, n=10, emit_from="VOLUME", loc=(0,0,0), - scaling=1, pt_offset=0): - if isinstance(pt_offset, list): - pt_offset = np.array([pt_offset]) - if isinstance(scaling, list): - scaling = Vector(scaling) - shape_fn(location=loc) - obj = C.active_object - obj.scale *= scaling - pts = sample_vtxs(obj, n=n, emit_from=emit_from, seed=np.random.randint(100)) - pts += pt_offset - D.objects.remove(obj) - return pts - - -def select_vtx_by_pos(obj, pos): - bpy.ops.object.mode_set(mode = 'EDIT') - bpy.ops.mesh.select_mode(type="VERT") - bpy.ops.mesh.select_all(action = 'DESELECT') - bpy.ops.object.mode_set(mode = 'OBJECT') - n_cds = len(obj.data.vertices) - all_cds = np.zeros(n_cds * 3) - obj.data.vertices.foreach_get('co', all_cds) - idx = np.abs(all_cds.reshape(n_cds, 3) - pos).sum(1).argmin() - obj.data.vertices[idx].select = True - bpy.ops.object.mode_set(mode = 'EDIT') - - return idx - - -def select_vtx_by_idx(obj, idx, deselect=False): - if not isinstance(idx, list): - idx = [idx] - bpy.ops.object.mode_set(mode = 'EDIT') - bpy.ops.mesh.select_mode(type="VERT") - if deselect: - bpy.ops.mesh.select_all(action = 'DESELECT') - bpy.ops.object.mode_set(mode = 'OBJECT') - for i in idx: - obj.data.vertices[i].select = True - bpy.ops.object.mode_set(mode = 'EDIT') - - return idx - - -def extrude_path(obj, path): - helper.set_active_obj(obj) - bpy.ops.object.mode_set(mode='EDIT') - src_idx = select_vtx_by_pos(obj, path[0]) - deltas = path[1:] - path[:-1] - start_idx = len(obj.data.vertices) - for i in range(len(deltas)): - bpy.ops.mesh.extrude_region_move(TRANSFORM_OT_translate={"value":deltas[i]}) - - return src_idx, start_idx - - -def get_vtx_obj(): - if not 'vtx' in D.objects: - bpy.ops.object.mode_set(mode = 'OBJECT') - bpy.ops.mesh.primitive_cube_add(size=2, enter_editmode=False, align='WORLD', - location=(0, 0, 0), scale=(1, 1, 1)) - bpy.ops.object.editmode_toggle() - bpy.ops.mesh.merge(type='COLLAPSE') - bpy.ops.object.editmode_toggle() - obj = C.active_object - obj.name = 'vtx' - - return D.objects['vtx'] - - -def subsample_vertices(v, max_num=500): - if len(v) > max_num: - rand_order = np.random.permutation(len(v)) - return np.sort(rand_order[:max_num]) - else: - return np.arange(len(v)) - - -def add_ones(x): - return np.concatenate([x, np.ones_like(x[:,:1])], 1) - - -def get_world_coords(obj, subset=None): - dgraph = C.evaluated_depsgraph_get() - obj_eval = obj.evaluated_get(dgraph) - - vts = obj_eval.data.vertices - all_cds = np.zeros(len(vts) * 3) - vts.foreach_get('co', all_cds) - all_cds = add_ones(all_cds.reshape(-1, 3)) - if subset is not None: - all_cds = all_cds[subset] - - m_world = np.array(obj_eval.matrix_world) - all_cds = np.matmul(m_world, all_cds.T).T[:,:3] - - return all_cds - - -def arr_world_to_camera_view(scene, obj, coord): - # Modified to support array operations from bpy_extras.object_utils.world_to_camera_view - cam_matrix = np.array(obj.matrix_world.normalized().inverted()) - co_local = np.matmul(cam_matrix, add_ones(coord).T).T[:,:3] - z = -co_local[:,2] - - camera = obj.data - frame = [np.array(v) for v in 
camera.view_frame(scene=scene)[:3]] - if camera.type != 'ORTHO': - frame = [(-v / v[2])[None,:] * z[:,None] for v in frame] - for i in range(len(frame)): - frame[i][z == 0][:,:2] = .5 - - min_x, max_x = frame[2][:,0], frame[1][:,0] - min_y, max_y = frame[1][:,1], frame[0][:,1] - - x = (co_local[:,0] - min_x) / (max_x - min_x) - y = (co_local[:,1] - min_y) / (max_y - min_y) - - return np.stack([x, y, z], 1) - - -def get_coords_clip(obj, f0, f1, subset=None): - all_cds = [] - for i in range(f0,f1): - C.scene.frame_set(i) - cds = get_world_coords(obj, subset) - all_cds += [cds] - - return np.stack(all_cds, 0) - - -def get_visible_vertices(cam, vertices, co2D=None, limit=0.02): - if co2D is None: - co2D = arr_world_to_camera_view(C.scene, cam, vertices) - - bpy.ops.mesh.primitive_cube_add() - bpy.ops.transform.resize(value=(0.01, 0.01, 0.01)) - cube = C.active_object - - in_frame = (co2D[:,0] >= 0) & (co2D[:,0] <= 1) - in_frame &= (co2D[:,1] >= 0) & (co2D[:,1] <= 1) - in_frame &= (co2D[:,2] > 0) - - is_visible = in_frame.copy() - - valid_idxs = np.arange(len(in_frame))[in_frame] - - for i in valid_idxs: - v = Vector(vertices[i]) - cube.location = v - depsgraph = C.evaluated_depsgraph_get() - - # Try a ray cast, in order to test the vertex visibility from the camera - location= C.scene.ray_cast(depsgraph, cam.location, (v - cam.location).normalized() ) - # If the ray hits something and if this hit is close to the vertex, we assume this is the vertex - if not (location[0] and (v - location[1]).length < limit): - is_visible[i] = False - - bpy.ops.object.select_all(action='DESELECT') - cube.select_set(True) - bpy.ops.object.delete(confirm=False) - - return co2D, is_visible, in_frame - -def sanity_check_viz(all_pts, is_visible, in_frame, frame_idx=0): - C.scene.frame_set(frame_idx) - for i in range(all_pts.shape[1]): - pt = all_pts[frame_idx,i] - vis = is_visible[frame_idx,i] - - bpy.ops.mesh.primitive_cube_add() - bpy.ops.transform.resize(value=(0.02, 0.02, 0.02)) - cube = C.active_object - cube.location = pt - bpy.ops.object.material_slot_add() - cube.material_slots[0].material = D.materials[2] if vis else D.materials[1] - if not in_frame[frame_idx,i]: - cube.material_slots[0].material = D.materials[0] diff --git a/infinigen/assets/tropic_plants/coconut_tree.py b/infinigen/assets/tropic_plants/coconut_tree.py deleted file mode 100644 index c2f6c5b9e..000000000 --- a/infinigen/assets/tropic_plants/coconut_tree.py +++ /dev/null @@ -1,1294 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
- -# Authors: Beining Han - - -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core import surface -from infinigen.core.placement.factory import AssetFactory -import numpy as np -from infinigen.core.util.color import hsv2rgba -from infinigen.core.util import blender as butil -from infinigen.assets.tropic_plants.leaf_palm_tree import LeafPalmTreeFactory -from infinigen.assets.fruits.coconutgreen import FruitFactoryCoconutgreen - - -@node_utils.to_nodegroup('nodegroup_pedal_cross_contour_top', singleton=False, type='GeometryNodeTree') -def nodegroup_pedal_cross_contour_top(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - normal_2 = nw.new_node(Nodes.InputNormal) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Y', 0.0), - ('NodeSocketFloat', 'X', 0.0)]) - - combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': group_input.outputs["X"], 'Y': group_input.outputs["Y"]}) - - multiply = nw.new_node(Nodes.VectorMath, - input_kwargs={0: normal_2, 1: combine_xyz_3}, - attrs={'operation': 'MULTIPLY'}) - - index_1 = nw.new_node(Nodes.Index) - - greater_than = nw.new_node(Nodes.Math, - input_kwargs={0: index_1, 1: 63.0}, - attrs={'operation': 'GREATER_THAN'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Vector': multiply.outputs["Vector"], 'Value': greater_than}) - - -@node_utils.to_nodegroup('nodegroup_pedal_cross_contour_bottom', singleton=False, type='GeometryNodeTree') -def nodegroup_pedal_cross_contour_bottom(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - normal = nw.new_node(Nodes.InputNormal) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Y', 0.0), - ('NodeSocketFloat', 'X', 0.0)]) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': group_input.outputs["X"], 'Y': group_input.outputs["Y"]}) - - multiply = nw.new_node(Nodes.VectorMath, - input_kwargs={0: normal, 1: combine_xyz}, - attrs={'operation': 'MULTIPLY'}) - - index = nw.new_node(Nodes.Index) - - less_than = nw.new_node(Nodes.Math, - input_kwargs={0: index, 1: 64.0}, - attrs={'operation': 'LESS_THAN'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Vector': multiply.outputs["Vector"], 'Value': less_than}) - - -@node_utils.to_nodegroup('nodegroup_trunk_radius_001', singleton=False, type='GeometryNodeTree') -def nodegroup_trunk_radius_001(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - random_value = nw.new_node(Nodes.RandomValue, - input_kwargs={2: 0.01, 3: 0.05}) - - spline_parameter = nw.new_node(Nodes.SplineParameter) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': spline_parameter.outputs["Factor"], 3: 1.0, 4: 0.0}, - attrs={'clamp': False}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: spline_parameter.outputs["Factor"], 1: 10000.0}, - attrs={'operation': 'MULTIPLY'}) - - floor = nw.new_node(Nodes.Math, - input_kwargs={0: multiply}, - attrs={'operation': 'FLOOR'}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: multiply, 1: floor}, - attrs={'operation': 'SUBTRACT'}) - - float_curve = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': subtract}) - node_utils.assign_curve(float_curve.mapping.curves[0], - [(0.0, 0.0156), (0.2545, 0.2), (0.5182, 0.0344), (0.7682, 0.2375), (1.0, 0.0)]) 
- - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: float_curve, 1: 1.0}, - attrs={'operation': 'MULTIPLY'}) - - multiply_2 = nw.new_node(Nodes.Math, - input_kwargs={0: map_range.outputs["Result"], 1: multiply_1}, - attrs={'operation': 'MULTIPLY'}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: map_range.outputs["Result"], 1: multiply_2}) - - add_1 = nw.new_node(Nodes.Math, - input_kwargs={0: random_value.outputs[1], 1: add}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Value': add_1}) - - -@node_utils.to_nodegroup('nodegroup_coutour_cross_geometry', singleton=False, type='GeometryNodeTree') -def nodegroup_coutour_cross_geometry(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - curve_circle = nw.new_node(Nodes.CurveCircle, - input_kwargs={'Resolution': 128, 'Radius': 0.05}) - - pedal_cross_coutour_x = nw.new_node(Nodes.Value, - label='pedal_cross_coutour_x') - pedal_cross_coutour_x.outputs[0].default_value = 0.3 - - pedal_cross_contour_bottom = nw.new_node(nodegroup_pedal_cross_contour_bottom().name, - input_kwargs={'X': pedal_cross_coutour_x}) - - set_position_1 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': curve_circle.outputs["Curve"], - 'Selection': pedal_cross_contour_bottom.outputs["Value"], - 'Offset': pedal_cross_contour_bottom.outputs["Vector"]}) - - pedal_cross_coutour_y = nw.new_node(Nodes.Value, - label='pedal_cross_coutour_y') - pedal_cross_coutour_y.outputs[0].default_value = 0.3 - - pedal_cross_contour_top = nw.new_node(nodegroup_pedal_cross_contour_top().name, - input_kwargs={'Y': pedal_cross_coutour_y, 'X': pedal_cross_coutour_x}) - - set_position_2 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': set_position_1, - 'Selection': pedal_cross_contour_top.outputs["Value"], - 'Offset': pedal_cross_contour_top.outputs["Vector"]}) - - noise_texture_2 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'W': 7.0, 'Detail': 15.0}, - attrs={'noise_dimensions': '4D'}) - - scale = nw.new_node(Nodes.VectorMath, - input_kwargs={0: noise_texture_2.outputs["Fac"], 'Scale': 0.0}, - attrs={'operation': 'SCALE'}) - - set_position_5 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': set_position_2, 'Offset': scale.outputs["Vector"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_position_5}) - - -@node_utils.to_nodegroup('nodegroup_pedal_z_contour', singleton=False, type='GeometryNodeTree') -def nodegroup_pedal_z_contour(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - spline_parameter = nw.new_node(Nodes.SplineParameter) - - float_curve = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': spline_parameter.outputs["Factor"]}) - node_utils.assign_curve(float_curve.mapping.curves[0], - [(0.0, 0.4094), (0.1773, 0.475), (0.3795, 0.5062), (0.5864, 0.5187), (0.7202, 0.5084), - (0.8636, 0.4781), (1.0, 0.375)]) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Value', 0.5)]) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: float_curve, 1: group_input.outputs["Value"]}, - attrs={'operation': 'MULTIPLY'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Value': multiply}) - - -@node_utils.to_nodegroup('nodegroup_pedal_stem_curvature', singleton=False, type='GeometryNodeTree') -def nodegroup_pedal_stem_curvature(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - position_3 = nw.new_node(Nodes.InputPosition) - - spline_parameter_1 = 
nw.new_node(Nodes.SplineParameter) - - float_curve_1 = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': spline_parameter_1.outputs["Factor"]}) - node_utils.assign_curve(float_curve_1.mapping.curves[0], - [(0.0, 0.0688), (0.2545, 0.2281), (0.5023, 0.2563), (0.9773, 0.2656)]) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Value', 0.2)]) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: float_curve_1, 1: group_input.outputs["Value"]}, - attrs={'operation': 'MULTIPLY'}) - - vector_rotate = nw.new_node(Nodes.VectorRotate, - input_kwargs={'Vector': position_3, 'Center': (0.0, 0.0, 0.2), 'Angle': multiply}, - attrs={'rotation_type': 'X_AXIS'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Vector': vector_rotate}) - - -@node_utils.to_nodegroup('nodegroup_node_group_002', singleton=False, type='ShaderNodeTree') -def nodegroup_node_group_002(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - texture_coordinate = nw.new_node(Nodes.TextureCoord) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketColor', 'Color', (0.8, 0.8, 0.8, 1.0)), - ('NodeSocketFloat', 'attribute', 0.0), - ('NodeSocketFloat', 'voronoi scale', 50.0), - ('NodeSocketFloatFactor', 'voronoi randomness', 1.0), - ('NodeSocketFloat', 'seed', 0.0), - ('NodeSocketFloat', 'noise scale', 10.0), - ('NodeSocketFloat', 'noise amount', 1.4), - ('NodeSocketFloat', 'hue min', 0.6), - ('NodeSocketFloat', 'hue max', 1.085)]) - - add = nw.new_node(Nodes.VectorMath, - input_kwargs={0: texture_coordinate.outputs["Object"], 1: group_input.outputs["seed"]}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': add.outputs["Vector"], - 'Scale': group_input.outputs["noise scale"], 'Detail': 1.0}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: noise_texture.outputs["Fac"], 1: group_input.outputs["noise amount"]}, - attrs={'operation': 'MULTIPLY'}) - - voronoi_texture = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'W': group_input.outputs["attribute"], - 'Scale': group_input.outputs["voronoi scale"], - 'Randomness': group_input.outputs["voronoi randomness"]}, - attrs={'voronoi_dimensions': '1D'}) - - add_1 = nw.new_node(Nodes.Math, - input_kwargs={0: multiply, 1: voronoi_texture.outputs["Distance"]}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': add_1, 3: group_input.outputs["hue min"], - 4: group_input.outputs["hue max"]}) - - hue_saturation_value = nw.new_node('ShaderNodeHueSaturation', - input_kwargs={'Value': map_range.outputs["Result"], - 'Color': group_input.outputs["Color"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Color': hue_saturation_value}) - - -@node_utils.to_nodegroup('nodegroup_coconutvein', singleton=False, type='GeometryNodeTree') -def nodegroup_coconutvein(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - index_2 = nw.new_node(Nodes.Index) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': index_2, 1: 400.0, 2: 0.0}, - attrs={'clamp': False}) - - float_curve = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Factor': map_range.outputs["Result"]}) - node_utils.assign_curve(float_curve.mapping.curves[0], - [(0.0, 0.0), (0.2455, 0.0), (0.5091, 0.0), (0.7636, 0.1625), (1.0, 0.4688)]) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Scale': 1.0}, - attrs={'noise_dimensions': '4D'}) - - multiply = nw.new_node(Nodes.VectorMath, - input_kwargs={0: float_curve, 
1: noise_texture.outputs["Color"]}, - attrs={'operation': 'MULTIPLY'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Vector': multiply.outputs["Vector"]}) - - -@node_utils.to_nodegroup('nodegroup_tree_trunk_geometry_001', singleton=False, type='GeometryNodeTree') -def nodegroup_tree_trunk_geometry_001(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Curve', None)]) - - trunkradius_001 = nw.new_node(nodegroup_trunk_radius_001().name) - - set_curve_radius = nw.new_node(Nodes.SetCurveRadius, - input_kwargs={'Curve': group_input.outputs["Curve"], 'Radius': trunkradius_001}) - - trunk_resolution = nw.new_node(Nodes.Integer, - label='TrunkResolution', - attrs={'integer': 32}) - trunk_resolution.integer = 32 - - trunk_radius = nw.new_node(Nodes.Value, - label='TrunkRadius') - trunk_radius.outputs[0].default_value = 0.02 - - curve_circle = nw.new_node(Nodes.CurveCircle, - input_kwargs={'Resolution': trunk_resolution, 'Radius': trunk_radius}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': set_curve_radius, 'Profile Curve': curve_circle.outputs["Curve"], - 'Fill Caps': True}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Mesh': curve_to_mesh, 'Integer': trunk_resolution}) - - -@node_utils.to_nodegroup('nodegroup_truncated_leaf_selection', singleton=False, type='GeometryNodeTree') -def nodegroup_truncated_leaf_selection(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - index_3 = nw.new_node(Nodes.Index) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Value', 0.5)]) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: 1600.0, 1: group_input.outputs["Value"]}, - attrs={'operation': 'MULTIPLY'}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: multiply, 1: uniform(0.92, 0.98)}, - attrs={'operation': 'MULTIPLY'}) - - greater_than = nw.new_node(Nodes.Math, - input_kwargs={0: index_3, 1: multiply_1}, - attrs={'operation': 'GREATER_THAN'}) - - multiply_2 = nw.new_node(Nodes.Math, - input_kwargs={0: multiply, 1: np.clip(normal(0.8, 0.1), 0.7, 0.9)}, - attrs={'operation': 'MULTIPLY'}) - - less_than = nw.new_node(Nodes.Math, - input_kwargs={0: index_3, 1: multiply_2}, - attrs={'operation': 'LESS_THAN'}) - - op_or = nw.new_node(Nodes.BooleanMath, - input_kwargs={0: greater_than, 1: less_than}, - attrs={'operation': 'OR'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Boolean': op_or}) - - -@node_utils.to_nodegroup('nodegroup_random_rotate', singleton=False, type='GeometryNodeTree') -def nodegroup_random_rotate(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - random_value_1 = nw.new_node(Nodes.RandomValue, - input_kwargs={2: -0.2, 3: 0.2}) - - random_value_2 = nw.new_node(Nodes.RandomValue, - input_kwargs={2: -0.5, 3: 0.5, 'Seed': 1}) - - random_value_3 = nw.new_node(Nodes.RandomValue, - input_kwargs={2: -0.2, 3: 0.2, 'Seed': 3}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': random_value_1.outputs[1], 'Y': random_value_2.outputs[1], - 'Z': random_value_3.outputs[1]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Vector': combine_xyz_1}) - - -@node_utils.to_nodegroup('nodegroup_leaf_truncated_rotate', singleton=False, type='GeometryNodeTree') -def nodegroup_leaf_truncated_rotate(nw: NodeWrangler): - # Code generated using version 2.4.3 of 
the node_transpiler - - index_1 = nw.new_node(Nodes.Index) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Value', 0.5)]) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Value"], 1: 0.0}) - - modulo = nw.new_node(Nodes.Math, - input_kwargs={0: index_1, 1: add}, - attrs={'operation': 'MODULO'}) - - divide = nw.new_node(Nodes.Math, - input_kwargs={0: modulo, 1: add}, - attrs={'operation': 'DIVIDE'}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: divide, 1: 6.28}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'Z': multiply}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Vector': combine_xyz}) - - -@node_utils.to_nodegroup('nodegroup_truncated_leaf_stem', singleton=False, type='GeometryNodeTree') -def nodegroup_truncated_leaf_stem(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - curve_line = nw.new_node(Nodes.CurveLine, - input_kwargs={'End': (0.0, 0.0, 0.15)}) - - integer = nw.new_node(Nodes.Integer, - attrs={'integer': 64}) - integer.integer = 64 - - resample_curve_1 = nw.new_node(Nodes.ResampleCurve, - input_kwargs={'Curve': curve_line, 'Count': integer}) - - pedal_stem_curvature_scale = nw.new_node(Nodes.Value, - label='pedal_stem_curvature_scale') - pedal_stem_curvature_scale.outputs[0].default_value = 0.2 - - pedal_stem_curvature = nw.new_node(nodegroup_pedal_stem_curvature().name, - input_kwargs={'Value': pedal_stem_curvature_scale}) - - set_position_4 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': resample_curve_1, 'Offset': pedal_stem_curvature}) - - pedal_z_coutour_scale = nw.new_node(Nodes.Value, - label='pedal_z_coutour_scale') - pedal_z_coutour_scale.outputs[0].default_value = uniform(0.2, 0.4) - - pedal_z_contour = nw.new_node(nodegroup_pedal_z_contour().name, - input_kwargs={'Value': pedal_z_coutour_scale}) - - set_curve_radius_1 = nw.new_node(Nodes.SetCurveRadius, - input_kwargs={'Curve': set_position_4, 'Radius': pedal_z_contour}) - - coutour_cross_geometry = nw.new_node(nodegroup_coutour_cross_geometry().name) - - curve_to_mesh_1 = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': set_curve_radius_1, 'Profile Curve': coutour_cross_geometry, - 'Fill Caps': True}) - - set_material_2 = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': curve_to_mesh_1, - 'Material': surface.shaderfunc_to_material(shader_top_core)}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_material_2}) - - -@node_utils.to_nodegroup('nodegroup_trunk_radius', singleton=False, type='GeometryNodeTree') -def nodegroup_trunk_radius(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - random_value = nw.new_node(Nodes.RandomValue, - input_kwargs={2: 0.01, 3: 0.05}) - - spline_parameter = nw.new_node(Nodes.SplineParameter) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': spline_parameter.outputs["Factor"], 3: 1.0, 4: 0.2}, - attrs={'clamp': False}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: spline_parameter.outputs["Factor"], 1: 10000.0}, - attrs={'operation': 'MULTIPLY'}) - - floor = nw.new_node(Nodes.Math, - input_kwargs={0: multiply}, - attrs={'operation': 'FLOOR'}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: multiply, 1: floor}, - attrs={'operation': 'SUBTRACT'}) - - float_curve = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': subtract}) - 
node_utils.assign_curve(float_curve.mapping.curves[0], [(0.0, 0.0969), (0.5864, 0.1406), (1.0, 0.2906)]) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: float_curve, 1: uniform(0.1, 0.25)}, - attrs={'operation': 'MULTIPLY'}) - - multiply_2 = nw.new_node(Nodes.Math, - input_kwargs={0: map_range.outputs["Result"], 1: multiply_1}, - attrs={'operation': 'MULTIPLY'}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: map_range.outputs["Result"], 1: multiply_2}) - - add_1 = nw.new_node(Nodes.Math, - input_kwargs={0: random_value.outputs[1], 1: add}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Value': add_1}) - - -@node_utils.to_nodegroup('nodegroup_tree_cracks', singleton=False, type='GeometryNodeTree') -def nodegroup_tree_cracks(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None)]) - - spline_parameter = nw.new_node(Nodes.SplineParameter) - - capture_attribute = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': group_input.outputs["Geometry"], - 2: spline_parameter.outputs["Length"]}) - - position = nw.new_node(Nodes.InputPosition) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': position}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: capture_attribute.outputs[2], 1: uniform(0.1, 0.25)}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': separate_xyz.outputs["X"], 'Y': separate_xyz.outputs["Y"], - 'Z': multiply}) - - voronoi_texture = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': combine_xyz, 'Scale': 400.0, 'Randomness': 10.0}, - attrs={'voronoi_dimensions': '4D', 'distance': 'CHEBYCHEV'}) - - colorramp = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': voronoi_texture.outputs["Distance"]}) - colorramp.color_ramp.elements[0].position = 0.6091 - colorramp.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) - colorramp.color_ramp.elements[1].position = 0.6818 - colorramp.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - - normal = nw.new_node(Nodes.InputNormal) - - multiply_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: colorramp.outputs["Color"], 1: normal}, - attrs={'operation': 'MULTIPLY'}) - - multiply_2 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: multiply_1.outputs["Vector"], 1: (-0.01, -0.01, -0.01)}, - attrs={'operation': 'MULTIPLY'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': capture_attribute.outputs["Geometry"], - 'Vector': multiply_2.outputs["Vector"]}) - - -@node_utils.to_nodegroup('nodegroup_leaf_instance_selection_bottom_remove', singleton=False, type='GeometryNodeTree') -def nodegroup_leaf_instance_selection_bottom_remove(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - index_1 = nw.new_node(Nodes.Index) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Ring', 10.0), - ('NodeSocketFloat', 'Segment', 0.5)]) - - divide = nw.new_node(Nodes.Math, - input_kwargs={0: index_1, 1: group_input.outputs["Ring"]}, - attrs={'operation': 'DIVIDE'}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Segment"], 1: 4.0}, - attrs={'operation': 'SUBTRACT'}) - - greater_than = nw.new_node(Nodes.Math, - input_kwargs={0: divide, 1: subtract}, - attrs={'operation': 'GREATER_THAN'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Value': 
greater_than}) - - -@node_utils.to_nodegroup('nodegroup_leaf_random_rotate', singleton=False, type='GeometryNodeTree') -def nodegroup_leaf_random_rotate(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - random_value_1 = nw.new_node(Nodes.RandomValue, - input_kwargs={2: -0.2, 3: 0.2}) - - random_value_3 = nw.new_node(Nodes.RandomValue, - input_kwargs={2: -0.2, 3: 0.2}) - - random_value_2 = nw.new_node(Nodes.RandomValue, - input_kwargs={2: -0.2, 3: 0.2}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': random_value_1.outputs[1], 'Y': random_value_3.outputs[1], - 'Z': random_value_2.outputs[1]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Vector': combine_xyz}) - - -@node_utils.to_nodegroup('nodegroup_leaf_rotate_downward', singleton=False, type='GeometryNodeTree') -def nodegroup_leaf_rotate_downward(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - index = nw.new_node(Nodes.Index) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Value', 0.5)]) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Value"], 1: 0.0}) - - modulo = nw.new_node(Nodes.Math, - input_kwargs={0: index, 1: add}, - attrs={'operation': 'MODULO'}) - - divide = nw.new_node(Nodes.Math, - input_kwargs={0: modulo, 1: add}, - attrs={'operation': 'DIVIDE'}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: divide, 1: 6.28}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'Z': multiply}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Vector': combine_xyz}) - - -def shader_coconut_green_shader(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - texture_coordinate_1 = nw.new_node(Nodes.TextureCoord) - - noise_texture_1 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': texture_coordinate_1.outputs["Object"], 'Scale': 1.0, - 'Detail': 10.0, 'Roughness': 0.7}) - - separate_rgb = nw.new_node(Nodes.SeparateColor, - input_kwargs={'Color': noise_texture_1.outputs["Color"]}) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': separate_rgb.outputs["Green"], 1: 0.4, 2: 0.7, 3: 0.48, 4: 0.52}, - attrs={'interpolation_type': 'SMOOTHSTEP'}) - - map_range_2 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': separate_rgb.outputs["Blue"], 1: 0.4, 2: 0.7, 3: 0.6}, - attrs={'interpolation_type': 'SMOOTHSTEP'}) - - attribute_1 = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'spline parameter'}) - - colorramp = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': attribute_1.outputs["Fac"]}) - colorramp.color_ramp.elements.new(0) - colorramp.color_ramp.elements[0].position = 0.0 - colorramp.color_ramp.elements[0].color = (0.0908, 0.2664, 0.013, 1.0) - colorramp.color_ramp.elements[1].position = 0.01 - colorramp.color_ramp.elements[1].color = (0.0908, 0.2664, 0.013, 1.0) - colorramp.color_ramp.elements[2].position = 1.0 - colorramp.color_ramp.elements[2].color = (0.2462, 0.4125, 0.0044, 1.0) - - hue_saturation_value_1 = nw.new_node('ShaderNodeHueSaturation', - input_kwargs={'Hue': map_range_1.outputs["Result"], - 'Value': map_range_2.outputs["Result"], - 'Color': colorramp.outputs["Color"]}) - - attribute_2 = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'cross section parameter'}) - - group = nw.new_node(nodegroup_node_group_002().name, - input_kwargs={'Color': hue_saturation_value_1, 'attribute': 
attribute_2.outputs["Fac"], - 'seed': 10.0}) - - group_1 = nw.new_node(nodegroup_node_group_002().name, - input_kwargs={'Color': group, 'attribute': attribute_1.outputs["Fac"], 'voronoi scale': 10.0, - 'voronoi randomness': 0.6446, 'seed': -10.0, 'noise amount': 0.48, - 'hue min': 1.32, 'hue max': 0.9}) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': group_1, 'Specular': 0.4773, 'Roughness': 0.4455}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': principled_bsdf}) - - -@node_utils.to_nodegroup('nodegroup_coconut_vein_geometry', singleton=False, type='GeometryNodeTree') -def nodegroup_coconut_vein_geometry(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - quadratic_bezier = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Resolution': 400, 'Start': (0.0, 0.0, 0.0), 'Middle': (0.0, 0.2, 0.5), - 'End': (0.0, 0.0, 1.0)}) - - coconutvein = nw.new_node(nodegroup_coconutvein().name) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': quadratic_bezier, 'Offset': coconutvein}) - - treetrunkgeometry_001 = nw.new_node(nodegroup_tree_trunk_geometry_001().name, - input_kwargs={'Curve': set_position}) - - transform_1 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': treetrunkgeometry_001.outputs["Mesh"], - 'Translation': (0.0, 0.0, -0.1)}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': transform_1}) - - -@node_utils.to_nodegroup('nodegroup_coconut_random_rotate', singleton=False, type='GeometryNodeTree') -def nodegroup_coconut_random_rotate(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - random_value_2 = nw.new_node(Nodes.RandomValue, - input_kwargs={2: -0.2, 3: 0.2}) - - random_value_3 = nw.new_node(Nodes.RandomValue, - input_kwargs={2: -0.2, 3: 0.2}) - - random_value_4 = nw.new_node(Nodes.RandomValue, - input_kwargs={2: -0.2, 3: 0.2}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': random_value_2.outputs[1], 'Y': random_value_3.outputs[1], - 'Z': random_value_4.outputs[1]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Vector': combine_xyz}) - - -@node_utils.to_nodegroup('nodegroup_truncated_stem_geometry', singleton=False, type='GeometryNodeTree') -def nodegroup_truncated_stem_geometry(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Points', None), - ('NodeSocketFloat', 'Value1', 0.5), - ('NodeSocketFloat', 'Value2', 0.5)]) - - truncated_leaf_stem = nw.new_node(nodegroup_truncated_leaf_stem().name) - - normal_1 = nw.new_node(Nodes.InputNormal) - - align_euler_to_vector_1 = nw.new_node(Nodes.AlignEulerToVector, - input_kwargs={'Vector': normal_1}, - attrs={'axis': 'Z'}) - - instance_on_points_2 = nw.new_node(Nodes.InstanceOnPoints, - input_kwargs={'Points': group_input.outputs["Points"], - 'Instance': truncated_leaf_stem, - 'Rotation': align_euler_to_vector_1}) - - leaf_truncated_rotate = nw.new_node(nodegroup_leaf_truncated_rotate().name, - input_kwargs={'Value': group_input.outputs[2]}) - - rotate_instances_2 = nw.new_node(Nodes.RotateInstances, - input_kwargs={'Instances': instance_on_points_2, - 'Rotation': leaf_truncated_rotate}) - - rotate_instances_3 = nw.new_node(Nodes.RotateInstances, - input_kwargs={'Instances': rotate_instances_2, 'Rotation': (-0.9599, 0.0, 1.5708)}) - - random_rotate = 
nw.new_node(nodegroup_random_rotate().name) - - rotate_instances_4 = nw.new_node(Nodes.RotateInstances, - input_kwargs={'Instances': rotate_instances_3, 'Rotation': random_rotate}) - - random_value_5 = nw.new_node(Nodes.RandomValue, - input_kwargs={2: 0.6}) - - scale_instances_4 = nw.new_node(Nodes.ScaleInstances, - input_kwargs={'Instances': rotate_instances_4, 'Scale': random_value_5.outputs[1]}) - - index_2 = nw.new_node(Nodes.Index) - - modulo = nw.new_node(Nodes.Math, - input_kwargs={0: index_2, 1: randint(8, 12)}, - attrs={'operation': 'MODULO'}) - - scale_instances_3 = nw.new_node(Nodes.ScaleInstances, - input_kwargs={'Instances': scale_instances_4, 'Selection': modulo, - 'Scale': (0.0, 0.0, 0.0)}) - - truncated_leaf_selection = nw.new_node(nodegroup_truncated_leaf_selection().name, - input_kwargs={'Value': group_input.outputs["Value1"]}) - - scale_instances_5 = nw.new_node(Nodes.ScaleInstances, - input_kwargs={'Instances': scale_instances_3, 'Selection': truncated_leaf_selection, - 'Scale': (0.0, 0.0, 0.0)}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Instances': scale_instances_5}) - - -@node_utils.to_nodegroup('nodegroup_tree_trunk_geometry', singleton=False, type='GeometryNodeTree') -def nodegroup_tree_trunk_geometry(nw: NodeWrangler, radius): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Curve', None)]) - - trunkradius = nw.new_node(nodegroup_trunk_radius().name) - - set_curve_radius = nw.new_node(Nodes.SetCurveRadius, - input_kwargs={'Curve': group_input.outputs["Curve"], 'Radius': trunkradius}) - - treecracks = nw.new_node(nodegroup_tree_cracks().name, - input_kwargs={'Geometry': set_curve_radius}) - - trunk_resolution = nw.new_node(Nodes.Integer, - label='TrunkResolution', - attrs={'integer': 32}) - trunk_resolution.integer = 32 - - trunk_radius = nw.new_node(Nodes.Value, - label='TrunkRadius') - trunk_radius.outputs[0].default_value = radius - - curve_circle = nw.new_node(Nodes.CurveCircle, - input_kwargs={'Resolution': trunk_resolution, 'Radius': trunk_radius}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': treecracks.outputs["Geometry"], - 'Profile Curve': curve_circle.outputs["Curve"], 'Fill Caps': True}) - - subdivide_mesh = nw.new_node(Nodes.SubdivideMesh, - input_kwargs={'Mesh': curve_to_mesh, 'Level': 5}) - - set_position_1 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': subdivide_mesh, 'Offset': treecracks.outputs["Vector"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_position_1, 'Integer': trunk_resolution, - 'Mesh': curve_to_mesh}) - - -@node_utils.to_nodegroup('nodegroup_leaf_on_top', singleton=False, type='GeometryNodeTree') -def nodegroup_leaf_on_top(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Points', None), - ('NodeSocketFloat', 'Value', 0.5), - ('NodeSocketFloat', 'Ring', 10.0), - ('NodeSocketFloat', 'Segment', 0.5), - ('NodeSocketGeometry', 'Instance', None)]) - - normal = nw.new_node(Nodes.InputNormal) - - align_euler_to_vector = nw.new_node(Nodes.AlignEulerToVector, - input_kwargs={'Vector': normal}, - attrs={'axis': 'Z'}) - - instance_on_points_1 = nw.new_node(Nodes.InstanceOnPoints, - input_kwargs={'Points': group_input.outputs["Points"], - 'Instance': group_input.outputs["Instance"], - 'Rotation': align_euler_to_vector}) - - 
leafrotatedownward = nw.new_node(nodegroup_leaf_rotate_downward().name, - input_kwargs={'Value': group_input.outputs["Value"]}) - - rotate_instances = nw.new_node(Nodes.RotateInstances, - input_kwargs={'Instances': instance_on_points_1, 'Rotation': leafrotatedownward}) - - leafrandomrotate = nw.new_node(nodegroup_leaf_random_rotate().name) - - rotate_instances_1 = nw.new_node(Nodes.RotateInstances, - input_kwargs={'Instances': rotate_instances, 'Rotation': leafrandomrotate}) - - random_value_4 = nw.new_node(Nodes.RandomValue, - input_kwargs={2: 0.9, 3: 1.2}) - - scale_instances_2 = nw.new_node(Nodes.ScaleInstances, - input_kwargs={'Instances': rotate_instances_1, 'Scale': random_value_4.outputs[1]}) - - leafinstanceselectionbottomremove = nw.new_node(nodegroup_leaf_instance_selection_bottom_remove().name, - input_kwargs={'Ring': group_input.outputs["Ring"], - 'Segment': group_input.outputs["Segment"]}) - - scale_instances = nw.new_node(Nodes.ScaleInstances, - input_kwargs={'Instances': scale_instances_2, - 'Selection': leafinstanceselectionbottomremove, - 'Scale': (0.0, 0.0, 0.0)}) - - random_value = nw.new_node(Nodes.RandomValue, - input_kwargs={5: 1}, - attrs={'data_type': 'INT'}) - - scale_instances_1 = nw.new_node(Nodes.ScaleInstances, - input_kwargs={'Instances': scale_instances, 'Selection': random_value.outputs[2], - 'Scale': (0.0, 0.0, 0.0)}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Instances': scale_instances_1}) - - -@node_utils.to_nodegroup('nodegroup_coconut_instance_on_points', singleton=False, type='GeometryNodeTree') -def nodegroup_coconut_instance_on_points(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - index_1 = nw.new_node(Nodes.Index) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Ring', 0.5), - ('NodeSocketFloat', 'Segment', 0.5)]) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Segment"], 1: 0.0}) - - divide = nw.new_node(Nodes.Math, - input_kwargs={0: index_1, 1: add}, - attrs={'operation': 'DIVIDE'}) - - add_1 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Ring"], 1: 0.0}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: add_1, 1: 4.0}, - attrs={'operation': 'SUBTRACT'}) - - greater_than = nw.new_node(Nodes.Math, - input_kwargs={0: divide, 1: subtract}, - attrs={'operation': 'GREATER_THAN'}) - - subtract_1 = nw.new_node(Nodes.Math, - input_kwargs={0: add_1, 1: 2.0}, - attrs={'operation': 'SUBTRACT'}) - - less_than = nw.new_node(Nodes.Math, - input_kwargs={0: divide, 1: subtract_1}, - attrs={'operation': 'LESS_THAN'}) - - op_and = nw.new_node(Nodes.BooleanMath, - input_kwargs={0: greater_than, 1: less_than}) - - op_not = nw.new_node(Nodes.BooleanMath, - input_kwargs={0: op_and}, - attrs={'operation': 'NOT'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Boolean': op_not}) - - -@node_utils.to_nodegroup('nodegroup_coconut_group', singleton=False, type='GeometryNodeTree') -def nodegroup_coconut_group(nw: NodeWrangler, coconut): - # Code generated using version 2.4.3 of the node_transpiler - - uv_sphere_1 = nw.new_node(Nodes.MeshUVSphere, - input_kwargs={'Segments': 8, 'Rings': 6, 'Radius': 0.15}) - - object_info_2 = nw.new_node(Nodes.ObjectInfo, input_kwargs={'Object': coconut}) - - transform_2 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': object_info_2.outputs["Geometry"], - 'Translation': (0.0, 0.0, -1.2)}) - - normal_1 = nw.new_node(Nodes.InputNormal) - - align_euler_to_vector_1 = 
nw.new_node(Nodes.AlignEulerToVector, - input_kwargs={'Vector': normal_1}, - attrs={'axis': 'Z'}) - - instance_on_points_3 = nw.new_node(Nodes.InstanceOnPoints, - input_kwargs={'Points': uv_sphere_1, 'Instance': transform_2, - 'Rotation': align_euler_to_vector_1, 'Scale': (-1.0, -1.0, -1.0)}) - - coconut_random_rotate = nw.new_node(nodegroup_coconut_random_rotate().name) - - rotate_instances = nw.new_node(Nodes.RotateInstances, - input_kwargs={'Instances': instance_on_points_3, 'Rotation': coconut_random_rotate}) - - random_value_2 = nw.new_node(Nodes.RandomValue, - input_kwargs={2: 0.15, 3: 0.4}) - - scale_instances_6 = nw.new_node(Nodes.ScaleInstances, - input_kwargs={'Instances': rotate_instances, 'Scale': random_value_2.outputs[1]}) - - index = nw.new_node(Nodes.Index) - - less_than = nw.new_node(Nodes.Math, - input_kwargs={0: index, 1: 20.0}, - attrs={'operation': 'LESS_THAN'}) - - scale_instances_2 = nw.new_node(Nodes.ScaleInstances, - input_kwargs={'Instances': scale_instances_6, 'Selection': less_than, - 'Scale': (0.0, 0.0, 0.0)}) - - random_value_1 = nw.new_node(Nodes.RandomValue, - input_kwargs={5: 2, 'Seed': 2}, - attrs={'data_type': 'INT'}) - - scale_instances_4 = nw.new_node(Nodes.ScaleInstances, - input_kwargs={'Instances': scale_instances_2, - 'Selection': random_value_1.outputs[2], 'Scale': (0.0, 0.0, 0.0)}) - - coconut_vein_geometry = nw.new_node(nodegroup_coconut_vein_geometry().name) - - normal_2 = nw.new_node(Nodes.InputNormal) - - align_euler_to_vector_2 = nw.new_node(Nodes.AlignEulerToVector, - input_kwargs={'Vector': normal_2}, - attrs={'axis': 'Z'}) - - instance_on_points_2 = nw.new_node(Nodes.InstanceOnPoints, - input_kwargs={'Points': uv_sphere_1, 'Instance': coconut_vein_geometry, - 'Rotation': align_euler_to_vector_2}) - - index_2 = nw.new_node(Nodes.Index) - - less_than_1 = nw.new_node(Nodes.Math, - input_kwargs={0: index_2, 1: 30.0}, - attrs={'operation': 'LESS_THAN'}) - - scale_instances_3 = nw.new_node(Nodes.ScaleInstances, - input_kwargs={'Instances': instance_on_points_2, 'Selection': less_than_1, - 'Scale': (0.0, 0.0, 0.0)}) - - random_value_5 = nw.new_node(Nodes.RandomValue, - input_kwargs={5: 1, 'Seed': 4}, - attrs={'data_type': 'INT'}) - - scale_instances_5 = nw.new_node(Nodes.ScaleInstances, - input_kwargs={'Instances': scale_instances_3, - 'Selection': random_value_5.outputs[2], 'Scale': (0.0, 0.0, 0.0)}) - - set_material_2 = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': scale_instances_5, - 'Material': surface.shaderfunc_to_material(shader_coconut_green_shader)}) - - join_geometry_2 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [scale_instances_4, set_material_2]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': join_geometry_2}) - - -def shader_top_core(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - texture_coordinate = nw.new_node(Nodes.TextureCoord) - - mapping = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': texture_coordinate.outputs["Object"], 'Scale': (1.0, 1.0, 0.1)}) - - voronoi_texture = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': mapping, 'Scale': uniform(100, 400)}) - - mapping_1 = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': texture_coordinate.outputs["Object"]}) - - wave_texture = nw.new_node(Nodes.WaveTexture, - input_kwargs={'Vector': mapping_1, 'Scale': 2.0, 'Distortion': 5.0, 'Detail': 10.0}) - - mix = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': 0.4, 'Color1': voronoi_texture.outputs["Distance"], - 
'Color2': wave_texture.outputs["Color"]}) - - d_hsv = (uniform(0.02, 0.05), uniform(0.3, 0.6), uniform(0.01, 0.05)) - b_hsv = d_hsv[:1] + (uniform(0.6, 0.9), uniform(0.3, 0.6)) - colorramp = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': mix}) - colorramp.color_ramp.elements[0].position = 0.2409 - colorramp.color_ramp.elements[0].color = hsv2rgba(d_hsv) - colorramp.color_ramp.elements[1].position = 0.6045 - colorramp.color_ramp.elements[1].color = hsv2rgba(b_hsv) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': colorramp.outputs["Color"], - 'Roughness': colorramp.outputs["Alpha"]}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': principled_bsdf}) - - -def shader_trunk(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - texture_coordinate = nw.new_node(Nodes.TextureCoord) - - mapping = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': texture_coordinate.outputs["Object"]}) - - voronoi_texture_1 = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': mapping, 'Scale': 20.0}, - attrs={'voronoi_dimensions': '4D'}) - - wave_texture = nw.new_node(Nodes.WaveTexture, - input_kwargs={'Vector': mapping, 'Scale': uniform(1.0, 3.0), 'Distortion': 5.0, 'Detail Scale': 3.0}, - attrs={'bands_direction': 'Z'}) - - mix_1 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Color1': voronoi_texture_1.outputs["Distance"], - 'Color2': wave_texture.outputs["Color"]}) - - d_hsv = (uniform(0.02, 0.05), uniform(0.01, 0.05) if randint(0, 2) == 1 else uniform(0.5, 0.8), uniform(0.03, 0.09)) - b_hsv = d_hsv[:-1] + (uniform(0.1, 0.3),) - colorramp = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': mix_1}) - colorramp.color_ramp.elements[0].position = 0.4682 - colorramp.color_ramp.elements[0].color = hsv2rgba(d_hsv) - colorramp.color_ramp.elements[1].position = 0.5591 - colorramp.color_ramp.elements[1].color = hsv2rgba(b_hsv) - - mapping_1 = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': texture_coordinate.outputs["Object"], 'Scale': (10.0, 10.0, 0.2)}) - - voronoi_texture = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': mapping_1, 'Scale': 100.0, 'Randomness': 10.0}, - attrs={'voronoi_dimensions': '4D', 'distance': 'CHEBYCHEV'}) - - colorramp_1 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': voronoi_texture.outputs["Distance"]}) - colorramp_1.color_ramp.elements[0].position = 0.2818 - colorramp_1.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) - colorramp_1.color_ramp.elements[1].position = 0.3045 - colorramp_1.color_ramp.elements[1].color = (0.5284, 0.5034, 0.4327, 1.0) - - mix = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': uniform(0.1, 0.3), 'Color1': colorramp.outputs["Color"], - 'Color2': colorramp_1.outputs["Color"]}) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': mix, 'Roughness': voronoi_texture.outputs["Distance"]}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': principled_bsdf}) - - -def geometry_coconut_tree_nodes(nw: NodeWrangler, **kwargs): - # Code generated using version 2.4.3 of the node_transpiler - - leaf = kwargs["leaf"][0] - coconut = kwargs["coconut"][0] - radius = kwargs["trunk_radius"] - - trunk_height = nw.new_node(Nodes.Value, - label='trunk_height') - trunk_height.outputs[0].default_value = 5.0 - - top_x, top_y = np.random.normal(0.0, 1.), np.random.normal(0.0, 1.) 
- combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': top_x, 'Y': top_y, 'Z': trunk_height}) - - quadratic_bezier = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Start': (0.0, 0.0, 0.0), - 'Middle': (top_x / uniform(1.0, 2.0), top_y / uniform(1.0, 2.0), uniform(1.5, 3.0)), - 'End': combine_xyz_2}) - - resample_curve = nw.new_node(Nodes.ResampleCurve, - input_kwargs={'Curve': quadratic_bezier, 'Length': 0.02}, #'Count': 20000 - attrs={'mode': 'LENGTH'}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': resample_curve}) - - endpoint_selection = nw.new_node('GeometryNodeCurveEndpointSelection', - input_kwargs={'Start Size': 0}) - - top_segment = nw.new_node(Nodes.Integer, - label='TopSegment', - attrs={'integer': 12}) - top_segment.integer = randint(8, 14) - - top_ring = nw.new_node(Nodes.Integer, - label='TopRing', - attrs={'integer': 8}) - top_ring.integer = randint(8, 11) - - uv_sphere = nw.new_node(Nodes.MeshUVSphere, - input_kwargs={'Segments': top_segment, 'Rings': top_ring, 'Radius': uniform(0.15, 0.2)}) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': uv_sphere, 'Scale': (1.0, 1.0, uniform(0.8, 2.0))}) - - set_material_1 = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': transform, - 'Material': surface.shaderfunc_to_material(shader_top_core)}) - - coconut_group = nw.new_node(nodegroup_coconut_group(coconut=coconut).name) - - transform_1 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': coconut_group, 'Scale': (-1.0, -1.0, -1.0)}) - - normal = nw.new_node(Nodes.InputNormal) - - align_euler_to_vector = nw.new_node(Nodes.AlignEulerToVector, - input_kwargs={'Vector': normal}, - attrs={'axis': 'Z'}) - - value = nw.new_node(Nodes.Value) - value.outputs[0].default_value = 0.2 - - instance_on_points_1 = nw.new_node(Nodes.InstanceOnPoints, - input_kwargs={'Points': transform, 'Instance': transform_1, - 'Rotation': align_euler_to_vector, 'Scale': value}) - - random_value = nw.new_node(Nodes.RandomValue, - input_kwargs={5: randint(1, 3)}, - attrs={'data_type': 'INT'}) - - scale_instances = nw.new_node(Nodes.ScaleInstances, - input_kwargs={'Instances': instance_on_points_1, 'Selection': random_value.outputs[2], - 'Scale': (0.0, 0.0, 0.0)}) - - coconut_instance_on_points = nw.new_node(nodegroup_coconut_instance_on_points().name, - input_kwargs={'Ring': top_ring, 'Segment': top_segment}) - - scale_instances_1 = nw.new_node(Nodes.ScaleInstances, - input_kwargs={'Instances': scale_instances, 'Selection': coconut_instance_on_points, - 'Scale': (0.0, 0.0, 0.0)}) - - object_info = nw.new_node(Nodes.ObjectInfo, input_kwargs={'Object': leaf}) - - leafontop = nw.new_node(nodegroup_leaf_on_top().name, - input_kwargs={'Points': transform, 'Value': top_segment, 'Ring': top_segment, - 'Segment': top_ring, 'Instance': object_info.outputs["Geometry"]}) - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [set_material_1, scale_instances_1, leafontop]}) - - instance_on_points = nw.new_node(Nodes.InstanceOnPoints, - input_kwargs={'Points': set_position, 'Selection': endpoint_selection, - 'Instance': join_geometry_1}) - - treetrunkgeometry = nw.new_node(nodegroup_tree_trunk_geometry(radius=radius).name, - input_kwargs={'Curve': set_position}) - - set_material = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': treetrunkgeometry.outputs["Geometry"], - 'Material': surface.shaderfunc_to_material(shader_trunk)}) - - truncatedstemgeometry = 
nw.new_node(nodegroup_truncated_stem_geometry().name, - input_kwargs={'Points': treetrunkgeometry.outputs["Mesh"], 1: trunk_height, - 2: treetrunkgeometry.outputs["Integer"]}) - - geos = [instance_on_points, set_material] - if uniform(0.0, 1.0) < 0.3: - geos.append(truncatedstemgeometry) - join_geometry = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': geos}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': join_geometry}) - - -class CoconutTreeFactory(AssetFactory): - def __init__(self, factory_seed, coarse=False): - super(CoconutTreeFactory, self).__init__(factory_seed, coarse=coarse) - - def create_asset(self, params={}, **kwargs): - bpy.ops.mesh.primitive_plane_add( - size=1, enter_editmode=False, align='WORLD', location=(0, 0, 0), scale=(1, 1, 1)) - obj = bpy.context.active_object - - # Make the Leaf and Delete It Later - lf_seed = randint(0, 1000, size=(1,))[0] - leaf_model = LeafPalmTreeFactory(factory_seed=lf_seed) - p = { - 'leaf_x_curvature': uniform(0.3, 0.8) - } - leaf = leaf_model.create_asset(p) - params["leaf"] = [leaf] - - co_seed = randint(0, 1000, size=(1,))[0] - coconut_model = FruitFactoryCoconutgreen(factory_seed=co_seed) - coconut = coconut_model.create_asset() - params["coconut"] = [coconut] - params["trunk_radius"] = uniform(0.2, 0.3) - - surface.add_geomod(obj, geometry_coconut_tree_nodes, selection=None, attributes=[], input_kwargs=params) - butil.delete([leaf, coconut]) - with butil.SelectObjects(obj): - bpy.ops.object.material_slot_remove() - bpy.ops.object.shade_flat() - - return obj - - -if __name__ == '__main__': - model = CoconutTreeFactory(0) - model.create_asset() diff --git a/infinigen/assets/tropic_plants/leaf_banana_tree.py b/infinigen/assets/tropic_plants/leaf_banana_tree.py deleted file mode 100644 index 4ad93aae5..000000000 --- a/infinigen/assets/tropic_plants/leaf_banana_tree.py +++ /dev/null @@ -1,684 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
- -# Authors: Beining Han - - -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core import surface -from infinigen.core.placement.factory import AssetFactory -import numpy as np -from infinigen.core.util.color import hsv2rgba -from infinigen.assets.tropic_plants.tropic_plant_utils import ( - nodegroup_nodegroup_leaf_gen, - nodegroup_nodegroup_leaf_rotate_x, - nodegroup_nodegroup_leaf_shader, - nodegroup_nodegroup_move_to_origin, - nodegroup_nodegroup_sub_vein, - shader_stem_material -) -from infinigen.core.util import blender as butil -from infinigen.core.tagging import tag_object, tag_nodegroup - - -@node_utils.to_nodegroup('nodegroup_nodegroup_apply_wave', singleton=False, type='GeometryNodeTree') -def nodegroup_nodegroup_apply_wave(nw: NodeWrangler, leaf_h_wave_control_points, - leaf_w_wave_control_points, leaf_edge_wave_control_points): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketFloat', 'Wave Scale Y', 1.0), - ('NodeSocketFloat', 'Wave Scale X', 1.0), - ('NodeSocketFloat', 'X Modulated', 0.0), - ('NodeSocketFloat', 'Width Scale', 0.0), - ('NodeSocketFloat', 'Wave Scale E', 1.0)]) - - position = nw.new_node(Nodes.InputPosition) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': position}) - - map_range_6 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': separate_xyz.outputs["Y"], 1: -0.6, 2: 0.6}) - - float_curve_3 = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': map_range_6.outputs["Result"]}) - node_utils.assign_curve(float_curve_3.mapping.curves[0], - [(0.0, 0.5), - (0.1, leaf_edge_wave_control_points[0] + .5), - (0.2, leaf_edge_wave_control_points[1] + .5), - (0.3, leaf_edge_wave_control_points[2] + .5), - (0.4, leaf_edge_wave_control_points[3] + .5), - (0.5, leaf_edge_wave_control_points[4] + .5), - (0.6, leaf_edge_wave_control_points[5] + .5), - (0.7, leaf_edge_wave_control_points[6] + .5), - (0.8, leaf_edge_wave_control_points[7] + .5), - (0.9, leaf_edge_wave_control_points[8] + .5), - (1.0, 0.5)]) - - map_range_7 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': float_curve_3, 3: -1.0}) - - absolute = nw.new_node(Nodes.Math, - input_kwargs={0: separate_xyz.outputs["X"]}, - attrs={'operation': 'ABSOLUTE'}) - - map_range_4 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': absolute, 2: group_input.outputs["Width Scale"]}) - - colorramp = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': map_range_4.outputs["Result"]}) - colorramp.color_ramp.elements[0].position = 0.015 - colorramp.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) - colorramp.color_ramp.elements[1].position = uniform(0.3, 0.5) - colorramp.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: map_range_7.outputs["Result"], 1: colorramp.outputs["Color"]}, - attrs={'operation': 'MULTIPLY'}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: multiply, 1: group_input.outputs["Wave Scale E"]}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'Z': multiply_1}) - - set_position_2 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 'Offset': combine_xyz_3}) - - position_1 = nw.new_node(Nodes.InputPosition) - - 
separate_xyz_1 = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': position_1}) - - attribute_statistic = nw.new_node(Nodes.AttributeStatistic, - input_kwargs={'Geometry': group_input.outputs["Geometry"], - 2: separate_xyz_1.outputs["Y"]}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': separate_xyz.outputs["Y"], 1: attribute_statistic.outputs["Min"], - 2: attribute_statistic.outputs["Max"]}) - - float_curve = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': map_range.outputs["Result"]}) - node_utils.assign_curve(float_curve.mapping.curves[0], - [(0.0, .5), - (0.2, leaf_h_wave_control_points[0] + .5), - (0.4, leaf_h_wave_control_points[1] + .5), - (0.6, leaf_h_wave_control_points[2] + .5), - (0.8, leaf_h_wave_control_points[3] + .5), - (1.0, leaf_h_wave_control_points[4] + .5)]) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': float_curve, 3: -1.0}) - - multiply_2 = nw.new_node(Nodes.Math, - input_kwargs={0: map_range_1.outputs["Result"], 1: group_input.outputs["Wave Scale Y"]}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'Z': multiply_2}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': set_position_2, 'Offset': combine_xyz}) - - attribute_statistic_1 = nw.new_node(Nodes.AttributeStatistic, - input_kwargs={'Geometry': group_input.outputs["Geometry"], - 2: group_input.outputs["X Modulated"]}) - - map_range_2 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': group_input.outputs["X Modulated"], - 1: attribute_statistic_1.outputs["Min"], - 2: attribute_statistic_1.outputs["Max"]}) - - float_curve_1 = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': map_range_2.outputs["Result"]}) - node_utils.assign_curve(float_curve_1.mapping.curves[0], - [(0.0, leaf_w_wave_control_points[0] + .5 + normal(0., 0.02)), - (0.1, leaf_w_wave_control_points[1] + .5 + normal(0., 0.02)), - (0.25, leaf_w_wave_control_points[2] + .5 + normal(0., 0.02)), - (0.4, leaf_w_wave_control_points[3] + .5 + normal(0., 0.02)), - (0.5, 0.5), - (0.6, leaf_w_wave_control_points[3] + .5 + normal(0., 0.02)), - (0.75, leaf_w_wave_control_points[2] + .5 + normal(0., 0.02)), - (0.9, leaf_w_wave_control_points[1] + .5 + normal(0., 0.02)), - (1.0, leaf_w_wave_control_points[0] + .5 + normal(0., 0.02))], - handles=['AUTO', 'AUTO', 'AUTO', 'AUTO', 'VECTOR', 'AUTO', 'AUTO', 'AUTO', 'AUTO']) - - map_range_3 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': float_curve_1, 3: -1.0}) - - multiply_3 = nw.new_node(Nodes.Math, - input_kwargs={0: map_range_3.outputs["Result"], 1: group_input.outputs["Wave Scale X"]}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'Z': multiply_3}) - - set_position_1 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': set_position, 'Offset': combine_xyz_1}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_position_1}) - - -def shader_leaf_material(nw: NodeWrangler, stem_color_hsv): - # Code generated using version 2.4.3 of the node_transpiler - - attribute = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'vein'}) - - texture_coordinate = nw.new_node(Nodes.TextureCoord) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': texture_coordinate.outputs["Object"], 'Scale': 6.8, - 'Detail': 10.0, 'Roughness': 0.7}) - - separate_rgb = nw.new_node(Nodes.SeparateRGB, - input_kwargs={'Image': noise_texture.outputs["Color"]}) - - map_range = 
nw.new_node(Nodes.MapRange, - input_kwargs={'Value': separate_rgb.outputs["G"], 1: 0.4, 2: 0.7, 3: 0.48, 4: 0.52}) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': separate_rgb.outputs["B"], 1: 0.4, 2: 0.7, 3: 0.8, 4: 1.2}) - - attribute_1 = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'subvein offset'}) - - map_range_2 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': attribute_1.outputs["Color"], 2: -0.94}) - - main_leaf_hsv = (uniform(0.26, 0.37), uniform(0.8, 1.0), uniform(0.15, 0.55)) - hue_saturation_value = nw.new_node('ShaderNodeHueSaturation', - input_kwargs={'Value': 2.0, 'Color': hsv2rgba(main_leaf_hsv)}) - - main_leaf_hsv_2 = (main_leaf_hsv[0] + normal(0.0, 0.02),) + main_leaf_hsv[1:] - mix = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': map_range_2.outputs["Result"], 'Color1': hue_saturation_value, - 'Color2': hsv2rgba(main_leaf_hsv_2)}) - - hue_saturation_value_1 = nw.new_node('ShaderNodeHueSaturation', - input_kwargs={'Hue': map_range.outputs["Result"], - 'Value': map_range_1.outputs["Result"], 'Color': mix}) - - mix_1 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': attribute.outputs["Color"], 'Color1': hsv2rgba(stem_color_hsv), - 'Color2': hue_saturation_value_1}) - - group = nw.new_node(nodegroup_nodegroup_leaf_shader().name, - input_kwargs={'Color': mix_1}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': group}) - - -@node_utils.to_nodegroup('nodegroup_round_tropical_leaf', singleton=False, type='GeometryNodeTree') -def nodegroup_round_tropical_leaf(nw: NodeWrangler, jigsaw_depth, leaf_h_wave_control_points, - leaf_w_wave_control_points, leaf_edge_wave_control_points, - leaf_contour_control_points): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'To Max', -0.4), - ('NodeSocketGeometry', 'Mesh', None), - ('NodeSocketFloat', 'Wave Scale Y', 0.3), - ('NodeSocketFloat', 'Wave Scale X', 0.5), - ('NodeSocketFloat', 'Wave Scale E', 0.5), - ('NodeSocketFloat', 'Leaf Width Scale', 0.0)]) - - subdivide_mesh = nw.new_node(Nodes.SubdivideMesh, - input_kwargs={'Mesh': group_input.outputs["Mesh"], 'Level': 10}) - - subdivide_mesh_1 = nw.new_node(Nodes.SubdivideMesh, - input_kwargs={'Mesh': subdivide_mesh}) - - position = nw.new_node(Nodes.InputPosition) - - capture_attribute = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': subdivide_mesh_1, 1: position}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - nodegroup_leaf_gen = nw.new_node(nodegroup_nodegroup_leaf_gen(leaf_contour_control_points).name, - input_kwargs={'Mesh': capture_attribute.outputs["Geometry"], - 'Displancement scale': 0.0, 'Vein Asymmetry': 0.3023, - 'Vein Density': 0.0, 'Jigsaw Scale': uniform(5.0, 20.0), - 'Jigsaw Depth': jigsaw_depth, - 'Vein Angle': 0.3, 'Wave Displacement': 0.0, 'Midrib Length': 0.333, - 'Stem Length': 0.6, 'Midrib Width': uniform(0.8, 1.4), - 'Leaf Width Scale': group_input.outputs["Leaf Width Scale"]}) - - nodegroup_sub_vein = nw.new_node(nodegroup_nodegroup_sub_vein().name, - input_kwargs={'X': 0.0, 'Y': nodegroup_leaf_gen.outputs["Vein Coord"]}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: nodegroup_sub_vein.outputs["Value"], 1: 0.0005}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'Z': multiply}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': nodegroup_leaf_gen.outputs["Mesh"], 'Offset': combine_xyz}) - - 
capture_attribute_1 = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': set_position, - 2: nodegroup_sub_vein.outputs["Color Value"]}) - - capture_attribute_2 = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': capture_attribute_1.outputs["Geometry"], - 2: nodegroup_leaf_gen.outputs["Vein Value"]}) - - nodegroup_apply_wave = nw.new_node(nodegroup_nodegroup_apply_wave(leaf_h_wave_control_points, - leaf_w_wave_control_points, - leaf_edge_wave_control_points).name, - input_kwargs={'Geometry': capture_attribute_2.outputs["Geometry"], - 'Wave Scale Y': group_input.outputs["Wave Scale Y"], - 'Wave Scale X': group_input.outputs["Wave Scale X"], - 'X Modulated': nodegroup_leaf_gen.outputs["X Modulated"], - 'Wave Scale E': group_input.outputs["Wave Scale E"]}) - - nodegroup_move_to_origin = nw.new_node(nodegroup_nodegroup_move_to_origin().name, - input_kwargs={'Geometry': nodegroup_apply_wave}) - - nodegroup_leaf_rotate_x = nw.new_node(nodegroup_nodegroup_leaf_rotate_x().name, - input_kwargs={'Geometry': nodegroup_move_to_origin, - 'To Max': group_input.outputs["To Max"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Attribute': nodegroup_leaf_gen.outputs["Attribute"], - 'Coordinate': capture_attribute.outputs["Attribute"], - 'subvein': capture_attribute_1.outputs[2], - 'vein': capture_attribute_2.outputs[2], - 'Geometry': nodegroup_leaf_rotate_x}) - - -@node_utils.to_nodegroup('nodegroup_leaf_on_stem', singleton=False, type='GeometryNodeTree') -def nodegroup_leaf_on_stem(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Points', None), - ('NodeSocketGeometry', 'Instance', None), - ('NodeSocketVectorXYZ', 'Scale', (1.0, 1.0, 1.0))]) - - endpoint_selection = nw.new_node('GeometryNodeCurveEndpointSelection', - input_kwargs={'End Size': 0}) - - curve_tangent = nw.new_node(Nodes.CurveTangent) - - align_euler_to_vector = nw.new_node(Nodes.AlignEulerToVector, - input_kwargs={'Vector': curve_tangent}, - attrs={'axis': 'Z'}) - - instance_on_points_1 = nw.new_node(Nodes.InstanceOnPoints, - input_kwargs={'Points': group_input.outputs["Points"], - 'Selection': endpoint_selection, - 'Instance': group_input.outputs["Instance"], - 'Rotation': align_euler_to_vector, - 'Scale': group_input.outputs["Scale"]}) - - rotate_instances = nw.new_node(Nodes.RotateInstances, - input_kwargs={'Instances': instance_on_points_1, 'Rotation': (-1.5708, 0.0, 0.0)}) - - realize_instances_1 = nw.new_node(Nodes.RealizeInstances, - input_kwargs={'Geometry': rotate_instances}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': realize_instances_1}) - - -@node_utils.to_nodegroup('nodegroup_stem_curvature', singleton=False, type='GeometryNodeTree') -def nodegroup_stem_curvature(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Curve', None), - ('NodeSocketFloat', 'To Min1', 0.2), - ('NodeSocketFloat', 'To Min2', -0.2)]) - - resample_curve = nw.new_node(Nodes.ResampleCurve, - input_kwargs={'Curve': group_input.outputs["Curve"], 'Count': 100}) - - position_2 = nw.new_node(Nodes.InputPosition) - - spline_parameter_1 = nw.new_node(Nodes.SplineParameter) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': spline_parameter_1.outputs["Factor"], - 3: group_input.outputs["To Min1"], 4: 0.0}) - - vector_rotate = 
nw.new_node(Nodes.VectorRotate, - input_kwargs={'Vector': position_2, 'Center': (0.0, 0.0, 2.0), - 'Angle': map_range_1.outputs["Result"]}, - attrs={'rotation_type': 'Y_AXIS'}) - - set_position_1 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': resample_curve, 'Position': vector_rotate}) - - position_1 = nw.new_node(Nodes.InputPosition) - - spline_parameter_2 = nw.new_node(Nodes.SplineParameter) - - map_range_2 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': spline_parameter_2.outputs["Factor"], 3: group_input.outputs[2], - 4: 0.0}) - - vector_rotate_1 = nw.new_node(Nodes.VectorRotate, - input_kwargs={'Vector': position_1, 'Angle': map_range_2.outputs["Result"]}, - attrs={'rotation_type': 'X_AXIS'}) - - set_position_2 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': set_position_1, 'Position': vector_rotate_1}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_position_2}) - - -@node_utils.to_nodegroup('nodegroup_stem_geometry', singleton=False, type='GeometryNodeTree') -def nodegroup_stem_geometry(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Curve', None)]) - - spline_parameter = nw.new_node(Nodes.SplineParameter) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': spline_parameter.outputs["Factor"], 3: 0.4, 4: 0.8}, - attrs={'interpolation_type': 'SMOOTHSTEP'}) - - set_curve_radius = nw.new_node(Nodes.SetCurveRadius, - input_kwargs={'Curve': group_input.outputs["Curve"], - 'Radius': map_range.outputs["Result"]}) - - curve_circle = nw.new_node(Nodes.CurveCircle, - input_kwargs={'Radius': 0.02}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': set_curve_radius, 'Profile Curve': curve_circle.outputs["Curve"], - 'Fill Caps': True}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Mesh': curve_to_mesh}) - - -def geometry_leaf_nodes(nw: NodeWrangler, **kwargs): - - leaf_x_curvature = nw.new_node(Nodes.Value, - label='leaf_x_curvature') - leaf_x_curvature.outputs[0].default_value = -kwargs['leaf_x_curvature'] - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None)]) - - wave_x_scale = nw.new_node(Nodes.Value, - label='wave_x_scale') - wave_x_scale.outputs[0].default_value = kwargs['leaf_h_wave_scale'] - - wave_y_scale = nw.new_node(Nodes.Value, - label='wave_y_scale') - wave_y_scale.outputs[0].default_value = kwargs['leaf_w_wave_scale'] - - wave_e_scale = nw.new_node(Nodes.Value, - label='wave_e_scale') - wave_e_scale.outputs[0].default_value = kwargs['leaf_edge_wave_scale'] - - leaf_width_scale = nw.new_node(Nodes.Value, - label='leaf_width_scale') - leaf_width_scale.outputs[0].default_value = kwargs['leaf_width'] - - leaf_h_wave_control_points = kwargs['leaf_h_wave_control_points'] - leaf_w_wave_control_points = kwargs['leaf_w_wave_control_points'] - leaf_edge_wave_control_points = kwargs['leaf_edge_wave_control_points'] - leaf_contour_control_points = kwargs['leaf_contour_control_points'] - leaf_jigsaw_depth = kwargs['leaf_jigsaw_depth'] - - round_tropical_leaf = nw.new_node(nodegroup_round_tropical_leaf(leaf_jigsaw_depth, - leaf_h_wave_control_points, - leaf_w_wave_control_points, - leaf_edge_wave_control_points, - leaf_contour_control_points).name, - input_kwargs={'To Max': leaf_x_curvature, 'Mesh': group_input.outputs["Geometry"], - 'Wave Scale Y': wave_x_scale, 'Wave Scale X': wave_y_scale, - 
'Leaf Width Scale': leaf_width_scale, 'Wave Scale E': wave_e_scale}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': round_tropical_leaf.outputs["Geometry"], - 'Attribute': round_tropical_leaf.outputs["Attribute"], - 'Coordinate': round_tropical_leaf.outputs["Coordinate"], - 'subvein offset': round_tropical_leaf.outputs["subvein"], - 'vein': round_tropical_leaf.outputs["vein"]}) - - -def geometry_plant_nodes(nw: NodeWrangler, **kwargs): - # Code generated using version 2.4.3 of the node_transpiler - - curve_line_1 = nw.new_node(Nodes.CurveLine, - input_kwargs={'Start': (0.0, 0.0, 2.0), 'End': (0.0, 0.0, 0.0)}) - - stem_y_curvature = nw.new_node(Nodes.Value, - label='stem_y_curvature') - stem_y_curvature.outputs[0].default_value = uniform(-0.5, 0.5) - - stem_x_curvature = nw.new_node(Nodes.Value, - label='stem_x_curvature') - stem_x_curvature.outputs[0].default_value = -kwargs['leaf_x_curvature'] - - stem_curvature = nw.new_node(nodegroup_stem_curvature().name, - input_kwargs={'Curve': curve_line_1, 1: stem_y_curvature, 2: stem_x_curvature}) - - stem_geometry = nw.new_node(nodegroup_stem_geometry().name, - input_kwargs={'Curve': stem_curvature}) - - set_material = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': stem_geometry, - 'Material': surface.shaderfunc_to_material( - lambda x: shader_stem_material(x, stem_color_hsv= - kwargs['stem_color_hsv']))}) - - leaf_x_curvature = nw.new_node(Nodes.Value, - label='leaf_x_curvature') - leaf_x_curvature.outputs[0].default_value = -kwargs['leaf_x_curvature'] - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None)]) - - wave_x_scale = nw.new_node(Nodes.Value, - label='wave_x_scale') - wave_x_scale.outputs[0].default_value = kwargs['leaf_h_wave_scale'] - - wave_y_scale = nw.new_node(Nodes.Value, - label='wave_y_scale') - wave_y_scale.outputs[0].default_value = kwargs['leaf_w_wave_scale'] - - wave_e_scale = nw.new_node(Nodes.Value, - label='wave_edge_scale') - wave_e_scale.outputs[0].default_value = kwargs['leaf_edge_wave_scale'] - - leaf_width_scale = nw.new_node(Nodes.Value, - label='leaf_width_scale') - leaf_width_scale.outputs[0].default_value = kwargs['leaf_width'] - - leaf_h_wave_control_points = kwargs['leaf_h_wave_control_points'] - leaf_w_wave_control_points = kwargs['leaf_w_wave_control_points'] - leaf_edge_wave_control_points = kwargs['leaf_edge_wave_control_points'] - leaf_contour_control_points = kwargs['leaf_contour_control_points'] - leaf_jigsaw_depth = kwargs['leaf_jigsaw_depth'] - - round_tropical_leaf = nw.new_node(nodegroup_round_tropical_leaf(leaf_jigsaw_depth, - leaf_h_wave_control_points, - leaf_w_wave_control_points, - leaf_edge_wave_control_points, - leaf_contour_control_points).name, - input_kwargs={'To Max': leaf_x_curvature, 'Mesh': group_input.outputs["Geometry"], - 'Wave Scale Y': wave_x_scale, 'Wave Scale X': wave_y_scale, - 'Leaf Width Scale': leaf_width_scale, 'Wave Scale E': wave_e_scale}) - - leaf_scale = nw.new_node(Nodes.Value, - label='leaf_scale') - leaf_scale.outputs[0].default_value = normal(1.0, 0.3) - - leaf_on_stem = nw.new_node(nodegroup_leaf_on_stem().name, - input_kwargs={'Points': stem_curvature, - 'Instance': round_tropical_leaf.outputs["Geometry"], 'Scale': leaf_scale}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [set_material, leaf_on_stem]}) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': join_geometry, - 'Translation': kwargs['plant_translation'], - 
'Rotation': (0.0, 0.0, kwargs['plant_z_rotate']), - 'Scale': kwargs['plant_scale']}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': transform, - 'Attribute': round_tropical_leaf.outputs["Attribute"], - 'Coordinate': round_tropical_leaf.outputs["Coordinate"], - 'subvein offset': round_tropical_leaf.outputs["subvein"], - 'vein': round_tropical_leaf.outputs["vein"]}) - - -class LeafBananaTreeFactory(AssetFactory): - def __init__(self, factory_seed, coarse=False): - super(LeafBananaTreeFactory, self).__init__(factory_seed, coarse=coarse) - - def get_leaf_contour(self, mode): - if mode == 'oval': - return [0.13, 0.275, 0.35, 0.365, 0.32, 0.21] - elif mode == 'pear': - return [0.30, 0.46, 0.46, 0.43, 0.37, 0.23] - else: - return NotImplementedError - - def get_h_wave_contour(self, mode): - if mode == 'flat': - return [normal(0., 0.03) for _ in range(6)] - elif mode == 's': - return [-0.1 + normal(0., 0.02), 0. + normal(0., 0.02), - 0.08 + normal(0., 0.02), 0. + normal(0., 0.02), - -0.05 + normal(0., 0.01)] - elif mode == 'w': - return [-0.08 + normal(0., 0.02), 0.07 + normal(0., 0.02), - -0.08 + normal(0., 0.02), 0.08 + normal(0., 0.02), - -0.05 + normal(0, 0.02)] - else: - raise NotImplementedError - - def get_w_wave_contour(self, mode): - if mode == 'fold': - return [-0.28 + normal(0., 0.02), -0.2 + normal(0., 0.02), - -0.13 + normal(0., 0.01), -0.06 + normal(0., 0.01)], uniform(0.1, 0.3) - elif mode == 'wing': - return [0.0 + normal(0., 0.02), 0.06 + normal(0., 0.02), - 0.07 + normal(0., 0.01), 0.04 + normal(0., 0.01)], uniform(0.0, 0.3) - else: - raise NotImplementedError - - def get_e_wave_contour(self, mode): - if mode == 'wavy': - return [-0.06 + normal(0., 0.01), 0.06 + normal(0., 0.01), -0.06 + normal(0., 0.01), - 0.06 + normal(0., 0.01), -0.06 + normal(0., 0.01), 0.06 + normal(0., 0.01), - -0.06 + normal(0., 0.01), 0.06 + normal(0., 0.01), -0.06 + normal(0., 0.01)], 10 - elif mode == 'flat': - return [0.0 for _ in range(9)], 0.0 - else: - raise NotImplementedError - - def update_params(self, **params): - if params.get('leaf_h_wave_control_points', None) is None: - mode = np.random.choice(['flat', 'w', 's'], p=[0.4, 0.3, 0.3]) - params['leaf_h_wave_control_points'] = self.get_h_wave_contour(mode) - - if params.get('leaf_w_wave_control_points', None) is None: - mode = np.random.choice(['fold', 'wing'], p=[0.2, 0.8]) - params['leaf_w_wave_control_points'], params['leaf_w_wave_scale'] = self.get_w_wave_contour(mode) - - if params.get('leaf_edge_wave_control_points', None) is None: - mode = np.random.choice(['wavy', 'flat'], p=[1.0, 0.0]) # 0.6, 0.4 - params['leaf_edge_wave_control_points'], params['leaf_edge_wave_scale'] = self.get_e_wave_contour(mode) - - if params.get('leaf_contour_control_points', None) is None: - mode = np.random.choice(['oval', 'pear'], p=[0.5, 0.5]) - params['leaf_contour_control_points'] = self.get_leaf_contour(mode) - - if params.get('leaf_jigsaw_depth', None) is None: - mode = np.random.choice([0, 1], p=[0.4, 0.6]) - params['leaf_jigsaw_depth'] = mode * uniform(0.8, 1.7) - - if params.get('leaf_width', None) is None: - params['leaf_width'] = uniform(0.5, 0.85) - - if params.get('leaf_h_wave_scale', None) is None: - params['leaf_h_wave_scale'] = uniform(0.02, 0.2) - - if params.get('leaf_w_wave_scale', None) is None: - params['leaf_w_wave_scale'] = uniform(0.05, 0.25) - - if params.get('leaf_x_curvature', None) is None: - params['leaf_x_curvature'] = uniform(0.0, 0.1) - - if params.get('stem_color_hsv', None) is None: - 
params['stem_color_hsv'] = (uniform(0.25, 0.32), uniform(0.8, 1.0), uniform(0.8, 1.0)) - - return params - - def create_asset(self, **params): - bpy.ops.mesh.primitive_plane_add( - size=2, enter_editmode=False, align='WORLD', location=(0, 0, 0), scale=(1, 1, 1)) - obj = bpy.context.active_object - - params = self.update_params(**params) - surface.add_geomod(obj, geometry_leaf_nodes, apply=True, - attributes=['Attribute', 'Coordinate', - 'subvein offset', 'vein'], input_kwargs=params) - surface.add_material(obj, lambda x: shader_leaf_material(x, stem_color_hsv=params['stem_color_hsv']), - selection=None) - - tag_object(obj, 'leaf_banana_tree') - return obj - - -class PlantBananaTreeFactory(AssetFactory): - def __init__(self, factory_seed, coarse=False): - super(PlantBananaTreeFactory, self).__init__(factory_seed, coarse=coarse) - self.leaf_tropical_factory = LeafBananaTreeFactory(factory_seed) - - def update_params(self, **params): - params = self.leaf_tropical_factory.update_params(**params) - # Add new params update - if params.get('plant_translation', None) is None: - params['plant_translation'] = (0.0, 0.0, 0.0) - if params.get('plant_z_rotate', None) is None: - params['plant_z_rotate'] = uniform(-0.4, 0.4) - if params.get('plant_scale', None) is None: - s = uniform(0.8, 1.5) - params['plant_scale'] = (s, s, s) - return params - - def create_asset(self, **params): - bpy.ops.mesh.primitive_plane_add( - size=2, enter_editmode=False, align='WORLD', location=(0, 0, 0), scale=(1, 1, 1)) - obj = bpy.context.active_object - - params = self.update_params(**params) - surface.add_geomod(obj, geometry_plant_nodes, apply=True, - attributes=['Attribute', 'Coordinate', - 'subvein offset', 'vein'], input_kwargs=params) - surface.add_material(obj, lambda x: shader_leaf_material(x, stem_color_hsv=params['stem_color_hsv']), - selection=None) - - tag_object(obj, 'leaf_banana_tree') - return obj - - -if __name__ == '__main__': - fac = LeafBananaTreeFactory(0) - fac.create_asset() \ No newline at end of file diff --git a/infinigen/assets/tropic_plants/leaf_palm_plant.py b/infinigen/assets/tropic_plants/leaf_palm_plant.py deleted file mode 100644 index f7c476ccd..000000000 --- a/infinigen/assets/tropic_plants/leaf_palm_plant.py +++ /dev/null @@ -1,565 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
- -# Authors: Beining Han - - -import bpy -import mathutils -import numpy as np -from numpy.random import uniform, normal, randint -from infinigen.assets.tropic_plants.tropic_plant_utils import ( - nodegroup_nodegroup_leaf_gen, - nodegroup_nodegroup_leaf_rotate_x, - nodegroup_nodegroup_leaf_shader, - nodegroup_nodegroup_move_to_origin, - nodegroup_nodegroup_sub_vein, - hsv2rgba, - shader_stem_material, -) -from infinigen.core.placement.factory import AssetFactory -from infinigen.core.nodes import Nodes, NodeWrangler, node_utils -from infinigen.core.util import blender as butil -from infinigen.core import surface - - -@node_utils.to_nodegroup('nodegroup_nodegroup_apply_wave', singleton=False, type='GeometryNodeTree') -def nodegroup_nodegroup_apply_wave(nw: NodeWrangler, leaf_h_wave_control_points): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketFloat', 'Wave Scale Y', 1.0), - ('NodeSocketFloat', 'Wave Scale X', 1.0), - ('NodeSocketFloat', 'X Modulated', 0.0), - ('NodeSocketFloat', 'Width Scale', 0.0)]) - - position = nw.new_node(Nodes.InputPosition) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': position}) - - position_1 = nw.new_node(Nodes.InputPosition) - - separate_xyz_1 = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': position_1}) - - attribute_statistic = nw.new_node(Nodes.AttributeStatistic, - input_kwargs={'Geometry': group_input.outputs["Geometry"], - 2: separate_xyz_1.outputs["Y"]}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': separate_xyz.outputs["Y"], 1: attribute_statistic.outputs["Min"], - 2: attribute_statistic.outputs["Max"]}) - - float_curve = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': map_range.outputs["Result"]}) - node_utils.assign_curve(float_curve.mapping.curves[0], - [(0.0, .5), - (0.2, leaf_h_wave_control_points[0] + .5), - (0.4, leaf_h_wave_control_points[1] + .5), - (0.6, leaf_h_wave_control_points[2] + .5), - (0.8, leaf_h_wave_control_points[3] + .5), - (1.0, leaf_h_wave_control_points[4] + .5)]) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': float_curve, 3: -1.0}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: map_range_1.outputs["Result"], 1: group_input.outputs["Wave Scale Y"]}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'Z': multiply}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 'Offset': combine_xyz}) - - attribute_statistic_1 = nw.new_node(Nodes.AttributeStatistic, - input_kwargs={'Geometry': group_input.outputs["Geometry"], - 2: group_input.outputs["X Modulated"]}) - - map_range_2 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': group_input.outputs["X Modulated"], - 1: attribute_statistic_1.outputs["Min"], - 2: attribute_statistic_1.outputs["Max"]}) - - float_curve_1 = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': map_range_2.outputs["Result"]}) - node_utils.assign_curve(float_curve_1.mapping.curves[0], - [(0.0, 0.1625), (0.0955, 0.2844), (0.2318, 0.3594), (0.3727, 0.451), (0.5045, 0.5094), - (0.6045, 0.4447), (0.7886, 0.325), (1.0, 0.1594)], - handles=['AUTO', 'AUTO', 'AUTO', 'AUTO', 'VECTOR', 'AUTO', 'AUTO', 'AUTO']) - - map_range_3 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': float_curve_1, 3: -1.0}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: 
map_range_3.outputs["Result"], 1: group_input.outputs["Wave Scale X"]}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'Z': multiply_1}) - - set_position_1 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': set_position, 'Offset': combine_xyz_1}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_position_1}) - - -@node_utils.to_nodegroup('nodegroup_palm_leaf_assemble', singleton=False, type='GeometryNodeTree') -def nodegroup_palm_leaf_assemble(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Points', None), - ('NodeSocketGeometry', 'Instance', None), - ('NodeSocketFloat', 'Resolution', 0.0)]) - - index = nw.new_node(Nodes.Index) - - divide = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Resolution"], 1: 2.0}, - attrs={'operation': 'DIVIDE'}) - - less_than = nw.new_node(Nodes.Math, - input_kwargs={0: index, 1: divide}, - attrs={'operation': 'LESS_THAN'}) - - greater_than = nw.new_node(Nodes.Math, - input_kwargs={0: index, 1: 0.0}, - attrs={'operation': 'GREATER_THAN'}) - - op_and = nw.new_node(Nodes.BooleanMath, - input_kwargs={0: less_than, 1: greater_than}) - - curve_tangent = nw.new_node(Nodes.CurveTangent) - - align_euler_to_vector = nw.new_node(Nodes.AlignEulerToVector, - input_kwargs={'Vector': curve_tangent}) - - random_value = nw.new_node(Nodes.RandomValue, - input_kwargs={2: 0.9, 3: 1.1, 'Seed': 2}) - - instance_on_points_1 = nw.new_node(Nodes.InstanceOnPoints, - input_kwargs={'Points': group_input.outputs["Points"], 'Selection': op_and, - 'Instance': group_input.outputs["Instance"], - 'Rotation': align_euler_to_vector, - 'Scale': random_value.outputs[1]}) - - realize_instances_1 = nw.new_node(Nodes.RealizeInstances, - input_kwargs={'Geometry': instance_on_points_1}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': realize_instances_1}) - - -@node_utils.to_nodegroup('nodegroup_round_tropical_leaf', singleton=False, type='GeometryNodeTree') -def nodegroup_leaf_palm_instance(nw: NodeWrangler, leaf_h_wave_control_points): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'To Max', -0.4), - ('NodeSocketGeometry', 'Mesh', None), - ('NodeSocketFloat', 'Wave Scale Y', 0.3), - ('NodeSocketFloat', 'Wave Scale X', 0.5), - ('NodeSocketFloat', 'Leaf Width Scale', 0.0)]) - - subdivide_mesh = nw.new_node(Nodes.SubdivideMesh, - input_kwargs={'Mesh': group_input.outputs["Mesh"], 'Level': 10}) - - subdivide_mesh_1 = nw.new_node(Nodes.SubdivideMesh, - input_kwargs={'Mesh': subdivide_mesh}) - - position = nw.new_node(Nodes.InputPosition) - - capture_attribute = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': subdivide_mesh_1, 1: position}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - nodegroup_leaf_gen = nw.new_node(nodegroup_nodegroup_leaf_gen().name, - input_kwargs={'Mesh': capture_attribute.outputs["Geometry"], - 'Displancement scale': 0.0, 'Vein Asymmetry': 0.3023, - 'Vein Density': 0.0, 'Jigsaw Scale': 10.0, 'Jigsaw Depth': 0.0, - 'Vein Angle': 0.3, 'Wave Displacement': 0.0, 'Midrib Length': 0.3336, - 'Midrib Width': 1.3, 'Stem Length': 0.6, - 'Leaf Width Scale': group_input.outputs["Leaf Width Scale"]}) - - nodegroup_sub_vein = nw.new_node(nodegroup_nodegroup_sub_vein().name, - input_kwargs={'X': nodegroup_leaf_gen.outputs["X 
Modulated"]}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: nodegroup_sub_vein.outputs["Value"], 1: 0.0005}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'Z': multiply}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': nodegroup_leaf_gen.outputs["Mesh"], 'Offset': combine_xyz}) - - capture_attribute_1 = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': set_position, - 2: nodegroup_sub_vein.outputs["Color Value"]}) - - capture_attribute_2 = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': capture_attribute_1.outputs["Geometry"], - 2: nodegroup_leaf_gen.outputs["Vein Value"]}) - - nodegroup_apply_wave = nw.new_node(nodegroup_nodegroup_apply_wave(leaf_h_wave_control_points).name, - input_kwargs={'Geometry': capture_attribute_2.outputs["Geometry"], - 'Wave Scale Y': group_input.outputs["Wave Scale Y"], - 'Wave Scale X': group_input.outputs["Wave Scale X"], - 'X Modulated': nodegroup_leaf_gen.outputs["X Modulated"], - 'Width Scale': group_input.outputs["Leaf Width Scale"]}) - - nodegroup_move_to_origin = nw.new_node(nodegroup_nodegroup_move_to_origin().name, - input_kwargs={'Geometry': nodegroup_apply_wave}) - - nodegroup_leaf_rotate_x = nw.new_node(nodegroup_nodegroup_leaf_rotate_x().name, - input_kwargs={'Geometry': nodegroup_move_to_origin, - 'To Max': group_input.outputs["To Max"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Attribute': nodegroup_leaf_gen.outputs["Attribute"], - 'Coordinate': capture_attribute.outputs["Attribute"], - 'subvein': capture_attribute_1.outputs[2], - 'vein': capture_attribute_2.outputs[2], - 'Geometry': nodegroup_leaf_rotate_x}) - - -@node_utils.to_nodegroup('nodegroup_palmleafsector', singleton=False, type='GeometryNodeTree') -def nodegroup_palmleafsector(nw: NodeWrangler, leaf_h_wave_control_points): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'To Max', -0.4), - ('NodeSocketGeometry', 'Mesh', None), - ('NodeSocketFloat', 'Wave Scale Y', 0.3), - ('NodeSocketFloat', 'Wave Scale X', 0.5), - ('NodeSocketFloat', 'Leaf Width Scale', 0.0), - ('NodeSocketInt', 'Resolution1', 26), - ('NodeSocketFloat', 'Resolution2', 0.0)]) - - round_tropical_leaf = nw.new_node(nodegroup_leaf_palm_instance(leaf_h_wave_control_points).name, - input_kwargs={'To Max': group_input.outputs["To Max"], - 'Mesh': group_input.outputs["Mesh"], - 'Wave Scale Y': group_input.outputs["Wave Scale Y"], - 'Wave Scale X': group_input.outputs["Wave Scale X"], - 'Leaf Width Scale': group_input.outputs["Leaf Width Scale"]}) - - curve_circle = nw.new_node(Nodes.CurveCircle, - input_kwargs={'Resolution': group_input.outputs["Resolution1"], 'Radius': 0.01}) - - palm_leaf_assemble = nw.new_node(nodegroup_palm_leaf_assemble().name, - input_kwargs={'Points': curve_circle.outputs["Curve"], - 'Instance': round_tropical_leaf.outputs["Geometry"], - 'Resolution': group_input.outputs[6]}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': palm_leaf_assemble}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Attribute': round_tropical_leaf.outputs["Attribute"], - 'Coordinate': round_tropical_leaf.outputs["Coordinate"], - 'subvein': round_tropical_leaf.outputs["subvein"], - 'vein': round_tropical_leaf.outputs["vein"], 'Geometry': join_geometry}) - - -@node_utils.to_nodegroup('nodegroup_leaf_on_stem', singleton=False, 
type='GeometryNodeTree') -def nodegroup_leaf_on_stem(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Points', None), - ('NodeSocketGeometry', 'Instance', None), - ('NodeSocketVectorXYZ', 'Scale', (1.0, 1.0, 1.0))]) - - endpoint_selection = nw.new_node('GeometryNodeCurveEndpointSelection', - input_kwargs={'End Size': 0}) - - curve_tangent = nw.new_node(Nodes.CurveTangent) - - align_euler_to_vector = nw.new_node(Nodes.AlignEulerToVector, - input_kwargs={'Vector': curve_tangent}, - attrs={'axis': 'Z'}) - - instance_on_points_1 = nw.new_node(Nodes.InstanceOnPoints, - input_kwargs={'Points': group_input.outputs["Points"], - 'Selection': endpoint_selection, - 'Instance': group_input.outputs["Instance"], - 'Rotation': align_euler_to_vector, - 'Scale': group_input.outputs["Scale"]}) - - rotate_instances = nw.new_node(Nodes.RotateInstances, - input_kwargs={'Instances': instance_on_points_1, 'Rotation': (1.5708, 0.0, 3.1416)}) - - realize_instances_1 = nw.new_node(Nodes.RealizeInstances, - input_kwargs={'Geometry': rotate_instances}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': realize_instances_1}) - - -@node_utils.to_nodegroup('nodegroup_stem_curvature', singleton=False, type='GeometryNodeTree') -def nodegroup_stem_curvature(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Curve', None), - ('NodeSocketFloat', 'Y Stem Rotate', 0.2), - ('NodeSocketFloat', 'X Stem Rotate', -0.2)]) - - resample_curve = nw.new_node(Nodes.ResampleCurve, - input_kwargs={'Curve': group_input.outputs["Curve"], 'Count': 100}) - - position_2 = nw.new_node(Nodes.InputPosition) - - spline_parameter_1 = nw.new_node(Nodes.SplineParameter) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': spline_parameter_1.outputs["Factor"], - 3: group_input.outputs["Y Stem Rotate"], 4: 0.0}) - - vector_rotate = nw.new_node(Nodes.VectorRotate, - input_kwargs={'Vector': position_2, 'Center': (0.0, 0.0, 2.0), - 'Angle': map_range_1.outputs["Result"]}, - attrs={'rotation_type': 'Y_AXIS'}) - - set_position_1 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': resample_curve, 'Position': vector_rotate}) - - position_1 = nw.new_node(Nodes.InputPosition) - - spline_parameter_2 = nw.new_node(Nodes.SplineParameter) - - map_range_2 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': spline_parameter_2.outputs["Factor"], - 3: group_input.outputs["X Stem Rotate"], 4: 0.0}) - - vector_rotate_1 = nw.new_node(Nodes.VectorRotate, - input_kwargs={'Vector': position_1, 'Angle': map_range_2.outputs["Result"]}, - attrs={'rotation_type': 'X_AXIS'}) - - set_position_2 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': set_position_1, 'Position': vector_rotate_1}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_position_2}) - - -@node_utils.to_nodegroup('nodegroup_stem_geometry', singleton=False, type='GeometryNodeTree') -def nodegroup_stem_geometry(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Curve', None)]) - - spline_parameter = nw.new_node(Nodes.SplineParameter) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': spline_parameter.outputs["Factor"], 3: 0.4, 4: 0.8}, - 
attrs={'interpolation_type': 'SMOOTHSTEP'}) - - set_curve_radius = nw.new_node(Nodes.SetCurveRadius, - input_kwargs={'Curve': group_input.outputs["Curve"], - 'Radius': map_range.outputs["Result"]}) - - curve_circle = nw.new_node(Nodes.CurveCircle, - input_kwargs={'Radius': uniform(0.03, 0.06)}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': set_curve_radius, 'Profile Curve': curve_circle.outputs["Curve"], - 'Fill Caps': True}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Mesh': curve_to_mesh}) - - -def shader_leaf_material(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - attribute = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'vein'}) - - texture_coordinate = nw.new_node(Nodes.TextureCoord) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': texture_coordinate.outputs["Object"], 'Scale': 6.8, - 'Detail': 10.0, 'Roughness': 0.7}) - - separate_rgb = nw.new_node(Nodes.SeparateRGB, - input_kwargs={'Image': noise_texture.outputs["Color"]}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': separate_rgb.outputs["G"], 1: 0.4, 2: 0.7, 3: 0.48, 4: 0.52}) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': separate_rgb.outputs["B"], 1: 0.4, 2: 0.7, 3: 0.8, 4: 1.2}) - - attribute_1 = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'subvein offset'}) - - map_range_2 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': attribute_1.outputs["Color"], 2: -0.94}) - - main_leaf_hsv = (uniform(0.3, 0.36), uniform(0.6, 0.7), uniform(0.2, 0.3)) - hue_saturation_value = nw.new_node('ShaderNodeHueSaturation', - input_kwargs={'Value': 2.0, 'Color': hsv2rgba(main_leaf_hsv)}) - - main_leaf_hsv_2 = (main_leaf_hsv[0] + normal(0.0, 0.005),) + main_leaf_hsv[1:] - mix = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': map_range_2.outputs["Result"], 'Color1': hue_saturation_value, - 'Color2': hsv2rgba(main_leaf_hsv_2)}) - - hue_saturation_value_1 = nw.new_node('ShaderNodeHueSaturation', - input_kwargs={'Hue': map_range.outputs["Result"], - 'Value': map_range_1.outputs["Result"], 'Color': mix}) - - stem_color_hsv = main_leaf_hsv[:-1] + (main_leaf_hsv[-1] - uniform(0.05, 0.15),) - mix_1 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': attribute.outputs["Color"], 'Color1': hsv2rgba(stem_color_hsv), - 'Color2': hue_saturation_value_1}) - - group = nw.new_node(nodegroup_nodegroup_leaf_shader().name, - input_kwargs={'Color': mix_1}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': group}) - - -def geometry_plant_nodes(nw: NodeWrangler, **kwargs): - # Code generated using version 2.4.3 of the node_transpiler - - curve_line_1 = nw.new_node(Nodes.CurveLine, - input_kwargs={'Start': (0.0, 0.0, kwargs['plant_stem_length']), 'End': (0.0, 0.0, 0.0)}) - - stem_y_curvature = nw.new_node(Nodes.Value, - label='stem_y_curvature') - stem_y_curvature.outputs[0].default_value = kwargs['stem_y_curvature'] - - stem_x_curvature = nw.new_node(Nodes.Value, - label='stem_x_curvature') - stem_x_curvature.outputs[0].default_value = kwargs['stem_x_curvature'] - - stem_curvature = nw.new_node(nodegroup_stem_curvature().name, - input_kwargs={'Curve': curve_line_1, 'Y Stem Rotate': stem_y_curvature, - 'X Stem Rotate': stem_x_curvature}) - - stem_geometry = nw.new_node(nodegroup_stem_geometry().name, - input_kwargs={'Curve': stem_curvature}) - - set_material = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': stem_geometry, - 'Material': 
surface.shaderfunc_to_material(shader_stem_material)}) - - leaf_x_curvature = nw.new_node(Nodes.Value, - label='leaf_x_curvature') - leaf_x_curvature.outputs[0].default_value = kwargs['leaf_x_curvature'] - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None)]) - - wave_x_scale = nw.new_node(Nodes.Value, - label='wave_x_scale') - wave_x_scale.outputs[0].default_value = kwargs['leaf_h_wave_scale'] - - wave_y_scale = nw.new_node(Nodes.Value, - label='wave_y_scale') - wave_y_scale.outputs[0].default_value = 0.0 - - leaf_width_scale = nw.new_node(Nodes.Value, - label='leaf_width_scale') - leaf_width_scale.outputs[0].default_value = uniform(0.15, 0.2) - - integer = nw.new_node(Nodes.Integer, - attrs={'integer': 24}) - integer.integer = randint(20, 30) - - palmleafsector = nw.new_node(nodegroup_palmleafsector(leaf_h_wave_control_points= - kwargs['leaf_h_wave_control_points']).name, - input_kwargs={'To Max': leaf_x_curvature, 'Mesh': group_input.outputs["Geometry"], - 'Wave Scale Y': wave_x_scale, 'Wave Scale X': wave_y_scale, - 'Leaf Width Scale': leaf_width_scale, 5: integer, 6: integer}) - - leaf_scale = nw.new_node(Nodes.Value, - label='leaf_scale') - leaf_scale.outputs[0].default_value = uniform(0.85, 1.25) - - leaf_on_stem = nw.new_node(nodegroup_leaf_on_stem().name, - input_kwargs={'Points': stem_curvature, 'Instance': palmleafsector.outputs["Geometry"], - 'Scale': leaf_scale}) - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [set_material, leaf_on_stem]}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': leaf_x_curvature}) - - transform_1 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': join_geometry_1, 'Rotation': combine_xyz}) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': transform_1, - 'Translation': kwargs['plant_translation'], - 'Rotation': (0.0, 0.0, kwargs['plant_z_rotate']), - 'Scale': kwargs['plant_scale']}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': transform, 'Attribute': palmleafsector.outputs["Attribute"], - 'Coordinate': palmleafsector.outputs["Coordinate"], - 'subvein offset': palmleafsector.outputs["subvein"], - 'vein': palmleafsector.outputs["vein"]}) - - -class LeafPalmPlantFactory(AssetFactory): - def __init__(self, factory_seed, coarse=False): - super(LeafPalmPlantFactory, self).__init__(factory_seed, coarse=coarse) - - def get_h_wave_contour(self, mode): - if mode == 'flat': - return [normal(0., 0.03) for _ in range(6)] - elif mode == 's': - return [-0.5 + normal(0., 0.01), 0. + normal(0., 0.01), - 0.05 + normal(0., 0.01), 0. 
+ normal(0., 0.01), - -0.05 + normal(0., 0.01)] - else: - raise NotImplementedError - - def update_params(self, params): - if params.get('leaf_h_wave_control_points', None) is None: - mode = np.random.choice(['flat', 's'], p=[0.7, 0.3]) - params['leaf_h_wave_control_points'] = self.get_h_wave_contour(mode) - if params.get('leaf_h_wave_scale', None) is None: - params['leaf_h_wave_scale'] = uniform(0.01, 0.15) - if params.get('leaf_x_curvature', None) is None: - params['leaf_x_curvature'] = uniform(0.0, 0.5) - if params.get('stem_x_curvature', None) is None: - params['stem_x_curvature'] = uniform(-0.1, 0.4) - if params.get('stem_y_curvature', None) is None: - params['stem_y_curvature'] = uniform(-0.15, 0.15) - if params.get('plant_translation', None) is None: - params['plant_translation'] = (0.0, 0.0, 0.0) - if params.get('plant_z_rotate', None) is None: - params['plant_z_rotate'] = uniform(-0.4, 0.4) - if params.get('plant_stem_length', None) is None: - params['plant_stem_length'] = uniform(1.5, 2.2) - if params.get('plant_scale', None) is None: - s = uniform(0.8, 1.3) - params['plant_scale'] = (s, s, s) - return params - - def create_asset(self, params={}, **kwargs): - bpy.ops.mesh.primitive_plane_add( - size=2, enter_editmode=False, align='WORLD', location=(0, 0, 0), scale=(1, 1, 1)) - obj = bpy.context.active_object - - params = self.update_params(params) - surface.add_geomod(obj, geometry_plant_nodes, apply=False, - attributes=['Attribute', 'Coordinate', - 'subvein offset', 'vein'], input_kwargs=params) - surface.add_material(obj, shader_leaf_material, selection=None) - - return obj - diff --git a/infinigen/assets/tropic_plants/leaf_palm_tree.py b/infinigen/assets/tropic_plants/leaf_palm_tree.py deleted file mode 100644 index a9842ce66..000000000 --- a/infinigen/assets/tropic_plants/leaf_palm_tree.py +++ /dev/null @@ -1,650 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
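For context on the factory pattern in the deleted `LeafPalmPlantFactory.update_params` above: it only fills in parameters the caller left unset, so explicit overrides always win and every remaining parameter is drawn at random per asset. A minimal, Blender-free sketch of that sampling logic follows; it assumes only numpy, and the helper name `sample_palm_plant_params` is hypothetical, not part of the codebase.

```python
# Illustrative sketch only (not part of the original diff): reproduces the
# "fill in whatever the caller did not supply" pattern used by the deleted
# LeafPalmPlantFactory.update_params, with plain numpy.
import numpy as np
from numpy.random import normal, uniform


def sample_palm_plant_params(overrides=None):
    params = dict(overrides or {})
    if params.get('leaf_h_wave_control_points') is None:
        # choose between a flat and an 's'-shaped horizontal wave contour,
        # mirroring get_h_wave_contour in the deleted file
        mode = np.random.choice(['flat', 's'], p=[0.7, 0.3])
        if mode == 'flat':
            params['leaf_h_wave_control_points'] = [normal(0.0, 0.03) for _ in range(6)]
        else:
            params['leaf_h_wave_control_points'] = [
                -0.5 + normal(0.0, 0.01), 0.0 + normal(0.0, 0.01),
                0.05 + normal(0.0, 0.01), 0.0 + normal(0.0, 0.01),
                -0.05 + normal(0.0, 0.01)]
    # remaining parameters: keep any explicit value, otherwise sample
    params.setdefault('leaf_h_wave_scale', uniform(0.01, 0.15))
    params.setdefault('leaf_x_curvature', uniform(0.0, 0.5))
    params.setdefault('plant_stem_length', uniform(1.5, 2.2))
    s = uniform(0.8, 1.3)
    params.setdefault('plant_scale', (s, s, s))
    return params


# e.g. sample_palm_plant_params({'leaf_x_curvature': 0.2}) keeps the override
# and randomizes everything else, which is how the factory re-rolls per seed.
```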
- -# Authors: Beining Han - -import bpy - -import numpy as np -from numpy.random import uniform, normal, randint - -from infinigen.core.nodes import Nodes, NodeWrangler, node_utils -from infinigen.core.util.color import hsv2rgba -from infinigen.core import surface -from infinigen.core.placement.factory import AssetFactory - -from infinigen.assets.tropic_plants.tropic_plant_utils import ( - nodegroup_nodegroup_leaf_shader, - nodegroup_nodegroup_sub_vein, - nodegroup_nodegroup_leaf_gen, - nodegroup_nodegroup_move_to_origin, - nodegroup_nodegroup_leaf_rotate_x, - shader_stem_material -) -from infinigen.core.tagging import tag_object, tag_nodegroup - -@node_utils.to_nodegroup('nodegroup_nodegroup_apply_wave', singleton=False, type='GeometryNodeTree') -def nodegroup_nodegroup_apply_wave(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketFloat', 'Wave Scale Y', 1.0), - ('NodeSocketFloat', 'Wave Scale X', 1.0), - ('NodeSocketFloat', 'X Modulated', 0.0), - ('NodeSocketFloat', 'Width Scale', 0.0)]) - - position = nw.new_node(Nodes.InputPosition) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': position}) - - position_1 = nw.new_node(Nodes.InputPosition) - - separate_xyz_1 = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': position_1}) - - attribute_statistic = nw.new_node(Nodes.AttributeStatistic, - input_kwargs={'Geometry': group_input.outputs["Geometry"], - 2: separate_xyz_1.outputs["Y"]}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': separate_xyz.outputs["Y"], 1: attribute_statistic.outputs["Min"], - 2: attribute_statistic.outputs["Max"]}) - - float_curve = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': map_range.outputs["Result"]}) - node_utils.assign_curve(float_curve.mapping.curves[0], - [(0.0, 0.4875), (0.1091, 0.5), (0.3275, 0.4921), (0.7409, 0.5031), (1.0, 0.5063)]) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': float_curve, 3: -1.0}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: map_range_1.outputs["Result"], 1: group_input.outputs["Wave Scale Y"]}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'Z': multiply}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 'Offset': combine_xyz}) - - attribute_statistic_1 = nw.new_node(Nodes.AttributeStatistic, - input_kwargs={'Geometry': group_input.outputs["Geometry"], - 2: group_input.outputs["X Modulated"]}) - - map_range_2 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': group_input.outputs["X Modulated"], - 1: attribute_statistic_1.outputs["Min"], - 2: attribute_statistic_1.outputs["Max"]}) - - float_curve_1 = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': map_range_2.outputs["Result"]}) - node_utils.assign_curve(float_curve_1.mapping.curves[0], - [(0.0, 0.1625), (0.0955, 0.2844), (0.2318, 0.3594), (0.3727, 0.451), (0.5045, 0.5094), - (0.6045, 0.4447), (0.7886, 0.325), (1.0, 0.1594)], - handles=['AUTO', 'AUTO', 'AUTO', 'AUTO', 'VECTOR', 'AUTO', 'AUTO', 'AUTO']) - - map_range_3 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': float_curve_1, 3: -1.0}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: map_range_3.outputs["Result"], 1: group_input.outputs["Wave Scale X"]}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, - 
input_kwargs={'Z': multiply_1}) - - set_position_1 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': set_position, 'Offset': combine_xyz_1}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_position_1}) - - -@node_utils.to_nodegroup('nodegroup_leaf_on_stem_selection', singleton=False, type='GeometryNodeTree') -def nodegroup_leaf_on_stem_selection(nw: NodeWrangler, gt, lt, th): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Samples', 0.0), - ('NodeSocketFloat', 'Random Value', 0.0)]) - - greater_than = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Random Value"], 1: gt}, - attrs={'operation': 'GREATER_THAN'}) - - less_than = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Random Value"], 1: lt}, - attrs={'operation': 'LESS_THAN'}) - - op_and = nw.new_node(Nodes.BooleanMath, - input_kwargs={0: greater_than, 1: less_than}) - - index = nw.new_node(Nodes.Index) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Samples"], 1: th * uniform(0.95, 1.05)}, - attrs={'operation': 'MULTIPLY'}) - - less_than_1 = nw.new_node(Nodes.Math, - input_kwargs={0: index, 1: multiply}, - attrs={'operation': 'LESS_THAN'}) - - op_and_1 = nw.new_node(Nodes.BooleanMath, - input_kwargs={0: op_and, 1: less_than_1}) - - op_not = nw.new_node(Nodes.BooleanMath, - input_kwargs={0: op_and_1}, - attrs={'operation': 'NOT'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Boolean': op_not}) - - -@node_utils.to_nodegroup('nodegroup_leaf_on_stem_scale_up_down', singleton=False, type='GeometryNodeTree') -def nodegroup_leaf_on_stem_scale_up_down(nw: NodeWrangler, gap): - # Code generated using version 2.4.3 of the node_transpiler - - index_2 = nw.new_node(Nodes.Index) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Samples', 0.0)]) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': index_2, 2: group_input.outputs["Samples"]}, - attrs={'clamp': False}) - - float_curve_1 = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': map_range_1.outputs["Result"]}) - node_utils.assign_curve(float_curve_1.mapping.curves[0], - [(0.0, 1.0 - gap), (0.3, 1.0 - gap / 2.), (0.6, 1.0 - gap / 5.), (1.0, 1.0)]) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: float_curve_1}, - attrs={'operation': 'MULTIPLY'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Value': multiply}) - - -@node_utils.to_nodegroup('nodegroup_leaf_on_stem_rotation_up_down', singleton=False, type='GeometryNodeTree') -def nodegroup_leaf_on_stem_rotation_up_down(nw: NodeWrangler, scale, gap): - # Code generated using version 2.4.3 of the node_transpiler - - index_1 = nw.new_node(Nodes.Index) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketInt', 'Samples', 0)]) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': index_1, 2: group_input.outputs["Samples"], 3: 1.0, 4: 0.0}, - attrs={'clamp': False}) - - float_curve = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': map_range.outputs["Result"]}) - node_utils.assign_curve(float_curve.mapping.curves[0], [(0.0, 1.0 - gap), (0.7, 1.0 - gap / 2.), (1.0, 1.0)]) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: float_curve, 1: scale}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'Z': multiply}) - - group_output = 
nw.new_node(Nodes.GroupOutput, - input_kwargs={'Vector': combine_xyz}) - - -@node_utils.to_nodegroup('nodegroup_leaf_on_stem_rotation_in_out', singleton=False, type='GeometryNodeTree') -def nodegroup_leaf_on_stem_rotation_in_out(nw: NodeWrangler, in_out_scale=1.0): - # Code generated using version 2.4.3 of the node_transpiler - - index_1 = nw.new_node(Nodes.Index) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketInt', 'Samples', 0)]) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': index_1, 2: group_input.outputs["Samples"], 3: 1.0, 4: 0.0}, - attrs={'clamp': False}) - - float_curve = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': map_range.outputs["Result"]}) - node_utils.assign_curve(float_curve.mapping.curves[0], [(0.0, 0.0), (0.5136, 0.2188), (1.0, 0.8813)]) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: float_curve, 1: -0.5}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: add, 1: in_out_scale}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': multiply}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Vector': combine_xyz}) - - -@node_utils.to_nodegroup('nodegroup_round_tropical_leaf', singleton=False, type='GeometryNodeTree') -def nodegroup_palm_leaf_instance(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'To Max', -0.4), - ('NodeSocketGeometry', 'Mesh', None), - ('NodeSocketFloat', 'Wave Scale Y', 0.3), - ('NodeSocketFloat', 'Wave Scale X', 0.5), - ('NodeSocketFloat', 'Leaf Width Scale', 0.0)]) - - subdivide_mesh = nw.new_node(Nodes.SubdivideMesh, - input_kwargs={'Mesh': group_input.outputs["Mesh"], 'Level': 8}) - - subdivide_mesh_1 = nw.new_node(Nodes.SubdivideMesh, - input_kwargs={'Mesh': subdivide_mesh}) - - position = nw.new_node(Nodes.InputPosition) - - capture_attribute = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': subdivide_mesh_1, 1: position}, - attrs={'data_type': 'FLOAT_VECTOR'}) - - nodegroup_leaf_gen = nw.new_node(nodegroup_nodegroup_leaf_gen().name, - input_kwargs={'Mesh': capture_attribute.outputs["Geometry"], - 'Displancement scale': 0.0, 'Vein Asymmetry': uniform(0.2, 0.4), - 'Vein Density': 0.0, 'Jigsaw Scale': 10.0, 'Jigsaw Depth': 0.0, - 'Vein Angle': 0.3, 'Wave Displacement': 0.0, 'Midrib Length': 0.3336, - 'Midrib Width': uniform(0.9, 1.5), 'Stem Length': uniform(0.55, 0.65), - 'Leaf Width Scale': group_input.outputs["Leaf Width Scale"]}) - - nodegroup_sub_vein = nw.new_node(nodegroup_nodegroup_sub_vein().name, - input_kwargs={'X': nodegroup_leaf_gen.outputs["X Modulated"]}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: nodegroup_sub_vein.outputs["Value"], 1: 0.0005}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'Z': multiply}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': nodegroup_leaf_gen.outputs["Mesh"], 'Offset': combine_xyz}) - - capture_attribute_1 = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': set_position, - 2: nodegroup_sub_vein.outputs["Color Value"]}) - - capture_attribute_2 = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': capture_attribute_1.outputs["Geometry"], - 2: nodegroup_leaf_gen.outputs["Vein Value"]}) - - nodegroup_apply_wave = nw.new_node(nodegroup_nodegroup_apply_wave().name, - input_kwargs={'Geometry': 
capture_attribute_2.outputs["Geometry"], - 'Wave Scale Y': group_input.outputs["Wave Scale Y"], - 'Wave Scale X': group_input.outputs["Wave Scale X"], - 'X Modulated': nodegroup_leaf_gen.outputs["X Modulated"], - 'Width Scale': group_input.outputs["Leaf Width Scale"]}) - - nodegroup_move_to_origin = nw.new_node(nodegroup_nodegroup_move_to_origin().name, - input_kwargs={'Geometry': nodegroup_apply_wave}) - - nodegroup_leaf_rotate_x = nw.new_node(nodegroup_nodegroup_leaf_rotate_x().name, - input_kwargs={'Geometry': nodegroup_move_to_origin, - 'To Max': group_input.outputs["To Max"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Attribute': nodegroup_leaf_gen.outputs["Attribute"], - 'Coordinate': capture_attribute.outputs["Attribute"], - 'subvein': capture_attribute_1.outputs[2], - 'vein': capture_attribute_2.outputs[2], - 'Geometry': nodegroup_leaf_rotate_x}) - - -@node_utils.to_nodegroup('nodegroup_leaf_on_stem', singleton=False, type='GeometryNodeTree') -def nodegroup_leaf_on_stem(nw: NodeWrangler, versions): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Points', None), - ('NodeSocketGeometry', 'Instance', None), - ('NodeSocketVectorXYZ', 'Scale', (1.0, 1.0, 1.0)), - ('NodeSocketInt', 'Samples', 0)]) - - rotation_scale, rotation_gap = uniform(0.6, 1.2), uniform(0.2, 0.6) - scale_gap = uniform(0.2, 0.5) - in_out_scale = normal(0., 0.7) - leaves = [] - for L in [-1, 1]: - curve_tangent_1 = nw.new_node(Nodes.CurveTangent) - - align_euler_to_vector_1 = nw.new_node(Nodes.AlignEulerToVector, - input_kwargs={'Vector': curve_tangent_1}, - attrs={'pivot_axis': 'Y'}) - - instance_on_points_2 = nw.new_node(Nodes.InstanceOnPoints, - input_kwargs={'Points': group_input.outputs["Points"], - 'Instance': group_input.outputs["Instance"], - 'Rotation': align_euler_to_vector_1}) - - scale_instances_4 = nw.new_node(Nodes.ScaleInstances, - input_kwargs={'Instances': instance_on_points_2, 'Scale': (1.0, L, 1.0)}) - - index_1 = nw.new_node(Nodes.Index) - - random_value_4 = nw.new_node(Nodes.RandomValue, - input_kwargs={'ID': index_1, 'Seed': L + 1}) - - leaf_on_stem_selection_1 = nw.new_node(nodegroup_leaf_on_stem_selection(0, 0, 0).name, - input_kwargs={'Samples': group_input.outputs["Samples"], - 'Random Value': random_value_4.outputs[1]}) - - value_1 = nw.new_node(Nodes.Value) - value_1.outputs[0].default_value = 1.0 - - scale_instances_3 = nw.new_node(Nodes.ScaleInstances, - input_kwargs={'Instances': scale_instances_4, 'Selection': leaf_on_stem_selection_1, - 'Scale': value_1}) - - join_geometry_2 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': scale_instances_3}) - - leaf_on_stem_rotation_up_down = nw.new_node(nodegroup_leaf_on_stem_rotation_up_down(rotation_scale * L, rotation_gap).name, - input_kwargs={'Samples': group_input.outputs["Samples"]}) - - rotate_instances_6 = nw.new_node(Nodes.RotateInstances, - input_kwargs={'Instances': join_geometry_2, - 'Rotation': leaf_on_stem_rotation_up_down}) - - leaf_on_stem_rotation_in_out_001 = nw.new_node(nodegroup_leaf_on_stem_rotation_in_out(in_out_scale=in_out_scale).name, - input_kwargs={'Samples': group_input.outputs["Samples"]}) - - rotate_instances_7 = nw.new_node(Nodes.RotateInstances, - input_kwargs={'Instances': rotate_instances_6, - 'Rotation': leaf_on_stem_rotation_in_out_001}) - - leaf_on_stem_scale_up_down_1 = nw.new_node(nodegroup_leaf_on_stem_scale_up_down(scale_gap).name, - input_kwargs={'Samples': 
group_input.outputs["Samples"]}) - - scale_instances_9 = nw.new_node(Nodes.ScaleInstances, - input_kwargs={'Instances': rotate_instances_7, - 'Scale': leaf_on_stem_scale_up_down_1}) - leaves.append(scale_instances_9) - join_geometry = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': leaves}) - - random_value_1 = nw.new_node(Nodes.RandomValue, - input_kwargs={2: -0.3, 3: 0.3}) - - random_value_3 = nw.new_node(Nodes.RandomValue, - input_kwargs={2: -0.3, 3: 0.3}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': random_value_1.outputs[1], 'Y': random_value_3.outputs[1]}) - - rotate_instances = nw.new_node(Nodes.RotateInstances, - input_kwargs={'Instances': join_geometry, 'Rotation': combine_xyz}) - - random_value_2 = nw.new_node(Nodes.RandomValue, - input_kwargs={2: 0.7}) - - scale_instances_6 = nw.new_node(Nodes.ScaleInstances, - input_kwargs={'Instances': rotate_instances, 'Scale': random_value_2.outputs[1]}) - - realize_instances = nw.new_node(Nodes.RealizeInstances, - input_kwargs={'Geometry': scale_instances_6}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': realize_instances}) - - -@node_utils.to_nodegroup('nodegroup_stem_curvature', singleton=False, type='GeometryNodeTree') -def nodegroup_stem_curvature(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Curve', None), - ('NodeSocketFloat', 'Y Stem Rotate', 0.2), - ('NodeSocketFloat', 'Stem Count', 0.0), - ('NodeSocketFloat', 'X Stem Rotate', -0.2)]) - - resample_curve = nw.new_node(Nodes.ResampleCurve, - input_kwargs={'Curve': group_input.outputs["Curve"], - 'Count': group_input.outputs["Stem Count"]}) - - position_2 = nw.new_node(Nodes.InputPosition) - - spline_parameter_1 = nw.new_node(Nodes.SplineParameter) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': spline_parameter_1.outputs["Factor"], - 3: group_input.outputs["Y Stem Rotate"], 4: 0.0}) - - vector_rotate = nw.new_node(Nodes.VectorRotate, - input_kwargs={'Vector': position_2, 'Center': (0.0, 0.0, 2.0), - 'Angle': map_range_1.outputs["Result"]}, - attrs={'rotation_type': 'Y_AXIS'}) - - set_position_1 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': resample_curve, 'Position': vector_rotate}) - - position_1 = nw.new_node(Nodes.InputPosition) - - spline_parameter_2 = nw.new_node(Nodes.SplineParameter) - - map_range_2 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': spline_parameter_2.outputs["Factor"], - 3: group_input.outputs["X Stem Rotate"], 4: 0.0}) - - vector_rotate_1 = nw.new_node(Nodes.VectorRotate, - input_kwargs={'Vector': position_1, 'Angle': map_range_2.outputs["Result"]}, - attrs={'rotation_type': 'X_AXIS'}) - - set_position_2 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': set_position_1, 'Position': vector_rotate_1}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_position_2}) - - -@node_utils.to_nodegroup('nodegroup_stem_geometry', singleton=False, type='GeometryNodeTree') -def nodegroup_stem_geometry(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Curve', None)]) - - spline_parameter = nw.new_node(Nodes.SplineParameter) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': spline_parameter.outputs["Factor"], 3: uniform(0.1, 0.3), 4: 0.8}, - attrs={'interpolation_type': 
'SMOOTHSTEP'}) - - set_curve_radius = nw.new_node(Nodes.SetCurveRadius, - input_kwargs={'Curve': group_input.outputs["Curve"], - 'Radius': map_range.outputs["Result"]}) - - curve_circle = nw.new_node(Nodes.CurveCircle, - input_kwargs={'Radius': uniform(0.03, 0.06)}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': set_curve_radius, 'Profile Curve': curve_circle.outputs["Curve"], - 'Fill Caps': True}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Mesh': curve_to_mesh}) - - -def shader_leaf_material(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - attribute = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'vein'}) - - texture_coordinate = nw.new_node(Nodes.TextureCoord) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': texture_coordinate.outputs["Object"], 'Scale': 6.8, - 'Detail': 10.0, 'Roughness': 0.7}) - - separate_rgb = nw.new_node(Nodes.SeparateRGB, - input_kwargs={'Image': noise_texture.outputs["Color"]}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': separate_rgb.outputs["G"], 1: 0.4, 2: 0.7, 3: 0.48, 4: 0.52}) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': separate_rgb.outputs["B"], 1: 0.4, 2: 0.7, 3: 0.8, 4: 1.2}) - - attribute_1 = nw.new_node(Nodes.Attribute, - attrs={'attribute_name': 'subvein offset'}) - - map_range_2 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': attribute_1.outputs["Color"], 2: -0.94}) - - main_leaf_hsv = (uniform(0.3, 0.36), uniform(0.8, 1.0), uniform(0.25, 0.45)) - hue_saturation_value = nw.new_node('ShaderNodeHueSaturation', - input_kwargs={'Value': 2.0, 'Color': hsv2rgba(main_leaf_hsv)}) - - main_leaf_hsv_2 = (main_leaf_hsv[0] + normal(0.0, 0.005),) + main_leaf_hsv[1:] - mix = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': map_range_2.outputs["Result"], 'Color1': hue_saturation_value, - 'Color2': hsv2rgba(main_leaf_hsv_2)}) - - hue_saturation_value_1 = nw.new_node('ShaderNodeHueSaturation', - input_kwargs={'Hue': map_range.outputs["Result"], - 'Value': map_range_1.outputs["Result"], 'Color': mix}) - - stem_color_hsv = main_leaf_hsv[:-1] + (main_leaf_hsv[-1] - uniform(0.05, 0.15),) - mix_1 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': attribute.outputs["Color"], 'Color1': hsv2rgba(stem_color_hsv), - 'Color2': hue_saturation_value_1}) - - group = nw.new_node(nodegroup_nodegroup_leaf_shader().name, - input_kwargs={'Color': mix_1}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': group}) - - -def geometry_palm_tree_leaf_nodes(nw: NodeWrangler, **kwargs): - # Code generated using version 2.4.3 of the node_transpiler - - curve_line_1 = nw.new_node(Nodes.CurveLine, - input_kwargs={'Start': (0.0, 0.0, 2.0), 'End': (0.0, 0.0, 0.0)}) - - leaf_x_curvature = nw.new_node(Nodes.Value, - label='leaf_x_curvature') - leaf_x_curvature.outputs[0].default_value = kwargs['leaf_x_curvature'] - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: leaf_x_curvature, 1: kwargs['leaf_instance_curvature_ratio']}, - attrs={'operation': 'MULTIPLY'}) - - integer_1 = nw.new_node(Nodes.Integer, - attrs={'integer': 50}) - integer_1.integer = kwargs['num_leaf_samples'] - - stem_x_curvature = nw.new_node(Nodes.Value, - label='stem_x_curvature') - stem_x_curvature.outputs[0].default_value = normal(0., 0.15) - - stem_curvature = nw.new_node(nodegroup_stem_curvature().name, - input_kwargs={'Curve': curve_line_1, 'Y Stem Rotate': leaf_x_curvature, - 'Stem Count': integer_1, 'X 
Stem Rotate': stem_x_curvature}) - - stem_geometry = nw.new_node(nodegroup_stem_geometry().name, - input_kwargs={'Curve': stem_curvature}) - - set_material = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': stem_geometry, - 'Material': surface.shaderfunc_to_material(shader_stem_material)}) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None)]) - - wave_x_scale = nw.new_node(Nodes.Value, - label='wave_x_scale') - wave_x_scale.outputs[0].default_value = 0.0 - - wave_y_scale = nw.new_node(Nodes.Value, - label='wave_y_scale') - wave_y_scale.outputs[0].default_value = 0.0 - - leaf_width_scale = nw.new_node(Nodes.Value, - label='leaf_width_scale') - leaf_width_scale.outputs[0].default_value = kwargs['leaf_instance_width'] - - palm_leaf_instance = nw.new_node(nodegroup_palm_leaf_instance().name, - input_kwargs={'To Max': multiply, 'Mesh': group_input.outputs["Geometry"], - 'Wave Scale Y': wave_x_scale, 'Wave Scale X': wave_y_scale, - 'Leaf Width Scale': leaf_width_scale}) - - leaf_scale = nw.new_node(Nodes.Value, label='leaf_scale') - leaf_scale.outputs[0].default_value = uniform(0.5, 0.7) - - leaf_on_stem = nw.new_node(nodegroup_leaf_on_stem(kwargs['versions']).name, - input_kwargs={'Points': stem_curvature, - 'Instance': palm_leaf_instance.outputs["Geometry"], 'Scale': leaf_scale, - 'Samples': integer_1}) - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [set_material, leaf_on_stem]}) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': join_geometry_1, - 'Translation': kwargs['plant_translation'], - 'Rotation': (0.0, 0.0, kwargs['plant_z_rotate']), - 'Scale': kwargs['plant_scale']}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': transform, - 'Attribute': palm_leaf_instance.outputs["Attribute"], - 'Coordinate': palm_leaf_instance.outputs["Coordinate"], - 'subvein offset': palm_leaf_instance.outputs["subvein"], - 'vein': palm_leaf_instance.outputs["vein"]}) - - -class LeafPalmTreeFactory(AssetFactory): - def __init__(self, factory_seed, coarse=False): - super(LeafPalmTreeFactory, self).__init__(factory_seed, coarse=coarse) - - def update_params(self, params): - if params.get('leaf_x_curvature', None) is None: - params['leaf_x_curvature'] = uniform(0.0, 0.8) - if params.get('leaf_instance_curvature_ratio', None) is None: - params['leaf_instance_curvature_ratio'] = uniform(0.3, 0.6) - if params.get('leaf_instance_width', None) is None: - params['leaf_instance_width'] = uniform(0.07, 0.15) - if params.get('num_leaf_samples', None) is None: - params['num_leaf_samples'] = int(randint(6, 10) / params['leaf_instance_width']) - if params.get('plant_translation', None) is None: - params['plant_translation'] = (0.0, 0.0, 0.0) - if params.get('plant_z_rotate', None) is None: - params['plant_z_rotate'] = uniform(-0.4, 0.4) - if params.get('versions', None) is None: - params['versions'] = 3 - if params.get('plant_scale', None) is None: - s = uniform(0.8, 1.5) - params['plant_scale'] = (s, s, s) - return params - - def create_asset(self, params={}, **kwargs): - bpy.ops.mesh.primitive_plane_add( - size=2, enter_editmode=False, align='WORLD', location=(0, 0, 0), scale=(1, 1, 1)) - obj = bpy.context.active_object - - params = self.update_params(params) - surface.add_geomod(obj, geometry_palm_tree_leaf_nodes, apply=True, - attributes=['Attribute', 'Coordinate', - 'subvein offset', 'vein'], input_kwargs=params) - surface.add_material(obj, shader_leaf_material, 
selection=None) - - tag_object(obj, 'leaf_palm_tree') - return obj - - -if __name__ == '__main__': - fac = LeafPalmTreeFactory(0) - fac.create_asset() \ No newline at end of file diff --git a/infinigen/assets/tropic_plants/palm_tree.py b/infinigen/assets/tropic_plants/palm_tree.py deleted file mode 100644 index 2edd00509..000000000 --- a/infinigen/assets/tropic_plants/palm_tree.py +++ /dev/null @@ -1,993 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Beining Han - - -import bpy -import mathutils -import gin -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core import surface -from infinigen.core.placement.factory import AssetFactory -import numpy as np -from infinigen.core.util.color import hsv2rgba -from infinigen.core.util import blender as butil -from infinigen.assets.tropic_plants.leaf_palm_plant import LeafPalmPlantFactory - - -@node_utils.to_nodegroup('nodegroup_pedal_cross_contour_top', singleton=False, type='GeometryNodeTree') -def nodegroup_pedal_cross_contour_top(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - normal_2 = nw.new_node(Nodes.InputNormal) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Y', 0.0), - ('NodeSocketFloat', 'X', 0.0)]) - - combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': group_input.outputs["X"], 'Y': group_input.outputs["Y"]}) - - multiply = nw.new_node(Nodes.VectorMath, - input_kwargs={0: normal_2, 1: combine_xyz_3}, - attrs={'operation': 'MULTIPLY'}) - - index_1 = nw.new_node(Nodes.Index) - - greater_than = nw.new_node(Nodes.Math, - input_kwargs={0: index_1, 1: 63.0}, - attrs={'operation': 'GREATER_THAN'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Vector': multiply.outputs["Vector"], 'Value': greater_than}) - - -@node_utils.to_nodegroup('nodegroup_pedal_cross_contour_bottom', singleton=False, type='GeometryNodeTree') -def nodegroup_pedal_cross_contour_bottom(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - normal = nw.new_node(Nodes.InputNormal) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Y', 0.0), - ('NodeSocketFloat', 'X', 0.0)]) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': group_input.outputs["X"], 'Y': group_input.outputs["Y"]}) - - multiply = nw.new_node(Nodes.VectorMath, - input_kwargs={0: normal, 1: combine_xyz}, - attrs={'operation': 'MULTIPLY'}) - - index = nw.new_node(Nodes.Index) - - less_than = nw.new_node(Nodes.Math, - input_kwargs={0: index, 1: 64.0}, - attrs={'operation': 'LESS_THAN'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Vector': multiply.outputs["Vector"], 'Value': less_than}) - - -@node_utils.to_nodegroup('nodegroup_trunk_radius_001', singleton=False, type='GeometryNodeTree') -def nodegroup_trunk_radius_001(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - random_value = nw.new_node(Nodes.RandomValue, - input_kwargs={2: 0.01, 3: 0.05}) - - spline_parameter = nw.new_node(Nodes.SplineParameter) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': spline_parameter.outputs["Factor"], 3: 1.0, 4: 0.0}, - attrs={'clamp': False}) - - multiply = nw.new_node(Nodes.Math, - 
input_kwargs={0: spline_parameter.outputs["Factor"], 1: 10000.0}, - attrs={'operation': 'MULTIPLY'}) - - floor = nw.new_node(Nodes.Math, - input_kwargs={0: multiply}, - attrs={'operation': 'FLOOR'}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: multiply, 1: floor}, - attrs={'operation': 'SUBTRACT'}) - - float_curve = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': subtract}) - node_utils.assign_curve(float_curve.mapping.curves[0], - [(0.0, 0.0156), (0.2545, 0.2), (0.5182, 0.0344), (0.7682, 0.2375), (1.0, 0.0)]) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: float_curve, 1: 1.0}, - attrs={'operation': 'MULTIPLY'}) - - multiply_2 = nw.new_node(Nodes.Math, - input_kwargs={0: map_range.outputs["Result"], 1: multiply_1}, - attrs={'operation': 'MULTIPLY'}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: map_range.outputs["Result"], 1: multiply_2}) - - add_1 = nw.new_node(Nodes.Math, - input_kwargs={0: random_value.outputs[1], 1: add}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Value': add_1}) - - -@node_utils.to_nodegroup('nodegroup_coutour_cross_geometry', singleton=False, type='GeometryNodeTree') -def nodegroup_coutour_cross_geometry(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - curve_circle = nw.new_node(Nodes.CurveCircle, - input_kwargs={'Resolution': 128, 'Radius': 0.05}) - - pedal_cross_coutour_x = nw.new_node(Nodes.Value, - label='pedal_cross_coutour_x') - pedal_cross_coutour_x.outputs[0].default_value = 0.3 - - pedal_cross_contour_bottom = nw.new_node(nodegroup_pedal_cross_contour_bottom().name, - input_kwargs={'X': pedal_cross_coutour_x}) - - set_position_1 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': curve_circle.outputs["Curve"], - 'Selection': pedal_cross_contour_bottom.outputs["Value"], - 'Offset': pedal_cross_contour_bottom.outputs["Vector"]}) - - pedal_cross_coutour_y = nw.new_node(Nodes.Value, - label='pedal_cross_coutour_y') - pedal_cross_coutour_y.outputs[0].default_value = 0.3 - - pedal_cross_contour_top = nw.new_node(nodegroup_pedal_cross_contour_top().name, - input_kwargs={'Y': pedal_cross_coutour_y, 'X': pedal_cross_coutour_x}) - - set_position_2 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': set_position_1, - 'Selection': pedal_cross_contour_top.outputs["Value"], - 'Offset': pedal_cross_contour_top.outputs["Vector"]}) - - noise_texture_2 = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'W': 7.0, 'Detail': 15.0}, - attrs={'noise_dimensions': '4D'}) - - scale = nw.new_node(Nodes.VectorMath, - input_kwargs={0: noise_texture_2.outputs["Fac"], 'Scale': 0.0}, - attrs={'operation': 'SCALE'}) - - set_position_5 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': set_position_2, 'Offset': scale.outputs["Vector"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_position_5}) - - -@node_utils.to_nodegroup('nodegroup_pedal_z_contour', singleton=False, type='GeometryNodeTree') -def nodegroup_pedal_z_contour(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - spline_parameter = nw.new_node(Nodes.SplineParameter) - - float_curve = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': spline_parameter.outputs["Factor"]}) - node_utils.assign_curve(float_curve.mapping.curves[0], - [(0.0, 0.4094), (0.1773, 0.475), (0.3795, 0.5062), (0.5864, 0.5187), (0.7202, 0.5084), - (0.8636, 0.4781), (1.0, 0.375)]) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 
'Value', 0.5)]) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: float_curve, 1: group_input.outputs["Value"]}, - attrs={'operation': 'MULTIPLY'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Value': multiply}) - - -@node_utils.to_nodegroup('nodegroup_pedal_stem_curvature', singleton=False, type='GeometryNodeTree') -def nodegroup_pedal_stem_curvature(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - position_3 = nw.new_node(Nodes.InputPosition) - - spline_parameter_1 = nw.new_node(Nodes.SplineParameter) - - float_curve_1 = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': spline_parameter_1.outputs["Factor"]}) - node_utils.assign_curve(float_curve_1.mapping.curves[0], - [(0.0, 0.0688), (0.2545, 0.2281), (0.5023, 0.2563), (0.9773, 0.2656)]) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Value', 0.2)]) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: float_curve_1, 1: group_input.outputs["Value"]}, - attrs={'operation': 'MULTIPLY'}) - - vector_rotate = nw.new_node(Nodes.VectorRotate, - input_kwargs={'Vector': position_3, 'Center': (0.0, 0.0, 0.2), 'Angle': multiply}, - attrs={'rotation_type': 'X_AXIS'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Vector': vector_rotate}) - - -@node_utils.to_nodegroup('nodegroup_node_group_002', singleton=False, type='ShaderNodeTree') -def nodegroup_node_group_002(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - texture_coordinate = nw.new_node(Nodes.TextureCoord) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketColor', 'Color', (0.8, 0.8, 0.8, 1.0)), - ('NodeSocketFloat', 'attribute', 0.0), - ('NodeSocketFloat', 'voronoi scale', 50.0), - ('NodeSocketFloatFactor', 'voronoi randomness', 1.0), - ('NodeSocketFloat', 'seed', 0.0), - ('NodeSocketFloat', 'noise scale', 10.0), - ('NodeSocketFloat', 'noise amount', 1.4), - ('NodeSocketFloat', 'hue min', 0.6), - ('NodeSocketFloat', 'hue max', 1.085)]) - - add = nw.new_node(Nodes.VectorMath, - input_kwargs={0: texture_coordinate.outputs["Object"], 1: group_input.outputs["seed"]}) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': add.outputs["Vector"], - 'Scale': group_input.outputs["noise scale"], 'Detail': 1.0}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: noise_texture.outputs["Fac"], 1: group_input.outputs["noise amount"]}, - attrs={'operation': 'MULTIPLY'}) - - voronoi_texture = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'W': group_input.outputs["attribute"], - 'Scale': group_input.outputs["voronoi scale"], - 'Randomness': group_input.outputs["voronoi randomness"]}, - attrs={'voronoi_dimensions': '1D'}) - - add_1 = nw.new_node(Nodes.Math, - input_kwargs={0: multiply, 1: voronoi_texture.outputs["Distance"]}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': add_1, 3: group_input.outputs["hue min"], - 4: group_input.outputs["hue max"]}) - - hue_saturation_value = nw.new_node('ShaderNodeHueSaturation', - input_kwargs={'Value': map_range.outputs["Result"], - 'Color': group_input.outputs["Color"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Color': hue_saturation_value}) - - -@node_utils.to_nodegroup('nodegroup_tree_trunk_geometry_001', singleton=False, type='GeometryNodeTree') -def nodegroup_tree_trunk_geometry_001(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = 
nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Curve', None)]) - - trunkradius_001 = nw.new_node(nodegroup_trunk_radius_001().name) - - set_curve_radius = nw.new_node(Nodes.SetCurveRadius, - input_kwargs={'Curve': group_input.outputs["Curve"], 'Radius': trunkradius_001}) - - trunk_resolution = nw.new_node(Nodes.Integer, - label='TrunkResolution', - attrs={'integer': 32}) - trunk_resolution.integer = 32 - - trunk_radius = nw.new_node(Nodes.Value, - label='TrunkRadius') - trunk_radius.outputs[0].default_value = 0.02 - - curve_circle = nw.new_node(Nodes.CurveCircle, - input_kwargs={'Resolution': trunk_resolution, 'Radius': trunk_radius}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': set_curve_radius, 'Profile Curve': curve_circle.outputs["Curve"], - 'Fill Caps': True}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Mesh': curve_to_mesh, 'Integer': trunk_resolution}) - - -@node_utils.to_nodegroup('nodegroup_truncated_leaf_selection', singleton=False, type='GeometryNodeTree') -def nodegroup_truncated_leaf_selection(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - index_3 = nw.new_node(Nodes.Index) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Value', 0.5)]) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: 1600.0, 1: group_input.outputs["Value"]}, - attrs={'operation': 'MULTIPLY'}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: multiply, 1: uniform(0.98, 0.99)}, - attrs={'operation': 'MULTIPLY'}) - - greater_than = nw.new_node(Nodes.Math, - input_kwargs={0: index_3, 1: multiply_1}, - attrs={'operation': 'GREATER_THAN'}) - - multiply_2 = nw.new_node(Nodes.Math, - input_kwargs={0: multiply, 1: np.clip(normal(0.65, 0.2), 0.7, 0.5)}, - attrs={'operation': 'MULTIPLY'}) - - less_than = nw.new_node(Nodes.Math, - input_kwargs={0: index_3, 1: multiply_2}, - attrs={'operation': 'LESS_THAN'}) - - op_or = nw.new_node(Nodes.BooleanMath, - input_kwargs={0: greater_than, 1: less_than}, - attrs={'operation': 'OR'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Boolean': op_or}) - - -@node_utils.to_nodegroup('nodegroup_random_rotate', singleton=False, type='GeometryNodeTree') -def nodegroup_random_rotate(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - random_value_1 = nw.new_node(Nodes.RandomValue, - input_kwargs={2: -0.2, 3: 0.2}) - - random_value_2 = nw.new_node(Nodes.RandomValue, - input_kwargs={2: -0.5, 3: 0.5, 'Seed': 1}) - - random_value_3 = nw.new_node(Nodes.RandomValue, - input_kwargs={2: -0.2, 3: 0.2, 'Seed': 3}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': random_value_1.outputs[1], 'Y': random_value_2.outputs[1], - 'Z': random_value_3.outputs[1]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Vector': combine_xyz_1}) - - -@node_utils.to_nodegroup('nodegroup_leaf_truncated_rotate', singleton=False, type='GeometryNodeTree') -def nodegroup_leaf_truncated_rotate(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - index_1 = nw.new_node(Nodes.Index) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Value', 0.5)]) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Value"], 1: 0.0}) - - modulo = nw.new_node(Nodes.Math, - input_kwargs={0: index_1, 1: add}, - attrs={'operation': 'MODULO'}) - - divide = nw.new_node(Nodes.Math, - input_kwargs={0: modulo, 
1: add}, - attrs={'operation': 'DIVIDE'}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: divide, 1: 6.28}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'Z': multiply}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Vector': combine_xyz}) - - -@node_utils.to_nodegroup('nodegroup_truncated_leaf_stem', singleton=False, type='GeometryNodeTree') -def nodegroup_truncated_leaf_stem(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - curve_line = nw.new_node(Nodes.CurveLine, - input_kwargs={'End': (0.0, 0.0, 0.15)}) - - integer = nw.new_node(Nodes.Integer, - attrs={'integer': 64}) - integer.integer = 64 - - resample_curve_1 = nw.new_node(Nodes.ResampleCurve, - input_kwargs={'Curve': curve_line, 'Count': integer}) - - pedal_stem_curvature_scale = nw.new_node(Nodes.Value, - label='pedal_stem_curvature_scale') - pedal_stem_curvature_scale.outputs[0].default_value = 0.2 - - pedal_stem_curvature = nw.new_node(nodegroup_pedal_stem_curvature().name, - input_kwargs={'Value': pedal_stem_curvature_scale}) - - set_position_4 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': resample_curve_1, 'Offset': pedal_stem_curvature}) - - pedal_z_coutour_scale = nw.new_node(Nodes.Value, - label='pedal_z_coutour_scale') - pedal_z_coutour_scale.outputs[0].default_value = uniform(0.2, 0.4) - - pedal_z_contour = nw.new_node(nodegroup_pedal_z_contour().name, - input_kwargs={'Value': pedal_z_coutour_scale}) - - set_curve_radius_1 = nw.new_node(Nodes.SetCurveRadius, - input_kwargs={'Curve': set_position_4, 'Radius': pedal_z_contour}) - - coutour_cross_geometry = nw.new_node(nodegroup_coutour_cross_geometry().name) - - curve_to_mesh_1 = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': set_curve_radius_1, 'Profile Curve': coutour_cross_geometry, - 'Fill Caps': True}) - - set_material_2 = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': curve_to_mesh_1, - 'Material': surface.shaderfunc_to_material(shader_top_core)}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_material_2}) - - -@node_utils.to_nodegroup('nodegroup_trunk_radius', singleton=False, type='GeometryNodeTree') -def nodegroup_trunk_radius(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - random_value = nw.new_node(Nodes.RandomValue, - input_kwargs={2: 0.01, 3: 0.05}) - - spline_parameter = nw.new_node(Nodes.SplineParameter) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': spline_parameter.outputs["Factor"], 3: 1.0, 4: 0.2}, - attrs={'clamp': False}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: spline_parameter.outputs["Factor"], 1: 10000.0}, - attrs={'operation': 'MULTIPLY'}) - - floor = nw.new_node(Nodes.Math, - input_kwargs={0: multiply}, - attrs={'operation': 'FLOOR'}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: multiply, 1: floor}, - attrs={'operation': 'SUBTRACT'}) - - float_curve = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': subtract}) - node_utils.assign_curve(float_curve.mapping.curves[0], [(0.0, 0.0969), (0.5864, 0.1406), (1.0, 0.2906)]) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: float_curve, 1: uniform(0.1, 0.25)}, - attrs={'operation': 'MULTIPLY'}) - - multiply_2 = nw.new_node(Nodes.Math, - input_kwargs={0: map_range.outputs["Result"], 1: multiply_1}, - attrs={'operation': 'MULTIPLY'}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: map_range.outputs["Result"], 1: 
multiply_2}) - - add_1 = nw.new_node(Nodes.Math, - input_kwargs={0: random_value.outputs[1], 1: add}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Value': add_1}) - - -@node_utils.to_nodegroup('nodegroup_tree_cracks', singleton=False, type='GeometryNodeTree') -def nodegroup_tree_cracks(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None)]) - - spline_parameter = nw.new_node(Nodes.SplineParameter) - - capture_attribute = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': group_input.outputs["Geometry"], - 2: spline_parameter.outputs["Length"]}) - - position = nw.new_node(Nodes.InputPosition) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': position}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: capture_attribute.outputs[2], 1: uniform(0.1, 0.25)}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': separate_xyz.outputs["X"], 'Y': separate_xyz.outputs["Y"], - 'Z': multiply}) - - voronoi_texture = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': combine_xyz, 'Scale': 400.0, 'Randomness': 10.0}, - attrs={'voronoi_dimensions': '4D', 'distance': 'CHEBYCHEV'}) - - colorramp = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': voronoi_texture.outputs["Distance"]}) - colorramp.color_ramp.elements[0].position = 0.6091 - colorramp.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) - colorramp.color_ramp.elements[1].position = 0.6818 - colorramp.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - - normal = nw.new_node(Nodes.InputNormal) - - multiply_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: colorramp.outputs["Color"], 1: normal}, - attrs={'operation': 'MULTIPLY'}) - - multiply_2 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: multiply_1.outputs["Vector"], 1: (-0.01, -0.01, -0.01)}, - attrs={'operation': 'MULTIPLY'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': capture_attribute.outputs["Geometry"], - 'Vector': multiply_2.outputs["Vector"]}) - - -@node_utils.to_nodegroup('nodegroup_leaf_instance_selection_bottom_remove', singleton=False, type='GeometryNodeTree') -def nodegroup_leaf_instance_selection_bottom_remove(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - index_1 = nw.new_node(Nodes.Index) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Ring', 10.0), - ('NodeSocketFloat', 'Segment', 0.5)]) - - divide = nw.new_node(Nodes.Math, - input_kwargs={0: index_1, 1: group_input.outputs["Ring"]}, - attrs={'operation': 'DIVIDE'}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Segment"], 1: 4.0}, - attrs={'operation': 'SUBTRACT'}) - - greater_than = nw.new_node(Nodes.Math, - input_kwargs={0: divide, 1: subtract}, - attrs={'operation': 'GREATER_THAN'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Value': greater_than}) - - -@node_utils.to_nodegroup('nodegroup_leaf_random_rotate', singleton=False, type='GeometryNodeTree') -def nodegroup_leaf_random_rotate(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - random_value_1 = nw.new_node(Nodes.RandomValue, - input_kwargs={2: -0.4, 3: 0.4}) - - random_value_3 = nw.new_node(Nodes.RandomValue, - input_kwargs={2: -0.4, 3: 0.4}) - - random_value_2 = nw.new_node(Nodes.RandomValue, - 
input_kwargs={2: -0.6, 3: 0.6}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': random_value_1.outputs[1], 'Y': random_value_3.outputs[1], - 'Z': random_value_2.outputs[1]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Vector': combine_xyz}) - - -@node_utils.to_nodegroup('nodegroup_leaf_rotate_downward', singleton=False, type='GeometryNodeTree') -def nodegroup_leaf_rotate_downward(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - index = nw.new_node(Nodes.Index) - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Value', 0.5)]) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Value"], 1: 0.0}) - - modulo = nw.new_node(Nodes.Math, - input_kwargs={0: index, 1: add}, - attrs={'operation': 'MODULO'}) - - divide = nw.new_node(Nodes.Math, - input_kwargs={0: modulo, 1: add}, - attrs={'operation': 'DIVIDE'}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: divide, 1: 6.28}, - attrs={'operation': 'MULTIPLY'}) - - add2 = nw.new_node(Nodes.Math, - input_kwargs={0: multiply.outputs["Value"], 1: -1.57}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'Z': add2}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Vector': combine_xyz}) - - -@node_utils.to_nodegroup('nodegroup_truncated_stem_geometry', singleton=False, type='GeometryNodeTree') -def nodegroup_truncated_stem_geometry(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Points', None), - ('NodeSocketFloat', 'Value1', 0.5), - ('NodeSocketFloat', 'Value2', 0.5)]) - - truncated_leaf_stem = nw.new_node(nodegroup_truncated_leaf_stem().name) - - normal_1 = nw.new_node(Nodes.InputNormal) - - align_euler_to_vector_1 = nw.new_node(Nodes.AlignEulerToVector, - input_kwargs={'Vector': normal_1}, - attrs={'axis': 'Z'}) - - instance_on_points_2 = nw.new_node(Nodes.InstanceOnPoints, - input_kwargs={'Points': group_input.outputs["Points"], - 'Instance': truncated_leaf_stem, - 'Rotation': align_euler_to_vector_1}) - - leaf_truncated_rotate = nw.new_node(nodegroup_leaf_truncated_rotate().name, - input_kwargs={'Value': group_input.outputs[2]}) - - rotate_instances_2 = nw.new_node(Nodes.RotateInstances, - input_kwargs={'Instances': instance_on_points_2, - 'Rotation': leaf_truncated_rotate}) - - rotate_instances_3 = nw.new_node(Nodes.RotateInstances, - input_kwargs={'Instances': rotate_instances_2, 'Rotation': (-0.9599, 0.0, 1.5708)}) - - random_rotate = nw.new_node(nodegroup_random_rotate().name) - - rotate_instances_4 = nw.new_node(Nodes.RotateInstances, - input_kwargs={'Instances': rotate_instances_3, 'Rotation': random_rotate}) - - random_value_5 = nw.new_node(Nodes.RandomValue, - input_kwargs={2: 0.6}) - - scale_instances_4 = nw.new_node(Nodes.ScaleInstances, - input_kwargs={'Instances': rotate_instances_4, 'Scale': random_value_5.outputs[1]}) - - index_2 = nw.new_node(Nodes.Index) - - modulo = nw.new_node(Nodes.Math, - input_kwargs={0: index_2, 1: randint(6, 10)}, - attrs={'operation': 'MODULO'}) - - scale_instances_3 = nw.new_node(Nodes.ScaleInstances, - input_kwargs={'Instances': scale_instances_4, 'Selection': modulo, - 'Scale': (0.0, 0.0, 0.0)}) - - truncated_leaf_selection = nw.new_node(nodegroup_truncated_leaf_selection().name, - input_kwargs={'Value': group_input.outputs["Value1"]}) - - scale_instances_5 = nw.new_node(Nodes.ScaleInstances, - 
input_kwargs={'Instances': scale_instances_3, 'Selection': truncated_leaf_selection, - 'Scale': (0.0, 0.0, 0.0)}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Instances': scale_instances_5}) - - -@node_utils.to_nodegroup('nodegroup_tree_trunk_geometry', singleton=False, type='GeometryNodeTree') -def nodegroup_tree_trunk_geometry(nw: NodeWrangler, radius): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Curve', None)]) - - trunkradius = nw.new_node(nodegroup_trunk_radius().name) - - set_curve_radius = nw.new_node(Nodes.SetCurveRadius, - input_kwargs={'Curve': group_input.outputs["Curve"], 'Radius': trunkradius}) - - treecracks = nw.new_node(nodegroup_tree_cracks().name, - input_kwargs={'Geometry': set_curve_radius}) - - trunk_resolution = nw.new_node(Nodes.Integer, - label='TrunkResolution', - attrs={'integer': 32}) - trunk_resolution.integer = 32 - - trunk_radius = nw.new_node(Nodes.Value, - label='TrunkRadius') - trunk_radius.outputs[0].default_value = radius - - curve_circle = nw.new_node(Nodes.CurveCircle, - input_kwargs={'Resolution': trunk_resolution, 'Radius': trunk_radius}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': treecracks.outputs["Geometry"], - 'Profile Curve': curve_circle.outputs["Curve"], 'Fill Caps': True}) - - subdivide_mesh = nw.new_node(Nodes.SubdivideMesh, - input_kwargs={'Mesh': curve_to_mesh, 'Level': 2}) - - set_position_1 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': subdivide_mesh, 'Offset': treecracks.outputs["Vector"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_position_1, 'Integer': trunk_resolution, - 'Mesh': curve_to_mesh}) - - -@node_utils.to_nodegroup('nodegroup_leaf_on_top', singleton=False, type='GeometryNodeTree') -def nodegroup_leaf_on_top(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Points', None), - ('NodeSocketFloat', 'Value', 0.5), - ('NodeSocketFloat', 'Ring', 10.0), - ('NodeSocketFloat', 'Segment', 0.5), - ('NodeSocketGeometry', 'Instance', None)]) - - normal = nw.new_node(Nodes.InputNormal) - - align_euler_to_vector = nw.new_node(Nodes.AlignEulerToVector, - input_kwargs={'Vector': normal}, - attrs={'axis': 'Z'}) - - instance_on_points_1 = nw.new_node(Nodes.InstanceOnPoints, - input_kwargs={'Points': group_input.outputs["Points"], - 'Instance': group_input.outputs["Instance"], - 'Rotation': align_euler_to_vector}) - - leafrotatedownward = nw.new_node(nodegroup_leaf_rotate_downward().name, - input_kwargs={'Value': group_input.outputs["Value"]}) - - rotate_instances = nw.new_node(Nodes.RotateInstances, - input_kwargs={'Instances': instance_on_points_1, 'Rotation': leafrotatedownward}) - - leafrandomrotate = nw.new_node(nodegroup_leaf_random_rotate().name) - - rotate_instances_1 = nw.new_node(Nodes.RotateInstances, - input_kwargs={'Instances': rotate_instances, 'Rotation': leafrandomrotate}) - - random_value_4 = nw.new_node(Nodes.RandomValue, - input_kwargs={2: 0.5, 3: 1.0}) - - scale_instances_2 = nw.new_node(Nodes.ScaleInstances, - input_kwargs={'Instances': rotate_instances_1, 'Scale': random_value_4.outputs[1]}) - - leafinstanceselectionbottomremove = nw.new_node(nodegroup_leaf_instance_selection_bottom_remove().name, - input_kwargs={'Ring': group_input.outputs["Ring"], - 'Segment': group_input.outputs["Segment"]}) - 
- scale_instances = nw.new_node(Nodes.ScaleInstances, - input_kwargs={'Instances': scale_instances_2, - 'Selection': leafinstanceselectionbottomremove, - 'Scale': (0.0, 0.0, 0.0)}) - - random_value = nw.new_node(Nodes.RandomValue, - input_kwargs={5: 1}, - attrs={'data_type': 'INT'}) - - scale_instances_1 = nw.new_node(Nodes.ScaleInstances, - input_kwargs={'Instances': scale_instances, 'Selection': random_value.outputs[2], - 'Scale': (0.0, 0.0, 0.0)}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Instances': scale_instances_1}) - - - - -def shader_top_core(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - texture_coordinate = nw.new_node(Nodes.TextureCoord) - - mapping = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': texture_coordinate.outputs["Object"], 'Scale': (1.0, 1.0, 0.1)}) - - voronoi_texture = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': mapping, 'Scale': uniform(100, 400)}) - - mapping_1 = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': texture_coordinate.outputs["Object"]}) - - wave_texture = nw.new_node(Nodes.WaveTexture, - input_kwargs={'Vector': mapping_1, 'Scale': 2.0, 'Distortion': 5.0, 'Detail': 10.0}) - - mix = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': 0.4, 'Color1': voronoi_texture.outputs["Distance"], - 'Color2': wave_texture.outputs["Color"]}) - - d_hsv = (uniform(0.02, 0.05), uniform(0.3, 0.6), uniform(0.01, 0.05)) - b_hsv = d_hsv[:1] + (uniform(0.6, 0.9), uniform(0.3, 0.6)) - colorramp = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': mix}) - colorramp.color_ramp.elements[0].position = 0.2409 - colorramp.color_ramp.elements[0].color = hsv2rgba(d_hsv) - colorramp.color_ramp.elements[1].position = 0.6045 - colorramp.color_ramp.elements[1].color = hsv2rgba(b_hsv) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': colorramp.outputs["Color"], - 'Roughness': colorramp.outputs["Alpha"]}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': principled_bsdf}) - - -def shader_trunk(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - texture_coordinate = nw.new_node(Nodes.TextureCoord) - - mapping = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': texture_coordinate.outputs["Object"]}) - - voronoi_texture_1 = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': mapping, 'Scale': 20.0}, - attrs={'voronoi_dimensions': '4D'}) - - wave_texture = nw.new_node(Nodes.WaveTexture, - input_kwargs={'Vector': mapping, 'Scale': uniform(1.0, 3.0), 'Distortion': 5.0, 'Detail Scale': 3.0}, - attrs={'bands_direction': 'Z'}) - - mix_1 = nw.new_node(Nodes.MixRGB, - input_kwargs={'Color1': voronoi_texture_1.outputs["Distance"], - 'Color2': wave_texture.outputs["Color"]}) - - d_hsv = (uniform(0.02, 0.05), uniform(0.01, 0.05) if randint(0, 2) == 1 else uniform(0.5, 0.8), uniform(0.03, 0.09)) - b_hsv = d_hsv[:-1] + (uniform(0.1, 0.3),) - colorramp = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': mix_1}) - colorramp.color_ramp.elements[0].position = 0.4682 - colorramp.color_ramp.elements[0].color = hsv2rgba(d_hsv) - colorramp.color_ramp.elements[1].position = 0.5591 - colorramp.color_ramp.elements[1].color = hsv2rgba(b_hsv) - - mapping_1 = nw.new_node(Nodes.Mapping, - input_kwargs={'Vector': texture_coordinate.outputs["Object"], 'Scale': (10.0, 10.0, 0.2)}) - - voronoi_texture = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': mapping_1, 'Scale': 100.0, 'Randomness': 10.0}, - 
attrs={'voronoi_dimensions': '4D', 'distance': 'CHEBYCHEV'}) - - colorramp_1 = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': voronoi_texture.outputs["Distance"]}) - colorramp_1.color_ramp.elements[0].position = 0.2818 - colorramp_1.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) - colorramp_1.color_ramp.elements[1].position = 0.3045 - colorramp_1.color_ramp.elements[1].color = (0.5284, 0.5034, 0.4327, 1.0) - - mix = nw.new_node(Nodes.MixRGB, - input_kwargs={'Fac': uniform(0.1, 0.3), 'Color1': colorramp.outputs["Color"], - 'Color2': colorramp_1.outputs["Color"]}) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': mix, 'Roughness': voronoi_texture.outputs["Distance"], 'Specular': 0}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': principled_bsdf}) - -@gin.configurable -def geometry_palm_tree_nodes(nw: NodeWrangler, truncatedstem_chance=0.4, **kwargs): - # Code generated using version 2.4.3 of the node_transpiler - - leaf = kwargs["leaf"][0] - radius = kwargs["trunk_radius"] - - trunk_height = nw.new_node(Nodes.Value, - label='trunk_height') - trunk_height.outputs[0].default_value = 5.0 - - top_x, top_y = np.random.normal(0.0, 0.5), np.random.normal(0.0, 0.5) - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': top_x, 'Y': top_y, 'Z': trunk_height}) - - quadratic_bezier = nw.new_node(Nodes.QuadraticBezier, - input_kwargs={'Start': (0.0, 0.0, 0.0), - 'Middle': (top_x / uniform(1.0, 2.0), top_y / uniform(1.0, 2.0), uniform(1.5, 3.0)), - 'End': combine_xyz_2}) - - resample_curve = nw.new_node(Nodes.ResampleCurve, - input_kwargs={'Curve': quadratic_bezier, 'Length': 0.02}, - attrs={'mode': 'LENGTH'}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': resample_curve}) - - endpoint_selection = nw.new_node('GeometryNodeCurveEndpointSelection', - input_kwargs={'Start Size': 0}) - - top_segment = nw.new_node(Nodes.Integer, - label='TopSegment', - attrs={'integer': 12}) - top_segment.integer = randint(8, 14) - - top_ring = nw.new_node(Nodes.Integer, - label='TopRing', - attrs={'integer': 8}) - top_ring.integer = randint(10, 15) - - uv_sphere = nw.new_node(Nodes.MeshUVSphere, - input_kwargs={'Segments': top_segment, 'Rings': top_ring, 'Radius': uniform(0.15, 0.2)}) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': uv_sphere, 'Scale': (1.0, 1.0, uniform(0.8, 2.0))}) - - set_material_1 = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': transform, - 'Material': surface.shaderfunc_to_material(shader_trunk)}) - - value = nw.new_node(Nodes.Value) - value.outputs[0].default_value = 0.2 - - object_info = nw.new_node(Nodes.ObjectInfo, input_kwargs={'Object': leaf}) - - leafontop = nw.new_node(nodegroup_leaf_on_top().name, - input_kwargs={'Points': transform, 'Value': top_segment, 'Ring': top_segment, - 'Segment': top_ring, 'Instance': object_info.outputs["Geometry"]}) - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [set_material_1, leafontop]}) - - instance_on_points = nw.new_node(Nodes.InstanceOnPoints, - input_kwargs={'Points': set_position, 'Selection': endpoint_selection, - 'Instance': join_geometry_1}) - - treetrunkgeometry = nw.new_node(nodegroup_tree_trunk_geometry(radius=radius).name, - input_kwargs={'Curve': set_position}) - - set_material = nw.new_node(Nodes.SetMaterial, - input_kwargs={'Geometry': treetrunkgeometry.outputs["Geometry"], - 'Material': surface.shaderfunc_to_material(shader_trunk)}) - - 
truncatedstemgeometry = nw.new_node(nodegroup_truncated_stem_geometry().name, - input_kwargs={'Points': treetrunkgeometry.outputs["Mesh"], 1: trunk_height, - 2: treetrunkgeometry.outputs["Integer"]}) - - geos = [instance_on_points, set_material] - if uniform(0.0, 1.0) < truncatedstem_chance: - geos.append(truncatedstemgeometry) - join_geometry = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': geos}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': join_geometry}) - - -class PalmTreeFactory(AssetFactory): - def __init__(self, factory_seed, coarse=False): - super(PalmTreeFactory, self).__init__(factory_seed, coarse=coarse) - - def create_asset(self, params={}, **kwargs): - bpy.ops.mesh.primitive_plane_add( - size=1, enter_editmode=False, align='WORLD', location=(0, 0, 0), scale=(1, 1, 1)) - obj = bpy.context.active_object - - # Make the Leaf and Delete It Later - lf_seed = randint(0, 1000, size=(1,))[0] - leaf_model = LeafPalmPlantFactory(factory_seed=lf_seed) - p = { - 'leaf_x_curvature': uniform(0.1, 0.3), - 'plant_z_rotate': uniform(0.0, 0.02), - 'stem_x_curvature': 0.0, - 'stem_y_curvature': uniform(-0.1, 0.1), - 'plant_stem_length': uniform(0.5, 1.2) - } - leaf = leaf_model.create_asset(p) - params["leaf"] = [leaf] - params["trunk_radius"] = uniform(0.2, 0.3) - - surface.add_geomod(obj, geometry_palm_tree_nodes, selection=None, attributes=[], input_kwargs=params) - butil.delete([leaf]) - with butil.SelectObjects(obj): - bpy.ops.object.material_slot_remove() - bpy.ops.object.shade_flat() - obj.scale = (2, 2, 2) - return obj diff --git a/infinigen/assets/tropic_plants/tropic_plant_utils.py b/infinigen/assets/tropic_plants/tropic_plant_utils.py deleted file mode 100644 index 5149909f1..000000000 --- a/infinigen/assets/tropic_plants/tropic_plant_utils.py +++ /dev/null @@ -1,827 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
- -# Authors: Beining Han - - -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core import surface -from infinigen.core.placement.factory import AssetFactory -import numpy as np -from infinigen.core.util.color import hsv2rgba -from infinigen.core.util import blender as butil - - -@node_utils.to_nodegroup('nodegroup_node_group', singleton=False, type='GeometryNodeTree') -def nodegroup_node_group(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Coord', 0.0), - ('NodeSocketFloat', 'Shape', 0.5), - ('NodeSocketFloat', 'Density', 0.5), - ('NodeSocketFloat', 'Random Scale Seed', 0.5)]) - - vein = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'W': group_input.outputs["Coord"], 'Scale': group_input.outputs["Density"], - 'Randomness': 0.2}, - label='Vein', - attrs={'voronoi_dimensions': '1D'}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Density"], - 1: group_input.outputs["Random Scale Seed"]}, - attrs={'operation': 'MULTIPLY'}) - - vein_1 = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'W': group_input.outputs["Coord"], 'Scale': multiply}, - label='Vein', - attrs={'voronoi_dimensions': '1D'}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: vein_1.outputs["Distance"], 1: 0.35}) - - round = nw.new_node(Nodes.Math, - input_kwargs={0: add}, - attrs={'operation': 'ROUND'}) - - add_1 = nw.new_node(Nodes.Math, - input_kwargs={0: vein.outputs["Distance"], 1: round}) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': add_1, 2: 0.02, 3: 0.95, 4: 0.0}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Shape"], 1: map_range_1.outputs["Result"]}, - attrs={'operation': 'MULTIPLY'}) - - map_range_2 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': multiply_1, 1: 0.001, 2: 0.005, 3: 1.0, 4: 0.0}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Result': map_range_2.outputs["Result"]}) - - -@node_utils.to_nodegroup('nodegroup_nodegroup_vein_coord_001', singleton=False, type='GeometryNodeTree') -def nodegroup_nodegroup_vein_coord_001(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'X Modulated', 0.5), - ('NodeSocketFloat', 'Y', 0.5), - ('NodeSocketFloat', 'Vein Asymmetry', 0.0), - ('NodeSocketFloat', 'Vein Angle', 2.0), - ('NodeSocketFloat', 'Leaf Shape', 0.0)]) - - sign = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["X Modulated"]}, - attrs={'operation': 'SIGN'}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Vein Asymmetry"], 1: sign}, - attrs={'operation': 'MULTIPLY'}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': group_input.outputs["Y"], 1: -1.0}) - - vein_shape = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': group_input.outputs["X Modulated"]}, - label='Vein Shape') - node_utils.assign_curve(vein_shape.mapping.curves[0], - [(0.0, 0.0), (0.0182, 0.05), (0.3364, 0.2386), (0.7227, 0.75), (1.0, 1.0)]) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': vein_shape, 4: 1.9}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: map_range_1.outputs["Result"], 1: group_input.outputs["Vein Angle"]}, - 
attrs={'operation': 'MULTIPLY'}) - - multiply_2 = nw.new_node(Nodes.Math, - input_kwargs={0: map_range.outputs["Result"], 1: multiply_1}, - attrs={'operation': 'MULTIPLY'}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_2, 1: group_input.outputs["Y"]}, - attrs={'operation': 'SUBTRACT'}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: multiply, 1: subtract}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Vein Coord': add}) - - -@node_utils.to_nodegroup('nodegroup_nodegroup_shape_with_jigsaw', singleton=False, type='GeometryNodeTree') -def nodegroup_nodegroup_shape_with_jigsaw(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Midrib Value', 1.0), - ('NodeSocketFloat', 'Vein Coord', 0.0), - ('NodeSocketFloat', 'Leaf Shape', 0.5), - ('NodeSocketFloat', 'Jigsaw Scale', 18.0), - ('NodeSocketFloat', 'Jigsaw Depth', 0.5)]) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': group_input.outputs["Midrib Value"], 3: 1.0, 4: 0.0}) - - jigsaw = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'W': group_input.outputs["Vein Coord"], - 'Scale': group_input.outputs["Jigsaw Scale"]}, - label='Jigsaw', - attrs={'voronoi_dimensions': '1D'}) - - colorramp = nw.new_node(Nodes.ColorRamp, - input_kwargs={'Fac': jigsaw.outputs["Distance"]}) - colorramp.color_ramp.elements[0].position = 0.4795 - colorramp.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) - colorramp.color_ramp.elements[1].position = 0.5545 - colorramp.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Jigsaw Depth"], 1: 0.0}, - attrs={'operation': 'MULTIPLY'}) - - multiply_add = nw.new_node(Nodes.Math, - input_kwargs={0: colorramp.outputs["Color"], 1: multiply, - 2: group_input.outputs["Leaf Shape"]}, - attrs={'operation': 'MULTIPLY_ADD'}) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': multiply_add, 1: 0.001, 2: 0.002, 3: 1.0, 4: 0.0}) - - maximum = nw.new_node(Nodes.Math, - input_kwargs={0: map_range.outputs["Result"], 1: map_range_1.outputs["Result"]}, - attrs={'operation': 'MAXIMUM'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Value': maximum}) - - -@node_utils.to_nodegroup('nodegroup_nodegroup_vein_coord_003', singleton=False, type='GeometryNodeTree') -def nodegroup_nodegroup_vein_coord_003(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'X Modulated', 0.5), - ('NodeSocketFloat', 'Y', 0.5), - ('NodeSocketFloat', 'Vein Asymmetry', 0.0), - ('NodeSocketFloat', 'Vein Angle', 2.0), - ('NodeSocketFloat', 'Leaf Shape', 0.0)]) - - sign = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["X Modulated"]}, - attrs={'operation': 'SIGN'}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Vein Asymmetry"], 1: sign}, - attrs={'operation': 'MULTIPLY'}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': group_input.outputs["Y"], 1: -1.0}) - - absolute = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["X Modulated"]}, - attrs={'operation': 'ABSOLUTE', 'use_clamp': True}) - - divide = nw.new_node(Nodes.Math, - input_kwargs={0: absolute, 1: group_input.outputs["Leaf Shape"]}, - attrs={'operation': 'DIVIDE', 'use_clamp': True}) - - vein_shape = 
nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': divide}, - label='Vein Shape') - node_utils.assign_curve(vein_shape.mapping.curves[0], - [(0.0, 0.0), (0.0182, 0.05), (0.2909, 0.2199), (0.4182, 0.3063), (0.7045, 0.3), - (1.0, 0.8562)], handles=['AUTO', 'AUTO', 'AUTO', 'VECTOR', 'AUTO', 'AUTO']) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': vein_shape, 4: 1.9}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: map_range_1.outputs["Result"], 1: group_input.outputs["Vein Angle"]}, - attrs={'operation': 'MULTIPLY'}) - - multiply_2 = nw.new_node(Nodes.Math, - input_kwargs={0: map_range.outputs["Result"], 1: multiply_1}, - attrs={'operation': 'MULTIPLY'}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_2, 1: group_input.outputs["Y"]}, - attrs={'operation': 'SUBTRACT'}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: multiply, 1: subtract}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Vein Coord': add}) - - -@node_utils.to_nodegroup('nodegroup_nodegroup_vein_coord', singleton=False, type='GeometryNodeTree') -def nodegroup_nodegroup_vein_coord(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'X Modulated', 0.5), - ('NodeSocketFloat', 'Y', 0.5), - ('NodeSocketFloat', 'Vein Asymmetry', 0.0), - ('NodeSocketFloat', 'Vein Angle', 2.0), - ('NodeSocketFloat', 'Leaf Shape', 0.0)]) - - sign = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["X Modulated"]}, - attrs={'operation': 'SIGN'}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Vein Asymmetry"], 1: sign}, - attrs={'operation': 'MULTIPLY'}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': group_input.outputs["Y"], 1: -1.0}) - - absolute = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["X Modulated"]}, - attrs={'operation': 'ABSOLUTE', 'use_clamp': True}) - - divide = nw.new_node(Nodes.Math, - input_kwargs={0: absolute, 1: group_input.outputs["Leaf Shape"]}, - attrs={'operation': 'DIVIDE', 'use_clamp': True}) - - vein_shape = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': divide}, - label='Vein Shape') - node_utils.assign_curve(vein_shape.mapping.curves[0], - [(0.0, 0.0), (0.0182, 0.05), (0.3364, 0.2386), (0.6045, 0.4812), (0.7, 0.725), - (0.8273, 0.8437), (1.0, 1.0)], - handles=['AUTO', 'AUTO', 'AUTO', 'VECTOR', 'AUTO', 'AUTO', 'AUTO']) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': vein_shape, 4: 1.9}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: map_range_1.outputs["Result"], 1: group_input.outputs["Vein Angle"]}, - attrs={'operation': 'MULTIPLY'}) - - multiply_2 = nw.new_node(Nodes.Math, - input_kwargs={0: map_range.outputs["Result"], 1: multiply_1}, - attrs={'operation': 'MULTIPLY'}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_2, 1: group_input.outputs["Y"]}, - attrs={'operation': 'SUBTRACT'}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: multiply, 1: subtract}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Vein Coord': add}) - - -@node_utils.to_nodegroup('nodegroup_nodegroup_vein_coord_002', singleton=False, type='GeometryNodeTree') -def nodegroup_nodegroup_vein_coord_002(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'X Modulated', 0.5), - 
('NodeSocketFloat', 'Y', 0.5), - ('NodeSocketFloat', 'Vein Asymmetry', 0.0), - ('NodeSocketFloat', 'Vein Angle', 2.0), - ('NodeSocketFloat', 'Leaf Shape', 0.0)]) - - sign = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["X Modulated"]}, - attrs={'operation': 'SIGN'}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Vein Asymmetry"], 1: sign}, - attrs={'operation': 'MULTIPLY'}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': group_input.outputs["Y"], 1: -1.0}) - - absolute = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["X Modulated"]}, - attrs={'operation': 'ABSOLUTE', 'use_clamp': True}) - - divide = nw.new_node(Nodes.Math, - input_kwargs={0: absolute, 1: group_input.outputs["Leaf Shape"]}, - attrs={'operation': 'DIVIDE', 'use_clamp': True}) - - vein_shape = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': divide}, - label='Vein Shape') - node_utils.assign_curve(vein_shape.mapping.curves[0], - [(0.0, 0.0), (0.0182, 0.05), (0.3364, 0.2386), (0.8091, 0.7312), (1.0, 0.9937)]) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': vein_shape, 4: 1.9}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: map_range_1.outputs["Result"], 1: group_input.outputs["Vein Angle"]}, - attrs={'operation': 'MULTIPLY'}) - - multiply_2 = nw.new_node(Nodes.Math, - input_kwargs={0: map_range.outputs["Result"], 1: multiply_1}, - attrs={'operation': 'MULTIPLY'}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_2, 1: group_input.outputs["Y"]}, - attrs={'operation': 'SUBTRACT'}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: multiply, 1: subtract}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Vein Coord': add}) - - -@node_utils.to_nodegroup('nodegroup_nodegroup_shape', singleton=False, type='GeometryNodeTree') -def nodegroup_nodegroup_shape(nw: NodeWrangler, leaf_contour_control_points=None): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'X Modulated', 0.0), - ('NodeSocketFloat', 'Y', 0.0), - ('NodeSocketFloat', 'scale', 0.0)]) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': group_input.outputs["X Modulated"], 'Y': group_input.outputs["Y"]}) - - clamp = nw.new_node(Nodes.Clamp, - input_kwargs={'Value': group_input.outputs["Y"], 'Min': -0.6, 'Max': 0.6}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'Y': clamp}) - - subtract = nw.new_node(Nodes.VectorMath, - input_kwargs={0: combine_xyz, 1: combine_xyz_1}, - attrs={'operation': 'SUBTRACT'}) - - length = nw.new_node(Nodes.VectorMath, - input_kwargs={0: subtract.outputs["Vector"]}, - attrs={'operation': 'LENGTH'}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': group_input.outputs["Y"], 1: -0.6, 2: 0.6}) - - leaf_shape = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': map_range.outputs["Result"]}, - label='Leaf shape') - - if leaf_contour_control_points is not None: - node_utils.assign_curve(leaf_shape.mapping.curves[0], - [(0.0, 0.0), (0.1, leaf_contour_control_points[0]), - (0.25, leaf_contour_control_points[1]), - (0.4, leaf_contour_control_points[2]), - (0.55, leaf_contour_control_points[3]), - (0.7, leaf_contour_control_points[4]), - (0.85, leaf_contour_control_points[5]), (1.0, 0.0)]) - else: - node_utils.assign_curve(leaf_shape.mapping.curves[0], - [(0.0, 0.0), (0.15, 0.25), (0.3818, 0.35), (0.6273, 0.3625), (0.7802, 0.2957), - (0.8955, 
0.2), (1.0, 0.0)]) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: leaf_shape, 1: group_input.outputs["scale"]}, - attrs={'operation': 'MULTIPLY'}) - - subtract_1 = nw.new_node(Nodes.Math, - input_kwargs={0: length.outputs["Value"], 1: multiply}, - attrs={'operation': 'SUBTRACT'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Leaf Shape': subtract_1, 'Value': multiply}) - - -@node_utils.to_nodegroup('nodegroup_nodegroup_leaf_gen', singleton=False, type='GeometryNodeTree') -def nodegroup_nodegroup_leaf_gen(nw: NodeWrangler, leaf_contour_control_points=None): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Mesh', None), - ('NodeSocketFloat', 'Displancement scale', 0.5), - ('NodeSocketFloat', 'Vein Asymmetry', 0.0), - ('NodeSocketFloat', 'Vein Density', 6.0), - ('NodeSocketFloat', 'Jigsaw Scale', 18.0), - ('NodeSocketFloat', 'Jigsaw Depth', 0.07), - ('NodeSocketFloat', 'Vein Angle', 1.0), - ('NodeSocketFloat', 'Sub-vein Displacement', 0.5), - ('NodeSocketFloat', 'Sub-vein Scale', 50.0), - ('NodeSocketFloat', 'Wave Displacement', 0.1), - ('NodeSocketFloat', 'Midrib Length', 0.4), - ('NodeSocketFloat', 'Midrib Width', 1.0), - ('NodeSocketFloat', 'Stem Length', 0.8), - ('NodeSocketFloat', 'Leaf Width Scale', 0.0)]) - - position = nw.new_node(Nodes.InputPosition) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': position}) - - nodegroup_midrib = nw.new_node(nodegroup_nodegroup_midrib().name, - input_kwargs={'X': separate_xyz.outputs["X"], 'Y': separate_xyz.outputs["Y"], - 'Midrib Length': group_input.outputs["Midrib Length"], - 'Midrib Width': group_input.outputs["Midrib Width"], - 'Stem Length': group_input.outputs["Stem Length"]}) - - nodegroup_shape = nw.new_node(nodegroup_nodegroup_shape(leaf_contour_control_points).name, - input_kwargs={'X Modulated': nodegroup_midrib.outputs["X Modulated"], - 'Y': separate_xyz.outputs["Y"], - 'scale': group_input.outputs["Leaf Width Scale"]}) - - nodegroup_vein_coord_002 = nw.new_node(nodegroup_nodegroup_vein_coord_002().name, - input_kwargs={'X Modulated': nodegroup_midrib.outputs["X Modulated"], - 'Y': separate_xyz.outputs["Y"], - 'Vein Asymmetry': group_input.outputs["Vein Asymmetry"], - 'Vein Angle': group_input.outputs["Vein Angle"], - 'Leaf Shape': nodegroup_shape.outputs["Value"]}) - - nodegroup_vein_coord = nw.new_node(nodegroup_nodegroup_vein_coord().name, - input_kwargs={'X Modulated': nodegroup_midrib.outputs["X Modulated"], - 'Y': separate_xyz.outputs["Y"], - 'Vein Asymmetry': group_input.outputs["Vein Asymmetry"], - 'Vein Angle': group_input.outputs["Vein Angle"], - 'Leaf Shape': nodegroup_shape.outputs["Value"]}) - - nodegroup_vein_coord_003 = nw.new_node(nodegroup_nodegroup_vein_coord_003().name, - input_kwargs={'X Modulated': nodegroup_midrib.outputs["X Modulated"], - 'Y': separate_xyz.outputs["Y"], - 'Vein Asymmetry': group_input.outputs["Vein Asymmetry"], - 'Vein Angle': group_input.outputs["Vein Angle"], - 'Leaf Shape': nodegroup_shape.outputs["Value"]}) - - nodegroup_apply_vein_midrib = nw.new_node(nodegroup_nodegroup_apply_vein_midrib().name, - input_kwargs={'Midrib Value': nodegroup_midrib.outputs["Midrib Value"], - 'Leaf Shape': nodegroup_shape.outputs["Leaf Shape"], - 'Vein Density': group_input.outputs["Vein Density"], - 'Vein Coord - main': nodegroup_vein_coord_002, - 'Vein Coord - 1': nodegroup_vein_coord, - 'Vein Coord - 2': nodegroup_vein_coord_003}) - - multiply = 
nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Displancement scale"], 1: nodegroup_apply_vein_midrib}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'Z': multiply}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': group_input.outputs["Mesh"], 'Offset': combine_xyz}) - - nodegroup_shape_with_jigsaw = nw.new_node(nodegroup_nodegroup_shape_with_jigsaw().name, - input_kwargs={'Midrib Value': nodegroup_midrib.outputs["Midrib Value"], - 'Vein Coord': nodegroup_vein_coord_002, - 'Leaf Shape': nodegroup_shape.outputs["Leaf Shape"], - 'Jigsaw Scale': group_input.outputs["Jigsaw Scale"], - 'Jigsaw Depth': group_input.outputs["Jigsaw Depth"]}) - - less_than = nw.new_node(Nodes.Compare, - input_kwargs={0: nodegroup_shape_with_jigsaw, 1: 0.5}, - attrs={'operation': 'LESS_THAN'}) - - delete_geometry = nw.new_node(Nodes.DeleteGeometry, - input_kwargs={'Geometry': set_position, 'Selection': less_than}) - - capture_attribute = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': delete_geometry, 2: nodegroup_apply_vein_midrib}) - - position_1 = nw.new_node(Nodes.InputPosition) - - separate_xyz_1 = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': position_1}) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': separate_xyz_1.outputs["Y"], 1: -0.6, 2: 0.6}) - - float_curve_1 = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': map_range_1.outputs["Result"]}) - node_utils.assign_curve(float_curve_1.mapping.curves[0], [(0.0, 0.0), (0.5182, 1.0), (1.0, 1.0)]) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': nodegroup_shape.outputs["Leaf Shape"], 2: -1.0}) - - float_curve = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': map_range.outputs["Result"]}) - node_utils.assign_curve(float_curve.mapping.curves[0], - [(0.0045, 0.0063), (0.0409, 0.0375), (0.4182, 0.05), (1.0, 0.0)]) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: float_curve_1, 1: float_curve}, - attrs={'operation': 'MULTIPLY'}) - - multiply_2 = nw.new_node(Nodes.Math, - input_kwargs={0: multiply_1, 1: 0.7}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'Z': multiply_2}) - - set_position_1 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': capture_attribute.outputs["Geometry"], - 'Offset': combine_xyz_1}) - - nodegroup_vein_coord_001 = nw.new_node(nodegroup_nodegroup_vein_coord_001().name, - input_kwargs={'X Modulated': nodegroup_midrib.outputs["X Modulated"], - 'Y': separate_xyz.outputs["Y"], - 'Vein Asymmetry': group_input.outputs["Vein Asymmetry"], - 'Vein Angle': group_input.outputs["Vein Angle"]}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Mesh': set_position_1, 'Attribute': capture_attribute.outputs[2], - 'X Modulated': nodegroup_midrib.outputs["X Modulated"], - 'Vein Coord': nodegroup_vein_coord_001, - 'Vein Value': nodegroup_apply_vein_midrib}) - - -@node_utils.to_nodegroup('nodegroup_nodegroup_midrib', singleton=False, type='GeometryNodeTree') -def nodegroup_nodegroup_midrib(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'X', 0.5), - ('NodeSocketFloat', 'Y', -0.6), - ('NodeSocketFloat', 'Midrib Length', 0.4), - ('NodeSocketFloat', 'Midrib Width', 1.0), - ('NodeSocketFloat', 'Stem Length', 0.8)]) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': 
group_input.outputs["Y"], 1: -0.6, 2: 0.6}) - - stem_shape = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': map_range.outputs["Result"]}, - label='Stem shape') - node_utils.assign_curve(stem_shape.mapping.curves[0], - [(0.0, 0.5), (0.25, 0.4828), (0.5, 0.4938), (0.75, 0.503), (0.8773, 0.5125), (1.0, 0.5)]) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': stem_shape, 3: -1.0}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: map_range_1.outputs["Result"], 1: group_input.outputs["X"]}, - attrs={'operation': 'SUBTRACT'}) - - noise_texture = nw.new_node(Nodes.NoiseTexture) - - map_range_5 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': noise_texture.outputs["Fac"], 3: -1.0}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: map_range_5.outputs["Result"], 1: 0.01}, - attrs={'operation': 'MULTIPLY'}) - - map_range_2 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': group_input.outputs["Y"], 1: -70.0, - 2: group_input.outputs["Midrib Length"], - 3: group_input.outputs["Midrib Width"], 4: 0.0}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: multiply, 1: map_range_2.outputs["Result"]}) - - absolute = nw.new_node(Nodes.Math, - input_kwargs={0: subtract}, - attrs={'operation': 'ABSOLUTE'}) - - subtract_1 = nw.new_node(Nodes.Math, - input_kwargs={0: add, 1: absolute}, - attrs={'operation': 'SUBTRACT'}) - - absolute_1 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Y"]}, - attrs={'operation': 'ABSOLUTE'}) - - map_range_3 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': absolute_1, 2: group_input.outputs["Stem Length"], 3: 1.0, 4: 0.0}) - - smooth_min = nw.new_node(Nodes.Math, - input_kwargs={0: subtract_1, 1: map_range_3.outputs["Result"], 2: 0.06}, - attrs={'operation': 'SMOOTH_MIN'}) - - divide = nw.new_node(Nodes.Math, - input_kwargs={0: 1.0, 1: smooth_min}, - attrs={'operation': 'DIVIDE', 'use_clamp': True}) - - map_range_4 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': divide, 1: 0.001, 2: 0.03, 3: 1.0, 4: 0.0}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'X Modulated': subtract, 'Midrib Value': map_range_4.outputs["Result"]}) - - -@node_utils.to_nodegroup('nodegroup_nodegroup_apply_vein_midrib', singleton=False, type='GeometryNodeTree') -def nodegroup_nodegroup_apply_vein_midrib(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'Midrib Value', 0.5), - ('NodeSocketFloat', 'Leaf Shape', 1.0), - ('NodeSocketFloat', 'Vein Density', 6.0), - ('NodeSocketFloat', 'Vein Coord - main', 0.0), - ('NodeSocketFloat', 'Vein Coord - 1', 0.0), - ('NodeSocketFloat', 'Vein Coord - 2', 0.0)]) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': group_input.outputs["Leaf Shape"], 1: -0.3, 2: 0.05, 3: 0.015, - 4: 0.0}) - - nodegroup = nw.new_node(nodegroup_node_group().name, - input_kwargs={'Coord': group_input.outputs["Vein Coord - 2"], - 'Shape': map_range.outputs["Result"], - 'Density': group_input.outputs["Vein Density"], 'Random Scale Seed': 3.57}) - - nodegroup_1 = nw.new_node(nodegroup_node_group().name, - input_kwargs={'Coord': group_input.outputs["Vein Coord - 1"], - 'Shape': map_range.outputs["Result"], - 'Density': group_input.outputs["Vein Density"], 'Random Scale Seed': 1.08}) - - vein = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'W': group_input.outputs["Vein Coord - main"], - 'Scale': group_input.outputs["Vein Density"], 'Randomness': 0.2}, - 
label='Vein', - attrs={'voronoi_dimensions': '1D'}) - - position = nw.new_node(Nodes.InputPosition) - - noise_texture = nw.new_node(Nodes.NoiseTexture, - input_kwargs={'Vector': position, 'Scale': 20.0}) - - map_range_3 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': noise_texture.outputs["Fac"], 3: -1.0}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: map_range_3.outputs["Result"], 1: 0.02}, - attrs={'operation': 'MULTIPLY'}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: vein.outputs["Distance"], 1: multiply}) - - map_range_4 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': add, 2: 0.03, 3: 1.0, 4: 0.0}) - - multiply_1 = nw.new_node(Nodes.Math, - input_kwargs={0: map_range.outputs["Result"], 1: map_range_4.outputs["Result"]}, - attrs={'operation': 'MULTIPLY'}) - - map_range_5 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': multiply_1, 1: 0.001, 2: 0.01, 3: 1.0, 4: 0.0}) - - multiply_2 = nw.new_node(Nodes.Math, - input_kwargs={0: nodegroup_1, 1: map_range_5.outputs["Result"]}, - attrs={'operation': 'MULTIPLY'}) - - multiply_3 = nw.new_node(Nodes.Math, - input_kwargs={0: nodegroup, 1: multiply_2}, - attrs={'operation': 'MULTIPLY'}) - - multiply_4 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Midrib Value"], 1: multiply_3}, - attrs={'operation': 'MULTIPLY'}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Vein Value': multiply_4}) - -@node_utils.to_nodegroup('nodegroup_nodegroup_move_to_origin', singleton=False, type='GeometryNodeTree') -def nodegroup_nodegroup_move_to_origin(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None)]) - - position = nw.new_node(Nodes.InputPosition) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': position}) - - attribute_statistic = nw.new_node(Nodes.AttributeStatistic, - input_kwargs={'Geometry': group_input.outputs["Geometry"], - 2: separate_xyz.outputs["Y"]}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: 0.0, 1: attribute_statistic.outputs["Min"]}, - attrs={'operation': 'SUBTRACT'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'Y': subtract}) - - set_position = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 'Offset': combine_xyz}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': set_position}) - - -@node_utils.to_nodegroup('nodegroup_nodegroup_leaf_rotate_x', singleton=False, type='GeometryNodeTree') -def nodegroup_nodegroup_leaf_rotate_x(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketFloat', 'To Max', -0.4)]) - - position_1 = nw.new_node(Nodes.InputPosition) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': position_1}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': separate_xyz.outputs["Y"], 4: group_input.outputs["To Max"]}, - attrs={'clamp': False}) - - vector_rotate = nw.new_node(Nodes.VectorRotate, - input_kwargs={'Vector': position_1, 'Angle': map_range.outputs["Result"]}, - attrs={'rotation_type': 'X_AXIS'}) - - set_position_1 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': group_input.outputs["Geometry"], 'Position': vector_rotate}) - - group_output = nw.new_node(Nodes.GroupOutput, - 
input_kwargs={'Geometry': set_position_1}) - - -@node_utils.to_nodegroup('nodegroup_nodegroup_sub_vein', singleton=False, type='GeometryNodeTree') -def nodegroup_nodegroup_sub_vein(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketFloat', 'X', 0.5), - ('NodeSocketFloat', 'Y', 0.0)]) - - absolute = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["X"]}, - attrs={'operation': 'ABSOLUTE'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': absolute, 'Y': group_input.outputs["Y"]}) - - voronoi_texture = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': combine_xyz, 'Scale': 30.0}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': voronoi_texture.outputs["Distance"], 2: 0.1, 4: 2.0}, - attrs={'clamp': False}) - - voronoi_texture_1 = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': combine_xyz, 'Scale': 150.0}, - attrs={'feature': 'DISTANCE_TO_EDGE'}) - - map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': voronoi_texture_1.outputs["Distance"], 2: 0.1}) - - add = nw.new_node(Nodes.Math, - input_kwargs={0: map_range.outputs["Result"], 1: map_range_1.outputs["Result"]}) - - multiply = nw.new_node(Nodes.Math, - input_kwargs={0: add, 1: -1.0}, - attrs={'operation': 'MULTIPLY'}) - - map_range_3 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': map_range_1.outputs["Result"], 4: -1.0}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Value': multiply, 'Color Value': map_range_3.outputs["Result"]}) - - -@node_utils.to_nodegroup('nodegroup_nodegroup_leaf_shader', singleton=False, type='ShaderNodeTree') -def nodegroup_nodegroup_leaf_shader(nw: NodeWrangler): - # Code generated using version 2.4.3 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketColor', 'Color', (0.8, 0.8, 0.8, 1.0))]) - - diffuse_bsdf = nw.new_node(Nodes.DiffuseBSDF, - input_kwargs={'Color': group_input.outputs["Color"]}) - - glossy_bsdf = nw.new_node('ShaderNodeBsdfGlossy', - input_kwargs={'Color': group_input.outputs["Color"], 'Roughness': 0.3}) - - mix_shader = nw.new_node(Nodes.MixShader, - input_kwargs={'Fac': 0.2, 1: diffuse_bsdf, 2: glossy_bsdf}) - - translucent_bsdf = nw.new_node(Nodes.TranslucentBSDF, - input_kwargs={'Color': group_input.outputs["Color"]}) - - mix_shader_1 = nw.new_node(Nodes.MixShader, - input_kwargs={'Fac': 0.3, 1: mix_shader, 2: translucent_bsdf}) - - group_output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Shader': mix_shader_1}) - - -def shader_stem_material(nw: NodeWrangler, stem_color_hsv=None): - # Code generated using version 2.4.3 of the node_transpiler - - if stem_color_hsv is None: - stem_color_hsv = (uniform(0.25, 0.32), uniform(0.6, 0.9), uniform(0.2, 0.6)) - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': hsv2rgba(stem_color_hsv)}) - - material_output = nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': principled_bsdf}) - - - - diff --git a/infinigen/assets/underwater/seaweed.py b/infinigen/assets/underwater/seaweed.py deleted file mode 100644 index 7d0e684ad..000000000 --- a/infinigen/assets/underwater/seaweed.py +++ /dev/null @@ -1,136 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this -# source tree. 
- -# Authors: Lingjie Mei - - -import colorsys - -import bpy -import numpy as np -from numpy.random import uniform - -from infinigen.assets.creatures.util.animation.driver_repeated import repeated_driver -from infinigen.assets.utils.decorate import read_co, subsurface2face_size, write_co -from infinigen.assets.utils.misc import assign_material -from infinigen.assets.utils.draw import make_circular_interp -import infinigen.core.util.blender as butil -from infinigen.core.placement.factory import AssetFactory -from infinigen.infinigen_gpl.extras.diff_growth import build_diff_growth -from infinigen.assets.utils.object import mesh2obj, data2mesh -from infinigen.assets.utils.mesh import polygon_angles -from infinigen.core.nodes.node_wrangler import NodeWrangler, Nodes -from infinigen.core import surface -from infinigen.core.util.color import hsv2rgba -from infinigen.core.util.random import log_uniform -from infinigen.core.util.math import FixedSeed -from infinigen.core.tagging import tag_object, tag_nodegroup -from infinigen.core.nodes.node_utils import build_color_ramp - -class SeaweedFactory(AssetFactory): - - def __init__(self, factory_seed, coarse=False): - super().__init__(factory_seed, coarse) - with FixedSeed(factory_seed): - self.base_hue = uniform(.0, .1) if uniform(0, 1) < .5 else uniform(.3, .4) - self.material = surface.shaderfunc_to_material(self.shader_seaweed, self.base_hue) - self.freq = 1 / log_uniform(200, 500) - - def create_asset(self, face_size=0.01, **params): - growth_vec = 0, 0, uniform(3., 6.) - inhibit_shell = uniform(.6, .8) - max_polygons = int(log_uniform(2e3, 1e4)) - fac_noise = uniform(1.5, 2.5) - repulsion_radius = log_uniform(1., 1.5) - obj = self.differential_growth_make(fac_noise=fac_noise, inhibit_shell=inhibit_shell, - repulsion_radius=repulsion_radius, growth_vec=growth_vec, dt=.25, - max_polygons=max_polygons) - - obj.scale = [2 / max(obj.dimensions)] * 3 - obj.scale[-1] *= uniform(1.5, 2) - obj.location[-1] -= .02 - butil.apply_transform(obj, loc=True) - f_scale = make_circular_interp(2, 5, 5, log_uniform) - x, y, z = read_co(obj).T - scale = f_scale(np.arctan2(y, x) + np.pi) - co = np.stack([scale * x, scale * y, z], -1) - write_co(obj, co) - subsurface2face_size(obj, face_size / 2) - butil.modify_mesh(obj, 'TRIANGULATE') - butil.modify_mesh(obj, 'SMOOTH', factor=uniform(-.8, .8)) - texture_type = np.random.choice(['STUCCI', 'MARBLE']) - texture = bpy.data.textures.new(name='seaweed', type=texture_type) - texture.noise_scale = log_uniform(.05, .2) - butil.modify_mesh(obj, 'DISPLACE', True, strength=uniform(.0, .03), texture=texture) - assign_material(obj, self.material) - self.animate_bend(obj) - tag_object(obj, 'seaweed') - return obj - - def animate_bend(self, obj): - obj, mod = butil.modify_mesh(obj, 'SIMPLE_DEFORM', False, deform_method='BEND', deform_axis='Y', - return_mod=True) - driver = mod.driver_add('angle').driver - start_angle = uniform(-np.pi / 4, 0) - driver.expression = repeated_driver(start_angle, start_angle + uniform(np.pi * .2, np.pi * .8), - self.freq) - - @staticmethod - def differential_growth_make(**kwargs): - n_base = np.random.randint(5, 7) - angles = polygon_angles(n_base) - vertices = np.block([[np.cos(angles), 0], [np.sin(angles), 0], [np.zeros(n_base + 1)]]).T - faces = np.stack([np.arange(n_base), np.roll(np.arange(n_base), 1), np.full(n_base, n_base)]).T - obj = mesh2obj(data2mesh(vertices, [], faces, 'diff_growth')) - - boundary = obj.vertex_groups.new(name='Boundary') - boundary.add(list(range(n_base)), 1.0, 'REPLACE') - 
build_diff_growth(obj, boundary.index, **kwargs) - return obj - - @staticmethod - def geo_seaweed_waves(nw: NodeWrangler): - translation_scale = uniform(0., .25) - expand_scale = uniform(.2, .3) - geometry = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketGeometry', 'Geometry', None)]) - x, y, z = nw.separate(nw.new_node(Nodes.InputPosition)) - angle = np.random.uniform(0, 2 * np.pi) - displacement = nw.scale(nw.add(nw.scale(nw.combine(np.cos(angle), np.sin(angle), 0), - nw.scalar_multiply(nw.musgrave(10), translation_scale)), - nw.scale(nw.combine(x, y, 0), expand_scale)), z) - geometry = nw.new_node(Nodes.SetPosition, input_kwargs={'Geometry': geometry, 'Offset': displacement}) - nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': geometry}) - - @staticmethod - def shader_seaweed(nw: NodeWrangler, base_hue=.3): - h_perturb = uniform(-.1, .1) - s_perturb = uniform(-.1, -.0) - v_perturb = log_uniform(1., 2) - - def map_perturb(h, s, v): - return hsv2rgba(h + h_perturb, s + s_perturb, v / v_perturb) - - subsurface_ratio = .01 - roughness = .8 - mix_ratio = uniform(.2, .4) - specular = .2 - - color_1 = map_perturb(base_hue, uniform(.6, .8), .25) - color_2 = map_perturb(base_hue - uniform(.05, .1), uniform(.6, .8), .15) - cr = build_color_ramp(nw, nw.musgrave(uniform(5, 10)), [0, .3, .7, 1.], - [color_1, color_1, color_2, color_2]) - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, input_kwargs={ - 'Base Color': cr, - 'Subsurface': subsurface_ratio, - 'Subsurface Radius': (.01, .01, .01), - 'Subsurface Color': map_perturb(base_hue, .6, .2), - 'Roughness': roughness, - 'Specular': specular - }) - - translucent_bsdf = nw.new_node(Nodes.TransparentBSDF, input_kwargs={'Color': cr}) - - mix_shader = nw.new_node(Nodes.MixShader, [mix_ratio, principled_bsdf, translucent_bsdf]) - return mix_shader diff --git a/infinigen/assets/underwater/urchin.py b/infinigen/assets/underwater/urchin.py deleted file mode 100644 index abd58b8ea..000000000 --- a/infinigen/assets/underwater/urchin.py +++ /dev/null @@ -1,128 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
- -# Authors: Lingjie Mei - - -import colorsys -import bpy -from numpy.random import uniform - -import infinigen.core.util.blender as butil -from infinigen.assets.creatures.util.animation.driver_repeated import repeated_driver -from infinigen.assets.utils.object import new_icosphere, separate_loose -from infinigen.assets.utils.decorate import geo_extension -from infinigen.assets.utils.misc import assign_material -from infinigen.core.util.color import hsv2rgba -from infinigen.core.util.random import log_uniform -from infinigen.core.nodes.node_info import Nodes -from infinigen.core.nodes.node_wrangler import NodeWrangler -from infinigen.core.placement.detail import adapt_mesh_resolution -from infinigen.core.placement.factory import AssetFactory -from infinigen.core import surface -from infinigen.core.util.math import FixedSeed -from infinigen.core.tagging import tag_object, tag_nodegroup - - -class UrchinFactory(AssetFactory): - - def __init__(self, factory_seed, coarse=False): - super().__init__(factory_seed, coarse) - with FixedSeed(factory_seed): - self.base_hue = uniform(-.25, .15) % 1 - self.materials = [surface.shaderfunc_to_material(shader, self.base_hue) for shader in - [self.shader_spikes, self.shader_girdle, self.shader_base]] - self.freq = 1 / log_uniform(100, 200) - - def create_asset(self, placeholder, face_size=0.01, **params): - obj = new_icosphere(subdivisions=4) - surface.add_geomod(obj, geo_extension, apply=True) - obj.scale[-1] = uniform(.8, 1.) - butil.apply_transform(obj) - butil.modify_mesh(obj, 'BEVEL', offset_type='PERCENT', width_pct=25, angle_limit=0) - surface.add_geomod(obj, self.geo_extrude, apply=True, attributes=['spike', 'girdle'], - domains=['FACE'] * 2) - levels = 1 - butil.modify_mesh(obj, 'SUBSURF', apply=True, levels=levels, render_levels=levels) - obj.scale = [2 / max(obj.dimensions)] * 3 - obj.scale[-1] *= log_uniform(.6, 1.2) - butil.apply_transform(obj) - adapt_mesh_resolution(obj, face_size, method='subdiv_by_area') - obj = separate_loose(obj) - butil.modify_mesh(obj, 'DISPLACE', texture=bpy.data.textures.new(name='urchin', type='STUCCI'), - strength=.005, mid_level=0) - surface.add_geomod(obj, self.geo_material_index, apply=True, input_attributes=[None, 'spike', 'girdle']) - assign_material(obj, self.materials) - self.animate_stretch(obj) - tag_object(obj, 'urchin') - return obj - - def animate_stretch(self, obj): - obj, mod = butil.modify_mesh(obj, 'SIMPLE_DEFORM', False, return_mod=True, deform_method='STRETCH', - deform_axis='Z') - driver = mod.driver_add('factor').driver - driver.expression = repeated_driver(-.1, .1, self.freq) - - @staticmethod - def geo_extrude(nw: NodeWrangler): - face_prob = .98 - girdle_height = .1 - extrude_height = log_uniform(1., 5.) 
- perturb = .1 - girdle_size = uniform(.6, 1) - geometry = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketGeometry', 'Geometry', None)]) - face_vertices = nw.new_node(Nodes.FaceNeighbors) - selection = nw.boolean_math('AND', nw.compare('GREATER_EQUAL', face_vertices, 5), - nw.bernoulli(face_prob)) - geometry, top, _ = nw.new_node(Nodes.ExtrudeMesh, [geometry, selection, None, girdle_height]).outputs - geometry, top, girdle = nw.new_node(Nodes.ExtrudeMesh, [geometry, top, None, 1e-3]).outputs - geometry = nw.new_node(Nodes.ScaleElements, [geometry, top, girdle_size]) - geometry, top, _ = nw.new_node(Nodes.ExtrudeMesh, [geometry, top, None, -girdle_height]).outputs - direction = nw.scale(nw.add(nw.new_node(Nodes.InputNormal), nw.uniform([-perturb] * 3, [perturb] * 3)), - nw.uniform(.5 * extrude_height, extrude_height)) - geometry, top, side = nw.new_node(Nodes.ExtrudeMesh, [geometry, top, direction]).outputs - geometry = nw.new_node(Nodes.ScaleElements, [geometry, top, .2]) - spike = nw.boolean_math('OR', top, side) - nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': geometry, 'Spike': spike, 'Girdle': girdle}) - - @staticmethod - def shader_spikes(nw: NodeWrangler, base_hue): - transmission = uniform(.95, .99) - subsurface = uniform(.1, .2) - roughness = uniform(.5, .8) - color = hsv2rgba(base_hue, uniform(.5, 1.), log_uniform(.05, 1.)) - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, input_kwargs={ - 'Base Color': color, - 'Roughness': roughness, - 'Subsurface': subsurface, - 'Subsurface Color': color, - 'Transmission': transmission - }) - return principled_bsdf - - @staticmethod - def shader_girdle(nw: NodeWrangler, base_hue): - roughness = uniform(.5, .8) - color = hsv2rgba(base_hue, uniform(.4, .5), log_uniform(.02, .1)) - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': color, 'Roughness': roughness}) - return principled_bsdf - - @staticmethod - def shader_base(nw: NodeWrangler, base_hue): - roughness = uniform(.5, .8) - color = hsv2rgba(base_hue, uniform(.8, 1.), log_uniform(.01, .02)) - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': color, 'Roughness': roughness}) - return principled_bsdf - - @staticmethod - def geo_material_index(nw: NodeWrangler): - geometry, spike, girdle = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None), - ('NodeSocketFloat', 'Spike', None), - ('NodeSocketFloat', 'Girdle', None)]).outputs[:-1] - geometry = nw.new_node(Nodes.SetMaterialIndex, [geometry, None, 2]) - geometry = nw.new_node(Nodes.SetMaterialIndex, [geometry, spike, 0]) - geometry = nw.new_node(Nodes.SetMaterialIndex, [geometry, girdle, 1]) - nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': geometry}) diff --git a/infinigen/assets/utils/autobevel.py b/infinigen/assets/utils/autobevel.py index 0565a9242..9e62b8e90 100644 --- a/infinigen/assets/utils/autobevel.py +++ b/infinigen/assets/utils/autobevel.py @@ -5,22 +5,19 @@ # Authors: Alexander Raistrick import bpy - import numpy as np from numpy.random import uniform from infinigen.core.util import blender as butil -from infinigen.core.util.random import random_general as rg -class BevelSharp: +class BevelSharp: def __init__( self, mult=1, angle_min_deg=70, segments=None, ): - self.amount = uniform(0.001, 0.006) self.mult = mult self.angle_min_deg = angle_min_deg @@ -28,26 +25,23 @@ def __init__( if segments is None: segments = 4 if uniform() < 0 else 1 self.segments = segments - + def __call__(self, obj): 
butil.select_none() butil.select(obj) - with butil.ViewportMode(obj, 'EDIT'): - - bpy.ops.mesh.select_all(action='SELECT') + with butil.ViewportMode(obj, "EDIT"): + bpy.ops.mesh.select_all(action="SELECT") bpy.ops.mesh.tris_convert_to_quads() - bpy.ops.mesh.select_mode(use_extend=False, use_expand=False, type='EDGE') - bpy.ops.mesh.select_all(action='DESELECT') - + bpy.ops.mesh.select_mode(use_extend=False, use_expand=False, type="EDGE") + bpy.ops.mesh.select_all(action="DESELECT") + angle = np.deg2rad(self.angle_min_deg) - - bpy.ops.mesh.edges_select_sharp( - sharpness=angle - ) + + bpy.ops.mesh.edges_select_sharp(sharpness=angle) bpy.ops.mesh.bevel( offset=self.amount * self.mult, segments=self.segments, - affect='EDGES', - offset_type='WIDTH' - ) \ No newline at end of file + affect="EDGES", + offset_type="WIDTH", + ) diff --git a/infinigen/assets/utils/bbox_from_mesh.py b/infinigen/assets/utils/bbox_from_mesh.py index eec1c42b9..8e762acd9 100644 --- a/infinigen/assets/utils/bbox_from_mesh.py +++ b/infinigen/assets/utils/bbox_from_mesh.py @@ -7,39 +7,60 @@ import bpy import numpy as np -from infinigen.core.util import blender as butil - -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler from infinigen.core.placement.factory import AssetFactory +from infinigen.core.util import blender as butil + -@node_utils.to_nodegroup('nodegroup_cube_from_corners', singleton=True) +@node_utils.to_nodegroup("nodegroup_cube_from_corners", singleton=True) def nodegroup_cube_from_corners(nw: NodeWrangler): # Code generated using version 2.6.5 of the node_transpiler - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketVector', 'min_corner', (0.0000, 0.0000, 0.0000)), - ('NodeSocketVector', 'max_corner', (0.0000, 0.0000, 0.0000))]) - - subtract = nw.new_node(Nodes.VectorMath, - input_kwargs={0: group_input.outputs["max_corner"], 1: group_input.outputs["min_corner"]}, - attrs={'operation': 'SUBTRACT'}) - - cube = nw.new_node(Nodes.MeshCube, input_kwargs={'Size': subtract.outputs["Vector"]}) - - mix = nw.new_node(Nodes.Mix, - input_kwargs={4: group_input.outputs["min_corner"], 5: group_input.outputs["max_corner"]}, - attrs={'data_type': 'VECTOR'}) - - transform_geometry = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': cube.outputs["Mesh"], 'Translation': mix.outputs[1]}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': transform_geometry}) + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVector", "min_corner", (0.0000, 0.0000, 0.0000)), + ("NodeSocketVector", "max_corner", (0.0000, 0.0000, 0.0000)), + ], + ) -def union_all_bbox(obj: bpy.types.Object): + subtract = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: group_input.outputs["max_corner"], + 1: group_input.outputs["min_corner"], + }, + attrs={"operation": "SUBTRACT"}, + ) + + cube = nw.new_node( + Nodes.MeshCube, input_kwargs={"Size": subtract.outputs["Vector"]} + ) + + mix = nw.new_node( + Nodes.Mix, + input_kwargs={ + 4: group_input.outputs["min_corner"], + 5: group_input.outputs["max_corner"], + }, + attrs={"data_type": "VECTOR"}, + ) + transform_geometry = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": cube.outputs["Mesh"], "Translation": mix.outputs[1]}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": transform_geometry} + ) + + +def union_all_bbox(obj: bpy.types.Object): mins, maxs 
= None, None for oc in butil.iter_object_tree(obj): - if not oc.type == 'MESH': + if not oc.type == "MESH": continue points = butil.apply_matrix_world(oc, np.array(oc.bound_box)) pmins, pmaxs = points.min(axis=0), points.max(axis=0) @@ -48,33 +69,35 @@ def union_all_bbox(obj: bpy.types.Object): return mins, maxs + def box_from_corners(min_corner, max_corner): - bbox = butil.modify_mesh( - butil.spawn_vert(), - 'NODES', - apply=True, - node_group=nodegroup_cube_from_corners(), - ng_inputs=dict(min_corner=min_corner, max_corner=max_corner) + butil.spawn_vert(), + "NODES", + apply=True, + node_group=nodegroup_cube_from_corners(), + ng_inputs=dict(min_corner=min_corner, max_corner=max_corner), ) - + return bbox -def bbox_mesh_from_hipoly(gen: AssetFactory, inst_seed: int, use_pholder=False): +def bbox_mesh_from_hipoly(gen: AssetFactory, inst_seed: int, use_pholder=False): objs = [] - objs.append(gen.spawn_placeholder(inst_seed, loc=(0,0,0), rot=(0,0,0))) + objs.append(gen.spawn_placeholder(inst_seed, loc=(0, 0, 0), rot=(0, 0, 0))) if not use_pholder: objs.append(gen.spawn_asset(inst_seed, placeholder=objs[-1])) min_corner, max_corner = union_all_bbox(objs[-1]) if ( - min_corner is None or - max_corner is None or - np.abs(min_corner - max_corner).sum() < 1e-5 + min_corner is None + or max_corner is None + or np.abs(min_corner - max_corner).sum() < 1e-5 ): - raise ValueError(f'{gen} spawned {objs[-1].name=} with total bbox {min_corner, max_corner}, invalid') + raise ValueError( + f"{gen} spawned {objs[-1].name=} with total bbox {min_corner, max_corner}, invalid" + ) bbox = box_from_corners(min_corner, max_corner) @@ -83,5 +106,7 @@ def bbox_mesh_from_hipoly(gen: AssetFactory, inst_seed: int, use_pholder=False): cleanup.update(butil.iter_object_tree(o)) butil.delete(list(cleanup)) - bbox.name = f'{gen.__class__.__name__}({gen.factory_seed}).bbox_placeholder({inst_seed})' - return bbox \ No newline at end of file + bbox.name = ( + f"{gen.__class__.__name__}({gen.factory_seed}).bbox_placeholder({inst_seed})" + ) + return bbox diff --git a/infinigen/assets/utils/decorate.py b/infinigen/assets/utils/decorate.py index b538cd71d..14fca777c 100644 --- a/infinigen/assets/utils/decorate.py +++ b/infinigen/assets/utils/decorate.py @@ -8,87 +8,94 @@ import logging from collections.abc import Iterable -import bpy import bmesh +import bpy import numpy as np from numpy.random import uniform from trimesh.points import remove_close +from infinigen.core import surface from infinigen.core.nodes.node_info import Nodes from infinigen.core.nodes.node_wrangler import NodeWrangler -from infinigen.core import surface from infinigen.core.surface import write_attr_data - from infinigen.core.util import blender as butil from infinigen.core.util.math import normalize def multi_res(obj): - multi_res = obj.modifiers.new(name='multires', type='MULTIRES') - bpy.ops.object.multires_subdivide(modifier=multi_res.name, mode='CATMULL_CLARK') + multi_res = obj.modifiers.new(name="multires", type="MULTIRES") + bpy.ops.object.multires_subdivide(modifier=multi_res.name, mode="CATMULL_CLARK") butil.apply_modifiers(obj) -def geo_extension(nw: NodeWrangler, noise_strength=.2, noise_scale=2., musgrave_dimensions='3D'): +def geo_extension( + nw: NodeWrangler, noise_strength=0.2, noise_scale=2.0, musgrave_dimensions="3D" +): noise_strength = uniform(noise_strength / 2, noise_strength) - noise_scale = uniform(noise_scale * .7, noise_scale * 1.4) - geometry = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketGeometry', 'Geometry', None)]) 
+ noise_scale = uniform(noise_scale * 0.7, noise_scale * 1.4) + geometry = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) pos = nw.new_node(Nodes.InputPosition) - direction = nw.scale(pos, nw.scalar_divide(1, nw.vector_math('LENGTH', pos))) + direction = nw.scale(pos, nw.scalar_divide(1, nw.vector_math("LENGTH", pos))) direction = nw.add(direction, uniform(-1, 1, 3)) musgrave = nw.scalar_multiply( nw.scalar_add( nw.new_node( - Nodes.MusgraveTexture, [direction], input_kwargs={'Scale': noise_scale}, - attrs={'musgrave_dimensions': musgrave_dimensions} - ), .25 - ), noise_strength + Nodes.MusgraveTexture, + [direction], + input_kwargs={"Scale": noise_scale}, + attrs={"musgrave_dimensions": musgrave_dimensions}, + ), + 0.25, + ), + noise_strength, ) geometry = nw.new_node( Nodes.SetPosition, - input_kwargs={'Geometry': geometry, 'Offset': nw.scale(musgrave, pos)} + input_kwargs={"Geometry": geometry, "Offset": nw.scale(musgrave, pos)}, ) - nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': geometry}) + nw.new_node(Nodes.GroupOutput, input_kwargs={"Geometry": geometry}) def subsurface2face_size(obj, face_size): arr = np.zeros(len(obj.data.polygons)) - obj.data.polygons.foreach_get('area', arr) + obj.data.polygons.foreach_get("area", arr) area = np.mean(arr) if area < 1e-6: - logging.warning(f'subsurface2face_size found {area=}, quitting to avoid NaN') + logging.warning(f"subsurface2face_size found {area=}, quitting to avoid NaN") return try: levels = int(np.ceil(np.log2(area / face_size))) except ValueError: return # catch nans if levels > 0: - butil.modify_mesh(obj, 'SUBSURF', levels=levels, render_levels=levels) + butil.modify_mesh(obj, "SUBSURF", levels=levels, render_levels=levels) -def read_selected(obj, domain='VERT'): +def read_selected(obj, domain="VERT"): match domain: - case 'VERT': + case "VERT": arr = np.zeros(len(obj.data.vertices), int) - obj.data.vertices.foreach_get('select', arr) - case 'EDGE': + obj.data.vertices.foreach_get("select", arr) + case "EDGE": arr = np.zeros(len(obj.data.edges), int) - obj.data.edges.foreach_get('select', arr) + obj.data.edges.foreach_get("select", arr) case _: arr = np.zeros(len(obj.data.faces), int) - obj.data.faces.foreach_get('select', arr) + obj.data.faces.foreach_get("select", arr) return arr.ravel() def read_co(obj): arr = np.zeros(len(obj.data.vertices) * 3) - obj.data.vertices.foreach_get('co', arr) + obj.data.vertices.foreach_get("co", arr) return arr.reshape(-1, 3) def read_edges(obj): arr = np.zeros(len(obj.data.edges) * 2, dtype=int) - obj.data.edges.foreach_get('vertices', arr) + obj.data.edges.foreach_get("vertices", arr) return arr.reshape(-1, 2) @@ -108,42 +115,42 @@ def read_edge_length(obj): def read_center(obj): arr = np.zeros(len(obj.data.polygons) * 3) - obj.data.polygons.foreach_get('center', arr) + obj.data.polygons.foreach_get("center", arr) return arr.reshape(-1, 3) def read_normal(obj): arr = np.zeros(len(obj.data.polygons) * 3) - obj.data.polygons.foreach_get('normal', arr) + obj.data.polygons.foreach_get("normal", arr) return arr.reshape(-1, 3) def read_area(obj): arr = np.zeros(len(obj.data.polygons)) - obj.data.polygons.foreach_get('area', arr) + obj.data.polygons.foreach_get("area", arr) return arr.reshape(-1) def read_loop_vertices(obj): arr = np.zeros(len(obj.data.loops), dtype=int) - obj.data.loops.foreach_get('vertex_index', arr) + obj.data.loops.foreach_get("vertex_index", arr) return arr.reshape(-1) def read_loop_edges(obj): arr = np.zeros(len(obj.data.loops), 
dtype=int) - obj.data.loops.foreach_get('edge_index', arr) + obj.data.loops.foreach_get("edge_index", arr) return arr.reshape(-1) def read_uv(obj): arr = np.zeros(len(obj.data.loops) * 2) - obj.data.uv_layers.active.data.foreach_get('uv', arr) + obj.data.uv_layers.active.data.foreach_get("uv", arr) return arr.reshape(-1, 2) def write_uv(obj, arr): - obj.data.uv_layers.active.data.foreach_set('uv', arr.reshape(-1)) + obj.data.uv_layers.active.data.foreach_set("uv", arr.reshape(-1)) def read_base_co(obj): @@ -151,44 +158,46 @@ def read_base_co(obj): obj = obj.evaluated_get(dg) mesh = obj.to_mesh() arr = np.zeros(len(mesh.vertices) * 3) - mesh.vertices.foreach_get('co', arr) + mesh.vertices.foreach_get("co", arr) return arr.reshape(-1, 3) def write_co(obj, arr): try: - obj.data.vertices.foreach_set('co', arr.reshape(-1)) + obj.data.vertices.foreach_set("co", arr.reshape(-1)) except RuntimeError as e: raise RuntimeError( - f'Failed to set vertices.co on {obj.name=}. Object has {len(obj.data.vertices)} verts, ' - f'{arr.shape=}' + f"Failed to set vertices.co on {obj.name=}. Object has {len(obj.data.vertices)} verts, " + f"{arr.shape=}" ) from e def read_material_index(obj): arr = np.zeros(len(obj.data.polygons), dtype=int) - obj.data.polygons.foreach_get('material_index', arr) + obj.data.polygons.foreach_get("material_index", arr) return arr def read_loop_starts(obj): arr = np.zeros(len(obj.data.polygons), dtype=int) - obj.data.polygons.foreach_get('loop_start', arr) + obj.data.polygons.foreach_get("loop_start", arr) return arr def read_loop_totals(obj): arr = np.zeros(len(obj.data.polygons), dtype=int) - obj.data.polygons.foreach_get('loop_total', arr) + obj.data.polygons.foreach_get("loop_total", arr) return arr def write_material_index(obj, arr): - obj.data.polygons.foreach_set('material_index', arr.reshape(-1)) + obj.data.polygons.foreach_set("material_index", arr.reshape(-1)) def set_shade_smooth(obj): - write_attr_data(obj, 'use_smooth', np.ones(len(obj.data.polygons), dtype=int), 'INT', 'FACE') + write_attr_data( + obj, "use_smooth", np.ones(len(obj.data.polygons), dtype=int), "INT", "FACE" + ) def displace_vertices(obj, fn): @@ -208,7 +217,7 @@ def remove_vertices(obj, to_delete): x, y, z = read_co(obj).T to_delete = to_delete(x, y, z) to_delete = np.nonzero(to_delete)[0] - with butil.ViewportMode(obj, 'EDIT'): + with butil.ViewportMode(obj, "EDIT"): bm = bmesh.from_edit_mesh(obj.data) bm.verts.ensure_lookup_table() geom = [bm.verts[_] for _ in to_delete] @@ -222,11 +231,11 @@ def remove_edges(obj, to_delete): x, y, z = read_edge_center(obj).T to_delete = to_delete(x, y, z) to_delete = np.nonzero(to_delete)[0] - with butil.ViewportMode(obj, 'EDIT'): + with butil.ViewportMode(obj, "EDIT"): bm = bmesh.from_edit_mesh(obj.data) bm.edges.ensure_lookup_table() geom = [bm.edges[_] for _ in to_delete] - bmesh.ops.delete(bm, geom=geom, context='EDGES_FACES') + bmesh.ops.delete(bm, geom=geom, context="EDGES_FACES") bmesh.update_edit_mesh(obj.data) return obj @@ -236,16 +245,16 @@ def remove_faces(obj, to_delete, remove_loose=True): x, y, z = read_center(obj).T to_delete = to_delete(x, y, z) to_delete = np.nonzero(to_delete)[0] - with butil.ViewportMode(obj, 'EDIT'): + with butil.ViewportMode(obj, "EDIT"): bm = bmesh.from_edit_mesh(obj.data) bm.faces.ensure_lookup_table() geom = [bm.faces[_] for _ in to_delete] - bmesh.ops.delete(bm, geom=geom, context='FACES_ONLY') + bmesh.ops.delete(bm, geom=geom, context="FACES_ONLY") bmesh.update_edit_mesh(obj.data) if remove_loose: - 
bpy.ops.mesh.select_mode(type='EDGE') + bpy.ops.mesh.select_mode(type="EDGE") bpy.ops.mesh.select_loose() - bpy.ops.mesh.delete(type='EDGE') + bpy.ops.mesh.delete(type="EDGE") return obj @@ -254,9 +263,9 @@ def select_vertices(obj, to_select): x, y, z = read_co(obj).T to_select = to_select(x, y, z) to_select = np.nonzero(to_select)[0] - with butil.ViewportMode(obj, 'EDIT'): - bpy.ops.mesh.select_mode(type='VERT') - bpy.ops.mesh.select_all(action='DESELECT') + with butil.ViewportMode(obj, "EDIT"): + bpy.ops.mesh.select_mode(type="VERT") + bpy.ops.mesh.select_all(action="DESELECT") bm = bmesh.from_edit_mesh(obj.data) bm.verts.ensure_lookup_table() for i in to_select: @@ -271,9 +280,9 @@ def select_edges(obj, to_select): x, y, z = read_edge_center(obj).T to_select = to_select(x, y, z) to_select = np.nonzero(to_select)[0] - with butil.ViewportMode(obj, 'EDIT'): - bpy.ops.mesh.select_mode(type='EDGE') - bpy.ops.mesh.select_all(action='DESELECT') + with butil.ViewportMode(obj, "EDIT"): + bpy.ops.mesh.select_mode(type="EDGE") + bpy.ops.mesh.select_all(action="DESELECT") bm = bmesh.from_edit_mesh(obj.data) bm.edges.ensure_lookup_table() for i in to_select: @@ -288,9 +297,9 @@ def select_faces(obj, to_select): x, y, z = read_center(obj).T to_select = to_select(x, y, z) to_select = np.nonzero(to_select)[0] - with butil.ViewportMode(obj, 'EDIT'): - bpy.ops.mesh.select_mode(type='FACE') - bpy.ops.mesh.select_all(action='DESELECT') + with butil.ViewportMode(obj, "EDIT"): + bpy.ops.mesh.select_mode(type="FACE") + bpy.ops.mesh.select_all(action="DESELECT") bm = bmesh.from_edit_mesh(obj.data) bm.faces.ensure_lookup_table() for i in to_select: @@ -300,28 +309,30 @@ def select_faces(obj, to_select): return obj -def write_attribute(obj, fn, name, domain="POINT", data_type='FLOAT'): +def write_attribute(obj, fn, name, domain="POINT", data_type="FLOAT"): def geo_attribute(nw: NodeWrangler): - geometry = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketGeometry', 'Geometry', None)]) + geometry = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) attr = surface.eval_argument(nw, fn, position=nw.new_node(Nodes.InputPosition)) geometry = nw.new_node( Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': geometry, 'Name': name, 'Value': attr}, - attrs={'domain': domain, 'data_type': data_type} + input_kwargs={"Geometry": geometry, "Name": name, "Value": attr}, + attrs={"domain": domain, "data_type": data_type}, ) - nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': geometry}) + nw.new_node(Nodes.GroupOutput, input_kwargs={"Geometry": geometry}) surface.add_geomod(obj, geo_attribute, apply=True) def distance2boundary(obj): - with butil.ViewportMode(obj, 'EDIT'): - bpy.ops.mesh.select_all(action='SELECT') + with butil.ViewportMode(obj, "EDIT"): + bpy.ops.mesh.select_all(action="SELECT") bpy.ops.mesh.region_to_loop() - with butil.ViewportMode(obj, 'EDIT'): + with butil.ViewportMode(obj, "EDIT"): bm = bmesh.from_edit_mesh(obj.data) bm.verts.ensure_lookup_table() - distance = np.full(len(obj.data.vertices), -100.) 
+ distance = np.full(len(obj.data.vertices), -100.0) queue = set(v.index for v in bm.verts if v.select) d = 0 while True: @@ -337,15 +348,15 @@ def distance2boundary(obj): d += 1 distance[distance < 0] = 0 distance /= max(d, 1) - write_attr_data(obj, 'distance', distance) + write_attr_data(obj, "distance", distance) return distance def mirror(obj, axis=0): obj.scale[axis] = -1 butil.apply_transform(obj) - with butil.ViewportMode(obj, 'EDIT'): - bpy.ops.mesh.select_all(action='SELECT') + with butil.ViewportMode(obj, "EDIT"): + bpy.ops.mesh.select_all(action="SELECT") bpy.ops.mesh.flip_normals() return obj @@ -353,17 +364,23 @@ def mirror(obj, axis=0): def subsurf(obj, levels, simple=False): if levels > 0: butil.modify_mesh( - obj, 'SUBSURF', levels=levels, render_levels=levels, - subdivision_type='SIMPLE' if simple else "CATMULL_CLARK" + obj, + "SUBSURF", + levels=levels, + render_levels=levels, + subdivision_type="SIMPLE" if simple else "CATMULL_CLARK", ) def subdivide_edge_ring(obj, cuts=64, axis=(0, 0, 1), **kwargs): butil.select_none() - with butil.ViewportMode(obj, 'EDIT'): + with butil.ViewportMode(obj, "EDIT"): bm = bmesh.from_edit_mesh(obj.data) bm.edges.ensure_lookup_table() - selected = np.abs((read_edge_direction(obj) * np.array(axis)[np.newaxis, :]).sum(1)) > 1 - 1e-3 + selected = ( + np.abs((read_edge_direction(obj) * np.array(axis)[np.newaxis, :]).sum(1)) + > 1 - 1e-3 + ) edges = [bm.edges[i] for i in np.nonzero(selected)[0]] bmesh.ops.subdivide_edgering(bm, edges=edges, cuts=int(cuts), **kwargs) bmesh.update_edit_mesh(obj.data) @@ -377,18 +394,18 @@ def solidify(obj, axis, thickness): v = np.zeros(3) v[axes[1]] = thickness butil.select_none() - with butil.ViewportMode(obj, 'EDIT'): - bpy.ops.mesh.select_all(action='SELECT') - bpy.ops.mesh.extrude_edges_move(TRANSFORM_OT_translate={'value': u}) - bpy.ops.mesh.select_all(action='SELECT') - bpy.ops.mesh.extrude_region_move(TRANSFORM_OT_translate={'value': v}) + with butil.ViewportMode(obj, "EDIT"): + bpy.ops.mesh.select_all(action="SELECT") + bpy.ops.mesh.extrude_edges_move(TRANSFORM_OT_translate={"value": u}) + bpy.ops.mesh.select_all(action="SELECT") + bpy.ops.mesh.extrude_region_move(TRANSFORM_OT_translate={"value": v}) obj.location = -(u + v) / 2 butil.apply_transform(obj, True) return obj def decimate(points, n): - dist = .1 + dist = 0.1 ratio = 1.2 while True: culled = remove_close(points, dist)[0] @@ -402,7 +419,7 @@ def decimate(points, n): def remove_duplicate_edges(obj): remove_faces(obj, np.ones_like(len(obj.data.polygons)), remove_loose=False) - with butil.ViewportMode(obj, 'EDIT'): + with butil.ViewportMode(obj, "EDIT"): bm = bmesh.from_edit_mesh(obj.data) bm.verts.ensure_lookup_table() counts = [] diff --git a/infinigen/assets/utils/draw.py b/infinigen/assets/utils/draw.py index 1f6f221cc..5e95fc9a8 100644 --- a/infinigen/assets/utils/draw.py +++ b/infinigen/assets/utils/draw.py @@ -7,13 +7,18 @@ from collections.abc import Sized -import bpy import bmesh +import bpy import numpy as np from numpy.random import uniform from scipy.interpolate import interp1d -from infinigen.assets.utils.decorate import read_co, remove_vertices, write_attribute, write_co +from infinigen.assets.utils.decorate import ( + read_co, + remove_vertices, + write_attribute, + write_co, +) from infinigen.assets.utils.mesh import polygon_angles from infinigen.assets.utils.misc import make_circular, make_circular_angle from infinigen.assets.utils.object import data2mesh, mesh2obj, separate_loose @@ -23,7 +28,7 @@ from infinigen.core.util import 
blender as butil -def shape_by_angles(obj, angles, scales=None, displacements=None, method='quadratic'): +def shape_by_angles(obj, angles, scales=None, displacements=None, method="quadratic"): x, y, z = read_co(obj).T vert_angles = np.arctan2(y, x) if scales is not None: @@ -39,7 +44,7 @@ def shape_by_angles(obj, angles, scales=None, displacements=None, method='quadra return obj -def shape_by_xs(obj, xs, displacements, method='quadratic'): +def shape_by_xs(obj, xs, displacements, method="quadratic"): co = read_co(obj) f = interp1d(xs, displacements, method, bounds_error=False, fill_value=0) vert_displacements = f(co[:, 0]) @@ -49,14 +54,21 @@ def shape_by_xs(obj, xs, displacements, method='quadratic'): def surface_from_func(fn, div_x=16, div_y=16, size_x=2, size_y=2): - x, y = np.meshgrid(np.linspace(-size_x / 2, size_x / 2, div_x + 1), - np.linspace(-size_y / 2, size_y / 2, div_y + 1)) + x, y = np.meshgrid( + np.linspace(-size_x / 2, size_x / 2, div_x + 1), + np.linspace(-size_y / 2, size_y / 2, div_y + 1), + ) z = fn(x, y) vertices = np.stack([x.flatten(), y.flatten(), z.flatten()]).T faces = np.array([[0, div_y + 1, div_y + 2, 1]]) + np.expand_dims( - (np.expand_dims(np.arange(div_y), 0) + np.expand_dims(np.arange(div_x) * (div_y + 1), 1)).flatten(), -1) - - mesh = bpy.data.meshes.new('z_function_surface') + ( + np.expand_dims(np.arange(div_y), 0) + + np.expand_dims(np.arange(div_x) * (div_y + 1), 1) + ).flatten(), + -1, + ) + + mesh = bpy.data.meshes.new("z_function_surface") mesh.from_pydata(vertices, [], faces) mesh.update() return mesh @@ -64,23 +76,28 @@ def surface_from_func(fn, div_x=16, div_y=16, size_x=2, size_y=2): def bezier_curve(anchors, vector_locations=(), resolution=64, to_mesh=True): n = [len(r) for r in anchors if isinstance(r, Sized)][0] - anchors = np.array([np.array(r, dtype=float) if isinstance(r, Sized) else np.full(n, r) for r in anchors]) + anchors = np.array( + [ + np.array(r, dtype=float) if isinstance(r, Sized) else np.full(n, r) + for r in anchors + ] + ) bpy.ops.curve.primitive_bezier_curve_add(location=(0, 0, 0)) obj = bpy.context.active_object if n > 2: - with butil.ViewportMode(obj, 'EDIT'): + with butil.ViewportMode(obj, "EDIT"): bpy.ops.curve.subdivide(number_cuts=n - 2) points = obj.data.splines[0].bezier_points for i in range(n): points[i].co = anchors[:, i] for i in range(n): if i in vector_locations: - points[i].handle_left_type = 'VECTOR' - points[i].handle_right_type = 'VECTOR' + points[i].handle_left_type = "VECTOR" + points[i].handle_right_type = "VECTOR" else: - points[i].handle_left_type = 'AUTO' - points[i].handle_right_type = 'AUTO' + points[i].handle_left_type = "AUTO" + points[i].handle_right_type = "AUTO" obj.data.splines[0].resolution_u = resolution if to_mesh: return curve2mesh(obj) @@ -89,13 +106,15 @@ def bezier_curve(anchors, vector_locations=(), resolution=64, to_mesh=True): def curve2mesh(obj): with butil.SelectObjects(obj): - bpy.ops.object.convert(target='MESH') + bpy.ops.object.convert(target="MESH") obj = bpy.context.active_object - butil.modify_mesh(obj, 'WELD', merge_threshold=1e-4) + butil.modify_mesh(obj, "WELD", merge_threshold=1e-4) return obj -def align_bezier(anchors, axes=None, scale=None, vector_locations=(), resolution=64, to_mesh=True): +def align_bezier( + anchors, axes=None, scale=None, vector_locations=(), resolution=64, to_mesh=True +): obj = bezier_curve(anchors, vector_locations, resolution, False) points = obj.data.splines[0].bezier_points if scale is None: @@ -108,46 +127,72 @@ def align_bezier(anchors, 
axes=None, scale=None, vector_locations=(), resolution if a is None: continue a = np.array(a) - p.handle_left_type = 'FREE' - p.handle_right_type = 'FREE' + p.handle_left_type = "FREE" + p.handle_right_type = "FREE" proj_left = np.array(p.handle_left - p.co) @ a * a - p.handle_left = np.array(p.co) + proj_left / np.linalg.norm(proj_left) * np.linalg.norm( - p.handle_left - p.co) * scale[2 * i] + p.handle_left = ( + np.array(p.co) + + proj_left + / np.linalg.norm(proj_left) + * np.linalg.norm(p.handle_left - p.co) + * scale[2 * i] + ) proj_right = np.array(p.handle_right - p.co) @ a * a - p.handle_right = np.array(p.co) + proj_right / np.linalg.norm(proj_right) * np.linalg.norm( - p.handle_right - p.co) * scale[2 * i + 1] + p.handle_right = ( + np.array(p.co) + + proj_right + / np.linalg.norm(proj_right) + * np.linalg.norm(p.handle_right - p.co) + * scale[2 * i + 1] + ) if to_mesh: return curve2mesh(obj) return obj -def remesh_fill(obj, resolution=.005): +def remesh_fill(obj, resolution=0.005): n = len(obj.data.vertices) - butil.modify_mesh(obj, 'SOLIDIFY', thickness=.1) - write_attribute(obj, lambda nw, position: nw.compare('GREATER_EQUAL', nw.new_node(Nodes.Index), n), 'top') + butil.modify_mesh(obj, "SOLIDIFY", thickness=0.1) + write_attribute( + obj, + lambda nw, position: nw.compare("GREATER_EQUAL", nw.new_node(Nodes.Index), n), + "top", + ) sharp_remesh_with_attrs(obj, resolution) - is_top = read_attr_data(obj, 'top') > 1e-3 + is_top = read_attr_data(obj, "top") > 1e-3 remove_vertices(obj, lambda x, y, z: is_top) - obj.data.attributes.remove(obj.data.attributes['top']) + obj.data.attributes.remove(obj.data.attributes["top"]) return obj -def spin(anchors, vector_locations=(), subdivision=64, resolution=None, axis=(0, 0, 1), loop=False, - dupli=False): +def spin( + anchors, + vector_locations=(), + subdivision=64, + resolution=None, + axis=(0, 0, 1), + loop=False, + dupli=False, +): obj = bezier_curve(anchors, vector_locations, subdivision) co = read_co(obj) - max_radius = np.amax(np.linalg.norm(co - (co @ np.array(axis))[:, np.newaxis] * np.array(axis), axis=-1)) - if resolution is None: resolution = min(int(2 * np.pi * max_radius / .005), 128) - butil.modify_mesh(obj, 'WELD', merge_threshold=1e-4) + max_radius = np.amax( + np.linalg.norm( + co - (co @ np.array(axis))[:, np.newaxis] * np.array(axis), axis=-1 + ) + ) + if resolution is None: + resolution = min(int(2 * np.pi * max_radius / 0.005), 128) + butil.modify_mesh(obj, "WELD", merge_threshold=1e-4) if loop: - with butil.ViewportMode(obj, 'EDIT'), butil.Suppress(): - bpy.ops.mesh.select_all(action='SELECT') + with butil.ViewportMode(obj, "EDIT"), butil.Suppress(): + bpy.ops.mesh.select_all(action="SELECT") bpy.ops.mesh.fill() remesh_fill(obj) - with butil.ViewportMode(obj, 'EDIT'), butil.Suppress(): - bpy.ops.mesh.select_all(action='SELECT') + with butil.ViewportMode(obj, "EDIT"), butil.Suppress(): + bpy.ops.mesh.select_all(action="SELECT") bpy.ops.mesh.spin(steps=resolution, angle=np.pi * 2, axis=axis, dupli=dupli) - bpy.ops.mesh.select_all(action='SELECT') + bpy.ops.mesh.select_all(action="SELECT") bpy.ops.mesh.remove_doubles(threshold=1e-4) return obj @@ -158,33 +203,40 @@ def leaf(x_anchors, y_anchors, vector_locations=(), subdivision=64, face_size=No anchors = [x_anchors, i * np.array(y_anchors), 0] curves.append(bezier_curve(anchors, vector_locations, subdivision)) obj = butil.join_objects(curves) - butil.modify_mesh(obj, 'WELD', merge_threshold=.001) - with butil.ViewportMode(obj, 'EDIT'), butil.Suppress(): - 
bpy.ops.mesh.select_all(action='SELECT') + butil.modify_mesh(obj, "WELD", merge_threshold=0.001) + with butil.ViewportMode(obj, "EDIT"), butil.Suppress(): + bpy.ops.mesh.select_all(action="SELECT") bpy.ops.mesh.fill() remesh_fill(obj) if face_size is not None: - butil.modify_mesh(obj, 'WELD', merge_threshold=face_size / 2) - with butil.ViewportMode(obj, 'EDIT'), butil.Suppress(): + butil.modify_mesh(obj, "WELD", merge_threshold=face_size / 2) + with butil.ViewportMode(obj, "EDIT"), butil.Suppress(): bpy.ops.mesh.region_to_loop() - bpy.context.object.vertex_groups.new(name='boundary') + bpy.context.object.vertex_groups.new(name="boundary") bpy.ops.object.vertex_group_assign() obj = separate_loose(obj) return obj def cut_plane(obj, cut_center, cut_normal, clear_outer=True): - with butil.ViewportMode(obj, 'EDIT'): + with butil.ViewportMode(obj, "EDIT"): bpy.ops.mesh.select_mode(type="FACE") bm = bmesh.from_edit_mesh(obj.data) - bisect_plane = bmesh.ops.bisect_plane(bm, geom=bm.verts[:] + bm.edges[:] + bm.faces[:], - plane_co=cut_center, plane_no=cut_normal, clear_outer=clear_outer, - clear_inner=not clear_outer) - edges = [e for e in bisect_plane['geom_cut'] if isinstance(e, bmesh.types.BMEdge)] - face = bmesh.ops.edgeloop_fill(bm, edges=edges)['faces'][0] + bisect_plane = bmesh.ops.bisect_plane( + bm, + geom=bm.verts[:] + bm.edges[:] + bm.faces[:], + plane_co=cut_center, + plane_no=cut_normal, + clear_outer=clear_outer, + clear_inner=not clear_outer, + ) + edges = [ + e for e in bisect_plane["geom_cut"] if isinstance(e, bmesh.types.BMEdge) + ] + face = bmesh.ops.edgeloop_fill(bm, edges=edges)["faces"][0] locations = np.array([v.co for v in face.verts]) - bmesh.ops.delete(bm, geom=[face], context='FACES_ONLY') + bmesh.ops.delete(bm, geom=[face], context="FACES_ONLY") bmesh.update_edit_mesh(obj.data) cut = mesh2obj(data2mesh(locations, [], [list(range(len(locations)))])) @@ -195,4 +247,4 @@ def cut_plane(obj, cut_center, cut_normal, clear_outer=True): def make_circular_interp(low, high, n, fn=uniform): xs = make_circular_angle(polygon_angles(n)) ys = make_circular(fn(low, high, n)) - return interp1d(xs, ys, 'quadratic') + return interp1d(xs, ys, "quadratic") diff --git a/infinigen/assets/utils/extract_nodegroup_parts.py b/infinigen/assets/utils/extract_nodegroup_parts.py index 6a9a138e1..8f07fcb5e 100644 --- a/infinigen/assets/utils/extract_nodegroup_parts.py +++ b/infinigen/assets/utils/extract_nodegroup_parts.py @@ -6,19 +6,22 @@ import bpy +from infinigen.core.nodes.node_wrangler import ( + Nodes, + NodeWrangler, + geometry_node_group_empty_new, +) from infinigen.core.util import blender as butil -from infinigen.core.nodes.node_wrangler import NodeWrangler, Nodes, geometry_node_group_empty_new def extract_nodegroup_geo(target_obj, nodegroup, k, ng_params=None): - assert k in nodegroup.outputs - assert target_obj.type == 'MESH' + assert target_obj.type == "MESH" - vert = butil.spawn_vert('extract_nodegroup_geo.temp') + vert = butil.spawn_vert("extract_nodegroup_geo.temp") - butil.modify_mesh(vert, type='NODES', apply=False) - if vert.modifiers[0].node_group == None: + butil.modify_mesh(vert, type="NODES", apply=False) + if vert.modifiers[0].node_group is None: group = geometry_node_group_empty_new() vert.modifiers[0].node_group = group ng = vert.modifiers[0].node_group @@ -26,18 +29,18 @@ def extract_nodegroup_geo(target_obj, nodegroup, k, ng_params=None): obj_inp = nw.new_node(Nodes.ObjectInfo, [target_obj]) group_input_kwargs = {**ng_params} - if 'Geometry' in nodegroup.inputs: - 
group_input_kwargs['Geometry'] = obj_inp.outputs['Geometry'] + if "Geometry" in nodegroup.inputs: + group_input_kwargs["Geometry"] = obj_inp.outputs["Geometry"] group = nw.new_node(nodegroup.name, input_kwargs=group_input_kwargs) geo = group.outputs[k] - if k.endswith('Curve'): + if k.endswith("Curve"): # curves dont export from geonodes well, convert it to a mesh geo = nw.new_node(Nodes.CurveToMesh, [geo]) - output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': geo}) + output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Geometry": geo}) butil.apply_modifiers(vert) bpy.data.node_groups.remove(ng) - return vert \ No newline at end of file + return vert diff --git a/infinigen/assets/creatures/util/geometry/__init__.py b/infinigen/assets/utils/geometry/__init__.py similarity index 100% rename from infinigen/assets/creatures/util/geometry/__init__.py rename to infinigen/assets/utils/geometry/__init__.py diff --git a/infinigen/assets/creatures/util/geometry/cpp_utils/.gitignore b/infinigen/assets/utils/geometry/cpp_utils/.gitignore similarity index 100% rename from infinigen/assets/creatures/util/geometry/cpp_utils/.gitignore rename to infinigen/assets/utils/geometry/cpp_utils/.gitignore diff --git a/infinigen/assets/creatures/util/geometry/cpp_utils/__init__.py b/infinigen/assets/utils/geometry/cpp_utils/__init__.py similarity index 100% rename from infinigen/assets/creatures/util/geometry/cpp_utils/__init__.py rename to infinigen/assets/utils/geometry/cpp_utils/__init__.py diff --git a/infinigen/assets/creatures/util/geometry/cpp_utils/bnurbs.pyx b/infinigen/assets/utils/geometry/cpp_utils/bnurbs.pyx similarity index 99% rename from infinigen/assets/creatures/util/geometry/cpp_utils/bnurbs.pyx rename to infinigen/assets/utils/geometry/cpp_utils/bnurbs.pyx index 0905b00db..3c380b9e1 100644 --- a/infinigen/assets/creatures/util/geometry/cpp_utils/bnurbs.pyx +++ b/infinigen/assets/utils/geometry/cpp_utils/bnurbs.pyx @@ -1,8 +1,9 @@ -import numpy as np import cython +import numpy as np + cimport numpy as np -import bpy +import bpy # IMPORTANT: The structs below are copied from DNA_curve_types.h of Blender 3.1.2 source # May not work for versions of Blender diff --git a/infinigen/assets/creatures/util/geometry/curve.py b/infinigen/assets/utils/geometry/curve.py similarity index 77% rename from infinigen/assets/creatures/util/geometry/curve.py rename to infinigen/assets/utils/geometry/curve.py index 893086a74..2b8f92bc1 100644 --- a/infinigen/assets/creatures/util/geometry/curve.py +++ b/infinigen/assets/utils/geometry/curve.py @@ -8,13 +8,10 @@ from infinigen.core.util import blender as butil -class Curve: +class Curve: def __init__( - self, points, - profile=None, taper=None, - closed=False, sharp=None, - scale=None + self, points, profile=None, taper=None, closed=False, sharp=None, scale=None ): self.points = points self.profile = profile @@ -23,16 +20,21 @@ def __init__( self.sharp = sharp self.scale = scale - def to_curve_obj(self, name='curve', - resu=4, curvetype='NURBS', extrude=0, fill_caps = True, - to_mesh=False, cleanup=True + def to_curve_obj( + self, + name="curve", + resu=4, + curvetype="NURBS", + extrude=0, + fill_caps=True, + to_mesh=False, + cleanup=True, ): - - curveData = bpy.data.curves.new(f'{name}_curve', type='CURVE') - curveData.dimensions = '3D' + curveData = bpy.data.curves.new(f"{name}_curve", type="CURVE") + curveData.dimensions = "3D" curveData.resolution_u = resu curveData.use_fill_caps = fill_caps - curveData.twist_mode = 'MINIMUM' + 
curveData.twist_mode = "MINIMUM" curveData.bevel_depth = extrude polyline = curveData.splines.new(curvetype) @@ -44,7 +46,9 @@ def get_pos(p): x, y = p z = 0 else: - raise ValueError(f'Unrecognized point dim {len(p)} in Curve.to_curve_obj') + raise ValueError( + f"Unrecognized point dim {len(p)} in Curve.to_curve_obj" + ) return x, y, z, 1 for i, p in enumerate(self.points): @@ -59,7 +63,7 @@ def get_pos(p): polyline.points[-1].co = get_pos(p) if self.profile is not None: - curveData.bevel_mode = 'OBJECT' + curveData.bevel_mode = "OBJECT" curveData.bevel_object = self.profile if self.taper is not None: @@ -69,7 +73,7 @@ def get_pos(p): bpy.context.scene.collection.objects.link(obj) if self.closed: - with butil.ViewportMode(obj, mode='EDIT'): + with butil.ViewportMode(obj, mode="EDIT"): bpy.ops.curve.select_all() bpy.ops.curve.cyclic_toggle() @@ -77,7 +81,6 @@ def get_pos(p): obj.scale = self.scale if to_mesh: - bevel = curveData.bevel_object taper = curveData.taper_object diff --git a/infinigen/assets/creatures/util/geometry/lofting.py b/infinigen/assets/utils/geometry/lofting.py similarity index 65% rename from infinigen/assets/creatures/util/geometry/lofting.py rename to infinigen/assets/utils/geometry/lofting.py index 0cdc9f28e..4b5e41561 100644 --- a/infinigen/assets/creatures/util/geometry/lofting.py +++ b/infinigen/assets/utils/geometry/lofting.py @@ -5,24 +5,25 @@ from dataclasses import dataclass -import pdb - -import bpy -import bmesh import numpy as np from infinigen.core.util import blender as butil -from infinigen.core.util.math import rotate_match_directions, lerp_sample, inverse_interpolate +from infinigen.core.util.math import ( + inverse_interpolate, + lerp_sample, + rotate_match_directions, +) + from .nurbs import nurbs + def factorize_nurbs_handles(handles): - - ''' + """ Factorize (n,m,3) handles into a skeleton, radii and relative normalized profiles. 
IE, profiles output will all face x axis, and have mean radius ~= 1 - ''' + """ skeleton_polyline = handles.mean(axis=1) tangents = skeleton_to_tangents(skeleton_polyline) @@ -32,53 +33,58 @@ def factorize_nurbs_handles(handles): rot_mats = rotate_match_directions(tangents, forward) profiles = handles - skeleton_polyline[:, None] - profiles = np.einsum('bij,bvj->bvi', rot_mats, profiles) - + profiles = np.einsum("bij,bvj->bvi", rot_mats, profiles) + ts = np.linspace(0, 1, handles.shape[0]) - + return skeleton_polyline, ts, profiles + @dataclass class Skin: - - ''' + """ Defines all the data for a loft mesh besides its skeleton, ie how far and what shape should the mesh extend beyond the structure of the skeleton N = number of defined profiles along the skeleton M = number of points per profile - ''' + """ + + ts: np.array # shape (N) float + profiles: np.array # shape (N x M) float as polar distances; or NxMx3 as points with x as forward axis + + profile_as_points: bool = False # whether to interpret profiles as points - ts: np.array # shape (N) float - profiles: np.array # shape (N x M) float as polar distances; or NxMx3 as points with x as forward axis - - profile_as_points: bool = False # whether to interpret profiles as points + angles: np.array = None # shape (M) float + surface_params: np.array = None # shape (N x M x K) float, K is num params per vert - angles: np.array = None # shape (M) float - surface_params: np.array = None # shape (N x M x K) float, K is num params per vert def dist_pcts_to_ts(skeleton, ds): lengths = np.linalg.norm(skeleton[1:] - skeleton[:-1], axis=-1) dists = np.concatenate([np.array([0]), np.cumsum(lengths)]) - ts = inverse_interpolate(dists, ds * dists[-1]) + ts = inverse_interpolate(dists, ds * dists[-1]) return ts / (len(skeleton) - 1) + def skeleton_to_tangents(skeleton): axes = np.empty_like(skeleton, dtype=np.float32) axes[-1] = skeleton[-1] - skeleton[-2] axes[:-1] = skeleton[1:] - skeleton[:-1] - axes[1:-1] = (axes[1:-1] + axes[:-2]) / 2 # use average of neighboring edge directions where available - + axes[1:-1] = ( + axes[1:-1] + axes[:-2] + ) / 2 # use average of neighboring edge directions where available + norm = np.linalg.norm(axes, axis=-1) axes[norm > 0] /= norm[norm > 0, None] return axes + def default_profile_angles(m): - return np.linspace(-np.pi/2, 1.5 * np.pi, m, endpoint=False) + return np.linspace(-np.pi / 2, 1.5 * np.pi, m, endpoint=False) -def compute_profile_verts(skeleton, ts, profiles, angles=None, profile_as_points=False): +def compute_profile_verts(skeleton, ts, profiles, angles=None, profile_as_points=False): n, m = profiles.shape[0:2] k = len(skeleton) @@ -88,36 +94,43 @@ def compute_profile_verts(skeleton, ts, profiles, angles=None, profile_as_points # decide the axes of rotation for each integer distance along the skeleton axes = skeleton_to_tangents(skeleton) - + # user gives t in [0, 1] representing percent of distance along skeleton - #ts = dist_pcts_to_ts(skeleton, ts) + # ts = dist_pcts_to_ts(skeleton, ts) axes = lerp_sample(axes, ts * (k - 1)) pos = lerp_sample(skeleton, ts * (k - 1)) - + # compute profile shapes if profile_as_points: - assert(profiles.shape[2]==3) - profile_verts = profiles; + assert profiles.shape[2] == 3 + profile_verts = profiles else: - unit_circle = np.stack([np.zeros_like(angles), np.cos(angles), np.sin(angles)], axis=-1) + unit_circle = np.stack( + [np.zeros_like(angles), np.cos(angles), np.sin(angles)], axis=-1 + ) profile_verts = profiles[..., None] * unit_circle[None] # pose profiles to get 
vert locations forward = np.zeros_like(axes) forward[:, 0] = 1 rot_mats = rotate_match_directions(forward, axes) - profile_verts = np.einsum('bij,bvj->bvi', rot_mats, profile_verts) + pos[:, None] + profile_verts = np.einsum("bij,bvj->bvi", rot_mats, profile_verts) + pos[:, None] return profile_verts -def loft(skeleton, skin, method='blender', face_size=0.01, debug=False, **kwargs): - - ctrlpts = compute_profile_verts(skeleton, skin.ts, skin.profiles, skin.angles, profile_as_points=skin.profile_as_points) + +def loft(skeleton, skin, method="blender", face_size=0.01, debug=False, **kwargs): + ctrlpts = compute_profile_verts( + skeleton, + skin.ts, + skin.profiles, + skin.angles, + profile_as_points=skin.profile_as_points, + ) obj = nurbs(ctrlpts, method, face_size, debug, **kwargs) if debug: - skeleton_debug = butil.spawn_point_cloud('skeleton_debug', skeleton) + skeleton_debug = butil.spawn_point_cloud("skeleton_debug", skeleton) skeleton_debug.parent = obj return obj - diff --git a/infinigen/assets/creatures/util/geometry/metaballs.py b/infinigen/assets/utils/geometry/metaballs.py similarity index 70% rename from infinigen/assets/creatures/util/geometry/metaballs.py rename to infinigen/assets/utils/geometry/metaballs.py index 4ad38f671..22aaeec2f 100644 --- a/infinigen/assets/creatures/util/geometry/metaballs.py +++ b/infinigen/assets/utils/geometry/metaballs.py @@ -4,69 +4,62 @@ # Authors: Alexander Raistrick +import bmesh import bpy import mathutils -import bmesh - import numpy as np from infinigen.core.util import blender as butil + class MBallStructure: - def __init__(self, name, resolution=0.1): - self.name = name self.resolution = resolution self.root = butil.spawn_empty(name) - + assert self.name not in bpy.data.metaballs.keys() self.empty_elt((0, 0, 0), rot=mathutils.Quaternion(), scale=(1, 1, 1)) def empty_elt(self, pos, rot, scale): - - mball = bpy.data.metaballs.new(self.name + '_mball') + mball = bpy.data.metaballs.new(self.name + "_mball") mball.resolution = self.resolution mball.render_resolution = self.resolution - - mball_obj = bpy.data.objects.new(self.name + '_element', mball) - bpy.context.view_layer.active_layer_collection.collection.objects.link(mball_obj) - + + mball_obj = bpy.data.objects.new(self.name + "_element", mball) + bpy.context.view_layer.active_layer_collection.collection.objects.link( + mball_obj + ) + mball_obj.parent = self.root mball_obj.location = pos mball_obj.rotation_euler = rot.to_euler() - mball_obj.scale = scale - + mball_obj.scale = scale + return mball_obj def apply_flags(self, ele, flags): - - ele.use_negative = flags.get('neg', False) - ele.stiffness = flags.get('stiffness', 2) - - def ellipse( - self, pos, rot, - length, rad, mode='scale', - scale=(1, 1, 1), flags={} - ): + ele.use_negative = flags.get("neg", False) + ele.stiffness = flags.get("stiffness", 2) + def ellipse(self, pos, rot, length, rad, mode="scale", scale=(1, 1, 1), flags={}): mball_obj = self.empty_elt(pos, rot, scale) ele = mball_obj.data.elements.new() - ele.type = 'ELLIPSOID' + ele.type = "ELLIPSOID" - if mode == 'sizes': - ele.size_x = length + if mode == "sizes": + ele.size_x = length ele.size_y = rad ele.size_z = rad - ele.radius = 1 # this seems to just scale everything up/down, no need - elif mode == 'scale': + ele.radius = 1 # this seems to just scale everything up/down, no need + elif mode == "scale": mball_obj.scale.x *= length mball_obj.scale.y *= rad mball_obj.scale.z *= rad - ele.radius = 1 # this seems to just scale everything up/down, no need + ele.radius = 
1 # this seems to just scale everything up/down, no need ele.size_x = 1 ele.size_y = 1 ele.size_z = 1 @@ -76,32 +69,32 @@ def ellipse( return mball_obj def capsule(self, pos, rot, length, rad, scale=(1, 1, 1), flags={}): - mball_obj = self.empty_elt(pos, rot, scale) ele = mball_obj.data.elements.new() - ele.type='CAPSULE' + ele.type = "CAPSULE" ele.size_x = length / 2 - ele.radius = rad #/ 1.15 # blender always seems to overshoot what I ask for by 15% + ele.radius = ( + rad # / 1.15 # blender always seems to overshoot what I ask for by 15% + ) self.apply_flags(ele, flags) return mball_obj - + def ball(self, pos, rad, **kwargs): return self.capsule(pos, length=0, rad=rad, **kwargs) - - def to_object(self): + def to_object(self): bm = bmesh.new() - + mball_obj = self.root.children[0] - - if len(self.root.children) > 1: + + if len(self.root.children) > 1: first = self.root.children[1] mball_obj.location = first.location mball_obj.rotation_euler = first.rotation_euler - + # do resolution via scale, not using their settings for c in self.root.children: c.data.resolution = 1 @@ -114,7 +107,7 @@ def to_object(self): mesh = bpy.data.meshes.new(self.name) bm.to_mesh(mesh) - obj = bpy.data.objects.new(self.name + '_mesh', object_data=mesh) + obj = bpy.data.objects.new(self.name + "_mesh", object_data=mesh) bpy.context.scene.collection.objects.link(obj) if len(self.root.children) > 1: @@ -122,22 +115,24 @@ def to_object(self): obj.rotation_euler = first.rotation_euler obj.scale = np.full(3, self.resolution) with butil.SelectObjects(obj): - bpy.ops.object.transform_apply(location=False, rotation=False, scale=True) + bpy.ops.object.transform_apply( + location=False, rotation=False, scale=True + ) return obj - + def clean(self): for o in self.root.children: bpy.data.objects.remove(o) bpy.data.objects.remove(self.root) -def plusx_cylinder_unwrap(part): - ''' +def plusx_cylinder_unwrap(part): + """ Rotate the part from +X to face -Z, cylinder project it, then rotate it back WARNING: The cylinder projection operation is VERY particular about the 'context' being right - ''' + """ if len(part.data.vertices) == 0: return @@ -148,17 +143,17 @@ def plusx_cylinder_unwrap(part): orig = part.rotation_euler.copy() # translate to pointing upwards - part.rotation_euler = (0, np.pi/2, 0) + part.rotation_euler = (0, np.pi / 2, 0) bpy.ops.object.transform_apply(location=False, rotation=True, scale=False) - bpy.ops.object.mode_set(mode='EDIT') - bpy.ops.mesh.select_all(action='SELECT') - bpy.ops.uv.cylinder_project(direction='ALIGN_TO_OBJECT', correct_aspect=True) - bpy.ops.object.mode_set(mode='OBJECT') + bpy.ops.object.mode_set(mode="EDIT") + bpy.ops.mesh.select_all(action="SELECT") + bpy.ops.uv.cylinder_project(direction="ALIGN_TO_OBJECT", correct_aspect=True) + bpy.ops.object.mode_set(mode="OBJECT") # undo the rotation we just applied into th emesh - part.rotation_euler = (0, -np.pi/2, 0) + part.rotation_euler = (0, -np.pi / 2, 0) bpy.ops.object.transform_apply(location=False, rotation=True, scale=False) # back to normal - part.rotation_euler = orig \ No newline at end of file + part.rotation_euler = orig diff --git a/infinigen/assets/creatures/util/geometry/nurbs.py b/infinigen/assets/utils/geometry/nurbs.py similarity index 71% rename from infinigen/assets/creatures/util/geometry/nurbs.py rename to infinigen/assets/utils/geometry/nurbs.py index 306cbc7e4..e9fba993f 100644 --- a/infinigen/assets/creatures/util/geometry/nurbs.py +++ b/infinigen/assets/utils/geometry/nurbs.py @@ -4,28 +4,28 @@ # Authors: Alexander 
Raistrick -import math import logging +import math -import bpy import bmesh - -from geomdl import NURBS, knotvector +import bpy import numpy as np +from geomdl import NURBS from infinigen.core.util import blender as butil -from infinigen.core.util.math import randomspacing logger = logging.getLogger(__name__) try: import bnurbs except ImportError: - logger.warning(f'Failed to import compiled `bnurbs` package, either installation failed or we are running a minimal install') + logger.warning( + "Failed to import compiled `bnurbs` package, either installation failed or we are running a minimal install" + ) bnurbs = None -def compute_cylinder_topology(n: int, m: int, uvs=False, cyclic=True, h_neighbors=None): +def compute_cylinder_topology(n: int, m: int, uvs=False, cyclic=True, h_neighbors=None): # n: num vertices in vertical direction # m: num vertices in each loop @@ -48,8 +48,7 @@ def compute_cylinder_topology(n: int, m: int, uvs=False, cyclic=True, h_neighbor edges = np.concatenate([ring_edges, bridge_edges]) # compute faces - face_neighbors = np.concatenate( - [h_neighbors, h_neighbors[:, ::-1] + m], axis=-1) + face_neighbors = np.concatenate([h_neighbors, h_neighbors[:, ::-1] + m], axis=-1) faces = ring_start_offsets[:, None, None] + face_neighbors[None] if not cyclic: faces = faces[:, :-1, :] @@ -58,17 +57,19 @@ def compute_cylinder_topology(n: int, m: int, uvs=False, cyclic=True, h_neighbor if not uvs: return edges, faces - us, vs = np.meshgrid(np.linspace(0, 1, m, endpoint=True), np.linspace(0, 1, n, endpoint=True)) + us, vs = np.meshgrid( + np.linspace(0, 1, m, endpoint=True), np.linspace(0, 1, n, endpoint=True) + ) uvs = np.stack([us, vs], axis=-1).reshape(-1, 2) return edges, faces, uvs -def apply_crease_values(obj, creases: np.array): +def apply_crease_values(obj, creases: np.array): n, m, c = creases.shape # set crease values - with butil.ViewportMode(obj, mode='EDIT'): + with butil.ViewportMode(obj, mode="EDIT"): bm = bmesh.from_edit_mesh(obj.data) creaseLayer = bm.edges.layers.crease.verify() @@ -82,8 +83,9 @@ def apply_crease_values(obj, creases: np.array): bmesh.update_edit_mesh(obj.data) -def subdiv_mesh_nurbs(verts, level, creases=None, name='loft_mesh', cyclic_v=True) -> bpy.types.Object: - +def subdiv_mesh_nurbs( + verts, level, creases=None, name="loft_mesh", cyclic_v=True +) -> bpy.types.Object: if not cyclic_v: raise NotImplementedError() @@ -96,29 +98,38 @@ def subdiv_mesh_nurbs(verts, level, creases=None, name='loft_mesh', cyclic_v=Tru apply_crease_values(obj, creases) if level: - butil.modify_mesh(obj, type='SUBSURF', levels=level, - render_levels=level, apply=False) + butil.modify_mesh( + obj, type="SUBSURF", levels=level, render_levels=level, apply=False + ) return obj - -def blender_nurbs(ctrlpts, ws=None, name='loft_nurbs', resolution=(32, 32), cyclic_v=True, kv_u=None, kv_v=None): - - n, m, _ = ctrlpts.shape + + +def blender_nurbs( + ctrlpts, + ws=None, + name="loft_nurbs", + resolution=(32, 32), + cyclic_v=True, + kv_u=None, + kv_v=None, +): + n, m, _ = ctrlpts.shape if ws is None: ws = np.ones((n, m, 1)) else: assert ws.shape == (n, m, 1) - curve = bpy.data.curves.new(name, 'SURFACE') - curve.dimensions = '3D' + curve = bpy.data.curves.new(name, "SURFACE") + curve.dimensions = "3D" obj = bpy.data.objects.new(name, curve) bpy.context.scene.collection.objects.link(obj) # create each profile as its own spline verts_4d = np.concatenate([ctrlpts, ws], axis=-1) for i, profile in enumerate(verts_4d): - spline = curve.splines.new(type='NURBS') + spline = 
curve.splines.new(type="NURBS") spline.points.add(m - len(spline.points)) for p, co in zip(spline.points, profile): p.co = co @@ -127,7 +138,7 @@ def blender_nurbs(ctrlpts, ws=None, name='loft_nurbs', resolution=(32, 32), cycl for s in curve.splines: for p in s.points: p.select = True - with butil.ViewportMode(obj, mode='EDIT'): + with butil.ViewportMode(obj, mode="EDIT"): bpy.ops.curve.make_segment() spline = obj.data.splines[0] @@ -135,7 +146,7 @@ def blender_nurbs(ctrlpts, ws=None, name='loft_nurbs', resolution=(32, 32), cycl spline.use_endpoint_u = True spline.use_cyclic_v = cyclic_v spline.resolution_u, spline.resolution_v = resolution - + if kv_u is not None: bnurbs.set_knotsu(spline, kv_u) if kv_v is not None: @@ -144,27 +155,27 @@ def blender_nurbs(ctrlpts, ws=None, name='loft_nurbs', resolution=(32, 32), cycl return obj -def generate_knotvector(degree, n, mode='uniform', clamped=True): - if mode == 'uniform': +def generate_knotvector(degree, n, mode="uniform", clamped=True): + if mode == "uniform": if clamped: middle = np.linspace(0, n, n - degree + 1)[1:-1] else: middle = np.arange(0, n + degree + 1) - elif mode == 'piecewise_bezier': # todo: this isn't correct + elif mode == "piecewise_bezier": # todo: this isn't correct middle = np.repeat(np.arange(0, n), degree) - elif mode == 'random_uniform': + elif mode == "random_uniform": if clamped: middle = np.sort(np.random.uniform(0, n, n - degree - 1)) else: middle = np.sort(np.random.uniform(0, n, n + degree + 1)) else: - raise ValueError(f'Unrecognized {mode=} for generate_knotvector') + raise ValueError(f"Unrecognized {mode=} for generate_knotvector") if clamped: - assert len(middle) == n - degree - \ - 1, f'{len(middle)} != {n - degree - 1}' + assert len(middle) == n - degree - 1, f"{len(middle)} != {n - degree - 1}" knot = np.concatenate( - [np.full(degree + 1, 0), middle, np.full(degree + 1, n)]) # pin the ends + [np.full(degree + 1, 0), middle, np.full(degree + 1, n)] + ) # pin the ends else: knot = middle @@ -174,7 +185,6 @@ def generate_knotvector(degree, n, mode='uniform', clamped=True): def blender_mesh_from_pydata(points, edges, faces, uvs=None, name="pydata_mesh"): - mesh = bpy.data.meshes.new(name=name) mesh.from_pydata(points, edges, faces) @@ -206,7 +216,9 @@ def blender_nurbs_to_geomdl(s: bpy.types.Spline) -> NURBS.Surface: surf.ctrlpts_size_v = s.point_count_v + (s.order_v - 1 if s.use_cyclic_v else 0) if bnurbs is None: - logger.warning(f'Failed to import compiled `bnurbs` package, either installation failed or we are running a minimal install') + logger.warning( + "Failed to import compiled `bnurbs` package, either installation failed or we are running a minimal install" + ) surf.knotvector_u = bnurbs.get_knotsu(s) surf.knotvector_v = bnurbs.get_knotsv(s) @@ -218,22 +230,24 @@ def blender_nurbs_to_geomdl(s: bpy.types.Spline) -> NURBS.Surface: # IMPORTANT: blender stores u as the faster changing index ctrlpts = ctrlpts.reshape((s.point_count_v, s.point_count_u, 4)) if s.use_cyclic_u: - ctrlpts = np.concatenate([ctrlpts, ctrlpts[:, 0:s.order_u - 1, :]], axis=1) + ctrlpts = np.concatenate([ctrlpts, ctrlpts[:, 0 : s.order_u - 1, :]], axis=1) if s.use_cyclic_v: - ctrlpts = np.concatenate([ctrlpts, ctrlpts[0:s.order_v - 1, :, :]], axis=0) + ctrlpts = np.concatenate([ctrlpts, ctrlpts[0 : s.order_v - 1, :, :]], axis=0) + + ctrlpts = ctrlpts.transpose(1, 0, 2).reshape((-1, 4)) - ctrlpts = ctrlpts.transpose(1,0,2).reshape((-1,4)) - surf.ctrlpts = ctrlpts[:, :-1] surf.weights = ctrlpts[:, -1] return surf + def 
geomdl_to_mesh(surf: NURBS.Surface, eval_delta, name="geomdl_mesh"): surf.delta = eval_delta points = np.array(surf.evalpts) edges, faces = compute_cylinder_topology( - surf.sample_size_u, surf.sample_size_v, cyclic=False) + surf.sample_size_u, surf.sample_size_v, cyclic=False + ) mesh = bpy.data.meshes.new(name=name) mesh.from_pydata(points, edges, faces) @@ -241,6 +255,7 @@ def geomdl_to_mesh(surf: NURBS.Surface, eval_delta, name="geomdl_mesh"): bpy.context.scene.collection.objects.link(obj) return obj + def map_param_to_valid_domain(knots: np.array, order: int, u: np.array, cyclic: bool): u_start, u_end = knots[[order - 1, -order]] if not cyclic and ((u_start > u).any() or (u > u_end).any()): @@ -248,17 +263,20 @@ def map_param_to_valid_domain(knots: np.array, order: int, u: np.array, cyclic: _, r = np.divmod(u - u_start, u_end - u_start) return r + u_start + # for cyclic u or v, wrap them around to valid domain. # raise exception if not cyclic and out of domain def map_uv_to_valid_domain(s: bpy.types.Spline, uv: np.array): knotsu = bnurbs.get_knotsu(s) knotsv = bnurbs.get_knotsv(s) - u = map_param_to_valid_domain(knotsu, s.order_u, uv[:,0], s.use_cyclic_u) - v = map_param_to_valid_domain(knotsv, s.order_v, uv[:,1], s.use_cyclic_v) - return np.stack([u,v], axis=-1) + u = map_param_to_valid_domain(knotsu, s.order_u, uv[:, 0], s.use_cyclic_u) + v = map_param_to_valid_domain(knotsv, s.order_v, uv[:, 1], s.use_cyclic_v) + return np.stack([u, v], axis=-1) -def geomdl_nurbs(ctrlpts, eval_delta, ws=None, kv_u=None, kv_v=None, name='loft_nurbs', cyclic_v=True): +def geomdl_nurbs( + ctrlpts, eval_delta, ws=None, kv_u=None, kv_v=None, name="loft_nurbs", cyclic_v=True +): n, m, _ = ctrlpts.shape degree_u, degree_v = (3, 3) @@ -281,55 +299,57 @@ def geomdl_nurbs(ctrlpts, eval_delta, ws=None, kv_u=None, kv_v=None, name='loft_ if ws is not None: surf.weights = ws - surf.knotvector_u = generate_knotvector( - surf.degree_u, n) if kv_u is None else list(kv_u) + surf.knotvector_u = ( + generate_knotvector(surf.degree_u, n) if kv_u is None else list(kv_u) + ) # uniform spacing is generally recommended, especially for cyclic v if kv_v is None: - kv_v = np.array(generate_knotvector(surf.degree_v, m, - mode='uniform', clamped=not cyclic_v)) + kv_v = np.array( + generate_knotvector(surf.degree_v, m, mode="uniform", clamped=not cyclic_v) + ) if cyclic_v: # wrap around p knot intervals - kv_v = np.append(kv_v, kv_v[1:degree_v+1] + kv_v[-1] - kv_v[0]) + kv_v = np.append(kv_v, kv_v[1 : degree_v + 1] + kv_v[-1] - kv_v[0]) surf.knotvector_v = list(kv_v) surf.delta = eval_delta points = np.array(surf.evalpts) if cyclic_v: # drop the last point (which is a duplicate) for each loop - points = points.reshape( - surf.sample_size_u, surf.sample_size_v, -1)[:, :-1, :].reshape(-1, 3) + points = points.reshape(surf.sample_size_u, surf.sample_size_v, -1)[ + :, :-1, : + ].reshape(-1, 3) - edges, faces, uvs = compute_cylinder_topology(surf.sample_size_u, surf.sample_size_v - cyclic_v, - cyclic=cyclic_v, uvs=True) + edges, faces, uvs = compute_cylinder_topology( + surf.sample_size_u, surf.sample_size_v - cyclic_v, cyclic=cyclic_v, uvs=True + ) return blender_mesh_from_pydata(points, edges, faces, uvs=uvs, name=name) def nurbs(ctrlpts, method, face_size=0.01, debug=False, **kwargs): - n, m, _ = ctrlpts.shape - ulength = np.linalg.norm(np.diff(ctrlpts, axis=0), - axis=-1).sum(axis=0).max() - vlength = np.linalg.norm(np.diff(ctrlpts, axis=1), - axis=-1).sum(axis=1).max() + ulength = np.linalg.norm(np.diff(ctrlpts, axis=0), 
axis=-1).sum(axis=0).max() + vlength = np.linalg.norm(np.diff(ctrlpts, axis=1), axis=-1).sum(axis=1).max() - if method == 'geomdl': + if method == "geomdl": steps = face_size / max(ulength, vlength) obj = geomdl_nurbs(ctrlpts, steps, **kwargs) - elif method == 'blender': - resolution = np.clip( - np.array([ulength, vlength])/face_size, 6, 40).astype(int) + elif method == "blender": + resolution = np.clip(np.array([ulength, vlength]) / face_size, 6, 40).astype( + int + ) resolution = (6, 6) obj = blender_nurbs(ctrlpts, resolution=resolution) - elif method == 'subdiv': - upres_fac = max(ulength/n, vlength/m) / face_size + elif method == "subdiv": + upres_fac = max(ulength / n, vlength / m) / face_size level = math.ceil(np.log2(upres_fac)) obj = subdiv_mesh_nurbs(ctrlpts, level=np.clip(level, 2, 7), **kwargs) else: - raise ValueError(f'Unrecognized nurbs({method=})') + raise ValueError(f"Unrecognized nurbs({method=})") if debug: - handles = butil.spawn_point_cloud('handles', ctrlpts.reshape(-1, 3)) + handles = butil.spawn_point_cloud("handles", ctrlpts.reshape(-1, 3)) handles.parent = obj return obj diff --git a/infinigen/assets/creatures/util/geometry/skin_ops.py b/infinigen/assets/utils/geometry/skin_ops.py similarity index 51% rename from infinigen/assets/creatures/util/geometry/skin_ops.py rename to infinigen/assets/utils/geometry/skin_ops.py index 39697ebaa..6aba65159 100644 --- a/infinigen/assets/creatures/util/geometry/skin_ops.py +++ b/infinigen/assets/utils/geometry/skin_ops.py @@ -6,63 +6,70 @@ from copy import copy -import bpy - import numpy as np -from numpy.random import uniform, normal +from numpy.random import normal, uniform -from infinigen.assets.creatures.util.geometry.lofting import Skin +from infinigen.assets.utils.geometry import lofting +from infinigen.assets.utils.geometry.lofting import Skin from infinigen.core.util.math import lerp, randomspacing -from infinigen.assets.creatures.util.geometry import lofting + def extend_cap(skin: Skin, r=1, margin=0): res = copy(skin) - res.ts = np.concatenate([np.array([margin]), skin.ts, np.array([1-margin])], axis=0) - res.profiles = np.concatenate([skin.profiles[[0]] * r, skin.profiles, skin.profiles[[-1]] * r]) - + res.ts = np.concatenate( + [np.array([margin]), skin.ts, np.array([1 - margin])], axis=0 + ) + res.profiles = np.concatenate( + [skin.profiles[[0]] * r, skin.profiles, skin.profiles[[-1]] * r] + ) + if res.surface_params is not None: - res.surface_params = np.concatenate([ - skin.surface_params[[0]], skin.surface_params, skin.surface_params[[-1]]]) + res.surface_params = np.concatenate( + [skin.surface_params[[0]], skin.surface_params, skin.surface_params[[-1]]] + ) return res + def square_cap(s: Skin): s = extend_cap(s, r=1, margin=0.01) s = extend_cap(s, r=0) return s -def bevel_cap(s: Skin, n: int, d: float, profile='SPHERE'): - ts = np.linspace(1, 0, n) # pct of distance from end +def bevel_cap(s: Skin, n: int, d: float, profile="SPHERE"): + ts = np.linspace(1, 0, n) # pct of distance from end - if profile == 'SPHERE': + if profile == "SPHERE": rads = np.sqrt(1 - ts * ts) - elif profile == 'CHAMFER': + elif profile == "CHAMFER": rads = ts else: - raise ValueError(f'Unrecognized {profile=}') + raise ValueError(f"Unrecognized {profile=}") for t, r in zip(ts, rads): - s = extend_cap(s, r=r, margin=d*t) + s = extend_cap(s, r=r, margin=d * t) return s + def symmetrize(s: Skin, fac): - - #if s.angles is not None: + # if s.angles is not None: # raise NotImplementedError(f'symmetrize(s: Skin) only supports s.angles = 
None') - + res = copy(s) - res.profiles = lerp(s.profiles, (s.profiles + s.profiles[:, ::-1])/2, fac) - + res.profiles = lerp(s.profiles, (s.profiles + s.profiles[:, ::-1]) / 2, fac) + if s.surface_params is not None: - res.surface_params = lerp(s.surface_params, (s.surface_params + s.surface_params[:, ::-1]) / 2, fac) + res.surface_params = lerp( + s.surface_params, (s.surface_params + s.surface_params[:, ::-1]) / 2, fac + ) return res -def outerprod_skin(ts, rads, profile, profile_as_points=False, add_cap=True): +def outerprod_skin(ts, rads, profile, profile_as_points=False, add_cap=True): if profile_as_points: - profiles = rads.reshape(-1,1,1) * profile.reshape(1,-1,3) + profiles = rads.reshape(-1, 1, 1) * profile.reshape(1, -1, 3) else: profiles = rads.reshape(-1, 1) * profile.reshape(1, -1) @@ -73,34 +80,34 @@ def outerprod_skin(ts, rads, profile, profile_as_points=False, add_cap=True): s = extend_cap(s, r=0) return s -def random_skin(rad, n, m, n_params=1): +def random_skin(rad, n, m, n_params=1): ts = randomspacing(0.03, 0.97, n, margin=0.1) - angles = None # cutil.randomspacing(-np.pi, 1.5 * np.pi, m, margin=0.4) + angles = None # cutil.randomspacing(-np.pi, 1.5 * np.pi, m, margin=0.4) sine_fac = np.sin(ts * np.pi)[:, None] - sine_fac = sine_fac ** 0.2 + sine_fac = sine_fac**0.2 radius_func = lerp(rad * 0.1, rad, sine_fac) sigmas = np.array([0.07, 0.4, 0.25]) - o_n, o_m, o_ind = np.clip(normal(sigmas, sigmas/4, 3), 0, 1) + o_n, o_m, o_ind = np.clip(normal(sigmas, sigmas / 4, 3), 0, 1) profiles = radius_func * ( - normal(1, o_n, (n, 1)) * - normal(1, o_m, (1, m)) * - normal(1, o_ind, (n, m)) + normal(1, o_n, (n, 1)) * normal(1, o_m, (1, m)) * normal(1, o_ind, (n, m)) ) profiles = np.clip(profiles, 0, 2 * rad) sym = 1 if n_params == 2: - ring_creases = np.power(uniform(0, 1, (n, 1)), 3) - row_creases = np.power(uniform(0, 1, (1, m)), 3) + ring_creases = np.power(uniform(0, 1, (n, 1)), 3) + row_creases = np.power(uniform(0, 1, (1, m)), 3) - params = np.stack([ring_creases * np.ones((1, m)), row_creases * np.ones((n, 1))], axis=-1) + params = np.stack( + [ring_creases * np.ones((1, m)), row_creases * np.ones((n, 1))], axis=-1 + ) else: params = uniform(0.1, 10, (n, m, 1)) - + s = Skin(ts=ts, profiles=profiles, surface_params=params, angles=angles) s = extend_cap(s, r=0.5) s = extend_cap(s, r=0) @@ -109,15 +116,17 @@ def random_skin(rad, n, m, n_params=1): return s -def profile_from_thickened_curve(curve_skeleton: np.array, # Nx3, with x axis as forward - widths: np.array, # N floats + +def profile_from_thickened_curve( + curve_skeleton: np.array, # Nx3, with x axis as forward + widths: np.array, # N floats ): tgs = lofting.skeleton_to_tangents(curve_skeleton) - left_dir = np.stack([np.zeros_like(tgs[:,0]), -tgs[:,2], tgs[:,1]], axis=-1) - left_offset = widths[:,None] * left_dir / np.linalg.norm(left_dir) + left_dir = np.stack([np.zeros_like(tgs[:, 0]), -tgs[:, 2], tgs[:, 1]], axis=-1) + left_offset = widths[:, None] * left_dir / np.linalg.norm(left_dir) left_points = curve_skeleton + left_offset right_points = curve_skeleton - left_offset - profile = np.concatenate([left_points, right_points[::-1]]) - return profile \ No newline at end of file + profile = np.concatenate([left_points, right_points[::-1]]) + return profile diff --git a/infinigen/assets/utils/laplacian.py b/infinigen/assets/utils/laplacian.py index ef748eb1f..27f3c115c 100644 --- a/infinigen/assets/utils/laplacian.py +++ b/infinigen/assets/utils/laplacian.py @@ -4,13 +4,11 @@ # Authors: Lingjie Mei -import bpy import 
bmesh import numpy as np from numpy.random import uniform -from skimage.measure import find_contours, marching_cubes from scipy.ndimage import convolve -from infinigen.core.util import blender as butil +from skimage.measure import marching_cubes from infinigen.assets.utils.object import data2mesh @@ -21,22 +19,31 @@ def mesh_grid(n, sizes): def init_mesh_3d(n, sizes): - fn = lambda x, y, z: uniform(.5, 1) * (x - uniform(-.2, .2)) ** 2 + uniform(.5, 1) * ( - y - uniform(-.2, .2)) ** 2 + uniform(.1, .2) * z ** 2 < .2 * .2 - extend = lambda f: uniform(0, 1, f.shape) < convolve(f, np.ones((3, 3, 3))) + def fn(x, y, z): + return ( + uniform(0.5, 1) * (x - uniform(-0.2, 0.2)) ** 2 + + uniform(0.5, 1) * (y - uniform(-0.2, 0.2)) ** 2 + + uniform(0.1, 0.2) * z**2 + < 0.2 * 0.2 + ) + + def extend(f): + return uniform(0, 1, f.shape) < convolve(f, np.ones((3, 3, 3))) x, y, z = mesh_grid(n, sizes) f = fn(x, y, z) - a = np.where(f, uniform(.1, .5, x.shape), 0) + uniform(0, .02, x.shape) + a = np.where(f, uniform(0.1, 0.5, x.shape), 0) + uniform(0, 0.02, x.shape) b = np.where(extend(f), 1, uniform(-1, 1, x.shape)).astype(float) return a, b def init_mesh_2d(n, sizes): - fn = lambda x, y: x <= 2 / n + def fn(x, y): + return x <= 2 / n + x, y = mesh_grid(n, sizes) f = fn(x, y) - a = np.where(f, .99, 0) + uniform(0, .01, x.shape) + a = np.where(f, 0.99, 0) + uniform(0, 0.01, x.shape) b = uniform(-1, 1, x.shape) return a, b @@ -46,16 +53,26 @@ def build_laplacian(st, a, b, t, k, dt, tau, eps, alpha, gamma, teq): lap_a = convolve(a, st) lap_b = convolve(b, st) m = alpha / np.pi * np.arctan(gamma * (teq - b)) - delta_a = (eps * eps * lap_a + a * (1. - a) * (a - .5 + m)) / tau + delta_a = (eps * eps * lap_a + a * (1.0 - a) * (a - 0.5 + m)) / tau delta_b = lap_b + k * delta_a a += delta_a * dt b += delta_b * dt return a, b -def build_laplacian_3d(n=32, t=800, k=2., dt=.0005, tau=.0003, eps=.01, alpha=.9, gamma=10., teq=1.): - stencil = np.array([[[1, 3, 1], [3, 14, 3], [1, 3, 1]], [[3, 14, 3], [14, -128, 14], [3, 14, 3]], - [[1, 3, 1], [3, 14, 3], [1, 3, 1]]]) / 128 +def build_laplacian_3d( + n=32, t=800, k=2.0, dt=0.0005, tau=0.0003, eps=0.01, alpha=0.9, gamma=10.0, teq=1.0 +): + stencil = ( + np.array( + [ + [[1, 3, 1], [3, 14, 3], [1, 3, 1]], + [[3, 14, 3], [14, -128, 14], [3, 14, 3]], + [[1, 3, 1], [3, 14, 3], [1, 3, 1]], + ] + ) + / 128 + ) height = 1.5 sizes = [-1, 1], [-1, 1], [0, height] @@ -64,18 +81,32 @@ def build_laplacian_3d(n=32, t=800, k=2., dt=.0005, tau=.0003, eps=.01, alpha=.9 a, b = build_laplacian(stencil * n * n, a, b, t, k, dt, tau, eps, alpha, gamma, teq) a = np.pad(a, 1) - vertices, faces, _, _ = marching_cubes(a, .5) + vertices, faces, _, _ = marching_cubes(a, 0.5) vertices -= 1 vertices /= n vertices[:, :-1] -= 1 x, y, z = vertices.T vertices[:, :-1] *= np.expand_dims( - np.maximum(np.abs(x), np.abs(y)) / (np.sqrt(x ** 2 + y ** 2) + 1e-6) * (1 - z / height) + z / height, - -1) + np.maximum(np.abs(x), np.abs(y)) + / (np.sqrt(x**2 + y**2) + 1e-6) + * (1 - z / height) + + z / height, + -1, + ) return data2mesh(vertices, [], faces) -def build_laplacian_2d(n=128, t=10000, k=1.5, dt=.0002, tau=.0003, eps=.01, alpha=.9, gamma=10., teq=1.): +def build_laplacian_2d( + n=128, + t=10000, + k=1.5, + dt=0.0002, + tau=0.0003, + eps=0.01, + alpha=0.9, + gamma=10.0, + teq=1.0, +): stencil = np.array([[1, 4, 1], [4, -20, 4], [1, 4, 1]]) / 20 sizes = [0, 1], [0, 1] @@ -86,7 +117,7 @@ def build_laplacian_2d(n=128, t=10000, k=1.5, dt=.0002, tau=.0003, eps=.01, alph a = np.pad(a, 1) a = np.stack([a, a], 
axis=-1) - vertices, faces, _, _ = marching_cubes(a, .5) + vertices, faces, _, _ = marching_cubes(a, 0.5) vertices -= 1 vertices /= n mesh = data2mesh(vertices, [], faces) @@ -97,6 +128,6 @@ def build_laplacian_2d(n=128, t=10000, k=1.5, dt=.0002, tau=.0003, eps=.01, alph bmesh.ops.delete(bm, geom=vertices_to_remove) for v in bm.verts: x, y, z = v.co - v.co *= np.maximum(np.abs(x), np.abs(y)) / (np.sqrt(x ** 2 + y ** 2) + 1e-6) + v.co *= np.maximum(np.abs(x), np.abs(y)) / (np.sqrt(x**2 + y**2) + 1e-6) bm.to_mesh(mesh) return data2mesh(vertices, [], faces) diff --git a/infinigen/assets/utils/mesh.py b/infinigen/assets/utils/mesh.py index fd7f1f319..4a9559dd9 100644 --- a/infinigen/assets/utils/mesh.py +++ b/infinigen/assets/utils/mesh.py @@ -5,8 +5,8 @@ # Authors: Lingjie Mei -import bpy import bmesh +import bpy import numpy as np import shapely import trimesh @@ -14,62 +14,101 @@ from numpy.random import normal, uniform from shapely import LineString -from infinigen.assets.utils.decorate import read_co, read_edges, read_edge_length, remove_faces, read_area -from infinigen.assets.utils.object import new_cube, obj2trimesh, separate_loose +from infinigen.assets.utils.decorate import read_co, read_edges +from infinigen.assets.utils.object import obj2trimesh, separate_loose from infinigen.assets.utils.shapes import dissolve_limited -from infinigen.core.nodes.node_info import Nodes -from infinigen.core.nodes.node_wrangler import NodeWrangler -from infinigen.core import surface from infinigen.core.util import blender as butil from infinigen.core.util.math import normalize -def build_prism_mesh(n=6, r_min=1., r_max=1.5, height=.3, tilt=.3): +def build_prism_mesh(n=6, r_min=1.0, r_max=1.5, height=0.3, tilt=0.3): angles = polygon_angles(n) a_upper = uniform(-np.pi / 12, np.pi / 12, n) a_lower = uniform(-np.pi / 12, np.pi / 12, n) - z_upper = 1 + uniform(-height, height, n) + uniform(0, tilt) * np.cos(angles + uniform(-np.pi, np.pi)) - z_lower = 1 + uniform(-height, height, n) + uniform(0, tilt) * np.sin(angles + uniform(-np.pi, np.pi)) + z_upper = ( + 1 + + uniform(-height, height, n) + + uniform(0, tilt) * np.cos(angles + uniform(-np.pi, np.pi)) + ) + z_lower = ( + 1 + + uniform(-height, height, n) + + uniform(0, tilt) * np.sin(angles + uniform(-np.pi, np.pi)) + ) r_upper = uniform(r_min, r_max, n) r_lower = uniform(r_min, r_max, n) vertices = np.block( - [[r_upper * np.cos(angles + a_upper), r_lower * np.cos(angles + a_lower), 0, 0], - [r_upper * np.sin(angles + a_upper), r_lower * np.sin(angles + a_lower), 0, 0], - [z_upper, -z_lower, 1, -1]] + [ + [ + r_upper * np.cos(angles + a_upper), + r_lower * np.cos(angles + a_lower), + 0, + 0, + ], + [ + r_upper * np.sin(angles + a_upper), + r_lower * np.sin(angles + a_lower), + 0, + 0, + ], + [z_upper, -z_lower, 1, -1], + ] ).T r = np.arange(n) s = np.roll(r, -1) faces = np.block( - [[r, r, r + n, s + n], [s, r + n, s + n, r + n], [np.full(n, 2 * n), s, s, np.full(n, 2 * n + 1)]] + [ + [r, r, r + n, s + n], + [s, r + n, s + n, r + n], + [np.full(n, 2 * n), s, s, np.full(n, 2 * n + 1)], + ] ).T - mesh = bpy.data.meshes.new('prism') + mesh = bpy.data.meshes.new("prism") mesh.from_pydata(vertices, [], faces) mesh.update() return mesh -def build_convex_mesh(n=6, height=.2, tilt=.2): +def build_convex_mesh(n=6, height=0.2, tilt=0.2): angles = polygon_angles(n) a_upper = uniform(-np.pi / 18, 0, n) a_lower = uniform(0, np.pi / 18, n) - z_upper = 1 + normal(0, height, n) + uniform(0, tilt) * np.cos(angles + uniform(-np.pi, np.pi)) - z_lower = 1 + normal(0, 
height, n) + uniform(0, tilt) * np.cos(angles + uniform(-np.pi, np.pi)) + z_upper = ( + 1 + + normal(0, height, n) + + uniform(0, tilt) * np.cos(angles + uniform(-np.pi, np.pi)) + ) + z_lower = ( + 1 + + normal(0, height, n) + + uniform(0, tilt) * np.cos(angles + uniform(-np.pi, np.pi)) + ) r = 1.8 vertices = np.block( - [[r * np.cos(angles + a_upper), r * np.cos(angles + a_lower), 0, 0], + [ + [r * np.cos(angles + a_upper), r * np.cos(angles + a_lower), 0, 0], [r * np.sin(angles + a_upper), r * np.sin(angles + a_lower), 0, 0], - [z_upper, -z_lower, z_upper.max() + uniform(.1, .2), - -z_lower.max() - uniform(.1, .2)]] + [ + z_upper, + -z_lower, + z_upper.max() + uniform(0.1, 0.2), + -z_lower.max() - uniform(0.1, 0.2), + ], + ] ).T r = np.arange(n) s = np.roll(r, -1) faces = np.block( - [[r, r, r + n, s + n], [s, r + n, s + n, r + n], [np.full(n, 2 * n), s, s, np.full(n, 2 * n + 1)]] + [ + [r, r, r + n, s + n], + [s, r + n, s + n, r + n], + [np.full(n, 2 * n), s, s, np.full(n, 2 * n + 1)], + ] ).T - mesh = bpy.data.meshes.new('prism') + mesh = bpy.data.meshes.new("prism") mesh.from_pydata(vertices, [], faces) mesh.update() return mesh @@ -82,20 +121,25 @@ def polygon_angles(n, min_angle=np.pi / 6, max_angle=np.pi * 2 / 3): if (difference >= min_angle).all() and (difference <= max_angle).all(): break else: - angles = np.sort((np.arange(n) * (2 * np.pi / n) + uniform(0, np.pi * 2)) % (np.pi * 2)) + angles = np.sort( + (np.arange(n) * (2 * np.pi / n) + uniform(0, np.pi * 2)) % (np.pi * 2) + ) return angles def face_area(obj): - with butil.ViewportMode(obj, 'EDIT'): + with butil.ViewportMode(obj, "EDIT"): bm = bmesh.from_edit_mesh(obj.data) return sum(f.calc_area() for f in bm.faces) def centroid(obj): - with butil.ViewportMode(obj, 'EDIT'): + with butil.ViewportMode(obj, "EDIT"): bm = bmesh.from_edit_mesh(obj.data) - s = sum((f.calc_area() * f.calc_center_median() for f in bm.faces), Vector((0, 0, 0))) + s = sum( + (f.calc_area() * f.calc_center_median() for f in bm.faces), + Vector((0, 0, 0)), + ) area = sum(f.calc_area() for f in bm.faces) return np.array(s / area) @@ -113,7 +157,7 @@ def treeify(obj): return obj obj = separate_loose(obj) - with butil.ViewportMode(obj, 'EDIT'): + with butil.ViewportMode(obj, "EDIT"): bm = bmesh.from_edit_mesh(obj.data) bm.verts.ensure_lookup_table() bm.edges.ensure_lookup_table() @@ -130,13 +174,15 @@ def treeify(obj): included[o.index] = 1 to_keep.append(e) queue.append(o) - bmesh.ops.delete(bm, geom=list(set(bm.edges).difference(to_keep)), context='EDGES') + bmesh.ops.delete( + bm, geom=list(set(bm.edges).difference(to_keep)), context="EDGES" + ) bmesh.update_edit_mesh(obj.data) return obj def convert2ls(obj): - with butil.ViewportMode(obj, 'EDIT'): + with butil.ViewportMode(obj, "EDIT"): bm = bmesh.from_edit_mesh(obj.data) verts = [next(v for v in bm.verts if len(v.link_edges) == 1)] for i in range(len(bm.verts) - 1): @@ -156,7 +202,7 @@ def convert2mls(obj): def fix_tree(obj): - with butil.ViewportMode(obj, 'EDIT'), butil.Suppress(): + with butil.ViewportMode(obj, "EDIT"), butil.Suppress(): bpy.ops.mesh.remove_doubles() bm = bmesh.from_edit_mesh(obj.data) vertices_remove = [] @@ -171,7 +217,7 @@ def fix_tree(obj): def longest_path(obj): - with butil.ViewportMode(obj, 'EDIT'), butil.Suppress(): + with butil.ViewportMode(obj, "EDIT"), butil.Suppress(): bpy.ops.mesh.remove_doubles() bm = bmesh.from_edit_mesh(obj.data) @@ -201,10 +247,16 @@ def longest_path_(u, v): def bevel(obj, width, **kwargs): - preset = np.random.choice(['LINE', 'SUPPORTS', 'CORNICE', 
'CROWN', 'STEPS']) + preset = np.random.choice(["LINE", "SUPPORTS", "CORNICE", "CROWN", "STEPS"]) obj, mod = butil.modify_mesh( - obj, 'BEVEL', width=width, segments=np.random.randint(20, 30), - profile_type='CUSTOM', apply=False, return_mod=True, **kwargs + obj, + "BEVEL", + width=width, + segments=np.random.randint(20, 30), + profile_type="CUSTOM", + apply=False, + return_mod=True, + **kwargs, ) reset_preset(mod.custom_profile, preset) butil.apply_modifiers(obj, mod) @@ -214,36 +266,65 @@ def reset_preset(profile, name, n=None): if n is None: n = np.random.randint(8, 15) match name: - case 'LINE': - configs = [(1.0, 0.0, 0, 'AUTO', 'AUTO'), (0.0, 1.0, 0, 'AUTO', 'AUTO')] - case 'CORNICE': - configs = [(1.0, 0.0, 0, 'VECTOR', 'VECTOR'), (1.0, 0.125, 0, 'VECTOR', 'VECTOR'), - (0.92, 0.16, 0, 'AUTO', 'AUTO'), (0.875, 0.25, 0, 'VECTOR', 'VECTOR'), - (0.8, 0.25, 0, 'VECTOR', 'VECTOR'), (0.733, 0.433, 0, 'AUTO', 'AUTO'), - (0.582, 0.522, 0, 'AUTO', 'AUTO'), (0.4, 0.6, 0, 'AUTO', 'AUTO'), - (0.289, 0.727, 0, 'AUTO', 'AUTO'), (0.25, 0.925, 0, 'VECTOR', 'VECTOR'), - (0.175, 0.925, 0, 'VECTOR', 'VECTOR'), (0.175, 1.0, 0, 'VECTOR', 'VECTOR'), - (0.0, 1.0, 0, 'VECTOR', 'VECTOR')] - case 'CROWN': - configs = [(1.0, 0.0, 0, 'VECTOR', 'VECTOR'), (1.0, 0.25, 0, 'VECTOR', 'VECTOR'), - (0.75, 0.25, 0, 'VECTOR', 'VECTOR'), (0.75, 0.325, 0, 'VECTOR', 'VECTOR'), - (0.925, 0.4, 0, 'AUTO', 'AUTO'), (0.975, 0.5, 0, 'AUTO', 'AUTO'), - (0.94, 0.65, 0, 'AUTO', 'AUTO'), (0.85, 0.75, 0, 'AUTO', 'AUTO'), - (0.75, 0.875, 0, 'AUTO', 'AUTO'), (0.7, 1.0, 0, 'VECTOR', 'VECTOR'), - (0.0, 1.0, 0, 'VECTOR', 'VECTOR')] - case 'SUPPORTS': - configs = [(1.0, 0.0, 0, 'VECTOR', 'VECTOR'), (1.0, 0.5, 0, 'VECTOR', 'VECTOR')] + list( - (1 - .5 * ( - 1 - np.cos(i / (n - 3) * np.pi / 2)), .5 + .5 * np.sin(i / (n - 3) * np.pi / 2), 0, 'AUTO', - 'AUTO') for i in range(1, n - 2) - ) + [(0.5, 1.0, 0, 'VECTOR', 'VECTOR'), - (0.0, 1.0, 0, 'VECTOR', 'VECTOR')] + case "LINE": + configs = [(1.0, 0.0, 0, "AUTO", "AUTO"), (0.0, 1.0, 0, "AUTO", "AUTO")] + case "CORNICE": + configs = [ + (1.0, 0.0, 0, "VECTOR", "VECTOR"), + (1.0, 0.125, 0, "VECTOR", "VECTOR"), + (0.92, 0.16, 0, "AUTO", "AUTO"), + (0.875, 0.25, 0, "VECTOR", "VECTOR"), + (0.8, 0.25, 0, "VECTOR", "VECTOR"), + (0.733, 0.433, 0, "AUTO", "AUTO"), + (0.582, 0.522, 0, "AUTO", "AUTO"), + (0.4, 0.6, 0, "AUTO", "AUTO"), + (0.289, 0.727, 0, "AUTO", "AUTO"), + (0.25, 0.925, 0, "VECTOR", "VECTOR"), + (0.175, 0.925, 0, "VECTOR", "VECTOR"), + (0.175, 1.0, 0, "VECTOR", "VECTOR"), + (0.0, 1.0, 0, "VECTOR", "VECTOR"), + ] + case "CROWN": + configs = [ + (1.0, 0.0, 0, "VECTOR", "VECTOR"), + (1.0, 0.25, 0, "VECTOR", "VECTOR"), + (0.75, 0.25, 0, "VECTOR", "VECTOR"), + (0.75, 0.325, 0, "VECTOR", "VECTOR"), + (0.925, 0.4, 0, "AUTO", "AUTO"), + (0.975, 0.5, 0, "AUTO", "AUTO"), + (0.94, 0.65, 0, "AUTO", "AUTO"), + (0.85, 0.75, 0, "AUTO", "AUTO"), + (0.75, 0.875, 0, "AUTO", "AUTO"), + (0.7, 1.0, 0, "VECTOR", "VECTOR"), + (0.0, 1.0, 0, "VECTOR", "VECTOR"), + ] + case "SUPPORTS": + configs = ( + [(1.0, 0.0, 0, "VECTOR", "VECTOR"), (1.0, 0.5, 0, "VECTOR", "VECTOR")] + + list( + ( + 1 - 0.5 * (1 - np.cos(i / (n - 3) * np.pi / 2)), + 0.5 + 0.5 * np.sin(i / (n - 3) * np.pi / 2), + 0, + "AUTO", + "AUTO", + ) + for i in range(1, n - 2) + ) + + [(0.5, 1.0, 0, "VECTOR", "VECTOR"), (0.0, 1.0, 0, "VECTOR", "VECTOR")] + ) case _: n_steps_x = n if n % 2 == 0 else n - 1 n_steps_y = n - 2 if n % 2 == 0 else n - 1 configs = list( - (1 - (i + 1) // 2 * 2 / n_steps_x, i // 2 * 2 / n_steps_y, 0, 'VECTOR', 'VECTOR') for i in - 
range(n) + ( + 1 - (i + 1) // 2 * 2 / n_steps_x, + i // 2 * 2 / n_steps_y, + 0, + "VECTOR", + "VECTOR", + ) + for i in range(n) ) k = len(configs) - len(profile.points) for i in range(k): @@ -258,13 +339,15 @@ def reset_preset(profile, name, n=None): def canonicalize_ls(line): - line = shapely.simplify(line, .02) + line = shapely.simplify(line, 0.02) while True: coords = np.array(line.coords) diff = coords[1:] - coords[:-1] diff = diff / (np.linalg.norm(diff, axis=-1, keepdims=True) + 1e-6) product = (diff[:-1] * diff[1:]).sum(-1) - valid_indices = (np.nonzero((1 - 1e-6 > product) & (product > -.8))[0] + 1).tolist() + valid_indices = ( + np.nonzero((1 - 1e-6 > product) & (product > -0.8))[0] + 1 + ).tolist() ls = LineString(coords[[0] + valid_indices + [-1]]) if ls.length < line.length: line = ls @@ -279,10 +362,10 @@ def canonicalize_mls(mls): def separate_selected(obj, face=False): butil.select_none() - with butil.ViewportMode(obj, 'EDIT'): + with butil.ViewportMode(obj, "EDIT"): if face: bpy.ops.mesh.duplicate_move() - bpy.ops.mesh.separate(type='SELECTED') + bpy.ops.mesh.separate(type="SELECTED") o = next(o for o in bpy.context.selected_objects if o != obj) butil.select_none() return o @@ -308,16 +391,15 @@ def snap_mesh(obj, eps=1e-3): indices = np.concatenate([[0], np.nonzero(es[1:] != es[:-1])[0] + 1]) vs = vs[indices] es = es[indices] - with butil.ViewportMode(obj, 'EDIT'): + with butil.ViewportMode(obj, "EDIT"): bm = bmesh.from_edit_mesh(obj.data) bm.verts.ensure_lookup_table() bm.edges.ensure_lookup_table() dis = co[w[es]] - co[u[es]] norms = np.linalg.norm(dis, axis=-1) - percents = ((co[vs] - co[u[es]]) * dis).sum(-1) / (norms ** 2) + percents = ((co[vs] - co[u[es]]) * dis).sum(-1) / (norms**2) edges = [bm.edges[e] for e in es] for e, p in zip(edges, percents): bmesh.ops.subdivide_edges(bm, edges=[e], cuts=1, edge_percents={e: p}) bmesh.ops.remove_doubles(bm, verts=bm.verts, dist=eps * 1.5) bmesh.update_edit_mesh(obj.data) - diff --git a/infinigen/assets/utils/misc.py b/infinigen/assets/utils/misc.py index 006a54043..5ac931792 100644 --- a/infinigen/assets/utils/misc.py +++ b/infinigen/assets/utils/misc.py @@ -4,22 +4,19 @@ import string from functools import update_wrapper, wraps -# Authors: Lingjie Mei - - import bpy import numpy as np from numpy.random import normal, uniform from infinigen.assets.utils.object import origin2lowest +from infinigen.core.nodes import Nodes, NodeWrangler from infinigen.core.util import blender as butil from infinigen.core.util.math import clip_gaussian -from infinigen.core.util.random import log_uniform # imported by other files -from infinigen.core.nodes import NodeWrangler, Nodes +# Authors: Lingjie Mei -class CountInstance: +class CountInstance: def __init__(self, name): self.name = name @@ -46,7 +43,9 @@ def sample_direction(min_z): def subclasses(cls): - return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in subclasses(c)]) + return set(cls.__subclasses__()).union( + [s for c in cls.__subclasses__() for s in subclasses(c)] + ) def make_normalized_factory(cls): @@ -66,8 +65,8 @@ def create_asset(self, **params): return CLS -def build_color_ramp(nw: NodeWrangler, x, positions, colors, mode='HSV'): - cr = nw.new_node(Nodes.ColorRamp, input_kwargs={'Fac': x}) +def build_color_ramp(nw: NodeWrangler, x, positions, colors, mode="HSV"): + cr = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": x}) cr.color_ramp.color_mode = mode elements = cr.color_ramp.elements size = len(positions) @@ -132,11 +131,21 @@ def 
assign_material(obj, material): character_set = list(string.ascii_lowercase + string.ascii_uppercase + string.digits) character_set_weights = np.concatenate( - [1.5 * np.ones(len(string.ascii_lowercase)), 0.5 * np.ones(len(string.ascii_uppercase)), - 0.5 * np.ones(len(string.digits))]) + [ + 1.5 * np.ones(len(string.ascii_lowercase)), + 0.5 * np.ones(len(string.ascii_uppercase)), + 0.5 * np.ones(len(string.digits)), + ] +) character_set_weights /= character_set_weights.sum() def generate_text(): - return "".join(np.random.choice(character_set, size=int(clip_gaussian(3, 7, 2, 15)), replace=True, - p=character_set_weights)) + return "".join( + np.random.choice( + character_set, + size=int(clip_gaussian(3, 7, 2, 15)), + replace=True, + p=character_set_weights, + ) + ) diff --git a/infinigen/assets/utils/nodegroup.py b/infinigen/assets/utils/nodegroup.py index 5f559825d..cb3fe13c8 100644 --- a/infinigen/assets/utils/nodegroup.py +++ b/infinigen/assets/utils/nodegroup.py @@ -10,14 +10,12 @@ import bpy import numpy as np -from infinigen.assets.utils.misc import toggle_hide -from infinigen.core.nodes import node_utils +from infinigen.core import surface from infinigen.core.nodes.node_info import Nodes from infinigen.core.nodes.node_wrangler import NodeWrangler -from infinigen.core import surface -def build_curve(nw: NodeWrangler, positions, circular=False, handle='VECTOR'): +def build_curve(nw: NodeWrangler, positions, circular=False, handle="VECTOR"): length = 1 transferred_positions = [] id_mesh = nw.new_node(Nodes.InputID) @@ -25,80 +23,127 @@ def build_curve(nw: NodeWrangler, positions, circular=False, handle='VECTOR'): if isinstance(p, Iterable) and not isinstance(p, bpy.types.Nodes): length = len(p) transferred_positions.append( - nw.build_float_curve(id_mesh, np.stack([np.arange(length), np.array(p)], -1), handle)) + nw.build_float_curve( + id_mesh, np.stack([np.arange(length), np.array(p)], -1), handle + ) + ) else: transferred_positions.append(p) if circular: - base_curve = nw.new_node(Nodes.CurveCircle, input_kwargs={'Resolution': length}) + base_curve = nw.new_node(Nodes.CurveCircle, input_kwargs={"Resolution": length}) else: - base_curve = nw.new_node(Nodes.MeshToCurve, input_kwargs={ - 'Mesh': nw.new_node(Nodes.MeshLine, input_kwargs={'Count': length}, attrs={'mode': 'END_POINTS'}) - }) - - curve = nw.new_node(Nodes.SetPosition, input_kwargs={ - 'Geometry': base_curve, - 'Position': nw.new_node(Nodes.CombineXYZ, transferred_positions) - }) + base_curve = nw.new_node( + Nodes.MeshToCurve, + input_kwargs={ + "Mesh": nw.new_node( + Nodes.MeshLine, + input_kwargs={"Count": length}, + attrs={"mode": "END_POINTS"}, + ) + }, + ) + + curve = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": base_curve, + "Position": nw.new_node(Nodes.CombineXYZ, transferred_positions), + }, + ) return curve -def geo_radius(nw: NodeWrangler, radius, resolution=6, merge_distance=.004, rotation=0, to_align_tilt=True, - align_tilt_axis=(0, 0, 1)): - skeleton = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketGeometry', 'Geometry', None)]) +def geo_radius( + nw: NodeWrangler, + radius, + resolution=6, + merge_distance=0.004, + rotation=0, + to_align_tilt=True, + align_tilt_axis=(0, 0, 1), +): + skeleton = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) radius = surface.eval_argument(nw, radius) curve = nw.new_node(Nodes.MeshToCurve, [skeleton]) if to_align_tilt: curve = align_tilt(nw, curve, align_tilt_axis) - skeleton = 
nw.new_node(Nodes.SetCurveRadius, input_kwargs={'Curve': curve, 'Radius': radius}) - geometry = nw.curve2mesh(skeleton, nw.new_node(Nodes.Transform, [ - nw.new_node(Nodes.CurveCircle, input_kwargs={'Resolution': resolution})], - input_kwargs={'Rotation': [0, 0, rotation]})) + skeleton = nw.new_node( + Nodes.SetCurveRadius, input_kwargs={"Curve": curve, "Radius": radius} + ) + geometry = nw.curve2mesh( + skeleton, + nw.new_node( + Nodes.Transform, + [nw.new_node(Nodes.CurveCircle, input_kwargs={"Resolution": resolution})], + input_kwargs={"Rotation": [0, 0, rotation]}, + ), + ) if merge_distance > 0: geometry = nw.new_node(Nodes.MergeByDistance, [geometry, None, merge_distance]) - nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': geometry}) + nw.new_node(Nodes.GroupOutput, input_kwargs={"Geometry": geometry}) def geo_selection(nw: NodeWrangler, selection): - geometry = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketGeometry', 'Geometry', None)]) + geometry = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) selection = surface.eval_argument(nw, selection) geometry = nw.new_node(Nodes.SeparateGeometry, [geometry, selection]) - nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': geometry}) + nw.new_node(Nodes.GroupOutput, input_kwargs={"Geometry": geometry}) -def geo_selection_attribute(nw: NodeWrangler, selection, name, domain='POINT'): - geometry = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketGeometry', 'Geometry', None)]) +def geo_selection_attribute(nw: NodeWrangler, selection, name, domain="POINT"): + geometry = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) selection = surface.eval_argument(nw, selection) - geometry = nw.new_node(Nodes.StoreNamedAttribute, [geometry, None, name, None, selection], - attrs={'domain': domain}) - nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': geometry}) + geometry = nw.new_node( + Nodes.StoreNamedAttribute, + [geometry, None, name, None, selection], + attrs={"domain": domain}, + ) + nw.new_node(Nodes.GroupOutput, input_kwargs={"Geometry": geometry}) def geo_base_selection(nw: NodeWrangler, base_obj, selection, merge_threshold=0): - geometry = nw.new_node(Nodes.ObjectInfo, [base_obj], attrs={'transform_space': 'RELATIVE'}).outputs[ - 'Geometry'] + geometry = nw.new_node( + Nodes.ObjectInfo, [base_obj], attrs={"transform_space": "RELATIVE"} + ).outputs["Geometry"] selection = surface.eval_argument(nw, selection) geometry = nw.new_node(Nodes.SeparateGeometry, [geometry, selection]) if merge_threshold > 0: geometry = nw.new_node(Nodes.MergeByDistance, [geometry, None, merge_threshold]) - nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': geometry}) + nw.new_node(Nodes.GroupOutput, input_kwargs={"Geometry": geometry}) -def align_tilt(nw: NodeWrangler, curve, axis=(1, 0, 0), noise_strength=0, noise_scale=.5): - axis = nw.vector_math('NORMALIZE', axis) +def align_tilt( + nw: NodeWrangler, curve, axis=(1, 0, 0), noise_strength=0, noise_scale=0.5 +): + axis = nw.vector_math("NORMALIZE", axis) if noise_strength != 0: z = nw.separate(nw.new_node(Nodes.InputPosition))[-1] - rot_z = nw.scalar_multiply(noise_strength, - nw.new_node(Nodes.NoiseTexture, input_kwargs={'W': z, 'Scale': noise_scale}, - attrs={'noise_dimensions': '1D'})) - axis = nw.new_node(Nodes.VectorRotate, input_kwargs={'Vector': axis, 'Angle': rot_z}, - attrs={'rotation_type': 'Z_AXIS'}) + rot_z = nw.scalar_multiply( + noise_strength, + nw.new_node( + 
Nodes.NoiseTexture, + input_kwargs={"W": z, "Scale": noise_scale}, + attrs={"noise_dimensions": "1D"}, + ), + ) + axis = nw.new_node( + Nodes.VectorRotate, + input_kwargs={"Vector": axis, "Angle": rot_z}, + attrs={"rotation_type": "Z_AXIS"}, + ) normal = nw.new_node(Nodes.InputNormal) - tangent = nw.vector_math('NORMALIZE', nw.new_node(Nodes.CurveTangent)) - axis = nw.vector_math('NORMALIZE', nw.sub(axis, nw.dot(axis, tangent))) + tangent = nw.vector_math("NORMALIZE", nw.new_node(Nodes.CurveTangent)) + axis = nw.vector_math("NORMALIZE", nw.sub(axis, nw.dot(axis, tangent))) cos = nw.dot(axis, normal) - sin = nw.dot(nw.vector_math('CROSS_PRODUCT', normal, axis), tangent) - tilt = nw.math('ARCTAN2', sin, cos) + sin = nw.dot(nw.vector_math("CROSS_PRODUCT", normal, axis), tangent) + tilt = nw.math("ARCTAN2", sin, cos) curve = nw.new_node(Nodes.SetCurveTilt, [curve, None, tilt]) return curve diff --git a/infinigen/assets/utils/nodegroups/__init__.py b/infinigen/assets/utils/nodegroups/__init__.py new file mode 100644 index 000000000..8bd0ae453 --- /dev/null +++ b/infinigen/assets/utils/nodegroups/__init__.py @@ -0,0 +1 @@ +from . import attach, curve, geometry diff --git a/infinigen/assets/utils/nodegroups/attach.py b/infinigen/assets/utils/nodegroups/attach.py new file mode 100644 index 000000000..62e5d2152 --- /dev/null +++ b/infinigen/assets/utils/nodegroups/attach.py @@ -0,0 +1,448 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Alexander Raistrick + + +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler + +from .curve import ( + nodegroup_profile_part, + nodegroup_smooth_taper, + nodegroup_warped_circle_curve, +) +from .math import nodegroup_deg2_rad + + +@node_utils.to_nodegroup( + "nodegroup_part_surface", singleton=True, type="GeometryNodeTree" +) +def nodegroup_part_surface(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Skeleton Curve", None), + ("NodeSocketGeometry", "Skin Mesh", None), + ("NodeSocketFloatFactor", "Length Fac", 0.0), + ("NodeSocketVectorEuler", "Ray Rot", (0.0, 0.0, 0.0)), + ("NodeSocketFloat", "Rad", 0.0), + ], + ) + + sample_curve = nw.new_node( + Nodes.SampleCurve, + input_kwargs={ + "Curve": group_input.outputs["Skeleton Curve"], + "Factor": group_input.outputs["Length Fac"], + }, + attrs={"mode": "FACTOR"}, + ) + + vector_rotate = nw.new_node( + Nodes.VectorRotate, + input_kwargs={ + "Vector": sample_curve.outputs["Tangent"], + "Rotation": group_input.outputs["Ray Rot"], + }, + attrs={"rotation_type": "EULER_XYZ"}, + ) + + raycast = nw.new_node( + Nodes.Raycast, + input_kwargs={ + "Target Geometry": group_input.outputs["Skin Mesh"], + "Source Position": sample_curve.outputs["Position"], + "Ray Direction": vector_rotate, + "Ray Length": 5.0, + }, + ) + + lerp = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Vector": group_input.outputs["Rad"], + 9: sample_curve.outputs["Position"], + 10: raycast.outputs["Hit Position"], + }, + label="lerp", + attrs={"data_type": "FLOAT_VECTOR", "clamp": False}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Position": lerp.outputs["Vector"], + "Hit Normal": raycast.outputs["Hit Normal"], + "Tangent": sample_curve.outputs["Tangent"], + "Skeleton Pos": 
sample_curve.outputs["Position"], + }, + ) + + +@node_utils.to_nodegroup( + "nodegroup_part_surface_simple", singleton=True, type="GeometryNodeTree" +) +def nodegroup_part_surface_simple(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Skeleton Curve", None), + ("NodeSocketGeometry", "Skin Mesh", None), + ("NodeSocketVector", "Length, Yaw, Rad", (0.0, 0.0, 0.0)), + ], + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, + input_kwargs={"Vector": group_input.outputs["Length, Yaw, Rad"]}, + ) + + clamp_1 = nw.new_node( + Nodes.Clamp, input_kwargs={"Value": separate_xyz.outputs["X"]} + ) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": 1.5708, "Y": separate_xyz.outputs["Y"], "Z": 1.5708}, + ) + + part_surface = nw.new_node( + nodegroup_part_surface().name, + input_kwargs={ + "Skeleton Curve": group_input.outputs["Skeleton Curve"], + "Skin Mesh": group_input.outputs["Skin Mesh"], + "Length Fac": clamp_1, + "Ray Rot": combine_xyz, + "Rad": separate_xyz.outputs["Z"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Position": part_surface.outputs["Position"], + "Hit Normal": part_surface.outputs["Hit Normal"], + "Tangent": part_surface.outputs["Tangent"], + }, + ) + + +@node_utils.to_nodegroup( + "nodegroup_raycast_rotation", singleton=True, type="GeometryNodeTree" +) +def nodegroup_raycast_rotation(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVectorEuler", "Rotation", (0.0, 0.0, 0.0)), + ("NodeSocketVector", "Hit Normal", (0.0, 0.0, 1.0)), + ("NodeSocketVector", "Curve Tangent", (0.0, 0.0, 1.0)), + ("NodeSocketBool", "Do Normal Rot", False), + ("NodeSocketBool", "Do Tangent Rot", False), + ], + ) + + align_euler_to_vector = nw.new_node( + Nodes.AlignEulerToVector, + input_kwargs={"Vector": group_input.outputs["Hit Normal"]}, + ) + + rotate_euler = nw.new_node( + Nodes.RotateEuler, + input_kwargs={ + "Rotation": group_input.outputs["Rotation"], + "Rotate By": align_euler_to_vector, + }, + ) + + if_normal_rot = nw.new_node( + Nodes.Switch, + input_kwargs={ + 0: group_input.outputs["Do Normal Rot"], + 8: group_input.outputs["Rotation"], + 9: rotate_euler, + }, + label="if_normal_rot", + attrs={"input_type": "VECTOR"}, + ) + + align_euler_to_vector_1 = nw.new_node( + Nodes.AlignEulerToVector, + input_kwargs={ + "Rotation": group_input.outputs["Rotation"], + "Vector": group_input.outputs["Curve Tangent"], + }, + ) + + rotate_euler_1 = nw.new_node( + Nodes.RotateEuler, + input_kwargs={ + "Rotation": align_euler_to_vector_1, + "Rotate By": group_input.outputs["Rotation"], + }, + attrs={"space": "LOCAL"}, + ) + + if_tangent_rot = nw.new_node( + Nodes.Switch, + input_kwargs={ + 0: group_input.outputs["Do Tangent Rot"], + 8: if_normal_rot.outputs[3], + 9: rotate_euler_1, + }, + label="if_tangent_rot", + attrs={"input_type": "VECTOR"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Output": if_tangent_rot.outputs[3]} + ) + + +@node_utils.to_nodegroup( + "nodegroup_surface_muscle", singleton=True, type="GeometryNodeTree" +) +def nodegroup_surface_muscle(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Skin Mesh", None), + ("NodeSocketGeometry", "Skeleton 
Curve", None), + ("NodeSocketVector", "Coord 0", (0.4, 0.0, 1.0)), + ("NodeSocketVector", "Coord 1", (0.5, 0.0, 1.0)), + ("NodeSocketVector", "Coord 2", (0.6, 0.0, 1.0)), + ("NodeSocketVector", "StartRad, EndRad, Fullness", (0.0, 0.0, 0.0)), + ("NodeSocketVector", "ProfileHeight, StartTilt, EndTilt", (0.0, 0.0, 0.0)), + ("NodeSocketBool", "Debug Points", False), + ], + ) + + cube = nw.new_node(Nodes.MeshCube, input_kwargs={"Size": (0.03, 0.03, 0.03)}) + + part_surface_simple = nw.new_node( + nodegroup_part_surface_simple().name, + input_kwargs={ + "Skeleton Curve": group_input.outputs["Skeleton Curve"], + "Skin Mesh": group_input.outputs["Skin Mesh"], + "Length, Yaw, Rad": group_input.outputs["Coord 0"], + }, + ) + + transform_2 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": cube, + "Translation": part_surface_simple.outputs["Position"], + }, + ) + + part_surface_simple_1 = nw.new_node( + nodegroup_part_surface_simple().name, + input_kwargs={ + "Skeleton Curve": group_input.outputs["Skeleton Curve"], + "Skin Mesh": group_input.outputs["Skin Mesh"], + "Length, Yaw, Rad": group_input.outputs["Coord 1"], + }, + ) + + transform_1 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": cube, + "Translation": part_surface_simple_1.outputs["Position"], + }, + ) + + part_surface_simple_2 = nw.new_node( + nodegroup_part_surface_simple().name, + input_kwargs={ + "Skeleton Curve": group_input.outputs["Skeleton Curve"], + "Skin Mesh": group_input.outputs["Skin Mesh"], + "Length, Yaw, Rad": group_input.outputs["Coord 2"], + }, + ) + + transform_3 = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": cube, + "Translation": part_surface_simple_2.outputs["Position"], + }, + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [transform_2, transform_1, transform_3]}, + ) + + switch = nw.new_node( + Nodes.Switch, + input_kwargs={1: group_input.outputs["Debug Points"], 15: join_geometry}, + ) + + u_resolution = nw.new_node(Nodes.Integer, label="U Resolution") + u_resolution.integer = 16 + + quadratic_bezier = nw.new_node( + Nodes.QuadraticBezier, + input_kwargs={ + "Resolution": u_resolution, + "Start": part_surface_simple.outputs["Position"], + "Middle": part_surface_simple_1.outputs["Position"], + "End": part_surface_simple_2.outputs["Position"], + }, + ) + + spline_parameter = nw.new_node(Nodes.SplineParameter) + + separate_xyz_1 = nw.new_node( + Nodes.SeparateXYZ, + input_kwargs={ + "Vector": group_input.outputs["ProfileHeight, StartTilt, EndTilt"] + }, + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": spline_parameter.outputs["Factor"], + 3: separate_xyz_1.outputs["Y"], + 4: separate_xyz_1.outputs["Z"], + }, + ) + + deg2rad = nw.new_node( + nodegroup_deg2_rad().name, input_kwargs={"Deg": map_range_1.outputs["Result"]} + ) + + set_curve_tilt = nw.new_node( + Nodes.SetCurveTilt, input_kwargs={"Curve": quadratic_bezier, "Tilt": deg2rad} + ) + + position = nw.new_node(Nodes.InputPosition) + + combine_xyz = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": separate_xyz_1.outputs["X"], "Y": 1.0, "Z": 1.0}, + ) + + multiply = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: position, 1: combine_xyz}, + attrs={"operation": "MULTIPLY"}, + ) + + v_resolution = nw.new_node(Nodes.Integer, label="V resolution") + v_resolution.integer = 24 + + warped_circle_curve = nw.new_node( + nodegroup_warped_circle_curve().name, + input_kwargs={"Position": multiply.outputs["Vector"], "Vertices": v_resolution}, + ) + + 
separate_xyz = nw.new_node( + Nodes.SeparateXYZ, + input_kwargs={"Vector": group_input.outputs["StartRad, EndRad, Fullness"]}, + ) + + smoothtaper = nw.new_node( + nodegroup_smooth_taper().name, + input_kwargs={ + "start_rad": separate_xyz.outputs["X"], + "end_rad": separate_xyz.outputs["Y"], + "fullness": separate_xyz.outputs["Z"], + }, + ) + + profilepart = nw.new_node( + nodegroup_profile_part().name, + input_kwargs={ + "Skeleton Curve": set_curve_tilt, + "Profile Curve": warped_circle_curve, + "Radius Func": smoothtaper, + }, + ) + + join_geometry_1 = nw.new_node( + Nodes.JoinGeometry, input_kwargs={"Geometry": [switch.outputs[6], profilepart]} + ) + + switch_1 = nw.new_node(Nodes.Switch, input_kwargs={1: True, 15: join_geometry_1}) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": switch_1.outputs[6]} + ) + + +@node_utils.to_nodegroup( + "nodegroup_attach_part", singleton=True, type="GeometryNodeTree" +) +def nodegroup_attach_part(nw: NodeWrangler): + # Code generated using version 2.4.2 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Skin Mesh", None), + ("NodeSocketGeometry", "Skeleton Curve", None), + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketFloatFactor", "Length Fac", 0.0), + ("NodeSocketVectorEuler", "Ray Rot", (0.0, 0.0, 0.0)), + ("NodeSocketFloat", "Rad", 0.0), + ("NodeSocketVector", "Part Rot", (0.0, 0.0, 0.0)), + ("NodeSocketBool", "Do Normal Rot", False), + ("NodeSocketBool", "Do Tangent Rot", False), + ], + ) + + part_surface = nw.new_node( + nodegroup_part_surface().name, + input_kwargs={ + "Skeleton Curve": group_input.outputs["Skeleton Curve"], + "Skin Mesh": group_input.outputs["Skin Mesh"], + "Length Fac": group_input.outputs["Length Fac"], + "Ray Rot": group_input.outputs["Ray Rot"], + "Rad": group_input.outputs["Rad"], + }, + ) + + deg2rad = nw.new_node( + nodegroup_deg2_rad().name, input_kwargs={"Deg": group_input.outputs["Part Rot"]} + ) + + raycast_rotation = nw.new_node( + nodegroup_raycast_rotation().name, + input_kwargs={ + "Rotation": deg2rad, + "Hit Normal": part_surface.outputs["Hit Normal"], + "Curve Tangent": part_surface.outputs["Tangent"], + "Do Normal Rot": group_input.outputs["Do Normal Rot"], + "Do Tangent Rot": group_input.outputs["Do Tangent Rot"], + }, + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + "Translation": part_surface.outputs["Position"], + "Rotation": raycast_rotation, + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": transform, + "Position": part_surface.outputs["Position"], + }, + ) diff --git a/infinigen/assets/utils/nodegroups/curve.py b/infinigen/assets/utils/nodegroups/curve.py new file mode 100644 index 000000000..84edba93f --- /dev/null +++ b/infinigen/assets/utils/nodegroups/curve.py @@ -0,0 +1,456 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
+ +# Authors: Alexander Raistrick + + +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler + +from .math import ( + nodegroup_aspect_to_dim, + nodegroup_polar_to_cart, + nodegroup_switch4, + nodegroup_vector_sum, +) + + +@node_utils.to_nodegroup( + "nodegroup_simple_tube", singleton=True, type="GeometryNodeTree" +) +def nodegroup_simple_tube(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVector", "Origin", (0.0, 0.0, 0.0)), + ("NodeSocketVector", "Angles Deg", (30.0, -1.5, 11.0)), + ("NodeSocketVector", "Seg Lengths", (0.02, 0.02, 0.02)), + ("NodeSocketFloat", "Start Radius", 0.06), + ("NodeSocketFloat", "End Radius", 0.03), + ("NodeSocketFloat", "Fullness", 8.17), + ("NodeSocketBool", "Do Bezier", True), + ("NodeSocketFloat", "Aspect Ratio", 1.0), + ], + ) + + polarbezier = nw.new_node( + nodegroup_polar_bezier().name, + input_kwargs={ + "Resolution": 25, + "Origin": group_input.outputs["Origin"], + "angles_deg": group_input.outputs["Angles Deg"], + "Seg Lengths": group_input.outputs["Seg Lengths"], + "Do Bezier": group_input.outputs["Do Bezier"], + }, + ) + + aspect_to_dim = nw.new_node( + nodegroup_aspect_to_dim().name, + input_kwargs={"Aspect Ratio": group_input.outputs["Aspect Ratio"]}, + ) + + position = nw.new_node(Nodes.InputPosition) + + multiply = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: aspect_to_dim, 1: position}, + attrs={"operation": "MULTIPLY"}, + ) + + warped_circle_curve = nw.new_node( + nodegroup_warped_circle_curve().name, + input_kwargs={"Position": multiply.outputs["Vector"], "Vertices": 40}, + ) + + smoothtaper = nw.new_node( + nodegroup_smooth_taper().name, + input_kwargs={ + "start_rad": group_input.outputs["Start Radius"], + "end_rad": group_input.outputs["End Radius"], + "fullness": group_input.outputs["Fullness"], + }, + ) + + profilepart = nw.new_node( + nodegroup_profile_part().name, + input_kwargs={ + "Skeleton Curve": polarbezier.outputs["Curve"], + "Profile Curve": warped_circle_curve, + "Radius Func": smoothtaper, + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": profilepart, + "Skeleton Curve": polarbezier.outputs["Curve"], + "Endpoint": polarbezier.outputs["Endpoint"], + }, + ) + + +@node_utils.to_nodegroup( + "nodegroup_simple_tube_v2", singleton=True, type="GeometryNodeTree" +) +def nodegroup_simple_tube_v2(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVector", "length_rad1_rad2", (1.0, 0.5, 0.3)), + ("NodeSocketVector", "angles_deg", (0.0, 0.0, 0.0)), + ("NodeSocketVector", "proportions", (0.3333, 0.3333, 0.3333)), + ("NodeSocketFloat", "aspect", 1.0), + ("NodeSocketBool", "do_bezier", True), + ("NodeSocketFloat", "fullness", 4.0), + ("NodeSocketVector", "Origin", (0.0, 0.0, 0.0)), + ], + ) + + vector_sum = nw.new_node( + nodegroup_vector_sum().name, + input_kwargs={"Vector": group_input.outputs["proportions"]}, + ) + + divide = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: group_input.outputs["proportions"], 1: vector_sum}, + attrs={"operation": "DIVIDE"}, + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, + input_kwargs={"Vector": group_input.outputs["length_rad1_rad2"]}, + ) + + scale = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: divide.outputs["Vector"], "Scale": 
separate_xyz.outputs["X"]}, + attrs={"operation": "SCALE"}, + ) + + polarbezier = nw.new_node( + nodegroup_polar_bezier().name, + input_kwargs={ + "Resolution": 25, + "Origin": group_input.outputs["Origin"], + "angles_deg": group_input.outputs["angles_deg"], + "Seg Lengths": scale.outputs["Vector"], + "Do Bezier": group_input.outputs["do_bezier"], + }, + ) + + aspect_to_dim = nw.new_node( + nodegroup_aspect_to_dim().name, + input_kwargs={"Aspect Ratio": group_input.outputs["aspect"]}, + ) + + position = nw.new_node(Nodes.InputPosition) + + multiply = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: aspect_to_dim, 1: position}, + attrs={"operation": "MULTIPLY"}, + ) + + warped_circle_curve = nw.new_node( + nodegroup_warped_circle_curve().name, + input_kwargs={"Position": multiply.outputs["Vector"], "Vertices": 40}, + ) + + smoothtaper = nw.new_node( + nodegroup_smooth_taper().name, + input_kwargs={ + "start_rad": separate_xyz.outputs["Y"], + "end_rad": separate_xyz.outputs["Z"], + "fullness": group_input.outputs["fullness"], + }, + ) + + profilepart = nw.new_node( + nodegroup_profile_part().name, + input_kwargs={ + "Skeleton Curve": polarbezier.outputs["Curve"], + "Profile Curve": warped_circle_curve, + "Radius Func": smoothtaper, + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": profilepart, + "Skeleton Curve": polarbezier.outputs["Curve"], + "Endpoint": polarbezier.outputs["Endpoint"], + }, + ) + + +@node_utils.to_nodegroup( + "nodegroup_smooth_taper", singleton=True, type="GeometryNodeTree" +) +def nodegroup_smooth_taper(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + spline_parameter = nw.new_node(Nodes.SplineParameter) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: spline_parameter.outputs["Factor"], 1: 3.1416}, + attrs={"operation": "MULTIPLY"}, + ) + + sine = nw.new_node( + Nodes.Math, input_kwargs={0: multiply}, attrs={"operation": "SINE"} + ) + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "start_rad", 0.29), + ("NodeSocketFloat", "end_rad", 0.0), + ("NodeSocketFloat", "fullness", 2.5), + ], + ) + + divide = nw.new_node( + Nodes.Math, + input_kwargs={0: 1.0, 1: group_input.outputs["fullness"]}, + attrs={"operation": "DIVIDE"}, + ) + + power = nw.new_node( + Nodes.Math, input_kwargs={0: sine, 1: divide}, attrs={"operation": "POWER"} + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": spline_parameter.outputs["Factor"], + 3: group_input.outputs["start_rad"], + 4: group_input.outputs["end_rad"], + }, + attrs={"clamp": False}, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: power, 1: map_range.outputs["Result"]}, + attrs={"operation": "MULTIPLY"}, + ) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Value": multiply_1}) + + +@node_utils.to_nodegroup( + "nodegroup_warped_circle_curve", singleton=True, type="GeometryNodeTree" +) +def nodegroup_warped_circle_curve(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketVector", "Position", (0.0, 0.0, 0.0)), + ("NodeSocketInt", "Vertices", 32), + ], + ) + + mesh_circle = nw.new_node( + Nodes.MeshCircle, input_kwargs={"Vertices": group_input.outputs["Vertices"]} + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": mesh_circle, + "Position": group_input.outputs["Position"], + }, + ) + + mesh_to_curve = 
nw.new_node(Nodes.MeshToCurve, input_kwargs={"Mesh": set_position}) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Curve": mesh_to_curve}) + + +@node_utils.to_nodegroup( + "nodegroup_polar_bezier", singleton=True, type="GeometryNodeTree" +) +def nodegroup_polar_bezier(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketIntUnsigned", "Resolution", 32), + ("NodeSocketVector", "Origin", (0.0, 0.0, 0.0)), + ("NodeSocketVector", "angles_deg", (0.0, 0.0, 0.0)), + ("NodeSocketVector", "Seg Lengths", (0.3, 0.3, 0.3)), + ("NodeSocketBool", "Do Bezier", True), + ], + ) + + mesh_line = nw.new_node(Nodes.MeshLine, input_kwargs={"Count": 4}) + + index = nw.new_node(Nodes.Index) + + deg2_rad = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: group_input.outputs["angles_deg"], "Scale": 0.0175}, + label="Deg2Rad", + attrs={"operation": "SCALE"}, + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": deg2_rad.outputs["Vector"]} + ) + + reroute = nw.new_node( + Nodes.Reroute, input_kwargs={"Input": separate_xyz.outputs["X"]} + ) + + separate_xyz_1 = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": group_input.outputs["Seg Lengths"]} + ) + + polartocart = nw.new_node( + nodegroup_polar_to_cart().name, + input_kwargs={ + "Angle": reroute, + "Length": separate_xyz_1.outputs["X"], + "Origin": group_input.outputs["Origin"], + }, + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: reroute, 1: separate_xyz.outputs["Y"]} + ) + + polartocart_1 = nw.new_node( + nodegroup_polar_to_cart().name, + input_kwargs={ + "Angle": add, + "Length": separate_xyz_1.outputs["Y"], + "Origin": polartocart, + }, + ) + + add_1 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz.outputs["Z"], 1: add}) + + polartocart_2 = nw.new_node( + nodegroup_polar_to_cart().name, + input_kwargs={ + "Angle": add_1, + "Length": separate_xyz_1.outputs["Z"], + "Origin": polartocart_1, + }, + ) + + switch4 = nw.new_node( + nodegroup_switch4().name, + input_kwargs={ + "Arg": index, + "Arg == 0": group_input.outputs["Origin"], + "Arg == 1": polartocart, + "Arg == 2": polartocart_1, + "Arg == 3": polartocart_2, + }, + ) + + set_position = nw.new_node( + Nodes.SetPosition, input_kwargs={"Geometry": mesh_line, "Position": switch4} + ) + + mesh_to_curve = nw.new_node(Nodes.MeshToCurve, input_kwargs={"Mesh": set_position}) + + subdivide_curve_1 = nw.new_node( + Nodes.SubdivideCurve, + input_kwargs={ + "Curve": mesh_to_curve, + "Cuts": group_input.outputs["Resolution"], + }, + ) + + integer = nw.new_node(Nodes.Integer, attrs={"integer": 2}) + integer.integer = 2 + + bezier_segment = nw.new_node( + Nodes.BezierSegment, + input_kwargs={ + "Resolution": integer, + "Start": group_input.outputs["Origin"], + "Start Handle": polartocart, + "End Handle": polartocart_1, + "End": polartocart_2, + }, + ) + + divide = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Resolution"], 1: integer}, + attrs={"operation": "DIVIDE"}, + ) + + subdivide_curve = nw.new_node( + Nodes.SubdivideCurve, input_kwargs={"Curve": bezier_segment, "Cuts": divide} + ) + + switch = nw.new_node( + Nodes.Switch, + input_kwargs={ + 1: group_input.outputs["Do Bezier"], + 14: subdivide_curve_1, + 15: subdivide_curve, + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Curve": switch.outputs[6], "Endpoint": polartocart_2}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_profile_part", 
singleton=True, type="GeometryNodeTree" +) +def nodegroup_profile_part(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Skeleton Curve", None), + ("NodeSocketGeometry", "Profile Curve", None), + ("NodeSocketFloatDistance", "Radius Func", 1.0), + ], + ) + + set_curve_radius = nw.new_node( + Nodes.SetCurveRadius, + input_kwargs={ + "Curve": group_input.outputs["Skeleton Curve"], + "Radius": group_input.outputs["Radius Func"], + }, + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={ + "Curve": set_curve_radius, + "Profile Curve": group_input.outputs["Profile Curve"], + "Fill Caps": True, + }, + ) + + set_shade_smooth = nw.new_node( + Nodes.SetShadeSmooth, + input_kwargs={"Geometry": curve_to_mesh, "Shade Smooth": False}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_shade_smooth} + ) diff --git a/infinigen/assets/utils/nodegroups/geometry.py b/infinigen/assets/utils/nodegroups/geometry.py new file mode 100644 index 000000000..a14d1c522 --- /dev/null +++ b/infinigen/assets/utils/nodegroups/geometry.py @@ -0,0 +1,229 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Alexander Raistrick + + +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler + + +@node_utils.to_nodegroup( + "nodegroup_symmetric_instance", singleton=True, type="GeometryNodeTree" +) +def nodegroup_symmetric_instance(nw: NodeWrangler): + # Code generated using version 2.4.1 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketVector", "Offset", (0.0, 0.0, 0.0)), + ("NodeSocketVector", "Reflector", (1.0, -1.0, 1.0)), + ], + ) + + multiply = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: group_input.outputs["Offset"], + 1: group_input.outputs["Reflector"], + }, + attrs={"operation": "MULTIPLY"}, + ) + + mesh_line = nw.new_node( + Nodes.MeshLine, + input_kwargs={ + "Count": 2, + "Start Location": group_input.outputs["Offset"], + "Offset": multiply.outputs["Vector"], + }, + attrs={"mode": "END_POINTS"}, + ) + + instance_on_points = nw.new_node( + Nodes.InstanceOnPoints, + input_kwargs={"Points": mesh_line, "Instance": group_input.outputs["Geometry"]}, + ) + + index = nw.new_node(Nodes.Index) + + equal = nw.new_node( + Nodes.Compare, + input_kwargs={2: index}, + attrs={"data_type": "INT", "operation": "EQUAL"}, + ) + + scale_instances = nw.new_node( + Nodes.ScaleInstances, + input_kwargs={"Instances": instance_on_points, "Selection": equal}, + ) + + flip_faces = nw.new_node( + Nodes.FlipFaces, input_kwargs={"Mesh": scale_instances, "Selection": equal} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Instances": flip_faces} + ) + + +@node_utils.to_nodegroup( + "nodegroup_symmetric_clone", singleton=True, type="GeometryNodeTree" +) +def nodegroup_symmetric_clone(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketVectorXYZ", "Scale", (1.0, -1.0, 1.0)), + ], + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + 
"Scale": group_input.outputs["Scale"], + }, + ) + + flip_faces = nw.new_node(Nodes.FlipFaces, input_kwargs={"Mesh": transform}) + + join_geometry_2 = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [group_input.outputs["Geometry"], flip_faces]}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Both": join_geometry_2, + "Orig": group_input.outputs["Geometry"], + "Inverted": flip_faces, + }, + ) + + +@node_utils.to_nodegroup("nodegroup_solidify", singleton=True, type="GeometryNodeTree") +def nodegroup_solidify(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Mesh", None), + ("NodeSocketFloatDistance", "Distance", 0.0), + ], + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Distance"]}, + attrs={"operation": "MULTIPLY"}, + ) + + extrude_mesh = nw.new_node( + Nodes.ExtrudeMesh, + input_kwargs={ + "Mesh": group_input.outputs["Mesh"], + "Offset Scale": multiply, + "Individual": False, + }, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Distance"], 1: -0.5}, + attrs={"operation": "MULTIPLY"}, + ) + + extrude_mesh_1 = nw.new_node( + Nodes.ExtrudeMesh, + input_kwargs={ + "Mesh": group_input.outputs["Mesh"], + "Offset Scale": multiply_1, + "Individual": False, + }, + ) + + flip_faces = nw.new_node( + Nodes.FlipFaces, input_kwargs={"Mesh": extrude_mesh_1.outputs["Mesh"]} + ) + + join_geometry = nw.new_node( + Nodes.JoinGeometry, + input_kwargs={"Geometry": [extrude_mesh.outputs["Mesh"], flip_faces]}, + ) + + merge_by_distance = nw.new_node( + Nodes.MergeByDistance, input_kwargs={"Geometry": join_geometry, "Distance": 0.0} + ) + + set_shade_smooth = nw.new_node( + Nodes.SetShadeSmooth, + input_kwargs={"Geometry": merge_by_distance, "Shade Smooth": False}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_shade_smooth} + ) + + +@node_utils.to_nodegroup("nodegroup_taper", singleton=True, type="GeometryNodeTree") +def nodegroup_taper(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketVector", "Start", (1.0, 0.63, 0.72)), + ("NodeSocketVector", "End", (1.0, 1.0, 1.0)), + ], + ) + + position = nw.new_node(Nodes.InputPosition) + + separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": position}) + + attribute_statistic = nw.new_node( + Nodes.AttributeStatistic, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + 2: separate_xyz.outputs["X"], + }, + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Vector": separate_xyz.outputs["X"], + 7: attribute_statistic.outputs["Min"], + 8: attribute_statistic.outputs["Max"], + 9: group_input.outputs["Start"], + 10: group_input.outputs["End"], + }, + attrs={"data_type": "FLOAT_VECTOR", "clamp": False}, + ) + + multiply = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: position, 1: map_range.outputs["Vector"]}, + attrs={"operation": "MULTIPLY"}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + "Position": multiply.outputs["Vector"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_position} + ) diff --git a/infinigen/assets/utils/nodegroups/hair.py 
b/infinigen/assets/utils/nodegroups/hair.py new file mode 100644 index 000000000..8b9d0cb1b --- /dev/null +++ b/infinigen/assets/utils/nodegroups/hair.py @@ -0,0 +1,526 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Alexander Raistrick + + +from infinigen.assets.utils.nodegroups.math import nodegroup_vector_bezier +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler + + +@node_utils.to_nodegroup( + "nodegroup_comb_direction", singleton=True, type="GeometryNodeTree" +) +def nodegroup_comb_direction(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Surface", None), + ("NodeSocketVector", "Root Positiion", (0.0, 0.0, 0.0)), + ], + ) + + normal = nw.new_node(Nodes.InputNormal) + + surface_normal = nw.new_node( + Nodes.SampleNearestSurface, + input_kwargs={ + "Mesh": group_input.outputs["Surface"], + "Value": normal, + "Sample Position": group_input.outputs["Root Positiion"], + }, + label="Surface Normal", + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + named_attribute = nw.new_node( + Nodes.NamedAttribute, + input_kwargs={"Name": "skeleton_loc"}, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + named_attribute_1 = nw.new_node( + Nodes.NamedAttribute, + input_kwargs={"Name": "parent_skeleton_loc"}, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + subtract = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: named_attribute.outputs["Attribute"], + 1: named_attribute_1.outputs["Attribute"], + }, + attrs={"operation": "SUBTRACT"}, + ) + + normalize = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: subtract.outputs["Vector"]}, + attrs={"operation": "NORMALIZE"}, + ) + + skeleton_tangent = nw.new_node( + Nodes.SampleNearestSurface, + input_kwargs={ + "Mesh": group_input.outputs["Surface"], + "Value": normalize.outputs["Vector"], + "Sample Position": group_input.outputs["Root Positiion"], + }, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + cross_product = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: surface_normal, 1: skeleton_tangent}, + attrs={"operation": "CROSS_PRODUCT"}, + ) + + cross_product_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: surface_normal, 1: cross_product.outputs["Vector"]}, + attrs={"operation": "CROSS_PRODUCT"}, + ) + + normalize_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: cross_product_1.outputs["Vector"]}, + attrs={"operation": "NORMALIZE"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Combing Direction": normalize_1.outputs["Vector"], + "Surface Normal": (surface_normal, "Value"), + "Skeleton Tangent": skeleton_tangent, + }, + ) + + +@node_utils.to_nodegroup( + "nodegroup_hair_position", singleton=True, type="GeometryNodeTree" +) +def nodegroup_hair_position(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Curves", None)] + ) + + position = nw.new_node(Nodes.InputPosition) + + index = nw.new_node(Nodes.Index) + + spline_length = nw.new_node(Nodes.SplineLength) + + snap = nw.new_node( + Nodes.Math, + input_kwargs={0: index, 1: spline_length.outputs["Point Count"]}, + attrs={"operation": "SNAP"}, + ) + + hair_root_position = nw.new_node( + Nodes.SampleIndex, + input_kwargs={ + 
"Geometry": group_input.outputs["Curves"], + "Value": position, + "Index": snap, + }, + label="Hair Root Position", + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + position_1 = nw.new_node(Nodes.InputPosition) + + relative_position = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: position_1, 1: hair_root_position}, + label="Relative Position", + attrs={"operation": "SUBTRACT"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Root Position": hair_root_position, + "Relative Position": relative_position.outputs["Vector"], + }, + ) + + +@node_utils.to_nodegroup( + "nodegroup_comb_hairs", singleton=True, type="GeometryNodeTree" +) +def nodegroup_comb_hairs(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Curves", None), + ("NodeSocketVector", "Root Position", (0.0, 0.0, 0.0)), + ("NodeSocketVector", "Comb Dir", (0.0, 0.0, 0.0)), + ("NodeSocketVector", "Surface Normal", (0.0, 0.0, 0.0)), + ("NodeSocketFloat", "Length", 0.03), + ("NodeSocketFloat", "Puiff", 1.0), + ("NodeSocketFloat", "Comb", 1.0), + ], + ) + + spline_parameter = nw.new_node(Nodes.SplineParameter) + + scale = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: group_input.outputs["Surface Normal"], + "Scale": group_input.outputs["Comb"], + }, + attrs={"operation": "SCALE"}, + ) + + scale_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: group_input.outputs["Comb Dir"], + "Scale": group_input.outputs["Puiff"], + }, + attrs={"operation": "SCALE"}, + ) + + add = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: scale_1.outputs["Vector"], 1: scale.outputs["Vector"]}, + ) + + vectorbezier = nw.new_node( + nodegroup_vector_bezier().name, + input_kwargs={ + "t": spline_parameter.outputs["Factor"], + "b": scale.outputs["Vector"], + "c": add.outputs["Vector"], + }, + ) + + length = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: add.outputs["Vector"]}, + attrs={"operation": "LENGTH"}, + ) + + divide = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Length"], 1: length.outputs["Value"]}, + attrs={"operation": "DIVIDE"}, + ) + + scale_2 = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: vectorbezier, "Scale": divide}, + attrs={"operation": "SCALE"}, + ) + + add_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: group_input.outputs["Root Position"], + 1: scale_2.outputs["Vector"], + }, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": group_input.outputs["Curves"], + "Position": add_1.outputs["Vector"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_position} + ) + + +@node_utils.to_nodegroup( + "nodegroup_strand_noise", singleton=False, type="GeometryNodeTree" +) +def nodegroup_strand_noise(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketFloat", "Random Mag", 0.001), + ("NodeSocketFloat", "Perlin Mag", 1.0), + ("NodeSocketFloat", "Perlin Scale", 5.0), + ], + ) + + noise_texture = nw.new_node( + Nodes.NoiseTexture, + input_kwargs={ + "Scale": group_input.outputs["Perlin Scale"], + "Detail": 10.0, + "Roughness": 1.0, + }, + ) + + subtract = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: noise_texture.outputs["Color"], 1: (0.5, 0.5, 0.5)}, + attrs={"operation": "SUBTRACT"}, + ) + + scale = 
nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: subtract.outputs["Vector"], + "Scale": group_input.outputs["Perlin Mag"], + }, + attrs={"operation": "SCALE"}, + ) + + random_value = nw.new_node( + Nodes.RandomValue, + input_kwargs={0: (-1.0, -1.0, -1.0)}, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + scale_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: random_value.outputs["Value"], + "Scale": group_input.outputs["Random Mag"], + }, + attrs={"operation": "SCALE"}, + ) + + add = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: scale.outputs["Vector"], 1: scale_1.outputs["Vector"]}, + ) + + add_1 = nw.new_node(Nodes.VectorMath, input_kwargs={0: add.outputs["Vector"]}) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + "Offset": add_1.outputs["Vector"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_position} + ) + + +@node_utils.to_nodegroup( + "nodegroup_duplicate_to_clumps", singleton=False, type="GeometryNodeTree" +) +def nodegroup_duplicate_to_clumps(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ("NodeSocketVector", "Surface Normal", (0.0, 0.0, 0.0)), + ("NodeSocketInt", "Amount", 3), + ("NodeSocketFloat", "Tuft Spread", 0.01), + ("NodeSocketFloat", "Tuft Clumping", 0.5), + ], + ) + + duplicate_elements = nw.new_node( + Nodes.DuplicateElements, + attrs={"domain": "SPLINE"}, + input_kwargs={ + "Geometry": group_input.outputs["Geometry"], + "Amount": group_input.outputs["Amount"], + }, + ) + + random_value = nw.new_node( + Nodes.RandomValue, + input_kwargs={0: (-1.0, -1.0, -1.0)}, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + scale = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: random_value.outputs["Value"], + "Scale": group_input.outputs["Tuft Spread"], + }, + attrs={"operation": "SCALE"}, + ) + + project = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: scale.outputs["Vector"], + 1: group_input.outputs["Surface Normal"], + }, + attrs={"operation": "PROJECT"}, + ) + + subtract = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: scale.outputs["Vector"], 1: project.outputs["Vector"]}, + attrs={"operation": "SUBTRACT"}, + ) + + capture_attribute = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={ + "Geometry": duplicate_elements.outputs["Geometry"], + 1: subtract.outputs["Vector"], + }, + attrs={"domain": "CURVE", "data_type": "FLOAT_VECTOR"}, + ) + + spline_parameter = nw.new_node(Nodes.SplineParameter) + + subtract_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: 1.0, 1: group_input.outputs["Tuft Clumping"]}, + attrs={"operation": "SUBTRACT"}, + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": spline_parameter.outputs["Factor"], + 3: 1.0, + 4: subtract_1, + }, + ) + + scale_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: capture_attribute.outputs["Attribute"], + "Scale": map_range.outputs["Result"], + }, + attrs={"operation": "SCALE"}, + ) + + set_position_1 = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": capture_attribute.outputs["Geometry"], + "Offset": scale_1.outputs["Vector"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_position_1} + ) + + +@node_utils.to_nodegroup( + "nodegroup_hair_length_rescale", singleton=False, type="GeometryNodeTree" +) +def 
nodegroup_hair_length_rescale(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Curves", None), + ("NodeSocketFloat", "Min", 0.69999999999999996), + ], + ) + + random_value_1 = nw.new_node( + Nodes.RandomValue, input_kwargs={2: group_input.outputs["Min"]} + ) + + capture_attribute = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={ + "Geometry": group_input.outputs["Curves"], + 2: random_value_1.outputs[1], + }, + attrs={"domain": "CURVE"}, + ) + + hairposition = nw.new_node( + nodegroup_hair_position().name, + input_kwargs={"Curves": group_input.outputs["Curves"]}, + ) + + multiply_add = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: hairposition.outputs["Relative Position"], + 1: capture_attribute.outputs[2], + 2: hairposition.outputs["Root Position"], + }, + attrs={"operation": "MULTIPLY_ADD"}, + ) + + set_position_1 = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": capture_attribute.outputs["Geometry"], + "Position": multiply_add.outputs["Vector"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_position_1} + ) + + +@node_utils.to_nodegroup( + "nodegroup_snap_roots_to_surface", singleton=True, type="GeometryNodeTree" +) +def nodegroup_snap_roots_to_surface(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Target", None), + ("NodeSocketGeometry", "Curves", None), + ], + ) + + hair_pos = nw.new_node( + nodegroup_hair_position().name, + input_kwargs={"Curves": group_input.outputs["Curves"]}, + ) + + geometry_proximity = nw.new_node( + Nodes.Proximity, + input_kwargs={ + "Target": group_input.outputs["Target"], + "Source Position": hair_pos.outputs["Root Position"], + }, + ) + + add = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: geometry_proximity.outputs["Position"], + 1: hair_pos.outputs["Relative Position"], + }, + ) + + set_position_2 = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": group_input.outputs["Curves"], + "Position": add.outputs["Vector"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Geometry": set_position_2} + ) diff --git a/infinigen/assets/utils/nodegroups/math.py b/infinigen/assets/utils/nodegroups/math.py new file mode 100644 index 000000000..be0b92f7f --- /dev/null +++ b/infinigen/assets/utils/nodegroups/math.py @@ -0,0 +1,330 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
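For context on how the transpiled helpers in this new module are consumed: `@node_utils.to_nodegroup` registers each decorated function as a reusable Geometry Nodes group, and callers instantiate it by name through a `NodeWrangler`, as `sculpt_v1.py` further below does with `nodegroup_floor_ceil` and `nodegroup_clamp_or_wrap`. A minimal sketch of that calling pattern, assuming Blender with infinigen installed and an existing `nw` wrapping a GeometryNodeTree (the wrapper name `split_fraction` is illustrative only):

```python
# Illustrative sketch of the calling convention; assumes Blender + infinigen and
# an existing NodeWrangler `nw` wrapping a GeometryNodeTree.
from infinigen.assets.utils.nodegroups.math import nodegroup_floor_ceil
from infinigen.core.nodes.node_wrangler import NodeWrangler


def split_fraction(nw: NodeWrangler, value_socket):
    # nodegroup_floor_ceil() returns the registered node group; passing its .name
    # to new_node instantiates it like any built-in node type.
    floorceil = nw.new_node(
        nodegroup_floor_ceil().name, input_kwargs={"Value": value_socket}
    )
    # The group exposes "Floor", "Ceil" and "Remainder" output sockets.
    return floorceil.outputs["Floor"], floorceil.outputs["Remainder"]
```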
+ +# Authors: Alexander Raistrick + + +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler + + +@node_utils.to_nodegroup( + "nodegroup_floor_ceil", singleton=False, type="GeometryNodeTree" +) +def nodegroup_floor_ceil(nw: NodeWrangler): + # Code generated using version 2.6.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketFloat", "Value", 0.0000)] + ) + + float_to_integer = nw.new_node( + Nodes.FloatToInt, + input_kwargs={"Float": group_input.outputs["Value"]}, + attrs={"rounding_mode": "FLOOR"}, + ) + + float_to_integer_1 = nw.new_node( + Nodes.FloatToInt, + input_kwargs={"Float": group_input.outputs["Value"]}, + attrs={"rounding_mode": "CEILING"}, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Value"], 1: float_to_integer}, + attrs={"operation": "SUBTRACT"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Floor": float_to_integer, + "Ceil": float_to_integer_1, + "Remainder": subtract, + }, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_clamp_or_wrap", singleton=False, type="GeometryNodeTree" +) +def nodegroup_clamp_or_wrap(nw: NodeWrangler): + # Code generated using version 2.6.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketInt", "Value", 0), + ("NodeSocketFloat", "Max", 0.5000), + ("NodeSocketBool", "Use Wrap", False), + ], + ) + + clamp = nw.new_node( + Nodes.Clamp, + input_kwargs={ + "Value": group_input.outputs["Value"], + "Max": group_input.outputs["Max"], + }, + ) + + wrap = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["Value"], + 1: group_input.outputs["Max"], + 2: 0.0000, + }, + attrs={"operation": "WRAP"}, + ) + + switch = nw.new_node( + Nodes.Switch, + input_kwargs={0: group_input.outputs["Use Wrap"], 4: clamp, 5: wrap}, + attrs={"input_type": "INT"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Output": switch.outputs[1]}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_polar_to_cart", singleton=True, type="GeometryNodeTree" +) +def nodegroup_polar_to_cart(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "Angle", 0.5), + ("NodeSocketFloat", "Length", 0.0), + ("NodeSocketVector", "Origin", (0.0, 0.0, 0.0)), + ], + ) + + cosine = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Angle"]}, + attrs={"operation": "COSINE"}, + ) + + sine = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Angle"]}, + attrs={"operation": "SINE"}, + ) + + construct_unit_vector = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": cosine, "Z": sine}, + label="Construct Unit Vector", + ) + + offset_polar = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: group_input.outputs["Length"], + 1: construct_unit_vector, + 2: group_input.outputs["Origin"], + }, + label="Offset Polar", + attrs={"operation": "MULTIPLY_ADD"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Vector": offset_polar.outputs["Vector"]} + ) + + +@node_utils.to_nodegroup("nodegroup_switch4", singleton=True, type="GeometryNodeTree") +def nodegroup_switch4(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + 
expose_input=[ + ("NodeSocketInt", "Arg", 0), + ("NodeSocketVector", "Arg == 0", (0.0, 0.0, 0.0)), + ("NodeSocketVector", "Arg == 1", (0.0, 0.0, 0.0)), + ("NodeSocketVector", "Arg == 2", (0.0, 0.0, 0.0)), + ("NodeSocketVector", "Arg == 3", (0.0, 0.0, 0.0)), + ], + ) + + greater_equal = nw.new_node( + Nodes.Compare, + input_kwargs={2: group_input.outputs["Arg"], 3: 2}, + attrs={"data_type": "INT", "operation": "GREATER_EQUAL"}, + ) + + greater_equal_1 = nw.new_node( + Nodes.Compare, + input_kwargs={2: group_input.outputs["Arg"], 3: 1}, + attrs={"data_type": "INT", "operation": "GREATER_EQUAL"}, + ) + + switch_1 = nw.new_node( + Nodes.Switch, + input_kwargs={ + 0: greater_equal_1, + 8: group_input.outputs["Arg == 0"], + 9: group_input.outputs["Arg == 1"], + }, + attrs={"input_type": "VECTOR"}, + ) + + greater_equal_2 = nw.new_node( + Nodes.Compare, + input_kwargs={2: group_input.outputs["Arg"], 3: 3}, + attrs={"data_type": "INT", "operation": "GREATER_EQUAL"}, + ) + + switch_2 = nw.new_node( + Nodes.Switch, + input_kwargs={ + 0: greater_equal_2, + 8: group_input.outputs["Arg == 2"], + 9: group_input.outputs["Arg == 3"], + }, + attrs={"input_type": "VECTOR"}, + ) + + switch = nw.new_node( + Nodes.Switch, + input_kwargs={0: greater_equal, 8: switch_1.outputs[3], 9: switch_2.outputs[3]}, + attrs={"input_type": "VECTOR"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Output": switch.outputs[3]} + ) + + +@node_utils.to_nodegroup("nodegroup_deg2_rad", singleton=True, type="GeometryNodeTree") +def nodegroup_deg2_rad(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketVector", "Deg", (0.0, 0.0, 0.0))] + ) + + multiply = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: group_input.outputs["Deg"], 1: (0.0175, 0.0175, 0.0175)}, + attrs={"operation": "MULTIPLY"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Rad": multiply.outputs["Vector"]} + ) + + +@node_utils.to_nodegroup( + "nodegroup_aspect_to_dim", singleton=True, type="GeometryNodeTree" +) +def nodegroup_aspect_to_dim(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketFloat", "Aspect Ratio", 1.0)] + ) + + greater_than = nw.new_node( + Nodes.Compare, input_kwargs={0: group_input.outputs["Aspect Ratio"], 1: 1.0} + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={"X": group_input.outputs["Aspect Ratio"], "Y": 1.0}, + ) + + divide = nw.new_node( + Nodes.Math, + input_kwargs={0: 1.0, 1: group_input.outputs["Aspect Ratio"]}, + attrs={"operation": "DIVIDE"}, + ) + + combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={"X": 1.0, "Y": divide}) + + switch = nw.new_node( + Nodes.Switch, + input_kwargs={0: greater_than, 8: combine_xyz_1, 9: combine_xyz_2}, + attrs={"input_type": "VECTOR"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"XY Scale": switch.outputs[3]} + ) + + +@node_utils.to_nodegroup( + "nodegroup_vector_sum", singleton=True, type="GeometryNodeTree" +) +def nodegroup_vector_sum(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketVector", "Vector", (0.0, 0.0, 0.0))] + ) + + separate_xyz_1 = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": group_input.outputs["Vector"]} + ) + + add = nw.new_node( + Nodes.Math, + 
input_kwargs={0: separate_xyz_1.outputs["X"], 1: separate_xyz_1.outputs["Y"]}, + ) + + add_1 = nw.new_node( + Nodes.Math, input_kwargs={0: add, 1: separate_xyz_1.outputs["Z"]} + ) + + group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Sum": add_1}) + + +@node_utils.to_nodegroup( + "nodegroup_vector_bezier", singleton=True, type="GeometryNodeTree" +) +def nodegroup_vector_bezier(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "t", 0.0), + ("NodeSocketVector", "a", (0.0, 0.0, 0.0)), + ("NodeSocketVector", "b", (0.0, 0.0, 0.0)), + ("NodeSocketVector", "c", (0.0, 0.0, 0.0)), + ], + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Vector": group_input.outputs["t"], + 9: group_input.outputs["a"], + 10: group_input.outputs["b"], + }, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Vector": group_input.outputs["t"], + 9: map_range.outputs["Vector"], + 10: group_input.outputs["c"], + }, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Vector": map_range_1.outputs["Vector"]} + ) diff --git a/infinigen/assets/utils/nodegroups/sculpt_v1.py b/infinigen/assets/utils/nodegroups/sculpt_v1.py new file mode 100644 index 000000000..21d9c4ad0 --- /dev/null +++ b/infinigen/assets/utils/nodegroups/sculpt_v1.py @@ -0,0 +1,522 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Alexander Raistrick + + +from infinigen.assets.utils.nodegroups.geometry import ( + nodegroup_symmetric_clone, +) +from infinigen.assets.utils.nodegroups.math import ( + nodegroup_clamp_or_wrap, + nodegroup_floor_ceil, +) +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler + + +@node_utils.to_nodegroup( + "nodegroup_u_v_param_to_vert_idxs", singleton=False, type="GeometryNodeTree" +) +def nodegroup_u_v_param_to_vert_idxs(nw: NodeWrangler): + # Code generated using version 2.6.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketFloat", "Value", 0.5000), + ("NodeSocketInt", "Size", 0), + ("NodeSocketBool", "Cyclic", False), + ], + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: group_input.outputs["Value"], 1: group_input.outputs["Size"]}, + attrs={"operation": "MULTIPLY"}, + ) + + floorceil = nw.new_node( + nodegroup_floor_ceil().name, input_kwargs={"Value": multiply} + ) + + clamporwrap = nw.new_node( + nodegroup_clamp_or_wrap().name, + input_kwargs={ + "Value": floorceil.outputs["Floor"], + "Max": group_input.outputs["Size"], + "Use Wrap": group_input.outputs["Cyclic"], + }, + ) + + clamporwrap_1 = nw.new_node( + nodegroup_clamp_or_wrap().name, + input_kwargs={ + "Value": floorceil.outputs["Ceil"], + "Max": group_input.outputs["Size"], + "Use Wrap": group_input.outputs["Cyclic"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Floor": clamporwrap, + "Ceil": clamporwrap_1, + "Remainder": floorceil.outputs["Remainder"], + }, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_bilinear_interp_index_transfer", singleton=False, type="GeometryNodeTree" +) +def nodegroup_bilinear_interp_index_transfer(nw: NodeWrangler): + # Code generated using 
version 2.6.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Source", None), + ("NodeSocketFloat", "U", 0.5000), + ("NodeSocketFloat", "V", 0.5000), + ("NodeSocketVector", "Attribute", (0.0000, 0.0000, 0.0000)), + ("NodeSocketInt", "SizeU", 0), + ("NodeSocketInt", "SizeV", 0), + ("NodeSocketBool", "CyclicU", False), + ("NodeSocketBool", "CyclicV", False), + ], + ) + + uvparamtovertidxs = nw.new_node( + nodegroup_u_v_param_to_vert_idxs().name, + input_kwargs={ + "Value": group_input.outputs["V"], + "Size": group_input.outputs["SizeV"], + "Cyclic": group_input.outputs["CyclicV"], + }, + ) + + uvparamtovertidxs_1 = nw.new_node( + nodegroup_u_v_param_to_vert_idxs().name, + input_kwargs={ + "Value": group_input.outputs["U"], + "Size": group_input.outputs["SizeU"], + "Cyclic": group_input.outputs["CyclicU"], + }, + ) + + floor_floor = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: uvparamtovertidxs_1.outputs["Floor"], + 1: group_input.outputs["SizeV"], + 2: uvparamtovertidxs.outputs["Floor"], + }, + label="FloorFloor", + attrs={"operation": "MULTIPLY_ADD"}, + ) + + transfer_attribute_1 = nw.new_node( + Nodes.TransferAttribute, + input_kwargs={ + "Source": group_input, + 1: group_input.outputs["Attribute"], + "Index": floor_floor, + }, + attrs={"data_type": "FLOAT_VECTOR", "mapping": "INDEX"}, + ) + + ceil_floor = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: uvparamtovertidxs_1.outputs["Ceil"], + 1: group_input.outputs["SizeV"], + 2: uvparamtovertidxs.outputs["Floor"], + }, + label="CeilFloor", + attrs={"operation": "MULTIPLY_ADD"}, + ) + + transfer_attribute_2 = nw.new_node( + Nodes.TransferAttribute, + input_kwargs={ + "Source": group_input, + 1: group_input.outputs["Attribute"], + "Index": ceil_floor, + }, + attrs={"data_type": "FLOAT_VECTOR", "mapping": "INDEX"}, + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Vector": uvparamtovertidxs_1.outputs["Remainder"], + 9: transfer_attribute_1.outputs["Attribute"], + 10: transfer_attribute_2.outputs["Attribute"], + }, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + floor_ceil = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: uvparamtovertidxs_1.outputs["Floor"], + 1: group_input.outputs["SizeV"], + 2: uvparamtovertidxs.outputs["Ceil"], + }, + label="FloorCeil", + attrs={"operation": "MULTIPLY_ADD"}, + ) + + transfer_attribute_3 = nw.new_node( + Nodes.TransferAttribute, + input_kwargs={ + "Source": group_input, + 1: group_input.outputs["Attribute"], + "Index": floor_ceil, + }, + attrs={"data_type": "FLOAT_VECTOR", "mapping": "INDEX"}, + ) + + ceil_ceil = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: uvparamtovertidxs_1.outputs["Ceil"], + 1: group_input.outputs["SizeV"], + 2: uvparamtovertidxs.outputs["Ceil"], + }, + label="CeilCeil", + attrs={"operation": "MULTIPLY_ADD"}, + ) + + transfer_attribute_4 = nw.new_node( + Nodes.TransferAttribute, + input_kwargs={ + "Source": group_input, + 1: group_input.outputs["Attribute"], + "Index": ceil_ceil, + }, + attrs={"data_type": "FLOAT_VECTOR", "mapping": "INDEX"}, + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Vector": uvparamtovertidxs_1.outputs["Remainder"], + 9: transfer_attribute_3.outputs["Attribute"], + 10: transfer_attribute_4.outputs["Attribute"], + }, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + map_range_2 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Vector": uvparamtovertidxs.outputs["Remainder"], + 9: map_range.outputs["Vector"], + 10: 
map_range_1.outputs["Vector"], + }, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Vector": map_range_2.outputs["Vector"]}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_curve_parameter_curve", singleton=False, type="GeometryNodeTree" +) +def nodegroup_curve_parameter_curve(nw: NodeWrangler): + # Code generated using version 2.6.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Surface", None), + ("NodeSocketGeometry", "UVCurve", None), + ("NodeSocketInt", "CtrlptsU", 0), + ("NodeSocketInt", "CtrlptsW", 0), + ], + ) + + normal = nw.new_node(Nodes.InputNormal) + + position = nw.new_node(Nodes.InputPosition) + + separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": position}) + + position_1 = nw.new_node(Nodes.InputPosition) + + bilinearinterpindextransfer = nw.new_node( + nodegroup_bilinear_interp_index_transfer().name, + input_kwargs={ + "Source": group_input.outputs["Surface"], + "U": separate_xyz.outputs["X"], + "V": separate_xyz.outputs["Y"], + "Attribute": position_1, + "SizeU": group_input.outputs["CtrlptsU"], + "SizeV": group_input.outputs["CtrlptsW"], + "CyclicV": True, + }, + ) + + transfer_attribute = nw.new_node( + Nodes.TransferAttribute, + input_kwargs={ + "Source": group_input.outputs["Surface"], + 1: normal, + "Source Position": bilinearinterpindextransfer, + }, + attrs={"data_type": "FLOAT_VECTOR", "mapping": "NEAREST_FACE_INTERPOLATED"}, + ) + + multiply_add = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: transfer_attribute.outputs["Attribute"], + 1: separate_xyz.outputs["Z"], + 2: bilinearinterpindextransfer, + }, + attrs={"operation": "MULTIPLY_ADD"}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": group_input.outputs["UVCurve"], + "Position": multiply_add.outputs["Vector"], + }, + ) + + normal_1 = nw.new_node(Nodes.InputNormal) + + dot_product = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: transfer_attribute.outputs["Attribute"], 1: normal_1}, + attrs={"operation": "DOT_PRODUCT"}, + ) + + arcsine = nw.new_node( + Nodes.Math, + input_kwargs={0: dot_product.outputs["Value"]}, + attrs={"operation": "ARCSINE"}, + ) + + set_curve_tilt = nw.new_node( + Nodes.SetCurveTilt, input_kwargs={"Curve": set_position, "Tilt": arcsine} + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": set_curve_tilt}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_curve_sculpt", singleton=False, type="GeometryNodeTree" +) +def nodegroup_curve_sculpt(nw: NodeWrangler): + # Code generated using version 2.6.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Target", None), + ("NodeSocketGeometry", "Curve", None), + ("NodeSocketFloat", "Base Radius", 0.0500), + ("NodeSocketFloat", "Base Factor", 0.0500), + ("NodeSocketBool", "SymmY", True), + ("NodeSocketGeometry", "StrokeRadFacModifier", None), + ], + ) + + normal = nw.new_node(Nodes.InputNormal) + + symmetric_clone = nw.new_node( + nodegroup_symmetric_clone().name, + input_kwargs={"Geometry": group_input.outputs["Curve"]}, + ) + + switch = nw.new_node( + Nodes.Switch, + input_kwargs={ + 1: group_input.outputs["SymmY"], + 14: group_input.outputs["Curve"], + 15: symmetric_clone.outputs["Both"], + }, + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, input_kwargs={"Curve": 
switch.outputs[6]} + ) + + geometry_proximity = nw.new_node( + Nodes.Proximity, + input_kwargs={"Target": curve_to_mesh}, + attrs={"target_element": "POINTS"}, + ) + + curve_to_mesh_1 = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={"Curve": group_input.outputs["StrokeRadFacModifier"]}, + ) + + position = nw.new_node(Nodes.InputPosition) + + index = nw.new_node(Nodes.Index) + + transfer_attribute = nw.new_node( + Nodes.TransferAttribute, + input_kwargs={"Source": curve_to_mesh_1, 1: position, "Index": index}, + attrs={"data_type": "FLOAT_VECTOR", "mapping": "INDEX"}, + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, + input_kwargs={"Vector": transfer_attribute.outputs["Attribute"]}, + ) + + add = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["Base Radius"], + 1: separate_xyz.outputs["X"], + }, + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": geometry_proximity.outputs["Distance"], 2: add}, + ) + + float_curve = nw.new_node( + Nodes.FloatCurve, input_kwargs={"Value": map_range.outputs["Result"]} + ) + node_utils.assign_curve( + float_curve.mapping.curves[0], + [(0.0000, 1.0000), (0.2000, 0.9400), (0.8000, 0.0600), (1.0000, 0.0000)], + handles=["VECTOR", "AUTO", "AUTO", "VECTOR"], + ) + + add_1 = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: group_input.outputs["Base Factor"], + 1: separate_xyz.outputs["Y"], + }, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: float_curve, 1: add_1}, + attrs={"operation": "MULTIPLY"}, + ) + + scale = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: normal, "Scale": multiply}, + attrs={"operation": "SCALE"}, + ) + + set_position = nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": group_input.outputs["Target"], + "Offset": scale.outputs["Vector"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Geometry": set_position}, + attrs={"is_active_output": True}, + ) + + +@node_utils.to_nodegroup( + "nodegroup_simple_tube_skin", singleton=False, type="GeometryNodeTree" +) +def nodegroup_simple_tube_skin(nw: NodeWrangler): + # Code generated using version 2.6.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Curve", None), + ("NodeSocketVector", "RadStartEnd", (0.0500, 0.0500, 1.0000)), + ("NodeSocketInt", "Resolution", 32), + ], + ) + + spline_parameter = nw.new_node(Nodes.SplineParameter) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: 1.0000, 1: spline_parameter.outputs["Factor"]}, + attrs={"operation": "SUBTRACT"}, + ) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: subtract, 1: spline_parameter.outputs["Factor"]}, + attrs={"operation": "MULTIPLY"}, + ) + + sqrt = nw.new_node( + Nodes.Math, input_kwargs={0: multiply}, attrs={"operation": "SQRT"} + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": group_input.outputs["RadStartEnd"]} + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Value": spline_parameter.outputs["Factor"], + 3: separate_xyz.outputs["X"], + 4: separate_xyz.outputs["Y"], + }, + ) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: sqrt, 1: map_range.outputs["Result"]}, + attrs={"operation": "MULTIPLY"}, + ) + + set_curve_radius = nw.new_node( + Nodes.SetCurveRadius, + input_kwargs={"Curve": group_input.outputs["Curve"], "Radius": multiply_1}, + ) + + curve_circle = nw.new_node( + Nodes.CurveCircle, + input_kwargs={"Resolution": group_input.outputs["Resolution"]}, + ) + 
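    # Note: the curve radius set above is sqrt(t * (1 - t)) * lerp(RadStartEnd.x, RadStartEnd.y, t),
    # so the tube pinches closed at both ends of the spline. The circle created above is the
    # sweep profile; below, its Y axis is scaled by RadStartEnd.z to allow an elliptical
    # cross-section before Curve to Mesh sweeps it along the radius-modulated curve.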
+ combine_xyz = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"X": 1.0000, "Y": separate_xyz.outputs["Z"]} + ) + + transform = nw.new_node( + Nodes.Transform, + input_kwargs={"Geometry": curve_circle.outputs["Curve"], "Scale": combine_xyz}, + ) + + curve_to_mesh = nw.new_node( + Nodes.CurveToMesh, + input_kwargs={"Curve": set_curve_radius, "Profile Curve": transform}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Mesh": curve_to_mesh}, + attrs={"is_active_output": True}, + ) diff --git a/infinigen/assets/utils/nodegroups/shader.py b/infinigen/assets/utils/nodegroups/shader.py new file mode 100644 index 000000000..2ecbda5c7 --- /dev/null +++ b/infinigen/assets/utils/nodegroups/shader.py @@ -0,0 +1,257 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. + +# Authors: Mingzhe Wang and Alexander Raistrick + + +from numpy.random import normal as N +from numpy.random import uniform as U + +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler + + +@node_utils.to_nodegroup( + "nodegroup_norm_local_pos", singleton=True, type="ShaderNodeTree" +) +def nodegroup_norm_local_pos(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + attribute_5 = nw.new_node(Nodes.Attribute, attrs={"attribute_name": "local_pos"}) + + attribute_6 = nw.new_node(Nodes.Attribute, attrs={"attribute_name": "skeleton_rad"}) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: attribute_6.outputs["Fac"], 1: -1.0}, + attrs={"operation": "MULTIPLY"}, + ) + + combine_xyz_2 = nw.new_node( + Nodes.CombineXYZ, input_kwargs={"Y": multiply, "Z": multiply} + ) + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketFloat", "X Max", 1.0)] + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": group_input.outputs["X Max"], + "Y": attribute_6.outputs["Fac"], + "Z": attribute_6.outputs["Fac"], + }, + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Vector": attribute_5.outputs["Vector"], + 7: combine_xyz_2, + 8: combine_xyz_1, + }, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Vector": map_range_1.outputs["Vector"]} + ) + + +@node_utils.to_nodegroup("nodegroup_abs_y", singleton=True, type="ShaderNodeTree") +def nodegroup_abs_y(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + group_input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketVector", "Vector", (0.0, 0.0, 0.0))] + ) + + separate_xyz_4 = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": group_input.outputs["Vector"]} + ) + + absolute = nw.new_node( + Nodes.Math, + input_kwargs={0: separate_xyz_4.outputs["Y"]}, + attrs={"operation": "ABSOLUTE"}, + ) + + combine_xyz_1 = nw.new_node( + Nodes.CombineXYZ, + input_kwargs={ + "X": separate_xyz_4.outputs["X"], + "Y": absolute, + "Z": separate_xyz_4.outputs["Z"], + }, + ) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Vector": combine_xyz_1} + ) + + +@node_utils.to_nodegroup("nodegroup_color_mask", singleton=False, type="ShaderNodeTree") +def nodegroup_color_mask(nw: NodeWrangler): + # Code generated using version 2.4.3 of the node_transpiler + + attribute_2 = nw.new_node(Nodes.Attribute, attrs={"attribute_name": "tag_body"}) + + attribute_3 = nw.new_node(Nodes.Attribute, 
attrs={"attribute_name": "tag_leg"}) + + attribute_4 = nw.new_node(Nodes.Attribute, attrs={"attribute_name": "tag_head"}) + + attribute_5 = nw.new_node(Nodes.Attribute, attrs={"attribute_name": "local_pos"}) + + group_2 = nw.new_node( + nodegroup_abs_y().name, input_kwargs={"Vector": attribute_5.outputs["Vector"]} + ) + + musgrave_texture = nw.new_node( + Nodes.MusgraveTexture, + input_kwargs={ + "Vector": group_2, + "W": U(1e4), + "Scale": N(7, 1), + "Detail": N(7, 1), + "Dimension": U(1.5, 3), + }, + attrs={"musgrave_dimensions": "4D"}, + ) + + add = nw.new_node( + Nodes.Math, input_kwargs={0: musgrave_texture, 1: 0.69999999999999996} + ) + + colorramp_4 = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": add}) + colorramp_4.color_ramp.interpolation = "EASE" + colorramp_4.color_ramp.elements[0].position = 0.0 + colorramp_4.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) + colorramp_4.color_ramp.elements[1].position = 0.4864 + colorramp_4.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) + + group = nw.new_node(nodegroup_norm_local_pos().name) + + separate_xyz_4 = nw.new_node(Nodes.SeparateXYZ, input_kwargs={"Vector": group}) + + colorramp_5 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": separate_xyz_4.outputs["Z"]} + ) + colorramp_5.color_ramp.interpolation = "EASE" + colorramp_5.color_ramp.elements[0].position = 0.0 + colorramp_5.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) + colorramp_5.color_ramp.elements[1].position = 0.5318 + colorramp_5.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) + + multiply = nw.new_node( + Nodes.Math, + input_kwargs={0: colorramp_4.outputs["Color"], 1: colorramp_5.outputs["Color"]}, + attrs={"operation": "MULTIPLY"}, + ) + + mix_3 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": attribute_4.outputs["Fac"], + "Color1": (1.0, 1.0, 1.0, 1.0), + "Color2": multiply, + }, + ) + + noise_texture = nw.new_node(Nodes.NoiseTexture, input_kwargs={"Scale": N(14, 2)}) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={ + "Vector": noise_texture.outputs["Color"], + 9: (-0.10000000000000001, -0.10000000000000001, -0.10000000000000001), + 10: (0.10000000000000001, 0.10000000000000001, 0.10000000000000001), + }, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + + add_1 = nw.new_node( + Nodes.VectorMath, input_kwargs={0: group, 1: map_range_1.outputs["Vector"]} + ) + + separate_xyz_2 = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": add_1.outputs["Vector"]} + ) + + colorramp_1 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": separate_xyz_2.outputs["X"]} + ) + colorramp_1.color_ramp.interpolation = "EASE" + colorramp_1.color_ramp.elements[0].position = 0.3091 + colorramp_1.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) + colorramp_1.color_ramp.elements[1].position = 0.9773 + colorramp_1.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) + + colorramp_2 = nw.new_node( + Nodes.ColorRamp, input_kwargs={"Fac": separate_xyz_2.outputs["Y"]} + ) + colorramp_2.color_ramp.interpolation = "EASE" + colorramp_2.color_ramp.elements[0].position = 0.0955 + colorramp_2.color_ramp.elements[0].color = (1.0, 1.0, 1.0, 1.0) + colorramp_2.color_ramp.elements[1].position = 0.5318 + colorramp_2.color_ramp.elements[1].color = (0.0, 0.0, 0.0, 1.0) + + multiply_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: colorramp_1.outputs["Color"], 1: colorramp_2.outputs["Color"]}, + attrs={"operation": "MULTIPLY"}, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={0: 1.0, 1: multiply_1}, + attrs={"operation": "SUBTRACT"}, + ) + + mix_2 
= nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": attribute_3.outputs["Fac"], + "Color1": mix_3, + "Color2": subtract, + }, + ) + + separate_xyz_3 = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": add_1.outputs["Vector"]} + ) + + add_2 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz_3.outputs["Z"]}) + + colorramp_3 = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": add_2}) + colorramp_3.color_ramp.elements[0].position = 0.2 + colorramp_3.color_ramp.elements[0].color = (0.0, 0.0, 0.0, 1.0) + colorramp_3.color_ramp.elements[1].position = 0.6136 + colorramp_3.color_ramp.elements[1].color = (1.0, 1.0, 1.0, 1.0) + + mix_1 = nw.new_node( + Nodes.MixRGB, + input_kwargs={ + "Fac": attribute_2.outputs["Fac"], + "Color1": mix_2, + "Color2": colorramp_3.outputs["Color"], + }, + ) + + colorramp = nw.new_node(Nodes.ColorRamp, input_kwargs={"Fac": mix_1}) + colorramp.color_ramp.elements.new(0) + colorramp.color_ramp.elements[0].position = 0.2727 + colorramp.color_ramp.elements[0].color = (1.0, 1.0, 1.0, 1.0) + colorramp.color_ramp.elements[1].position = 0.6091 + colorramp.color_ramp.elements[1].color = ( + 0.78220000000000001, + 0.78220000000000001, + 0.78220000000000001, + 1.0, + ) + colorramp.color_ramp.elements[2].position = 0.9727 + colorramp.color_ramp.elements[2].color = (0.0, 0.0, 0.0, 1.0) + + group_output = nw.new_node( + Nodes.GroupOutput, input_kwargs={"Color": colorramp.outputs["Color"]} + ) diff --git a/infinigen/assets/utils/object.py b/infinigen/assets/utils/object.py index f8ebe7e9a..081d489fd 100644 --- a/infinigen/assets/utils/object.py +++ b/infinigen/assets/utils/object.py @@ -12,12 +12,11 @@ import infinigen.core.util.blender as butil from infinigen.assets.utils.decorate import read_co -from infinigen.core.util import blender as butil from infinigen.core.util.blender import select_none def center(obj): - return (Vector(obj.bound_box[0]) + Vector(obj.bound_box[-2])) * obj.scale / 2. 
+ return (Vector(obj.bound_box[0]) + Vector(obj.bound_box[-2])) * obj.scale / 2.0 def origin2lowest(obj, vertical=False, centered=False, approximate=False): @@ -27,7 +26,7 @@ def origin2lowest(obj, vertical=False, centered=False, approximate=False): i = np.argmin(co[:, -1]) if approximate: indices = np.argsort(co[:, -1]) - obj.location = -np.mean(co[indices[:len(co) // 10]], 0) + obj.location = -np.mean(co[indices[: len(co) // 10]], 0) obj.location[-1] = -co[i, -1] elif centered: obj.location = -center(obj) @@ -53,7 +52,7 @@ def origin2leftmost(obj): butil.apply_transform(obj, loc=True) -def data2mesh(vertices=(), edges=(), faces=(), name=''): +def data2mesh(vertices=(), edges=(), faces=(), name=""): mesh = bpy.data.meshes.new(name) mesh.from_pydata(vertices, edges, faces) mesh.update() @@ -68,23 +67,23 @@ def mesh2obj(mesh): def trimesh2obj(trimesh): - obj = butil.object_from_trimesh(trimesh, '') + obj = butil.object_from_trimesh(trimesh, "") bpy.context.scene.collection.objects.link(obj) bpy.context.view_layer.objects.active = obj return obj def obj2trimesh(obj): - butil.modify_mesh(obj, 'TRIANGULATE', min_vertices=3) + butil.modify_mesh(obj, "TRIANGULATE", min_vertices=3) vertices = read_co(obj) arr = np.zeros(len(obj.data.polygons) * 3) - obj.data.polygons.foreach_get('vertices', arr) + obj.data.polygons.foreach_get("vertices", arr) faces = arr.reshape(-1, 3) return trimesh.Trimesh(vertices, faces) def new_cube(**kwargs): - kwargs['location'] = kwargs.get('location', (0, 0, 0)) + kwargs["location"] = kwargs.get("location", (0, 0, 0)) bpy.ops.mesh.primitive_cube_add(**kwargs) return bpy.context.active_object @@ -106,13 +105,13 @@ def new_bbox_2d(x, x_, y, y_, z=0): def new_icosphere(**kwargs): - kwargs['location'] = kwargs.get('location', (0, 0, 0)) + kwargs["location"] = kwargs.get("location", (0, 0, 0)) bpy.ops.mesh.primitive_ico_sphere_add(**kwargs) return bpy.context.active_object def new_circle(**kwargs): - kwargs['location'] = kwargs.get('location', (1, 0, 0)) + kwargs["location"] = kwargs.get("location", (1, 0, 0)) bpy.ops.mesh.primitive_circle_add(**kwargs) obj = bpy.context.active_object butil.apply_transform(obj, loc=True) @@ -120,22 +119,22 @@ def new_circle(**kwargs): def new_base_circle(**kwargs): - kwargs['location'] = kwargs.get('location', (0, 0, 0)) + kwargs["location"] = kwargs.get("location", (0, 0, 0)) bpy.ops.mesh.primitive_circle_add(**kwargs) obj = bpy.context.active_object return obj def new_empty(**kwargs): - kwargs['location'] = kwargs.get('location', (0, 0, 0)) + kwargs["location"] = kwargs.get("location", (0, 0, 0)) bpy.ops.object.empty_add(**kwargs) obj = bpy.context.active_object - obj.scale = kwargs.get('scale', (1, 1, 1)) + obj.scale = kwargs.get("scale", (1, 1, 1)) return obj def new_plane(**kwargs): - kwargs['location'] = kwargs.get('location', (0, 0, 0)) + kwargs["location"] = kwargs.get("location", (0, 0, 0)) bpy.ops.mesh.primitive_plane_add(**kwargs) obj = bpy.context.active_object butil.apply_transform(obj, loc=True) @@ -143,8 +142,8 @@ def new_plane(**kwargs): def new_cylinder(**kwargs): - kwargs['location'] = kwargs.get('location', (0, 0, .5)) - kwargs['depth'] = kwargs.get('depth', 1) + kwargs["location"] = kwargs.get("location", (0, 0, 0.5)) + kwargs["depth"] = kwargs.get("depth", 1) bpy.ops.mesh.primitive_cylinder_add(**kwargs) obj = bpy.context.active_object butil.apply_transform(obj, loc=True) @@ -159,16 +158,22 @@ def new_base_cylinder(**kwargs): def new_grid(**kwargs): - kwargs['location'] = kwargs.get('location', (0, 0, 0)) + 
kwargs["location"] = kwargs.get("location", (0, 0, 0)) bpy.ops.mesh.primitive_grid_add(**kwargs) obj = bpy.context.active_object butil.apply_transform(obj, loc=True) return obj -def new_line(subdivisions=1, scale=1.): +def new_line(subdivisions=1, scale=1.0): vertices = np.stack( - [np.linspace(0, scale, subdivisions + 1), np.zeros(subdivisions + 1), np.zeros(subdivisions + 1)], -1) + [ + np.linspace(0, scale, subdivisions + 1), + np.zeros(subdivisions + 1), + np.zeros(subdivisions + 1), + ], + -1, + ) edges = np.stack([np.arange(subdivisions), np.arange(1, subdivisions + 1)], -1) obj = mesh2obj(data2mesh(vertices, edges)) return obj @@ -203,12 +208,12 @@ def separate_loose(obj): def print3d_clean_up(obj): - bpy.ops.preferences.addon_enable(module='object_print3d_utils') - with butil.ViewportMode(obj, 'EDIT'), butil.Suppress(): - bpy.ops.mesh.select_all(action='SELECT') - bpy.ops.mesh.quads_convert_to_tris(quad_method='BEAUTY', ngon_method='BEAUTY') + bpy.ops.preferences.addon_enable(module="object_print3d_utils") + with butil.ViewportMode(obj, "EDIT"), butil.Suppress(): + bpy.ops.mesh.select_all(action="SELECT") + bpy.ops.mesh.quads_convert_to_tris(quad_method="BEAUTY", ngon_method="BEAUTY") bpy.ops.mesh.fill_holes() - bpy.ops.mesh.quads_convert_to_tris(quad_method='BEAUTY', ngon_method='BEAUTY') + bpy.ops.mesh.quads_convert_to_tris(quad_method="BEAUTY", ngon_method="BEAUTY") bpy.ops.mesh.normals_make_consistent() bpy.ops.mesh.print3d_clean_distorted() bpy.ops.mesh.print3d_clean_non_manifold() diff --git a/infinigen/assets/utils/physics.py b/infinigen/assets/utils/physics.py index 0af5b2c4a..d0f043f05 100644 --- a/infinigen/assets/utils/physics.py +++ b/infinigen/assets/utils/physics.py @@ -11,7 +11,7 @@ def free_fall(actives, passives, place_fn, t=100): - height = 0. 
+ height = 0.0 for o in sorted(actives, key=lambda o: -o.dimensions[-1]): height = place_fn(o, height) with EnablePhysics(actives, passives): @@ -24,7 +24,6 @@ def free_fall(actives, passives, place_fn, t=100): class EnablePhysics: - def __init__(self, actives, passives): self.actives = actives self.passives = passives @@ -35,12 +34,12 @@ def __enter__(self): self.frame_end = bpy.context.scene.frame_start for a in self.actives: with butil.SelectObjects(a): - bpy.ops.rigidbody.objects_add(type='ACTIVE') + bpy.ops.rigidbody.objects_add(type="ACTIVE") bpy.ops.rigidbody.mass_calculate() for p in self.passives: with butil.SelectObjects(p): - bpy.ops.rigidbody.objects_add(type='PASSIVE') - bpy.context.object.rigid_body.collision_shape = 'MESH' + bpy.ops.rigidbody.objects_add(type="PASSIVE") + bpy.context.object.rigid_body.collision_shape = "MESH" def __exit__(self, *_): bpy.ops.rigidbody.world_remove() diff --git a/infinigen/assets/utils/reaction_diffusion.py b/infinigen/assets/utils/reaction_diffusion.py index 46d7eb516..5b65fc06c 100644 --- a/infinigen/assets/utils/reaction_diffusion.py +++ b/infinigen/assets/utils/reaction_diffusion.py @@ -5,15 +5,24 @@ import math -import bpy + import bmesh import numpy as np -import tqdm -from numpy.random import uniform, normal +from numpy.random import normal, uniform -def reaction_diffusion(obj, weight_fn, steps=1000, dt=1., scale=.5, diff_a=.18, diff_b=.09, feed_rate=.055, - kill_rate=.062, perturb=.05): +def reaction_diffusion( + obj, + weight_fn, + steps=1000, + dt=1.0, + scale=0.5, + diff_a=0.18, + diff_b=0.09, + feed_rate=0.055, + kill_rate=0.062, + perturb=0.05, +): diff_a = diff_a * scale diff_b = diff_b * scale bm = bmesh.new() @@ -31,7 +40,7 @@ def reaction_diffusion(obj, weight_fn, steps=1000, dt=1., scale=.5, diff_a=.18, b_msg = b[edge_to] - b[edge_from] lap_a = np.bincount(edge_from, a_msg, size) - np.bincount(edge_to, a_msg, size) lap_b = np.bincount(edge_from, b_msg, size) - np.bincount(edge_to, b_msg, size) - ab2 = a * b ** 2 + ab2 = a * b**2 new_a = a + (diff_a * lap_a - ab2 + feed_rate * (1 - a)) * dt new_b = b + (diff_b * lap_b + ab2 - (kill_rate + feed_rate) * b) * dt a = new_a @@ -47,15 +56,15 @@ def reaction_diffusion(obj, weight_fn, steps=1000, dt=1., scale=.5, diff_a=.18, lap_a *= 1 + normal(0, perturb, n) lap_a *= 1 + normal(0, perturb, n) - vg_a = obj.vertex_groups.new(name='A') - vg_b = obj.vertex_groups.new(name='B') - vg_la = obj.vertex_groups.new(name='LA') - vg_lb = obj.vertex_groups.new(name='LB') + vg_a = obj.vertex_groups.new(name="A") + vg_b = obj.vertex_groups.new(name="B") + vg_la = obj.vertex_groups.new(name="LA") + vg_lb = obj.vertex_groups.new(name="LB") for i in range(n): - vg_la.add([i], lap_a[i], 'REPLACE') - vg_lb.add([i], lap_b[i], 'REPLACE') - vg_a.add([i], a[i], 'REPLACE') - vg_b.add([i], b[i], 'REPLACE') + vg_la.add([i], lap_a[i], "REPLACE") + vg_lb.add([i], lap_b[i], "REPLACE") + vg_a.add([i], a[i], "REPLACE") + vg_b.add([i], b[i], "REPLACE") obj.vertex_groups.update() obj.data.update() @@ -64,11 +73,13 @@ def feed2kill(feed): return math.sqrt(feed) / 2 - feed -def make_periodic_weight_fn(n_instances, stride=.1): +def make_periodic_weight_fn(n_instances, stride=0.1): def periodic_weight_fn(coords): multiplier = uniform(20, 100, (1, n_instances)) center = coords[np.random.randint(0, len(coords) - 1, n_instances)] - phi = (np.expand_dims(coords, 1) * np.expand_dims(center, 0)).sum(-1) * multiplier + phi = (np.expand_dims(coords, 1) * np.expand_dims(center, 0)).sum( + -1 + ) * multiplier measure = 
np.cos(phi).sum(-1) / math.sqrt(n_instances) return (np.abs(measure) < stride).astype(float) diff --git a/infinigen/assets/utils/shapes.py b/infinigen/assets/utils/shapes.py index 82676b1eb..f39f7d433 100644 --- a/infinigen/assets/utils/shapes.py +++ b/infinigen/assets/utils/shapes.py @@ -6,12 +6,11 @@ import numpy as np import shapely from shapely import Polygon, remove_repeated_points, simplify - -from shapely.ops import linemerge, orient, polygonize, unary_union, shared_paths +from shapely.ops import linemerge, orient, polygonize, shared_paths, unary_union from trimesh.creation import triangulate_polygon -from infinigen.assets.utils.decorate import write_co, read_co, select_faces, read_normal -from infinigen.assets.utils.object import new_circle, data2mesh, mesh2obj, join_objects +from infinigen.assets.utils.decorate import read_co, read_normal, select_faces, write_co +from infinigen.assets.utils.object import data2mesh, join_objects, mesh2obj, new_circle from infinigen.core.util import blender as butil @@ -24,7 +23,7 @@ def is_valid_polygon(p): def simplify_polygon(p): with np.errstate(invalid="ignore"): - p = remove_repeated_points(simplify(p, 1e-6).normalize(), .01) + p = remove_repeated_points(simplify(p, 1e-6).normalize(), 0.01) return p @@ -35,23 +34,28 @@ def cut_polygon_by_line(polygon, *args): return list(polygons) -def safe_polygon2obj(p, reversed=False, z=0): - ps = [p] if p.geom_type == 'Polygon' else p.geoms - objs_ = [] - for p in ps: - p = orient(p).segmentize(.005) - try: - obj = triangulate_polygon2obj(p) - objs_.append(obj) - except: - try: - obj = polygon2obj(p) - objs_.append(obj) - except: - pass - if len(objs_) == 0: +def safe_polygon_to_obj_single(p: Polygon): + p = orient(p).segmentize(0.005) + try: + return triangulate_polygon2obj(p) + except Exception: # TODO narrow this + pass + + try: + return polygon2obj(p) + except Exception: # TODO narrow this + pass + + +def safe_polygon2obj(poly, reversed=False, z=0): + ps = [poly] if poly.geom_type == "Polygon" else poly.geoms + + objs = [safe_polygon_to_obj_single(p) for p in ps] + objs = [o for o in objs if o is not None] + + if len(objs) == 0: return None - obj = join_objects(objs_) + obj = join_objects(objs) obj.location[-1] = z butil.apply_transform(obj, True) point_normal_up(obj, reversed) @@ -70,9 +74,9 @@ def polygon2obj(p, reversed=False, z=0): write_co(o, np.concatenate([coords, np.zeros((len(coords), 1))], -1)) objs.append(o) obj = join_objects(objs) - butil.modify_mesh(obj, 'WELD', merge_threshold=1e-6) - with butil.ViewportMode(obj, 'EDIT'): - bpy.ops.mesh.select_all(action='SELECT') + butil.modify_mesh(obj, "WELD", merge_threshold=1e-6) + with butil.ViewportMode(obj, "EDIT"): + bpy.ops.mesh.select_all(action="SELECT") bpy.ops.mesh.fill() dissolve_limited(obj) obj.location[-1] = z @@ -82,7 +86,7 @@ def polygon2obj(p, reversed=False, z=0): def point_normal_up(obj, reversed=False): - with butil.ViewportMode(obj, 'EDIT'): + with butil.ViewportMode(obj, "EDIT"): no_z = read_normal(obj)[:, -1] select_faces(obj, (no_z > 0) if reversed else (no_z < 0)) bpy.ops.mesh.flip_normals() @@ -95,34 +99,39 @@ def triangulate_polygon2obj(p): co = read_co(obj) co[:, -1] = 0 write_co(obj, co) - butil.modify_mesh(obj, 'WELD', merge_threshold=1e-6) + butil.modify_mesh(obj, "WELD", merge_threshold=1e-6) dissolve_limited(obj) return obj def dissolve_limited(obj): - with butil.ViewportMode(obj, 'EDIT'), butil.Suppress(): - for angle_limit in reversed(.05 * .1 ** np.arange(5)): - bpy.ops.mesh.select_mode(type='FACE') - 
bpy.ops.mesh.select_all(action='SELECT') + with butil.ViewportMode(obj, "EDIT"), butil.Suppress(): + for angle_limit in reversed(0.05 * 0.1 ** np.arange(5)): + bpy.ops.mesh.select_mode(type="FACE") + bpy.ops.mesh.select_all(action="SELECT") bpy.ops.mesh.dissolve_limited(angle_limit=angle_limit) def obj2polygon(obj): co = read_co(obj)[:, :2] p = shapely.union_all( - [shapely.make_valid(orient(shapely.Polygon(co[p.vertices]))) for p in obj.data.polygons] + [ + shapely.make_valid(orient(shapely.Polygon(co[p.vertices]))) + for p in obj.data.polygons + ] ) return shapely.make_valid(shapely.simplify(p, 1e-6)) def buffer(p, distance): with np.errstate(invalid="ignore"): - return remove_repeated_points(simplify(p.buffer(distance, join_style='mitre', cap_style='flat'), 1e-6)) + return remove_repeated_points( + simplify(p.buffer(distance, join_style="mitre", cap_style="flat"), 1e-6) + ) def segment_filter(mls, margin): - for ls in mls.geoms if mls.geom_type == 'MultiLineString' else [mls]: + for ls in mls.geoms if mls.geom_type == "MultiLineString" else [mls]: coords = np.array(ls.coords) if len(coords) < 2: continue diff --git a/infinigen/assets/utils/shortest_path.py b/infinigen/assets/utils/shortest_path.py index ec4abaf0e..c6009e89c 100644 --- a/infinigen/assets/utils/shortest_path.py +++ b/infinigen/assets/utils/shortest_path.py @@ -5,43 +5,104 @@ # Authors: Lingjie Mei - +from infinigen.core import surface from infinigen.core.nodes.node_info import Nodes from infinigen.core.nodes.node_wrangler import NodeWrangler -from infinigen.core import surface -def geo_shortest_path(nw: NodeWrangler, end_index, weight, trim_threshold=.1, offset=0., merge_threshold=.005, - subdiv=0): +def geo_shortest_path( + nw: NodeWrangler, + end_index, + weight, + trim_threshold=0.1, + offset=0.0, + merge_threshold=0.005, + subdiv=0, +): weight = surface.eval_argument(nw, weight) end_index = surface.eval_argument(nw, end_index) - geometry = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketGeometry', 'Geometry', None)]) - - geometry = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': geometry, 'Name': 'custom_normal', 'Value': nw.new_node(Nodes.InputNormal)}, - attrs={'data_type': 'FLOAT_VECTOR'}) - curve = nw.new_node(Nodes.EdgePathToCurve, - [geometry, None, nw.new_node(Nodes.ShortestEdgePath, [end_index, weight]).outputs[0]]) - curve = nw.new_node(Nodes.SplineType, [curve], attrs={'spline_type': 'NURBS'}) - curve = nw.new_node(Nodes.TrimCurve, input_kwargs={'Curve': curve, 'Start': trim_threshold}) - curve = nw.new_node(Nodes.ResampleCurve, [curve], input_kwargs={'Length': .001}, attrs={'mode': 'LENGTH'}) - curve = nw.new_node(Nodes.StoreNamedAttribute, - input_kwargs={'Geometry': curve, 'Name': 'spline_parameter', 'Value': nw.new_node(Nodes.SplineParameter)}) - geometry = nw.new_node(Nodes.MergeByDistance, [nw.curve2mesh(curve), None, merge_threshold]) - - distance = nw.vector_math('DISTANCE', *nw.new_node(Nodes.InputEdgeVertices).outputs[2:]) - curve = nw.new_node(Nodes.EdgePathToCurve, [geometry, None, nw.new_node(Nodes.ShortestEdgePath, [ - nw.compare('EQUAL', nw.new_node(Nodes.Index), 0), distance]).outputs[0]]) + geometry = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) + + geometry = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": geometry, + "Name": "custom_normal", + "Value": nw.new_node(Nodes.InputNormal), + }, + attrs={"data_type": "FLOAT_VECTOR"}, + ) + curve = nw.new_node( + Nodes.EdgePathToCurve, + [ + 
geometry, + None, + nw.new_node(Nodes.ShortestEdgePath, [end_index, weight]).outputs[0], + ], + ) + curve = nw.new_node(Nodes.SplineType, [curve], attrs={"spline_type": "NURBS"}) + curve = nw.new_node( + Nodes.TrimCurve, input_kwargs={"Curve": curve, "Start": trim_threshold} + ) + curve = nw.new_node( + Nodes.ResampleCurve, + [curve], + input_kwargs={"Length": 0.001}, + attrs={"mode": "LENGTH"}, + ) + curve = nw.new_node( + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": curve, + "Name": "spline_parameter", + "Value": nw.new_node(Nodes.SplineParameter), + }, + ) + geometry = nw.new_node( + Nodes.MergeByDistance, [nw.curve2mesh(curve), None, merge_threshold] + ) + + distance = nw.vector_math( + "DISTANCE", *nw.new_node(Nodes.InputEdgeVertices).outputs[2:] + ) + curve = nw.new_node( + Nodes.EdgePathToCurve, + [ + geometry, + None, + nw.new_node( + Nodes.ShortestEdgePath, + [nw.compare("EQUAL", nw.new_node(Nodes.Index), 0), distance], + ).outputs[0], + ], + ) if subdiv > 0: curve = nw.new_node(Nodes.SubdivisionSurface, [curve, subdiv]) curve = nw.new_node( - Nodes.StoreNamedAttribute, - input_kwargs={'Geometry':curve, 'Name': 'tangent', 'Value': nw.new_node(Nodes.CurveTangent)}, - attrs={'data_type': 'FLOAT_VECTOR'}) + Nodes.StoreNamedAttribute, + input_kwargs={ + "Geometry": curve, + "Name": "tangent", + "Value": nw.new_node(Nodes.CurveTangent), + }, + attrs={"data_type": "FLOAT_VECTOR"}, + ) geometry = nw.new_node(Nodes.MergeByDistance, [nw.curve2mesh(curve)]) - geometry = nw.new_node(Nodes.SetPosition, [geometry, None, None, - nw.scale(nw.new_node(Nodes.InputNormal), nw.scalar_multiply(nw.musgrave(), offset))]) - nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': geometry}) + geometry = nw.new_node( + Nodes.SetPosition, + [ + geometry, + None, + None, + nw.scale( + nw.new_node(Nodes.InputNormal), + nw.scalar_multiply(nw.musgrave(), offset), + ), + ], + ) + nw.new_node(Nodes.GroupOutput, input_kwargs={"Geometry": geometry}) return geometry diff --git a/infinigen/assets/utils/uv.py b/infinigen/assets/utils/uv.py index 72e8090a6..ff9d518ee 100644 --- a/infinigen/assets/utils/uv.py +++ b/infinigen/assets/utils/uv.py @@ -1,23 +1,30 @@ # Copyright (c) Princeton University. # This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
+import logging
+
 # Authors: Lingjie Mei
 from collections.abc import Iterable
 
 import bpy
-import bmesh
 import numpy as np
 from sklearn.linear_model import LinearRegression
 
 from infinigen.assets.materials import common
 from infinigen.assets.utils.decorate import (
-    read_co, read_edges, read_loop_edges, read_loop_starts,
-    read_loop_totals, read_loop_vertices, read_normal, read_uv, select_faces, write_uv,
+    read_co,
+    read_edges,
+    read_loop_edges,
+    read_loop_starts,
+    read_loop_totals,
+    read_loop_vertices,
+    read_normal,
+    read_uv,
+    select_faces,
+    write_uv,
 )
 from infinigen.core.util import blender as butil
 
-import logging
-
 logger = logging.getLogger(__name__)
@@ -40,9 +47,9 @@ def unwrap_faces(obj, selection=None):
         smart = True
     else:
         uv = read_uv(obj)[selection.astype(bool)[face_corner2faces(obj)]]
-        smart = (np.isnan(uv) | (np.abs(uv) < .1)).sum() / uv.size > .5
+        smart = (np.isnan(uv) | (np.abs(uv) < 0.1)).sum() / uv.size > 0.5
     butil.select_none()
-    with butil.ViewportMode(obj, 'EDIT'):
+    with butil.ViewportMode(obj, "EDIT"):
         bpy.ops.mesh.select_mode(type="FACE")
         select_faces(obj, selection)
         if smart:
@@ -55,27 +62,27 @@ def str2vec(axis):
     if not isinstance(axis, str):
         return axis
     match axis[-1].lower():
-        case 'x':
+        case "x":
             vec = 1, 0, 0
-        case 'y':
+        case "y":
             vec = 0, 1, 0
-        case 'z':
+        case "z":
             vec = 0, 0, 1
-        case 'u':
+        case "u":
             vec = -1, 0, 0
-        case 'v':
+        case "v":
             vec = 0, -1, 0
-        case 'w':
+        case "w":
             vec = 0, 0, -1
         case _:
             raise NotImplementedError
     vec = np.array(vec)
-    if axis[0] == '-':
+    if axis[0] == "-":
         vec = -vec
     return vec
 
 
-def compute_uv_direction(obj, x='x', y='y', selection=None):
+def compute_uv_direction(obj, x="x", y="y", selection=None):
     ensure_uv(obj, selection)
     x, y = str2vec(x), str2vec(y)
     co = read_co(obj)
@@ -105,21 +112,38 @@ def compute_uv_direction(obj, x='x', y='y', selection=None):
     if x_max - x_min > y_max - y_min:
         scale = 1 / (x_max - x_min + 1e-4)
         mid = (y_max + y_min) / 2
-        pred = np.stack([(pred[:, 0] - x_min) * scale, (pred[:, 1] - mid) * scale + .5], -1)
-        bbox = 0, 1, .5 - .5 * (y_max - y_min) * scale, .5 + .5 * (y_max - y_min) * scale
+        pred = np.stack(
+            [(pred[:, 0] - x_min) * scale, (pred[:, 1] - mid) * scale + 0.5], -1
+        )
+        bbox = (
+            0,
+            1,
+            0.5 - 0.5 * (y_max - y_min) * scale,
+            0.5 + 0.5 * (y_max - y_min) * scale,
+        )
     else:
         scale = 1 / (y_max - y_min + 1e-4)
         mid = (x_max + x_min) / 2
-        pred = np.stack([(pred[:, 0] - mid) * scale + .5, (pred[:, 1] - y_min) * scale], -1)
-        bbox = .5 - .5 * (x_max - x_min) * scale, .5 + .5 * (x_max - x_min) * scale, 0, 1
+        pred = np.stack(
+            [(pred[:, 0] - mid) * scale + 0.5, (pred[:, 1] - y_min) * scale], -1
+        )
+        bbox = (
+            0.5 - 0.5 * (x_max - x_min) * scale,
+            0.5 + 0.5 * (x_max - x_min) * scale,
+            0,
+            1,
+        )
     new_uv = np.where(selection[:, np.newaxis], pred, uv)
     write_uv(obj, new_uv)
     return bbox
 
 
 def max_bbox(bboxes):
-    return min(b[0] for b in bboxes), max(b[1] for b in bboxes), min(b[2] for b in bboxes), max(
-        b[3] for b in bboxes
+    return (
+        min(b[0] for b in bboxes),
+        max(b[1] for b in bboxes),
+        min(b[2] for b in bboxes),
+        max(b[3] for b in bboxes),
     )
@@ -134,37 +158,59 @@ def wrap_sides(obj, surface, axes, xs, ys, groupings=None, selection=None, **kwa
         selected = faces == i
         selections.append(selected)
         unwrap_faces(obj, selected)
-        bboxes.append(compute_uv_direction(obj, str2vec(xs[i]), str2vec(ys[i]), selected[fc2f]))
+        bboxes.append(
+            compute_uv_direction(obj, str2vec(xs[i]), str2vec(ys[i]), selected[fc2f])
+        )
     if groupings is None:
         groupings = [[i] for i in range(len(axes))]
     for indices in groupings:
         selected = sum(selections[i] for i in indices)
         try:
-            surface.apply(obj, selected, bbox=max_bbox([bboxes[i] for i in indices]), **kwargs)
+            surface.apply(
+                obj, selected, bbox=max_bbox([bboxes[i] for i in indices]), **kwargs
+            )
         except TypeError:
-            logger.debug(f'apply() for {surface=} with kwarg bbox failed, trying again without')
+            logger.debug(
+                f"apply() for {surface=} with kwarg bbox failed, trying again without"
+            )
             surface.apply(obj, selected, **kwargs)
 
 
 def wrap_front_back(obj, surface, shared=True, **kwargs):
-    wrap_sides(obj, surface, 'vy', 'xu', 'zz', [[0, 1]] if shared else None, **kwargs)
-
+    wrap_sides(obj, surface, "vy", "xu", "zz", [[0, 1]] if shared else None, **kwargs)
+
+
 def wrap_top_bottom(obj, surface, shared=True, **kwargs):
-    wrap_sides(obj, surface, 'zw', 'xu', 'yy', [[0, 1]] if shared else None, **kwargs)
+    wrap_sides(obj, surface, "zw", "xu", "yy", [[0, 1]] if shared else None, **kwargs)
 
 
 def wrap_front_back_side(obj, surface, shared=True, **kwargs):
-    wrap_sides(obj, surface, 'vuy', 'xyu', 'zzz', [[0, 2], [1]] if shared else None, **kwargs)
+    wrap_sides(
+        obj, surface, "vuy", "xyu", "zzz", [[0, 2], [1]] if shared else None, **kwargs
+    )
 
 
 def wrap_four_sides(obj, surface, shared=True, **kwargs):
-    wrap_sides(obj, surface, 'vxyu', 'xyuv', 'zzzz', [[0, 2], [1, 3]] if shared else None, **kwargs)
+    wrap_sides(
+        obj,
+        surface,
+        "vxyu",
+        "xyuv",
+        "zzzz",
+        [[0, 2], [1, 3]] if shared else None,
+        **kwargs,
+    )
 
 
 def wrap_six_sides(obj, surface, shared=True, **kwargs):
     wrap_sides(
-        obj, surface, 'vxyuzw', 'xyuvxx', 'zzzzyv', [[0, 2], [1, 3], [4, 5]] if shared else None,
-        **kwargs
+        obj,
+        surface,
+        "vxyuzw",
+        "xyuvxx",
+        "zzzzyv",
+        [[0, 2], [1, 3], [4, 5]] if shared else None,
+        **kwargs,
     )
@@ -191,7 +237,9 @@ def unwrap_normal(obj, selection=None, axis=None, axis_=None):
     axis = axis[np.newaxis, :] - np.inner(axis, normal)[:, np.newaxis] * normal
     axis /= np.maximum(np.linalg.norm(axis, axis=-1, keepdims=True), 1e-4)
     axis_ = np.cross(normal, axis)
-    uv = np.stack([(co[loop_vertices] * axis).sum(1), (co[loop_vertices] * axis_).sum(1)], -1)
+    uv = np.stack(
+        [(co[loop_vertices] * axis).sum(1), (co[loop_vertices] * axis_).sum(1)], -1
+    )
     uv = np.where(selection[:, np.newaxis], uv, read_uv(obj))
     write_uv(obj, uv)
diff --git a/infinigen/assets/wall_decorations/range_hood.py b/infinigen/assets/wall_decorations/range_hood.py
deleted file mode 100644
index 2cbbfd7b8..000000000
--- a/infinigen/assets/wall_decorations/range_hood.py
+++ /dev/null
@@ -1,191 +0,0 @@
-# Copyright (c) Princeton University.
-# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree.
- -# Authors: Yiming Zuo - -import bpy -import bpy -import mathutils -import numpy as np -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface - -import infinigen.core.util.blender as butil -from infinigen.core.util.math import FixedSeed -from infinigen.core.placement.factory import AssetFactory - -from infinigen.assets.table_decorations.utils import nodegroup_lofting_poly -from infinigen.assets.tables.table_utils import nodegroup_n_gon_profile -from infinigen.assets.material_assignments import AssetList - - -class RangeHoodFactory(AssetFactory): - def __init__(self, factory_seed, coarse=False, dimensions=None): - super(RangeHoodFactory, self).__init__(factory_seed, coarse=coarse) - - self.dimensions = dimensions - - with FixedSeed(factory_seed): - self.params = self.sample_parameters(dimensions) - self.surface, self.scratch, self.edge_wear = self.get_material_params() - - - def get_material_params(self): - material_assignments = AssetList['RangeHoodFactory']() - surface = material_assignments['surface'].assign_material() - - scratch_prob, edge_wear_prob = material_assignments['wear_tear_prob'] - scratch, edge_wear = material_assignments['wear_tear'] - - is_scratch = np.random.uniform() < scratch_prob - is_edge_wear = np.random.uniform() < edge_wear_prob - if not is_scratch: - scratch = None - - if not is_edge_wear: - edge_wear = None - - return surface, scratch, edge_wear - - @staticmethod - def sample_parameters(dimensions): - # all in meters - if dimensions is None: - x = 0.55 - y = 0.75 - z = 1.0 - dimensions = (x, y, z) - - x, y, z = dimensions - - height_1 = uniform(0.05, 0.07) - height_2 = uniform(0.1, 0.3) - scale_2 = uniform(0.25, 0.4) - - parameters = { - 'Height_total': z, - 'Width': y, - 'Depth': x, - 'Height_1': height_1, - 'Scale_2': scale_2, - 'Height_2': height_2 - } - - return parameters - - def create_asset(self, **params): - - bpy.ops.mesh.primitive_plane_add( - size=2, enter_editmode=False, align='WORLD', location=(0, 0, 0), scale=(1, 1, 1)) - obj = bpy.context.active_object - - surface.add_geomod(obj, geometry_generate_hood, apply=True, input_kwargs=self.params) - butil.modify_mesh(obj, 'SOLIDIFY', apply=True, thickness=.002) - butil.modify_mesh(obj, 'SUBSURF', apply=True, levels=1, render_levels=1) - - return obj - - def finalize_assets(self, assets): - self.surface.apply(assets) - if self.scratch: - self.scratch.apply(assets) - if self.edge_wear: - self.edge_wear.apply(assets) - - - -def geometry_generate_hood(nw: NodeWrangler, **kwargs): - # Code generated using version 2.6.4 of the node_transpiler - - generatetabletop = nw.new_node(geometry_range_hood().name, - input_kwargs={'Resolution': 64, - 'Height_total': kwargs['Height_total'], - 'Width': kwargs['Width'], - 'Depth': kwargs['Depth'], - 'Height_1': kwargs['Height_1'], - 'Scale_2': kwargs['Scale_2'], - 'Height_2': kwargs['Height_2'], - }) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': generatetabletop}, attrs={'is_active_output': True}) - -@node_utils.to_nodegroup('geometry_range_hood', singleton=False, type='GeometryNodeTree') -def geometry_range_hood(nw: NodeWrangler): - # Code generated using version 2.6.5 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketInt', 'Resolution', 128), - ('NodeSocketFloat', 'Height_total', 0.0000), - 
('NodeSocketFloat', 'Width', 0.0000), - ('NodeSocketFloat', 'Depth', 0.0000), - ('NodeSocketFloat', 'Profile Fillet Ratio', 0.0100), - ('NodeSocketFloat', 'Height_1', 0.0000), - ('NodeSocketFloat', 'Scale_2', 0.0000), - ('NodeSocketFloat', 'Height_2', 0.3000)]) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Width"], 1: 1.4140}, attrs={'operation': 'MULTIPLY'}) - - divide = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Depth"], 1: group_input.outputs["Width"]}, - attrs={'operation': 'DIVIDE'}) - - ngonprofile = nw.new_node(nodegroup_n_gon_profile().name, - input_kwargs={'Profile Width': multiply, 'Profile Aspect Ratio': divide, 'Profile Fillet Ratio': group_input.outputs["Profile Fillet Ratio"]}) - - resample_curve = nw.new_node(Nodes.ResampleCurve, input_kwargs={'Curve': ngonprofile, 'Count': group_input.outputs["Resolution"]}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Depth"]}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': multiply_1}) - - transform_geometry = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': resample_curve, 'Translation': combine_xyz}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': group_input.outputs["Height_1"]}) - - transform_geometry_1 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': transform_geometry, 'Translation': combine_xyz_1}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': group_input.outputs["Height_2"]}) - - transform_geometry_2 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': transform_geometry, 'Translation': combine_xyz_2, 'Scale': group_input.outputs["Scale_2"]}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Height_total"], 1: group_input.outputs["Height_2"]}, - attrs={'operation': 'SUBTRACT'}) - - combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': subtract}) - - transform_geometry_3 = nw.new_node(Nodes.Transform, input_kwargs={'Geometry': transform_geometry_2, 'Translation': combine_xyz_3}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [transform_geometry_3, transform_geometry_2, transform_geometry_1, transform_geometry]}) - - lofting_poly = nw.new_node(nodegroup_lofting_poly().name, - input_kwargs={'Profile Curves': join_geometry, 'U Resolution': group_input.outputs["Resolution"], 'V Resolution': group_input.outputs["Resolution"]}) - - delete_geometry = nw.new_node(Nodes.DeleteGeometry, - input_kwargs={'Geometry': lofting_poly.outputs["Geometry"], 'Selection': lofting_poly.outputs["Top"]}) - - grid = nw.new_node(Nodes.MeshGrid, - input_kwargs={'Size X': group_input.outputs["Width"], 'Size Y': group_input.outputs["Depth"], 'Vertices X': group_input.outputs["Resolution"], 'Vertices Y': group_input.outputs["Resolution"]}) - - multiply_2 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Depth"]}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_4 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': multiply_2}) - - transform_geometry_4 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': grid.outputs["Mesh"], 'Translation': combine_xyz_4, 'Rotation': (-0.0698, 0.0000, 0.0000), 'Scale': (0.9800, 0.9800, 1.0000)}) - - transform_geometry_5 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': transform_geometry_4, 'Rotation': (0.1047, 0.0000, 0.0000), 'Scale': (0.9500, 0.9700, 1.0000)}) - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, 
input_kwargs={'Geometry': [delete_geometry, transform_geometry_5]}) - - transform_geometry_6 = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': join_geometry_1, 'Rotation': (0.0, 0.0000, -np.pi/2)}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': transform_geometry_6}, attrs={'is_active_output': True}) - diff --git a/infinigen/assets/wall_decorations/skirting_board.py b/infinigen/assets/wall_decorations/skirting_board.py deleted file mode 100644 index ce85f9f30..000000000 --- a/infinigen/assets/wall_decorations/skirting_board.py +++ /dev/null @@ -1,286 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Yiming Zuo, Lingjie Mei, Alexander Raistrick - -import logging - -import bmesh -import bpy -import mathutils -import numpy as np -from numpy.random import uniform, normal, randint, choice, randint -from tqdm import tqdm - -from infinigen.assets.creatures.util.geometry.curve import Curve -from infinigen.assets.utils.decorate import ( - read_co, read_edge_length, remove_edges, read_edge_direction, read_edges, - remove_duplicate_edges, -) -from infinigen.assets.utils.draw import bezier_curve -from infinigen.assets.utils.object import new_plane, join_objects -from infinigen.core.constraints.example_solver.room import constants -from infinigen.core.constraints.example_solver.room.constants import WALL_HEIGHT, DOOR_WIDTH, WALL_THICKNESS -from infinigen.core.constraints.example_solver.room.types import get_room_level -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.surface import write_attr_data -from infinigen.core.util.color import color_category -from infinigen.core import surface - -import infinigen.core.util.blender as butil - -from infinigen.core.util.math import FixedSeed -from infinigen.core.placement.factory import AssetFactory -from infinigen.assets.materials.plastics import plastic_rough - -import shapely -from shapely.geometry import Polygon, MultiPolygon -from shapely import affinity -from shapely.ops import unary_union - -from infinigen.assets.utils.shapes import polygon2obj, obj2polygon -from infinigen.core import tagging, tags as t -from shapely.plotting import plot_polygon - -logger = logging.getLogger(__name__) - - -@node_utils.to_nodegroup('nodegroup_make_skirting_board_001', singleton=False, type='GeometryNodeTree') -def nodegroup_make_skirting_board(nw: NodeWrangler, control_points): - # Code generated using version 2.6.5 of the node_transpiler - - group_input = nw.new_node( - Nodes.GroupInput, - expose_input=[('NodeSocketCollection', 'Parent', None), - ('NodeSocketFloat', 'Thickness', 0.0300), - ('NodeSocketFloat', 'Height', 0.1500), - ('NodeSocketFloatDistance', 'Resolution', 0.0050), - ('NodeSocketBool', 'Is Ceiling', False)] - ) - - collection_info = nw.new_node(Nodes.CollectionInfo, input_kwargs={'Collection': group_input.outputs["Parent"]}) - - mesh = nw.new_node(Nodes.RealizeInstances, input_kwargs={'Geometry': collection_info}) - - - quadrilateral = nw.new_node( - 'GeometryNodeCurvePrimitiveQuadrilateral', - input_kwargs={'Width': group_input.outputs["Thickness"], 'Height': group_input.outputs["Height"]} - ) - - multiply = nw.new_node( - Nodes.Math, input_kwargs={0: group_input.outputs["Thickness"]}, attrs={'operation': 'MULTIPLY'} - ) - - multiply_1 = nw.new_node( - Nodes.Math, input_kwargs={0: 
group_input.outputs["Height"], 1: -0.5000}, attrs={'operation': 'MULTIPLY'} - ) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply, 'Y': multiply_1}) - - transform_geometry = nw.new_node( - Nodes.Transform, input_kwargs={'Geometry': quadrilateral, 'Translation': combine_xyz} - ) - - resample_curve_1 = nw.new_node( - Nodes.ResampleCurve, - input_kwargs={'Curve': transform_geometry, 'Count': 220, 'Length': group_input.outputs["Resolution"]}, - attrs={'mode': 'LENGTH'} - ) - - position = nw.new_node(Nodes.InputPosition) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': position}) - - greater_than = nw.new_node(Nodes.Compare, input_kwargs={0: separate_xyz.outputs["X"]}) - - multiply_2 = nw.new_node( - Nodes.Math, input_kwargs={0: group_input.outputs["Height"], 1: -1.0000}, attrs={'operation': 'MULTIPLY'} - ) - - map_range = nw.new_node(Nodes.MapRange, input_kwargs={'Value': separate_xyz.outputs["Y"], 1: multiply_2, 2: 0.0000}) - - float_curve = nw.new_node(Nodes.FloatCurve, input_kwargs={'Value': map_range.outputs["Result"]}) - node_utils.assign_curve(float_curve.mapping.curves[0], control_points) - - multiply_3 = nw.new_node( - Nodes.Math, - input_kwargs={0: float_curve, 1: group_input.outputs["Thickness"]}, - attrs={'operation': 'MULTIPLY'} - ) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply_3, 'Y': separate_xyz.outputs["Y"]}) - - set_position = nw.new_node( - Nodes.SetPosition, - input_kwargs={'Geometry': resample_curve_1, 'Selection': greater_than, 'Position': combine_xyz_1} - ) - - switch = nw.new_node( - Nodes.Switch, - input_kwargs={ - 0: group_input.outputs["Is Ceiling"], 8: (-1.0000, 1.0000, 1.0000), 9: (-1.0000, -1.0000, -1.0000) - }, - attrs={'input_type': 'VECTOR'} - ) - - transform_geometry_1 = nw.new_node( - Nodes.Transform, input_kwargs={'Geometry': set_position, 'Scale': switch.outputs[3]} - ) - - curve_to_mesh_1 = nw.new_node( - Nodes.CurveToMesh, input_kwargs={'Curve': mesh, 'Profile Curve': transform_geometry_1, 'Fill Caps': True} - ) - - set_shade_smooth = nw.new_node( - Nodes.SetShadeSmooth, input_kwargs={'Geometry': curve_to_mesh_1, 'Shade Smooth': False} - ) - - group_output = nw.new_node( - Nodes.GroupOutput, input_kwargs={'Geometry': set_shade_smooth}, attrs={'is_active_output': True} - ) - - -def apply_skirtingboard(nw: NodeWrangler, contour, is_ceiling=False, seed=None, thickness=.02): - # Code generated using version 2.6.5 of the node_transpiler - - # TODO: randomize style / size / materials - if seed is None: - seed = randint(0, 10000) - with FixedSeed(seed): - thickness = uniform(.02, .05) - height = uniform(0.08, 0.15) - color = color_category('white') - roughness = uniform(0.5, 1.0) - n_peaks = randint(1, 4) - start_y = uniform(0.0, 0.5) - mid_x = uniform(0.2, 0.8) - peak_xs = np.sort(uniform(0.0, mid_x, size=n_peaks)) - peak_ys = np.sort(uniform(start_y, 1.0, size=n_peaks)) - control_points = [(0.0000, start_y)] - control_points += [(x, y) for x, y in zip(peak_xs, peak_ys)] - control_points += [(mid_x, 1.0000), - (1.0000, 1.0000)] - - makeskirtingboard = nw.new_node( - nodegroup_make_skirting_board(control_points=control_points).name, - input_kwargs={ - 'Parent': contour, - 'Resolution': 0.0010, - 'Thickness': thickness, - 'Height': height, - 'Is Ceiling': is_ceiling - } - ) - - makeskirtingboard = nw.new_node( - Nodes.SetMaterial, - input_kwargs={ - 'Geometry': makeskirtingboard, 'Material': surface.shaderfunc_to_material( - plastic_rough.shader_rough_plastic, base_color=color, 
roughness=roughness - ) - } - ) - - group_output = nw.new_node( - Nodes.GroupOutput, input_kwargs={'Geometry': makeskirtingboard}, attrs={'is_active_output': True} - ) - - -def make_skirtingboard_contour(objs: list[bpy.types.Object], tag: t.Subpart): - # make the outline curve - - assert len(objs) > 0 - - objs = [ - tagging.extract_tagged_faces(o, {tag, t.Subpart.Visible}, nonempty=True) - for o in list(objs) - ] - - all_polys = [] - all_zs = [] - for floor_pieces in objs: - all_polys.append(obj2polygon(floor_pieces)) - all_zs.append(read_co(floor_pieces)[:, -1] + floor_pieces.location[-1]) - - floor_z = np.mean(np.concatenate(all_zs)) - - boundary = unary_union(all_polys).buffer(.05, join_style='mitre').buffer(-.05, join_style='mitre') - - if isinstance(boundary, Polygon): - boundaries = [boundary] - else: - boundaries = boundary.geoms - - contours = [] - - for b in boundaries: - lr = b.exterior - o = linear_ring2curve(lr) - contours.append(o) - o.location[-1] += floor_z - butil.apply_transform(o, True) - for lr in b.interiors: - o = linear_ring2curve(lr, True) - contours.append(o) - o.location[-1] += floor_z - butil.apply_transform(o, True) - butil.delete(objs) - return contours - - -def make_skirting_board(objs, tag, joined=True): - if joined: - seqs = list([o for o in objs if get_room_level(o.name.split('.')[0]) == i] for i in [0]) - else: - seqs = [[o] for o in objs] - - for s in seqs: - logger.debug(f'make_skirting_board for {len(objs)=} {tag=}') - - try: - contours = make_skirtingboard_contour(s, tag) - except shapely.errors.GEOSException as e: - logger.warning(f'make_skirting_board({objs=}, {tag=}) failed with {e}, skipping') - return - - obj = new_plane() - obj.name = "skirtingboard_" + tag.value - - col = butil.put_in_collection(contours, 'contour') - kwargs = { - 'contour': col, - 'seed': np.random.randint(1e7), - 'is_ceiling': tag == t.Subpart.Ceiling - } - surface.add_geomod(obj, apply_skirtingboard, apply=True, input_kwargs=kwargs) - - portal_cutters = butil.get_collection('placeholders:portal_cutters').objects - for p in portal_cutters: - if p.name.startswith('entrance') and int(p.location[-1] / WALL_HEIGHT - 1 / 2) == 0: - p.location[-1] -= WALL_HEIGHT / 2 - butil.modify_mesh( - obj, 'BOOLEAN', object=p, operation='DIFFERENCE', use_self=True, - use_hole_tolerant=True - ) - p.location[-1] += WALL_HEIGHT / 2 - butil.delete_collection(col) - col = butil.get_collection("skirting") - butil.put_in_collection(obj, col) - - -def linear_ring2curve(ring, reversed=False): - coords = ring.coords - if shapely.is_ccw(ring) == reversed: - coords = coords[::-1] - coords = np.array(coords) - lengths = np.linalg.norm(coords[:-1] - coords[1:], axis=-1) - invalid = np.sort(np.nonzero((np.abs(lengths - WALL_THICKNESS) < .02) | (np.abs(lengths - DOOR_WIDTH) < .02))[0]) - ranges = -1, *invalid, len(coords) - curves = [] - for l, r in zip(ranges[:-1], ranges[1:]): - x, y = np.array(coords[l + 1:r + 1]).T - if len(x) > 1: - curves.append(bezier_curve((x, y, 0), list(np.arange(len(x))), 1, False)) - return join_objects(curves) diff --git a/infinigen/assets/wall_decorations/wall_shelf.py b/infinigen/assets/wall_decorations/wall_shelf.py deleted file mode 100644 index 4b05b0e3b..000000000 --- a/infinigen/assets/wall_decorations/wall_shelf.py +++ /dev/null @@ -1,126 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
-
-# Authors: Lingjie Mei
-import bpy
-import numpy as np
-import shapely
-from numpy.random import uniform
-import shapely.affinity
-
-from infinigen.assets.materials import metal, plastic
-from infinigen.assets.materials.woods import wood
-from infinigen.assets.utils.decorate import read_edge_direction, select_edges, read_edge_center
-from infinigen.assets.utils.object import new_bbox, new_bbox_2d, join_objects
-from infinigen.assets.utils.shapes import polygon2obj
-from infinigen.core.placement.factory import AssetFactory
-from infinigen.core.surface import write_attr_data
-from infinigen.core import tagging as t
-from infinigen.core.tags import Subpart
-
-from infinigen.core.util import blender as butil
-from infinigen.core.util.blender import deep_clone_obj
-from infinigen.core.util.random import random_general as rg, log_uniform
-
-
-class WallShelfFactory(AssetFactory):
-    support_sides_ = 'weighted_choice', (.5, 'none'), (1, 'bottom'), (1, 'top'), (1.5, 'both')
-    support_margins = 'weighted_choice', (2, 0), (1, ('uniform', .0, .2))
-    support_ratios = 'weighted_choice', (2, 1), (1, ('uniform', .5, .9))
-    support_alphas = 'weighted_choice', (1, 1), (
-        1, ('weighted_choice', (1, ('log_uniform', .4, .7)), (2, ('log_uniform', 1.5, 3)), (1, 10)))
-    support_joins = 'mitre', 'round', 'bevel'
-    plate_bevels = 'weighted_choice', (1, 'none'), (1, 'front'), (1, 'side')
-
-    plate_surfaces = 'weighted_choice', (2, wood), (1, metal)
-    support_surfaces = 'weighted_choice', (2, metal), (1, wood), (2, plastic)
-
-    def __init__(self, factory_seed, coarse=False):
-        super(WallShelfFactory, self).__init__(factory_seed, coarse)
-        self.support_side = rg(self.support_sides_)
-        self.support_margin = rg(self.support_margins)
-        if self.support_margin == 0:
-            n_support = np.random.choice([2, 3, 4], p=[.7, .2, .1])
-        else:
-            n_support = np.random.choice([2, 3], p=[.8, .2])
-        self.support_locs = np.linspace(-.5 + self.support_margin, .5 - self.support_margin, n_support)
-        self.length = log_uniform(.3, .8)
-        self.width = log_uniform(.1, .2)
-        match self.support_side:
-            case 'none':
-                self.thickness = log_uniform(.03, .08)
-            case _:
-                self.thickness = log_uniform(.01, .05)
-        self.support_width = log_uniform(.01, .015)
-        self.support_thickness = self.support_width * log_uniform(.4, 1.)
- self.support_length = self.width * uniform(.7, 1.1) - self.plate_bevel = rg(self.plate_bevels) - self.support_join = np.random.choice(self.support_joins) - self.plate_surface = rg(self.plate_surfaces) - self.support_surface = rg(self.support_surfaces) - - def create_placeholder(self, **kwargs) -> bpy.types.Object: - box = new_bbox(0, self.width, -self.length / 2, self.length / 2, -self.support_length, self.support_length) - plane = new_bbox_2d(0, self.width, -self.length / 2, self.length / 2, self.thickness / 2) - write_attr_data(plane, f'{t.PREFIX}{Subpart.SupportSurface.value}', np.ones(1).astype(bool), 'INT', 'FACE') - return join_objects([box, plane]) - - def create_asset(self, **params) -> bpy.types.Object: - obj = self.make_plate() - self.plate_surface.apply(obj) - if self.support_side != 'none': - support = self.make_support() - supports = [support] + [deep_clone_obj(support) for _ in range(len(self.support_locs) - 1)] - for s, l in zip(supports, self.support_locs): - s.location[1] = self.length * l - self.support_surface.apply(supports) - obj = join_objects([obj] + supports) - return obj - - def make_plate(self): - obj = new_bbox(0, self.width, -self.length / 2, self.length / 2, -self.thickness / 2, self.thickness / 2) - c = read_edge_center(obj) - d = read_edge_direction(obj) - front = (np.abs(d[:, 1]) > .5) & (c[:, 0] > .1) - side = np.abs(d[:, 0]) > .5 - match self.plate_bevel: - case 'front': - selection = front - case 'side': - selection = front + side - case _: - selection = np.zeros_like(front) - with butil.ViewportMode(obj, 'EDIT'): - select_edges(obj, selection) - bpy.ops.mesh.bevel(offset=uniform(.3, .5) * self.thickness, segments=np.random.randint(4, 9)) - return obj - - def make_support_contour(self): - l = shapely.LineString(np.array([(1, 0), (0, 0), (0, 1)]) * self.support_length) - theta = np.linspace(0, np.pi / 2, 31) - alpha = rg(self.support_alphas) - r = 1 / ((np.cos(theta) + 1e-6) ** alpha + (np.sin(theta) + 1e-6) ** alpha) ** (1 / alpha) - xy = r[:, np.newaxis] * np.stack([np.cos(theta), np.sin(theta)], -1) - d = shapely.LineString(xy * self.support_length * rg(self.support_ratios)) - return shapely.union(l, d) - - def make_support(self): - lines = [] - if self.support_side in ['top', 'both']: - lines.append(self.make_support_contour()) - if self.support_side in ['bottom', 'both']: - lines.append(shapely.affinity.scale(self.make_support_contour(), 1, -1, 1, (0, 0, 0))) - - contour = shapely.union_all(lines).buffer(self.support_thickness / 2, join_style=self.support_join) - obj = polygon2obj(contour) - obj.rotation_euler[0] = np.pi / 2 - obj.location = self.support_thickness / 2, -self.support_width / 2, 0 - butil.apply_transform(obj, True) - with butil.ViewportMode(obj, 'EDIT'): - bpy.ops.mesh.select_all(action='SELECT') - bpy.ops.mesh.extrude_region_move( - TRANSFORM_OT_translate={ - 'value': (0, self.support_width, 0) - } - ) - return obj diff --git a/infinigen/assets/weather/__init__.py b/infinigen/assets/weather/__init__.py index 0d5bf77f5..97d26424c 100644 --- a/infinigen/assets/weather/__init__.py +++ b/infinigen/assets/weather/__init__.py @@ -1,15 +1,10 @@ -from . 
import particles, cloud -from .cloud import ( - CloudFactory, - CumulonimbusFactory, - CumulusFactory, - AltocumulusFactory, - StratocumulusFactory -) +from .kole_clouds import add_kole_clouds from .particles import ( - DustMoteFactory, - RaindropFactory, - SnowflakeFactory + FallingParticles, + falling_leaf_param_distribution, + floating_dust_param_distribution, + marine_snow_param_distribution, + rain_param_distribution, + snow_param_distribution, ) - -from .kole_clouds import add_kole_clouds \ No newline at end of file +from .wind_effectors import TurbulenceEffector, WindEffector diff --git a/infinigen/assets/weather/cloud/__init__.py b/infinigen/assets/weather/cloud/__init__.py deleted file mode 100644 index 3aedf7413..000000000 --- a/infinigen/assets/weather/cloud/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -from .generate import CloudFactory, CumulonimbusFactory, CumulusFactory, AltocumulusFactory, \ - StratocumulusFactory diff --git a/infinigen/assets/weather/cloud/generate.py b/infinigen/assets/weather/cloud/generate.py deleted file mode 100644 index f132f01b7..000000000 --- a/infinigen/assets/weather/cloud/generate.py +++ /dev/null @@ -1,156 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Hei Law - - -import gin -import bpy -import numpy as np - -from infinigen.assets.utils.object import new_cube -from infinigen.core.placement.factory import AssetFactory - -from infinigen.assets.weather.cloud.cloud import Cumulus, Cumulonimbus, Stratocumulus, Altocumulus -from infinigen.assets.weather.cloud.cloud import create_3d_grid - -from infinigen.core import surface -from infinigen.core.util import blender as butil -from infinigen.core.util.math import FixedSeed -from infinigen.core.util.random import random_general as rg - -from infinigen.core.nodes.node_wrangler import Nodes -from infinigen.core.tagging import tag_object, tag_nodegroup - - -@gin.configurable -class CloudFactory(AssetFactory): - def __init__( - self, - factory_seed, - coarse=False, - terrain_mesh=None, - max_distance=300, - steps=128, - cloudy=("bool", 0.01), - ): - super(CloudFactory, self).__init__(factory_seed, coarse=coarse) - - self.max_distance = max_distance - - self.ref_cloud = bpy.data.meshes.new('ref_cloud') - self.ref_cloud.from_pydata(create_3d_grid(steps=steps), [], []) - self.ref_cloud.update() - - with FixedSeed(factory_seed): - self.cloudy = rg(cloudy) - - self.cloud_types = [Cumulonimbus, ] if self.cloudy else [Cumulus, Stratocumulus, Altocumulus, ] - - self.resolutions = { - Cumulonimbus: [16, 128], - Cumulus: [16, 128], - Stratocumulus: [32, 256], - Altocumulus: [16, 64], } - scale_resolution = 4 - self.resolutions = {k: (scale_resolution * u, scale_resolution * v) for k, (u, v) in - self.resolutions.items()} - - self.min_distance = 256 if self.cloudy else 64 - self.dome_radius = 1024 if self.cloudy else 256 - self.dome_threshold = 32 if self.cloudy else 0 - self.density_range = [1e-5, 1e-4] if self.cloudy else [1e-4, 2e-4] - - self.max_scale = max([t.MAX_EXPECTED_SCALE for t in self.cloud_types]) - self.density = max([t.PLACEHOLDER_DENSITY for t in self.cloud_types]) - - def spawn_locations(self): - obj = new_cube() - surface.add_geomod(obj, self.geo_dome, apply=True, - input_args=[self.dome_radius, self.dome_threshold, self.density_range, - self.min_distance]) - - locations = np.array([obj.matrix_world @ v.co for v in obj.data.vertices]) - butil.delete(obj) - 
return locations - - def create_placeholder(self, **kwargs) -> bpy.types.Object: - return butil.spawn_empty('placeholder', disp_type='CUBE', s=self.max_scale) - - def create_asset(self, distance, **kwargs): - - cloud_type = np.random.choice(self.cloud_types) - - resolution_min, resolution_max = self.resolutions[cloud_type] - resolution = max(1 - distance / self.max_distance, 0) - resolution = resolution * (resolution_max - resolution_min) + resolution_min - resolution = int(resolution) - - new_cloud = cloud_type("Cloud", self.ref_cloud) - new_cloud = new_cloud.make_cloud(marching_cubes=False, resolution=resolution, ) - butil.apply_transform(new_cloud) - - tag_object(new_cloud, 'cloud') - return new_cloud - - @staticmethod - def geo_dome(nw, dome_radius, dome_threshold, density_range, min_distance, ): - ico_sphere = nw.new_node('GeometryNodeMeshIcoSphere', - input_kwargs={'Radius': dome_radius, 'Subdivisions': 8, }, ) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': ico_sphere, 'Scale': (1.2, 1.4, 1.0), }, ) - - position = nw.new_node(Nodes.InputPosition) - separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': position, }, ) - - less_than = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz.outputs["Z"], 1: dome_threshold, }, - attrs={'operation': 'LESS_THAN', }, ) - - delete_geometry = nw.new_node('GeometryNodeDeleteGeometry', - input_kwargs={'Geometry': transform, 'Selection': less_than, }, ) - - distribute_points_on_faces = nw.new_node(Nodes.DistributePointsOnFaces, input_kwargs={ - 'Mesh': delete_geometry, - 'Distance Min': min_distance, - 'Density Max': np.random.uniform(*density_range), - 'Seed': np.random.randint(1e5), }, attrs={'distribute_method': 'POISSON', }, ) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'Z': nw.uniform(32, np.random.randint(64, 1e5)), }, ) - - set_position = nw.new_node(Nodes.SetPosition, input_kwargs={ - 'Geometry': distribute_points_on_faces.outputs["Points"], - 'Offset': combine_xyz, }, ) - - verts = nw.new_node(Nodes.PointsToVertices, input_kwargs={'Points': set_position, }, ) - - nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': verts, }, ) - - -class CumulonimbusFactory(CloudFactory): - def __init__(self, factory_seed, coarse=False, max_distance=300, steps=128, ): - self.cloud_types = [Cumulonimbus] - super(CumulonimbusFactory, self).__init__(factory_seed, coarse, max_distance, steps) - self.cloud_types = [Cumulonimbus] - - -class CumulusFactory(CloudFactory): - def __init__(self, factory_seed, coarse=False, max_distance=300, steps=128, ): - self.cloud_types = [Cumulus] - super(CumulusFactory, self).__init__(factory_seed, coarse, max_distance, steps) - self.cloud_types = [Cumulus] - - -class StratocumulusFactory(CloudFactory): - def __init__(self, factory_seed, coarse=False, max_distance=300, steps=128, ): - self.cloud_types = [Stratocumulus] - super(StratocumulusFactory, self).__init__(factory_seed, coarse, max_distance, steps) - self.cloud_types = [Stratocumulus] - - -class AltocumulusFactory(CloudFactory): - def __init__(self, factory_seed, coarse=False, max_distance=300, steps=128, ): - self.cloud_types = [Altocumulus] - super(AltocumulusFactory, self).__init__(factory_seed, coarse, max_distance, steps) - self.cloud_types = [Altocumulus] diff --git a/infinigen/assets/weather/kole_clouds.py b/infinigen/assets/weather/kole_clouds.py index 6650fed52..283258956 100644 --- a/infinigen/assets/weather/kole_clouds.py +++ b/infinigen/assets/weather/kole_clouds.py @@ -5,155 +5,243 @@ import 
gin import numpy as np from mathutils import Vector -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils + from infinigen.core import surface +from infinigen.core.nodes import node_utils +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler from infinigen.core.util.random import random_general as rg from infinigen.terrain.utils import drive_param + @gin.configurable def kole_clouds_shader( nw: NodeWrangler, - coverage_frame_start=("clip_gaussian", -0.1, 0.1, -0.3, 0.3), coverage_frame_end=("clip_gaussian", -0.1, 0.1, -0.3, 0.3), - density=("uniform", .01, .04), + coverage_frame_start=("clip_gaussian", -0.1, 0.1, -0.3, 0.3), + coverage_frame_end=("clip_gaussian", -0.1, 0.1, -0.3, 0.3), + density=("uniform", 0.01, 0.04), translation_animation=("bool", 0.5), translation=0, anisotropy=("clip_gaussian", 0.1, 0.1, 0, 0.5), ): density = rg(density) anisotropy = rg(anisotropy) - + # Code generated using version 2.4.3 of the node_transpiler transparent_bsdf = nw.new_node(Nodes.TransparentBSDF) - + # PARAMETER: Coverage value = nw.new_node(Nodes.Value) coverage_frame_start = rg(coverage_frame_start) coverage_frame_end = rg(coverage_frame_end) drive_param( - value.outputs[0], (coverage_frame_end - coverage_frame_start) / (bpy.context.scene.frame_end - bpy.context.scene.frame_start + 1), - offset=coverage_frame_start - (coverage_frame_end - coverage_frame_start) * bpy.context.scene.frame_start / (bpy.context.scene.frame_end - bpy.context.scene.frame_start + 1) - ) - - geometry = nw.new_node('ShaderNodeNewGeometry') - - vector_transform = nw.new_node('ShaderNodeVectorTransform', - input_kwargs={'Vector': geometry.outputs["Position"]}) - + value.outputs[0], + (coverage_frame_end - coverage_frame_start) + / (bpy.context.scene.frame_end - bpy.context.scene.frame_start + 1), + offset=coverage_frame_start + - (coverage_frame_end - coverage_frame_start) + * bpy.context.scene.frame_start + / (bpy.context.scene.frame_end - bpy.context.scene.frame_start + 1), + ) + + geometry = nw.new_node("ShaderNodeNewGeometry") + + vector_transform = nw.new_node( + "ShaderNodeVectorTransform", + input_kwargs={"Vector": geometry.outputs["Position"]}, + ) + vector_transform = nw.add(vector_transform, Vector([translation, 0, 0])) if rg(translation_animation): - drive_param(vector_transform.inputs[1], 0.001, offset=-(bpy.context.scene.frame_start + bpy.context.scene.frame_end) / 2 * 0.001 + translation, index=0) + drive_param( + vector_transform.inputs[1], + 0.001, + offset=-(bpy.context.scene.frame_start + bpy.context.scene.frame_end) + / 2 + * 0.001 + + translation, + index=0, + ) - multiply = nw.new_node(Nodes.VectorMath, + multiply = nw.new_node( + Nodes.VectorMath, input_kwargs={0: vector_transform, 1: (1.0, 1.0, 1.0)}, - attrs={'operation': 'MULTIPLY'}) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, - input_kwargs={'Vector': multiply.outputs["Vector"]}) - - map_range = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': separate_xyz.outputs["Z"], 1: 0.1, 2: 0.3, 4: -0.2}) - - multiply_1 = nw.new_node(Nodes.VectorMath, - input_kwargs={0: multiply.outputs["Vector"], 1: (1.0, 1.0, 10 ** np.random.uniform(-1, 0))}, - attrs={'operation': 'MULTIPLY'}) - - add = nw.new_node(Nodes.VectorMath, - input_kwargs={0: multiply_1.outputs["Vector"], 1: (0.0, 0.0, np.random.uniform(0, 2))}) - - musgrave_texture = nw.new_node(Nodes.MusgraveTexture, - input_kwargs={'Vector': add.outputs["Vector"], 'Scale': 3.0, 'Detail': 10.0, 'Dimension': 0.6, 'Lacunarity': 2.6}) - 
- map_range_1 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': musgrave_texture, 1: -1.0}, - attrs={'clamp': False}) - - add_1 = nw.new_node(Nodes.Math, - input_kwargs={0: map_range.outputs["Result"], 1: map_range_1.outputs["Result"]}) - - add_2 = nw.new_node(Nodes.Math, - input_kwargs={0: value, 1: add_1}) - - map_range_2 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': add_2, 1: 0.4, 2: 0.5}, - attrs={'clamp': False}) - - length = nw.new_node(Nodes.VectorMath, + attrs={"operation": "MULTIPLY"}, + ) + + separate_xyz = nw.new_node( + Nodes.SeparateXYZ, input_kwargs={"Vector": multiply.outputs["Vector"]} + ) + + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": separate_xyz.outputs["Z"], 1: 0.1, 2: 0.3, 4: -0.2}, + ) + + multiply_1 = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: multiply.outputs["Vector"], + 1: (1.0, 1.0, 10 ** np.random.uniform(-1, 0)), + }, + attrs={"operation": "MULTIPLY"}, + ) + + add = nw.new_node( + Nodes.VectorMath, + input_kwargs={ + 0: multiply_1.outputs["Vector"], + 1: (0.0, 0.0, np.random.uniform(0, 2)), + }, + ) + + musgrave_texture = nw.new_node( + Nodes.MusgraveTexture, + input_kwargs={ + "Vector": add.outputs["Vector"], + "Scale": 3.0, + "Detail": 10.0, + "Dimension": 0.6, + "Lacunarity": 2.6, + }, + ) + + map_range_1 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": musgrave_texture, 1: -1.0}, + attrs={"clamp": False}, + ) + + add_1 = nw.new_node( + Nodes.Math, + input_kwargs={0: map_range.outputs["Result"], 1: map_range_1.outputs["Result"]}, + ) + + add_2 = nw.new_node(Nodes.Math, input_kwargs={0: value, 1: add_1}) + + map_range_2 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": add_2, 1: 0.4, 2: 0.5}, + attrs={"clamp": False}, + ) + + length = nw.new_node( + Nodes.VectorMath, input_kwargs={0: multiply.outputs["Vector"]}, - attrs={'operation': 'LENGTH'}) - + attrs={"operation": "LENGTH"}, + ) + # This value should change with the solidify thickness value_1 = nw.new_node(Nodes.Value) value_1.outputs[0].default_value = 0.5 - - add_3 = nw.new_node(Nodes.Math, - input_kwargs={0: 1.0, 1: value_1}) - - map_range_3 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': length.outputs["Value"], 1: 1.0, 2: add_3}) - - geometry_1 = nw.new_node('ShaderNodeNewGeometry') - - voronoi_texture = nw.new_node(Nodes.VoronoiTexture, - input_kwargs={'Vector': geometry_1.outputs["Position"], 'Scale': 0.01}) - - map_range_4 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': voronoi_texture.outputs["Distance"], 3: 0.5, 4: 2.0}) - - power = nw.new_node(Nodes.Math, - input_kwargs={0: map_range_3.outputs["Result"], 1: map_range_4.outputs["Result"]}, - attrs={'operation': 'POWER'}) - - float_curve = nw.new_node(Nodes.FloatCurve, - input_kwargs={'Value': power}) - node_utils.assign_curve(float_curve.mapping.curves[0], [(0.0, 1.0), (0.0273, 0.0063), (0.2455, 0.6), (0.6682, 0.3188), (0.9955, 1.0)]) - - map_range_5 = nw.new_node(Nodes.MapRange, - input_kwargs={'Value': float_curve, 4: 5.0}) - - greater_than = nw.new_node(Nodes.Math, - input_kwargs={0: map_range_2.outputs["Result"], 1: map_range_5.outputs["Result"]}, - attrs={'operation': 'GREATER_THAN'}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: map_range_1.outputs["Result"], 1: map_range_5.outputs["Result"]}, - attrs={'operation': 'SUBTRACT', 'use_clamp': True}) - - multiply_2 = nw.new_node(Nodes.Math, - input_kwargs={0: subtract, 1: 0.3}, - attrs={'operation': 'MULTIPLY'}) - - multiply_3 = nw.new_node(Nodes.Math, + + add_3 = nw.new_node(Nodes.Math, 
input_kwargs={0: 1.0, 1: value_1}) + + map_range_3 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": length.outputs["Value"], 1: 1.0, 2: add_3}, + ) + + geometry_1 = nw.new_node("ShaderNodeNewGeometry") + + voronoi_texture = nw.new_node( + Nodes.VoronoiTexture, + input_kwargs={"Vector": geometry_1.outputs["Position"], "Scale": 0.01}, + ) + + map_range_4 = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": voronoi_texture.outputs["Distance"], 3: 0.5, 4: 2.0}, + ) + + power = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: map_range_3.outputs["Result"], + 1: map_range_4.outputs["Result"], + }, + attrs={"operation": "POWER"}, + ) + + float_curve = nw.new_node(Nodes.FloatCurve, input_kwargs={"Value": power}) + node_utils.assign_curve( + float_curve.mapping.curves[0], + [(0.0, 1.0), (0.0273, 0.0063), (0.2455, 0.6), (0.6682, 0.3188), (0.9955, 1.0)], + ) + + map_range_5 = nw.new_node( + Nodes.MapRange, input_kwargs={"Value": float_curve, 4: 5.0} + ) + + greater_than = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: map_range_2.outputs["Result"], + 1: map_range_5.outputs["Result"], + }, + attrs={"operation": "GREATER_THAN"}, + ) + + subtract = nw.new_node( + Nodes.Math, + input_kwargs={ + 0: map_range_1.outputs["Result"], + 1: map_range_5.outputs["Result"], + }, + attrs={"operation": "SUBTRACT", "use_clamp": True}, + ) + + multiply_2 = nw.new_node( + Nodes.Math, input_kwargs={0: subtract, 1: 0.3}, attrs={"operation": "MULTIPLY"} + ) + + multiply_3 = nw.new_node( + Nodes.Math, input_kwargs={0: multiply_2, 1: 0.01}, - attrs={'operation': 'MULTIPLY'}) - - add_4 = nw.new_node(Nodes.Math, - input_kwargs={0: greater_than, 1: multiply_3}) - - power_1 = nw.new_node(Nodes.Math, + attrs={"operation": "MULTIPLY"}, + ) + + add_4 = nw.new_node(Nodes.Math, input_kwargs={0: greater_than, 1: multiply_3}) + + power_1 = nw.new_node( + Nodes.Math, input_kwargs={0: map_range_1.outputs["Result"]}, - attrs={'operation': 'POWER'}) + attrs={"operation": "POWER"}, + ) - density_mul = nw.new_node(Nodes.Math, + density_mul = nw.new_node( + Nodes.Math, input_kwargs={0: power_1, 1: density}, - attrs={'operation': 'MULTIPLY', 'use_clamp': True}) - - volume_scatter = nw.new_node('ShaderNodeVolumeScatter', - input_kwargs={'Color': add_4, 'Density': density_mul, 'Anisotropy': anisotropy}) - - nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': transparent_bsdf, 'Volume': volume_scatter}) + attrs={"operation": "MULTIPLY", "use_clamp": True}, + ) + + volume_scatter = nw.new_node( + "ShaderNodeVolumeScatter", + input_kwargs={"Color": add_4, "Density": density_mul, "Anisotropy": anisotropy}, + ) + + nw.new_node( + Nodes.MaterialOutput, + input_kwargs={"Surface": transparent_bsdf, "Volume": volume_scatter}, + ) + @gin.configurable("kole_clouds") def add_kole_clouds(height=0): - bpy.ops.mesh.primitive_ico_sphere_add(subdivisions=6, radius=1, enter_editmode=False, align='WORLD', location=(0, 0, 0), scale=(1, 1, 1)) + bpy.ops.mesh.primitive_ico_sphere_add( + subdivisions=6, + radius=1, + enter_editmode=False, + align="WORLD", + location=(0, 0, 0), + scale=(1, 1, 1), + ) sphere = bpy.context.active_object sphere.name = "KoleClouds" surface.add_material(sphere, kole_clouds_shader, selection=None) # Don't change the solidify modifier - bpy.ops.object.modifier_add(type='SOLIDIFY') + bpy.ops.object.modifier_add(type="SOLIDIFY") sphere.modifiers["Solidify"].thickness = 0.5 sphere.modifiers["Solidify"].offset = 1 sphere.modifiers["Solidify"].use_even_offset = True @@ -161,5 +249,6 @@ def add_kole_clouds(height=0): 
sphere.location = (0, 0, height) sphere.rotation_euler[1] = np.pi + if __name__ == "__main__": - add_kole_clouds() \ No newline at end of file + add_kole_clouds() diff --git a/infinigen/assets/weather/particles.py b/infinigen/assets/weather/particles.py index a57393c3c..35c44f1f0 100644 --- a/infinigen/assets/weather/particles.py +++ b/infinigen/assets/weather/particles.py @@ -1,159 +1,134 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. - -# Authors: Hei Law, Alexander Raistrick - +import logging +import typing import bpy -import numpy as np -from numpy.random import normal as N -import mathutils - -import gin - -from infinigen.core.placement.factory import AssetFactory -from infinigen.core import surface - -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util import blender as butil -from infinigen.core.util.random import random_general -from infinigen.core.tagging import tag_object, tag_nodegroup - -from infinigen.assets.materials import dirt -from infinigen.infinigen_gpl.surfaces import snow - -def shader_raindrop(nw): - glass_bsdf = nw.new_node( - 'ShaderNodeBsdfGlass', - input_kwargs={ - 'IOR': 1.33, - }, - ) - material_output = nw.new_node( - Nodes.MaterialOutput, - input_kwargs={ - 'Surface': glass_bsdf, - }, +from numpy.random import normal, uniform + +from infinigen.core.generator import Generator +from infinigen.core.placement import AssetFactory, make_asset_collection, particles +from infinigen.core.util import butil +from infinigen.core.util.random import log_uniform + +logger = logging.getLogger(__name__) + + +def rain_param_distribution(): + drops_per_sec_m2 = uniform(0.05, 1) + velocity = uniform(9, 20) + lifetime = 100 + + return dict( + mass=0.001, + warmup_frames=100, + density=drops_per_sec_m2 * lifetime, + lifetime=lifetime, + particle_size=uniform(0.01, 0.015), + size_random=uniform(0.005, 0.01), + normal_factor=-velocity, + effect_gravity=0.0, + use_die_on_collision=True, ) -def geo_raindrop(nw): - group_input = nw.new_node( - Nodes.GroupInput, - expose_input=[( - 'NodeSocketGeometry', - 'Geometry', - None, - )], + +def falling_leaf_param_distribution(): + rate = uniform(0.001, 0.006) + dur = max(bpy.context.scene.frame_end - bpy.context.scene.frame_start, 500) + + return dict( + warmup_frames=1024, + density=rate * dur, + particle_size=normal(0.5, 0.15), + size_random=uniform(0.1, 0.2), + lifetime=dur, + use_rotations=True, + rotation_factor_random=1.0, + use_die_on_collision=False, + drag_factor=0.2, + damping=0.3, + mass=0.01, + normal_factor=0.0, + angular_velocity_mode="RAND", + angular_velocity_factor=uniform(0, 3), + use_dynamic_rotation=True, ) - position = nw.new_node(Nodes.InputPosition) - vector_curves = nw.new_node( - Nodes.VectorCurve, - input_kwargs={ - 'Vector': position, - }, - ) - node_utils.assign_curve( - vector_curves.mapping.curves[0], - [(-1.0, -1.0), (1.0, 1.0)], - ) - node_utils.assign_curve( - vector_curves.mapping.curves[1], - [(-1.0, -1.0), (1.0, 1.0)], - ) - node_utils.assign_curve( - vector_curves.mapping.curves[2], - [(-1.0, -0.15 * N(1, 0.15)), (-0.6091, -0.0938), (1.0, 1.0)], +def floating_dust_param_distribution(): + return dict( + mass=0.0001, + count=int(7000 * uniform(0.5, 2)), + lifetime=1000, + warmup_frames=100, + particle_size=0.001, + size_random=uniform(0.7, 1.0), + emit_from="VOLUME", + damping=1.0, + drag_factor=1.0, 
+ effect_gravity=uniform(0.3, 0.7), # partially buoyant ) - set_position = nw.new_node( - Nodes.SetPosition, - input_kwargs={ - 'Geometry': group_input.outputs["Geometry"], - 'Position': vector_curves, - }, + +def marine_snow_param_distribution(): + return dict( + mass=0.0001, + count=int(10000 * uniform(0.5, 2)), + lifetime=1000, + warmup_frames=100, + particle_size=0.005, + size_random=uniform(0.7, 1.0), + emit_from="VOLUME", + brownian_factor=log_uniform(0.0002, 0.0005), + damping=log_uniform(0.95, 0.98), + drag_factor=uniform(0.85, 0.95), + factor_random=uniform(0.1, 0.2), + use_rotations=True, + phase_factor_random=uniform(0.2, 0.5), + use_dynamic_rotation=True, + effect_gravity=uniform(0, 0.5), ) - group_output = nw.new_node( - Nodes.GroupOutput, - input_kwargs={ - 'Geometry': set_position, - }, + +def snow_param_distribution(): + density = uniform(2, 26) + + return dict( + mass=0.001, + density=density, + lifetime=2000, + warmup_frames=1000, + particle_size=0.003, + emit_from="FACE", + damping=1.0, + drag_factor=1.0, + use_rotations=True, + use_die_on_collision=True, ) -class RaindropFactory(AssetFactory): - def create_asset(self, **kwargs): +class FallingParticles(Generator): + def __init__( + self, + particle_gen: AssetFactory, + distribution: typing.Callable, + ): + self.particle_gen = particle_gen - bpy.ops.mesh.primitive_ico_sphere_add( - radius=1, - enter_editmode=False, - subdivisions=5, - align='WORLD', - location=(0, 0, 0), - scale=(1, 1, 1), - ) + super().__init__(distribution) - sphere = bpy.context.object - - surface.add_geomod(sphere, geo_raindrop, apply=True) - tag_object(sphere, 'raindrop') - return sphere - - def finalize_assets(self, assets): - surface.add_material(assets, shader_raindrop) - -class DustMoteFactory(AssetFactory): - - def create_asset(self, **kwargs): - - bpy.ops.mesh.primitive_ico_sphere_add( - radius=1, - subdivisions=2, - enter_editmode=False, - align='WORLD', - location=(0, 0, 0), - scale=(1, 1, 1), - ) - tag_object(bpy.context.object, 'dustmote') - return bpy.context.object - - def finalize_assets(self, assets): - dirt.apply(assets) - -class SnowflakeFactory(AssetFactory): - - def create_asset(self, **params) -> bpy.types.Object: - bpy.ops.mesh.primitive_circle_add( - vertices=6, - fill_type='TRIFAN', + def generate( + self, + emitter: bpy.types.Object, + collision: bpy.types.Collection = None, + ): + col = make_asset_collection(self.particle_gen, 5) + + emitter, system = particles.particle_system( + emitter, col, self.params, collision ) - tag_object(bpy.context.object, 'snowflake') - return bpy.context.object - - def finalize_assets(self, assets): - snow.apply(assets, subsurface=0) - -@gin.configurable -def wind_effector(strength): - bpy.ops.object.effector_add(type='WIND') - wind = bpy.context.active_object - - yaw = np.random.uniform(0, 360) - wind.rotation_euler = np.deg2rad((90, 0, yaw)) - - wind.field.strength = random_general(strength) - wind.field.flow = 0 - - return wind - -@gin.configurable -def turbulence_effector(strength, noise, size=1, flow=0): - bpy.ops.object.effector_add(type='TURBULENCE') - wind = bpy.context.active_object - wind.field.strength = random_general(strength) - wind.field.noise = random_general(noise) - wind.field.flow = random_general(flow) - wind.field.size = random_general(size) + + logger.info(f"{self} baking particles") + particles.bake(emitter, system) + + butil.put_in_collection(emitter, butil.get_collection("particles")) + + return emitter diff --git a/infinigen/assets/weather/wind_effectors.py 
b/infinigen/assets/weather/wind_effectors.py new file mode 100644 index 000000000..5b5da035d --- /dev/null +++ b/infinigen/assets/weather/wind_effectors.py @@ -0,0 +1,47 @@ +import bpy +import gin +import numpy as np + +from infinigen.core.placement.factory import AssetFactory +from infinigen.core.util import FixedSeed, random_general + + +@gin.configurable +class WindEffector(AssetFactory): + def __init__(self, factory_seed, strength): + super().__init__(factory_seed) + with FixedSeed(factory_seed): + self.strength = random_general(strength) + + def create_asset(self, **kwargs): + bpy.ops.object.effector_add(type="WIND") + wind = bpy.context.active_object + + yaw = np.random.uniform(0, 360) + wind.rotation_euler = np.deg2rad((90, 0, yaw)) + + wind.field.strength = self.strength + wind.field.flow = 0 + + return wind + + +@gin.configurable +class TurbulenceEffector(AssetFactory): + def __init__(self, factory_seed, strength, noise, size=1, flow=0): + super().__init__(factory_seed) + with FixedSeed(factory_seed): + self.strength = random_general(strength) + self.noise = random_general(noise) + self.size = random_general(size) + self.flow = random_general(flow) + + def create_asset(self, **kwargs): + bpy.ops.object.effector_add(type="TURBULENCE") + wind = bpy.context.active_object + wind.field.strength = self.strength + wind.field.noise = self.noise + wind.field.flow = self.flow + wind.field.size = self.size + + return wind diff --git a/infinigen/assets/windows/window.py b/infinigen/assets/windows/window.py deleted file mode 100644 index d01d38be6..000000000 --- a/infinigen/assets/windows/window.py +++ /dev/null @@ -1,1035 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
- -# Authors: -# - Hongyu Wen: primary author -# - Alexander Raistrick: update window glass - -import bpy -import random -import mathutils -import numpy as np -from numpy.random import uniform as U, normal as N, randint as RI, uniform - -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils -from infinigen.core.util.blender import deep_clone_obj -from infinigen.core.util.color import color_category -from infinigen.core import surface -from infinigen.core.util import blender as butil - -from infinigen.core.util.math import FixedSeed, clip_gaussian -from infinigen.core.placement.factory import AssetFactory -from infinigen.assets.materials import metal_shader_list, wood_shader_list - -from infinigen.assets.utils.autobevel import BevelSharp - -def shader_window_glass(nw: NodeWrangler): - - """ Non-refractive glass shader, since windows consist of a one-sided mesh currently and would not properly - refract-then un-refract the light - """ - - roughness = clip_gaussian(0, 0.015, 0, 0.03, 0.03) - transmission = uniform(0.05, 0.12) - - # non-refractive glass - transparent_bsdf = nw.new_node(Nodes.TransparentBSDF) - shader = nw.new_node(Nodes.GlossyBSDF, input_kwargs={'Roughness': roughness}) - shader = nw.new_node(Nodes.MixShader, input_kwargs={'Fac': transmission, 1: transparent_bsdf, 2: shader}) - - # complete pass-through for non-camera rays, for render efficiency - light_path = nw.new_node(Nodes.LightPath) - shader = nw.new_node(Nodes.MixShader, input_kwargs={'Fac': light_path.outputs["Is Camera Ray"], 1: transparent_bsdf, 2: shader}) - - material_output = nw.new_node(Nodes.MaterialOutput, input_kwargs={'Surface': shader}, attrs={'is_active_output': True}) - -class WindowFactory(AssetFactory): - def __init__(self, factory_seed, coarse=False, curtain=None, shutter=None): - super(WindowFactory, self).__init__(factory_seed, coarse=coarse) - - with FixedSeed(factory_seed): - self.params = self.sample_parameters() - self.beveler = BevelSharp() - self.curtain = curtain - self.shutter = shutter - - @staticmethod - def sample_parameters(): - frame_width = U(0.05, 0.1) - sub_frame_width = U(0.01, frame_width) - sub_frame_h_amount = RI(1, 2) - sub_frame_v_amount = RI(1, 2) - glass_thickness = U(0.01, 0.03) - - shutter_panel_radius = U(0.001, 0.003) - shutter_width = U(0.03, 0.05) - shutter_thickness = U(0.003, 0.007) - shutter_rotation = U(0, 1) - shutter_inverval = shutter_width + U(0.001, 0.003) - - curtain_frame_depth = U(0.05, 0.1) - curtain_depth = U(0.03, curtain_frame_depth) - curtain_frame_radius = U(0.01, 0.02) - - shader_frame_material_choice = random.choice(wood_shader_list) - shader_curtain_frame_material_choice = random.choice(metal_shader_list) - shader_curtain_material_choice = shader_curtain_material - - params = { - "FrameWidth": frame_width, - "SubFrameWidth": sub_frame_width, - "SubPanelHAmount": sub_frame_h_amount, - "SubPanelVAmount": sub_frame_v_amount, - "GlassThickness": glass_thickness, - "CurtainFrameDepth": curtain_frame_depth, - "CurtainDepth": curtain_depth, - "CurtainFrameRadius": curtain_frame_radius, - "ShutterPanelRadius": shutter_panel_radius, - "ShutterWidth": shutter_width, - "ShutterThickness": shutter_thickness, - "ShutterRotation": shutter_rotation, - "ShutterInterval": shutter_inverval, - "FrameMaterial": surface.shaderfunc_to_material(shader_frame_material_choice, vertical=True), - "CurtainFrameMaterial": surface.shaderfunc_to_material(shader_curtain_frame_material_choice), - "CurtainMaterial": 
surface.shaderfunc_to_material(shader_curtain_material_choice), - "Material": surface.shaderfunc_to_material(shader_window_glass) - } - return params - - def sample_asset_params(self, dimensions=None, open=None, curtain=None, shutter=None): - if dimensions is None: - width = U(1, 4) - height = U(1, 4) - frame_thickness = U(0.05, 0.15) - else: - width, height, frame_thickness = dimensions - - panel_h_amount = RI(1, 2) - v_ = width / height * panel_h_amount - panel_v_amount = int(uniform(v_ * 1.6, v_ * 2.5)) - - if open is None: - open = U(0, 1) < 0.5 - - if shutter is None: - shutter = U(0, 1) < 0.5 - - if curtain is None: - curtain = U(0, 1) < 0.5 - if curtain: - open = False - sub_frame_thickness = U(0.01, frame_thickness) - - open = False # keep windows closed on generation, let articulation module handle this later on - open_type = RI(0, 3) - open_offset = 0 - oe_offset = 0 - if open_type == 0: - if frame_thickness < sub_frame_thickness * 2: - open_type = RI(1, 2) - else: - oe_offset = U(sub_frame_thickness / 2, (frame_thickness - 2 * sub_frame_thickness) / 2) - if open: - open_offset = U(0, width / panel_h_amount) - else: - open_offset = 0 - open_h_angle = U(0, 0.3) if open_type == 1 and open else 0 - open_v_angle = -U(0, 0.3) if open_type == 2 and open else 0 - - curtain_interval_number = int(width / U(0.08, 0.2)) - curtain_mid_l = -U(0, width / 2) - curtain_mid_r = U(0, width / 2) - return { - **self.params, - "Width": width, - "Height": height, - "FrameThickness": frame_thickness, - "PanelHAmount": panel_h_amount, - "PanelVAmount": panel_v_amount, - "SubFrameThickness": sub_frame_thickness, - "OpenHAngle": open_h_angle, - "OpenVAngle": open_v_angle, - "OpenOffset": open_offset, - "OEOffset": oe_offset, - "Curtain": curtain, - "CurtainIntervalNumber": curtain_interval_number, - "CurtainMidL": curtain_mid_l, - "CurtainMidR": curtain_mid_r, - "Shutter": shutter, - } - - def create_asset(self, dimensions=None, open=None, realized=True, **params): - obj = butil.spawn_cube() - - butil.modify_mesh( - obj, - 'NODES', - node_group=nodegroup_window_geometry(), - ng_inputs=self.sample_asset_params(dimensions, open, self.curtain,self.shutter), - apply=realized - ) - - obj.rotation_euler[0] = np.pi / 2 - butil.apply_transform(obj, True) - obj_ =deep_clone_obj(obj) - self.beveler(obj) - if max(obj.dimensions) > 8: - butil.delete(obj) - obj = obj_ - else: - butil.delete(obj_) - - bpy.ops.object.light_add( - type='AREA', - radius=1, - align='WORLD', - location=(0,0,0), - scale=(1,1,1) - ) - portal = bpy.context.active_object - - w, _, h = obj.dimensions - portal.scale = (w, h, 1) - portal.data.cycles.is_portal = True - portal.rotation_euler = (-np.pi/2, 0, 0) - butil.parent_to(portal, obj, no_inverse=True) - portal.hide_viewport = True - - return obj - - -@node_utils.to_nodegroup('nodegroup_window_geometry', singleton=True, type='GeometryNodeTree') -def nodegroup_window_geometry(nw: NodeWrangler): - # Code generated using version 2.6.5 of the node_transpiler - - group_input_1 = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketFloatDistance', 'Width', 2.0000), - ('NodeSocketFloatDistance', 'Height', 2.0000), ('NodeSocketFloatDistance', 'FrameWidth', 0.1000), - ('NodeSocketFloatDistance', 'FrameThickness', 0.1000), ('NodeSocketInt', 'PanelHAmount', 0), - ('NodeSocketInt', 'PanelVAmount', 0), ('NodeSocketFloatDistance', 'SubFrameWidth', 0.0500), - ('NodeSocketFloatDistance', 'SubFrameThickness', 0.0500), ('NodeSocketInt', 'SubPanelHAmount', 3), - ('NodeSocketInt', 'SubPanelVAmount', 2), 
('NodeSocketFloat', 'GlassThickness', 0.0100), - ('NodeSocketFloat', 'OpenHAngle', 0.5000), ('NodeSocketFloat', 'OpenVAngle', 0.5000), - ('NodeSocketFloat', 'OpenOffset', 0.5000), ('NodeSocketFloat', 'OEOffset', 0.0500), - ('NodeSocketBool', 'Curtain', False), ('NodeSocketFloat', 'CurtainFrameDepth', 0.5000), - ('NodeSocketFloat', 'CurtainDepth', 0.0300), ('NodeSocketFloat', 'CurtainIntervalNumber', 20.0000), - ('NodeSocketFloatDistance', 'CurtainFrameRadius', 0.0100), ('NodeSocketFloat', 'CurtainMidL', -0.5000), - ('NodeSocketFloat', 'CurtainMidR', 0.5000), ('NodeSocketBool', 'Shutter', True), - ('NodeSocketFloatDistance', 'ShutterPanelRadius', 0.0050), - ('NodeSocketFloatDistance', 'ShutterWidth', 0.0500), - ('NodeSocketFloatDistance', 'ShutterThickness', 0.0050), ('NodeSocketFloat', 'ShutterRotation', 0.0000), - ('NodeSocketFloat', 'ShutterInterval', 0.0500), ('NodeSocketMaterial', 'FrameMaterial', None), - ('NodeSocketMaterial', 'CurtainFrameMaterial', None), ('NodeSocketMaterial', 'CurtainMaterial', None), - ('NodeSocketMaterial', 'Material', None)]) - - windowpanel = nw.new_node(nodegroup_window_panel().name, input_kwargs={ - 'Width': group_input_1.outputs["Width"], - 'Height': group_input_1.outputs["Height"], - 'FrameWidth': group_input_1.outputs["FrameWidth"], - 'FrameThickness': group_input_1.outputs["FrameThickness"], - 'PanelWidth': group_input_1.outputs["FrameWidth"], - 'PanelThickness': group_input_1.outputs["FrameThickness"], - 'PanelHAmount': group_input_1.outputs["PanelHAmount"], - 'PanelVAmount': group_input_1.outputs["PanelVAmount"], - 'FrameMaterial': group_input_1.outputs["FrameMaterial"], - 'Material': group_input_1.outputs["Material"] - }) - - multiply = nw.new_node(Nodes.Math, input_kwargs={ - 0: group_input_1.outputs["FrameWidth"], - 1: group_input_1.outputs["PanelVAmount"] - }, attrs={'operation': 'MULTIPLY'}) - - subtract = nw.new_node(Nodes.Math, input_kwargs={0: group_input_1.outputs["Width"], 1: multiply}, - attrs={'operation': 'SUBTRACT'}) - - divide = nw.new_node(Nodes.Math, input_kwargs={0: subtract, 1: group_input_1.outputs["PanelVAmount"]}, - attrs={'operation': 'DIVIDE'}) - - subtract_1 = nw.new_node(Nodes.Math, input_kwargs={0: divide, 1: group_input_1.outputs["SubFrameWidth"]}, - attrs={'operation': 'SUBTRACT'}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={ - 0: group_input_1.outputs["FrameWidth"], - 1: group_input_1.outputs["PanelHAmount"] - }, attrs={'operation': 'MULTIPLY'}) - - subtract_2 = nw.new_node(Nodes.Math, input_kwargs={0: group_input_1.outputs["Height"], 1: multiply_1}, - attrs={'operation': 'SUBTRACT'}) - - divide_1 = nw.new_node(Nodes.Math, input_kwargs={0: subtract_2, 1: group_input_1.outputs["PanelHAmount"]}, - attrs={'operation': 'DIVIDE'}) - - subtract_3 = nw.new_node(Nodes.Math, input_kwargs={0: divide_1, 1: group_input_1.outputs["SubFrameWidth"]}, - attrs={'operation': 'SUBTRACT'}) - - windowpanel_1 = nw.new_node(nodegroup_window_panel().name, input_kwargs={ - 'Width': subtract_1, - 'Height': subtract_3, - 'FrameWidth': group_input_1.outputs["SubFrameWidth"], - 'FrameThickness': group_input_1.outputs["SubFrameThickness"], - 'PanelWidth': group_input_1.outputs["SubFrameWidth"], - 'PanelThickness': group_input_1.outputs["SubFrameThickness"], - 'PanelHAmount': group_input_1.outputs["SubPanelHAmount"], - 'PanelVAmount': group_input_1.outputs["SubPanelVAmount"], - 'WithGlass': True, - 'GlassThickness': group_input_1.outputs["GlassThickness"], - 'FrameMaterial': group_input_1.outputs["FrameMaterial"], - 'Material': 
group_input_1.outputs["Material"] - }) - - windowshutter = nw.new_node(nodegroup_window_shutter().name, input_kwargs={ - 'Width': subtract_1, - 'Height': subtract_3, - 'FrameWidth': group_input_1.outputs["FrameWidth"], - 'FrameThickness': group_input_1.outputs["FrameThickness"], - 'PanelWidth': group_input_1.outputs["ShutterPanelRadius"], - 'PanelThickness': group_input_1.outputs["ShutterPanelRadius"], - 'ShutterWidth': group_input_1.outputs["ShutterWidth"], - 'ShutterThickness': group_input_1.outputs["ShutterThickness"], - 'ShutterInterval': group_input_1.outputs["ShutterInterval"], - 'ShutterRotation': group_input_1.outputs["ShutterRotation"], - 'FrameMaterial': group_input_1.outputs["FrameMaterial"] - }) - - switch = nw.new_node(Nodes.Switch, - input_kwargs={1: group_input_1.outputs["Shutter"], 14: windowpanel_1, 15: windowshutter - }) - - multiply_2 = nw.new_node(Nodes.Math, input_kwargs={0: group_input_1.outputs["Width"], 1: -0.5000}, - attrs={'operation': 'MULTIPLY'}) - - divide_2 = nw.new_node(Nodes.Math, input_kwargs={ - 0: group_input_1.outputs["Width"], - 1: group_input_1.outputs["PanelVAmount"] - }, attrs={'operation': 'DIVIDE'}) - - multiply_3 = nw.new_node(Nodes.Math, input_kwargs={0: divide_2}, attrs={'operation': 'MULTIPLY'}) - - add = nw.new_node(Nodes.Math, input_kwargs={0: multiply_2, 1: multiply_3}) - - multiply_4 = nw.new_node(Nodes.Math, input_kwargs={0: group_input_1.outputs["Height"], 1: -0.5000}, - attrs={'operation': 'MULTIPLY'}) - - divide_3 = nw.new_node(Nodes.Math, input_kwargs={ - 0: group_input_1.outputs["Height"], - 1: group_input_1.outputs["PanelHAmount"] - }, attrs={'operation': 'DIVIDE'}) - - multiply_5 = nw.new_node(Nodes.Math, input_kwargs={0: divide_3}, attrs={'operation': 'MULTIPLY'}) - - add_1 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_4, 1: multiply_5}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': add, 'Y': add_1}) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': switch.outputs[6], 'Translation': combine_xyz}) - - geometry_to_instance = nw.new_node('GeometryNodeGeometryToInstance', input_kwargs={'Geometry': transform}) - - multiply_6 = nw.new_node(Nodes.Math, input_kwargs={ - 0: group_input_1.outputs["PanelHAmount"], - 1: group_input_1.outputs["PanelVAmount"] - }, attrs={'operation': 'MULTIPLY'}) - - duplicate_elements = nw.new_node(Nodes.DuplicateElements, - input_kwargs={'Geometry': geometry_to_instance, 'Amount': multiply_6}, - attrs={'domain': 'INSTANCE'}) - - reroute = nw.new_node(Nodes.Reroute, input_kwargs={'Input': group_input_1.outputs["PanelHAmount"]}) - - divide_4 = nw.new_node(Nodes.Math, - input_kwargs={0: duplicate_elements.outputs["Duplicate Index"], 1: reroute}, - attrs={'operation': 'DIVIDE'}) - - floor = nw.new_node(Nodes.Math, input_kwargs={0: divide_4}, attrs={'operation': 'FLOOR'}) - - add_2 = nw.new_node(Nodes.Math, input_kwargs={0: divide, 1: group_input_1.outputs["FrameWidth"]}) - - multiply_7 = nw.new_node(Nodes.Math, input_kwargs={0: floor, 1: add_2}, attrs={'operation': 'MULTIPLY'}) - - modulo = nw.new_node(Nodes.Math, - input_kwargs={0: duplicate_elements.outputs["Duplicate Index"], 1: reroute}, - attrs={'operation': 'MODULO'}) - - add_3 = nw.new_node(Nodes.Math, input_kwargs={0: divide_1, 1: group_input_1.outputs["FrameWidth"]}) - - multiply_8 = nw.new_node(Nodes.Math, input_kwargs={0: modulo, 1: add_3}, attrs={'operation': 'MULTIPLY'}) - - power = nw.new_node(Nodes.Math, input_kwargs={0: -1.0000, 1: floor}, attrs={'operation': 'POWER'}) - - multiply_9 = 
nw.new_node(Nodes.Math, input_kwargs={0: power, 1: group_input_1.outputs["OEOffset"]}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': multiply_7, 'Y': multiply_8, 'Z': multiply_9}) - - set_position = nw.new_node(Nodes.SetPosition, input_kwargs={ - 'Geometry': duplicate_elements.outputs["Geometry"], - 'Offset': combine_xyz_1 - }) - - power_1 = nw.new_node(Nodes.Math, input_kwargs={0: -1.0000, 1: floor}, attrs={'operation': 'POWER'}) - - multiply_10 = nw.new_node(Nodes.Math, input_kwargs={0: group_input_1.outputs["OpenVAngle"], 1: power_1}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': multiply_10}) - - modulo_1 = nw.new_node(Nodes.Math, input_kwargs={0: floor, 1: 2.0000}, attrs={'operation': 'MODULO'}) - - multiply_11 = nw.new_node(Nodes.Math, input_kwargs={0: divide, 1: modulo_1}, - attrs={'operation': 'MULTIPLY'}) - - add_4 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_2, 1: multiply_11}) - - modulo_2 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_8, 1: 2.0000}, attrs={'operation': 'MODULO'}) - - multiply_12 = nw.new_node(Nodes.Math, input_kwargs={0: divide_1, 1: modulo_2}, - attrs={'operation': 'MULTIPLY'}) - - add_5 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_4, 1: multiply_12}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': add_4, 'Y': add_5}) - - rotate_instances = nw.new_node(Nodes.RotateInstances, input_kwargs={ - 'Instances': set_position, - 'Rotation': combine_xyz_3, - 'Pivot Point': combine_xyz_2 - }) - - multiply_13 = nw.new_node(Nodes.Math, input_kwargs={0: group_input_1.outputs["OpenHAngle"]}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz_5 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply_13}) - - multiply_14 = nw.new_node(Nodes.Math, input_kwargs={0: add_3, 1: -0.5000}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_6 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': multiply_14}) - - rotate_instances_1 = nw.new_node(Nodes.RotateInstances, input_kwargs={ - 'Instances': rotate_instances, - 'Rotation': combine_xyz_5, - 'Pivot Point': combine_xyz_6 - }) - - power_2 = nw.new_node(Nodes.Math, input_kwargs={0: -1.0000, 1: floor}, attrs={'operation': 'POWER'}) - - multiply_15 = nw.new_node(Nodes.Math, input_kwargs={0: power_2, 1: group_input_1.outputs["OpenOffset"]}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz_4 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply_15}) - - set_position_1 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': rotate_instances_1, 'Offset': combine_xyz_4}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [windowpanel, set_position_1]}) - - multiply_16 = nw.new_node(Nodes.Math, input_kwargs={0: group_input_1.outputs["Width"]}, - attrs={'operation': 'MULTIPLY'}) - - multiply_17 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_16, 1: -1.0000}, - attrs={'operation': 'MULTIPLY'}) - - multiply_18 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input_1.outputs["CurtainFrameDepth"], 1: -1.0000}, - attrs={'operation': 'MULTIPLY'}) - - curtain = nw.new_node(nodegroup_curtain().name, input_kwargs={ - 'Width': group_input_1.outputs["Width"], - 'Depth': group_input_1.outputs["CurtainDepth"], - 'Height': group_input_1.outputs["Height"], - 'IntervalNumber': group_input_1.outputs["CurtainIntervalNumber"], - 'Radius': group_input_1.outputs["CurtainFrameRadius"], - 'L1': multiply_17, - 'R1': group_input_1.outputs["CurtainMidL"], - 'L2': 
group_input_1.outputs["CurtainMidR"], - 'R2': multiply_16, - 'FrameDepth': multiply_18, - 'CurtainFrameMaterial': group_input_1.outputs["CurtainFrameMaterial"], - 'CurtainMaterial': group_input_1.outputs["CurtainMaterial"] - }) - - multiply_19 = nw.new_node(Nodes.Math, input_kwargs={0: group_input_1.outputs["FrameThickness"]}, - attrs={'operation': 'MULTIPLY'}) - - add_6 = nw.new_node(Nodes.Math, - input_kwargs={0: group_input_1.outputs["CurtainFrameDepth"], 1: multiply_19}) - - combine_xyz_7 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': add_6}) - - transform_geometry = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': curtain, 'Translation': combine_xyz_7}) - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [transform_geometry, join_geometry]}) - - switch_1 = nw.new_node(Nodes.Switch, input_kwargs={ - 1: group_input_1.outputs["Curtain"], - 14: join_geometry, - 15: join_geometry_1 - }) - - realize_instances = nw.new_node(Nodes.RealizeInstances, input_kwargs={'Geometry': switch_1.outputs[6]}) - - bounding_box = nw.new_node(Nodes.BoundingBox, input_kwargs={'Geometry': realize_instances}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={ - 'Geometry': realize_instances, - 'Bounding Box': bounding_box.outputs["Bounding Box"] - }, attrs={'is_active_output': True}) - - -@node_utils.to_nodegroup('nodegroup_line_seq', singleton=False, type='GeometryNodeTree') -def nodegroup_line_seq(nw: NodeWrangler): - # Code generated using version 2.6.5 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketFloat', 'Width', -1.0000), - ('NodeSocketFloat', 'Height', 0.5000), ('NodeSocketFloat', 'Amount', 0.5000)]) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Width"]}, - attrs={'operation': 'MULTIPLY'}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Height"], 1: -0.5000}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply, 'Y': multiply_1}) - - multiply_2 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Width"], 1: -0.5000}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply_2, 'Y': multiply_1}) - - curve_line = nw.new_node(Nodes.CurveLine, input_kwargs={'Start': combine_xyz, 'End': combine_xyz_1}) - - geometry_to_instance = nw.new_node('GeometryNodeGeometryToInstance', input_kwargs={'Geometry': curve_line}) - - duplicate_elements = nw.new_node(Nodes.DuplicateElements, input_kwargs={ - 'Geometry': geometry_to_instance, - 'Amount': group_input.outputs["Amount"] - }, attrs={'domain': 'INSTANCE'}) - - add = nw.new_node(Nodes.Math, input_kwargs={0: duplicate_elements.outputs["Duplicate Index"], 1: 1.0000}) - - add_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Amount"], 1: 1.0000}) - - divide = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Height"], 1: add_1}, - attrs={'operation': 'DIVIDE'}) - - multiply_3 = nw.new_node(Nodes.Math, input_kwargs={0: add, 1: divide}, attrs={'operation': 'MULTIPLY'}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': multiply_3}) - - set_position = nw.new_node(Nodes.SetPosition, input_kwargs={ - 'Geometry': duplicate_elements.outputs["Geometry"], - 'Offset': combine_xyz_2 - }) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Curve': set_position}, - attrs={'is_active_output': True}) - - 
-@node_utils.to_nodegroup('nodegroup_curtain', singleton=False, type='GeometryNodeTree') -def nodegroup_curtain(nw: NodeWrangler): - # Code generated using version 2.6.5 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketFloat', 'Width', 0.5000), - ('NodeSocketFloat', 'Depth', 0.1000), ('NodeSocketFloatDistance', 'Height', 0.1000), - ('NodeSocketFloat', 'IntervalNumber', 0.5000), ('NodeSocketFloatDistance', 'Radius', 1.0000), - ('NodeSocketFloat', 'L1', 0.5000), ('NodeSocketFloat', 'R1', 0.0000), ('NodeSocketFloat', 'L2', 0.0000), - ('NodeSocketFloat', 'R2', 0.5000), ('NodeSocketFloat', 'FrameDepth', 0.0000), - ('NodeSocketMaterial', 'CurtainFrameMaterial', None), ('NodeSocketMaterial', 'CurtainMaterial', None)]) - - reroute_1 = nw.new_node(Nodes.Reroute, input_kwargs={'Input': group_input.outputs["Radius"]}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: reroute_1, 1: 2.0000}, attrs={'operation': 'MULTIPLY'}) - - ico_sphere = nw.new_node(Nodes.MeshIcoSphere, input_kwargs={'Radius': multiply, 'Subdivisions': 4}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Width"]}, - attrs={'operation': 'MULTIPLY'}) - - multiply_2 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_1, 1: -1.0000}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply_2}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': multiply_1}) - - curve_line = nw.new_node(Nodes.CurveLine, input_kwargs={'Start': combine_xyz, 'End': combine_xyz_1}) - - sample_curve_1 = nw.new_node(Nodes.SampleCurve, input_kwargs={'Curves': curve_line, 'Factor': 1.0000}) - - set_position_2 = nw.new_node(Nodes.SetPosition, input_kwargs={ - 'Geometry': ico_sphere.outputs["Mesh"], - 'Offset': sample_curve_1.outputs["Position"] - }) - - combine_xyz_9 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': multiply_1, 'Z': group_input.outputs["FrameDepth"]}) - - curve_line_4 = nw.new_node(Nodes.CurveLine, input_kwargs={'Start': combine_xyz_1, 'End': combine_xyz_9}) - - combine_xyz_8 = nw.new_node(Nodes.CombineXYZ, - input_kwargs={'X': multiply_2, 'Z': group_input.outputs["FrameDepth"]}) - - curve_line_3 = nw.new_node(Nodes.CurveLine, input_kwargs={'Start': combine_xyz, 'End': combine_xyz_8}) - - join_geometry_3 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [curve_line, curve_line_4, curve_line_3]}) - - curve_circle = nw.new_node(Nodes.CurveCircle, input_kwargs={'Radius': group_input.outputs["Radius"]}) - - curve_to_mesh_1 = nw.new_node(Nodes.CurveToMesh, input_kwargs={ - 'Curve': join_geometry_3, - 'Profile Curve': curve_circle.outputs["Curve"], - 'Fill Caps': True - }) - - ico_sphere_1 = nw.new_node(Nodes.MeshIcoSphere, input_kwargs={'Radius': multiply, 'Subdivisions': 4}) - - sample_curve = nw.new_node(Nodes.SampleCurve, input_kwargs={'Curves': curve_line}) - - set_position_3 = nw.new_node(Nodes.SetPosition, input_kwargs={ - 'Geometry': ico_sphere_1.outputs["Mesh"], - 'Offset': sample_curve.outputs["Position"] - }) - - join_geometry_2 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [set_position_2, curve_to_mesh_1, set_position_3]}) - - multiply_3 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["Height"], 1: -0.4700}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': multiply_3}) - - set_position_1 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': join_geometry_2, 'Offset': 
combine_xyz_3}) - - set_material_1 = nw.new_node(Nodes.SetMaterial, input_kwargs={ - 'Geometry': set_position_1, - 'Material': group_input.outputs["CurtainFrameMaterial"] - }) - - combine_xyz_4 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': group_input.outputs["L1"]}) - - combine_xyz_5 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': group_input.outputs["R1"]}) - - curve_line_1 = nw.new_node(Nodes.CurveLine, input_kwargs={'Start': combine_xyz_4, 'End': combine_xyz_5}) - - resample_curve = nw.new_node(Nodes.ResampleCurve, input_kwargs={'Curve': curve_line_1, 'Count': 200}) - - combine_xyz_6 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': group_input.outputs["L2"]}) - - combine_xyz_7 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': group_input.outputs["R2"]}) - - curve_line_2 = nw.new_node(Nodes.CurveLine, input_kwargs={'Start': combine_xyz_6, 'End': combine_xyz_7}) - - resample_curve_1 = nw.new_node(Nodes.ResampleCurve, input_kwargs={'Curve': curve_line_2, 'Count': 200}) - - join_geometry_1 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [resample_curve, resample_curve_1]}) - - spline_parameter_1 = nw.new_node(Nodes.SplineParameter) - - capture_attribute = nw.new_node(Nodes.CaptureAttribute, input_kwargs={ - 'Geometry': join_geometry_1, - 2: spline_parameter_1.outputs["Factor"] - }) - - spline_parameter = nw.new_node(Nodes.SplineParameter) - - multiply_4 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["IntervalNumber"], 1: 6.2800}, - attrs={'operation': 'MULTIPLY'}) - - divide = nw.new_node(Nodes.Math, input_kwargs={0: multiply_4, 1: group_input.outputs["Width"]}, - attrs={'operation': 'DIVIDE'}) - - multiply_5 = nw.new_node(Nodes.Math, input_kwargs={0: spline_parameter.outputs["Length"], 1: divide}, - attrs={'operation': 'MULTIPLY'}) - - add = nw.new_node(Nodes.Math, input_kwargs={0: multiply_5, 1: 1.6800}) - - sine = nw.new_node(Nodes.Math, input_kwargs={0: add}, attrs={'operation': 'SINE'}) - - multiply_6 = nw.new_node(Nodes.Math, input_kwargs={0: sine, 1: group_input.outputs["Depth"]}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Z': multiply_6}) - - set_position = nw.new_node(Nodes.SetPosition, input_kwargs={ - 'Geometry': capture_attribute.outputs["Geometry"], - 'Offset': combine_xyz_2 - }) - - reroute = nw.new_node(Nodes.Reroute, input_kwargs={'Input': group_input.outputs["Height"]}) - - quadrilateral = nw.new_node('GeometryNodeCurvePrimitiveQuadrilateral', - input_kwargs={'Width': reroute, 'Height': 0.0020}) - - position = nw.new_node(Nodes.InputPosition) - - separate_xyz = nw.new_node(Nodes.SeparateXYZ, input_kwargs={'Vector': position}) - - divide_1 = nw.new_node(Nodes.Math, input_kwargs={0: separate_xyz.outputs["X"], 1: reroute}, - attrs={'operation': 'DIVIDE'}) - - capture_attribute_1 = nw.new_node(Nodes.CaptureAttribute, - input_kwargs={'Geometry': quadrilateral, 2: divide_1}) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, input_kwargs={ - 'Curve': set_position, - 'Profile Curve': capture_attribute_1.outputs["Geometry"] - }) - - combine_xyz_12 = nw.new_node(Nodes.CombineXYZ, input_kwargs={ - 'X': capture_attribute_1.outputs[2], - 'Y': capture_attribute.outputs[2] - }) - - store_named_attribute = nw.new_node(Nodes.StoreNamedAttribute, input_kwargs={ - 'Geometry': curve_to_mesh, - 'Name': 'UVMap', - 3: combine_xyz_12 - }, attrs={'domain': 'CORNER', 'data_type': 'FLOAT2'}) - - set_material = nw.new_node(Nodes.SetMaterial, input_kwargs={ - 'Geometry': store_named_attribute, - 
'Material': group_input.outputs["CurtainMaterial"] - }) - - multiply_7 = nw.new_node(Nodes.Math, input_kwargs={0: reroute_1, 1: 1.3000}, - attrs={'operation': 'MULTIPLY'}) - - curve_circle_1 = nw.new_node(Nodes.CurveCircle, input_kwargs={'Radius': multiply_7}) - - curve_to_mesh_2 = nw.new_node(Nodes.CurveToMesh, input_kwargs={ - 'Curve': curve_line, - 'Profile Curve': curve_circle_1.outputs["Curve"] - }) - - add_1 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_3, 1: group_input.outputs["Radius"]}) - - combine_xyz_10 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': add_1}) - - set_position_4 = nw.new_node(Nodes.SetPosition, - input_kwargs={'Geometry': curve_to_mesh_2, 'Offset': combine_xyz_10}) - - difference = nw.new_node(Nodes.MeshBoolean, input_kwargs={'Mesh 1': set_material, 'Mesh 2': set_position_4}) - - join_geometry = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [set_material_1, difference.outputs["Mesh"]]}) - - set_shade_smooth = nw.new_node(Nodes.SetShadeSmooth, - input_kwargs={'Geometry': join_geometry, 'Shade Smooth': False}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': set_shade_smooth}, - attrs={'is_active_output': True}) - - -@node_utils.to_nodegroup('nodegroup_window_shutter', singleton=False, type='GeometryNodeTree') -def nodegroup_window_shutter(nw: NodeWrangler): - # Code generated using version 2.6.5 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketFloatDistance', 'Width', 2.0000), - ('NodeSocketFloatDistance', 'Height', 2.0000), ('NodeSocketFloatDistance', 'FrameWidth', 0.1000), - ('NodeSocketFloatDistance', 'FrameThickness', 0.1000), - ('NodeSocketFloatDistance', 'PanelWidth', 0.1000), - ('NodeSocketFloatDistance', 'PanelThickness', 0.1000), - ('NodeSocketFloatDistance', 'ShutterWidth', 0.1000), - ('NodeSocketFloatDistance', 'ShutterThickness', 0.1000), ('NodeSocketFloat', 'ShutterInterval', 0.5000), - ('NodeSocketFloat', 'ShutterRotation', 0.0000), ('NodeSocketMaterial', 'FrameMaterial', None)]) - - quadrilateral = nw.new_node('GeometryNodeCurvePrimitiveQuadrilateral', input_kwargs={ - 'Width': group_input.outputs["Width"], - 'Height': group_input.outputs["Height"] - }) - - sqrt = nw.new_node(Nodes.Math, input_kwargs={0: 2.0000}, attrs={'operation': 'SQRT'}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["FrameWidth"], 1: sqrt}, - attrs={'operation': 'MULTIPLY'}) - - quadrilateral_1 = nw.new_node('GeometryNodeCurvePrimitiveQuadrilateral', input_kwargs={ - 'Width': multiply, - 'Height': group_input.outputs["FrameThickness"] - }) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': quadrilateral, 'Profile Curve': quadrilateral_1}) - - subtract = nw.new_node(Nodes.Math, - input_kwargs={0: group_input.outputs["Width"], 1: group_input.outputs["FrameWidth"]}, - attrs={'operation': 'SUBTRACT'}) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={ - 'X': subtract, - 'Y': group_input.outputs["ShutterWidth"], - 'Z': group_input.outputs["ShutterThickness"] - }) - - cube = nw.new_node(Nodes.MeshCube, input_kwargs={'Size': combine_xyz}) - - geometry_to_instance = nw.new_node('GeometryNodeGeometryToInstance', - input_kwargs={'Geometry': cube.outputs["Mesh"]}) - - subtract_1 = nw.new_node(Nodes.Math, input_kwargs={ - 0: group_input.outputs["Height"], - 1: group_input.outputs["FrameWidth"] - }, attrs={'operation': 'SUBTRACT'}) - - divide = nw.new_node(Nodes.Math, input_kwargs={0: subtract_1, 1: 
group_input.outputs["ShutterInterval"]}, - attrs={'operation': 'DIVIDE'}) - - floor = nw.new_node(Nodes.Math, input_kwargs={0: divide}, attrs={'operation': 'FLOOR'}) - - shutter_number = nw.new_node(Nodes.Math, input_kwargs={0: floor, 1: 1.0000}, label='ShutterNumber', - attrs={'operation': 'SUBTRACT'}) - - duplicate_elements = nw.new_node(Nodes.DuplicateElements, - input_kwargs={'Geometry': geometry_to_instance, 'Amount': shutter_number}, - attrs={'domain': 'INSTANCE'}) - - shutter_true_interval = nw.new_node(Nodes.Math, input_kwargs={0: subtract_1, 1: floor}, - label='ShutterTrueInterval', attrs={'operation': 'DIVIDE'}) - - multiply_1 = nw.new_node(Nodes.Math, input_kwargs={ - 0: duplicate_elements.outputs["Duplicate Index"], - 1: shutter_true_interval - }, attrs={'operation': 'MULTIPLY'}) - - multiply_2 = nw.new_node(Nodes.Math, input_kwargs={0: subtract_1, 1: -0.5000}, - attrs={'operation': 'MULTIPLY'}) - - add = nw.new_node(Nodes.Math, input_kwargs={0: multiply_2, 1: shutter_true_interval}) - - add_1 = nw.new_node(Nodes.Math, input_kwargs={0: multiply_1, 1: add}) - - combine_xyz_1 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': add_1}) - - set_position = nw.new_node(Nodes.SetPosition, input_kwargs={ - 'Geometry': duplicate_elements.outputs["Geometry"], - 'Offset': combine_xyz_1 - }) - - reroute = nw.new_node(Nodes.Reroute, input_kwargs={'Input': group_input.outputs["ShutterRotation"]}) - - combine_xyz_5 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': reroute}) - - rotate_instances = nw.new_node(Nodes.RotateInstances, - input_kwargs={'Instances': set_position, 'Rotation': combine_xyz_5}) - - multiply_3 = nw.new_node(Nodes.Math, input_kwargs={0: shutter_true_interval, 1: 2.0000}, - attrs={'operation': 'MULTIPLY'}) - - subtract_2 = nw.new_node(Nodes.Math, input_kwargs={0: subtract_1, 1: multiply_3}, - attrs={'operation': 'SUBTRACT'}) - - combine_xyz_2 = nw.new_node(Nodes.CombineXYZ, input_kwargs={ - 'X': group_input.outputs["PanelWidth"], - 'Y': subtract_2, - 'Z': group_input.outputs["PanelThickness"] - }) - - cube_1 = nw.new_node(Nodes.MeshCube, input_kwargs={'Size': combine_xyz_2}) - - multiply_4 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["ShutterWidth"]}, - attrs={'operation': 'MULTIPLY'}) - - combine_xyz_3 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'Y': multiply_4}) - - curve_line = nw.new_node(Nodes.CurveLine, input_kwargs={'End': combine_xyz_3}) - - geometry_to_instance_1 = nw.new_node('GeometryNodeGeometryToInstance', - input_kwargs={'Geometry': curve_line}) - - combine_xyz_4 = nw.new_node(Nodes.CombineXYZ, input_kwargs={'X': reroute}) - - rotate_instances_1 = nw.new_node(Nodes.RotateInstances, input_kwargs={ - 'Instances': geometry_to_instance_1, - 'Rotation': combine_xyz_4 - }) - - realize_instances = nw.new_node(Nodes.RealizeInstances, input_kwargs={'Geometry': rotate_instances_1}) - - sample_curve = nw.new_node(Nodes.SampleCurve, input_kwargs={'Curves': realize_instances, 'Factor': 1.0000}) - - set_position_1 = nw.new_node(Nodes.SetPosition, input_kwargs={ - 'Geometry': cube_1.outputs["Mesh"], - 'Offset': sample_curve.outputs["Position"] - }) - - join_geometry_2 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [curve_to_mesh, rotate_instances, set_position_1]}) - - set_material = nw.new_node(Nodes.SetMaterial, input_kwargs={ - 'Geometry': join_geometry_2, - 'Material': group_input.outputs["FrameMaterial"] - }) - - set_shade_smooth = nw.new_node(Nodes.SetShadeSmooth, - input_kwargs={'Geometry': set_material, 'Shade Smooth': False}) - - 
realize_instances_1 = nw.new_node(Nodes.RealizeInstances, input_kwargs={'Geometry': set_shade_smooth}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': realize_instances_1}, - attrs={'is_active_output': True}) - - -@node_utils.to_nodegroup('nodegroup_window_panel', singleton=False, type='GeometryNodeTree') -def nodegroup_window_panel(nw: NodeWrangler): - # Code generated using version 2.6.5 of the node_transpiler - - group_input = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketFloatDistance', 'Width', 2.0000), - ('NodeSocketFloatDistance', 'Height', 2.0000), ('NodeSocketFloatDistance', 'FrameWidth', 0.1000), - ('NodeSocketFloatDistance', 'FrameThickness', 0.1000), - ('NodeSocketFloatDistance', 'PanelWidth', 0.1000), - ('NodeSocketFloatDistance', 'PanelThickness', 0.1000), ('NodeSocketInt', 'PanelHAmount', 0), - ('NodeSocketInt', 'PanelVAmount', 0), ('NodeSocketBool', 'WithGlass', False), - ('NodeSocketFloat', 'GlassThickness', 0.0000), ('NodeSocketMaterial', 'FrameMaterial', None), - ('NodeSocketMaterial', 'Material', None)]) - - quadrilateral = nw.new_node('GeometryNodeCurvePrimitiveQuadrilateral', input_kwargs={ - 'Width': group_input.outputs["Width"], - 'Height': group_input.outputs["Height"] - }) - - sqrt = nw.new_node(Nodes.Math, input_kwargs={0: 2.0000}, attrs={'operation': 'SQRT'}) - - multiply = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["FrameWidth"], 1: sqrt}, - attrs={'operation': 'MULTIPLY'}) - - quadrilateral_1 = nw.new_node('GeometryNodeCurvePrimitiveQuadrilateral', input_kwargs={ - 'Width': multiply, - 'Height': group_input.outputs["FrameThickness"] - }) - - curve_to_mesh = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': quadrilateral, 'Profile Curve': quadrilateral_1}) - - add = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["PanelHAmount"], 1: -1.0000}) - - lineseq = nw.new_node(nodegroup_line_seq().name, input_kwargs={ - 'Width': group_input.outputs["Width"], - 'Height': group_input.outputs["Height"], - 'Amount': add - }) - - reroute = nw.new_node(Nodes.Reroute, input_kwargs={'Input': group_input.outputs["PanelWidth"]}) - - subtract = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["PanelThickness"], 1: 0.0010}, - attrs={'operation': 'SUBTRACT'}) - - quadrilateral_2 = nw.new_node('GeometryNodeCurvePrimitiveQuadrilateral', - input_kwargs={'Width': reroute, 'Height': subtract}) - - curve_to_mesh_1 = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': lineseq, 'Profile Curve': quadrilateral_2}) - - add_1 = nw.new_node(Nodes.Math, input_kwargs={0: group_input.outputs["PanelVAmount"], 1: -1.0000}) - - lineseq_1 = nw.new_node(nodegroup_line_seq().name, input_kwargs={ - 'Width': group_input.outputs["Height"], - 'Height': group_input.outputs["Width"], - 'Amount': add_1 - }) - - transform = nw.new_node(Nodes.Transform, - input_kwargs={'Geometry': lineseq_1, 'Rotation': (0.0000, 0.0000, 1.5708)}) - - subtract_1 = nw.new_node(Nodes.Math, input_kwargs={0: subtract, 1: 0.0010}, attrs={'operation': 'SUBTRACT'}) - - quadrilateral_3 = nw.new_node('GeometryNodeCurvePrimitiveQuadrilateral', - input_kwargs={'Width': reroute, 'Height': subtract_1}) - - curve_to_mesh_2 = nw.new_node(Nodes.CurveToMesh, - input_kwargs={'Curve': transform, 'Profile Curve': quadrilateral_3}) - - join_geometry_3 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [curve_to_mesh_1, curve_to_mesh_2]}) - - join_geometry_2 = nw.new_node(Nodes.JoinGeometry, - input_kwargs={'Geometry': [curve_to_mesh, 
join_geometry_3]}) - - set_material_1 = nw.new_node(Nodes.SetMaterial, input_kwargs={ - 'Geometry': join_geometry_2, - 'Material': group_input.outputs["FrameMaterial"] - }) - - combine_xyz = nw.new_node(Nodes.CombineXYZ, input_kwargs={ - 'X': group_input.outputs["Width"], - 'Y': group_input.outputs["Height"], - 'Z': group_input.outputs["GlassThickness"] - }) - - cube = nw.new_node(Nodes.MeshCube, input_kwargs={'Size': combine_xyz}) - - store_named_attribute = nw.new_node(Nodes.StoreNamedAttribute, input_kwargs={ - 'Geometry': cube.outputs["Mesh"], - 'Name': 'uv_map', - 3: cube.outputs["UV Map"] - }, attrs={'domain': 'CORNER', 'data_type': 'FLOAT_VECTOR'}) - - set_material = nw.new_node(Nodes.SetMaterial, input_kwargs={ - 'Geometry': store_named_attribute, - 'Material': group_input.outputs["Material"] - }) - - join_geometry = nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': [set_material, set_material_1]}) - - switch = nw.new_node(Nodes.Switch, input_kwargs={ - 1: group_input.outputs["WithGlass"], - 14: set_material_1, - 15: join_geometry - }) - - set_shade_smooth = nw.new_node(Nodes.SetShadeSmooth, - input_kwargs={'Geometry': switch.outputs[6], 'Shade Smooth': False}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': set_shade_smooth}, - attrs={'is_active_output': True}) - - -def shader_curtain_material(nw: NodeWrangler): - # Code generated using version 2.6.5 of the node_transpiler - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, input_kwargs={ - 'Base Color': color_category('textile'), - 'Transmission': np.random.uniform(0, 1), - 'Transmission Roughness': 1.0 - }) - - material_output = nw.new_node(Nodes.MaterialOutput, input_kwargs={'Surface': principled_bsdf}, - attrs={'is_active_output': True}) - - -def shader_curtain_frame_material(nw: NodeWrangler): - # Code generated using version 2.6.5 of the node_transpiler - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': (0.1840, 0.0000, 0.8000, 1.0000)}) - - material_output = nw.new_node(Nodes.MaterialOutput, input_kwargs={'Surface': principled_bsdf}, - attrs={'is_active_output': True}) - - -def shader_frame_material(nw: NodeWrangler): - # Code generated using version 2.6.5 of the node_transpiler - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, - input_kwargs={'Base Color': (0.8000, 0.5033, 0.0057, 1.0000)}) - - material_output = nw.new_node(Nodes.MaterialOutput, input_kwargs={'Surface': principled_bsdf}, - attrs={'is_active_output': True}) - - -def shader_glass_material(nw: NodeWrangler): - # Code generated using version 2.6.5 of the node_transpiler - - principled_bsdf = nw.new_node(Nodes.PrincipledBSDF, input_kwargs={ - 'Base Color': (0.0094, 0.0055, 0.8000, 1.0000), - 'Roughness': 0.0000 - }) - - material_output = nw.new_node(Nodes.MaterialOutput, input_kwargs={'Surface': principled_bsdf}, - attrs={'is_active_output': True}) diff --git a/infinigen/core/constraints/checks.py b/infinigen/core/constraints/checks.py index 863dfb50c..2c86ef273 100644 --- a/infinigen/core/constraints/checks.py +++ b/infinigen/core/constraints/checks.py @@ -4,23 +4,23 @@ # Authors: Alexander Raistrick -import typing import itertools import logging +import typing from tqdm import tqdm -from infinigen.core.constraints import ( - constraint_language as cl, - reasoning as r +from infinigen.core.constraints import constraint_language as cl +from infinigen.core.constraints import reasoning as r +from infinigen.core.constraints.example_solver import ( + propose_discrete, + 
propose_relations, ) -from infinigen.core.constraints.example_solver import propose_discrete, propose_relations -from infinigen_examples import indoor_constraint_examples as ex logger = logging.getLogger(__name__) -def iter_domains(node: cl.Node) -> typing.Iterator[r.Domain]: +def iter_domains(node: cl.Node) -> typing.Iterator[r.Domain]: match node: case cl.ObjectSetExpression(): yield node, r.constraint_domain(node) @@ -28,74 +28,81 @@ def iter_domains(node: cl.Node) -> typing.Iterator[r.Domain]: for k, c in node.children(): yield from iter_domains(c) case _: - raise ValueError(f'iter_domains found unmatched {type(node)=} {node=}') + raise ValueError(f"iter_domains found unmatched {type(node)=} {node=}") + def bound_coverage(b: r.Bound, stages: dict[str, r.Domain]) -> list[str]: return [ - k - for k, f in stages.items() - if propose_discrete.active_for_stage(b.domain, f) + k for k, f in stages.items() if propose_discrete.active_for_stage(b.domain, f) ] def check_coverage_errors(b: r.Bound, coverage: list, stages: dict[str, r.Domain]): - if len(coverage) == 0: - raise ValueError(f'Greedy stages did not cover all object classes! User specified bound {b} had {coverage=}') + raise ValueError( + f"Greedy stages did not cover all object classes! User specified bound {b} had {coverage=}" + ) if len(coverage) != 1: raise ValueError( - f'Object class {b} was covered in more than one greedy stage! Got {coverage=}. Greedy stages must be non-overlapping' + f"Object class {b} was covered in more than one greedy stage! Got {coverage=}. Greedy stages must be non-overlapping" ) - + gen_options = propose_discrete.lookup_generator(b.domain.tags) if len(gen_options) < 1: - raise ValueError(f'Object class {b=} had {gen_options=}') - + raise ValueError(f"Object class {b=} had {gen_options=}") + for k in coverage: - logger.debug(f'Checking coverage {k=} {b.domain=} {stages[k]=}') + logger.debug(f"Checking coverage {k=} {b.domain=} {stages[k]=}") if not b.domain.intersects(stages[k]): continue prop = b.domain.intersection(stages[k]) if prop.is_recursive(): - raise ValueError(f'Found recursive prop domain {prop.tags=} {len(prop.relations)=}') + raise ValueError( + f"Found recursive prop domain {prop.tags=} {len(prop.relations)=}" + ) assert not prop.is_recursive(), prop.tags - + if not len(prop.relations): continue - first, remaining, implied = propose_relations.minimize_redundant_relations(prop.relations) + first, remaining, implied = propose_relations.minimize_redundant_relations( + prop.relations + ) if implied: continue if isinstance(first[0], cl.AnyRelation): - raise ValueError(f'{b=} in {stages[k]=} had underspecified {first=}') + raise ValueError(f"{b=} in {stages[k]=} had underspecified {first=}") -def check_problem_greedy_coverage(prob: cl.Problem, stages: dict[str, r.Domain]): +def check_problem_greedy_coverage(prob: cl.Problem, stages: dict[str, r.Domain]): bounds = r.constraint_bounds(prob) - + for b in tqdm(bounds, desc="Checking greedy stages coverage"): coverage = bound_coverage(b, stages) check_coverage_errors(b, coverage, stages) + def check_unfinalized_constraints(prob: cl.Problem): # TODO return [] -def check_contradictory_domains(prob: cl.Problem): +def check_contradictory_domains(prob: cl.Problem): for node, dom in iter_domains(prob): contradictory = not dom.satisfies(dom) if contradictory: - raise ValueError(f'Constraint node had self-contradicting domain. \n{node=} \n{dom=}') + raise ValueError( + f"Constraint node had self-contradicting domain. 
\n{node=} \n{dom=}" + ) + def validate_stages(stages: dict[str, r.Domain]): - for k, d in stages.items(): if d.is_recursive(): - raise ValueError(f'{k=} had recursive domain') - + raise ValueError(f"{k=} had recursive domain") + for (k1, d1), (k2, d2) in itertools.product(stages.items(), stages.items()): inter = d1.intersects(d2) if inter != (k1 == k2): @@ -104,20 +111,19 @@ def validate_stages(stages: dict[str, r.Domain]): " please define greedy stages which are mutually exclusive." ) + def check_all( - prob: cl.Problem, - greedy_stages: dict[str, r.Domain], - all_vars: list[str] + prob: cl.Problem, greedy_stages: dict[str, r.Domain], all_vars: list[str] ): - for k, v in greedy_stages.items(): - if not isinstance(v, r.Domain): - raise TypeError(f'Greedy stage {k=} had non-domain value {v=}') + raise TypeError(f"Greedy stage {k=} had non-domain value {v=}") extras = v.all_vartags() - set(all_vars) if len(extras): - raise ValueError(f'{k=} had extra vars {extras=}. Greedy domains may only contain vars from {all_vars}') + raise ValueError( + f"{k=} had extra vars {extras=}. Greedy domains may only contain vars from {all_vars}" + ) validate_stages(greedy_stages) diff --git a/infinigen/core/constraints/constraint_language/__init__.py b/infinigen/core/constraints/constraint_language/__init__.py index 5b386e1cc..f45705bc0 100644 --- a/infinigen/core/constraints/constraint_language/__init__.py +++ b/infinigen/core/constraints/constraint_language/__init__.py @@ -4,64 +4,48 @@ # Authors: Alexander Raistrick, Karhan Kayan -from infinigen.core.tags import Semantics, Negated -from .types import Node +from infinigen.core.tags import Negated, Semantics from .expression import ( - Expression, ArithmethicExpression, - constant, - ScalarOperatorExpression, + BoolExpression, BoolOperatorExpression, + Expression, ScalarExpression, - BoolExpression, + ScalarOperatorExpression, + constant, hinge, max_expr, min_expr, ) - -from .set_reasoning import ( - scene, - tagged, - excludes, - count, - in_range, - related_to, -) +from .gather import ForAll, MeanOver, SumOver, all, item, mean, sum from .geometry import ( ObjectSetExpression, + accessibility_cost, + angle_alignment_cost, + center_stable_surface_dist, + coplanarity_cost, distance, - min_distance_internal, focus_score, - angle_alignment_cost, freespace_2d, min_dist_2d, - rotational_asymmetry, - center_stable_surface_dist, - accessibility_cost, + min_distance_internal, reflectional_asymmetry, - volume, - coplanarity_cost + rotational_asymmetry, + volume, ) -from .result import Problem from .relations import ( - Relation, - NegatedRelation, AnyRelation, ConnectorType, - RoomNeighbour, CutFrom, GeometryRelation, - Touching, + NegatedRelation, + Relation, + RoomNeighbour, + StableAgainst, SupportedBy, - StableAgainst + Touching, ) -from .gather import ( - sum, - mean, - all, - item, - ForAll, - SumOver, - MeanOver -) \ No newline at end of file +from .result import Problem +from .set_reasoning import count, excludes, in_range, related_to, scene, tagged +from .types import Node diff --git a/infinigen/core/constraints/constraint_language/expression.py b/infinigen/core/constraints/constraint_language/expression.py index 9dc8d2c1d..a585782e5 100644 --- a/infinigen/core/constraints/constraint_language/expression.py +++ b/infinigen/core/constraints/constraint_language/expression.py @@ -4,105 +4,121 @@ # Authors: Alexander Raistrick -import typing -import operator -from dataclasses import dataclass import functools -import math +import operator +import typing from 
.types import Node, nodedataclass -OPERATOR_ASSOCIATIVE = [ - operator.add, - operator.mul, - operator.and_, - max, - min -] +OPERATOR_ASSOCIATIVE = [operator.add, operator.mul, operator.and_, max, min] + @nodedataclass() class Expression(Node): - @classmethod def register_postfix_func(cls, expr_cls): @functools.wraps(expr_cls) def postfix_instantiator(self, *args, **kwargs): return expr_cls(self, *args, **kwargs) + setattr(cls, expr_cls.__name__, postfix_instantiator) return expr_cls - + + @nodedataclass() class ArithmethicExpression(Expression): pass + @nodedataclass() class ScalarExpression(ArithmethicExpression): - def minimize(self, *, weight: float): return self * constant(weight) - + def maximize(self, *, weight: float): return self * constant(-weight) - + def multiply(self, other): return ScalarOperatorExpression(operator.mul, [self, other]) + __mul__ = multiply def abs(self): return ScalarOperatorExpression(operator.abs, [self]) + __abs__ = abs - + def add(self, other): return ScalarOperatorExpression(operator.add, [self, other]) + __add__ = add def sub(self, other): return ScalarOperatorExpression(operator.sub, [self, other]) + __sub__ = sub - + def div(self, other): return ScalarOperatorExpression(operator.truediv, [self, other]) + __truediv__ = div - + + def safediv(self, other): + def safediv_impl(a, b): + if b == 0: + return 0 if a == 0 else 1 + return a / b + + return ScalarOperatorExpression(safediv_impl, [self, other]) + def pow(self, other): return ScalarOperatorExpression(operator.pow, [self, other]) + __pow__ = pow def equals(self, other): return BoolOperatorExpression(operator.eq, [self, other]) - __eq__ = equals - + + __eq__ = equals + def __ge__(self, other): return BoolOperatorExpression(operator.ge, [self, other]) + def __gt__(self, other): return BoolOperatorExpression(operator.gt, [self, other]) + def __le__(self, other): return BoolOperatorExpression(operator.le, [self, other]) + def __lt__(self, other): return BoolOperatorExpression(operator.lt, [self, other]) + def __ne__(self, other): return BoolOperatorExpression(operator.ne, [self, other]) - + def __neg__(self): return self * constant(-1) def clamp_min(self, other): return max_expr(self, other) + def clamp_max(self, other): return min_expr(self, other) + def max_expr(*args): return ScalarOperatorExpression(max, args) + def min_expr(*args): return ScalarOperatorExpression(min, args) + @nodedataclass() class BoolExpression(ArithmethicExpression): - def __mul__(self, other): return BoolOperatorExpression(operator.and_, [self, other]) - + @nodedataclass() class constant(ScalarExpression): @@ -114,6 +130,7 @@ def __post_init__(self): def __call__(self): return self.value + def _preprocess_operands(operands): def cast_to_node(x): match x: @@ -122,11 +139,12 @@ def cast_to_node(x): case x if isinstance(x, (bool | float | int)): return constant(x) case _: - raise ValueError(f'Unsupported operand type {type(x)=} {x=}') + raise ValueError(f"Unsupported operand type {type(x)=} {x=}") + return [cast_to_node(x) for x in operands] -def _collapse_associative(self, operands): +def _collapse_associative(self, operands): if self.func not in OPERATOR_ASSOCIATIVE: return operands @@ -138,9 +156,9 @@ def _collapse_associative(self, operands): new_operands.append(op) return new_operands + @nodedataclass() class BoolOperatorExpression(BoolExpression): - func: typing.Callable operands: list[Expression] @@ -150,7 +168,7 @@ def __post_init__(self): def children(self): for i, v in enumerate(self.operands): - yield f'operands[{i}]', v 
+ yield f"operands[{i}]", v def __call__(self) -> typing.Any: return self.func(*[x() for x in self.operands]) @@ -158,7 +176,6 @@ def __call__(self) -> typing.Any: @nodedataclass() class ScalarOperatorExpression(ScalarExpression): - func: typing.Callable operands: list[Expression] @@ -169,14 +186,15 @@ def __post_init__(self): def children(self): for i, v in enumerate(self.operands): - yield f'operands[{i}]', v + yield f"operands[{i}]", v def __call__(self) -> typing.Any: return self.func(*[x() for x in self.operands]) - + + @ScalarExpression.register_postfix_func @nodedataclass() class hinge(ScalarExpression): val: ScalarExpression low: float - high: float \ No newline at end of file + high: float diff --git a/infinigen/core/constraints/constraint_language/gather.py b/infinigen/core/constraints/constraint_language/gather.py index 071516d19..f70bad3e6 100644 --- a/infinigen/core/constraints/constraint_language/gather.py +++ b/infinigen/core/constraints/constraint_language/gather.py @@ -5,63 +5,60 @@ # Authors: Alexander Raistrick import typing -from dataclasses import dataclass, field -from infinigen.core import tags as t -from .relations import Relation from .expression import BoolExpression, ScalarExpression, nodedataclass from .geometry import ObjectSetExpression + @nodedataclass() class item(ObjectSetExpression): name: str member_of: ObjectSetExpression def __repr__(self): - return f'item({self.name})' + return f"item({self.name})" def children(self): # member_of is metadata, should not be treated as a child return [] + @nodedataclass() class ForAll(BoolExpression): objs: ObjectSetExpression var: str pred: BoolExpression -@ObjectSetExpression.register_postfix_func + +@ObjectSetExpression.register_postfix_func def all( - objs: ObjectSetExpression, - pred: typing.Callable[[item], BoolExpression] + objs: ObjectSetExpression, pred: typing.Callable[[item], BoolExpression] ) -> BoolExpression: - var = 'var_all_' + str(id(pred)) + var = "var_all_" + str(id(pred)) return ForAll(objs, var, pred(item(var, objs))) + @nodedataclass() class SumOver(ScalarExpression): objs: ObjectSetExpression var: str pred: ScalarExpression + @ObjectSetExpression.register_postfix_func -def sum( - objs: ObjectSetExpression, - pred: typing.Callable[[item], ScalarExpression] -): - var = 'var_sum_' + str(id(pred)) +def sum(objs: ObjectSetExpression, pred: typing.Callable[[item], ScalarExpression]): + var = "var_sum_" + str(id(pred)) return SumOver(objs, var, pred(item(var, objs))) + @nodedataclass() class MeanOver(ScalarExpression): objs: ObjectSetExpression var: str pred: ScalarExpression -@ObjectSetExpression.register_postfix_func -def mean( - objs: ObjectSetExpression, - pred: typing.Callable[[item], ScalarExpression] -): - var = 'var_mean_' + str(id(pred)) - return MeanOver(objs, var, pred(item(var, objs))) \ No newline at end of file + +@ObjectSetExpression.register_postfix_func +def mean(objs: ObjectSetExpression, pred: typing.Callable[[item], ScalarExpression]): + var = "var_mean_" + str(id(pred)) + return MeanOver(objs, var, pred(item(var, objs))) diff --git a/infinigen/core/constraints/constraint_language/geometry.py b/infinigen/core/constraints/constraint_language/geometry.py index 2e4fd8851..b34dd3c5e 100644 --- a/infinigen/core/constraints/constraint_language/geometry.py +++ b/infinigen/core/constraints/constraint_language/geometry.py @@ -3,20 +3,19 @@ # Authors: Karhan Kayan -import typing -from dataclasses import dataclass, field +from dataclasses import field import numpy as np -from infinigen.core 
import tags as t -from .relations import Relation -from .expression import Expression, BoolExpression, ScalarExpression, nodedataclass +from .expression import ScalarExpression, nodedataclass from .set_reasoning import ObjectSetExpression + @nodedataclass() class center_stable_surface_dist(ScalarExpression): objs: ObjectSetExpression + @nodedataclass() class accessibility_cost(ScalarExpression): objs: ObjectSetExpression @@ -29,6 +28,7 @@ def __post_init__(self): self.normal = np.array(self.normal) assert isinstance(self.normal, np.ndarray) + @ObjectSetExpression.register_postfix_func @nodedataclass() class distance(ScalarExpression): @@ -41,15 +41,18 @@ def __post_init__(self): assert isinstance(self.others, ObjectSetExpression) assert isinstance(self.others_tags, set) + @nodedataclass() class min_distance_internal(ScalarExpression): objs: ObjectSetExpression - + + @nodedataclass() class focus_score(ScalarExpression): objs: ObjectSetExpression others: ObjectSetExpression + @nodedataclass() class angle_alignment_cost(ScalarExpression): objs: ObjectSetExpression @@ -61,32 +64,38 @@ def __post_init__(self): self.others_tags = set() assert isinstance(self.others_tags, set), type(self.others_tags) + @nodedataclass() class freespace_2d(ScalarExpression): objs: ObjectSetExpression others: ObjectSetExpression + @nodedataclass() class min_dist_2d(ScalarExpression): objs: ObjectSetExpression others: ObjectSetExpression + @nodedataclass() class rotational_asymmetry(ScalarExpression): objs: ObjectSetExpression + @nodedataclass() class reflectional_asymmetry(ScalarExpression): objs: ObjectSetExpression others: ObjectSetExpression use_long_plane: bool = True + @ObjectSetExpression.register_postfix_func @nodedataclass() class volume(ScalarExpression): objs: ObjectSetExpression dims: int | tuple = 3 + @nodedataclass() class coplanarity_cost(ScalarExpression): objs: ObjectSetExpression diff --git a/infinigen/core/constraints/constraint_language/relations.py b/infinigen/core/constraints/constraint_language/relations.py index 74483d0b0..399e43a24 100644 --- a/infinigen/core/constraints/constraint_language/relations.py +++ b/infinigen/core/constraints/constraint_language/relations.py @@ -6,28 +6,27 @@ from __future__ import annotations +import logging from abc import ABC, abstractmethod +from copy import deepcopy from dataclasses import dataclass, field, fields from enum import Enum -from typing import Optional , Union -from copy import deepcopy -import logging from infinigen.core import tags as t from infinigen.core.constraints import constraint_language as cl logger = logging.getLogger(__name__) + @dataclass(frozen=True) class Relation(ABC): - @abstractmethod def implies(self, other) -> bool: """ self must imply all parts of other, both positive and negative. 
""" pass - + @abstractmethod def satisfies(self, other: Relation) -> bool: """ @@ -42,46 +41,46 @@ def intersects(self, other, strict=False) -> bool: @abstractmethod def intersection(self, other: Relation) -> Relation: pass - + @abstractmethod def difference(self, other: Relation) -> Relation: pass - + def __neg__(self) -> Relation: return NegatedRelation(self) - + + @dataclass(frozen=True) class AnyRelation(Relation): - def implies(self, other) -> bool: return other.__class__ is AnyRelation - + def satisfies(self, other: cl.Relation) -> bool: return other.__class__ is AnyRelation - + def intersects(self, _other: Relation, strict=False) -> bool: return True def intersection(self, other: Relation) -> Relation: return deepcopy(other) - + def difference(self, other: Relation): return -other + @dataclass(frozen=True) class NegatedRelation(Relation): - rel: Relation def __repr__(self): - return f'-{self.rel}' - + return f"-{self.rel}" + def __str__(self): - return f'{self.__class__.__name__}({self.rel})' - + return f"{self.__class__.__name__}({self.rel})" + def __neg__(self) -> Relation: return self.rel - + def implies(self, other: Relation) -> bool: match other: case AnyRelation(): @@ -89,11 +88,10 @@ def implies(self, other: Relation) -> bool: case NegatedRelation(rel): return self.rel.implies(rel) case _: - return ( - not self.rel.implies(other) - and not self.intersects(other, strict=True) + return not self.rel.implies(other) and not self.intersects( + other, strict=True ) - + def satisfies(self, other: cl.Relation) -> bool: match other: case AnyRelation(): @@ -101,13 +99,11 @@ def satisfies(self, other: cl.Relation) -> bool: case NegatedRelation(rel): return self.rel.satisfies(rel) case _: - return ( - not self.rel.satisfies(other) - and not self.intersects(other, strict=True) + return not self.rel.satisfies(other) and not self.intersects( + other, strict=True ) - - def intersects(self, other: Relation, strict=False) -> bool: + def intersects(self, other: Relation, strict=False) -> bool: match other: case NegatedRelation(rel): if isinstance(self.rel, AnyRelation) or isinstance(rel, AnyRelation): @@ -116,62 +112,57 @@ def intersects(self, other: Relation, strict=False) -> bool: return True case _: # implementationn depends on other's type, let them handle it - return other.intersects(self, strict=strict) - + return other.intersects(self, strict=strict) + def intersection(self, other: Relation) -> Relation: return self.rel.difference(other) - + def difference(self, other: Relation) -> Relation: return self.rel.intersection(other) + class ConnectorType(Enum): Door = "door" Open = "open" Wall = "wall" + @dataclass(frozen=True) class RoomNeighbour(Relation): connector_types: frozenset[ConnectorType] = field(default_factory=frozenset) def __post_init__(self): if self.connector_types is not None: - object.__setattr__(self, 'connector_types', frozenset(self.connector_types)) - + object.__setattr__(self, "connector_types", frozenset(self.connector_types)) def implies(self, other: Relation) -> bool: - if isinstance(other, AnyRelation): return True - return ( - isinstance(other, RoomNeighbour) - and self.connector_types.issuperset(other.connector_types) + return isinstance(other, RoomNeighbour) and self.connector_types.issuperset( + other.connector_types ) - + def satisfies(self, other: Relation) -> bool: return self.implies(other) - - def intersects(self, other: Relation, strict=False) -> bool: + def intersects(self, other: Relation, strict=False) -> bool: if isinstance(other, AnyRelation): return 
True - return ( - isinstance(other, RoomNeighbour) - and not self.connector_types.isdisjoint(other.connector_types) + return isinstance(other, RoomNeighbour) and not self.connector_types.isdisjoint( + other.connector_types ) - - def intersection(self, other: Relation) -> Relation: + def intersection(self, other: Relation) -> Relation: if isinstance(other, AnyRelation): return deepcopy(self) return self.__class__( connector_types=self.connector_types.intersection(other.connector_types) ) - - def difference(self, other: Relation) -> Relation: + def difference(self, other: Relation) -> Relation: if isinstance(other, AnyRelation): return -AnyRelation() @@ -181,9 +172,14 @@ def difference(self, other: Relation) -> Relation: def no_frozenset_repr(self: GeometryRelation): - is_neg = lambda x: isinstance(x, t.Negated) - setrepr = lambda s: f'{{{", ".join(repr(x) for x in sorted(list(s), key=is_neg))}}}' - return f'{self.__class__.__name__}({setrepr(self.child_tags)}, {setrepr(self.parent_tags)})' + def is_neg(x): + return isinstance(x, t.Negated) + + def setrepr(s): + return f"{{{', '.join(repr(x) for x in sorted(list(s), key=is_neg))}}}" + + return f"{self.__class__.__name__}({setrepr(self.child_tags)}, {setrepr(self.parent_tags)})" + @dataclass(frozen=True) class GeometryRelation(Relation): @@ -191,38 +187,36 @@ class GeometryRelation(Relation): parent_tags: frozenset[t.Subpart] = field(default_factory=frozenset) __repr__ = no_frozenset_repr - + def __post_init__(self): # allow the user to init with sets that subsequently get frozen # use object.__setattr__ to bypass dataclass's frozen since it is guaranteed safe here - object.__setattr__(self, 'child_tags', frozenset(self.child_tags)) - object.__setattr__(self, 'parent_tags', frozenset(self.parent_tags)) + object.__setattr__(self, "child_tags", frozenset(self.child_tags)) + object.__setattr__(self, "parent_tags", frozenset(self.parent_tags)) - def _extra_fields(self) -> list[str]: - """Return any fields added by subclasses. Useful for implementing implies/intersects + def _extra_fields(self) -> list[str]: + """Return any fields added by subclasses. Useful for implementing implies/intersects which must check these fields regardless of inheritance. TODO, Hacky. 
""" return [ - f.name - for f in fields(self) - if f.name not in ['child_tags', 'parent_tags'] + f.name for f in fields(self) if f.name not in ["child_tags", "parent_tags"] ] - def _compatibility_checks(self, other: GeometryRelation, strict_on_fields=False) -> bool: - + def _compatibility_checks( + self, other: GeometryRelation, strict_on_fields=False + ) -> bool: if not issubclass(other.__class__, self.__class__): return False if strict_on_fields: for k in self._extra_fields(): if not getattr(self, k) == getattr(other, k): - #logger.warning(f'{self._compatibility_checks} ignoring mismatch {k=} for {other=}') + # logger.warning(f'{self._compatibility_checks} ignoring mismatch {k=} for {other=}') return False return True def implies(self, other: Relation) -> bool: - match other: case AnyRelation(): return True @@ -230,28 +224,27 @@ def implies(self, other: Relation) -> bool: return False case GeometryRelation(ochild, oparent): if not self._compatibility_checks(other): - logger.debug(f'{self.implies} failed compatibility for %s', other) + # logger.debug(f"{self.implies} failed compatibility for %s", other) return False if not t.implies(self.child_tags, ochild): - logger.debug(f'{self.implies} failed child tags for %s', other) + # logger.debug(f"{self.implies} failed child tags for %s", other) return False if not t.implies(self.parent_tags, oparent): - logger.debug(f'{self.implies} failed parent tags for %s', other) + # logger.debug(f"{self.implies} failed parent tags for %s", other) return False return True case NegatedRelation(GeometryRelation(ochild, oparent)): if not self._compatibility_checks(other.rel): - logger.debug(f'{self.implies} failed compatibility for %s', other) + logger.debug(f"{self.implies} failed compatibility for %s", other) return False - if ( - t.implies(self.child_tags, {-t for t in ochild}) - and t.implies(self.parent_tags, {-t for t in oparent}) + if t.implies(self.child_tags, {-t for t in ochild}) and t.implies( + self.parent_tags, {-t for t in oparent} ): return True return False case _: - raise ValueError(f'{self.implies} encountered unhandled {other=}') - + raise ValueError(f"{self.implies} encountered unhandled {other=}") + def satisfies(self, other: Relation) -> bool: match other: case AnyRelation(): @@ -260,37 +253,35 @@ def satisfies(self, other: Relation) -> bool: return False case GeometryRelation(ochild, oparent): if not self._compatibility_checks(other): - logger.debug(f'{self.satisfies} failed compatibility for %s', other) + logger.debug(f"{self.satisfies} failed compatibility for %s", other) return False if not t.satisfies(self.child_tags, ochild): - logger.debug(f'{self.satisfies} failed child tags for %s', other) + logger.debug(f"{self.satisfies} failed child tags for %s", other) return False if not t.satisfies(self.parent_tags, oparent): - logger.debug(f'{self.satisfies} failed parent tags for %s', other) + logger.debug(f"{self.satisfies} failed parent tags for %s", other) return False return True case NegatedRelation(GeometryRelation(ochild, oparent)): if not self._compatibility_checks(other.rel): - logger.debug(f'{self.implies} failed compatibility for %s', other) + logger.debug(f"{self.implies} failed compatibility for %s", other) return False - if ( - t.satisfies(self.child_tags, {-t for t in ochild}) - and t.satisfies(self.parent_tags, {-t for t in oparent}) + if t.satisfies(self.child_tags, {-t for t in ochild}) and t.satisfies( + self.parent_tags, {-t for t in oparent} ): return True return False case _: - raise ValueError(f'{self.satisfies} 
encountered unhandled {other=}')
-    
-    def intersects(self, other: Relation, strict=False) -> bool:
+                raise ValueError(f"{self.satisfies} encountered unhandled {other=}")
 
+    def intersects(self, other: Relation, strict=False) -> bool:
        def tags_compatible(a, b):
            if strict:
                return t.implies(a, b) or t.implies(b, a)
            else:
                return not t.contradiction(a.union(b))
-        
-        logger.debug(f'{self.intersects} other=%s', other)
+
+        logger.debug(f"{self.intersects} other=%s", other)
 
        match other:
            case AnyRelation():
@@ -299,13 +290,28 @@ def tags_compatible(a, b):
                return False
            case GeometryRelation(ochild, oparent):
                if not self._compatibility_checks(other):
-                    logger.debug(f'{self.intersects} failed compatibility for other=%s', other)
+                    logger.debug(
+                        "%s failed compatibility_checks for self=%s, other=%s",
+                        self.intersects.__name__,
+                        self.child_tags,
+                        other,
+                    )
                    return False
                if not tags_compatible(self.child_tags, ochild):
-                    logger.debug(f'{self.intersects} failed child tags for other=%s', other)
+                    logger.debug(
+                        "%s failed child tags for self=%s, other=%s",
+                        self.intersects.__name__,
+                        self.child_tags,
+                        other,
+                    )
                    return False
                if not tags_compatible(self.parent_tags, oparent):
-                    logger.debug('{self.intersects} failed parent tags for other=%s', other)
+                    logger.debug(
+                        "%s failed parent tags for self=%s, other=%s",
+                        self.intersects.__name__,
+                        self.child_tags,
+                        other,
+                    )
                    return False
                return True
            case NegatedRelation(GeometryRelation()):
@@ -313,13 +319,14 @@ def tags_compatible(a, b):
                # true unless other.rel->self
                return not other.rel.implies(self)
            case _:
-                logger.warning(f'{self.intersects} encountered unhandled %s, returning False', other)
+                logger.warning(
+                    f"{self.intersects} encountered unhandled %s, returning False",
+                    other,
+                )
                return False
-    
-    def intersection(self: Relation, other: Relation) -> Relation:
-        """ TODO: There are potentially many intersections of relations with negations. 
-        """
+    def intersection(self: Relation, other: Relation) -> Relation:
+        """TODO: There are potentially many intersections of relations with negations."""
 
        match other:
            case AnyRelation():
@@ -328,19 +335,22 @@ def intersection(self: Relation, other: Relation) -> Relation:
                return self.difference(rel)
            case GeometryRelation(ochild, oparent):
                if not self._compatibility_checks(other):
-                    logger.warning(f'{self.intersection} failed compatibility for {other=}')
+                    logger.warning(
+                        f"{self.intersection} failed compatibility for {other=}"
+                    )
                    return -AnyRelation()
                return self.__class__(
                    child_tags=self.child_tags.union(ochild),
                    parent_tags=self.parent_tags.union(oparent),
-                    **{k: getattr(self, k) for k in self._extra_fields()}
+                    **{k: getattr(self, k) for k in self._extra_fields()},
                )
            case _:
-                logger.warning(f'Encountered unhandled {other=} for {self.intersection}')
+                logger.warning(
+                    f"Encountered unhandled {other=} for {self.intersection}"
+                )
                return -AnyRelation()
-    
-    def difference(self: Relation, other: Relation) -> Relation:
+
+    def difference(self: Relation, other: Relation) -> Relation:
        match other:
            case AnyRelation():
                return -AnyRelation()
@@ -349,21 +359,23 @@ def difference(self: Relation, other: Relation) -> Relation:
            case GeometryRelation(ochild, oparent):
                if not self.intersects(other):
                    return deepcopy(self)
-                if (
-                    t.implies(self.child_tags, ochild)
-                    and t.implies(self.parent_tags, oparent)
+                if t.implies(self.child_tags, ochild) and t.implies(
+                    self.parent_tags, oparent
                ):
                    return -AnyRelation()
-            
+
                return self.__class__(
                    child_tags=t.difference(self.child_tags, ochild),
                    parent_tags=t.difference(self.parent_tags, oparent),
-                    **{k: getattr(self, k) for k in self._extra_fields()}
+                    **{k: getattr(self, k) for k in self._extra_fields()},
                )
            case _:
-                logger.warning(f'Encountered unhandled {other=} for {self.intersection}')
+                logger.warning(
+                    f"Encountered unhandled {other=} for {self.intersection}"
+                )
                return -AnyRelation()
-    
+
+
@dataclass(frozen=True)
class Touching(GeometryRelation):
    __repr__ = no_frozenset_repr
@@ -377,38 +389,31 @@ class SupportedBy(Touching):
@dataclass(frozen=True)
class StableAgainst(GeometryRelation):
    margin: float = 0
-    
-    # check_ if False, only check x/z stability, z is allowed to overhand. 
+
+    # check_z: if False, only check x/z stability, z is allowed to overhang.
    # typical use is chair-against-table relation
-    check_z: bool = True
+    check_z: bool = True
 
-    # rev_normal: if True, align the normals so they face the SAME direction, rather than two planes facing eachother. 
# typical use is for sink embedded in countertop - rev_normal: bool = False + rev_normal: bool = False __repr__ = no_frozenset_repr @dataclass(frozen=True) class CutFrom(Relation): - def implies(self, other: Relation) -> bool: - return ( - isinstance(other, AnyRelation) - or isinstance(other, CutFrom) - ) - + return isinstance(other, AnyRelation) or isinstance(other, CutFrom) + def satisfies(self, other: Relation) -> bool: return self.implies(other) - + def intersects(self, other: Relation, strict=False) -> bool: - return ( - isinstance(other, AnyRelation) - or isinstance(other, CutFrom) - ) - + return isinstance(other, AnyRelation) or isinstance(other, CutFrom) + def intersection(self, other: Relation) -> Relation: return deepcopy(self) - + def difference(self, other: Relation) -> Relation: return -AnyRelation() diff --git a/infinigen/core/constraints/constraint_language/result.py b/infinigen/core/constraints/constraint_language/result.py index f58c59c4d..136c39424 100644 --- a/infinigen/core/constraints/constraint_language/result.py +++ b/infinigen/core/constraints/constraint_language/result.py @@ -4,22 +4,17 @@ # Authors: Alexander Raistrick -import typing -from dataclasses import dataclass, field -import numpy as np - -from .types import Node from .expression import BoolExpression, ScalarExpression, nodedataclass +from .types import Node + @nodedataclass() class Problem(Node): - constraints: dict[str, BoolExpression] - score_terms: dict[str, ScalarExpression] + score_terms: dict[str, ScalarExpression] def __post_init__(self): - if isinstance(self.constraints, list): self.constraints = {i: c for i, c in enumerate(self.constraints)} if isinstance(self.score_terms, list): @@ -27,6 +22,6 @@ def __post_init__(self): def children(self): for i, v in enumerate(self.constraints.values()): - yield f'constraints[{i}]', v + yield f"constraints[{i}]", v for i, v in enumerate(self.score_terms.values()): - yield f'score_terms[{i}]', v \ No newline at end of file + yield f"score_terms[{i}]", v diff --git a/infinigen/core/constraints/constraint_language/set_reasoning.py b/infinigen/core/constraints/constraint_language/set_reasoning.py index b87df0e36..4febb1ee0 100644 --- a/infinigen/core/constraints/constraint_language/set_reasoning.py +++ b/infinigen/core/constraints/constraint_language/set_reasoning.py @@ -4,24 +4,26 @@ # Authors: Alexander Raistrick -import typing -from dataclasses import dataclass, field +from dataclasses import field from infinigen.core import tags as t from infinigen.core.constraints import usage_lookup -from .relations import Relation, AnyRelation -from .expression import Expression, BoolExpression, ScalarExpression, nodedataclass + +from .expression import BoolExpression, Expression, ScalarExpression, nodedataclass +from .relations import AnyRelation, Relation + @nodedataclass() class ObjectSetExpression(Expression): - def __getitem__(self, key): return tagged(self, key) + @nodedataclass() class scene(ObjectSetExpression): pass + @ObjectSetExpression.register_postfix_func @nodedataclass() class tagged(ObjectSetExpression): @@ -31,16 +33,16 @@ class tagged(ObjectSetExpression): def __post_init__(self): self.tags = t.to_tag_set(self.tags, fac_context=usage_lookup._factory_lookup) - + @ObjectSetExpression.register_postfix_func def excludes(objs, tags): - # syntactic helper - assume people wont construct obvious contradictions if isinstance(objs, tagged): tags = tags.difference(objs.tags) - return tagged(objs, {t.Negated(x) for x in tags}) - + return tagged(objs, {t.Negated(x) for 
x in tags}) + + @ObjectSetExpression.register_postfix_func @nodedataclass() class related_to(ObjectSetExpression): @@ -50,11 +52,16 @@ class related_to(ObjectSetExpression): def __post_init__(self): if not isinstance(self.child, ObjectSetExpression): - raise TypeError(f'related_to got {self.child=}, must be an ObjectSetExpression') + raise TypeError( + f"related_to got {self.child=}, must be an ObjectSetExpression" + ) if not isinstance(self.parent, ObjectSetExpression): - raise TypeError(f'related_to got {self.parent=}, must be an ObjectSetExpression') + raise TypeError( + f"related_to got {self.parent=}, must be an ObjectSetExpression" + ) if not isinstance(self.relation, Relation): - raise TypeError(f'related_to got {self.relation=}, must be a Relation') + raise TypeError(f"related_to got {self.relation=}, must be a Relation") + @ObjectSetExpression.register_postfix_func @nodedataclass() @@ -63,7 +70,7 @@ class count(ScalarExpression): def __post_init__(self): if not isinstance(self.objs, ObjectSetExpression): - raise TypeError(f'count got {self.objs=}, must be an ObjectSetExpression') + raise TypeError(f"count got {self.objs=}, must be an ObjectSetExpression") @ScalarExpression.register_postfix_func @@ -75,9 +82,8 @@ class in_range(BoolExpression): def __post_init__(self): if not isinstance(self.val, ScalarExpression): - raise TypeError(f'in_range got {self.val=}, must be a ScalarExpression') + raise TypeError(f"in_range got {self.val=}, must be a ScalarExpression") if not isinstance(self.low, (int, float)): - raise TypeError(f'in_range got {self.low=}, must be a number') + raise TypeError(f"in_range got {self.low=}, must be a number") if not isinstance(self.high, (int, float)): - raise TypeError(f'in_range got {self.high=}, must be a number') - + raise TypeError(f"in_range got {self.high=}, must be a number") diff --git a/infinigen/core/constraints/constraint_language/types.py b/infinigen/core/constraints/constraint_language/types.py index d6e07686d..494e2d244 100644 --- a/infinigen/core/constraints/constraint_language/types.py +++ b/infinigen/core/constraints/constraint_language/types.py @@ -4,30 +4,30 @@ # Authors: Alexander Raistrick -from enum import Enum -from typing import Any from dataclasses import dataclass -import functools nodedataclass_kwargs = dict(eq=False, order=False) + def _nodeclass_bool_throw(self): raise RuntimeError( - f'Attempted to convert {self.__class__} to bool, ' + f"Attempted to convert {self.__class__} to bool, " f"truth value of {self} is ambiguous. Constraint language must use * instead of `and`, etc since python bool ops are not overridable" ) + def nodedataclass(frozen=False): def decorator(cls): ddec = dataclass(eq=False, order=False, frozen=frozen) cls = ddec(cls) cls.__bool__ = _nodeclass_bool_throw return cls + return decorator + @nodedataclass() class Node: - def children(self): for k, v in self.__dict__.items(): if isinstance(v, Node): diff --git a/infinigen/core/constraints/constraint_language/util.py b/infinigen/core/constraints/constraint_language/util.py index 8d5d0bffc..7720d4fbf 100644 --- a/infinigen/core/constraints/constraint_language/util.py +++ b/infinigen/core/constraints/constraint_language/util.py @@ -1,51 +1,54 @@ # Copyright (c) Princeton University. # This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
-# Authors: Karhan Kayan -from typing import Union -import random -import math import functools import logging +import math +import random + +# Authors: Karhan Kayan +from typing import Union -import bpy -import mathutils -from trimesh import Trimesh, Scene -import trimesh -from shapely import LineString, Point, Polygon, MultiPolygon -import numpy as np -from sklearn.decomposition import PCA import bpy import fcl - -from mathutils import Matrix, Vector import gin +import numpy as np +import trimesh +from mathutils import Matrix, Vector +from shapely import LineString, MultiPolygon, Point, Polygon +from sklearn.decomposition import PCA +from trimesh import Scene +from infinigen.core import tagging from infinigen.core.util import blender as butil -from infinigen.core import tagging, tags as t logger = logging.getLogger(__name__) + @gin.configurable def bvh_caching_config(enabled=True): return enabled + @functools.cache def group(scene, x): if isinstance(x, (list, set)): x = tuple(x) return subset(scene, x) + def meshes_from_names(scene, names): if isinstance(names, str): names = [names] return [scene.geometry[g] for _, g in (scene.graph[n] for n in names)] + def blender_objs_from_names(names): if isinstance(names, str): names = [names] return [bpy.data.objects[n] for n in names] + def name_from_mesh(scene, mesh): mesh_name = None for name, mesh in scene.geometry.items(): @@ -54,44 +57,48 @@ def name_from_mesh(scene, mesh): break return mesh_name + def project_to_xy_path2d(mesh: trimesh.Trimesh) -> trimesh.path.Path2D: - poly = trimesh.path.polygons.projected(mesh, (0,0,1), (0,0,0)) + poly = trimesh.path.polygons.projected(mesh, (0, 0, 1), (0, 0, 0)) d = trimesh.path.exchange.misc.polygon_to_path(poly) - return trimesh.path.Path2D(entities = d['entities'], vertices = d['vertices']) + return trimesh.path.Path2D(entities=d["entities"], vertices=d["vertices"]) + def project_to_xy_poly(mesh: trimesh.Trimesh): - poly = trimesh.path.polygons.projected(mesh, (0,0,1), (0,0,0)) + poly = trimesh.path.polygons.projected(mesh, (0, 0, 1), (0, 0, 0)) return poly def closest_edge_to_point_poly(polygon, point): - closest_distance = float('inf') + closest_distance = float("inf") closest_edge = None for i, coord in enumerate(polygon.exterior.coords[:-1]): start, end = coord, polygon.exterior.coords[i + 1] line = LineString([start, end]) distance = line.distance(point) - + if distance < closest_distance: closest_distance = distance closest_edge = line return closest_edge + def closest_edge_to_point_edge_list(edge_list: list[LineString], point): - closest_distance = float('inf') + closest_distance = float("inf") closest_edge = None for line in edge_list: distance = line.distance(point) - + if distance < closest_distance: closest_distance = distance closest_edge = line return closest_edge + def compute_outward_normal(line, polygon): dx = line.xy[0][1] - line.xy[0][0] # x1 - x0 dy = line.xy[1][1] - line.xy[1][0] # y1 - y0 @@ -103,20 +110,20 @@ def compute_outward_normal(line, polygon): # Normalize the vectors (optional but recommended for consistency) normal_vector_1 = normal_vector_1 / np.linalg.norm(normal_vector_1) normal_vector_2 = normal_vector_2 / np.linalg.norm(normal_vector_2) - + # Midpoint of the line segment mid_point = line.interpolate(0.5, normalized=True) - + # Move a tiny bit in the direction of the normals to check which points outside test_point_1 = mid_point.coords[0] + 0.01 * normal_vector_1 - test_point_2 = mid_point.coords[0] + 0.01 * normal_vector_2 - + mid_point.coords[0] + 0.01 * 
normal_vector_2 + # Return the normal for which the test point lies outside the polygon if polygon.contains(Point(test_point_1)): return normal_vector_2 else: return normal_vector_1 - + def get_transformed_axis(scene, obj_name): obj = bpy.data.objects[obj_name] @@ -135,8 +142,6 @@ def set_axis(scene, objs: Union[str, list[str]], canonical_axis): obj.axis = get_transformed_axis(scene, obj_name) - - def get_plane_from_3dmatrix(matrix): """Extract the plane_normal and plane_origin from a transformation matrix.""" # The normal of the plane can be extracted from the 3x3 rotation part of the matrix @@ -150,6 +155,7 @@ def project_points_onto_plane(points, plane_origin, plane_normal): d = np.dot(points - plane_origin, plane_normal)[:, None] return points - d * plane_normal + def to_2d_coordinates(points, plane_normal): """Convert 3D points to 2D using the plane defined by its normal.""" # Compute two perpendicular vectors on the plane @@ -159,7 +165,7 @@ def to_2d_coordinates(points, plane_normal): u /= np.linalg.norm(u) v = np.cross(plane_normal, u) v /= np.linalg.norm(v) - + # Convert 3D points to 2D using dot products return np.column_stack([points.dot(u), points.dot(v)]) @@ -171,12 +177,18 @@ def ensure_correct_order(points): """ # Calculate signed area n = len(points) - area = sum((points[i][0] * points[(i+1)%n][1]) - (points[(i+1)%n][0] * points[i][1]) for i in range(n)) / 2.0 + area = ( + sum( + (points[i][0] * points[(i + 1) % n][1]) + - (points[(i + 1) % n][0] * points[i][1]) + for i in range(n) + ) + / 2.0 + ) # Return the points in reverse order if area is negative return points[::-1] if area < 0 else points - def sample_random_point(polygon): """ Sample a random point from inside the given Shapely polygon. @@ -186,7 +198,8 @@ def sample_random_point(polygon): p = Point(random.uniform(minx, maxx), random.uniform(miny, maxy)) if polygon.contains(p): return p - + + def delete_obj(scene, a, delete_blender=True): if isinstance(a, str): a = [a] @@ -195,20 +208,22 @@ def delete_obj(scene, a, delete_blender=True): butil.delete(obj_list) for obj_name in a: # bpy.data.objects.remove(bpy.data.objects[obj_name], do_unlink=True) - if scene: + if scene: scene.graph.transforms.remove_node(obj_name) - scene.delete_geometry(obj_name + '_mesh') + scene.delete_geometry(obj_name + "_mesh") def global_vertex_coordinates(obj, local_vertex) -> Vector: return obj.matrix_world @ local_vertex.co + def global_polygon_normal(obj, polygon): loc, rot, scale = obj.matrix_world.decompose() rot = rot.to_matrix() normal = rot @ polygon.normal return normal / np.linalg.norm(normal) + def is_planar(obj, tolerance=1e-6): if len(obj.data.polygons) != 1: return False @@ -221,14 +236,17 @@ def is_planar(obj, tolerance=1e-6): # Check if all vertices lie on the plane defined by the reference vertex and the global normal for vertex in obj.data.vertices: - distance = (global_vertex_coordinates(obj, vertex) - ref_vertex).dot(global_normal) + distance = (global_vertex_coordinates(obj, vertex) - ref_vertex).dot( + global_normal + ) if not math.isclose(distance, 0, abs_tol=tolerance): return False return True + def planes_parallel(plane_obj_a, plane_obj_b, tolerance=1e-6): - if plane_obj_a.type != 'MESH' or plane_obj_b.type != 'MESH': + if plane_obj_a.type != "MESH" or plane_obj_b.type != "MESH": raise ValueError("Both objects should be of type 'MESH'") # # Check if the objects are planar @@ -240,16 +258,17 @@ def planes_parallel(plane_obj_a, plane_obj_b, tolerance=1e-6): dot_product = global_normal_a.dot(global_normal_b) - return 
math.isclose(dot_product, 1, abs_tol=tolerance) or math.isclose(dot_product, -1, abs_tol=tolerance) + return math.isclose(dot_product, 1, abs_tol=tolerance) or math.isclose( + dot_product, -1, abs_tol=tolerance + ) + - def distance_to_plane(point, plane_point, plane_normal): """Compute the distance from a point to a plane defined by a point and a normal.""" return abs((point - plane_point).dot(plane_normal)) def subset(scene: Scene, incl): - if isinstance(incl, str): incl = [incl] @@ -258,7 +277,7 @@ def subset(scene: Scene, incl): T, g = scene.graph[n] if g is None: continue - otags = scene.geometry[g].metadata['tags'] + otags = scene.geometry[g].metadata["tags"] if any(t in incl for t in otags): objs.append(n) @@ -267,17 +286,13 @@ def subset(scene: Scene, incl): return objs -def add_object_cached(col, - name, - col_obj, - fcl_obj): +def add_object_cached(col, name, col_obj, fcl_obj): geom = fcl_obj o = col_obj # # Add collision object to set if name in col._objs: col._manager.unregisterObject(col._objs[name]) - col._objs[name] = {'obj': o, - 'geom': geom} + col._objs[name] = {"obj": o, "geom": geom} # # store the name of the geometry col._names[id(geom)] = name @@ -285,12 +300,11 @@ def add_object_cached(col, col._manager.update() return o -def col_from_subset(scene, names, tags=None, bvh_cache=None): +def col_from_subset(scene, names, tags=None, bvh_cache=None): if isinstance(names, str): names = [names] - if bvh_cache is not None and bvh_caching_config(): tag_key = frozenset(tags) if tags is not None else None key = (frozenset(names), tag_key) @@ -307,7 +321,7 @@ def col_from_subset(scene, names, tags=None, bvh_cache=None): obj = blender_objs_from_names(name)[0] mask = tagging.tagged_face_mask(obj, tags) if not mask.any(): - logger.warning(f'{name=} had {mask.sum()=} for {tags=}') + logger.warning(f"{name=} had {mask.sum()=} for {tags=}") continue geom = geom.submesh(np.where(mask), append=True) T = trimesh.transformations.identity_matrix() @@ -319,7 +333,7 @@ def col_from_subset(scene, names, tags=None, bvh_cache=None): add_object_cached(col, name, geom.col_obj, geom.fcl_obj) if len(col._objs) == 0: - logger.debug(f'{names=} got no objs, returning None') + logger.debug(f"{names=} got no objs, returning None") col = None if bvh_cache is not None and bvh_caching_config(): @@ -327,19 +341,20 @@ def col_from_subset(scene, names, tags=None, bvh_cache=None): return col -def plot_geometry(ax, geom, color = 'blue'): + +def plot_geometry(ax, geom, color="blue"): if isinstance(geom, Polygon): x, y = geom.exterior.xy - ax.fill(x, y, alpha=0.5, fc=color, ec='black') + ax.fill(x, y, alpha=0.5, fc=color, ec="black") elif isinstance(geom, MultiPolygon): for sub_geom in geom: x, y = sub_geom.exterior.xy - ax.fill(x, y, alpha=0.5, fc=color, ec='black') + ax.fill(x, y, alpha=0.5, fc=color, ec="black") elif isinstance(geom, LineString): x, y = geom.xy ax.plot(x, y, color=color) elif isinstance(geom, Point): - ax.plot(geom.x, geom.y, 'o', color=color) + ax.plot(geom.x, geom.y, "o", color=color) def sync_trimesh(scene: trimesh.Scene, obj_name: str): @@ -353,16 +368,17 @@ def sync_trimesh(scene: trimesh.Scene, obj_name: str): t = fcl.Transform(T[:3, :3], T[:3, 3]) mesh.col_obj.setTransform(t) + def translate(scene: trimesh.Scene, a: str, translation): blender_obj = bpy.data.objects[a] blender_obj.location += Vector(translation) - if scene: + if scene: sync_trimesh(scene, a) - + def rotate(scene: trimesh.Scene, a: str, axis, angle): blender_obj = bpy.data.objects[a] - + rotation_matrix = 
trimesh.transformations.rotation_matrix(angle, axis) transform_matrix = Matrix(rotation_matrix).to_4x4() loc, rot, scale = blender_obj.matrix_world.decompose() @@ -371,7 +387,7 @@ def rotate(scene: trimesh.Scene, a: str, axis, angle): rot = rot.to_quaternion() blender_obj.matrix_world = Matrix.LocRotScale(loc, rot, scale) - if scene: + if scene: sync_trimesh(scene, a) @@ -379,18 +395,17 @@ def set_location(scene: trimesh.Scene, obj_name: str, location): blender_mesh = bpy.data.objects[obj_name] blender_mesh.location = location sync_trimesh(scene, obj_name) - def set_rotation(scene: trimesh.Scene, obj_name: str, rotation): blender_mesh = blender_objs_from_names(obj_name)[0] blender_mesh.rotation_euler = rotation sync_trimesh(scene, obj_name) - + # for debugging. does not actually find centroid def blender_centroid(a): - return np.mean([a.matrix_world @ v.co for v in a.data.vertices], axis = 0) + return np.mean([a.matrix_world @ v.co for v in a.data.vertices], axis=0) def order_objects_by_principal_axis(objects: list[bpy.types.Object]): @@ -401,4 +416,3 @@ def order_objects_by_principal_axis(objects: list[bpy.types.Object]): locations_projected = pca.transform(location_matrix) sorted_indices = np.argsort(locations_projected.ravel()) return [objects[i] for i in sorted_indices] - diff --git a/infinigen/core/constraints/evaluator/__init__.py b/infinigen/core/constraints/evaluator/__init__.py index 6300f519b..a653187a3 100644 --- a/infinigen/core/constraints/evaluator/__init__.py +++ b/infinigen/core/constraints/evaluator/__init__.py @@ -1,10 +1,2 @@ -from .evaluate import ( - evaluate_problem, - evaluate_node, - EvalResult -) -from .eval_memo import ( - evict_memo_for_move, - evict_memo_for_obj, - memo_key, -) \ No newline at end of file +from .eval_memo import evict_memo_for_move, evict_memo_for_obj, memo_key +from .evaluate import EvalResult, evaluate_node, evaluate_problem diff --git a/infinigen/core/constraints/evaluator/domain_contains.py b/infinigen/core/constraints/evaluator/domain_contains.py index f4a8b167a..1162dbcff 100644 --- a/infinigen/core/constraints/evaluator/domain_contains.py +++ b/infinigen/core/constraints/evaluator/domain_contains.py @@ -4,57 +4,46 @@ # Authors: Alexander Raistrick -import bpy - import logging -from tqdm import tqdm -from infinigen.core.constraints import ( - constraint_language as cl, - reasoning as r, -) -from infinigen.core.constraints.example_solver import state_def from infinigen.core import tags as t +from infinigen.core.constraints import constraint_language as cl +from infinigen.core.constraints import reasoning as r +from infinigen.core.constraints.example_solver import state_def logger = logging.getLogger(__name__) -def domain_contains( - dom: r.Domain, - state: state_def.State, - obj: state_def.ObjectState -): +def domain_contains(dom: r.Domain, state: state_def.State, obj: state_def.ObjectState): assert isinstance(dom, r.Domain), dom assert isinstance(obj, state_def.ObjectState), obj if not t.satisfies(obj.tags, dom.tags): - #logger.debug(f"domain_contains failed, {obj} does not satisfy {obj.tags}") + # logger.debug(f"domain_contains failed, {obj} does not satisfy {obj.tags}") return False for rel, dom in dom.relations: - if isinstance(rel, cl.NegatedRelation): if any( - relstate.relation.intersects(rel.rel) and - domain_contains(dom, state, state.objs[relstate.target_name]) + relstate.relation.intersects(rel.rel) + and domain_contains(dom, state, state.objs[relstate.target_name]) for relstate in obj.relations ): - 
#logger.debug(f"domain_contains failed, {obj} satisfies negative {rel} {dom}") + # logger.debug(f"domain_contains failed, {obj} satisfies negative {rel} {dom}") return False else: if not any( - relstate.relation.intersects(rel) and - domain_contains(dom, state, state.objs[relstate.target_name]) + relstate.relation.intersects(rel) + and domain_contains(dom, state, state.objs[relstate.target_name]) for relstate in obj.relations ): - #logger.debug(f"domain_contains failed, {obj} does not satisfy {rel} {dom}") + # logger.debug(f"domain_contains failed, {obj} does not satisfy {rel} {dom}") return False - + return True + def objkeys_in_dom(dom: r.Domain, curr: state_def.State): return [ - k for k, o in curr.objs.items() - if domain_contains(dom, curr, o) and o.active + k for k, o in curr.objs.items() if domain_contains(dom, curr, o) and o.active ] - \ No newline at end of file diff --git a/infinigen/core/constraints/evaluator/eval_memo.py b/infinigen/core/constraints/evaluator/eval_memo.py index 91d0ea0b5..06c7dba3e 100644 --- a/infinigen/core/constraints/evaluator/eval_memo.py +++ b/infinigen/core/constraints/evaluator/eval_memo.py @@ -6,16 +6,14 @@ import logging -from infinigen.core.constraints.example_solver.state_def import State, ObjectState -from infinigen.core.constraints.example_solver import moves - -from infinigen.core.constraints import ( - constraint_language as cl -) from infinigen.core import tags as t +from infinigen.core.constraints import constraint_language as cl +from infinigen.core.constraints.example_solver import moves +from infinigen.core.constraints.example_solver.state_def import ObjectState, State logger = logging.getLogger(__name__) + def memo_key(n: cl.Node): match n: case cl.item(var): @@ -25,16 +23,9 @@ def memo_key(n: cl.Node): case _: return id(n) -def evict_memo_for_obj( - node: cl.Problem, - memo: dict, - obj: ObjectState -): - recvals = [ - evict_memo_for_obj(child, memo, obj) - for _, child in node.children() - ] +def evict_memo_for_obj(node: cl.Problem, memo: dict, obj: ObjectState): + recvals = [evict_memo_for_obj(child, memo, obj) for _, child in node.children()] res = any(recvals) match node: @@ -52,29 +43,28 @@ def evict_memo_for_obj( return res -def reset_bvh_cache(state, filter_name=None): - ''' +def reset_bvh_cache(state, filter_name=None): + """ filter_name: if specified, only get rid of things containing this - ''' + """ static_tags = {t.Semantics.Room, t.Semantics.Cutter} def keep_key(k): - names, tags = k - + if filter_name is not None: obj = state.objs[filter_name].obj - return not (obj.name in names) - + return obj.name not in names + for n in names: if n not in state.objs: return False ostate = state.objs[n] if not ostate.tags.intersection(static_tags): return False - + return True prev_keys = list(state.bvh_cache.keys()) @@ -84,22 +74,22 @@ def keep_key(k): continue del state.bvh_cache[k] - logger.debug(f'reset_bvh_cache evicted {len(prev_keys) - len(state.bvh_cache)} out of {len(prev_keys)} orig') + logger.debug( + f"reset_bvh_cache evicted {len(prev_keys) - len(state.bvh_cache)} out of {len(prev_keys)} orig" + ) + def evict_memo_for_move( - problem: cl.Problem, - state: State, - memo: dict, - move: moves.Move + problem: cl.Problem, state: State, memo: dict, move: moves.Move ): match move: case ( - moves.TranslateMove(names) | - moves.RotateMove(names) | - moves.Addition(names=names) | - moves.ReinitPoseMove(names=names) | - moves.RelationPlaneChange(names=names) | - moves.Resample(names=names) + moves.TranslateMove(names) + | 
moves.RotateMove(names) + | moves.Addition(names=names) + | moves.ReinitPoseMove(names=names) + | moves.RelationPlaneChange(names=names) + | moves.Resample(names=names) ): for name in names: assert name is not None, move @@ -112,4 +102,4 @@ def evict_memo_for_move( del memo[k] reset_bvh_cache(state) case _: - raise NotImplementedError(f'Unsure what to evict for {move=}') \ No newline at end of file + raise NotImplementedError(f"Unsure what to evict for {move=}") diff --git a/infinigen/core/constraints/evaluator/evaluate.py b/infinigen/core/constraints/evaluator/evaluate.py index 6c57343ba..f2bc985c3 100644 --- a/infinigen/core/constraints/evaluator/evaluate.py +++ b/infinigen/core/constraints/evaluator/evaluate.py @@ -4,44 +4,38 @@ # Authors: Alexander Raistrick -from typing import Type, Callable -from dataclasses import dataclass import copy import logging import operator +from dataclasses import dataclass + import pandas as pd +from infinigen.core.constraints import constraint_language as cl +from infinigen.core.constraints import reasoning as r +from infinigen.core.constraints.evaluator import eval_memo, node_impl from infinigen.core.constraints.example_solver.state_def import State -from infinigen.core.constraints.evaluator import node_impl, eval_memo - -from infinigen.core.constraints import ( - constraint_language as cl, - reasoning as r -) logger = logging.getLogger(__name__) -SPECIAL_CASE_NODES = [ - cl.ForAll, - cl.SumOver, - cl.MeanOver, - cl.item, - cl.Problem, - cl.scene -] - +SPECIAL_CASE_NODES = [cl.ForAll, cl.SumOver, cl.MeanOver, cl.item, cl.Problem, cl.scene] + gather_funcs = { cl.ForAll: all, cl.SumOver: sum, - cl.MeanOver: lambda vs: (sum(vs) / len(vs)) if len(vs) else 0 + cl.MeanOver: lambda vs: (sum(vs) / len(vs)) if len(vs) else 0, } -def _compute_node_val(node: cl.Node, state: State, memo: dict): +def _compute_node_val(node: cl.Node, state: State, memo: dict): match node: case cl.scene(): - return set(k for k, v in state.objs.items() if v.active) - case cl.ForAll(objs, var, pred) | cl.SumOver(objs, var, pred) | cl.MeanOver(objs, var, pred): + return set(k for k, v in state.objs.items() if v.active) + case ( + cl.ForAll(objs, var, pred) + | cl.SumOver(objs, var, pred) + | cl.MeanOver(objs, var, pred) + ): assert isinstance(var, str) loop_over_objs = evaluate_node(objs, state, memo) @@ -51,61 +45,56 @@ def _compute_node_val(node: cl.Node, state: State, memo: dict): memo_sub = copy.copy(memo) memo_sub[var] = {o} results.append(evaluate_node(pred, state, memo=memo_sub)) - - logger.debug(f'{node.__class__.__name__} had {len(results)=}') + + logger.debug(f"{node.__class__.__name__} had {len(results)=}") return gather_funcs[node.__class__](results) case cl.item(): raise ValueError( - f'_compute_node_val encountered undefined variable {node}. {memo.keys()}' + f"_compute_node_val encountered undefined variable {node}. 
{memo.keys()}" ) case cl.Node() if node.__class__ in node_impl.node_impls: impl_func = node_impl.node_impls.get(node.__class__) child_vals = { - name: evaluate_node(c, state, memo) - for name, c in node.children() + name: evaluate_node(c, state, memo) for name, c in node.children() } kwargs = {} - if hasattr(node, 'others_tags'): - kwargs['others_tags'] = getattr(node, 'others_tags') + if hasattr(node, "others_tags"): + kwargs["others_tags"] = getattr(node, "others_tags") return impl_func(node, state, child_vals, **kwargs) case cl.Problem(): - raise TypeError(f'evaluate_node is invalid for {node}, please use evaluate_problem') + raise TypeError( + f"evaluate_node is invalid for {node}, please use evaluate_problem" + ) case _: raise NotImplementedError( - f'Couldnt compute value for {type(node)}, please add it to ' - f'{node_impl.node_impls.keys()=} or add a specialcase' + f"Couldnt compute value for {type(node)}, please add it to " + f"{node_impl.node_impls.keys()=} or add a specialcase" ) -def relevant( - node: cl.Node, - filter: r.Domain | None -) -> bool: +def relevant(node: cl.Node, filter: r.Domain | None) -> bool: if filter is None: raise ValueError() return True - + if not isinstance(node, cl.Node): - raise ValueError(f'{node=}') + raise ValueError(f"{node=}") match node: - case cl.ObjectSetExpression(): d = r.constraint_domain(node, finalize_variables=True) - assert r.domain_finalized(d), f'{relevant.__name__} encountered unfinalized {d=}' + assert r.domain_finalized( + d + ), f"{relevant.__name__} encountered unfinalized {d=}" res = d.intersects(filter, require_satisfies_right=True) - logger.debug(f'{relevant.__name__} got {res=} for {d=}\n {filter=}') + logger.debug(f"{relevant.__name__} got {res=} for {d=}\n {filter=}") return res case _: return any(relevant(c, filter) for _, c in node.children()) -def _viol_count_binop( - node: cl.BoolOperatorExpression, - lhs, - rhs -) -> int: +def _viol_count_binop(node: cl.BoolOperatorExpression, lhs, rhs) -> int: if not isinstance(lhs, int) or not isinstance(rhs, int): satisfied = node.func(lhs, rhs) return 1 if not satisfied else 0 @@ -120,24 +109,16 @@ def _viol_count_binop( case operator.lt: return max(0, lhs - rhs + 1) case _: - raise ValueError(f'Unhandled {node.func=}') - - -def viol_count( - node: cl.Node, - state: State, - memo: dict, - filter: r.Domain=None -): + raise ValueError(f"Unhandled {node.func=}") - match node: +def viol_count(node: cl.Node, state: State, memo: dict, filter: r.Domain = None): + match node: case cl.BoolOperatorExpression(operator.and_, cons) | cl.Problem(cons): res = sum(viol_count(o, state, memo, filter) for o in cons) case cl.in_range(val, low, high): - val_res = evaluate_node(val, state, memo) - + if val_res < low: res = low - val_res elif val_res > high: @@ -161,10 +142,10 @@ def viol_count( viol += viol_count(pred, state, memo_sub, filter) res = viol case ( - cl.BoolOperatorExpression(operator.ge, [lhs, rhs]) | - cl.BoolOperatorExpression(operator.le, [rhs, lhs]) | - cl.BoolOperatorExpression(operator.gt, [rhs, lhs]) | - cl.BoolOperatorExpression(operator.lt, [rhs, lhs]) + cl.BoolOperatorExpression(operator.ge, [lhs, rhs]) + | cl.BoolOperatorExpression(operator.le, [rhs, lhs]) + | cl.BoolOperatorExpression(operator.gt, [rhs, lhs]) + | cl.BoolOperatorExpression(operator.lt, [rhs, lhs]) ): if relevant(lhs, filter) or relevant(rhs, filter): l_res = evaluate_node(lhs, state, memo) @@ -172,23 +153,26 @@ def viol_count( res = _viol_count_binop(node, l_res, r_res) else: res = 0 - + case cl.constant(val) if 
isinstance(val, bool): res = 0 if val else 1 case _: - raise NotImplementedError(f'{node.__class__.__name__}(...) is not supported for hard constraints. Please use an alternative. Full node was {node}') + raise NotImplementedError( + f"{node.__class__.__name__}(...) is not supported for hard constraints. Please use an alternative. Full node was {node}" + ) return res + @dataclass class ConstraintsViolated: constraints: list[cl.Node] def __bool__(self): return False - -def evaluate_node(node: cl.Node, state: State, memo=None): + +def evaluate_node(node: cl.Node, state: State, memo=None): k = eval_memo.memo_key(node) if memo is None: @@ -196,15 +180,15 @@ def evaluate_node(node: cl.Node, state: State, memo=None): elif k in memo: return memo[k] val = _compute_node_val(node, state, memo) - + memo[k] = val - logger.debug(f'Evaluated {node.__class__} to {val}') - + # logger.debug("Evaluated %s to %s", node.__class__, val) + return val + @dataclass class EvalResult: - loss_vals: dict[str, float] violations: dict[str, bool] @@ -216,24 +200,19 @@ def viol_count(self): def to_df(self) -> pd.DataFrame: keys = set(self.loss_vals.keys()).union(self.violations.keys()) - return pd.DataFrame.from_dict({ - k: dict( - loss=self.loss_vals.get(k), - viol_count=self.violations.get(k) - ) - for k in keys - }) + return pd.DataFrame.from_dict( + { + k: dict(loss=self.loss_vals.get(k), viol_count=self.violations.get(k)) + for k in keys + } + ) def evaluate_problem( - problem: cl.Problem, - state: State, - filter: r.Domain = None, - memo=None + problem: cl.Problem, state: State, filter: r.Domain = None, memo=None ): - logger.debug( - f'Evaluating problem {len(problem.constraints)=} {len(problem.score_terms)=}' + f"Evaluating problem {len(problem.constraints)=} {len(problem.score_terms)=}" ) if memo is None: @@ -241,15 +220,14 @@ def evaluate_problem( scores = {} for name, score_node in problem.score_terms.items(): - logger.debug(f'Evaluating score for {name=}') + logger.debug(f"Evaluating score for {name=}") scores[name] = evaluate_node(score_node, state, memo) - + logger.debug(f"Evaluator got score {scores[name]} for {name=}") + violated = {} for name, node in problem.constraints.items(): + logger.debug(f"Evaluating constraint {name=}") violated[name] = viol_count(node, state, memo, filter=filter) - logger.debug(f'Evaluator found {violated[name]} violations for {name=}') + logger.debug(f"Evaluator found {violated[name]} violations for {name=}") - return EvalResult( - loss_vals=scores, - violations=violated - ) \ No newline at end of file + return EvalResult(loss_vals=scores, violations=violated) diff --git a/infinigen/core/constraints/evaluator/indoor_util.py b/infinigen/core/constraints/evaluator/indoor_util.py index aa61cdd3f..b043671ba 100644 --- a/infinigen/core/constraints/evaluator/indoor_util.py +++ b/infinigen/core/constraints/evaluator/indoor_util.py @@ -3,15 +3,14 @@ # Authors: Karhan Kayan -import bpy +import math +import random +from typing import Union + +import bpy +import numpy as np import trimesh -from trimesh import Scene from shapely import LineString, Point -import numpy as np -from typing import Union -import random -import math -import functools def meshes_from_names(scene, names): @@ -19,11 +18,13 @@ def meshes_from_names(scene, names): names = [names] return [scene.geometry[g] for _, g in (scene.graph[n] for n in names)] + def blender_objs_from_names(names): if isinstance(names, str): names = [names] return [bpy.data.objects[n] for n in names] + def name_from_mesh(scene, mesh): mesh_name = 
None for name, mesh in scene.geometry.items(): @@ -32,31 +33,34 @@ def name_from_mesh(scene, mesh): break return mesh_name + def project_to_xy_path2d(mesh: trimesh.Trimesh) -> trimesh.path.Path2D: - poly = trimesh.path.polygons.projected(mesh, (0,0,1), (0,0,0)) + poly = trimesh.path.polygons.projected(mesh, (0, 0, 1), (0, 0, 0)) d = trimesh.path.exchange.misc.polygon_to_path(poly) - return trimesh.path.Path2D(entities = d['entities'], vertices = d['vertices']) + return trimesh.path.Path2D(entities=d["entities"], vertices=d["vertices"]) + def project_to_xy_poly(mesh: trimesh.Trimesh): - poly = trimesh.path.polygons.projected(mesh, (0,0,1), (0,0,0)) + poly = trimesh.path.polygons.projected(mesh, (0, 0, 1), (0, 0, 0)) return poly def closest_edge_to_point(polygon, point): - closest_distance = float('inf') + closest_distance = float("inf") closest_edge = None for i, coord in enumerate(polygon.exterior.coords[:-1]): start, end = coord, polygon.exterior.coords[i + 1] line = LineString([start, end]) distance = line.distance(point) - + if distance < closest_distance: closest_distance = distance closest_edge = line return closest_edge + def compute_outward_normal(line, polygon): dx = line.xy[0][1] - line.xy[0][0] # x1 - x0 dy = line.xy[1][1] - line.xy[1][0] # y1 - y0 @@ -68,20 +72,20 @@ def compute_outward_normal(line, polygon): # Normalize the vectors (optional but recommended for consistency) normal_vector_1 = normal_vector_1 / np.linalg.norm(normal_vector_1) normal_vector_2 = normal_vector_2 / np.linalg.norm(normal_vector_2) - + # Midpoint of the line segment mid_point = line.interpolate(0.5, normalized=True) - + # Move a tiny bit in the direction of the normals to check which points outside test_point_1 = mid_point.coords[0] + 0.01 * normal_vector_1 - test_point_2 = mid_point.coords[0] + 0.01 * normal_vector_2 - + mid_point.coords[0] + 0.01 * normal_vector_2 + # Return the normal for which the test point lies outside the polygon if polygon.contains(Point(test_point_1)): return normal_vector_2 else: return normal_vector_1 - + def get_transformed_axis(scene, obj_name): obj = bpy.data.objects[obj_name] @@ -100,8 +104,6 @@ def set_axis(scene, objs: Union[str, list[str]], canonical_axis): obj.axis = get_transformed_axis(scene, obj_name) - - def get_plane_from_3dmatrix(matrix): """Extract the plane_normal and plane_origin from a transformation matrix.""" # The normal of the plane can be extracted from the 3x3 rotation part of the matrix @@ -115,6 +117,7 @@ def project_points_onto_plane(points, plane_origin, plane_normal): d = np.dot(points - plane_origin, plane_normal)[:, None] return points - d * plane_normal + def to_2d_coordinates(points, plane_normal): """Convert 3D points to 2D using the plane defined by its normal.""" # Compute two perpendicular vectors on the plane @@ -124,7 +127,7 @@ def to_2d_coordinates(points, plane_normal): u /= np.linalg.norm(u) v = np.cross(plane_normal, u) v /= np.linalg.norm(v) - + # Convert 3D points to 2D using dot products return np.column_stack([points.dot(u), points.dot(v)]) @@ -136,12 +139,18 @@ def ensure_correct_order(points): """ # Calculate signed area n = len(points) - area = sum((points[i][0] * points[(i+1)%n][1]) - (points[(i+1)%n][0] * points[i][1]) for i in range(n)) / 2.0 + area = ( + sum( + (points[i][0] * points[(i + 1) % n][1]) + - (points[(i + 1) % n][0] * points[i][1]) + for i in range(n) + ) + / 2.0 + ) # Return the points in reverse order if area is negative return points[::-1] if area < 0 else points - def sample_random_point(polygon): """ 
Sample a random point from inside the given Shapely polygon. @@ -151,27 +160,29 @@ def sample_random_point(polygon): p = Point(random.uniform(minx, maxx), random.uniform(miny, maxy)) if polygon.contains(p): return p - -def delete_obj(a, scene = None): + +def delete_obj(a, scene=None): if isinstance(a, str): a = [a] for obj_name in a: bpy.data.objects.remove(bpy.data.objects[obj_name], do_unlink=True) - if scene: + if scene: scene.graph.transforms.remove_node(obj_name) - scene.delete_geometry(obj_name + '_mesh') + scene.delete_geometry(obj_name + "_mesh") def global_vertex_coordinates(obj, local_vertex): return obj.matrix_world @ local_vertex.co + def global_polygon_normal(obj, polygon): loc, rot, scale = obj.matrix_world.decompose() rot = rot.to_matrix() normal = rot @ polygon.normal return normal / np.linalg.norm(normal) + def is_planar(obj, tolerance=1e-6): if len(obj.data.polygons) != 1: return False @@ -184,14 +195,17 @@ def is_planar(obj, tolerance=1e-6): # Check if all vertices lie on the plane defined by the reference vertex and the global normal for vertex in obj.data.vertices: - distance = (global_vertex_coordinates(obj, vertex) - ref_vertex).dot(global_normal) + distance = (global_vertex_coordinates(obj, vertex) - ref_vertex).dot( + global_normal + ) if not math.isclose(distance, 0, abs_tol=tolerance): return False return True + def planes_parallel(plane_obj_a, plane_obj_b, tolerance=1e-6): - if plane_obj_a.type != 'MESH' or plane_obj_b.type != 'MESH': + if plane_obj_a.type != "MESH" or plane_obj_b.type != "MESH": raise ValueError("Both objects should be of type 'MESH'") # # Check if the objects are planar @@ -203,17 +217,22 @@ def planes_parallel(plane_obj_a, plane_obj_b, tolerance=1e-6): dot_product = global_normal_a.dot(global_normal_b) - return math.isclose(dot_product, 1, abs_tol=tolerance) or math.isclose(dot_product, -1, abs_tol=tolerance) + return math.isclose(dot_product, 1, abs_tol=tolerance) or math.isclose( + dot_product, -1, abs_tol=tolerance + ) + - def distance_to_plane(point, plane_point, plane_normal): """Compute the distance from a point to a plane defined by a point and a normal.""" return abs((point - plane_point).dot(plane_normal)) -def is_within_margin_from_plane(obj, obj_b, margin, tol = 1e-6): + +def is_within_margin_from_plane(obj, obj_b, margin, tol=1e-6): """Check if all vertices of an object are within a given margin from a plane.""" polygon_b = obj_b.data.polygons[0] - plane_point_b = global_vertex_coordinates(obj_b, obj_b.data.vertices[polygon_b.vertices[0]]) + plane_point_b = global_vertex_coordinates( + obj_b, obj_b.data.vertices[polygon_b.vertices[0]] + ) plane_normal_b = global_polygon_normal(obj_b, polygon_b) for vertex in obj.data.vertices: global_vertex = global_vertex_coordinates(obj, vertex) @@ -222,10 +241,11 @@ def is_within_margin_from_plane(obj, obj_b, margin, tol = 1e-6): return False return True + # def update_blender_representation(scene, trimesh_obj): -# transform_matrix = +# transform_matrix = # def update_trimesh_representation(scnene, blender_obj): -# pass \ No newline at end of file +# pass diff --git a/infinigen/core/constraints/evaluator/node_impl/__init__.py b/infinigen/core/constraints/evaluator/node_impl/__init__.py index 138875f46..e1a6f184b 100644 --- a/infinigen/core/constraints/evaluator/node_impl/__init__.py +++ b/infinigen/core/constraints/evaluator/node_impl/__init__.py @@ -1 +1 @@ -from .impl_bindings import node_impls \ No newline at end of file +from .impl_bindings import node_impls diff --git 
a/infinigen/core/constraints/evaluator/node_impl/impl_bindings.py b/infinigen/core/constraints/evaluator/node_impl/impl_bindings.py index 3ee9e0f04..1cdf4f3de 100644 --- a/infinigen/core/constraints/evaluator/node_impl/impl_bindings.py +++ b/infinigen/core/constraints/evaluator/node_impl/impl_bindings.py @@ -2,139 +2,126 @@ # This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. -# Authors: +# Authors: # - Karhan Kayan: geometry impl bindings # - Alexander Raistrick: impl interface, set_reasoning / operator impls # - Lingjie Mei: bugfix -import math -import logging import functools +import logging +import math -import gin import numpy as np -from infinigen.core.constraints import ( - constraint_language as cl, - reasoning as r -) -from infinigen.core.constraints.example_solver import state_def from infinigen.core import tags as t +from infinigen.core.constraints import constraint_language as cl +from infinigen.core.constraints import reasoning as r from infinigen.core.constraints.evaluator import domain_contains +from infinigen.core.constraints.example_solver import state_def -from . import trimesh_geometry, symmetry -import inspect +from . import symmetry, trimesh_geometry logger = logging.getLogger(__name__) node_impls = {} + def statenames_to_blnames(state, names): return [state.objs[n].obj.name for n in names] + def register_node_impl(node_cls): def decorator(func): node_impls[node_cls] = func return func + return decorator -def generic_impl_interface( - cons: cl.Node, - state: state_def.State, - child_vals: dict -): + +def generic_impl_interface(cons: cl.Node, state: state_def.State, child_vals: dict): pass + @register_node_impl(cl.constant) -def constant_impl( - cons: cl.Node, - state: state_def.State, - child_vals: dict -): +def constant_impl(cons: cl.Node, state: state_def.State, child_vals: dict): return cons.value + @register_node_impl(cl.ScalarOperatorExpression) @register_node_impl(cl.BoolOperatorExpression) def operator_impl( - cons: cl.ScalarOperatorExpression | cl.BoolOperatorExpression, - state: state_def.State, - child_vals: dict + cons: cl.ScalarOperatorExpression | cl.BoolOperatorExpression, + state: state_def.State, + child_vals: dict, ): - - operands = [ - child_vals[f'operands[{i}]'] - for i in range(len(cons.operands)) - ] + operands = [child_vals[f"operands[{i}]"] for i in range(len(cons.operands))] try: - if ( - isinstance(cons.func, np.ufunc) - or len(operands) == 1 - ): + if isinstance(cons.func, np.ufunc) or len(operands) == 1: return cons.func(*operands) else: return functools.reduce(cons.func, operands) except ZeroDivisionError as e: - raise ZeroDivisionError(f'{e} in {cons=}, {operands=}') + raise ZeroDivisionError(f"{e} in {cons=}, {operands=}") + @register_node_impl(cl.center_stable_surface_dist) def center_stable_surface_impl( - cons: cl.center_stable_surface_dist, - state: state_def.State, - child_vals: dict + cons: cl.center_stable_surface_dist, state: state_def.State, child_vals: dict ): - objs = child_vals['objs'] + objs = child_vals["objs"] return trimesh_geometry.center_stable_surface(state.trimesh_scene, objs, state) + @register_node_impl(cl.accessibility_cost) def accessibility_impl( - cons: cl.accessibility_cost, - state: state_def.State, + cons: cl.accessibility_cost, + state: state_def.State, child_vals: dict, - use_collision_impl: bool = True + use_collision_impl: bool = True, ): - - objs = statenames_to_blnames(state, child_vals['objs']) - others = 
statenames_to_blnames(state, child_vals['others']) + objs = statenames_to_blnames(state, child_vals["objs"]) + others = statenames_to_blnames(state, child_vals["others"]) if len(objs) == 0: return 0 - + if use_collision_impl: + logger.debug("accessibility_cost_cuboid_penetration(%s, %s)", objs, others) res = trimesh_geometry.accessibility_cost_cuboid_penetration( - state.trimesh_scene, - objs, - others, - cons.normal, - cons.dist, - bvh_cache=state.bvh_cache + state.trimesh_scene, + objs, + others, + cons.normal, + cons.dist, + bvh_cache=state.bvh_cache, ) else: + logger.debug("accessibility_cost(%s, %s)", objs, others) res = trimesh_geometry.accessibility_cost( state.trimesh_scene, objs, others, cons.normal ) return res + @register_node_impl(cl.distance) def min_distance_impl( - cons: cl.Node, - state: state_def.State, - child_vals: dict, - others_tags: set = None + cons: cl.Node, state: state_def.State, child_vals: dict, others_tags: set = None ): - - objs = statenames_to_blnames(state, child_vals['objs']) - others = statenames_to_blnames(state, child_vals['others']) + objs = statenames_to_blnames(state, child_vals["objs"]) + others = statenames_to_blnames(state, child_vals["others"]) if len(objs) == 0 or len(others) == 0: - logger.debug('min_distance had no targets') + logger.debug("min_distance had no targets") return 0 - + + logger.debug("min_distance_impl(%s, %s)", objs, others) + res = trimesh_geometry.min_dist( - state.trimesh_scene, - a=objs, - b=others, + state.trimesh_scene, + a=objs, + b=others, b_tags=others_tags, - bvh_cache=state.bvh_cache + bvh_cache=state.bvh_cache, ) if res.dist < 0: @@ -142,159 +129,123 @@ def min_distance_impl( return res.dist + @register_node_impl(cl.min_distance_internal) def min_distance_internal_impl( - cons: cl.min_distance_internal, - state: state_def.State, - child_vals: dict + cons: cl.min_distance_internal, state: state_def.State, child_vals: dict ): - objs = statenames_to_blnames(state, child_vals['objs']) + objs = statenames_to_blnames(state, child_vals["objs"]) if len(objs) <= 1: return 0 - return trimesh_geometry.min_dist( - state.trimesh_scene, a=objs - ).dist + return trimesh_geometry.min_dist(state.trimesh_scene, a=objs).dist + @register_node_impl(cl.min_dist_2d) -def min_dist_2d_impl( - cons: cl.min_dist_2d, - state: state_def.State, - child_vals: dict -): - a = statenames_to_blnames(state, child_vals['objs']) - b = statenames_to_blnames(state, child_vals['others']) +def min_dist_2d_impl(cons: cl.min_dist_2d, state: state_def.State, child_vals: dict): + a = statenames_to_blnames(state, child_vals["objs"]) + b = statenames_to_blnames(state, child_vals["others"]) if len(a) == 0 or len(b) == 0: return 0 - return trimesh_geometry.min_dist_2d( - state.trimesh_scene, a, b - ) + return trimesh_geometry.min_dist_2d(state.trimesh_scene, a, b) + @register_node_impl(cl.focus_score) def focus_score_impl( - cons: cl.focus_score, - state: state_def.State, + cons: cl.focus_score, + state: state_def.State, child_vals: dict, ): + a = statenames_to_blnames(state, child_vals["objs"]) + b = statenames_to_blnames(state, child_vals["others"]) - a = statenames_to_blnames(state, child_vals['objs']) - b = statenames_to_blnames(state, child_vals['others']) - if len(a) == 0 or len(b) == 0: return 0 - - return trimesh_geometry.focus_score( - state, - a=a, - b=b - ) + + return trimesh_geometry.focus_score(state, a=a, b=b) + @register_node_impl(cl.angle_alignment_cost) def angle_alignment_impl( - cons: cl.angle_alignment_cost, - state: state_def.State, - child_vals: 
dict, - others_tags: set = None + cons: cl.angle_alignment_cost, + state: state_def.State, + child_vals: dict, + others_tags: set = None, ): - a = statenames_to_blnames(state, child_vals['objs']) - b = statenames_to_blnames(state, child_vals['others']) + a = statenames_to_blnames(state, child_vals["objs"]) + b = statenames_to_blnames(state, child_vals["others"]) if len(a) == 0 or len(b) == 0: return 0 - return trimesh_geometry.angle_alignment_cost( - state, a, b, others_tags - ) + return trimesh_geometry.angle_alignment_cost(state, a, b, others_tags) + @register_node_impl(cl.freespace_2d) -def freespace_2d_impl( - cons: cl.freespace_2d, - state: state_def.State, - child_vals: dict -): +def freespace_2d_impl(cons: cl.freespace_2d, state: state_def.State, child_vals: dict): return trimesh_geometry.freespace_2d() + @register_node_impl(cl.rotational_asymmetry) def rotational_asymmetry_impl( - cons: cl.rotational_asymmetry, - state: state_def.State, - child_vals: dict + cons: cl.rotational_asymmetry, state: state_def.State, child_vals: dict ): - objs = statenames_to_blnames(state, child_vals['objs']) + objs = statenames_to_blnames(state, child_vals["objs"]) if len(objs) <= 1: return 0 return symmetry.compute_total_rotation_asymmetry(objs) + @register_node_impl(cl.reflectional_asymmetry) def reflectional_asymmetry_impl( - cons: cl.reflectional_asymmetry, - state: state_def.State, - child_vals: dict, + cons: cl.reflectional_asymmetry, + state: state_def.State, + child_vals: dict, use_long_plane: bool = True, ): - - objs = statenames_to_blnames(state, child_vals['objs']) - others = statenames_to_blnames(state, child_vals['others']) + objs = statenames_to_blnames(state, child_vals["objs"]) + others = statenames_to_blnames(state, child_vals["others"]) if len(objs) <= 1: return 0 return trimesh_geometry.reflectional_asymmetry_score( state.trimesh_scene, objs, others, use_long_plane ) + @register_node_impl(cl.coplanarity_cost) def coplanarity_cost_impl( - cons: cl.coplanarity_cost, - state: state_def.State, - child_vals: dict + cons: cl.coplanarity_cost, state: state_def.State, child_vals: dict ): - objs = child_vals['objs'] + objs = child_vals["objs"] if len(objs) <= 1: return 0 return trimesh_geometry.coplanarity_cost(state.trimesh_scene, objs) @register_node_impl(cl.tagged) -def tagged_impl( - cons: cl.tagged, - state: state_def.State, - child_vals: dict -): - res = { - o for o in child_vals['objs'] - if t.satisfies(state.objs[o].tags, cons.tags) - } +def tagged_impl(cons: cl.tagged, state: state_def.State, child_vals: dict): + res = {o for o in child_vals["objs"] if t.satisfies(state.objs[o].tags, cons.tags)} - #logger.debug('tagged(%s) produced %s from %i candidates', cons.tags, res, len(child_vals['objs'])) + # logger.debug('tagged(%s) produced %s from %i candidates', cons.tags, res, len(child_vals['objs'])) return res + @register_node_impl(cl.count) -def count_impl( - cons: cl.count, - state: state_def.State, - child_vals: dict -): - return len(child_vals['objs']) +def count_impl(cons: cl.count, state: state_def.State, child_vals: dict): + return len(child_vals["objs"]) + @register_node_impl(cl.in_range) -def in_range_impl( - cons: cl.in_range, - state: state_def.State, - child_vals: dict -): - x = child_vals['val'] - return ( - x <= cons.high and - x >= cons.low - ) +def in_range_impl(cons: cl.in_range, state: state_def.State, child_vals: dict): + x = child_vals["val"] + return x <= cons.high and x >= cons.low + @register_node_impl(cl.related_to) def related_children_impl( - cons: 
cl.related_to, - state: state_def.State, - child_vals: dict + cons: cl.related_to, state: state_def.State, child_vals: dict ): - r = cons.relation - children: set[str] = child_vals['child'] - parents: set[str] = child_vals['parent'] + children: set[str] = child_vals["child"] + parents: set[str] = child_vals["parent"] res = set() for o in children: @@ -304,54 +255,40 @@ def related_children_impl( ): res.add(o) - #logger.debug('related_to %s produced %s from %i candidates', cons.relation, res, len(children)) + # logger.debug('related_to %s produced %s from %i candidates', cons.relation, res, len(children)) return res + @register_node_impl(cl.excludes) -def excludes_impl( - cons: cl.excludes, - state: state_def.State, - child_vals: dict -): - - return { - o for o in child_vals['objs'] - if state.objs[o].tags.isdisjoint(cons.tags) - } +def excludes_impl(cons: cl.excludes, state: state_def.State, child_vals: dict): + return {o for o in child_vals["objs"] if state.objs[o].tags.isdisjoint(cons.tags)} + @register_node_impl(cl.volume) -def volume_impl( - cons: cl.volume, - state: state_def.State, - child_vals: dict -): - objs = child_vals['objs'] - +def volume_impl(cons: cl.volume, state: state_def.State, child_vals: dict): + objs = child_vals["objs"] + res = 0 for o in objs: - s = state.objs[o] dims = sorted(list(s.obj.dimensions), reverse=True) if isinstance(cons.dims, int): - dims = dims[:cons.dims] + dims = dims[: cons.dims] elif isinstance(cons.dims, tuple): dims = np.array(dims)[np.array(cons.dims)] else: - raise TypeError(f'Unexpected {type(cons.dims)=}') - + raise TypeError(f"Unexpected {type(cons.dims)=}") + res += math.prod(dims) return res + @register_node_impl(cl.hinge) -def hinge_impl( - cons: cl.hinge, - state: state_def.State, - child_vals: dict -): - x = child_vals['val'] +def hinge_impl(cons: cl.hinge, state: state_def.State, child_vals: dict): + x = child_vals["val"] if x < cons.low: return cons.low - x @@ -359,16 +296,14 @@ def hinge_impl( return x - cons.high else: return 0 - + + @register_node_impl(r.FilterByDomain) def filter_by_domain_impl( - cons: r.FilterByDomain, - state: state_def.State, - child_vals: dict + cons: r.FilterByDomain, state: state_def.State, child_vals: dict ) -> set[str]: - return { o - for o in child_vals['objs'] + for o in child_vals["objs"] if domain_contains.domain_contains(cons.filter, state, state.objs[o]) } diff --git a/infinigen/core/constraints/evaluator/node_impl/symmetry.py b/infinigen/core/constraints/evaluator/node_impl/symmetry.py index ec12f6017..4b8bf9c46 100644 --- a/infinigen/core/constraints/evaluator/node_impl/symmetry.py +++ b/infinigen/core/constraints/evaluator/node_impl/symmetry.py @@ -5,22 +5,24 @@ # Acknowledgement: Rotational symmetry code draws inspiration from https://pubs.acs.org/doi/abs/10.1021/ja00046a033 by Zabrodsky et al. 
+from typing import Union + +import matplotlib.pyplot as plt import numpy as np -from typing import Union, Any -from infinigen.core.constraints.evaluator.indoor_util import blender_objs_from_names -from mathutils import Vector, Quaternion, Matrix +from mathutils import Matrix, Quaternion, Vector from scipy.optimize import linear_sum_assignment -import matplotlib.pyplot as plt + +from infinigen.core.constraints.evaluator.indoor_util import blender_objs_from_names def rotate_vector(vector, angle): """Rotate a 2D vector by a given angle.""" - rotation_matrix = np.array([ - [np.cos(angle), -np.sin(angle)], - [np.sin(angle), np.cos(angle)] - ]) + rotation_matrix = np.array( + [[np.cos(angle), -np.sin(angle)], [np.sin(angle), np.cos(angle)]] + ) return np.dot(vector, rotation_matrix.T) + def compute_centroid(objects): """Compute the centroid of the provided objects.""" total_coords = np.zeros(2) @@ -28,63 +30,60 @@ def compute_centroid(objects): total_coords += np.array([obj.location.x, obj.location.y]) return total_coords / len(objects) + def compute_location_asymmetry(objects, centroid): """Compute location asymmetry based on the described method.""" num_objects = len(objects) - + P = np.zeros((num_objects, 2)) for i, obj in enumerate(objects): P[i] = np.array([obj.location.x, obj.location.y]) - centroid # print(P) - + # 2. Rotate all P_i so that P_1 is aligned with the x axis angle_p1 = np.arctan2(P[0][1], P[0][0]) for i in range(num_objects): P[i] = rotate_vector(P[i], -angle_p1) - # 3. Normalize P_i by dividing by max norm max_norm = max(np.linalg.norm(P, axis=1)) P /= max_norm - # print(P) - - + # print(P) + # 4. Compute Q as the average of the rotated P_i vectors Q = np.zeros(2) for i in range(num_objects): rotated_p = rotate_vector(P[i], -i * 2 * np.pi / num_objects) -# print("rot", P[i], rotated_p) + # print("rot", P[i], rotated_p) Q += rotated_p Q /= num_objects - + # 5. and 6. Compute Q_i and find the MSD between Q_i and P_i total_msd = 0 for i in range(num_objects): - Q_i = rotate_vector(Q, i * 2 * np.pi / num_objects) -# print("rot2", Q_i, P[i]) - msd = np.linalg.norm(Q_i - P[i])**2 + Q_i = rotate_vector(Q, i * 2 * np.pi / num_objects) + # print("rot2", Q_i, P[i]) + msd = np.linalg.norm(Q_i - P[i]) ** 2 total_msd += msd - + return total_msd / num_objects def compute_orientation_asymmetry(objects, centroid): """Compute orientation asymmetry of objects.""" num_objects = len(objects) - + # 1. Get the orientation vectors V = np.zeros((num_objects, 2)) for i, obj in enumerate(objects): # Extract orientation from object's rotation attribute V[i] = np.array([np.cos(obj.rotation_euler.z), np.sin(obj.rotation_euler.z)]) - - + # Rotate all V_i so that V_1 is aligned with the x axis angle_v1 = np.arctan2(V[0][1], V[0][0]) for i in range(num_objects): V[i] = rotate_vector(V[i], -angle_v1) - - + # Normalize V_i by dividing by max norm max_norm = max(np.linalg.norm(V, axis=1)) V /= max_norm @@ -93,21 +92,21 @@ def compute_orientation_asymmetry(objects, centroid): Q = np.zeros(2) for i in range(num_objects): rotated_v = rotate_vector(V[i], -i * 2 * np.pi / num_objects) -# print("rot", P[i], rotated_p) + # print("rot", P[i], rotated_p) Q += rotated_v Q /= num_objects - # 5. and 6. 
Compute Q_i and find the MSD between Q_i and V_i total_msd = 0 for i in range(num_objects): - Q_i = rotate_vector(Q, i * 2 * np.pi / num_objects) -# print("rot2", Q_i, P[i]) - msd = np.linalg.norm(Q_i - V[i])**2 + Q_i = rotate_vector(Q, i * 2 * np.pi / num_objects) + # print("rot2", Q_i, P[i]) + msd = np.linalg.norm(Q_i - V[i]) ** 2 total_msd += msd return total_msd / num_objects + def sort_objects_clockwise(objects, centroid): angles = [] for obj in objects: @@ -134,11 +133,10 @@ def compute_total_rotation_asymmetry(a: Union[str, list[str]]) -> float: objects = sort_objects_clockwise(objects, centroid) location_asymmetry = compute_location_asymmetry(objects, centroid) orientation_asymmetry = compute_orientation_asymmetry(objects, centroid) - + # print("location asym", location_asymmetry, "orient asym", orientation_asymmetry) - - return (location_asymmetry + orientation_asymmetry) / 2 + return (location_asymmetry + orientation_asymmetry) / 2 def reflect_point(point, plane_point, plane_normal): @@ -148,18 +146,19 @@ def reflect_point(point, plane_point, plane_normal): reflected_point = point - 2 * distance_to_plane * plane_normal return reflected_point -# prob doesnt work + +# prob doesnt work def reflect_quaternion(q, n): # Decompose the quaternion into scalar and vector parts w = q.w v = Vector((q.x, q.y, q.z)) - + # Reflect the vector part v_reflected = v - 2 * v.dot(n) * n - + # Construct the reflected quaternion q_reflected = Quaternion((w, v_reflected.x, v_reflected.y, v_reflected.z)) - + return q_reflected @@ -169,24 +168,30 @@ def reflect_axis_angle(axis_angle, n): # Reflect the vector part v_reflected = axis - 2 * axis.dot(n) * n angle_reflected = -angle - - # Construct the reflected axis angle - axis_angle_reflected = Vector((angle_reflected, v_reflected.x, v_reflected.y, v_reflected.z)) - + + # Construct the reflected axis angle + axis_angle_reflected = Vector( + (angle_reflected, v_reflected.x, v_reflected.y, v_reflected.z) + ) + return axis_angle_reflected + def reflect(obj, plane_point, plane_normal): - obj.rotation_mode = 'AXIS_ANGLE' + obj.rotation_mode = "AXIS_ANGLE" reflected_position = reflect_point(obj.location, plane_point, plane_normal) reflected_axis_angle = reflect_axis_angle(obj.rotation_axis_angle, plane_normal) - reflected_quaternion = Matrix.Rotation(reflected_axis_angle[0], 4, reflected_axis_angle[1:]).to_quaternion() + reflected_quaternion = Matrix.Rotation( + reflected_axis_angle[0], 4, reflected_axis_angle[1:] + ).to_quaternion() return reflected_position, reflected_quaternion - + def distance(pos1, pos2): # Calculate Euclidean distance between two positions return (pos1 - pos2).length + def angle_difference(orient1, orient2): # Calculate the angular difference between two orientations represented as quaternions. 
orient1.normalize() @@ -198,6 +203,7 @@ def angle_difference(orient1, orient2): angle = 2 * np.arccos(dot_product) return angle + def weight(obj): # Assign a weight based on obj size or other criteria bbox = obj.bound_box @@ -205,59 +211,88 @@ def weight(obj): volume = (max(dims) - min(dims)) ** 3 return volume + def normalization_factor(objs): - avg_distance = np.mean([distance(obj1.location, obj2.location) for obj1 in objs for obj2 in objs if obj1 != obj2]) + avg_distance = np.mean( + [ + distance(obj1.location, obj2.location) + for obj1 in objs + for obj2 in objs + if obj1 != obj2 + ] + ) return avg_distance + def bipartite_matching(objs, reflected_objs_data): # Use the Hungarian algorithm to find the optimal pairing between objs and reflected_objs - for obj in objs: - obj.rotation_mode = 'QUATERNION' - cost_matrix = np.array([[distance(obj.location, ref[0]) + angle_difference(obj.rotation_quaternion, ref[1]) for ref in reflected_objs_data] for obj in objs]) + for obj in objs: + obj.rotation_mode = "QUATERNION" + cost_matrix = np.array( + [ + [ + distance(obj.location, ref[0]) + + angle_difference(obj.rotation_quaternion, ref[1]) + for ref in reflected_objs_data + ] + for obj in objs + ] + ) row_ind, col_ind = linear_sum_assignment(cost_matrix) return [(objs[i], reflected_objs_data[j]) for i, j in zip(row_ind, col_ind)] -def calculate_reflectional_asymmetry(objs, plane_point, plane_normal, visualize = False): +def calculate_reflectional_asymmetry(objs, plane_point, plane_normal, visualize=False): if visualize: fig, ax = plt.subplots() # plot plane point and plane normal - ax.scatter(plane_point.x, plane_point.y, c='g', label='plane point') - ax.quiver(plane_point.x, plane_point.y, plane_normal.x, plane_normal.y, color='g', label='plane normal') + ax.scatter(plane_point.x, plane_point.y, c="g", label="plane point") + ax.quiver( + plane_point.x, + plane_point.y, + plane_normal.x, + plane_normal.y, + color="g", + label="plane normal", + ) - reflected_objs_data = [reflect(obj, plane_point, plane_normal) for obj in objs] - + # Use bipartite matching to find optimal pairings pairings = bipartite_matching(objs, reflected_objs_data) - + total_deviation = 0 for original, reflected_data in pairings: positional_deviation = distance(original.location, reflected_data[0]) - original.rotation_mode = 'QUATERNION' - angular_deviation = angle_difference(original.rotation_quaternion, reflected_data[1]) - - weighted_deviation = weight(original) * (positional_deviation + angular_deviation) + original.rotation_mode = "QUATERNION" + angular_deviation = angle_difference( + original.rotation_quaternion, reflected_data[1] + ) + + weighted_deviation = weight(original) * ( + positional_deviation + angular_deviation + ) total_deviation += weighted_deviation if visualize: # plot the point and the reflected point with different colors - ax.scatter(original.location.x, original.location.y, c='b', label='original point') - ax.scatter(reflected_data[0].x, reflected_data[0].y, c='r', label='reflected point') + ax.scatter( + original.location.x, original.location.y, c="b", label="original point" + ) + ax.scatter( + reflected_data[0].x, reflected_data[0].y, c="r", label="reflected point" + ) - # Normalize based on scene scale or other criteria normalized_deviation = total_deviation / normalization_factor(objs) - + symmetry_score = 1 / (1 + normalized_deviation) asymmetry_score = 1 - symmetry_score - for obj in objs: - obj.rotation_mode = 'XYZ' + for obj in objs: + obj.rotation_mode = "XYZ" if visualize: ax.legend() 
plt.show() - - - return asymmetry_score + return asymmetry_score diff --git a/infinigen/core/constraints/evaluator/node_impl/trimesh_geometry.py b/infinigen/core/constraints/evaluator/node_impl/trimesh_geometry.py index abdf935c0..bc3e6d793 100644 --- a/infinigen/core/constraints/evaluator/node_impl/trimesh_geometry.py +++ b/infinigen/core/constraints/evaluator/node_impl/trimesh_geometry.py @@ -1,46 +1,41 @@ # Copyright (c) Princeton University. # This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. -# Authors: +# Authors: # - Karhan Kayan: primary author # - Alexander Raistrick: initial version of collision/distance # Acknowledgement: Some metrics draw inspiration from https://dl.acm.org/doi/10.1145/1964921.1964981 by Yu et al. from __future__ import annotations -from typing import Union, Any + import logging from dataclasses import dataclass -from copy import copy - -import numpy as np -import gin +from typing import Union import bpy -import trimesh - -from trimesh import Trimesh, Scene -import networkx as nx -from shapely.geometry import Point, LineString -from shapely.ops import unary_union, nearest_points -from shapely import Polygon -from shapely import MultiPolygon - +import gin import matplotlib.pyplot as plt -# import fcl - -# from infinigen.core.util import blender as butil -from infinigen.core.util import blender as butil -from mathutils import Vector, Quaternion +import networkx as nx +import numpy as np +import trimesh +from mathutils import Vector from scipy.optimize import linear_sum_assignment +from shapely import MultiPolygon, Polygon +from shapely.geometry import LineString, Point +from shapely.ops import nearest_points, unary_union +from trimesh import Scene - - -import infinigen.core.constraints.constraint_language.util as iu +import infinigen.core.constraints.constraint_language.util as iu +import infinigen.core.constraints.evaluator.node_impl.symmetry as symmetry +from infinigen.core import tagging +from infinigen.core import tags as t from infinigen.core.constraints.example_solver import state_def from infinigen.core.constraints.example_solver.geometry.parse_scene import add_to_scene +from infinigen.core.util.logging import lazydebug -from infinigen.core import tags as t, tagging -import infinigen.core.constraints.evaluator.node_impl.symmetry as symmetry +# from infinigen.core.util import blender as butil + +# import fcl # from infinigen.core.tagging import tag_object,tag_system @@ -49,6 +44,7 @@ logger = logging.getLogger(__name__) + def get_cardinal_planes_bbox(vertices: np.ndarray): """ Get the mid dividing planes. 
Assumes vertices form a box @@ -57,7 +53,7 @@ def get_cardinal_planes_bbox(vertices: np.ndarray): # Calculate the covariance matrix and principal components centered_vertices = vertices - centroid - cov_matrix = np.cov(centered_vertices[:,:2].T) # Covariance on XY plane + cov_matrix = np.cov(centered_vertices[:, :2].T) # Covariance on XY plane eigenvalues, eigenvectors = np.linalg.eigh(cov_matrix) # Sort eigenvectors based on eigenvalues @@ -74,22 +70,29 @@ def get_cardinal_planes_bbox(vertices: np.ndarray): longer_plane_normal /= np.linalg.norm(longer_plane_normal) shorter_plane_normal /= np.linalg.norm(shorter_plane_normal) - return [[Vector(centroid), Vector(longer_plane_normal)], [Vector(centroid), Vector(shorter_plane_normal)]] + return [ + [Vector(centroid), Vector(longer_plane_normal)], + [Vector(centroid), Vector(shorter_plane_normal)], + ] -def get_axis(state: state_def.State, obj: bpy.types.Object, tag = t.Subpart.Front): +def get_axis(state: state_def.State, obj: bpy.types.Object, tag=t.Subpart.Front): a_front_planes = state.planes.get_tagged_planes(obj, tag) if len(a_front_planes) > 1: - logging.warning(f'{obj.name=} had too many front planes ({len(a_front_planes)})') + logging.warning( + f"{obj.name=} had too many front planes ({len(a_front_planes)})" + ) a_front_plane = a_front_planes[0] a_front_plane_ind = a_front_plane[1] a_poly = obj.data.polygons[a_front_plane_ind] - front_plane_pt = iu.global_vertex_coordinates(obj, obj.data.vertices[a_poly.vertices[0]]) + front_plane_pt = iu.global_vertex_coordinates( + obj, obj.data.vertices[a_poly.vertices[0]] + ) front_plane_normal = iu.global_polygon_normal(obj, a_poly) return front_plane_pt, front_plane_normal -def preprocess_collision_query_cases(a, b, a_tags, b_tags): +def preprocess_collision_query_cases(a, b, a_tags, b_tags): if isinstance(a, list): a = set(a) if isinstance(b, list): @@ -101,10 +104,7 @@ def preprocess_collision_query_cases(a, b, a_tags, b_tags): b = b.pop() # eliminate symmetrical cases - if ( - a is None or - (isinstance(b, set) and not isinstance(a, set)) - ): + if a is None or (isinstance(b, set) and not isinstance(a, set)): a, b = b, a a_tags, b_tags = b_tags, a_tags @@ -113,9 +113,9 @@ def preprocess_collision_query_cases(a, b, a_tags, b_tags): a.remove(b) if isinstance(a, set) and len(a) == 0: - raise ValueError(f'query recieved empty input {a=}') + raise ValueError(f"query received empty input {a=}") if isinstance(a, set) and len(a) == 0: - raise ValueError(f'query recieved empty input {b=}') + raise ValueError(f"query received empty input {b=}") # single-to-single is treated as many-to-single if isinstance(a, str): @@ -130,58 +130,62 @@ def preprocess_collision_query_cases(a, b, a_tags, b_tags): return a, b, a_tags, b_tags -@dataclass + +@dataclass class ContactResult: hit: bool names: list[str] contacts: list + def any_touching( - scene: Scene, - a: Union[str, list[str]], + scene: Scene, + a: Union[str, list[str]], b: Union[str, list[str]] = None, a_tags=None, b_tags=None, - bvh_cache=None + bvh_cache=None, ): - - ''' + """ Computes one-to-one, many-to-one, one-to-many or many-to-many collisions In all cases, returns True if any one object from a and b touch - ''' + """ a, b, a_tags, b_tags = preprocess_collision_query_cases(a, b, a_tags, b_tags) col = iu.col_from_subset(scene, a, a_tags, bvh_cache) - + if b is None and len(a) == 1: # query makes no sense, asking for intra-set collision on one element hit, names, contacts = None, (a, b), [] elif b is None: - hit, names, contacts = 
col.in_collision_internal(return_data=True, return_names=True) + hit, names, contacts = col.in_collision_internal( + return_data=True, return_names=True + ) elif isinstance(b, str): T, g = scene.graph[b] - hit, names, contacts = col.in_collision_single(scene.geometry[g], transform=T, return_data=True, return_names=True) + hit, names, contacts = col.in_collision_single( + scene.geometry[g], transform=T, return_data=True, return_names=True + ) elif isinstance(b, list): col2 = iu.col_from_subset(scene, b, b_tags, bvh_cache) - hit, names, contacts = col.in_collision_other(col2, return_names=True, return_data=True) + hit, names, contacts = col.in_collision_other( + col2, return_names=True, return_data=True + ) else: - raise ValueError(f'Unhandled case {a=} {b=}') + raise ValueError(f"Unhandled case {a=} {b=}") names = list(names) if len(names) == 1: assert isinstance(b, str) names.append(b) - logging.debug(f'added name {b} to make {names}') + logging.debug(f"added name {b} to make {names}") if len(names) == 0: names = [a, b] - return ContactResult( - hit=hit, - names=names, - contacts=contacts - ) + return ContactResult(hit=hit, names=names, contacts=contacts) + @dataclass class DistanceResult: @@ -189,31 +193,30 @@ class DistanceResult: names: list[str] data: trimesh.collision.DistanceData + def min_dist( - scene: Scene, - a: Union[str, list[str]], + scene: Scene, + a: Union[str, list[str]], b: Union[str, list[str]] = None, - a_tags: set = None, + a_tags: set = None, b_tags: set = None, - bvh_cache: dict = None + bvh_cache: dict = None, ): - - ''' + """ Computes one-to-one, many-to-one, one-to-many or many-to-many distance In all cases, returns the minimum distance between any object in a and b - ''' + """ # we get fcl error otherwise if len(a) == 1 and len(b) == 1 and a[0] == b[0]: return DistanceResult(dist=0, names=[a[0], b[0]], data=None) a, b, a_tags, b_tags = preprocess_collision_query_cases(a, b, a_tags, b_tags) col = iu.col_from_subset(scene, a, a_tags, bvh_cache) - - if b is None and len(a) == 1: dist, data = 1e9, None elif b is None: + lazydebug(logger, lambda: f"min_dist_internal({a=}, {b=})") dist, data = col.min_distance_internal(return_data=True) elif isinstance(b, str): T, g = scene.graph[b] @@ -222,49 +225,49 @@ def min_dist( obj = iu.blender_objs_from_names(b)[0] mask = tagging.tagged_face_mask(obj, b_tags) if not mask.any(): - logger.warning(f'{b=} had {mask.sum()=} for {b_tags=}') + lazydebug(logger, lambda: f"{b=} had {mask.sum()=} for {b_tags=}") geom = geom.submesh(np.where(mask), append=True) assert len(geom.faces) == mask.sum() + + lazydebug(logger, lambda: f"min_dist_single({a=}, {b=})") dist, data = col.min_distance_single(geom, transform=T, return_data=True) - if '__external' in data.names: - data.names.remove('__external') + + if "__external" in data.names: + data.names.remove("__external") data.names.add(b) - data._points[b] = data._points['__external'] - data._points.pop('__external') + data._points[b] = data._points["__external"] + data._points.pop("__external") logging.debug(f"WARNING: swapped __external for {b} to make {data.names}") elif isinstance(b, (list, set)): + logger.debug(f"min_dist_other({a=}, {b=})") col2 = iu.col_from_subset(scene, b, b_tags, bvh_cache) dist, data = col.min_distance_other(col2, return_data=True) else: - raise ValueError(f'Unhandled case {a=} {b=}') + raise ValueError(f"Unhandled case {a=} {b=}") if data is not None: - assert '__external' not in data.names + assert "__external" not in data.names return DistanceResult( - dist=dist, - 
names=list(data.names) if data is not None else None, - data=data + dist=dist, names=list(data.names) if data is not None else None, data=data ) -def contains( - scene: Scene, - a: str, - b: str, - tol = 1e-6 -) -> bool: + +def contains(scene: Scene, a: str, b: str, tol=1e-6) -> bool: """ Check if a contains b """ mesh_a = scene.geometry[a] mesh_b = scene.geometry[b] - difference = mesh_a.difference(mesh_b) - + return abs(difference.volume - mesh_a.volume) < tol -def contains_all(scene: trimesh.Scene, a: Union[str, list[str]], b: Union[str, list[str]]) -> bool: + +def contains_all( + scene: trimesh.Scene, a: Union[str, list[str]], b: Union[str, list[str]] +) -> bool: """ Check if all objects in list 'a' contain all objects in list 'b' within the given scene. @@ -276,20 +279,22 @@ def contains_all(scene: trimesh.Scene, a: Union[str, list[str]], b: Union[str, l Returns: - True if all objects in list 'a' contain all objects in list 'b', False otherwise. """ - + if isinstance(a, str): a = [a] if isinstance(b, str): b = [b] - + for obj_a in a: if not all(contains(scene, obj_a, obj_b) for obj_b in b): return False - + return True -def contains_any(scene: trimesh.Scene, a: Union[str, list[str]], b: Union[str, list[str]]) -> bool: +def contains_any( + scene: trimesh.Scene, a: Union[str, list[str]], b: Union[str, list[str]] +) -> bool: """ Check if any object in list 'a' contains any object in list 'b' within the given scene. @@ -301,20 +306,25 @@ def contains_any(scene: trimesh.Scene, a: Union[str, list[str]], b: Union[str, l Returns: - True if any object in list 'a' contains any object in list 'b', False otherwise. """ - + if isinstance(a, str): a = [a] if isinstance(b, str): b = [b] - + for obj_a in a: if any(contains(scene, obj_a, obj_b) for obj_b in b): return True - + return False -def has_line_of_sight(scene: trimesh.Scene, a: Union[str, list[str]], b: Union[str, list[str]], num_samples: int = 100) -> bool: +def has_line_of_sight( + scene: trimesh.Scene, + a: Union[str, list[str]], + b: Union[str, list[str]], + num_samples: int = 100, +) -> bool: """ Check if any object in list 'a' in the scene has a line of sight to any object in list 'b'. @@ -327,7 +337,7 @@ def has_line_of_sight(scene: trimesh.Scene, a: Union[str, list[str]], b: Union[s Returns: - True if any object in list 'a' has a line of sight to any object in list 'b', False otherwise. 
""" - + # Ensure 'a' and 'b' are lists if isinstance(a, str): a = [a] @@ -336,38 +346,47 @@ def has_line_of_sight(scene: trimesh.Scene, a: Union[str, list[str]], b: Union[s a = iu.meshes_from_names(scene, a) b = iu.meshes_from_names(scene, b) - + # Check line of sight for each object in 'a' against any object in 'b' for obj_a in a: # Sample points from the surface of object 'a' points_a = obj_a.sample(num_samples) - combined_mesh = trimesh.util.concatenate([mesh for name, mesh in scene.geometry.items() if mesh != obj_a]) - + combined_mesh = trimesh.util.concatenate( + [mesh for name, mesh in scene.geometry.items() if mesh != obj_a] + ) + for obj_b in b: # Sample points from the surface of object 'b' points_b = obj_b.sample(num_samples) - + # Create rays from points on 'a' to points on 'b' ray_origins = np.tile(points_a, (num_samples, 1)) ray_directions = np.repeat(points_b, num_samples, axis=0) - ray_origins ray_directions /= np.linalg.norm(ray_directions, axis=1)[:, None] - + # Check for intersections with the combined mesh - locations, index_ray, index_tri = combined_mesh.ray_pyembree.intersects_location(ray_origins, ray_directions, multiple_hits=False) - + locations, index_ray, index_tri = ( + combined_mesh.ray_pyembree.intersects_location( + ray_origins, ray_directions, multiple_hits=False + ) + ) + # Check if point is reached for i in range(index_ray.shape[0]): index = index_ray[i] hit_location = locations[i] - + # Check if any intersection is close to the point if np.linalg.norm(points_b[index // num_samples] - hit_location) < 1e-6: return True - + return False -def freespace_2d(scene: trimesh.Scene, a: Union[str, list[str]], b: Union[str, list[str]]) -> float: + +def freespace_2d( + scene: trimesh.Scene, a: Union[str, list[str]], b: Union[str, list[str]] +) -> float: if isinstance(a, str): a = [a] if isinstance(b, str): @@ -379,19 +398,26 @@ def freespace_2d(scene: trimesh.Scene, a: Union[str, list[str]], b: Union[str, l total_projected_area = sum(iu.project_to_xy_path2d(mesh).area for mesh in b_meshes) - available_area = sum(iu.project_to_xy_path2d(mesh).area for mesh in a_meshes) - percent_available = ((available_area - total_projected_area) / available_area) * 100 return percent_available -def rasterize_space_with_obstacles(scene, a: Union[str, list[str]], b: Union[str, list[str]], start_location, end_location, cell_size=1.0, visualize=False): + +def rasterize_space_with_obstacles( + scene, + a: Union[str, list[str]], + b: Union[str, list[str]], + start_location, + end_location, + cell_size=1.0, + visualize=False, +): """ - Rasterize the union of multiple space polygons while considering obstacle polygons, + Rasterize the union of multiple space polygons while considering obstacle polygons, then find and visualize the shortest path from start to end. 
- + Parameters: - space_polygons: list of shapely.geometry.polygon.Polygon objects representing the main spaces - obstacle_polygons: list of shapely.geometry.polygon.Polygon objects representing obstacles @@ -399,20 +425,20 @@ def rasterize_space_with_obstacles(scene, a: Union[str, list[str]], b: Union[str - end_location: tuple (x, y) representing the end location - cell_size: size of each cell in the grid - visualize: boolean, if True, visualize the union of spaces, obstacles, and the shortest path - + Returns: - graph: A networkx.Graph object representing the rasterized union of spaces minus the obstacles - path: list of nodes representing the shortest path from start to end """ + def is_close_to_any_node(neighbor, graph, threshold=1e-6): for node in graph.nodes(): distance = np.linalg.norm(np.array(neighbor) - np.array(node)) if distance < threshold: return node return None - - if isinstance(a, str): + if isinstance(a, str): a = [a] if isinstance(b, str): b = [b] @@ -425,16 +451,16 @@ def is_close_to_any_node(neighbor, graph, threshold=1e-6): # Get the union of all space polygons union_space = unary_union(space_polygons) - + # Get bounding box of the union space minx, miny, maxx, maxy = union_space.bounds - + # Create a grid over the bounding box x_coords = np.arange(minx, maxx, cell_size) y_coords = np.arange(miny, maxy, cell_size) - + graph = nx.Graph() - + # For visualization if visualize: fig, ax = plt.subplots() @@ -442,35 +468,39 @@ def is_close_to_any_node(neighbor, graph, threshold=1e-6): if isinstance(space, Polygon): x, y = space.exterior.xy ax.fill(x, y, alpha=0.5) # Fill the space - ax.plot(x, y, color='black') # Plot the space boundary + ax.plot(x, y, color="black") # Plot the space boundary elif isinstance(space, MultiPolygon): for sub_space in space.geoms: x, y = sub_space.exterior.xy ax.fill(x, y, alpha=0.5) - ax.plot(x, y, color='black') - + ax.plot(x, y, color="black") + for obstacle in obstacle_polygons: if isinstance(obstacle, Polygon): x, y = obstacle.exterior.xy - ax.fill(x, y, color='grey') # Fill the obstacles - ax.plot(x, y, color='black') # Plot the obstacle boundary + ax.fill(x, y, color="grey") # Fill the obstacles + ax.plot(x, y, color="black") # Plot the obstacle boundary elif isinstance(obstacle, MultiPolygon): for sub_obstacle in obstacle.geoms: x, y = sub_obstacle.exterior.xy - ax.fill(x, y, color='grey') - ax.plot(x, y, color='black') - + ax.fill(x, y, color="grey") + ax.plot(x, y, color="black") + # For each cell in the grid, check if its center is inside the union space and outside all obstacle polygons for x in x_coords: for y in y_coords: cell_center = Point(x + cell_size / 2, y + cell_size / 2) - if cell_center.within(union_space) and all(not cell_center.within(obstacle) for obstacle in obstacle_polygons): + if cell_center.within(union_space) and all( + not cell_center.within(obstacle) for obstacle in obstacle_polygons + ): graph.add_node((x + cell_size / 2, y + cell_size / 2)) - + # For visualization if visualize: - ax.plot(cell_center.x, cell_center.y, 'bo', markersize=3) # Plot the point inside the union space and outside obstacles - + ax.plot( + cell_center.x, cell_center.y, "bo", markersize=3 + ) # Plot the point inside the union space and outside obstacles + # Connect each node to its neighboring nodes for node in graph.nodes(): x, y = node @@ -478,35 +508,52 @@ def is_close_to_any_node(neighbor, graph, threshold=1e-6): (x + cell_size, y), (x - cell_size, y), (x, y + cell_size), - (x, y - cell_size) + (x, y - cell_size), ] for neighbor in 
neighbors: closest_node = is_close_to_any_node(neighbor, graph) if closest_node is not None: graph.add_edge(node, closest_node) - # Find the closest nodes to the start and end locations - start_node = min(graph.nodes(), key=lambda node: np.linalg.norm(np.array(node) - np.array(start_location))) - end_node = min(graph.nodes(), key=lambda node: np.linalg.norm(np.array(node) - np.array(end_location))) - + start_node = min( + graph.nodes(), + key=lambda node: np.linalg.norm(np.array(node) - np.array(start_location)), + ) + end_node = min( + graph.nodes(), + key=lambda node: np.linalg.norm(np.array(node) - np.array(end_location)), + ) # Calculate the shortest path using Dijkstra's algorithm - path = nx.shortest_path(graph, source=start_node, target=end_node, weight='weight') + path = nx.shortest_path(graph, source=start_node, target=end_node, weight="weight") # Visualize the path if visualize: path_x = [x for x, y in path] path_y = [y for x, y in path] - ax.plot(path_x, path_y, c='red', linewidth=2, label='Shortest Path') - ax.scatter([start_node[0], end_node[0]], [start_node[1], end_node[1]], c='green', s=100, label='Start & End') + ax.plot(path_x, path_y, c="red", linewidth=2, label="Shortest Path") + ax.scatter( + [start_node[0], end_node[0]], + [start_node[1], end_node[1]], + c="green", + s=100, + label="Start & End", + ) plt.legend() - plt.title('Shortest Path from Start to End') + plt.title("Shortest Path from Start to End") plt.show() - + return graph, path -def angle_alignment_cost_tagged(state: state_def.State, a: Union[str, list[str]], b: Union[str, list[str]], b_tags=None, visualize=False): + +def angle_alignment_cost_tagged( + state: state_def.State, + a: Union[str, list[str]], + b: Union[str, list[str]], + b_tags=None, + visualize=False, +): """ Return the dot product between the axes of a and the normal of the closest edge of b """ @@ -531,7 +578,13 @@ def angle_alignment_cost_tagged(state: state_def.State, a: Union[str, list[str]] return res -def angle_alignment_cost_base(state: state_def.State, a: Union[str, list[str]], b: Union[str, list[str]], visualize=False): + +def angle_alignment_cost_base( + state: state_def.State, + a: Union[str, list[str]], + b: Union[str, list[str]], + visualize=False, +): """ Return the dot product between the axes of a and the normal of the closest edge of b """ @@ -570,7 +623,7 @@ def angle_alignment_cost_base(state: state_def.State, a: Union[str, list[str]], fig, ax = plt.subplots() for edge, _ in b_edges: x, y = edge.xy - ax.plot(x, y, color='red', linewidth=1, label='B Edges') + ax.plot(x, y, color="red", linewidth=1, label="B Edges") score = 0 @@ -615,50 +668,87 @@ def angle_alignment_cost_base(state: state_def.State, a: Union[str, list[str]], if a_poly is not None: if isinstance(a_poly, Polygon): x, y = a_poly.exterior.xy - ax.fill(x, y, alpha=0.5, fc='blue', ec='black', label='Polygon a') + ax.fill(x, y, alpha=0.5, fc="blue", ec="black", label="Polygon a") elif isinstance(a_poly, MultiPolygon): for sub_poly in a_poly.geoms: x, y = sub_poly.exterior.xy - ax.fill(x, y, alpha=0.5, fc='blue', ec='black', label='Polygon a') + ax.fill( + x, y, alpha=0.5, fc="blue", ec="black", label="Polygon a" + ) else: x, y = a_mesh.vertices[:, 0], a_mesh.vertices[:, 1] - ax.scatter(x, y, color='blue', label='Vertices a') - - ax.arrow(a_centroid.x, a_centroid.y, axis[0], axis[1], head_width=0.15, head_length=0.25, fc='green', ec='green', label='Axis of a') + ax.scatter(x, y, color="blue", label="Vertices a") + + ax.arrow( + a_centroid.x, + a_centroid.y, + axis[0], 
+ axis[1], + head_width=0.15, + head_length=0.25, + fc="green", + ec="green", + label="Axis of a", + ) x, y = closest_line.xy - ax.plot(x, y, color="green", linewidth=2.5, label='Closest Edge') - ax.plot(a_centroid.x, a_centroid.y, 'o', color='black', label='Centroid of a') + ax.plot(x, y, color="green", linewidth=2.5, label="Closest Edge") + ax.plot( + a_centroid.x, a_centroid.y, "o", color="black", label="Centroid of a" + ) mid_point = closest_line.interpolate(0.5, normalized=True) - ax.arrow(mid_point.x, mid_point.y, normal_vector_1[0], normal_vector_1[1], head_width=0.15, head_length=0.25, fc='yellow', ec='yellow', label='Normal Vector 1') - ax.arrow(mid_point.x, mid_point.y, normal_vector_2[0], normal_vector_2[1], head_width=0.15, head_length=0.25, fc='orange', ec='orange', label='Normal Vector 2') + ax.arrow( + mid_point.x, + mid_point.y, + normal_vector_1[0], + normal_vector_1[1], + head_width=0.15, + head_length=0.25, + fc="yellow", + ec="yellow", + label="Normal Vector 1", + ) + ax.arrow( + mid_point.x, + mid_point.y, + normal_vector_2[0], + normal_vector_2[1], + head_width=0.15, + head_length=0.25, + fc="orange", + ec="orange", + label="Normal Vector 2", + ) if visualize: - ax.set_title('Polygons, Closest Edge and Normal') - ax.set_aspect('equal') + ax.set_title("Polygons, Closest Edge and Normal") + ax.set_aspect("equal") ax.grid(True) plt.show() return score + def angle_alignment_cost( - state: state_def.State, - a: Union[str, list[str]], - b: Union[str, list[str]], - b_tags=None, - visualize=False + state: state_def.State, + a: Union[str, list[str]], + b: Union[str, list[str]], + b_tags=None, + visualize=False, ): if b_tags is not None: return angle_alignment_cost_tagged(state, a, b, b_tags, visualize) return angle_alignment_cost_base(state, a, b, visualize) -def focus_score(state: state_def.State, a: Union[str, list[str]], b: str, visualize = False): + +def focus_score( + state: state_def.State, a: Union[str, list[str]], b: str, visualize=False +): """ The how much objects in a focus on b """ scene = state.trimesh_scene if isinstance(a, str): a = [a] - a_meshes = iu.meshes_from_names(scene, a) a_blender_objs = iu.blender_objs_from_names(a) @@ -672,11 +762,11 @@ def focus_score(state: state_def.State, a: Union[str, list[str]], b: str, visual fig, ax = plt.subplots() if isinstance(b_poly, Polygon): x, y = b_poly.exterior.xy - ax.fill(x, y, alpha=0.5, fc='red', ec='black', label='Polygon b') + ax.fill(x, y, alpha=0.5, fc="red", ec="black", label="Polygon b") elif isinstance(b_poly, MultiPolygon): for sub_poly in b_poly.geoms: x, y = sub_poly.exterior.xy - ax.fill(x, y, alpha=0.5, fc='red', ec='black', label='Polygon b') + ax.fill(x, y, alpha=0.5, fc="red", ec="black", label="Polygon b") score = 0 for a_poly, a_mesh, a_obj in zip(a_polys, a_meshes, a_blender_objs): @@ -687,7 +777,7 @@ def focus_score(state: state_def.State, a: Union[str, list[str]], b: str, visual # turn centroids to np array a_centroid = np.array([a_centroid.x, a_centroid.y]) b_centroid = np.array([b_centroid.x, b_centroid.y]) - + focus_vec = b_centroid - a_centroid focus_vec /= np.linalg.norm(focus_vec) @@ -695,38 +785,62 @@ def focus_score(state: state_def.State, a: Union[str, list[str]], b: str, visual # Plotting the polygons if isinstance(a_poly, Polygon): x, y = a_poly.exterior.xy - ax.fill(x, y, alpha=0.5, fc='blue', ec='black', label='Polygon a') + ax.fill(x, y, alpha=0.5, fc="blue", ec="black", label="Polygon a") elif isinstance(a_poly, MultiPolygon): for sub_poly in a_poly.geoms: x, y = 
sub_poly.exterior.xy - ax.fill(x, y, alpha=0.5, fc='blue', ec='black', label='Polygon a') + ax.fill(x, y, alpha=0.5, fc="blue", ec="black", label="Polygon a") # plot axis - ax.arrow(a_centroid[0], a_centroid[1], axis[0], axis[1], head_width=0.15, head_length=0.25, fc='green', ec='green', label='Axis of a') + ax.arrow( + a_centroid[0], + a_centroid[1], + axis[0], + axis[1], + head_width=0.15, + head_length=0.25, + fc="green", + ec="green", + label="Axis of a", + ) # Highlight centroid of a - ax.plot(a_centroid[0], a_centroid[1], 'o', color='black', label='Centroid of a') + ax.plot( + a_centroid[0], a_centroid[1], "o", color="black", label="Centroid of a" + ) # Plot the outward normal vector - ax.arrow(a_centroid[0], a_centroid[1], focus_vec[0], focus_vec[1], head_width=0.15, head_length=0.25, fc='yellow', ec='yellow', label='Focus vector') - - score += -np.dot(axis, focus_vec)/2 + 0.5 + ax.arrow( + a_centroid[0], + a_centroid[1], + focus_vec[0], + focus_vec[1], + head_width=0.15, + head_length=0.25, + fc="yellow", + ec="yellow", + label="Focus vector", + ) + + score += -np.dot(axis, focus_vec) / 2 + 0.5 if visualize: # Set axis properties - ax.set_title('Polygons, Focus Vector') - ax.set_aspect('equal') + ax.set_title("Polygons, Focus Vector") + ax.set_aspect("equal") ax.grid(True) # ax.legend(loc="upper left") plt.show() - return score #/ len(a) + return score # / len(a) + def edge(scene, surface_name: str): surface = iu.meshes_from_names(scene, surface_name)[0] outline_3d = surface.outline() return outline_3d + def min_dist_2d(scene, a: Union[str, list[str]], b, visualize=False): """ projects onto b and finds the min distance between a and b @@ -743,7 +857,10 @@ def min_dist_2d(scene, a: Union[str, list[str]], b, visualize=False): a_meshes = iu.meshes_from_names(scene, a) - a_projections = [trimesh.path.polygons.projected(mesh, plane_normal, plane_origin) for mesh in a_meshes] + a_projections = [ + trimesh.path.polygons.projected(mesh, plane_normal, plane_origin) + for mesh in a_meshes + ] # Measure the distance for a_proj in a_projections: source_geom = a_proj @@ -752,17 +869,18 @@ def min_dist_2d(scene, a: Union[str, list[str]], b, visualize=False): if dist < min_dist: if visualize: pt_a, pt_b = nearest_points(source_geom, target_geom) - ax.plot([pt_a.x, pt_b.x], [pt_a.y, pt_b.y], color='red') - #plot source and target geoms - iu.plot_geometry(ax, source_geom, 'blue') - iu.plot_geometry(ax, target_geom, 'green') + ax.plot([pt_a.x, pt_b.x], [pt_a.y, pt_b.y], color="red") + # plot source and target geoms + iu.plot_geometry(ax, source_geom, "blue") + iu.plot_geometry(ax, target_geom, "green") min_dist = dist - + if visualize: plt.show() return min_dist -def min_dist_boundary(scene: Scene, a: Union[str, list[str]], boundary): + +def min_dist_boundary(scene: Scene, a: Union[str, list[str]], boundary): if isinstance(a, str): a = [a] if isinstance(boundary, trimesh.path.path.Path3D): @@ -770,95 +888,93 @@ def min_dist_boundary(scene: Scene, a: Union[str, list[str]], boundary): elif isinstance(boundary, trimesh.path.path.Path2D): pass else: - raise TypeError(f'Unhandled type {boundary=}') + raise TypeError(f"Unhandled type {boundary=}") + class ConstraintViolated(Exception): pass + FATAL = True + + def constraint_violated(message): if FATAL: raise ConstraintViolated(message) else: - print(f'{ConstraintViolated.__name__}: {message}') + print(f"{ConstraintViolated.__name__}: {message}") + def constrain_contact( - res: ContactResult, - should_touch=True, - max_depth=1e-2, - #normal_dir=None, - 
#normal_dot_min=None, - #normal_dot_max=None + res: ContactResult, + should_touch=True, + max_depth=1e-2, + # normal_dir=None, + # normal_dot_min=None, + # normal_dot_max=None ): - if res.hit is None: - return False # arises from an internal-contact query on a set of one element + return False # arises from an internal-contact query on a set of one element if should_touch is not None and should_touch != res.hit: if should_touch: - return False #constraint_violated(f'At least one of {res.names} must touch eachother') + return False # constraint_violated(f'At least one of {res.names} must touch eachother') else: - return False #constraint_violated(f'{res.names} must not touch') + return False # constraint_violated(f'{res.names} must not touch') if res.hit and max_depth is not None: observed_depth = max(c.depth for c in res.contacts) if observed_depth > max_depth: - return False #constraint_violated(f'Contact between {res.names} penetrates by depth {observed_depth} > {max_depth}') + return False # constraint_violated(f'Contact between {res.names} penetrates by depth {observed_depth} > {max_depth}') return True -def constrain_dist(res: dict, min=None, max=None): - if res.data is None: # results from internal distance check on 1 object +def constrain_dist(res: dict, min=None, max=None): + if res.data is None: # results from internal distance check on 1 object print("res data error") return - if not ( - min is None or - min < res.dist - ): + if not (min is None or min < res.dist): return False - if not ( - max is None or - max > res.dist - ): + if not (max is None or max > res.dist): return False return True + def constrain_dist_soft(res: dict, min=None, max=None): - - if res.data is None: # results from internal distance check on 1 object - print("res data error") - return - - if res.dist < min: - return min - res.dist - - if res.dist > max: - return res.dist - max - return 0 + if res.data is None: # results from internal distance check on 1 object + print("res data error") + return -def touching_soft(scene, a, b): + if res.dist < min: + return min - res.dist - res = any_touching(scene, a, b) + if res.dist > max: + return res.dist - max + return 0 +def touching_soft(scene, a, b): + res = any_touching(scene, a, b) + if res.hit is None: print("res hit error") - return np.inf # arises from an internal-contact query on a set of one element + return np.inf # arises from an internal-contact query on a set of one element if res.hit: observed_depth = max(c.depth for c in res.contacts) return observed_depth - else: + else: res = min_dist(scene, a, b) if res.data is None: return np.inf else: return res.dist + def dist_soft_score(res: dict, min, max): - if res.data is None: # results from internal distance check on 1 object + if res.data is None: # results from internal distance check on 1 object return 0 if res.dist > max: @@ -867,19 +983,20 @@ def dist_soft_score(res: dict, min, max): return min - res.dist else: return 0 - -_accessibility_vis_seen_objs = set() # used to make vis=True below less spammy + + +_accessibility_vis_seen_objs = set() # used to make vis=True below less spammy + def accessibility_cost_cuboid_penetration( - scene: trimesh.Scene, + scene: trimesh.Scene, a: Union[str, list[str]], b: Union[str, list[str]], normal_dir: np.ndarray, dist: float, bvh_cache: dict = None, - vis=False + vis=False, ): - """ Extrude the bbox of a by dist in the direction of normal_dir, and check for collisions with b Return the maximum distance that any part of b penetrates this extrusion @@ -897,16 +1014,17 
@@ def accessibility_cost_cuboid_penetration( # find which of +X, -X +Y, -Y, +Z, -Z is the normal_dir. Only these values are supported if ( - not np.isclose(np.linalg.norm(normal_dir), 1) or - np.isclose(normal_dir, 0).sum() != 2 + not np.isclose(np.linalg.norm(normal_dir), 1) + or np.isclose(normal_dir, 0).sum() != 2 ): - raise ValueError(f'Invalid normal_dir {normal_dir=}, expected +X, -X, +Y, -Y, +Z, -Z') + raise ValueError( + f"Invalid normal_dir {normal_dir=}, expected +X, -X, +Y, -Y, +Z, -Z" + ) normal_axis = np.argmax(np.abs(normal_dir)) normal_sign = np.sign(normal_dir[normal_axis]) visobjs = [] for name in a: - T, g = scene.graph[name] geom = scene.geometry[g] @@ -921,15 +1039,19 @@ def accessibility_cost_cuboid_penetration( origin_to_bbox_center = bbox.mean(axis=0) extent_from_real_origin = bbox[0 if normal_sign < 0 else -1][normal_axis] - offset_vec = normal_dir * (dist/2 + extent_from_real_origin - origin_to_bbox_center[normal_axis]) + offset_vec = normal_dir * ( + dist / 2 + extent_from_real_origin - origin_to_bbox_center[normal_axis] + ) total_offset_vec = origin_to_bbox_center + offset_vec - freespace_box_transform = np.array(bpy_obj.matrix_world) @ trimesh.transformations.translation_matrix(total_offset_vec) - + freespace_box_transform = np.array( + bpy_obj.matrix_world + ) @ trimesh.transformations.translation_matrix(total_offset_vec) + a_free_col.add_object(name, freespace_box, freespace_box_transform) - + visobjs.append(geom.apply_transform(T)) - + visobjs.append(freespace_box.apply_transform(freespace_box_transform)) b_col = iu.col_from_subset(scene, b, bvh_cache=bvh_cache) @@ -937,7 +1059,9 @@ def accessibility_cost_cuboid_penetration( if vis: bobjs = iu.meshes_from_names(scene, b) - print(f"{np.round(origin_to_bbox_center, 3)=} {extent_from_real_origin} {bpy_obj.dimensions}") + print( + f"{np.round(origin_to_bbox_center, 3)=} {extent_from_real_origin} {bpy_obj.dimensions}" + ) if not all(name in _accessibility_vis_seen_objs for name in a + b): trimesh.Scene(visobjs + bobjs).show() _accessibility_vis_seen_objs.update(a + b) @@ -947,13 +1071,14 @@ def accessibility_cost_cuboid_penetration( else: return 0 + @gin.configurable -def accessibility_cost(scene, a, b, normal, visualize=False, fast = True): +def accessibility_cost(scene, a, b, normal, visualize=False, fast=True): """ Computes how much objs b block front access to a. b obj blockages are not summed. 
the closest b obj to a is taken as the representative blockage """ - + if isinstance(a, str): a = [a] if isinstance(b, str): @@ -969,20 +1094,22 @@ def accessibility_cost(scene, a, b, normal, visualize=False, fast = True): b_trimeshes = iu.meshes_from_names(scene, b) a_objs = iu.blender_objs_from_names(a) - b_objs = iu.blender_objs_from_names(b) + iu.blender_objs_from_names(b) score = 0 for a_name, a_obj, a_trimesh in zip(a, a_objs, a_trimeshes): - a_centroid = a_trimesh.centroid - + front_plane_pt = a_centroid front_plane_normal = np.array(a_obj.matrix_world.to_3x3() @ Vector(normal)) - a_centroid_proj = a_centroid - np.dot(a_centroid - front_plane_pt, front_plane_normal) * front_plane_normal + a_centroid_proj = ( + a_centroid + - np.dot(a_centroid - front_plane_pt, front_plane_normal) + * front_plane_normal + ) - - if fast: + if fast: # get the closest centroid in b and the mesh that it belongs to b_centroids = [b_trimesh.centroid for b_trimesh in b_trimeshes] distances = [np.linalg.norm(pt - a_centroid_proj) for pt in b_centroids] @@ -994,7 +1121,7 @@ def accessibility_cost(scene, a, b, normal, visualize=False, fast = True): res = min_dist(scene, a_name, b) b_chosen = res.names[1] if res.names[0] == a_name else res.names[0] b_closest_pt = res.data.point(b_chosen) - + centroid_to_b = b_closest_pt - a_centroid_proj dist = np.linalg.norm(centroid_to_b) @@ -1003,17 +1130,27 @@ def accessibility_cost(scene, a, b, normal, visualize=False, fast = True): if np.dot(centroid_to_b, front_plane_normal) < 0: continue # cos theta/dist - score += (np.dot(centroid_to_b, front_plane_normal) / dist**2) * diag_length + score += (np.dot(centroid_to_b, front_plane_normal) / dist**2) * diag_length if visualize: - ax.plot([a_centroid_proj[0], b_closest_pt[0]], [a_centroid_proj[1], b_closest_pt[1]], color='red') - #plot source and target geoms - iu.plot_geometry(ax, a_trimesh, 'blue') - iu.plot_geometry(ax, iu.meshes_from_names(scene, b_chosen)[0], 'green') + ax.plot( + [a_centroid_proj[0], b_closest_pt[0]], + [a_centroid_proj[1], b_closest_pt[1]], + color="red", + ) + # plot source and target geoms + iu.plot_geometry(ax, a_trimesh, "blue") + iu.plot_geometry(ax, iu.meshes_from_names(scene, b_chosen)[0], "green") # plot front plane # plot_geometry(ax, planes.extract_tagged_plane(a_obj, a_tag, a_front_plane), 'black') # plot centroid - ax.plot(a_centroid_proj[0], a_centroid_proj[1], 'o', color='black', label='Centroid of a') - + ax.plot( + a_centroid_proj[0], + a_centroid_proj[1], + "o", + color="black", + label="Centroid of a", + ) + if visualize: plt.show() return score @@ -1027,33 +1164,38 @@ def center_stable_surface(scene, a, state): a = [a] score = 0 - a_trimeshes = iu.meshes_from_names( - scene, - [state.objs[ai].obj.name for ai in a] - ) + a_trimeshes = iu.meshes_from_names(scene, [state.objs[ai].obj.name for ai in a]) - for name, mesh in zip(a, a_trimeshes): + for name, mesh in zip(a, a_trimeshes): obj_state = state.objs[name] obj = obj_state.obj for i, relation_state in enumerate(obj_state.relations): relation = relation_state.relation parent_obj = state.objs[relation_state.target_name].obj - obj_tags = relation.child_tags + obj_tags = relation.child_tags parent_tags = relation.parent_tags parent_all_planes = state.planes.get_tagged_planes(parent_obj, parent_tags) obj_all_planes = state.planes.get_tagged_planes(obj, obj_tags) parent_plane = parent_all_planes[relation_state.parent_plane_idx] - obj_plane = obj_all_planes[relation_state.child_plane_idx] - + obj_all_planes[relation_state.child_plane_idx] 
+ if relation_state.parent_plane_idx >= len(parent_all_planes): - logging.warning(f'{parent_obj.name=} had too few planes ({len(parent_all_planes)}) for {relation_state}') + logging.warning( + f"{parent_obj.name=} had too few planes ({len(parent_all_planes)}) for {relation_state}" + ) return False if relation_state.child_plane_idx >= len(obj_all_planes): - logging.warning(f'{obj.name=} had too few planes ({len(obj_all_planes)}) for {relation_state}') + logging.warning( + f"{obj.name=} had too few planes ({len(obj_all_planes)}) for {relation_state}" + ) return False - splitted_parent = state.planes.extract_tagged_plane(parent_obj, parent_tags, parent_plane) - parent_trimesh = add_to_scene(state.trimesh_scene, splitted_parent, preprocess=True) + splitted_parent = state.planes.extract_tagged_plane( + parent_obj, parent_tags, parent_plane + ) + parent_trimesh = add_to_scene( + state.trimesh_scene, splitted_parent, preprocess=True + ) # splitted_obj = planes.extract_tagged_plane(obj, obj_tags, obj_plane) # add_to_scene(state.trimesh_scene, splitted_obj, preprocess=True) obj_centroid = mesh.centroid @@ -1065,7 +1207,9 @@ def center_stable_surface(scene, a, state): return score -def reflectional_asymmetry_score(scene, a: Union[str, list[str]], b: str, use_long_plane=True): +def reflectional_asymmetry_score( + scene, a: Union[str, list[str]], b: str, use_long_plane=True +): """ Computes the reflectional asymmetry score between a and b """ @@ -1074,11 +1218,11 @@ def reflectional_asymmetry_score(scene, a: Union[str, list[str]], b: str, use_lo if b is None or len(b) == 0: return 0 - a_trimeshes = iu.meshes_from_names(scene, a) + iu.meshes_from_names(scene, a) b_trimesh = iu.meshes_from_names(scene, b)[0] a_objs = iu.blender_objs_from_names(a) - b_obj = iu.blender_objs_from_names(b)[0] + iu.blender_objs_from_names(b)[0] bbox = b_trimesh.bounding_box_oriented vertices = bbox.vertices @@ -1089,8 +1233,6 @@ def reflectional_asymmetry_score(scene, a: Union[str, list[str]], b: str, use_lo else: plane_pt, plane_normal = mid_planes[1] - - return symmetry.calculate_reflectional_asymmetry(a_objs, plane_pt, plane_normal) @@ -1101,8 +1243,8 @@ def coplanarity_cost_pair(scene, a: str, b: str): a_trimesh = iu.meshes_from_names(scene, a)[0] b_trimesh = iu.meshes_from_names(scene, b)[0] - a_obj = iu.blender_objs_from_names(a)[0] - b_obj = iu.blender_objs_from_names(b)[0] + iu.blender_objs_from_names(a)[0] + iu.blender_objs_from_names(b)[0] a_trimesh_bbox = a_trimesh.bounding_box_oriented b_trimesh_bbox = b_trimesh.bounding_box_oriented @@ -1120,12 +1262,16 @@ def is_normal_new(normal, normals_list): for i in range(len(a_trimesh_bbox.faces)): normal = a_trimesh_bbox.face_normals[i] if is_normal_new(normal, [n for _, n in object1_planes]): - object1_planes.append((a_trimesh_bbox.vertices[a_trimesh_bbox.faces[i]][0], normal)) + object1_planes.append( + (a_trimesh_bbox.vertices[a_trimesh_bbox.faces[i]][0], normal) + ) for i in range(len(b_trimesh_bbox.faces)): normal = b_trimesh_bbox.face_normals[i] if is_normal_new(normal, [n for _, n in object2_planes]): - object2_planes.append((b_trimesh_bbox.vertices[b_trimesh_bbox.faces[i]][0], normal)) + object2_planes.append( + (b_trimesh_bbox.vertices[b_trimesh_bbox.faces[i]][0], normal) + ) # Calculate angle cost matrix for bipartite matching angle_cost_matrix = np.zeros((len(object1_planes), len(object2_planes))) @@ -1140,13 +1286,18 @@ def is_normal_new(normal, normals_list): # Calculate total costs (angle + distance) for the optimal matching total_costs = [] for r, c in 
zip(row_ind, col_ind): - distance_cost = iu.distance_to_plane(object1_planes[r][0], object2_planes[c][0], object2_planes[c][1]) - total_cost = angle_cost_matrix[r, c] + distance_cost # Sum angle and distance costs + distance_cost = iu.distance_to_plane( + object1_planes[r][0], object2_planes[c][0], object2_planes[c][1] + ) + total_cost = ( + angle_cost_matrix[r, c] + distance_cost + ) # Sum angle and distance costs total_costs.append(total_cost) total_costs = sorted(total_costs) return sum(total_costs[:-2]) + def coplanarity_cost(scene, a: Union[str, list[str]]): """ Computes the coplanarity cost between a and b @@ -1154,19 +1305,23 @@ def coplanarity_cost(scene, a: Union[str, list[str]]): if isinstance(a, str): a = [a] - a_trimeshes = iu.meshes_from_names(scene, a) + iu.meshes_from_names(scene, a) a_objs = iu.blender_objs_from_names(a) # Order objects by principal axis ordered_objects = iu.order_objects_by_principal_axis(a_objs) - + all_total_costs = [] # To store the sum of angle and distance costs for each optimal matching - + # Iterate over pairs of consecutive objects for i in range(len(ordered_objects) - 1): - all_total_costs.append(coplanarity_cost_pair(scene, ordered_objects[i].name, ordered_objects[i + 1].name)) - + all_total_costs.append( + coplanarity_cost_pair( + scene, ordered_objects[i].name, ordered_objects[i + 1].name + ) + ) + # Calculate the final cost as the sum of the remaining costs final_cost = sum(all_total_costs) / len(a_objs) - - return final_cost \ No newline at end of file + + return final_cost diff --git a/infinigen/core/constraints/example_solver/__init__.py b/infinigen/core/constraints/example_solver/__init__.py index 7a3d280f8..7be9b05bc 100644 --- a/infinigen/core/constraints/example_solver/__init__.py +++ b/infinigen/core/constraints/example_solver/__init__.py @@ -1,3 +1,3 @@ from . 
import room +from .solve import Solver from .state_def import State -from .solve import Solver \ No newline at end of file diff --git a/infinigen/core/constraints/example_solver/annealing.py b/infinigen/core/constraints/example_solver/annealing.py index 2db976694..f93b254e1 100644 --- a/infinigen/core/constraints/example_solver/annealing.py +++ b/infinigen/core/constraints/example_solver/annealing.py @@ -4,39 +4,34 @@ # Authors: Alexander Raistrick, Karhan Kayan -from collections import defaultdict import logging import os import time -import copy import typing from pprint import pprint -import matplotlib.pyplot as plt import bpy +import gin +import matplotlib.pyplot as plt import numpy as np -import tqdm import pandas as pd -import gin -from infinigen.core.constraints import ( - constraint_language as cl, - reasoning as r, - evaluator -) -from .moves import Move -from .state_def import State +from infinigen.core.constraints import constraint_language as cl +from infinigen.core.constraints import evaluator +from infinigen.core.constraints import reasoning as r +from infinigen.core.constraints.constraint_language import util as impl_util from infinigen.core.util import blender as butil -from infinigen.core.constraints.constraint_language import util as impl_util +from .moves import Move +from .state_def import State logger = logging.getLogger(__name__) -BPY_GARBAGE_COLLECT_FREQUENCY = 20 # every X optim steps +BPY_GARBAGE_COLLECT_FREQUENCY = 20 # every X optim steps + @gin.configurable class SimulatedAnnealingSolver: - def __init__( self, max_invalid_candidates, @@ -47,21 +42,20 @@ def __init__( output_folder=None, visualize=False, print_report_freq=10, - print_breakdown_freq=0 + print_breakdown_freq=0, ) -> None: - self.initial_temp = initial_temp self.final_temp = final_temp self.max_invalid_candidates = max_invalid_candidates self.finetune_pct = finetune_pct - self.print_report_freq = print_report_freq + self.print_report_freq = print_report_freq self.print_breakdown_freq = print_breakdown_freq self.checkpoint_best = checkpoint_best if checkpoint_best: - raise NotImplementedError(f'{checkpoint_best=}') - + raise NotImplementedError(f"{checkpoint_best=}") + self.output_folder = output_folder self.visualize = visualize @@ -71,85 +65,87 @@ def __init__( self.eval_memo = {} def save_stats(self, path): - if len(self.stats) == 0: return df = pd.DataFrame.from_records(self.stats) - logger.info(f'Saving stats {path}') + logger.info(f"Saving stats {path}") df.to_csv(path) fig, ax1 = plt.subplots() - ax1.set_xlabel('Iteration') - ax1.set_ylabel('Score', color='C0') - ax1.plot(df['curr_iteration'], df['loss'], color='C0') - - #ax2 = ax1.twinx() - #ax2.set_ylabel('Move Time', color='C1') - #ax2.plot(df['curr_iteration'], df['move_dur'], color='C1') - - figpath = path.parent/(path.stem+'.png') - logger.info(f'Saving plot {figpath}') + ax1.set_xlabel("Iteration") + ax1.set_ylabel("Score", color="C0") + ax1.plot(df["curr_iteration"], df["loss"], color="C0") + + # ax2 = ax1.twinx() + # ax2.set_ylabel('Move Time', color='C1') + # ax2.plot(df['curr_iteration'], df['move_dur'], color='C1') + + figpath = path.parent / (path.stem + ".png") + logger.info(f"Saving plot {figpath}") plt.savefig(figpath) plt.close() logger.info(f"Total elapsed {path.stem} {self.stats[-1]['elapsed']:.2f}") def reset(self, max_iters): - self.curr_iteration = 0 self.stats = [] self.curr_result = None self.best_loss = None - self.eval_memo = {} - + self.eval_memo = {} + self.optim_start_time = time.perf_counter() self.max_iterations 
= max_iters - + if max_iters == 0: self.cooling_rate = 0 else: - steps = (max_iters * (1 - self.finetune_pct)) + steps = max_iters * (1 - self.finetune_pct) ratio = self.final_temp / self.initial_temp - self.cooling_rate = np.power(ratio, 1/steps) + self.cooling_rate = np.power(ratio, 1 / steps) - logger.debug(f'Reset solver with {max_iters=} cooling_rate={self.cooling_rate:.4f}') + logger.debug( + f"Reset solver with {max_iters=} cooling_rate={self.cooling_rate:.4f}" + ) def checkpoint(self, state): - - filename = os.path.join(self.output_folder, f"checkpoint_state.pkl") + filename = os.path.join(self.output_folder, "checkpoint_state.pkl") state.save(filename) if self.visualize: - #save score plot + # save score plot plt.plot(self.score_history) - plt.savefig(os.path.join(self.output_folder, f"scores.png")) + plt.savefig(os.path.join(self.output_folder, "scores.png")) plt.close() # render image i = 1 while os.path.exists(os.path.join(self.output_folder, f"{i:04}.png")): i += 1 - bpy.context.scene.render.filepath = os.path.join(self.output_folder, f"{i:04}.png") + bpy.context.scene.render.filepath = os.path.join( + self.output_folder, f"{i:04}.png" + ) bpy.ops.render.render(write_still=True) def validate_lazy_eval( - self, - state: State, - consgraph: cl.Problem, - prop_result: evaluator.EvalResult, - filter_domain: r.Domain + self, + state: State, + consgraph: cl.Problem, + prop_result: evaluator.EvalResult, + filter_domain: r.Domain, ): - test_memo = {} impl_util.DISABLE_BVH_CACHE = True - real_result = evaluator.evaluate_problem(consgraph, state, filter_domain, memo=test_memo) + real_result = evaluator.evaluate_problem( + consgraph, state, filter_domain, memo=test_memo + ) impl_util.DISABLE_BVH_CACHE = False if real_result.loss() == prop_result.loss(): return - + for n in consgraph.traverse(inorder=False): key = evaluator.memo_key(n) if key not in self.eval_memo: @@ -157,11 +153,11 @@ def validate_lazy_eval( lazy = self.eval_memo[key] if test_memo[key] == lazy: continue - - print('\n\n INVALID') + + print("\n\n INVALID") pprint(n, depth=3) - print(f'memo for node is out of sync, got {lazy=} yet {test_memo[key]=}') - raise ValueError(f'{real_result.loss()=:.4f} {prop_result.loss()=:.4f}') + print(f"memo for node is out of sync, got {lazy=} yet {test_memo[key]=}") + raise ValueError(f"{real_result.loss()=:.4f} {prop_result.loss()=:.4f}") @gin.configurable def evaluate_move( @@ -171,9 +167,8 @@ def evaluate_move( move: Move, filter_domain: r.Domain, do_lazy_eval=True, - validate_lazy_eval=False + validate_lazy_eval=False, ): - if do_lazy_eval: evaluator.evict_memo_for_move(consgraph, state, self.eval_memo, move) prop_result = evaluator.evaluate_problem( @@ -186,86 +181,86 @@ def evaluate_move( if validate_lazy_eval: self.validate_lazy_eval(state, consgraph, prop_result, filter_domain) - + return prop_result @gin.configurable def retry_attempt_proposals( - self, - propose_func: typing.Callable, - consgraph: cl.Node, - state: State, - temp: float, + self, + propose_func: typing.Callable, + consgraph: cl.Node, + state: State, + temp: float, filter_domain: r.Domain, ) -> typing.Tuple[Move, evaluator.EvalResult, int]: - move_gen = propose_func(consgraph, state, filter_domain, temp) move = None retry = None for retry, move in enumerate(move_gen): - if retry == self.max_invalid_candidates: - logger.debug(f'{move_gen=} reached {self.max_invalid_candidates=} without succeeding an apply()') + logger.debug( + f"{move_gen=} reached {self.max_invalid_candidates=} without succeeding an apply()" + ) 
break - + succeeded = move.apply(state) if succeeded: evaluator.evict_memo_for_move(consgraph, state, self.eval_memo, move) result = self.evaluate_move(consgraph, state, move, filter_domain) return move, result, retry - - logger.debug(f'{retry=} reverting {move=}') + + logger.debug(f"{retry=} reverting {move=}") evaluator.evict_memo_for_move(consgraph, state, self.eval_memo, move) move.revert(state) else: - logger.debug(f'{move_gen=} produced {retry} attempts and none were valid') - + logger.debug(f"{move_gen=} produced {retry} attempts and none were valid") + return move, None, retry def curr_temp(self) -> float: - temp = self.initial_temp * self.cooling_rate ** self.curr_iteration + temp = self.initial_temp * self.cooling_rate**self.curr_iteration temp = np.clip(temp, self.final_temp, self.initial_temp) return temp def metrop_hastings_with_viol(self, prop_result: evaluator.EvalResult, temp: float): - prop_viol = prop_result.viol_count() curr_viol = self.curr_result.viol_count() diff = prop_result.loss() - self.curr_result.loss() - log_prob = -diff/temp + log_prob = -diff / temp viol_diff = prop_viol - curr_viol - result = {'diff': diff, 'log_prob': log_prob, 'viol_diff': viol_diff} + result = {"diff": diff, "log_prob": log_prob, "viol_diff": viol_diff} if viol_diff < 0: - result['accept'] = True + result["accept"] = True return result elif viol_diff > 0: - result['accept'] = False + result["accept"] = False return result # standard metropolis-hastings rv = np.log(np.random.uniform()) - result['accept'] = rv < log_prob + result["accept"] = rv < log_prob return result def step(self, consgraph, state, move_gen_func, filter_domain): - if self.curr_result is None: - self.curr_result = evaluator.evaluate_problem(consgraph, state, filter_domain) + self.curr_result = evaluator.evaluate_problem( + consgraph, state, filter_domain + ) move_start_time = time.perf_counter() is_log_step = ( - self.print_report_freq != 0 + self.print_report_freq != 0 and self.curr_iteration % self.print_report_freq == 0 ) is_report_step = ( - self.print_breakdown_freq != 0 + self.print_breakdown_freq != 0 and self.curr_iteration % self.print_breakdown_freq == 0 ) @@ -276,66 +271,67 @@ def step(self, consgraph, state, move_gen_func, filter_domain): if prop_result is None: # set null values for logging purposes - accept_result = {'accept': None, 'diff': 0, 'log_prob': 0, 'viol_diff': None} + accept_result = { + "accept": None, + "diff": 0, + "log_prob": 0, + "viol_diff": None, + } else: accept_result = self.metrop_hastings_with_viol(prop_result, temp) - if accept_result['accept']: + if accept_result["accept"]: self.curr_result = prop_result move.accept(state) else: evaluator.evict_memo_for_move(consgraph, state, self.eval_memo, move) move.revert(state) - + dt = time.perf_counter() - move_start_time elapsed = time.perf_counter() - self.optim_start_time - if ( - (self.print_report_freq != 0 and accept_result['accept']) - or is_log_step - ): - + if (self.print_report_freq != 0 and accept_result["accept"]) or is_log_step: n = len(state.objs) move_log = move_gen_func.__name__ if move is None else move - - log_prob = accept_result['log_prob'] - prob = 1 if log_prob > 7 else np.exp(accept_result['log_prob']) # avoid overflow warnings. clamp to exp = exp(7) ~= 1000 - + + log_prob = accept_result["log_prob"] + prob = ( + 1 if log_prob > 7 else np.exp(accept_result["log_prob"]) + ) # avoid overflow warnings. 
clamp to exp = exp(7) ~= 1000 + loss = self.curr_result.loss() viol = self.curr_result.viol_count() - diff = accept_result['diff'] - accept = accept_result['accept'] - viol_diff = accept_result['viol_diff'] or 0 + diff = accept_result["diff"] + accept = accept_result["accept"] + viol_diff = accept_result["viol_diff"] or 0 logger.info( - f"it={self.curr_iteration} {dt=:.3f} {n=} " + f"it={self.curr_iteration}/{self.max_iterations} {dt=:.3f} {n=} " f"{loss=:.3e} {viol=:.1f} " f"{temp=:.2e} {diff=:.2f} {viol_diff=:.1f} {prob=:.2f} {accept=} " f"{move_log}" ) - if is_log_step: - self.stats.append(dict( - curr_iteration=self.curr_iteration, - loss=self.curr_result.loss(), - viol=self.curr_result.viol_count(), - best_loss=self.best_loss, - temp=temp, - accept=accept, - move_gen=move_gen_func.__name__, - move_type=( - move.__class__.__name__ - if move is not None else None - ), - move_target=( - move.name - if move is not None and hasattr(move, 'name') - else None - ), - move_dur=dt, - elapsed=elapsed, - retry=retry - )) + self.stats.append( + dict( + curr_iteration=self.curr_iteration, + loss=self.curr_result.loss(), + viol=self.curr_result.viol_count(), + best_loss=self.best_loss, + temp=temp, + accept=accept, + move_gen=move_gen_func.__name__, + move_type=(move.__class__.__name__ if move is not None else None), + move_target=( + move.name + if move is not None and hasattr(move, "name") + else None + ), + move_dur=dt, + elapsed=elapsed, + retry=retry, + ) + ) if is_report_step and prop_result is not None: df = prop_result.to_df() @@ -343,29 +339,37 @@ def step(self, consgraph, state, move_gen_func, filter_domain): if self.last_eval_result is not None: last_df = self.last_eval_result.to_df() diff_cols = [ - c for c in df.columns + c + for c in df.columns if ( not last_df[c].equals(df[c]) - or (df[c]["viol_count"] is not None and last_df[c]["viol_count"] > 0) + or ( + df[c]["viol_count"] is not None + and last_df[c]["viol_count"] > 0 + ) ) ] - print(self.last_eval_result.viol_count(), self.curr_result.viol_count(), prop_result.viol_count()) - last_df.index = ['prev_' + x for x in last_df.index] + print( + self.last_eval_result.viol_count(), + self.curr_result.viol_count(), + prop_result.viol_count(), + ) + last_df.index = ["prev_" + x for x in last_df.index] df = pd.concat([last_df[diff_cols], df[diff_cols]]) - + print(df) if self.curr_iteration % BPY_GARBAGE_COLLECT_FREQUENCY == 0: butil.garbage_collect(butil.get_all_bpy_data_targets()) if self.curr_iteration != 0 and self.curr_iteration % 50 == 0: - print(f'CLUTTER REPORT {self.curr_iteration=}') - print(' State Size', len(state.objs)) - print(' Trimesh', len(state.trimesh_scene.graph.nodes)) - print(' Objects', len(bpy.data.objects)) - print(' Meshes', len(bpy.data.meshes)) - print(' Materials', len(bpy.data.materials)) - print(' Textures', len(bpy.data.materials)) + print(f"CLUTTER REPORT {self.curr_iteration=}") + print(" State Size", len(state.objs)) + print(" Trimesh", len(state.trimesh_scene.graph.nodes)) + print(" Objects", len(bpy.data.objects)) + print(" Meshes", len(bpy.data.meshes)) + print(" Materials", len(bpy.data.materials)) + print(" Textures", len(bpy.data.materials)) self.curr_iteration += 1 if prop_result is not None: diff --git a/infinigen/core/constraints/example_solver/geometry/dof.py b/infinigen/core/constraints/example_solver/geometry/dof.py index 5f60e1e43..193428662 100644 --- a/infinigen/core/constraints/example_solver/geometry/dof.py +++ b/infinigen/core/constraints/example_solver/geometry/dof.py @@ -5,33 +5,29 @@ 
import logging -import gin import bpy +import gin import numpy as np -from infinigen.core.constraints.example_solver.geometry import stability -from mathutils import Vector import trimesh -from shapely.geometry import Point, Polygon, MultiPolygon -import matplotlib.pyplot as plt - -from infinigen.core import tags as t, tagging -from infinigen.core.constraints import constraint_language as cl - -from infinigen.core.constraints.example_solver import ( - state_def -) +from mathutils import Vector +from shapely.geometry import Point -import infinigen.core.util.blender as butil import infinigen.core.constraints.example_solver.geometry.validity as validity -from infinigen.core.constraints.constraint_language.util import meshes_from_names ,delete_obj +import infinigen.core.util.blender as butil +from infinigen.core import tagging +from infinigen.core import tags as t +from infinigen.core.constraints import constraint_language as cl from infinigen.core.constraints.constraint_language import util as iu -from infinigen.core import tagging, tags as t -from infinigen.core.constraints.example_solver.room.constants import WALL_HEIGHT, WALL_THICKNESS - - +from infinigen.core.constraints.example_solver import state_def +from infinigen.core.constraints.example_solver.geometry import stability +from infinigen.core.constraints.example_solver.room.constants import ( + WALL_HEIGHT, + WALL_THICKNESS, +) logger = logging.getLogger(__name__) + def stable_against_matrix(point, normal): """ Given a point and normal defining a plane, return a 3x3 matrix that @@ -45,6 +41,7 @@ def stable_against_matrix(point, normal): restriction_matrix = np.identity(3) - np.outer(normalized_normal, normalized_normal) return restriction_matrix + def combined_stability_matrix(parent_planes): """ Given a list of relations (each a tuple of point and normal), @@ -70,6 +67,7 @@ def rotation_constraint(normal): return normalized_normal + def combine_rotation_constraints(parent_planes, eps=0.01): """ Given a list of normals, compute the combined axis of rotation. 
@@ -106,30 +104,28 @@ def rotate_object_around_axis(obj, axis, std, angle=None): # If no angle is provided, generate a random angle between 0 and 2*pi if angle is None: - angle = np.random.normal(0,std) + angle = np.random.normal(0, std) + + obj.rotation_mode = "AXIS_ANGLE" + obj.rotation_axis_angle = Vector([angle] + list(normalized_axis)) - obj.rotation_mode = 'AXIS_ANGLE' - obj.rotation_axis_angle = Vector([angle]+ list(normalized_axis)) def check_init_valid( - state: state_def.State, - name: str, - obj_planes: list, - assigned_planes: list, - margins + state: state_def.State, name: str, obj_planes: list, assigned_planes: list, margins ): - if len(obj_planes) == 0: raise ValueError(f"{check_init_valid.__name__} for {name=} got {obj_planes=}") if len(obj_planes) > 3: - raise ValueError(f'{check_init_valid.__name__} for {name=} got {len(obj_planes)=}') + raise ValueError( + f"{check_init_valid.__name__} for {name=} got {len(obj_planes)=}" + ) def get_rot(ind): try: a = obj_planes[ind][0] b = assigned_planes[ind][0] except IndexError: - raise ValueError(f'Invalid {ind=} {obj_planes=} {assigned_planes=}') + raise ValueError(f"Invalid {ind=} {obj_planes=} {assigned_planes=}") a_plane = obj_planes[ind] b_plane = assigned_planes[ind] @@ -144,44 +140,43 @@ def get_rot(ind): plane_normal_b = iu.global_polygon_normal(b_obj, b_poly) plane_normal_b = -plane_normal_b rotation_axis = np.cross(plane_normal_a, plane_normal_b) - - if not np.isclose(np.linalg.norm(rotation_axis),0, atol = 1e-03): + + if not np.isclose(np.linalg.norm(rotation_axis), 0, atol=1e-03): rotation_axis = rotation_axis / np.linalg.norm(rotation_axis) else: - rotation_axis = np.array([0,0,1]) + rotation_axis = np.array([0, 0, 1]) dot = plane_normal_a.dot(plane_normal_b) rotation_angle = np.arccos(np.clip(dot, -1, 1)) if np.isnan(rotation_angle): - raise ValueError(f'Invalid {rotation_angle=}') - return a,b,rotation_axis, rotation_angle, plane_normal_b - + raise ValueError(f"Invalid {rotation_angle=}") + return a, b, rotation_axis, rotation_angle, plane_normal_b + def is_rotation_allowed(rotation_axis, reference_normal): # Check if rotation axis is the same as the reference normal (with some tolerance) - res = ( - np.allclose(rotation_axis, reference_normal, atol=1e-02) - or np.allclose(rotation_axis, -reference_normal, atol=1e-02) + res = np.allclose(rotation_axis, reference_normal, atol=1e-02) or np.allclose( + rotation_axis, -reference_normal, atol=1e-02 ) if not res: dot = rotation_axis.dot(reference_normal) - logger.debug(f'{is_rotation_allowed.__name__} got {res=} with {rotation_axis=} {reference_normal=} {dot=}') + logger.debug( + f"{is_rotation_allowed.__name__} got {res=} with {rotation_axis=} {reference_normal=} {dot=}" + ) return res - a, b, rotation_axis, rotation_angle, plane_normal_b = get_rot(0) iu.rotate(state.trimesh_scene, a, rotation_axis, rotation_angle) first_plane_normal = plane_normal_b # Save the normal of the first plane - + dof_remaining = True # Degree of freedom remaining after the first alignment # Check and apply rotations for subsequent planes for i in range(1, len(obj_planes)): - a, b, rotation_axis, rotation_angle, plane_normal_b = get_rot(i) if np.isclose(np.linalg.norm(rotation_angle), 0, atol=1e-01): logger.debug(f"no rotation needed for {i=} of {len(obj_planes)}") continue - + rot_allowed = is_rotation_allowed(rotation_axis, first_plane_normal) if dof_remaining and rot_allowed: # Rotate around the normal of the first plane @@ -189,9 +184,11 @@ def is_rotation_allowed(rotation_axis, 
reference_normal): dof_remaining = False # No more degrees of freedom remaining logger.debug(f"rotated {a=} to satisfy assignment {i=}") else: - logger.debug(f"dofs failed for {i=} of {len(obj_planes)=}, {rot_allowed=} {dof_remaining=}") + logger.debug( + f"dofs failed for {i=} of {len(obj_planes)=}, {rot_allowed=} {dof_remaining=}" + ) return False, None, None - + # Construct the system of linear equations for translation A = [] c = [] @@ -207,11 +204,15 @@ def is_rotation_allowed(rotation_axis, reference_normal): b_poly = b_obj.data.polygons[b_poly_index] # Get global coordinates and normals - plane_point_a = iu.global_vertex_coordinates(a_obj, a_obj.data.vertices[a_poly.vertices[0]]) - plane_point_b = iu.global_vertex_coordinates(b_obj, b_obj.data.vertices[b_poly.vertices[0]]) + plane_point_a = iu.global_vertex_coordinates( + a_obj, a_obj.data.vertices[a_poly.vertices[0]] + ) + plane_point_b = iu.global_vertex_coordinates( + b_obj, b_obj.data.vertices[b_poly.vertices[0]] + ) plane_normal_b = iu.global_polygon_normal(b_obj, b_poly) plane_point_b += plane_normal_b * margin - + # Append to the matrix A and vector b for Ax = c A.append(plane_normal_b) c.append(plane_normal_b.dot(plane_point_b - plane_point_a)) @@ -224,7 +225,7 @@ def is_rotation_allowed(rotation_axis, reference_normal): a_obj_name, a_poly_index = obj_planes[0] a_obj = bpy.data.objects[a_obj_name] - + # Check if the solution is valid # You can define a threshold to determine if the residuals are acceptable # Manually compute residuals if m <= n @@ -234,28 +235,29 @@ def is_rotation_allowed(rotation_axis, reference_normal): if residuals_sum < 1e-03: return True, A.shape[1] - rank, t # Solution is valid else: - logger.debug(f'{check_init_valid.__name__} failed with {residuals_sum=}') + logger.debug(f"{check_init_valid.__name__} failed with {residuals_sum=}") return False, None, None # Solution is not valid else: if np.all(residuals < 1e-03): return True, A.shape[1] - rank, t # Solution is valid else: - logger.debug(f'{check_init_valid.__name__} failed with {residuals=}') + logger.debug(f"{check_init_valid.__name__} failed with {residuals=}") return False, None, None # No valid solution def project(points, plane_normal): - to_2D = trimesh.geometry.plane_transform(origin=(0,0,0), normal=plane_normal) + to_2D = trimesh.geometry.plane_transform(origin=(0, 0, 0), normal=plane_normal) vertices_2D = trimesh.transformations.transform_points(points, to_2D)[:, :2] return vertices_2D + def apply_relations_surfacesample( - state: state_def.State, - name: str, + state: state_def.State, + name: str, ): obj_state = state.objs[name] obj_name = obj_state.obj.name - + parent_objs = [] parent_planes = [] obj_planes = [] @@ -265,15 +267,20 @@ def apply_relations_surfacesample( if len(obj_state.relations) == 0: raise ValueError(f"Object {name} has no relations") elif len(obj_state.relations) > 3: - raise ValueError(f"Object {name} has more than 2 relations, not supported. {obj_state.relations=}") + raise ValueError( + f"Object {name} has more than 2 relations, not supported. 
{obj_state.relations=}" + ) for i, relation_state in enumerate(obj_state.relations): - if isinstance(relation_state.relation, cl.AnyRelation): - raise ValueError(f"Got {relation_state.relation} for {name=} {relation_state.target_name=}") + raise ValueError( + f"Got {relation_state.relation} for {name=} {relation_state.target_name=}" + ) parent_obj = state.objs[relation_state.target_name].obj - obj_plane, parent_plane = state.planes.get_rel_state_planes(state, name, relation_state) + obj_plane, parent_plane = state.planes.get_rel_state_planes( + state, name, relation_state + ) if obj_plane is None: continue @@ -281,28 +288,27 @@ def apply_relations_surfacesample( continue obj_planes.append(obj_plane) - parent_planes.append(parent_plane) + parent_planes.append(parent_plane) parent_objs.append(parent_obj) - match relation_state.relation: - case cl.StableAgainst(child_tags, parent_tags, margin): + match relation_state.relation: + case cl.StableAgainst(_child_tags, parent_tags, margin): margins.append(margin) parent_tag_list.append(parent_tags) - case cl.SupportedBy(child_tags, parent_tags): + case cl.SupportedBy(_parent_tags, parent_tags): margins.append(0) parent_tag_list.append(parent_tags) - case _: + case _: raise NotImplementedError - - valid, dof, T = check_init_valid(state, name, obj_planes, parent_planes, margins) - if not valid: + + valid, dof, T = check_init_valid(state, name, obj_planes, parent_planes, margins) + if not valid: rels = [(rels.relation, rels.target_name) for rels in obj_state.relations] - logger.warning(f'Init was invalid for {name=} {rels=}') + logger.warning(f"Init was invalid for {name=} {rels=}") return None - + if dof == 0: iu.translate(state.trimesh_scene, obj_name, T) - elif dof == 1: - + elif dof == 1: assert len(parent_planes) == 2, (name, len(parent_planes)) parent_obj1 = parent_objs[0] @@ -313,58 +319,122 @@ def apply_relations_surfacesample( parent_tags2 = parent_tag_list[1] margin1 = margins[0] margin2 = margins[1] - obj_plane1 = obj_planes[0] - obj_plane2 = obj_planes[1] - - parent1_trimesh = state.planes.get_tagged_submesh(state.trimesh_scene, parent_obj1.name, parent_tags1, parent_plane1) - parent2_trimesh = state.planes.get_tagged_submesh(state.trimesh_scene, parent_obj2.name, parent_tags2, parent_plane2) - + obj_plane1 = obj_planes[0] + obj_plane2 = obj_planes[1] + + parent1_trimesh = state.planes.get_tagged_submesh( + state.trimesh_scene, parent_obj1.name, parent_tags1, parent_plane1 + ) + parent2_trimesh = state.planes.get_tagged_submesh( + state.trimesh_scene, parent_obj2.name, parent_tags2, parent_plane2 + ) + parent1_poly_index = parent_plane1[1] parent1_poly = parent_obj1.data.polygons[parent1_poly_index] plane_normal_1 = iu.global_polygon_normal(parent_obj1, parent1_poly) pts = parent2_trimesh.vertices - projected = project(pts,plane_normal_1) - p1_to_p1 = trimesh.path.polygons.projected(parent1_trimesh, plane_normal_1, (0,0,0)) + projected = project(pts, plane_normal_1) + p1_to_p1 = trimesh.path.polygons.projected( + parent1_trimesh, plane_normal_1, (0, 0, 0) + ) if p1_to_p1 is None: - raise ValueError(f'Failed to project {parent1_trimesh=} {plane_normal_1=} for {name=}') - + raise ValueError( + f"Failed to project {parent1_trimesh=} {plane_normal_1=} for {name=}" + ) - - if all([p1_to_p1.buffer(1e-1).contains(Point(pt[0], pt[1])) for pt in projected]): + if all( + [p1_to_p1.buffer(1e-1).contains(Point(pt[0], pt[1])) for pt in projected] + ): face_mask = tagging.tagged_face_mask(parent_obj2, parent_tags2) - stability.move_obj_random_pt(state, 
obj_name, parent_obj2.name, face_mask, parent_plane2) - stability.snap_against(state.trimesh_scene, obj_name, parent_obj2.name, obj_plane2, parent_plane2, margin=margin2) - stability.snap_against(state.trimesh_scene, obj_name, parent_obj1.name, obj_plane1, parent_plane1, margin=margin1) + stability.move_obj_random_pt( + state, obj_name, parent_obj2.name, face_mask, parent_plane2 + ) + stability.snap_against( + state.trimesh_scene, + obj_name, + parent_obj2.name, + obj_plane2, + parent_plane2, + margin=margin2, + ) + stability.snap_against( + state.trimesh_scene, + obj_name, + parent_obj1.name, + obj_plane1, + parent_plane1, + margin=margin1, + ) else: face_mask = tagging.tagged_face_mask(parent_obj1, parent_tags1) - stability.move_obj_random_pt(state, obj_name, parent_obj1.name, face_mask, parent_plane1) - stability.snap_against(state.trimesh_scene, obj_name, parent_obj1.name, obj_plane1, parent_plane1, margin=margin1) - stability.snap_against(state.trimesh_scene, obj_name, parent_obj2.name, obj_plane2, parent_plane2, margin=margin2) - - elif dof == 2: + stability.move_obj_random_pt( + state, obj_name, parent_obj1.name, face_mask, parent_plane1 + ) + stability.snap_against( + state.trimesh_scene, + obj_name, + parent_obj1.name, + obj_plane1, + parent_plane1, + margin=margin1, + ) + stability.snap_against( + state.trimesh_scene, + obj_name, + parent_obj2.name, + obj_plane2, + parent_plane2, + margin=margin2, + ) + + elif dof == 2: assert len(parent_planes) == 1, (name, len(parent_planes)) for i, relation_state in enumerate(obj_state.relations): parent_obj = state.objs[relation_state.target_name].obj - obj_plane, parent_plane = state.planes.get_rel_state_planes(state, name, relation_state) + obj_plane, parent_plane = state.planes.get_rel_state_planes( + state, name, relation_state + ) if obj_plane is None: continue if parent_plane is None: continue - iu.set_rotation(state.trimesh_scene, obj_name, (0, 0, 2*np.pi*np.random.randint(0, 4)/4)) - face_mask = tagging.tagged_face_mask(parent_obj, relation_state.relation.parent_tags) - stability.move_obj_random_pt(state, obj_name, parent_obj.name, face_mask, parent_plane) - match relation_state.relation: - case cl.StableAgainst(child_tags, parent_tags, margin): - stability.snap_against(state.trimesh_scene, obj_name, parent_obj.name, obj_plane, parent_plane, margin=margin) - case cl.SupportedBy(child_tags, parent_tags): - stability.snap_against(state.trimesh_scene, obj_name, parent_obj.name, obj_plane, parent_plane, margin=0) - case _: + iu.set_rotation( + state.trimesh_scene, + obj_name, + (0, 0, 2 * np.pi * np.random.randint(0, 4) / 4), + ) + face_mask = tagging.tagged_face_mask( + parent_obj, relation_state.relation.parent_tags + ) + stability.move_obj_random_pt( + state, obj_name, parent_obj.name, face_mask, parent_plane + ) + match relation_state.relation: + case cl.StableAgainst(_, parent_tags, margin): + stability.snap_against( + state.trimesh_scene, + obj_name, + parent_obj.name, + obj_plane, + parent_plane, + margin=margin, + ) + case cl.SupportedBy(_, parent_tags): + stability.snap_against( + state.trimesh_scene, + obj_name, + parent_obj.name, + obj_plane, + parent_plane, + margin=0, + ) + case _: raise NotImplementedError return parent_planes - + + def validate_relations_feasible(state: state_def.State, name: str) -> bool: - assignments = state.objs[name].relations targets = [rel.target_name for rel in assignments] @@ -372,15 +442,12 @@ def validate_relations_feasible(state: state_def.State, name: str) -> bool: if len(rooms) > 1: raise 
ValueError(f"Object {name} has multiple room targets {rooms}") + @gin.configurable def try_apply_relation_constraints( - state: state_def.State, - name: str, - n_try_resolve=10, - visualize=False + state: state_def.State, name: str, n_try_resolve=10, visualize=False ): - - ''' + """ name is in objs.name name has been recently reassigned or added or swapped it needs snapping, and dof updates @@ -389,38 +456,39 @@ def try_apply_relation_constraints( dof_mat and dof axis for name are updated objstate for name has update location rotaton etc - ''' + """ validate_relations_feasible(state, name) for retry in range(n_try_resolve): obj_state = state.objs[name] - if iu.blender_objs_from_names(obj_state.obj.name)[0].dimensions[2] > WALL_HEIGHT - WALL_THICKNESS: - logger.warning(f"Object {obj_state.obj.name} is too tall for the room: {obj_state.obj.dimensions[2]}, {WALL_HEIGHT=}, {WALL_THICKNESS=}") + if ( + iu.blender_objs_from_names(obj_state.obj.name)[0].dimensions[2] + > WALL_HEIGHT - WALL_THICKNESS + ): + logger.warning( + f"Object {obj_state.obj.name} is too tall for the room: {obj_state.obj.dimensions[2]}, {WALL_HEIGHT=}, {WALL_THICKNESS=}" + ) parent_planes = apply_relations_surfacesample(state, name) - # assignments not valid if parent_planes is None: - logger.debug(f'Found {parent_planes=} for {name=} {retry=}') + logger.debug(f"Found {parent_planes=} for {name=} {retry=}") if visualize: vis = butil.copy(obj_state.obj) - vis.name = obj_state.obj.name[:30] + '_noneplanes_' + str(retry) + vis.name = obj_state.obj.name[:30] + "_noneplanes_" + str(retry) return False - + if validity.check_post_move_validity(state, name): obj_state.dof_matrix_translation = combined_stability_matrix(parent_planes) obj_state.dof_rotation_axis = combine_rotation_constraints(parent_planes) return True - + if visualize: vis = butil.copy(obj_state.obj) - vis.name = obj_state.obj.name[:30] + '_failure_' + str(retry) + vis.name = obj_state.obj.name[:30] + "_failure_" + str(retry) # butil.save_blend("test.blend") - - logger.debug(f'Exhausted {n_try_resolve=} tries for {name=}') + logger.debug(f"Exhausted {n_try_resolve=} tries for {name=}") return False - - diff --git a/infinigen/core/constraints/example_solver/geometry/parse_scene.py b/infinigen/core/constraints/example_solver/geometry/parse_scene.py index 11cfd8cf1..bf1292dc1 100644 --- a/infinigen/core/constraints/example_solver/geometry/parse_scene.py +++ b/infinigen/core/constraints/example_solver/geometry/parse_scene.py @@ -3,22 +3,17 @@ # Authors: Karhan Kayan + import bpy -import trimesh -from shapely import LineString, Point +import fcl import numpy as np -from typing import Union -from trimesh import Trimesh, Scene -from mathutils import Vector, Matrix +import trimesh +from mathutils import Matrix +from infinigen.core import tagging +from infinigen.core.constraints.constraint_language.util import sync_trimesh from infinigen.core.util import blender as butil -from infinigen.core import tagging, tags as t -from infinigen.core.constraints.constraint_language.util import ( - translate, - rotate, - sync_trimesh -) -import fcl + def to_trimesh(obj: bpy.types.Object): bpy.context.view_layer.update() @@ -30,21 +25,21 @@ def to_trimesh(obj: bpy.types.Object): def preprocess_obj(obj): - with butil.ViewportMode(obj, mode='EDIT'): + with butil.ViewportMode(obj, mode="EDIT"): butil.select(obj) - bpy.ops.mesh.select_all(action='SELECT') - bpy.ops.mesh.quads_convert_to_tris(quad_method='BEAUTY', ngon_method='BEAUTY') + bpy.ops.mesh.select_all(action="SELECT") + 
bpy.ops.mesh.quads_convert_to_tris(quad_method="BEAUTY", ngon_method="BEAUTY") bpy.context.view_layer.update() butil.apply_transform(obj, loc=False, rot=False, scale=True) + def preprocess_scene(objects): for o in objects: preprocess_obj(o) - def parse_scene(objects): # convert all bpy.objects into a trimesh.Scene @@ -56,18 +51,19 @@ def parse_scene(objects): return scene + def add_to_scene(scene, obj, preprocess=True): if preprocess: preprocess_obj(obj) obj_matrix_world = Matrix(obj.matrix_world) obj.matrix_world = Matrix.Identity(4) tmesh = to_trimesh(obj) - tmesh.metadata['tags'] = tagging.union_object_tags(obj) + tmesh.metadata["tags"] = tagging.union_object_tags(obj) scene.add_geometry( geometry=tmesh, # transform=np.array(obj.matrix_world), - geom_name=obj.name + '_mesh', - node_name=obj.name + geom_name=obj.name + "_mesh", + node_name=obj.name, ) col = trimesh.collision.CollisionManager() T = trimesh.transformations.identity_matrix() diff --git a/infinigen/core/constraints/example_solver/geometry/planes.py b/infinigen/core/constraints/example_solver/geometry/planes.py index 5e5cd1f9c..28dd589f2 100644 --- a/infinigen/core/constraints/example_solver/geometry/planes.py +++ b/infinigen/core/constraints/example_solver/geometry/planes.py @@ -4,23 +4,29 @@ # Authors: Karhan Kayan from __future__ import annotations + import logging import bpy -import numpy as np import gin +import numpy as np import trimesh - import infinigen.core.util.blender as butil -from infinigen.core import tagging, tags as t -from infinigen.core.constraints.constraint_language.util import meshes_from_names, blender_objs_from_names +from infinigen.core import tagging +from infinigen.core import tags as t +from infinigen.core.constraints.constraint_language.util import ( + blender_objs_from_names, + meshes_from_names, +) logger = logging.getLogger(__name__) + def global_vertex_coordinates(obj, local_vertex): return obj.matrix_world @ local_vertex.co + def global_polygon_normal(obj, polygon): loc, rot, scale = obj.matrix_world.decompose() rot = rot.to_matrix() @@ -28,7 +34,10 @@ def global_polygon_normal(obj, polygon): try: return normal / np.linalg.norm(normal) except ZeroDivisionError: - raise ZeroDivisionError(f"Zero division error in global_polygon_normal for {obj.name=}, {polygon.index=}, {normal=}") + raise ZeroDivisionError( + f"Zero division error in global_polygon_normal for {obj.name=}, {polygon.index=}, {normal=}" + ) + class Planes: def __init__(self): @@ -39,7 +48,9 @@ def __init__(self): def calculate_mesh_hash(self, obj): # Simple hash based on counts of vertices, edges, and polygons mesh = obj.data - hash_str = f"{obj.name}_{len(mesh.vertices)}_{len(mesh.edges)}_{len(mesh.polygons)}" + hash_str = ( + f"{obj.name}_{len(mesh.vertices)}_{len(mesh.edges)}_{len(mesh.polygons)}" + ) return hash(hash_str) def hash_face_mask(self, face_mask): @@ -52,16 +63,22 @@ def get_all_planes_cached(self, obj, face_mask, tolerance=1e-4): cache_key = (obj.name, current_face_mask_hash) # Check if mesh has been modified or planes have not been computed before for this object and face_mask - if cache_key not in self._cached_planes or self._mesh_hashes.get(obj.name) != current_mesh_hash: - self._mesh_hashes[obj.name] = current_mesh_hash # Update the hash for this object + if ( + cache_key not in self._cached_planes + or self._mesh_hashes.get(obj.name) != current_mesh_hash + ): + self._mesh_hashes[obj.name] = ( + current_mesh_hash # Update the hash for this object + ) # Recompute planes for this object and face_mask and update 
cache # logger.info(f'Cache MISS planes for {obj.name=}') - self._cached_planes[cache_key] = self.compute_all_planes_fast(obj, face_mask, tolerance) - + self._cached_planes[cache_key] = self.compute_all_planes_fast( + obj, face_mask, tolerance + ) + # logger.info(f'Cache HIT planes for {obj.name=}') return self._cached_planes[cache_key] - @staticmethod def normalize(v): norm = np.linalg.norm(v) @@ -71,20 +88,29 @@ def normalize(v): def hash_plane(normal, point, tolerance=1e-4): normal_normalized = normal / np.linalg.norm(normal) distance = np.dot(normal_normalized, point) - return (tuple(np.round(normal_normalized / tolerance).astype(int)), round(distance / tolerance)) + return ( + tuple(np.round(normal_normalized / tolerance).astype(int)), + round(distance / tolerance), + ) def compute_all_planes_fast(self, obj, face_mask, tolerance=1e-4): # Cache computations - - vertex_cache = {v.index: global_vertex_coordinates(obj, v) for v in obj.data.vertices} - normal_cache = {p.index: global_polygon_normal(obj, p) for p in obj.data.polygons if face_mask[p.index]} + + vertex_cache = { + v.index: global_vertex_coordinates(obj, v) for v in obj.data.vertices + } + normal_cache = { + p.index: global_polygon_normal(obj, p) + for p in obj.data.polygons + if face_mask[p.index] + } unique_planes = {} for polygon in obj.data.polygons: if not face_mask[polygon.index]: continue - + # Get the normal and a vertex to represent the plane normal = normal_cache[polygon.index] @@ -92,41 +118,50 @@ def compute_all_planes_fast(self, obj, face_mask, tolerance=1e-4): continue vertex = vertex_cache[polygon.vertices[0]] - + # Hash the plane using both normal and the point plane_hash = self.hash_plane(normal, vertex, tolerance) - + if plane_hash not in unique_planes: unique_planes[plane_hash] = (obj.name, polygon.index) return list(unique_planes.values()) - - def get_all_planes_deprecated(self, obj, face_mask, tolerance=1e-4) -> tuple[str, int]: + def get_all_planes_deprecated( + self, obj, face_mask, tolerance=1e-4 + ) -> tuple[str, int]: "get all unique planes formed by faces in face_mask" # ASSUMES: object is triangulated, no quads/polygons unique_planes = [] for polygon in obj.data.polygons: if not face_mask[polygon.index]: continue - vertex = global_vertex_coordinates(obj, obj.data.vertices[polygon.vertices[0]]) + vertex = global_vertex_coordinates( + obj, obj.data.vertices[polygon.vertices[0]] + ) normal = global_polygon_normal(obj, polygon) belongs_to_existing_plane = False for name, polygon2_index in unique_planes: polygon2 = obj.data.polygons[polygon2_index] - plane_vertex = global_vertex_coordinates(obj, obj.data.vertices[polygon2.vertices[0]]) + plane_vertex = global_vertex_coordinates( + obj, obj.data.vertices[polygon2.vertices[0]] + ) plane_normal = global_polygon_normal(obj, polygon2) - if ( - np.allclose(np.cross(normal, plane_normal), 0, rtol=tolerance) and - np.allclose(np.dot(vertex - plane_vertex, plane_normal), 0, rtol=tolerance) + if np.allclose( + np.cross(normal, plane_normal), 0, rtol=tolerance + ) and np.allclose( + np.dot(vertex - plane_vertex, plane_normal), 0, rtol=tolerance ): belongs_to_existing_plane = True break - if not belongs_to_existing_plane and polygon.normal and polygon.normal.length > 0: + if ( + not belongs_to_existing_plane + and polygon.normal + and polygon.normal.length > 0 + ): unique_planes.append((obj.name, polygon.index)) return unique_planes - @gin.configurable def get_tagged_planes(self, obj: bpy.types.Object, tags: set, fast=True): """ @@ -139,10 +174,10 @@ def 
get_tagged_planes(self, obj: bpy.types.Object, tags: set, fast=True): if not mask.any(): obj_tags = tagging.union_object_tags(obj) logger.warning( - f'Attempted to get_tagged_planes {obj.name=} {tags=} but mask was empty, {obj_tags=}' + f"Attempted to get_tagged_planes {obj.name=} {tags=} but mask was empty, {obj_tags=}" ) return [] - + if fast: planes = self.get_all_planes_cached(obj, mask) else: @@ -150,33 +185,36 @@ def get_tagged_planes(self, obj: bpy.types.Object, tags: set, fast=True): return planes def get_rel_state_planes(self, state, name: str, relation_state: tuple): - obj = state.objs[name].obj relation = relation_state.relation parent_obj = state.objs[relation_state.target_name].obj - obj_tags = relation.child_tags + obj_tags = relation.child_tags parent_tags = relation.parent_tags - + parent_all_planes = self.get_tagged_planes(parent_obj, parent_tags) obj_all_planes = self.get_tagged_planes(obj, obj_tags) - #for i, p in enumerate(parent_all_planes): + # for i, p in enumerate(parent_all_planes): # splitted_parent = planes.extract_tagged_plane(parent_obj, parent_tags, p) # splitted_parent.name = f'parent_plane_{i}' - #for i, p in enumerate(obj_all_planes): + # for i, p in enumerate(obj_all_planes): # splitted_parent = planes.extract_tagged_plane(parent_obj, obj_tags, p) # splitted_parent.name = f'obj_plane_{i}' - #return + # return if relation_state.parent_plane_idx >= len(parent_all_planes): - logging.warning(f'{parent_obj.name=} had too few planes ({len(parent_all_planes)}) for {relation_state}') + logging.warning( + f"{parent_obj.name=} had too few planes ({len(parent_all_planes)}) for {relation_state}" + ) parent_plane = None else: parent_plane = parent_all_planes[relation_state.parent_plane_idx] if relation_state.child_plane_idx >= len(obj_all_planes): - logging.warning(f'{obj.name=} had too few planes ({len(obj_all_planes)}) for {relation_state}') + logging.warning( + f"{obj.name=} had too few planes ({len(obj_all_planes)}) for {relation_state}" + ) obj_plane = None else: obj_plane = obj_all_planes[relation_state.child_plane_idx] @@ -193,7 +231,7 @@ def extract_tagged_plane(self, obj: bpy.types.Object, tags: set, plane: int): get a single plane formed by faces tagged with tags """ - if obj.type != 'MESH': + if obj.type != "MESH": raise TypeError("Object is not a mesh!") face_mask = tagging.tagged_face_mask(obj, tags) @@ -202,31 +240,33 @@ def extract_tagged_plane(self, obj: bpy.types.Object, tags: set, plane: int): if not mask.any(): obj_tags = tagging.union_object_tags(obj) logger.warning( - f'Attempted to extract_tagged_plane {obj.name=} {tags=} but mask was empty, {obj_tags=}' + f"Attempted to extract_tagged_plane {obj.name=} {tags=} but mask was empty, {obj_tags=}" ) butil.select(obj) bpy.context.view_layer.objects.active = obj - bpy.ops.object.mode_set(mode='EDIT') - bpy.ops.mesh.select_mode(use_extend=False, use_expand=False, type='FACE') - bpy.ops.mesh.select_all(action='DESELECT') + bpy.ops.object.mode_set(mode="EDIT") + bpy.ops.mesh.select_mode(use_extend=False, use_expand=False, type="FACE") + bpy.ops.mesh.select_all(action="DESELECT") # Set initial selection for polygons to False - bpy.ops.object.mode_set(mode='OBJECT') + bpy.ops.object.mode_set(mode="OBJECT") for poly in obj.data.polygons: poly.select = mask[poly.index] # Switch to Edit mode, duplicate the selection, and separate it old_set = set(bpy.data.objects[:]) - bpy.ops.object.mode_set(mode='EDIT') + bpy.ops.object.mode_set(mode="EDIT") bpy.ops.mesh.duplicate() - bpy.ops.mesh.separate(type='SELECTED') - 
bpy.ops.object.mode_set(mode='OBJECT') + bpy.ops.mesh.separate(type="SELECTED") + bpy.ops.object.mode_set(mode="OBJECT") new_set = set(bpy.data.objects[:]) - old_set return new_set.pop() - - def get_tagged_submesh(self, scene: trimesh.Scene, name:str, tags: set, plane: int): + + def get_tagged_submesh( + self, scene: trimesh.Scene, name: str, tags: set, plane: int + ): obj = blender_objs_from_names(name)[0] face_mask = tagging.tagged_face_mask(obj, tags) mask = self.tagged_plane_mask(obj, face_mask, plane) @@ -234,37 +274,56 @@ def get_tagged_submesh(self, scene: trimesh.Scene, name:str, tags: set, plane: i geom = tmesh.submesh(np.where(mask), append=True) return geom - def tagged_plane_mask(self, obj: bpy.types.Object, face_mask: np.ndarray, plane: tuple[str, int], hash_tolerance=1e-4, plane_tolerance = 1e-2, fast = True) -> np.ndarray: - if not fast: - return self._compute_tagged_plane_mask(obj, face_mask, plane, plane_tolerance) + def tagged_plane_mask( + self, + obj: bpy.types.Object, + face_mask: np.ndarray, + plane: tuple[str, int], + hash_tolerance=1e-4, + plane_tolerance=1e-2, + fast=True, + ) -> np.ndarray: + if not fast: + return self._compute_tagged_plane_mask( + obj, face_mask, plane, plane_tolerance + ) obj_id = obj.name current_hash = self.calculate_mesh_hash(obj) # Calculate current mesh hash face_mask_hash = self.hash_face_mask(face_mask) # Calculate hash for face_mask ref_poly = self.planerep_to_poly(plane) - ref_vertex = global_vertex_coordinates(obj, obj.data.vertices[ref_poly.vertices[0]]) + ref_vertex = global_vertex_coordinates( + obj, obj.data.vertices[ref_poly.vertices[0]] + ) ref_normal = global_polygon_normal(obj, ref_poly) - plane_hash = self.hash_plane(ref_normal, ref_vertex, hash_tolerance) # Calculate hash for plane - + plane_hash = self.hash_plane( + ref_normal, ref_vertex, hash_tolerance + ) # Calculate hash for plane + # Composite key now includes face_mask_hash cache_key = (obj_id, plane_hash, face_mask_hash) # Check if the mesh has been modified since last calculation or if the face mask has changed - mesh_or_face_mask_changed = cache_key not in self._cached_plane_masks or self._mesh_hashes.get(obj_id) != current_hash + mesh_or_face_mask_changed = ( + cache_key not in self._cached_plane_masks + or self._mesh_hashes.get(obj_id) != current_hash + ) if not mesh_or_face_mask_changed: # logger.info(f'Cache HIT plane mask for {obj.name=}') - return self._cached_plane_masks[cache_key]['mask'] + return self._cached_plane_masks[cache_key]["mask"] # If mesh or face mask changed, update the hash and recompute self._mesh_hashes[obj_id] = current_hash # Compute and cache the plane mask # logger.info(f'Cache MISS plane mask for {obj.name=}') - plane_mask = self._compute_tagged_plane_mask(obj, face_mask, plane, plane_tolerance) - + plane_mask = self._compute_tagged_plane_mask( + obj, face_mask, plane, plane_tolerance + ) + # Update the cache with the new result self._cached_plane_masks[cache_key] = { - 'mask': plane_mask, + "mask": plane_mask, } return plane_mask @@ -275,15 +334,18 @@ def _compute_tagged_plane_mask(self, obj, face_mask, plane, tolerance): """ plane_mask = np.zeros(len(obj.data.polygons), dtype=bool) ref_poly = self.planerep_to_poly(plane) - ref_vertex = global_vertex_coordinates(obj, obj.data.vertices[ref_poly.vertices[0]]) + ref_vertex = global_vertex_coordinates( + obj, obj.data.vertices[ref_poly.vertices[0]] + ) ref_normal = global_polygon_normal(obj, ref_poly) for candidate_polygon in obj.data.polygons: - if not face_mask[candidate_polygon.index]: 
continue - candidate_vertex = global_vertex_coordinates(obj, obj.data.vertices[candidate_polygon.vertices[0]]) + candidate_vertex = global_vertex_coordinates( + obj, obj.data.vertices[candidate_polygon.vertices[0]] + ) candidate_normal = global_polygon_normal(obj, candidate_polygon) diff_vec = ref_vertex - candidate_vertex if not np.isclose(np.linalg.norm(diff_vec), 0): @@ -292,12 +354,10 @@ def _compute_tagged_plane_mask(self, obj, face_mask, plane, tolerance): ndot = np.dot(ref_normal, candidate_normal) pdot = np.dot(diff_vec, candidate_normal) - in_plane = ( - np.allclose(ndot, 1, atol=tolerance) and - np.allclose(pdot, 0, atol=tolerance) + in_plane = np.allclose(ndot, 1, atol=tolerance) and np.allclose( + pdot, 0, atol=tolerance ) plane_mask[candidate_polygon.index] = in_plane - - - return plane_mask \ No newline at end of file + + return plane_mask diff --git a/infinigen/core/constraints/example_solver/geometry/stability.py b/infinigen/core/constraints/example_solver/geometry/stability.py index c6a5c3b4a..cff9a574f 100644 --- a/infinigen/core/constraints/example_solver/geometry/stability.py +++ b/infinigen/core/constraints/example_solver/geometry/stability.py @@ -4,101 +4,105 @@ # Authors: Karhan Kayan from __future__ import annotations -import logging -from dataclasses import dataclass -from copy import copy -import numpy as np -from shapely.affinity import rotate +import logging +import bmesh import bpy +import gin +import matplotlib.pyplot as plt +import numpy as np import trimesh -from shapely.geometry import Point, LineString -from shapely.ops import unary_union, nearest_points -from shapely import Polygon -from shapely import MultiPolygon -import bmesh +from mathutils import Vector +from shapely import MultiPolygon, Polygon +from shapely.affinity import rotate +from shapely.geometry import Point -import matplotlib.pyplot as plt -import gin -# import fcl +from infinigen.core import tagging +from infinigen.core.constraints import constraint_language as cl +from infinigen.core.constraints.constraint_language import util as iu +from infinigen.core.constraints.example_solver import state_def # from infinigen.core.util import blender as butil from infinigen.core.constraints.example_solver.geometry import planes as planes from infinigen.core.util import blender as butil -from mathutils import Vector, Quaternion -from infinigen.core.constraints.example_solver import state_def -from infinigen.core.constraints import constraint_language as cl, reasoning as r +# import fcl -from infinigen.core import tagging, tags as t -from infinigen.core.constraints.constraint_language import util as iu -import logging logger = logging.getLogger(__name__) + def project_and_align_z_with_x(polygons, z_direction): """ Rotate polygons so that the Z-direction is aligned with the X-axis in 2D. - + Parameters: polygons (list[Polygon]): List of Shapely Polygons representing the projected 2D polygons. z_direction (np.array): The 2D direction vector where the Z-axis is projected. - + Returns: list[Polygon]: Rotated polygons with the Z-direction aligned with the X-axis. 
""" # Calculate the angle between the Z-direction projection and the X-axis angle_rad = np.arctan2(z_direction[1], z_direction[0]) angle_deg = np.degrees(angle_rad) - + # Rotate polygons to align Z-direction with X-axis - rotated_polygons = [rotate(polygon, angle_deg, origin=(0, 0), use_radians=False) for polygon in polygons] - + rotated_polygons = [ + rotate(polygon, angle_deg, origin=(0, 0), use_radians=False) + for polygon in polygons + ] + return rotated_polygons + def is_vertically_contained(poly_a, poly_b): """ Check if polygon A is vertically contained within polygon B, ignoring X-axis spillover. - + Parameters: poly_a (Polygon): Polygon A. poly_b (Polygon): Polygon B. - + Returns: bool: True if A is vertically contained within B. """ y_coords_a = [point[1] for point in poly_a.exterior.coords] y_coords_b = [point[1] for point in poly_b.exterior.coords] - + # Check vertical containment along the Y-axis min_a, max_a = min(y_coords_a), max(y_coords_a) min_b, max_b = min(y_coords_b), max(y_coords_b) - + return min_b <= min_a and max_a <= max_b + def project_vector(vector, origin, normal): transform = trimesh.geometry.plane_transform(origin, normal) - transformed = trimesh.transformations.transform_points([np.array([0,0,0]), vector], transform)[:, :2] + transformed = trimesh.transformations.transform_points( + [np.array([0, 0, 0]), vector], transform + )[:, :2] transformed_vector = transformed[1] - transformed[0] return transformed_vector + @gin.configurable def stable_against( - state: state_def.State, - obj_name: str, - relation_state: state_def.RelationState, + state: state_def.State, + obj_name: str, + relation_state: state_def.RelationState, visualize=False, - allow_overhangs=False + allow_overhangs=False, ): """ - check paralell, close to, and not overhanging. + check paralell, close to, and not overhanging. 
""" relation = relation_state.relation assert isinstance(relation, cl.StableAgainst) - logger.debug(f'stable against {obj_name=} {relation_state=}') + logger.debug(f"stable against {obj_name=} {relation_state=}") a_blender_obj = state.objs[obj_name].obj b_blender_obj = state.objs[relation_state.target_name].obj sa = state.objs[obj_name] @@ -113,16 +117,17 @@ def stable_against( normal_b = iu.global_polygon_normal(b_blender_obj, poly_b) dot = np.array(normal_a).dot(normal_b) if not (np.isclose(np.abs(dot), 1, atol=1e-2) or np.isclose(dot, -1, atol=1e-2)): - logger.debug(f'stable against failed, not parallel {dot=}') + logger.debug(f"stable against failed, not parallel {dot=}") return False - - origin_b = iu.global_vertex_coordinates(b_blender_obj, b_blender_obj.data.vertices[poly_b.vertices[0]]) + + origin_b = iu.global_vertex_coordinates( + b_blender_obj, b_blender_obj.data.vertices[poly_b.vertices[0]] + ) scene = state.trimesh_scene a_trimesh = iu.meshes_from_names(scene, sa.obj.name)[0] b_trimesh = iu.meshes_from_names(scene, sb.obj.name)[0] - mask = tagging.tagged_face_mask(sb.obj, relation.parent_tags) mask = state.planes.tagged_plane_mask(sb.obj, mask, pb) assert mask.any() @@ -131,11 +136,12 @@ def stable_against( # Project mesh A onto the plane of mesh B projected_a = trimesh.path.polygons.projected(a_trimesh, normal_b, origin_b) projected_b = trimesh.path.polygons.projected(b_trimesh_mask, normal_b, origin_b) - logger.debug(f'stable_against projecting along {normal_b} for parent_tags {relation.parent_tags}') - + logger.debug( + f"stable_against projecting along {normal_b} for parent_tags {relation.parent_tags}" + ) if projected_a is None or projected_b is None: - raise ValueError(f'Invalid {projected_a=} {projected_b=}') + raise ValueError(f"Invalid {projected_a=} {projected_b=}") if allow_overhangs: res = projected_a.overlaps(projected_b) @@ -143,33 +149,37 @@ def stable_against( res = projected_a.within(projected_b.buffer(1e-2)) else: z_proj = project_vector(np.array([0, 0, 1]), origin_b, normal_b) - projected_a_rotated, projected_b_rotated = project_and_align_z_with_x([projected_a, projected_b], z_proj) + projected_a_rotated, projected_b_rotated = project_and_align_z_with_x( + [projected_a, projected_b], z_proj + ) res = is_vertically_contained(projected_a_rotated, projected_b_rotated) if visualize: fig, ax = plt.subplots() - iu.plot_geometry(ax, projected_a, 'blue') - iu.plot_geometry(ax, projected_b, 'green') - plt.title(f'{obj_name} stable against {relation_state.target_name}? {res=}') + iu.plot_geometry(ax, projected_a, "blue") + iu.plot_geometry(ax, projected_b, "green") + plt.title(f"{obj_name} stable against {relation_state.target_name}? {res=}") plt.show() - logger.debug(f'stable_against {res=}') + logger.debug(f"stable_against {res=}") if not res: return False - + for vertex in poly_a.vertices: - vertex_global = iu.global_vertex_coordinates(a_blender_obj, a_blender_obj.data.vertices[vertex]) + vertex_global = iu.global_vertex_coordinates( + a_blender_obj, a_blender_obj.data.vertices[vertex] + ) distance = iu.distance_to_plane(vertex_global, origin_b, normal_b) if not np.isclose(distance, relation_state.relation.margin, atol=1e-2): - logger.debug(f'stable against failed, not close to {distance=}') + logger.debug(f"stable against failed, not close to {distance=}") return False - return True -def snap_against(scene, a, b, a_plane, b_plane, margin = 0): + +def snap_against(scene, a, b, a_plane, b_plane, margin=0): """ - snap a against b with some margin. 
+ snap a against b with some margin. """ logging.debug("snap_against", a, b, a_plane, b_plane, margin) @@ -180,39 +190,41 @@ def snap_against(scene, a, b, a_plane, b_plane, margin = 0): a_poly = a_obj.data.polygons[a_poly_index] b_poly_index = b_plane[1] b_poly = b_obj.data.polygons[b_poly_index] - plane_point_a = iu.global_vertex_coordinates(a_obj, a_obj.data.vertices[a_poly.vertices[0]]) + plane_point_a = iu.global_vertex_coordinates( + a_obj, a_obj.data.vertices[a_poly.vertices[0]] + ) plane_normal_a = iu.global_polygon_normal(a_obj, a_poly) - plane_point_b = iu.global_vertex_coordinates(b_obj, b_obj.data.vertices[b_poly.vertices[0]]) + plane_point_b = iu.global_vertex_coordinates( + b_obj, b_obj.data.vertices[b_poly.vertices[0]] + ) plane_normal_b = iu.global_polygon_normal(b_obj, b_poly) plane_normal_b = -plane_normal_b - - norm_mag_a = np.linalg.norm(plane_normal_a) norm_mag_b = np.linalg.norm(plane_normal_b) assert np.isclose(norm_mag_a, 1), norm_mag_a assert np.isclose(norm_mag_b, 1), norm_mag_b rotation_axis = np.cross(plane_normal_a, plane_normal_b) - if not np.isclose(np.linalg.norm(rotation_axis),0, atol = 1e-05): + if not np.isclose(np.linalg.norm(rotation_axis), 0, atol=1e-05): rotation_axis = rotation_axis / np.linalg.norm(rotation_axis) else: - rotation_axis = np.array([0,0,1]) + rotation_axis = np.array([0, 0, 1]) dot = plane_normal_a.dot(plane_normal_b) rotation_angle = np.arccos(np.clip(dot, -1, 1)) if np.isnan(rotation_angle): - raise ValueError(f'Invalid {rotation_angle=}') + raise ValueError(f"Invalid {rotation_angle=}") iu.rotate(scene, a, rotation_axis, rotation_angle) - - a_obj = bpy.data.objects[a] a_poly = a_obj.data.polygons[a_poly_index] # Recalculate vertex_a and normal_a after rotation - plane_point_a = iu.global_vertex_coordinates(a_obj, a_obj.data.vertices[a_poly.vertices[0]]) + plane_point_a = iu.global_vertex_coordinates( + a_obj, a_obj.data.vertices[a_poly.vertices[0]] + ) plane_normal_a = iu.global_polygon_normal(a_obj, a_poly) - + distance = (plane_point_a - plane_point_b).dot(plane_normal_b) # Move object a by the average distance minus the margin in the direction of the plane normal of b @@ -220,26 +232,29 @@ def snap_against(scene, a, b, a_plane, b_plane, margin = 0): iu.translate(scene, a, translation) - - -def random_sample_point(state: state_def.State, obj: bpy.types.Object, face_mask: np.ndarray, plane: tuple[str, int]) -> Vector: +def random_sample_point( + state: state_def.State, + obj: bpy.types.Object, + face_mask: np.ndarray, + plane: tuple[str, int], +) -> Vector: """ Given a plane, return a random point on the plane. """ - if obj.type != 'MESH': - raise ValueError(f'Unexpected {obj.type=}') + if obj.type != "MESH": + raise ValueError(f"Unexpected {obj.type=}") plane_mask = state.planes.tagged_plane_mask(obj, face_mask, plane) if not np.any(plane_mask): logging.warning( - f'No faces in object {obj.name} are coplanar with plane {plane}.' + f"No faces in object {obj.name} are coplanar with plane {plane}." 
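The `snap_against` hunk above is a standard axis-angle alignment: flip the target face normal, take the cross product of the two normals as the rotation axis (falling back to +Z when they are already parallel), and take `arccos` of the clipped dot product as the angle. A small sketch of that math under the same unit-normal assumption; the repository applies the rotation through its own `iu.rotate` helper, so the `rotation_matrix` call here is only for illustration:

```python
import numpy as np
import trimesh.transformations as tf

def axis_angle_to_face(n_a, n_b):
    # Rotate so that face normal n_a ends up opposing face normal n_b.
    target = -np.asarray(n_b, dtype=float)
    axis = np.cross(n_a, target)
    if np.isclose(np.linalg.norm(axis), 0, atol=1e-5):
        axis = np.array([0.0, 0.0, 1.0])  # already (anti)parallel: any axis works
    else:
        axis = axis / np.linalg.norm(axis)
    angle = np.arccos(np.clip(np.dot(n_a, target), -1.0, 1.0))
    return axis, angle

axis, angle = axis_angle_to_face(np.array([1.0, 0.0, 0.0]), np.array([0.0, 1.0, 0.0]))
R = tf.rotation_matrix(angle, axis)[:3, :3]
print(np.allclose(R @ np.array([1.0, 0.0, 0.0]), [0.0, -1.0, 0.0]))  # True
```

The remaining translation step is then just the signed point-plane distance `(plane_point_a - plane_point_b) · plane_normal_b`, shifted by the requested margin, as the comment in the hunk describes.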
) # Create a bmesh from the object mesh bm = bmesh.new() bm.from_mesh(obj.data) - bm.faces.ensure_lookup_table() + bm.faces.ensure_lookup_table() faces = [bm.faces[i] for i in np.where(plane_mask)[0]] @@ -258,15 +273,20 @@ def random_sample_point(state: state_def.State, obj: bpy.types.Object, face_mask # Use barycentric coordinates to sample a random point in the triangle # Random weights for each vertex weights = np.random.rand(3) - weights /= np.sum(weights) - random_point_local = weights[0] * verts[0] + weights[1] * verts[1] + weights[2] * verts[2] + weights /= np.sum(weights) + random_point_local = ( + weights[0] * verts[0] + weights[1] * verts[1] + weights[2] * verts[2] + ) random_point_global = obj.matrix_world @ Vector(random_point_local) bm.free() return random_point_global -def move_obj_random_pt(state: state_def.State, a, b, face_mask: np.ndarray, plane: tuple[str, int]): + +def move_obj_random_pt( + state: state_def.State, a, b, face_mask: np.ndarray, plane: tuple[str, int] +): """ move a to a random point on b """ @@ -302,10 +322,9 @@ def move_obj_random_pt(state: state_def.State, a, b, face_mask: np.ndarray, plan # set_location(scene, a, Vector((xy_loc.x, xy_loc.y, 0))) -def supported_by(scene, a, b, visualize = False): - - #check for collision first +def supported_by(scene, a, b, visualize=False): + # check for collision first if isinstance(a, str): a = [a] @@ -317,20 +336,20 @@ def supported_by(scene, a, b, visualize = False): if visualize: fig, ax = plt.subplots() - ax.set_aspect('equal', 'box') + ax.set_aspect("equal", "box") b_poly = iu.project_to_xy_poly(b_trimesh) if isinstance(b_poly, Polygon): x, y = b_poly.exterior.xy - ax.fill(x, y, alpha=0.5, fc='red', ec='black', label='Polygon b') + ax.fill(x, y, alpha=0.5, fc="red", ec="black", label="Polygon b") elif isinstance(b_poly, MultiPolygon): for sub_poly in b_poly.geoms: x, y = sub_poly.exterior.xy - ax.fill(x, y, alpha=0.5, fc='red', ec='black', label='Polygon b') + ax.fill(x, y, alpha=0.5, fc="red", ec="black", label="Polygon b") for a_mesh, a_trimesh in zip(a_meshes, a_trimeshes): cloned_a = butil.deep_clone_obj( a_mesh, keep_modifiers=True, keep_materials=False - ) + ) butil.modify_mesh( cloned_a, "BOOLEAN", apply=True, operation="INTERSECT", object=b_mesh ) @@ -342,17 +361,19 @@ def supported_by(scene, a, b, visualize = False): if visualize: if isinstance(intersection_poly, Polygon): x, y = intersection_poly.exterior.xy - ax.fill(x, y, alpha=0.5, fc='blue', ec='black', label='Polygon a') + ax.fill(x, y, alpha=0.5, fc="blue", ec="black", label="Polygon a") elif isinstance(intersection_poly, MultiPolygon): for sub_poly in intersection_poly.geoms: x, y = sub_poly.exterior.xy - ax.fill(x, y, alpha=0.5, fc='blue', ec='black', label='Polygon a') - ax.plot(com_projected[0], com_projected[1], 'o', color='black', label='COM of a') - + ax.fill(x, y, alpha=0.5, fc="blue", ec="black", label="Polygon a") + ax.plot( + com_projected[0], com_projected[1], "o", color="black", label="COM of a" + ) + if not intersection_convex.contains(Point(com_projected)): - if visualize: + if visualize: plt.show() - return False - if visualize: + return False + if visualize: plt.show() - return True \ No newline at end of file + return True diff --git a/infinigen/core/constraints/example_solver/geometry/validity.py b/infinigen/core/constraints/example_solver/geometry/validity.py index c0761647f..556c58fe5 100644 --- a/infinigen/core/constraints/example_solver/geometry/validity.py +++ 
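`random_sample_point` above draws three uniform weights and normalizes them to get barycentric coordinates on a triangle of the tagged plane. That always produces a point inside the triangle, but it concentrates samples toward the centroid; if uniform coverage ever matters, the usual square-root construction is a drop-in alternative. A hedged sketch with illustrative names, not something the diff itself changes:

```python
import numpy as np

def sample_triangle_uniform(a, b, c, rng=None):
    # Square-root trick: uniformly distributed point in triangle (a, b, c).
    rng = np.random.default_rng() if rng is None else rng
    u, v = rng.random(2)
    s = np.sqrt(u)
    w0, w1, w2 = 1.0 - s, s * (1.0 - v), s * v
    return w0 * np.asarray(a) + w1 * np.asarray(b) + w2 * np.asarray(c)

rng = np.random.default_rng(0)
tri = np.array([[0.0, 0.0, 0.0], [1.0, 0.0, 0.0], [0.0, 1.0, 0.0]])

uniform_pts = np.array([sample_triangle_uniform(*tri, rng=rng) for _ in range(10_000)])

w = rng.random((10_000, 3))        # normalized-uniform weights, as in the hunk above
w /= w.sum(axis=1, keepdims=True)
centered_pts = w @ tri

print(uniform_pts.std(axis=0))     # larger spread: covers the triangle evenly
print(centered_pts.std(axis=0))    # smaller spread: biased toward the centroid
```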
b/infinigen/core/constraints/example_solver/geometry/validity.py @@ -5,30 +5,33 @@ import logging -import bpy -from shapely.geometry import Point, Polygon, MultiPolygon +import gin +from shapely.geometry import MultiPolygon, Point, Polygon -from infinigen.core.util import blender as butil import infinigen.core.constraints.constraint_language as cl -from infinigen.core.constraints.constraint_language.util import meshes_from_names, blender_objs_from_names, subset, project_to_xy_poly -from infinigen.core.constraints.example_solver.state_def import State, ObjectState, RelationState -from infinigen.core.constraints.evaluator.node_impl.trimesh_geometry import constrain_contact, any_touching -from infinigen.core.constraints.example_solver.geometry.stability import stable_against, supported_by - from infinigen.core import tags as t - -import gin +from infinigen.core.constraints.constraint_language.util import ( + blender_objs_from_names, + meshes_from_names, + project_to_xy_poly, +) +from infinigen.core.constraints.evaluator.node_impl.trimesh_geometry import ( + any_touching, + constrain_contact, +) +from infinigen.core.constraints.example_solver.geometry.stability import stable_against +from infinigen.core.constraints.example_solver.state_def import State +from infinigen.core.util import blender as butil logger = logging.getLogger(__name__) + def check_pre_move_validity(scene, a, parent_dict, dx, dy): - """ - """ + """ """ parent = parent_dict[a] a_mesh = meshes_from_names(scene, a)[0] parent_mesh = meshes_from_names(scene, parent)[0] - blender_mesh = blender_objs_from_names(a)[0] - + blender_objs_from_names(a)[0] # move a mesh by dx, dy and check if the projection of a_mesh is contained in parent_mesh # a_mesh.apply_transform(trimesh.transformations.compose_matrix(translate=[dx,dy,0])) @@ -36,7 +39,7 @@ def check_pre_move_validity(scene, a, parent_dict, dx, dy): parent_poly = project_to_xy_poly(parent_mesh) centroid = a_poly.centroid new_centroid = Point([centroid.x + dx, centroid.y + dy]) - # plot + # plot # fig, ax = plt.subplots() # if isinstance(parent_poly, Polygon): # x, y = parent_poly.exterior.xy @@ -61,62 +64,67 @@ def check_pre_move_validity(scene, a, parent_dict, dx, dy): return True -def all_relations_valid(state, name): +def all_relations_valid(state, name): rels = state.objs[name].relations for i, relation_state in enumerate(rels): match relation_state.relation: - case cl.StableAgainst(child_tags, parent_tags, margin): + case cl.StableAgainst(_child_tags, _parent_tags, _margin): res = stable_against(state, name, relation_state) if not res: - logger.debug(f'{name} failed relation {i=}/{len(rels)} {relation_state.relation} on {relation_state.target_name}') + logger.debug( + f"{name} failed relation {i=}/{len(rels)} {relation_state.relation} on {relation_state.target_name}" + ) return False - case unmatched: + case _: raise TypeError(f"Unhandled {relation_state.relation}") - + return True + @gin.configurable def check_post_move_validity( - state: State, - name: str, - disable_collision_checking=False, - visualize=False -): - + state: State, name: str, disable_collision_checking=False, visualize=False +): scene = state.trimesh_scene objstate = state.objs[name] collision_objs = [ - os.obj.name for k, os in state.objs.items() + os.obj.name + for k, os in state.objs.items() if k != name and t.Semantics.NoCollision not in os.tags ] - if len(collision_objs) == 0: + if len(collision_objs) == 0: return True - - if not all_relations_valid(state, name): + if not all_relations_valid(state, 
name): if visualize: vis_obj = butil.copy(objstate.obj) - vis_obj.name = f'validity_relations_fail_{name}' + vis_obj.name = f"validity_relations_fail_{name}" return False - + if disable_collision_checking: return True if t.Semantics.NoCollision in objstate.tags: return True - touch = any_touching(scene, objstate.obj.name, collision_objs, bvh_cache=state.bvh_cache) + touch = any_touching( + scene, objstate.obj.name, collision_objs, bvh_cache=state.bvh_cache + ) if not constrain_contact(touch, should_touch=None, max_depth=0.0001): if visualize: vis_obj = butil.copy(objstate.obj) - vis_obj.name = f'validity_contact_fail_{name}' - - contact_names = [[x for x in t.names if not x.startswith('_')] for t in touch.contacts] - logger.debug(f'validity failed - {name} touched {contact_names[0]} {len(contact_names)=}') + vis_obj.name = f"validity_contact_fail_{name}" + + contact_names = [ + [x for x in t.names if not x.startswith("_")] for t in touch.contacts + ] + logger.debug( + f"validity failed - {name} touched {contact_names[0]} {len(contact_names)=}" + ) return False - + # supposed to go through the consgraph here - return True \ No newline at end of file + return True diff --git a/infinigen/core/constraints/example_solver/greedy/__init__.py b/infinigen/core/constraints/example_solver/greedy/__init__.py index f83271319..900585cfd 100644 --- a/infinigen/core/constraints/example_solver/greedy/__init__.py +++ b/infinigen/core/constraints/example_solver/greedy/__init__.py @@ -1,3 +1,3 @@ -from .all_substitutions import substitutions, iterate_assignments +from .active_for_stage import set_active, update_active_flags +from .all_substitutions import iterate_assignments, substitutions from .constraint_partition import filter_constraints -from .active_for_stage import update_active_flags, set_active \ No newline at end of file diff --git a/infinigen/core/constraints/example_solver/greedy/active_for_stage.py b/infinigen/core/constraints/example_solver/greedy/active_for_stage.py index 3db38b1e9..351584ef6 100644 --- a/infinigen/core/constraints/example_solver/greedy/active_for_stage.py +++ b/infinigen/core/constraints/example_solver/greedy/active_for_stage.py @@ -7,24 +7,17 @@ import logging from infinigen.core import tags as t -from infinigen.core.constraints import ( - constraint_language as cl, - reasoning as r, -) +from infinigen.core.constraints import reasoning as r from infinigen.core.constraints.evaluator import domain_contains from infinigen.core.constraints.example_solver import state_def - from infinigen.core.util import blender as butil logger = logging.getLogger(__name__) + def find_ancestors_of_type( - state: state_def.State, - objkey: str, - filter_type: r.Domain, - seen: set = None + state: state_def.State, objkey: str, filter_type: r.Domain, seen: set = None ) -> set[str]: - """ Find objkeys of all ancestors of `objkey` which match `filter_type` @@ -47,25 +40,20 @@ def find_ancestors_of_type( if domain_contains.domain_contains(filter_type, state, obj): return {objkey} - + result = set() for rel in obj.relations: - if rel.target_name in seen: continue - result.update(find_ancestors_of_type( - state, rel.target_name, filter_type, seen - )) + result.update(find_ancestors_of_type(state, rel.target_name, filter_type, seen)) return result + def _is_active_room_object( - state: state_def.State, - objkey: str, - var_assignments: dict[t.Variable, str] + state: state_def.State, objkey: str, var_assignments: dict[t.Variable, str] ) -> bool: - """ Determine if an object should be active for the given 
assignment @@ -78,15 +66,23 @@ def _is_active_room_object( if assignment is None: continue match var.name: - case 'room': - room_ancestors = find_ancestors_of_type(state, objkey, r.Domain({t.Semantics.Room})) + case "room": + room_ancestors = find_ancestors_of_type( + state, objkey, r.Domain({t.Semantics.Room}) + ) if assignment not in room_ancestors: - logger.debug(f'{objkey} is inactive due to room {room_ancestors=} {assignment=}') + logger.debug( + f"{objkey} is inactive due to room {room_ancestors=} {assignment=}" + ) return False - case 'obj': - obj_ancestors = find_ancestors_of_type(state, objkey, r.Domain({t.Semantics.Object})) + case "obj": + obj_ancestors = find_ancestors_of_type( + state, objkey, r.Domain({t.Semantics.Object}) + ) if len(obj_ancestors) and objkey not in obj_ancestors: - logger.debug(f'{objkey} is inactive due to obj {assignment=} {obj_ancestors=}') + logger.debug( + f"{objkey} is inactive due to obj {assignment=} {obj_ancestors=}" + ) return False case _: raise NotImplementedError( @@ -96,18 +92,17 @@ def _is_active_room_object( return True + def set_active(state, objkey, active): state.objs[objkey].active = active for child in butil.iter_object_tree(state.objs[objkey].obj): child.hide_viewport = not active -def update_active_flags( - state: state_def.State, - var_assignments: dict[t.Variable, str] -): + +def update_active_flags(state: state_def.State, var_assignments: dict[t.Variable, str]): count = 0 for objkey, objstate in state.objs.items(): active = _is_active_room_object(state, objkey, var_assignments) set_active(state, objkey, active) count += active - return count \ No newline at end of file + return count diff --git a/infinigen/core/constraints/example_solver/greedy/all_substitutions.py b/infinigen/core/constraints/example_solver/greedy/all_substitutions.py index c5237f390..4f7cdec3a 100644 --- a/infinigen/core/constraints/example_solver/greedy/all_substitutions.py +++ b/infinigen/core/constraints/example_solver/greedy/all_substitutions.py @@ -4,29 +4,25 @@ # Authors: Alexander Raistrick -import typing -import itertools import copy +import itertools import logging -import functools +import typing -from infinigen.core.constraints import ( - constraint_language as cl, - reasoning as r -) -from infinigen.core.constraints.example_solver import state_def from infinigen.core import tags as t +from infinigen.core.constraints import reasoning as r from infinigen.core.constraints.evaluator.domain_contains import objkeys_in_dom +from infinigen.core.constraints.example_solver import state_def logger = logging.getLogger(__name__) + def _resolve_toplevel_var( dom: r.Domain, state: state_def.State, limits: dict[t.Variable, int] = None, ) -> typing.Iterator[str]: - - """ + """ Find and yield all valid substitutions of a toplevel VariableTag in a given dom ASSUMES: there is at most one variable in the domain, and it is at the top level @@ -34,7 +30,7 @@ def _resolve_toplevel_var( if limits is None: limits = {} - + vars = [ti for ti in dom.tags if isinstance(ti, t.Variable)] if len(vars) == 0: yield dom @@ -47,7 +43,9 @@ def _resolve_toplevel_var( result = copy.deepcopy(dom) result.tags.remove(vartag) objkeys = objkeys_in_dom(result, state) - logger.debug(f'Found {len(objkeys)} valid assignments for {repr(vartag)} via {result} on ') + logger.debug( + f"Found {len(objkeys)} valid assignments for {repr(vartag)} via {result} on " + ) # if the user says limit "room" to 3 and we are doing "room", apply the limit name_limit = limits.get(vartag, None) @@ -55,42 +53,39 @@ def 
_resolve_toplevel_var( objkeys = objkeys[:name_limit] for objkey in objkeys: - logger.debug(f'Assigning {objkey} for {vartag}') + logger.debug(f"Assigning {objkey} for {vartag}") yield result.with_tags(state.objs[objkey].tags) + def substitutions( - dom: r.Domain, + dom: r.Domain, state: state_def.State, limits: dict[t.Variable, int] | None = None, nonempty: bool = False, ) -> typing.Iterator[r.Domain]: - """Find all t.Variable in d's tags or relations, and return one Domain for each possible assignment limits cuts off enumeration of each varname with some integer count """ - child_assignment_prod = itertools.product(*( - substitutions(dchild, state, limits, nonempty) - for _, dchild in dom.relations - )) + child_assignment_prod = itertools.product( + *(substitutions(dchild, state, limits, nonempty) for _, dchild in dom.relations) + ) i = None for i, dsubs in enumerate(child_assignment_prod): - assert len(dsubs) == len(dom.relations) rels = [(rel, dsubs[j]) for j, (rel, _) in enumerate(dom.relations)] - - candidate = r.Domain( - tags=dom.tags, relations=rels - ) - + + candidate = r.Domain(tags=dom.tags, relations=rels) + yield from _resolve_toplevel_var(candidate, state, limits=limits) if i is None and nonempty: - raise ValueError(f'Found no substitutions found for {dom=}') - + raise ValueError(f"Found no substitutions found for {dom=}") + + def iterate_assignments( dom: r.Domain, state: state_def.State, @@ -98,9 +93,8 @@ def iterate_assignments( limits: dict[t.Variable, int] | None = None, nonempty: bool = False, ) -> typing.Iterator[dict[t.Variable, str]]: - - """Find all combinations of assignments for the listed vars. - + """Find all combinations of assignments for the listed vars. + Variables will be considered IN ORDER, IE first variable can affect options for second variable, but not the other way around. 
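`substitutions` and `iterate_assignments` above enumerate variable bindings in order, so an earlier variable (for example `room`) constrains which candidates a later variable (for example `obj`) can take. A toy generator sketching that strategy; the function and data names are hypothetical stand-ins for the real `Domain` / `objkeys_in_dom` machinery:

```python
from typing import Callable, Iterator

def iterate_assignments_toy(
    variables: list[str],
    candidates_for: Callable[[str, dict], list[str]],
    partial: dict | None = None,
) -> Iterator[dict]:
    # Resolve variables left to right; earlier bindings shape later candidate sets.
    partial = dict(partial or {})
    if not variables:
        yield partial
        return
    var, rest = variables[0], variables[1:]
    for choice in candidates_for(var, partial):
        yield from iterate_assignments_toy(rest, candidates_for, {**partial, var: choice})

rooms = {"kitchen": ["stove", "sink"], "bedroom": ["bed"]}

def candidates(var, bound):
    if var == "room":
        return list(rooms)
    if var == "obj":                 # depends on the room chosen first
        return rooms[bound["room"]]
    return []

print(list(iterate_assignments_toy(["room", "obj"], candidates)))
# [{'room': 'kitchen', 'obj': 'stove'}, {'room': 'kitchen', 'obj': 'sink'},
#  {'room': 'bedroom', 'obj': 'bed'}]
```

The `limits` argument in the real implementation simply truncates each candidate list (as in the `objkeys[:name_limit]` line above), which is why a per-variable cap composes cleanly with this ordered enumeration.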
@@ -134,9 +128,7 @@ def iterate_assignments( assert isinstance(vars, list), vars var = vars[0] - doms_for_var = [ - d for d in dom.traverse() if var in d.tags - ] + doms_for_var = [d for d in dom.traverse() if var in d.tags] if len(doms_for_var) == 0: yield {} return @@ -144,21 +136,23 @@ def iterate_assignments( combined, *rest = doms_for_var for d in rest: combined = combined.intersection(d) - combined = copy.deepcopy(combined) # prevents modification of original domain if it had the var + combined = copy.deepcopy( + combined + ) # prevents modification of original domain if it had the var combined.tags.remove(var) if not combined.intersects(combined): - raise ValueError(f'{iterate_assignments.__name__} with {var=} arrived at contradictory {combined=}') - + raise ValueError( + f"{iterate_assignments.__name__} with {var=} arrived at contradictory {combined=}" + ) + candidates = sorted(objkeys_in_dom(combined, state)) candidates = [ - c for c in candidates - if t.Semantics.NoChildren not in state.objs[c].tags + c for c in candidates if t.Semantics.NoChildren not in state.objs[c].tags ] - + i = None for i, objkey in enumerate(candidates): - limit = limits.get(var, None) if limit is not None and i >= limits[var]: break @@ -167,17 +161,14 @@ def iterate_assignments( copy.deepcopy(dom), var, combined.with_tags(t.SpecificObject(objkey)) ) rest_iter = iterate_assignments( - dom_objkey, state, vars[1:], limits, + dom_objkey, + state, + vars[1:], + limits, ) for rest_assignments in rest_iter: - yield { - var: objkey, - **rest_assignments - } + yield {var: objkey, **rest_assignments} if i is None and nonempty: - raise ValueError(f'Found no assignments found for {dom=}') - - - + raise ValueError(f"Found no assignments found for {dom=}") diff --git a/infinigen/core/constraints/example_solver/greedy/constraint_partition.py b/infinigen/core/constraints/example_solver/greedy/constraint_partition.py index 1511f9765..675d8e2db 100644 --- a/infinigen/core/constraints/example_solver/greedy/constraint_partition.py +++ b/infinigen/core/constraints/example_solver/greedy/constraint_partition.py @@ -4,49 +4,39 @@ # Authors: Alexander Raistrick -import typing -import operator import copy -from functools import partial -from dataclasses import dataclass import logging +import operator +import typing +from functools import partial -from infinigen.core.constraints import ( - constraint_language as cl, - example_solver as ex, - reasoning as r, -) from infinigen.core import tags as t +from infinigen.core.constraints import constraint_language as cl +from infinigen.core.constraints import reasoning as r logger = logging.getLogger(__name__) -OPS_COMMUTATIVE = { - operator.add, - operator.and_, - operator.mul, - operator.or_ -} +OPS_COMMUTATIVE = {operator.add, operator.and_, operator.mul, operator.or_} OPS_UNIT_VALUE = { - operator.add: 0, + operator.add: 0, operator.mul: 1, operator.pow: 0, - operator.truediv: 0 + operator.truediv: 0, } + def _get_op_unit_value(node: cl.BoolExpression | cl.ScalarExpression): match node: - case cl.BoolOperatorExpression(func, operands): + case cl.BoolOperatorExpression(func, _): return True - case cl.ScalarOperatorExpression(func, operands) if func in OPS_UNIT_VALUE: + case cl.ScalarOperatorExpression(func, _) if func in OPS_UNIT_VALUE: return OPS_UNIT_VALUE[func] case _: - raise ValueError(f'Found no unit value for {node.__class__} {node.func}') + raise ValueError(f"Found no unit value for {node.__class__} {node.func}") -def _partition_dict( - terms: dict[str, cl.Node], - recurse: 
typing.Callable -): + +def _partition_dict(terms: dict[str, cl.Node], recurse: typing.Callable): new_terms = {} for k, v in terms.items(): part, relevant = recurse(v) @@ -55,11 +45,9 @@ def _partition_dict( new_terms[k] = part return new_terms + def _update_item_nodes( - node: cl.Node, - from_varname: str, - to_varname: str, - to_objs: cl.ObjectSetExpression + node: cl.Node, from_varname: str, to_varname: str, to_objs: cl.ObjectSetExpression ): for child in node.traverse(): if not isinstance(child, cl.item): @@ -71,13 +59,13 @@ def _update_item_nodes( return node + def _filter_gather_constraint( node: cl.ForAll | cl.SumOver | cl.MeanOver, recurse: typing.Callable, filter_dom: r.Domain, - var_assignments: dict[t.Variable, r.Domain] + var_assignments: dict[t.Variable, r.Domain], ) -> tuple[cl.Node, bool]: - objs, var, pred = node.objs, node.var, node.pred var = t.Variable(var) @@ -85,7 +73,7 @@ def _filter_gather_constraint( obj_dom = r.constraint_domain(objs) obj_dom = r.substitute_all(obj_dom, var_assignments) - + var_assignments = copy.deepcopy(var_assignments) or {} for varname, dom in var_assignments.items(): assert isinstance(varname, t.Variable) @@ -109,20 +97,20 @@ def _filter_gather_constraint( res.pred = pred_part relevant = pred_rel - return res, relevant - + return res, relevant + + def _filter_object_set( node: cl.ObjectSetExpression, recurse: typing.Callable, filter_dom: r.Domain, var_assignments: dict[t.Variable, r.Domain], ) -> tuple[cl.Node, bool]: - new_consnode = copy.deepcopy(node) dom = r.constraint_domain(node) dom_subst = r.substitute_all(dom, var_assignments) - + if not r.domain_finalized(dom_subst, check_anyrel=False, check_variable=True): raise ValueError( "Domain not finalized, unable to check against filter. " @@ -132,22 +120,26 @@ def _filter_object_set( relevant = dom_subst.intersects(filter_dom, require_satisfies_right=True) if ( - relevant - and not dom_subst.satisfies(filter_dom) # no need to filter something that is already strict enough + relevant + and not dom_subst.satisfies( + filter_dom + ) # no need to filter something that is already strict enough ): - finalized = r.domain_finalized(filter_dom, check_anyrel=False, check_variable=True) + finalized = r.domain_finalized( + filter_dom, check_anyrel=False, check_variable=True + ) assert finalized, filter_dom new_consnode = r.FilterByDomain(new_consnode, filter_dom) return new_consnode, relevant + def _filter_operator( node: cl.BoolOperatorExpression | cl.ScalarOperatorExpression, recurse: typing.Callable, filter_dom: r.Domain, - var_assignments: dict[t.Variable, r.Domain] + var_assignments: dict[t.Variable, r.Domain], ) -> tuple[cl.Node, bool]: - operands, func = node.operands, node.func op_results = [recurse(o) for o in operands] @@ -159,8 +151,7 @@ def _filter_operator( case ([op], f) if f in OPS_COMMUTATIVE: return op, True case (new_operands, f) if ( - len(new_operands) == len(operands) - or f in OPS_COMMUTATIVE + len(new_operands) == len(operands) or f in OPS_COMMUTATIVE ): return node.__class__(f, new_operands), True case _: @@ -168,40 +159,42 @@ def _filter_operator( any_relevant = any(o[1] for o in op_results) return res, any_relevant + def _filter_node_cases( node: cl.Node, recurse: typing.Callable, filter_dom: r.Domain, - var_assignments: dict[t.Variable, r.Domain] + var_assignments: dict[t.Variable, r.Domain], ) -> tuple[cl.Node, bool]: match node: case cl.Problem(cons, score_terms): prob = cl.Problem( - _partition_dict(cons, recurse), - _partition_dict(score_terms, recurse) + 
_partition_dict(cons, recurse), _partition_dict(score_terms, recurse) ) relevant = len(prob.constraints) > 0 or len(prob.score_terms) > 0 return prob, relevant case cl.ForAll() | cl.SumOver() | cl.MeanOver(): - return _filter_gather_constraint(node, recurse, filter_dom, var_assignments) + return _filter_gather_constraint(node, recurse, filter_dom, var_assignments) case cl.BoolOperatorExpression() | cl.ScalarOperatorExpression(): return _filter_operator(node, recurse, filter_dom, var_assignments) case cl.ObjectSetExpression(): return _filter_object_set(node, recurse, filter_dom, var_assignments) case _: - result_relevant = False result_consnode = copy.deepcopy(node) - + for name, child in node.children(): res, relevant = recurse(child) if not hasattr(node, name): - raise ValueError(f"Node {node.__class__} has child with {name=} but no attribute {name} to set") + raise ValueError( + f"Node {node.__class__} has child with {name=} but no attribute {name} to set" + ) setattr(result_consnode, name, res) result_relevant = result_relevant or relevant return result_consnode, result_relevant - + + def _check_partition_correctness( node: cl.ObjectSetExpression, filter_dom: r.Domain, @@ -209,7 +202,7 @@ def _check_partition_correctness( ): res_dom = r.constraint_domain(node) res_dom = r.substitute_all(res_dom, var_assignments) - + if not r.domain_finalized(res_dom, check_anyrel=False, check_variable=True): raise ValueError( f"While doing {_check_partition_correctness.__name__} for {node=} {filter_dom=}, " @@ -219,16 +212,16 @@ def _check_partition_correctness( if not res_dom.satisfies(filter_dom): raise ValueError(f"{res_dom=} does not satisfy {filter_dom=}") + def filter_constraints( node: cl.Node, filter_dom: r.Domain, var_assignments: dict[str, r.Domain] = None, - check_correctness=True + check_correctness=True, ) -> tuple[cl.Node, bool]: - - """ Return a constraint graph representing the component of `node` that is relevant for + """Return a constraint graph representing the component of `node` that is relevant for to a particular greedy filter domain. - + Parameters ---------- node : cl.Node @@ -236,14 +229,14 @@ def filter_constraints( filter_dom : Domain The domain which determines whether a constraint is relevant var_assignments : Domain - Domains to substitute for any t.Variable(name: str) in the constraint program, typically used for recursive calls. + Domains to substitute for any t.Variable(name: str) in the constraint program, typically used for recursive calls. Returns ------- partitioned: cl.Node The partitioned constraint program relevant: bool - Was any part of the constraint program relevant? + Was any part of the constraint program relevant? 
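The commutative-operator handling and the `OPS_UNIT_VALUE` table earlier in this file's diff amount to the following: when partitioning a score expression for one greedy stage, operands that never touch the filter domain can be dropped, which for `+` and `*` is equivalent to substituting the operator's unit value. A toy sketch with hypothetical `Leaf`/`Op` classes standing in for the real `cl.*` expression nodes:

```python
import operator
from dataclasses import dataclass
from typing import Callable

OPS_UNIT = {operator.add: 0, operator.mul: 1}

@dataclass
class Leaf:
    name: str
    value: float

@dataclass
class Op:
    func: Callable
    operands: list

def evaluate(node):
    if isinstance(node, Leaf):
        return node.value
    total = evaluate(node.operands[0])
    for child in node.operands[1:]:
        total = node.func(total, evaluate(child))
    return total

def filter_relevant(node, relevant_names):
    # Keep only subtrees that mention a relevant leaf; an operator whose
    # operands are all irrelevant collapses to its unit value.
    if isinstance(node, Leaf):
        return node, node.name in relevant_names
    kept, any_relevant = [], False
    for child in node.operands:
        sub, rel = filter_relevant(child, relevant_names)
        any_relevant = any_relevant or rel
        if rel:
            kept.append(sub)
    if not kept:
        return Leaf("unit", OPS_UNIT[node.func]), False
    return Op(node.func, kept), any_relevant

score = Op(operator.add, [Leaf("kitchen_score", 2.0), Leaf("bedroom_score", 5.0)])
pruned, relevant = filter_relevant(score, {"kitchen_score"})
print(relevant, evaluate(pruned))   # True 2.0 -- only the kitchen term survives
```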
""" @@ -252,18 +245,20 @@ def filter_constraints( if var_assignments is None: var_assignments = {} - recurse = partial(filter_constraints, filter_dom=filter_dom, var_assignments=var_assignments) - - logger.debug(f"{filter_constraints.__name__} for {node.__class__.__name__}, {var_assignments.keys()=}") + recurse = partial( + filter_constraints, filter_dom=filter_dom, var_assignments=var_assignments + ) + + logger.debug( + f"{filter_constraints.__name__} for {node.__class__.__name__}, {var_assignments.keys()=}" + ) new_node, relevant = _filter_node_cases(node, recurse, filter_dom, var_assignments) - if ( - relevant - and check_correctness - and isinstance(new_node, cl.ObjectSetExpression) - ): + if relevant and check_correctness and isinstance(new_node, cl.ObjectSetExpression): _check_partition_correctness(new_node, filter_dom, var_assignments) - logger.debug(f"Partitioned {node.__class__.__name__} to {new_node.__class__.__name__}") + logger.debug( + f"Partitioned {node.__class__.__name__} to {new_node.__class__.__name__}" + ) - return new_node, relevant \ No newline at end of file + return new_node, relevant diff --git a/infinigen/core/constraints/example_solver/moves/__init__.py b/infinigen/core/constraints/example_solver/moves/__init__.py index 1e0feb4ed..87c6ca28b 100644 --- a/infinigen/core/constraints/example_solver/moves/__init__.py +++ b/infinigen/core/constraints/example_solver/moves/__init__.py @@ -1,6 +1,6 @@ -from .moves import Move from .addition import Addition, Resample from .deletion import Deletion -from .swap import Swap +from .moves import Move +from .pose import ReinitPoseMove, RotateMove, TranslateMove from .reassignment import RelationPlaneChange, RelationTargetChange -from .pose import TranslateMove, RotateMove, ReinitPoseMove \ No newline at end of file +from .swap import Swap diff --git a/infinigen/core/constraints/example_solver/moves/addition.py b/infinigen/core/constraints/example_solver/moves/addition.py index 26326a017..b4fb2de27 100644 --- a/infinigen/core/constraints/example_solver/moves/addition.py +++ b/infinigen/core/constraints/example_solver/moves/addition.py @@ -4,45 +4,30 @@ # Authors: Alexander Raistrick -from dataclasses import dataclass, field -import numpy as np -import typing import logging -import gin - -from pprint import pprint +import typing +from dataclasses import dataclass import bpy -from mathutils import Vector, Matrix -import trimesh +import numpy as np from infinigen.assets.utils import bbox_from_mesh - -from infinigen.core.constraints.example_solver.state_def import State, ObjectState -from infinigen.core import tagging, tags as t - -from infinigen.core.constraints import ( - constraint_language as cl, - usage_lookup -) - -from infinigen.core.util import blender as butil -from infinigen.core.constraints.constraint_language.util import ( - delete_obj, - meshes_from_names -) -from infinigen.core.constraints.example_solver.geometry import( - validity +from infinigen.core import tagging +from infinigen.core import tags as t +from infinigen.core.constraints import usage_lookup +from infinigen.core.constraints.constraint_language.util import delete_obj +from infinigen.core.constraints.example_solver.geometry import ( + dof, + parse_scene, + validity, ) - -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils +from infinigen.core.constraints.example_solver.state_def import ObjectState, State from infinigen.core.placement.factory import AssetFactory +from infinigen.core.util import 
blender as butil -from infinigen.core.constraints.example_solver.geometry import dof, parse_scene from . import moves from .reassignment import pose_backup, restore_pose_backup -from time import time + # from line_profiler import LineProfiler @@ -50,8 +35,8 @@ GLOBAL_GENERATOR_SINGLETON_CACHE = {} -def sample_rand_placeholder(gen_class: type[AssetFactory]): +def sample_rand_placeholder(gen_class: type[AssetFactory]): singleton_gen = usage_lookup.has_usage(gen_class, t.Semantics.SingleGenerator) if singleton_gen and gen_class in GLOBAL_GENERATOR_SINGLETON_CACHE: @@ -65,47 +50,43 @@ def sample_rand_placeholder(gen_class: type[AssetFactory]): inst_seed = np.random.randint(1e7) if usage_lookup.has_usage(gen_class, t.Semantics.RealPlaceholder): - new_obj = gen.spawn_placeholder(inst_seed, loc=(0,0,0), rot=(0,0,0)) + new_obj = gen.spawn_placeholder(inst_seed, loc=(0, 0, 0), rot=(0, 0, 0)) elif usage_lookup.has_usage(gen_class, t.Semantics.AssetAsPlaceholder): - new_obj = gen.spawn_asset(inst_seed, loc=(0,0,0), rot=(0,0,0)) + new_obj = gen.spawn_asset(inst_seed, loc=(0, 0, 0), rot=(0, 0, 0)) elif usage_lookup.has_usage(gen_class, t.Semantics.PlaceholderBBox): new_obj = bbox_from_mesh.bbox_mesh_from_hipoly(gen, inst_seed, use_pholder=True) else: new_obj = bbox_from_mesh.bbox_mesh_from_hipoly(gen, inst_seed) - if new_obj.type != 'MESH': - raise ValueError(f'Addition created {new_obj.name=} with type {new_obj.type}') + if new_obj.type != "MESH": + raise ValueError(f"Addition created {new_obj.name=} with type {new_obj.type}") if len(new_obj.data.polygons) == 0: - raise ValueError(f'Addition created {new_obj.name=} with 0 faces') + raise ValueError(f"Addition created {new_obj.name=} with 0 faces") butil.put_in_collection( - list(butil.iter_object_tree(new_obj)), - butil.get_collection(f'placeholders') + list(butil.iter_object_tree(new_obj)), butil.get_collection("placeholders") ) parse_scene.preprocess_obj(new_obj) tagging.tag_canonical_surfaces(new_obj) return new_obj, gen + @dataclass class Addition(moves.Move): - - """ Move which generates an object and adds it to the scene with certain relations - """ + """Move which generates an object and adds it to the scene with certain relations""" gen_class: typing.Any relation_assignments: list temp_force_tags: set _new_obj: bpy.types.Object = None - def __repr__(self): - return f'{self.__class__.__name__}({self.gen_class.__name__}, {len(self.relation_assignments)} relations)' + return f"{self.__class__.__name__}({self.gen_class.__name__}, {len(self.relation_assignments)} relations)" def apply(self, state: State): - - target_name, = self.names + (target_name,) = self.names assert target_name not in state.objs self._new_obj, gen = sample_rand_placeholder(self.gen_class) @@ -119,26 +100,25 @@ def apply(self, state: State): obj=self._new_obj, generator=gen, tags=tags, - relations=self.relation_assignments + relations=self.relation_assignments, ) state.objs[target_name] = objstate success = dof.try_apply_relation_constraints(state, target_name) - logger.debug(f'{self} {success=}') + logger.debug(f"{self} {success=}") return success def revert(self, state: State): to_delete = list(butil.iter_object_tree(self._new_obj)) delete_obj(state.trimesh_scene, [a.name for a in to_delete]) - new_name, = self.names + (new_name,) = self.names del state.objs[new_name] + @dataclass class Resample(moves.Move): - - """ Move which replaces an existing object with a new one from the same generator - """ + """Move which replaces an existing object with a new one from the same 
generator""" align_corner: int = None @@ -147,7 +127,6 @@ class Resample(moves.Move): _backup_poseinfo = None def apply(self, state: State): - assert len(self.names) == 1 target_name = self.names[0] @@ -158,23 +137,22 @@ def apply(self, state: State): scene = state.trimesh_scene scene.graph.transforms.remove_node(os.obj.name) - scene.delete_geometry(os.obj.name + '_mesh') + scene.delete_geometry(os.obj.name + "_mesh") os.obj, os.generator = sample_rand_placeholder(os.generator.__class__) if self.align_corner is not None: - c_old = self._backup_obj.bound_box[self.align_corner] - c_new = os.obj.bound_box[self.align_corner] - raise NotImplementedError(f'{self.align_corner=}') - - parse_scene.add_to_scene(state.trimesh_scene, os.obj, preprocess=True) + self._backup_obj.bound_box[self.align_corner] + os.obj.bound_box[self.align_corner] + raise NotImplementedError(f"{self.align_corner=}") + + parse_scene.add_to_scene(state.trimesh_scene, os.obj, preprocess=True) dof.apply_relations_surfacesample(state, target_name) - + return validity.check_post_move_validity(state, target_name) - - def revert(self, state: State): - target_name, = self.names + def revert(self, state: State): + (target_name,) = self.names os = state.objs[target_name] delete_obj(state.trimesh_scene, os.obj.name) diff --git a/infinigen/core/constraints/example_solver/moves/deletion.py b/infinigen/core/constraints/example_solver/moves/deletion.py index 58097bc5c..dd23aa93c 100644 --- a/infinigen/core/constraints/example_solver/moves/deletion.py +++ b/infinigen/core/constraints/example_solver/moves/deletion.py @@ -4,49 +4,42 @@ # Authors: Alexander Raistrick -from dataclasses import dataclass -import numpy as np -import typing import logging - -import bpy -from infinigen.core.constraints.example_solver.geometry import parse_scene -from mathutils import Vector, Matrix -import trimesh +from dataclasses import dataclass from infinigen.core.constraints.example_solver import state_def +from infinigen.core.constraints.example_solver.geometry import parse_scene +from infinigen.core.constraints.example_solver.moves.moves import Move from infinigen.core.util import blender as butil -from infinigen.core.constraints.example_solver.moves import Move - logger = logging.getLogger(__name__) @dataclass class Deletion(Move): - # remove obj from scene _backup_state: state_def.ObjectState = None def __repr__(self): - return f'{self.__class__.__name__}({self.names})' + return f"{self.__class__.__name__}({self.names})" def apply(self, state): - - target_name, = self.names + (target_name,) = self.names self._backup_state = state.objs[target_name] - + for obj in butil.iter_object_tree(state.objs[target_name].obj): state.trimesh_scene.graph.transforms.remove_node(obj.name) - state.trimesh_scene.delete_geometry(obj.name + '_mesh') + state.trimesh_scene.delete_geometry(obj.name + "_mesh") del state.objs[target_name] return True - + def accept(self, state): butil.delete(list(butil.iter_object_tree(self._backup_state.obj))) def revert(self, state): - target_name, = self.names + (target_name,) = self.names state.objs[target_name] = self._backup_state - parse_scene.add_to_scene(state.trimesh_scene, self._backup_state.obj, preprocess=True) \ No newline at end of file + parse_scene.add_to_scene( + state.trimesh_scene, self._backup_state.obj, preprocess=True + ) diff --git a/infinigen/core/constraints/example_solver/moves/moves.py b/infinigen/core/constraints/example_solver/moves/moves.py index ada8cb2c0..d7473a745 100644 --- 
a/infinigen/core/constraints/example_solver/moves/moves.py +++ b/infinigen/core/constraints/example_solver/moves/moves.py @@ -4,24 +4,17 @@ # Authors: Alexander Raistrick -from dataclasses import dataclass -import numpy as np -import typing import logging - -import bpy -from infinigen.core.constraints.example_solver.geometry import parse_scene -from mathutils import Vector, Matrix -import trimesh +import typing +from dataclasses import dataclass from infinigen.core.constraints.example_solver import state_def -from infinigen.core.util import blender as butil logger = logging.getLogger(__name__) + @dataclass class Move: - names: typing.List[str] def __post_init__(self): @@ -32,6 +25,6 @@ def apply(self, state: state_def.State): def revert(self, state: state_def.State): raise NotImplementedError - + def accept(self, state: state_def.State): - pass \ No newline at end of file + pass diff --git a/infinigen/core/constraints/example_solver/moves/pose.py b/infinigen/core/constraints/example_solver/moves/pose.py index 454353538..69d313bd4 100644 --- a/infinigen/core/constraints/example_solver/moves/pose.py +++ b/infinigen/core/constraints/example_solver/moves/pose.py @@ -4,29 +4,25 @@ # Authors: Alexander Raistrick, Karhan Kayan -from dataclasses import dataclass -import numpy as np -import typing import logging +from dataclasses import dataclass -import bpy -from infinigen.core.constraints.example_solver.geometry import dof -import mathutils +import numpy as np -from infinigen.core.util import blender as butil -from infinigen.core.constraints.example_solver.geometry import validity, dof from infinigen.core.constraints.constraint_language import util as iu +from infinigen.core.constraints.example_solver.geometry import dof, validity -from . import moves from ..state_def import State +from . 
import moves from .reassignment import pose_backup, restore_pose_backup logger = logging.getLogger(__name__) + @dataclass class TranslateMove(moves.Move): # translate obj by vector - + translation: np.array _backup_pose: dict = None @@ -36,9 +32,8 @@ def __repr__(self): return f"{self.__class__.__name__}({self.names}, {norm:.2e})" def apply(self, state: State): + (target_name,) = self.names - target_name, = self.names - os = state.objs[target_name] self._backup_pose = pose_backup(os, dof=False) @@ -48,14 +43,14 @@ def apply(self, state: State): return False return True - + def revert(self, state: State): - target_name, = self.names + (target_name,) = self.names restore_pose_backup(state, target_name, self._backup_pose) + @dataclass class RotateMove(moves.Move): - axis: np.array angle: float @@ -65,8 +60,7 @@ def __repr__(self): return f"{self.__class__.__name__}({self.names}, {self.angle:.2e})" def apply(self, state: State): - - target_name, = self.names + (target_name,) = self.names os = state.objs[target_name] self._backup_pose = pose_backup(os, dof=False) @@ -75,32 +69,33 @@ def apply(self, state: State): if not validity.check_post_move_validity(state, target_name): return False - + return True def revert(self, state: State): - target_name, = self.names + (target_name,) = self.names restore_pose_backup(state, target_name, self._backup_pose) + @dataclass class ReinitPoseMove(moves.Move): - _backup_pose: dict = None def __repr__(self): return f"{self.__class__.__name__}({self.names})" def apply(self, state: State): - target_name, = self.names + (target_name,) = self.names ostate = state.objs[target_name] self._backup_pose = pose_backup(ostate) return dof.try_apply_relation_constraints(state, target_name) - + def revert(self, state: State): - target_name, = self.names + (target_name,) = self.names restore_pose_backup(state, target_name, self._backup_pose) -''' + +""" @dataclass class ScaleMove(Move): name: str @@ -119,4 +114,4 @@ def revert(self, state: State): blender_obj.scale /= Vector(self.scale) trimesh_obj.apply_transform(trimesh.transformations.compose_matrix(scale=list(1/self.scale))) self.obj.update() -''' \ No newline at end of file +""" diff --git a/infinigen/core/constraints/example_solver/moves/reassignment.py b/infinigen/core/constraints/example_solver/moves/reassignment.py index 421391db9..ac63df077 100644 --- a/infinigen/core/constraints/example_solver/moves/reassignment.py +++ b/infinigen/core/constraints/example_solver/moves/reassignment.py @@ -2,56 +2,48 @@ # This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory # of this source tree. -# Authors: +# Authors: # - Alexander Raistrick: primary author # - Karhan Kayan: sync with trimesh fix -from dataclasses import dataclass -import numpy as np -import typing -import logging import copy +from dataclasses import dataclass -from infinigen.core.util import blender as butil -from infinigen.core.constraints.constraint_language.util import ( - translate, - rotate, - sync_trimesh -) - -from infinigen.core.constraints.example_solver.geometry import validity +from infinigen.core.constraints.constraint_language.util import sync_trimesh from infinigen.core.constraints.example_solver.geometry import dof + +from ..state_def import ObjectState, State from . 
import moves -from ..state_def import State, ObjectState -def pose_backup(os: ObjectState, dof=True): +def pose_backup(os: ObjectState, dof=True): bak = dict( loc=tuple(os.obj.location), rot=tuple(os.obj.rotation_euler), ) if dof: - bak['dof_trans'] = copy.copy(os.dof_matrix_translation) - bak['dof_rot'] = copy.copy(os.dof_rotation_axis) + bak["dof_trans"] = copy.copy(os.dof_matrix_translation) + bak["dof_rot"] = copy.copy(os.dof_rotation_axis) return bak + def restore_pose_backup(state, name, bak): os = state.objs[name] - os.obj.location = bak['loc'] - os.obj.rotation_euler = bak['rot'] + os.obj.location = bak["loc"] + os.obj.rotation_euler = bak["rot"] - if 'dof_trans' in bak: - os.dof_matrix_translation = bak['dof_trans'] - if 'dof_rot' in bak: - os.dof_rotation_axis = bak['dof_rot'] + if "dof_trans" in bak: + os.dof_matrix_translation = bak["dof_trans"] + if "dof_rot" in bak: + os.dof_rotation_axis = bak["dof_rot"] sync_trimesh(state.trimesh_scene, state.objs[name].obj.name) + @dataclass class RelationPlaneChange(moves.Move): - relation_idx: int plane_idx: int @@ -59,12 +51,11 @@ class RelationPlaneChange(moves.Move): _backup_poseinfo = None def apply(self, state: State): - - target_name, = self.names + (target_name,) = self.names os = state.objs[target_name] rels = os.relations[self.relation_idx] - + self._backup_idx = rels.parent_plane_idx self._backup_poseinfo = pose_backup(os) @@ -74,16 +65,15 @@ def apply(self, state: State): return success def revert(self, state: State): + (target_name,) = self.names - target_name, = self.names - os = state.objs[target_name] os.relations[self.relation_idx].parent_plane_idx = self._backup_idx restore_pose_backup(state, target_name, self._backup_poseinfo) + @dataclass class RelationTargetChange(moves.Move): - # reassign obj to new parent name: str relation_idx: int @@ -95,7 +85,7 @@ class RelationTargetChange(moves.Move): def apply(self, state: State): os = state.objs[self.name] rels = os.relations[self.relation_idx] - + self._backup_target = rels.target_name self._backup_poseinfo = pose_backup(os) rels.target_name = self.new_target @@ -107,4 +97,4 @@ def revert(self, state: State): rels = os.relations[self.relation_idx] rels.target_name = self._backup_target - restore_pose_backup(state, self.name, self._backup_poseinfo) \ No newline at end of file + restore_pose_backup(state, self.name, self._backup_poseinfo) diff --git a/infinigen/core/constraints/example_solver/moves/swap.py b/infinigen/core/constraints/example_solver/moves/swap.py index 73b4ba846..57dfca8f0 100644 --- a/infinigen/core/constraints/example_solver/moves/swap.py +++ b/infinigen/core/constraints/example_solver/moves/swap.py @@ -4,19 +4,10 @@ # Authors: Alexander Raistrick -from dataclasses import dataclass -import numpy as np -import typing import logging - -import bpy -from infinigen.core.constraints.example_solver.geometry import parse_scene -from mathutils import Vector, Matrix -import trimesh +from dataclasses import dataclass from infinigen.core.constraints.example_solver import state_def -from infinigen.core.util import blender as butil - from infinigen.core.constraints.example_solver.moves import Move from .reassignment import pose_backup, restore_pose_backup @@ -35,7 +26,6 @@ def __post_init__(self): raise NotImplementedError(f"{self.__class__.__name__} untested") def apply(self, state: state_def.State): - target1, target2 = self.names o1 = state[target1].obj @@ -45,11 +35,16 @@ def apply(self, state: state_def.State): self._obj2_backup = pose_backup(o2, dof=False) o1.loc, 
o2.loc = o2.loc, o1.loc - o1.rotation_axis_angle, o2.rotation_axis_angle = o2.rotation_axis_angle, o1.rotation_axis_angle - o1.relation_assignments, o2.relation_assignments = o2.relation_assignments, o1.relation_assignments + o1.rotation_axis_angle, o2.rotation_axis_angle = ( + o2.rotation_axis_angle, + o1.rotation_axis_angle, + ) + o1.relation_assignments, o2.relation_assignments = ( + o2.relation_assignments, + o1.relation_assignments, + ) def revert(self, state: state_def.State): - target1, target2 = self.names restore_pose_backup(state, target1, self._obj1_backup) restore_pose_backup(state, target2, self._obj2_backup) @@ -57,6 +52,7 @@ def revert(self, state: state_def.State): o1 = state[target1].obj o2 = state[target2].obj - o1.relation_assignments, o2.relation_assignments = o2.relation_assignments, o1.relation_assignments - - \ No newline at end of file + o1.relation_assignments, o2.relation_assignments = ( + o2.relation_assignments, + o1.relation_assignments, + ) diff --git a/infinigen/core/constraints/example_solver/populate.py b/infinigen/core/constraints/example_solver/populate.py index affbc6d9d..9c0de0c44 100644 --- a/infinigen/core/constraints/example_solver/populate.py +++ b/infinigen/core/constraints/example_solver/populate.py @@ -1,7 +1,7 @@ # Copyright (c) Princeton University. # This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. -# Authors: +# Authors: # - Alexander Raistrick: populate_state_placeholders, apply_cutter # - Stamatis Alexandropoulos: Initial version of window cutting @@ -10,70 +10,67 @@ import bpy from tqdm import tqdm +from infinigen.core import tagging +from infinigen.core import tags as t +from infinigen.core.constraints import usage_lookup +from infinigen.core.constraints.constraint_language.util import delete_obj from infinigen.core.constraints.example_solver.geometry import parse_scene - from infinigen.core.constraints.example_solver.state_def import State -from infinigen.core.constraints.constraint_language.util import delete_obj - from infinigen.core.placement.placement import parse_asset_name from infinigen.core.util import blender as butil -from infinigen.core import tagging, tags as t -from infinigen.core.constraints import usage_lookup logger = logging.getLogger(__name__) -def apply_cutter(state, objkey, cutter): +def apply_cutter(state, objkey, cutter): os = state.objs[objkey] cut_objs = [] for i, relation_state in enumerate(os.relations): - # TODO in theory we maybe should check if they actually intersect parent_obj = state.objs[relation_state.target_name].obj butil.modify_mesh( - parent_obj, - 'BOOLEAN', - object=butil.copy(cutter), - operation='DIFFERENCE', - solver='FAST' + parent_obj, + "BOOLEAN", + object=butil.copy(cutter), + operation="DIFFERENCE", + solver="FAST", ) - target_obj_name = state.objs[relation_state.target_name].obj.name + target_obj_name = state.objs[relation_state.target_name].obj.name cut_objs.append((relation_state.target_name, target_obj_name)) - cutter_col = butil.get_collection('placeholders:asset_cutters') + cutter_col = butil.get_collection("placeholders:asset_cutters") butil.put_in_collection(cutter, cutter_col) return cut_objs + def populate_state_placeholders(state: State, filter=None, final=True): - - logger.info(f'Populating placeholders {final=} {filter=}') - unique_assets = butil.get_collection('unique_assets') + logger.info(f"Populating placeholders {final=} {filter=}") + unique_assets = butil.get_collection("unique_assets") + 
unique_assets.hide_viewport = True if final: for os in state.objs.values(): if t.Semantics.Room in os.tags: - os.obj = bpy.data.objects[os.obj.name + '.meshed'] - + os.obj = bpy.data.objects[os.obj.name + ".meshed"] + targets = [] for objkey, os in state.objs.items(): - if os.generator is None: continue - - if ( - filter is not None - and not usage_lookup.has_usage(os.generator.__class__, filter) + + if filter is not None and not usage_lookup.has_usage( + os.generator.__class__, filter ): continue - if 'spawn_asset' in os.obj.name: + if "spawn_asset" in os.obj.name: butil.put_in_collection(os.obj, unique_assets) - logger.debug(f'Found already populated asset {os.obj.name=}, continuing') + logger.debug(f"Found already populated asset {os.obj.name=}, continuing") continue targets.append(objkey) @@ -81,11 +78,10 @@ def populate_state_placeholders(state: State, filter=None, final=True): update_state_mesh_objs = [] for i, objkey in enumerate(targets): - os = state.objs[objkey] placeholder = os.obj - logger.info(f'Populating {i}/{len(targets)} {placeholder.name=}') + logger.info(f"Populating {i}/{len(targets)} {placeholder.name=}") old_objname = placeholder.name update_state_mesh_objs.append((objkey, old_objname)) @@ -93,26 +89,36 @@ def populate_state_placeholders(state: State, filter=None, final=True): *_, inst_seed = parse_asset_name(placeholder.name) os.obj = os.generator.spawn_asset( i=int(inst_seed), - loc=placeholder.location, # we could use placeholder=pholder here, but I worry pholder may have been modified - rot=placeholder.rotation_euler + loc=placeholder.location, # we could use placeholder=pholder here, but I worry pholder may have been modified + rot=placeholder.rotation_euler, ) os.generator.finalize_assets([os.obj]) butil.put_in_collection(os.obj, unique_assets) - cutter = next((o for o in butil.iter_object_tree(os.obj) if o.name.endswith('.cutter')), None) - logger.debug(f'{populate_state_placeholders.__name__} found {cutter=} for {os.obj.name=}') + cutter = next( + (o for o in butil.iter_object_tree(os.obj) if o.name.endswith(".cutter")), + None, + ) + logger.debug( + f"{populate_state_placeholders.__name__} found {cutter=} for {os.obj.name=}" + ) if cutter is not None: cut_objs = apply_cutter(state, objkey, cutter) - logger.debug(f'{populate_state_placeholders.__name__} cut {cutter.name=} from {cut_objs=}') + logger.debug( + f"{populate_state_placeholders.__name__} cut {cutter.name=} from {cut_objs=}" + ) update_state_mesh_objs += cut_objs + unique_assets.hide_viewport = False + if final: return # objects modified in any way (via pholder update or boolean cut) must be synched with trimesh state - for objkey, old_objname in tqdm(set(update_state_mesh_objs), desc='Updating trimesh with populated objects'): - - os = state.objs[objkey] + for objkey, old_objname in tqdm( + set(update_state_mesh_objs), desc="Updating trimesh with populated objects" + ): + os = state.objs[objkey] # delete old trimesh delete_obj(state.trimesh_scene, old_objname, delete_blender=False) diff --git a/infinigen/core/constraints/example_solver/propose_continous.py b/infinigen/core/constraints/example_solver/propose_continous.py index 1e3aa9b56..2b9c2a43b 100644 --- a/infinigen/core/constraints/example_solver/propose_continous.py +++ b/infinigen/core/constraints/example_solver/propose_continous.py @@ -4,67 +4,57 @@ # Authors: Alexander Raistrick -import typing import logging +import typing import numpy as np -import gin - -from .geometry import dof -from mathutils import Vector - -from 
infinigen.core.constraints import ( - constraint_language as cl, - reasoning as r, - usage_lookup -) +from infinigen.core import tags as t +from infinigen.core.constraints import constraint_language as cl +from infinigen.core.constraints import reasoning as r from infinigen.core.constraints.evaluator.domain_contains import domain_contains -from . import ( - moves, - state_def -) - -from infinigen.core import tags as t +from . import moves, state_def logger = logging.getLogger(__name__) TRANS_MULT = 8 TRANS_MIN = 0.01 ROT_MULT = np.pi -ROT_MIN = 0 #2 * np.pi / 200 +ROT_MIN = 0 # 2 * np.pi / 200 ANGLE_STEP_SIZE = (2 * np.pi) / 8 + def get_pose_candidates( - consgraph: cl.Node, - state: state_def.State, + consgraph: cl.Node, + state: state_def.State, filter_domain: r.Domain, require_rot_free: bool = False, ): - return [ - k for k, o in state.objs.items() + k + for k, o in state.objs.items() if o.active - and domain_contains(filter_domain, state, o) + and domain_contains(filter_domain, state, o) and not (require_rot_free and o.dof_rotation_axis is None) ] + def propose_translate( consgraph: cl.Node, - state: state_def.State, + state: state_def.State, filter_domain: r.Domain, - temperature: float + temperature: float, ) -> typing.Iterator[moves.TranslateMove]: - candidates = get_pose_candidates(consgraph, state, filter_domain) - candidates = [c for c in candidates if state.objs[c].dof_matrix_translation is not None] + candidates = [ + c for c in candidates if state.objs[c].dof_matrix_translation is not None + ] if not len(candidates): return - - while True: + while True: obj_state_name = np.random.choice(candidates) obj_state = state.objs[obj_state_name] @@ -77,24 +67,26 @@ def propose_translate( translation=projected_vector, ) + def propose_rotate( consgraph: cl.Node, - state: state_def.State, + state: state_def.State, filter_domain: r.Domain, - temperature: float + temperature: float, ) -> typing.Iterator[moves.RotateMove]: - candidates = get_pose_candidates(consgraph, state, filter_domain) candidates = [ - c for c in candidates if ( + c + for c in candidates + if ( t.Semantics.NoRotation not in state.objs[c].tags - and state.objs[c].dof_rotation_axis is not None - and state.objs[c].dof_rotation_axis.dot(np.array((0,0,1))) > 0.95 + and state.objs[c].dof_rotation_axis is not None + and state.objs[c].dof_rotation_axis.dot(np.array((0, 0, 1))) > 0.95 ) ] if not len(candidates): return - + while True: obj_state_name = np.random.choice(candidates) obj_state = state.objs[obj_state_name] @@ -107,42 +99,34 @@ def propose_rotate( random_angle = ang * ANGLE_STEP_SIZE axis = obj_state.dof_rotation_axis - yield moves.RotateMove( - names=[obj_state_name], - axis=axis, - angle=random_angle - ) + yield moves.RotateMove(names=[obj_state_name], axis=axis, angle=random_angle) + def propose_reinit_pose( consgraph: cl.Node, - state: state_def.State, + state: state_def.State, filter_domain: r.Domain, - temperature: float + temperature: float, ) -> typing.Iterator[moves.ReinitPoseMove]: - candidates = get_pose_candidates(consgraph, state, filter_domain) - candidates = [c for c in candidates if state.objs[c].dof_matrix_translation is not None] + candidates = [ + c for c in candidates if state.objs[c].dof_matrix_translation is not None + ] if len(candidates) == 0: return while True: obj_state_name = np.random.choice(candidates) - obj_state = state.objs[obj_state_name] + state.objs[obj_state_name] yield moves.ReinitPoseMove( names=[obj_state_name], ) -def propose_scale( - consgraph, - state, - temperature -): + +def 
propose_scale(consgraph, state, temperature): raise NotImplementedError obj_state = np.random.choice(state.objs) random_scale = np.random.normal(0, temperature, size=3) - return moves.ScaleMove( - name=obj_state.name, - scale=random_scale - ) \ No newline at end of file + return moves.ScaleMove(name=obj_state.name, scale=random_scale) diff --git a/infinigen/core/constraints/example_solver/propose_discrete.py b/infinigen/core/constraints/example_solver/propose_discrete.py index 8b3b6d8f2..5ad52e311 100644 --- a/infinigen/core/constraints/example_solver/propose_discrete.py +++ b/infinigen/core/constraints/example_solver/propose_discrete.py @@ -2,60 +2,49 @@ # This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory # of this source tree. -# Authors: +# Authors: # - Alexander Raistrick: primary author # - Karhan Kayan: fix bug to ensure deterministic behavior -import logging -import pdb import copy +import logging from itertools import product -import typing import gin import numpy as np -from pprint import pprint, pformat - -from infinigen.core.constraints import ( - constraint_language as cl, - reasoning as r, - usage_lookup -) -from infinigen.core.constraints.evaluator.domain_contains import ( - domain_contains, objkeys_in_dom -) -from .geometry import planes -from . import ( - moves, - state_def, - propose_relations -) + +from infinigen.core import tags as t +from infinigen.core.constraints import constraint_language as cl +from infinigen.core.constraints import reasoning as r +from infinigen.core.constraints import usage_lookup +from infinigen.core.constraints.evaluator.domain_contains import objkeys_in_dom from infinigen.core.placement.factory import AssetFactory from infinigen.core.util import blender as butil -from infinigen.core import tags as t + +from . 
import moves, propose_relations, state_def logger = logging.getLogger(__name__) -class DummyCubeGenerator(AssetFactory): +class DummyCubeGenerator(AssetFactory): def __init__(self, seed): super().__init__(seed) def create_asset(self, *_, **__): return butil.spawn_cube() -def lookup_generator(preds: set[t.Semantics]): +def lookup_generator(preds: set[t.Semantics]): if t.contradiction(preds): - raise ValueError(f'Got lookup_generator for unsatisfiable {preds=}') - + raise ValueError(f"Got lookup_generator for unsatisfiable {preds=}") + preds_pos, preds_neg = t.decompose_tags(preds) fac_class_tags = [x.generator for x in preds if isinstance(x, t.FromGenerator)] if len(fac_class_tags) > 1: - raise ValueError(f'{preds=} had {len(fac_class_tags)=}, only 1 is allowed') + raise ValueError(f"{preds=} had {len(fac_class_tags)=}, only 1 is allowed") elif len(fac_class_tags) == 1: - fac_class_tag, = fac_class_tags + (fac_class_tag,) = fac_class_tags usage = usage_lookup.usages_of_factory(fac_class_tag) remainder = preds_pos - usage - {fac_class_tag} if len(remainder): @@ -79,108 +68,115 @@ def lookup_generator(preds: set[t.Semantics]): return options + def propose_addition_bound_gen( cons: cl.Node, - curr: state_def.State, + curr: state_def.State, bounds: list[r.Bound], goal_bound_idx: r.Bound, gen_class: AssetFactory, - filter_domain: r.Domain + filter_domain: r.Domain, ): - - ''' + """ Try to propose any addition move involving the specified bound and generator - ''' + """ goal_bound = bounds[goal_bound_idx] - logger.debug(f'attempt propose_addition for {gen_class.__name__} rels={len(goal_bound.domain.relations)}') + logger.debug( + f"attempt propose_addition for {gen_class.__name__} rels={len(goal_bound.domain.relations)}" + ) assert r.domain_finalized(goal_bound.domain), goal_bound if not active_for_stage(goal_bound.domain, filter_domain): - raise ValueError(f'Attempted to propose {goal_bound} but it should not be active for {filter_domain=}') + raise ValueError( + f"Attempted to propose {goal_bound} but it should not be active for {filter_domain=}" + ) if len(goal_bound.domain.relations) == 0: - raise ValueError(f'Attempted to propose unconstrained {gen_class.__name__} with no relations') + raise ValueError( + f"Attempted to propose unconstrained {gen_class.__name__} with no relations" + ) found_tags = usage_lookup.usages_of_factory(gen_class) goal_pos, *_ = t.decompose_tags(goal_bound.domain.tags) if not t.implies(found_tags, goal_pos) and found_tags.issuperset(goal_pos): - raise ValueError(f'Got {gen_class=} for {goal_pos=}, but it had {found_tags=}') + raise ValueError(f"Got {gen_class=} for {goal_pos=}, but it had {found_tags=}") prop_dom = goal_bound.domain.intersection(filter_domain) prop_dom.tags.update(found_tags) - #logger.debug(f'GOAL {goal_bound.domain} \nFILTER {filter_domain}\nPROP {prop_dom}\n\n') + # logger.debug(f'GOAL {goal_bound.domain} \nFILTER {filter_domain}\nPROP {prop_dom}\n\n') logger.debug( - 'GOAL %s\n FILTER %s\n PROP %s\n', - goal_bound.domain.repr(abbrv=True), - filter_domain.repr(abbrv=True), + "GOAL %s\n FILTER %s\n PROP %s\n", + goal_bound.domain.repr(abbrv=True), + filter_domain.repr(abbrv=True), prop_dom, ) assert active_for_stage(prop_dom, filter_domain) - + search_rels = [ - rd for rd in prop_dom.relations - if not isinstance(rd[0], cl.NegatedRelation) + rd for rd in prop_dom.relations if not isinstance(rd[0], cl.NegatedRelation) ] i = None - for i, assignments in enumerate(propose_relations.find_assignments(curr, search_rels)): - - logger.debug(f'Found 
assignments %d %s %s', i, len(assignments), assignments) + for i, assignments in enumerate( + propose_relations.find_assignments(curr, search_rels) + ): + logger.debug("Found assignments %d %s %s", i, len(assignments), assignments) yield moves.Addition( - names=[f'{np.random.randint(1e6):04d}_{gen_class.__name__}'], # decided later + names=[ + f"{np.random.randint(1e6):04d}_{gen_class.__name__}" + ], # decided later gen_class=gen_class, relation_assignments=assignments, temp_force_tags=prop_dom.tags, ) if i is None: - #raise ValueError(f'Found no assignments for {prop_dom}') - logger.debug(f'Found no assignments for {prop_dom.repr(abbrv=True)}') + # raise ValueError(f'Found no assignments for {prop_dom}') + logger.debug(f"Found no assignments for {prop_dom.repr(abbrv=True)}") pass else: - logger.debug(f'Exhausted all assignments for {gen_class=}') + logger.debug(f"Exhausted all assignments for {gen_class=}") -def active_for_stage( - prop_dom: r.Domain, - filter_dom: r.Domain -): + +def active_for_stage(prop_dom: r.Domain, filter_dom: r.Domain): return prop_dom.intersects(filter_dom, require_satisfies_right=True) + @gin.configurable def preproc_bounds( - bounds: list[r.Bound], - state: state_def.State, - filter: r.Domain, + bounds: list[r.Bound], + state: state_def.State, + filter: r.Domain, reverse=False, shuffle=True, - print_bounds=False + print_bounds=False, ): - if print_bounds: - print(f"{preproc_bounds.__name__} for {filter.get_objs_named()} (total {len(bounds)}):") + print( + f"{preproc_bounds.__name__} for {filter.get_objs_named()} (total {len(bounds)}):" + ) for b in bounds: res = active_for_stage(b.domain, filter) if res: - print("BOUND", res, b.domain.intersection(filter).repr(abbrv=True), "\n") + print( + "BOUND", res, b.domain.intersection(filter).repr(abbrv=True), "\n" + ) for b in bounds: if not r.domain_finalized(b.domain, check_anyrel=False, check_variable=True): - raise ValueError(f'{preproc_bounds.__name__} found non-finalized {b.domain=}') + raise ValueError( + f"{preproc_bounds.__name__} found non-finalized {b.domain=}" + ) + + bounds = [b for b in bounds if active_for_stage(b.domain, filter)] - bounds = [ - b for b in bounds if active_for_stage(b.domain, filter) - ] - if shuffle: np.random.shuffle(bounds) - bound_counts = [ - len(objkeys_in_dom(b.domain, state)) - for b in bounds - ] + bound_counts = [len(objkeys_in_dom(b.domain, state)) for b in bounds] order = np.arange(len(bounds)) @@ -194,27 +190,26 @@ def key(i): else: res = 0 return -res if reverse else res - + order = sorted(order, key=key) return [bounds[i] for i in order if key(i) != 1] + def propose_addition( - cons: cl.Node, - curr: state_def.State, + cons: cl.Node, + curr: state_def.State, filter_domain: r.Domain, temperature: float, ): - bounds = r.constraint_bounds(cons) bounds = preproc_bounds(bounds, curr, filter_domain) if len(bounds) == 0: - logger.debug(f'Found no bounds for {filter_domain=}') + logger.debug(f"Found no bounds for {filter_domain=}") return - - for i, bound in enumerate(bounds): + for i, bound in enumerate(bounds): if bound.low is None: # bounds with low=None are supposed to cap other bounds, not introduce new objects continue @@ -223,12 +218,15 @@ def propose_addition( if len(fac_options) == 0: if bound.low is None or bound.low == 0: continue - raise ValueError(f'Found no generators for {bound}') + raise ValueError(f"Found no generators for {bound}") for gen_class in fac_options: - yield from propose_addition_bound_gen(cons, curr, bounds, i, gen_class, filter_domain) + yield from 
propose_addition_bound_gen( + cons, curr, bounds, i, gen_class, filter_domain + ) + + logger.debug(f"propose_addition found no candidate moves for {bound}") - logger.debug(f'propose_addition found no candidate moves for {bound}') def propose_deletion( cons: cl.Node, @@ -236,12 +234,11 @@ def propose_deletion( filter_domain: r.Domain, temperature: float, ): - bounds = r.constraint_bounds(cons) bounds = preproc_bounds(bounds, curr, filter_domain, reverse=True, shuffle=True) if len(bounds) == 0: - logger.debug(f'Found no bounds for {filter_domain=}') + logger.debug(f"Found no bounds for {filter_domain=}") return np.random.shuffle(bounds) @@ -252,6 +249,7 @@ def propose_deletion( for cand in candidates: yield moves.Deletion([cand]) + def propose_relation_plane_change( cons: cl.Node, curr: state_def.State, @@ -261,18 +259,19 @@ def propose_relation_plane_change( cand_objs = objkeys_in_dom(filter_domain, curr) if len(cand_objs) == 0: - logger.debug(f'Found no cand_objs for {filter_domain=}') + logger.debug(f"Found no cand_objs for {filter_domain=}") return np.random.shuffle(cand_objs) for cand in cand_objs: for i, rels in enumerate(curr.objs[cand].relations): - if not isinstance(rels.relation, cl.GeometryRelation): continue - + target_obj = curr.objs[rels.target_name].obj - n_planes = len(curr.planes.get_tagged_planes(target_obj, rels.relation.parent_tags)) + n_planes = len( + curr.planes.get_tagged_planes(target_obj, rels.relation.parent_tags) + ) if n_planes <= 1: continue @@ -282,61 +281,55 @@ def propose_relation_plane_change( if plane_idx == rels.parent_plane_idx: continue yield moves.RelationPlaneChange( - names=[cand], - relation_idx=i, - plane_idx=plane_idx + names=[cand], relation_idx=i, plane_idx=plane_idx ) + def propose_resample( cons: cl.Node, curr: state_def.State, filter_domain: r.Domain, temperature: float, ): - cand_objs = objkeys_in_dom(filter_domain, curr) if len(cand_objs) == 0: - logger.debug(f'Found no cand_objs for {filter_domain=}') + logger.debug(f"Found no cand_objs for {filter_domain=}") return np.random.shuffle(cand_objs) - - for cand in cand_objs: + for cand in cand_objs: os = curr.objs[cand] if usage_lookup.has_usage(os.generator.__class__, t.Semantics.SingleGenerator): continue - + yield moves.Resample(names=[cand], align_corner=None) - #corner_options = [None] + list(range(6)) - #np.random.shuffle(corner_options) - #for c in corner_options: + # corner_options = [None] + list(range(6)) + # np.random.shuffle(corner_options) + # for c in corner_options: # yield moves.Resample(name=cand, align_corner=c) -def is_swap_domains_unaffected( - state: state_def.State, - name1: str, - name2: str -): + +def is_swap_domains_unaffected(state: state_def.State, name1: str, name2: str): raise NotImplementedError() - + + def propose_swap( cons: cl.Node, curr: state_def.State, filter_domain: r.Domain, temperature: float, ): - raise NotImplementedError() - + cand_objs = objkeys_in_dom(filter_domain, curr) if len(cand_objs) == 0: - logger.debug(f'Found no cand_objs for {filter_domain=}') + logger.debug(f"Found no cand_objs for {filter_domain=}") return - + a_objs = copy.copy(cand_objs) b_objs = copy.copy(cand_objs) np.random.shuffle(a_objs) @@ -347,4 +340,4 @@ def propose_swap( continue if not is_swap_domains_unaffected(curr, a, b): continue - yield moves.Swap(names=[a, b]) \ No newline at end of file + yield moves.Swap(names=[a, b]) diff --git a/infinigen/core/constraints/example_solver/propose_relations.py b/infinigen/core/constraints/example_solver/propose_relations.py index 
841d873b8..877dcea8e 100644 --- a/infinigen/core/constraints/example_solver/propose_relations.py +++ b/infinigen/core/constraints/example_solver/propose_relations.py @@ -6,62 +6,50 @@ import logging import typing +from pprint import pprint import numpy as np -from pprint import pprint -from infinigen.core.constraints import ( - constraint_language as cl, - reasoning as r, -) -from infinigen.core.constraints.evaluator.domain_contains import ( - objkeys_in_dom -) -from .geometry import planes -from . import ( - moves, - state_def -) - -from infinigen.core import tags as t, tagging +from infinigen.core.constraints import constraint_language as cl +from infinigen.core.constraints import reasoning as r +from infinigen.core.constraints.evaluator.domain_contains import objkeys_in_dom + +from . import state_def logger = logging.getLogger(__name__) -def minimize_redundant_relations( - relations: list[tuple[cl.Relation, r.Domain]] -): - + +def minimize_redundant_relations(relations: list[tuple[cl.Relation, r.Domain]]): """ Given a list of relations that must be true, use the first as a constraint to tighten the remaining relations """ - + assert len(relations) > 0 # TODO Hacky: moves AnyRelations to the back so _hopefully_ they get implied before we get to them relations = sorted( - relations, - key=lambda r: isinstance(r[0], cl.AnyRelation), - reverse=True + relations, key=lambda r: isinstance(r[0], cl.AnyRelation), reverse=True ) (rel, dom), *rest = relations # Force all remaining relations to be compatible with (rel, dom), thereby reducing their search space remaining_relations = [] - for (r_later, d_later) in rest: - - logger.debug(f'Inspecting {r_later=} {d_later=}') + for r_later, d_later in rest: + logger.debug(f"Inspecting {r_later=} {d_later=}") if d_later.intersects(dom): logger.debug(f"Intersecting {d_later} with {dom}") d_later = d_later.intersection(dom) - + if r.reldom_implies((rel, dom), (r_later, d_later)): # (rlater, dlater) is guaranteed true so long as we satisfied (rel, dom), we dont need to separately assign it - logger.debug(f'Discarding since rlater,dlater it is implied') + logger.debug("Discarding since rlater,dlater it is implied") continue else: - logger.debug(f'Keeping {r_later, d_later} since it is not implied by {rel, dom} ') + logger.debug( + f"Keeping {r_later, d_later} since it is not implied by {rel, dom} " + ) remaining_relations.append((r_later, d_later)) implied = any( @@ -70,72 +58,72 @@ def minimize_redundant_relations( ) return (rel, dom), remaining_relations, implied - + def find_assignments( curr: state_def.State, relations: list[tuple[cl.Relation, r.Domain]], assignments: list[state_def.RelationState] = None, ) -> typing.Iterator[list[state_def.RelationState]]: - - """Iterate over possible assignments that satisfy the given relations. Some assignments may not be feasible geometrically - + """Iterate over possible assignments that satisfy the given relations. Some assignments may not be feasible geometrically - a naive implementation of this function would just enumerate all possible objects matching the assignments, and let the solver - discover that many combinations are impossible. *This* implementation attemps to never generate guaranteed-invalid combinations in the first place. + discover that many combinations are impossible. *This* implementation attemps to never generate guaranteed-invalid combinations in the first place. 
Complexity is pretty astronomical: - N^M where N is number of candidates per relation, and M is number of relations - reduced somewhat when relations intersect or imply eachother - luckily, M is typically 1, 2 or 3, as objects arent often related to lots of other objects - TODO: + TODO: - discover new relations constraints, which can arise from the particular choice of objects - prune early when object choice causes bounds to be violated This function essentially does a complex form of SAT-solving. It *really* shouldnt be written in python """ - + if assignments is None: assignments = [] - #print('FIND ASSIGNMENTS TOPLEVEL') - #pprint(relations) + # print('FIND ASSIGNMENTS TOPLEVEL') + # pprint(relations) if len(relations) == 0: yield assignments return - - logger.debug(f'Attempting to assign {relations[0]}') + + logger.debug(f"Attempting to assign {relations[0]}") (rel, dom), remaining_relations, implied = minimize_redundant_relations(relations) assert len(remaining_relations) < len(relations) if implied: - logger.debug(f'Found remaining_relations implies {(rel, dom)=}, skipping it') + logger.debug(f"Found remaining_relations implies {(rel, dom)=}, skipping it") yield from find_assignments( - curr, - relations=remaining_relations, - assignments=assignments + curr, relations=remaining_relations, assignments=assignments ) return - + if isinstance(rel, cl.AnyRelation): pprint(relations) pprint([(rel, dom)] + remaining_relations) - raise ValueError(f'Got {rel} as first relation. Invalid! Maybe the program is underspecified?') - + raise ValueError( + f"Got {rel} as first relation. Invalid! Maybe the program is underspecified?" + ) + candidates = objkeys_in_dom(dom, curr) for parent_candidate_name in candidates: - logging.debug(f'{parent_candidate_name=}') + logging.debug(f"{parent_candidate_name=}") parent_state = curr.objs[parent_candidate_name] - n_parent_planes = len(curr.planes.get_tagged_planes(parent_state.obj, rel.parent_tags)) + n_parent_planes = len( + curr.planes.get_tagged_planes(parent_state.obj, rel.parent_tags) + ) parent_order = np.arange(n_parent_planes) - np.random.shuffle(parent_order) + np.random.shuffle(parent_order) for parent_plane in parent_order: - - #logger.debug(f'Considering {parent_candidate_name=} {parent_plane=} {n_parent_planes=}') + # logger.debug(f'Considering {parent_candidate_name=} {parent_plane=} {n_parent_planes=}') assignment = state_def.RelationState( relation=rel, @@ -145,7 +133,7 @@ def find_assignments( ) yield from find_assignments( - curr, + curr, relations=remaining_relations, assignments=assignments + [assignment], ) diff --git a/infinigen/core/constraints/example_solver/room/__init__.py b/infinigen/core/constraints/example_solver/room/__init__.py index 58157b2f5..7b456e393 100644 --- a/infinigen/core/constraints/example_solver/room/__init__.py +++ b/infinigen/core/constraints/example_solver/room/__init__.py @@ -2,5 +2,5 @@ # This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
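The `find_assignments` generator in `propose_relations.py` above assigns one relation at a time and recurses on the remaining, already-tightened relations, which is where the N^M cost quoted in its docstring comes from. The toy version below only illustrates that backtracking shape; `enumerate_assignments` and `candidates_for` are stand-in names, not Infinigen's API.

```python
from typing import Callable, Iterator, Sequence

def enumerate_assignments(
    relations: Sequence[str],
    candidates_for: Callable[[str], Sequence[str]],
    partial: tuple = (),
) -> Iterator[tuple]:
    if not relations:  # every relation assigned -> emit one complete assignment
        yield partial
        return
    first, rest = relations[0], relations[1:]
    for cand in candidates_for(first):  # N candidates per relation, M relations deep
        yield from enumerate_assignments(rest, candidates_for, partial + ((first, cand),))

# Toy usage: two relations with two candidate parents each -> 2**2 assignments.
demo = list(enumerate_assignments(["on", "against"], lambda rel: ["obj_a", "obj_b"]))
assert len(demo) == 4
```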
# Authors: Lingjie Mei -from .blueprint import RoomSolver, MultistoryRoomSolver +from .blueprint import MultistoryRoomSolver, RoomSolver from .graph import GraphMaker diff --git a/infinigen/core/constraints/example_solver/room/blueprint.py b/infinigen/core/constraints/example_solver/room/blueprint.py index e2e3dddc8..0716a5512 100644 --- a/infinigen/core/constraints/example_solver/room/blueprint.py +++ b/infinigen/core/constraints/example_solver/room/blueprint.py @@ -6,38 +6,36 @@ from copy import deepcopy -import bpy +import gin import numpy as np from numpy.random import uniform from shapely import Polygon from tqdm import tqdm, trange -import gin -from infinigen.assets.utils.misc import toggle_hide -from infinigen.core.placement.factory import AssetFactory +from infinigen.core.constraints.constraint_language import Semantics +from infinigen.core.constraints.example_solver.room import constants +from infinigen.core.constraints.example_solver.state_def import State from infinigen.core.util.math import FixedSeed from infinigen.core.util.random import random_general as rg -from infinigen.core.util import blender as butil -from infinigen.core import tagging, tags as t -from .constants import WALL_HEIGHT +from .constants import WALL_HEIGHT +from .contour import ContourFactory from .graph import GraphMaker from .scorer import BlueprintScorer, JointBlueprintScorer -from .contour import ContourFactory -from .solidifier import BlueprintSolidifier from .segment import SegmentMaker +from .solidifier import BlueprintSolidifier from .solver import BlueprintSolver, BlueprintStaircaseSolver -from .utils import polygon2obj, unit_cast -from infinigen.core.constraints.example_solver.room import constants - -from infinigen.core.constraints.example_solver.state_def import State, ObjectState -from infinigen.core.constraints.constraint_language import Semantics +from .utils import unit_cast @gin.configurable class RoomSolver: - - def __init__(self, factory_seed, n_divide_trials=2500, iters_mult=150, ): + def __init__( + self, + factory_seed, + n_divide_trials=2500, + iters_mult=150, + ): self.factory_seed = factory_seed with FixedSeed(factory_seed): self.graph_maker = GraphMaker(factory_seed) @@ -59,7 +57,7 @@ def __init__(self, factory_seed, n_divide_trials=2500, iters_mult=150, ): def simulated_anneal(self, assignment, info): score = self.scorer.find_score(assignment, info) - with tqdm(total=self.iterations, desc='Sampling solutions') as pbar: + with tqdm(total=self.iterations, desc="Sampling solutions") as pbar: while pbar.n < self.iterations: assignment_, info_ = deepcopy(assignment), deepcopy(info) resp = self.solver.perturb_solution(assignment_, info_) @@ -70,12 +68,11 @@ def simulated_anneal(self, assignment, info): scale = self.score_scale * pbar.n / self.iterations if np.log(uniform()) < (score - score_) * scale: assignment, info, score = assignment_, info_, score_ - pbar.set_description(f'loss={score:.4f}') + pbar.set_description(f"loss={score:.4f}") return assignment, info def solve(self): - assignment, info = [], {} for i in range(self.n_divide_trials): info = self.segment_maker.build_segments() @@ -84,13 +81,15 @@ def solve(self): break if assignment is None: - raise ValueError(f'{self.__class__.__name__} got {assignment=} after {self.n_divide_trials=}') + raise ValueError( + f"{self.__class__.__name__} got {assignment=} after {self.n_divide_trials=}" + ) assignment, info = self.simulated_anneal(assignment, info) state, rooms_meshed = self.solidifier.solidify(assignment, info) - unique_roomtypes 
= set(Semantics(s.split('_')[0]) for s in self.graph.rooms) + unique_roomtypes = set(Semantics(s.split("_")[0]) for s in self.graph.rooms) dimensions = self.width, self.height, constants.WALL_HEIGHT return state, unique_roomtypes, dimensions @@ -98,9 +97,14 @@ def solve(self): @gin.configurable class MultistoryRoomSolver: - - def __init__(self, factory_seed, n_divide_trials=2500, iters_mult=150, - n_stories=('categorical', 0., .0, .5 ,.5), fixed_contour=('bool', .5)): + def __init__( + self, + factory_seed, + n_divide_trials=2500, + iters_mult=150, + n_stories=("categorical", 0.0, 0.0, 0.5, 0.5), + fixed_contour=("bool", 0.5), + ): self.factory_seed = factory_seed with FixedSeed(factory_seed): self.n_stories = rg(n_stories) @@ -112,21 +116,28 @@ def __init__(self, factory_seed, n_divide_trials=2500, iters_mult=150, self.contour_factories, self.contours = [], [] self.build_contours() - self.segment_makers = [SegmentMaker(self.factory_seed, self.contours[i], len(self.graphs[i])) for i - in range(self.n_stories)] - self.solvers = [BlueprintSolver(self.contours[i], self.graphs[i]) for i in range(self.n_stories)] + self.segment_makers = [ + SegmentMaker(self.factory_seed, self.contours[i], len(self.graphs[i])) + for i in range(self.n_stories) + ] + self.solvers = [ + BlueprintSolver(self.contours[i], self.graphs[i]) + for i in range(self.n_stories) + ] self.staircase_solver = BlueprintStaircaseSolver(self.contours) self.scorer = JointBlueprintScorer(self.graphs) - self.solidifiers = [BlueprintSolidifier(self.graphs[i], i) for i in range(self.n_stories)] + self.solidifiers = [ + BlueprintSolidifier(self.graphs[i], i) for i in range(self.n_stories) + ] self.n_divide_trials = n_divide_trials self.iterations = iters_mult * sum(len(g) for g in self.graphs) self.score_scale = 5 - self.staircase_solver_prob = .1 + self.staircase_solver_prob = 0.1 def build_graphs(self, factory_seed): for i in range(self.n_stories): - kwargs = {'entrance_type': 'none'} if i > 0 else {} + kwargs = {"entrance_type": "none"} if i > 0 else {} graph_maker = GraphMaker(factory_seed, i, self.n_stories > 1, **kwargs) self.graph_makers.append(graph_maker) if self.fixed_contour and i > 0: @@ -135,12 +146,16 @@ def build_graphs(self, factory_seed): else: for j in range(self.n_contour_trials): graph = graph_maker.make_graph(np.random.randint(1e6)) - args = [self.widths[-1], self.heights[-1]] if len(self.graphs) > 0 else [None, None] + args = ( + [self.widths[-1], self.heights[-1]] + if len(self.graphs) > 0 + else [None, None] + ) width, height = graph_maker.suggest_dimensions(graph, *args) if width is not None and height is not None: break else: - raise Exception('Invalid graph') + raise Exception("Invalid graph") self.widths.append(width) self.heights.append(height) self.graphs.append(graph) @@ -156,9 +171,15 @@ def build_contours(self): contour = contour_factory.make_contour(np.random.randint(1e6)) if len(self.contours) > 0: x_offset = unit_cast((self.widths[i] - self.widths[0]) / 2) - y_offset = unit_cast((self.heights[i] - self.heights[0]) / 2) + y_offset = unit_cast( + (self.heights[i] - self.heights[0]) / 2 + ) contour = Polygon( - [(x - x_offset, y - y_offset) for x, y in contour.boundary.coords[:]]) + [ + (x - x_offset, y - y_offset) + for x, y in contour.boundary.coords[:] + ] + ) if not self.contours[-1].contains(contour): continue self.contour_factories.append(contour_factory) @@ -173,7 +194,9 @@ def solve(self): while len(assignments) == 0: staircase = self.contour_factories[-1].add_staircase(self.contours[-1]) for j 
in range(self.n_stories): - for _ in trange(self.n_divide_trials, desc=f'Dividing segments for {j}'): + for _ in trange( + self.n_divide_trials, desc=f"Dividing segments for {j}" + ): info = self.segment_makers[j].build_segments(staircase) assignment = self.solvers[j].find_assignment(info) if assignment is not None: @@ -193,20 +216,22 @@ def solve(self): unique_roomtypes = set() for graph in self.graphs: for s in graph.rooms: - unique_roomtypes.add(Semantics(s.split('_')[0])) + unique_roomtypes.add(Semantics(s.split("_")[0])) dimensions = self.widths[0], self.heights[0], WALL_HEIGHT * self.n_stories return State(obj_states), unique_roomtypes, dimensions def simulated_anneal(self, assignments, infos): score = self.scorer.find_score(assignments, infos) - with tqdm(total=self.iterations, desc='Sampling solutions') as pbar: + with tqdm(total=self.iterations, desc="Sampling solutions") as pbar: while pbar.n < self.iterations: assignments_, infos_ = deepcopy(assignments), deepcopy(infos) if uniform() < self.staircase_solver_prob: resp = self.staircase_solver.perturb_solution(assignments, infos) else: probs = np.array([len(g) for g in self.graphs]) - j = np.random.choice(np.arange(self.n_stories), p=probs / probs.sum()) + j = np.random.choice( + np.arange(self.n_stories), p=probs / probs.sum() + ) resp = self.solvers[j].perturb_solution(assignments_[j], infos_[j]) if not resp.is_success: continue diff --git a/infinigen/core/constraints/example_solver/room/configs.py b/infinigen/core/constraints/example_solver/room/configs.py index 8c481faac..c41d41d40 100644 --- a/infinigen/core/constraints/example_solver/room/configs.py +++ b/infinigen/core/constraints/example_solver/room/configs.py @@ -4,18 +4,32 @@ # Authors: Lingjie Mei from collections import defaultdict -from infinigen.assets.materials import brick, hardwood_floor, plaster, rug, tile -from infinigen.assets.materials.woods import tiled_wood +from infinigen.assets.materials import brick, plaster, rug, tile from infinigen.assets.materials.stone_and_concrete import concrete +from infinigen.assets.materials.woods import tiled_wood from infinigen.core.constraints.example_solver.room.types import RoomType from infinigen.core.util.color import hsv2rgba from infinigen.core.util.random import log_uniform -from infinigen.core.util.random import random_general as rg -EXTERIOR_CONNECTED_ROOM_TYPES = [RoomType.Bedroom, RoomType.Garage, RoomType.Balcony, RoomType.DiningRoom, - RoomType.Kitchen, RoomType.LivingRoom] -SQUARE_ROOM_TYPES = [RoomType.Kitchen, RoomType.Bedroom, RoomType.LivingRoom, RoomType.Closet, - RoomType.Bathroom, RoomType.Garage, RoomType.Balcony, RoomType.DiningRoom, RoomType.Utility] +EXTERIOR_CONNECTED_ROOM_TYPES = [ + RoomType.Bedroom, + RoomType.Garage, + RoomType.Balcony, + RoomType.DiningRoom, + RoomType.Kitchen, + RoomType.LivingRoom, +] +SQUARE_ROOM_TYPES = [ + RoomType.Kitchen, + RoomType.Bedroom, + RoomType.LivingRoom, + RoomType.Closet, + RoomType.Bathroom, + RoomType.Garage, + RoomType.Balcony, + RoomType.DiningRoom, + RoomType.Utility, +] TYPICAL_AREA_ROOM_TYPES = { RoomType.Kitchen: 20, RoomType.Bedroom: 25, @@ -30,27 +44,38 @@ RoomType.Staircase: 20, } ROOM_NUMBERS = {RoomType.Bathroom: (1, 10), RoomType.LivingRoom: (1, 10)} -COMBINED_ROOM_TYPES = [[RoomType.Hallway, RoomType.LivingRoom, RoomType.DiningRoom], [RoomType.Garage]] +COMBINED_ROOM_TYPES = [ + [RoomType.Hallway, RoomType.LivingRoom, RoomType.DiningRoom], + [RoomType.Garage], +] PANORAMIC_ROOM_TYPES = { - RoomType.Hallway: .3, - RoomType.LivingRoom: .5, - 
RoomType.DiningRoom: .5, + RoomType.Hallway: 0.3, + RoomType.LivingRoom: 0.5, + RoomType.DiningRoom: 0.5, RoomType.Balcony: 1, } -FUNCTIONAL_ROOM_TYPES = [RoomType.Kitchen, RoomType.Bedroom, RoomType.LivingRoom, RoomType.Bathroom, - RoomType.DiningRoom] -WINDOW_ROOM_TYPES = defaultdict(lambda: 1, { - RoomType.Utility: .3, - RoomType.Closet: 0., - RoomType.Bathroom: .5, - RoomType.Garage: .5, -}) +FUNCTIONAL_ROOM_TYPES = [ + RoomType.Kitchen, + RoomType.Bedroom, + RoomType.LivingRoom, + RoomType.Bathroom, + RoomType.DiningRoom, +] +WINDOW_ROOM_TYPES = defaultdict( + lambda: 1, + { + RoomType.Utility: 0.3, + RoomType.Closet: 0.0, + RoomType.Bathroom: 0.5, + RoomType.Garage: 0.5, + }, +) def make_room_colors(): - bedroom_color = hsv2rgba(0., .8, log_uniform(.02, .1)) - hallway_color = hsv2rgba(.4, .8, log_uniform(.02, .1)) - utility_color = hsv2rgba(.8, .8, log_uniform(.02, .1)) + bedroom_color = hsv2rgba(0.0, 0.8, log_uniform(0.02, 0.1)) + hallway_color = hsv2rgba(0.4, 0.8, log_uniform(0.02, 0.1)) + utility_color = hsv2rgba(0.8, 0.8, log_uniform(0.02, 0.1)) return { RoomType.Kitchen: hallway_color, RoomType.Bedroom: bedroom_color, @@ -67,66 +92,108 @@ def make_room_colors(): ROOM_COLORS = make_room_colors() -ROOM_CHILDREN = defaultdict(dict, { - RoomType.LivingRoom: { - RoomType.LivingRoom: ('bool', .1), - RoomType.Bedroom: ('categorical', .0, .45, .4, .1, .05), - RoomType.Closet: ('bool', .1), - RoomType.Bathroom: ('bool', .2), - RoomType.Garage: ('bool', .2), - RoomType.Balcony: ('bool', .2), - RoomType.DiningRoom: ('bool', 1.0), - RoomType.Utility: ('bool', .2), - RoomType.Hallway: ('categorical', .5, .4, .1) +ROOM_CHILDREN = defaultdict( + dict, + { + RoomType.LivingRoom: { + RoomType.LivingRoom: ("bool", 0.1), + RoomType.Bedroom: ("categorical", 0.0, 0.45, 0.4, 0.1, 0.05), + RoomType.Closet: ("bool", 0.1), + RoomType.Bathroom: ("bool", 0.2), + RoomType.Garage: ("bool", 0.2), + RoomType.Balcony: ("bool", 0.2), + RoomType.DiningRoom: ("bool", 1.0), + RoomType.Utility: ("bool", 0.2), + RoomType.Hallway: ("categorical", 0.5, 0.4, 0.1), + }, + RoomType.Kitchen: { + RoomType.Garage: ("bool", 0.5), + RoomType.Utility: ("bool", 0.1), + }, + RoomType.Bedroom: { + RoomType.Bathroom: ("bool", 0.3), + RoomType.Closet: ("bool", 0.5), + }, + RoomType.Bathroom: {RoomType.Closet: ("bool", 0.2)}, + RoomType.DiningRoom: { + RoomType.Kitchen: ("bool", 1.0), + RoomType.Hallway: ("bool", 0.2), + }, }, - RoomType.Kitchen: {RoomType.Garage: ('bool', .5), RoomType.Utility: ('bool', .1) - }, - RoomType.Bedroom: {RoomType.Bathroom: ('bool', .3), RoomType.Closet: ('bool', .5)}, - RoomType.Bathroom: {RoomType.Closet: ('bool', .2)}, - RoomType.DiningRoom: {RoomType.Kitchen: ('bool', 1.), RoomType.Hallway: ('bool', .2) - } -}) +) -STUDIO_ROOM_CHILDREN = defaultdict(dict, { - RoomType.LivingRoom: { - RoomType.Bedroom: ('categorical', .0, 1.), - RoomType.DiningRoom: ('bool', 1.), +STUDIO_ROOM_CHILDREN = defaultdict( + dict, + { + RoomType.LivingRoom: { + RoomType.Bedroom: ("categorical", 0.0, 1.0), + RoomType.DiningRoom: ("bool", 1.0), + }, + RoomType.Bedroom: {RoomType.Bathroom: ("bool", 1.0)}, + RoomType.DiningRoom: {RoomType.Kitchen: ("bool", 1.0)}, }, - RoomType.Bedroom: {RoomType.Bathroom: ('bool', 1.)}, - RoomType.DiningRoom: {RoomType.Kitchen: ('bool', 1.) 
- } -}) -UPSTAIRS_ROOM_CHILDREN = defaultdict(dict, { - RoomType.LivingRoom: { - RoomType.Bedroom: ('categorical', .0, .4, .5, .2), - RoomType.Closet: ('bool', .2), - RoomType.Bathroom: ('bool', .4), - RoomType.Balcony: ('bool', .4), - RoomType.Utility: ('bool', .2), - RoomType.Hallway: ('categorical', .0, .5, .5) +) +UPSTAIRS_ROOM_CHILDREN = defaultdict( + dict, + { + RoomType.LivingRoom: { + RoomType.Bedroom: ("categorical", 0.0, 0.4, 0.5, 0.2), + RoomType.Closet: ("bool", 0.2), + RoomType.Bathroom: ("bool", 0.4), + RoomType.Balcony: ("bool", 0.4), + RoomType.Utility: ("bool", 0.2), + RoomType.Hallway: ("categorical", 0.0, 0.5, 0.5), + }, + RoomType.Bedroom: { + RoomType.Bathroom: ("bool", 0.3), + RoomType.Closet: ("bool", 0.5), + }, + RoomType.Bathroom: {RoomType.Closet: ("bool", 0.2)}, + RoomType.Balcony: { + RoomType.Utility: ("bool", 0.4), + RoomType.Hallway: ("bool", 0.1), + }, }, - RoomType.Bedroom: {RoomType.Bathroom: ('bool', .3), RoomType.Closet: ('bool', .5)}, - RoomType.Bathroom: {RoomType.Closet: ('bool', .2)}, - RoomType.Balcony: {RoomType.Utility: ('bool', .4), RoomType.Hallway: ('bool', .1)}, -}) +) LOOP_ROOM_TYPES = { - RoomType.LivingRoom: {RoomType.Garage: .2, RoomType.Balcony: .2, RoomType.Kitchen: .1}, - RoomType.Bedroom: {RoomType.Balcony: .1}, + RoomType.LivingRoom: { + RoomType.Garage: 0.2, + RoomType.Balcony: 0.2, + RoomType.Kitchen: 0.1, + }, + RoomType.Bedroom: {RoomType.Balcony: 0.1}, } -ROOM_WALLS = defaultdict(lambda: plaster, { - RoomType.Kitchen: ('weighted_choice', (2, tile), (5, plaster)), - RoomType.Garage: ('weighted_choice', (5, concrete), (1, brick), (3, plaster)), - RoomType.Utility: ('weighted_choice', (1, concrete), (1, brick), (1, brick), (5, plaster)), - RoomType.Balcony: ('weighted_choice', (1, brick), (5, plaster)), - RoomType.Bathroom: tile -}) +ROOM_WALLS = defaultdict( + lambda: plaster, + { + RoomType.Kitchen: ("weighted_choice", (2, tile), (5, plaster)), + RoomType.Garage: ("weighted_choice", (5, concrete), (1, brick), (3, plaster)), + RoomType.Utility: ( + "weighted_choice", + (1, concrete), + (1, brick), + (1, brick), + (5, plaster), + ), + RoomType.Balcony: ("weighted_choice", (1, brick), (5, plaster)), + RoomType.Bathroom: tile, + }, +) -ROOM_FLOORS = defaultdict(lambda: ('weighted_choice', (3, tiled_wood), (1, tile), (1, rug)), { - RoomType.Garage: concrete, - RoomType.Utility: ('weighted_choice', (1, concrete), (1, plaster), (1, tile)), - RoomType.Bathroom: tile, - RoomType.Balcony: tile -}) +ROOM_FLOORS = defaultdict( + lambda: ("weighted_choice", (3, tiled_wood), (1, tile), (1, rug)), + { + RoomType.Garage: concrete, + RoomType.Utility: ("weighted_choice", (1, concrete), (1, plaster), (1, tile)), + RoomType.Bathroom: tile, + RoomType.Balcony: tile, + }, +) -PILLAR_ROOM_TYPES = [RoomType.Hallway, RoomType.LivingRoom, RoomType.Staircase, RoomType.DiningRoom] +PILLAR_ROOM_TYPES = [ + RoomType.Hallway, + RoomType.LivingRoom, + RoomType.Staircase, + RoomType.DiningRoom, +] diff --git a/infinigen/core/constraints/example_solver/room/constants.py b/infinigen/core/constraints/example_solver/room/constants.py index c8fec8bc3..4f9c2c549 100644 --- a/infinigen/core/constraints/example_solver/room/constants.py +++ b/infinigen/core/constraints/example_solver/room/constants.py @@ -2,7 +2,7 @@ # This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory # of this source tree. 
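The `('bool', p)`, `('categorical', ...)`, `('uniform', a, b)` and `('weighted_choice', (w, x), ...)` tuples used throughout these room configs (and in `constants.py` below) are distribution specs sampled at runtime via `infinigen.core.util.random.random_general`. The interpreter below reflects one plausible reading of those specs and is only a sketch, not the library's actual implementation.

```python
import numpy as np

def sample_spec(spec):
    """Toy interpreter for the spec tuples above (assumed semantics, not Infinigen's code)."""
    if not isinstance(spec, tuple):
        return spec                                   # plain values pass through
    kind, *args = spec
    if kind == "bool":
        return np.random.uniform() < args[0]          # True with probability p
    if kind == "uniform":
        return np.random.uniform(args[0], args[1])
    if kind == "categorical":
        probs = np.array(args, dtype=float)
        return int(np.random.choice(len(args), p=probs / probs.sum()))
    if kind == "weighted_choice":
        weights = np.array([w for w, _ in args], dtype=float)
        idx = np.random.choice(len(args), p=weights / weights.sum())
        return args[idx][1]                           # return the weighted item itself
    raise ValueError(f"unknown spec kind {kind!r}")

sample_spec(("bool", 0.3))           # e.g. a door/closet presence flag
sample_spec(("uniform", 2.7, 3.8))   # e.g. a wall height draw
```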
-# Authors: +# Authors: # - Lingjie Mei: primary author # - Karhan Kayan: bug fixes @@ -17,15 +17,19 @@ def make_np(xs): @gin.configurable -def global_params(unit=.5, segment_margin=1.2, wall_thickness=('uniform', .2, .3), - wall_height=('uniform', 2.7, 3.8)): +def global_params( + unit=0.5, + segment_margin=1.2, + wall_thickness=("uniform", 0.2, 0.3), + wall_height=("uniform", 2.7, 3.8), +): wall_thickness = rg(wall_thickness) wall_height = rg(wall_height) return { - 'unit': unit, - 'segment_margin': segment_margin, - 'wall_thickness': wall_thickness, - 'wall_height': wall_height + "unit": unit, + "segment_margin": segment_margin, + "wall_thickness": wall_thickness, + "wall_height": wall_height, } @@ -33,12 +37,16 @@ def global_params(unit=.5, segment_margin=1.2, wall_thickness=('uniform', .2, .3 @gin.configurable -def door_params(door_width=('uniform', .85, 1), door_size=('uniform', 2., 2.4)): +def door_params(door_width=("uniform", 0.85, 1), door_size=("uniform", 2.0, 2.4)): door_width = rg(door_width) assert door_width > 0 door_margin = (door_width + WALL_THICKNESS) / 2 door_size = rg(door_size) - return {'door_width': door_width, 'door_margin': door_margin, 'door_size': door_size, } + return { + "door_width": door_width, + "door_margin": door_margin, + "door_size": door_size, + } DOOR_WIDTH, DOOR_MARGIN, DOOR_SIZE = make_np(door_params().values()) @@ -46,9 +54,9 @@ def door_params(door_width=('uniform', .85, 1), door_size=('uniform', 2., 2.4)): @gin.configurable def window_params( - max_window_length=('uniform', 6, 8), - window_height=('uniform', .4, 1.2), - window_margin=('uniform', .2, .6) + max_window_length=("uniform", 6, 8), + window_height=("uniform", 0.4, 1.2), + window_margin=("uniform", 0.2, 0.6), ): max_window_length = rg(max_window_length) window_height = rg(window_height) @@ -56,19 +64,21 @@ def window_params( window_size = WALL_HEIGHT - WALL_THICKNESS - window_height - window_margin assert window_size > 0 return { - 'max_window_length': max_window_length, - 'window_height': window_height, - 'window_margin': window_margin, - 'window_size': window_size, + "max_window_length": max_window_length, + "window_height": window_height, + "window_margin": window_margin, + "window_size": window_size, } -MAX_WINDOW_LENGTH, WINDOW_HEIGHT, WINDOW_MARGIN, WINDOW_SIZE = make_np(window_params().values()) +MAX_WINDOW_LENGTH, WINDOW_HEIGHT, WINDOW_MARGIN, WINDOW_SIZE = make_np( + window_params().values() +) @gin.configurable -def staircase_params(staircase_snap=('uniform', .8, 1.2)): - return {'staircase_snap': rg(staircase_snap)} +def staircase_params(staircase_snap=("uniform", 0.8, 1.2)): + return {"staircase_snap": rg(staircase_snap)} STAIRCASE_SNAP = make_np(staircase_params().values()) @@ -94,7 +104,8 @@ def init_window_params(): for x, y in zip(xs, ys): x.fill(y) + def initialize_constants(): init_global_params() init_door_params() - init_window_params() \ No newline at end of file + init_window_params() diff --git a/infinigen/core/constraints/example_solver/room/contour.py b/infinigen/core/constraints/example_solver/room/contour.py index 9b116c1d2..62018ca07 100644 --- a/infinigen/core/constraints/example_solver/room/contour.py +++ b/infinigen/core/constraints/example_solver/room/contour.py @@ -4,25 +4,26 @@ # Authors: Lingjie Mei import random -import bpy import gin import numpy as np from numpy.random import uniform from shapely import Polygon, box -from infinigen.core.constraints.example_solver.room.utils import unit_cast -from infinigen.core.constraints.example_solver.room.types 
import RoomType -from infinigen.core.constraints.example_solver.room.configs import TYPICAL_AREA_ROOM_TYPES from infinigen.assets.utils.decorate import read_co, write_co from infinigen.assets.utils.object import new_plane +from infinigen.core.constraints.example_solver.room.configs import ( + TYPICAL_AREA_ROOM_TYPES, +) +from infinigen.core.constraints.example_solver.room.types import RoomType +from infinigen.core.constraints.example_solver.room.utils import unit_cast from infinigen.core.util import blender as butil -from infinigen.core.util.math import FixedSeed, int_hash +from infinigen.core.util.math import FixedSeed from infinigen.core.util.random import log_uniform LARGE = 100 -@gin.configurable(denylist=['width', 'height']) +@gin.configurable(denylist=["width", "height"]) class ContourFactory: def __init__(self, width=17, height=9): self.width = width @@ -35,7 +36,11 @@ def make_contour(self, i): obj.location = self.width / 2, self.height / 2, 0 obj.scale = self.width / 2, self.height / 2, 1 butil.apply_transform(obj, loc=True) - corners = list((x, y) for x in [0, unit_cast(self.width)] for y in [0, unit_cast(self.height)]) + corners = list( + (x, y) + for x in [0, unit_cast(self.width)] + for y in [0, unit_cast(self.height)] + ) random.shuffle(corners) corners = dict(enumerate(corners)) @@ -49,22 +54,22 @@ def nearest(t): while len(corners) > 0: _, (x, y) = corners.popitem() r = uniform(0, 1) - if r < .2: + if r < 0.2: axes = [] - if nearest((self.width - x, y))[1] < .1: + if nearest((self.width - x, y))[1] < 0.1: axes.append(0) - elif nearest((x, self.height - y))[1] < .1: + elif nearest((x, self.height - y))[1] < 0.1: axes.append(1) if len(axes) > 0: axis = np.random.choice(axes) self.add_long_corner(obj, x, y, axis) t = (self.width - x, y) if axis == 0 else (x, self.height - y) corners.pop(nearest(t)[0]) - elif r < .35: + elif r < 0.35: self.add_round_corner(obj, x, y) - elif r < .5: + elif r < 0.5: self.add_straight_corner(obj, x, y) - elif r < .65: + elif r < 0.65: self.add_sharp_corner(obj, x, y) vertices = obj.data.polygons[0].vertices @@ -73,43 +78,60 @@ def nearest(t): return p def add_round_corner(self, obj, x, y): - vg = obj.vertex_groups.new(name='corner') + vg = obj.vertex_groups.new(name="corner") for i, v in enumerate(obj.data.vertices): - vg.add([i], v.co[0] == x and v.co[1] == y, 'REPLACE') - width = unit_cast(uniform(.2, .3) * min(self.width, self.height)) + vg.add([i], v.co[0] == x and v.co[1] == y, "REPLACE") + width = unit_cast(uniform(0.2, 0.3) * min(self.width, self.height)) try: - butil.modify_mesh(obj, 'BEVEL', affect='VERTICES', limit_method='VGROUP', vertex_group='corner', - segments=np.random.randint(2, 5), width=width) - except: + butil.modify_mesh( + obj, + "BEVEL", + affect="VERTICES", + limit_method="VGROUP", + vertex_group="corner", + segments=np.random.randint(2, 5), + width=width, + ) + except Exception: pass - obj.vertex_groups.remove(obj.vertex_groups['corner']) + obj.vertex_groups.remove(obj.vertex_groups["corner"]) def add_straight_corner(self, obj, x, y): - vg = obj.vertex_groups.new(name='corner') + vg = obj.vertex_groups.new(name="corner") for i, v in enumerate(obj.data.vertices): - vg.add([i], v.co[0] == x and v.co[1] == y, 'REPLACE') - width = unit_cast(uniform(.1, .3) * min(self.width, self.height)) + vg.add([i], v.co[0] == x and v.co[1] == y, "REPLACE") + width = unit_cast(uniform(0.1, 0.3) * min(self.width, self.height)) if width > 0: - butil.modify_mesh(obj, 'BEVEL', affect='VERTICES', limit_method='VGROUP', vertex_group='corner', - 
segments=1, width=width) - obj.vertex_groups.remove(obj.vertex_groups['corner']) + butil.modify_mesh( + obj, + "BEVEL", + affect="VERTICES", + limit_method="VGROUP", + vertex_group="corner", + segments=1, + width=width, + ) + obj.vertex_groups.remove(obj.vertex_groups["corner"]) def add_sharp_corner(self, obj, x, y): cutter = new_plane(size=LARGE) - butil.modify_mesh(cutter, 'SOLIDIFY', offset=0, thickness=1) - x_ratio, y_ratio = uniform(.1, .3, 2) - cutter.location = x + (LARGE / 2 - unit_cast(x_ratio * self.width)) * (-1) ** (x <= 0), y + ( - LARGE / 2 - unit_cast(y_ratio * self.height)) * (-1) ** (y <= 0), 0 - butil.modify_mesh(obj, 'BOOLEAN', object=cutter, operation='DIFFERENCE') + butil.modify_mesh(cutter, "SOLIDIFY", offset=0, thickness=1) + x_ratio, y_ratio = uniform(0.1, 0.3, 2) + cutter.location = ( + x + (LARGE / 2 - unit_cast(x_ratio * self.width)) * (-1) ** (x <= 0), + y + (LARGE / 2 - unit_cast(y_ratio * self.height)) * (-1) ** (y <= 0), + 0, + ) + butil.modify_mesh(obj, "BOOLEAN", object=cutter, operation="DIFFERENCE") butil.delete(cutter) def add_long_corner(self, obj, x, y, axis): x_, y_, z_ = read_co(obj).T i = np.nonzero((x_ == x) & (y_ == y))[0] if axis == 0: - y_[i] -= self.height * uniform(.1, .3) * (-1) ** (y_[i] <= 0) + y_[i] -= self.height * uniform(0.1, 0.3) * (-1) ** (y_[i] <= 0) else: - x_[i] -= self.width * uniform(.1, .3) * (-1) ** (x_[i] <= 0) + x_[i] -= self.width * uniform(0.1, 0.3) * (-1) ** (x_[i] <= 0) write_co(obj, np.stack([x_, y_, z_], -1)) def add_staircase(self, contour): @@ -118,15 +140,17 @@ def add_staircase(self, contour): y_, y__ = np.min(y), np.max(y) for _ in range(self.n_trials): area = TYPICAL_AREA_ROOM_TYPES[RoomType.Staircase] * uniform(1.4, 1.6) - skewness = log_uniform(.6, .8) - if uniform() < .5: + skewness = log_uniform(0.6, 0.8) + if uniform() < 0.5: skewness = 1 / skewness - width, height = unit_cast(np.sqrt(area * skewness).item()), unit_cast( - np.sqrt(area / skewness).item()) + width, height = ( + unit_cast(np.sqrt(area * skewness).item()), + unit_cast(np.sqrt(area / skewness).item()), + ) x = unit_cast(uniform(x_, x__ - width)) y = unit_cast(uniform(y_, y__ - height)) b = box(x, y, x + width, y + height) if contour.contains(b): return b else: - raise ValueError('Invalid staircase') + raise ValueError("Invalid staircase") diff --git a/infinigen/core/constraints/example_solver/room/decorate.py b/infinigen/core/constraints/example_solver/room/decorate.py index 2d26e0db9..3c172de09 100644 --- a/infinigen/core/constraints/example_solver/room/decorate.py +++ b/infinigen/core/constraints/example_solver/room/decorate.py @@ -2,7 +2,7 @@ # This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory # of this source tree. 
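`ContourFactory.add_staircase` above places the staircase footprint by rejection sampling: it draws a box with a target area and a random log-uniform aspect ratio, drops it at a random offset, and keeps it only if the room contour fully contains it. Below is a simplified, self-contained sketch of that loop using shapely; the name `sample_box_in_contour` is illustrative and the grid snapping done by `unit_cast` is omitted.

```python
import numpy as np
from shapely.geometry import Polygon, box

def sample_box_in_contour(contour: Polygon, area: float, n_trials: int = 100) -> Polygon:
    minx, miny, maxx, maxy = contour.bounds
    for _ in range(n_trials):
        skew = np.exp(np.random.uniform(np.log(0.6), np.log(0.8)))  # log-uniform aspect
        if np.random.uniform() < 0.5:
            skew = 1.0 / skew
        w, h = np.sqrt(area * skew), np.sqrt(area / skew)
        if maxx - minx <= w or maxy - miny <= h:
            continue  # footprint cannot fit at this aspect ratio
        x = np.random.uniform(minx, maxx - w)
        y = np.random.uniform(miny, maxy - h)
        candidate = box(x, y, x + w, y + h)
        if contour.contains(candidate):  # reject placements poking outside the room
            return candidate
    raise ValueError("no valid staircase placement found")
```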
-# Authors: +# Authors: # - Lingjie Mei: primary author # - Karhan Kayan: fix constants @@ -13,74 +13,90 @@ import gin import numpy as np import shapely +import shapely.affinity import trimesh.convex from numpy.random import uniform from shapely import Point from shapely.ops import nearest_points from tqdm import trange from trimesh.transformations import translation_matrix -import shapely.affinity -from infinigen.assets.elements import PillarFactory, random_staircase_factory +import infinigen.core.constraints.example_solver.room.constants as constants from infinigen.assets.materials import plaster, tile -from infinigen.assets.utils.decorate import read_area, read_co, read_edge_direction, read_edge_length, \ - read_edges, remove_edges, remove_faces, remove_vertices +from infinigen.assets.objects.elements import PillarFactory, random_staircase_factory +from infinigen.assets.objects.elements.doors import random_door_factory +from infinigen.assets.objects.windows import WindowFactory +from infinigen.assets.utils.decorate import ( + read_area, + read_co, + read_edge_direction, + read_edge_length, + read_edges, + remove_edges, + remove_faces, + remove_vertices, +) from infinigen.assets.utils.object import obj2trimesh -from infinigen.assets.windows import WindowFactory -from infinigen.assets.elements.doors import random_door_factory - -from infinigen.core.constraints.example_solver.room.configs import PILLAR_ROOM_TYPES, ROOM_FLOORS, ROOM_WALLS -from infinigen.core.constraints.example_solver.room.constants import DOOR_WIDTH, WALL_HEIGHT, WALL_THICKNESS -from infinigen.core.constraints.example_solver.room.types import RoomType, get_room_level +from infinigen.core import tagging +from infinigen.core import tags as t +from infinigen.core.constraints import constraint_language as cl from infinigen.core.constraints.example_solver import state_def - +from infinigen.core.constraints.example_solver.room.configs import ( + PILLAR_ROOM_TYPES, + ROOM_FLOORS, + ROOM_WALLS, +) +from infinigen.core.constraints.example_solver.room.constants import ( + DOOR_WIDTH, + WALL_HEIGHT, + WALL_THICKNESS, +) +from infinigen.core.constraints.example_solver.room.types import ( + RoomType, + get_room_level, + get_room_type, +) +from infinigen.core.util import blender as butil from infinigen.core.util.blender import deep_clone_obj -import infinigen.core.constraints.example_solver.room.constants as constants -from infinigen.core.constraints.example_solver.room.types import get_room_type from infinigen.core.util.random import random_general as rg -from infinigen.core import tags as t, tagging - -from infinigen.core.constraints import constraint_language as cl -from infinigen.core.util import blender as butil - logger = logging.getLogger(__name__) + def split_rooms(rooms_meshed: list[bpy.types.Object]): - extract_tags = { - 'wall': {t.Subpart.Wall, t.Subpart.Visible}, - 'floor': {t.Subpart.SupportSurface, t.Subpart.Visible}, - 'ceiling': {t.Subpart.Ceiling, t.Subpart.Visible}, + "wall": {t.Subpart.Wall, t.Subpart.Visible}, + "floor": {t.Subpart.SupportSurface, t.Subpart.Visible}, + "ceiling": {t.Subpart.Ceiling, t.Subpart.Visible}, } meshes = { - n: [ - tagging.extract_tagged_faces(r, tags) - for r in rooms_meshed - ] + n: [tagging.extract_tagged_faces(r, tags) for r in rooms_meshed] for n, tags in extract_tags.items() } for k, ms in meshes.items(): m2delete = [] for m in ms: - if m.name.startswith('vert'): + if m.name.startswith("vert"): butil.select_none() butil.delete(m) m2delete.append(m) for m in m2delete: ms.remove(m) - 
meshes['exterior'] = [tagging.extract_mask(r, 1 - tagging.tagged_face_mask(r, t.Subpart.Visible)) for r in rooms_meshed] + meshes["exterior"] = [ + tagging.extract_mask(r, 1 - tagging.tagged_face_mask(r, t.Subpart.Visible)) + for r in rooms_meshed + ] for n, objs in meshes.items(): for o in objs: - o.name = o.name.split('.')[0] + f'.{n}' - butil.origin_set(objs, 'ORIGIN_GEOMETRY', center='MEDIAN') + o.name = o.name.split(".")[0] + f".{n}" + butil.origin_set(objs, "ORIGIN_GEOMETRY", center="MEDIAN") meshes = { - n: butil.put_in_collection(objs, 'unique_assets:room_' + n) + n: butil.put_in_collection(objs, "unique_assets:room_" + n) for n, objs in meshes.items() } @@ -88,14 +104,15 @@ def split_rooms(rooms_meshed: list[bpy.types.Object]): def room_walls(wall_objs: list[bpy.types.Object]): - wall_fns = list(rg(ROOM_WALLS[get_room_type(r.name)]) for r in wall_objs) - logger.debug(f'{room_walls.__name__} adding materials to {len(wall_objs)=}, using {len(wall_fns)=}') + logger.debug( + f"{room_walls.__name__} adding materials to {len(wall_objs)=}, using {len(wall_fns)=}" + ) for wall_fn in set(wall_fns): rooms_ = [o for o, w in zip(wall_objs, wall_fns) if w == wall_fn] - shape = np.random.choice(['square', 'rectangle', 'hexagon']) + shape = np.random.choice(["square", "rectangle", "hexagon"]) kwargs = dict(vertical=True, alternating=False, shape=shape) if wall_fn in [tile, plaster]: indices = np.random.randint(0, 3, len(rooms_)) @@ -107,13 +124,15 @@ def room_walls(wall_objs: list[bpy.types.Object]): def room_ceilings(ceilings: list[bpy.types.Object]): - logger.debug(f'{room_ceilings.__name__} adding materials to {len(ceilings)=}') + logger.debug(f"{room_ceilings.__name__} adding materials to {len(ceilings)=}") plaster.apply(ceilings, t.Subpart.Ceiling) def room_floors(floors: list[bpy.types.Object]): floor_fns = list(rg(ROOM_FLOORS[get_room_type(r.name)]) for r in floors) - logger.debug(f'{room_floors.__name__} adding materials to {len(floors)=}, using {len(floor_fns)=}') + logger.debug( + f"{room_floors.__name__} adding materials to {len(floors)=}, using {len(floor_fns)=}" + ) for floor_fn in set(floor_fns): rooms_ = [o for o, f in zip(floors, floor_fns) if f == floor_fn] @@ -128,27 +147,27 @@ def room_floors(floors: list[bpy.types.Object]): @gin.configurable def populate_doors( - placeholders: list[bpy.types.Object], - n_doors=3, - door_chance=1, - casing_chance=0.0, - all_open=False + placeholders: list[bpy.types.Object], + n_doors=3, + door_chance=1, + casing_chance=0.0, + all_open=False, ): - factories = [random_door_factory()(np.random.randint(1e7)) for _ in range(3)] - logger.debug(f'{populate_doors.__name__} populating {len(placeholders)=} with {n_doors=} and {len(factories)=}') + logger.debug( + f"{populate_doors.__name__} populating {len(placeholders)=} with {n_doors=} and {len(factories)=}" + ) indices = np.random.randint(0, len(factories), len(placeholders)) - col = butil.get_collection('unique_assets:doors') - casing_col = butil.get_collection('unique_assets:door_casings') + col = butil.get_collection("unique_assets:doors") + casing_col = butil.get_collection("unique_assets:door_casings") - for i in trange(n_doors, desc='Placing doors'): + for i in trange(n_doors, desc="Placing doors"): factory = factories[i] casing_factory = factory.casing_factory doors, casings = [], [] for j in np.nonzero(indices == i)[0]: - if uniform() > door_chance: continue if all_open: @@ -165,7 +184,11 @@ def populate_doors( door = factory(int(j)) door.parent = placeholders[j] - door.location = 
constants.DOOR_WIDTH / 2, constants.WALL_THICKNESS / 2, -constants.DOOR_SIZE / 2 + door.location = ( + constants.DOOR_WIDTH / 2, + constants.WALL_THICKNESS / 2, + -constants.DOOR_SIZE / 2, + ) door.rotation_euler[-1] = -rot_z doors.append(door) @@ -179,25 +202,26 @@ def populate_doors( factory.finalize_assets(doors) butil.put_in_collection(doors, col) - + casing_factory.finalize_assets(casings) butil.put_in_collection(casings, casing_col) def populate_windows(placeholders: list[bpy.types.Object], n_windows=1): - factories = [WindowFactory(np.random.randint(1e5)) for _ in range(n_windows)] - logger.debug(f'{populate_windows.__name__} populating {len(placeholders)=} with {n_windows=} and {len(factories)=}') + logger.debug( + f"{populate_windows.__name__} populating {len(placeholders)=} with {n_windows=} and {len(factories)=}" + ) indices = np.random.randint(0, len(factories), len(placeholders)) - col = butil.get_collection('unique_assets:windows') + col = butil.get_collection("unique_assets:windows") for i in range(n_windows): factory = factories[i] windows = [] for j in np.nonzero(indices == i)[0]: cutter_dims = placeholders[j].dimensions - dims = cutter_dims[0], cutter_dims[2], cutter_dims[1] * uniform(.1, .2) + dims = cutter_dims[0], cutter_dims[2], cutter_dims[1] * uniform(0.1, 0.2) window = factory(int(j), dimensions=dims) window.parent = placeholders[j] window.location[1] = -WALL_THICKNESS / 2 @@ -207,41 +231,63 @@ def populate_windows(placeholders: list[bpy.types.Object], n_windows=1): def room_stairs(state, rooms_meshed): - - col = butil.get_collection('unique_assets:staircases') - states = list(s for k, s in state.objs.items() if get_room_type(k) == RoomType.Staircase) + col = butil.get_collection("unique_assets:staircases") + states = list( + s for k, s in state.objs.items() if get_room_type(k) == RoomType.Staircase + ) contours, doors = [], [] for s in states: - doors_ = [bpy.data.objects[k] for k, o in state.objs.items() if any( - r.relation == cl.CutFrom() and r.target_name == s.obj.name for r in o.relations) and k.startswith( - 'door')] - contour = shapely.simplify(s.contour.buffer(-WALL_THICKNESS / 2, join_style='mitre'), .1) + doors_ = [ + bpy.data.objects[k] + for k, o in state.objs.items() + if any( + r.relation == cl.CutFrom() and r.target_name == s.obj.name + for r in o.relations + ) + and k.startswith("door") + ] + contour = shapely.simplify( + s.contour.buffer(-WALL_THICKNESS / 2, join_style="mitre"), 0.1 + ) for door in doors_: - box = shapely.box(-DOOR_WIDTH / 2, -DOOR_WIDTH * 2, DOOR_WIDTH / 2, DOOR_WIDTH * 2) - box = shapely.affinity.translate(shapely.affinity.rotate(box, door.rotation_euler[-1]), - *door.location) + box = shapely.box( + -DOOR_WIDTH / 2, -DOOR_WIDTH * 2, DOOR_WIDTH / 2, DOOR_WIDTH * 2 + ) + box = shapely.affinity.translate( + shapely.affinity.rotate(box, door.rotation_euler[-1]), *door.location + ) contour = contour.difference(box) doors.append(doors_) contours.append(contour) geoms = [] for c, c_ in zip(contours[:-1], contours[1:]): geom = c.intersection(c_) - if not geom.geom_type == 'Polygon': - geom = sorted(list(g for g in geom.geoms if g.geom_type == 'Polygon'), key=lambda _: _.area)[-1] + if not geom.geom_type == "Polygon": + geom = sorted( + list(g for g in geom.geoms if g.geom_type == "Polygon"), + key=lambda _: _.area, + )[-1] geoms.append(geom) placeholders, offsets, fns = [], [], [] - for _ in trange(100, desc='Generating staircases'): + for _ in trange(100, desc="Generating staircases"): butil.delete(placeholders) fns = 
[random_staircase_factory()(np.random.randint(1e7)) for _ in geoms] placeholders, mlss, lower, upper = [], [], [], [] for j, fn in enumerate(fns): ph = fn.create_placeholder(i=np.random.randint(1e7)) placeholders.append(ph) - polygon = shapely.intersection_all(list( - shapely.affinity.translate(geoms[j], -x, -y) for x in [ph.bound_box[0][0], ph.bound_box[-1][0]] - for y in [ph.bound_box[0][1], ph.bound_box[-1][1]])) - mlss.append(polygon.boundary if polygon.geom_type == 'Polygon' else shapely.MultiLineString( - [p.boundary for p in polygon.geoms])) + polygon = shapely.intersection_all( + list( + shapely.affinity.translate(geoms[j], -x, -y) + for x in [ph.bound_box[0][0], ph.bound_box[-1][0]] + for y in [ph.bound_box[0][1], ph.bound_box[-1][1]] + ) + ) + mlss.append( + polygon.boundary + if polygon.geom_type == "Polygon" + else shapely.MultiLineString([p.boundary for p in polygon.geoms]) + ) x, y, z = read_co(ph).T lower.append((x[z < WALL_HEIGHT], y[z < WALL_HEIGHT])) upper.append((x[z >= WALL_HEIGHT], y[z >= WALL_HEIGHT])) @@ -255,26 +301,50 @@ def room_stairs(state, rooms_meshed): y = uniform(p[1], p[3]) p = Point(x, y) projected = nearest_points(mls, p)[0] - if max(np.abs(p.x - projected.x), np.abs(p.y - projected.y)) < constants.STAIRCASE_SNAP: + if ( + max(np.abs(p.x - projected.x), np.abs(p.y - projected.y)) + < constants.STAIRCASE_SNAP + ): p = projected - coords = mls.coords if mls.geom_type == 'LineString' else np.concatenate( - [ls.coords for ls in mls.geoms]) - projected = nearest_points(shapely.MultiPoint(coords), Point(x, y))[0] - if max(np.abs(p.x - projected.x), np.abs(p.y - projected.y)) < constants.STAIRCASE_SNAP: + coords = ( + mls.coords + if mls.geom_type == "LineString" + else np.concatenate([ls.coords for ls in mls.geoms]) + ) + projected = nearest_points(shapely.MultiPoint(coords), Point(x, y))[ + 0 + ] + if ( + max(np.abs(p.x - projected.x), np.abs(p.y - projected.y)) + < constants.STAIRCASE_SNAP + ): p = projected x, y = p.x, p.y placeholders[j].location = x, y, j * WALL_HEIGHT + WALL_THICKNESS / 2 - contains_lower = shapely.contains_xy(contours[j], lower[j][0] + x, lower[j][1] + y).all() - contains_upper = shapely.contains_xy(contours[j + 1], upper[j][0] + x, upper[j][1] + y).all() + contains_lower = shapely.contains_xy( + contours[j], lower[j][0] + x, lower[j][1] + y + ).all() + contains_upper = shapely.contains_xy( + contours[j + 1], upper[j][0] + x, upper[j][1] + y + ).all() lower_valid = fns[j].valid_contour((x, y), contours[j], doors[j]) - upper_valid = fns[j].valid_contour((x, y), contours[j + 1], doors[j + 1], False) - if not (contains_lower and contains_upper and lower_valid and upper_valid): + upper_valid = fns[j].valid_contour( + (x, y), contours[j + 1], doors[j + 1], False + ) + if not ( + contains_lower and contains_upper and lower_valid and upper_valid + ): break offsets.append((x, y)) if len(offsets) == len(geoms): - ts = list(trimesh.convex.convex_hull( - obj2trimesh(ph).apply_transform(translation_matrix([*o, WALL_HEIGHT * j]))) for j, (ph, o) - in enumerate(zip(placeholders, offsets))) + ts = list( + trimesh.convex.convex_hull( + obj2trimesh(ph).apply_transform( + translation_matrix([*o, WALL_HEIGHT * j]) + ) + ) + for j, (ph, o) in enumerate(zip(placeholders, offsets)) + ) if all(t.intersection(t_).is_empty for t, t_ in zip(ts[:-1], ts[1:])): break if len(offsets) == len(geoms): @@ -291,8 +361,14 @@ def room_stairs(state, rooms_meshed): if get_room_type(mesh.name) == RoomType.Staircase: level = get_room_level(mesh.name) if level == j + 1: - 
butil.modify_mesh(mesh, 'BOOLEAN', object=cutter, operation='DIFFERENCE', use_self=True, - use_hole_tolerant=True) + butil.modify_mesh( + mesh, + "BOOLEAN", + object=cutter, + operation="DIFFERENCE", + use_self=True, + use_hole_tolerant=True, + ) butil.delete(cutter) m = deep_clone_obj(mesh) m.location = -offsets[j][0], -offsets[j][1], 0 @@ -305,30 +381,31 @@ def room_stairs(state, rooms_meshed): def room_pillars(state: state_def.State, walls: list[bpy.types.Object]): - - col = butil.get_collection('pillars') - + col = butil.get_collection("pillars") + pillar_rooms = [ - s for k, s in state.objs.items() - if get_room_type(k) in PILLAR_ROOM_TYPES + s for k, s in state.objs.items() if get_room_type(k) in PILLAR_ROOM_TYPES ] for s in pillar_rooms: factory = PillarFactory(np.random.randint(1e7)) - mesh = next(m for m in walls if m.name.startswith(s.obj.name.split('.')[0])) + mesh = next(m for m in walls if m.name.startswith(s.obj.name.split(".")[0])) interior = tagging.extract_tagged_faces(mesh, {t.Subpart.Interior}) remove_faces(interior, read_area(interior) < WALL_THICKNESS / 2 * WALL_HEIGHT) selection = (read_edge_length(interior) > WALL_HEIGHT / 2) & ( - np.abs(read_edge_direction(interior))[:, -1] > .9) - selection_ = np.bincount(read_edges(interior)[selection].reshape(-1), - minlength=len(interior.data.vertices)) + np.abs(read_edge_direction(interior))[:, -1] > 0.9 + ) + selection_ = np.bincount( + read_edges(interior)[selection].reshape(-1), + minlength=len(interior.data.vertices), + ) remove_vertices(interior, selection_ == 0) remove_vertices(interior, lambda x, y, z: z > WALL_THICKNESS) remove_edges(interior, read_edge_length(interior) < WALL_THICKNESS) interiors = butil.split_object(interior) for i in interiors: - with butil.ViewportMode(i, 'EDIT'): - bpy.ops.mesh.select_all(action='SELECT') + with butil.ViewportMode(i, "EDIT"): + bpy.ops.mesh.select_all(action="SELECT") bpy.ops.mesh.dissolve_limited() bm = bmesh.from_edit_mesh(i.data) geom = [v for v in bm.verts if len(v.link_edges) < 2] @@ -338,21 +415,25 @@ def room_pillars(state: state_def.State, walls: list[bpy.types.Object]): if len(interiors_) == 0: return - + with butil.Suppress(): interior = butil.join_objects(interiors_) - staircases = list(butil.get_collection('staircases').objects) + staircases = list(butil.get_collection("staircases").objects) if len(staircases) == 0: return - - staircases = np.concatenate([ - read_co(o) + np.array([o.location]) for o in staircases - ]) + + staircases = np.concatenate( + [read_co(o) + np.array([o.location]) for o in staircases] + ) cos = read_co(interior) cos[:, -1] = mesh.location[-1] + WALL_THICKNESS / 2 - cos = cos[np.min(np.linalg.norm(cos[:, np.newaxis] - staircases[np.newaxis], axis=-1), - -1) > WALL_THICKNESS] + cos = cos[ + np.min( + np.linalg.norm(cos[:, np.newaxis] - staircases[np.newaxis], axis=-1), -1 + ) + > WALL_THICKNESS + ] for co in cos: obj = factory(np.random.randint(1e7)) obj.location = co diff --git a/infinigen/core/constraints/example_solver/room/graph.py b/infinigen/core/constraints/example_solver/room/graph.py index 7f21ac06f..9f6bd4640 100644 --- a/infinigen/core/constraints/example_solver/room/graph.py +++ b/infinigen/core/constraints/example_solver/room/graph.py @@ -6,38 +6,59 @@ from collections.abc import Sequence import gin -import numpy as np import networkx as nx +import numpy as np from numpy.random import uniform -from infinigen.core.constraints.example_solver.room.utils import unit_cast -from infinigen.core.util.math import FixedSeed -from 
infinigen.core.util.random import log_uniform, random_general as rg - -from infinigen.core.constraints.example_solver.room.types import RoomGraph, RoomType, get_room_type from infinigen.core.constraints.example_solver.room.configs import ( - LOOP_ROOM_TYPES, ROOM_CHILDREN, + LOOP_ROOM_TYPES, + ROOM_CHILDREN, ROOM_NUMBERS, - TYPICAL_AREA_ROOM_TYPES, UPSTAIRS_ROOM_CHILDREN, STUDIO_ROOM_CHILDREN, + STUDIO_ROOM_CHILDREN, + TYPICAL_AREA_ROOM_TYPES, + UPSTAIRS_ROOM_CHILDREN, ) +from infinigen.core.constraints.example_solver.room.types import ( + RoomGraph, + RoomType, + get_room_type, +) +from infinigen.core.constraints.example_solver.room.utils import unit_cast +from infinigen.core.util.math import FixedSeed +from infinigen.core.util.random import log_uniform +from infinigen.core.util.random import random_general as rg -@gin.configurable(denylist=['factory_seed', 'level']) +@gin.configurable(denylist=["factory_seed", "level"]) class GraphMaker: - def __init__(self, factory_seed, level=0, requires_staircase=False, room_children='home', typical_area_room_types=TYPICAL_AREA_ROOM_TYPES, - loop_room_types=LOOP_ROOM_TYPES, room_numbers=ROOM_NUMBERS, max_cycle_basis=1, - requires_bathroom_privacy=True, - entrance_type=('weighted_choice', (.5, 'porch'), (.5, 'hallway')), hallway_alpha=1, - no_hallway_children_prob=.4): + def __init__( + self, + factory_seed, + level=0, + requires_staircase=False, + room_children="home", + typical_area_room_types=TYPICAL_AREA_ROOM_TYPES, + loop_room_types=LOOP_ROOM_TYPES, + room_numbers=ROOM_NUMBERS, + max_cycle_basis=1, + requires_bathroom_privacy=True, + entrance_type=("weighted_choice", (0.5, "porch"), (0.5, "hallway")), + hallway_alpha=1, + no_hallway_children_prob=0.4, + ): self.factory_seed = factory_seed with FixedSeed(factory_seed): self.requires_staircase = requires_staircase match room_children: - case 'home': - self.room_children = ROOM_CHILDREN if level == 0 else UPSTAIRS_ROOM_CHILDREN + case "home": + self.room_children = ( + ROOM_CHILDREN if level == 0 else UPSTAIRS_ROOM_CHILDREN + ) case _: self.room_children = STUDIO_ROOM_CHILDREN - self.hallway_room_types = [r for r, m in self.room_children.items() if RoomType.Hallway in m] + self.hallway_room_types = [ + r for r, m in self.room_children.items() if RoomType.Hallway in m + ] self.typical_area_room_types = typical_area_room_types self.loop_room_types = loop_room_types self.room_numbers = room_numbers @@ -48,7 +69,7 @@ def __init__(self, factory_seed, level=0, requires_staircase=False, room_childre self.entrance_type = rg(entrance_type) self.hallway_prob = lambda x: 1 / (x + hallway_alpha) self.no_hallway_children_prob = no_hallway_children_prob - self.skewness_min = .7 + self.skewness_min = 0.7 def make_graph(self, i): with FixedSeed(i): @@ -60,7 +81,7 @@ def make_graph(self, i): def add_room(t, p): i = len(rooms) - name = f'{t}_{room_type_counts[t]}' + name = f"{t}_{room_type_counts[t]}" room_type_counts[t] += 1 if p is not None: children[p].append(i) @@ -81,33 +102,60 @@ def add_room(t, p): for j, s in enumerate(rooms): if (rt := get_room_type(r)) in self.loop_room_types: if (rt_ := get_room_type(s)) in self.loop_room_types[rt]: - if uniform() < self.loop_room_types[rt][rt_] and j not in children[i]: + if ( + uniform() < self.loop_room_types[rt][rt_] + and j not in children[i] + ): children[i].append(j) for i, r in enumerate(rooms): if get_room_type(r) in self.hallway_room_types: - hallways = [j for j in children[i] if get_room_type(rooms[j]) == RoomType.Hallway] - other_rooms = [j for j in children[i] 
if get_room_type(rooms[j]) != RoomType.Hallway] + hallways = [ + j + for j in children[i] + if get_room_type(rooms[j]) == RoomType.Hallway + ] + other_rooms = [ + j + for j in children[i] + if get_room_type(rooms[j]) != RoomType.Hallway + ] children[i] = hallways.copy() for k, o in enumerate(other_rooms): - if uniform() < self.no_hallway_children_prob or len(hallways) == 0: + if ( + uniform() < self.no_hallway_children_prob + or len(hallways) == 0 + ): children[i].append(o) else: - children[hallways[np.random.randint(len(hallways))]].append(o) - - hallways = [i for i, r in enumerate(rooms) if get_room_type(r) == RoomType.Hallway] + children[ + hallways[np.random.randint(len(hallways))] + ].append(o) + + hallways = [ + i + for i, r in enumerate(rooms) + if get_room_type(r) == RoomType.Hallway + ] if len(hallways) == 0: entrance = 0 else: if self.requires_staircase: - prob = np.array([self.hallway_prob(len(children[h])) for h in hallways]) - add_room(RoomType.Staircase, np.random.choice(hallways, p=prob / prob.sum())) - prob = np.array([self.hallway_prob(len(children[h])) for h in hallways]) + prob = np.array( + [self.hallway_prob(len(children[h])) for h in hallways] + ) + add_room( + RoomType.Staircase, + np.random.choice(hallways, p=prob / prob.sum()), + ) + prob = np.array( + [self.hallway_prob(len(children[h])) for h in hallways] + ) entrance = np.random.choice(hallways, p=prob / prob.sum()) - if self.entrance_type == 'porch': + if self.entrance_type == "porch": add_room(RoomType.Balcony, entrance) entrance = queue.pop() - elif self.entrance_type == 'none': + elif self.entrance_type == "none": entrance = None children_ = [children[i] for i in range(len(rooms))] @@ -132,7 +180,9 @@ def satisfies_constraint(self, graph): def has_bathroom_privacy(self, rooms, children): for i, r in rooms: if get_room_type(r) == RoomType.LivingRoom: - has_public_bathroom = any(get_room_type(rooms[j]) == RoomType.Bathroom for j in children[i]) + has_public_bathroom = any( + get_room_type(rooms[j]) == RoomType.Bathroom for j in children[i] + ) if not has_public_bathroom: for j in children[i]: if get_room_type(rooms[j] == RoomType.Bedroom): @@ -141,25 +191,36 @@ def has_bathroom_privacy(self, rooms, children): return True def suggest_dimensions(self, graph, width=None, height=None): - area = sum([self.typical_area_room_types[get_room_type(r)] for r in graph.rooms]) * self.slackness + area = ( + sum([self.typical_area_room_types[get_room_type(r)] for r in graph.rooms]) + * self.slackness + ) if width is None and height is None: skewness = uniform(self.skewness_min, 1 / self.skewness_min) width = unit_cast(np.sqrt(area * skewness).item()) height = unit_cast(np.sqrt(area / skewness).item()) - elif uniform(0, 1) < .5: + elif uniform(0, 1) < 0.5: height_ = unit_cast(area / width) - height = None if height_ > height and self.skewness_min < height_ / width < 1 / self.skewness_min\ + height = ( + None + if height_ > height + and self.skewness_min < height_ / width < 1 / self.skewness_min else height_ + ) else: width_ = unit_cast(area / height) - width = None if width_ > width and self.skewness_min < width_ / height < 1 / self.skewness_min \ + width = ( + None + if width_ > width + and self.skewness_min < width_ / height < 1 / self.skewness_min else width_ + ) return width, height def draw(self, graph): g = nx.Graph() - shortnames = [r[:3].upper() + r.split('_')[-1] for r in graph.rooms] + shortnames = [r[:3].upper() + r.split("_")[-1] for r in graph.rooms] g.add_nodes_from(shortnames) for k in range(len(shortnames)): 
for l in graph.neighbours[k]: diff --git a/infinigen/core/constraints/example_solver/room/scorer.py b/infinigen/core/constraints/example_solver/room/scorer.py index c37eb1351..8a826718b 100644 --- a/infinigen/core/constraints/example_solver/room/scorer.py +++ b/infinigen/core/constraints/example_solver/room/scorer.py @@ -2,7 +2,7 @@ # This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory # of this source tree. -# Authors: +# Authors: # - Lingjie Mei: primary author # - Karhan Kayan: fix constants @@ -13,32 +13,40 @@ from shapely import LineString, Polygon import infinigen.core.constraints.example_solver.room.constants as constants +from infinigen.core.constraints.example_solver.room.configs import ( + EXTERIOR_CONNECTED_ROOM_TYPES, + FUNCTIONAL_ROOM_TYPES, + SQUARE_ROOM_TYPES, + TYPICAL_AREA_ROOM_TYPES, +) from infinigen.core.constraints.example_solver.room.types import RoomType, get_room_type -from infinigen.core.constraints.example_solver.room.configs import EXTERIOR_CONNECTED_ROOM_TYPES, FUNCTIONAL_ROOM_TYPES, SQUARE_ROOM_TYPES, \ - TYPICAL_AREA_ROOM_TYPES -from infinigen.core.constraints.example_solver.room.utils import abs_distance, buffer, unit_cast +from infinigen.core.constraints.example_solver.room.utils import ( + abs_distance, + buffer, + unit_cast, +) -@gin.configurable(denylist=['graph']) +@gin.configurable(denylist=["graph"]) class BlueprintScorer: def __init__( - self, - graph, - shortest_path_weight=2., - typical_area_weight=10., - typical_area_room_types=TYPICAL_AREA_ROOM_TYPES, - aspect_ratio_weight=10., - aspect_ratio_room_types=SQUARE_ROOM_TYPES, - convexity_weight=50., - conciseness_weight=2., - exterior_connected_room_types=EXTERIOR_CONNECTED_ROOM_TYPES, - exterior_length_weight=.2, - exterior_corner_weight=.02, - collinearity_weight=.02, - functional_room_weight=.2, - functional_room_types=FUNCTIONAL_ROOM_TYPES, - narrow_passage_weight=5., - narrow_passage_thresh=1.5 + self, + graph, + shortest_path_weight=2.0, + typical_area_weight=10.0, + typical_area_room_types=TYPICAL_AREA_ROOM_TYPES, + aspect_ratio_weight=10.0, + aspect_ratio_room_types=SQUARE_ROOM_TYPES, + convexity_weight=50.0, + conciseness_weight=2.0, + exterior_connected_room_types=EXTERIOR_CONNECTED_ROOM_TYPES, + exterior_length_weight=0.2, + exterior_corner_weight=0.02, + collinearity_weight=0.02, + functional_room_weight=0.2, + functional_room_types=FUNCTIONAL_ROOM_TYPES, + narrow_passage_weight=5.0, + narrow_passage_thresh=1.5, ): self.graph = graph self.shortest_path_weight = shortest_path_weight @@ -70,50 +78,54 @@ def find_score(self, assignment, info): return sum(self.compute_scores(assignment, info).values()) def compute_scores(self, assignment, info): - info['neighbours'] = {a: set(assignment[_] for _ in self.graph.neighbours[i]) for i, a in - enumerate(assignment)} + info["neighbours"] = { + a: set(assignment[_] for _ in self.graph.neighbours[i]) + for i, a in enumerate(assignment) + } scores = {} if self.shortest_path_weight > 0: score = self.shortest_path_weight * self.shortest_path(assignment, info) - scores['shortest_path'] = score + scores["shortest_path"] = score if self.typical_area_weight > 0: score = self.typical_area_weight * self.typical_area(assignment, info) - scores['typical_area'] = score + scores["typical_area"] = score if self.aspect_ratio_weight > 0: score = self.aspect_ratio_weight * self.aspect_ratio(assignment, info) - scores['aspect_ratio'] = score + scores["aspect_ratio"] = score if self.convexity_weight > 0: score = 
self.convexity_weight * self.convexity(assignment, info) - scores['convexity'] = score + scores["convexity"] = score if self.conciseness_weight > 0: score = self.conciseness_weight * self.conciseness(assignment, info) - scores['conciseness'] = score + scores["conciseness"] = score if self.exterior_length_weight > 0: score = self.exterior_length_weight * self.exterior_length(assignment, info) - scores['exterior_length'] = score + scores["exterior_length"] = score if self.exterior_corner_weight > 0: score = self.exterior_corner_weight * self.exterior_corner(assignment, info) - scores['exterior_corner'] = score + scores["exterior_corner"] = score if self.collinearity_weight > 0: score = self.collinearity_weight * self.collinearity(assignment, info) - scores['collinearity'] = score + scores["collinearity"] = score if self.functional_room_weight > 0: score = self.functional_room_weight * self.functional_room(assignment, info) - scores['functional_room'] = score + scores["functional_room"] = score if self.narrow_passage_weight > 0: score = self.narrow_passage_weight * self.narrow_passage(assignment, info) - scores['narrow_passage'] = score + scores["narrow_passage"] = score return scores def shortest_path(self, assignment, info): shortest_paths = defaultdict(dict) - centroids = {k: s.centroid.coords[:][0] for k, s in info['segments'].items()} - for k, ses in info['shared_edges'].items(): + centroids = {k: s.centroid.coords[:][0] for k, s in info["segments"].items()} + for k, ses in info["shared_edges"].items(): for l, se in ses.items(): min_distance = np.full(100, 4) for ls in se.geoms: for c in ls.coords[:]: - dist = abs_distance(centroids[k], c) + abs_distance(c, centroids[l]) + dist = abs_distance(centroids[k], c) + abs_distance( + c, centroids[l] + ) if np.sum(dist) <= np.sum(min_distance): min_distance = dist shortest_paths[k][l] = min_distance @@ -128,7 +140,7 @@ def shortest_path(self, assignment, info): updated = True while updated: updated = False - for k, ns in info['neighbours'].items(): + for k, ns in info["neighbours"].items(): for n in ns: d = displacement[k] + shortest_paths[k][n] if np.sum(d) < np.sum(displacement[n]): @@ -136,7 +148,10 @@ def shortest_path(self, assignment, info): updated = True displacements = np.stack([d for k, d in displacement.items() if k != root]) x, xx, y, yy = displacements.T - score = (1. 
/ ((np.maximum(x, xx) + np.maximum(y, yy)) / displacements.sum(1)) - 1) ** 2 + score = ( + 1.0 / ((np.maximum(x, xx) + np.maximum(y, yy)) / displacements.sum(1)) + - 1 + ) ** 2 scores[root] = score.sum() return sum(s for s in scores.values()) @@ -144,11 +159,18 @@ def typical_area(self, assignment, info): total_typical_areas, total_face_areas = [], [] for i, r in enumerate(self.graph.rooms): if get_room_type(r) in self.typical_area_room_types: - total_typical_areas.append(self.typical_area_room_types[get_room_type(r)]) - total_face_areas.append(info['segments'][assignment[i]].area) + total_typical_areas.append( + self.typical_area_room_types[get_room_type(r)] + ) + total_face_areas.append(info["segments"][assignment[i]].area) total_typical_areas = np.array(total_typical_areas) total_face_areas = np.array(total_face_areas) - scores = total_face_areas / np.sum(total_face_areas) / total_typical_areas * np.sum(total_typical_areas) + scores = ( + total_face_areas + / np.sum(total_face_areas) + / total_typical_areas + * np.sum(total_typical_areas) + ) scores = np.where(scores > 1, scores, 1 / scores) - 1 return scores.sum() @@ -156,7 +178,7 @@ def aspect_ratio(self, assignment, info): aspect_ratios = [] for i, r in enumerate(self.graph.rooms): if get_room_type(r) in self.aspect_ratio_room_types: - x, y, xx, yy = info['segments'][assignment[i]].bounds + x, y, xx, yy = info["segments"][assignment[i]].bounds aspect_ratios.append((xx - x) / (yy - y)) aspect_ratios = np.array(aspect_ratios) aspect_ratios = np.where(aspect_ratios > 1, aspect_ratios, 1 / aspect_ratios) @@ -165,29 +187,31 @@ def aspect_ratio(self, assignment, info): def convexity(self, assignment, info): sharpness = [] - for s in info['segments'].values(): + for s in info["segments"].values(): sharpness.append(s.convex_hull.area / s.area) sharpness = np.array(sharpness) scores = (sharpness - 1) ** 2 return scores.sum() def conciseness(self, assignment, info): - conciseness = np.array([len(s.boundary.coords) - 1 for s in info['segments'].values()]) + conciseness = np.array( + [len(s.boundary.coords) - 1 for s in info["segments"].values()] + ) scores = (conciseness / self.conciseness_thresh - 1) ** 2 return scores.sum() def exterior_length(self, assignment, info): - exterior_edges = info['exterior_edges'] + exterior_edges = info["exterior_edges"] total_length = 0 for i, r in enumerate(self.graph.rooms): if get_room_type(r) in self.exterior_connected_room_types: if assignment[i] in exterior_edges: total_length += exterior_edges[assignment[i]].length score = total_length / sum(ee.length for ee in exterior_edges.values()) - return (score - 1) ** 2 * len(info['segments']) + return (score - 1) ** 2 * len(info["segments"]) def exterior_corner(self, assignment, info): - exterior_edges = info['exterior_edges'] + exterior_edges = info["exterior_edges"] total_corners, corners = 0, 0 for i, r in enumerate(self.graph.rooms): if assignment[i] in exterior_edges: @@ -198,11 +222,11 @@ def exterior_corner(self, assignment, info): if get_room_type(r) in self.exterior_connected_room_types: total_corners += n score = total_corners / corners - return (score - 1) ** 2 * len(info['segments']) + return (score - 1) ** 2 * len(info["segments"]) def collinearity(self, assignment, info): x_skeletons, y_skeletons = set(), set() - for s in info['segments'].values(): + for s in info["segments"].values(): x, y = s.boundary.xy for i in range(len(x) - 1): if np.abs(x[i] - x[i + 1]) < 1e-2: @@ -210,34 +234,48 @@ def collinearity(self, assignment, info): elif np.abs(y[i] - 
y[i + 1]) < 1e-2: y_skeletons.add(unit_cast(y[i])) score = len(x_skeletons) + len(y_skeletons) - return score * len(info['segments']) + return score * len(info["segments"]) def functional_room(self, assignment, info): total_area = 0 - segments = info['segments'] + segments = info["segments"] for i, r in enumerate(self.graph.rooms): if get_room_type(r) in self.functional_room_types: total_area += segments[assignment[i]].area score = total_area / sum(s.area for s in segments.values()) - return (1 - score) ** 2 * len(info['segments']) + return (1 - score) ** 2 * len(info["segments"]) def narrow_passage(self, assignment, info): scores = [] - for p in info['segments'].values(): + for p in info["segments"].values(): for d in np.arange(1, int(self.narrow_passage_thresh / constants.UNIT)): with np.errstate(invalid="ignore"): length = d * constants.UNIT / 2 b = buffer(p, -length) c = buffer(b, length) - scores.append(p.area - c.area + ( - self.narrow_passage_thresh ** 2 * 20 if not isinstance(b, Polygon) else 0)) + scores.append( + p.area + - c.area + + ( + self.narrow_passage_thresh**2 * 20 + if not isinstance(b, Polygon) + else 0 + ) + ) scores = np.array(scores).sum() return scores -@gin.configurable(denylist=['graphs']) +@gin.configurable(denylist=["graphs"]) class JointBlueprintScorer: - def __init__(self, graphs, *args, staircase_occupancy_weight=1., staircase_iou_weight=.5, **kwargs): + def __init__( + self, + graphs, + *args, + staircase_occupancy_weight=1.0, + staircase_iou_weight=0.5, + **kwargs, + ): self.scorers = [] self.graphs = graphs for g in self.graphs: @@ -249,14 +287,18 @@ def compute_scores(self, assignments, infos): scores = {} for i, (assignment, info) in enumerate(zip(assignments, infos)): floor_scores = self.scorers[i].compute_scores(assignment, info) - scores.update({f'{k}_{i:01d}': v for k, v in floor_scores.items()}) + scores.update({f"{k}_{i:01d}": v for k, v in floor_scores.items()}) if len(self.graphs) > 1: if self.staircase_occupancy_weight > 0: - score = self.staircase_occupancy_weight * self.staircase_occupancy(assignments, infos) - scores['staircase_occupancy'] = score + score = self.staircase_occupancy_weight * self.staircase_occupancy( + assignments, infos + ) + scores["staircase_occupancy"] = score if self.staircase_iou_weight > 0: - score = self.staircase_iou_weight * self.staircase_iou(assignments, infos) - scores['staircase_iou'] = score + score = self.staircase_iou_weight * self.staircase_iou( + assignments, infos + ) + scores["staircase_iou"] = score return scores def find_score(self, assignments, infos): @@ -266,16 +308,18 @@ def staircase_occupancy(self, assignments, infos): scores = [] for graph, assignment, info in zip(self.graphs, assignments, infos): for _ in graph[RoomType.Staircase]: - scores.append(info['staircase_occupancies'][assignment[_]]) + scores.append(info["staircase_occupancies"][assignment[_]]) scores = np.array(scores) - return ((scores - 1) ** 2).sum() * sum(len(info['segments']) for info in infos) + return ((scores - 1) ** 2).sum() * sum(len(info["segments"]) for info in infos) def staircase_iou(self, assignments, infos): scores = [] for graph, assignment, info in zip(self.graphs, assignments, infos): for _ in graph[RoomType.Staircase]: - segment = info['segments'][assignment[_]] - staircase = info['staircase'] - scores.append(segment.intersection(staircase).area / segment.union(staircase).area) + segment = info["segments"][assignment[_]] + staircase = info["staircase"] + scores.append( + segment.intersection(staircase).area / 
segment.union(staircase).area + ) scores = np.array(scores) - return ((scores - 1) ** 2).sum() * sum(len(info['segments']) for info in infos) + return ((scores - 1) ** 2).sum() * sum(len(info["segments"]) for info in infos) diff --git a/infinigen/core/constraints/example_solver/room/segment.py b/infinigen/core/constraints/example_solver/room/segment.py index dc02f7962..3705a4084 100644 --- a/infinigen/core/constraints/example_solver/room/segment.py +++ b/infinigen/core/constraints/example_solver/room/segment.py @@ -2,7 +2,7 @@ # This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory # of this source tree. -# Authors: +# Authors: # - Lingjie Mei: primary author # - Karhan Kayan: fix constants @@ -14,12 +14,19 @@ from numpy.random import uniform from shapely import LineString, union -from infinigen.assets.utils.shapes import shared -from infinigen.core.constraints.example_solver.room.utils import compute_neighbours, cut_polygon_by_line, canonicalize, is_valid_polygon, \ - unit_cast, update_exterior_edges, update_shared_edges, update_staircase_occupancies import infinigen.core.constraints.example_solver.room.constants as constants -from infinigen.core.util.random import log_uniform +from infinigen.assets.utils.shapes import shared +from infinigen.core.constraints.example_solver.room.utils import ( + canonicalize, + compute_neighbours, + cut_polygon_by_line, + is_valid_polygon, + unit_cast, + update_exterior_edges, + update_staircase_occupancies, +) from infinigen.core.util.math import FixedSeed +from infinigen.core.util.random import log_uniform class SegmentMaker: @@ -29,34 +36,39 @@ def __init__(self, factory_seed, contour, n, merge_alpha=-1): self.n = n self.n_boxes = int(self.n * uniform(1.4, 1.6)) - self.box_ratio = .3 + self.box_ratio = 0.3 self.min_segment_area = log_uniform(1.5, 2) - self.min_segment_size = log_uniform(.5, 1.) 
+ self.min_segment_size = log_uniform(0.5, 1.0) - self.divide_box_fn = lambda x: x.area ** .5 + self.divide_box_fn = lambda x: x.area**0.5 self.n_box_trials = 200 - self.merge_fn = lambda x: x ** merge_alpha + self.merge_fn = lambda x: x**merge_alpha def build_segments(self, staircase=None): while True: try: segments, shared_edges = self.filter_segments() break - except: + except Exception: pass exterior_edges = update_exterior_edges(segments, shared_edges) - neighbours_all = {k: set(compute_neighbours(se, constants.SEGMENT_MARGIN)) for k, se in shared_edges.items()} - exterior_neighbours = set(compute_neighbours(exterior_edges, constants.SEGMENT_MARGIN)) + neighbours_all = { + k: set(compute_neighbours(se, constants.SEGMENT_MARGIN)) + for k, se in shared_edges.items() + } + exterior_neighbours = set( + compute_neighbours(exterior_edges, constants.SEGMENT_MARGIN) + ) staircase_occupancies = update_staircase_occupancies(segments, staircase) return { - 'segments': segments, - 'shared_edges': shared_edges, - 'exterior_edges': exterior_edges, - 'neighbours_all': neighbours_all, - 'exterior_neighbours': exterior_neighbours, - 'staircase_occupancies': staircase_occupancies, - 'staircase': staircase, + "segments": segments, + "shared_edges": shared_edges, + "exterior_edges": exterior_edges, + "neighbours_all": neighbours_all, + "exterior_neighbours": exterior_neighbours, + "staircase_occupancies": staircase_occupancies, + "staircase": staircase, } def divide_segments(self): @@ -68,7 +80,7 @@ def divide_segments(self): k = np.random.choice(list(keys), p=prob / prob.sum()) x, y, xx, yy = segments[k].bounds w, h = xx - x, yy - y - r = uniform(.25, .75) + r = uniform(0.25, 0.75) line = None if w >= h: w_ = unit_cast(r * w) @@ -85,7 +97,10 @@ def divide_segments(self): s, t = cut_polygon_by_line(segments[k], line) s_ = canonicalize(s) t_ = canonicalize(t) - if np.abs(s.area - s_.area) < 1e-3 and np.abs(t.area - t_.area) < 1e-3: + if ( + np.abs(s.area - s_.area) < 1e-3 + and np.abs(t.area - t_.area) < 1e-3 + ): segments[k], segments[i + 1] = s_, t_ break return {k: v for k, v in segments.items()} @@ -93,7 +108,8 @@ def divide_segments(self): def merge_segment(self, segments, shared_edges, attached, i, j): assert i != j s = canonicalize(union(segments[i], segments[j])) - if not is_valid_polygon(s): return + if not is_valid_polygon(s): + return segments[j] = s segments.pop(i) shared_edges.pop(i) @@ -130,8 +146,15 @@ def filter_segments(self): while len(segments) > self.n: prob = np.array([1 / (len(attached[c]) + 1) for c in shared_edges.keys()]) k = np.random.choice(list(shared_edges.keys()), p=prob / prob.sum()) - candidates = list(k for k, se in shared_edges[k].items() if se.length>=1e-6) - prob = np.array([len(attached[c].difference(attached[k]))**2 + .5 for c in candidates]) + candidates = list( + k for k, se in shared_edges[k].items() if se.length >= 1e-6 + ) + prob = np.array( + [ + len(attached[c].difference(attached[k])) ** 2 + 0.5 + for c in candidates + ] + ) n = np.random.choice(candidates, p=prob / prob.sum()) self.merge_segment(segments, shared_edges, attached, k, n) return segments, shared_edges diff --git a/infinigen/core/constraints/example_solver/room/solidifier.py b/infinigen/core/constraints/example_solver/room/solidifier.py index d6eb63637..8144b527e 100644 --- a/infinigen/core/constraints/example_solver/room/solidifier.py +++ b/infinigen/core/constraints/example_solver/room/solidifier.py @@ -2,13 +2,13 @@ # This source code is licensed under the BSD 3-Clause license found in the 
LICENSE file in the root directory # of this source tree. -# Authors: +# Authors: # - Lingjie Mei: primary author # - Karhan Kayan: fix constants import logging from collections import defaultdict, deque -from collections.abc import Iterable, Mapping, Sequence +from collections.abc import Iterable, Mapping import bmesh import bpy @@ -17,42 +17,77 @@ from numpy.random import uniform from shapely import LineString, line_interpolate_point, remove_repeated_points, simplify from shapely.ops import linemerge -from numpy.random import uniform -from infinigen.core.constraints.example_solver.room.types import RoomGraph, RoomType, get_room_type +from infinigen.assets.utils.autobevel import BevelSharp +from infinigen.assets.utils.decorate import ( + read_area, + read_center, + read_co, + read_edge_direction, + read_edge_length, + remove_faces, + write_attribute, + write_co, +) +from infinigen.assets.utils.object import join_objects, new_cube, new_line +from infinigen.core import tagging +from infinigen.core import tags as t +from infinigen.core.constraints import constraint_language as cl +from infinigen.core.constraints.example_solver.geometry import parse_scene from infinigen.core.constraints.example_solver.room.configs import ( - COMBINED_ROOM_TYPES, PANORAMIC_ROOM_TYPES, - WINDOW_ROOM_TYPES, TYPICAL_AREA_ROOM_TYPES, + COMBINED_ROOM_TYPES, + PANORAMIC_ROOM_TYPES, + WINDOW_ROOM_TYPES, ) -from infinigen.core.constraints.example_solver.room.constants import DOOR_MARGIN, DOOR_SIZE, DOOR_WIDTH, \ - MAX_WINDOW_LENGTH, SEGMENT_MARGIN, WALL_HEIGHT, WALL_THICKNESS, WINDOW_HEIGHT, WINDOW_SIZE - -from infinigen.core.constraints.example_solver.room.utils import SIMPLIFY_THRESH, WELD_THRESH, buffer, \ - canonicalize, polygon2obj -from infinigen.assets.utils.decorate import ( - read_area, read_center, read_co, remove_edges, remove_faces, - select_faces, write_attribute, write_co, read_edges, read_edge_direction, read_edge_length, +from infinigen.core.constraints.example_solver.room.constants import ( + DOOR_MARGIN, + DOOR_SIZE, + DOOR_WIDTH, + MAX_WINDOW_LENGTH, + SEGMENT_MARGIN, + WALL_HEIGHT, + WALL_THICKNESS, + WINDOW_HEIGHT, + WINDOW_SIZE, +) +from infinigen.core.constraints.example_solver.room.types import ( + RoomGraph, + RoomType, + get_room_type, +) +from infinigen.core.constraints.example_solver.room.utils import ( + SIMPLIFY_THRESH, + WELD_THRESH, + buffer, + canonicalize, + polygon2obj, +) +from infinigen.core.constraints.example_solver.state_def import ( + ObjectState, + RelationState, + State, ) -from infinigen.assets.utils.object import data2mesh, join_objects, mesh2obj, new_cube, new_line from infinigen.core.surface import write_attr_data from infinigen.core.tagging import PREFIX from infinigen.core.util import blender as butil -from infinigen.core import tagging, tags as t -from infinigen.core.constraints.example_solver.geometry import parse_scene -from infinigen.core.constraints.example_solver.state_def import ObjectState, RelationState, State -from infinigen.core.constraints import constraint_language as cl - -from infinigen.assets.utils.autobevel import BevelSharp from infinigen.core.util.logging import BadSeedError logger = logging.getLogger(__name__) _eps = 0.01 -@gin.configurable(denylist=['graph', 'level']) + +@gin.configurable(denylist=["graph", "level"]) class BlueprintSolidifier: - def __init__(self, graph: RoomGraph, level, has_ceiling=True, combined_room_types=COMBINED_ROOM_TYPES, - panoramic_room_types=PANORAMIC_ROOM_TYPES, enable_open=True): + def __init__( + self, + graph: 
RoomGraph, + level, + has_ceiling=True, + combined_room_types=COMBINED_ROOM_TYPES, + panoramic_room_types=PANORAMIC_ROOM_TYPES, + enable_open=True, + ): self.graph = graph self.level = level self.has_ceiling = has_ceiling @@ -62,11 +97,20 @@ def __init__(self, graph: RoomGraph, level, has_ceiling=True, combined_room_type self.enable_open = enable_open def get_entrance(self, names): - return None if self.graph.entrance is None else {k for k, n in names.items() if - n == self.graph.rooms[self.graph.entrance]}.pop() + return ( + None + if self.graph.entrance is None + else { + k + for k, n in names.items() + if n == self.graph.rooms[self.graph.entrance] + }.pop() + ) def get_staircase(self, names): - return {k for k, n in names.items() if get_room_type(n) == RoomType.Staircase}.pop() + return { + k for k, n in names.items() if get_room_type(n) == RoomType.Staircase + }.pop() @staticmethod def unroll(x): @@ -82,29 +126,34 @@ def unroll(x): yield (k,), cs def solidify(self, assignment, info): - segments = info['segments'] - neighbours = info['neighbours'] - shared_edges = info['shared_edges'] - exterior_edges = info['exterior_edges'] + segments = info["segments"] + neighbours = info["neighbours"] + shared_edges = info["shared_edges"] + exterior_edges = info["exterior_edges"] names = {k: self.graph.rooms[assignment.index(k)] for k in segments} - rooms = {k: self.make_room(p, exterior_edges.get(k, None)) for k, p in segments.items()} + rooms = { + k: self.make_room(p, exterior_edges.get(k, None)) + for k, p in segments.items() + } for k, o in rooms.items(): - o.name = f'{names[k]}-{self.level}' - # if segments[k].area > 2.5 * TYPICAL_AREA_ROOM_TYPES[get_room_type(names[k])] + 5: - # raise BadSeedError() - # + o.name = f"{names[k]}-{self.level}" + # if segments[k].area > 2.5 * TYPICAL_AREA_ROOM_TYPES[get_room_type(names[k])] + 5: + # raise BadSeedError() + # - open_cutters, door_cutters = self.make_interior_cutters(neighbours, shared_edges, segments, names) + open_cutters, door_cutters = self.make_interior_cutters( + neighbours, shared_edges, segments, names + ) exterior_cutters = self.make_exterior_cutters(exterior_edges, names) - + for k, r in rooms.items(): r.location[-1] += WALL_HEIGHT * self.level for cutters in [open_cutters, door_cutters, exterior_cutters]: for k, c in self.unroll(cutters): c.location[-1] += WALL_HEIGHT * self.level - - butil.put_in_collection(rooms.values(), 'placeholders:room_shells') + + butil.put_in_collection(rooms.values(), "placeholders:room_shells") state = self.convert_solver_state( rooms, segments, shared_edges, open_cutters, door_cutters, exterior_cutters @@ -112,66 +161,84 @@ def solidify(self, assignment, info): def clone_as_meshed(o): new = butil.copy(o) - new.name = o.name + '.meshed' + new.name = o.name + ".meshed" return new + rooms = {k: clone_as_meshed(r) for k, r in rooms.items()} # Cut windows & doors from final room meshes - cutter_col = butil.get_collection('placeholders:portal_cutters') + cutter_col = butil.get_collection("placeholders:portal_cutters") for cutters in [open_cutters, door_cutters, exterior_cutters]: for k, c in self.unroll(cutters): for k_ in k: butil.put_in_collection(c, cutter_col) before = len(rooms[k_].data.polygons) butil.modify_mesh( - rooms[k_], 'BOOLEAN', object=c, operation='DIFFERENCE', use_self=True, - use_hole_tolerant=True + rooms[k_], + "BOOLEAN", + object=c, + operation="DIFFERENCE", + use_self=True, + use_hole_tolerant=True, ) after = len(rooms[k_].data.polygons) - logger.debug(f'Cutting {c.name} from {rooms[k_].name}, 
{before=} {after=}') - + logger.debug( + f"Cutting {c.name} from {rooms[k_].name}, {before=} {after=}" + ) + for r in rooms.values(): - butil.modify_mesh(r, 'TRIANGULATE', min_vertices=3) + butil.modify_mesh(r, "TRIANGULATE", min_vertices=3) remove_faces(r, read_area(r) < 5e-4) - with butil.ViewportMode(r, 'EDIT'): - bpy.ops.mesh.select_all(action='SELECT') + with butil.ViewportMode(r, "EDIT"): + bpy.ops.mesh.select_all(action="SELECT") bpy.ops.mesh.dissolve_limited(angle_limit=0.001) x, y, z = read_co(r).T - z = np.where(np.abs(z - WALL_THICKNESS / 2) < .01, WALL_THICKNESS / 2, z) - z = np.where(np.abs(z - WALL_HEIGHT + WALL_THICKNESS / 2) < .01, WALL_HEIGHT - WALL_THICKNESS / 2, - z) + z = np.where(np.abs(z - WALL_THICKNESS / 2) < 0.01, WALL_THICKNESS / 2, z) + z = np.where( + np.abs(z - WALL_HEIGHT + WALL_THICKNESS / 2) < 0.01, + WALL_HEIGHT - WALL_THICKNESS / 2, + z, + ) write_co(r, np.stack([x, y, z], -1)) - butil.modify_mesh(r, 'WELD', merge_threshold=WALL_THICKNESS / 10) - + butil.modify_mesh(r, "WELD", merge_threshold=WALL_THICKNESS / 10) + direction = read_edge_direction(r) z_edges = np.abs(direction[:, -1]) - orthogonal = (z_edges < .1) | (z_edges > .9) - with butil.ViewportMode(r, 'EDIT'): + orthogonal = (z_edges < 0.1) | (z_edges > 0.9) + with butil.ViewportMode(r, "EDIT"): edge_faces = np.zeros(len(orthogonal)) bm = bmesh.from_edit_mesh(r.data) for f in bm.faces: for e in f.edges: edge_faces[e.index] += 1 - orthogonal = (z_edges < .1) | (z_edges > .9) | (edge_faces != 1) | (read_edge_length(r) < .5) + orthogonal = ( + (z_edges < 0.1) + | (z_edges > 0.9) + | (edge_faces != 1) + | (read_edge_length(r) < 0.5) + ) if not orthogonal.all(): - raise BadSeedError('No orthogonal edges') + raise BadSeedError("No orthogonal edges") - butil.group_in_collection(rooms.values(), 'placeholders:room_meshes') + butil.group_in_collection(rooms.values(), "placeholders:room_meshes") return state, rooms - def convert_solver_state(self, rooms, segments, shared_edges, open_cutters, door_cutters, exterior_cutters): + def convert_solver_state( + self, + rooms, + segments, + shared_edges, + open_cutters, + door_cutters, + exterior_cutters, + ): obj_states = {} for k, o in rooms.items(): - - tags = {t.Semantics.Room, t.Semantics(o.name.split('_')[0])} - + tags = {t.Semantics.Room, t.Semantics(o.name.split("_")[0])} + tags.add(t.SpecificObject(o.name)) - obj_states[o.name] = ObjectState( - obj=o, - tags=tags, - contour=segments[k] - ) + obj_states[o.name] = ObjectState(obj=o, tags=tags, contour=segments[k]) for k, r in rooms.items(): relations = obj_states[r.name].relations for other in shared_edges[k]: @@ -181,14 +248,15 @@ def convert_solver_state(self, rooms, segments, shared_edges, open_cutters, door ct = cl.ConnectorType.Door else: ct = cl.ConnectorType.Wall - relations.append(RelationState( - cl.RoomNeighbour({ct}), rooms[other].name) + relations.append( + RelationState(cl.RoomNeighbour({ct}), rooms[other].name) ) - cut_state = lambda x: RelationState(cl.CutFrom(), rooms[x].name) + def cut_state(x): + return RelationState(cl.CutFrom(), rooms[x].name) + for cutters in [door_cutters, open_cutters, exterior_cutters]: for k, c in self.unroll(cutters): - tags = set({t.Semantics.Cutter, t.SpecificObject(c.name)}) # TODO Lingjie - do not store whole-object window/door semantics in per-vertex attributes @@ -202,46 +270,73 @@ def convert_solver_state(self, rooms, segments, shared_edges, open_cutters, door c.scale.x *= (DOOR_WIDTH + WALL_THICKNESS) / DOOR_WIDTH obj_states[c.name] = ObjectState( - obj=c, - 
tags=tags, - relations=list(cut_state(k_) for k_ in k) + obj=c, tags=tags, relations=list(cut_state(k_) for k_ in k) ) return State(objs=obj_states) def make_room(self, obj, exterior_edges=None): obj = polygon2obj(canonicalize(obj), True) - butil.modify_mesh(obj, "WELD", merge_threshold=.2) - butil.modify_mesh(obj, 'SOLIDIFY', thickness=WALL_HEIGHT, offset=-1) + butil.modify_mesh(obj, "WELD", merge_threshold=0.2) + butil.modify_mesh(obj, "SOLIDIFY", thickness=WALL_HEIGHT, offset=-1) self.tag(obj, False) if exterior_edges is not None: center = read_center(obj) exterior_centers = [] - for ls in exterior_edges.geoms if exterior_edges.geom_type == 'MultiLineString' else [ - exterior_edges]: + for ls in ( + exterior_edges.geoms + if exterior_edges.geom_type == "MultiLineString" + else [exterior_edges] + ): for u, v in zip(ls.coords[:-1], ls.coords[1:]): exterior_centers.append(((u[0] + v[0]) / 2, (u[1] + v[1]) / 2)) - exterior = (np.abs(center[:, np.newaxis, :2] - np.array(exterior_centers)[np.newaxis]).sum( - -1) < WALL_THICKNESS * 4).any(-1).astype(int) + exterior = ( + ( + np.abs( + center[:, np.newaxis, :2] + - np.array(exterior_centers)[np.newaxis] + ).sum(-1) + < WALL_THICKNESS * 4 + ) + .any(-1) + .astype(int) + ) else: exterior = np.zeros(len(obj.data.polygons), dtype=int) - write_attr_data(obj, f'{PREFIX}{t.Subpart.Exterior.value}', exterior, 'INT', 'FACE') - write_attr_data(obj, f'{PREFIX}{t.Subpart.Interior.value}', 1 - exterior, 'INT', 'FACE') - + write_attr_data( + obj, f"{PREFIX}{t.Subpart.Exterior.value}", exterior, "INT", "FACE" + ) + write_attr_data( + obj, f"{PREFIX}{t.Subpart.Interior.value}", 1 - exterior, "INT", "FACE" + ) + assert len(obj.data.vertices) > 0 - obj.vertex_groups.new(name='visible_') - butil.modify_mesh(obj, 'SOLIDIFY', thickness=WALL_THICKNESS / 2, offset=-1, use_even_offset=True, - shell_vertex_group='visible_', use_quality_normals=True) - write_attribute(obj, 'visible_', f'{PREFIX}{t.Subpart.Visible.value}', 'FACE', 'INT') - obj.vertex_groups.remove(obj.vertex_groups['visible_']) + obj.vertex_groups.new(name="visible_") + butil.modify_mesh( + obj, + "SOLIDIFY", + thickness=WALL_THICKNESS / 2, + offset=-1, + use_even_offset=True, + shell_vertex_group="visible_", + use_quality_normals=True, + ) + write_attribute( + obj, "visible_", f"{PREFIX}{t.Subpart.Visible.value}", "FACE", "INT" + ) + obj.vertex_groups.remove(obj.vertex_groups["visible_"]) tagging.tag_object(obj, t.Semantics.Room) return obj def make_interior_cutters(self, neighbours, shared_edges, segments, names): name_groups = {} for k, n in names.items(): - name_groups[k] = set(i for i, rt in enumerate(self.combined_room_types) if get_room_type(n) in rt) + name_groups[k] = set( + i + for i, rt in enumerate(self.combined_room_types) + if get_room_type(n) in rt + ) dist2entrance = self.compute_dist2entrance(neighbours, names) centroids = {k: np.array(s.centroid.coords[0]) for k, s in segments.items()} open_cutters, door_cutters = defaultdict(dict), defaultdict(dict) @@ -249,12 +344,18 @@ def make_interior_cutters(self, neighbours, shared_edges, segments, names): for l, se in ses.items(): if l not in neighbours[k] or k >= l: continue - if len(name_groups[k].intersection(name_groups[l])) > 0 and self.enable_open: + if ( + len(name_groups[k].intersection(name_groups[l])) > 0 + and self.enable_open + ): open_cutters[k][l] = open_cutters[l][k] = self.make_open_cutter(se) else: direction = (centroids[k] - centroids[l]) * ( - 1 if dist2entrance[k] > dist2entrance[l] else -1) - door_cutters[k][l] = 
door_cutters[l][k] = self.make_door_cutter(se, direction) + 1 if dist2entrance[k] > dist2entrance[l] else -1 + ) + door_cutters[k][l] = door_cutters[l][k] = self.make_door_cutter( + se, direction + ) return open_cutters, door_cutters def compute_dist2entrance(self, neighbours, names): @@ -276,11 +377,10 @@ def make_exterior_cutters(self, exterior_edges, names): entrance = self.get_entrance(names) for k, mls in exterior_edges.items(): - room_type = get_room_type(names[k]) pano_chance = self.panoramic_room_types.get(room_type, 0) is_panoramic = uniform() < pano_chance - + lss = [] for ls in mls.geoms: coords = ls.coords[:] @@ -293,9 +393,13 @@ def make_exterior_cutters(self, exterior_edges, names): for ls in lss: coords = LineString(ls).segmentize(MAX_WINDOW_LENGTH).coords[:] for seg in zip(coords[:-1], coords[1:]): - length = np.linalg.norm([seg[1][1] - seg[0][1], seg[1][0] - seg[0][0]]) - if length >= DOOR_WIDTH + WALL_THICKNESS and uniform() < WINDOW_ROOM_TYPES[ - get_room_type(names[k])]: + length = np.linalg.norm( + [seg[1][1] - seg[0][1], seg[1][0] - seg[0][0]] + ) + if ( + length >= DOOR_WIDTH + WALL_THICKNESS + and uniform() < WINDOW_ROOM_TYPES[get_room_type(names[k])] + ): cutter = self.make_window_cutter(seg, is_panoramic) cutters[k].append(cutter) return cutters @@ -307,19 +411,21 @@ def make_staircase_cutters(self, staircase, names): if get_room_type(name) == RoomType.Staircase: with np.errstate(invalid="ignore"): cutter = polygon2obj(buffer(staircase, -WALL_THICKNESS / 2)) - butil.modify_mesh(cutter, 'SOLIDIFY', thickness=WALL_THICKNESS * 1.2, offset=0) + butil.modify_mesh( + cutter, "SOLIDIFY", thickness=WALL_THICKNESS * 1.2, offset=0 + ) self.tag(cutter) - cutter.name = 'staircase_cutter' + cutter.name = "staircase_cutter" cutters[k].append(cutter) return cutters def make_door_cutter(self, es, direction): lengths = [ls.length for ls in es.geoms] (x, y), (x_, y_) = es.geoms[np.argmax(lengths)].coords - + cutter = new_cube() - vertical = np.abs(x - x_) < .1 - cutter.scale = DOOR_WIDTH / 2 * (1 - _eps), DOOR_WIDTH, DOOR_SIZE / 2 + vertical = np.abs(x - x_) < 0.1 + cutter.scale = DOOR_WIDTH / 2 * (1 - _eps), DOOR_WIDTH, DOOR_SIZE / 2 butil.apply_transform(cutter, True) if vertical: @@ -343,8 +449,11 @@ def make_entrance_cutter(self, ls): lam = uniform(d, 1 - d) cutter.scale = DOOR_WIDTH / 2, DOOR_WIDTH / 2, DOOR_SIZE / 2 butil.apply_transform(cutter, True) - cutter.location = lam * x + (1 - lam) * x_, lam * y + ( - 1 - lam) * y_, DOOR_SIZE / 2 + WALL_THICKNESS / 2 + _eps + cutter.location = ( + lam * x + (1 - lam) * x_, + lam * y + (1 - lam) * y_, + DOOR_SIZE / 2 + WALL_THICKNESS / 2 + _eps, + ) cutter.rotation_euler = 0, 0, np.arctan2(y_ - y, x_ - x) self.tag(cutter) tagging.tag_object(cutter, t.Semantics.Entrance) @@ -376,7 +485,9 @@ def make_window_cutter(self, ls, is_panoramic): return cutter def make_open_cutter(self, es): - es = remove_repeated_points(simplify(es, SIMPLIFY_THRESH).normalize(), WELD_THRESH) + es = remove_repeated_points( + simplify(es, SIMPLIFY_THRESH).normalize(), WELD_THRESH + ) es = linemerge(es) if not isinstance(es, LineString) else es es = [es] if isinstance(es, LineString) else es.geoms lines = [] @@ -387,32 +498,43 @@ def make_open_cutter(self, es): start += 1 while np.linalg.norm(coords[end] - coords[end - 1]) < SEGMENT_MARGIN: end -= 1 - coords = coords[start:end + 1] if end < -1 else coords[start:] + coords = coords[start : end + 1] if end < -1 else coords[start:] if len(coords) < 2: continue - coords[0] = 
line_interpolate_point(LineString(coords[0: 2]), WALL_THICKNESS / 2 + _eps).coords[0] - coords[-1] = line_interpolate_point(LineString(coords[-1:-3:-1]), WALL_THICKNESS / 2 + _eps).coords[ - 0] + coords[0] = line_interpolate_point( + LineString(coords[0:2]), WALL_THICKNESS / 2 + _eps + ).coords[0] + coords[-1] = line_interpolate_point( + LineString(coords[-1:-3:-1]), WALL_THICKNESS / 2 + _eps + ).coords[0] line = new_line(len(coords) - 1) write_co(line, np.concatenate([coords, np.zeros((len(coords), 1))], -1)) lines.append(line) cutter = join_objects(lines) - butil.modify_mesh(cutter, 'WELD', merge_threshold=WELD_THRESH) + butil.modify_mesh(cutter, "WELD", merge_threshold=WELD_THRESH) butil.select_none() - with butil.ViewportMode(cutter, 'EDIT'): - bpy.ops.mesh.select_mode(type='EDGE') - bpy.ops.mesh.select_all(action='SELECT') + with butil.ViewportMode(cutter, "EDIT"): + bpy.ops.mesh.select_mode(type="EDGE") + bpy.ops.mesh.select_all(action="SELECT") bpy.ops.mesh.extrude_edges_move( - TRANSFORM_OT_translate={'value': (0, 0, WALL_HEIGHT - WALL_THICKNESS - 2 * _eps) - }) - bpy.ops.mesh.select_mode(type='FACE') - bpy.ops.mesh.select_all(action='SELECT') + TRANSFORM_OT_translate={ + "value": (0, 0, WALL_HEIGHT - WALL_THICKNESS - 2 * _eps) + } + ) + bpy.ops.mesh.select_mode(type="FACE") + bpy.ops.mesh.select_all(action="SELECT") bpy.ops.mesh.normals_make_consistent(inside=False) cutter.location[-1] += WALL_THICKNESS / 2 + _eps butil.apply_transform(cutter, True) - butil.modify_mesh(cutter, 'SOLIDIFY', thickness=WALL_THICKNESS * 3, offset=0, use_even_offset=True) + butil.modify_mesh( + cutter, + "SOLIDIFY", + thickness=WALL_THICKNESS * 3, + offset=0, + use_even_offset=True, + ) self.tag(cutter) tagging.tag_object(cutter, t.Semantics.Open) cutter.name = t.Semantics.Open.value @@ -421,16 +543,30 @@ def make_open_cutter(self, es): @staticmethod def tag(obj, visible=True): center = read_center(obj) + obj.location - ceiling = center[:, -1] > WALL_HEIGHT - WALL_THICKNESS / 2 - .1 - floor = center[:, -1] < WALL_THICKNESS / 2 + .1 + ceiling = center[:, -1] > WALL_HEIGHT - WALL_THICKNESS / 2 - 0.1 + floor = center[:, -1] < WALL_THICKNESS / 2 + 0.1 wall = ~(ceiling | floor) - write_attr_data(obj, f'{PREFIX}{t.Subpart.Ceiling.value}', ceiling, 'INT', 'FACE') - write_attr_data(obj, f'{PREFIX}{t.Subpart.SupportSurface.value}', floor, 'INT', 'FACE') - write_attr_data(obj, f'{PREFIX}{t.Subpart.Wall.value}', wall, 'INT', 'FACE') - write_attr_data(obj, 'segment_id', np.arange(len(center)), 'INT', 'FACE') - write_attr_data(obj, f'{PREFIX}{t.Subpart.Visible.value}', - np.ones_like(ceiling) if visible else np.zeros_like(ceiling), 'INT', 'FACE') - write_attr_data(obj, f'{PREFIX}{t.Subpart.Invisible.value}', - np.zeros_like(ceiling) if visible else np.ones_like(ceiling), 'INT', 'FACE') + write_attr_data( + obj, f"{PREFIX}{t.Subpart.Ceiling.value}", ceiling, "INT", "FACE" + ) + write_attr_data( + obj, f"{PREFIX}{t.Subpart.SupportSurface.value}", floor, "INT", "FACE" + ) + write_attr_data(obj, f"{PREFIX}{t.Subpart.Wall.value}", wall, "INT", "FACE") + write_attr_data(obj, "segment_id", np.arange(len(center)), "INT", "FACE") + write_attr_data( + obj, + f"{PREFIX}{t.Subpart.Visible.value}", + np.ones_like(ceiling) if visible else np.zeros_like(ceiling), + "INT", + "FACE", + ) + write_attr_data( + obj, + f"{PREFIX}{t.Subpart.Invisible.value}", + np.zeros_like(ceiling) if visible else np.ones_like(ceiling), + "INT", + "FACE", + ) parse_scene.preprocess_obj(obj) tagging.tag_canonical_surfaces(obj) diff --git 
a/infinigen/core/constraints/example_solver/room/solver.py b/infinigen/core/constraints/example_solver/room/solver.py index c431f933a..b25594431 100644 --- a/infinigen/core/constraints/example_solver/room/solver.py +++ b/infinigen/core/constraints/example_solver/room/solver.py @@ -2,7 +2,7 @@ # This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory # of this source tree. -# Authors: +# Authors: # - Lingjie Mei: primary author # - Karhan Kayan: fix constants @@ -16,12 +16,23 @@ from shapely import LineString, Polygon, union from shapely.ops import shared_paths -from infinigen.core.constraints.example_solver.room.types import RoomType, get_room_type -from infinigen.core.constraints.example_solver.room.configs import EXTERIOR_CONNECTED_ROOM_TYPES -from infinigen.core.constraints.example_solver.room.constants import SEGMENT_MARGIN import infinigen.core.constraints.example_solver.room.constants as constants -from infinigen.core.constraints.example_solver.room.utils import compute_neighbours, cut_polygon_by_line, is_valid_polygon, linear_extend_x, \ - linear_extend_y, canonicalize, update_exterior_edges, update_shared_edges, update_staircase_occupancies +from infinigen.core.constraints.example_solver.room.configs import ( + EXTERIOR_CONNECTED_ROOM_TYPES, +) +from infinigen.core.constraints.example_solver.room.constants import SEGMENT_MARGIN +from infinigen.core.constraints.example_solver.room.types import RoomType, get_room_type +from infinigen.core.constraints.example_solver.room.utils import ( + canonicalize, + compute_neighbours, + cut_polygon_by_line, + is_valid_polygon, + linear_extend_x, + linear_extend_y, + update_exterior_edges, + update_shared_edges, + update_staircase_occupancies, +) @dataclass @@ -31,13 +42,19 @@ class RoomSolverMsg: @property def is_success(self): - return self.status == 'success' + return self.status == "success" -@gin.configurable(denylist=['contour', 'graph']) +@gin.configurable(denylist=["contour", "graph"]) class BlueprintSolver: - def __init__(self, contour, graph, exterior_connected_room_types=EXTERIOR_CONNECTED_ROOM_TYPES, - max_stride=1, staircase_occupancy_thresh=.75): + def __init__( + self, + contour, + graph, + exterior_connected_room_types=EXTERIOR_CONNECTED_ROOM_TYPES, + max_stride=1, + staircase_occupancy_thresh=0.75, + ): self.contour = contour x, y = self.contour.boundary.xy self.x_min, self.x_max = np.min(x), np.max(x) @@ -46,7 +63,10 @@ def __init__(self, contour, graph, exterior_connected_room_types=EXTERIOR_CONNEC self.staircase_occupancy_thresh = staircase_occupancy_thresh self.exterior_connected_room_types = exterior_connected_room_types self.exterior_connected_rooms = set( - i for i, r in enumerate(self.graph.rooms) if get_room_type(r) in self.exterior_connected_room_types) + i + for i, r in enumerate(self.graph.rooms) + if get_room_type(r) in self.exterior_connected_room_types + ) if self.graph.entrance is not None: self.exterior_connected_rooms.add(self.graph.entrance) self.staircase_rooms = set(self.graph[RoomType.Staircase]) @@ -54,12 +74,17 @@ def __init__(self, contour, graph, exterior_connected_room_types=EXTERIOR_CONNEC def find_assignment(self, info): assignment = [0] * len(self.graph.rooms) - neighbours_all = info['neighbours_all'] - exterior_neighbours = info['exterior_neighbours'] - staircase_occupancies = info['staircase_occupancies'] - if info['staircase'] is not None: + neighbours_all = info["neighbours_all"] + exterior_neighbours = info["exterior_neighbours"] + 
staircase_occupancies = info["staircase_occupancies"] + if info["staircase"] is not None: staircase_candidates = list( - (k for k, v in staircase_occupancies.items() if v > self.staircase_occupancy_thresh)) + ( + k + for k, v in staircase_occupancies.items() + if v > self.staircase_occupancy_thresh + ) + ) if len(staircase_candidates) == 0: return None else: @@ -76,7 +101,9 @@ def assign_(i): else: candidates = unassigned.copy() n_unassigned = len(list(j for j in self.graph.neighbours[i] if j > i)) - assigned_neighbours = set(assignment[j] for j in self.graph.neighbours[i] if j < i) + assigned_neighbours = set( + assignment[j] for j in self.graph.neighbours[i] if j < i + ) for n in candidates: if assigned_neighbours.issubset(neighbours_all[n]): if len(neighbours_all[n].intersection(unassigned)) >= n_unassigned: @@ -90,23 +117,26 @@ def assign_(i): return assign_(0) def satisfies_constraints(self, assignment, info): - neighbours_all = info['neighbours_all'] - exterior_neighbours = info['exterior_neighbours'] - staircase_occupancies = info['staircase_occupancies'] + neighbours_all = info["neighbours_all"] + exterior_neighbours = info["exterior_neighbours"] + staircase_occupancies = info["staircase_occupancies"] for k, ns in enumerate(self.graph.neighbours): for n in ns: if assignment[k] not in neighbours_all[assignment[n]]: - return RoomSolverMsg('neighbours unsatisfied', [k, n]) + return RoomSolverMsg("neighbours unsatisfied", [k, n]) if k in self.exterior_connected_rooms: if assignment[k] not in exterior_neighbours: - return RoomSolverMsg('exterior neighbours unsatisfied', [k]) + return RoomSolverMsg("exterior neighbours unsatisfied", [k]) if get_room_type(self.graph.rooms[k]) == RoomType.Staircase: - if staircase_occupancies[assignment[k]] < self.staircase_occupancy_thresh: - return RoomSolverMsg('staircase occupancy unsatisfied', [k]) - return RoomSolverMsg('success') + if ( + staircase_occupancies[assignment[k]] + < self.staircase_occupancy_thresh + ): + return RoomSolverMsg("staircase occupancy unsatisfied", [k]) + return RoomSolverMsg("success") def perturb_solution(self, assignment, info): - k = np.random.choice(list(info['segments'].keys())) + k = np.random.choice(list(info["segments"].keys())) while True: info_ = deepcopy(info) assignment_ = deepcopy(assignment) @@ -118,59 +148,75 @@ def perturb_solution(self, assignment, info): resp = self.extrude_room_in(assignment, info, k) else: resp = self.swap_room(assignment, info, k) - except: + except Exception: info, assignment = info_, assignment_ else: break if not resp.is_success: return resp for c in resp.index_changed: - if not is_valid_polygon(info['segments'][c]): - return RoomSolverMsg('invalid segment', [c]) + if not is_valid_polygon(info["segments"][c]): + return RoomSolverMsg("invalid segment", [c]) try: for c in resp.index_changed: - update_shared_edges(info['segments'], info['shared_edges'], c) - update_exterior_edges(info['segments'], info['shared_edges'], info['exterior_edges'], c) - update_staircase_occupancies(info['segments'], info['staircase'], info['staircase_occupancies'], - c) - except: - return RoomSolverMsg('Exception') - info['neighbours_all'] = {k: set(compute_neighbours(se, SEGMENT_MARGIN)) for k, se in - info['shared_edges'].items()} - info['exterior_neighbours'] = set(compute_neighbours(info['exterior_edges'], SEGMENT_MARGIN)) - for k, s in info['segments'].items(): + update_shared_edges(info["segments"], info["shared_edges"], c) + update_exterior_edges( + info["segments"], info["shared_edges"], 
info["exterior_edges"], c + ) + update_staircase_occupancies( + info["segments"], + info["staircase"], + info["staircase_occupancies"], + c, + ) + except Exception: + return RoomSolverMsg("Exception") + info["neighbours_all"] = { + k: set(compute_neighbours(se, SEGMENT_MARGIN)) + for k, se in info["shared_edges"].items() + } + info["exterior_neighbours"] = set( + compute_neighbours(info["exterior_edges"], SEGMENT_MARGIN) + ) + for k, s in info["segments"].items(): x, y = np.array(s.boundary.coords).T - if np.any((x < -1.) | (y < -1.) | (x > 40.) | (y > 40.)): - return RoomSolverMsg('OOB') + if np.any((x < -1.0) | (y < -1.0) | (x > 40.0) | (y > 40.0)): + return RoomSolverMsg("OOB") satisfies = self.satisfies_constraints(assignment, info) if not satisfies.is_success: return satisfies return resp def extrude_room(self, i, info, out=True): - segments = info['segments'] + segments = info["segments"] coords = canonicalize(segments[i]).boundary.coords[:] indices = [] for k in range(len(coords) - 1): - (x, y), (x_, y_) = coords[k:k + 2] + (x, y), (x_, y_) = coords[k : k + 2] if np.abs(x - x_) < 1e-2 and self.x_min < x < self.x_max: indices.append(k) elif np.abs(y - y_) < 1e-2 and self.y_min < y < self.y_max: indices.append(k) k = np.random.choice(indices) - (x, y), (x_, y_) = coords[k:k + 2] + (x, y), (x_, y_) = coords[k : k + 2] is_vertical = np.abs(x - x_) < 1e-2 - line = LineString(coords[k:k + 2]) + line = LineString(coords[k : k + 2]) mod = len(coords) - 1 stride = constants.UNIT * (np.random.randint(self.max_stride) + 1) if is_vertical: new_x = x + stride if (y_ < y) ^ out else x - stride new_first = new_x, linear_extend_x(coords[(k - 1) % mod], coords[k], new_x) - new_second = new_x, linear_extend_x(coords[(k + 2) % mod], coords[k + 1], new_x) + new_second = ( + new_x, + linear_extend_x(coords[(k + 2) % mod], coords[k + 1], new_x), + ) else: new_y = y + stride if (x_ > x) ^ out else y - stride new_first = linear_extend_y(coords[(k - 1) % mod], coords[k], new_y), new_y - new_second = linear_extend_y(coords[(k + 2) % mod], coords[k + 1], new_y), new_y + new_second = ( + linear_extend_y(coords[(k + 2) % mod], coords[k + 1], new_y), + new_y, + ) coords[k % mod] = new_first coords[(k + 1) % mod] = new_second coords[-1] = coords[0] @@ -178,34 +224,38 @@ def extrude_room(self, i, info, out=True): return s, line, is_vertical def extrude_room_out(self, assignment, info, i): - segments, shared_edges = map(info.get, ['segments', 'info']) + segments, shared_edges = map(info.get, ["segments", "info"]) s, _, _ = self.extrude_room(i, info, True) if not is_valid_polygon(s): - return RoomSolverMsg('extrude_room_out_invalid', [i]) + return RoomSolverMsg("extrude_room_out_invalid", [i]) cutter = s.difference(segments[i]) if not is_valid_polygon(cutter): - return RoomSolverMsg('extrude_room_out_invalid', [i]) + return RoomSolverMsg("extrude_room_out_invalid", [i]) cutter = canonicalize(cutter) - shared = list(k for k in info['shared_edges'][i].keys() if segments[k].intersection(cutter).area > .1) + shared = list( + k + for k in info["shared_edges"][i].keys() + if segments[k].intersection(cutter).area > 0.1 + ) index_changed = [i, *shared] total_pre_area = sum([segments[i].area for i in index_changed]) for l in shared: segments[l] = canonicalize(segments[l].difference(cutter)) segments[i] = s total_post_area = sum([segments[i].area for i in index_changed]) - if np.abs(total_pre_area - total_post_area) < .1: - return RoomSolverMsg('success', index_changed) + if np.abs(total_pre_area - total_post_area) < 0.1: + 
return RoomSolverMsg("success", index_changed) else: - return RoomSolverMsg('extrude_room_out_oob', index_changed) + return RoomSolverMsg("extrude_room_out_oob", index_changed) def extrude_room_in(self, assignment, info, i): - segments, shared_edges = map(info.get, ['segments', 'shared_edges']) + segments, shared_edges = map(info.get, ["segments", "shared_edges"]) s, line, is_vertical = self.extrude_room(i, info, False) if not is_valid_polygon(s): - return RoomSolverMsg('extrude_room_in_invalid', [i]) + return RoomSolverMsg("extrude_room_in_invalid", [i]) cutter = segments[i].difference(s) if not is_valid_polygon(cutter): - return RoomSolverMsg('extrude_room_in_invalid', [i]) + return RoomSolverMsg("extrude_room_in_invalid", [i]) cutter = canonicalize(cutter) shared = {} for k in shared_edges[i].keys(): @@ -238,17 +288,17 @@ def extrude_room_in(self, assignment, info, i): segments[a] = canonicalize(union(segments[a], p)) segments[i] = s total_post_area = sum([segments[i].area for i in index_changed]) - if np.abs(total_pre_area - total_post_area) < .1: - return RoomSolverMsg('success', index_changed) + if np.abs(total_pre_area - total_post_area) < 0.1: + return RoomSolverMsg("success", index_changed) else: - return RoomSolverMsg('extrude_room_in_oob', index_changed) + return RoomSolverMsg("extrude_room_in_oob", index_changed) def swap_room(self, assignment, info, i): - j = np.random.choice(list(info['neighbours_all'][i])) + j = np.random.choice(list(info["neighbours_all"][i])) j_ = assignment.index(j) i_ = assignment.index(i) assignment[i_], assignment[j_] = j, i - return RoomSolverMsg('success', [i, j]) + return RoomSolverMsg("success", [i, j]) class BlueprintStaircaseSolver: @@ -262,24 +312,31 @@ def perturb_solution(self, assignments, infos): if not resp.is_success: return resp for info in infos: - for k in info['segments']: - update_staircase_occupancies(info['segments'], info['staircase'], info['staircase_occupancies'], - k) + for k in info["segments"]: + update_staircase_occupancies( + info["segments"], + info["staircase"], + info["staircase_occupancies"], + k, + ) return resp def move_staircase(self, infos): - staircase = infos[0]['staircase'] + staircase = infos[0]["staircase"] if staircase is None: - return RoomSolverMsg('success') + return RoomSolverMsg("success") directions = [(-1, 0), (1, 0), (0, -1), (0, 1)] for i in range(self.n_trials): stride = constants.UNIT * (np.random.randint(self.max_stride) + 1) x, y = directions[np.random.randint(4)] - coords = list((x_ + x * stride, y_ + y * stride) for x_, y_ in staircase.boundary.coords[:]) + coords = list( + (x_ + x * stride, y_ + y * stride) + for x_, y_ in staircase.boundary.coords[:] + ) p = Polygon(LineString(coords)) if self.contours[-1].contains(p): for info in infos: - info['staircase'] = p - return RoomSolverMsg('success') + info["staircase"] = p + return RoomSolverMsg("success") else: - return RoomSolverMsg('invalid staircase') + return RoomSolverMsg("invalid staircase") diff --git a/infinigen/core/constraints/example_solver/room/types.py b/infinigen/core/constraints/example_solver/room/types.py index ec9eba04c..7373d4313 100644 --- a/infinigen/core/constraints/example_solver/room/types.py +++ b/infinigen/core/constraints/example_solver/room/types.py @@ -9,24 +9,24 @@ class RoomType: Kitchen = "kitchen" - Bedroom = 'bedroom' - LivingRoom = 'living-room' - Closet = 'closet' - Hallway = 'hallway' - Bathroom = 'bathroom' - Garage = 'garage' - Balcony = 'balcony' - DiningRoom = 'dining-room' - Utility = 'utility' - Staircase 
= 'staircase'
+    Bedroom = "bedroom"
+    LivingRoom = "living-room"
+    Closet = "closet"
+    Hallway = "hallway"
+    Bathroom = "bathroom"
+    Garage = "garage"
+    Balcony = "balcony"
+    DiningRoom = "dining-room"
+    Utility = "utility"
+    Staircase = "staircase"
 
 def get_room_type(name):
-    return name.split('_')[0]
+    return name.split("_")[0]
 
 def get_room_level(name):
-    return int(name.split('-')[-1])
+    return int(name.split("-")[-1])
 
 class RoomGraph:
@@ -67,13 +67,48 @@ def __len__(self):
         return len(self.rooms)
 
     def __str__(self):
-        return {'neighbours': self.neighbours, 'rooms': self.rooms, 'entrance': self.entrance}
+        return {
+            "neighbours": self.neighbours,
+            "rooms": self.rooms,
+            "entrance": self.entrance,
+        }
 
 def make_demo_tree():
-    children = [[1, 2], [], [3, 4], [5, 6], [7], [8, 9], [10, 11], [], [], [12], [], [13], [], [14], []]
-    rooms = ['hallway_0', 'closet_0', 'kitchen_0', 'dining-room_0', 'utility_0', 'hallway_1', 'living-room_0',
-             'utility_1', 'bathroom_0', 'bedroom_0', 'balcony_0', 'bedroom_1', 'closet_1', 'bathroom_1', 'closet_2']
+    children = [
+        [1, 2],
+        [],
+        [3, 4],
+        [5, 6],
+        [7],
+        [8, 9],
+        [10, 11],
+        [],
+        [],
+        [12],
+        [],
+        [13],
+        [],
+        [14],
+        [],
+    ]
+    rooms = [
+        "hallway_0",
+        "closet_0",
+        "kitchen_0",
+        "dining-room_0",
+        "utility_0",
+        "hallway_1",
+        "living-room_0",
+        "utility_1",
+        "bathroom_0",
+        "bedroom_0",
+        "balcony_0",
+        "bedroom_1",
+        "closet_1",
+        "bathroom_1",
+        "closet_2",
+    ]
     return RoomGraph(children, rooms, 0)
diff --git a/infinigen/core/constraints/example_solver/room/utils.py b/infinigen/core/constraints/example_solver/room/utils.py
index 9353c9fb5..10c900067 100644
--- a/infinigen/core/constraints/example_solver/room/utils.py
+++ b/infinigen/core/constraints/example_solver/room/utils.py
@@ -2,7 +2,7 @@
 # This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory
 # of this source tree.
 
-# Authors: 
+# Authors:
 # - Lingjie Mei: primary author
 # - Karhan Kayan: fix constants
 
@@ -11,7 +11,13 @@
 import bpy
 import numpy as np
 import shapely
-from shapely import LineString, MultiLineString, Polygon, remove_repeated_points, simplify
+from shapely import (
+    LineString,
+    MultiLineString,
+    Polygon,
+    remove_repeated_points,
+    simplify,
+)
 from shapely.ops import linemerge, orient, polygonize, shared_paths, unary_union
 
 import infinigen.core.constraints.example_solver.room.constants as constants
@@ -21,8 +27,8 @@ from infinigen.core.util import blender as butil
 
 SIMPLIFY_THRESH = 1e-6
-ANGLE_SIMPLIFY_THRESH = .2
-WELD_THRESH = .01
+ANGLE_SIMPLIFY_THRESH = 0.2
+WELD_THRESH = 0.01
 
 def is_valid_polygon(p):
@@ -39,16 +45,22 @@ def canonicalize(p):
             p_ = shapely.force_2d(simplify_polygon(p))
             l = len(p.boundary.coords)
             if p.area == 0:
-                raise NotImplementedError('Polygon empty.')
+                raise NotImplementedError("Polygon empty.")
             p = orient(p_)
             coords = np.array(p.boundary.coords[:])
             rounded = np.round(coords / constants.UNIT) * constants.UNIT
-            coords = np.where(np.all(np.abs(coords - rounded) < 1e-3, -1)[:, np.newaxis], rounded, coords)
+            coords = np.where(
+                np.all(np.abs(coords - rounded) < 1e-3, -1)[:, np.newaxis],
+                rounded,
+                coords,
+            )
             diff = coords[1:] - coords[:-1]
             diff = diff / (np.linalg.norm(diff, axis=-1, keepdims=True) + 1e-6)
             product = (diff[[-1] + list(range(len(diff) - 1))] * diff).sum(-1)
             valid_indices = list(range(len(coords) - 1))
-            invalid_indices = np.nonzero((product < -.8) | (product > 1 - 1e-6))[0].tolist()
+            invalid_indices = np.nonzero((product < -0.8) | (product > 1 - 1e-6))[
+                0
+            ].tolist()
            if len(invalid_indices) > 0:
                 i = invalid_indices[len(invalid_indices) // 2]
                 valid_indices.remove(i)
segments.items(): if i is None or k == i: staircase_occupancies[k] = s.intersection(staircase).area / staircase.area @@ -115,15 +135,21 @@ def update_staircase_occupancies(segments, staircase, staircase_occupancies=None def compute_neighbours(ses, margin): - return list(l for l, se in ses.items() if any(ls.length >= margin for ls in se.geoms)) + return list( + l for l, se in ses.items() if any(ls.length >= margin for ls in se.geoms) + ) def linear_extend_x(base, target, new_x): - return target[1] + (new_x - target[0]) * (base[1] - target[1]) / (base[0] - target[0]) + return target[1] + (new_x - target[0]) * (base[1] - target[1]) / ( + base[0] - target[0] + ) def linear_extend_y(base, target, new_y): - return target[0] + (new_y - target[1]) * (base[0] - target[0]) / (base[1] - target[1]) + return target[0] + (new_y - target[1]) * (base[0] - target[0]) / ( + base[1] - target[1] + ) def cut_polygon_by_line(polygon, *args): @@ -136,7 +162,7 @@ def cut_polygon_by_line(polygon, *args): def polygon2obj(p, reversed=False): x, y = orient(p).exterior.xy obj = new_circle(vertices=len(x) - 1) - with butil.ViewportMode(obj, 'EDIT'): + with butil.ViewportMode(obj, "EDIT"): bpy.ops.mesh.edge_face_add() if reversed: bpy.ops.mesh.flip_normals() @@ -146,4 +172,6 @@ def polygon2obj(p, reversed=False): def buffer(p, distance): with np.errstate(invalid="ignore"): - return remove_repeated_points(simplify(p.buffer(distance, join_style='mitre'), SIMPLIFY_THRESH)) + return remove_repeated_points( + simplify(p.buffer(distance, join_style="mitre"), SIMPLIFY_THRESH) + ) diff --git a/infinigen/core/constraints/example_solver/solve.py b/infinigen/core/constraints/example_solver/solve.py index 4a4c6e8ef..7f29131a7 100644 --- a/infinigen/core/constraints/example_solver/solve.py +++ b/infinigen/core/constraints/example_solver/solve.py @@ -4,58 +4,44 @@ # Authors: Alexander Raistrick +import copy import logging from pathlib import Path -import copy import bpy -import numpy as np -from tqdm import trange, tqdm import gin -from infinigen.core import surface -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.constraints import ( - constraint_language as cl, - reasoning as r, - usage_lookup, - evaluator, - checks -) -from infinigen.core.constraints.example_solver.geometry import parse_scene, planes - -from .room import RoomSolver, MultistoryRoomSolver - -from infinigen.core.constraints.example_solver.state_def import State -from infinigen.core.constraints.constraint_language.util import delete_obj +import numpy as np +from tqdm import trange +from infinigen.core.constraints import constraint_language as cl +from infinigen.core.constraints import reasoning as r +from infinigen.core.constraints.evaluator import domain_contains from infinigen.core.constraints.example_solver import ( + greedy, propose_continous, propose_discrete, - greedy, ) -from infinigen.core.constraints.evaluator import domain_contains - -from infinigen.core.placement.placement import parse_asset_name +from infinigen.core.constraints.example_solver.state_def import State from infinigen.core.util import blender as butil -from infinigen.core import tagging, tags as t from .annealing import SimulatedAnnealingSolver +from .room import MultistoryRoomSolver, RoomSolver logger = logging.getLogger(__name__) -def map_range(x, xmin, xmax, ymin, ymax, exp=1): +def map_range(x, xmin, xmax, ymin, ymax, exp=1): if x < xmin: return ymin if x > xmax: return ymax t = (x - xmin) / (xmax - xmin) - return ymin + (ymax - ymin) * t ** exp + 
return ymin + (ymax - ymin) * t**exp + @gin.register class LinearDecaySchedule: - def __init__(self, start, end, pct_duration): self.start = start self.end = end @@ -64,17 +50,17 @@ def __init__(self, start, end, pct_duration): def __call__(self, t): return map_range(t, 0, self.pct_duration, self.start, self.end) + @gin.configurable class Solver: - def __init__( self, output_folder: Path, multistory: bool = False, - restrict_moves: list = None + restrict_moves: list = None, + addition_weight_scalar: float = 1.0, ): - - """ Initialize the solver + """Initialize the solver Parameters ---------- @@ -87,7 +73,7 @@ def __init__( constraints_greedy_unsatisfied : str | None What do we do if relevant constraints are unsatisfied at the end of a greedy stage? Options are 'warn` or `abort` or None - + """ self.output_folder = output_folder @@ -100,96 +86,97 @@ def __init__( self.all_roomtypes = None self.dimensions = None - self.moves = self._configure_move_weights(restrict_moves) - - - def _configure_move_weights(self, restrict_moves): + self.moves = self._configure_move_weights( + restrict_moves, addition_weight_scalar=addition_weight_scalar + ) + def _configure_move_weights(self, restrict_moves, addition_weight_scalar=1.0): schedules = { - 'addition': ( + "addition": ( propose_discrete.propose_addition, - LinearDecaySchedule(6, 0.1, 0.9), + LinearDecaySchedule( + 6 * addition_weight_scalar, 0.1 * addition_weight_scalar, 0.9 + ), ), - 'deletion': ( + "deletion": ( propose_discrete.propose_deletion, LinearDecaySchedule(2, 0.0, 0.5), ), - 'plane_change': ( + "plane_change": ( propose_discrete.propose_relation_plane_change, LinearDecaySchedule(2, 0.1, 1), ), - 'resample_asset': ( + "resample_asset": ( propose_discrete.propose_resample, LinearDecaySchedule(1, 0.1, 0.7), ), - 'reinit_pose': ( + "reinit_pose": ( propose_continous.propose_reinit_pose, LinearDecaySchedule(1, 0.5, 1), ), - 'translate': ( - propose_continous.propose_translate, - 1 - ), - 'rotate': ( - propose_continous.propose_rotate, - 0.5 - ), + "translate": (propose_continous.propose_translate, 1), + "rotate": (propose_continous.propose_rotate, 0.5), } if restrict_moves is not None: schedules = {k: v for k, v in schedules.items() if k in restrict_moves} - logger.info(f'Restricting {self.__class__.__name__} moves to {list(schedules.keys())}') + logger.info( + f"Restricting {self.__class__.__name__} moves to {list(schedules.keys())}" + ) return schedules @gin.configurable def choose_move_type( - self, - it: int, + self, + it: int, max_it: int, ): t = it / max_it names, confs = zip(*self.moves.items()) funcs, scheds = zip(*confs) weights = np.array([s if isinstance(s, (float, int)) else s(t) for s in scheds]) - return np.random.choice(funcs, p=weights/weights.sum()) + return np.random.choice(funcs, p=weights / weights.sum()) def solve_rooms(self, scene_seed, consgraph: cl.Problem, filter: r.Domain): - self.state, self.all_roomtypes, self.dimensions = self.room_solver_fn(scene_seed).solve() + self.state, self.all_roomtypes, self.dimensions = self.room_solver_fn( + scene_seed + ).solve() return self.state @gin.configurable def solve_objects( - self, - consgraph: cl.Problem, - filter_domain: r.Domain, + self, + consgraph: cl.Problem, + filter_domain: r.Domain, var_assignments: dict[str, str], - n_steps: int, + n_steps: int, desc: str, abort_unsatisfied: bool = False, - print_bounds: bool = False, + print_bounds: bool = False, ): - filter_domain = copy.deepcopy(filter_domain) desc_full = (desc, *var_assignments.values()) - dom_assignments = {k: 
r.Domain(self.state.objs[objkey].tags) for k, objkey in var_assignments.items()} + dom_assignments = { + k: r.Domain(self.state.objs[objkey].tags) + for k, objkey in var_assignments.items() + } filter_domain = r.substitute_all(filter_domain, dom_assignments) if not r.domain_finalized(filter_domain): - raise ValueError(f'Cannot solve {desc_full=} with non-finalized domain {filter_domain}') - + raise ValueError( + f"Cannot solve {desc_full=} with non-finalized domain {filter_domain}" + ) + orig_bounds = r.constraint_bounds(consgraph) - bounds = propose_discrete.preproc_bounds( - orig_bounds, - self.state, - filter_domain, - print_bounds=print_bounds + bounds = propose_discrete.preproc_bounds( + orig_bounds, self.state, filter_domain, print_bounds=print_bounds ) if len(bounds) == 0: - logger.info(f'No objects to be added for {desc_full=}, skipping') + logger.info(f"No objects to be added for {desc_full=}, skipping") return self.state active_count = greedy.update_active_flags(self.state, var_assignments) @@ -199,13 +186,13 @@ def solve_objects( f"Greedily solve {desc_full} - stage has {len(bounds)}/{len(orig_bounds)} bounds, " f"{active_count=}/{len(self.state.objs)} objs" ) - + self.optim.reset(max_iters=n_steps) ra = trange(n_steps) if self.optim.print_report_freq == 0 else range(n_steps) for j in ra: move_gen = self.choose_move_type(j, n_steps) self.optim.step(consgraph, self.state, move_gen, filter_domain) - self.optim.save_stats(self.output_folder/f'optim_{desc}.csv') + self.optim.save_stats(self.output_folder / f"optim_{desc}.csv") logger.info( f"Finished solving {desc_full}, added {len(self.state.objs) - n_start} " @@ -215,17 +202,16 @@ def solve_objects( logger.info(self.optim.curr_result.to_df()) violations = { - k: v for k, v in self.optim.curr_result.violations.items() - if v > 0 + k: v for k, v in self.optim.curr_result.violations.items() if v > 0 } if len(violations): - msg = f'Solver has failed to satisfy constraints for stage {desc_full}. {violations=}.' + msg = f"Solver has failed to satisfy constraints for stage {desc_full}. {violations=}." if abort_unsatisfied: - butil.save_blend(self.output_folder/f'abort_{desc}.blend') + butil.save_blend(self.output_folder / f"abort_{desc}.blend") raise ValueError(msg) else: - msg += ' Continuing anyway, override `solve_objects.abort_unsatisfied=True` via gin to crash instead.' + msg += " Continuing anyway, override `solve_objects.abort_unsatisfied=True` via gin to crash instead." logger.warning(msg) # re-enable everything so the blender scene populates / displays correctly etc @@ -233,10 +219,7 @@ def solve_objects( greedy.set_active(self.state, k, True) return self.state - + def get_bpy_objects(self, domain: r.Domain) -> list[bpy.types.Object]: objkeys = domain_contains.objkeys_in_dom(domain, self.state) - return [ - self.state.objs[k].obj for k in objkeys - ] - + return [self.state.objs[k].obj for k in objkeys] diff --git a/infinigen/core/constraints/example_solver/state_def.py b/infinigen/core/constraints/example_solver/state_def.py index bc325d6d8..042ef7c25 100644 --- a/infinigen/core/constraints/example_solver/state_def.py +++ b/infinigen/core/constraints/example_solver/state_def.py @@ -2,40 +2,37 @@ # This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory # of this source tree. 
-# Authors: +# Authors: # - Alexander Raistrick: state, print, to_json # - Karhan Kayan: add dof / trimesh from __future__ import annotations -from dataclasses import dataclass, field -import typing -import pickle -import copy + +import enum import importlib +import json import logging +import pickle +import typing from collections import OrderedDict -import json +from dataclasses import dataclass, field from pathlib import Path -import enum -from collections.abc import Collection -import numpy as np import bpy +import numpy as np import shapely import trimesh -from infinigen.core.constraints.example_solver.geometry.planes import Planes +from infinigen.core import tags as t +from infinigen.core.constraints import constraint_language as cl +from infinigen.core.constraints.example_solver.geometry.planes import Planes from infinigen.core.placement.factory import AssetFactory -from infinigen.core.constraints import ( - constraint_language as cl, - reasoning as r -) + from .geometry import parse_scene -import trimesh -from infinigen.core import tags as t logger = logging.getLogger(__name__) + @dataclass class RelationState: relation: cl.Relation @@ -43,9 +40,9 @@ class RelationState: child_plane_idx: int = None parent_plane_idx: int = None + @dataclass class ObjectState: - obj: bpy.types.Object generator: typing.Optional[AssetFactory] = None tags: set = field(default_factory=set) @@ -53,7 +50,7 @@ class ObjectState: dof_matrix_translation: np.array = None dof_rotation_axis: np.array = None - contour : shapely.Geometry = None + contour: shapely.Geometry = None _pose_affects_score = None fcl_obj = None @@ -62,11 +59,13 @@ class ObjectState: # store whether this object is active for the current greedy stage # inactive objects arent returned by scene() and arent accessible through blender (for perf) # updated by greedy.update_active_flags() - active: bool = True + active: bool = True def __post_init__(self): assert not t.contradiction(self.tags) - assert not any(isinstance(r.relation, cl.NegatedRelation) for r in self.relations), self.relations + assert not any( + isinstance(r.relation, cl.NegatedRelation) for r in self.relations + ), self.relations def __repr__(self): obj = self.obj @@ -76,36 +75,32 @@ def __repr__(self): name = obj.name if obj is not None else None return f"{self.__class__.__name__}(obj.name={name}, {tags=}, {relations=})" + @dataclass class State: - objs: OrderedDict[str, ObjectState] trimesh_scene: trimesh.Scene = None - bvh_cache : dict = field(default_factory=dict) + bvh_cache: dict = field(default_factory=dict) planes: Planes = None - - def print(self): + def print(self): print(f"State ({len(self.objs)} objs)") - order = sorted( - self.objs.keys(), - key=lambda s: s.split('_')[-1] - ) + order = sorted(self.objs.keys(), key=lambda s: s.split("_")[-1]) for k in order: v = self.objs[k] - relations = ', '.join( - f'{r.relation.__class__.__name__}({r.target_name})' - for r in v.relations + relations = ", ".join( + f"{r.relation.__class__.__name__}({r.target_name})" for r in v.relations ) - semantics = {tg for tg in t.decompose_tags(v.tags)[0] if not isinstance(tg, t.SpecificObject)} + semantics = { + tg + for tg in t.decompose_tags(v.tags)[0] + if not isinstance(tg, t.SpecificObject) + } print(f" {v.obj.name} {semantics} [{relations}]") def to_json(self, path: Path): - - JSON_SUPPORTED_TYPES = ( - int, float, str, bool, list, dict - ) + JSON_SUPPORTED_TYPES = (int, float, str, bool, list, dict) def preprocess_field(x): match x: @@ -120,7 +115,7 @@ def preprocess_field(x): case 
enum.Enum(): return x.name case type(): - return x.__module__ + '.' + x.__name__ + return x.__module__ + "." + x.__name__ case set() | frozenset(): return list(x) case val if isinstance(val, JSON_SUPPORTED_TYPES): @@ -131,23 +126,23 @@ def preprocess_field(x): return x.__dict__ case cl.Relation(): res = x.__dict__ - res['relation_type'] = x.__class__.__name__ + res["relation_type"] = x.__class__.__name__ return res case _: return "" data = { - 'objs': self.objs, + "objs": self.objs, } - with path.open('w') as f: + with path.open("w") as f: json.dump( data, - f, + f, default=preprocess_field, - sort_keys=True, + sort_keys=True, indent=4, - check_circular=True + check_circular=True, ) def __post_init__(self): @@ -161,23 +156,23 @@ def save(self, filename: str): for os in self.objs.values(): os.obj = os.obj.name if os.generator is not None: - path = os.generator.__module__ + '.' + os.generator.__name__ + path = os.generator.__module__ + "." + os.generator.__name__ os.generator = path - with open(filename, 'wb') as file: + with open(filename, "wb") as file: pickle.dump(self, file) for os in self.objs.values(): os.obj = bpy.data.objects[os.obj] if os.generator is not None: - *mod, name = os.generator.split('.') - mod = importlib.import_module('.'.join(mod)) + *mod, name = os.generator.split(".") + mod = importlib.import_module(".".join(mod)) os.generator = getattr(mod, name) @classmethod def load(cls, filename: str): - with open(filename, 'rb') as file: + with open(filename, "rb") as file: state = pickle.load(file) # all objs were serialized as strings, unpack them @@ -192,16 +187,10 @@ def load(cls, filename: str): def state_from_dummy_scene(col: bpy.types.Collection) -> State: - objs = {} for obj in col.all_objects: - obj.rotation_mode = 'AXIS_ANGLE' + obj.rotation_mode = "AXIS_ANGLE" tags = {t.Semantics(c.name) for c in col.children if obj.name in c.objects} tags.add(t.SpecificObject(obj.name)) - objs[obj.name] = ObjectState( - obj=obj, - generator=None, - tags=tags - ) + objs[obj.name] = ObjectState(obj=obj, generator=None, tags=tags) return State(objs=objs) - diff --git a/infinigen/core/constraints/reasoning/__init__.py b/infinigen/core/constraints/reasoning/__init__.py index 47a90d7c9..33af63f57 100644 --- a/infinigen/core/constraints/reasoning/__init__.py +++ b/infinigen/core/constraints/reasoning/__init__.py @@ -1,22 +1,13 @@ from .constraint_bounding import Bound, constraint_bounds from .constraint_constancy import is_constant - +from .constraint_domain import Domain, FilterByDomain, constraint_domain from .domain import ( - reldom_implies, + domain_finalized, reldom_compatible, + reldom_implies, reldom_intersection, reldom_intersects, reldom_satisfies, - - domain_finalized, -) -from .domain_substitute import ( - domain_tag_substitute, - substitute_all, -) -from .constraint_domain import ( - Domain, - constraint_domain, - FilterByDomain ) -from .expr_equal import expr_equal \ No newline at end of file +from .domain_substitute import domain_tag_substitute, substitute_all +from .expr_equal import expr_equal diff --git a/infinigen/core/constraints/reasoning/constraint_bounding.py b/infinigen/core/constraints/reasoning/constraint_bounding.py index a283ae853..4d8dfbdb2 100644 --- a/infinigen/core/constraints/reasoning/constraint_bounding.py +++ b/infinigen/core/constraints/reasoning/constraint_bounding.py @@ -2,33 +2,29 @@ # This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory # of this source tree. 
-# Authors: +# Authors: # - Alexander Raistrick: primary author # - David Yan: bounding for inequalities / expressions +import dataclasses +import logging import operator import typing -import copy -import dataclasses from functools import partial -import logging - -import numpy as np +from infinigen.core import tags as t from infinigen.core.constraints import constraint_language as cl -from .domain import Domain -from .constraint_domain import constraint_domain -from .domain_substitute import domain_tag_substitute - from .constraint_constancy import is_constant -from infinigen.core import tags as t +from .constraint_domain import constraint_domain +from .domain import Domain +from .domain_substitute import domain_tag_substitute logger = logging.getLogger(__name__) + @dataclasses.dataclass class Bound: - domain: Domain = None low: int = None high: int = None @@ -43,15 +39,16 @@ class Bound: @classmethod def from_comparison(cls, opfunc, lhs, rhs): - lhs = lhs() if is_constant(lhs) else None rhs = rhs() if is_constant(rhs) else None if lhs is None == rhs is None: - raise ValueError(f'Attempted to create bound with neither side constant {lhs=} {rhs=}') + raise ValueError( + f"Attempted to create bound with neither side constant {lhs=} {rhs=}" + ) right_const = rhs is not None val = rhs if right_const else lhs - + match (opfunc, right_const): case operator.eq, _: return cls(low=val, high=val) @@ -68,23 +65,17 @@ def from_comparison(cls, opfunc, lhs, rhs): case (operator.gt, True): return cls(low=val + 1) case _: - raise ValueError(f'Unhandled case {opfunc=}, {right_const=}') + raise ValueError(f"Unhandled case {opfunc=}, {right_const=}") def map(self, func, lhs=None, rhs=None): - if lhs is None == rhs is None: - raise ValueError(f'Expected exactly one of {lhs=} {rhs=} to be provided') + raise ValueError(f"Expected exactly one of {lhs=} {rhs=} to be provided") if lhs is not None: - return Bound( - low=func(lhs, self.low), - high=func(lhs, self.high) - ) + return Bound(low=func(lhs, self.low), high=func(lhs, self.high)) else: - return Bound( - low=func(self.low, rhs), - high=func(self.high, rhs) - ) + return Bound(low=func(self.low, rhs), high=func(self.high, rhs)) + int_inverse_op = { operator.add: operator.sub, @@ -92,16 +83,15 @@ def map(self, func, lhs=None, rhs=None): } int_inverse_op.update({v: k for k, v in int_inverse_op.items()}) + def _expression_map_bound_binop( - node: cl.ScalarOperatorExpression, - bound: Bound + node: cl.ScalarOperatorExpression, bound: Bound ) -> list[Bound]: - lhs, rhs = node.operands inv_func = int_inverse_op.get(node.func) if inv_func is None: return [] - + consts = is_constant(lhs), is_constant(rhs) match consts: case (False, False): @@ -110,24 +100,31 @@ def _expression_map_bound_binop( return expression_map_bound(rhs, bound.map(inv_func, lhs=lhs())) case (False, True): return expression_map_bound(lhs, bound.map(inv_func, rhs=rhs())) - case (True, True): # both const, nothing to bound + case (True, True): # both const, nothing to bound return [] case _: raise ValueError("Impossible") + def evaluate_known_vars(node: cl.Node, known_vars) -> cl.constant: if is_constant(node): return None match node: - case cl.ScalarOperatorExpression(f, (lhs, rhs)) if f in int_inverse_op.keys() or f in int_inverse_op: + case cl.ScalarOperatorExpression( + f, (lhs, rhs) + ) if f in int_inverse_op.keys() or f in int_inverse_op: if is_constant(lhs): rhs_eval = evaluate_known_vars(rhs, known_vars) - if is_constant(rhs_eval): return f(lhs, rhs_eval) - else: return None + if 
is_constant(rhs_eval): + return f(lhs, rhs_eval) + else: + return None else: lhs_eval = evaluate_known_vars(lhs, known_vars) - if is_constant(lhs_eval): return f(lhs_eval, rhs) - else: return None + if is_constant(lhs_eval): + return f(lhs_eval, rhs) + else: + return None case cl.count(objs): return evaluate_known_vars(objs, known_vars) case cl.ObjectSetExpression() as objs: @@ -138,15 +135,15 @@ def evaluate_known_vars(node: cl.Node, known_vars) -> cl.constant: vals.append(known_val) if len(vals) == 0: return None - else: + else: return cl.constant(min(vals)) case _: raise NotImplementedError(node) -def expression_map_bound(node: cl.Node, bound: Bound) -> list[Bound]: +def expression_map_bound(node: cl.Node, bound: Bound) -> list[Bound]: match node: - case cl.ScalarOperatorExpression(f, (lhs, rhs)) if f in int_inverse_op.keys(): + case cl.ScalarOperatorExpression(f, (_, _)) if f in int_inverse_op.keys(): return _expression_map_bound_binop(node, bound) case cl.count(objs): return expression_map_bound(objs, bound) @@ -161,17 +158,15 @@ def expression_map_bound(node: cl.Node, bound: Bound) -> list[Bound]: # distance & other hard constraints do not produce quantity-bounds return [] -def update_var(var, scene_state): + +def update_var(var, scene_state): if not is_constant(var) and not isinstance(var, int) and scene_state is not None: var_eval = evaluate_known_vars(var, scene_state) var = var_eval if is_constant(var_eval) else var return var -def constraint_bounds( - node: cl.Node, - state=None -) -> list[Bound]: +def constraint_bounds(node: cl.Node, state=None) -> list[Bound]: recurse = partial(constraint_bounds, state=state) match node: @@ -184,14 +179,16 @@ def constraint_bounds( high = update_var(high, state) if is_constant(low) and is_constant(high): low = low() - high = high() + high = high() bound = Bound(low=low, high=high) return expression_map_bound(val, bound) case cl.BoolOperatorExpression(f, (lhs, rhs)) if f in Bound._init_ops: lhs, rhs = update_var(lhs, state), update_var(rhs, state) if not is_constant(lhs) and not is_constant(rhs): - logger.debug(f'Encountered {cl.BoolOperatorExpression.__name__} {f} with non-constant lhs and rhs. Producing no bound.') + logger.debug( + f"Encountered {cl.BoolOperatorExpression.__name__} {f} with non-constant lhs and rhs. Producing no bound." + ) return [] bound = Bound.from_comparison(node.func, lhs, rhs) @@ -202,9 +199,10 @@ def constraint_bounds( bounds = recurse(pred) for b in bounds: # TODO INCORRECT. 
Doesnt force EVERY object in o_domain to satify the bound - b.domain = domain_tag_substitute(b.domain, t.Variable(varname), o_domain) + b.domain = domain_tag_substitute( + b.domain, t.Variable(varname), o_domain + ) return bounds case unmatched: assert isinstance(unmatched, cl.Expression), unmatched return [] - \ No newline at end of file diff --git a/infinigen/core/constraints/reasoning/constraint_constancy.py b/infinigen/core/constraints/reasoning/constraint_constancy.py index 54523b7ca..c6edda805 100644 --- a/infinigen/core/constraints/reasoning/constraint_constancy.py +++ b/infinigen/core/constraints/reasoning/constraint_constancy.py @@ -4,13 +4,10 @@ # Authors: Alexander Raistrick -import operator -import typing - -import numpy as np from infinigen.core.constraints import constraint_language as cl + def is_constant(node: cl.Node): match node: case cl.constant(): @@ -18,4 +15,4 @@ def is_constant(node: cl.Node): case cl.BoolOperatorExpression(_, vs) | cl.ScalarOperatorExpression(_, vs): return all(is_constant(x) for x in vs) case _: - return False \ No newline at end of file + return False diff --git a/infinigen/core/constraints/reasoning/constraint_domain.py b/infinigen/core/constraints/reasoning/constraint_domain.py index e627a3c56..17e856869 100644 --- a/infinigen/core/constraints/reasoning/constraint_domain.py +++ b/infinigen/core/constraints/reasoning/constraint_domain.py @@ -5,27 +5,20 @@ # Authors: Alexander Raistrick from __future__ import annotations + import logging -import itertools +from dataclasses import dataclass from functools import partial -from dataclasses import dataclass, field -import copy -import typing - -import numpy as np - -from infinigen.core.constraints import constraint_language as cl from infinigen.core import tags as t +from infinigen.core.constraints import constraint_language as cl + from .domain import Domain logger = logging.getLogger(__name__) -def constraint_domain( - node: cl.ObjectSetExpression, - finalize_variables=False -) -> Domain: +def constraint_domain(node: cl.ObjectSetExpression, finalize_variables=False) -> Domain: """Given an expression, find a compact representation of what types of objects it is applying to. User can compared the resulting Domain against their State and see what objects fit. @@ -40,7 +33,9 @@ def constraint_domain( d = recurse(objs) d.tags.update(tags) if t.contradiction(d.tags): - raise ValueError(f'Contradictory tags {tags=} for {d=} while parsing constraint {node=}') + raise ValueError( + f"Contradictory tags {tags=} for {d=} while parsing constraint {node=}" + ) return d case cl.related_to(children, parents, relation): c_d = recurse(children) @@ -51,25 +46,27 @@ def constraint_domain( return Domain() case cl.item(x): if finalize_variables: - return recurse(node.member_of) # TODO - worried about infinite recursion somehow + return recurse( + node.member_of + ) # TODO - worried about infinite recursion somehow else: return Domain(tags={t.Variable(x)}) case FilterByDomain(objs, filter): return filter.intersection(recurse(objs)) case _: raise NotImplementedError(node) - + + @dataclass class FilterByDomain(cl.ObjectSetExpression): + """Constraint node which says to return all objects matching a domain. - """ Constraint node which says to return all objects matching a domain. - Used as a compacted representation of the filtering performed many cl.tagged and cl.related_to calls. One r.Domain is sufficient to represent the effect of and combination of intersection-style filtering. 
Introduced (currently) only by greedy.filter_constraints, since that function needs to work - with domains in order to narrow the scope of some constraints. + with domains in order to narrow the scope of some constraints. """ objs: cl.ObjectSetExpression - filter: Domain \ No newline at end of file + filter: Domain diff --git a/infinigen/core/constraints/reasoning/domain.py b/infinigen/core/constraints/reasoning/domain.py index f36815c23..e57740b9b 100644 --- a/infinigen/core/constraints/reasoning/domain.py +++ b/infinigen/core/constraints/reasoning/domain.py @@ -5,41 +5,33 @@ # Authors: Alexander Raistrick from __future__ import annotations -import logging -import itertools -from dataclasses import dataclass, field import copy -import typing - -import numpy as np +import itertools +import logging +from dataclasses import dataclass, field -from infinigen.core.constraints import constraint_language as cl from infinigen.core import tags as t +from infinigen.core.constraints import constraint_language as cl +from infinigen.core.util.logging import lazydebug logger = logging.getLogger(__name__) -def reldom_implies( - a: tuple[cl.Relation, Domain], - b: tuple[cl.Relation, Domain] -): - """ If relation a is satisfied, is relation guaranteed to be satisfied? - """ + +def reldom_implies(a: tuple[cl.Relation, Domain], b: tuple[cl.Relation, Domain]): + """If relation a is satisfied, is relation guaranteed to be satisfied?""" assert isinstance(a[1], Domain) assert isinstance(b[1], Domain) - return ( - a[0].implies(b[0]) and - a[1].implies(b[1]) - ) + return a[0].implies(b[0]) and a[1].implies(b[1]) + def reldom_compatible( a: tuple[cl.Relation, Domain], b: tuple[cl.Relation, Domain], ): - """ If relation a is satisfied, can relation b be satisfied? - """ + """If relation a is satisfied, can relation b be satisfied?""" assert isinstance(a[1], Domain) assert isinstance(b[1], Domain) @@ -49,62 +41,58 @@ def reldom_compatible( match (a_neg, b_neg): case True, False: if b[0].implies(a[0].rel) and b[1].intersects(a[1]): - logger.debug('reldom_compatible found contradicting negated %s %s', a[0], b[0]) + lazydebug( + logger, + lambda: f"reldom_compatible found contradicting negated {a[0]} {b[0]}", + ) return False case False, True: if a[0].implies(b[0].rel) and a[1].intersects(b[1]): - logger.debug('reldom_compatible found contradicting negated %s %s', a[0], b[0]) + lazydebug( + logger, + lambda: f"reldom_compatible found contradicting negated {a[0]} {b[0]}", + ) return False return True + def reldom_satisfies( a: tuple[cl.Relation, Domain], b: tuple[cl.Relation, Domain], ): - return ( - a[0].intersects(b[0], strict=True) - and a[1].satisfies(b[1]) - ) - + return a[0].intersects(b[0], strict=True) and a[1].satisfies(b[1]) + + def reldom_intersects( - a: tuple[cl.Relation, Domain], - b: tuple[cl.Relation, Domain], - **kwargs + a: tuple[cl.Relation, Domain], b: tuple[cl.Relation, Domain], **kwargs ): - return ( - a[0].intersects(b[0]) and - a[1].intersects(b[1], **kwargs) - ) + return a[0].intersects(b[0]) and a[1].intersects(b[1], **kwargs) def reldom_intersection( a: tuple[cl.Relation, Domain], b: tuple[cl.Relation, Domain], ): - return ( - a[0].intersection(b[0]), - a[1].intersection(b[1]) - ) + return (a[0].intersection(b[0]), a[1].intersection(b[1])) def domain_finalized(dom: Domain, check_anyrel=False, check_variable=True): - if check_variable and any(isinstance(x, t.Variable) for x in dom.tags): return False - + for rel, cdom in dom.relations: if check_anyrel and isinstance(rel, cl.AnyRelation): return 
False if not domain_finalized(cdom): return False - + return True + @dataclass class Domain: - - ''' + """ Describes a class of object in the scene Objects are in the domain if: @@ -114,43 +102,49 @@ class Domain: WARNING: Recurive datastructure, here be dragons Note: Default-constructed Domain contains Everything - ''' + """ tags: set[t.Semantics] = field(default_factory=set) relations: list[tuple[cl.Relation, Domain]] = field(default_factory=list) def repr(self, abbrv=False, onelevel=False, oneline=False): + def is_neg(x): + return isinstance(x, t.Negated) - is_neg = lambda x: isinstance(x, t.Negated) def setrepr(s): inner = ", ".join( - repr(x) for x in sorted(list(s), key=is_neg) + repr(x) + for x in sorted(list(s), key=is_neg) if not (abbrv and isinstance(x, t.Negated)) ) - return '{' + inner + '}' - + return "{" + inner + "}" + next_abbrv = abbrv or onelevel + def repr_reldom(r, d): if abbrv: - rel = f'-{r.rel.__class__.__name__}' if isinstance(r, cl.NegatedRelation) else f'{r.__class__.__name__}' - return f'({rel}(...), Domain({setrepr(d.tags)}, [...]))' + rel = ( + f"-{r.rel.__class__.__name__}" + if isinstance(r, cl.NegatedRelation) + else f"{r.__class__.__name__}" + ) + return f"({rel}(...), Domain({setrepr(d.tags)}, [...]))" else: - return f'({repr(r)}, {d.repr(abbrv=next_abbrv)})' - + return f"({repr(r)}, {d.repr(abbrv=next_abbrv)})" + relations = [ repr_reldom(r, d) for r, d in sorted( - self.relations, - key=lambda x: isinstance(x[0], cl.NegatedRelation) + self.relations, key=lambda x: isinstance(x[0], cl.NegatedRelation) ) ] if not oneline and sum(len(x) for x in relations) > 20: - relations = [r.replace('\n', '\n\t') for r in relations] - relations = '\n\t' + ',\n\t'.join(relations) + '\n' + relations = [r.replace("\n", "\n\t") for r in relations] + relations = "\n\t" + ",\n\t".join(relations) + "\n" else: - relations = ', '.join(relations) - return f'{self.__class__.__name__}({setrepr(self.tags)}, [{relations}])' + relations = ", ".join(relations) + return f"{self.__class__.__name__}({setrepr(self.tags)}, [{relations}])" __repr__ = repr @@ -159,22 +153,14 @@ def __post_init__(self): assert isinstance(self.relations, list) def implies(self, other: Domain): - - return ( - t.implies(self.tags, other.tags) - and all( - any(reldom_implies(rel, orel) for rel in self.relations) - for orel in other.relations - ) + return t.implies(self.tags, other.tags) and all( + any(reldom_implies(rel, orel) for rel in self.relations) + for orel in other.relations ) def add_relation( - self, - new_rel: cl.Relation, - new_dom: Domain, - optimize_check_implies=True + self, new_rel: cl.Relation, new_dom: Domain, optimize_check_implies=True ): - """ new_rel, new_dom: the relation and domain to be added optimize_check_implies: bool @@ -186,107 +172,114 @@ def add_relation( assert new_dom is not self - logger.debug('add_relation %s %s to existing %i', new_rel, new_dom, len(self.relations)) + lazydebug( + logger, + lambda: f"add_relation {new_rel} {new_dom} to existing {len(self.relations)}", + ) if not optimize_check_implies: self.relations.append((new_rel, new_dom)) return - + covered = False - + for i, (er, ed) in enumerate(self.relations): if isinstance(new_rel, cl.NegatedRelation): - continue + continue elif isinstance(er, cl.NegatedRelation): continue elif reldom_implies((er, ed), (new_rel, new_dom)): covered = True - elif ( - reldom_satisfies((er, ed), (new_rel, new_dom)) - or reldom_satisfies((new_rel, new_dom), (er, ed)) + elif reldom_satisfies((er, ed), (new_rel, new_dom)) or 
reldom_satisfies( + (new_rel, new_dom), (er, ed) ): - logger.debug('Tightening existing relation %s with %s', (er, ed), (new_rel, new_dom)) + lazydebug( + logger, + lambda: f"Tightening existing relation {(er, ed)} with {(new_rel, new_dom)}", + ) self.relations[i] = reldom_intersection((new_rel, new_dom), (er, ed)) covered = True elif new_dom.intersects(ed, require_satisfies_right=True): - logger.debug('Tightening domain %s with %s', ed, new_dom) + lazydebug(logger, lambda: f"Tightening domain {ed} with {new_dom}") self.relations[i] = (er, ed.intersection(new_dom)) else: - logger.debug('%s is not relevant for %s', (er, ed), (new_rel, new_dom)) - + lazydebug( + logger, + lambda: f"{(er, ed)} is not relevant for {(new_rel, new_dom)}", + ) + if not covered: - logger.debug('optimize_check_implies found nothing, adding relation %s %s', new_rel, new_dom) + lazydebug( + logger, + lambda: f"optimize_check_implies found nothing, adding relation {new_rel} {new_dom}", + ) self.relations.append((new_rel, new_dom)) if self.is_recursive(): - raise ValueError(f'Encountered recursive domain after add_relation {new_rel=} {new_dom=} onto {self.tags=} {len(self.relations)=}') + raise ValueError( + f"Encountered recursive domain after add_relation {new_rel=} {new_dom=} onto {self.tags=} {len(self.relations)=}" + ) def with_relation(self, rel: cl.Relation, dom: Domain): new = copy.deepcopy(self) new.add_relation(rel, dom) return new - + def with_tags(self, tags: set[t.Semantics]): if not isinstance(tags, set): tags = {tags} new = copy.deepcopy(self) new.tags.update(tags) return new - - def satisfies( - self, - other: Domain - ): - + + def satisfies(self, other: Domain): """ - Assumes that 'self' is fully specified: any predicates that arent listed are false. - + Assumes that 'self' is fully specified: any predicates that arent listed are false. + Different from 'implies' in that if `other` contains negative predicates, `self` need not imply these, it just needs to not contradict them. 
- Different from 'intersects' in that + Different from 'intersects' in that """ - logger.debug("%s for %s %s", Domain.satisfies.__name__, self, other) - + lazydebug(logger, lambda: f"{Domain.satisfies.__name__} for {self} {other}") + if not t.satisfies(self.tags, other.tags): - logger.debug('failed tag implication %s -> %s', self.tags, other.tags) + lazydebug( + logger, lambda: f"failed tag implication {self.tags} -> {other.tags}" + ) return False - + def bothsat(reldom1, reldom2): - return ( - reldom1[0].satisfies(reldom2[0]) - and reldom1[1].satisfies(reldom2[1]) - ) - + return reldom1[0].satisfies(reldom2[0]) and reldom1[1].satisfies(reldom2[1]) + for orel in other.relations: match orel: case (cl.NegatedRelation(n), d): - - contradictor = next(( - srel for srel in self.relations if bothsat(srel, (n, d)) - ), None) + contradictor = next( + (srel for srel in self.relations if bothsat(srel, (n, d))), None + ) if contradictor is not None: - logger.debug( - 'satisfies found %s in self, which contradicts %s because it satisfies %s', contradictor, orel, (n, d) + lazydebug( + logger, + lambda: f"satisfies found {contradictor} in self, which contradicts {orel} because it satisfies {(n, d)}", ) return False case _: if not any(bothsat(srel, orel) for srel in self.relations): - logger.debug('found unsatisfied %s for %s', orel, self.relations) + lazydebug( + logger, + lambda: f"found unsatisfied {orel} for {self.relations}", + ) return False - + return True def intersects( - self, - other: Domain, - require_satisfies_left=False, - require_satisfies_right=False + self, other: Domain, require_satisfies_left=False, require_satisfies_right=False ): - """Return True if self and other could have a non-empty intersection. Parameters @@ -294,7 +287,7 @@ def intersects( self: Domain - the domain to check other: Domain - the domain to check against - require_satisfies_left: bool - + require_satisfies_left: bool - If True, assume that `self` is exhaustively specified (ie, any predicates not listed are false), and therefore `other` must imply `self` for the intersection to be non-empty. require_satisfies_right: bool - @@ -302,70 +295,82 @@ def intersects( and therefore `self` must imply `other` for the intersection to be non-empty. 
""" - logger.debug('Domain.intersects for \n\t%s \n\t%s', self, other) - + lazydebug(logger, lambda: f"Domain.intersects for \n\t{self} \n\t{other}") + if t.contradiction(self.tags.union(other.tags)): - logger.debug('tag contradiction %s, %s', self.tags, other.tags) + lazydebug(logger, lambda: f"tag contradiction {self.tags}, {other.tags}") return False - + # no relations can contradict eachother for ard, brd in itertools.product(self.relations, other.relations): - if ard is brd: + if ard is brd: continue if not reldom_compatible(ard, brd): - logger.debug('found incompatible %s %s', ard, brd) + lazydebug(logger, lambda: f"found incompatible {ard} {brd}") return False - + # any relations actually known to be present must intersect - a_pos = [rd for rd in self.relations if not isinstance(rd[0], cl.NegatedRelation)] - b_pos = [rd for rd in other.relations if not isinstance(rd[0], cl.NegatedRelation)] + a_pos = [ + rd for rd in self.relations if not isinstance(rd[0], cl.NegatedRelation) + ] + b_pos = [ + rd for rd in other.relations if not isinstance(rd[0], cl.NegatedRelation) + ] if require_satisfies_left: if not t.satisfies(other.tags, self.tags): return False for ard in a_pos: if not any(reldom_intersects(ard, brd) for brd in b_pos): - logger.debug('require_satisfies_left found no intersecting %s %s', ard, b_pos) + lazydebug( + logger, + lambda: f"require_satisfies_left found no intersecting {ard} {b_pos}", + ) return False if require_satisfies_right: if not t.satisfies(self.tags, other.tags): return False for brd in b_pos: if not any(reldom_intersects(ard, brd) for ard in a_pos): - logger.debug('require_satisfies_right found no intersecting %s %s', brd, a_pos) + lazydebug( + logger, + lambda: f"require_satisfies_right found no intersecting {brd} {a_pos}", + ) return False - - logger.debug('Domain.intersects for %s %s returning True', self, other) + + lazydebug( + logger, lambda: f"Domain.intersects for {self} {other} returning True" + ) return True - - def intersection(self, other: Domain): - ''' + def intersection(self, other: Domain): + """ Return a domain representing the intersection of self and other. Result is at least as strict as self and other. contains(self, x) and contains(other, x) -> contains(intersection, x) TODO: - - does order relations are checked for intersection matter? - - almost certainly yes, intersection is not transitive. + - does order relations are checked for intersection matter? + - almost certainly yes, intersection is not transitive. - so what order is best? fewest remaining relations? does it matter? - ''' + """ newtags = self.tags.union(other.tags) if t.contradiction(newtags): - raise ValueError(f'Contradictory {newtags=} for {self.intersection} {other=}') + raise ValueError( + f"Contradictory {newtags=} for {self.intersection} {other=}" + ) newdom = Domain(newtags) for orel, odom in *self.relations, *other.relations: newdom.add_relation(orel, copy.deepcopy(odom)) return newdom - + def is_recursive(self, seen=None): - - """ Check if this domain somehow references itself via its own relations. - Domains should ideally never reach this state; this function is used to check that they dont. + """Check if this domain somehow references itself via its own relations. + Domains should ideally never reach this state; this function is used to check that they dont. 
""" if seen is None: @@ -376,10 +381,7 @@ def is_recursive(self, seen=None): seen.add(id(self)) - return any( - d.is_recursive(seen=seen) - for _, d in self.relations - ) + return any(d.is_recursive(seen=seen) for _, d in self.relations) def positive_part(self): return Domain( @@ -388,29 +390,21 @@ def positive_part(self): (r, d.positive_part()) for r, d in self.relations if not isinstance(r, cl.NegatedRelation) - ] + ], ) - + def traverse(self): yield self for rel, dom in self.relations: yield from dom.traverse() def all_vartags(self) -> set[t.Variable]: - return { - x - for d in self.traverse() - for x in d.tags - if isinstance(x, t.Variable) - } + return {x for d in self.traverse() for x in d.tags if isinstance(x, t.Variable)} def get_objs_named(self): - objnames = { - x.name for x in self.tags - if isinstance(x, t.SpecificObject) - } + objnames = {x.name for x in self.tags if isinstance(x, t.SpecificObject)} for rel, dom in self.relations: if isinstance(rel, cl.NegatedRelation): continue objnames = objnames.union(dom.get_objs_named()) - return objnames \ No newline at end of file + return objnames diff --git a/infinigen/core/constraints/reasoning/domain_substitute.py b/infinigen/core/constraints/reasoning/domain_substitute.py index 752fea0b6..a76eea52e 100644 --- a/infinigen/core/constraints/reasoning/domain_substitute.py +++ b/infinigen/core/constraints/reasoning/domain_substitute.py @@ -5,35 +5,27 @@ # Authors: Alexander Raistrick from __future__ import annotations -import logging -import itertools -from dataclasses import dataclass, field import copy -import typing - -import numpy as np +import logging -from infinigen.core.constraints import constraint_language as cl from infinigen.core import tags as t + from .constraint_domain import Domain logger = logging.getLogger(__name__) + def domain_tag_substitute( - domain: Domain, - vartag: t.Variable, - subst_domain: Domain, - return_match=False + domain: Domain, vartag: t.Variable, subst_domain: Domain, return_match=False ) -> Domain: - - """Return concrete substitution of `domain`, where `subst_domain` must be satisfied + """Return concrete substitution of `domain`, where `subst_domain` must be satisfied whenever `subst_tag` was present in the original. 
""" assert isinstance(vartag, t.Variable), vartag - domain = copy.deepcopy(domain) # prevent modification of original - + domain = copy.deepcopy(domain) # prevent modification of original + o_match = vartag in domain.tags rd_sub, rd_matches = [], [] @@ -45,7 +37,7 @@ def domain_tag_substitute( if not (o_match or rd_match): return (domain, False) if return_match else domain - + domain.relations = [] for r, d in rd_sub: domain.add_relation(r, d) @@ -57,10 +49,11 @@ def domain_tag_substitute( return (domain, True) if return_match else domain + def substitute_all( dom: Domain, assignments: dict[t.Variable, Domain], ) -> Domain: for var, d in assignments.items(): dom = domain_tag_substitute(dom, var, d) - return dom \ No newline at end of file + return dom diff --git a/infinigen/core/constraints/reasoning/expr_equal.py b/infinigen/core/constraints/reasoning/expr_equal.py index 9fd4a2a35..b973ee7f7 100644 --- a/infinigen/core/constraints/reasoning/expr_equal.py +++ b/infinigen/core/constraints/reasoning/expr_equal.py @@ -8,6 +8,7 @@ from ..constraint_language.types import Node + @dataclasses.dataclass class FalseEqualityResult: n1: Node @@ -18,47 +19,55 @@ def __repr__(self) -> str: # default dataclass repr is too long c1 = self.n1.__class__.__name__ c2 = self.n2.__class__.__name__ - return f'{self.__class__.__name__}({c1}, {c2}, {repr(self.reason)})' + return f"{self.__class__.__name__}({c1}, {c2}, {repr(self.reason)})" def __bool__(self): return False -def expr_equal(n1: Node, n2: Node, name: str = None) -> bool | FalseEqualityResult: - """ An equality comparison operator for constraint Node expressions +def expr_equal(n1: Node, n2: Node, name: str = None) -> bool | FalseEqualityResult: + """An equality comparison operator for constraint Node expressions - Using the default Node == Node is unsafe since Nodes override == - in order to return another expression + Using the default Node == Node is unsafe since Nodes override == + in order to return another expression """ if not dataclasses.is_dataclass(n1) or not dataclasses.is_dataclass(n2): raise ValueError( - f'expr_equal {name=} called with non-dataclass {n1.__class__=} {n2.__class__=}.' - ' Expected all Node types to be dataclasses' + f"expr_equal {name=} called with non-dataclass {n1.__class__=} {n2.__class__=}." 
+ " Expected all Node types to be dataclasses" ) - + if name is None: name = n1.__class__.__name__ - + if type(n1) is not type(n2): - return FalseEqualityResult(n1, n2, f"Unequal types for {name}: {type(n1).__name__} != {type(n2).__name__}") + return FalseEqualityResult( + n1, + n2, + f"Unequal types for {name}: {type(n1).__name__} != {type(n2).__name__}", + ) n1_child_keys = [k for k, _ in n1.children()] n2_child_keys = [k for k, _ in n1.children()] n1_children = [v for _, v in n1.children()] n2_children = [v for _, v in n1.children()] - + if n1_child_keys != n2_child_keys: - return FalseEqualityResult(n1, n2, f'Unequal child keys for {name}: {n1_children}!={n2_children}') + return FalseEqualityResult( + n1, n2, f"Unequal child keys for {name}: {n1_children}!={n2_children}" + ) for f in dataclasses.fields(n1): v1 = getattr(n1, f.name) v2 = getattr(n2, f.name) if isinstance(v1, Node): - res = expr_equal(v1, v2, name=f'{name}.{f.name}') + res = expr_equal(v1, v2, name=f"{name}.{f.name}") if not res: return res elif v1 != v2: - return FalseEqualityResult(n1, n2, f'Unequal attr {repr(f.name)}, {v1} != {v2}') - - return True \ No newline at end of file + return FalseEqualityResult( + n1, n2, f"Unequal attr {repr(f.name)}, {v1} != {v2}" + ) + + return True diff --git a/infinigen/core/constraints/usage_lookup.py b/infinigen/core/constraints/usage_lookup.py index f2c0a83b1..2f9eaad2e 100644 --- a/infinigen/core/constraints/usage_lookup.py +++ b/infinigen/core/constraints/usage_lookup.py @@ -5,13 +5,14 @@ # Authors: Alexander Raistrick from collections import defaultdict + from infinigen.core import tags as t _factory_lookup: dict[type, set[t.Tag]] = None _tag_lookup: dict[t.Tag, set[type]] = None -def initialize_from_dict(d): +def initialize_from_dict(d): global _factory_lookup, _tag_lookup _factory_lookup = defaultdict(set) _tag_lookup = defaultdict(set) @@ -22,9 +23,11 @@ def initialize_from_dict(d): _factory_lookup[fac].add(tag) _tag_lookup[tag].add(fac) + def usages_of_factory(fac) -> set[t.Tag]: return _factory_lookup[fac].union({t.FromGenerator(fac)}) + def factories_for_usage(tags: set[t.Tag]): if not isinstance(tags, set): tags = [tags] @@ -32,17 +35,20 @@ def factories_for_usage(tags: set[t.Tag]): tags = list(tags) res = _tag_lookup[tags[0]] - for t in tags[1:]: - res.intersection_update(_tag_lookup[t]) + for tag in tags[1:]: + res.intersection_update(_tag_lookup[tag]) return res + def all_usage_tags(): return _tag_lookup.keys() + def all_factories(): return _factory_lookup.keys() + def has_usage(fac, tag): assert fac in _factory_lookup.keys(), fac assert tag in _tag_lookup.keys(), tag - return tag in _factory_lookup[fac] \ No newline at end of file + return tag in _factory_lookup[fac] diff --git a/infinigen/core/execute_tasks.py b/infinigen/core/execute_tasks.py index d4f8b4b7d..e6a6214b8 100644 --- a/infinigen/core/execute_tasks.py +++ b/infinigen/core/execute_tasks.py @@ -1,236 +1,69 @@ # Copyright (c) Princeton University. # This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. -import argparse -import ast +import logging import os -import random -import sys -import cProfile +import pickle import shutil -from pathlib import Path -import logging -from functools import partial -import pprint import time +import typing from collections import defaultdict -import pickle +from pathlib import Path -# ruff: noqa: F402 -os.environ["OPENCV_IO_ENABLE_OPENEXR"] = "1" # This must be done BEFORE import cv2. 
+# ruff: noqa: E402 +os.environ["OPENCV_IO_ENABLE_OPENEXR"] = "1" # This must be done BEFORE import cv2. # See https://github.com/opencv/opencv/issues/21326#issuecomment-1008517425 import bpy -import mathutils -from mathutils import Vector import gin -import numpy as np -from numpy.random import uniform, normal, randint -from tqdm import tqdm from frozendict import frozendict -from infinigen.terrain import Terrain - -from infinigen.core.placement import ( - particles, placement, density, - camera as cam_util, - split_in_view, - factory, - animation_policy, - instance_scatter, - detail, -) - -from infinigen.assets.scatters import ( - pebbles, grass, snow_layer, ground_leaves, ground_twigs, \ - chopped_trees, pinecone, fern, flowerplant, monocot, ground_mushroom, \ - slime_mold, moss, ivy, lichen, mushroom, decorative_plants, seashells, \ - pine_needle, seaweed, coral_reef, jellyfish, urchin -) - -from infinigen.assets.materials import ( - mountain, sand, water, atmosphere_light_haze, sandstone, cracked_ground, \ - soil, dirt, cobble_stone, chunkyrock, stone, lava, ice, mud, snow -) - -from infinigen.assets import ( - fluid, - cactus, - trees, - rocks, - creatures, - lighting, - weather, -) - +import infinigen.assets.scatters +from infinigen.core import init, surface +from infinigen.core.placement import camera as cam_util from infinigen.core.rendering.render import render_image from infinigen.core.rendering.resample import resample_scene -from infinigen.assets.monocot import kelp -from infinigen.core import surface, init - -from infinigen.core.util.organization import Task, Attributes, TerrainNames - -from infinigen.core.placement.split_in_view import split_inview - -import infinigen.assets.scatters -from infinigen.assets.scatters.utils.selection import scatter_lower, scatter_upward - -from infinigen.core.util import ( - blender as butil, - logging as logging_util, - pipeline, - exporting -) -from infinigen.tools.export import export_scene, triangulate_meshes -from infinigen.core.util.math import FixedSeed, int_hash -from infinigen.core.util.logging import Timer, save_polycounts, create_text_file -from infinigen.core.util.pipeline import RandomStageExecutor -from infinigen.core.util.random import sample_registry from infinigen.core.tagging import tag_system +from infinigen.core.util import blender as butil +from infinigen.core.util import exporting +from infinigen.core.util.logging import Timer, create_text_file, save_polycounts +from infinigen.core.util.math import int_hash +from infinigen.core.util.organization import Task +from infinigen.terrain import Terrain +from infinigen.tools.export import export_scene, triangulate_meshes - logger = logging.getLogger(__name__) -@gin.configurable -def populate_scene( - output_folder, - scene_seed, - **params -): - p = RandomStageExecutor(scene_seed, output_folder, params) - camera = [cam_util.get_camera(i, j) for i, j in cam_util.get_cameras_ids()] - - season = p.run_stage('choose_season', trees.random_season, use_chance=False, default=[]) - - fire_cache_system = fluid.FireCachingSystem() if params.get('cached_fire') else None - - populated = {} - populated['trees'] = p.run_stage('populate_trees', use_chance=False, default=[], - fn=lambda: placement.populate_all(trees.TreeFactory, camera, season=season, vis_cull=4))#, - #meshing_camera=camera, adapt_mesh_method='subdivide', cam_meshing_max_dist=8)) - populated['boulders'] = p.run_stage('populate_boulders', use_chance=False, default=[], - fn=lambda: placement.populate_all(rocks.BoulderFactory, 
camera, vis_cull=3))#, - #meshing_camera=camera, adapt_mesh_method='subdivide', cam_meshing_max_dist=8)) - populated['bushes'] = p.run_stage('populate_bushes', use_chance=False, - fn=lambda: placement.populate_all(trees.BushFactory, camera, vis_cull=1, adapt_mesh_method='subdivide')) - p.run_stage('populate_kelp', use_chance=False, - fn=lambda: placement.populate_all(kelp.KelpMonocotFactory, camera, vis_cull=5)) - populated['cactus'] = p.run_stage('populate_cactus', use_chance=False, - fn=lambda: placement.populate_all(cactus.CactusFactory, camera, vis_cull=6)) - p.run_stage('populate_clouds', use_chance=False, - fn=lambda: placement.populate_all(weather.CloudFactory, camera, dist_cull=None, vis_cull=None)) - p.run_stage('populate_glowing_rocks', use_chance=False, - fn=lambda: placement.populate_all(rocks.GlowingRocksFactory, camera, dist_cull=None, vis_cull=None)) - - populated['cached_fire_trees'] = p.run_stage('populate_cached_fire_trees', use_chance=False, default=[], - fn=lambda: placement.populate_all(fluid.CachedTreeFactory, camera, season=season, vis_cull=4, dist_cull=70, cache_system=fire_cache_system)) - populated['cached_fire_boulders'] = p.run_stage('populate_cached_fire_boulders', use_chance=False, default=[], - fn=lambda: placement.populate_all(fluid.CachedBoulderFactory, camera, vis_cull=3, dist_cull=70, cache_system=fire_cache_system)) - populated['cached_fire_bushes'] = p.run_stage('populate_cached_fire_bushes', use_chance=False, - fn=lambda: placement.populate_all(fluid.CachedBushFactory, camera, vis_cull=1, adapt_mesh_method='subdivide', cache_system=fire_cache_system)) - populated['cached_fire_cactus'] = p.run_stage('populate_cached_fire_cactus', use_chance=False, - fn=lambda: placement.populate_all(fluid.CachedCactusFactory, camera, vis_cull=6, cache_system=fire_cache_system)) - - grime_selection_funcs = { - 'trees': scatter_lower, - 'boulders': scatter_upward, - } - grime_types = { - 'slime_mold': slime_mold.SlimeMold, - 'lichen': lichen.Lichen, - 'ivy': ivy.Ivy, - 'mushroom': ground_mushroom.Mushrooms, - 'moss': moss.MossCover - } - def apply_grime(grime_type, surface_cls): - surface_fac = surface_cls() - for target_type, results, in populated.items(): - selection_func = grime_selection_funcs.get(target_type, None) - for fac_seed, fac_pholders, fac_assets in results: - if len(fac_pholders) == 0: - continue - for inst_seed, obj in fac_assets: - with FixedSeed(int_hash((grime_type, fac_seed, inst_seed))): - p_k = f'{grime_type}_on_{target_type}_per_instance_chance' - if uniform() > params.get(p_k, 0.4): - continue - logger.debug(f'Applying {surface_fac} on {obj}') - surface_fac.apply(obj, selection=selection_func) - for grime_type, surface_cls in grime_types.items(): - p.run_stage(grime_type, lambda: apply_grime(grime_type, surface_cls)) - - def apply_snow_layer(surface_cls): - surface_fac = surface_cls() - for target_type, results, in populated.items(): - selection_func = grime_selection_funcs.get(target_type, None) - for fac_seed, fac_pholders, fac_assets in results: - if len(fac_pholders) == 0: - continue - for inst_seed, obj in fac_assets: - tmp = obj.users_collection[0].hide_viewport - obj.users_collection[0].hide_viewport = False - surface_fac.apply(obj, selection=selection_func) - obj.users_collection[0].hide_viewport = tmp - p.run_stage("snow_layer", lambda: apply_snow_layer(snow_layer.Snowlayer)) - - creature_facs = { - 'beetles': creatures.BeetleFactory, - 'bird': creatures.BirdFactory, - 'carnivore': creatures.CarnivoreFactory, - 'crab': 
creatures.CrabFactory, - 'crustacean': creatures.CrustaceanFactory, - 'dragonfly': creatures.DragonflyFactory, - 'fish': creatures.FishFactory, - 'flyingbird': creatures.FlyingBirdFactory, - 'herbivore': creatures.HerbivoreFactory, - 'snake': creatures.SnakeFactory, - } - for k, fac in creature_facs.items(): - p.run_stage(f'populate_{k}', use_chance=False, - fn=lambda: placement.populate_all(fac, camera=None)) - - - fire_warmup = params.get('fire_warmup', 50) - simulation_duration = bpy.context.scene.frame_end - bpy.context.scene.frame_start + fire_warmup - - def set_fire(assets): - objs = [o for *_, a in assets for _, o in a] - with butil.EnableParentCollections(objs): - fluid.set_fire_to_assets( - assets, - bpy.context.scene.frame_start-fire_warmup, - simulation_duration, - output_folder - ) - - p.run_stage('trees_fire_on_the_fly', set_fire, populated['trees'], prereq='populate_trees') - p.run_stage('bushes_fire_on_the_fly', set_fire, populated['bushes'], prereq='populate_bushes') - p.run_stage('boulders_fire_on_the_fly', set_fire, populated['boulders'], prereq='populate_boulders') - p.run_stage('cactus_fire_on_the_fly', set_fire, populated['cactus'], prereq='populate_cactus') - - p.save_results(output_folder/'pipeline_fine.csv') def get_scene_tag(name): try: - o = next(o for o in bpy.data.objects if o.name.startswith(f'{name}=')) - return o.name.split('=')[-1].strip('\'\"') + o = next(o for o in bpy.data.objects if o.name.startswith(f"{name}=")) + return o.name.split("=")[-1].strip("'\"") except StopIteration: return None + @gin.configurable -def render(scene_seed, output_folder, camera_id, render_image_func=render_image, resample_idx=None, hide_water = False): +def render( + scene_seed, + output_folder, + camera_id, + render_image_func=render_image, + resample_idx=None, + hide_water=False, +): if hide_water and "water_fine" in bpy.data.objects: logger.info("Hiding water fine") bpy.data.objects["water_fine"].hide_render = True - bpy.data.objects['water_fine'].hide_viewport = True + bpy.data.objects["water_fine"].hide_viewport = True if resample_idx is not None and resample_idx != 0: resample_scene(int_hash((scene_seed, resample_idx))) - with Timer('Render Frames'): + with Timer("Render Frames"): render_image_func(frames_folder=Path(output_folder), camera_id=camera_id) + @gin.configurable def save_meshes(scene_seed, output_folder, frame_range, resample_idx=False): - if resample_idx is not None and resample_idx > 0: resample_scene(int_hash((scene_seed, resample_idx))) @@ -244,8 +77,7 @@ def save_meshes(scene_seed, output_folder, frame_range, resample_idx=False): previous_frame_mesh_id_mapping = frozendict() current_frame_mesh_id_mapping = defaultdict(dict) - for frame_idx in range(int(frame_range[0]), int(frame_range[1]+2)): - + for frame_idx in range(int(frame_range[0]), int(frame_range[1] + 2)): bpy.context.scene.frame_set(frame_idx) bpy.context.view_layer.update() frame_info_folder = Path(output_folder) / f"frame_{frame_idx:04d}" @@ -253,53 +85,78 @@ def save_meshes(scene_seed, output_folder, frame_range, resample_idx=False): logger.info(f"Working on frame {frame_idx}") exporting.save_obj_and_instances( - frame_info_folder / "mesh", - previous_frame_mesh_id_mapping, - current_frame_mesh_id_mapping + frame_info_folder / "mesh", + previous_frame_mesh_id_mapping, + current_frame_mesh_id_mapping, ) cam_util.save_camera_parameters( camera_ids=cam_util.get_cameras_ids(), - output_folder=frame_info_folder / "cameras", - frame=frame_idx + output_folder=frame_info_folder / "cameras", + 
frame=frame_idx, ) previous_frame_mesh_id_mapping = frozendict(current_frame_mesh_id_mapping) current_frame_mesh_id_mapping.clear() + def validate_version(scene_version): - if scene_version is None or scene_version.split('.')[:-1] != infinigen.__version__.split('.')[:-1]: + if ( + scene_version is None + or scene_version.split(".")[:-1] != infinigen.__version__.split(".")[:-1] + ): raise ValueError( - f'infinigen_examples/generate_nature.py {infinigen.__version__=} attempted to load a scene created by version {scene_version=}') + f"infinigen_examples/generate_nature.py {infinigen.__version__=} attempted to load a scene created by version {scene_version=}" + ) if scene_version != infinigen.__version__: - logger.warning(f'{infinigen.__version__=} has minor version mismatch with {scene_version=}') + logger.warning( + f"{infinigen.__version__=} has minor version mismatch with {scene_version=}" + ) + @gin.configurable def group_collections(config): - for config in config: # Group collections before fine runs - butil.group_in_collection([o for o in bpy.data.objects if o.name.startswith(f'{config["name"]}:')], config["name"]) - butil.group_toplevel_collections(config['name'], hide_viewport=config['hide_viewport'], hide_render=config['hide_render']) + for config in config: # Group collections before fine runs + butil.group_in_collection( + [o for o in bpy.data.objects if o.name.startswith(f'{config["name"]}:')], + config["name"], + ) + butil.group_toplevel_collections( + config["name"], + hide_viewport=config["hide_viewport"], + hide_render=config["hide_render"], + ) + @gin.configurable def execute_tasks( - compose_scene_func, - input_folder, output_folder, - task, scene_seed, - frame_range, camera_id, - resample_idx=None, - output_blend_name="scene.blend", - generate_resolution=(1280,720), - fps=24, + compose_scene_func: typing.Callable, + populate_scene_func: typing.Callable, + input_folder: Path, + output_folder: Path, + task: str, + scene_seed: int, + frame_range: tuple[int], + camera_id: tuple[int], + resample_idx: int = None, + output_blend_name: str = "scene.blend", + generate_resolution=(1280, 720), + fps: int = 24, reset_assets=True, dryrun=False, - optimize_terrain_diskusage=False + optimize_terrain_diskusage=False, ): if input_folder != output_folder: if reset_assets: - if os.path.islink(output_folder/"assets"): - os.unlink(output_folder/"assets") - elif (output_folder/"assets").exists(): - shutil.rmtree(output_folder/"assets") - if (not os.path.islink(output_folder/"assets")) and (not (output_folder/"assets").exists()) and input_folder is not None and (input_folder/"assets").exists(): - os.symlink(input_folder/"assets", output_folder/"assets") + if os.path.islink(output_folder / "assets"): + os.unlink(output_folder / "assets") + elif (output_folder / "assets").exists(): + shutil.rmtree(output_folder / "assets") + if ( + (not os.path.islink(output_folder / "assets")) + and (not (output_folder / "assets").exists()) + and input_folder is not None + and (input_folder / "assets").exists() + ): + os.symlink(input_folder / "assets", output_folder / "assets") # in this way, even coarse task can have input_folder to have pregenerated on-the-fly assets (e.g., in last run) to speed up developing if dryrun: @@ -307,15 +164,19 @@ def execute_tasks( return if Task.Coarse not in task and task != Task.FineTerrain: - with Timer('Reading input blendfile'): - bpy.ops.wm.open_mainfile(filepath=str(input_folder / 'scene.blend')) + with Timer("Reading input blendfile"): + 
bpy.ops.wm.open_mainfile(filepath=str(input_folder / "scene.blend")) tag_system.load_tag(path=str(input_folder / "MaskTag.json")) butil.approve_all_drivers() - + if frame_range[1] < frame_range[0]: - raise ValueError(f'{frame_range=} is invalid, frame range must be nonempty. Blender end frame is INCLUSIVE') + raise ValueError( + f"{frame_range=} is invalid, frame range must be nonempty. Blender end frame is INCLUSIVE" + ) - logger.info(f'Processing frames {frame_range[0]} through {frame_range[1]} inclusive') + logger.info( + f"Processing frames {frame_range[0]} through {frame_range[1]} inclusive" + ) bpy.context.scene.frame_start = int(frame_range[0]) bpy.context.scene.frame_end = int(frame_range[1]) bpy.context.scene.frame_set(int(frame_range[0])) @@ -326,104 +187,115 @@ def execute_tasks( surface.registry.initialize_from_gin() init.configure_blender() - + if Task.Coarse in task: butil.clear_scene(targets=[bpy.data.objects]) - butil.spawn_empty(f'{infinigen.__version__=}') + butil.spawn_empty(f"{infinigen.__version__=}") info = compose_scene_func(output_folder, scene_seed) - outpath = output_folder/"assets" + outpath = output_folder / "assets" outpath.mkdir(exist_ok=True) - with open(outpath/"info.pickle", 'wb') as f: + with open(outpath / "info.pickle", "wb") as f: pickle.dump(info, f, protocol=pickle.HIGHEST_PROTOCOL) - - camera = cam_util.set_active_camera(*camera_id) + + cam_util.set_active_camera(*camera_id) group_collections() - if Task.Populate in task: - populate_scene(output_folder, scene_seed) + if Task.Populate in task and populate_scene_func is not None: + populate_scene_func(output_folder, scene_seed) - need_terrain_processing = 'OpaqueTerrain' in bpy.data.objects + need_terrain_processing = "OpaqueTerrain" in bpy.data.objects if Task.FineTerrain in task and need_terrain_processing: - with open(output_folder/"assets"/"info.pickle", 'rb') as f: + with open(output_folder / "assets" / "info.pickle", "rb") as f: info = pickle.load(f) - terrain = Terrain(scene_seed, surface.registry, task=task, on_the_fly_asset_folder=output_folder/"assets", height_offset=info["height_offset"], whole_bbox=info["whole_bbox"]) + terrain = Terrain( + scene_seed, + surface.registry, + task=task, + on_the_fly_asset_folder=output_folder / "assets", + height_offset=info["height_offset"], + whole_bbox=info["whole_bbox"], + ) cameras = [cam_util.get_camera(i, j) for i, j in cam_util.get_cameras_ids()] - terrain.fine_terrain(output_folder, cameras=cameras, optimize_terrain_diskusage=optimize_terrain_diskusage) + terrain.fine_terrain( + output_folder, + cameras=cameras, + optimize_terrain_diskusage=optimize_terrain_diskusage, + ) group_collections() if input_folder is not None and input_folder != output_folder: for mesh in os.listdir(input_folder): - if (mesh.endswith(".glb") or mesh.endswith(".b_displacement.npy")) and not os.path.islink(output_folder / mesh): + if ( + mesh.endswith(".glb") or mesh.endswith(".b_displacement.npy") + ) and not os.path.islink(output_folder / mesh): os.symlink(input_folder / mesh, output_folder / mesh) if Task.Coarse in task or Task.Populate in task or Task.FineTerrain in task: + with Timer("Writing output blendfile"): + logging.info( + f"Writing output blendfile to {output_folder / output_blend_name}" + ) + if optimize_terrain_diskusage and task == [Task.FineTerrain]: + os.symlink( + input_folder / output_blend_name, output_folder / output_blend_name + ) + else: + bpy.ops.wm.save_mainfile( + filepath=str(output_folder / output_blend_name) + ) - with Timer(f'Writing output 
blendfile'): - logging.info(f'Writing output blendfile to {output_folder / output_blend_name}') - if optimize_terrain_diskusage and task == [Task.FineTerrain]: - os.symlink(input_folder / output_blend_name, output_folder / output_blend_name) - else: - bpy.ops.wm.save_mainfile(filepath=str(output_folder / output_blend_name)) - tag_system.save_tag(path=str(output_folder / "MaskTag.json")) - with (output_folder/ "version.txt").open('w') as f: + with (output_folder / "version.txt").open("w") as f: f.write(f"{infinigen.__version__}\n") - with (output_folder/'polycounts.txt').open('w') as f: + with (output_folder / "polycounts.txt").open("w") as f: save_polycounts(f) - for col in bpy.data.collections['unique_assets'].children: + for col in bpy.data.collections["unique_assets"].children: col.hide_viewport = False if need_terrain_processing and ( - Task.Render in task - or Task.GroundTruth in task - or Task.MeshSave in task + Task.Render in task or Task.GroundTruth in task or Task.MeshSave in task ): terrain = Terrain( - scene_seed, - surface.registry, + scene_seed, + surface.registry, task=task, - on_the_fly_asset_folder=output_folder/"assets" + on_the_fly_asset_folder=output_folder / "assets", ) if optimize_terrain_diskusage: terrain.load_glb(output_folder) if Task.Render in task or Task.GroundTruth in task: render( - scene_seed, - output_folder=output_folder, - camera_id=camera_id, - resample_idx=resample_idx + scene_seed, + output_folder=output_folder, + camera_id=camera_id, + resample_idx=resample_idx, ) - + if Task.Export in task: export_scene(input_folder / output_blend_name, output_folder) if Task.MeshSave in task: save_meshes( - scene_seed, - output_folder=output_folder, - frame_range=frame_range, + scene_seed, + output_folder=output_folder, + frame_range=frame_range, ) -def main( - input_folder, - output_folder, - scene_seed, - task, - task_uniqname, - **kwargs -): - - version_req = ['3.6.0'] - assert bpy.app.version_string in version_req, f'You are using blender={bpy.app.version_string} which is ' \ - f'not supported. Please use {version_req}' - logger.info(f'infinigen version {infinigen.__version__}') + +def main(input_folder, output_folder, scene_seed, task, task_uniqname, **kwargs): + version_req = ["3.6.0"] + assert bpy.app.version_string in version_req, ( + f"You are using blender={bpy.app.version_string} which is " + f"not supported. 
Please use {version_req}" + ) + logger.info(f"infinigen version {infinigen.__version__}") logger.info(f"CUDA_VISIBLE_DEVICES={os.environ.get('CUDA_VISIBLE_DEVICES')}") if input_folder is not None: @@ -434,12 +306,18 @@ def main( if task_uniqname is not None: create_text_file(filename=f"START_{task_uniqname}") - with Timer('MAIN TOTAL'): + with Timer("MAIN TOTAL"): execute_tasks( - input_folder=input_folder, output_folder=output_folder, - task=task, scene_seed=scene_seed, **kwargs + input_folder=input_folder, + output_folder=output_folder, + task=task, + scene_seed=scene_seed, + **kwargs, ) if task_uniqname is not None: create_text_file(filename=f"FINISH_{task_uniqname}") - create_text_file(filename=f"operative_gin_{task_uniqname}.txt", text=gin.operative_config_str()) + create_text_file( + filename=f"operative_gin_{task_uniqname}.txt", + text=gin.operative_config_str(), + ) diff --git a/infinigen/core/generator.py b/infinigen/core/generator.py new file mode 100644 index 000000000..cc2e3d32c --- /dev/null +++ b/infinigen/core/generator.py @@ -0,0 +1,29 @@ +import typing + + +class Generator: + def __init__(self, distribution: typing.Callable): + self.distribution = distribution + self.params = distribution() + + def __repr__(self): + return f"{self.__class__.__name__}({self.distribution.__name__})" + + def __getattr__(self, name: str) -> None: + if name == "generate": + raise AttributeError( + f"Callers should not access {name} directly, use the __call__ method instead" + ) + + return object.__getattribute__(self, name) + + def __call__(self, *args, **kwargs): + generate = object.__getattribute__( + self, "generate" + ) # bypass the __getattr_ restriction, only for this call + return generate(*args, **kwargs) + + def generate(self, *args, **kwargs): + raise NotImplementedError( + f"Subclasses of {self.__class__.__name__} must implement the generate method" + ) diff --git a/infinigen/core/init.py b/infinigen/core/init.py index c8aff5c3c..a3e4d77a8 100644 --- a/infinigen/core/init.py +++ b/infinigen/core/init.py @@ -2,92 +2,84 @@ # This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory # of this source tree. -import bpy - -import argparse import ast +import logging import os import random import sys -import cProfile -import shutil from pathlib import Path -import logging -from functools import partial -import pprint -from collections import defaultdict -# ruff: noqa: F402 -os.environ["OPENCV_IO_ENABLE_OPENEXR"] = "1" # This must be done BEFORE import cv2. +import bpy + +# ruff: noqa: E402 +os.environ["OPENCV_IO_ENABLE_OPENEXR"] = "1" # This must be done BEFORE import cv2. 
# See https://github.com/opencv/opencv/issues/21326#issuecomment-1008517425 import gin import numpy as np from numpy.random import randint +import infinigen +from infinigen.core.util.logging import LogLevel, Suppress from infinigen.core.util.math import int_hash from infinigen.core.util.organization import Task -from infinigen.core.util.logging import Suppress, LogLevel logger = logging.getLogger(__name__) CYCLES_GPUTYPES_PREFERENCE = [ - # key must be a valid cycles device_type # ordering indicate preference - earlier device types will be used over later if both are available # - e.g most OPTIX gpus will also show up as a CUDA gpu, but we will prefer to use OPTIX due to this list's ordering - - 'OPTIX', - 'CUDA', - 'METAL', # untested - 'HIP', # untested - 'ONEAPI', # untested - 'CPU', + "OPTIX", + "CUDA", + "METAL", # untested + "HIP", # untested + "ONEAPI", # untested + "CPU", ] + def parse_args_blender(parser): - if '--' in sys.argv: - # Running using a blender commandline python. + if "--" in sys.argv: + # Running using a blender commandline python. # args before '--' are intended for blender not infinigen - argvs = sys.argv[sys.argv.index('--')+1:] + argvs = sys.argv[sys.argv.index("--") + 1 :] return parser.parse_args(argvs) else: return parser.parse_args() - -def parse_seed(seed, task=None): +def parse_seed(seed, task=None): if seed is None: if task is not None and Task.Coarse not in task: raise ValueError( - 'Running tasks on an already generated scene, you need to specify --seed or results will' - ' not be view-consistent') - return randint(1e7), 'chosen at random' + "Running tasks on an already generated scene, you need to specify --seed or results will" + " not be view-consistent" + ) + return randint(1e7), "chosen at random" # WARNING: Do not add support for decimal numbers here, it will cause ambiguity, as some hex numbers are valid decimals try: - return int(seed, 16), 'parsed as hexadecimal' + return int(seed, 16), "parsed as hexadecimal" except ValueError: pass - return int_hash(seed), 'hashed string to integer' + return int_hash(seed), "hashed string to integer" + def apply_scene_seed(seed, task=None): scene_seed, reason = parse_seed(seed, task) - logger.info(f'Converted {seed=} to {scene_seed=}, {reason}') - gin.constant('OVERALL_SEED', scene_seed) + logger.info(f"Converted {seed=} to {scene_seed=}, {reason}") + gin.constant("OVERALL_SEED", scene_seed) random.seed(scene_seed) np.random.seed(scene_seed) return scene_seed -def sanitize_override(override: list): - if ( - ('=' in override) and - not any((c in override) for c in "\"'[]") - ): - k, v = override.split('=') +def sanitize_override(override: list): + if ("=" in override) and not any((c in override) for c in "\"'[]"): + k, v = override.split("=") try: ast.literal_eval(v) except (ValueError, SyntaxError): @@ -96,34 +88,33 @@ def sanitize_override(override: list): return override -def repo_root(): - return Path(__file__).parent.parent.parent def contained_stems(filenames: list[str], folder: Path): assert folder.exists() names = [p.stem for p in folder.iterdir()] return {s.stem in names or s.name in names for s in map(Path, filenames)} + def resolve_folder_maybe_relative(folder, root): folder = Path(folder) if folder.exists(): return folder - folder_rel = root/folder + folder_rel = root / folder if folder_rel.exists(): return folder_rel - raise FileNotFoundError(f'Could not find {folder} or {folder_rel}') + raise FileNotFoundError(f"Could not find {folder} or {folder_rel}") + @gin.configurable def apply_gin_configs( - 
configs_folder: Path, + config_folders: Path | list[Path], configs: list[str] = None, - overrides: list[str] = None, - skip_unknown: bool = False, + overrides: list[str] = None, + skip_unknown: bool = False, finalize_config=False, mandatory_folders: list[Path] = None, - mutually_exclusive_folders: list[Path] = None + mutually_exclusive_folders: list[Path] = None, ): - """ Apply gin configuration files and bindings. @@ -141,7 +132,7 @@ def apply_gin_configs( For each folder in the list, at least one config file must be loaded from that folder. mutually_exclusive_folders : list[Path] For each folder in the list, at most one config file must be loaded from that folder. - + """ if configs is None: @@ -152,39 +143,42 @@ def apply_gin_configs( mandatory_folders = [] if mutually_exclusive_folders is None: mutually_exclusive_folders = [] - configs_folder = Path(configs_folder) - root = repo_root() + if not isinstance(config_folders, list): + config_folders = [config_folders] - configs_folder_rel = root/configs_folder - if configs_folder_rel.exists(): - configs_folder = configs_folder_rel - gin.add_config_file_search_path(configs_folder) - elif configs_folder.exists(): - gin.add_config_file_search_path(configs_folder) - else: - raise FileNotFoundError(f'Couldnt find {configs_folder} or {configs_folder_rel}') - - search_paths = [configs_folder, root, Path('.')] + root = infinigen.repo_root() def find_config(p): p = Path(p) - for folder in search_paths: - for file in folder.glob('**/*.gin'): + for folder_rel in config_folders: + folder = root / folder_rel + + if not folder.exists(): + raise ValueError( + f"{apply_gin_configs.__name__} got bad {folder_rel=}, {folder=} did not exist" + ) + + for file in folder.glob("**/*.gin"): if file.stem == p.stem: + logger.debug(f"Resolved {p} to file {file}") return file - raise FileNotFoundError(f'Could not find {p} or {p.stem} in any of {search_paths}') - - configs = [find_config(g) for g in ['base.gin'] + configs] + logger.debug(f"Could not find {p} in {folder}") + + raise FileNotFoundError( + f"Could not find {p} or {p.stem} in any of {config_folders}" + ) + + configs = [find_config(g) for g in ["base.gin"] + configs] overrides = [sanitize_override(o) for o in overrides] for mandatory_folder in mandatory_folders: mandatory_folder = resolve_folder_maybe_relative(mandatory_folder, root) if not contained_stems(configs, mandatory_folder): raise FileNotFoundError( - f'At least one config file must be loaded from {mandatory_folder} to avoid unexpected behavior' + f"At least one config file must be loaded from {mandatory_folder} to avoid unexpected behavior" ) - + for mutex_folder in mutually_exclusive_folders: mutex_folder = resolve_folder_maybe_relative(mutex_folder, root) stems = {s.stem for s in mutex_folder.iterdir()} @@ -192,17 +186,18 @@ def find_config(p): both = stems.intersection(config_stems) if len(both) > 1: raise ValueError( - f'At most one config file must be loaded from {mutex_folder} to avoid unexpected behavior, instead got {both=}' + f"At most one config file must be loaded from {mutex_folder} to avoid unexpected behavior, instead got {both=}" ) - + with LogLevel(logger=logging.getLogger(), level=logging.WARNING): gin.parse_config_files_and_bindings( - configs, + configs, bindings=overrides, skip_unknown=skip_unknown, - finalize_config=finalize_config + finalize_config=finalize_config, ) + def import_addons(names): for name in names: try: @@ -211,30 +206,32 @@ def import_addons(names): except Exception: logger.warning(f'Could not load addon 
"{name}"') + @gin.configurable def configure_render_cycles( - # supplied by gin.config min_samples, num_samples, time_limit, adaptive_threshold, exposure, - denoise + denoise, ): - bpy.context.scene.render.engine = 'CYCLES' + bpy.context.scene.render.engine = "CYCLES" # For now, denoiser is always turned on, but the _used_ bpy.context.scene.cycles.use_denoising = denoise if denoise: try: - bpy.context.scene.cycles.denoiser = 'OPTIX' + bpy.context.scene.cycles.denoiser = "OPTIX" except Exception as e: logger.warning(f"Cannot use OPTIX denoiser {e}") - bpy.context.scene.cycles.samples = num_samples # i.e. infinity + bpy.context.scene.cycles.samples = num_samples # i.e. infinity bpy.context.scene.cycles.adaptive_min_samples = min_samples - bpy.context.scene.cycles.adaptive_threshold = adaptive_threshold # i.e. noise threshold + bpy.context.scene.cycles.adaptive_threshold = ( + adaptive_threshold # i.e. noise threshold + ) bpy.context.scene.cycles.time_limit = time_limit bpy.context.scene.cycles.film_exposure = exposure bpy.context.scene.cycles.volume_step_rate = 0.1 @@ -242,19 +239,17 @@ def configure_render_cycles( bpy.context.scene.cycles.volume_max_steps = 32 bpy.context.scene.cycles.volume_bounces = 4 + @gin.configurable -def configure_cycles_devices( - use_gpu=True -): - +def configure_cycles_devices(use_gpu=True): if use_gpu is False: - logger.info(f'Render will use CPU-only due to {use_gpu=}') - bpy.context.scene.cycles.device = 'CPU' + logger.info(f"Render will use CPU-only due to {use_gpu=}") + bpy.context.scene.cycles.device = "CPU" return - assert bpy.context.scene.render.engine == 'CYCLES' - bpy.context.scene.cycles.device = 'GPU' - prefs = bpy.context.preferences.addons['cycles'].preferences + assert bpy.context.scene.render.engine == "CYCLES" + bpy.context.scene.cycles.device = "GPU" + prefs = bpy.context.preferences.addons["cycles"].preferences # Necessary to "remind" cycles that the devices exist? Not sure. Without this no devices are found. 
for dt in prefs.get_device_types(bpy.context): @@ -265,19 +260,20 @@ def configure_cycles_devices( types = list(d.type for d in prefs.devices) types = sorted(types, key=CYCLES_GPUTYPES_PREFERENCE.index) - logger.info(f'Available devices have {types=}') + logger.info(f"Available devices have {types=}") use_device_type = types[0] - if use_device_type == 'CPU': - logger.warning(f'Render will use CPU-only, only found {types=}') - bpy.context.scene.cycles.device = 'CPU' + if use_device_type == "CPU": + logger.warning(f"Render will use CPU-only, only found {types=}") + bpy.context.scene.cycles.device = "CPU" return - bpy.context.preferences.addons['cycles'].preferences.compute_device_type = use_device_type + bpy.context.preferences.addons[ + "cycles" + ].preferences.compute_device_type = use_device_type use_devices = [d for d in prefs.devices if d.type == use_device_type] - - logger.info(f'Cycles will use {use_device_type=}, {len(use_devices)=}') + logger.info(f"Cycles will use {use_device_type=}, {len(use_devices)=}") for d in prefs.devices: d.use = False @@ -286,26 +282,25 @@ def configure_cycles_devices( return use_devices + @gin.configurable def configure_blender( - render_engine='CYCLES', + render_engine="CYCLES", motion_blur=False, motion_blur_shutter=0.5, ): - bpy.context.preferences.system.scrollback = 0 + bpy.context.preferences.system.scrollback = 0 bpy.context.preferences.edit.undo_steps = 0 - if render_engine == 'CYCLES': + if render_engine == "CYCLES": configure_render_cycles() configure_cycles_devices() else: - raise ValueError(f'Unrecognized {render_engine=}') + raise ValueError(f"Unrecognized {render_engine=}") bpy.context.scene.render.use_motion_blur = motion_blur - if motion_blur: - bpy.context.scene.cycles.motion_blur_position = 'START' + if motion_blur: + bpy.context.scene.cycles.motion_blur_position = "START" bpy.context.scene.render.motion_blur_shutter = motion_blur_shutter - import_addons(['ant_landscape', 'real_snow']) - - \ No newline at end of file + import_addons(["ant_landscape", "real_snow"]) diff --git a/infinigen/core/nodes/__init__.py b/infinigen/core/nodes/__init__.py index e6573d44c..42ce303fe 100644 --- a/infinigen/core/nodes/__init__.py +++ b/infinigen/core/nodes/__init__.py @@ -1,2 +1,2 @@ -from .node_wrangler import NodeWrangler from .node_info import Nodes +from .node_wrangler import NodeWrangler diff --git a/infinigen/core/nodes/compatibility.py b/infinigen/core/nodes/compatibility.py index ff52e73e9..95107ebd2 100644 --- a/infinigen/core/nodes/compatibility.py +++ b/infinigen/core/nodes/compatibility.py @@ -11,60 +11,97 @@ logger = logging.getLogger(__name__) + def map_dict_keys(d, m): for m_from, m_to in m.items(): if m_from not in d: continue if m_to in d: - raise ValueError(f'{m_from} would map to {m_to} but {d} already contains that key') + raise ValueError( + f"{m_from} would map to {m_to} but {d} already contains that key" + ) d[m_to] = d.pop(m_from) return d + def make_virtual_mixrgb(nw, orig_type, input_args, attrs, input_kwargs): - attrs['data_type'] = 'RGBA' + attrs["data_type"] = "RGBA" - key_mapping = OrderedDict({'Fac': 'Factor', 'Color1': 'A', 'Color2': 'B'}) + key_mapping = OrderedDict({"Fac": "Factor", "Color1": "A", "Color2": "B"}) map_dict_keys(input_kwargs, key_mapping) # any previous uses of input_args are no longer valid, since the node has lots of hidden type-based sockets now # we will convert any input_args present into input_kwargs instead for k, a in zip(key_mapping.values(), input_args): if k in input_kwargs: - raise 
ValueError(f'In {make_virtual_mixrgb}, encountered {orig_type} with conflicting {len(input_args)=} and {input_kwargs.keys()}') + raise ValueError( + f"In {make_virtual_mixrgb}, encountered {orig_type} with conflicting {len(input_args)=} and {input_kwargs.keys()}" + ) input_kwargs[k] = a input_args = [] - return nw.new_node(node_type=Nodes.Mix, input_args=input_args, - attrs=attrs, input_kwargs=input_kwargs, compat_mode=False) + return nw.new_node( + node_type=Nodes.Mix, + input_args=input_args, + attrs=attrs, + input_kwargs=input_kwargs, + compat_mode=False, + ) + def make_virtual_transfer_attribute(nw, orig_type, input_args, attrs, input_kwargs): if attrs is None: - raise ValueError(f'{attrs=} in make_virtual_transfer_attribute, cannot infer correct node type mapping') + raise ValueError( + f"{attrs=} in make_virtual_transfer_attribute, cannot infer correct node type mapping" + ) - if attrs['mapping'] == 'NEAREST_FACE_INTERPOLATED': + if attrs["mapping"] == "NEAREST_FACE_INTERPOLATED": mapped_type = Nodes.SampleNearestSurface - map_dict_keys(input_kwargs, {'Source': 'Mesh', 'Attribute': 'Value', 'Source Position': 'Sample Position'}) - elif attrs['mapping'] == 'NEAREST': - raise ValueError("Compatibility mapping for mode='NEAREST' is not supported, please modify the code to resolve this outdated instance of TransferAttribute") - elif attrs['mapping'] == 'INDEX': + map_dict_keys( + input_kwargs, + { + "Source": "Mesh", + "Attribute": "Value", + "Source Position": "Sample Position", + }, + ) + elif attrs["mapping"] == "NEAREST": + raise ValueError( + "Compatibility mapping for mode='NEAREST' is not supported, please modify the code to resolve this outdated instance of TransferAttribute" + ) + elif attrs["mapping"] == "INDEX": mapped_type = Nodes.SampleIndex - map_dict_keys(input_kwargs, {'Source': 'Geometry', 'Attribute': 'Value'}) + map_dict_keys(input_kwargs, {"Source": "Geometry", "Attribute": "Value"}) else: assert False - logger.warning(f'Converting request for Nodes.TransferAttribute to {mapped_type}' - f'to ensure compatibility with bl3.3 code, but this is unsafe. Please update to avoid {Nodes.TransferAttribute}') - - return nw.new_node(node_type=mapped_type, input_args=input_args, - attrs=attrs, input_kwargs=input_kwargs, compat_mode=False) + logger.warning( + f"Converting request for Nodes.TransferAttribute to {mapped_type}" + f"to ensure compatibility with bl3.3 code, but this is unsafe. 
Please update to avoid {Nodes.TransferAttribute}" + ) + + return nw.new_node( + node_type=mapped_type, + input_args=input_args, + attrs=attrs, + input_kwargs=input_kwargs, + compat_mode=False, + ) + def compat_args_sample_curve(nw, orig_type, input_args, attrs, input_kwargs): - map_dict_keys(input_kwargs, {'Curve': 'Curves'}) - return nw.new_node(node_type=orig_type, input_args=input_args, - attrs=attrs, input_kwargs=input_kwargs, compat_mode=False) + map_dict_keys(input_kwargs, {"Curve": "Curves"}) + return nw.new_node( + node_type=orig_type, + input_args=input_args, + attrs=attrs, + input_kwargs=input_kwargs, + compat_mode=False, + ) + COMPATIBILITY_MAPPINGS = { Nodes.MixRGB: make_virtual_mixrgb, Nodes.TransferAttribute: make_virtual_transfer_attribute, - Nodes.SampleCurve: compat_args_sample_curve -} \ No newline at end of file + Nodes.SampleCurve: compat_args_sample_curve, +} diff --git a/infinigen/core/nodes/node_info.py b/infinigen/core/nodes/node_info.py index 4d66073ad..907f42edb 100644 --- a/infinigen/core/nodes/node_info.py +++ b/infinigen/core/nodes/node_info.py @@ -3,9 +3,9 @@ # of this source tree. import bpy - import numpy as np + class Nodes: """ An enum for all node types. @@ -20,11 +20,11 @@ class Nodes: # Attribute Attribute = "ShaderNodeAttribute" CaptureAttribute = "GeometryNodeCaptureAttribute" - AttributeStatistic = 'GeometryNodeAttributeStatistic' + AttributeStatistic = "GeometryNodeAttributeStatistic" TransferAttribute = "GeometryNodeAttributeTransfer" # removed in b3.4, still supported via compatibility.py - DomainSize = 'GeometryNodeAttributeDomainSize' + DomainSize = "GeometryNodeAttributeDomainSize" StoreNamedAttribute = "GeometryNodeStoreNamedAttribute" - NamedAttribute = 'GeometryNodeInputNamedAttribute' + NamedAttribute = "GeometryNodeInputNamedAttribute" SampleIndex = "GeometryNodeSampleIndex" SampleNearest = "GeometryNodeSampleNearest" SampleNearestSurface = "GeometryNodeSampleNearestSurface" @@ -34,60 +34,60 @@ class Nodes: MixRGB = "ShaderNodeMixRGB" RGBCurve = "ShaderNodeRGBCurve" BrightContrast = "CompositorNodeBrightContrast" - Exposure = 'CompositorNodeExposure' - CombineHSV = 'ShaderNodeCombineHSV' - SeparateRGB = 'ShaderNodeSeparateRGB' - SeparateColor = 'ShaderNodeSeparateColor' - CompSeparateColor = 'CompositorNodeSeparateColor' - CombineRGB = 'ShaderNodeCombineRGB' - CombineColor = 'ShaderNodeCombineColor' - CompCombineColor = 'CompositorNodeCombineColor' + Exposure = "CompositorNodeExposure" + CombineHSV = "ShaderNodeCombineHSV" + SeparateRGB = "ShaderNodeSeparateRGB" + SeparateColor = "ShaderNodeSeparateColor" + CompSeparateColor = "CompositorNodeSeparateColor" + CombineRGB = "ShaderNodeCombineRGB" + CombineColor = "ShaderNodeCombineColor" + CompCombineColor = "CompositorNodeCombineColor" # bl3.5 additions - SeparateComponents = 'GeometryNodeSeparateComponents' - SetID = 'GeometryNodeSetID' - InterpolateCurves = 'GeometryNodeInterpolateCurves' - SampleUVSurface = 'GeometryNodeSampleUVSurface' - MeshIsland = 'GeometryNodeInputMeshIsland' - IsViewport = 'GeometryNodeIsViewport' - ImageInfo = 'GeometryNodeImageInfo' - CurveofPoint = 'GeometryNodeCurveOfPoint' - CurvesInfo = 'ShaderNodeHairInfo' - Radius = 'GeometryNodeInputRadius' - EvaluateonDomain = 'GeometryNodeFieldOnDomain' - BlurAttribute = 'GeometryNodeBlurAttribute' - EndpointSelection = 'GeometryNodeCurveEndpointSelection' - PointsofCurve = 'GeometryNodePointsOfCurve' - SetSplineResolution = 'GeometryNodeSetSplineResolution' - OffsetPointinCurve = 'GeometryNodeOffsetPointInCurve' - 
SplineResolution = 'GeometryNodeInputSplineResolution' + SeparateComponents = "GeometryNodeSeparateComponents" + SetID = "GeometryNodeSetID" + InterpolateCurves = "GeometryNodeInterpolateCurves" + SampleUVSurface = "GeometryNodeSampleUVSurface" + MeshIsland = "GeometryNodeInputMeshIsland" + IsViewport = "GeometryNodeIsViewport" + ImageInfo = "GeometryNodeImageInfo" + CurveofPoint = "GeometryNodeCurveOfPoint" + CurvesInfo = "ShaderNodeHairInfo" + Radius = "GeometryNodeInputRadius" + EvaluateonDomain = "GeometryNodeFieldOnDomain" + BlurAttribute = "GeometryNodeBlurAttribute" + EndpointSelection = "GeometryNodeCurveEndpointSelection" + PointsofCurve = "GeometryNodePointsOfCurve" + SetSplineResolution = "GeometryNodeSetSplineResolution" + OffsetPointinCurve = "GeometryNodeOffsetPointInCurve" + SplineResolution = "GeometryNodeInputSplineResolution" # Curve CurveToMesh = "GeometryNodeCurveToMesh" CurveToPoints = "GeometryNodeCurveToPoints" MeshToCurve = "GeometryNodeMeshToCurve" - SampleCurve = 'GeometryNodeSampleCurve' - SetCurveRadius = 'GeometryNodeSetCurveRadius' - SetCurveTilt = 'GeometryNodeSetCurveTilt' - CurveLength = 'GeometryNodeCurveLength' - CurveSplineType = 'GeometryNodeCurveSplineType' - SetHandlePositions = 'GeometryNodeSetCurveHandlePositions' - SetHandleType = 'GeometryNodeCurveSetHandles' - CurveTangent = 'GeometryNodeInputTangent' - SplineParameter = 'GeometryNodeSplineParameter' - SplineType = 'GeometryNodeCurveSplineType' - SubdivideCurve = 'GeometryNodeSubdivideCurve' - ResampleCurve = 'GeometryNodeResampleCurve' - TrimCurve = 'GeometryNodeTrimCurve' - ReverseCurve = 'GeometryNodeReverseCurve' - SplineLength = 'GeometryNodeSplineLength' - FillCurve = 'GeometryNodeFillCurve' - FilletCurve = 'GeometryNodeFilletCurve' + SampleCurve = "GeometryNodeSampleCurve" + SetCurveRadius = "GeometryNodeSetCurveRadius" + SetCurveTilt = "GeometryNodeSetCurveTilt" + CurveLength = "GeometryNodeCurveLength" + CurveSplineType = "GeometryNodeCurveSplineType" + SetHandlePositions = "GeometryNodeSetCurveHandlePositions" + SetHandleType = "GeometryNodeCurveSetHandles" + CurveTangent = "GeometryNodeInputTangent" + SplineParameter = "GeometryNodeSplineParameter" + SplineType = "GeometryNodeCurveSplineType" + SubdivideCurve = "GeometryNodeSubdivideCurve" + ResampleCurve = "GeometryNodeResampleCurve" + TrimCurve = "GeometryNodeTrimCurve" + ReverseCurve = "GeometryNodeReverseCurve" + SplineLength = "GeometryNodeSplineLength" + FillCurve = "GeometryNodeFillCurve" + FilletCurve = "GeometryNodeFilletCurve" # Curve Primitves - QuadraticBezier = 'GeometryNodeCurveQuadraticBezier' - CurveCircle = 'GeometryNodeCurvePrimitiveCircle' - CurveLine = 'GeometryNodeCurvePrimitiveLine' + QuadraticBezier = "GeometryNodeCurveQuadraticBezier" + CurveCircle = "GeometryNodeCurvePrimitiveCircle" + CurveLine = "GeometryNodeCurvePrimitiveLine" CurveBezierSegment = "GeometryNodeCurvePrimitiveBezierSegment" BezierSegment = "GeometryNodeCurvePrimitiveBezierSegment" @@ -98,11 +98,11 @@ class Nodes: SeparateGeometry = "GeometryNodeSeparateGeometry" BoundingBox = "GeometryNodeBoundBox" Transform = "GeometryNodeTransform" - DeleteGeometry = 'GeometryNodeDeleteGeometry' + DeleteGeometry = "GeometryNodeDeleteGeometry" Proximity = "GeometryNodeProximity" ConvexHull = "GeometryNodeConvexHull" - Raycast = 'GeometryNodeRaycast' - DuplicateElements = 'GeometryNodeDuplicateElements' + Raycast = "GeometryNodeRaycast" + DuplicateElements = "GeometryNodeDuplicateElements" # Input GroupInput = "NodeGroupInput" @@ -112,7 +112,7 @@ class Nodes: 
RandomValue = "FunctionNodeRandomValue" CollectionInfo = "GeometryNodeCollectionInfo" ObjectInfo = "GeometryNodeObjectInfo" - ObjectInfo_Shader = 'ShaderNodeObjectInfo' + ObjectInfo_Shader = "ShaderNodeObjectInfo" Vector = "FunctionNodeInputVector" InputID = "GeometryNodeInputID" InputPosition = "GeometryNodeInputPosition" @@ -120,16 +120,16 @@ class Nodes: InputEdgeVertices = "GeometryNodeInputMeshEdgeVertices" InputEdgeAngle = "GeometryNodeInputMeshEdgeAngle" InputColor = "FunctionNodeInputColor" - InputMeshFaceArea = 'GeometryNodeInputMeshFaceArea' + InputMeshFaceArea = "GeometryNodeInputMeshFaceArea" TextureCoord = "ShaderNodeTexCoord" - Index = 'GeometryNodeInputIndex' - AmbientOcclusion = 'ShaderNodeAmbientOcclusion' - Integer = 'FunctionNodeInputInt' - LightPath = 'ShaderNodeLightPath' - ShortestEdgePath = 'GeometryNodeInputShortestEdgePaths' - EdgeNeighbors = 'GeometryNodeInputMeshEdgeNeighbors' - ShaderNodeNormalMap = 'ShaderNodeNormalMap' - HueSaturationValue = 'ShaderNodeHueSaturation' + Index = "GeometryNodeInputIndex" + AmbientOcclusion = "ShaderNodeAmbientOcclusion" + Integer = "FunctionNodeInputInt" + LightPath = "ShaderNodeLightPath" + ShortestEdgePath = "GeometryNodeInputShortestEdgePaths" + EdgeNeighbors = "GeometryNodeInputMeshEdgeNeighbors" + ShaderNodeNormalMap = "ShaderNodeNormalMap" + HueSaturationValue = "ShaderNodeHueSaturation" BlackBody = "ShaderNodeBlackbody" # Instances @@ -147,27 +147,27 @@ class Nodes: # Mesh SubdivideMesh = "GeometryNodeSubdivideMesh" SubdivisionSurface = "GeometryNodeSubdivisionSurface" - MeshToPoints = 'GeometryNodeMeshToPoints' + MeshToPoints = "GeometryNodeMeshToPoints" MeshBoolean = "GeometryNodeMeshBoolean" - SetShadeSmooth = 'GeometryNodeSetShadeSmooth' - DualMesh = 'GeometryNodeDualMesh' - ScaleElements = 'GeometryNodeScaleElements' - IcoSphere = 'GeometryNodeMeshIcoSphere' - ExtrudeMesh = 'GeometryNodeExtrudeMesh' - FlipFaces = 'GeometryNodeFlipFaces' - FaceNeighbors = 'GeometryNodeInputMeshFaceNeighbors' - EdgePathToCurve = 'GeometryNodeEdgePathsToCurves' - DeleteGeom = 'GeometryNodeDeleteGeometry' - SplitEdges = 'GeometryNodeSplitEdges' + SetShadeSmooth = "GeometryNodeSetShadeSmooth" + DualMesh = "GeometryNodeDualMesh" + ScaleElements = "GeometryNodeScaleElements" + IcoSphere = "GeometryNodeMeshIcoSphere" + ExtrudeMesh = "GeometryNodeExtrudeMesh" + FlipFaces = "GeometryNodeFlipFaces" + FaceNeighbors = "GeometryNodeInputMeshFaceNeighbors" + EdgePathToCurve = "GeometryNodeEdgePathsToCurves" + DeleteGeom = "GeometryNodeDeleteGeometry" + SplitEdges = "GeometryNodeSplitEdges" VertexNeighbors = "GeometryNodeInputMeshVertexNeighbors" # Mesh Primitives MeshCircle = "GeometryNodeMeshCircle" - MeshGrid = 'GeometryNodeMeshGrid' - MeshLine = 'GeometryNodeMeshLine' - MeshUVSphere = 'GeometryNodeMeshUVSphere' - MeshIcoSphere = 'GeometryNodeMeshIcoSphere' - MeshCube = 'GeometryNodeMeshCube' + MeshGrid = "GeometryNodeMeshGrid" + MeshLine = "GeometryNodeMeshLine" + MeshUVSphere = "GeometryNodeMeshUVSphere" + MeshIcoSphere = "GeometryNodeMeshIcoSphere" + MeshCube = "GeometryNodeMeshCube" # Output Menu GroupOutput = "NodeGroupOutput" @@ -181,8 +181,8 @@ class Nodes: # Point DistributePointsOnFaces = "GeometryNodeDistributePointsOnFaces" PointsToVertices = "GeometryNodePointsToVertices" - PointsToVolume = 'GeometryNodePointsToVolume' - SetPointRadius = 'GeometryNodeSetPointRadius' + PointsToVolume = "GeometryNodePointsToVolume" + SetPointRadius = "GeometryNodeSetPointRadius" # Vector SeparateXYZ = "ShaderNodeSeparateXYZ" @@ -193,12 +193,12 @@ class 
Nodes: Displacement = "ShaderNodeDisplacement" # Volume - VolumeToMesh = 'GeometryNodeVolumeToMesh' + VolumeToMesh = "GeometryNodeVolumeToMesh" # Math VectorMath = "ShaderNodeVectorMath" Math = "ShaderNodeMath" - MapRange = 'ShaderNodeMapRange' + MapRange = "ShaderNodeMapRange" BooleanMath = "FunctionNodeBooleanMath" Compare = "FunctionNodeCompare" FloatToInt = "FunctionNodeFloatToInt" @@ -212,12 +212,12 @@ class Nodes: MusgraveTexture = "ShaderNodeTexMusgrave" VoronoiTexture = "ShaderNodeTexVoronoi" WaveTexture = "ShaderNodeTexWave" - WhiteNoiseTexture = 'ShaderNodeTexWhiteNoise' + WhiteNoiseTexture = "ShaderNodeTexWhiteNoise" ImageTexture = "GeometryNodeImageTexture" - GradientTexture = 'ShaderNodeTexGradient' + GradientTexture = "ShaderNodeTexGradient" ShaderImageTexture = "ShaderNodeTexImage" MagicTexture = "ShaderNodeTexMagic" - BrickTexture = 'ShaderNodeTexBrick' + BrickTexture = "ShaderNodeTexBrick" CheckerTexture = "ShaderNodeTexChecker" EnvironmentTexture = "ShaderNodeTexEnvironment" @@ -228,10 +228,10 @@ class Nodes: TranslucentBSDF = "ShaderNodeBsdfTranslucent" TransparentBSDF = "ShaderNodeBsdfTransparent" PrincipledVolume = "ShaderNodeVolumePrincipled" - PrincipledHairBSDF = 'ShaderNodeBsdfHairPrincipled' - Emission = 'ShaderNodeEmission' - Fresnel = 'ShaderNodeFresnel' - NewGeometry = 'ShaderNodeNewGeometry' + PrincipledHairBSDF = "ShaderNodeBsdfHairPrincipled" + Emission = "ShaderNodeEmission" + Fresnel = "ShaderNodeFresnel" + NewGeometry = "ShaderNodeNewGeometry" RefractionBSDF = "ShaderNodeBsdfRefraction" GlassBSDF = "ShaderNodeBsdfGlass" GlossyBSDF = "ShaderNodeBsdfGlossy" @@ -251,148 +251,141 @@ class Nodes: # Compositor - Filter RenderLayers = "CompositorNodeRLayers" LensDistortion = "CompositorNodeLensdist" - Glare = 'CompositorNodeGlare' + Glare = "CompositorNodeGlare" # World Nodes SkyTexture = "ShaderNodeTexSky" Background = "ShaderNodeBackground" # bl3.5 additions - SeparateComponents = 'GeometryNodeSeparateComponents' - SetID = 'GeometryNodeSetID' - InterpolateCurves = 'GeometryNodeInterpolateCurves' - SampleUVSurface = 'GeometryNodeSampleUVSurface' - MeshIsland = 'GeometryNodeInputMeshIsland' - IsViewport = 'GeometryNodeIsViewport' - ImageInfo = 'GeometryNodeImageInfo' - CurveofPoint = 'GeometryNodeCurveOfPoint' - CurvesInfo = 'ShaderNodeHairInfo' - Radius = 'GeometryNodeInputRadius' - EvaluateonDomain = 'GeometryNodeFieldOnDomain' - BlurAttribute = 'GeometryNodeBlurAttribute' - EndpointSelection = 'GeometryNodeCurveEndpointSelection' - PointsofCurve = 'GeometryNodePointsOfCurve' - SetSplineResolution = 'GeometryNodeSetSplineResolution' - OffsetPointinCurve = 'GeometryNodeOffsetPointInCurve' - SplineResolution = 'GeometryNodeInputSplineResolution' - - -''' + SeparateComponents = "GeometryNodeSeparateComponents" + SetID = "GeometryNodeSetID" + InterpolateCurves = "GeometryNodeInterpolateCurves" + SampleUVSurface = "GeometryNodeSampleUVSurface" + MeshIsland = "GeometryNodeInputMeshIsland" + IsViewport = "GeometryNodeIsViewport" + ImageInfo = "GeometryNodeImageInfo" + CurveofPoint = "GeometryNodeCurveOfPoint" + CurvesInfo = "ShaderNodeHairInfo" + Radius = "GeometryNodeInputRadius" + EvaluateonDomain = "GeometryNodeFieldOnDomain" + BlurAttribute = "GeometryNodeBlurAttribute" + EndpointSelection = "GeometryNodeCurveEndpointSelection" + PointsofCurve = "GeometryNodePointsOfCurve" + SetSplineResolution = "GeometryNodeSetSplineResolution" + OffsetPointinCurve = "GeometryNodeOffsetPointInCurve" + SplineResolution = "GeometryNodeInputSplineResolution" + + +""" Blender 
doesnt have an automatic way of discovering what properties exist on a node that might need to be set but are NOT in .inputs. This dict documents what types of properties we might need to set on each type of node Used in transpiler's create_attrs_dict -''' +""" NODE_ATTRS_AVAILABLE = { - - Nodes.Math: ['operation', 'use_clamp'], - Nodes.VectorMath: ['operation'], - Nodes.BooleanMath: ['operation'], - Nodes.Compare: ['mode', 'data_type', 'operation'], - - Nodes.NoiseTexture: ['noise_dimensions'], - Nodes.MusgraveTexture: ['musgrave_dimensions', 'musgrave_type'], - Nodes.VoronoiTexture: ['voronoi_dimensions', 'feature', 'distance'], - Nodes.GradientTexture: ['gradient_type'], - - Nodes.RGB: ['color'], - Nodes.Attribute: ['attribute_name', 'attribute_type'], - Nodes.AttributeStatistic: ['domain', 'data_type'], - Nodes.CaptureAttribute: ['domain', 'data_type'], - Nodes.TextureCoord: ['from_instancer'], - - Nodes.PrincipledBSDF: ['distribution', 'subsurface_method'], - - Nodes.Mapping: ['vector_type'], - Nodes.MapRange: ['data_type', 'interpolation_type', 'clamp'], + Nodes.Math: ["operation", "use_clamp"], + Nodes.VectorMath: ["operation"], + Nodes.BooleanMath: ["operation"], + Nodes.Compare: ["mode", "data_type", "operation"], + Nodes.NoiseTexture: ["noise_dimensions"], + Nodes.MusgraveTexture: ["musgrave_dimensions", "musgrave_type"], + Nodes.VoronoiTexture: ["voronoi_dimensions", "feature", "distance"], + Nodes.GradientTexture: ["gradient_type"], + Nodes.RGB: ["color"], + Nodes.Attribute: ["attribute_name", "attribute_type"], + Nodes.AttributeStatistic: ["domain", "data_type"], + Nodes.CaptureAttribute: ["domain", "data_type"], + Nodes.TextureCoord: ["from_instancer"], + Nodes.PrincipledBSDF: ["distribution", "subsurface_method"], + Nodes.Mapping: ["vector_type"], + Nodes.MapRange: ["data_type", "interpolation_type", "clamp"], Nodes.ColorRamp: [], # Color ramp properties are set in special_case_colorramp, since they are nested - Nodes.MixRGB: ['blend_type'], - Nodes.Mix: ['data_type', 'blend_type', 'clamp_result', 'clamp_factor'], - Nodes.AccumulateField: ['data_type'], - Nodes.CombineRGB: ['mode'], - Nodes.CombineColor: ['mode'], - Nodes.SeparateColor: ['mode'], - - Nodes.DistributePointsOnFaces: ['distribute_method'], - Nodes.CollectionInfo: ['transform_space'], - - Nodes.RandomValue: ['data_type'], - - Nodes.Switch: ['input_type'], - Nodes.TransferAttribute: ['data_type', 'mapping'], - Nodes.SeparateGeometry: ['domain'], - Nodes.MergeByDistance: ['mode'], - - Nodes.Integer: ['integer'], - Nodes.MeshBoolean: ['operation'], - Nodes.MeshCircle: ['fill_type'], - Nodes.CurveSplineType: ['spline_type'], - Nodes.SetHandlePositions: ['mode'], - Nodes.SetHandleType: ['handle_type', 'mode'], - Nodes.NamedAttribute: ['data_type'], - Nodes.StoreNamedAttribute: ['data_type', 'domain'], - Nodes.CurveToPoints: ['mode'], - Nodes.FillCurve: ['mode'], - - Nodes.ResampleCurve: ['mode'], - Nodes.TrimCurve: ['mode'], - Nodes.MeshLine: ['mode'], - Nodes.MeshToPoints: ['mode'], - - Nodes.DeleteGeom: ['mode'], - Nodes.Proximity: ['target_element'], - - Nodes.CurveCircle: ['mode'], - Nodes.SampleCurve: ['mode'], - Nodes.BezierSegment: ['mode'], - Nodes.CurveLine: ['mode'], - Nodes.ExtrudeMesh: ['mode'], - Nodes.Raycast: ['data_type', 'mapping'], - - Nodes.AlignEulerToVector: ['axis', 'pivot_axis'], - Nodes.VectorRotate: ['invert', 'rotation_type'], - Nodes.RotateEuler: ['space', 'type'], - Nodes.DuplicateElements: ['domain'], - Nodes.SeparateRGB: ['mode'], - Nodes.SeparateColor: ['mode'], - - Nodes.DomainSize: 
['component'] - + Nodes.MixRGB: ["blend_type"], + Nodes.Mix: ["data_type", "blend_type", "clamp_result", "clamp_factor"], + Nodes.AccumulateField: ["data_type"], + Nodes.CombineRGB: ["mode"], + Nodes.CombineColor: ["mode"], + Nodes.SeparateColor: ["mode"], + Nodes.DistributePointsOnFaces: ["distribute_method"], + Nodes.CollectionInfo: ["transform_space"], + Nodes.RandomValue: ["data_type"], + Nodes.Switch: ["input_type"], + Nodes.TransferAttribute: ["data_type", "mapping"], + Nodes.SeparateGeometry: ["domain"], + Nodes.MergeByDistance: ["mode"], + Nodes.Integer: ["integer"], + Nodes.MeshBoolean: ["operation"], + Nodes.MeshCircle: ["fill_type"], + Nodes.CurveSplineType: ["spline_type"], + Nodes.SetHandlePositions: ["mode"], + Nodes.SetHandleType: ["handle_type", "mode"], + Nodes.NamedAttribute: ["data_type"], + Nodes.StoreNamedAttribute: ["data_type", "domain"], + Nodes.CurveToPoints: ["mode"], + Nodes.FillCurve: ["mode"], + Nodes.ResampleCurve: ["mode"], + Nodes.TrimCurve: ["mode"], + Nodes.MeshLine: ["mode"], + Nodes.MeshToPoints: ["mode"], + Nodes.DeleteGeom: ["mode"], + Nodes.Proximity: ["target_element"], + Nodes.CurveCircle: ["mode"], + Nodes.SampleCurve: ["mode"], + Nodes.BezierSegment: ["mode"], + Nodes.CurveLine: ["mode"], + Nodes.ExtrudeMesh: ["mode"], + Nodes.Raycast: ["data_type", "mapping"], + Nodes.AlignEulerToVector: ["axis", "pivot_axis"], + Nodes.VectorRotate: ["invert", "rotation_type"], + Nodes.RotateEuler: ["space", "type"], + Nodes.DuplicateElements: ["domain"], + Nodes.SeparateRGB: ["mode"], + Nodes.SeparateColor: ["mode"], + Nodes.DomainSize: ["component"], } # Certain nodes should only be created once. This list defines which ones. -SINGLETON_NODES = [Nodes.GroupInput, Nodes.GroupOutput, Nodes.MaterialOutput, Nodes.WorldOutput, Nodes.Viewer, - Nodes.Composite, Nodes.RenderLayers, Nodes.LightOutput] +SINGLETON_NODES = [ + Nodes.GroupInput, + Nodes.GroupOutput, + Nodes.MaterialOutput, + Nodes.WorldOutput, + Nodes.Viewer, + Nodes.Composite, + Nodes.RenderLayers, + Nodes.LightOutput, +] # Map the type of a socket (ie, .outputs[0].type), to the corresponding value to put into a # data_type attr, ie CaptureAttributes data_type. Frustratingly these are not directly related. 
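As a hedged illustration of how the mappings that follow are typically chained (the names `nw`, `geometry`, and `sock` are assumed for the example; import paths follow the modules touched in this patch):

```python
# Sketch only: translate a socket's .type (e.g. 'VECTOR') into the string that a
# CaptureAttribute node expects in its data_type attr, then capture the attribute.
from infinigen.core.nodes import node_info
from infinigen.core.nodes.node_wrangler import Nodes


def capture_with_inferred_dtype(nw, geometry, sock):
    # e.g. 'VECTOR' -> 'FLOAT_VECTOR'
    data_type = node_info.NODETYPE_TO_DATATYPE[sock.type]
    capture = nw.new_node(
        Nodes.CaptureAttribute,
        input_kwargs={"Geometry": geometry, "Value": sock},
        attrs={"data_type": data_type},
    )
    return capture.outputs["Geometry"], capture.outputs["Attribute"]
```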
NODETYPE_TO_DATATYPE = { - 'VALUE': 'FLOAT', - 'INT': 'INT', - 'VECTOR': 'FLOAT_VECTOR', - 'FLOAT_COLOR': 'RGBA', - 'BOOLEAN': 'BOOLEAN' + "VALUE": "FLOAT", + "INT": "INT", + "VECTOR": "FLOAT_VECTOR", + "FLOAT_COLOR": "RGBA", + "BOOLEAN": "BOOLEAN", } NODECLASS_TO_DATATYPE = { - 'NodeSocketFloat': 'FLOAT', - 'NodeSocketInt': 'INT', - 'NodeSocketVector': 'FLOAT_VECTOR', - 'NodeSocketColor': 'RGBA', - 'NodeSocketBool': 'BOOLEAN' + "NodeSocketFloat": "FLOAT", + "NodeSocketInt": "INT", + "NodeSocketVector": "FLOAT_VECTOR", + "NodeSocketColor": "RGBA", + "NodeSocketBool": "BOOLEAN", } DATATYPE_TO_NODECLASS = {v: k for k, v in NODECLASS_TO_DATATYPE.items()} -NODECLASSES = [k for k in dir(bpy.types) if 'NodeSocket' in k] +NODECLASSES = [k for k in dir(bpy.types) if "NodeSocket" in k] PYTYPE_TO_DATATYPE = { - int: 'INT', - float: 'FLOAT', - np.float32: 'FLOAT', - np.float64: 'FLOAT', - np.array: 'FLOAT_VECTOR', - bool: 'BOOLEAN' + int: "INT", + float: "FLOAT", + np.float32: "FLOAT", + np.float64: "FLOAT", + np.array: "FLOAT_VECTOR", + bool: "BOOLEAN", } DATATYPE_TO_PYTYPE = {v: k for k, v in PYTYPE_TO_DATATYPE.items()} @@ -408,18 +401,18 @@ class Nodes: } DATATYPE_DIMS = { - 'FLOAT': 1, - 'INT': 1, - 'FLOAT_VECTOR': 3, - 'FLOAT2': 2, - 'FLOAT_COLOR': 4, - 'BOOLEAN': 1, - 'INT32_2D': 2 + "FLOAT": 1, + "INT": 1, + "FLOAT_VECTOR": 3, + "FLOAT2": 2, + "FLOAT_COLOR": 4, + "BOOLEAN": 1, + "INT32_2D": 2, } DATATYPE_FIELDS = { - 'FLOAT': 'value', - 'INT': 'value', - 'FLOAT_VECTOR': 'vector', - 'FLOAT_COLOR': 'color', - 'BOOLEAN': 'value', + "FLOAT": "value", + "INT": "value", + "FLOAT_VECTOR": "vector", + "FLOAT_COLOR": "color", + "BOOLEAN": "value", } diff --git a/infinigen/core/nodes/node_transpiler/transpiler.py b/infinigen/core/nodes/node_transpiler/transpiler.py index 2a6d33f65..d0152299a 100644 --- a/infinigen/core/nodes/node_transpiler/transpiler.py +++ b/infinigen/core/nodes/node_transpiler/transpiler.py @@ -1,50 +1,98 @@ # Copyright (c) Princeton University. # This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
-# Authors: +# Authors: # - Alexander Raistrick: primary author # - Alejandro Newell, Lingjie Mei: bugfixes -import pdb +import importlib +import keyword import logging import re from collections import OrderedDict -import keyword -import importlib import bpy -import bpy_types import mathutils - import numpy as np -from ..node_info import Nodes, OUTPUT_NODE_IDS, SINGLETON_NODES +from ..node_info import OUTPUT_NODE_IDS, SINGLETON_NODES, Nodes logger = logging.getLogger(__name__) -VERSION = '2.6.5' -indent_string = ' ' * 4 +VERSION = "2.6.5" +indent_string = " " * 4 LINE_LEN = 100 -COMMON_ATTR_NAMES = ['data_type', 'mode', 'operation'] +COMMON_ATTR_NAMES = ["data_type", "mode", "operation"] VALUE_NODES = [Nodes.Value, Nodes.Vector, Nodes.RGB, Nodes.InputColor, Nodes.Integer] -UNIVERSAL_ATTR_NAMES = set([ - 'show_preview', '__module__', 'is_registered_node_type', 'bl_rna', 'poll', 'name', - 'internal_links', 'dimensions', 'parent', 'bl_width_max', 'label', 'input_template', - 'show_texture', 'rna_type', 'width_hidden', 'show_options', 'location', 'outputs', - 'use_custom_color', '__doc__', 'width', 'bl_width_default', 'inputs', 'bl_idname', - 'socket_value_update', 'bl_width_min', 'color', 'bl_height_max', '__slots__', 'select', - 'mute', 'bl_height_default', 'bl_static_type', 'bl_height_min', 'height', 'bl_label', - 'bl_icon', 'hide', 'output_template', 'poll_instance', 'draw_buttons_ext', 'type', - 'bl_description', 'draw_buttons', 'update' -]) - -SPECIAL_CASE_ATTR_NAMES = set([ - 'color_ramp', 'mapping', 'vector', 'color', 'integer', 'texture_mapping', 'color_mapping', - 'image_user', 'interface', 'node_tree', 'tag_need_exec' -]) +UNIVERSAL_ATTR_NAMES = set( + [ + "show_preview", + "__module__", + "is_registered_node_type", + "bl_rna", + "poll", + "name", + "internal_links", + "dimensions", + "parent", + "bl_width_max", + "label", + "input_template", + "show_texture", + "rna_type", + "width_hidden", + "show_options", + "location", + "outputs", + "use_custom_color", + "__doc__", + "width", + "bl_width_default", + "inputs", + "bl_idname", + "socket_value_update", + "bl_width_min", + "color", + "bl_height_max", + "__slots__", + "select", + "mute", + "bl_height_default", + "bl_static_type", + "bl_height_min", + "height", + "bl_label", + "bl_icon", + "hide", + "output_template", + "poll_instance", + "draw_buttons_ext", + "type", + "bl_description", + "draw_buttons", + "update", + ] +) + +SPECIAL_CASE_ATTR_NAMES = set( + [ + "color_ramp", + "mapping", + "vector", + "color", + "integer", + "texture_mapping", + "color_mapping", + "image_user", + "interface", + "node_tree", + "tag_need_exec", + ] +) + def node_attrs_available(node): attrs = set(node.__dir__()) @@ -53,11 +101,12 @@ def node_attrs_available(node): logging.info(node.name, attrs) return attrs + def indent(s): - return indent_string + s.strip().replace('\n', f'\n{indent_string}') + return indent_string + s.strip().replace("\n", f"\n{indent_string}") -def prefix(dependencies_used) -> str: +def prefix(dependencies_used) -> str: fixed_prefix = ( "import bpy\n" "import mathutils\n" @@ -68,81 +117,104 @@ def prefix(dependencies_used) -> str: "from infinigen.core import surface\n" ) - deps_table = [(ng_name, name_used[0]) for ng_name, name_used in dependencies_used.items() if name_used[1]] + deps_table = [ + (ng_name, name_used[0]) + for ng_name, name_used in dependencies_used.items() + if name_used[1] + ] module_names = set(d[1] for d in deps_table) deps_by_module = {n: [d[0] for d in deps_table if d[1] == n] for n in module_names} - 
deps_prefix_lines = [f"from {name} import {', '.join(ngnames)}" for name, ngnames in deps_by_module.items()] + deps_prefix_lines = [ + f"from {name} import {', '.join(ngnames)}" + for name, ngnames in deps_by_module.items() + ] + + return fixed_prefix + "\n" + "\n".join(deps_prefix_lines) - return fixed_prefix + '\n' + '\n'.join(deps_prefix_lines) def postfix(funcnames, targets): header = "def apply(obj, selection=None, **kwargs):\n" - body = '' + body = "" for funcname, target in zip(funcnames, targets): idname = get_node_tree(target).bl_idname - if idname == 'GeometryNodeTree': - body += f'surface.add_geomod(obj, {funcname}, selection=selection, attributes=[])\n' - elif idname == 'ShaderNodeTree': - body += f'surface.add_material(obj, {funcname}, selection=selection)\n' + if idname == "GeometryNodeTree": + body += f"surface.add_geomod(obj, {funcname}, selection=selection, attributes=[])\n" + elif idname == "ShaderNodeTree": + body += f"surface.add_material(obj, {funcname}, selection=selection)\n" else: - raise ValueError(f'Postfix couldnt handle {idname=}, please contact the developer') + raise ValueError( + f"Postfix couldnt handle {idname=}, please contact the developer" + ) return header + indent(body) -def represent_default_value(val, simple=True): - ''' +def represent_default_value(val, simple=True): + """ Attempt to create a python expression to represent val, which was the .default_value of some .input node Unless simple=True, we may encounter things such as Materials which require transpiling. - ''' + """ - code = '' + code = "" new_transpiler_targets = {} - if isinstance(val, (str, int, bool, bpy.types.Object, bpy.types.Collection, set, bpy.types.Image)): + if isinstance( + val, + (str, int, bool, bpy.types.Object, bpy.types.Collection, set, bpy.types.Image), + ): code = repr(val) elif isinstance(val, (float)): - code = f'{val:.4f}' - elif isinstance(val, (tuple, bpy.types.bpy_prop_array, mathutils.Vector, mathutils.Euler)): + code = f"{val:.4f}" + elif isinstance( + val, (tuple, bpy.types.bpy_prop_array, mathutils.Vector, mathutils.Euler) + ): code = represent_tuple(tuple(val)) elif isinstance(val, bpy.types.Collection): - logger.warning(f'Encountered collection {repr(val.name)} as a default_value - please edit the code to remove this dependency on a collection already existing') - code = f'bpy.data.collections[{repr(val.name)}]' + logger.warning( + f"Encountered collection {repr(val.name)} as a default_value - please edit the code to remove this dependency on a collection already existing" + ) + code = f"bpy.data.collections[{repr(val.name)}]" elif isinstance(val, bpy.types.Material): if val.use_nodes: funcname = get_func_name(val) new_transpiler_targets[funcname] = val - code = f'surface.shaderfunc_to_material({funcname})' + code = f"surface.shaderfunc_to_material({funcname})" else: - logger.warning(f'Encountered material {val} but it has use_nodes=False') + logger.warning(f"Encountered material {val} but it has use_nodes=False") code = repr(val) elif val is None: - logger.warning('Transpiler introduced a None into result script, this may not have been intended by the user') - code = 'None' + logger.warning( + "Transpiler introduced a None into result script, this may not have been intended by the user" + ) + code = "None" else: - raise ValueError(f'represent_default_value was unable to handle {val=} with type {type(val)}, please contact the developer') + raise ValueError( + f"represent_default_value was unable to handle {val=} with type {type(val)}, please contact the 
developer" + ) assert isinstance(code, str) if simple: if len(new_transpiler_targets) != 0: - raise ValueError(f'Encountered {val=} while trying to represent_default_value with simple=True, please contact the developer') + raise ValueError( + f"Encountered {val=} while trying to represent_default_value with simple=True, please contact the developer" + ) return code else: return code, new_transpiler_targets -def has_default_value_changed(node_tree, node, value): - ''' - Utility to check whether a given `value` of a `node` has been changed at all - from its default, and hence to check whether we need to bother to add code to +def has_default_value_changed(node_tree, node, value): + """ + Utility to check whether a given `value` of a `node` has been changed at all + from its default, and hence to check whether we need to bother to add code to set its value. `value` is either an input socket of the node with a default_value, or just a python variable name string to check - ''' + """ def compare(a, b): a = np.array(a) @@ -150,154 +222,170 @@ def compare(a, b): return np.all(a == b) temp_default_node = node_tree.nodes.new(node.bl_idname) - if node.bl_idname.endswith('NodeGroup'): + if node.bl_idname.endswith("NodeGroup"): temp_default_node.node_tree = node.node_tree if isinstance(value, bpy.types.NodeSocket): - assert get_connected_link(node_tree, input_socket=value) is None - assert hasattr(value, 'default_value') + assert hasattr(value, "default_value") observed_val = value.default_value - default_socket = [i for i in temp_default_node.inputs if i.identifier == value.identifier][0] + default_socket = [ + i for i in temp_default_node.inputs if i.identifier == value.identifier + ][0] default_val = default_socket.default_value has_changed = not compare(observed_val, default_val) elif isinstance(value, str): assert hasattr(node, value) - has_changed = not compare(getattr(node, value), getattr(temp_default_node, value)) + has_changed = not compare( + getattr(node, value), getattr(temp_default_node, value) + ) else: node_tree.nodes.remove(temp_default_node) - raise ValueError(f'Unexpected input {value=} in has_default_value_changed') + raise ValueError(f"Unexpected input {value=} in has_default_value_changed") node_tree.nodes.remove(temp_default_node) return has_changed -def special_case_colorramp(node, varname): +def special_case_colorramp(node, varname): assert node.bl_idname == Nodes.ColorRamp code = "" cramp = node.color_ramp - if cramp.interpolation != 'LINEAR': # dont bother if left at default - code += f'{varname}.color_ramp.interpolation = \"{cramp.interpolation}\"\n' + if cramp.interpolation != "LINEAR": # dont bother if left at default + code += f'{varname}.color_ramp.interpolation = "{cramp.interpolation}"\n' # add code to add new elements if need be if len(cramp.elements) > 2: - n_elements_needed =len(cramp.elements) - 2 # starts with 2 by default + n_elements_needed = len(cramp.elements) - 2 # starts with 2 by default for _ in range(n_elements_needed): - code += f'{varname}.color_ramp.elements.new(0)\n' + code += f"{varname}.color_ramp.elements.new(0)\n" for i, ele in enumerate(cramp.elements): - code += f'{varname}.color_ramp.elements[{i}].position = {ele.position:.4f}\n' - code += f'{varname}.color_ramp.elements[{i}].color = {represent_list(ele.color)}\n' + code += f"{varname}.color_ramp.elements[{i}].position = {ele.position:.4f}\n" + code += ( + f"{varname}.color_ramp.elements[{i}].color = {represent_list(ele.color)}\n" + ) return code -def special_case_curve(node, varname): +def 
special_case_curve(node, varname): assert node.bl_idname in [Nodes.FloatCurve, Nodes.RGBCurve, Nodes.VectorCurve] code = "" for i, c in enumerate(node.mapping.curves): points = [tuple(p.location) for p in c.points] - args = [f'{varname}.mapping.curves[{i}]', represent_list(points)] - if not all(p.handle_type == 'AUTO' for p in c.points): - args.append(f'handles={repr([p.handle_type for p in c.points])}') + args = [f"{varname}.mapping.curves[{i}]", represent_list(points)] + if not all(p.handle_type == "AUTO" for p in c.points): + args.append(f"handles={repr([p.handle_type for p in c.points])}") code += f"node_utils.assign_curve({', '.join(args)})\n" return code -def represent_label_value_expression(expression): - ''' +def represent_label_value_expression(expression): + """ When the user puts "var ~ N(0, 1)" or something of the like as the label of their node, this function parses everything after the ~ into a python expression Must be of form {operation}({argument}) - Valid operations: + Valid operations: - U, uniform - N, normal - color, color_category Valid arguments: str, float, list of float - ''' + """ def parse_arg(arg): + arg = arg.strip(" ,") - arg = arg.strip(' ,') - - if arg.strip("\'\"").isalpha(): - return arg.strip("\'\"") + if arg.strip("'\"").isalpha(): + return arg.strip("'\"") try: return float(arg) - except: + except ValueError: pass - if arg.startswith('['): - vals = arg.strip('[]').split(',') + if arg.startswith("["): + vals = arg.strip("[]").split(",") return [parse_arg(v) for v in vals] else: - raise ValueError(f'represent_label_value_expression had invalid argument {arg}') + raise ValueError( + f"represent_label_value_expression had invalid argument {arg}" + ) - matched_chars = { - '\'': '\'', - '\"': '\"', - '[': ']' - } - def parse_args(arg_str): + matched_chars = {"'": "'", '"': '"', "[": "]"} + def parse_args(arg_str): args = [] remaining = arg_str while len(remaining) != 0: + remaining = remaining.strip(", ") - remaining = remaining.strip(', ') - - search_for = matched_chars.get(remaining[0], ',') - next_idx = remaining[1:].index(search_for) + 1 if search_for in remaining else len(remaining) - arg = remaining[:next_idx+1] - remaining = remaining[next_idx+1:] + search_for = matched_chars.get(remaining[0], ",") + next_idx = ( + remaining[1:].index(search_for) + 1 + if search_for in remaining + else len(remaining) + ) + arg = remaining[: next_idx + 1] + remaining = remaining[next_idx + 1 :] try: args.append(parse_arg(arg)) except ValueError: - raise ValueError(f'Could not parse node label expression {repr(arg_str)}, item {repr(arg)} was not a valid argument') + raise ValueError( + f"Could not parse node label expression {repr(arg_str)}, item {repr(arg)} was not a valid argument" + ) return args - op, args = expression.split('(') + op, args = expression.split("(") op = op.strip() - args = parse_args(args.strip(')')) + args = parse_args(args.strip(")")) - if op in ['N', 'normal', 'U', 'uniform', 'R', 'randint']: + if op in ["N", "normal", "U", "uniform", "R", "randint"]: if not len(args) == 2: - raise ValueError(f'In {expression=}, expected 2 arguments, got {len(args)} instead') + raise ValueError( + f"In {expression=}, expected 2 arguments, got {len(args)} instead" + ) funcname = { - 'N': 'normal', 'normal': 'normal', - 'U': 'uniform', 'uniform': 'uniform', - 'R': 'randint', 'randint': 'randint' + "N": "normal", + "normal": "normal", + "U": "uniform", + "uniform": "uniform", + "R": "randint", + "randint": "randint", }[op] - args = ', '.join(repr(a) for a in args) - 
return f'{funcname}({args})' + args = ", ".join(repr(a) for a in args) + return f"{funcname}({args})" - elif op in ['color', 'color_category']: + elif op in ["color", "color_category"]: if not len(args) == 1: - raise ValueError(f'In {expression=}, expected 1 argument, got {len(args)} instead') - return f'color_category({repr(args[0])})' + raise ValueError( + f"In {expression=}, expected 1 argument, got {len(args)} instead" + ) + return f"color_category({repr(args[0])})" else: - raise ValueError(f'Failed to represent_label_value_expression({expression=}), unrecognized {op=}') + raise ValueError( + f"Failed to represent_label_value_expression({expression=}), unrecognized {op=}" + ) -def special_case_value(node, varname): - code = '' +def special_case_value(node, varname): + code = "" # Determine value expression - if node.label and '~' in node.label: - labelname, expression = node.label.split('~') + if node.label and "~" in node.label: + labelname, expression = node.label.split("~") value_expr = represent_label_value_expression(expression) else: if node.bl_idname in [Nodes.Value, Nodes.RGB]: @@ -309,71 +397,83 @@ def special_case_value(node, varname): elif node.bl_idname == Nodes.Integer: val = node.integer else: - raise ValueError(f'special_case_value called on unrecognized {node.bl_idname=}') + raise ValueError( + f"special_case_value called on unrecognized {node.bl_idname=}" + ) value_expr = represent_default_value(val, simple=True) # set value if node.bl_idname in [Nodes.Value, Nodes.RGB]: - code += f'{varname}.outputs[0].default_value = {value_expr}\n' + code += f"{varname}.outputs[0].default_value = {value_expr}\n" elif node.bl_idname == Nodes.Vector: - code += f'{varname}.vector = {value_expr}\n' + code += f"{varname}.vector = {value_expr}\n" elif node.bl_idname == Nodes.InputColor: - code += f'{varname}.color = {value_expr}\n' + code += f"{varname}.color = {value_expr}\n" elif node.bl_idname == Nodes.Integer: - code += f'{varname}.integer = {value_expr}\n' + code += f"{varname}.integer = {value_expr}\n" else: - raise ValueError(f'special_case_value called on unrecognized {node.bl_idname=}') + raise ValueError(f"special_case_value called on unrecognized {node.bl_idname=}") return code + def get_connected_link(node_tree, input_socket): links = [l for l in node_tree.links if l.to_socket == input_socket] return None if len(links) == 0 else links -def create_attrs_dict(node_tree, node): - ''' +def create_attrs_dict(node_tree, node): + """ Create a dict to be passed into the attrs=... kwarg of NodeWrangler.new_node IE, the dict should represent all the properties of `node` that need to be set but are NOT part of node.inputs - things like setting what operation a math node does, or how a mix node should mix its inputs. 
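As a hedged sketch of the kind of transpiled call this attrs dict ultimately feeds into (variable names are hypothetical, and `nw` is assumed to be an existing `NodeWrangler`):

```python
# Sketch only: 'operation' is a node property rather than an input socket, so it
# is passed through attrs=..., while socket values travel via input_args /
# input_kwargs. `nw` is an assumed, pre-existing NodeWrangler instance.
from infinigen.core.nodes.node_wrangler import Nodes

value = nw.new_node(Nodes.Value)
value.outputs[0].default_value = 0.2500
multiply = nw.new_node(
    Nodes.Math, input_args=[value, 4.0], attrs={"operation": "MULTIPLY"}
)
```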
- ''' + """ attr_names = node_attrs_available(node) for a in COMMON_ATTR_NAMES: - if hasattr(node, a) and not a in attr_names: - raise ValueError(f'{node.bl_idname=} has attr {repr(a)} but it is not listed in node_info.NODE_ATTRS_AVAILABLE, please add it to avoid incorrect behavior') + if hasattr(node, a) and a not in attr_names: + raise ValueError( + f"{node.bl_idname=} has attr {repr(a)} but it is not listed in node_info.NODE_ATTRS_AVAILABLE, please add it to avoid incorrect behavior" + ) # Check that the dict is correct / doesnt contain typos for a in attr_names: if not hasattr(node, a): nodetype_expr = get_nodetype_expression(node) - raise ValueError(f"attrs_available[{nodetype_expr} is incorrect, real node {node} did not have an attribute '{a}' - please contact the developer") + raise ValueError( + f"attrs_available[{nodetype_expr} is incorrect, real node {node} did not have an attribute '{a}' - please contact the developer" + ) # Filter out the attrs which havent been changed from their default values - # clearly we dont need to set these ones manually, so we can save code verbosity - attr_names = [a for a in attr_names if has_default_value_changed(node_tree, node, a)] + attr_names = [ + a for a in attr_names if has_default_value_changed(node_tree, node, a) + ] - return {repr(k): represent_default_value(getattr(node, k), simple=True) for k in attr_names} + return { + repr(k): represent_default_value(getattr(node, k), simple=True) + for k in attr_names + } -def create_inputs_dict(node_tree, node, memo): - ''' +def create_inputs_dict(node_tree, node, memo): + """ Produce some `code` that instantiates all node INPUTS to `node`, as well as a python dict `inputs_dict` containing all the variable - names that should be used to refer to each of the inputs we instantiated - ''' + names that should be used to refer to each of the inputs we instantiated + """ inputs_dict = {} code = "" def update_inputs(i, k, v): - is_input_name_unique = ([socket.name for socket in node.inputs].count(k) == 1) + is_input_name_unique = [socket.name for socket in node.inputs].count(k) == 1 k = repr(k) if is_input_name_unique else i - if not k in inputs_dict: + if k not in inputs_dict: inputs_dict[k] = v else: if not isinstance(inputs_dict[k], list): @@ -385,25 +485,31 @@ def update_inputs(i, k, v): links = get_connected_link(node_tree, input_socket) if links is None: - if hasattr(input_socket, 'default_value'): + if hasattr(input_socket, "default_value"): if not has_default_value_changed(node_tree, node, input_socket): continue - input_expression, targets = represent_default_value(input_socket.default_value, simple=False) + input_expression, targets = represent_default_value( + input_socket.default_value, simple=False + ) new_transpile_targets.update(targets) update_inputs(i, input_name, input_expression) continue for link in links: - if not link.from_socket.enabled: - logger.warning(f'Transpiler encountered link from disabled socket {link.from_socket}, ignoring it') + logger.warning( + f"Transpiler encountered link from disabled socket {link.from_socket}, ignoring it" + ) continue if not link.to_socket.enabled: - logger.warning(f'Transpiler encountered link to disabled socket {link.to_socket}, ignoring it') + logger.warning( + f"Transpiler encountered link to disabled socket {link.to_socket}, ignoring it" + ) continue input_varname, input_code, targets = create_node( - node_tree, link.from_node, memo) + node_tree, link.from_node, memo + ) code += input_code new_transpile_targets.update(targets) @@ -411,124 
+517,138 @@ def update_inputs(i, k, v): input_expression = input_varname else: socket_name = link.from_socket.name - input_expression = f"{input_varname}.outputs[\"{socket_name}\"]" + input_expression = f'{input_varname}.outputs["{socket_name}"]' # Catch shared socket output names - if link.from_node.outputs[socket_name].identifier != link.from_socket.identifier: - from_idx = [i for i, o in enumerate(link.from_node.outputs) if o.identifier == link.from_socket.identifier][0] + if ( + link.from_node.outputs[socket_name].identifier + != link.from_socket.identifier + ): + from_idx = [ + i + for i, o in enumerate(link.from_node.outputs) + if o.identifier == link.from_socket.identifier + ][0] input_expression = f"{input_varname}.outputs[{from_idx}]" update_inputs(i, input_name, input_expression) return inputs_dict, code, new_transpile_targets + def repr_iter_val(v): if isinstance(v, list): return represent_list(v) elif isinstance(v, str): - return v # String are assumed to be code variables to get passed through + return v # String are assumed to be code variables to get passed through else: return represent_default_value(v, simple=True) -def represent_list(inputs, spacing=' '): + +def represent_list(inputs, spacing=" "): inputs = [repr_iter_val(x) for x in inputs] - return '[' + f",{spacing}".join(inputs) + ']' + return "[" + f",{spacing}".join(inputs) + "]" -def represent_tuple(inputs, spacing=' '): + +def represent_tuple(inputs, spacing=" "): inputs = [repr_iter_val(x) for x in inputs] for x in inputs: assert isinstance(x, str), x - return '(' + f",{spacing}".join(inputs) + ')' + return "(" + f",{spacing}".join(inputs) + ")" -def represent_dict(inputs_dict, spacing=' '): - vals = f',{spacing}'.join(f"{k}: {repr_iter_val(v)}" - for k, v in inputs_dict.items()) - return '{' + vals + '}' -def get_varname(node, taken): +def represent_dict(inputs_dict, spacing=" "): + vals = f",{spacing}".join( + f"{k}: {repr_iter_val(v)}" for k, v in inputs_dict.items() + ) + return "{" + vals + "}" - ''' + +def get_varname(node, taken): + """ Choose a sensible python variable name to represent `node`, notably one which isnt in the list of already used variable names `taken` - ''' + """ if node.label: - name = node.label.split('~')[0].strip() # remove any allowed postprocessor flags + name = node.label.split("~")[ + 0 + ].strip() # remove any allowed postprocessor flags name = snake_case(name) - elif hasattr(node, 'operation'): + elif hasattr(node, "operation"): # name the math nodes after their operations, for readability name = snake_case(node.operation.lower()) elif node.bl_idname == "GeometryNodeGroup": name = snake_case(node.node_tree.name.lower()) else: # for all other nodes, use the node.name, which should be unique to this node - name, *rest = node.name.split('.') - name = name.lower().replace(' ', '_') + name, *rest = node.name.split(".") + name = name.lower().replace(" ", "_") if len(rest) > 0: assert len(rest) == 1 - name += '_' + str(int(rest[0])) + name += "_" + str(int(rest[0])) - name = re.sub('[^0-9a-zA-Z_]+', '_', name) - name = re.sub('_+', '_', name) - name = name.strip('_') + name = re.sub("[^0-9a-zA-Z_]+", "_", name) + name = re.sub("_+", "_", name) + name = name.strip("_") if keyword.iskeyword(name): - name = 'op_' + name + name = "op_" + name if name in taken: i = 1 - while f'{name}_{i}' in taken: + while f"{name}_{i}" in taken: i += 1 - name = f'{name}_{i}' + name = f"{name}_{i}" return name -def get_nodetype_expression(node): - ''' +def get_nodetype_expression(node): + """ Produce a 
python expression to be passed into the node_type input of NodeWrangler.new_node. IE, return either the node.bl_idname, or an alias for that name if one exists - ''' + """ id = node.bl_idname - lookup = {getattr(Nodes, k): k for k in dir(Nodes) if not k.startswith('__')} + lookup = {getattr(Nodes, k): k for k in dir(Nodes) if not k.startswith("__")} if id in lookup: - return f'Nodes.{lookup[id]}' - elif id.endswith('NodeGroup'): + return f"Nodes.{lookup[id]}" + elif id.endswith("NodeGroup"): return repr(node.node_tree.name) else: - node_name = node.name.split('.')[0].replace(' ', '') + node_name = node.name.split(".")[0].replace(" ", "") logger.warning( - f'Please add an alias for \"{id}\" in nodes.node_info.Nodes.' - f'\n\t Suggestion: {node_name} = {repr(id)}' + f'Please add an alias for "{id}" in nodes.node_info.Nodes.' + f"\n\t Suggestion: {node_name} = {repr(id)}" ) return repr(id) -def create_node(node_tree, node, memo): +def create_node(node_tree, node, memo): if node.name in memo: return memo[node.name], "", {} - + idname = node.bl_idname if idname in SINGLETON_NODES: for n in memo: if node_tree.nodes[n].bl_idname == idname: return memo[n], "", {} - + code = "" new_transpile_targets = {} new_node_args = [] - if node.bl_idname.endswith('NodeGroup'): + if node.bl_idname.endswith("NodeGroup"): # node group will be transpiled to a function, then the funcname will be mapped to the nodegroup name by a decorator funcname = get_func_name(node) new_transpile_targets[funcname] = node - nodetype_expr = f'{funcname}().name' + nodetype_expr = f"{funcname}().name" else: nodetype_expr = get_nodetype_expression(node) new_node_args.append(nodetype_expr) @@ -538,35 +658,41 @@ def create_node(node_tree, node, memo): new_transpile_targets.update(targets) code += inputs_code if len(inputs_dict) > 0: - new_node_args.append(f'input_kwargs={represent_dict(inputs_dict)}') + new_node_args.append(f"input_kwargs={represent_dict(inputs_dict)}") if node.label: - new_node_args.append(f'label={repr(node.label)}') + new_node_args.append(f"label={repr(node.label)}") # Special case: input node if node.bl_idname == Nodes.GroupInput: all_inps = [] for inp in node_tree.inputs: - repr_val, targets = represent_default_value(inp.default_value, simple=False) if hasattr(inp, 'default_value') else (None, {}) + repr_val, targets = ( + represent_default_value(inp.default_value, simple=False) + if hasattr(inp, "default_value") + else (None, {}) + ) new_transpile_targets.update(targets) - all_inps.append(f'({repr(inp.bl_socket_idname)}, {repr(inp.name)}, {repr_val})') + all_inps.append( + f"({repr(inp.bl_socket_idname)}, {repr(inp.name)}, {repr_val})" + ) - args = represent_list(all_inps, spacing='\n'+2*indent_string) + args = represent_list(all_inps, spacing="\n" + 2 * indent_string) new_node_args.append(f"expose_input={args}") # Add code to set the correct 'attrs', ie set the math operations attrs_dict = create_attrs_dict(node_tree, node) if len(attrs_dict) > 0: - new_node_args.append(f'attrs={represent_dict(attrs_dict)}') + new_node_args.append(f"attrs={represent_dict(attrs_dict)}") # Compose the final nw.new_node() function call varname = get_varname(node, taken=list(memo.values())) if sum(len(x) for x in new_node_args) > LINE_LEN: - arg_sep = ',\n' + indent_string + arg_sep = ",\n" + indent_string else: - arg_sep = ', ' + arg_sep = ", " new_node_args_str = arg_sep.join(new_node_args) - code += f'{varname} = nw.new_node({new_node_args_str})\n' + code += f"{varname} = nw.new_node({new_node_args_str})\n" # Handle various 
special case nodes that dont behave like the others if node.bl_idname == Nodes.ColorRamp: @@ -576,31 +702,33 @@ def create_node(node_tree, node, memo): elif node.bl_idname in VALUE_NODES: code += special_case_value(node, varname) - code += '\n' + code += "\n" memo[node.name] = varname return varname, code, new_transpile_targets -def get_node_tree(target): - ''' +def get_node_tree(target): + """ Blender stores the node tree as a either 'node_group' or 'node_tree' depending on what the target is - ''' + """ - if hasattr(target, 'bl_idname') and target.bl_idname.endswith('NodeGroup'): + if hasattr(target, "bl_idname") and target.bl_idname.endswith("NodeGroup"): return target.node_tree elif isinstance(target, bpy.types.NodesModifier): return target.node_group elif isinstance(target, (bpy.types.Material, bpy.types.World, bpy.types.Scene)): return target.node_tree else: - raise ValueError(f'Couldnt infer node tree from {target=}, {type(target)=}, please contact the developer') + raise ValueError( + f"Couldnt infer node tree from {target=}, {type(target)=}, please contact the developer" + ) -def write_function_body(target): - ''' +def write_function_body(target): + """ Construct a python function body which will produce the node_tree of the `target` - ''' + """ output_node_id = OUTPUT_NODE_IDS[type(target)] node_tree = get_node_tree(target) @@ -609,48 +737,53 @@ def write_function_body(target): output_node = next(n for n in node_tree.nodes if n.bl_idname == output_node_id) except StopIteration: logging.info([n.bl_idname for n in node_tree.nodes]) - raise ValueError(f'Couldnt find expected {output_node_id=} for node tree type {node_tree.bl_idname=}') + raise ValueError( + f"Couldnt find expected {output_node_id=} for node tree type {node_tree.bl_idname=}" + ) memo = {} - final_varname, code, new_transpile_targets = create_node(node_tree, output_node, memo) + final_varname, code, new_transpile_targets = create_node( + node_tree, output_node, memo + ) return code, new_transpile_targets + def snake_case(name): - name = name.replace(' ', '_').replace('.', '_') - name = re.sub(r'(? 1).any(): raise ValueError( - f'expose_input with {names} features duplicate entries. in bl3.5 this is invalid.') + f"expose_input with {names} features duplicate entries. in bl3.5 this is invalid." + ) for inp in expose_input: nodeclass, name, val = inp self.expose_input(name, val=val, dtype=nodeclass) return node - def expose_input(self, name, val=None, attribute=None, dtype=None, use_namednode=False): - ''' + def expose_input( + self, name, val=None, attribute=None, dtype=None, use_namednode=False + ): + """ Expose an input to the nodegroups interface, making it able to be specified externally If this nodegroup is - ''' + """ if attribute is not None: if self.modifier is None and val is None: raise ValueError( - 'Attempted to use expose_input(attribute=...) on NodeWrangler constructed from ' - 'node_tree.\n' - 'Please construct by passing in the modifier instead, or specify expose_input(val=..., ' - 'attribute=...) to provide a fallback') + "Attempted to use expose_input(attribute=...) on NodeWrangler constructed from " + "node_tree.\n" + "Please construct by passing in the modifier instead, or specify expose_input(val=..., " + "attribute=...) 
to provide a fallback" + ) if use_namednode: assert dtype is not None - return self.new_node(Nodes.NamedAttribute, [name], attrs={'data_type': dtype}) + return self.new_node( + Nodes.NamedAttribute, [name], attrs={"data_type": dtype} + ) group_input = self.new_node(Nodes.GroupInput) # will reuse singleton @@ -263,40 +306,41 @@ def prepare_cast(to_type, val): return val if val is not None: - if not hasattr(inp, 'default_value') or inp.default_value is None: + if not hasattr(inp, "default_value") or inp.default_value is None: raise ValueError( - f'expose_input() recieved {val=} but inp {inp} does not expect a default_value') + f"expose_input() recieved {val=} but inp {inp} does not expect a default_value" + ) inp.default_value = prepare_cast(type(inp.default_value), val) if self.modifier is not None: id = inp.identifier if val is not None: curr_mod_inp_val = self.modifier[id] - if hasattr(curr_mod_inp_val, 'real'): + if hasattr(curr_mod_inp_val, "real"): self.modifier[id] = prepare_cast(type(curr_mod_inp_val.real), val) if attribute is not None: - self.modifier[f'{id}_attribute_name'] = attribute - self.modifier[f'{id}_use_attribute'] = 1 + self.modifier[f"{id}_attribute_name"] = attribute + self.modifier[f"{id}_use_attribute"] = 1 assert len([o for o in group_input.outputs if o.name == name]) == 1 return group_input.outputs[name] @staticmethod def _infer_nodeclass_from_args(dtype, val=None): - ''' + """ We will allow the user to request a 'dtype' that is a python type, blender datatype, or blender nodetype. All of these must be mapped to some node_info.NODECLASS in order to create a node. Optionally, we can try to infer a nodeclass from the type of a provided `val` - ''' + """ if dtype is None: if val is not None: datatype = node_info.PYTYPE_TO_DATATYPE[type(val)] else: # assert attribute is not None - datatype = 'FLOAT_VECTOR' + datatype = "FLOAT_VECTOR" return node_info.DATATYPE_TO_NODECLASS[datatype] else: if dtype in node_info.NODECLASSES: @@ -307,29 +351,34 @@ def _infer_nodeclass_from_args(dtype, val=None): elif dtype in node_info.PYTYPE_TO_DATATYPE: datatype = node_info.PYTYPE_TO_DATATYPE[dtype] else: - raise ValueError(f'Could not parse {dtype=}') + raise ValueError(f"Could not parse {dtype=}") return node_info.DATATYPE_TO_NODECLASS[datatype] def _update_socket(self, input_socket, input_item): output_socket = infer_output_socket(input_item) - if output_socket is None and hasattr(input_socket, 'default_value'): + if output_socket is None and hasattr(input_socket, "default_value"): # we couldnt parse the inp to be any kind of node, it must be a default_value for us to assign try: input_socket.default_value = input_item return except TypeError as e: - print(f'TypeError while assigning {input_item=} as default_value for {input_socket.name}') + print( + f"TypeError while assigning {input_item=} as default_value for {input_socket.name}" + ) raise e self.links.new(output_socket, input_socket) def connect_input(self, input_socket, input_item): - if isinstance(input_item, list) and any(infer_output_socket(i) is not None for i in input_item): + if isinstance(input_item, list) and any( + infer_output_socket(i) is not None for i in input_item + ): if not input_socket.is_multi_input: raise ValueError( - f'list of sockets {input_item} is not valid to connect to {input_socket} as it is not a ' - f'valid multi-input socket') + f"list of sockets {input_item} is not valid to connect to {input_socket} as it is not a " + f"valid multi-input socket" + ) for inp in input_item: 
self._update_socket(input_socket, inp) else: @@ -343,14 +392,17 @@ def _make_node(self, node_type): except StopIteration: node = self.nodes.new(node_type) elif node_type in bpy.data.node_groups: - assert node_type not in [getattr(Nodes, k) for k in dir(Nodes) if not k.startswith( - '__')], f'Someone has made a node_group named {node_type}, which is also the name of a ' \ - f'regular node' + assert node_type not in [ + getattr(Nodes, k) for k in dir(Nodes) if not k.startswith("__") + ], ( + f"Someone has made a node_group named {node_type}, which is also the name of a " + f"regular node" + ) nodegroup_type = { - 'ShaderNodeTree': 'ShaderNodeGroup', - 'GeometryNodeTree': 'GeometryNodeGroup', - 'CompositorNodeTree': 'CompositorNodeGroup' + "ShaderNodeTree": "ShaderNodeGroup", + "GeometryNodeTree": "GeometryNodeGroup", + "CompositorNodeTree": "CompositorNodeGroup", }[bpy.data.node_groups[node_type].bl_idname] node = self.nodes.new(nodegroup_type) @@ -361,7 +413,7 @@ def _make_node(self, node_type): return node def get_position_translation_seed(self, i): - if not i in self.position_translation_seed: + if i not in self.position_translation_seed: self.position_translation_seed[i] = random_vector3() return self.position_translation_seed[i] @@ -370,32 +422,52 @@ def find(self, name): def find_recursive(self, name): return [(self, n) for n in self.find(name)] + sum( - (NodeWrangler(n.node_tree).find_recursive(name) for n in self.nodes if n.type == 'GROUP'), []) + ( + NodeWrangler(n.node_tree).find_recursive(name) + for n in self.nodes + if n.type == "GROUP" + ), + [], + ) def find_from(self, to_socket): return [l for l in self.links if l.to_socket == to_socket] def find_from_recursive(self, name): return [(self, n) for n in self.find(name)] + sum( - (NodeWrangler(n.node_tree).find_from_recursive(name) for n in self.nodes if n.type == 'GROUP'), []) + ( + NodeWrangler(n.node_tree).find_from_recursive(name) + for n in self.nodes + if n.type == "GROUP" + ), + [], + ) def find_to(self, from_socket): return [l for l in self.links if l.from_socket == from_socket] def find_to_recursive(self, name): return [(self, n) for n in self.find(name)] + sum( - (NodeWrangler(n.node_tree).find_to_recursive(name) for n in self.nodes if n.type == 'GROUP'), []) + ( + NodeWrangler(n.node_tree).find_to_recursive(name) + for n in self.nodes + if n.type == "GROUP" + ), + [], + ) @staticmethod def is_socket(node): - return isinstance(node, bpy.types.NodeSocket) or isinstance(node, bpy.types.Node) + return isinstance(node, bpy.types.NodeSocket) or isinstance( + node, bpy.types.Node + ) @staticmethod def is_vector_socket(node): if isinstance(node, bpy.types.Node): node = [o for o in node.outputs if o.enabled][0] if isinstance(node, bpy.types.NodeSocket): - return 'VECTOR' in node.type + return "VECTOR" in node.type return isinstance(node, Iterable) def add2(self, *nodes): @@ -474,38 +546,63 @@ def scale(self, *nodes): x, y = y, x elif isinstance(y, Iterable): x, y = y, x - return self.new_node(Nodes.VectorMath, input_kwargs={'Vector': x, 'Scale': y}, - attrs={"operation": "SCALE"}) + return self.new_node( + Nodes.VectorMath, + input_kwargs={"Vector": x, "Scale": y}, + attrs={"operation": "SCALE"}, + ) def dot(self, *nodes): - return self.new_node(Nodes.VectorMath, attrs={'operation': 'DOT_PRODUCT'}, input_args=nodes) + return self.new_node( + Nodes.VectorMath, attrs={"operation": "DOT_PRODUCT"}, input_args=nodes + ) def math(self, node_type, *nodes): - return self.new_node(Nodes.Math, attrs={'operation': node_type}, 
input_args=nodes) + return self.new_node( + Nodes.Math, attrs={"operation": node_type}, input_args=nodes + ) def vector_math(self, node_type, *nodes): - return self.new_node(Nodes.VectorMath, attrs={'operation': node_type}, input_args=nodes) + return self.new_node( + Nodes.VectorMath, attrs={"operation": node_type}, input_args=nodes + ) def boolean_math(self, node_type, *nodes): - return self.new_node(Nodes.BooleanMath, attrs={'operation': node_type}, input_args=nodes) + return self.new_node( + Nodes.BooleanMath, attrs={"operation": node_type}, input_args=nodes + ) def compare(self, node_type, *nodes): - return self.new_node(Nodes.Compare, attrs={'operation': node_type}, input_args=nodes) + return self.new_node( + Nodes.Compare, attrs={"operation": node_type}, input_args=nodes + ) def compare_direction(self, node_type, x, y, angle): - return self.new_node(Nodes.Compare, input_kwargs={'A': x, 'B': y, 'Angle': angle}, - attrs={'data_type': 'VECTOR', 'mode': 'DIRECTION', 'operation': node_type}) + return self.new_node( + Nodes.Compare, + input_kwargs={"A": x, "B": y, "Angle": angle}, + attrs={"data_type": "VECTOR", "mode": "DIRECTION", "operation": node_type}, + ) def bernoulli(self, prob, seed=None): - if seed is None: seed = np.random.randint(1e5) - return self.new_node(Nodes.RandomValue, input_kwargs={'Probability': prob, 'Seed': seed}, - attrs={'data_type': 'BOOLEAN'}) - - def uniform(self, low=0., high=1., seed=None, data_type='FLOAT'): - if seed is None: seed = np.random.randint(1e5) - if isinstance(low, Iterable): data_type = 'FLOAT_VECTOR' - return self.new_node(Nodes.RandomValue, input_kwargs={'Min': low, 'Max': high, 'Seed': seed}, - attrs={'data_type': data_type}) + if seed is None: + seed = np.random.randint(1e5) + return self.new_node( + Nodes.RandomValue, + input_kwargs={"Probability": prob, "Seed": seed}, + attrs={"data_type": "BOOLEAN"}, + ) + + def uniform(self, low=0.0, high=1.0, seed=None, data_type="FLOAT"): + if seed is None: + seed = np.random.randint(1e5) + if isinstance(low, Iterable): + data_type = "FLOAT_VECTOR" + return self.new_node( + Nodes.RandomValue, + input_kwargs={"Min": low, "Max": high, "Seed": seed}, + attrs={"data_type": data_type}, + ) def combine(self, x, y, z): return self.new_node(Nodes.CombineXYZ, [x, y, z]) @@ -513,37 +610,67 @@ def combine(self, x, y, z): def separate(self, x): return self.new_node(Nodes.SeparateXYZ, [x]).outputs - def switch(self, pred, true, false, input_type='FLOAT'): - return self.new_node(Nodes.Switch, input_kwargs={'Switch': pred, 'True': true, 'False': false}, - attrs={'input_type': input_type}) + def switch(self, pred, true, false, input_type="FLOAT"): + return self.new_node( + Nodes.Switch, + input_kwargs={"Switch": pred, "True": true, "False": false}, + attrs={"input_type": input_type}, + ) def vector_switch(self, pred, true, false): - return self.new_node(Nodes.Switch, input_kwargs={'Switch': pred, 'True': true, 'False': false}, - attrs={'input_type': 'VECTOR'}) + return self.new_node( + Nodes.Switch, + input_kwargs={"Switch": pred, "True": true, "False": false}, + attrs={"input_type": "VECTOR"}, + ) def geometry2point(self, geometry): - return self.new_node(Nodes.MergeByDistance, input_kwargs={'Geometry': geometry, 'Distance': 100.}) + return self.new_node( + Nodes.MergeByDistance, + input_kwargs={"Geometry": geometry, "Distance": 100.0}, + ) def position2point(self, position): - return self.new_node(Nodes.MeshLine, input_kwargs={'Count': 1, 'Start Location': position}) + return self.new_node( + Nodes.MeshLine, 
input_kwargs={"Count": 1, "Start Location": position} + ) def capture(self, geometry, attribute, attrs=None): - if attrs is None: attrs = {} - capture = self.new_node(Nodes.CaptureAttribute, input_kwargs={'Geometry': geometry, 'Value': attribute}, - attrs=attrs) - return capture.outputs['Geometry'], capture.outputs['Attribute'] + if attrs is None: + attrs = {} + capture = self.new_node( + Nodes.CaptureAttribute, + input_kwargs={"Geometry": geometry, "Value": attribute}, + attrs=attrs, + ) + return capture.outputs["Geometry"], capture.outputs["Attribute"] def musgrave(self, scale=10, vector=None): - return self.new_node(Nodes.MapRange, - [self.new_node(Nodes.MusgraveTexture, [vector], input_kwargs={'Scale': scale}), -1, - 1, 0, 1]) + return self.new_node( + Nodes.MapRange, + [ + self.new_node( + Nodes.MusgraveTexture, [vector], input_kwargs={"Scale": scale} + ), + -1, + 1, + 0, + 1, + ], + ) def curve2mesh(self, curve, profile_curve=None): - return self.new_node(Nodes.SetShadeSmooth, - [self.new_node(Nodes.CurveToMesh, [curve, profile_curve, True]), None, False]) - - def build_float_curve(self, x, anchors, handle='VECTOR'): - float_curve = self.new_node(Nodes.FloatCurve, input_kwargs={'Value': x}) + return self.new_node( + Nodes.SetShadeSmooth, + [ + self.new_node(Nodes.CurveToMesh, [curve, profile_curve, True]), + None, + False, + ], + ) + + def build_float_curve(self, x, anchors, handle="VECTOR"): + float_curve = self.new_node(Nodes.FloatCurve, input_kwargs={"Value": x}) c = float_curve.mapping.curves[0] for i, p in enumerate(anchors): if i < 2: @@ -554,11 +681,13 @@ def build_float_curve(self, x, anchors, handle='VECTOR'): float_curve.mapping.use_clip = False return float_curve - def build_case(self, value, inputs, outputs, input_type='FLOAT'): + def build_case(self, value, inputs, outputs, input_type="FLOAT"): node = outputs[-1] for i, o in zip(inputs[:-1], outputs[:-1]): - node = self.switch(self.compare('EQUAL', value, i), o, node, input_type) + node = self.switch(self.compare("EQUAL", value, i), o, node, input_type) return node def build_index_case(self, inputs): - return self.build_case(self.new_node(Nodes.Index), inputs + [-1], [True] * len(inputs) + [False]) + return self.build_case( + self.new_node(Nodes.Index), inputs + [-1], [True] * len(inputs) + [False] + ) diff --git a/infinigen/core/nodes/nodegroups/transfer_attributes.py b/infinigen/core/nodes/nodegroups/transfer_attributes.py index 3c8284e65..923825052 100644 --- a/infinigen/core/nodes/nodegroups/transfer_attributes.py +++ b/infinigen/core/nodes/nodegroups/transfer_attributes.py @@ -4,19 +4,16 @@ # Authors: Alexander Raistrick -import bpy -import mathutils import numpy as np -from numpy.random import uniform, normal -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.nodes import node_utils, node_info + from infinigen.core import surface +from infinigen.core.nodes.node_wrangler import Nodes from infinigen.core.util import blender as butil -def uvs_to_attribute(obj, name='uv_map'): - assert obj.type == 'MESH' - +def uvs_to_attribute(obj, name="uv_map"): + assert obj.type == "MESH" + n = len(obj.data.vertices) data = np.empty((n, 3), dtype=np.float32) @@ -24,35 +21,39 @@ def uvs_to_attribute(obj, name='uv_map'): u, v = obj.data.uv_layers.active.data[loop.index].uv data[loop.vertex_index] = u, v, 0 - attr = obj.data.attributes.new(name, type='FLOAT_VECTOR', domain='POINT') - attr.data.foreach_set('vector', data.reshape(-1)) + attr = obj.data.attributes.new(name, type="FLOAT_VECTOR", 
domain="POINT") + attr.data.foreach_set("vector", data.reshape(-1)) return attr -def attribute_to_uvs(obj, attr_name): - assert obj.type == 'MESH' +def attribute_to_uvs(obj, attr_name): + assert obj.type == "MESH" obj.data.uv_layers.active = obj.data.uv_layers.new() - + n = len(obj.data.vertices) data = np.empty(n * 3, dtype=np.float32) - obj.data.attributes[attr_name].data.foreach_get('vector', data) + obj.data.attributes[attr_name].data.foreach_get("vector", data) data = data.reshape((n, 3)) for loop in obj.data.loops: u, v, _ = data[loop.vertex_index] obj.data.uv_layers.active.data[loop.index].uv = (u, v) + # list of supported data type: # https://docs.blender.org/api/current/bpy.types.GeometryNodeCaptureAttribute.html + def transfer_all(source, target, attributes=None, uvs=False): - assert source.type == 'MESH' - assert target.type == 'MESH' + assert source.type == "MESH" + assert target.type == "MESH" if attributes is None: - attributes = [a.name for a in source.data.attributes if not butil.blender_internal_attr(a)] + attributes = [ + a.name for a in source.data.attributes if not butil.blender_internal_attr(a) + ] if len(source.data.uv_layers) == 0: uvs = False @@ -64,15 +65,25 @@ def transfer_all(source, target, attributes=None, uvs=False): dtypes = [source.data.attributes[n].data_type for n in attributes] domains = [source.data.attributes[n].domain for n in attributes] - surface.add_geomod(source, transfer_att_node, - input_kwargs={'source': source, - 'target': target, - 'attribute_to_transfer_list': list(zip(attributes, dtypes))}, - attributes=attributes, apply=True, domains=domains) - - surface.add_geomod(target, copy_geom_info, - input_kwargs={'source': source, 'target': target}, - apply=True) + surface.add_geomod( + source, + transfer_att_node, + input_kwargs={ + "source": source, + "target": target, + "attribute_to_transfer_list": list(zip(attributes, dtypes)), + }, + attributes=attributes, + apply=True, + domains=domains, + ) + + surface.add_geomod( + target, + copy_geom_info, + input_kwargs={"source": source, "target": target}, + apply=True, + ) if uvs: attribute_to_uvs(target, uv_att_name) @@ -80,33 +91,43 @@ def transfer_all(source, target, attributes=None, uvs=False): def copy_geom_info(nw, source, target): # simply copy the geom back to the target from source - object_info = nw.new_node(Nodes.ObjectInfo, input_kwargs={'Object': source}) - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': object_info.outputs["Geometry"], }) + object_info = nw.new_node(Nodes.ObjectInfo, input_kwargs={"Object": source}) + nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": object_info.outputs["Geometry"], + }, + ) def transfer_att_node(nw, source, target, attribute_to_transfer_list=[]): # create a geom node in the non-remeshed version of the mesh (i.e., source) - object_info = nw.new_node(Nodes.ObjectInfo, input_kwargs={'Object': target}) - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketGeometry', 'Geometry', None), ]) + object_info = nw.new_node(Nodes.ObjectInfo, input_kwargs={"Object": target}) + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketGeometry", "Geometry", None), + ], + ) for att_name, att_type in attribute_to_transfer_list: nw.expose_input(att_name, attribute=att_name) position = nw.new_node(Nodes.InputPosition) - group_output_sockets = {'Geometry': object_info.outputs["Geometry"]} + group_output_sockets = {"Geometry": object_info.outputs["Geometry"]} for att_name, att_type in 
attribute_to_transfer_list: transfer_attribute = nw.new_node( Nodes.SampleNearestSurface, - attrs={'data_type': att_type}, + attrs={"data_type": att_type}, input_kwargs={ - 'Mesh': group_input.outputs["Geometry"], - 'Value': group_input.outputs[att_name], - 'Sample Position': position - }) + "Mesh": group_input.outputs["Geometry"], + "Value": group_input.outputs[att_name], + "Sample Position": position, + }, + ) - group_output_sockets[att_name] = (transfer_attribute, 'Value') + group_output_sockets[att_name] = (transfer_attribute, "Value") - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs=group_output_sockets) + nw.new_node(Nodes.GroupOutput, input_kwargs=group_output_sockets) diff --git a/infinigen/core/nodes/shader_utils.py b/infinigen/core/nodes/shader_utils.py index bf4f52f21..d3b9eb1e0 100644 --- a/infinigen/core/nodes/shader_utils.py +++ b/infinigen/core/nodes/shader_utils.py @@ -5,19 +5,20 @@ import bpy + def find_displacement_node(mat): links = mat.node_tree.links shader_nodes = mat.node_tree.nodes - outputNode = shader_nodes['Material Output'] + outputNode = shader_nodes["Material Output"] displacement_node = None for link in links: - if (link.to_node == outputNode and link.to_socket.name == 'Displacement'): + if link.to_node == outputNode and link.to_socket.name == "Displacement": displacement_node = link.from_node break return displacement_node - -def convert_shader_displacement(mat : bpy.types.Material): + +def convert_shader_displacement(mat: bpy.types.Material): mat_copy = mat.copy() mat_copy.name = mat.name + "_copy" @@ -34,20 +35,20 @@ def convert_shader_displacement(mat : bpy.types.Material): new_scale = (height - mid_level) * scale shader_nodes.remove(displacement_node) - geo_node_group = bpy.data.node_groups.new('GeometryNodes', 'GeometryNodeTree') - group_input = geo_node_group.nodes.new('NodeGroupInput') - group_output = geo_node_group.nodes.new('NodeGroupOutput') - geo_node_group.outputs.new('NodeSocketGeometry', 'Geometry') - geo_node_group.inputs.new('NodeSocketGeometry', 'Geometry') - set_pos = geo_node_group.nodes.new('GeometryNodeSetPosition') - normal = geo_node_group.nodes.new('GeometryNodeInputNormal') - scale = geo_node_group.nodes.new('ShaderNodeVectorMath') - scale.operation = 'SCALE' + geo_node_group = bpy.data.node_groups.new("GeometryNodes", "GeometryNodeTree") + group_input = geo_node_group.nodes.new("NodeGroupInput") + group_output = geo_node_group.nodes.new("NodeGroupOutput") + geo_node_group.outputs.new("NodeSocketGeometry", "Geometry") + geo_node_group.inputs.new("NodeSocketGeometry", "Geometry") + set_pos = geo_node_group.nodes.new("GeometryNodeSetPosition") + normal = geo_node_group.nodes.new("GeometryNodeInputNormal") + scale = geo_node_group.nodes.new("ShaderNodeVectorMath") + scale.operation = "SCALE" scale.inputs["Scale"].default_value = new_scale - geo_node_group.links.new(group_input.outputs[0], set_pos.inputs['Geometry']) - geo_node_group.links.new(normal.outputs['Normal'], scale.inputs['Vector']) - geo_node_group.links.new(scale.outputs['Vector'], set_pos.inputs['Offset']) - geo_node_group.links.new(set_pos.outputs['Geometry'], group_output.inputs[0]) + geo_node_group.links.new(group_input.outputs[0], set_pos.inputs["Geometry"]) + geo_node_group.links.new(normal.outputs["Normal"], scale.inputs["Vector"]) + geo_node_group.links.new(scale.outputs["Vector"], set_pos.inputs["Offset"]) + geo_node_group.links.new(set_pos.outputs["Geometry"], group_output.inputs[0]) return mat_copy, geo_node_group diff --git 
a/infinigen/core/placement/__init__.py b/infinigen/core/placement/__init__.py index 0b2e9209a..0387f0098 100644 --- a/infinigen/core/placement/__init__.py +++ b/infinigen/core/placement/__init__.py @@ -1 +1,2 @@ -from . import camera \ No newline at end of file +from . import camera +from .factory import AssetFactory, make_asset_collection diff --git a/infinigen/core/placement/animation_policy.py b/infinigen/core/placement/animation_policy.py index 7e12cc522..d325c28e5 100644 --- a/infinigen/core/placement/animation_policy.py +++ b/infinigen/core/placement/animation_policy.py @@ -6,37 +6,38 @@ # - Zeyu Ma: Animation with path finding -from copy import deepcopy, copy import logging -import math +from copy import copy, deepcopy import bpy -import mathutils -from mathutils.bvhtree import BVHTree - import gin +import mathutils import numpy as np -from numpy.random import uniform as U, normal as N -from mathutils import Matrix, Vector, Euler -from tqdm import trange, tqdm +from mathutils import Euler, Vector +from mathutils.bvhtree import BVHTree +from numpy.random import normal as N +from numpy.random import uniform as U +from tqdm import tqdm import infinigen.assets.utils.mesh -from infinigen.assets.creatures.util.geometry.curve import Curve - -from infinigen.core.util.math import clip_gaussian, lerp -from infinigen.core.util.random import random_general -from infinigen.core.util import blender as butil +from infinigen.assets.utils.geometry.curve import Curve from infinigen.core.placement.path_finding import path_finding +from infinigen.core.util import blender as butil +from infinigen.core.util.math import lerp +from infinigen.core.util.random import random_general logger = logging.getLogger(__name__) + class PolicyError(ValueError): pass -def get_altitude(loc, scene_bvh, dir=Vector((0.,0.,-1.))): + +def get_altitude(loc, scene_bvh, dir=Vector((0.0, 0.0, -1.0))): *_, straight_down_dist = scene_bvh.ray_cast(loc, dir) return straight_down_dist + @gin.configurable def walk_same_altitude( start_loc, @@ -49,17 +50,15 @@ def walk_same_altitude( ignore_missed_rays=False, z_move_up=1, ): - - ''' + """ fall_ratio: what is the slope at which the camera is willing to go down / glide - ''' + """ # retry until we find something that doesnt walk off the map for retry in range(retries): - pos = start_loc + Vector(sampler()) - pos.z += z_move_up # move it up a ways, so that it can raycast back down onto something + pos.z += z_move_up # move it up a ways, so that it can raycast back down onto something curr_alt = get_altitude(start_loc, bvh) new_alt = get_altitude(pos, bvh) @@ -72,7 +71,9 @@ def walk_same_altitude( if curr_alt is None or new_alt is None: if curr_alt is None: raise PolicyError() - logger.debug(f'walk_same_altitude failed {retry=} with {curr_alt=}, {new_alt=}') + logger.debug( + f"walk_same_altitude failed {retry=} with {curr_alt=}, {new_alt=}" + ) continue fall_dist = new_alt - curr_alt @@ -90,18 +91,19 @@ def walk_same_altitude( return pos + @gin.configurable class AnimPolicyBrownian: - def __init__(self, speed=3, pos_var=15.0): self.speed = speed self.pos_var = pos_var - def __call__(self, obj, frame_curr, bvh, retry_pct): - speed = random_general(self.speed) - sampler = lambda: N(0, [self.pos_var, self.pos_var, 0.5]) + + def sampler(): + return N(0, [self.pos_var, self.pos_var, 0.5]) + pos = walk_same_altitude(obj.location, sampler, bvh) time = np.linalg.norm(pos - obj.location) / speed @@ -109,32 +111,35 @@ def __call__(self, obj, frame_curr, bvh, retry_pct): return Vector(pos), 
Vector(rot), time, "BEZIER" + @gin.configurable class AnimPolicyPan: - - def __init__(self, speed=3, dist=("uniform", 5, 20), rot_var=[10, 0, 20]): + def __init__(self, speed=3, dist=("uniform", 5, 20), rot_var=[10, 0, 20]): self.speed = speed self.dist = dist self.rot_var = rot_var def __call__(self, obj, frame_curr, bvh, retry_pct): - speed = random_general(self.speed) + def sampler(): - theta = U(0, 2*np.pi) + theta = U(0, 2 * np.pi) zoff = np.sin(np.deg2rad(N(-30, 30))) - off = random_general(self.dist) * np.array([np.sin(theta), np.cos(theta), zoff]) + off = random_general(self.dist) * np.array( + [np.sin(theta), np.cos(theta), zoff] + ) off = off * lerp(1, 0.2, 1 - retry_pct) return off + pos = walk_same_altitude(obj.location, sampler, bvh=bvh) time = np.linalg.norm(pos - obj.location) / speed rot = np.array(obj.rotation_euler) + np.deg2rad(N(0, self.rot_var, 3)) return Vector(pos), Vector(rot), time, "LINEAR" + @gin.configurable class AnimPolicyRandomForwardWalk: - def __init__( self, forward_vec, @@ -152,8 +157,8 @@ def __init__( self.forward_vec = forward_vec def __call__(self, obj, frame_curr, bvh, retry_pct): - orig_rot = np.array(obj.rotation_euler) + def sampler(): obj.rotation_euler = tuple(orig_rot) obj.rotation_euler[2] += np.deg2rad(random_general(self.yaw_dist)) @@ -168,22 +173,21 @@ def sampler(): time = np.linalg.norm(pos - obj.location) / self.speed rot = np.array(obj.rotation_euler) + np.deg2rad(N(0, self.rot_vars, 3)) - return Vector(pos), Vector(rot), time, 'BEZIER' + return Vector(pos), Vector(rot), time, "BEZIER" + @gin.configurable class AnimPolicyRandomWalkLookaround: - def __init__( - self, - speed=('uniform', 1, 2.5), - step_speed_mult=('uniform', 0.5, 2), - yaw_sampler=('uniform',-20, 20), - step_range=('clip_gaussian', 3, 5, 0.5, 10), + self, + speed=("uniform", 1, 2.5), + step_speed_mult=("uniform", 0.5, 2), + yaw_sampler=("uniform", -20, 20), + step_range=("clip_gaussian", 3, 5, 0.5, 10), rot_vars=(5, 0, 5), - motion_dir_zoff=('clip_gaussian', 0, 90, 0, 180), - force_single_keyframe=False + motion_dir_zoff=("clip_gaussian", 0, 90, 0, 180), + force_single_keyframe=False, ): - self.speed = random_general(speed) self.step_speed_mult = step_speed_mult @@ -197,18 +201,20 @@ def __init__( self.force_single_keyframe = force_single_keyframe def __call__(self, obj, frame_curr, bvh, retry_pct): - if self.motion_dir_euler is None: self.motion_dir_euler = copy(obj.rotation_euler) self.motion_dir_euler[2] += np.deg2rad(random_general(self.motion_dir_zoff)) orig_motion_dir_euler = copy(self.motion_dir_euler) + def sampler(): self.motion_dir_euler = copy(orig_motion_dir_euler) self.motion_dir_euler[2] += np.deg2rad(random_general(self.yaw_sampler)) step = random_general(self.step_range) - off = Euler(self.motion_dir_euler, 'XYZ').to_matrix() @ Vector((0, 0, -step)) + off = Euler(self.motion_dir_euler, "XYZ").to_matrix() @ Vector( + (0, 0, -step) + ) off.z = 0 return off @@ -221,19 +227,21 @@ def sampler(): if self.force_single_keyframe: time = bpy.context.scene.frame_end - frame_curr - return Vector(pos), Vector(rot), time, 'BEZIER' + return Vector(pos), Vector(rot), time, "BEZIER" + @gin.configurable class AnimPolicyFollowObject: - def __init__( - self, target_obj, pois, bvh, + self, + target_obj, + pois, + bvh, zrot_vel_var=20, follow_zrot=0, - follow_rad_mult=('uniform', 1, 6), - alt_mult=('uniform', 0.25, 1) + follow_rad_mult=("uniform", 1, 6), + alt_mult=("uniform", 0.25, 1), ): - self.pois = pois self.target_obj = target_obj self.follow_zrot = follow_zrot @@ 
-249,44 +257,49 @@ def __init__( self.reset() def reset(self): - - ''' + """ Called at __init__ and whenever the animation aborts and retries - ''' + """ self.follow_obj = np.random.choice(self.pois) - if self.follow_obj.type == 'MESH': + if self.follow_obj.type == "MESH": self.follow_size = max(self.follow_obj.dimensions) else: self.follow_size = 2 - logger.warning(f'{self.follow_obj.name} had {self.follow_obj.type=}, using {self.follow_size=} instead of .dimensions') + logger.warning( + f"{self.follow_obj.name} had {self.follow_obj.type=}, using {self.follow_size=} instead of .dimensions" + ) follow_loc = self.follow_obj.matrix_world.translation off = follow_loc - self.target_obj.location s = self.follow_size * random_general(self.follow_rad_mult) self.target_obj.location = follow_loc + off.normalized() * s - self.target_obj.location.z = follow_loc.z + self.follow_size * random_general(self.alt_mult) + self.target_obj.location.z = follow_loc.z + self.follow_size * random_general( + self.alt_mult + ) alt = get_altitude(self.target_obj.location, self.bvh) if alt is None: - logger.warning(f'In AnimPolicyFollowObject.reset(), got {alt=}') + logger.warning(f"In AnimPolicyFollowObject.reset(), got {alt=}") if alt is not None and alt < 2: self.target_obj.location *= self.target_obj.location.z / 2 for c in self.target_obj.constraints: self.target_obj.constraints.remove(c) - butil.constrain_object(self.target_obj, 'TRACK_TO', target=self.follow_obj) + butil.constrain_object(self.target_obj, "TRACK_TO", target=self.follow_obj) def __call__(self, obj, frame_curr, bvh, retry_pct): - try: ts = [] for fc in self.follow_obj.animation_data.action.fcurves: for kp in fc.keyframe_points: ts.append(int(kp.co[0])) frame_next = min(t for t in ts if t > frame_curr) - except (ValueError, AttributeError): # no next frame, or no animation_data.action + except ( + ValueError, + AttributeError, + ): # no next frame, or no animation_data.action frame_next = frame_curr + bpy.context.scene.render.fps time = (frame_next - frame_curr) / bpy.context.scene.render.fps @@ -304,19 +317,21 @@ def __call__(self, obj, frame_curr, bvh, retry_pct): new_dist = np.clip(prev_dist + self.rad_vel, 0.7, 5) new_off = prev_off.normalized() * self.follow_size * new_dist - new_off = mathutils.Matrix.Rotation(np.deg2rad(zrot), 4, 'Z') @ new_off - pos = self.follow_obj.matrix_world.translation + new_off + new_off = mathutils.Matrix.Rotation(np.deg2rad(zrot), 4, "Z") @ new_off + pos = self.follow_obj.matrix_world.translation + new_off + + return Vector(pos), None, time, "BEZIER" - return Vector(pos), None, time, 'BEZIER' def validate_keyframe_range( obj, - start_frame, end_frame, - bvhtree, validate_pose_func=None, - stride=5, # runs faster but imperfect precision - check_straight_line=True # rules out proposals faster, but has imperfect precision + start_frame, + end_frame, + bvhtree, + validate_pose_func=None, + stride=5, # runs faster but imperfect precision + check_straight_line=True, # rules out proposals faster, but has imperfect precision ): - last_pos = deepcopy(obj.location) def freespace_ray_check(a, b): @@ -326,39 +341,39 @@ def freespace_ray_check(a, b): if check_straight_line: bpy.context.scene.frame_set(end_frame) if not freespace_ray_check(last_pos, obj.location): - logger.debug('straight line check failed') + logger.debug("straight line check failed") return False for frame_idx in range(start_frame, end_frame + 1, stride): bpy.context.scene.frame_set(frame_idx) if not freespace_ray_check(last_pos, obj.location): - 
logger.debug(f'{frame_idx=} freespace_ray_check failed') + logger.debug(f"{frame_idx=} freespace_ray_check failed") return False if validate_pose_func is not None and not validate_pose_func(obj): # technically we should validate against all cameras, but this would be expensive - logger.debug(f'{frame_idx} validate_pose_func failed') + logger.debug(f"{frame_idx} validate_pose_func failed") return False last_pos = deepcopy(obj.location) return True + def try_animate_trajectory( - obj: bpy.types.Object, - bvh: BVHTree, + obj: bpy.types.Object, + bvh: BVHTree, policy_func, - keyframe, duration_frames, + keyframe, + duration_frames, validate_pose_func=None, max_step_tries=50, verbose=True, ): - frame_curr = bpy.context.scene.frame_start pbar = tqdm(total=duration_frames) if verbose else None while frame_curr < bpy.context.scene.frame_start + duration_frames: - orig_loc = copy(obj.location) orig_rot = copy(obj.rotation_euler) for retry in range(max_step_tries): @@ -368,20 +383,24 @@ def try_animate_trajectory( loc, rot, duration, interp = policy_func( obj, frame_curr=frame_curr, - retry_pct=retry/max_step_tries, - bvh=bvh + retry_pct=retry / max_step_tries, + bvh=bvh, ) except PolicyError as e: - logger.debug(f'PolicyError on {retry=} {e=}') + logger.debug(f"PolicyError on {retry=} {e=}") continue step_frames = int(duration * bpy.context.scene.render.fps) + 1 step_end_frame = frame_curr + step_frames - keyframe(obj, loc, rot, step_end_frame, interp='BEZIER') + keyframe(obj, loc, rot, step_end_frame, interp="BEZIER") - if not validate_keyframe_range(obj, frame_curr, step_end_frame, bvh, validate_pose_func): - logger.debug(f'validate_keyframe_range failed on moving {obj.location} to {loc}') + if not validate_keyframe_range( + obj, frame_curr, step_end_frame, bvh, validate_pose_func + ): + logger.debug( + f"validate_keyframe_range failed on moving {obj.location} to {loc}" + ) # clear out the candidate keyframes we just inserted, they were no good for fc in obj.animation_data.action.fcurves: if fc.data_path == "": @@ -390,11 +409,13 @@ def try_animate_trajectory( continue if verbose: - pbar.update(min(step_frames, duration_frames - frame_curr)) # dont overshoot the pbar, it makes the formatting not nice + pbar.update( + min(step_frames, duration_frames - frame_curr) + ) # dont overshoot the pbar, it makes the formatting not nice - break # we found a good pose + break # we found a good pose - else: # for-else block triggers when for loop terminates w/o a break statement + else: # for-else block triggers when for loop terminates w/o a break statement return False frame_curr = step_end_frame @@ -405,19 +426,20 @@ def try_animate_trajectory( @gin.configurable def try_animate_with_pathfinding( - obj, bvh, policy_func, - keyframe, duration_frames, + obj, + bvh, + policy_func, + keyframe, + duration_frames, validate_pose_func, max_step_tries, verbose, bounding_box, turning_limit_degree=10, ): - frame_curr = bpy.context.scene.frame_start pbar = tqdm(total=duration_frames) if verbose else None while frame_curr < bpy.context.scene.frame_start + duration_frames: - orig_loc = copy(obj.location) orig_rot = copy(obj.rotation_euler) for retry in range(max_step_tries): @@ -427,17 +449,17 @@ def try_animate_with_pathfinding( loc, rot, duration, interp = policy_func( obj, frame_curr=frame_curr, - retry_pct=retry/max_step_tries, - bvh=bvh + retry_pct=retry / max_step_tries, + bvh=bvh, ) except PolicyError as e: - logger.debug(f'PolicyError on {retry=} {e=}') + logger.debug(f"PolicyError on {retry=} {e=}") continue 
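[Editorial aside, not part of the patch: every policy class reformatted in this file implements the same callable protocol consumed by `try_animate_trajectory` and `try_animate_with_pathfinding` — it is called as `policy_func(obj, frame_curr=..., retry_pct=..., bvh=...)`, returns `(loc, rot, duration, interp)`, and signals a failed proposal by raising `PolicyError` (directly, or via `walk_same_altitude`). A minimal sketch of a custom policy follows; the class name and stride/speed values are hypothetical, not taken from the patch.]

```python
import numpy as np
from mathutils import Vector

from infinigen.core.placement.animation_policy import walk_same_altitude


class AnimPolicyFixedStride:
    """Hypothetical example policy: fixed-length horizontal steps at constant speed."""

    def __init__(self, stride=2.0, speed=1.5):
        self.stride = stride
        self.speed = speed

    def __call__(self, obj, frame_curr, bvh, retry_pct):
        def sampler():
            # propose a horizontal offset of length `stride` in a random direction
            theta = np.random.uniform(0, 2 * np.pi)
            return self.stride * np.array([np.sin(theta), np.cos(theta), 0.0])

        # walk_same_altitude raises PolicyError when it cannot find ground under the proposal,
        # which the try_animate_* loops catch and retry
        pos = walk_same_altitude(obj.location, sampler, bvh)
        time = np.linalg.norm(pos - obj.location) / self.speed
        rot = np.array(obj.rotation_euler)
        return Vector(pos), Vector(rot), time, "BEZIER"
```

[Such a class can be passed as `policy_func` wherever the built-in policies are used.]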
bpy.context.scene.frame_set(frame_curr) last_pose = (deepcopy(obj.location), deepcopy(obj.rotation_euler)) - keyframe(obj, loc, rot, frame_curr+1, interp='BEZIER') - bpy.context.scene.frame_set(frame_curr+1) + keyframe(obj, loc, rot, frame_curr + 1, interp="BEZIER") + bpy.context.scene.frame_set(frame_curr + 1) valid_target = True if validate_pose_func is not None and not validate_pose_func(obj): @@ -450,31 +472,34 @@ def try_animate_with_pathfinding( for fc in obj.animation_data.action.fcurves: if fc.data_path == "": continue - obj.keyframe_delete(data_path=fc.data_path, frame=frame_curr+1) - + obj.keyframe_delete(data_path=fc.data_path, frame=frame_curr + 1) + if not valid_target: - logger.debug(f'validate_pose_func at target pose failed, aborting path finding') + logger.debug( + "validate_pose_func at target pose failed, aborting path finding" + ) continue - - bounded = True current_pose = (loc, rot) for i in range(3): - if current_pose[0][i] < bounding_box[0][i] or current_pose[0][i] >= bounding_box[1][i]: + if ( + current_pose[0][i] < bounding_box[0][i] + or current_pose[0][i] >= bounding_box[1][i] + ): bounded = False if not bounded: - logger.debug(f'target pose out of bound, aborting path finding') + logger.debug("target pose out of bound, aborting path finding") continue poses = path_finding(bvh, bounding_box, last_pose, current_pose) if poses is None: - logger.debug(f'path not found, aborting') + logger.debug("path not found, aborting") continue base_length = (last_pose[0] - current_pose[0]).length - scaling = poses[-1][0] / base_length + scaling = poses[-1][0] / base_length step_frames = int(duration * bpy.context.scene.render.fps * scaling) + 1 step_end_frame = frame_curr + step_frames @@ -482,24 +507,40 @@ def try_animate_with_pathfinding( turning_too_fast = False for i in range(len(poses) - 1): rotation_euler0 = poses[i][2] - rotation_euler1 = poses[i+1][2] - if abs(rotation_euler0.z - rotation_euler1.z) > turning_limit_degree / 180 * np.pi * step_frames * (poses[i+1][0] - poses[i][0]) / poses[-1][0]: + rotation_euler1 = poses[i + 1][2] + if ( + abs(rotation_euler0.z - rotation_euler1.z) + > turning_limit_degree + / 180 + * np.pi + * step_frames + * (poses[i + 1][0] - poses[i][0]) + / poses[-1][0] + ): turning_too_fast = True break if turning_too_fast: - logger.debug(f'path turns too fast, aborting') + logger.debug("path turns too fast, aborting") continue for l, location, rotation_euler in poses: t = l / poses[-1][0] - keyframe(obj, location, rotation_euler, round(frame_curr + (step_end_frame - frame_curr) * t), interp='LINEAR') + keyframe( + obj, + location, + rotation_euler, + round(frame_curr + (step_end_frame - frame_curr) * t), + interp="LINEAR", + ) if verbose: - pbar.update(min(step_frames, duration_frames - frame_curr)) # dont overshoot the pbar, it makes the formatting not nice + pbar.update( + min(step_frames, duration_frames - frame_curr) + ) # dont overshoot the pbar, it makes the formatting not nice - break # we found a good pose + break # we found a good pose - else: # for-else block triggers when for loop terminates w/o a break statement + else: # for-else block triggers when for loop terminates w/o a break statement return False frame_curr = step_end_frame @@ -507,17 +548,19 @@ def try_animate_with_pathfinding( return True -def keyframe(obj, loc, rot, t, interp='BEZIER'): +def keyframe(obj, loc, rot, t, interp="BEZIER"): if obj.animation_data is not None and obj.animation_data.action is not None: for fc in obj.animation_data.action.fcurves: for kp in 
fc.keyframe_points: if kp.co > t: - raise ValueError(f'Unexpected out-of-order keyframing {kp.co=}, {t=}') + raise ValueError( + f"Unexpected out-of-order keyframing {kp.co=}, {t=}" + ) if loc is not None: obj.location = loc - obj.keyframe_insert(data_path="location", frame=t), + (obj.keyframe_insert(data_path="location", frame=t),) if rot is not None: obj.rotation_euler = rot @@ -527,10 +570,13 @@ def keyframe(obj, loc, rot, t, interp='BEZIER'): for k in fc.keyframe_points: if k.co[0] == t: k.interpolation = interp - + + @gin.configurable def animate_trajectory( - obj, bvh, policy_func, + obj, + bvh, + policy_func, validate_pose_func=None, max_step_tries=25, max_full_retries=10, @@ -541,7 +587,7 @@ def animate_trajectory( bounding_box=None, path_finding_enabled=False, ): - duration_frames = (bpy.context.scene.frame_end - bpy.context.scene.frame_start) + duration_frames = bpy.context.scene.frame_end - bpy.context.scene.frame_start duration_sec = duration_frames / bpy.context.scene.render.fps if duration_sec < 1e-3: return @@ -550,48 +596,87 @@ def animate_trajectory( obj_orig_rot = copy(obj.rotation_euler) for attempt in range(max_full_retries): - obj.animation_data_clear() obj.location = obj_orig_loc obj.rotation_euler = obj_orig_rot if attempt > 0 and retry_rotation: obj.rotation_euler.z = U(0, 2 * np.pi) - if hasattr(policy_func, 'reset'): + if hasattr(policy_func, "reset"): infinigen.assets.utils.mesh.reset_preset() - keyframe(obj, obj.location, obj.rotation_euler, 0, interp='LINEAR') - try_animate_trajectory_func = try_animate_trajectory if not path_finding_enabled else try_animate_with_pathfinding - args = [obj, bvh, policy_func, keyframe, duration_frames, validate_pose_func, max_step_tries, verbose] - if path_finding_enabled: args.append(bounding_box) + keyframe(obj, obj.location, obj.rotation_euler, 0, interp="LINEAR") + try_animate_trajectory_func = ( + try_animate_trajectory + if not path_finding_enabled + else try_animate_with_pathfinding + ) + args = [ + obj, + bvh, + policy_func, + keyframe, + duration_frames, + validate_pose_func, + max_step_tries, + verbose, + ] + if path_finding_enabled: + args.append(bounding_box) if try_animate_trajectory_func(*args): if reverse_time: kf_locs = [] kf_rots = [] kf_ts = [] - for j in range(len(obj.animation_data.action.fcurves[0].keyframe_points)): - kf_ts.append(obj.animation_data.action.fcurves[0].keyframe_points[j].co.x) - kf_locs.append(( - obj.animation_data.action.fcurves[0].keyframe_points[j].co.y, - obj.animation_data.action.fcurves[1].keyframe_points[j].co.y, - obj.animation_data.action.fcurves[2].keyframe_points[j].co.y, - )) - kf_rots.append(( - obj.animation_data.action.fcurves[3].keyframe_points[j].co.y, - obj.animation_data.action.fcurves[4].keyframe_points[j].co.y, - obj.animation_data.action.fcurves[5].keyframe_points[j].co.y, - )) + for j in range( + len(obj.animation_data.action.fcurves[0].keyframe_points) + ): + kf_ts.append( + obj.animation_data.action.fcurves[0].keyframe_points[j].co.x + ) + kf_locs.append( + ( + obj.animation_data.action.fcurves[0] + .keyframe_points[j] + .co.y, + obj.animation_data.action.fcurves[1] + .keyframe_points[j] + .co.y, + obj.animation_data.action.fcurves[2] + .keyframe_points[j] + .co.y, + ) + ) + kf_rots.append( + ( + obj.animation_data.action.fcurves[3] + .keyframe_points[j] + .co.y, + obj.animation_data.action.fcurves[4] + .keyframe_points[j] + .co.y, + obj.animation_data.action.fcurves[5] + .keyframe_points[j] + .co.y, + ) + ) obj.animation_data_clear() for i, t in enumerate(kf_ts): - 
keyframe(obj, kf_locs[i], kf_rots[i], bpy.context.scene.frame_end + bpy.context.scene.frame_start - t, interp='LINEAR') + keyframe( + obj, + kf_locs[i], + kf_rots[i], + bpy.context.scene.frame_end + bpy.context.scene.frame_start - t, + interp="LINEAR", + ) # bpy.context.scene.frame_set(bpy.context.scene.frame_end) # obj.keyframe_insert(data_path="location", frame=bpy.context.scene.frame_end) # obj.keyframe_insert(data_path="rotation_euler", frame=bpy.context.scene.frame_end) # assert(0) break - logger.info(f'Failed {attempt=} out of {max_full_retries=} for {obj.name=}') + logger.info(f"Failed {attempt=} out of {max_full_retries=} for {obj.name=}") else: - err = f'Animation for {obj.name=} failed with {max_full_retries=} and {max_step_tries=}, quitting' + err = f"Animation for {obj.name=} failed with {max_full_retries=} and {max_step_tries=}, quitting" if fatal: raise ValueError(err) else: @@ -599,12 +684,13 @@ def animate_trajectory( return -def policy_create_bezier_path(start_pose_obj, bvh, policy_func, to_mesh=False, eval_offset=(0,0,0), **kwargs): - +def policy_create_bezier_path( + start_pose_obj, bvh, policy_func, to_mesh=False, eval_offset=(0, 0, 0), **kwargs +): eval_offset = Vector(eval_offset) # animate a dummy using the policy - temp = butil.spawn_empty('policy_create_bezier_path.temp') + temp = butil.spawn_empty("policy_create_bezier_path.temp") temp.location = start_pose_obj.location + eval_offset temp.rotation_euler = start_pose_obj.rotation_euler animate_trajectory(temp, bvh, policy_func, **kwargs) @@ -612,14 +698,18 @@ def policy_create_bezier_path(start_pose_obj, bvh, policy_func, to_mesh=False, e # read off the keyframe locations positions = [] if temp.animation_data is not None: - fc = next(fc for fc in temp.animation_data.action.fcurves if fc.data_path == 'location') + fc = next( + fc + for fc in temp.animation_data.action.fcurves + if fc.data_path == "location" + ) for p in fc.keyframe_points: f = int(p.co[0]) bpy.context.scene.frame_set(f) positions.append(deepcopy(temp.location - eval_offset)) - logger.debug(f'Created policy path with {len(positions)} keypoints') + logger.debug(f"Created policy path with {len(positions)} keypoints") - res = Curve(points=positions).to_curve_obj(name='policy_path', to_mesh=to_mesh) + res = Curve(points=positions).to_curve_obj(name="policy_path", to_mesh=to_mesh) butil.delete(temp) return res diff --git a/infinigen/core/placement/camera.py b/infinigen/core/placement/camera.py index d641adbbf..36324f794 100644 --- a/infinigen/core/placement/camera.py +++ b/infinigen/core/placement/camera.py @@ -1,60 +1,50 @@ # Copyright (c) Princeton University. # This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
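[Editorial note, not part of the patch: the `animation_policy` hunks above are purely a reformat, but a short usage sketch may help readers skimming the diff. This is a hedged illustration only — the terrain object argument and the use of an empty as a stand-in camera rig are assumptions, not code from the patch.]

```python
import bpy
from mathutils.bvhtree import BVHTree

from infinigen.core.placement import animation_policy
from infinigen.core.util import blender as butil


def animate_example_rig(terrain_obj: bpy.types.Object):
    # BVH over the collision geometry, mirroring how camera.py builds one below
    bvh = BVHTree.FromObject(terrain_obj, bpy.context.evaluated_depsgraph_get())

    rig = butil.spawn_empty("example_camera_rig")  # stand-in for a real camera rig
    rig.location = terrain_obj.matrix_world.translation

    policy = animation_policy.AnimPolicyRandomWalkLookaround()

    # keyframes `rig` from frame_start to frame_end, retrying whole trajectories on failure
    animation_policy.animate_trajectory(
        rig,
        bvh,
        policy_func=policy,
        validate_pose_func=None,
        fatal=False,
    )
    return rig
```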
-# Authors: +# Authors: # - Zeyu Ma, Lahav Lipson: Stationary camera selection # - Alexander Raistrick: Refactor into proposal/validate, camera animation # - Lingjie Mei: get_camera_trajectory -from random import sample -import sys -import warnings +import logging import typing -from copy import deepcopy, copy +from copy import deepcopy +from dataclasses import dataclass from functools import partial from itertools import chain -import logging from pathlib import Path -from dataclasses import dataclass - -from numpy.random import uniform as U import bpy -import bpy_extras import gin import imageio import numpy as np -from mathutils import Matrix, Vector, Euler +from mathutils import Vector from mathutils.bvhtree import BVHTree - -from infinigen.core.rendering.post_render import colorize_depth -from tqdm import tqdm, trange -from infinigen.core.placement import placement +from numpy.random import uniform as U +from tqdm import tqdm from infinigen.core.nodes import node_utils -from infinigen.core.nodes.node_wrangler import NodeWrangler, Nodes - -from . import animation_policy - +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.rendering.post_render import colorize_depth +from infinigen.core.tagging import tag_system from infinigen.core.util import blender as butil +from infinigen.core.util import camera from infinigen.core.util.blender import SelectObjects, delete from infinigen.core.util.logging import Timer -from infinigen.core.util.math import clip_gaussian, lerp -from infinigen.core.util import camera -from infinigen.core.util.random import random_general -from infinigen.core.tagging import tag_system from infinigen.core.util.organization import SelectionCriterions - +from infinigen.core.util.random import random_general from infinigen.tools.suffixes import get_suffix +from . 
import animation_policy + logger = logging.getLogger(__name__) -CAMERA_RIGS_DIRNAME = 'CameraRigs' +CAMERA_RIGS_DIRNAME = "CameraRigs" + @gin.configurable def get_sensor_coords(cam, H, W, sparse=False): - camd = cam.data f_in_m = camd.lens / 1000 scene = bpy.context.scene @@ -64,54 +54,63 @@ def get_sensor_coords(cam, H, W, sparse=False): scale = scene.render.resolution_percentage / 100 sensor_width_in_m = camd.sensor_width / 1000 sensor_height_in_m = camd.sensor_height / 1000 - assert abs(sensor_width_in_m/sensor_height_in_m - W/H) < 1e-4, (sensor_width_in_m, sensor_height_in_m, W, H) + assert abs(sensor_width_in_m / sensor_height_in_m - W / H) < 1e-4, ( + sensor_width_in_m, + sensor_height_in_m, + W, + H, + ) pixel_aspect_ratio = scene.render.pixel_aspect_x / scene.render.pixel_aspect_y - if (camd.sensor_fit == 'VERTICAL'): - - # the sensor height is fixed (sensor fit is horizontal), + if camd.sensor_fit == "VERTICAL": + # the sensor height is fixed (sensor fit is horizontal), # the sensor width is effectively changed with the pixel aspect ratio - s_u = resolution_x_in_px * scale / sensor_width_in_m / pixel_aspect_ratio # pixels per milimeter + s_u = ( + resolution_x_in_px * scale / sensor_width_in_m / pixel_aspect_ratio + ) # pixels per milimeter s_v = resolution_y_in_px * scale / sensor_height_in_m - - else: # 'HORIZONTAL' and 'AUTO' - # the sensor width is fixed (sensor fit is horizontal), + else: # 'HORIZONTAL' and 'AUTO' + # the sensor width is fixed (sensor fit is horizontal), # the sensor height is effectively changed with the pixel aspect ratio pixel_aspect_ratio = scene.render.pixel_aspect_x / scene.render.pixel_aspect_y s_u = resolution_x_in_px * scale / sensor_width_in_m s_v = resolution_y_in_px * scale * pixel_aspect_ratio / sensor_height_in_m - u_0 = resolution_x_in_px * scale / 2 # cx (in pixels) Usually is just W/2 - v_0 = resolution_y_in_px * scale / 2 # cx (in pixels) Usually is just H/2 + u_0 = resolution_x_in_px * scale / 2 # cx (in pixels) Usually is just W/2 + v_0 = resolution_y_in_px * scale / 2 # cx (in pixels) Usually is just H/2 xx, yy = np.meshgrid(np.arange(W).astype(float), np.arange(H).astype(float)) - coords_x = (xx - u_0) / s_u # relative, in mm - coords_y = (yy - v_0 + 1) / s_v # relative, in mm + coords_x = (xx - u_0) / s_u # relative, in mm + coords_y = (yy - v_0 + 1) / s_v # relative, in mm coords_z = np.full(coords_x.shape, -f_in_m) relative_cam_coords = np.stack((coords_x, coords_y, coords_z), axis=-1) - cam_coords_vectors = np.empty((H,W), dtype=Vector) - pixel_locs = np.stack((np.meshgrid(np.arange(W), np.arange(H))), axis=-1).reshape((W*H, 2))#np.array(list(product(range(H), range(W)))) + cam_coords_vectors = np.empty((H, W), dtype=Vector) + pixel_locs = np.stack((np.meshgrid(np.arange(W), np.arange(H))), axis=-1).reshape( + (W * H, 2) + ) # np.array(list(product(range(H), range(W)))) if sparse: - ii = np.random.choice(H*W, size=1000) + ii = np.random.choice(H * W, size=1000) pixel_locs = pixel_locs[ii] - for x,y in tqdm(pixel_locs, desc="Building Camera Vectors", disable=True): - pixelVector = Vector(relative_cam_coords[y,x]) - cam_coords_vectors[y,x] = cam.matrix_world @ pixelVector + for x, y in tqdm(pixel_locs, desc="Building Camera Vectors", disable=True): + pixelVector = Vector(relative_cam_coords[y, x]) + cam_coords_vectors[y, x] = cam.matrix_world @ pixelVector return cam_coords_vectors, pixel_locs + def adjust_camera_sensor(cam): scene = bpy.context.scene W = scene.render.resolution_x H = scene.render.resolution_y - sensor_width = 18 * 
(W/H) + sensor_width = 18 * (W / H) assert sensor_width.is_integer(), (18, W, H) cam.data.sensor_height = 18 cam.data.sensor_width = int(sensor_width) + def spawn_camera(): bpy.ops.object.camera_add() cam = bpy.context.active_object @@ -119,34 +118,35 @@ def spawn_camera(): adjust_camera_sensor(cam) return cam + def camera_name(rig_id, cam_id): - return f'{CAMERA_RIGS_DIRNAME}/{rig_id}/{cam_id}' + return f"{CAMERA_RIGS_DIRNAME}/{rig_id}/{cam_id}" + @gin.configurable def spawn_camera_rigs( camera_rig_config, n_camera_rigs, ): - def spawn_rig(i): - rig_parent = butil.spawn_empty(f'{CAMERA_RIGS_DIRNAME}/{i}') + rig_parent = butil.spawn_empty(f"{CAMERA_RIGS_DIRNAME}/{i}") for j, config in enumerate(camera_rig_config): cam = spawn_camera() cam.name = camera_name(i, j) cam.parent = rig_parent - cam.location = config['loc'] - cam.rotation_euler = config['rot_euler'] + cam.location = config["loc"] + cam.rotation_euler = config["rot_euler"] return rig_parent camera_rigs = [spawn_rig(i) for i in range(n_camera_rigs)] butil.group_in_collection(camera_rigs, CAMERA_RIGS_DIRNAME) - + return camera_rigs -def get_cameras_ids() -> list[tuple]: +def get_cameras_ids() -> list[tuple]: res = [] col = bpy.data.collections[CAMERA_RIGS_DIRNAME] rigs = [o for o in col.objects if o.name.count("/") == 1] @@ -154,40 +154,52 @@ def get_cameras_ids() -> list[tuple]: for j, subcam in enumerate(root.children): assert subcam.name == camera_name(i, j) res.append((i, j)) - + return res + def get_camera(rig_id, subcam_id, checkonly=False): col = bpy.data.collections[CAMERA_RIGS_DIRNAME] name = camera_name(rig_id, subcam_id) if name in col.objects.keys(): return col.objects[name] - if checkonly: + if checkonly: return None - raise ValueError(f'Could not get_camera({rig_id=}, {subcam_id=}). {list(col.objects.keys())=}') + raise ValueError( + f"Could not get_camera({rig_id=}, {subcam_id=}). 
{list(col.objects.keys())=}" + ) -@node_utils.to_nodegroup('nodegroup_camera_info', singleton=True, type='GeometryNodeTree') + +@node_utils.to_nodegroup( + "nodegroup_camera_info", singleton=True, type="GeometryNodeTree" +) def nodegroup_active_cam_info(nw: NodeWrangler): info = nw.new_node(Nodes.ObjectInfo, [bpy.context.scene.camera]) - nw.new_node(Nodes.GroupOutput, input_kwargs={ - k: info.outputs[k] for k in info.outputs.keys() - }) + nw.new_node( + Nodes.GroupOutput, + input_kwargs={k: info.outputs[k] for k in info.outputs.keys()}, + ) + def set_active_camera(rig_id, subcam_id): camera = get_camera(rig_id, subcam_id) bpy.context.scene.camera = camera - ng = nodegroup_active_cam_info() # does not create a new node group, retrieves singleton - ng.nodes['Object Info'].inputs['Object'].default_value = camera + ng = ( + nodegroup_active_cam_info() + ) # does not create a new node group, retrieves singleton + ng.nodes["Object Info"].inputs["Object"].default_value = camera return bpy.context.scene.camera + def positive_gaussian(mean, std): while True: val = np.random.normal(mean, std) if val > 0: return val + def set_camera( camera, location, @@ -198,7 +210,9 @@ def set_camera( camera.location = location camera.rotation_euler = rotation if focus_dist is not None: - camera.data.dof.focus_distance = focus_dist # this should come before view_layer.update() + camera.data.dof.focus_distance = ( + focus_dist # this should come before view_layer.update() + ) bpy.context.view_layer.update() camera.keyframe_insert(data_path="location", frame=frame) @@ -206,24 +220,25 @@ def set_camera( if focus_dist is not None: camera.data.dof.keyframe_insert(data_path="focus_distance", frame=frame) -def terrain_camera_query(cam, scene_bvh, terrain_tags_queries, vertexwise_min_dist, min_dist=0): +def terrain_camera_query( + cam, scene_bvh, terrain_tags_queries, vertexwise_min_dist, min_dist=0 +): dists = [] sensor_coords, pix_it = get_sensor_coords(cam, sparse=True) terrain_tags_queries_counts = {q: 0 for q in terrain_tags_queries} - for x,y in pix_it: - direction = (sensor_coords[y,x] - cam.matrix_world.translation).normalized() + for x, y in pix_it: + direction = (sensor_coords[y, x] - cam.matrix_world.translation).normalized() _, _, index, dist = scene_bvh.ray_cast(cam.matrix_world.translation, direction) if dist is None: continue dists.append(dist) - if ( - dist < min_dist or - (vertexwise_min_dist is not None and dist < vertexwise_min_dist[index]) + if dist < min_dist or ( + vertexwise_min_dist is not None and dist < vertexwise_min_dist[index] ): - logger.debug(f'Found {dist=} < {min_dist=}') - dists = None # means dist < min + logger.debug(f"Found {dist=} < {min_dist=}") + dists = None # means dist < min break for q in terrain_tags_queries: terrain_tags_queries_counts[q] += terrain_tags_queries[q][index] @@ -232,7 +247,8 @@ def terrain_camera_query(cam, scene_bvh, terrain_tags_queries, vertexwise_min_di return dists, terrain_tags_queries_counts, n_pix -@dataclass + +@dataclass class CameraProposal: loc: np.array rot: np.array @@ -243,21 +259,23 @@ def apply(self, cam): cam.rotation_euler = self.rot cam.data.lens = self.focal_length + @gin.configurable def camera_pose_proposal( - scene_bvh, - location_sample: typing.Callable | tuple, - altitude=('uniform', 1.5, 2.5), - roll=0, - yaw=('uniform', -180, 180), - pitch=90, + scene_bvh, + location_sample: typing.Callable | tuple, + altitude=("uniform", 1.5, 2.5), + roll=0, + yaw=("uniform", -180, 180), + pitch=90, focal_length=50, override_loc=None, ): - if 
isinstance(location_sample, tuple): location_sample = Vector(location_sample) - location_sample = lambda: location_sample + + def location_sample(): + return location_sample if override_loc is not None: loc = Vector(random_general(override_loc)) @@ -266,9 +284,9 @@ def camera_pose_proposal( else: loc = location_sample() curr_alt = animation_policy.get_altitude(loc, scene_bvh) - if curr_alt is None: - logger.debug(f'camera_pose_proposal got {curr_alt=} for {loc=}') - butil.spawn_empty(f'fail') + if curr_alt is None: + logger.debug(f"camera_pose_proposal got {curr_alt=} for {loc=}") + # butil.spawn_empty("fail") return None desired_alt = random_general(altitude) loc[2] = loc[2] + desired_alt - curr_alt @@ -277,12 +295,13 @@ def camera_pose_proposal( focal_length = random_general(focal_length) return CameraProposal(loc, rot, focal_length) + @gin.configurable def keep_cam_pose_proposal( cam, terrain, - scene_bvh, - placeholders_kd, + scene_bvh, + placeholders_kd, camera_selection_answers, vertexwise_min_dist, camera_selection_ratio, @@ -290,64 +309,76 @@ def keep_cam_pose_proposal( min_terrain_distance=0, terrain_coverage_range=(0.5, 1), ): + if terrain is not None: # TODO refactor + terrain_sdf = terrain.compute_camera_space_sdf( + np.array(cam.location).reshape((1, 3)) + ) - if terrain is not None: # TODO refactor - terrain_sdf = terrain.compute_camera_space_sdf(np.array(cam.location).reshape((1, 3))) - - if not cam.type == 'CAMERA': + if not cam.type == "CAMERA": cam = cam.children[0] - if not cam.type == 'CAMERA': - raise ValueError(f'{cam.name=} had {cam.type=}') + if not cam.type == "CAMERA": + raise ValueError(f"{cam.name=} had {cam.type=}") bpy.context.view_layer.update() - + # Reject cameras too close to any placeholder vertex v, i, dist_to_placeholder = placeholders_kd.find(cam.matrix_world.translation) if dist_to_placeholder is not None and dist_to_placeholder < min_placeholder_dist: - logger.debug(f'keep_cam_pose_proposal rejects {dist_to_placeholder=}, {v, i}') + logger.debug(f"keep_cam_pose_proposal rejects {dist_to_placeholder=}, {v, i}") return None dists, camera_selection_answers_counts, n_pix = terrain_camera_query( - cam, scene_bvh, camera_selection_answers, vertexwise_min_dist, min_dist=min_terrain_distance) - + cam, + scene_bvh, + camera_selection_answers, + vertexwise_min_dist, + min_dist=min_terrain_distance, + ) + if dists is None: - logger.debug('keep_cam_pose_proposal rejects terrain dists') + logger.debug("keep_cam_pose_proposal rejects terrain dists") return None - - coverage = len(dists)/n_pix + + coverage = len(dists) / n_pix if coverage < terrain_coverage_range[0] or coverage > terrain_coverage_range[1]: - logger.debug(f'keep_cam_pose_proposal rejects {coverage=} for {terrain_coverage_range=}') + logger.debug( + f"keep_cam_pose_proposal rejects {coverage=} for {terrain_coverage_range=}" + ) return None - + if terrain is not None and terrain_sdf <= 0: - logger.debug(f'keep_cam_pose_proposal rejects {terrain_sdf=}') + logger.debug(f"keep_cam_pose_proposal rejects {terrain_sdf=}") return None if rparams := camera_selection_ratio: for q in rparams: if type(q) is tuple and q[0] == SelectionCriterions.CloseUp: - closeup = len([d for d in dists if d < q[1]])/n_pix + closeup = len([d for d in dists if d < q[1]]) / n_pix if closeup < rparams[q][0] or closeup > rparams[q][1]: - logger.debug(f'keep_cam_pose_proposal rejects {closeup=} for {q=}') + logger.debug(f"keep_cam_pose_proposal rejects {closeup=} for {q=}") return None else: minv, maxv = rparams[q][0], rparams[q][1] 
if q in camera_selection_answers_counts: ratio = camera_selection_answers_counts[q] / n_pix if ratio < minv or ratio > maxv: - logger.debug(f'keep_cam_pose_proposal rejects {ratio=} for {q=}') + logger.debug( + f"keep_cam_pose_proposal rejects {ratio=} for {q=}" + ) return None return np.std(dists) + 1.5 * np.min(dists) - + + @gin.configurable class AnimPolicyGoToProposals: - - def __init__(self, speed=("uniform", 1.5, 2.5), min_dist=4, max_dist=10, retries=30): - self.speed=speed - self.min_dist=min_dist - self.max_dist=max_dist - self.retries=retries + def __init__( + self, speed=("uniform", 1.5, 2.5), min_dist=4, max_dist=10, retries=30 + ): + self.speed = speed + self.min_dist = min_dist + self.max_dist = max_dist + self.retries = retries def __call__(self, camera_rig, frame_curr, retry_pct, bvh): margin = Vector((self.max_dist, self.max_dist, self.max_dist)) @@ -362,61 +393,67 @@ def __call__(self, camera_rig, frame_curr, retry_pct, bvh): continue break else: - raise animation_policy.PolicyError(f'{__name__} found no keyframe after {self.retries=}') + raise animation_policy.PolicyError( + f"{__name__} found no keyframe after {self.retries=}" + ) time = dist / random_general(self.speed) - return Vector(res.loc), Vector(res.rot), time, 'BEZIER' - + return Vector(res.loc), Vector(res.rot), time, "BEZIER" + + @gin.configurable def compute_base_views( - cam, n_views, - terrain, - scene_bvh, - location_sample: typing.Callable, + cam, + n_views, + terrain, + scene_bvh, + location_sample: typing.Callable, placeholders_kd=None, - camera_selection_answers={}, + camera_selection_answers={}, vertexwise_min_dist=None, camera_selection_ratio=None, min_candidates_ratio=20, max_tries=30000, - visualize=False + visualize=False, ): potential_views = [] n_min_candidates = int(min_candidates_ratio * n_views) - with tqdm(total=n_min_candidates, desc='Searching for camera viewpoints') as pbar: + with tqdm(total=n_min_candidates, desc="Searching for camera viewpoints") as pbar: for it in range(1, max_tries): - props = camera_pose_proposal( - scene_bvh=scene_bvh, - location_sample=location_sample + scene_bvh=scene_bvh, location_sample=location_sample ) - - if props is None: - logger.debug(f'{camera_pose_proposal.__name__} returned {props=} for {it=}') + + if props is None: + logger.debug( + f"{camera_pose_proposal.__name__} returned {props=} for {it=}" + ) continue props.apply(cam) - criterion = keep_cam_pose_proposal( - cam, terrain, scene_bvh, placeholders_kd, + cam, + terrain, + scene_bvh, + placeholders_kd, camera_selection_answers=camera_selection_answers, vertexwise_min_dist=vertexwise_min_dist, camera_selection_ratio=camera_selection_ratio, ) if visualize: - criterion_str = f'{criterion:.2f}' if criterion is not None else 'None' - marker = butil.spawn_empty(f'attempt_{it}_{criterion_str}') + criterion_str = f"{criterion:.2f}" if criterion is not None else "None" + marker = butil.spawn_empty(f"attempt_{it}_{criterion_str}") marker.location = cam.location marker.rotation_euler = cam.rotation_euler if criterion is None: - logger.debug(f'{it=} {criterion=}') + logger.debug(f"{it=} {criterion=}") continue # Compute focus distance - destination = cam.matrix_world @ Vector((0.,0.,-1.)) + destination = cam.matrix_world @ Vector((0.0, 0.0, -1.0)) forward_dir = (destination - cam.location).normalized() *_, straight_ahead_dist = scene_bvh.ray_cast(cam.location, forward_dir) @@ -428,11 +465,11 @@ def compute_base_views( if len(potential_views) < n_views: if visualize: - 
butil.save_blend('compute_base_views-failed.blend') - raise ValueError(f'Could not find {n_views} camera views') - + butil.save_blend("compute_base_views-failed.blend") + raise ValueError(f"Could not find {n_views} camera views") + views = sorted(potential_views, reverse=True) - + return views[:n_views] @@ -440,12 +477,14 @@ def build_bvh_and_attrs(objs, tags_queries): dup_objs = [] for obj in objs: with SelectObjects(obj): - bpy.ops.object.duplicate(linked=0,mode='TRANSLATION') + bpy.ops.object.duplicate(linked=0, mode="TRANSLATION") dup_objs.append(bpy.context.view_layer.objects.active) for obj in dup_objs: - with butil.ViewportMode(obj, 'EDIT'): - bpy.ops.mesh.select_all(action='SELECT') - bpy.ops.mesh.quads_convert_to_tris(quad_method='BEAUTY', ngon_method='BEAUTY') + with butil.ViewportMode(obj, "EDIT"): + bpy.ops.mesh.select_all(action="SELECT") + bpy.ops.mesh.quads_convert_to_tris( + quad_method="BEAUTY", ngon_method="BEAUTY" + ) with SelectObjects(dup_objs[0]): for obj in dup_objs[1:]: obj.select_set(True) @@ -453,8 +492,9 @@ def build_bvh_and_attrs(objs, tags_queries): obj = bpy.context.view_layer.objects.active bvh = BVHTree.FromObject(obj, bpy.context.evaluated_depsgraph_get()) from infinigen.terrain.utils import Mesh - with butil.ViewportMode(obj, 'EDIT'): - bpy.ops.mesh.quads_convert_to_tris(quad_method='BEAUTY', ngon_method='BEAUTY') + + with butil.ViewportMode(obj, "EDIT"): + bpy.ops.mesh.quads_convert_to_tris(quad_method="BEAUTY", ngon_method="BEAUTY") mesh = Mesh(obj=obj) delete(obj) @@ -464,26 +504,31 @@ def build_bvh_and_attrs(objs, tags_queries): q = (q0,) else: q = q0 - if q[0] in [SelectionCriterions.CloseUp]: continue + if q[0] in [SelectionCriterions.CloseUp]: + continue if q[0] == SelectionCriterions.Altitude: min_altitude, max_altitude = q[1:3] altitude = mesh.vertices[:, 2] - camera_selection_answers[q0] = mesh.facewise_mean((altitude > min_altitude) & (altitude < max_altitude)) + camera_selection_answers[q0] = mesh.facewise_mean( + (altitude > min_altitude) & (altitude < max_altitude) + ) else: camera_selection_answers[q0] = np.zeros(len(mesh.faces), dtype=bool) for key in tag_system.tag_dict: - if set(q).issubset(set(key.split('.'))): - camera_selection_answers[q0] |= (mesh.face_attributes["MaskTag"] == tag_system.tag_dict[key]).reshape(-1) + if set(q).issubset(set(key.split("."))): + camera_selection_answers[q0] |= ( + mesh.face_attributes["MaskTag"] == tag_system.tag_dict[key] + ).reshape(-1) return bvh, camera_selection_answers + def camera_selection_preprocessing( - terrain, + terrain, scene_objs, tags_ratio: dict = None, ranges_ratio: dict = None, anim_criterion_keys: dict = None, ): - if tags_ratio is None: tags_ratio = {} if ranges_ratio is None: @@ -493,30 +538,36 @@ def camera_selection_preprocessing( # preprocessing code adapted from mazeyu's original gin-oriented solution tags_ratio = { - k: (*v, anim_criterion_keys.get(k, False)) - for k, v in tags_ratio.items() + k: (*v, anim_criterion_keys.get(k, False)) for k, v in tags_ratio.items() } ranges_ratio = { - v[:-2]: (v[-2], v[-1], anim_criterion_keys.get(k, False)) + v[:-2]: (v[-2], v[-1], anim_criterion_keys.get(k, False)) for k, v in ranges_ratio.items() } all_selection_ratios = {**tags_ratio, **ranges_ratio} - with Timer('Building placeholders KDTree'): - - placeholders = list(chain.from_iterable( - c.all_objects for c in bpy.data.collections if c.name.startswith('placeholders:') - )) - placeholders = [p for p in placeholders if p.type == 'MESH'] - logger.info(f'Building placeholder kd for 
{len(placeholders)} objects') - placeholders_kd = butil.joined_kd(placeholders, include_origins=True) + with Timer("Building placeholders KDTree"): + placeholders = list( + chain.from_iterable( + c.all_objects + for c in bpy.data.collections + if c.name.startswith("placeholders:") + ) + ) + placeholders = [p for p in placeholders if p.type == "MESH"] + logger.info(f"Building placeholder kd for {len(placeholders)} objects") + placeholders_kd = butil.joined_kd(placeholders, include_origins=True) if terrain is None: - scene_bvh, camera_selection_answers = build_bvh_and_attrs(scene_objs, all_selection_ratios.keys()) + scene_bvh, camera_selection_answers = build_bvh_and_attrs( + scene_objs, all_selection_ratios.keys() + ) vertexwise_min_dist = None else: - scene_bvh, camera_selection_answers, vertexwise_min_dist = terrain.build_terrain_bvh_and_attrs(all_selection_ratios.keys()) + scene_bvh, camera_selection_answers, vertexwise_min_dist = ( + terrain.build_terrain_bvh_and_attrs(all_selection_ratios.keys()) + ) return dict( terrain=terrain, @@ -527,31 +578,33 @@ def camera_selection_preprocessing( placeholders_kd=placeholders_kd, ) -@node_utils.to_nodegroup('geo_distrib', singleton=True, type='GeometryNodeTree') + +@node_utils.to_nodegroup("geo_distrib", singleton=True, type="GeometryNodeTree") def geo_distrib_random_points(nw: NodeWrangler): - input = nw.new_node(Nodes.GroupInput, expose_input=[('NodeSocketGeometry', 'Geometry', None)]) - distribute = nw.new_node(Nodes.DistributePointsOnFaces, input_kwargs={ - 'Mesh': input.outputs['Geometry'], - 'Density': 500 - }) + input = nw.new_node( + Nodes.GroupInput, expose_input=[("NodeSocketGeometry", "Geometry", None)] + ) + distribute = nw.new_node( + Nodes.DistributePointsOnFaces, + input_kwargs={"Mesh": input.outputs["Geometry"], "Density": 500}, + ) verts = nw.new_node(Nodes.PointsToVertices, [distribute]) - output = nw.new_node(Nodes.GroupOutput, input_kwargs={"Geometry": verts}) + nw.new_node(Nodes.GroupOutput, input_kwargs={"Geometry": verts}) + def sample_random_locs(surface: bpy.types.Object, eps=0.01): # HACK implementation - uses blender geonodes' uniform surface sample, im fairly sure theres a numpy impl somewhere in the repo surface = butil.copy(surface) butil.apply_transform(surface, loc=True, rot=True, scale=True) butil.modify_mesh( - surface, - "NODES", - node_group=geo_distrib_random_points(), - apply=True + surface, "NODES", node_group=geo_distrib_random_points(), apply=True ) locs = np.array([v.co for v in surface.data.vertices]) locs[:, -1] += eps butil.delete(surface) return locs + @gin.configurable def configure_cameras( cam_rigs, @@ -563,25 +616,27 @@ def configure_cameras( dummy_camera = spawn_camera() if init_bounding_box is not None: - location_sample = lambda: np.random.uniform(*init_bounding_box) + + def location_sample(): + return np.random.uniform(*init_bounding_box) elif init_surfaces is not None: random_locs = sample_random_locs(init_surfaces) + def location_sample(): loc = Vector(random_locs[np.random.randint(len(random_locs)), :]) loc.z += 1e-3 return loc else: - raise ValueError('Either init_bounding_box or init_surfaces must be provided') + raise ValueError("Either init_bounding_box or init_surfaces must be provided") base_views = compute_base_views( - dummy_camera, - n_views=len(cam_rigs), - location_sample=location_sample, - **scene_preprocessed + dummy_camera, + n_views=len(cam_rigs), + location_sample=location_sample, + **scene_preprocessed, ) for view, cam_rig in zip(base_views, cam_rigs): - score, props, 
focus_dist = view cam_rig.location = props.loc cam_rig.rotation_euler = props.rot @@ -591,63 +646,63 @@ def location_sample(): if focus_dist is not None: for cam in cam_rig.children: - if not cam.type =='CAMERA': continue + if not cam.type == "CAMERA": + continue cam.data.dof.focus_distance = focus_dist butil.delete(dummy_camera) + @gin.configurable def animate_cameras( cam_rigs, bounding_box, - scene_preprocessed, + scene_preprocessed, pois=None, follow_poi_chance=0.0, - policy_registry = None, + policy_registry=None, ): animation_ratio = {} animation_answers = {} - for k in scene_preprocessed['camera_selection_ratio']: - if scene_preprocessed['camera_selection_ratio'][k][2]: - animation_ratio[k] = scene_preprocessed['camera_selection_ratio'][k] - animation_answers[k] = scene_preprocessed['camera_selection_answers'][k] + for k in scene_preprocessed["camera_selection_ratio"]: + if scene_preprocessed["camera_selection_ratio"][k][2]: + animation_ratio[k] = scene_preprocessed["camera_selection_ratio"][k] + animation_answers[k] = scene_preprocessed["camera_selection_answers"][k] anim_valid_pose_func = partial( keep_cam_pose_proposal, - placeholders_kd=scene_preprocessed['placeholders_kd'], - scene_bvh=scene_preprocessed['scene_bvh'], - terrain=scene_preprocessed['terrain'], - vertexwise_min_dist=scene_preprocessed['vertexwise_min_dist'], + placeholders_kd=scene_preprocessed["placeholders_kd"], + scene_bvh=scene_preprocessed["scene_bvh"], + terrain=scene_preprocessed["terrain"], + vertexwise_min_dist=scene_preprocessed["vertexwise_min_dist"], camera_selection_answers=animation_answers, camera_selection_ratio=animation_ratio, ) for cam_rig in cam_rigs: - if policy_registry is None: if U() < follow_poi_chance and pois is not None and len(pois): policy = animation_policy.AnimPolicyFollowObject( - target_obj=cam_rig, - pois=pois, - bvh=scene_preprocessed['scene_bvh'] + target_obj=cam_rig, pois=pois, bvh=scene_preprocessed["scene_bvh"] ) else: policy = animation_policy.AnimPolicyRandomWalkLookaround() else: policy = policy_registry() - logger.info(f'Animating {cam_rig=} using {policy=}') + logger.info(f"Animating {cam_rig=} using {policy=}") animation_policy.animate_trajectory( cam_rig, - scene_preprocessed['scene_bvh'], + scene_preprocessed["scene_bvh"], policy_func=policy, - validate_pose_func=anim_valid_pose_func, - verbose=True, + validate_pose_func=anim_valid_pose_func, + verbose=True, fatal=True, bounding_box=bounding_box, ) + @gin.configurable def save_camera_parameters(camera_ids, output_folder, frame, use_dof=False): output_folder = Path(output_folder) @@ -660,16 +715,23 @@ def save_camera_parameters(camera_ids, output_folder, frame, use_dof=False): camera_obj.data.dof.use_dof = use_dof # Saving camera parameters K = camera.get_calibration_matrix_K_from_blender(camera_obj.data) - suffix = get_suffix(dict(cam_rig=camera_pair_id, resample=0, frame=frame, subcam=camera_id)) + suffix = get_suffix( + dict(cam_rig=camera_pair_id, resample=0, frame=frame, subcam=camera_id) + ) output_file = output_folder / f"camview{suffix}.npz" - height_width = np.array(( - bpy.context.scene.render.resolution_y, - bpy.context.scene.render.resolution_x - )) - T = np.asarray(camera_obj.matrix_world, dtype=np.float64) @ np.diag((1.,-1.,-1.,1.)) # Y down Z forward (aka opencv) + height_width = np.array( + ( + bpy.context.scene.render.resolution_y, + bpy.context.scene.render.resolution_x, + ) + ) + T = np.asarray(camera_obj.matrix_world, dtype=np.float64) @ np.diag( + (1.0, -1.0, -1.0, 1.0) + ) # Y down Z forward (aka 
opencv) np.savez(output_file, K=np.asarray(K, dtype=np.float64), T=T, HW=height_width) + if __name__ == "__main__": """ This interactive section generates a depth map by raycasting through each pixel. @@ -690,19 +752,21 @@ def save_camera_parameters(camera_ids, output_folder, frame, use_dof=False): to_obj_coords = target_obj.matrix_world.inverted() sensor_coords, pix_it = get_sensor_coords(cam, sparse=False) - H,W = sensor_coords.shape - depth_output = np.zeros((H,W), dtype=np.float64) + H, W = sensor_coords.shape + depth_output = np.zeros((H, W), dtype=np.float64) - for x,y in tqdm(pix_it): - destination = sensor_coords[y,x] + for x, y in tqdm(pix_it): + destination = sensor_coords[y, x] direction = (destination - cam.location).normalized() location, normal, index, dist = bvhtree.ray_cast(cam.location, direction) if dist is not None: dist_diff = (destination - cam.location).length - assert dist > (location - destination).length, (dist, (location - destination).length) + assert dist > (location - destination).length, ( + dist, + (location - destination).length, + ) assert dist > dist_diff - depth_output[H-y-1,x] = dist - dist_diff + depth_output[H - y - 1, x] = dist - dist_diff color_depth = colorize_depth(depth_output) - imageio.imwrite(f"color_depth.png", color_depth) - + imageio.imwrite("color_depth.png", color_depth) diff --git a/infinigen/core/placement/density.py b/infinigen/core/placement/density.py index fb830526a..a389a6030 100644 --- a/infinigen/core/placement/density.py +++ b/infinigen/core/placement/density.py @@ -6,40 +6,42 @@ # - Zeyu Ma: Selection based on tag -import pdb import logging -import bpy import mathutils import numpy as np -from infinigen.core.nodes.node_wrangler import NodeWrangler, Nodes from infinigen.core.nodes import node_utils as nu +from infinigen.core.nodes.node_wrangler import Nodes from infinigen.core.surface import eval_argument logger = logging.getLogger(__name__) tag_dict = None + def set_tag_dict(tag_dict_): global tag_dict tag_dict = tag_dict_ + def tag_mask(nw, tag): keys = list(tag_dict.keys()) - tag_parts = tag.split(',') - logger.debug(f'Parsing {tag=} into {len(tag_parts)=}, matching against {len(tag_dict)=}') + tag_parts = tag.split(",") + logger.debug( + f"Parsing {tag=} into {len(tag_parts)=}, matching against {len(tag_dict)=}" + ) for part in tag_parts: if part.startswith("-"): - keys = [k for k in keys if part[1:] not in k.split('.')] + keys = [k for k in keys if part[1:] not in k.split(".")] else: - keys = [k for k in keys if part in k.split('.')] + keys = [k for k in keys if part in k.split(".")] conditions = [] for k in keys: comp = nw.new_node( - Nodes.Compare, - attrs={'operation': "EQUAL", "data_type": "FLOAT"}, - input_args=[eval_argument(nw, "MaskTag"), tag_dict[k]] + Nodes.Compare, + attrs={"operation": "EQUAL", "data_type": "FLOAT"}, + input_args=[eval_argument(nw, "MaskTag"), tag_dict[k]], ) conditions.append(comp) if len(conditions): @@ -49,43 +51,67 @@ def tag_mask(nw, tag): mask.outputs["Value"].default_value = 1 return mask -def placement_mask(scale=0.05, select_thresh=0.55, normal_thresh=0.5, normal_thresh_high=2., - normal_dir=(0, 0, 1), tag=None, return_scalar=False, altitude_range=None): + +def placement_mask( + scale=0.05, + select_thresh=0.55, + normal_thresh=0.5, + normal_thresh_high=2.0, + normal_dir=(0, 0, 1), + tag=None, + return_scalar=False, + altitude_range=None, +): def selection(nw): mask = nw.new_node(Nodes.Value) mask.outputs["Value"].default_value = 1 if select_thresh is not None: mininum_val = 
nw.new_node(Nodes.Value) - mininum_val.outputs[0].default_value = np.random.normal(select_thresh, 0.025) + mininum_val.outputs[0].default_value = np.random.normal( + select_thresh, 0.025 + ) noise_node = nu.noise(nw, scale) - noise_mask = nw.new_node(Nodes.Math, input_args=[noise_node, mininum_val], - attrs={'operation': 'GREATER_THAN'}) + noise_mask = nw.new_node( + Nodes.Math, + input_args=[noise_node, mininum_val], + attrs={"operation": "GREATER_THAN"}, + ) mask = nw.scalar_multiply(mask, noise_mask) if normal_thresh is not None: facing_mask = nu.facing_mask(nw, normal_dir, thresh=normal_thresh) mask = nw.scalar_multiply(mask, facing_mask) if normal_thresh_high is not None: - facing_mask = nu.facing_mask(nw, - mathutils.Vector(normal_dir), thresh=-normal_thresh_high) + facing_mask = nu.facing_mask( + nw, -mathutils.Vector(normal_dir), thresh=-normal_thresh_high + ) mask = nw.scalar_multiply(mask, facing_mask) if tag is not None: - mask = nw.scalar_multiply( - mask, - tag_mask(nw, tag) - ) + mask = nw.scalar_multiply(mask, tag_mask(nw, tag)) if altitude_range is not None: z = (nw.new_node(Nodes.SeparateXYZ, [nw.new_node(Nodes.InputPosition)]), 2) start, end = altitude_range mask = nw.scalar_multiply( mask, - nw.new_node(Nodes.Compare, attrs={'operation': "GREATER_THAN", "data_type": "FLOAT"}, input_args=[z, start]), - nw.new_node(Nodes.Compare, attrs={'operation': "LESS_THAN", "data_type": "FLOAT"}, input_args=[z, end]), + nw.new_node( + Nodes.Compare, + attrs={"operation": "GREATER_THAN", "data_type": "FLOAT"}, + input_args=[z, start], + ), + nw.new_node( + Nodes.Compare, + attrs={"operation": "LESS_THAN", "data_type": "FLOAT"}, + input_args=[z, end], + ), ) if (select_thresh is not None) and return_scalar: - map_range = nw.new_node(Nodes.MapRange, input_kwargs={'Value': noise_node, 1: mininum_val, 2: 0.75}, - attrs={'interpolation_type': 'SMOOTHSTEP'}) + map_range = nw.new_node( + Nodes.MapRange, + input_kwargs={"Value": noise_node, 1: mininum_val, 2: 0.75}, + attrs={"interpolation_type": "SMOOTHSTEP"}, + ) return mask, map_range return mask diff --git a/infinigen/core/placement/detail.py b/infinigen/core/placement/detail.py index e4a4c37c4..8c5635847 100644 --- a/infinigen/core/placement/detail.py +++ b/infinigen/core/placement/detail.py @@ -4,32 +4,31 @@ # Authors: Alexander Raistrick -import pdb -import warnings import logging import bpy +import gin import mathutils - import numpy as np -import gin -from infinigen.core.util import blender as butil from infinigen.core.nodes.nodegroups import transfer_attributes +from infinigen.core.util import blender as butil from infinigen.core.util.blender import deep_clone_obj - logger = logging.getLogger(__name__) -IS_COARSE = False # Global VARIABLE, set by infinigen_examples/generate_nature.py and used only for whether to emit warnings +IS_COARSE = False # Global VARIABLE, set by infinigen_examples/generate_nature.py and used only for whether to emit warnings + @gin.configurable def scatter_res_distance(dist=4): return dist -@gin.configurable -def target_face_size(obj, camera=None, global_multiplier=1, global_clip_min=0.003, global_clip_max=1): +@gin.configurable +def target_face_size( + obj, camera=None, global_multiplier=1, global_clip_min=0.003, global_clip_max=1 +): if camera is None: camera = bpy.context.scene.camera if camera is None: @@ -37,25 +36,31 @@ def target_face_size(obj, camera=None, global_multiplier=1, global_clip_min=0.00 if isinstance(obj, bpy.types.Object): if IS_COARSE: - logger.warn(f'target_face_size({obj.name=}) is 
using the cameras location which is unsafe for {IS_COARSE=}') + logger.warn( + f"target_face_size({obj.name=}) is using the cameras location which is unsafe for {IS_COARSE=}" + ) bbox = np.array([obj.matrix_world @ mathutils.Vector(v) for v in obj.bound_box]) dists = np.linalg.norm(bbox - np.array(camera.location), axis=-1) eval_point = bbox[dists.argmin()] dist = np.linalg.norm(eval_point - camera.location) - elif hasattr(obj, '__len__') and len(obj) == 3: + elif hasattr(obj, "__len__") and len(obj) == 3: if IS_COARSE: - logger.warn(f'target_face_size({obj.name=}) is using the cameras location which is unsafe for {IS_COARSE=}') + logger.warn( + f"target_face_size({obj.name=}) is using the cameras location which is unsafe for {IS_COARSE=}" + ) eval_point = mathutils.Vector(obj) dist = np.linalg.norm(eval_point - camera.location) elif isinstance(obj, (float, int)): dist = obj else: - raise ValueError(f'target_face_size() could not handle {obj=}, {type(obj)=}') + raise ValueError(f"target_face_size() could not handle {obj=}, {type(obj)=}") if camera is None: camera = bpy.context.scene.camera if camera is None: - return global_clip_min # raise ValueError(f'Please add a camera; attempted to # + return ( + global_clip_min # raise ValueError(f'Please add a camera; attempted to # + ) # detail.target_face_size() but {bpy.context.scene.camera=}') camd = camera.data @@ -64,7 +69,8 @@ def target_face_size(obj, camera=None, global_multiplier=1, global_clip_min=0.00 f_m = mm_to_meter * camd.lens sensor_dims = mm_to_meter * np.array([camd.sensor_width, camd.sensor_height]) pixel_shape = (scene.render.resolution_percentage / 100) * np.array( - [scene.render.resolution_x, scene.render.resolution_y]) + [scene.render.resolution_x, scene.render.resolution_y] + ) pixel_dims = (sensor_dims / pixel_shape) * (dist / f_m) @@ -73,16 +79,23 @@ def target_face_size(obj, camera=None, global_multiplier=1, global_clip_min=0.00 return np.clip(global_multiplier * res, global_clip_min, global_clip_max) -def remesh_with_attrs(obj, face_size, apply=True, min_remesh_size=None, attributes=None): - - logger.debug(f'remesh_with_attrs on {obj.name=} with {face_size=:.4f} {attributes=}') +def remesh_with_attrs( + obj, face_size, apply=True, min_remesh_size=None, attributes=None +): + logger.debug( + f"remesh_with_attrs on {obj.name=} with {face_size=:.4f} {attributes=}" + ) temp_copy = deep_clone_obj(obj) - remesh_size = face_size if min_remesh_size is None else max(face_size, min_remesh_size) - butil.modify_mesh(obj, type='REMESH', apply=True, voxel_size=remesh_size) + remesh_size = ( + face_size if min_remesh_size is None else max(face_size, min_remesh_size) + ) + butil.modify_mesh(obj, type="REMESH", apply=True, voxel_size=remesh_size) - transfer_attributes.transfer_all(source=temp_copy, target=obj, attributes=attributes, uvs=True) + transfer_attributes.transfer_all( + source=temp_copy, target=obj, attributes=attributes, uvs=True + ) bpy.data.objects.remove(temp_copy, do_unlink=True) if remesh_size > face_size: @@ -91,15 +104,26 @@ def remesh_with_attrs(obj, face_size, apply=True, min_remesh_size=None, attribut return obj -def sharp_remesh_with_attrs(obj, face_size, apply=True, min_remesh_size=None, attributes=None): +def sharp_remesh_with_attrs( + obj, face_size, apply=True, min_remesh_size=None, attributes=None +): temp_copy = deep_clone_obj(obj) - remesh_size = face_size if min_remesh_size is None else max(face_size, min_remesh_size) - butil.modify_mesh(obj, 'REMESH', apply=apply, mode='SHARP', - 
octree_depth=int(np.ceil(np.log2((max(obj.dimensions) + .01) / remesh_size))), - use_remove_disconnected=False) - - transfer_attributes.transfer_all(source=temp_copy, target=obj, attributes=attributes, uvs=True) + remesh_size = ( + face_size if min_remesh_size is None else max(face_size, min_remesh_size) + ) + butil.modify_mesh( + obj, + "REMESH", + apply=apply, + mode="SHARP", + octree_depth=int(np.ceil(np.log2((max(obj.dimensions) + 0.01) / remesh_size))), + use_remove_disconnected=False, + ) + + transfer_attributes.transfer_all( + source=temp_copy, target=obj, attributes=attributes, uvs=True + ) bpy.data.objects.remove(temp_copy, do_unlink=True) return obj @@ -107,23 +131,37 @@ def sharp_remesh_with_attrs(obj, face_size, apply=True, min_remesh_size=None, at def subdivide_to_face_size(obj, from_facesize, to_facesize, apply=True, max_levels=6): if to_facesize > from_facesize: - logger.warn(f'subdivide_to_facesize recieved {from_facesize=} < {to_facesize=}. Subdivision cannot increase facesize') + logger.warn( + f"subdivide_to_facesize recieved {from_facesize=} < {to_facesize=}. Subdivision cannot increase facesize" + ) return None - levels = int(np.ceil(np.log2(from_facesize/to_facesize))) + levels = int(np.ceil(np.log2(from_facesize / to_facesize))) if max_levels is not None and levels > max_levels: - logger.warn(f'subdivide_to_facesize({obj.name=}, {from_facesize=:.6f}, {to_facesize=:.6f}) attempted {levels=}, clamping to {max_levels=}') + logger.warn( + f"subdivide_to_facesize({obj.name=}, {from_facesize=:.6f}, {to_facesize=:.6f}) attempted {levels=}, clamping to {max_levels=}" + ) levels = max_levels - logger.debug(f'subdivide_to_face_size applying {levels=} of subsurf to {obj.name=}') - _, mod = butil.modify_mesh(obj, 'SUBSURF', apply=apply, - levels=levels, render_levels=levels, return_mod=True) - return mod # None if apply=True + logger.debug(f"subdivide_to_face_size applying {levels=} of subsurf to {obj.name=}") + _, mod = butil.modify_mesh( + obj, + "SUBSURF", + apply=apply, + levels=levels, + render_levels=levels, + return_mod=True, + ) + return mod # None if apply=True + def merged_by_distance_col(col, face_size, inplace=False): if not inplace: with butil.SelectObjects(list(col.objects)): bpy.ops.object.duplicate() - col = butil.group_in_collection(list(bpy.context.selected_objects), - name=col.name + f'.detail({face_size:.5f})', reuse=False) + col = butil.group_in_collection( + list(bpy.context.selected_objects), + name=col.name + f".detail({face_size:.5f})", + reuse=False, + ) for obj in col.objects: butil.merge_by_distance(obj, face_size) @@ -143,35 +181,44 @@ def min_max_edgelen(mesh): def adapt_mesh_resolution(obj, face_size, method, approx=0.2, **kwargs): - - assert obj.type == 'MESH' + assert obj.type == "MESH" assert 0 <= approx and approx <= 0.5 - logger.debug(f'adapt_mesh_resolution on {obj.name} with {method=} to {face_size=:.6f}') + logger.debug( + f"adapt_mesh_resolution on {obj.name} with {method=} to {face_size=:.6f}" + ) if len(obj.data.polygons) == 0: - logger.debug(f'Ignoring adapt_mesh_resolution on {obj.name=} due to no polygons') + logger.debug( + f"Ignoring adapt_mesh_resolution on {obj.name=} due to no polygons" + ) return lmin, lmax = min_max_edgelen(obj.data) - if method == 'subdivide': + if method == "subdivide": if lmax > face_size: - subdivide_to_face_size(obj, from_facesize=lmax, to_facesize=face_size, **kwargs) - elif method == 'subdiv_by_area': + subdivide_to_face_size( + obj, from_facesize=lmax, to_facesize=face_size, **kwargs + ) + elif method 
== "subdiv_by_area": areas = np.zeros(len(obj.data.polygons)) - obj.data.polygons.foreach_get('area', areas) - approx_facesize = np.sqrt(np.percentile(areas, q=1-approx)) + obj.data.polygons.foreach_get("area", areas) + approx_facesize = np.sqrt(np.percentile(areas, q=1 - approx)) if approx_facesize > face_size: - subdivide_to_face_size(obj, from_facesize=approx_facesize, to_facesize=face_size, **kwargs) + subdivide_to_face_size( + obj, from_facesize=approx_facesize, to_facesize=face_size, **kwargs + ) else: - logger.debug(f'No subdivision necessary on {obj.name=} {approx_facesize} < {face_size}') - elif method == 'merge_down': + logger.debug( + f"No subdivision necessary on {obj.name=} {approx_facesize} < {face_size}" + ) + elif method == "merge_down": if lmin < face_size: butil.merge_by_distance(obj, face_size) - elif method == 'remesh': + elif method == "remesh": remesh_with_attrs(obj, face_size, **kwargs) - elif method == 'sharp_remesh': + elif method == "sharp_remesh": sharp_remesh_with_attrs(obj, face_size, **kwargs) else: - raise ValueError(f'Unrecognized adapt_mesh_resolution(..., {method=})') + raise ValueError(f"Unrecognized adapt_mesh_resolution(..., {method=})") diff --git a/infinigen/core/placement/factory.py b/infinigen/core/placement/factory.py index 393ece294..3432d4319 100644 --- a/infinigen/core/placement/factory.py +++ b/infinigen/core/placement/factory.py @@ -7,24 +7,23 @@ # - Lahav Lipson: quickly_resample +import logging import typing import bpy -import mathutils import numpy as np -import logging from tqdm import trange from infinigen.core.util import blender as butil from infinigen.core.util.math import FixedSeed, int_hash -from . import detail + from ...assets.utils.object import center +from . import detail logger = logging.getLogger(__name__) class AssetFactory: - def __init__(self, factory_seed=None, coarse=False): self.factory_seed = factory_seed if self.factory_seed is None: @@ -32,10 +31,10 @@ def __init__(self, factory_seed=None, coarse=False): self.coarse = coarse - logger.debug(f'{self}.__init__()') + logger.debug(f"{self}.__init__()") def __repr__(self): - return f'{self.__class__.__name__}({self.factory_seed})' + return f"{self.__class__.__name__}({self.factory_seed})" @staticmethod def quickly_resample(obj): @@ -54,9 +53,9 @@ def finalize_placeholders(self, placeholders: typing.List[bpy.types.Object]): def asset_parameters(self, distance: float, vis_distance: float) -> dict: # Optionally, override to determine the **params input of create_asset w.r.t. 
camera distance return { - 'face_size': detail.target_face_size(distance), - 'distance': distance, - 'vis_distance': vis_distance + "face_size": detail.target_face_size(distance), + "distance": distance, + "vis_distance": vis_distance, } def create_asset(self, **params) -> bpy.types.Object: @@ -71,41 +70,55 @@ def finalize_assets(self, assets): def spawn_placeholder(self, i, loc, rot): # Not intended to be overridden - override create_placeholder instead - logger.debug(f'{self}.spawn_placeholder({i}...)') + logger.debug(f"{self}.spawn_placeholder({i}...)") with FixedSeed(int_hash((self.factory_seed, i))): obj = self.create_placeholder(i=i, loc=loc, rot=rot) - has_sensitive_constraint = any(c.type in ['FOLLOW_PATH'] for c in obj.constraints) + has_sensitive_constraint = any( + c.type in ["FOLLOW_PATH"] for c in obj.constraints + ) if not has_sensitive_constraint: obj.location = loc obj.rotation_euler = rot else: - logger.debug(f'Not assigning placeholder {obj.name=} location due to presence of' - 'location-sensitive constraint, typically a follow curve') - obj.name = f'{repr(self)}.spawn_placeholder({i})' + logger.debug( + f"Not assigning placeholder {obj.name=} location due to presence of" + "location-sensitive constraint, typically a follow curve" + ) + obj.name = f"{repr(self)}.spawn_placeholder({i})" if obj.parent is not None: logger.warning( - f'{obj.name=} has no-none parent {obj.parent.name=}, this may cause it not to get populated') + f"{obj.name=} has no-none parent {obj.parent.name=}, this may cause it not to get populated" + ) return obj - def spawn_asset(self, i, placeholder=None, distance=None, vis_distance=0, loc=(0, 0, 0), rot=(0, 0, 0), - **kwargs): - + def spawn_asset( + self, + i, + placeholder=None, + distance=None, + vis_distance=0, + loc=(0, 0, 0), + rot=(0, 0, 0), + **kwargs, + ): if not isinstance(i, int): - raise TypeError(f'{i=} {type(i)=}, expected int') + raise TypeError(f"{i=} {type(i)=}, expected int") # Not intended to be overridden - override create_asset instead - logger.debug(f'{self}.spawn_asset({i}...)') + logger.debug(f"{self}.spawn_asset({i}...)") if distance is None: distance = detail.scatter_res_distance() if self.coarse: - raise ValueError('Attempted to spawn_asset() on an AssetFactory(coarse=True)') + raise ValueError( + "Attempted to spawn_asset() on an AssetFactory(coarse=True)" + ) user_provided_placeholder = placeholder is not None @@ -114,16 +127,23 @@ def spawn_asset(self, i, placeholder=None, distance=None, vis_distance=0, loc=(0 else: placeholder = self.spawn_placeholder(i=i, loc=loc, rot=rot) self.finalize_placeholders([placeholder]) - - - gc_targets = [bpy.data.meshes, bpy.data.textures, bpy.data.node_groups, bpy.data.materials] - with FixedSeed(int_hash((self.factory_seed, i))), butil.GarbageCollect(gc_targets, verbose=False): + gc_targets = [ + bpy.data.meshes, + bpy.data.textures, + bpy.data.node_groups, + bpy.data.materials, + ] + + with ( + FixedSeed(int_hash((self.factory_seed, i))), + butil.GarbageCollect(gc_targets, verbose=False), + ): params = self.asset_parameters(distance, vis_distance) params.update(kwargs) obj = self.create_asset(i=i, placeholder=placeholder, **params) - obj.name = f'{repr(self)}.spawn_asset({i})' + obj.name = f"{repr(self)}.spawn_asset({i})" if user_provided_placeholder: if obj is not placeholder: @@ -144,8 +164,17 @@ def spawn_asset(self, i, placeholder=None, distance=None, vis_distance=0, loc=(0 def post_init(self): pass -def make_asset_collection(spawn_fns, n, name=None, weights=None, as_list=False, 
verbose=True, centered=False, - **kwargs): + +def make_asset_collection( + spawn_fns, + n, + name=None, + weights=None, + as_list=False, + verbose=True, + centered=False, + **kwargs, +): if not isinstance(spawn_fns, list): spawn_fns = [spawn_fns] if weights is None: @@ -153,10 +182,10 @@ def make_asset_collection(spawn_fns, n, name=None, weights=None, as_list=False, weights /= sum(weights) if name is None: - name = ','.join([repr(f) for f in spawn_fns]) + name = ",".join([repr(f) for f in spawn_fns]) if verbose: - logger.info(f'Generating collection of {n} assets from {name}') + logger.info(f"Generating collection of {n} assets from {name}") objs = [[] for _ in range(len(spawn_fns))] r = trange(n) if verbose else range(n) @@ -169,7 +198,7 @@ def make_asset_collection(spawn_fns, n, name=None, weights=None, as_list=False, objs[fn_idx].append(obj) for os, f in zip(objs, spawn_fns): - if hasattr(f, 'finalize_assets'): + if hasattr(f, "finalize_assets"): f.finalize_assets(os) objs = sum(objs, start=[]) @@ -177,8 +206,7 @@ def make_asset_collection(spawn_fns, n, name=None, weights=None, as_list=False, if as_list: return objs else: - col = butil.group_in_collection(objs, name=f'assets:{name}', reuse=False) + col = butil.group_in_collection(objs, name=f"assets:{name}", reuse=False) col.hide_viewport = True col.hide_render = True return col - diff --git a/infinigen/core/placement/instance_scatter.py b/infinigen/core/placement/instance_scatter.py index ace0c62d9..5c81dbd01 100644 --- a/infinigen/core/placement/instance_scatter.py +++ b/infinigen/core/placement/instance_scatter.py @@ -4,28 +4,32 @@ # Authors: Alexander Raistrick, Lahav Lipson -from math import prod import logging +from math import prod import bpy -from mathutils import Vector - import numpy as np +from mathutils import Vector from infinigen.assets.utils.misc import CountInstance from infinigen.core import surface from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.core.util import blender as butil from infinigen.core.placement.camera import nodegroup_active_cam_info +from infinigen.core.util import blender as butil logger = logging.getLogger(__name__) + def _less(a, b, nw): - return nw.new_node(Nodes.Compare, [a, b], attrs={"data_type": "FLOAT", "operation": "LESS_THAN"}) + return nw.new_node( + Nodes.Compare, [a, b], attrs={"data_type": "FLOAT", "operation": "LESS_THAN"} + ) def _greater(a, b, nw): - return nw.new_node(Nodes.Compare, [a, b], attrs={"data_type": "FLOAT", "operation": "GREATER_THAN"}) + return nw.new_node( + Nodes.Compare, [a, b], attrs={"data_type": "FLOAT", "operation": "GREATER_THAN"} + ) def _band(a, b, nw): @@ -54,23 +58,61 @@ def camera_cull_points(nw, fov=25, camera=None, near_dist_margin=5): camera = bpy.context.scene.camera camera_info = nw.new_node(nodegroup_active_cam_info().name) - distance = nw.new_node(Nodes.VectorMath, [instance_position, camera_info], attrs={"operation": "DISTANCE"}) - pt_to_cam = nw.new_node(Nodes.VectorMath, [instance_position, camera_info], attrs={"operation": "SUBTRACT"}) - pt_to_cam_normalized = nw.new_node(Nodes.VectorMath, [pt_to_cam], attrs={"operation": "NORMALIZE"}) - cam_dir = nw.new_node(Nodes.VectorRotate, attrs={"rotation_type": 'EULER_XYZ'}, - input_kwargs={"Vector": _vecnode((0., 0., -1.), nw), - "Rotation": (camera_info, "Rotation")}) - dot_prod = nw.new_node(Nodes.VectorMath, [pt_to_cam_normalized, cam_dir], {"operation": "DOT_PRODUCT"}) + distance = nw.new_node( + Nodes.VectorMath, + [instance_position, camera_info], + 
attrs={"operation": "DISTANCE"}, + ) + pt_to_cam = nw.new_node( + Nodes.VectorMath, + [instance_position, camera_info], + attrs={"operation": "SUBTRACT"}, + ) + pt_to_cam_normalized = nw.new_node( + Nodes.VectorMath, [pt_to_cam], attrs={"operation": "NORMALIZE"} + ) + cam_dir = nw.new_node( + Nodes.VectorRotate, + attrs={"rotation_type": "EULER_XYZ"}, + input_kwargs={ + "Vector": _vecnode((0.0, 0.0, -1.0), nw), + "Rotation": (camera_info, "Rotation"), + }, + ) + dot_prod = nw.new_node( + Nodes.VectorMath, [pt_to_cam_normalized, cam_dir], {"operation": "DOT_PRODUCT"} + ) angle_rad = nw.new_node(Nodes.Math, [dot_prod], {"operation": "ARCCOSINE"}) angle_deg = nw.new_node(Nodes.Math, [angle_rad], {"operation": "DEGREES"}) - visible = nw.new_node(Nodes.BooleanMath, [_less(angle_deg, fov, nw), _less(distance, near_dist_margin, nw)], - attrs={'operation': 'OR'}) + visible = nw.new_node( + Nodes.BooleanMath, + [_less(angle_deg, fov, nw), _less(distance, near_dist_margin, nw)], + attrs={"operation": "OR"}, + ) return visible, distance -def bucketed_instance(nw, points, collection, distance, buckets, selection, scaling, rotation, instance_index=None): - instance_index = {'Instance Index': surface.eval_argument(nw, instance_index, n=len( - collection.objects))} if instance_index is not None else {} + +def bucketed_instance( + nw, + points, + collection, + distance, + buckets, + selection, + scaling, + rotation, + instance_index=None, +): + instance_index = ( + { + "Instance Index": surface.eval_argument( + nw, instance_index, n=len(collection.objects) + ) + } + if instance_index is not None + else {} + ) collection_info = nw.new_node(Nodes.CollectionInfo, [collection, True, True]) instance_groups = [] @@ -81,27 +123,58 @@ def bucketed_instance(nw, points, collection, distance, buckets, selection, scal upper_val = nw.expose_input(f"Cutoff_{idx + 1}", val=cutoff) distance_thresh = _in_bucket(distance, prev_upper_val, upper_val, nw) - lower_res_collection = nw.new_node(Nodes.MergeByDistance, [collection_info], input_kwargs={ - "Distance": nw.expose_input(f"Merge_By_Dist_{idx + 1}", merge_dist)}) - separate_points = nw.new_node(Nodes.SeparateGeometry, [points, distance_thresh], - attrs={"domain": "POINT"}) - instance_on_points = nw.new_node(Nodes.InstanceOnPoints, [separate_points], - input_kwargs={"Instance": collection_info, "Pick Instance": True, - **instance_index, "Scale": scaling, "Selection": selection, - "Rotation": rotation}) + nw.new_node( + Nodes.MergeByDistance, + [collection_info], + input_kwargs={ + "Distance": nw.expose_input(f"Merge_By_Dist_{idx + 1}", merge_dist) + }, + ) + separate_points = nw.new_node( + Nodes.SeparateGeometry, [points, distance_thresh], attrs={"domain": "POINT"} + ) + instance_on_points = nw.new_node( + Nodes.InstanceOnPoints, + [separate_points], + input_kwargs={ + "Instance": collection_info, + "Pick Instance": True, + **instance_index, + "Scale": scaling, + "Selection": selection, + "Rotation": rotation, + }, + ) instance_groups.append(instance_on_points) - return nw.new_node(Nodes.JoinGeometry, input_kwargs={'Geometry': instance_groups}) + return nw.new_node(Nodes.JoinGeometry, input_kwargs={"Geometry": instance_groups}) + def geo_instance_scatter( - nw: NodeWrangler, base_obj, collection, density, - fov=None, dist_max=None, no_culling_dist=5, min_spacing=0, - scaling=Vector((1, 1, 1)), normal=None, normal_fac=1, rotation_offset=None, - selection=True, taper_scale=False, taper_density=False, - ground_offset=0, instance_index=None, - transform_space='RELATIVE', 
reset_children=True, realize=False + nw: NodeWrangler, + base_obj, + collection, + density, + fov=None, + dist_max=None, + no_culling_dist=5, + min_spacing=0, + scaling=Vector((1, 1, 1)), + normal=None, + normal_fac=1, + rotation_offset=None, + selection=True, + taper_scale=False, + taper_density=False, + ground_offset=0, + instance_index=None, + transform_space="RELATIVE", + reset_children=True, + realize=False, ): - base_geo = nw.new_node(Nodes.ObjectInfo, [base_obj], attrs={'transform_space':transform_space}).outputs['Geometry'] + base_geo = nw.new_node( + Nodes.ObjectInfo, [base_obj], attrs={"transform_space": transform_space} + ).outputs["Geometry"] overall_density = nw.expose_input("Overall Density", val=density) selection_val = surface.eval_argument(nw, selection) @@ -113,111 +186,188 @@ def geo_instance_scatter( if density_scalar is not None: if taper_density: - overall_density = nw.new_node(Nodes.Math, [density_scalar, overall_density], attrs={'operation': 'MULTIPLY'}) + overall_density = nw.new_node( + Nodes.Math, + [density_scalar, overall_density], + attrs={"operation": "MULTIPLY"}, + ) if taper_scale: - scaling = nw.new_node(Nodes.VectorMath, input_kwargs={0: scaling, 'Scale': density_scalar}, attrs={'operation': 'SCALE'}) - - points = nw.new_node(Nodes.DistributePointsOnFaces, - [base_geo], input_kwargs={"Density": overall_density, "Selection": selection_val}) + scaling = nw.new_node( + Nodes.VectorMath, + input_kwargs={0: scaling, "Scale": density_scalar}, + attrs={"operation": "SCALE"}, + ) + + points = nw.new_node( + Nodes.DistributePointsOnFaces, + [base_geo], + input_kwargs={"Density": overall_density, "Selection": selection_val}, + ) distribute_points = points - + if min_spacing > 0: - points = nw.new_node(Nodes.MergeByDistance, - input_kwargs={'Geometry': points, 'Distance': surface.eval_argument(nw, min_spacing)}) + points = nw.new_node( + Nodes.MergeByDistance, + input_kwargs={ + "Geometry": points, + "Distance": surface.eval_argument(nw, min_spacing), + }, + ) point_fields = {} - normal = (distribute_points, "Normal") if normal is None else surface.eval_argument(nw, normal) - rotation_val = nw.new_node(Nodes.AlignEulerToVector, attrs={"axis": "Z"}, - input_kwargs={"Factor": surface.eval_argument(nw, normal_fac), "Vector": normal}) - rotation_val = nw.new_node(Nodes.RotateEuler, [rotation_val], {"type": "AXIS_ANGLE", "space": "LOCAL"}, - input_kwargs={"Axis": Vector((0., 0., 1.)), "Angle": nw.uniform(0, 1e4)}) + normal = ( + (distribute_points, "Normal") + if normal is None + else surface.eval_argument(nw, normal) + ) + rotation_val = nw.new_node( + Nodes.AlignEulerToVector, + attrs={"axis": "Z"}, + input_kwargs={ + "Factor": surface.eval_argument(nw, normal_fac), + "Vector": normal, + }, + ) + rotation_val = nw.new_node( + Nodes.RotateEuler, + [rotation_val], + {"type": "AXIS_ANGLE", "space": "LOCAL"}, + input_kwargs={"Axis": Vector((0.0, 0.0, 1.0)), "Angle": nw.uniform(0, 1e4)}, + ) if rotation_offset is not None: - rotation_val = nw.new_node(Nodes.RotateEuler, attrs=dict(space='OBJECT'), input_kwargs={ - 'Rotation':surface.eval_argument(nw, rotation_offset), 'Rotate By':rotation_val}) - point_fields['rotation'] = (rotation_val, 'FLOAT_VECTOR') - + rotation_val = nw.new_node( + Nodes.RotateEuler, + attrs=dict(space="OBJECT"), + input_kwargs={ + "Rotation": surface.eval_argument(nw, rotation_offset), + "Rotate By": rotation_val, + }, + ) + point_fields["rotation"] = (rotation_val, "FLOAT_VECTOR") + if instance_index is not None: inst = surface.eval_argument(nw, 
instance_index, n=len(collection.objects)) - point_fields['instance_index'] = (inst, 'INT') + point_fields["instance_index"] = (inst, "INT") if scaling is not None: - point_fields['scaling'] = (surface.eval_argument(nw, scaling), 'FLOAT_VECTOR') + point_fields["scaling"] = (surface.eval_argument(nw, scaling), "FLOAT_VECTOR") if ground_offset != 0: - point_fields['ground_offset'] = (surface.eval_argument(nw, ground_offset), 'FLOAT') + point_fields["ground_offset"] = ( + surface.eval_argument(nw, ground_offset), + "FLOAT", + ) if dist_max is not None or fov is not None: - for k, (soc, dtype) in point_fields.items(): - points = nw.new_node(Nodes.CaptureAttribute, input_kwargs={'Geometry': points, 'Value': soc}, attrs={'data_type': dtype}) + points = nw.new_node( + Nodes.CaptureAttribute, + input_kwargs={"Geometry": points, "Value": soc}, + attrs={"data_type": dtype}, + ) point_fields[k] = points # camera-based culling - visible, distance = camera_cull_points(nw, fov=nw.expose_input("FOV", val=fov), near_dist_margin=no_culling_dist) - points = nw.new_node(Nodes.SeparateGeometry, - [points, visible], attrs={"domain": "POINT"}) + visible, distance = camera_cull_points( + nw, fov=nw.expose_input("FOV", val=fov), near_dist_margin=no_culling_dist + ) + points = nw.new_node( + Nodes.SeparateGeometry, [points, visible], attrs={"domain": "POINT"} + ) if dist_max is not None: in_range = _less(distance, dist_max) - points = nw.new_node(Nodes.SeparateGeometry, - [points, in_range], attrs={"domain": "POINT"}) + points = nw.new_node( + Nodes.SeparateGeometry, [points, in_range], attrs={"domain": "POINT"} + ) else: for k, v in point_fields.items(): point_fields[k] = v[0] - - collection_info = nw.new_node(Nodes.CollectionInfo, [collection, True, reset_children]) - - instances = nw.new_node(Nodes.InstanceOnPoints, [points], input_kwargs={ - "Instance": collection_info, "Pick Instance": True, - 'Instance Index': point_fields.get('instance_index'), - "Rotation": point_fields.get('rotation'), - "Scale": point_fields.get('scaling')}) - + + collection_info = nw.new_node( + Nodes.CollectionInfo, [collection, True, reset_children] + ) + + instances = nw.new_node( + Nodes.InstanceOnPoints, + [points], + input_kwargs={ + "Instance": collection_info, + "Pick Instance": True, + "Instance Index": point_fields.get("instance_index"), + "Rotation": point_fields.get("rotation"), + "Scale": point_fields.get("scaling"), + }, + ) + if ground_offset != 0: - instances = nw.new_node(Nodes.TranslateInstances, [instances], - input_kwargs={ "Translation": nw.combine(0, 0, point_fields['ground_offset']), "Local Space": True}) - + instances = nw.new_node( + Nodes.TranslateInstances, + [instances], + input_kwargs={ + "Translation": nw.combine(0, 0, point_fields["ground_offset"]), + "Local Space": True, + }, + ) + if realize: instances = nw.new_node(Nodes.RealizeInstances, [instances]) - nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': instances}) + nw.new_node(Nodes.GroupOutput, input_kwargs={"Geometry": instances}) + def scatter_instances( - collection, - density=None, vol_density=None, max_density=5000, - scale=None, scale_rand=0, scale_rand_axi=0, apply_geo=False, - **kwargs + collection, + density=None, + vol_density=None, + max_density=5000, + scale=None, + scale_rand=0, + scale_rand_axi=0, + apply_geo=False, + **kwargs, ): - if np.sum([density is None, vol_density is None]) != 1: - raise ValueError(f'Scatter instances got {density=} and {vol_density=} expected only one of the three') + raise ValueError( + f"Scatter 
instances got {density=} and {vol_density=} expected only one of the three" + ) - name = 'scatter:' + collection.name.split(':')[-1] + name = "scatter:" + collection.name.split(":")[-1] - avg_scale = scale * (1 - scale_rand/2) * (1 - scale_rand_axi/2) + avg_scale = scale * (1 - scale_rand / 2) * (1 - scale_rand_axi / 2) if vol_density is not None: - assert scale is not None, 'Cannot compute expected collection vol when using legacy scaling= func' - assert density is None # ensured by check above + assert ( + scale is not None + ), "Cannot compute expected collection vol when using legacy scaling= func" + assert density is None # ensured by check above avg_vol = np.mean([prod(list(o.dimensions)) for o in collection.objects]) - density = vol_density / (avg_vol * avg_scale ** 2) # TODO cube power? + density = vol_density / (avg_vol * avg_scale**2) # TODO cube power? if density > max_density: - logger.warning(f'scatter_instances with {collection.name=} {vol_density=} {avg_scale=:.4f} {avg_vol=:.4f} attempted {density=:.4f}, clamping to {max_density=}') + logger.warning( + f"scatter_instances with {collection.name=} {vol_density=} {avg_scale=:.4f} {avg_vol=:.4f} attempted {density=:.4f}, clamping to {max_density=}" + ) density = max_density - + if scale is not None: - assert 'scaling' not in kwargs + assert "scaling" not in kwargs + def scaling(nw: NodeWrangler): - axis_scaling = nw.new_node(Nodes.RandomValue, - input_kwargs={0: 3*(1-scale_rand_axi,), 1:3*(1,)}, - attrs={"data_type": 'FLOAT_VECTOR'}) + axis_scaling = nw.new_node( + Nodes.RandomValue, + input_kwargs={0: 3 * (1 - scale_rand_axi,), 1: 3 * (1,)}, + attrs={"data_type": "FLOAT_VECTOR"}, + ) overall = nw.uniform(1 - scale_rand, 1) - return nw.multiply(axis_scaling, overall, 3*(scale,)) - kwargs['scaling'] = scaling + return nw.multiply(axis_scaling, overall, 3 * (scale,)) + + kwargs["scaling"] = scaling scatter_obj = butil.spawn_vert(name) kwargs.update(dict(collection=collection, density=density)) with CountInstance(name): - surface.add_geomod(scatter_obj, geo_instance_scatter, apply=apply_geo, input_kwargs=kwargs) - butil.put_in_collection(scatter_obj, butil.get_collection('scatters')) - return scatter_obj \ No newline at end of file + surface.add_geomod( + scatter_obj, geo_instance_scatter, apply=apply_geo, input_kwargs=kwargs + ) + butil.put_in_collection(scatter_obj, butil.get_collection("scatters")) + return scatter_obj diff --git a/infinigen/core/placement/particles.py b/infinigen/core/placement/particles.py index 31c4d7f5a..8ddd1baa1 100644 --- a/infinigen/core/placement/particles.py +++ b/infinigen/core/placement/particles.py @@ -6,55 +6,52 @@ # - Alexander Raistrick: refactor, boids -from typing import Union -import math import logging +import math from copy import copy +from typing import Union -import numpy as np -from numpy.random import uniform as U, normal as N, uniform import bpy -from infinigen.core.util.logging import Suppress from infinigen.core.util import blender as butil -from infinigen.core.util.random import log_uniform +from infinigen.core.util.logging import Suppress logger = logging.getLogger(__name__) -def bake(emitter, system): - logger.info(f'Baking particles for {emitter.name=}') +def bake(emitter, system): + logger.info(f"Baking particles for {emitter.name=}") with butil.SelectObjects(emitter): - override = { - 'scene': bpy.context.scene, - 'active_object': emitter, - 'point_cache': system.point_cache, + "scene": bpy.context.scene, + "active_object": emitter, + "point_cache": system.point_cache, } with 
Suppress(): bpy.context.scene.frame_end += 1 bpy.ops.ptcache.bake(override, bake=True) bpy.context.scene.frame_end -= 1 + def configure_boids(system_config, settings): boids = system_config.boids - boids.states[0].rule_fuzzy = settings.pop('rule_fuzzy', 0.5) + boids.states[0].rule_fuzzy = settings.pop("rule_fuzzy", 0.5) - if rules := settings.pop('rules', None): + if rules := settings.pop("rules", None): context = bpy.context.copy() - context['particle_settings'] = system_config + context["particle_settings"] = system_config for _ in boids.states[0].rules.keys(): bpy.ops.boid.rule_del(context) for r in rules: - bpy.ops.boid.rule_add(context, type=r.pop('type')) + bpy.ops.boid.rule_add(context, type=r.pop("type")) for k, v in r.items(): setattr(boids.states[0].rules[-1], k, v) assert len(boids.states[0].rules) == len(rules) - if goal := settings.pop('goal_obj', None): + if goal := settings.pop("goal_obj", None): try: - goal_rule = next(r for r in boids.states[0].rules if r.type == 'GOAL') + goal_rule = next(r for r in boids.states[0].rules if r.type == "GOAL") goal_rule.object = goal except StopIteration: pass @@ -62,17 +59,17 @@ def configure_boids(system_config, settings): for k, v in settings.items(): setattr(boids, k, v) -def as_particle_collection(subject, prefix='particleassets'): - ''' - Particle assets cannot have hide_render=True or they will be invisible, +def as_particle_collection(subject, prefix="particleassets"): + """ + Particle assets cannot have hide_render=True or they will be invisible, yet this is the default behavior for most asset collections - ''' + """ if subject.name.startswith(prefix): return subject - subject.name = prefix + ':' + subject.name.split(':')[-1] + subject.name = prefix + ":" + subject.name.split(":")[-1] for o in subject.objects: o.location.z -= 100 subject.hide_viewport = True @@ -82,22 +79,22 @@ def as_particle_collection(subject, prefix='particleassets'): def particle_system( - emitter: bpy.types.Object, - subject: Union[bpy.types.Object, bpy.types.Collection], - settings: dict, collision_collection=None, + emitter: bpy.types.Object, + subject: Union[bpy.types.Object, bpy.types.Collection], + settings: dict, + collision_collection=None, ): - - ''' + """ Generalized particle system. 
kwargs are passed through to particle_system.settings - ''' + """ if isinstance(subject, bpy.types.Collection): subject = as_particle_collection(subject) emitter.name = f"particles:emitter({subject.name.split(':')[-1]})" - mod = emitter.modifiers.new(name='PARTICLE', type='PARTICLE_SYSTEM') + mod = emitter.modifiers.new(name="PARTICLE", type="PARTICLE_SYSTEM") system = emitter.particle_systems[mod.name] emitter.show_instancer_for_viewport = False @@ -106,39 +103,40 @@ def particle_system( settings = copy(settings) if isinstance(subject, bpy.types.Object): - system.settings.render_type = 'OBJECT' + system.settings.render_type = "OBJECT" system.settings.instance_object = subject objects = [subject] elif isinstance(subject, bpy.types.Collection): - - system.settings.render_type = 'COLLECTION' + system.settings.render_type = "COLLECTION" system.settings.instance_collection = subject - system.settings.use_collection_pick_random=True + system.settings.use_collection_pick_random = True objects = list(subject.objects) else: - raise ValueError(f'Unrecognized {type(subject)=}') - + raise ValueError(f"Unrecognized {type(subject)=}") + butil.origin_set(objects, "ORIGIN_GEOMETRY", center="MEDIAN") - + dur = bpy.context.scene.frame_end - bpy.context.scene.frame_start - system.settings.frame_start = bpy.context.scene.frame_start - settings.pop('warmup_frames', 0) + system.settings.frame_start = bpy.context.scene.frame_start - settings.pop( + "warmup_frames", 0 + ) system.settings.frame_end = ( - bpy.context.scene.frame_start + - settings.pop('emit_duration', dur) + - settings.pop('warmup_frames', 0) + bpy.context.scene.frame_start + + settings.pop("emit_duration", dur) + + settings.pop("warmup_frames", 0) ) - if (g := settings.pop('effect_gravity', None)) is not None: + if (g := settings.pop("effect_gravity", None)) is not None: system.settings.effector_weights.gravity = g - if (d := settings.pop('density', None)) is not None: - assert 'count' not in settings + if (d := settings.pop("density", None)) is not None: + assert "count" not in settings measure = math.prod([v for v in emitter.dimensions if v != 0]) system.settings.count = math.ceil(d * measure) - if (b := settings.pop('boids_settings', None)) is not None: - system.settings.physics_type='BOIDS' + if (b := settings.pop("boids_settings", None)) is not None: + system.settings.physics_type = "BOIDS" configure_boids(system.settings, b) if collision_collection is not None: @@ -148,95 +146,3 @@ def particle_system( setattr(system.settings, k, v) return emitter, system - -def falling_leaf_settings(): - - rate = U(0.001, 0.006) - dur = max(bpy.context.scene.frame_end - bpy.context.scene.frame_start, 500) - - return dict( - warmup_frames=1024, - density=rate * dur, - particle_size=N(0.5, 0.15), - size_random=U(0.1, 0.2), - lifetime=dur, - use_rotations=True, - rotation_factor_random=1.0, - use_die_on_collision=False, - drag_factor=0.2, - damping=0.3, - mass=0.01, - normal_factor=0.0, - angular_velocity_mode='RAND', - angular_velocity_factor=U(0, 3), - use_dynamic_rotation=True - ) - -def floating_dust_settings(): - - return dict( - mass=0.0001, - count=int(7000*U(0.5, 2)), - lifetime=1000, - warmup_frames=100, - particle_size=0.001, - size_random=uniform(.7, 1.), - emit_from='VOLUME', - damping=1.0, - drag_factor=1.0, - effect_gravity=U(0.3, 0.7), # partially buoyant - ) - -def marine_snow_setting(): - return dict( - mass=0.0001, - count=int(10000*U(0.5, 2)), - lifetime=1000, - warmup_frames=100, - particle_size=0.005, - size_random=uniform(.7, 1.), - 
emit_from='VOLUME', - brownian_factor=log_uniform(.0002, .0005), - damping=log_uniform(.95, .98), - drag_factor=uniform(.85, .95), - factor_random=uniform(.1, .2), - use_rotations=True, - phase_factor_random=uniform(.2,.5), - use_dynamic_rotation=True, - effect_gravity=U(0, 0.5) - ) - -def rain_settings(): - - drops_per_sec_m2 = U(0.05, 1) - velocity = U(9, 20) - lifetime = 100 - - return dict( - mass=0.001, - warmup_frames=100, - density=drops_per_sec_m2*lifetime, - lifetime=lifetime, - particle_size=U(0.01, 0.015), - size_random=U(0.005, 0.01), - normal_factor=-velocity, - effect_gravity=0.0, - use_die_on_collision=True, - ) - -def snow_settings(): - - density = U(2, 26) - - return dict( - mass=0.001, - density=density, - lifetime=2000, - warmup_frames=1000, - particle_size=0.003, - emit_from='FACE', - damping=1.0, - drag_factor=1.0, - use_rotations=True, - use_die_on_collision=True, - ) \ No newline at end of file diff --git a/infinigen/core/placement/path_finding.py b/infinigen/core/placement/path_finding.py index 0e491d3ca..afea41344 100644 --- a/infinigen/core/placement/path_finding.py +++ b/infinigen/core/placement/path_finding.py @@ -3,15 +3,13 @@ # Authors: Zeyu Ma -from tqdm import tqdm -import numpy as np -import mathutils import itertools + +import mathutils import networkx as nx +import numpy as np from scipy.sparse import csr_matrix -import matplotlib.pyplot as plt -import os - + def camera_rotation_matrix(pointing_direction, up_vector): forward = pointing_direction / np.linalg.norm(pointing_direction) @@ -21,22 +19,28 @@ def camera_rotation_matrix(pointing_direction, up_vector): up /= np.linalg.norm(up) return np.column_stack((right, up, forward)) -def path_finding(bvhtree, bounding_box, start_pose, end_pose, resolution=100000, margin=0.1): + +def path_finding( + bvhtree, bounding_box, start_pose, end_pose, resolution=100000, margin=0.1 +): volume = np.product(bounding_box[1] - bounding_box[0]) - N = np.floor((bounding_box[1] - bounding_box[0]) * (resolution / volume) ** (1/3)).astype(np.int32) + N = np.floor( + (bounding_box[1] - bounding_box[0]) * (resolution / volume) ** (1 / 3) + ).astype(np.int32) NN = np.product(N) # print(f"{N=}") start_location, start_rotation = start_pose end_location, end_rotation = end_pose - margin_d = np.ceil((resolution / volume) ** (1/3) * margin) + margin_d = np.ceil((resolution / volume) ** (1 / 3) * margin) row = [] col = [] data = [] - + def freespace_ray_check(a, b, margin=0): v = b - a location, *_ = bvhtree.ray_cast(a, v, v.length) - if location is not None: return False + if location is not None: + return False if margin != 0: if v[0] != 0: perp = mathutils.Vector([v[1], -v[0], 0]) @@ -48,7 +52,8 @@ def freespace_ray_check(a, b, margin=0): angle = np.pi * 2 / check_N for i in range(check_N): location, *_ = bvhtree.ray_cast(a + offset, v, v.length) - if location is not None: return False + if location is not None: + return False tar_direction = offset.cross(v) tar_direction *= margin / tar_direction.length offset = offset * np.cos(angle) + tar_direction * np.sin(angle) @@ -57,16 +62,40 @@ def freespace_ray_check(a, b, margin=0): def index(i, j, k): return i * N[1] * N[2] + j * N[2] + k - x, y, z = np.meshgrid(np.arange(N[0]), np.arange(N[1]), np.arange(N[2]), indexing="ij") - x = bounding_box[0][0] + (bounding_box[1][0]-bounding_box[0][0]) * (x+0.5) / N[0] - y = bounding_box[0][1] + (bounding_box[1][1]-bounding_box[0][1]) * (y+0.5) / N[1] - z = bounding_box[0][2] + (bounding_box[1][2]-bounding_box[0][2]) * (z+0.5) / N[2] + x, y, z = 
np.meshgrid( + np.arange(N[0]), np.arange(N[1]), np.arange(N[2]), indexing="ij" + ) + x = ( + bounding_box[0][0] + + (bounding_box[1][0] - bounding_box[0][0]) * (x + 0.5) / N[0] + ) + y = ( + bounding_box[0][1] + + (bounding_box[1][1] - bounding_box[0][1]) * (y + 0.5) / N[1] + ) + z = ( + bounding_box[0][2] + + (bounding_box[1][2] - bounding_box[0][2]) * (z + 0.5) / N[2] + ) x, y, z = x.reshape(-1), y.reshape(-1), z.reshape(-1) - start_index = index(*np.floor((np.array(start_location) - bounding_box[0]) / (bounding_box[1] - bounding_box[0]) * N).astype(np.int32)) - end_index = index(*np.floor((np.array(end_location) - bounding_box[0]) / (bounding_box[1] - bounding_box[0]) * N).astype(np.int32)) - if end_index == start_index: return None - + start_index = index( + *np.floor( + (np.array(start_location) - bounding_box[0]) + / (bounding_box[1] - bounding_box[0]) + * N + ).astype(np.int32) + ) + end_index = index( + *np.floor( + (np.array(end_location) - bounding_box[0]) + / (bounding_box[1] - bounding_box[0]) + * N + ).astype(np.int32) + ) + if end_index == start_index: + return None + x[start_index] = start_pose[0].x y[start_index] = start_pose[0].y z[start_index] = start_pose[0].z @@ -78,9 +107,26 @@ def index(i, j, k): for i, j, k in list(itertools.product(range(N[0]), range(N[1]), range(N[2]))): index_ijk = index(i, j, k) pos_from = mathutils.Vector([x[index_ijk], y[index_ijk], z[index_ijk]]) - for di, dj, dk in [[1, 0, 0], [0, 1, 0], [0, 0, 1], [1, 1, 0], [0, 1, 1], [1, 0, 1], [1, -1, 0], [0, 1, -1], [1, 0, -1]]: - ni, nj, nk = i+di, j+dj, k+dk - if ni >= 0 and nj >= 0 and nk >= 0 and ni < N[0] and nj < N[1] and nk < N[2]: + for di, dj, dk in [ + [1, 0, 0], + [0, 1, 0], + [0, 0, 1], + [1, 1, 0], + [0, 1, 1], + [1, 0, 1], + [1, -1, 0], + [0, 1, -1], + [1, 0, -1], + ]: + ni, nj, nk = i + di, j + dj, k + dk + if ( + ni >= 0 + and nj >= 0 + and nk >= 0 + and ni < N[0] + and nj < N[1] + and nk < N[2] + ): index_nijk = index(ni, nj, nk) pos_to = mathutils.Vector([x[index_nijk], y[index_nijk], z[index_nijk]]) connected = freespace_ray_check(pos_from, pos_to) @@ -111,8 +157,8 @@ def index(i, j, k): for n in lengths_dict: lengths[n] = lengths_dict[n] - mask1 = (lengths[row] >= margin_d) - mask2 = (lengths[col] >= margin_d) + mask1 = lengths[row] >= margin_d + mask2 = lengths[col] >= margin_d row = row[mask1 & mask2] col = col[mask1 & mask2] data = data[mask1 & mask2] @@ -120,24 +166,29 @@ def index(i, j, k): A = csr_matrix((data, (row, col)), shape=(NN, NN)) G = nx.from_scipy_sparse_array(A) - try: path = nx.shortest_path(G, start_index, end_index, weight="weight") - except: + except Exception: return None - + stack = [start_index] for p in path[1:]: back = 0 - while freespace_ray_check(mathutils.Vector([x[stack[-1-back]], y[stack[-1-back]], z[stack[-1-back]]]), mathutils.Vector([x[p], y[p], z[p]]), margin=margin): + while freespace_ray_check( + mathutils.Vector( + [x[stack[-1 - back]], y[stack[-1 - back]], z[stack[-1 - back]]] + ), + mathutils.Vector([x[p], y[p], z[p]]), + margin=margin, + ): back += 1 if back == len(stack): break if back != 1: - stack = stack[:1-back] + stack = stack[: 1 - back] stack.append(p) - + locations = [] lengths = [] for i, p in enumerate(stack): @@ -147,7 +198,8 @@ def index(i, j, k): locations.append(end_pose[0]) else: locations.append(mathutils.Vector([x[p], y[p], z[p]])) - if len(locations) >= 2: lengths.append((locations[-1] - locations[-2]).length) + if len(locations) >= 2: + lengths.append((locations[-1] - locations[-2]).length) keyframed_poses = [] for i in 
range(len(stack)): @@ -157,17 +209,21 @@ def index(i, j, k): if i == len(stack) - 1: rotation_euler = end_pose[1] else: - rotation_matrix = mathutils.Matrix(camera_rotation_matrix(np.array(locations[i] - locations[i-1]), np.array([0, 0, 1]))) @ mathutils.Matrix([[1, 0, 0], [0, -1, 0], [0, 0, -1]]) + rotation_matrix = mathutils.Matrix( + camera_rotation_matrix( + np.array(locations[i] - locations[i - 1]), np.array([0, 0, 1]) + ) + ) @ mathutils.Matrix([[1, 0, 0], [0, -1, 0], [0, 0, -1]]) rotation_euler = rotation_matrix.to_euler() if rotation_euler.y != 0: rotation_euler.y = 0 rotation_euler.x += np.pi rotation_euler.z += np.pi angle_differece = [ - abs(rotation_euler.z - 2 * np.pi - keyframed_poses[i-1][2].z), - abs(rotation_euler.z - keyframed_poses[i-1][2].z), - abs(rotation_euler.z + 2 * np.pi - keyframed_poses[i-1][2].z), + abs(rotation_euler.z - 2 * np.pi - keyframed_poses[i - 1][2].z), + abs(rotation_euler.z - keyframed_poses[i - 1][2].z), + abs(rotation_euler.z + 2 * np.pi - keyframed_poses[i - 1][2].z), ] rotation_euler.z += (np.argmin(angle_differece) - 1) * 2 * np.pi keyframed_poses.append((np.sum(lengths[:i]), locations[i], rotation_euler)) - return keyframed_poses \ No newline at end of file + return keyframed_poses diff --git a/infinigen/core/placement/placement.py b/infinigen/core/placement/placement.py index c76e44a5d..83232e9dd 100644 --- a/infinigen/core/placement/placement.py +++ b/infinigen/core/placement/placement.py @@ -3,28 +3,26 @@ # Authors: Alexander Raistrick -import re import logging -from collections import defaultdict +import re import bpy +import gin import mathutils import numpy as np -from numpy.random import uniform as U from tqdm import tqdm -import gin - -from .factory import AssetFactory - -from mathutils.bvhtree import BVHTree - -from infinigen.core.util import blender as butil, camera as camera_util -from infinigen.core.nodes.node_wrangler import NodeWrangler, Nodes, geometry_node_group_empty_new from infinigen.core import surface -from .factory import AssetFactory - +from infinigen.core.nodes.node_wrangler import ( + Nodes, + NodeWrangler, + geometry_node_group_empty_new, +) from infinigen.core.placement import detail +from infinigen.core.util import blender as butil +from infinigen.core.util import camera as camera_util + +from .factory import AssetFactory logger = logging.getLogger(__name__) @@ -35,28 +33,41 @@ def objects_to_grid(objects, spacing): o.location += spacing * mathutils.Vector((i % rowsize, i // rowsize, 0)) -def placeholder_locs(terrain, overall_density, selection, distance_min=0, altitude=0.0, max_locs=None): - temp_vert = butil.spawn_vert('compute_placeholder_locations') - geo = temp_vert.modifiers.new(name="GEOMETRY", type='NODES') +def placeholder_locs( + terrain, overall_density, selection, distance_min=0, altitude=0.0, max_locs=None +): + temp_vert = butil.spawn_vert("compute_placeholder_locations") + geo = temp_vert.modifiers.new(name="GEOMETRY", type="NODES") if geo.node_group is None: group = geometry_node_group_empty_new() geo.node_group = group nw = NodeWrangler(geo) - base_geo = nw.new_node(Nodes.ObjectInfo, [terrain]).outputs['Geometry'] - - points = nw.new_node(Nodes.DistributePointsOnFaces, attrs={'distribute_method': 'POISSON'}, - input_kwargs={'Mesh': base_geo, 'Selection': surface.eval_argument(nw, selection), - 'Seed': np.random.randint(1e5), 'Density Max': overall_density, - 'Distance Min': distance_min}) - verts = nw.new_node(Nodes.PointsToVertices, input_kwargs={'Points': points}) - verts = 
nw.new_node(Nodes.SetPosition, input_kwargs={'Geometry': verts, 'Offset': (0, 0, altitude)}) - - nw.new_node(Nodes.GroupOutput, input_kwargs={'Geometry': verts}) + base_geo = nw.new_node(Nodes.ObjectInfo, [terrain]).outputs["Geometry"] + + points = nw.new_node( + Nodes.DistributePointsOnFaces, + attrs={"distribute_method": "POISSON"}, + input_kwargs={ + "Mesh": base_geo, + "Selection": surface.eval_argument(nw, selection), + "Seed": np.random.randint(1e5), + "Density Max": overall_density, + "Distance Min": distance_min, + }, + ) + verts = nw.new_node(Nodes.PointsToVertices, input_kwargs={"Points": points}) + verts = nw.new_node( + Nodes.SetPosition, input_kwargs={"Geometry": verts, "Offset": (0, 0, altitude)} + ) + + nw.new_node(Nodes.GroupOutput, input_kwargs={"Geometry": verts}) # dump the point locations out as vertices butil.apply_modifiers(temp_vert, geo) - locations = np.array([temp_vert.matrix_world @ v.co for v in temp_vert.data.vertices]) + locations = np.array( + [temp_vert.matrix_world @ v.co for v in temp_vert.data.vertices] + ) butil.delete(temp_vert) @@ -64,11 +75,11 @@ def placeholder_locs(terrain, overall_density, selection, distance_min=0, altitu return locations + def points_near_camera(cam, scene_bvh, n, alt, dist_range): points = [] while len(points) < n: - rad = np.random.uniform(*dist_range) angle = np.deg2rad(np.random.uniform(0, 360)) off = rad * mathutils.Vector((np.cos(angle), np.sin(angle), 0)) @@ -82,21 +93,30 @@ def points_near_camera(cam, scene_bvh, n, alt, dist_range): return np.array(points) + def scatter_placeholders_mesh( - base_mesh, factory: AssetFactory, - overall_density, selection=None, - distance_min=0, num_placeholders=None, - **kwargs + base_mesh, + factory: AssetFactory, + overall_density, + selection=None, + distance_min=0, + num_placeholders=None, + **kwargs, ): - locations = placeholder_locs(base_mesh, overall_density, selection, distance_min=distance_min, **kwargs) + locations = placeholder_locs( + base_mesh, overall_density, selection, distance_min=distance_min, **kwargs + ) if num_placeholders is not None: np.random.shuffle(locations) if len(locations) < num_placeholders: area = butil.surface_area(base_mesh) - logger.warning(f'Only returning {len(locations)} despite {num_placeholders=} requested. {base_mesh.name} had {area=} {overall_density=}') + logger.warning( + f"Only returning {len(locations)} despite {num_placeholders=} requested. 
{base_mesh.name} had {area=} {overall_density=}" + ) locations = locations[:num_placeholders] return scatter_placeholders(locations, factory) + def scatter_placeholders(locations, factory: AssetFactory): logger.info(f"Placing {len(locations)} placeholders for {factory}") objs = [] @@ -104,38 +124,46 @@ def scatter_placeholders(locations, factory: AssetFactory): rot_z = np.random.uniform(0, 2 * np.pi) obj = factory.spawn_placeholder(i, loc, mathutils.Euler((0, 0, rot_z))) objs.append(obj) - col = butil.group_in_collection(objs, 'placeholders:' + repr(factory)) + col = butil.group_in_collection(objs, "placeholders:" + repr(factory)) factory.finalize_placeholders(objs) return col + def get_placeholder_points(obj): - if obj.type == 'MESH': + if obj.type == "MESH": verts = np.zeros((len(obj.data.vertices), 3)) - obj.data.vertices.foreach_get('co', verts.reshape(-1)) + obj.data.vertices.foreach_get("co", verts.reshape(-1)) return butil.apply_matrix_world(obj, verts) - elif obj.type == 'EMPTY' and obj.empty_display_type == 'CUBE': + elif obj.type == "EMPTY" and obj.empty_display_type == "CUBE": extent = obj.empty_display_size * np.array([-1, 1]) verts = np.stack(np.meshgrid(extent, extent, extent), axis=-1) return butil.apply_matrix_world(obj, verts) else: return np.array([obj.matrix_world.translation]).reshape(1, 3) + def parse_asset_name(name): - match = re.fullmatch('(.*)\((\d+)\)\..*_(.*)\((\d+)\)', name) + match = re.fullmatch("(.*)\((\d+)\)\..*_(.*)\((\d+)\)", name) if not match: return None, None, None, None return list(match.groups()) - + + def populate_collection( - factory: AssetFactory, placeholder_col, - asset_col_target=None, cameras=None, - dist_cull=None, vis_cull=None, verbose=True, cache_system = None, - **asset_kwargs + factory: AssetFactory, + placeholder_col, + asset_col_target=None, + cameras=None, + dist_cull=None, + vis_cull=None, + verbose=True, + cache_system=None, + **asset_kwargs, ): - logger.info(f'Populating placeholders for {factory}') - + logger.info(f"Populating placeholders for {factory}") + if asset_col_target is None: - asset_col_target = butil.get_collection(f'unique_assets:{repr(factory)}') + asset_col_target = butil.get_collection(f"unique_assets:{repr(factory)}") all_objs = [] updated_pholders = [] @@ -145,7 +173,6 @@ def populate_collection( placeholders = tqdm(placeholders) for i, p in enumerate(placeholders): - classname, fac_seed, _, inst_seed = parse_asset_name(p.name) if classname is None: continue @@ -156,13 +183,19 @@ def populate_collection( vis_dist_list = [] for i, camera in enumerate(cameras): points = get_placeholder_points(p) - dists, vis_dists = camera_util.min_dists_from_cam_trajectory(points, camera) + dists, vis_dists = camera_util.min_dists_from_cam_trajectory( + points, camera + ) dist, vis_dist = dists.min(), vis_dists.min() if dist_cull is not None and dist > dist_cull: - logger.debug(f'{p.name=} temporarily culled in camera {i} due to {dist=:.2f} > {dist_cull=}') + logger.debug( + f"{p.name=} temporarily culled in camera {i} due to {dist=:.2f} > {dist_cull=}" + ) continue if vis_cull is not None and vis_dist > vis_cull: - logger.debug(f'{p.name=} temporarily culled in camera {i} due to {vis_dist=:.2f} > {vis_cull=}') + logger.debug( + f"{p.name=} temporarily culled in camera {i} due to {vis_dist=:.2f} > {vis_cull=}" + ) continue populate = True dist_list.append(dist) @@ -170,34 +203,41 @@ def populate_collection( if not populate: p.hide_render = True continue - p['dist'] = min(dist_list) - p['vis_dist'] = min(vis_dist_list) + p["dist"] 
= min(dist_list) + p["vis_dist"] = min(vis_dist_list) else: dist = detail.scatter_res_distance() vis_dist = 0 if cache_system: - if sum(cache_system.n_placed.values()) < cache_system.max_fire_assets and cache_system.n_placed[factory.__class__.__name__] < cache_system.max_per_kind: + if ( + sum(cache_system.n_placed.values()) < cache_system.max_fire_assets + and cache_system.n_placed[factory.__class__.__name__] + < cache_system.max_per_kind + ): i_list = cache_system.find_i_list(factory) ind = np.random.choice(len(i_list)) i_chosen, full_sim_folder, sim_folder = i_list[ind] - obj = factory.spawn_asset(int(i_chosen), placeholder=p, distance=dist, vis_distance=vis_dist) - dom = cache_system.link_fire(full_sim_folder, sim_folder, obj, factory) + obj = factory.spawn_asset( + int(i_chosen), placeholder=p, distance=dist, vis_distance=vis_dist + ) + cache_system.link_fire(full_sim_folder, sim_folder, obj, factory) else: break else: - obj = factory.spawn_asset(i, placeholder=p, - distance=dist, vis_distance=vis_dist, **asset_kwargs) - + obj = factory.spawn_asset( + i, placeholder=p, distance=dist, vis_distance=vis_dist, **asset_kwargs + ) + if p is not obj: p.hide_render = True for o in butil.iter_object_tree(obj): butil.put_in_collection(o, asset_col_target) - obj['dist'] = dist - obj['vis_dist'] = vis_dist + obj["dist"] = dist + obj["vis_dist"] = vis_dist updated_pholders.append((inst_seed, p)) all_objs.append((inst_seed, obj)) @@ -208,55 +248,66 @@ def populate_collection( return all_objs, updated_pholders + @gin.configurable -def populate_all(factory_class, camera, dist_cull=200, vis_cull=0, cache_system = None, **kwargs): - - ''' +def populate_all( + factory_class, camera, dist_cull=200, vis_cull=0, cache_system=None, **kwargs +): + """ Find all collections that may have been produced by factory_class, and update them dist_cull: the max dist away from the camera to still populate assets vis_cull: the max dist outside of the view frustrum to still populate assets - ''' + """ results = [] for col in bpy.data.collections: - - if not (match := re.fullmatch('placeholders:((.*)\((\d*)\))', col.name)): + if not (match := re.fullmatch("placeholders:((.*)\((\d*)\))", col.name)): continue full_repr, classname, fac_seed = match.groups() - + if classname != factory_class.__name__: continue - asset_target_col = butil.get_collection(f'unique_assets:{full_repr}') + asset_target_col = butil.get_collection(f"unique_assets:{full_repr}") asset_target_col.hide_viewport = False if len(asset_target_col.objects) > 0: - logger.info(f'Skipping populating {col.name=} since {asset_target_col.name=} is already populated') + logger.info( + f"Skipping populating {col.name=} since {asset_target_col.name=} is already populated" + ) continue new_assets, pholders = populate_collection( - factory_class(int(fac_seed), **kwargs), col, asset_target_col, - camera, dist_cull=dist_cull, vis_cull=vis_cull, cache_system=cache_system) + factory_class(int(fac_seed), **kwargs), + col, + asset_target_col, + camera, + dist_cull=dist_cull, + vis_cull=vis_cull, + cache_system=cache_system, + ) results.append((fac_seed, pholders, new_assets)) return results -def make_placeholders_float(placeholder_col, scene_bvh, water): +def make_placeholders_float(placeholder_col, scene_bvh, water): deps = bpy.context.evaluated_depsgraph_get() water_bvh = mathutils.bvhtree.BVHTree.FromObject(water, deps) up = mathutils.Vector((0, 0, 1)) margin = mathutils.Vector((0, 0, 1e-3)) - for p in tqdm(placeholder_col.objects, desc=f'Computing fluid-floating locations 
for {placeholder_col.name=}'): + for p in tqdm( + placeholder_col.objects, + desc=f"Computing fluid-floating locations for {placeholder_col.name=}", + ): w_up, *_ = water_bvh.ray_cast(p.location + margin, up) if w_up is not None: t_up, *_ = scene_bvh.ray_cast(p.location + margin, up) z = min(w_up.z, t_up.z) if t_up is not None else w_up.z - z = max(p.location.z, z - 0.7) # the origin will be the creature's foot, allow some space for the rest of it + z = max( + p.location.z, z - 0.7 + ) # the origin will be the creature's foot, allow some space for the rest of it p.location.z = np.random.uniform(p.location.z, z) - - - diff --git a/infinigen/core/placement/split_in_view.py b/infinigen/core/placement/split_in_view.py index e0cab09f7..ea4a1c09a 100644 --- a/infinigen/core/placement/split_in_view.py +++ b/infinigen/core/placement/split_in_view.py @@ -7,18 +7,17 @@ import logging import bpy -from mathutils.bvhtree import BVHTree - import numpy as np +from mathutils.bvhtree import BVHTree from tqdm import trange -from infinigen.core.util import blender as butil, camera as cam_util, math -from infinigen.core.util.logging import Suppress from infinigen.core.placement.camera import get_sensor_coords -from infinigen.core import surface +from infinigen.core.util import blender as butil +from infinigen.core.util import camera as cam_util +from infinigen.core.util.logging import Suppress -def raycast_visiblity_mask(obj, cam, start=None, end=None, verbose=True): +def raycast_visiblity_mask(obj, cam, start=None, end=None, verbose=True): bvh = BVHTree.FromObject(obj, bpy.context.evaluated_depsgraph_get()) if start is None: @@ -28,14 +27,18 @@ def raycast_visiblity_mask(obj, cam, start=None, end=None, verbose=True): mask = np.zeros(len(obj.data.vertices), dtype=bool) rangeiter = trange if verbose else range - for i in rangeiter(start, end+1): + for i in rangeiter(start, end + 1): bpy.context.scene.frame_set(i) invworld = obj.matrix_world.inverted() sensor_coords, pix_it = get_sensor_coords(cam) - for x,y in pix_it: - direction = (sensor_coords[y,x] - cam.matrix_world.translation).normalized() + for x, y in pix_it: + direction = ( + sensor_coords[y, x] - cam.matrix_world.translation + ).normalized() origin = cam.matrix_world.translation - _, _, index, dist = bvh.ray_cast(invworld @ origin, invworld.to_3x3() @ direction) + _, _, index, dist = bvh.ray_cast( + invworld @ origin, invworld.to_3x3() @ direction + ) if dist is None: continue for vi in obj.data.polygons[index].vertices: @@ -43,76 +46,91 @@ def raycast_visiblity_mask(obj, cam, start=None, end=None, verbose=True): return mask + def select_vertmask(obj, mask): for i, v in enumerate(obj.data.vertices): v.select = mask[i] for f in obj.data.polygons: f.select = any(mask[vi] for vi in f.vertices) + def duplicate_mask(obj, mask, dilate=0, invert=False): butil.select_none() - with butil.ViewportMode(obj, mode='EDIT'): - bpy.ops.mesh.select_all(action='DESELECT') + with butil.ViewportMode(obj, mode="EDIT"): + bpy.ops.mesh.select_all(action="DESELECT") select_vertmask(obj, mask) - with butil.ViewportMode(obj, mode='EDIT'): + with butil.ViewportMode(obj, mode="EDIT"): for _ in range(dilate): bpy.ops.mesh.select_more() - bpy.ops.mesh.select_mode(use_extend=False, use_expand=False, type='FACE') + bpy.ops.mesh.select_mode(use_extend=False, use_expand=False, type="FACE") if invert: - bpy.ops.mesh.select_all(action='INVERT') + bpy.ops.mesh.select_all(action="INVERT") with Suppress(): - bpy.ops.mesh.duplicate_move() + bpy.ops.mesh.duplicate_move() try: - 
bpy.ops.mesh.separate(type='SELECTED') + bpy.ops.mesh.separate(type="SELECTED") return bpy.context.selected_objects[-1] except RuntimeError: - return butil.spawn_point_cloud('duplicate_mask', [], []) + return butil.spawn_point_cloud("duplicate_mask", [], []) + def split_inview( - obj: bpy.types.Object, cam, vis_margin, - raycast=False, dilate=0, dist_max=1e7, - outofview=True, verbose=False, - print_areas=False, hide_render=None, suffix=None, - **kwargs + obj: bpy.types.Object, + cam, + vis_margin, + raycast=False, + dilate=0, + dist_max=1e7, + outofview=True, + verbose=False, + print_areas=False, + hide_render=None, + suffix=None, + **kwargs, ): - - assert obj.type == 'MESH' - assert cam.type == 'CAMERA' + assert obj.type == "MESH" + assert cam.type == "CAMERA" bpy.context.view_layer.update() verts = np.zeros((len(obj.data.vertices), 3)) - obj.data.vertices.foreach_get('co', verts.reshape(-1)) + obj.data.vertices.foreach_get("co", verts.reshape(-1)) verts = butil.apply_matrix_world(obj, verts) - dists, vis_dists = cam_util.min_dists_from_cam_trajectory(verts, cam, verbose=verbose, **kwargs) + dists, vis_dists = cam_util.min_dists_from_cam_trajectory( + verts, cam, verbose=verbose, **kwargs + ) vis_mask = vis_dists < vis_margin dist_mask = dists < dist_max mask = vis_mask * dist_mask - logging.debug(f'split_inview {vis_mask.mean()=:.2f} {dist_mask.mean()=:.2f} {mask.mean()=:.2f}') + logging.debug( + f"split_inview {vis_mask.mean()=:.2f} {dist_mask.mean()=:.2f} {mask.mean()=:.2f}" + ) if raycast: mask *= raycast_visiblity_mask(obj, cam) - + inview = duplicate_mask(obj, mask, dilate=dilate) if outofview: - outview = duplicate_mask(obj, mask, dilate=dilate, invert=True) + outview = duplicate_mask(obj, mask, dilate=dilate, invert=True) else: - outview = butil.spawn_point_cloud('duplicate_mask', [], []) + outview = butil.spawn_point_cloud("duplicate_mask", [], []) if print_areas: sa_in = butil.surface_area(inview) sa_out = butil.surface_area(outview) - print(f'split {obj.name=} into inview area {sa_in:.2f} and outofview area {sa_out:.2f}') + print( + f"split {obj.name=} into inview area {sa_in:.2f} and outofview area {sa_out:.2f}" + ) - inview.name = obj.name + '.inview' - outview.name = obj.name + '.outofview' + inview.name = obj.name + ".inview" + outview.name = obj.name + ".outofview" if suffix is not None: - inview.name += '_' + suffix - outview.name += '_' + suffix + inview.name += "_" + suffix + outview.name += "_" + suffix if hide_render is not None: inview.hide_render = hide_render diff --git a/infinigen/core/rendering/auto_exposure.py b/infinigen/core/rendering/auto_exposure.py index e4b560431..45317d0d3 100644 --- a/infinigen/core/rendering/auto_exposure.py +++ b/infinigen/core/rendering/auto_exposure.py @@ -1,42 +1,66 @@ # Derived from https://www.blendswap.com/blend/30728 # Original node-graph created by PedroPLopes https://www.blendswap.com/profile/1609866 and licensed CC-0 -import bpy -import mathutils -from numpy.random import uniform, normal, randint -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler + from infinigen.core.nodes import node_utils -from infinigen.core.util.color import color_category -from infinigen.core import surface +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -@node_utils.to_nodegroup('nodegroup_auto_exposure', singleton=False, type='CompositorNodeTree') + +@node_utils.to_nodegroup( + "nodegroup_auto_exposure", singleton=False, type="CompositorNodeTree" +) def nodegroup_auto_exposure(nw: NodeWrangler): # Code generated 
using version 2.6.4 of the node_transpiler - group_input = nw.new_node(Nodes.GroupInput, - expose_input=[('NodeSocketColor', 'Image', (1.0000, 1.0000, 1.0000, 1.0000)), - ('NodeSocketFloat', 'EV Compensation', 0.0000), - ('NodeSocketFloat', 'Metering Area', 1.0000)]) - - divide = nw.new_node('CompositorNodeMath', + group_input = nw.new_node( + Nodes.GroupInput, + expose_input=[ + ("NodeSocketColor", "Image", (1.0000, 1.0000, 1.0000, 1.0000)), + ("NodeSocketFloat", "EV Compensation", 0.0000), + ("NodeSocketFloat", "Metering Area", 1.0000), + ], + ) + + divide = nw.new_node( + "CompositorNodeMath", input_kwargs={0: 1.0000, 1: group_input.outputs["Metering Area"]}, - attrs={'operation': 'DIVIDE'}) - - scale = nw.new_node('CompositorNodeScale', input_kwargs={'Image': group_input.outputs["Image"], 'X': divide, 'Y': divide}) - - multiply = nw.new_node('CompositorNodeMath', + attrs={"operation": "DIVIDE"}, + ) + + scale = nw.new_node( + "CompositorNodeScale", + input_kwargs={"Image": group_input.outputs["Image"], "X": divide, "Y": divide}, + ) + + multiply = nw.new_node( + "CompositorNodeMath", input_kwargs={0: group_input.outputs["EV Compensation"], 1: -1.0000}, - attrs={'operation': 'MULTIPLY'}) - - exposure = nw.new_node(Nodes.Exposure, input_kwargs={'Image': scale, 'Exposure': multiply}) - - levels = nw.new_node('CompositorNodeLevels', input_kwargs={'Image': exposure}, attrs={'channel': 'LUMINANCE'}) - - multiply_1 = nw.new_node('CompositorNodeMath', + attrs={"operation": "MULTIPLY"}, + ) + + exposure = nw.new_node( + Nodes.Exposure, input_kwargs={"Image": scale, "Exposure": multiply} + ) + + levels = nw.new_node( + "CompositorNodeLevels", + input_kwargs={"Image": exposure}, + attrs={"channel": "LUMINANCE"}, + ) + + multiply_1 = nw.new_node( + "CompositorNodeMath", input_kwargs={0: levels.outputs["Mean"], 1: 2.0000}, - attrs={'operation': 'MULTIPLY'}) - - rgb_curves = nw.new_node('CompositorNodeCurveRGB', - input_kwargs={'Image': group_input.outputs["Image"], 'White Level': multiply_1}) - - group_output = nw.new_node(Nodes.GroupOutput, input_kwargs={'Image': rgb_curves}, attrs={'is_active_output': True}) + attrs={"operation": "MULTIPLY"}, + ) + + rgb_curves = nw.new_node( + "CompositorNodeCurveRGB", + input_kwargs={"Image": group_input.outputs["Image"], "White Level": multiply_1}, + ) + + nw.new_node( + Nodes.GroupOutput, + input_kwargs={"Image": rgb_curves}, + attrs={"is_active_output": True}, + ) diff --git a/infinigen/core/rendering/post_render.py b/infinigen/core/rendering/post_render.py index dfeb46c51..1ad7bd5e8 100644 --- a/infinigen/core/rendering/post_render.py +++ b/infinigen/core/rendering/post_render.py @@ -5,47 +5,70 @@ import argparse -import os import logging +import os # ruff: noqa: E402 -os.environ["OPENCV_IO_ENABLE_OPENEXR"]="1" # This must be done BEFORE import cv2. +os.environ["OPENCV_IO_ENABLE_OPENEXR"] = "1" # This must be done BEFORE import cv2. 
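A minimal, self-contained sketch of the EXR-loading pattern this module depends on: OpenCV only decodes `.exr` files when `OPENCV_IO_ENABLE_OPENEXR` is set before the first `import cv2`, which is why the assignment above precedes the import. The frame path below is a hypothetical placeholder, not a file shipped with this patch.

```python
# Sketch only: mirrors post_render.load_exr; the sample path is an assumption.
import os

os.environ["OPENCV_IO_ENABLE_OPENEXR"] = "1"  # must run before cv2 is first imported

import cv2  # noqa: E402
import numpy as np


def load_exr(path: str) -> np.ndarray:
    # ANYCOLOR | ANYDEPTH preserves the float32 channels instead of clamping to 8-bit.
    return cv2.imread(str(path), cv2.IMREAD_ANYCOLOR | cv2.IMREAD_ANYDEPTH)


if __name__ == "__main__":
    depth = load_exr("frames/Depth_0_0_0001_0.exr")[..., 0]  # hypothetical frame name
    print(depth.shape, depth.dtype)
```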
-import cv2 import colorsys +from pathlib import Path +import cv2 import numpy as np -from matplotlib import pyplot as plt -from pathlib import Path from imageio import imwrite - -import flow_vis +from matplotlib import pyplot as plt logger = logging.getLogger(__name__) + def load_exr(path): assert Path(path).exists() and Path(path).suffix == ".exr", path - return cv2.imread(str(path), cv2.IMREAD_ANYCOLOR | cv2.IMREAD_ANYDEPTH) + return cv2.imread(str(path), cv2.IMREAD_ANYCOLOR | cv2.IMREAD_ANYDEPTH) + load_flow = load_exr -load_depth = lambda p: load_exr(p)[..., 0] -load_normals = lambda p: load_exr(p)[...,[2,0,1]] * np.array([-1.,1.,1.]) -load_seg_mask = lambda p: load_exr(p)[...,2].astype(np.int64) -load_uniq_inst = lambda p: load_exr(p).view(np.int32) + + +def load_depth(p): + return load_exr(p)[..., 0] + + +def load_normals(p): + return load_exr(p)[..., [2, 0, 1]] * np.array([-1.0, 1.0, 1.0]) + + +def load_seg_mask(p): + return load_exr(p)[..., 2].astype(np.int64) + + +def load_uniq_inst(p): + return load_exr(p).view(np.int32) + def colorize_flow(optical_flow): - flow_uv = optical_flow[...,:2] + try: + import flow_vis + except ImportError: + logger.warning( + "Flow visualization requires the 'flow_vis' package. Please install via `pip install .[vis]." + ) + return None + + flow_uv = optical_flow[..., :2] flow_color = flow_vis.flow_to_color(flow_uv, convert_to_bgr=False) return flow_color + def colorize_normals(surface_normals): - assert surface_normals.max() < 1+1e-4 - assert surface_normals.min() > -1-1e-4 + assert surface_normals.max() < 1 + 1e-4 + assert surface_normals.min() > -1 - 1e-4 norm = np.linalg.norm(surface_normals, axis=2) - color = np.round((surface_normals + 1) * (255/2)).astype(np.uint8) + color = np.round((surface_normals + 1) * (255 / 2)).astype(np.uint8) color[norm < 1e-4] = 0 return color + def colorize_depth(depth, scale_vmin=1.0): valid = (depth > 1e-3) & (depth < 1e4) vmin = depth[valid].min() * scale_vmin @@ -54,57 +77,70 @@ def colorize_depth(depth, scale_vmin=1.0): norm = plt.Normalize(vmin=vmin, vmax=vmax) depth = cmap(norm(depth)) depth[~valid] = 1 - return np.ascontiguousarray(depth[...,:3] * 255, dtype=np.uint8) + return np.ascontiguousarray(depth[..., :3] * 255, dtype=np.uint8) + def colorize_int_array(data, color_seed=0): H, W, *_ = data.shape data = data.reshape((H * W, -1)) uniq, indices = np.unique(data, return_inverse=True, axis=0) - random_states = [np.random.RandomState(e[:2].astype(np.uint32) + color_seed) for e in uniq] - unique_colors = (np.asarray([colorsys.hsv_to_rgb(s.uniform(0, 1), s.uniform(0.1, 1), 1) for s in random_states]) * 255).astype(np.uint8) + random_states = [ + np.random.RandomState(e[:2].astype(np.uint32) + color_seed) for e in uniq + ] + unique_colors = ( + np.asarray( + [ + colorsys.hsv_to_rgb(s.uniform(0, 1), s.uniform(0.1, 1), 1) + for s in random_states + ] + ) + * 255 + ).astype(np.uint8) return unique_colors[indices].reshape((H, W, 3)) + if __name__ == "__main__": parser = argparse.ArgumentParser() - parser.add_argument('--flow_path', type=Path, default=None) - parser.add_argument('--depth_path', type=Path, default=None) - parser.add_argument('--seg_path', type=Path, default=None) - parser.add_argument('--uniq_inst_path', type=Path, default=None) - parser.add_argument('--normals_path', type=Path, default=None) + parser.add_argument("--flow_path", type=Path, default=None) + parser.add_argument("--depth_path", type=Path, default=None) + parser.add_argument("--seg_path", type=Path, default=None) + 
parser.add_argument("--uniq_inst_path", type=Path, default=None) + parser.add_argument("--normals_path", type=Path, default=None) args = parser.parse_args() if args.flow_path is not None: try: flow_color = colorize_flow(load_flow(args.flow_path)) - output_path = args.flow_path.with_suffix('.png') - imwrite(output_path, flow_color) - print(f"Wrote {output_path}") + if flow_color is not None: + output_path = args.flow_path.with_suffix(".png") + imwrite(output_path, flow_color) + print(f"Wrote {output_path}") except ModuleNotFoundError: - print("Flow visualization requires the 'flow_vis' package. Install it with 'pip install flow_vis'") + print( + "Flow visualization requires the 'flow_vis' package. Install it with 'pip install flow_vis'" + ) pass if args.normals_path is not None: normal_color = colorize_normals(load_normals(args.normals_path)) - output_path = args.normals_path.with_suffix('.png') + output_path = args.normals_path.with_suffix(".png") imwrite(output_path, normal_color) print(f"Wrote {output_path}") if args.depth_path is not None: depth_color = colorize_depth(load_depth(args.depth_path)) - output_path = args.depth_path.with_suffix('.png') + output_path = args.depth_path.with_suffix(".png") imwrite(output_path, depth_color) print(f"Wrote {output_path}") if args.uniq_inst_path is not None: mask_color = colorize_int_array(load_uniq_inst(args.uniq_inst_path)) - output_path = args.uniq_inst_path.with_suffix('.png') + output_path = args.uniq_inst_path.with_suffix(".png") imwrite(output_path, mask_color) print(f"Wrote {output_path}") if args.seg_path is not None: mask_color = colorize_int_array(load_seg_mask(args.seg_path)) - output_path = args.seg_path.with_suffix('.png') + output_path = args.seg_path.with_suffix(".png") imwrite(output_path, mask_color) print(f"Wrote {output_path}") - - \ No newline at end of file diff --git a/infinigen/core/rendering/render.py b/infinigen/core/rendering/render.py index e248b66ac..9e5fdec6f 100644 --- a/infinigen/core/rendering/render.py +++ b/infinigen/core/rendering/render.py @@ -11,24 +11,28 @@ import logging import os import time -import warnings from pathlib import Path import bpy import gin import numpy as np -from imageio import imread, imwrite +from imageio import imwrite -from infinigen.core import init +from infinigen.core import init, surface from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler from infinigen.core.placement import camera as cam_util -from infinigen.core.rendering.post_render import (colorize_depth, colorize_flow, - colorize_normals, colorize_int_array, - load_depth, load_flow, load_normals, - load_seg_mask, load_uniq_inst) -from infinigen.core import surface +from infinigen.core.rendering.post_render import ( + colorize_depth, + colorize_flow, + colorize_int_array, + colorize_normals, + load_depth, + load_flow, + load_normals, + load_seg_mask, + load_uniq_inst, +) from infinigen.core.util import blender as butil -from infinigen.core.util import exporting as exputil from infinigen.core.util.logging import Timer from infinigen.tools.datarelease_toolkit import reorganize_old_framesfolder from infinigen.tools.suffixes import get_suffix @@ -39,6 +43,7 @@ logger = logging.getLogger(__name__) + def remove_translucency(): # The asserts were added since these edge cases haven't appeared yet -Lahav for material in bpy.data.materials: @@ -50,7 +55,10 @@ def remove_translucency(): assert shader_2_soc.is_linked and len(shader_2_soc.links) == 1 shader_1_type = shader_1_soc.links[0].from_node.bl_idname shader_2_type = 
shader_2_soc.links[0].from_node.bl_idname - assert not (shader_1_type in TRANSPARENT_SHADERS and shader_2_type in TRANSPARENT_SHADERS) + assert not ( + shader_1_type in TRANSPARENT_SHADERS + and shader_2_type in TRANSPARENT_SHADERS + ) if shader_1_type in TRANSPARENT_SHADERS: assert not fac_soc.is_linked fac_soc.default_value = 1.0 @@ -58,6 +66,7 @@ def remove_translucency(): assert not fac_soc.is_linked fac_soc.default_value = 0.0 + def set_pass_indices(): tree_output = {} index = 1 @@ -67,14 +76,12 @@ def set_pass_indices(): if obj.pass_index == 0: obj.pass_index = index index += 1 - object_dict = { - "type": obj.type, "object_index": obj.pass_index, "children": [] - } + object_dict = {"type": obj.type, "object_index": obj.pass_index, "children": []} if obj.type == "MESH": - object_dict['num_verts'] = len(obj.data.vertices) - object_dict['num_faces'] = len(obj.data.polygons) - object_dict['materials'] = obj.material_slots.keys() - object_dict['unapplied_modifiers'] = obj.modifiers.keys() + object_dict["num_verts"] = len(obj.data.vertices) + object_dict["num_faces"] = len(obj.data.polygons) + object_dict["materials"] = obj.material_slots.keys() + object_dict["unapplied_modifiers"] = obj.modifiers.keys() tree_output[obj.name] = object_dict for child_obj in obj.children: if child_obj.pass_index == 0: @@ -84,47 +91,60 @@ def set_pass_indices(): index += 1 return tree_output + # Can be pasted directly into the blender console def make_clay(): - clay_material = bpy.data.materials.new(name="clay") - clay_material.diffuse_color = (0.2, 0.05, 0.01, 1) - for obj in bpy.data.objects: - if "atmosphere" not in obj.name.lower() and not obj.hide_render: - if len(obj.material_slots) == 0: - obj.active_material = clay_material - else: - for mat_slot in obj.material_slots: - mat_slot.material = clay_material + clay_material = bpy.data.materials.new(name="clay") + clay_material.diffuse_color = (0.2, 0.05, 0.01, 1) + for obj in bpy.data.objects: + if "atmosphere" not in obj.name.lower() and not obj.hide_render: + if len(obj.material_slots) == 0: + obj.active_material = clay_material + else: + for mat_slot in obj.material_slots: + mat_slot.material = clay_material -@gin.configurable -def compositor_postprocessing(nw, source, show=True, autoexpose=False, autoexpose_level=-2, color_correct=True, distort=0, glare=False): +@gin.configurable +def compositor_postprocessing( + nw, + source, + show=True, + autoexpose=False, + autoexpose_level=-2, + color_correct=True, + distort=0, + glare=False, +): if autoexpose: - source = nw.new_node(nodegroup_auto_exposure().name, input_kwargs={'Image': source, 'EV Comp': autoexpose_level}) + source = nw.new_node( + nodegroup_auto_exposure().name, + input_kwargs={"Image": source, "EV Comp": autoexpose_level}, + ) if distort > 0: - source = nw.new_node(Nodes.LensDistortion, - input_kwargs={'Image': source, 'Dispersion': distort}) + source = nw.new_node( + Nodes.LensDistortion, input_kwargs={"Image": source, "Dispersion": distort} + ) if color_correct: - source = nw.new_node(Nodes.BrightContrast, - input_kwargs={'Image': source, 'Bright': 1.0, 'Contrast': 4.0}) + source = nw.new_node( + Nodes.BrightContrast, + input_kwargs={"Image": source, "Bright": 1.0, "Contrast": 4.0}, + ) if glare: source = nw.new_node( Nodes.Glare, - input_kwargs={'Image': source}, + input_kwargs={"Image": source}, attrs={"glare_type": "GHOSTS", "threshold": 0.5, "mix": -0.99}, ) if show: - nw.new_node(Nodes.Composite, input_kwargs={'Image': source}) + nw.new_node(Nodes.Composite, input_kwargs={"Image": 
source}) + + return source.outputs[0] if hasattr(source, "outputs") else source - return ( - source.outputs[0] - if hasattr(source, 'outputs') - else source - ) @gin.configurable def configure_compositor_output( @@ -135,12 +155,14 @@ def configure_compositor_output( passes_to_save, saving_ground_truth, ): - - file_output_node = nw.new_node(Nodes.OutputFile, attrs={ - "base_path": str(frames_folder), - "format.file_format": 'OPEN_EXR' if saving_ground_truth else 'PNG', - "format.color_mode": 'RGB' - }) + file_output_node = nw.new_node( + Nodes.OutputFile, + attrs={ + "base_path": str(frames_folder), + "format.file_format": "OPEN_EXR" if saving_ground_truth else "PNG", + "format.color_mode": "RGB", + }, + ) file_slot_list = [] viewlayer = bpy.context.scene.view_layers["ViewLayer"] render_layers = nw.new_node(Nodes.RenderLayers) @@ -153,45 +175,54 @@ def configure_compositor_output( render_socket = render_layers.outputs[socket_name] if viewlayer_pass == "vector": separate_color = nw.new_node(Nodes.CompSeparateColor, [render_socket]) - comnbine_color = nw.new_node(Nodes.CompCombineColor, [0, (separate_color, 3), (separate_color, 2), 0]) + comnbine_color = nw.new_node( + Nodes.CompCombineColor, [0, (separate_color, 3), (separate_color, 2), 0] + ) nw.links.new(comnbine_color.outputs[0], slot_input) else: nw.links.new(render_socket, slot_input) file_slot_list.append(file_output_node.file_slots[slot_input.name]) - slot_input = file_output_node.file_slots['Image'] + slot_input = file_output_node.file_slots["Image"] image = image_denoised if image_denoised is not None else image_noisy - nw.links.new(image, file_output_node.inputs['Image']) + nw.links.new(image, file_output_node.inputs["Image"]) if saving_ground_truth: - slot_input.path = 'UniqueInstances' + slot_input.path = "UniqueInstances" else: - image_exr_output_node = nw.new_node(Nodes.OutputFile, attrs={ - "base_path": str(frames_folder), - "format.file_format": 'OPEN_EXR', - "format.color_mode": 'RGB' - }) - rgb_exr_slot_input = file_output_node.file_slots['Image'] - nw.links.new(image, image_exr_output_node.inputs['Image']) + image_exr_output_node = nw.new_node( + Nodes.OutputFile, + attrs={ + "base_path": str(frames_folder), + "format.file_format": "OPEN_EXR", + "format.color_mode": "RGB", + }, + ) + rgb_exr_slot_input = file_output_node.file_slots["Image"] + nw.links.new(image, image_exr_output_node.inputs["Image"]) file_slot_list.append(image_exr_output_node.file_slots[rgb_exr_slot_input.path]) file_slot_list.append(file_output_node.file_slots[slot_input.path]) return file_slot_list + def shader_random(nw: NodeWrangler): # Code generated using version 2.4.3 of the node_transpiler object_info = nw.new_node(Nodes.ObjectInfo_Shader) - white_noise_texture = nw.new_node(Nodes.WhiteNoiseTexture, - input_kwargs={'Vector': object_info.outputs["Random"]}) + white_noise_texture = nw.new_node( + Nodes.WhiteNoiseTexture, input_kwargs={"Vector": object_info.outputs["Random"]} + ) + + nw.new_node( + Nodes.MaterialOutput, + input_kwargs={"Surface": white_noise_texture.outputs["Color"]}, + ) - nw.new_node(Nodes.MaterialOutput, - input_kwargs={'Surface': white_noise_texture.outputs["Color"]}) def global_flat_shading(): - - for obj in bpy.context.scene.view_layers['ViewLayer'].objects: - if 'fire_system_type' in obj and obj['fire_system_type'] == 'volume': + for obj in bpy.context.scene.view_layers["ViewLayer"].objects: + if "fire_system_type" in obj and obj["fire_system_type"] == "volume": continue if obj.name.lower() in {"atmosphere", 
"atmosphere_fine"}: bpy.data.objects.remove(obj) @@ -199,25 +230,25 @@ def global_flat_shading(): nw = obj.active_material.node_tree for node in nw.nodes: if node.bl_idname == Nodes.MaterialOutput: - vol_socket = node.inputs['Volume'] + vol_socket = node.inputs["Volume"] if len(vol_socket.links) > 0: nw.links.remove(vol_socket.links[0]) - for obj in bpy.context.scene.view_layers['ViewLayer'].objects: - if obj.type != 'MESH': + for obj in bpy.context.scene.view_layers["ViewLayer"].objects: + if obj.type != "MESH": continue obj.hide_viewport = False - if 'fire_system_type' in obj and obj['fire_system_type'] == 'gt_mesh': + if "fire_system_type" in obj and obj["fire_system_type"] == "gt_mesh": obj.hide_viewport = False obj.hide_render = False - if not hasattr(obj, 'material_slots'): - print(obj.name, 'NONE') + if not hasattr(obj, "material_slots"): + print(obj.name, "NONE") continue with butil.SelectObjects(obj): for i in range(len(obj.material_slots)): bpy.ops.object.material_slot_remove() - for obj in bpy.context.scene.view_layers['ViewLayer'].objects: + for obj in bpy.context.scene.view_layers["ViewLayer"].objects: surface.add_material(obj, shader_random) for mat in bpy.data.materials: nw = NodeWrangler(mat.node_tree) @@ -227,57 +258,85 @@ def global_flat_shading(): for link in nw.links: nw.links.remove(link) -def postprocess_blendergt_outputs(frames_folder, output_stem): +def postprocess_blendergt_outputs(frames_folder, output_stem): # Save flow visualization flow_dst_path = frames_folder / f"Vector{output_stem}.exr" flow_array = load_flow(flow_dst_path) np.save(flow_dst_path.with_name(f"Flow{output_stem}.npy"), flow_array) - imwrite(flow_dst_path.with_name(f"Flow{output_stem}.png"), colorize_flow(flow_array)) - flow_dst_path.unlink() + + flow_color = colorize_flow(flow_array) + if flow_color is not None: + imwrite( + flow_dst_path.with_name(f"Flow{output_stem}.png"), + flow_color, + ) + flow_dst_path.unlink() # Save surface normal visualization normal_dst_path = frames_folder / f"Normal{output_stem}.exr" normal_array = load_normals(normal_dst_path) np.save(flow_dst_path.with_name(f"SurfaceNormal{output_stem}.npy"), normal_array) - imwrite(flow_dst_path.with_name(f"SurfaceNormal{output_stem}.png"), colorize_normals(normal_array)) + imwrite( + flow_dst_path.with_name(f"SurfaceNormal{output_stem}.png"), + colorize_normals(normal_array), + ) normal_dst_path.unlink() # Save depth visualization depth_dst_path = frames_folder / f"Depth{output_stem}.exr" depth_array = load_depth(depth_dst_path) np.save(flow_dst_path.with_name(f"Depth{output_stem}.npy"), depth_array) - imwrite(depth_dst_path.with_name(f"Depth{output_stem}.png"), colorize_depth(depth_array)) + imwrite( + depth_dst_path.with_name(f"Depth{output_stem}.png"), colorize_depth(depth_array) + ) depth_dst_path.unlink() # Save segmentation visualization seg_dst_path = frames_folder / f"IndexOB{output_stem}.exr" seg_mask_array = load_seg_mask(seg_dst_path) - np.save(flow_dst_path.with_name(f"ObjectSegmentation{output_stem}.npy"), seg_mask_array) - imwrite(seg_dst_path.with_name(f"ObjectSegmentation{output_stem}.png"), colorize_int_array(seg_mask_array)) + np.save( + flow_dst_path.with_name(f"ObjectSegmentation{output_stem}.npy"), seg_mask_array + ) + imwrite( + seg_dst_path.with_name(f"ObjectSegmentation{output_stem}.png"), + colorize_int_array(seg_mask_array), + ) seg_dst_path.unlink() # Save unique instances visualization uniq_inst_path = frames_folder / f"UniqueInstances{output_stem}.exr" uniq_inst_array = load_uniq_inst(uniq_inst_path) 
- np.save(flow_dst_path.with_name(f"InstanceSegmentation{output_stem}.npy"), uniq_inst_array) - imwrite(uniq_inst_path.with_name(f"InstanceSegmentation{output_stem}.png"), colorize_int_array(uniq_inst_array)) + np.save( + flow_dst_path.with_name(f"InstanceSegmentation{output_stem}.npy"), + uniq_inst_array, + ) + imwrite( + uniq_inst_path.with_name(f"InstanceSegmentation{output_stem}.png"), + colorize_int_array(uniq_inst_array), + ) uniq_inst_path.unlink() + def configure_compositor( - frames_folder: Path, - passes_to_save: list, + frames_folder: Path, + passes_to_save: list, flat_shading: bool, ): compositor_node_tree = bpy.context.scene.node_tree nw = NodeWrangler(compositor_node_tree) render_layers = nw.new_node(Nodes.RenderLayers) - final_image_denoised = compositor_postprocessing(nw, source=render_layers.outputs["Image"]) + final_image_denoised = compositor_postprocessing( + nw, source=render_layers.outputs["Image"] + ) final_image_noisy = ( - compositor_postprocessing(nw, source=render_layers.outputs["Noisy Image"], show=False) - if bpy.context.scene.cycles.use_denoising else None + compositor_postprocessing( + nw, source=render_layers.outputs["Noisy Image"], show=False + ) + if bpy.context.scene.cycles.use_denoising + else None ) return configure_compositor_output( @@ -286,9 +345,10 @@ def configure_compositor( image_denoised=final_image_denoised, image_noisy=final_image_noisy, passes_to_save=passes_to_save, - saving_ground_truth=flat_shading + saving_ground_truth=flat_shading, ) + @gin.configurable def render_image( camera_id, @@ -300,7 +360,6 @@ def render_image( use_dof=False, dof_aperture_fstop=2.8, ): - tic = time.time() camera_rig_id, subcam_id = camera_id @@ -319,13 +378,19 @@ def render_image( object_data = set_pass_indices() json_object = json.dumps(object_data, indent=4) first_frame = bpy.context.scene.frame_start - suffix = get_suffix(dict(cam_rig=camera_rig_id, resample=0, frame=first_frame, subcam=subcam_id)) + suffix = get_suffix( + dict( + cam_rig=camera_rig_id, + resample=0, + frame=first_frame, + subcam=subcam_id, + ) + ) (frames_folder / f"Objects{suffix}.json").write_text(json_object) with Timer("Flat Shading"): global_flat_shading() - if not bpy.context.scene.use_nodes: bpy.context.scene.use_nodes = True file_slot_nodes = configure_compositor(frames_folder, passes_to_save, flat_shading) @@ -333,12 +398,12 @@ def render_image( indices = dict(cam_rig=camera_rig_id, resample=0, subcam=subcam_id) ## Update output names - fileslot_suffix = get_suffix({'frame': "####", **indices}) + fileslot_suffix = get_suffix({"frame": "####", **indices}) for file_slot in file_slot_nodes: file_slot.path = f"{file_slot.path}{fileslot_suffix}" camera = cam_util.get_camera(camera_rig_id, subcam_id) - if use_dof == 'IF_TARGET_SET': + if use_dof == "IF_TARGET_SET": use_dof = camera.data.dof.focus_object is not None if use_dof is not None: camera.data.dof.use_dof = use_dof @@ -354,7 +419,9 @@ def render_image( bpy.ops.render.render(animation=True) with Timer("Post Processing"): - for frame in range(bpy.context.scene.frame_start, bpy.context.scene.frame_end + 1): + for frame in range( + bpy.context.scene.frame_start, bpy.context.scene.frame_end + 1 + ): if flat_shading: bpy.context.scene.frame_set(frame) suffix = get_suffix(dict(frame=frame, **indices)) @@ -363,10 +430,10 @@ def render_image( cam_util.save_camera_parameters( camera_ids=cam_util.get_cameras_ids(), output_folder=frames_folder, - frame=frame + frame=frame, ) - for file in tmp_dir.glob('*.png'): + for file in 
tmp_dir.glob("*.png"): file.unlink() reorganize_old_framesfolder(frames_folder) diff --git a/infinigen/core/rendering/resample.py b/infinigen/core/rendering/resample.py index c3f098516..3e6cfa700 100644 --- a/infinigen/core/rendering/resample.py +++ b/infinigen/core/rendering/resample.py @@ -6,21 +6,18 @@ import bpy +from infinigen.assets.lighting import sky_lighting +from infinigen.assets.objects import rocks, trees from infinigen.core.nodes.node_utils import resample_node_group from infinigen.core.nodes.node_wrangler import NodeWrangler - -from infinigen.assets.lighting import sky_lighting - -from infinigen.assets.trees.generate import TreeFactory, BushFactory -from infinigen.assets.rocks.glowing_rocks import GlowingRocksFactory - -from infinigen.core.util.logging import Timer -from infinigen.core.util.math import FixedSeed, int_hash from infinigen.core.util import blender as butil +from infinigen.core.util.logging import Timer +from infinigen.core.util.math import FixedSeed + def resample_all(factory_class): - for placeholder_col in butil.get_collection('placeholders').children: - classname, _ = placeholder_col.name.split('(') + for placeholder_col in butil.get_collection("placeholders").children: + classname, _ = placeholder_col.name.split("(") if classname != factory_class.__name__: continue @@ -28,23 +25,31 @@ def resample_all(factory_class): for pholder in placeholders: factory_class.quickly_resample(pholder) + def resample_scene(scene_seed): - with FixedSeed(scene_seed), Timer('Resample noise nodes in materials'): + with FixedSeed(scene_seed), Timer("Resample noise nodes in materials"): for material in bpy.data.materials: nw = NodeWrangler(material.node_tree) resample_node_group(nw, scene_seed) - with FixedSeed(scene_seed), Timer('Resample noise nodes in scatters'): + + with FixedSeed(scene_seed), Timer("Resample noise nodes in scatters"): for obj in bpy.data.objects: for modifier in obj.modifiers: - if not any(obj.name.startswith(s) for s in ["BlenderRockFactory", "CloudFactory"]): - if modifier.type == 'NODES': + if not any( + obj.name.startswith(s) + for s in ["BlenderRockFactory", "CloudFactory"] + ): + if modifier.type == "NODES": nw = NodeWrangler(modifier.node_group) resample_node_group(nw, scene_seed) - with FixedSeed(scene_seed), Timer('Resample all placeholders'): # CloudFactory too expensive - resample_all(GlowingRocksFactory) - resample_all(TreeFactory) - resample_all(BushFactory) - #resample_all(CreatureFactory) + with ( + FixedSeed(scene_seed), + Timer("Resample all placeholders"), + ): # CloudFactory too expensive + resample_all(rocks.GlowingRocksFactory) + resample_all(trees.TreeFactory) + resample_all(trees.BushFactory) + # resample_all(CreatureFactory) with FixedSeed(scene_seed): - sky_lighting.add_lighting() \ No newline at end of file + sky_lighting.add_lighting() diff --git a/infinigen/core/surface.py b/infinigen/core/surface.py index 5b1302f55..275ceb3d7 100644 --- a/infinigen/core/surface.py +++ b/infinigen/core/surface.py @@ -7,30 +7,34 @@ # - Lahav Lipson: Surface mixing # - Lingjie Mei: attributes and geo nodes +import importlib import string from collections import defaultdict -import importlib from inspect import signature import bpy -from mathutils import Vector import gin import numpy as np +from mathutils import Vector from tqdm import trange -from infinigen.core.util import blender as butil -from infinigen.core.util.blender import set_geomod_inputs # got moved, left here for import compatibility -from infinigen.core.nodes.node_wrangler import 
NodeWrangler, Nodes, isnode, infer_output_socket, \ - geometry_node_group_empty_new +from infinigen.core import tags as t from infinigen.core.nodes import node_info -from infinigen.core import tagging, tags as t +from infinigen.core.nodes.node_wrangler import ( + Nodes, + NodeWrangler, + geometry_node_group_empty_new, + infer_output_socket, + isnode, +) +from infinigen.core.util import blender as butil def remove_materials(obj): with butil.SelectObjects(obj): obj.active_material_index = 0 for i in range(len(obj.material_slots)): - bpy.ops.object.material_slot_remove({'object': obj}) + bpy.ops.object.material_slot_remove({"object": obj}) def write_attribute(objs, node_func, name=None, data_type=None, apply=False): @@ -44,30 +48,42 @@ def attr_writer(nw, **kwargs): if data_type is None: data_type = node_info.NODETYPE_TO_DATATYPE[infer_output_socket(value).type] - capture = nw.new_node(Nodes.CaptureAttribute, attrs={'data_type': data_type}, - input_kwargs={'Geometry': nw.new_node(Nodes.GroupInput), 'Value': value - }) - output = nw.new_node(Nodes.GroupOutput, - input_kwargs={'Geometry': (capture, 'Geometry'), name: (capture, 'Attribute') - }) - - mod = add_geomod(objs, attr_writer, name=f'write_attribute({name})', apply=apply, attributes=[name]) + capture = nw.new_node( + Nodes.CaptureAttribute, + attrs={"data_type": data_type}, + input_kwargs={"Geometry": nw.new_node(Nodes.GroupInput), "Value": value}, + ) + nw.new_node( + Nodes.GroupOutput, + input_kwargs={ + "Geometry": (capture, "Geometry"), + name: (capture, "Attribute"), + }, + ) + + add_geomod( + objs, + attr_writer, + name=f"write_attribute({name})", + apply=apply, + attributes=[name], + ) return name -def read_attr_data(obj, attr, domain='POINT', result_dtype=None) -> np.array: +def read_attr_data(obj, attr, domain="POINT", result_dtype=None) -> np.array: if isinstance(attr, str): attr = obj.data.attributes[attr] domain = attr.domain - if domain == 'POINT': + if domain == "POINT": n = len(obj.data.vertices) - elif domain == 'EDGE': + elif domain == "EDGE": n = len(obj.data.edges) - elif domain == 'FACE': + elif domain == "FACE": n = len(obj.data.polygons) else: - raise NotImplementedError + raise ValueError(f"Unknown domain {domain}") dim = node_info.DATATYPE_DIMS[attr.data_type] field = node_info.DATATYPE_FIELDS[attr.data_type] @@ -86,11 +102,13 @@ def read_attr_data(obj, attr, domain='POINT', result_dtype=None) -> np.array: def set_active(obj, name): attributes = obj.data.attributes - attributes.active_index = next((i for i, a in enumerate(attributes) if a.name == name)) + attributes.active_index = next( + (i for i, a in enumerate(attributes) if a.name == name) + ) attributes.active = attributes[attributes.active_index] -def write_attr_data(obj, attr, data: np.array, type='FLOAT', domain='POINT'): +def write_attr_data(obj, attr, data: np.array, type="FLOAT", domain="POINT"): if isinstance(attr, str): if attr in obj.data.attributes: attr = obj.data.attributes[attr] @@ -102,8 +120,8 @@ def write_attr_data(obj, attr, data: np.array, type='FLOAT', domain='POINT'): def new_attr_data(obj, attr, type, domain, data: np.array): - assert (isinstance(attr, str)) - assert (attr not in obj.data.attributes) + assert isinstance(attr, str) + assert attr not in obj.data.attributes obj.data.attributes.new(name=attr, type=type, domain=domain) attr = obj.data.attributes[attr] @@ -115,7 +133,7 @@ def smooth_attribute(obj, name, iters=20, weight=0.05, verbose=False): data = read_attr_data(obj, name) edges = np.empty(len(obj.data.edges) * 2, dtype=int) - 
obj.data.edges.foreach_get('vertices', edges) + obj.data.edges.foreach_get("vertices", edges) edges = edges.reshape(-1, 2) r = range(iters) if not verbose else trange(iters) @@ -142,17 +160,18 @@ def attribute_to_vertex_group(obj, attr, name=None, min_thresh=0, binary=False): if attr_data.shape[-1] != 1: raise ValueError( - f'Could not convert non-scalar attribute {attr} to vertex group, expected 1 data dimension but ' - f'got {attr_data.shape=}') + f"Could not convert non-scalar attribute {attr} to vertex group, expected 1 data dimension but " + f"got {attr_data.shape=}" + ) group = obj.vertex_groups.new(name=name) if binary: - group.add(np.where(attr_data > min_thresh)[0], 1.0, 'ADD') + group.add(np.where(attr_data > min_thresh)[0], 1.0, "ADD") else: for i, v in enumerate(attr_data): if v > min_thresh: - group.add([i], v, 'ADD') + group.add([i], v, "ADD") return group @@ -179,41 +198,51 @@ def eval_argument(nw, argument, default_value=1.0, **kwargs): elif nw.is_socket(argument): return argument else: - raise ValueError(f'surface.eval_argument couldnt parse {argument}') + raise ValueError(f"surface.eval_argument couldnt parse {argument}") def shaderfunc_to_material(shader_func, *args, name=None, **kwargs): - ''' + """ Convert a shader_func(nw) directly to a bpy.data.material Used in add_material and transpiler's Nodes.SetMaterial handler - ''' + """ if name is None: name = shader_func.__name__ material = bpy.data.materials.new(name=name) material.use_nodes = True - material.node_tree.nodes.remove(material.node_tree.nodes['Principled BSDF']) # remove the default BSDF + material.node_tree.nodes.remove( + material.node_tree.nodes["Principled BSDF"] + ) # remove the default BSDF nw = NodeWrangler(material.node_tree) new_node_tree = shader_func(nw, *args, **kwargs) - + if new_node_tree is not None: if isinstance(new_node_tree, tuple) and isnode(new_node_tree[1]): new_node_tree, volume = new_node_tree - nw.new_node(Nodes.MaterialOutput, input_kwargs={'Volume': volume}) - nw.new_node(Nodes.MaterialOutput, input_kwargs={'Surface': new_node_tree}) + nw.new_node(Nodes.MaterialOutput, input_kwargs={"Volume": volume}) + nw.new_node(Nodes.MaterialOutput, input_kwargs={"Surface": new_node_tree}) return material def seed_generator(size=8, chars=string.ascii_uppercase): - return ''.join(np.random.choice(list(chars)) for _ in range(size)) - - -def add_material(objs, shader_func, selection=None, input_args=None, input_kwargs=None, name=None, reuse=False): + return "".join(np.random.choice(list(chars)) for _ in range(size)) + + +def add_material( + objs, + shader_func, + selection=None, + input_args=None, + input_kwargs=None, + name=None, + reuse=False, +): if input_args is None: input_args = [] if input_kwargs is None: @@ -228,7 +257,6 @@ def add_material(objs, shader_func, selection=None, input_args=None, input_kwarg name += f"_{seed_generator(8)}" material = shaderfunc_to_material(shader_func, *input_args, **input_kwargs) elif isinstance(selection, (str, t.Semantics)): - if isinstance(selection, t.Semantics): selection = selection.value name = "MixedSurface" @@ -237,49 +265,62 @@ def add_material(objs, shader_func, selection=None, input_args=None, input_kwarg else: material = bpy.data.materials.new(name=name) material.use_nodes = True - material.node_tree.nodes['Principled BSDF'].inputs['Base Color'].default_value = ( - 1, 0, 1, 1) # Set Magenta + material.node_tree.nodes["Principled BSDF"].inputs[ + "Base Color" + ].default_value = (1, 0, 1, 1) # Set Magenta objs[0].active_material = material nw = 
NodeWrangler(material.node_tree) - new_attribute_node = nw.new_node(Nodes.Attribute, [], {"attribute_name": selection}) + new_attribute_node = nw.new_node( + Nodes.Attribute, [], {"attribute_name": selection} + ) if "Attribute Sum" in material.node_tree.nodes: old_attribute_sum_node = material.node_tree.nodes["Attribute Sum"] if old_attribute_sum_node.type == "ATTRIBUTE": socket_index_old = 2 else: socket_index_old = 0 - new_attribute_sum_node = nw.scalar_add((old_attribute_sum_node, socket_index_old), - (new_attribute_node, 2)) + new_attribute_sum_node = nw.scalar_add( + (old_attribute_sum_node, socket_index_old), (new_attribute_node, 2) + ) old_attribute_sum_node.name = "Attribute Sum Old" new_attribute_sum_node.name = "Attribute Sum" else: new_attribute_node.name = "Attribute Sum" new_attribute_sum_node = new_attribute_node # grab a reference to whatever is currently linked to output - links_to_output = [link for link in nw.links if (link.to_node.bl_idname == Nodes.MaterialOutput)] + links_to_output = [ + link + for link in nw.links + if (link.to_node.bl_idname == Nodes.MaterialOutput) + ] assert len(links_to_output) == 1, links_to_output penultimate_node = links_to_output.pop().from_node if new_attribute_sum_node.type == "ATTRIBUTE": socket_index_new = 2 else: socket_index_new = 0 - selection_weight = nw.divide2((new_attribute_node, 2), (new_attribute_sum_node, socket_index_new)) + selection_weight = nw.divide2( + (new_attribute_node, 2), (new_attribute_sum_node, socket_index_new) + ) # spawn in the node tree to mix with it new_node_tree = shader_func(nw, **input_kwargs) if new_node_tree is None: raise ValueError( - f'{shader_func} returned None while attempting add_material(selection=...). Shaderfunc must ' - f'return its output to be mixable') + f"{shader_func} returned None while attempting add_material(selection=...). 
Shaderfunc must " + f"return its output to be mixable" + ) if isinstance(new_node_tree, tuple) and isnode(new_node_tree[1]): new_node_tree, volume = new_node_tree - nw.new_node(Nodes.MaterialOutput, input_kwargs={'Volume': volume}) + nw.new_node(Nodes.MaterialOutput, input_kwargs={"Volume": volume}) # mix the two together - mix_shader = nw.new_node(Nodes.MixShader, [selection_weight, penultimate_node, new_node_tree]) - nw.new_node(Nodes.MaterialOutput, input_kwargs={'Surface': mix_shader}) + mix_shader = nw.new_node( + Nodes.MixShader, [selection_weight, penultimate_node, new_node_tree] + ) + nw.new_node(Nodes.MaterialOutput, input_kwargs={"Surface": mix_shader}) else: raise ValueError(f"{type(selection)=} not handled.") @@ -288,8 +329,20 @@ def add_material(objs, shader_func, selection=None, input_args=None, input_kwarg return material -def add_geomod(objs, geo_func, name=None, apply=False, reuse=False, input_args=None, input_kwargs=None, - attributes=None, show_viewport=True, selection=None, domains=None, input_attributes=None, ): +def add_geomod( + objs, + geo_func, + name=None, + apply=False, + reuse=False, + input_args=None, + input_kwargs=None, + attributes=None, + show_viewport=True, + selection=None, + domains=None, + input_attributes=None, +): if input_args is None: input_args = [] if input_kwargs is None: @@ -297,7 +350,7 @@ def add_geomod(objs, geo_func, name=None, apply=False, reuse=False, input_args=N if attributes is None: attributes = [] if domains is None: - domains = ['POINT'] * len(attributes) + domains = ["POINT"] * len(attributes) if input_attributes is None: input_attributes = [None] * 128 @@ -309,16 +362,18 @@ def add_geomod(objs, geo_func, name=None, apply=False, reuse=False, input_args=N return None if selection is not None: - input_kwargs['selection'] = selection + input_kwargs["selection"] = selection ng = None for obj in objs: - mod = obj.modifiers.new(name=name, type='NODES') + mod = obj.modifiers.new(name=name, type="NODES") mod.show_viewport = False if mod is None: - raise ValueError(f'Attempted to surface.add_geomod({obj=}), yet created modifier was None. ' - f'Check that {obj.type=} supports geo modifiers') + raise ValueError( + f"Attempted to surface.add_geomod({obj=}), yet created modifier was None. " + f"Check that {obj.type=} supports geo modifiers" + ) mod.show_viewport = show_viewport if ng is None: # Create a unique node_group for the first one only @@ -326,7 +381,7 @@ def add_geomod(objs, geo_func, name=None, apply=False, reuse=False, input_args=N mod.node_group = bpy.data.node_groups[name] else: # print("input_kwargs", input_kwargs, geo_func.__name__) - if mod.node_group == None: + if mod.node_group is None: group = geometry_node_group_empty_new() mod.node_group = group nw = NodeWrangler(mod) @@ -337,27 +392,32 @@ def add_geomod(objs, geo_func, name=None, apply=False, reuse=False, input_args=N mod.node_group = ng outputs = mod.node_group.outputs - identifiers = [outputs[i].identifier for i in range(len(outputs)) if outputs[i].type != 'GEOMETRY'] + identifiers = [ + outputs[i].identifier + for i in range(len(outputs)) + if outputs[i].type != "GEOMETRY" + ] if len(identifiers) != len(attributes): raise Exception( f"has {len(identifiers)} identifiers, but {len(attributes)} attributes. 
Specifically, " - f"{identifiers=} and {attributes=}") + f"{identifiers=} and {attributes=}" + ) for id, att_name in zip(identifiers, attributes): # attributes are a 1-indexed list, and Geometry is the first element, so we start from 2 # while f'Output_{i}_attribute_name' not in - mod[id + '_attribute_name'] = att_name - os = [outputs[i] for i in range(len(outputs)) if outputs[i].type != 'GEOMETRY'] + mod[id + "_attribute_name"] = att_name + os = [outputs[i] for i in range(len(outputs)) if outputs[i].type != "GEOMETRY"] for o, domain in zip(os, domains): o.attribute_domain = domain inputs = mod.node_group.inputs if not any(att_name is None for att_name in input_attributes): - raise Exception('None should be provided for Geometry inputs.') + raise Exception("None should be provided for Geometry inputs.") for i, att_name in zip(inputs, input_attributes): id = i.identifier if att_name is not None: - mod[f'{id}_use_attribute'] = True - mod[f'{id}_attribute_name'] = att_name + mod[f"{id}_use_attribute"] = True + mod[f"{id}_attribute_name"] = att_name if apply: for obj in objs: @@ -374,31 +434,33 @@ def apply(objs, *args, **kwargs): class Registry: - def __init__(self): self._registry = None @staticmethod def get_surface(name): - if name == '': + if name == "": return NoApply - prefixes = ['infinigen.infinigen_gpl.surfaces', 'infinigen.assets.materials', - 'infinigen.assets.scatters'] + prefixes = [ + "infinigen.infinigen_gpl.surfaces", + "infinigen.assets.materials", + "infinigen.assets.scatters", + ] for prefix in prefixes: try: - return importlib.import_module('.' + name, prefix) - except ModuleNotFoundError as e: + return importlib.import_module("." + name, prefix) + except ModuleNotFoundError: continue - raise ValueError(f'Could not find {name=} in any of {prefixes}') + raise ValueError(f"Could not find {name=} in any of {prefixes}") @staticmethod def sample_registry(registry): mods, probs = zip(*registry) return np.random.choice(mods, p=np.array(probs) / sum(probs)) - @gin.configurable('registry') + @gin.configurable("registry") def initialize_from_gin(self, smooth_categories=0, **gin_category_info): if smooth_categories != 0: raise NotImplementedError @@ -406,18 +468,22 @@ def initialize_from_gin(self, smooth_categories=0, **gin_category_info): with gin.unlock_config(): self._registry = defaultdict(list) for k, v in gin_category_info.items(): - self._registry[k] = [(self.get_surface(name), weight) for name, weight in v] + self._registry[k] = [ + (self.get_surface(name), weight) for name, weight in v + ] def __call__(self, category_key): if self._registry is None: raise ValueError( - 'Surface registry has not been initialized! Have you loaded gin and called .initialize()?' - 'Note, this step cannot happen at module initialization time, as gin is not yet loaded') + "Surface registry has not been initialized! Have you loaded gin and called .initialize()?" + "Note, this step cannot happen at module initialization time, as gin is not yet loaded" + ) if category_key not in self._registry: raise KeyError( - f'registry recieved request with {category_key=}, but no gin_config for this key was ' - f'provided. {self._registry.keys()=}') + f"registry recieved request with {category_key=}, but no gin_config for this key was " + f"provided. 
{self._registry.keys()=}" + ) return self.sample_registry(self._registry[category_key]) diff --git a/infinigen/core/tagging.py b/infinigen/core/tagging.py index 1ec08b5cd..f60ba2c4b 100644 --- a/infinigen/core/tagging.py +++ b/infinigen/core/tagging.py @@ -5,27 +5,27 @@ # Authors: Yihan Wang, Karhan Kayan: face based tagging, canonical surface tagging, mask extraction -import os -import bpy import json import logging +from typing import Union +import bpy import numpy as np + import infinigen.core.util.blender as butil -from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler from infinigen.core import surface +from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler +from infinigen.core.util.logging import lazydebug from . import tags as t -from typing import Union, Any - logger = logging.getLogger(__name__) -PREFIX = 'TAG_' -COMBINED_ATTR_NAME = 'MaskTag' +PREFIX = "TAG_" +COMBINED_ATTR_NAME = "MaskTag" -class AutoTag(): +class AutoTag: tag_dict = {} def __init__(self): @@ -37,48 +37,53 @@ def clear(self): # This function now only supports APPLIED OBJECTS # PLEASE KEEP ALL THE GEOMETRY APPLIED BEFORE SCATTERING THEM ON THE TERRAIN # PLEASE DO NOT USE BOOLEAN TAGS FOR OTHER USE - def save_tag(self, path='./MaskTag.json'): - with open(path, 'w') as f: + def save_tag(self, path="./MaskTag.json"): + with open(path, "w") as f: json.dump(self.tag_dict, f) - def load_tag(self, path='./MaskTag.json'): - with open(path, 'r') as f: + def load_tag(self, path="./MaskTag.json"): + with open(path, "r") as f: self.tag_dict = json.load(f) def _extract_incoming_tagmasks(self, obj): - new_attr_names = [ - name for name in obj.data.attributes.keys() - if name.startswith(PREFIX) + name for name in obj.data.attributes.keys() if name.startswith(PREFIX) ] n_poly = len(obj.data.polygons) for name in new_attr_names: attr = obj.data.attributes[name] - if attr.domain != 'FACE': - raise ValueError(f'Incoming attribute {obj.name=} {attr.name=} had invalid {attr.domain=}, expected FACE') + if attr.domain != "FACE": + raise ValueError( + f"Incoming attribute {obj.name=} {attr.name=} had invalid {attr.domain=}, expected FACE" + ) if len(attr.data) != n_poly: - raise ValueError(f'Incoming attribute {obj.name=} {attr.name=} had invalid {len(attr.data)=}, expected {n_poly=}') + raise ValueError( + f"Incoming attribute {obj.name=} {attr.name=} had invalid {len(attr.data)=}, expected {n_poly=}" + ) new_attrs = { - name[len(PREFIX):]: surface.read_attr_data(obj, name, 'FACE') + name[len(PREFIX) :]: surface.read_attr_data(obj, name, "FACE") for name in new_attr_names } for name, vals in new_attrs.items(): if vals.dtype == bool: continue - elif vals.dtype.kind == 'f': + elif vals.dtype.kind == "f": new_attrs[name] = vals > 0.5 - elif vals.dtype.kind == 'i': + elif vals.dtype.kind == "i": new_attrs[name] = vals > 0 else: - raise ValueError(f'Incoming attribute {obj.name=} had invalid np dtype {vals.dtype} {vals.dtype.kind=}, expected float or ideally boolean ') - + raise ValueError( + f"Incoming attribute {obj.name=} had invalid np dtype {vals.dtype} {vals.dtype.kind=}, expected float or ideally boolean " + ) for name, arr in new_attrs.items(): if arr.dtype != bool: - raise ValueError(f'Retrieved incoming tag mask {name=} had {arr.dtype=}, expected bool') + raise ValueError( + f"Retrieved incoming tag mask {name=} had {arr.dtype=}, expected bool" + ) for name in new_attr_names: obj.data.attributes.remove(obj.data.attributes[name]) @@ -86,124 +91,130 @@ def _extract_incoming_tagmasks(self, obj): return 
new_attrs def _specialize_tag_name(self, vi, name, tag_name_lookup): - - if '.' in name: + if "." in name: raise ValueError(f'{name=} should not contain separator character "."') if vi == 0: return name - + existing = tag_name_lookup[vi - 1] - parts = set(existing.split('.')) + parts = set(existing.split(".")) if name in parts: return existing - + parts.add(name) - return '.'.join(sorted(list(parts))) + return ".".join(sorted(list(parts))) def _relabel_obj_single(self, obj, tag_name_lookup): - n_poly = len(obj.data.polygons) new_attrs = self._extract_incoming_tagmasks(obj) - + if COMBINED_ATTR_NAME in obj.data.attributes.keys(): domain = obj.data.attributes[COMBINED_ATTR_NAME].domain - if domain != 'FACE': - raise ValueError(f'{obj.name=} had {COMBINED_ATTR_NAME} on {domain=}, expected FACE') - tagint = surface.read_attr_data(obj, COMBINED_ATTR_NAME, domain='FACE') + if domain != "FACE": + raise ValueError( + f"{obj.name=} had {COMBINED_ATTR_NAME} on {domain=}, expected FACE" + ) + tagint = surface.read_attr_data(obj, COMBINED_ATTR_NAME, domain="FACE") else: tagint = np.full(n_poly, 0, np.int64) assert tagint.dtype == np.int64, tagint.dtype for name, new_mask in new_attrs.items(): - affected_tagints = np.unique(tagint[new_mask]) for vi in affected_tagints: - affected_mask = new_mask * (tagint == vi) if not affected_mask.any(): continue new_tag_name = self._specialize_tag_name(vi, name, tag_name_lookup) - tag_value = self.tag_dict.get(new_tag_name) + tag_value = self.tag_dict.get(new_tag_name) if tag_value is None: tag_value = len(self.tag_dict) + 1 self.tag_dict[new_tag_name] = tag_value tag_name_lookup.append(new_tag_name) - assert len(self.tag_dict) == len(tag_name_lookup), \ - f'{len(self.tag_dict)=} yet {len(tag_name_lookup)=}, out of sync at {vi=} {new_tag_name=}' + assert ( + len(self.tag_dict) == len(tag_name_lookup) + ), f"{len(self.tag_dict)=} yet {len(tag_name_lookup)=}, out of sync at {vi=} {new_tag_name=}" assert new_tag_name in tag_name_lookup - logger.debug(f"{self._relabel_obj_single.__name__} updating {vi=} to {new_tag_name=} with {affected_mask.mean()=:.2f} for {obj.name=}") + lazydebug( + logger, + lambda: f"{self._relabel_obj_single.__name__} updating {vi=} to {new_tag_name=} with {affected_mask.mean()=:.2f} for {obj.name=}", + ) tagint[affected_mask] = tag_value if COMBINED_ATTR_NAME not in obj.data.attributes.keys(): - mask_tag_attr = obj.data.attributes.new(COMBINED_ATTR_NAME, 'INT', 'FACE') + mask_tag_attr = obj.data.attributes.new(COMBINED_ATTR_NAME, "INT", "FACE") else: mask_tag_attr = obj.data.attributes[COMBINED_ATTR_NAME] - mask_tag_attr.data.foreach_set('value', tagint) - + mask_tag_attr.data.foreach_set("value", tagint) def relabel_obj(self, root_obj): - tag_name_lookup = [None] * len(self.tag_dict) for name, tag_id in self.tag_dict.items(): key = tag_id - 1 if key >= len(tag_name_lookup): - raise IndexError(f'{name} had {tag_id=} {key=} yet {len(self.tag_dict)=}') + raise IndexError( + f"{name} had {tag_id=} {key=} yet {len(self.tag_dict)=}" + ) if tag_name_lookup[key] is not None: - raise ValueError(f'{name=} {tag_id=} {key=} attempted to overwrite {tag_name_lookup[key]=}') + raise ValueError( + f"{name=} {tag_id=} {key=} attempted to overwrite {tag_name_lookup[key]=}" + ) tag_name_lookup[key] = name for obj in butil.iter_object_tree(root_obj): - if obj.type != 'MESH': + if obj.type != "MESH": continue self._relabel_obj_single(obj, tag_name_lookup) return root_obj + tag_system = AutoTag() - -def print_segments_summary(obj: bpy.types.Object): - tagint = 
surface.read_attr_data(obj, COMBINED_ATTR_NAME, domain='FACE') + +def print_segments_summary(obj: bpy.types.Object): + tagint = surface.read_attr_data(obj, COMBINED_ATTR_NAME, domain="FACE") results = [] for vi in np.unique(tagint): - mask = (tagint == vi) + mask = tagint == vi results.append((vi, mask.mean())) results.sort(key=lambda x: x[1], reverse=True) - print(f'Tag Segments Summary for {obj.name=}') + print(f"Tag Segments Summary for {obj.name=}") for vi, mean in results: name = _name_for_tagval(vi) - print(f' {mean*100:.1f}% {vi=} {name}') + print(f" {mean*100:.1f}% {vi=} {name}") -def tag_object(obj, name=None, mask=None): +def tag_object(obj, name=None, mask=None): if name is not None: name = t.to_string(name) for o in butil.iter_object_tree(obj): - - if o.type != 'MESH': + if o.type != "MESH": continue if name is not None: - n_poly = len(o.data.polygons) if n_poly == 0: - logger.debug(f'{tag_object.__name__} had {n_poly=} for {o.name=} {name=} child of {obj.name=}') + lazydebug( + logger, + lambda: f"{tag_object.__name__} had {n_poly=} for {o.name=} {name=} child of {obj.name=}", + ) continue mask_o = np.full(n_poly, 1, dtype=bool) if mask is None else mask @@ -211,36 +222,36 @@ def tag_object(obj, name=None, mask=None): assert isinstance(mask_o, np.ndarray) assert len(mask_o) == n_poly - logger.debug(f'{tag_object.__name__} applying {name=} {mask_o.mean()=:.2f} to {o.name=}') + lazydebug( + logger, + lambda: f"{tag_object.__name__} applying {name=} {mask_o.mean()=:.2f} to {o.name=}", + ) surface.write_attr_data( - obj=o, - attr=(PREFIX + name), - data=mask_o, - type='BOOLEAN', - domain='FACE' + obj=o, attr=(PREFIX + name), data=mask_o, type="BOOLEAN", domain="FACE" ) tag_system.relabel_obj(obj) -def vert_mask_to_tri_mask(obj, vert_mask, require_all=True): +def vert_mask_to_tri_mask(obj, vert_mask, require_all=True): arr = np.zeros(len(obj.data.polygons) * 3) - obj.data.polygons.foreach_get('vertices', arr) + obj.data.polygons.foreach_get("vertices", arr) face_vert_idxs = arr.reshape(-1, 3).astype(int) - + if require_all: return ( - vert_mask[face_vert_idxs[:, 0]] * - vert_mask[face_vert_idxs[:, 1]] * - vert_mask[face_vert_idxs[:, 2]] + vert_mask[face_vert_idxs[:, 0]] + * vert_mask[face_vert_idxs[:, 1]] + * vert_mask[face_vert_idxs[:, 2]] ) else: return ( - vert_mask[face_vert_idxs[:, 0]] | - vert_mask[face_vert_idxs[:, 1]] | - vert_mask[face_vert_idxs[:, 2]] + vert_mask[face_vert_idxs[:, 0]] + | vert_mask[face_vert_idxs[:, 1]] + | vert_mask[face_vert_idxs[:, 2]] ) + CANONICAL_TAGS = [t.Subpart.Back, t.Subpart.Front, t.Subpart.Top, t.Subpart.Bottom] CANONICAL_TAG_MEANINGS = { t.Subpart.Back: (np.min, 0), @@ -249,19 +260,18 @@ def vert_mask_to_tri_mask(obj, vert_mask, require_all=True): t.Subpart.Top: (np.max, 2), } -def tag_canonical_surfaces(obj, rtol=0.01): +def tag_canonical_surfaces(obj, rtol=0.01): obj.update_from_editmode() n_vert = len(obj.data.vertices) - n_poly = len(obj.data.polygons) + len(obj.data.polygons) verts = np.empty(n_vert * 3, dtype=float) - obj.data.vertices.foreach_get('co', verts) + obj.data.vertices.foreach_get("co", verts) verts = verts.reshape(n_vert, 3) for tag in CANONICAL_TAGS: - gather_func, axis_idx = CANONICAL_TAG_MEANINGS[tag] target_axis_val = gather_func(verts[:, axis_idx]) @@ -271,50 +281,51 @@ def tag_canonical_surfaces(obj, rtol=0.01): face_mask = vert_mask_to_tri_mask(obj, vert_mask, require_all=True) if not face_mask.any(): - logger.warning(f'{tag_canonical_surfaces.__name__} found got {face_mask.mean()=:.2f} for {tag=} on {obj.name=}') + 
logger.warning( + f"{tag_canonical_surfaces.__name__} found got {face_mask.mean()=:.2f} for {tag=} on {obj.name=}" + ) - logger.debug(f'{tag_canonical_surfaces.__name__} applying {tag=} {face_mask.mean()=:.2f} to {obj.name=}') - surface.write_attr_data(obj, PREFIX + tag.value, face_mask, type='BOOLEAN', domain='FACE') + lazydebug( + logger, + lambda: f"{tag_canonical_surfaces.__name__} applying {tag=} {face_mask.mean()=:.2f} to {obj.name=}", + ) + surface.write_attr_data( + obj, PREFIX + tag.value, face_mask, type="BOOLEAN", domain="FACE" + ) tag_system.relabel_obj(obj) + def tag_nodegroup(nw: NodeWrangler, input_node, name: t.Tag, selection=None): - name = PREFIX + t.to_string(name) sel = surface.eval_argument(nw, selection) store_named_attribute = nw.new_node( Nodes.StoreNamedAttribute, input_kwargs={ - 'Geometry': input_node, - 'Name': name, - 'Selection': sel, - 'Value': True + "Geometry": input_node, + "Name": name, + "Selection": sel, + "Value": True, }, - attrs={ - 'domain': 'FACE', - 'data_type': 'BOOLEAN' - } + attrs={"domain": "FACE", "data_type": "BOOLEAN"}, ) return store_named_attribute -def _name_for_tagval(i: int) -> str | None: +def _name_for_tagval(i: int) -> str | None: if i == 0: # index 0 represents an untagged face return None - name = next( - (k for k, v in tag_system.tag_dict.items() if v == i), - None - ) + name = next((k for k, v in tag_system.tag_dict.items() if v == i), None) if name is None: - raise ValueError(f'Found {name=} for {i=} in {tag_system.tag_dict=}') - + raise ValueError(f"Found {name=} for {i=} in {tag_system.tag_dict=}") + return name -def union_object_tags(obj): +def union_object_tags(obj): if COMBINED_ATTR_NAME not in obj.data.attributes: return set() @@ -323,7 +334,7 @@ def union_object_tags(obj): for v in np.unique(masktag): if v == 0: continue - res = res.union(_name_for_tagval(v).split('.')) + res = res.union(_name_for_tagval(v).split(".")) def try_convert(x): try: @@ -333,29 +344,32 @@ def try_convert(x): return {try_convert(x) for x in res} -def tagged_face_mask(obj: bpy.types.Object, tags: Union[t.Subpart]) -> np.ndarray: +def tagged_face_mask(obj: bpy.types.Object, tags: Union[t.Subpart]) -> np.ndarray: # ASSUMES: object is triangulated, no quads/polygons tags = t.to_tag_set(tags) - pos_tags = [t.to_string(tagval) for tagval in tags if not isinstance(tagval, t.Negated)] - neg_tags = [t.to_string(tagval.tag) for tagval in tags if isinstance(tagval, t.Negated)] + pos_tags = [ + t.to_string(tagval) for tagval in tags if not isinstance(tagval, t.Negated) + ] + neg_tags = [ + t.to_string(tagval.tag) for tagval in tags if isinstance(tagval, t.Negated) + ] del tags n_poly = len(obj.data.polygons) if COMBINED_ATTR_NAME not in obj.data.attributes: return np.ones(n_poly, dtype=bool) - masktag = surface.read_attr_data(obj, COMBINED_ATTR_NAME, domain='FACE') + masktag = surface.read_attr_data(obj, COMBINED_ATTR_NAME, domain="FACE") face_mask = np.zeros(n_poly, dtype=bool) - + for v in np.unique(masktag): - if v == 0: name_parts = [] else: - name_parts = _name_for_tagval(v).split('.') + name_parts = _name_for_tagval(v).split(".") - v_mask = (masktag == v) + v_mask = masktag == v if len(pos_tags) > 0 and not all(tag in name_parts for tag in pos_tags): continue @@ -364,33 +378,39 @@ def tagged_face_mask(obj: bpy.types.Object, tags: Union[t.Subpart]) -> np.ndarra face_mask |= v_mask - logger.debug(f'{obj.name=} had {face_mask.mean()=:.2f} for {pos_tags=} {neg_tags=}') + lazydebug( + logger, + lambda: f"{obj.name=} had {face_mask.mean()=:.2f} for 
{pos_tags=} {neg_tags=}", + ) return face_mask -def extract_tagged_faces(obj: bpy.types.Object, tags: set, nonempty=False) -> bpy.types.Object: + +def extract_tagged_faces( + obj: bpy.types.Object, tags: set, nonempty=False +) -> bpy.types.Object: "extract the surface that satisfies all tags" - + # Ensure we're dealing with a mesh object - if obj.type != 'MESH': + if obj.type != "MESH": raise TypeError("Object is not a mesh!") face_mask = tagged_face_mask(obj, tags) if nonempty and not face_mask.any(): - raise ValueError(f'extract_tagged_faces({obj.name=}, {tags=}, {nonempty=}) got empty mask for {len(obj.data.polygons)}') + raise ValueError( + f"extract_tagged_faces({obj.name=}, {tags=}, {nonempty=}) got empty mask for {len(obj.data.polygons)}" + ) return extract_mask(obj, face_mask, nonempty=nonempty) + def extract_mask( - obj: bpy.types.Object, - face_mask: np.array, - nonempty=False + obj: bpy.types.Object, face_mask: np.array, nonempty=False ) -> bpy.types.Object: - if not face_mask.any(): if nonempty: - raise ValueError(f'extract_mask({obj.name=}) got empty mask') + raise ValueError(f"extract_mask({obj.name=}) got empty mask") return butil.spawn_vert() orig_hide_viewport = obj.hide_viewport @@ -398,19 +418,20 @@ def extract_mask( # Switch to Edit mode, duplicate the selection, and separate it with butil.SelectObjects(obj, active=0): - - with butil.ViewportMode(obj, 'EDIT'): - bpy.ops.mesh.select_mode(use_extend=False, use_expand=False, type='FACE') - bpy.ops.mesh.select_all(action='DESELECT') + with butil.ViewportMode(obj, "EDIT"): + bpy.ops.mesh.select_mode(use_extend=False, use_expand=False, type="FACE") + bpy.ops.mesh.select_all(action="DESELECT") for poly in obj.data.polygons: poly.select = face_mask[poly.index] if nonempty and len([p for p in obj.data.polygons if p.select]) == 0: - raise ValueError(f'extract_mask({obj.name=}, {nonempty=}) failed to select polygons') + raise ValueError( + f"extract_mask({obj.name=}, {nonempty=}) failed to select polygons" + ) - with butil.ViewportMode(obj, 'EDIT'): + with butil.ViewportMode(obj, "EDIT"): bpy.ops.mesh.duplicate_move() - bpy.ops.mesh.separate(type='SELECTED') + bpy.ops.mesh.separate(type="SELECTED") res = next((o for o in bpy.context.selected_objects if o != obj), None) @@ -418,11 +439,15 @@ def extract_mask( if nonempty: if res is None: - raise ValueError(f'extract_mask({obj.name=}) got {res=} for {face_mask.mean()=}') + raise ValueError( + f"extract_mask({obj.name=}) got {res=} for {face_mask.mean()=}" + ) if len(res.data.polygons) == 0: - raise ValueError(f'extract_mask({obj.name=}) got {res=} with {len(res.data.polygons)=}') + raise ValueError( + f"extract_mask({obj.name=}) got {res=} with {len(res.data.polygons)=}" + ) elif res is None: - logger.warning(f'extract_mask({obj.name=}) failed to extract any faces') + logger.warning(f"extract_mask({obj.name=}) failed to extract any faces") return butil.spawn_vert() - - return res \ No newline at end of file + + return res diff --git a/infinigen/core/tags.py b/infinigen/core/tags.py index f31d38f6d..30baef77f 100644 --- a/infinigen/core/tags.py +++ b/infinigen/core/tags.py @@ -7,27 +7,29 @@ from __future__ import annotations from abc import ABCMeta -from enum import Enum, EnumMeta from dataclasses import dataclass +from enum import Enum, EnumMeta + class ABCEnumMeta(EnumMeta, ABCMeta): pass -class Tag: +class Tag: def __neg__(self) -> Negated: return Negated(self) + class StringTag(Tag): - def __init__(self, desc: str): self.desc = desc + class EnumTag(Tag, Enum, 
metaclass=ABCEnumMeta): pass -class Semantics(EnumTag): +class Semantics(EnumTag): # Mesh types Room = "room" Object = "object" @@ -35,16 +37,16 @@ class Semantics(EnumTag): # Room types Kitchen = "kitchen" - Bedroom = 'bedroom' - LivingRoom = 'living-room' - Closet = 'closet' - Hallway = 'hallway' - Bathroom = 'bathroom' - Garage = 'garage' - Balcony = 'balcony' - DiningRoom = 'dining-room' - Utility = 'utility' - Staircase = 'staircase' + Bedroom = "bedroom" + LivingRoom = "living-room" + Closet = "closet" + Hallway = "hallway" + Bathroom = "bathroom" + Garage = "garage" + Balcony = "balcony" + DiningRoom = "dining-room" + Utility = "utility" + Staircase = "staircase" # Object types Furniture = "furniture" @@ -94,28 +96,29 @@ class Semantics(EnumTag): # Special Case Objects Chair = "chair" - Window = 'window' - Open = 'open' - Entrance = 'entrance' - Door = 'door' - StaircaseWall = 'staircase-wall' - + Window = "window" + Open = "open" + Entrance = "entrance" + Door = "door" + StaircaseWall = "staircase-wall" + # Solver feature flags # TODO these should not be in Semantics RealPlaceholder = "real-placeholder" AssetAsPlaceholder = "asset-as-placeholder" AssetPlaceholderForChildren = "asset-placeholder-for-children" - PlaceholderBBox = 'placeholder-bbox' - SingleGenerator = 'single-generator' - NoRotation = 'no-rotation' - NoCollision = 'no-collision' - NoChildren = 'no-children' + PlaceholderBBox = "placeholder-bbox" + SingleGenerator = "single-generator" + NoRotation = "no-rotation" + NoCollision = "no-collision" + NoChildren = "no-children" def __str__(self): - return f'{self.__class__.__name__}({self.value})' + return f"{self.__class__.__name__}({self.value})" def __repr__(self): - return f'{self.__class__.__name__}.{self.name}' + return f"{self.__class__.__name__}.{self.name}" + class Subpart(EnumTag): SupportSurface = "support" @@ -131,29 +134,31 @@ class Subpart(EnumTag): Ceiling = "ceiling" Wall = "wall" - StaircaseWall = "staircase-wall" # TODO Lingjie Remove - + StaircaseWall = "staircase-wall" # TODO Lingjie Remove + def __str__(self): - return f'{self.__class__.__name__}({self.value})' + return f"{self.__class__.__name__}({self.value})" def __repr__(self): - return f'{self.__class__.__name__}.{self.name}' + return f"{self.__class__.__name__}.{self.name}" + @dataclass(frozen=True) class FromGenerator(Tag): generator: type def __repr__(self): - return f'{self.__class__.__name__}({self.generator.__name__})' + return f"{self.__class__.__name__}({self.generator.__name__})" + @dataclass(frozen=True) class Negated(Tag): tag: Tag - + def __str__(self): return "-" + str(self.tag) - def __repr__(self): + def __repr__(self): return f"-{repr(self.tag)}" def __neg__(self): @@ -162,6 +167,7 @@ def __neg__(self): def __post_init__(self): assert not isinstance(self.tag, Negated), "dont construct double negative tags" + @dataclass(frozen=True) class Variable(Tag): name: str @@ -170,17 +176,18 @@ def __post_init__(self): assert isinstance(self.name, str) def __repr__(self): - return f'{self.__class__.__name__}({self.name})' + return f"{self.__class__.__name__}({self.name})" def __str__(self): return self.name + @dataclass(frozen=True) class SpecificObject(Tag): name: str + def decompose_tags(tags: set[Tag]): - positive, negative = set(), set() for t in tags: @@ -192,44 +199,36 @@ def decompose_tags(tags: set[Tag]): return positive, negative + def contradiction(tags: set[Tag]): - pos, neg = decompose_tags(tags) if pos.intersection(neg): return True - + if len([t for t in pos if isinstance(t, 
FromGenerator)]) > 1: return True if len([t for t in tags if isinstance(t, SpecificObject | Variable)]) > 1: return True - + return False -def implies(t1: set[Tag], t2: set[Tag]): +def implies(t1: set[Tag], t2: set[Tag]): p1, n1 = decompose_tags(t1) p2, n2 = decompose_tags(t2) - return ( - not contradiction(t1) - and p1.issuperset(p2) - and n1.issuperset(n2) - ) + return not contradiction(t1) and p1.issuperset(p2) and n1.issuperset(n2) -def satisfies(t1: set[Tag], t2: set[Tag]): +def satisfies(t1: set[Tag], t2: set[Tag]): p1, n1 = decompose_tags(t1) p2, n2 = decompose_tags(t2) - return ( - p1.issuperset(p2) - and not n1.intersection(p2) - and not n2.intersection(p1) - ) + return p1.issuperset(p2) and not n1.intersection(p2) and not n2.intersection(p1) -def difference(t1: set[Tag], t2: set[Tag]): +def difference(t1: set[Tag], t2: set[Tag]): """Return a set of predicates representing the difference If the difference is empty, will return a contradictory set of predicates. @@ -238,13 +237,13 @@ def difference(t1: set[Tag], t2: set[Tag]): p1, n1 = decompose_tags(t1) p2, n2 = decompose_tags(t2) - pos = p1.union(n2 - n1) + pos = p1.union(n2 - n1) neg = n1.union(p2 - p1) return pos.union(Negated(n) for n in neg) -def to_tag(s: str | Tag | type, fac_context=None) -> Tag: +def to_tag(s: str | Tag | type, fac_context=None) -> Tag: if isinstance(s, Tag): return s @@ -254,18 +253,18 @@ def to_tag(s: str | Tag | type, fac_context=None) -> Tag: if s not in fac_context: raise ValueError(f"Got {s=} of type=type but it was not in fac_context") return FromGenerator(s) - + assert isinstance(s, str), s if s.startswith("-"): return Negated(to_tag(s[1:])) - + if fac_context is not None: fac = next((f for f in fac_context.keys() if f.__name__ == s), None) if fac: return FromGenerator(fac) - s = s.strip("\"\'") + s = s.strip("\"'") try: return Semantics[s] @@ -277,10 +276,12 @@ def to_tag(s: str | Tag | type, fac_context=None) -> Tag: except KeyError: pass - raise ValueError(f"to_tag got {s=} but could not resolve it. Please see tags.Semantics and tags.Subpart for available tag strings") - -def to_string(tag: Tag | str): + raise ValueError( + f"to_tag got {s=} but could not resolve it. Please see tags.Semantics and tags.Subpart for available tag strings" + ) + +def to_string(tag: Tag | str): if isinstance(tag, str): return tag @@ -292,10 +293,11 @@ def to_string(tag: Tag | str): case FromGenerator(): return tag.__name__ case Negated(): - raise ValueError(f'Negated tag {tag=} is not allowed here') + raise ValueError(f"Negated tag {tag=} is not allowed here") case _: - raise ValueError(f'to_string unhandled {tag=}') - + raise ValueError(f"to_string unhandled {tag=}") + + def to_tag_set(x, fac_context=None): match x: case None: @@ -303,4 +305,4 @@ def to_tag_set(x, fac_context=None): case set() | list() | tuple() | frozenset(): return {to_tag(xi, fac_context=fac_context) for xi in x} case x: - return {to_tag(x, fac_context=fac_context)} \ No newline at end of file + return {to_tag(x, fac_context=fac_context)} diff --git a/infinigen/core/util/__init__.py b/infinigen/core/util/__init__.py index e69de29bb..07f375e92 100644 --- a/infinigen/core/util/__init__.py +++ b/infinigen/core/util/__init__.py @@ -0,0 +1,3 @@ +from . 
import blender as butil +from .math import FixedSeed +from .random import random_general diff --git a/infinigen/core/util/bevelling.py b/infinigen/core/util/bevelling.py index c2a50372e..9e56d244e 100644 --- a/infinigen/core/util/bevelling.py +++ b/infinigen/core/util/bevelling.py @@ -3,12 +3,13 @@ # Authors: Zeyu Ma +import bmesh import bpy import mathutils -import bmesh import numpy as np from infinigen.core.nodes.node_wrangler import Nodes + from .blender import ViewportMode @@ -18,9 +19,10 @@ def special_bounds(obj): for v in obj.data.vertices: points.append(v.co) points = np.array(points) - mask = np.sum(points ** 2, axis=-1) ** 0.5 < 0.5 * inf + mask = np.sum(points**2, axis=-1) ** 0.5 < 0.5 * inf return points[mask].min(axis=0), points[mask].max(axis=0) + def on_bound_edges(points, points_min, points_max): flags = [0, 0, 0] eps = 1e-4 @@ -31,6 +33,7 @@ def on_bound_edges(points, points_min, points_max): flags[i] = 1 return flags + def get_bevel_edges(obj): inf = 1e5 points_min, points_max = special_bounds(obj) @@ -44,7 +47,7 @@ def get_bevel_edges(obj): for i in range(2): pos = np.array([edge.verts[i].co.x, edge.verts[i].co.y, edge.verts[i].co.z]) flags.append(on_bound_edges(pos, points_min, points_max)) - mags.append(np.sum(pos ** 2) ** 0.5) + mags.append(np.sum(pos**2) ** 0.5) for j in range(3): on_bounds_flag[j] = flags[0][j] != 0 and flags[0][j] == flags[1][j] if np.sum(on_bounds_flag) >= 2: @@ -53,27 +56,48 @@ def get_bevel_edges(obj): edges.append(edge.index) return edges + def add_bevel(obj, edges, offset=0.03, segments=8): - with ViewportMode(obj, mode='EDIT'): + with ViewportMode(obj, mode="EDIT"): bpy.ops.mesh.select_mode(type="EDGE") - bpy.ops.mesh.select_all(action = 'DESELECT') + bpy.ops.mesh.select_all(action="DESELECT") bm = bmesh.from_edit_mesh(obj.data) for edge in bm.edges: if edge.index in edges: edge.select_set(True) - bpy.ops.mesh.bevel(offset=offset, offset_pct=0, segments=segments, release_confirm=True) + bpy.ops.mesh.bevel( + offset=offset, offset_pct=0, segments=segments, release_confirm=True + ) return obj + def complete_bevel(nw, geometry, preprocess): inf = 1e5 geometry = nw.new_node(Nodes.RealizeInstances, [geometry]) if not preprocess: return geometry - return nw.new_node(Nodes.SetPosition, input_kwargs={'Geometry': (geometry, 0), 'Offset': nw.new_node(Nodes.Vector, attrs={"vector": mathutils.Vector((inf, 0, 0))})}) + return nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": (geometry, 0), + "Offset": nw.new_node( + Nodes.Vector, attrs={"vector": mathutils.Vector((inf, 0, 0))} + ), + }, + ) + def complete_no_bevel(nw, geometry, preprocess): inf = 1e5 geometry = nw.new_node(Nodes.RealizeInstances, [geometry]) if not preprocess: return geometry - return nw.new_node(Nodes.SetPosition, input_kwargs={'Geometry': (geometry, 0), 'Offset': nw.new_node(Nodes.Vector, attrs={"vector": mathutils.Vector((2 * inf, 0, 0))})}) + return nw.new_node( + Nodes.SetPosition, + input_kwargs={ + "Geometry": (geometry, 0), + "Offset": nw.new_node( + Nodes.Vector, attrs={"vector": mathutils.Vector((2 * inf, 0, 0))} + ), + }, + ) diff --git a/infinigen/core/util/blender.py b/infinigen/core/util/blender.py index 6c89d854c..e1e05f0e9 100644 --- a/infinigen/core/util/blender.py +++ b/infinigen/core/util/blender.py @@ -4,35 +4,27 @@ # Authors: Alex Raistrick, Zeyu Ma, Lahav Lipson, Hei Law, Lingjie Mei, Karhan Kayan -from collections import defaultdict -import pdb -from math import prod -from contextlib import nullcontext import logging - +from contextlib import 
nullcontext +from itertools import chain +from math import prod from pathlib import Path -import gin +import bmesh import bpy import mathutils -import os -import re -import json -from uuid import uuid4 -import bmesh import numpy as np import trimesh -from itertools import chain, product from tqdm import tqdm -import cv2 -from .math import lerp # for other people to import from this file +from infinigen.core.nodes.node_info import DATATYPE_DIMS, DATATYPE_FIELDS + from . import math as mutil from .logging import Suppress -from infinigen.core.nodes.node_info import DATATYPE_FIELDS, DATATYPE_DIMS logger = logging.getLogger(__name__) + def deep_clone_obj(obj, keep_modifiers=False, keep_materials=False): new_obj = obj.copy() new_obj.data = obj.data.copy() @@ -45,17 +37,25 @@ def deep_clone_obj(obj, keep_modifiers=False, keep_materials=False): bpy.context.collection.objects.link(new_obj) return new_obj + copy = deep_clone_obj + def get_all_bpy_data_targets(): D = bpy.data return [ - D.objects, D.collections, D.movieclips, D.particles, - D.meshes, D.curves, D.armatures, D.node_groups, + D.objects, + D.collections, + D.movieclips, + D.particles, + D.meshes, + D.curves, + D.armatures, + D.node_groups, ] -class ViewportMode: +class ViewportMode: def __init__(self, obj, mode): self.obj = obj self.mode = mode @@ -73,7 +73,6 @@ def __exit__(self, *args): class CursorLocation: - def __init__(self, loc): self.loc = loc self.saved = None @@ -87,28 +86,25 @@ def __exit__(self, *_): class SelectObjects: - def __init__(self, objects, active=0): - self.objects = list(objects) if hasattr(objects, '__iter__') else [objects] + self.objects = list(objects) if hasattr(objects, "__iter__") else [objects] self.active = active self.saved_objs = None self.saved_active = None def _check_selectable(self): - unlinked = [ - o for o in self.objects - if o.name not in bpy.context.scene.objects - ] + unlinked = [o for o in self.objects if o.name not in bpy.context.scene.objects] if len(unlinked) > 0: - raise ValueError(f'{SelectObjects.__name__} had objects {unlinked=} which are not in bpy.context.scene.objects and cannot be selected') - - hidden = [ - o for o in self.objects - if o.hide_viewport - ] + raise ValueError( + f"{SelectObjects.__name__} had objects {unlinked=} which are not in bpy.context.scene.objects and cannot be selected" + ) + + hidden = [o for o in self.objects if o.hide_viewport] if len(hidden) > 0: - raise ValueError(f'{SelectObjects.__name__} had objects {hidden=} which are hidden and cannot be selected') + raise ValueError( + f"{SelectObjects.__name__} had objects {hidden=} which are hidden and cannot be selected" + ) def _get_intended_active(self): if isinstance(self.active, int): @@ -120,8 +116,8 @@ def _get_intended_active(self): return self.active def _validate(self, error=False): - if error: + def msg(str): raise ValueError(str) else: @@ -134,7 +130,6 @@ def msg(str): "The most common cause is that the objects are in a collection with col.hide_viewport=True" ) - intended = self._get_intended_active() if intended is not None and bpy.context.active_object != intended: msg( @@ -155,7 +150,6 @@ def __enter__(self): self._validate() def __exit__(self, *_): - # our saved selection / active objects may have been deleted, update them to only include valid ones def enforce_not_deleted(o): try: @@ -169,11 +163,12 @@ def enforce_not_deleted(o): select_none() select(self.saved_objects) if self.saved_active is not None: - bpy.context.view_layer.objects.active = enforce_not_deleted(self.saved_active) + 
bpy.context.view_layer.objects.active = enforce_not_deleted( + self.saved_active + ) class DisableModifiers: - def __init__(self, objs, keep=[]): self.objs = objs if isinstance(objs, list) else [objs] self.keep = keep @@ -191,16 +186,20 @@ def __exit__(self, *_): for m in self.modifiers_disabled: m.show_viewport = True -class EnableParentCollections: - def __init__(self, objs, target_key='hide_viewport', target_value=False): +class EnableParentCollections: + def __init__(self, objs, target_key="hide_viewport", target_value=False): self.objs = objs self.target_key = target_key self.target_value = target_value def __enter__(self): - self.enable_cols = set(chain.from_iterable([o.users_collection for o in self.objs])) - self.enable_cols_startstate = [getattr(c, self.target_key) for c in self.enable_cols] + self.enable_cols = set( + chain.from_iterable([o.users_collection for o in self.objs]) + ) + self.enable_cols_startstate = [ + getattr(c, self.target_key) for c in self.enable_cols + ] for c in self.enable_cols: setattr(c, self.target_key, self.target_value) @@ -209,8 +208,8 @@ def __exit__(self, *_, **__): for c, s in zip(self.enable_cols, self.enable_cols_startstate): setattr(c, self.target_key, s) -class TemporaryObject: +class TemporaryObject: def __init__(self, obj): self.obj = obj @@ -232,15 +231,14 @@ def garbage_collect(targets, keep_in_use=True, keep_names=None, verbose=False): continue if o.name in orig: continue - if '(no gc)' in o.name: + if "(no gc)" in o.name: continue if verbose: - print(f'Garbage collecting {o} from {t}') + print(f"Garbage collecting {o} from {t}") t.remove(o) class GarbageCollect: - def __init__(self, targets=None, keep_in_use=True, keep_orig=True, verbose=False): self.targets = targets or get_all_bpy_data_targets() self.keep_in_use = keep_in_use @@ -251,7 +249,12 @@ def __enter__(self): self.names = [set(o.name for o in t) for t in self.targets] def __exit__(self, *_): - garbage_collect(self.targets, keep_in_use=self.keep_in_use, keep_names=self.names, verbose=self.verbose) + garbage_collect( + self.targets, + keep_in_use=self.keep_in_use, + keep_names=self.names, + verbose=self.verbose, + ) def select_none(): @@ -268,16 +271,17 @@ def select(objs: bpy.types.Object | list[bpy.types.Object]): objs = [objs] for o in objs: if o.name not in bpy.context.scene.objects: - raise ValueError(f'Object {o.name=} not in scene and cant be selected') + raise ValueError(f"Object {o.name=} not in scene and cant be selected") o.select_set(True) + def delete(objs: bpy.types.Object | list[bpy.types.Object]): if not isinstance(objs, list): objs = [objs] select_none() for obj in objs: select(obj) - is_mesh = obj.type == 'MESH' + is_mesh = obj.type == "MESH" if is_mesh: mesh = obj.data with Suppress(): @@ -341,9 +345,9 @@ def put_in_collection(objs, collection, exclusive=True): def group_in_collection(objs, name: str, reuse=True, **kwargs): - ''' + """ objs: List of (None | Blender Object | List[Blender Object]) - ''' + """ collection = get_collection(name, reuse=reuse) @@ -353,17 +357,23 @@ def group_in_collection(objs, name: str, reuse=True, **kwargs): if not isinstance(obj, list): obj = [obj] for child in obj: - traverse_children(child, lambda obj: put_in_collection(obj, collection, **kwargs)) + traverse_children( + child, lambda obj: put_in_collection(obj, collection, **kwargs) + ) return collection -def group_toplevel_collections(keyword, hide_viewport=False, hide_render=False, reuse=True): +def group_toplevel_collections( + keyword, hide_viewport=False, hide_render=False, 
reuse=True +): scenecol = bpy.context.scene.collection - matches = [c for c in scenecol.children if c.name.startswith(keyword) and keyword != c.name] + matches = [ + c for c in scenecol.children if c.name.startswith(keyword) and keyword != c.name + ] parent = get_collection(keyword, reuse=reuse) - if not parent.name in scenecol.children: + if parent.name not in scenecol.children: scenecol.children.link(parent) for c in matches: @@ -374,7 +384,7 @@ def group_toplevel_collections(keyword, hide_viewport=False, hide_render=False, parent.hide_render = hide_render -def spawn_empty(name, disp_type='PLAIN_AXES', s=0.1): +def spawn_empty(name, disp_type="PLAIN_AXES", s=0.1): empty = bpy.data.objects.new(name, None) bpy.context.scene.collection.objects.link(empty) empty.empty_display_size = s @@ -393,7 +403,7 @@ def spawn_point_cloud(name, pts, edges=None): return obj -def spawn_vert(name='vert'): +def spawn_vert(name="vert"): return spawn_point_cloud(name, np.zeros((1, 3))) @@ -402,24 +412,21 @@ def spawn_line(name, pts): edges = np.stack([idxs[:-1], idxs[1:]], axis=-1) return spawn_point_cloud(name, pts, edges=edges) + def spawn_plane(**kwargs): - name = kwargs.pop('name', None) - bpy.ops.mesh.primitive_plane_add( - enter_editmode=False, - align='WORLD', - **kwargs - ) + name = kwargs.pop("name", None) + bpy.ops.mesh.primitive_plane_add(enter_editmode=False, align="WORLD", **kwargs) obj = bpy.context.active_object if name is not None: obj.name = name return obj -def spawn_cube(size=1, location=(0, 0, 0), scale=(1, 1, 1), name=None): +def spawn_cube(size=1, location=(0, 0, 0), scale=(1, 1, 1), name=None): bpy.ops.mesh.primitive_cube_add( - size = size, + size=size, enter_editmode=False, - align='WORLD', + align="WORLD", location=location, scale=scale, ) @@ -428,13 +435,15 @@ def spawn_cube(size=1, location=(0, 0, 0), scale=(1, 1, 1), name=None): obj.name = name return obj -def spawn_cylinder(radius=1.0, depth=2.0, location=(0, 0, 0), scale=(1, 1, 1), name=None): +def spawn_cylinder( + radius=1.0, depth=2.0, location=(0, 0, 0), scale=(1, 1, 1), name=None +): bpy.ops.mesh.primitive_cylinder_add( radius=radius, depth=depth, enter_editmode=False, - align='WORLD', + align="WORLD", location=location, scale=scale, ) @@ -443,33 +452,34 @@ def spawn_cylinder(radius=1.0, depth=2.0, location=(0, 0, 0), scale=(1, 1, 1), n obj.name = name return obj + def spawn_sphere(radius=1, location=(0, 0, 0), scale=(1, 1, 1), name=None): + bpy.ops.mesh.primitive_uv_sphere_add( + radius=radius, + enter_editmode=False, + align="WORLD", + location=location, + scale=scale, + ) + obj = bpy.context.active_object + if name is not None: + obj.name = name + return obj - bpy.ops.mesh.primitive_uv_sphere_add( - radius = radius, - enter_editmode=False, - align='WORLD', - location=location, - scale=scale, - ) - obj = bpy.context.active_object - if name is not None: - obj.name = name - return obj def spawn_icosphere(radius=1, location=(0, 0, 0), scale=(1, 1, 1), name=None): + bpy.ops.mesh.primitive_ico_sphere_add( + radius=radius, + enter_editmode=False, + align="WORLD", + location=location, + scale=scale, + ) + obj = bpy.context.active_object + if name is not None: + obj.name = name + return obj - bpy.ops.mesh.primitive_ico_sphere_add( - radius = radius, - enter_editmode=False, - align='WORLD', - location=location, - scale=scale, - ) - obj = bpy.context.active_object - if name is not None: - obj.name = name - return obj def clear_scene(keep=[], targets=None, materials=True): D = bpy.data @@ -492,8 +502,8 @@ def clear_scene(keep=[], 
targets=None, materials=True): def spawn_capsule(rad, height, us=32, vs=16): - mesh = bpy.data.meshes.new('Capsule') - obj = bpy.data.objects.new('Capsule', mesh) + mesh = bpy.data.meshes.new("Capsule") + obj = bpy.data.objects.new("Capsule", mesh) bpy.context.collection.objects.link(obj) bm = bmesh.new() @@ -531,24 +541,27 @@ def to_mesh(object, context=bpy.context): def get_camera_res(): - d = np.array([bpy.context.scene.render.resolution_x, bpy.context.scene.render.resolution_y], - dtype=np.float32) + d = np.array( + [bpy.context.scene.render.resolution_x, bpy.context.scene.render.resolution_y], + dtype=np.float32, + ) d *= bpy.context.scene.render.resolution_percentage / 100.0 return d def set_geomod_inputs(mod, inputs: dict): - assert mod.type == 'NODES' + assert mod.type == "NODES" for k, v in inputs.items(): - if k not in mod.node_group.inputs: - raise KeyError(f'Couldnt find {k=} in {mod.node_group.inputs.keys()=}') + raise KeyError(f"Couldnt find {k=} in {mod.node_group.inputs.keys()=}") soc = mod.node_group.inputs[k] - - if not hasattr(soc, 'default_value'): + + if not hasattr(soc, "default_value"): if v is not None: - raise ValueError(f'Got non-None value {v=} for {soc.identifier=} which has no default value') + raise ValueError( + f"Got non-None value {v=} for {soc.identifier=} which has no default value" + ) continue elif v is None: continue @@ -559,14 +572,24 @@ def set_geomod_inputs(mod, inputs: dict): try: mod[soc.identifier] = v except TypeError as e: - print(f'Error incurred while assigning {v} with {type(v)=} to {soc.identifier=} of {mod.name=}') + print( + f"Error incurred while assigning {v} with {type(v)=} to {soc.identifier=} of {mod.name=}" + ) raise e -def modify_mesh(obj, type, apply=True, name=None, return_mod=False, ng_inputs=None, show_viewport=None, - **kwargs) -> bpy.types.Object: +def modify_mesh( + obj, + type, + apply=True, + name=None, + return_mod=False, + ng_inputs=None, + show_viewport=None, + **kwargs, +) -> bpy.types.Object: if name is None: - name = f'modify_mesh({type}, **{kwargs})' + name = f"modify_mesh({type}, **{kwargs})" if show_viewport is None: show_viewport = not apply @@ -574,13 +597,15 @@ def modify_mesh(obj, type, apply=True, name=None, return_mod=False, ng_inputs=No mod.show_viewport = show_viewport if mod is None: - raise ValueError(f'modifer.new() returned None, ensure {obj.type=} is valid for modifier {type=}') + raise ValueError( + f"modifer.new() returned None, ensure {obj.type=} is valid for modifier {type=}" + ) for k, v in kwargs.items(): setattr(mod, k, v) if ng_inputs is not None: - assert type == 'NODES' - assert 'node_group' in kwargs + assert type == "NODES" + assert "node_group" in kwargs set_geomod_inputs(mod, ng_inputs) if apply: @@ -591,12 +616,14 @@ def modify_mesh(obj, type, apply=True, name=None, return_mod=False, ng_inputs=No else: return obj + def constrain_object(obj, type, **kwargs): c = obj.constraints.new(type=type) for k, v in kwargs.items(): setattr(c, k, v) return c + def apply_transform(obj, loc=False, rot=True, scale=True): with SelectObjects(obj): bpy.ops.object.transform_apply(location=loc, rotation=rot, scale=scale) @@ -605,20 +632,21 @@ def apply_transform(obj, loc=False, rot=True, scale=True): def import_mesh(path, **kwargs): path = Path(path) - ext = path.parts[-1].split('.')[-1] + ext = path.parts[-1].split(".")[-1] ext = ext.lower().strip() funcs = { - 'obj': bpy.ops.import_scene.obj, - 'fbx': bpy.ops.import_scene.fbx, - 'stl': bpy.ops.import_mesh.stl, - 'ply': bpy.ops.import_mesh.ply, - 'usdc': 
bpy.ops.wm.usd_import, + "obj": bpy.ops.import_scene.obj, + "fbx": bpy.ops.import_scene.fbx, + "stl": bpy.ops.import_mesh.stl, + "ply": bpy.ops.import_mesh.ply, + "usdc": bpy.ops.wm.usd_import, } if ext not in funcs: raise ValueError( - f'butil.import_mesh does not yet support extension {ext}, please contact the developer') + f"butil.import_mesh does not yet support extension {ext}, please contact the developer" + ) select_none() with Suppress(): @@ -627,23 +655,25 @@ def import_mesh(path, **kwargs): if len(bpy.context.selected_objects) > 1: print( f"Warning: {ext.upper()} Import produced {len(bpy.context.selected_objects)} objects, " - f"but only the first is returned by import_obj") + f"but only the first is returned by import_obj" + ) return bpy.context.selected_objects[0] -def boolean(objs, mode='UNION', verbose=False): +def boolean(objs, mode="UNION", verbose=False): keep, *rest = list(objs) if verbose: - rest = tqdm(rest, desc=f'butil.boolean({keep.name}..., {mode=})') + rest = tqdm(rest, desc=f"butil.boolean({keep.name}..., {mode=})") with SelectObjects(keep): for target in rest: if len(target.modifiers) != 0: raise ValueError( - f'Attempted to boolean() with {target=} which still has {len(target.modifiers)=}') + f"Attempted to boolean() with {target=} which still has {len(target.modifiers)=}" + ) - mod = keep.modifiers.new(type='BOOLEAN', name='butil.boolean()') + mod = keep.modifiers.new(type="BOOLEAN", name="butil.boolean()") mod.operation = mode mod.object = target bpy.ops.object.modifier_apply(modifier=mod.name) @@ -651,7 +681,7 @@ def boolean(objs, mode='UNION', verbose=False): return keep -def split_object(obj, mode='LOOSE'): +def split_object(obj, mode="LOOSE"): select_none() select(obj) bpy.ops.mesh.separate(type=mode) @@ -685,10 +715,12 @@ def join_objects(objs, check_attributes=False): bpy.ops.object.join() return bpy.context.active_object + def clear_mesh(obj): - with ViewportMode(obj, mode='EDIT'): - bpy.ops.mesh.select_all(action='SELECT') - bpy.ops.mesh.delete(type='VERT') + with ViewportMode(obj, mode="EDIT"): + bpy.ops.mesh.select_all(action="SELECT") + bpy.ops.mesh.delete(type="VERT") + def apply_modifiers(obj, mod=None, quiet=True): if mod is None: @@ -705,22 +737,24 @@ def apply_modifiers(obj, mod=None, quiet=True): try: bpy.ops.object.modifier_apply(modifier=m.name) except RuntimeError as e: - if mod_type == 'NODES': - logging.warning(f'apply_modifers on {obj.name=} {m.name=} raised {e}, ignoring and returning empty mesh for pre-3.5 compatibility reasons') + if mod_type == "NODES": + logging.warning( + f"apply_modifers on {obj.name=} {m.name=} raised {e}, ignoring and returning empty mesh for pre-3.5 compatibility reasons" + ) bpy.ops.object.modifier_remove(modifier=m.name) clear_mesh(obj) else: raise e - - # geometry nodes occasionally introduces empty material slots in 3.6, we consider this an error and remove them + + # geometry nodes occasionally introduces empty material slots in 3.6, we consider this an error and remove them purge_empty_materials(obj) - # geometry nodes occasionally introduces empty material slots in 3.6, we consider this an error and remove them + # geometry nodes occasionally introduces empty material slots in 3.6, we consider this an error and remove them purge_empty_materials(obj) def recalc_normals(obj, inside=False): - with ViewportMode(obj, mode='EDIT'): + with ViewportMode(obj, mode="EDIT"): bpy.ops.mesh.select_all() bpy.ops.mesh.normals_make_consistent(inside=inside) @@ -740,7 +774,7 @@ def save_blend(path, autopack=False, 
verbose=False): def joined_kd(objs, include_origins=False): if not isinstance(objs, list): objs = objs - objs = [o for o in objs if o.type == 'MESH'] + objs = [o for o in objs if o.type == "MESH"] size = sum(len(o.data.vertices) for o in objs) if include_origins: @@ -761,8 +795,9 @@ def joined_kd(objs, include_origins=False): return kd + def make_instances_real(): - bpy.ops.object.select_all(action='DESELECT') + bpy.ops.object.select_all(action="DESELECT") for obj in bpy.data.objects: if len(obj.particle_systems) == 0: continue @@ -770,7 +805,7 @@ def make_instances_real(): obj.select_set(True) bpy.ops.object.duplicates_make_real() obj.select_set(False) - bpy.ops.object.select_all(action='DESELECT') + bpy.ops.object.select_all(action="DESELECT") # faces are required to be triangles now @@ -781,7 +816,9 @@ def objectdata_from_VF(vertices, faces): new_mesh.polygons.add(len(faces)) new_mesh.loops.add(len(faces) * 3) new_mesh.polygons.foreach_set("loop_total", np.ones(len(faces), np.int32) * 3) - new_mesh.polygons.foreach_set("loop_start", np.arange(len(faces), dtype=np.int32) * 3) + new_mesh.polygons.foreach_set( + "loop_start", np.arange(len(faces), dtype=np.int32) * 3 + ) new_mesh.polygons.foreach_set("vertices", faces.reshape(-1).astype(np.int32)) new_mesh.update(calc_edges=True) return new_mesh @@ -801,18 +838,24 @@ def object_from_trimesh(mesh, name, material=None): new_object = object_from_VF(mesh.vertices, mesh.faces, name) for attr_name in mesh.vertex_attributes: attr_name_ls = attr_name.lstrip("_") # this is because of trimesh bug - if mesh.vertex_attributes[attr_name].ndim == 1 or mesh.vertex_attributes[attr_name].shape[1] == 1: + if ( + mesh.vertex_attributes[attr_name].ndim == 1 + or mesh.vertex_attributes[attr_name].shape[1] == 1 + ): type_key = "FLOAT" elif mesh.vertex_attributes[attr_name].shape[1] == 3: type_key = "FLOAT_VECTOR" elif mesh.vertex_attributes[attr_name].shape[1] == 4: type_key = "FLOAT_COLOR" else: - raise Exception(f"attribute of shape {mesh.vertex_attributes[attr_name].shape} not supported") - new_object.data.attributes.new(name=attr_name_ls, type=type_key, domain='POINT') - new_object.data.attributes[attr_name_ls].data.foreach_set(DATATYPE_FIELDS[type_key], - mesh.vertex_attributes[attr_name].reshape( - -1).astype(np.float32)) + raise Exception( + f"attribute of shape {mesh.vertex_attributes[attr_name].shape} not supported" + ) + new_object.data.attributes.new(name=attr_name_ls, type=type_key, domain="POINT") + new_object.data.attributes[attr_name_ls].data.foreach_set( + DATATYPE_FIELDS[type_key], + mesh.vertex_attributes[attr_name].reshape(-1).astype(np.float32), + ) if material is not None: new_object.data.materials.append(material) return new_object @@ -822,7 +865,9 @@ def object_to_vertex_attributes(obj): vertex_attributes = {} for attr in obj.data.attributes.keys(): type_key = obj.data.attributes[attr].data_type - tmp = np.zeros(len(obj.data.vertices) * DATATYPE_DIMS[type_key], dtype=np.float32) + tmp = np.zeros( + len(obj.data.vertices) * DATATYPE_DIMS[type_key], dtype=np.float32 + ) obj.data.attributes[attr].data.foreach_get(DATATYPE_FIELDS[type_key], tmp) vertex_attributes[attr] = tmp.reshape((len(obj.data.vertices), -1)) return vertex_attributes @@ -841,39 +886,46 @@ def object_to_trimesh(obj): mesh.vertex_attributes.update(vertex_attributes) return mesh + def blender_internal_attr(a): - if hasattr(a, 'name'): + if hasattr(a, "name"): a = a.name - if a.startswith('.'): + if a.startswith("."): return True - if a in ['material_index', 'uv_map', 'UVMap', 
'sharp_face']: + if a in ["material_index", "uv_map", "UVMap", "sharp_face"]: return True return False + def merge_by_distance(obj, face_size): - with SelectObjects(obj), ViewportMode(obj, mode='EDIT'), Suppress(): - bpy.ops.mesh.select_all(action='SELECT') + with SelectObjects(obj), ViewportMode(obj, mode="EDIT"), Suppress(): + bpy.ops.mesh.select_all(action="SELECT") bpy.ops.mesh.remove_doubles(threshold=face_size) + def origin_set(objs, mode, **kwargs): with SelectObjects(objs): bpy.ops.object.origin_set(type=mode, **kwargs) + def apply_geo(obj): with SelectObjects(obj): for m in obj.modifiers: m.show_viewport = False for m in obj.modifiers: - if m.type == 'NODES': + if m.type == "NODES": bpy.ops.object.modifier_apply(modifier=m.name) + def avg_approx_vol(objects): return np.mean([prod(list(o.dimensions)) for o in objects]) -def parent_to(a, b, type='OBJECT', keep_transform=False, no_inverse=False, no_transform=False): +def parent_to( + a, b, type="OBJECT", keep_transform=False, no_inverse=False, no_transform=False +): if a.name == b.name: - raise ValueError(f'parent_to expects two distinct objects, got {a=} {b=}') + raise ValueError(f"parent_to expects two distinct objects, got {a=} {b=}") select_none() with SelectObjects([a, b], active=1): @@ -883,15 +935,19 @@ def parent_to(a, b, type='OBJECT', keep_transform=False, no_inverse=False, no_tr bpy.ops.object.parent_set(type=type, keep_transform=keep_transform) if no_transform: - a.location = (0,0,0) - a.rotation_euler = (0,0,0) + a.location = (0, 0, 0) + a.rotation_euler = (0, 0, 0) if a.parent is not b: - raise ValueError(f'parent_to({a=}, {b=}) failed, after execution we saw {a.parent=}') + raise ValueError( + f"parent_to({a=}, {b=}) failed, after execution we saw {a.parent=}" + ) + def apply_matrix_world(obj, verts: np.array): return mutil.dehomogenize(mutil.homogenize(verts) @ np.array(obj.matrix_world).T) + def surface_area(obj: bpy.types.Object): bm = bmesh.new() bm.from_mesh(obj.data) @@ -899,8 +955,8 @@ def surface_area(obj: bpy.types.Object): bm.free() return area -def approve_all_drivers(): +def approve_all_drivers(): # 'Touch' every driver in the file so that blender trusts them n = 0 @@ -912,22 +968,19 @@ def approve_all_drivers(): d.driver.expression = d.driver.expression n += 1 - logging.warning(f'Re-initialized {n} as trusted. Do not run infinigen on untrusted blend files. ') + logging.warning( + f"Re-initialized {n} as trusted. Do not run infinigen on untrusted blend files. 
" + ) + def count_objects(): count = 0 for obj in bpy.context.scene.objects: if obj.type != "MESH": continue - count +=1 + count += 1 return count -def count_objects(): - count = 0 - for obj in bpy.context.scene.objects: - if obj.type != "MESH": continue - count +=1 - return count def count_instance(): depsgraph = bpy.context.evaluated_depsgraph_get() @@ -938,6 +991,7 @@ def bounds(obj): points = np.array(obj.bound_box) return points.min(axis=0), points.max(axis=0) + def create_noise_plane(size=50, cuts=10, std=3, levels=3): bpy.ops.mesh.primitive_grid_add(size=size, x_subdivisions=cuts, y_subdivisions=cuts) obj = bpy.context.active_object @@ -945,7 +999,8 @@ def create_noise_plane(size=50, cuts=10, std=3, levels=3): for v in obj.data.vertices: v.co[2] = v.co[2] + np.random.normal(0, std) - return modify_mesh(obj, 'SUBSURF', levels=levels) + return modify_mesh(obj, "SUBSURF", levels=levels) + def purge_empty_materials(obj): with SelectObjects(obj): diff --git a/infinigen/core/util/camera.py b/infinigen/core/util/camera.py index fc955eb51..687d846e9 100644 --- a/infinigen/core/util/camera.py +++ b/infinigen/core/util/camera.py @@ -4,20 +4,19 @@ # Authors: Lahav Lipson, Lingjie Mei -import numpy as np - -from mathutils import Matrix, Vector -from mathutils.bvhtree import BVHTree import bpy import bpy_extras +import numpy as np +from mathutils import Matrix, Vector from tqdm import trange -from infinigen.core.util.math import homogenize, dehomogenize from infinigen.core.util import blender as butil +from infinigen.core.util.math import dehomogenize, homogenize -#--------------------------------------------------------------- +# --------------------------------------------------------------- # 3x4 P matrix from Blender camera -#--------------------------------------------------------------- +# --------------------------------------------------------------- + # Build intrinsic camera parameters from Blender camera data # @@ -32,37 +31,38 @@ def get_calibration_matrix_K_from_blender(camd): sensor_width_in_mm = camd.sensor_width sensor_height_in_mm = camd.sensor_height - if sensor_width_in_mm/sensor_height_in_mm != W/H: - vals = f'{(sensor_width_in_mm, sensor_height_in_mm, W, H)=}' - raise ValueError(f'Camera sensor has not been properly configured, you probably need to call camera.adjust_camera_sensor on it. {vals}') + if sensor_width_in_mm / sensor_height_in_mm != W / H: + vals = f"{(sensor_width_in_mm, sensor_height_in_mm, W, H)=}" + raise ValueError( + f"Camera sensor has not been properly configured, you probably need to call camera.adjust_camera_sensor on it. 
{vals}" + ) pixel_aspect_ratio = scene.render.pixel_aspect_x / scene.render.pixel_aspect_y - if (camd.sensor_fit == 'VERTICAL'): + if camd.sensor_fit == "VERTICAL": # the sensor height is fixed (sensor fit is horizontal), # the sensor width is effectively changed with the pixel aspect ratio - s_u = resolution_x_in_px * scale / sensor_width_in_mm / pixel_aspect_ratio # pixels per milimeter + s_u = ( + resolution_x_in_px * scale / sensor_width_in_mm / pixel_aspect_ratio + ) # pixels per milimeter s_v = resolution_y_in_px * scale / sensor_height_in_mm - else: # 'HORIZONTAL' and 'AUTO' + else: # 'HORIZONTAL' and 'AUTO' # the sensor width is fixed (sensor fit is horizontal), # the sensor height is effectively changed with the pixel aspect ratio pixel_aspect_ratio = scene.render.pixel_aspect_x / scene.render.pixel_aspect_y s_u = resolution_x_in_px * scale / sensor_width_in_mm s_v = resolution_y_in_px * scale * pixel_aspect_ratio / sensor_height_in_mm - # Parameters of intrinsic calibration matrix K alpha_u = f_in_mm * s_u alpha_v = f_in_mm * s_v u_0 = resolution_x_in_px * scale / 2 v_0 = resolution_y_in_px * scale / 2 - skew = 0 # only use rectangular pixels + skew = 0 # only use rectangular pixels - K = Matrix( - ((alpha_u, skew, u_0), - ( 0 , alpha_v, v_0), - ( 0 , 0, 1 ))) + K = Matrix(((alpha_u, skew, u_0), (0, alpha_v, v_0), (0, 0, 1))) return K + # Returns camera rotation and translation matrices from Blender. # # There are 3 coordinate systems involved: @@ -79,10 +79,7 @@ def get_calibration_matrix_K_from_blender(camd): # - right-handed: positive z look-at direction def get_3x4_RT_matrix_from_blender(cam): # bcam stands for blender camera - R_bcam2cv = Matrix( - ((1, 0, 0), - (0, -1, 0), - (0, 0, -1))) + R_bcam2cv = Matrix(((1, 0, 0), (0, -1, 0), (0, 0, -1))) # Transpose since the rotation is object rotation, # and we want coordinate rotation @@ -96,27 +93,30 @@ def get_3x4_RT_matrix_from_blender(cam): # Convert camera location to translation vector used in coordinate changes # T_world2bcam = -1*R_world2bcam*cam.location # Use location from matrix_world to account for constraints: - T_world2bcam = -1*R_world2bcam @ location + T_world2bcam = -1 * R_world2bcam @ location # Build the coordinate transform matrix from world to computer vision camera # NOTE: Use * instead of @ here for older versions of Blender # TODO: detect Blender version - R_world2cv = R_bcam2cv@R_world2bcam - T_world2cv = R_bcam2cv@T_world2bcam + R_world2cv = R_bcam2cv @ R_world2bcam + T_world2cv = R_bcam2cv @ T_world2bcam # put into 3x4 matrix - RT = Matrix(( - R_world2cv[0][:] + (T_world2cv[0],), - R_world2cv[1][:] + (T_world2cv[1],), - R_world2cv[2][:] + (T_world2cv[2],) - )) + RT = Matrix( + ( + R_world2cv[0][:] + (T_world2cv[0],), + R_world2cv[1][:] + (T_world2cv[1],), + R_world2cv[2][:] + (T_world2cv[2],), + ) + ) return RT def get_3x4_P_matrix_from_blender(cam): K = get_calibration_matrix_K_from_blender(cam.data) RT = get_3x4_RT_matrix_from_blender(cam) - return K@RT, K, RT + return K @ RT, K, RT + # ---------------------------------------------------------- # Alternate 3D coordinates to 2D pixel coordinate projection code @@ -127,11 +127,12 @@ def project_by_object_utils(cam, point): co_2d = bpy_extras.object_utils.world_to_camera_view(scene, cam, point) render_scale = scene.render.resolution_percentage / 100 render_size = ( - int(scene.render.resolution_x * render_scale), - int(scene.render.resolution_y * render_scale), - ) + int(scene.render.resolution_x * render_scale), + int(scene.render.resolution_y * 
render_scale), + ) return Vector((co_2d.x * render_size[0], render_size[1] - co_2d.y * render_size[1])) + def compute_vis_dists(points, cam): projmat, K, RT = map(np.array, get_3x4_P_matrix_from_blender(cam)) proj = points @ projmat.T @@ -141,16 +142,19 @@ def compute_vis_dists(points, cam): clamped_d = np.maximum(d, 0) RT_4x4_inv = np.array(Matrix(RT).to_4x4().inverted()) - clipped_pos = homogenize((homogenize(clamped_uv) * clamped_d[:, None]) @ np.linalg.inv(K).T) @ RT_4x4_inv.T + clipped_pos = ( + homogenize((homogenize(clamped_uv) * clamped_d[:, None]) @ np.linalg.inv(K).T) + @ RT_4x4_inv.T + ) vis_dist = np.linalg.norm(points[:, :-1] - clipped_pos[:, :-1], axis=-1) return d, vis_dist -def min_dists_from_cam_trajectory(points, cam, start=None, end=None, verbose=False): +def min_dists_from_cam_trajectory(points, cam, start=None, end=None, verbose=False): assert len(points.shape) == 2 and points.shape[-1] == 3 - assert cam.type == 'CAMERA' + assert cam.type == "CAMERA" if start is None: start = bpy.context.scene.frame_start @@ -162,7 +166,7 @@ def min_dists_from_cam_trajectory(points, cam, start=None, end=None, verbose=Fal min_vis_dists = np.full(len(points), 1e7) rangeiter = trange if verbose else range - for i in rangeiter(start, end+1): + for i in rangeiter(start, end + 1): bpy.context.scene.frame_set(i) dists, vis_dists = compute_vis_dists(points, cam) min_dists = np.minimum(dists, min_dists) @@ -175,5 +179,11 @@ def points_inview(bbox, camera): proj = np.array(get_3x4_P_matrix_from_blender(camera)[0]) x, y, z = proj @ np.concatenate([bbox, np.ones((len(bbox), 1))], -1).T render = bpy.context.scene.render - inview = (z > 0) & (x >= 0) & (y >= 0) & (x / z < render.resolution_x) & (y / z < render.resolution_y) + inview = ( + (z > 0) + & (x >= 0) + & (y >= 0) + & (x / z < render.resolution_x) + & (y / z < render.resolution_y) + ) return inview diff --git a/infinigen/core/util/color.py b/infinigen/core/util/color.py index f91e3c205..446a4e78f 100644 --- a/infinigen/core/util/color.py +++ b/infinigen/core/util/color.py @@ -5,14 +5,12 @@ # Authors: Alexander Raistrick, Yiming Zuo, Lingjie Mei, Lahav Lipson +import colorsys from dataclasses import dataclass -import bpy +import gin import mathutils - import numpy as np -import colorsys -import gin from infinigen.core.util.math import int_hash @@ -20,7 +18,7 @@ @dataclass class ChannelScheme: args: list - dist: str = 'uniform' + dist: str = "uniform" clip: tuple = (0, 1) wrap: bool = False @@ -33,32 +31,61 @@ def sample(self): return v -U = lambda min, max, **kwargs: ChannelScheme([min, max], dist='uniform', **kwargs) -N = lambda m, std, **kwargs: ChannelScheme([m, std], dist='normal', **kwargs) +def U(min, max, **kwargs): + return ChannelScheme([min, max], dist="uniform", **kwargs) + + +def N(m, std, **kwargs): + return ChannelScheme([m, std], dist="normal", **kwargs) + HSV_RANGES = { - 'petal': (N(0.95, 1.2, wrap=True), U(0.2, 0.85), U(0.2, 0.75)), - 'gem': (U(0, 1), U(0.85, 0.85), U(0.5, 1)), - 'greenery': (U(0.25, 0.33), N(0.65, 0.03), U(0.1, 0.45)), - 'yellowish': (N(0.15, 0.005, wrap=True), N(0.95, 0.02), N(0.9, 0.02)), - 'red': (N(0.0, 0.05, wrap=True), N(0.9, 0.03), N(0.6, 0.05)), - 'pink': (N(0.88, 0.06, wrap=True), N(0.6, 0.05), N(0.8, 0.05)), - 'white': (N(0.0, 0.06, wrap=True), U(0.0, 0.2, clip=[0, 1]), N(0.95, 0.02)), - 'fog': (U(0, 1), U(0, 0.2), U(0.8, 1)), - 'water': (U(0.2, 0.6), N(0.5, 0.1), U(0.7, 1)), - 'darker_water': (U(0.2, 0.6), N(0.5, 0.1), U(0.2, 0.3)), - 'under_water': (U(0.5, 0.7), U(0.7, 0.95), U(0.7, 1)), - 
'eye_schlera': (U(0.05, 0.15), U(0.2, 0.8), U(0.05, 0.5)), - 'eye_pupil': (U(0, 1), U(0.1, 0.9), U(0.1, 0.9)), - 'beak': (U(0, 0.13), U(0, 0.9), U(0.1, 0.6)), - 'fur': (U(0, 0.11), U(0.5, 0.95), U(0.02, 0.9)), - 'pine_needle': (N(0.05, 0.02, wrap=True), U(0.5, 0.93), U(0.045, 0.4),), - 'wet_sand': (U(0.05, 0.1), U(0.65, 0.7), U(0.05, 0.15),), - 'dry_sand': (U(0.05, 0.1), U(0.65, 0.7), U(0.15, 0.25),), - 'leather': (U(0.04, 0.07), U(0.80, 1.0), U(0.1, 0.6),), - 'concrete': (U(0.0, 1.0), U(0.02, 0.12), U(0.3, 0.9),), - 'textile': (U(0, 1), U(0.15, 0.7), U(0.1, 0.3),), - 'fabric': (U(0, 1), U(0.3, 0.8), U(0.6, 0.9)) + "petal": (N(0.95, 1.2, wrap=True), U(0.2, 0.85), U(0.2, 0.75)), + "gem": (U(0, 1), U(0.85, 0.85), U(0.5, 1)), + "greenery": (U(0.25, 0.33), N(0.65, 0.03), U(0.1, 0.45)), + "yellowish": (N(0.15, 0.005, wrap=True), N(0.95, 0.02), N(0.9, 0.02)), + "red": (N(0.0, 0.05, wrap=True), N(0.9, 0.03), N(0.6, 0.05)), + "pink": (N(0.88, 0.06, wrap=True), N(0.6, 0.05), N(0.8, 0.05)), + "white": (N(0.0, 0.06, wrap=True), U(0.0, 0.2, clip=[0, 1]), N(0.95, 0.02)), + "fog": (U(0, 1), U(0, 0.2), U(0.8, 1)), + "water": (U(0.2, 0.6), N(0.5, 0.1), U(0.7, 1)), + "darker_water": (U(0.2, 0.6), N(0.5, 0.1), U(0.2, 0.3)), + "under_water": (U(0.5, 0.7), U(0.7, 0.95), U(0.7, 1)), + "eye_schlera": (U(0.05, 0.15), U(0.2, 0.8), U(0.05, 0.5)), + "eye_pupil": (U(0, 1), U(0.1, 0.9), U(0.1, 0.9)), + "beak": (U(0, 0.13), U(0, 0.9), U(0.1, 0.6)), + "fur": (U(0, 0.11), U(0.5, 0.95), U(0.02, 0.9)), + "pine_needle": ( + N(0.05, 0.02, wrap=True), + U(0.5, 0.93), + U(0.045, 0.4), + ), + "wet_sand": ( + U(0.05, 0.1), + U(0.65, 0.7), + U(0.05, 0.15), + ), + "dry_sand": ( + U(0.05, 0.1), + U(0.65, 0.7), + U(0.15, 0.25), + ), + "leather": ( + U(0.04, 0.07), + U(0.80, 1.0), + U(0.1, 0.6), + ), + "concrete": ( + U(0.0, 1.0), + U(0.02, 0.12), + U(0.3, 0.9), + ), + "textile": ( + U(0, 1), + U(0.15, 0.7), + U(0.1, 0.3), + ), + "fabric": (U(0, 1), U(0.3, 0.8), U(0.6, 0.9)), # 'dirt': ('uniform', [], []), # 'rock': ('uniform', [], []), # 'creature_fur': ('normal', [0.89, 0.6, 0.2], []), @@ -68,8 +95,10 @@ def sample(self): def color_category(name): - if not name in HSV_RANGES: - raise ValueError(f'color_category did not recognize {name=}, options are {HSV_RANGES.keys()=}') + if name not in HSV_RANGES: + raise ValueError( + f"color_category did not recognize {name=}, options are {HSV_RANGES.keys()=}" + ) schemes = HSV_RANGES[name] assert len(schemes) == 3 hsv = [s.sample() for s in schemes] @@ -96,16 +125,19 @@ def rgb2hsv(rgb, *args): def srgb_to_linearrgb(c): - if c < 0: return 0 - elif c < 0.04045: return c / 12.92 - else: return ((c + 0.055) / 1.055) ** 2.4 + if c < 0: + return 0 + elif c < 0.04045: + return c / 12.92 + else: + return ((c + 0.055) / 1.055) ** 2.4 def hex2rgba(h, alpha=1): - r = (h & 0xff0000) >> 16 - g = (h & 0x00ff00) >> 8 - b = (h & 0x0000ff) - return tuple([srgb_to_linearrgb(c / 0xff) for c in (r, g, b)] + [alpha]) + r = (h & 0xFF0000) >> 16 + g = (h & 0x00FF00) >> 8 + b = h & 0x0000FF + return tuple([srgb_to_linearrgb(c / 0xFF) for c in (r, g, b)] + [alpha]) @gin.configurable diff --git a/infinigen/core/util/exporting.py b/infinigen/core/util/exporting.py index eb50c61a1..8f661966b 100644 --- a/infinigen/core/util/exporting.py +++ b/infinigen/core/util/exporting.py @@ -4,19 +4,21 @@ # Authors: Lahav Lipson +import json +import re +from itertools import chain, product from pathlib import Path -import gin +from uuid import uuid4 import bpy +import gin import mathutils -import re -import json -from uuid import 
uuid4 import numpy as np -from itertools import chain, product +from bpy.types import DepsgraphObjectInstance from tqdm import tqdm + from infinigen.core.util.math import int_hash -from bpy.types import DepsgraphObjectInstance + def get_mesh_data(obj): polys = obj.data.polygons @@ -25,61 +27,89 @@ def get_mesh_data(obj): polys.foreach_get("loop_total", loop_totals) indices = np.full((loop_totals.sum(),), -1, dtype=np.int32) polys.foreach_get("vertices", indices) - vert_lookup = np.full((len(verts)*3,), np.nan, dtype=np.float32) + vert_lookup = np.full((len(verts) * 3,), np.nan, dtype=np.float32) verts.foreach_get("co", vert_lookup) vert_lookup = vert_lookup.reshape((-1, 3)) - masktag = np.full(len(verts, ), 0, dtype=np.int32) - if False and 'MaskTag' in obj.data.attributes: - obj.data.attributes['MaskTag'].data.foreach_get("value", masktag) + masktag = np.full( + len( + verts, + ), + 0, + dtype=np.int32, + ) + if False and "MaskTag" in obj.data.attributes: + obj.data.attributes["MaskTag"].data.foreach_get("value", masktag) assert (loop_totals.size == 0) or (loop_totals.min() >= 0) assert (indices.size == 0) or (indices.min() >= 0) assert not np.any(np.isnan(vert_lookup)) return vert_lookup, indices, loop_totals, masktag + def get_curve_data(obj): curves = obj.data.curves points = obj.data.points points_length = np.full(len(curves), -1, dtype=np.int32) - curves.foreach_get('points_length', points_length) + curves.foreach_get("points_length", points_length) points_length = np.unique(points_length) - assert (points_length.size == 0) or (points_length.size == 1 and points_length[0] == 5), np.unique(points_length) - vertices = np.full((len(points)*3), np.nan, dtype=np.float32) - points.foreach_get('position', vertices) + assert (points_length.size == 0) or ( + points_length.size == 1 and points_length[0] == 5 + ), np.unique(points_length) + vertices = np.full((len(points) * 3), np.nan, dtype=np.float32) + points.foreach_get("position", vertices) vertices = vertices.reshape(-1, 3) radii = np.full(len(points), np.nan, dtype=np.float32) - points.foreach_get('radius', radii) + points.foreach_get("radius", radii) assert not np.any(np.isnan(vertices)) assert not np.any(np.isnan(radii)) return vertices, radii -valid_int32 = lambda x: (-2**31 <= x < 2**31) + +def valid_int32(x): + return -(2**31) <= x < 2**31 + # See https://projects.blender.org/blender/blender/issues/60881 for logic def get_id(i: DepsgraphObjectInstance): - parent_hash = (int_hash(i.parent.name)-2**31) if (i.parent is not None) else 0 + parent_hash = (int_hash(i.parent.name) - 2**31) if (i.parent is not None) else 0 t = list(i.persistent_id) - if list(t) == [0]*8: + if list(t) == [0] * 8: return (0, 0, parent_hash) a, b, *c = t - assert c == [2**31-1]*6, t + assert c == [2**31 - 1] * 6, t assert valid_int32(a) and valid_int32(b), t return (a, b, parent_hash) + def get_all_instances(): vertex_info = {} pbar = tqdm(bpy.context.evaluated_depsgraph_get().object_instances) for deps_instance in pbar: obj = deps_instance.object pbar.set_description(f"Finding Instances: {obj.name[:20].ljust(20)}") - if (obj.type == "MESH") and (deps_instance.is_instance) and ("PARTICLE_SYSTEM" not in {m.type for m in obj.modifiers}): + if ( + (obj.type == "MESH") + and (deps_instance.is_instance) + and ("PARTICLE_SYSTEM" not in {m.type for m in obj.modifiers}) + ): mat = np.asarray(deps_instance.matrix_world, dtype=np.float32).copy() if obj.data not in vertex_info: vert_lookup, indices, loop_totals, masktag = get_mesh_data(obj) - vertex_info[obj.data] = 
dict(vertex_lookup=vert_lookup, is_instance=True, masktag=masktag, - indices=indices, loop_totals=loop_totals, matrices=[], instance_ids=[], name=obj.name) + vertex_info[obj.data] = dict( + vertex_lookup=vert_lookup, + is_instance=True, + masktag=masktag, + indices=indices, + loop_totals=loop_totals, + matrices=[], + instance_ids=[], + name=obj.name, + ) vertex_info[obj.data]["matrices"].append(mat) vertex_info[obj.data]["instance_ids"].append(get_id(deps_instance)) - return chain.from_iterable(((v['vertex_lookup'].shape[0], v['name']), v) for v in vertex_info.values()) + return chain.from_iterable( + ((v["vertex_lookup"].shape[0], v["name"]), v) for v in vertex_info.values() + ) + def get_all_non_instances(): pbar = tqdm(bpy.context.evaluated_depsgraph_get().object_instances) @@ -88,15 +118,34 @@ def get_all_non_instances(): pbar.set_description(f"Finding Non-Instances: {obj.name[:20].ljust(20)}") mat = np.asarray(deps_instance.matrix_world, dtype=np.float32).copy()[None] if obj.type == "MESH": - if (not deps_instance.is_instance) and ("PARTICLE_SYSTEM" not in {m.type for m in obj.modifiers}): + if (not deps_instance.is_instance) and ( + "PARTICLE_SYSTEM" not in {m.type for m in obj.modifiers} + ): yield (len(obj.data.vertices), obj.name) vert_lookup, indices, loop_totals, masktag = get_mesh_data(obj) - yield dict(vertex_lookup=vert_lookup, indices=indices, loop_totals=loop_totals, name=obj.name, matrices=mat, instance_ids=[get_id(deps_instance)], masktag=masktag, is_instance=False) - elif obj.type == 'CURVES': + yield dict( + vertex_lookup=vert_lookup, + indices=indices, + loop_totals=loop_totals, + name=obj.name, + matrices=mat, + instance_ids=[get_id(deps_instance)], + masktag=masktag, + is_instance=False, + ) + elif obj.type == "CURVES": assert not deps_instance.is_instance - yield (len(obj.data.points)//5, obj.name) # //5 bc hair is inexpensive + yield (len(obj.data.points) // 5, obj.name) # //5 bc hair is inexpensive hair_vertices, hair_radii = get_curve_data(obj) - yield dict(vertex_lookup=hair_vertices, radii=hair_radii, name=obj.name, matrices=mat, instance_ids=[get_id(deps_instance)], is_instance=False) + yield dict( + vertex_lookup=hair_vertices, + radii=hair_radii, + name=obj.name, + matrices=mat, + instance_ids=[get_id(deps_instance)], + is_instance=False, + ) + def parse_group_from_name(name: str): for reg in ["(.*)\.spawn_asset\(.*", "scatter:(.*)", "([A-Za-z_]+)"]: @@ -104,6 +153,7 @@ def parse_group_from_name(name: str): if match: return match.group(1) + def parse_semantic_from_name(name: str): group_name = parse_group_from_name(name) or name for reg in ["([A-Za-z_]+)[\.\(].*", "([A-Za-z_]+)"]: @@ -111,26 +161,29 @@ def parse_semantic_from_name(name: str): if match: return match.group(1).replace("Factory", "").replace("_fine", "").title() + def calc_aa_bbox(pts): xx, yy, zz = zip(pts.min(axis=0), pts.max(axis=0)) - return np.stack(list(product(xx, yy, zz))) # 8 x 3 + return np.stack(list(product(xx, yy, zz))) # 8 x 3 + def calc_instance_bbox(matrices, verts): assert verts.shape[1] == 3 single_bbox = calc_aa_bbox(verts) - h_bbox = np.concatenate((single_bbox.T, np.ones((1, 8))), axis=0) # 4 x 8 - all_h_bbox = np.einsum("bij, jk -> bki", matrices, h_bbox) # B x 8 x 4 + h_bbox = np.concatenate((single_bbox.T, np.ones((1, 8))), axis=0) # 4 x 8 + all_h_bbox = np.einsum("bij, jk -> bki", matrices, h_bbox) # B x 8 x 4 assert all_h_bbox.shape[1:] == (8, 4) - all_bbox = (all_h_bbox[...,:3] / all_h_bbox[..., 3:]) # B x 8 x 3 + all_bbox = all_h_bbox[..., :3] / all_h_bbox[..., 3:] # 
B x 8 x 3 combined_bbox = calc_aa_bbox(all_bbox.reshape((-1, 3))) return combined_bbox, single_bbox + def get_mesh_id_if_cached(name, num_verts, current_ids, previous_frame_mapping): assert isinstance(current_ids, frozenset) if releveant_entries := previous_frame_mapping.get(name): for (nv, prev_ids), mesh_id in releveant_entries.items(): assert isinstance(prev_ids, frozenset) - if (num_verts == nv): + if num_verts == nv: for idd in current_ids: if idd in prev_ids: return mesh_id @@ -138,10 +191,12 @@ def get_mesh_id_if_cached(name, num_verts, current_ids, previous_frame_mapping): @gin.configurable -def save_obj_and_instances(output_folder, previous_frame_mesh_id_mapping, current_frame_mesh_id_mapping): +def save_obj_and_instances( + output_folder, previous_frame_mesh_id_mapping, current_frame_mesh_id_mapping +): output_folder = Path(output_folder) output_folder.mkdir(exist_ok=True, parents=True) - for atm_name in ['atmosphere', 'atmosphere_fine', 'KoleClouds']: + for atm_name in ["atmosphere", "atmosphere_fine", "KoleClouds"]: if atm_name in bpy.data.objects: bpy.data.objects.remove(bpy.data.objects[atm_name]) if "scatters" in bpy.data.collections: @@ -155,19 +210,23 @@ def save_obj_and_instances(output_folder, previous_frame_mesh_id_mapping, curren singleton_mesh_data = get_all_non_instances() npz_number = 1 filename = output_folder / f"saved_mesh_{npz_number:04d}.npz" - MAX_NUM_VERTS = int(5e6) # lower if OOM + MAX_NUM_VERTS = int(5e6) # lower if OOM running_total_verts = 0 current_obj_num_verts = None npz_data = {} object_names_mapping = {} for item in chain(instance_mesh_data, singleton_mesh_data): if isinstance(item, tuple): - current_obj_num_verts, object_name = item # Sometimes current_obj_num_verts will be 0. This is fine. + current_obj_num_verts, object_name = ( + item # Sometimes current_obj_num_verts will be 0. This is fine. + ) if object_name not in object_names_mapping: object_names_mapping[object_name] = len(object_names_mapping) + 1 # Flush the .npz to avoid OOM - if (len(npz_data) > 0) and ((running_total_verts + current_obj_num_verts) >= MAX_NUM_VERTS): + if (len(npz_data) > 0) and ( + (running_total_verts + current_obj_num_verts) >= MAX_NUM_VERTS + ): np.savez(filename, **npz_data) print(f"Saving to {filename}") npz_data.clear() @@ -176,16 +235,25 @@ def save_obj_and_instances(output_folder, previous_frame_mesh_id_mapping, curren filename = output_folder / f"saved_mesh_{npz_number:04d}.npz" if current_obj_num_verts > MAX_NUM_VERTS: - print(f"WARNING: Object {object_name} is very large, with {current_obj_num_verts} vertices.") + print( + f"WARNING: Object {object_name} is very large, with {current_obj_num_verts} vertices." 
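To make the cache structure walked by `get_mesh_id_if_cached` above more concrete, here is an illustrative sketch of the per-frame mapping it expects. The object name, vertex count and instance ids are made up, and the import assumes `bpy` (and the rest of the package) is importable:

```python
from infinigen.core.util.exporting import get_mesh_id_if_cached

# Hypothetical contents of the previous frame's mapping:
#   object name -> {(num_verts, frozenset of (a, b, parent_hash) ids) -> mesh_id}
previous_frame_mapping = {
    "CactusFactory(1234).spawn_asset(0)": {
        (4096, frozenset({(3, 0, -17), (4, 0, -17)})): "a1b2c3d4e5f6",
    },
}

# A mesh is treated as "the same" across frames when the name and vertex count
# match and at least one instance id overlaps with the previous frame.
mesh_id = get_mesh_id_if_cached(
    "CactusFactory(1234).spawn_asset(0)",
    num_verts=4096,
    current_ids=frozenset({(4, 0, -17), (9, 1, -17)}),
    previous_frame_mapping=previous_frame_mapping,
)
assert mesh_id == "a1b2c3d4e5f6"
```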
+ ) else: is_instance = item["is_instance"] if is_instance: instance_ids_set = frozenset(item["instance_ids"]) - mesh_id = get_mesh_id_if_cached(object_name, current_obj_num_verts, instance_ids_set, previous_frame_mesh_id_mapping) + mesh_id = get_mesh_id_if_cached( + object_name, + current_obj_num_verts, + instance_ids_set, + previous_frame_mesh_id_mapping, + ) if mesh_id is None: mesh_id = uuid4().hex[:12] - current_frame_mesh_id_mapping[object_name][(current_obj_num_verts, instance_ids_set)] = mesh_id + current_frame_mesh_id_mapping[object_name][ + (current_obj_num_verts, instance_ids_set) + ] = mesh_id else: mesh_id = str(hex(int_hash(object_name)))[:12] @@ -200,24 +268,44 @@ def save_obj_and_instances(output_folder, previous_frame_mesh_id_mapping, curren matrices = np.asarray(item["matrices"], dtype=np.float32) npz_data[f"{mesh_id}_transformations"] = matrices instance_ids_array = np.asarray(item["instance_ids"], dtype=np.int32) - assert np.unique(instance_ids_array, axis=0).shape == instance_ids_array.shape + assert ( + np.unique(instance_ids_array, axis=0).shape == instance_ids_array.shape + ) assert instance_ids_array.shape[1] == 3 npz_data[f"{mesh_id}_instance_ids"] = instance_ids_array obj = bpy.data.objects[object_name] - json_val = {"filename": filename.name, "mesh_id": mesh_id, "object_name": object_name, "num_verts": current_obj_num_verts, "children": [], - "object_type": obj.type, "num_instances": matrices.shape[0], "object_idx": object_names_mapping[object_name]} + json_val = { + "filename": filename.name, + "mesh_id": mesh_id, + "object_name": object_name, + "num_verts": current_obj_num_verts, + "children": [], + "object_type": obj.type, + "num_instances": matrices.shape[0], + "object_idx": object_names_mapping[object_name], + } if obj.type == "MESH": - json_val['num_verts'] = len(obj.data.vertices) - json_val['num_faces'] = len(obj.data.polygons) - json_val['materials'] = obj.material_slots.keys() - json_val['unapplied_modifiers'] = obj.modifiers.keys() + json_val["num_verts"] = len(obj.data.vertices) + json_val["num_faces"] = len(obj.data.polygons) + json_val["materials"] = obj.material_slots.keys() + json_val["unapplied_modifiers"] = obj.modifiers.keys() if not is_instance: - non_aa_bbox = np.asarray([(obj.matrix_world @ mathutils.Vector(v)) for v in obj.bound_box], dtype=np.float32) + non_aa_bbox = np.asarray( + [(obj.matrix_world @ mathutils.Vector(v)) for v in obj.bound_box], + dtype=np.float32, + ) json_val["instance_bbox"] = calc_aa_bbox(non_aa_bbox).tolist() # Todo add chain up parents else: - combined_bbox, instance_bbox = calc_instance_bbox(matrices, item["vertex_lookup"]) - json_val.update({"bbox": combined_bbox.tolist(), "instance_bbox": instance_bbox.tolist()}) + combined_bbox, instance_bbox = calc_instance_bbox( + matrices, item["vertex_lookup"] + ) + json_val.update( + { + "bbox": combined_bbox.tolist(), + "instance_bbox": instance_bbox.tolist(), + } + ) for child_obj in obj.children: if child_obj.name not in object_names_mapping: object_names_mapping[child_obj.name] = len(object_names_mapping) + 1 @@ -225,7 +313,6 @@ def save_obj_and_instances(output_folder, previous_frame_mesh_id_mapping, curren json_data.append(json_val) running_total_verts += current_obj_num_verts - if len(npz_data) > 0: np.savez(filename, **npz_data) print(f"Saving to {filename}") @@ -235,9 +322,16 @@ def save_obj_and_instances(output_folder, previous_frame_mesh_id_mapping, curren object_name = obj.name if object_name not in object_names_mapping: object_names_mapping[object_name] = 
len(object_names_mapping) + 1 - non_aa_bbox = np.asarray([(obj.matrix_world @ mathutils.Vector(v)) for v in obj.bound_box]) - json_val = {"object_name": object_name, "object_type": obj.type, "children": [], - "bbox": calc_aa_bbox(non_aa_bbox).tolist(), "object_idx": object_names_mapping[object_name]} + non_aa_bbox = np.asarray( + [(obj.matrix_world @ mathutils.Vector(v)) for v in obj.bound_box] + ) + json_val = { + "object_name": object_name, + "object_type": obj.type, + "children": [], + "bbox": calc_aa_bbox(non_aa_bbox).tolist(), + "object_idx": object_names_mapping[object_name], + } for child_obj in obj.children: if child_obj.name not in object_names_mapping: object_names_mapping[child_obj.name] = len(object_names_mapping) + 1 diff --git a/infinigen/core/util/logging.py b/infinigen/core/util/logging.py index cb2389f90..c871d6ffb 100644 --- a/infinigen/core/util/logging.py +++ b/infinigen/core/util/logging.py @@ -8,25 +8,31 @@ # - Lingjie Mei: disable -import os, sys +import logging +import os +import sys +import typing from datetime import datetime from pathlib import Path -import logging -import uuid import bpy import gin -timer_results = logging.getLogger('times') +timer_results = logging.getLogger("times") + + +def lazydebug(logger: logging.Logger, msg: typing.Callable, *args, **kwargs): + if logger.isEnabledFor(logging.DEBUG): + logger.debug(msg(), *args, **kwargs) + @gin.configurable class Timer: - def __init__(self, desc, disable_timer=False, logger=None): self.disable_timer = disable_timer if self.disable_timer: return - self.name = f'[{desc}]' + self.name = f"[{desc}]" if logger is None: logger = logging.getLogger("infinigen.times") self.logger = logger @@ -35,22 +41,22 @@ def __enter__(self): if self.disable_timer: return self.start = datetime.now() - self.logger.info(f'{self.name}') + self.logger.info(f"{self.name}") def __exit__(self, exc_type, exc_val, traceback): if self.disable_timer: return self.end = datetime.now() - self.duration = self.end - self.start # timedelta + self.duration = self.end - self.start # timedelta if exc_type is None: - self.logger.info(f'{self.name} finished in {str(self.duration)}') + self.logger.info(f"{self.name} finished in {str(self.duration)}") else: - self.logger.info(f'{self.name} failed with {exc_type}') + self.logger.info(f"{self.name} failed with {exc_type}") -class Suppress(): +class Suppress: def __enter__(self, logfile=os.devnull): - open(logfile, 'w').close() + open(logfile, "w").close() self.old = os.dup(1) sys.stdout.flush() os.close(1) @@ -64,8 +70,8 @@ def __exit__(self, type, value, traceback): os.close(self.old) logging.disable(self.level) -class LogLevel(): +class LogLevel: def __init__(self, logger, level): self.logger = logger self.level = level @@ -78,13 +84,19 @@ def __enter__(self): def __exit__(self, *_): self.logger.setLevel(self.orig_level) + def save_polycounts(file): for col in bpy.data.collections: - polycount = sum(len(obj.data.polygons) for obj in col.all_objects if (obj.type == "MESH" and obj.data is not None)) + polycount = sum( + len(obj.data.polygons) + for obj in col.all_objects + if (obj.type == "MESH" and obj.data is not None) + ) file.write(f"{col.name}: {polycount:,}\n") - for stat in bpy.context.scene.statistics(bpy.context.view_layer).split(' | ')[2:]: + for stat in bpy.context.scene.statistics(bpy.context.view_layer).split(" | ")[2:]: file.write(stat) + @gin.configurable def create_text_file(log_dir, filename, text=None): log_dir = Path(log_dir) @@ -95,4 +107,4 @@ def create_text_file(log_dir, filename, 
text=None): class BadSeedError(ValueError): - pass \ No newline at end of file + pass diff --git a/infinigen/core/util/math.py b/infinigen/core/util/math.py index af21ad72b..8e62d6161 100644 --- a/infinigen/core/util/math.py +++ b/infinigen/core/util/math.py @@ -8,17 +8,15 @@ import math import random import warnings -import sys -import numpy as np -import gin import cv2 +import gin +import numpy as np + @gin.configurable class FixedSeed: - def __init__(self, seed): - self.seed = int(seed) self.py_state = None self.np_state = None @@ -37,7 +35,6 @@ def __exit__(self, *_): @gin.configurable class AddedSeed: - def __init__(self, added_seed): self.added_seed = added_seed self.py_state = None @@ -56,40 +53,40 @@ def __exit__(self, *_): class BBox: - def __init__(self, mins, maxs): self.mins = np.array(mins) self.maxs = np.array(maxs) def __repr__(self): - return f'{self.__class__}({self.mins}, {self.maxs})' + return f"{self.__class__}({self.mins}, {self.maxs})" def __contains__(self, p): p = np.array(p) return np.all((self.mins <= p) * (self.maxs >= p)) def uniform(self): - return np.random.uniform(0, 1, len(self.mins)) * (self.maxs - self.mins) + self.mins + return ( + np.random.uniform(0, 1, len(self.mins)) * (self.maxs - self.mins) + + self.mins + ) def union(self, other): - if isinstance(other, BBox): return BBox( mins=np.minimum(self.mins, other.mins), - maxs=np.maximum(self.maxs, other.maxs) + maxs=np.maximum(self.maxs, other.maxs), ) elif isinstance(other, np.ndarray) and other.shape[-1] == len(self.mins): return BBox( - mins=np.minimum(self.mins, other), - maxs=np.maximum(self.maxs, other) + mins=np.minimum(self.mins, other), maxs=np.maximum(self.maxs, other) ) else: - raise ValueError(f'Unrecognized arg {other} in BBox.union') + raise ValueError(f"Unrecognized arg {other} in BBox.union") def intersect(self, other): return BBox( mins=np.maximum(self.mins, other.mins), - maxs=np.minimum(self.maxs, other.maxs) + maxs=np.minimum(self.maxs, other.maxs), ) def empty(self): @@ -101,7 +98,9 @@ def subset(self, idx): def linspace(self, n): if isinstance(n, int): n = [n] * len(self.mins) - lins = [np.linspace(self.mins[i], self.maxs[i], n[i]) for i in range(len(self.mins))] + lins = [ + np.linspace(self.mins[i], self.maxs[i], n[i]) for i in range(len(self.mins)) + ] return np.meshgrid(*lins) def to_local_coords(self, p): @@ -130,36 +129,29 @@ def center(self): def eroded(self, margin): if not isinstance(margin, np.ndarray): margin = np.array([margin] * len(self)) - return BBox( - mins=self.mins + margin, - maxs=self.maxs - margin - ) + return BBox(mins=self.mins + margin, maxs=self.maxs - margin) def inflated(self, margin): if not isinstance(margin, np.ndarray): margin = np.array([margin] * len(self)) - return BBox( - mins=self.mins - margin, - maxs=self.maxs + margin - ) + return BBox(mins=self.mins - margin, maxs=self.maxs + margin) @classmethod def from_center_dims(cls, center, dims): - return cls( - mins=center - dims / 2, - maxs=center + dims / 2 - ) + return cls(mins=center - dims / 2, maxs=center + dims / 2) @classmethod def from_bpy_box(cls, bpy_obj): if not ( - hasattr(bpy_obj, 'empty_display_type') and - bpy_obj.empty_display_type == 'CUBE' + hasattr(bpy_obj, "empty_display_type") + and bpy_obj.empty_display_type == "CUBE" ): - raise ValueError(f'BBox.from_bpy_box expected a CUBE type blender empty') + raise ValueError("BBox.from_bpy_box expected a CUBE type blender empty") center = bpy_obj.location - dims = bpy_obj.scale * bpy_obj.empty_display_size / 2 # default has a RADIUS of 1 + 
dims = ( + bpy_obj.scale * bpy_obj.empty_display_size / 2 + ) # default has a RADIUS of 1 return cls.from_center_dims(center, dims) @@ -176,13 +168,13 @@ def md5_hash(x): m = hashlib.md5() for s in x: assert isinstance(s, (int, str)) - m.update(str(s).encode('utf-8')) + m.update(str(s).encode("utf-8")) return m elif isinstance(x, (int, str)): - x = str(x).encode('utf-8') + x = str(x).encode("utf-8") return hashlib.md5(x) else: - raise ValueError(f'util.md5_hash doesnt currently support type({type(x)}') + raise ValueError(f"util.md5_hash doesnt currently support type({type(x)}") def int_hash(x, max=(2**32 - 1)): @@ -190,6 +182,7 @@ def int_hash(x, max=(2**32 - 1)): h = abs(md5) % max return h + def round_to_nearest(x, step): return step * np.round(x / step) @@ -213,9 +206,9 @@ def lerp_sample(vec, ts: np.array): def inverse_interpolate(vals, ds): - ''' + """ Find ts such that lerp_sample(vals, ts) = ds - ''' + """ assert (ds >= vals.min()).all() assert (ds <= vals.max()).all() @@ -235,11 +228,14 @@ def inverse_interpolate(vals, ds): def cross_matrix(v): o = np.zeros(v.shape[0]) - cross_mat = np.stack([ - np.stack([o, -v[:, 2], v[:, 1]], axis=-1), - np.stack([v[:, 2], o, -v[:, 0]], axis=-1), - np.stack([-v[:, 1], v[:, 0], o], axis=-1), - ], axis=-1).transpose(0, 2, 1) + cross_mat = np.stack( + [ + np.stack([o, -v[:, 2], v[:, 1]], axis=-1), + np.stack([v[:, 2], o, -v[:, 0]], axis=-1), + np.stack([-v[:, 1], v[:, 0], o], axis=-1), + ], + axis=-1, + ).transpose(0, 2, 1) return cross_mat @@ -264,7 +260,7 @@ def rotate_match_directions(a, b): rots = np.empty((len(a), 3, 3)) rots[~m] = np.eye(3)[None] - if np.all(~m): # needed to prevent exceptions if continued + if np.all(~m): # needed to prevent exceptions if continued return rots dots = (a[m] * b[m]).sum(axis=-1) @@ -278,16 +274,19 @@ def lerp(a, b, x): "linear interpolation" return (1 - x) * a + x * b + def dict_lerp(a, b, t): assert list(a.keys()) == list(b.keys()) return {k: lerp(va, b[k], t) for k, va in a.items()} + def dict_convex_comb(dicts, weights): assert all(d.keys == dicts[0].keys() for d in dicts[1:]) weights = np.array(weights) vals = {k: np.array([d[k] for d in dicts]) for k in dicts[0]} return {k: (v * weights).sum() for k, v in vals.items()} + def randomspacing(min, max, n, margin): assert 0 <= margin and margin <= 0.5 @@ -312,6 +311,7 @@ def homogenize(points): def dehomogenize(points): return points[..., :-1] / points[..., [-1]] + def clip_gaussian(mean, std, min, max, max_tries=20): assert min <= max i = 0 @@ -321,11 +321,14 @@ def clip_gaussian(mean, std, min, max, max_tries=20): return val if i == max_tries: - warnings.warn(f'clip_gaussian({mean=}, {std=}, {min=}, {max=}) reached {max_tries=}') + warnings.warn( + f"clip_gaussian({mean=}, {std=}, {min=}, {max=}) reached {max_tries=}" + ) return np.clip(val, min, max) i += 1 + def normalize(v, disallow_zero_norm=False, in_place=True): n = np.linalg.norm(v, axis=-1) if disallow_zero_norm and np.any(n == 0): @@ -343,6 +346,7 @@ def wrap_around_cyclic_coord(u, u_start, u_end): _, r = np.divmod(u - u_start, u_end - u_start) return r + u_start + def new_domain_from_affine(old_domain, a=1.0, b=0.0): """ old domain: domain of u(t) @@ -366,5 +370,6 @@ def affine_from_new_domain(old_domain, new_domain): b = s[0] - a * t[0] return (a, b) + def resize(arr, shape): - return cv2.resize(arr, shape) #, interpolation=cv2.INTER_LANCZOS4) + return cv2.resize(arr, shape) # , interpolation=cv2.INTER_LANCZOS4) diff --git a/infinigen/core/util/organization.py b/infinigen/core/util/organization.py 
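Before the `organization.py` hunks: the utilities reformatted in the two files above (`logging.py`'s `Timer`, `math.py`'s `BBox` and `clip_gaussian`) are easy to exercise standalone, which may help confirm the reflow is behavior-preserving. A minimal sketch, assuming the package and its `bpy`/`cv2`/`gin` dependencies are importable; all values are placeholders:

```python
import logging
import time

import numpy as np

from infinigen.core.util.logging import Timer
from infinigen.core.util.math import BBox, clip_gaussian

logging.basicConfig(level=logging.INFO)

# Timer logs "[coarse]" on entry, then the elapsed timedelta (or the exception
# type) on exit, via the "infinigen.times" logger by default.
with Timer("coarse"):
    time.sleep(0.1)  # placeholder for real work

# An axis-aligned box of side 2 centered at the origin.
box = BBox.from_center_dims(center=np.zeros(3), dims=np.full(3, 2.0))
assert [0.5, -0.5, 0.25] in box   # __contains__ checks mins <= p <= maxs
assert box.uniform() in box       # uniform random point inside the box
bigger = box.inflated(0.5)        # pad every face outward by the margin

# clip_gaussian resamples a normal draw until it lands in [min, max];
# after max_tries it warns and clips instead of looping forever.
val = clip_gaussian(mean=1.0, std=0.5, min=0.5, max=2.0)
assert 0.5 <= val <= 2.0
```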
index a2e264688..c9887eed3 100644 --- a/infinigen/core/util/organization.py +++ b/infinigen/core/util/organization.py @@ -24,7 +24,18 @@ class Materials: Atmosphere = "atmosphere" Clouds = "clouds" Beach = "beach" - all = [GroundCollection, MountainCollection, Eroded, LiquidCollection, Lava, Snow, Atmosphere, Clouds, Beach] + all = [ + GroundCollection, + MountainCollection, + Eroded, + LiquidCollection, + Lava, + Snow, + Atmosphere, + Clouds, + Beach, + ] + class LandTile: Canyon = "Canyon" @@ -37,11 +48,13 @@ class LandTile: Coast = "Coast" MultiMountains = "MultiMountains" + class Assets: Caves = "Caves" UpsidedownMountains = "UpsidedownMountains" Ocean = "Ocean" + class AssetFile: Heightmap = "heightmap" Mask = "mask" @@ -49,21 +62,25 @@ class AssetFile: Params = "params" Finish = "finish" + class Process: Snowfall = "snowfall" Erosion = "erosion" - IceErosion = 'ice_erosion' + IceErosion = "ice_erosion" Eruption = "eruption" + class TerrainNames: OpaqueTerrain = "OpaqueTerrain" CollectiveTransparentTerrain = "CollectiveTransparentTerrain" + class Transparency: IndividualTransparent = "IndividualTransparent" CollectiveTransparent = "CollectiveTransparent" Opaque = "Opaque" + class ElementNames: Atmosphere = "atmosphere" Liquid = "liquid" @@ -79,6 +96,7 @@ class ElementNames: Volcanos = "volcanos" FloatingIce = "floating_ice" + class Tags: Cave = "cave" LiquidCovered = "liquid_covered" @@ -87,14 +105,17 @@ class Tags: Landscape = "landscape" OutOfView = "out_of_view" + class Attributes: BoundarySDF = "BoundarySDF" ElementTag = "ElementTag" + class SelectionCriterions: CloseUp = "closeup" Altitude = "altitude" + class ElementTag: Liquid = 0 Clouds = 1 @@ -107,10 +128,18 @@ class ElementTag: UpsidedownMountains = 8 total_cnt = 9 map = [ - ElementNames.Liquid, ElementNames.Clouds, Tags.Terrain, ElementNames.WarpedRocks, ElementNames.VoronoiRocks, - ElementNames.VoronoiGrains, ElementNames.Volcanos, ElementNames.FloatingIce, ElementNames.UpsidedownMountains, + ElementNames.Liquid, + ElementNames.Clouds, + Tags.Terrain, + ElementNames.WarpedRocks, + ElementNames.VoronoiRocks, + ElementNames.VoronoiGrains, + ElementNames.Volcanos, + ElementNames.FloatingIce, + ElementNames.UpsidedownMountains, ] + class SurfaceTypes: BlenderDisplacement = "BlenderDisplacement" Displacement = "Displacement" diff --git a/infinigen/core/util/pipeline.py b/infinigen/core/util/pipeline.py index 3e19a5445..0304b1af4 100644 --- a/infinigen/core/util/pipeline.py +++ b/infinigen/core/util/pipeline.py @@ -4,24 +4,23 @@ # Authors: Alexander Raistrick -from pathlib import Path import logging -import psutil import os +from contextlib import nullcontext +from pathlib import Path import numpy as np import pandas as pd +import psutil -from contextlib import nullcontext - -from infinigen.core.util.math import FixedSeed, int_hash -from infinigen.core.util.logging import Timer from infinigen.core.util.blender import GarbageCollect, count_instance, count_objects +from infinigen.core.util.logging import Timer +from infinigen.core.util.math import FixedSeed, int_hash logger = logging.getLogger(__name__) -class RandomStageExecutor: +class RandomStageExecutor: def __init__(self, scene_seed, output_folder: Path, params): self.scene_seed = scene_seed self.output_folder = output_folder @@ -32,49 +31,69 @@ def __init__(self, scene_seed, output_folder: Path, params): def _should_run_stage(self, name, use_chance, prereq): if prereq is not None: try: - e = next(e for e in self.results if e['name'] == prereq) + e = next(e for e in 
self.results if e["name"] == prereq) except StopIteration: - raise ValueError(f'{self} could not find matching name for {prereq=}') - if not e['ran']: - logger.info(f'Skipping run_stage({name}...) due to unmet {prereq=}') + raise ValueError(f"{self} could not find matching name for {prereq=}") + if not e["ran"]: + logger.info(f"Skipping run_stage({name}...) due to unmet {prereq=}") return with FixedSeed(int_hash((self.scene_seed, name, 0))): - if not self.params.get(f'{name}_enabled', True): - logger.debug(f'Not running {name} due to manually set not enabled') - return False - if use_chance and np.random.uniform() > self.params[f'{name}_chance']: - logger.debug(f'Not running {name} due to random chance') + if not self.params.get(f"{name}_enabled", True): + logger.debug(f"Not running {name} due to manually set not enabled") + return False + if use_chance and np.random.uniform() > self.params[f"{name}_chance"]: + logger.debug(f"Not running {name} due to random chance") return False return True - + def save_results(self, path): pd.DataFrame.from_records(self.results).to_csv(path) def run_stage( - self, name, fn, *args, - use_chance=True, gc=True, default=None, - prereq=None, **kwargs): - + self, + name, + fn, + *args, + use_chance=True, + gc=True, + default=None, + prereq=None, + **kwargs, + ): mem_usage = psutil.Process(os.getpid()).memory_info().rss - + will_run = self._should_run_stage(name, use_chance, prereq) - + if not will_run: - self.results.append({'name': name, 'ran': will_run, 'mem_at_finish': mem_usage, 'obj_count': count_objects(),\ - 'instance_count': count_instance()}) + self.results.append( + { + "name": name, + "ran": will_run, + "mem_at_finish": mem_usage, + "obj_count": count_objects(), + "instance_count": count_instance(), + } + ) return default gc_context = GarbageCollect() if gc else nullcontext() - seed = self.params.get(f'{name}_seed') + seed = self.params.get(f"{name}_seed") if seed is None: seed = int_hash((self.scene_seed, name)) - logger.debug(f'run_stage({name=}) using {seed=}') - + logger.debug(f"run_stage({name=}) using {seed=}") + with FixedSeed(seed): with Timer(name), gc_context: ret = fn(*args, **kwargs) mem_usage = psutil.Process(os.getpid()).memory_info().rss - self.results.append({'name': name, 'ran': will_run, 'mem_at_finish': mem_usage, 'obj_count': count_objects(),\ - 'instance_count': count_instance()}) + self.results.append( + { + "name": name, + "ran": will_run, + "mem_at_finish": mem_usage, + "obj_count": count_objects(), + "instance_count": count_instance(), + } + ) return ret diff --git a/infinigen/core/util/random.py b/infinigen/core/util/random.py index 156466e96..6e5aa9607 100644 --- a/infinigen/core/util/random.py +++ b/infinigen/core/util/random.py @@ -4,28 +4,27 @@ # Authors: Zeyu Ma, Alexander Raistrick -from infinigen.core.util.color import color_category -import gin -import numpy as np -import random -import json import colorsys +import json + import mathutils +import numpy as np from matplotlib import colors -from numpy.random import normal, uniform +from numpy.random import uniform -from infinigen.core.util.math import md5_hash, clip_gaussian -from infinigen.core.init import repo_root +import infinigen +from infinigen.core.util.color import color_category +from infinigen.core.util.math import clip_gaussian def log_uniform(low, high, size=None): return np.exp(uniform(np.log(low), np.log(high), size)) + def sample_json_palette(pallette_name, n_sample=1): - rel = f"infinigen_examples/configs_nature/palette/{pallette_name}.json" - with 
(repo_root()/rel).open('r') as f: + with (infinigen.repo_root() / rel).open("r") as f: color_template = json.load(f) colors = color_template["color"] @@ -41,11 +40,16 @@ def sample_json_palette(pallette_name, n_sample=1): i = np.random.choice(range(len(colors)), 1, p=probs / np.sum(probs))[0] color_samples = [] for j in range(n_sample): - color = np.array(means[i]) + np.matmul(np.array(stds[i]).reshape((3, 3)), np.clip(np.random.randn(3), a_min=-1, a_max=1)) + color = np.array(means[i]) + np.matmul( + np.array(stds[i]).reshape((3, 3)), + np.clip(np.random.randn(3), a_min=-1, a_max=1), + ) color[2] = max(min(color[2], 0.9), 0.1) color = colorsys.hsv_to_rgb(*color) color = np.clip(color, a_min=0, a_max=1) - color = np.where(color >= 0.04045,((color+0.055)/1.055) ** 2.4, color / 12.92) + color = np.where( + color >= 0.04045, ((color + 0.055) / 1.055) ** 2.4, color / 12.92 + ) color = np.concatenate((color, np.ones(1))) color_samples.append(color) if n_sample == 1: @@ -60,7 +64,7 @@ def random_general(var): func, *args = var if func == "weighted_choice": weights, recargs = zip(*args) - p = np.array(weights)/sum(weights) + p = np.array(weights) / sum(weights) i = np.random.choice(np.arange(len(recargs)), p=p) return random_general(recargs[i]) elif func == "spherical_sample": @@ -69,7 +73,9 @@ def random_general(var): # angle distribution from uniform sphere P = np.random.randn(3) x = np.arctan2(np.abs(P[2]), (P[0] ** 2 + P[1] ** 2) ** 0.5) - if (min_elevation is None or x > np.radians(min_elevation)) and (max_elevation is None or x < np.radians(max_elevation)): + if (min_elevation is None or x > np.radians(min_elevation)) and ( + max_elevation is None or x < np.radians(max_elevation) + ): break return np.degrees(x) elif func == "uniform": @@ -88,9 +94,9 @@ def random_general(var): return np.random.uniform() < args[0] elif func == "choice": return np.random.choice(args[0], 1, p=args[1] if len(args) > 1 else None)[0] - elif func == 'categorical': + elif func == "categorical": prob = np.array(args) - return np.random.choice(np.arange(len(args)), p=prob/prob.sum()) + return np.random.choice(np.arange(len(args)), p=prob / prob.sum()) elif func == "palette": return sample_json_palette(*args) elif func == "color_category": @@ -100,7 +106,9 @@ def random_general(var): def random_vector3(): - return mathutils.Vector((np.random.randint(999), np.random.randint(999), np.random.randint(999))) + return mathutils.Vector( + (np.random.randint(999), np.random.randint(999), np.random.randint(999)) + ) def _rgb_to_hsv(rgb): @@ -126,9 +134,16 @@ def _hsv_to_rgb(hsv, a): def random_color_neighbour( - rgb, hue_diff=0.0, sat_diff=0.0, val_diff=0.0, - only_less_hue=False, only_less_sat=False, only_less_val=False, - only_more_hue=False, only_more_sat=False, only_more_val=False, + rgb, + hue_diff=0.0, + sat_diff=0.0, + val_diff=0.0, + only_less_hue=False, + only_less_sat=False, + only_less_val=False, + only_more_hue=False, + only_more_sat=False, + only_more_val=False, ): """ returns a random color in the neighbourhood of the given one @@ -162,20 +177,14 @@ def sample(x, diff, low=0, high=1, only_less=False, only_more=False): if diff is None: out = np.random.uniform(low, high) else: - lb = max(0, x) if only_more else max(0, x-diff) - ub = min(1, x) if only_less else min(1, x+diff) + lb = max(0, x) if only_more else max(0, x - diff) + ub = min(1, x) if only_less else min(1, x + diff) out = np.random.uniform(lb, ub) return out - hsv[0] = sample( - hsv[0], hue_diff, only_less=only_less_hue, - only_more=only_more_hue) - 
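The tuple-based distribution specs accepted by `random_general` above can be confusing on first read; a small illustrative sketch of the branches visible in this hunk, assuming the package and its `mathutils`/`bpy` dependency are importable (option values are arbitrary):

```python
import numpy as np

from infinigen.core.util.random import random_general

np.random.seed(0)

# "categorical": returns an index, weighted by the (unnormalized) probabilities.
idx = random_general(("categorical", 0.7, 0.2, 0.1))
assert idx in (0, 1, 2)

# "choice": picks one of the listed options (optionally weighted).
kind = random_general(("choice", ["grass", "ferns", "rocks"]))

# "weighted_choice": picks a sub-spec by weight, then evaluates it recursively.
nested = random_general(
    ("weighted_choice", (3, ("categorical", 0.5, 0.5)), (1, ("choice", ["a", "b"])))
)
```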
hsv[1] = sample( - hsv[1], sat_diff, only_less=only_less_sat, - only_more=only_more_sat) - hsv[2] = sample( - hsv[2], val_diff, only_less=only_less_val, - only_more=only_more_val) + hsv[0] = sample(hsv[0], hue_diff, only_less=only_less_hue, only_more=only_more_hue) + hsv[1] = sample(hsv[1], sat_diff, only_less=only_less_sat, only_more=only_more_sat) + hsv[2] = sample(hsv[2], val_diff, only_less=only_less_val, only_more=only_more_val) rgb = _hsv_to_rgb(hsv, a) @@ -200,11 +209,17 @@ def clip_hsv(rgb, max_h=None, max_s=None, max_v=None): return rgb + def random_color(brightness_lim=1): - return (np.random.randint(256) / 256. * brightness_lim, np.random.randint(256) / 256. * brightness_lim, np.random.randint(256) / 256. * brightness_lim, 1) + return ( + np.random.randint(256) / 256.0 * brightness_lim, + np.random.randint(256) / 256.0 * brightness_lim, + np.random.randint(256) / 256.0 * brightness_lim, + 1, + ) + def sample_registry(reg): classes, weights = zip(*reg) weights = np.array(weights) - return np.random.choice(classes, p=weights/weights.sum()) - + return np.random.choice(classes, p=weights / weights.sum()) diff --git a/infinigen/datagen/configs/compute_platform/local_256GB.gin b/infinigen/datagen/configs/compute_platform/local_256GB.gin index babc757f8..b6efef024 100644 --- a/infinigen/datagen/configs/compute_platform/local_256GB.gin +++ b/infinigen/datagen/configs/compute_platform/local_256GB.gin @@ -8,8 +8,6 @@ LocalScheduleHandler.jobs_per_gpu = 1 jobs_to_launch_next.max_queued_total = 1 jobs_to_launch_next.max_stuck_at_task = 4 -# get_cmd.blender_thread_limit = 8 # no longer supported with pip bpy - # All will run locally, LocalScheduleHandler doesnt actually enforce cpu/ram constraints currently queue_coarse.submit_cmd = @local_submit_cmd queue_fine_terrain.submit_cmd = @local_submit_cmd diff --git a/infinigen/datagen/configs/compute_platform/slurm_cpuheavy.gin b/infinigen/datagen/configs/compute_platform/slurm_cpuheavy.gin index a13d74e58..6fde63e40 100644 --- a/infinigen/datagen/configs/compute_platform/slurm_cpuheavy.gin +++ b/infinigen/datagen/configs/compute_platform/slurm_cpuheavy.gin @@ -1,4 +1,4 @@ -include 'infinigen/datagen/compute_platform/slurm.gin' +include 'infinigen/datagen/configs/compute_platform/slurm.gin' iterate_scene_tasks.view_block_size = 2 diff --git a/infinigen/datagen/configs/data_schema/monocular_flow.gin b/infinigen/datagen/configs/data_schema/monocular_flow.gin index 0bee65ae8..f92f9e9d4 100644 --- a/infinigen/datagen/configs/data_schema/monocular_flow.gin +++ b/infinigen/datagen/configs/data_schema/monocular_flow.gin @@ -1,4 +1,4 @@ -include 'infinigen/datagen/data_schema/monocular.gin' +include 'infinigen/datagen/configs/data_schema/monocular.gin' iterate_scene_tasks.frame_range=(1, 2) diff --git a/infinigen/datagen/configs/data_schema/stereo_video.gin b/infinigen/datagen/configs/data_schema/stereo_video.gin index 0f3082e03..48caec7f5 100644 --- a/infinigen/datagen/configs/data_schema/stereo_video.gin +++ b/infinigen/datagen/configs/data_schema/stereo_video.gin @@ -1,2 +1,2 @@ -include 'infinigen/datagen/data_schema/monocular_video.gin' +include 'infinigen/datagen/configs/data_schema/monocular_video.gin' iterate_scene_tasks.cam_id_ranges = [1, 2] diff --git a/infinigen/datagen/customgt/show.py b/infinigen/datagen/customgt/show.py index 11534077f..3e1964dff 100644 --- a/infinigen/datagen/customgt/show.py +++ b/infinigen/datagen/customgt/show.py @@ -9,15 +9,15 @@ import matplotlib.pyplot as plt import numpy as np -from matplotlib.text import Text from 
imageio import imread +from matplotlib.text import Text np.random.seed(42) if __name__ == "__main__": parser = argparse.ArgumentParser() - parser.add_argument('input_path', type=Path) - parser.add_argument('pos', type=int, nargs='*') + parser.add_argument("input_path", type=Path) + parser.add_argument("pos", type=int, nargs="*") args = parser.parse_args() if args.input_path.suffix == ".npy": @@ -27,19 +27,19 @@ if len(args.pos) > 0: assert len(args.pos) == 4 - x1,y1,x2,y2 = args.pos - image = image[y1:y2+1, x1:x2+1] + x1, y1, x2, y2 = args.pos + image = image[y1 : y2 + 1, x1 : x2 + 1] fig = plt.figure() ax = fig.add_subplot(111) ax.imshow(image) - textvar: Text = ax.text(0, -15, "", style='italic') + textvar: Text = ax.text(0, -15, "", style="italic") def hover(event): if event.xdata is not None: x, y = round(event.xdata), round(event.ydata) - val = image[y,x] + val = image[y, x] if len(args.pos) > 0: x += x1 y += y1 @@ -51,6 +51,6 @@ def hover(event): fig.canvas.draw_idle() # add callback for mouse moves - fig.canvas.mpl_connect('motion_notify_event', hover) + fig.canvas.mpl_connect("motion_notify_event", hover) - plt.show() \ No newline at end of file + plt.show() diff --git a/infinigen/datagen/job_funcs.py b/infinigen/datagen/job_funcs.py index a10906fcd..32846529e 100644 --- a/infinigen/datagen/job_funcs.py +++ b/infinigen/datagen/job_funcs.py @@ -1,78 +1,87 @@ # Copyright (c) Princeton University. # This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. -# Authors: +# Authors: # - Alex Raistrick: refactor, local rendering, video rendering # - Lahav Lipson: stereo version, local rendering # - David Yan: export integration # - Hei Law: initial version +import logging import re -import gin -from copy import copy -from uuid import uuid4 +import sys from functools import partial from pathlib import Path -from shutil import copytree -import logging -import sys +from shutil import copytree +from uuid import uuid4 -from infinigen.datagen.util.show_gpu_table import nodes_with_gpus +import gin + +import infinigen from infinigen.datagen.util import upload_util -from infinigen.datagen.util.upload_util import upload_job_folder -from infinigen.datagen.states import get_suffix +from infinigen.datagen.util.show_gpu_table import nodes_with_gpus +from infinigen.datagen.util.upload_util import upload_job_folder +from infinigen.tools.suffixes import get_suffix from . 
import states -from infinigen.core.init import repo_root - logger = logging.getLogger(__name__) + @gin.configurable def get_cmd( - seed, - task, - configs, - taskname, - output_folder, - driver_script='infinigen_examples.generate_nature', # replace with a regular path to a .py, or another installed module - input_folder=None, + seed, + task, + configs, + taskname, + output_folder, + driver_script="infinigen_examples.generate_nature", # replace with a regular path to a .py, or another installed module + input_folder=None, process_niceness=None, ): - if isinstance(task, list): task = " ".join(task) - cmd = '' + cmd = "" if process_niceness is not None: - cmd += f'nice -n {process_niceness} ' - cmd += f'{sys.executable} ' + cmd += f"nice -n {process_niceness} " + cmd += f"{sys.executable} " - if driver_script.endswith('.py'): - cmd += driver_script + ' ' + if driver_script.endswith(".py"): + cmd += driver_script + " " else: - cmd += '-m ' + driver_script + ' ' + cmd += "-m " + driver_script + " " # No longer supported using pip bpy - #if blender_thread_limit is not None: + # if blender_thread_limit is not None: # cmd += f'--threads {blender_thread_limit} ' - - cmd += '-- ' + + cmd += "-- " if input_folder is not None: - cmd += '--input_folder ' + str(input_folder) + ' ' + cmd += "--input_folder " + str(input_folder) + " " if output_folder is not None: - cmd += '--output_folder ' + str(output_folder) + ' ' - cmd += f'--seed {seed} --task {task} --task_uniqname {taskname} ' + cmd += "--output_folder " + str(output_folder) + " " + cmd += f"--seed {seed} --task {task} --task_uniqname {taskname} " if len(configs) != 0: - cmd += f'-g {" ".join(configs)} ' - cmd += '-p' - + cmd += f'-g {" ".join(configs)} ' + cmd += "-p" + return cmd.split() + @gin.configurable -def queue_upload(folder, submit_cmd, name, taskname, dir_prefix_len=0, method='rclone', seed=None, **kwargs): +def queue_upload( + folder, + submit_cmd, + name, + taskname, + dir_prefix_len=0, + method="rclone", + seed=None, + **kwargs, +): func = partial(upload_job_folder, dir_prefix_len=dir_prefix_len, method=method) res = submit_cmd((func, folder, taskname), folder, name, **kwargs) return res, None @@ -88,27 +97,35 @@ def queue_export( taskname=None, exclude_gpus=[], overrides=[], - input_indices=None, output_indices=None, - **kwargs + input_indices=None, + output_indices=None, + **kwargs, ): input_suffix = get_suffix(input_indices) - input_folder=f'{folder}/coarse{input_suffix}' - - cmd = get_cmd(seed, 'export', configs, taskname, output_folder=f'{folder}/frames', input_folder=input_folder)+ f''' + input_folder = f"{folder}/coarse{input_suffix}" + + cmd = ( + get_cmd( + seed, + "export", + configs, + taskname, + output_folder=f"{folder}/frames", + input_folder=input_folder, + ) + + f""" LOG_DIR='{folder / "logs"}' - '''.split("\n") + overrides + """.split("\n") + + overrides + ) - with (folder / "run_pipeline.sh").open('a') as f: + with (folder / "run_pipeline.sh").open("a") as f: f.write(f"{' '.join(' '.join(cmd).split())}\n\n") - res = submit_cmd(cmd, - folder=folder, - name=name, - gpus=0, - **kwargs - ) + res = submit_cmd(cmd, folder=folder, name=name, gpus=0, **kwargs) return res, folder + @gin.configurable def queue_coarse( folder, @@ -119,38 +136,45 @@ def queue_coarse( taskname=None, exclude_gpus=[], overrides=[], - input_indices=None, output_indices=None, - **kwargs + input_indices=None, + output_indices=None, + **kwargs, ): """ Generating the coarse scene """ - input_suffix = get_suffix(input_indices) + 
get_suffix(input_indices) output_suffix = get_suffix(output_indices) - output_folder = Path(f'{folder}/coarse{output_suffix}') + output_folder = Path(f"{folder}/coarse{output_suffix}") - cmd = get_cmd(seed, 'coarse', configs, taskname, output_folder=output_folder) + f''' + cmd = ( + get_cmd(seed, "coarse", configs, taskname, output_folder=output_folder) + + f""" LOG_DIR='{folder / "logs"}' - '''.split("\n") + overrides + """.split("\n") + + overrides + ) commit = upload_util.get_commit_hash() - with (folder / "run_pipeline.sh").open('w') as f: + with (folder / "run_pipeline.sh").open("w") as f: f.write(f"# git checkout {commit}\n\n") f.write(f"{' '.join(' '.join(cmd).split())}\n\n") (folder / "run_pipeline.sh").chmod(0o774) - res = submit_cmd(cmd, + res = submit_cmd( + cmd, folder=folder, name=name, gpus=0, slurm_exclude=nodes_with_gpus(*exclude_gpus), - **kwargs + **kwargs, ) return res, output_folder + @gin.configurable def queue_populate( submit_cmd, @@ -161,7 +185,8 @@ def queue_populate( taskname=None, input_prefix="fine", overrides=[], - input_indices=None, output_indices=None, + input_indices=None, + output_indices=None, **kwargs, ): """ @@ -169,28 +194,33 @@ def queue_populate( """ input_suffix = get_suffix(input_indices) - output_suffix = get_suffix(output_indices) + get_suffix(output_indices) - input_folder = folder/f'{input_prefix}{input_suffix}' + input_folder = folder / f"{input_prefix}{input_suffix}" output_folder = input_folder - cmd = get_cmd(seed, 'populate', configs, taskname, - input_folder=input_folder, - output_folder=output_folder) + f''' + cmd = ( + get_cmd( + seed, + "populate", + configs, + taskname, + input_folder=input_folder, + output_folder=output_folder, + ) + + f""" LOG_DIR='{folder / "logs"}' - '''.split("\n") + overrides + """.split("\n") + + overrides + ) - with (folder / "run_pipeline.sh").open('a') as f: + with (folder / "run_pipeline.sh").open("a") as f: f.write(f"{' '.join(' '.join(cmd).split())}\n\n") - res = submit_cmd(cmd, - folder=folder, - name=name, - gpus=0, - **kwargs - ) + res = submit_cmd(cmd, folder=folder, name=name, gpus=0, **kwargs) return res, output_folder + @gin.configurable def queue_fine_terrain( submit_cmd, @@ -202,8 +232,9 @@ def queue_fine_terrain( taskname=None, exclude_gpus=[], overrides=[], - input_indices=None, output_indices=None, - **kwargs + input_indices=None, + output_indices=None, + **kwargs, ): """ Generating the fine scene @@ -212,28 +243,39 @@ def queue_fine_terrain( input_suffix = get_suffix(input_indices) output_suffix = get_suffix(output_indices) - output_folder = Path(f'{folder}/fine{output_suffix}') + output_folder = Path(f"{folder}/fine{output_suffix}") enable_gpu_in_terrain = "Terrain.device='cuda'" if gpus > 0 else "" - cmd = get_cmd(seed, 'fine_terrain', configs, taskname, - input_folder=f'{folder}/coarse{input_suffix}', - output_folder=output_folder) + f''' + cmd = ( + get_cmd( + seed, + "fine_terrain", + configs, + taskname, + input_folder=f"{folder}/coarse{input_suffix}", + output_folder=output_folder, + ) + + f""" LOG_DIR='{folder / "logs"}' {enable_gpu_in_terrain} - '''.split("\n") + overrides + """.split("\n") + + overrides + ) - with (folder / "run_pipeline.sh").open('a') as f: + with (folder / "run_pipeline.sh").open("a") as f: f.write(f"{' '.join(' '.join(cmd).split())}\n\n") - res = submit_cmd(cmd, + res = submit_cmd( + cmd, folder=folder, name=name, gpus=gpus, slurm_exclude=nodes_with_gpus(*exclude_gpus), - **kwargs + **kwargs, ) return res, output_folder + @gin.configurable def queue_combined( 
submit_cmd, @@ -246,40 +288,53 @@ def queue_combined( gpus=0, overrides=[], include_coarse=True, - input_indices=None, output_indices=None, - **kwargs + input_indices=None, + output_indices=None, + **kwargs, ): - input_suffix = get_suffix(input_indices) output_suffix = get_suffix(output_indices) - tasks = 'populate fine_terrain' + tasks = "populate fine_terrain" if include_coarse: - tasks = 'coarse ' + tasks + tasks = "coarse " + tasks - output_folder = Path(f'{folder}/fine{output_suffix}') + output_folder = Path(f"{folder}/fine{output_suffix}") enable_gpu_in_terrain = "Terrain.device='cuda'" if gpus > 0 else "" - cmd = get_cmd(seed, tasks, configs, taskname, - input_folder=f'{folder}/coarse{input_suffix}' if not include_coarse else None, - output_folder=output_folder) + f''' + cmd = ( + get_cmd( + seed, + tasks, + configs, + taskname, + input_folder=f"{folder}/coarse{input_suffix}" + if not include_coarse + else None, + output_folder=output_folder, + ) + + f""" LOG_DIR='{folder / "logs"}' {enable_gpu_in_terrain} - '''.split("\n") + overrides + """.split("\n") + + overrides + ) - with (folder / "run_pipeline.sh").open('a') as f: + with (folder / "run_pipeline.sh").open("a") as f: f.write(f"{' '.join(' '.join(cmd).split())}\n\n") - res = submit_cmd(cmd, + res = submit_cmd( + cmd, folder=folder, name=name, gpus=gpus, slurm_exclude=nodes_with_gpus(*exclude_gpus), - **kwargs + **kwargs, ) return res, output_folder + @gin.configurable def queue_render( submit_cmd, @@ -291,40 +346,52 @@ def queue_render( taskname=None, overrides=[], exclude_gpus=[], - input_indices=None, output_indices=None, - **submit_kwargs + input_indices=None, + output_indices=None, + **submit_kwargs, ): - input_suffix = get_suffix(input_indices) output_suffix = get_suffix(output_indices) - output_folder = Path(f'{folder}/frames{output_suffix}') + output_folder = Path(f"{folder}/frames{output_suffix}") input_folder_priority_options = [ f"fine{input_suffix}", "fine", f"coarse{input_suffix}", - "coarse" + "coarse", ] for option in input_folder_priority_options: - input_folder = f'{folder}/{option}' - if (Path(input_folder)/'scene.blend').exists(): + input_folder = f"{folder}/{option}" + if (Path(input_folder) / "scene.blend").exists(): break else: - raise ValueError(f'No scene.blend found in {input_folder} for any of {input_folder_priority_options}') - - cmd = get_cmd(seed, "render", configs, taskname, - input_folder=input_folder, - output_folder=f'{output_folder}') + f''' + logger.warning( + f"No scene.blend found in {input_folder} for any of {input_folder_priority_options}" + ) + + cmd = ( + get_cmd( + seed, + "render", + configs, + taskname, + input_folder=input_folder, + output_folder=f"{output_folder}", + ) + + f""" render.render_image_func=@{render_type}/render_image LOG_DIR='{folder / "logs"}' - '''.split("\n") + overrides + """.split("\n") + + overrides + ) - with (folder / "run_pipeline.sh").open('a') as f: + with (folder / "run_pipeline.sh").open("a") as f: f.write(f"{' '.join(' '.join(cmd).split())}\n\n") - res = submit_cmd(cmd, + res = submit_cmd( + cmd, folder=folder, name=name, slurm_exclude=nodes_with_gpus(*exclude_gpus), @@ -332,6 +399,7 @@ def queue_render( ) return res, output_folder + @gin.configurable def queue_mesh_save( submit_cmd, @@ -342,31 +410,41 @@ def queue_mesh_save( taskname=None, overrides=[], exclude_gpus=[], - input_indices=None, output_indices=None, + input_indices=None, + output_indices=None, reuse_subcams=True, - **submit_kwargs + **submit_kwargs, ): - - if (output_indices['subcam'] > 0) and 
reuse_subcams: + if (output_indices["subcam"] > 0) and reuse_subcams: return states.JOB_OBJ_SUCCEEDED, None input_suffix = get_suffix(input_indices) output_suffix = get_suffix(output_indices) - output_folder = Path(f'{folder}/savemesh{output_suffix}') + output_folder = Path(f"{folder}/savemesh{output_suffix}") output_folder.mkdir(parents=True, exist_ok=True) - cmd = get_cmd(seed, "mesh_save", configs, taskname, - input_folder=f'{folder}/coarse{input_suffix}', - output_folder=f'{folder}/savemesh{output_suffix}') + f''' + cmd = ( + get_cmd( + seed, + "mesh_save", + configs, + taskname, + input_folder=f"{folder}/coarse{input_suffix}", + output_folder=f"{folder}/savemesh{output_suffix}", + ) + + f""" LOG_DIR='{folder / "logs"}' - '''.split("\n") + overrides + """.split("\n") + + overrides + ) - with (folder / "run_pipeline.sh").open('a') as f: + with (folder / "run_pipeline.sh").open("a") as f: f.write(f"{' '.join(' '.join(cmd).split())}\n\n") - res = submit_cmd(cmd, + res = submit_cmd( + cmd, folder=folder, name=name, slurm_exclude=nodes_with_gpus(*exclude_gpus), @@ -374,9 +452,13 @@ def queue_mesh_save( ) return res, output_folder -process_mesh_path = Path(__file__).parent/'customgt'/'build'/'customgt' + +process_mesh_path = Path(__file__).parent / "customgt" / "build" / "customgt" if not process_mesh_path.exists(): - logger.warning(f'{process_mesh_path=} does not exist, if opengl_gt is enabled it will fail') + logger.warning( + f"{process_mesh_path=} does not exist, if opengl_gt is enabled it will fail" + ) + @gin.configurable def queue_opengl( @@ -388,58 +470,60 @@ def queue_opengl( taskname=None, overrides=[], exclude_gpus=[], - input_indices=None, output_indices=None, + input_indices=None, + output_indices=None, reuse_subcams=True, gt_testing=False, - **submit_kwargs + **submit_kwargs, ): - - if (output_indices['subcam'] > 0) and reuse_subcams: + if (output_indices["subcam"] > 0) and reuse_subcams: return states.JOB_OBJ_SUCCEEDED, None output_suffix = get_suffix(output_indices) - - input_folder = Path(folder)/f'savemesh{output_suffix}' # OUTPUT SUFFIX IS CORRECT HERE. I know its weird. But input suffix really means 'prev tier of the pipeline - if (gt_testing): + + input_folder = ( + Path(folder) / f"savemesh{output_suffix}" + ) # OUTPUT SUFFIX IS CORRECT HERE. I know its weird. 
But input suffix really means 'prev tier of the pipeline + if gt_testing: copy_folder = Path(folder) / f"frames{output_suffix}" - output_folder = Path(folder) / f"opengl_frames{output_suffix}" + output_folder = Path(folder) / f"opengl_frames{output_suffix}" copytree(copy_folder, output_folder, dirs_exist_ok=True) - else: + else: output_folder = Path(folder) / f"frames{output_suffix}" output_folder.mkdir(exist_ok=True) assert input_folder.exists(), input_folder - assert isinstance(overrides, list) and ("\n" not in ' '.join(overrides)) + assert isinstance(overrides, list) and ("\n" not in " ".join(overrides)) tmp_script = Path(folder) / "tmp" / f"opengl_{uuid4().hex}.sh" tmp_script.parent.mkdir(exist_ok=True) - start_frame, end_frame = output_indices['frame'], output_indices['last_cam_frame'] - with tmp_script.open('w') as f: - + start_frame, end_frame = output_indices["frame"], output_indices["last_cam_frame"] + with tmp_script.open("w") as f: lines = ["set -e"] - + lines += [ f"{process_mesh_path} -in {input_folder} " f"--frame {frame_idx} -out {output_folder}" for frame_idx in range(start_frame, end_frame + 1) ] - - - lines.append(f"{sys.executable} {repo_root()/'infinigen/tools/compress_masks.py'} {output_folder}") lines.append( - f"{sys.executable} -c \"from infinigen.tools.datarelease_toolkit import reorganize_old_framesfolder; " - f"reorganize_old_framesfolder({repr(str(output_folder))})\"" + f"{sys.executable} {infinigen.repo_root()/'infinigen/tools/compress_masks.py'} {output_folder}" + ) + + lines.append( + f'{sys.executable} -c "from infinigen.tools.datarelease_toolkit import reorganize_old_framesfolder; ' + f'reorganize_old_framesfolder({repr(str(output_folder))})"' ) lines.append(f"touch {folder}/logs/FINISH_{taskname}") for line in lines: line = re.sub("( \([A-Za-z0-9]+\))", "", line) - f.write(line + '\n') + f.write(line + "\n") cmd = f"bash {tmp_script}".split() - with (folder / "run_pipeline.sh").open('a') as f: + with (folder / "run_pipeline.sh").open("a") as f: f.write(f"{' '.join(' '.join(cmd).split())}\n\n") res = submit_cmd( diff --git a/infinigen/datagen/manage_jobs.py b/infinigen/datagen/manage_jobs.py index 15f68123a..0890fd925 100644 --- a/infinigen/datagen/manage_jobs.py +++ b/infinigen/datagen/manage_jobs.py @@ -1,146 +1,141 @@ # Copyright (c) Princeton University. # This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
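Before the `manage_jobs.py` hunks: most of the `queue_*` helpers above build their command line through `get_cmd`, so a worked sketch of what it assembles may help review (seed, folders and configs are placeholder values):

```python
from infinigen.datagen.job_funcs import get_cmd

# Placeholder arguments in the style used by queue_coarse above.
cmd = get_cmd(
    seed="ab12cd34",
    task="coarse",
    configs=["desert.gin"],
    taskname="coarse",
    output_folder="outputs/example/coarse",
)
print(" ".join(cmd))
# <sys.executable> -m infinigen_examples.generate_nature -- \
#     --output_folder outputs/example/coarse --seed ab12cd34 \
#     --task coarse --task_uniqname coarse -g desert.gin -p
```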
-# Authors: +# Authors: # - Alex Raistrick: refactor, local rendering, video rendering # - Lahav Lipson: stereo version, local rendering # - Hei Law: initial version import argparse +import importlib +import itertools import logging +import math import os -import re import random -import gin +import re import subprocess -import time import sys import time -import math -import itertools -import importlib - -from uuid import uuid4 -from enum import Enum -from copy import copy from ast import literal_eval - from collections import defaultdict +from copy import copy from datetime import datetime from pathlib import Path from shutil import which -import pandas as pd +import gin import numpy as np +import pandas as pd import submitit -import submitit.core.utils +import submitit.core.utils from jinja2 import Environment, FileSystemLoader, select_autoescape -ORIG_SYS_PATH = list(sys.path) # Make a new instance of sys.path +# ruff: noqa: E402 +ORIG_SYS_PATH = list(sys.path) # Make a new instance of sys.path import infinigen.core.init -BPY_SYS_PATH = list(sys.path) # Make instance of `bpy`'s modified sys.path +BPY_SYS_PATH = list(sys.path) # Make instance of `bpy`'s modified sys.path + +# ruff: noqa: F401 +from infinigen.datagen.job_funcs import get_cmd from infinigen.datagen.monitor_tasks import iterate_scene_tasks, on_scene_termination -from infinigen.datagen.util import upload_util from infinigen.datagen.states import ( - JobState, - SceneState, - CONCLUDED_JOBSTATES, - JOB_OBJ_SUCCEEDED, - cancel_job + CONCLUDED_JOBSTATES, + JOB_OBJ_SUCCEEDED, + JobState, + SceneState, + cancel_job, ) +from infinigen.datagen.util import upload_util from infinigen.datagen.util.submitit_emulator import ( - ScheduledLocalExecutor, - ImmediateLocalExecutor, - LocalScheduleHandler -) - -from infinigen.datagen import job_funcs -from infinigen.datagen.job_funcs import ( - # referenced by name via gin configs - queue_coarse, - queue_combined, - queue_fine_terrain, - queue_mesh_save, - queue_opengl, - queue_populate, - queue_render, - queue_upload + ImmediateLocalExecutor, + LocalScheduleHandler, + ScheduledLocalExecutor, ) logger = logging.getLogger(__name__) -wandb = None # will be imported and initialized ONLY if installed and enabled +wandb = None # will be imported and initialized ONLY if installed and enabled # used only if enabled in gin configs -PARTITION_ENVVAR = 'INFINIGEN_SLURMPARTITION' -EXCLUDE_FILE_ENVVAR = 'INFINIGEN_SLURM_EXCLUDENODES_LIST' -NUM_CONCURRENT_ENVVAR = 'INFINIGEN_NUMCONCURRENT_TARGET' +PARTITION_ENVVAR = "INFINIGEN_SLURMPARTITION" +EXCLUDE_FILE_ENVVAR = "INFINIGEN_SLURM_EXCLUDENODES_LIST" +NUM_CONCURRENT_ENVVAR = "INFINIGEN_NUMCONCURRENT_TARGET" -def node_from_slurm_jobid(scene_id): - if not which('sacct'): +def node_from_slurm_jobid(scene_id): + if not which("sacct"): return None - + try: - node_of_scene, *rest = subprocess.check_output(f"{which('sacct')} -j {scene_id} --format Node --noheader".split()).decode().split() + node_of_scene, *rest = ( + subprocess.check_output( + f"{which('sacct')} -j {scene_id} --format Node --noheader".split() + ) + .decode() + .split() + ) return node_of_scene except Exception as e: - logger.warning(f'sacct threw {e}') + logger.warning(f"sacct threw {e}") return None + def seed_generator(): seed_int = np.random.randint(np.iinfo(np.int32).max) - return hex(seed_int).removeprefix('0x') + return hex(seed_int).removeprefix("0x") + @gin.configurable def get_slurm_banned_nodes(config_path=None): - if config_path == f'ENVVAR_{EXCLUDE_FILE_ENVVAR}': + if config_path 
== f"ENVVAR_{EXCLUDE_FILE_ENVVAR}": config_path = os.environ.get(EXCLUDE_FILE_ENVVAR) if config_path is None: return [] - with Path(config_path).open('r') as f: + with Path(config_path).open("r") as f: return list(f.read().split()) + @gin.configurable def slurm_submit_cmd( - cmd, - folder, - name, - mem_gb=None, - cpus=None, - gpus=0, - hours=1, - slurm_account=None, + cmd, + folder, + name, + mem_gb=None, + cpus=None, + gpus=0, + hours=1, + slurm_account=None, slurm_partition=None, - slurm_exclude: list = None, + slurm_exclude: list = None, slurm_niceness=None, - **_ + **_, ): - executor = submitit.AutoExecutor(folder=(folder / "logs")) executor.update_parameters( mem_gb=mem_gb, name=name, cpus_per_task=cpus, - timeout_min=60*hours, + timeout_min=60 * hours, ) - + exclude = get_slurm_banned_nodes() if slurm_exclude is not None: exclude += slurm_exclude if len(exclude): - executor.update_parameters(slurm_exclude=','.join(exclude)) - + executor.update_parameters(slurm_exclude=",".join(exclude)) + if gpus > 0: executor.update_parameters(gpus_per_node=gpus) if slurm_account is not None: - - if slurm_account == f'ENVVAR_{PARTITION_ENVVAR}': + if slurm_account == f"ENVVAR_{PARTITION_ENVVAR}": slurm_account = os.environ.get(PARTITION_ENVVAR) if slurm_account is None: - logger.warning(f'{PARTITION_ENVVAR=} was not set, using no slurm account') + logger.warning( + f"{PARTITION_ENVVAR=} was not set, using no slurm account" + ) if isinstance(slurm_account, list): slurm_account = np.random.choice(slurm_account) @@ -150,10 +145,10 @@ def slurm_submit_cmd( slurm_additional_params = {} if slurm_niceness is not None: - slurm_additional_params['nice'] = slurm_niceness + slurm_additional_params["nice"] = slurm_niceness if slurm_partition is not None: - slurm_additional_params['partition'] = slurm_partition + slurm_additional_params["partition"] = slurm_partition executor.update_parameters(slurm_additional_parameters=slurm_additional_params) @@ -169,9 +164,9 @@ def slurm_submit_cmd( print(f"[{current_time_str}] Job submission failed with error:\n{e}") time.sleep(60) + @gin.configurable def local_submit_cmd(cmd, folder, name, use_scheduler=False, **kwargs): - ExecutorClass = ScheduledLocalExecutor if use_scheduler else ImmediateLocalExecutor executor = ExecutorClass(folder=(folder / "logs")) executor.update_parameters(name=name, **kwargs) @@ -182,13 +177,13 @@ def local_submit_cmd(cmd, folder, name, use_scheduler=False, **kwargs): func = submitit.helpers.CommandFunction(cmd) return executor.submit(func) -def init_db_from_existing(output_folder: Path): +def init_db_from_existing(output_folder: Path): # TODO in future: directly use existing_db (with some cleanup / checking). 
-    db_path = output_folder/'scenes_db.csv'
+    db_path = output_folder / "scenes_db.csv"
     if not db_path.exists():
-        raise ValueError(f'Received --use_existing but {db_path=} did not exist')
+        raise ValueError(f"Received --use_existing but {db_path=} did not exist")
     existing_db = pd.read_csv(db_path, converters={"configs": literal_eval})

     def init_scene(seed_folder):
@@ -196,124 +191,128 @@ def init_scene(seed_folder):
             return None
         if seed_folder.is_symlink() and not seed_folder.readlink().is_dir():
             return None
-        if not (seed_folder/'logs').exists():
-            logger.warning(f'Skipping {seed_folder=} due to missing "logs" subdirectory')
+        if not (seed_folder / "logs").exists():
+            logger.warning(
+                f'Skipping {seed_folder=} due to missing "logs" subdirectory'
+            )
             return None
         scene_dict = {
-            'seed': seed_folder.name,
-            'all_done': SceneState.NotDone,
+            "seed": seed_folder.name,
+            "all_done": SceneState.NotDone,
         }
-        if 'configs' in existing_db.columns:
-            mask = (existing_db["seed"].astype(str) == seed_folder.name)
+        if "configs" in existing_db.columns:
+            mask = existing_db["seed"].astype(str) == seed_folder.name
             if not mask.any():
                 raise ValueError(f"Couldn't find configs for {seed_folder.name}")
             configs = existing_db.loc[mask, "configs"].iloc[0]
-            scene_dict['configs'] = list(configs)
+            scene_dict["configs"] = list(configs)
-        finish_key = 'FINISH_'
-        for finish_file_name in (seed_folder/'logs').glob(finish_key + '*'):
-            taskname = os.path.basename(finish_file_name)[len(finish_key):]
-            logger.info(f'Marking {seed_folder.name=} {taskname=} as completed')
-            scene_dict[f'{taskname}_submitted'] = True
-            scene_dict[f'{taskname}_job_obj'] = JOB_OBJ_SUCCEEDED
+        finish_key = "FINISH_"
+        for finish_file_name in (seed_folder / "logs").glob(finish_key + "*"):
+            taskname = os.path.basename(finish_file_name)[len(finish_key) :]
+            logger.info(f"Marking {seed_folder.name=} {taskname=} as completed")
+            scene_dict[f"{taskname}_submitted"] = True
+            scene_dict[f"{taskname}_job_obj"] = JOB_OBJ_SUCCEEDED
         return scene_dict
     return [init_scene(seed_folder) for seed_folder in output_folder.iterdir()]
-def _sample_config_distribution(i: int, config_distribution: list[tuple[str, float]], config_sample_mode: str):
-
+
+def _sample_config_distribution(
+    i: int, config_distribution: list[tuple[str, float]], config_sample_mode: str
+):
     match config_sample_mode:
-        case 'random':
-            configs_options, weights = zip(*config_distribution) # list of rows to list per column
+        case "random":
+            configs_options, weights = zip(
+                *config_distribution
+            )  # list of rows to list per column
             ps = np.array(weights) / sum(weights)
             return np.random.choice(configs_options, p=ps)
-        case 'roundrobin':
-            configs_options, weights = zip(*config_distribution) # list of rows to list per column
+        case "roundrobin":
+            configs_options, weights = zip(
+                *config_distribution
+            )  # list of rows to list per column
             if not all(isinstance(w, int) for w in weights):
-                raise ValueError(f'{config_sample_mode=} expects integer scene counts as weights but got {weights=} with non-integer values')
+                raise ValueError(
+                    f"{config_sample_mode=} expects integer scene counts as weights but got {weights=} with non-integer values"
+                )
             idx = np.argmin(i % sum(weights) + 1 > np.cumsum(weights))
             return configs_options[idx]
         case _:
-            raise ValueError(f'Unrecognized {config_sample_mode=}')
+            raise ValueError(f"Unrecognized {config_sample_mode=}")

 @gin.configurable
 def sample_scene_spec(
-    args: argparse.Namespace,
-    i: int,
-    seed_range=None,
-    config_distribution=None,
-
config_sample_mode='random' + args: argparse.Namespace, + i: int, + seed_range=None, + config_distribution=None, + config_sample_mode="random", ): - if seed_range is None: seed = seed_generator() else: start, end = seed_range if i > end - start: return None - seed = hex(start + i).removeprefix('0x') + seed = hex(start + i).removeprefix("0x") if config_distribution is None: config_distribution = [] - conf_keys = {k.split('.')[0] for k, _ in config_distribution} - arg_confs = {k.split('.')[0] for k in args.configs} + conf_keys = {k.split(".")[0] for k, _ in config_distribution} + arg_confs = {k.split(".")[0] for k in args.configs} inter = conf_keys.intersection(arg_confs) if len(inter) == 0: - configs = _sample_config_distribution(i, config_distribution, config_sample_mode) + configs = _sample_config_distribution( + i, config_distribution, config_sample_mode + ) elif len(inter) == 1: configs = list(inter) else: - raise ValueError(f'Got user specified configs {inter}, only 1 is expected') - - + raise ValueError(f"Got user specified configs {inter}, only 1 is expected") + if isinstance(configs, str) and " " in configs: configs = configs.split(" ") if not isinstance(configs, list): configs = [configs] - return { - "all_done": SceneState.NotDone, - "seed": seed, - 'configs': configs - } + return {"all_done": SceneState.NotDone, "seed": seed, "configs": configs} + @gin.configurable def init_db(args): - if args.use_existing: scenes = init_db_from_existing(args.output_folder) elif args.specific_seed is not None: scenes = [ - { - "seed": s, - "configs": args.configs, - "all_done": SceneState.NotDone - } + {"seed": s, "configs": args.configs, "all_done": SceneState.NotDone} for s in args.specific_seed ] else: - scenes = [sample_scene_spec(args, i) for i in range(args.num_scenes)] + scenes = [sample_scene_spec(args, i) for i in range(args.num_scenes)] scenes = [s for s in scenes if s is not None] if len(scenes) < args.num_scenes: - logger.warning(f'Initialized only {len(scenes)=} despite {args.num_scenes=}. Likely due to --use_existing, --specific_seed or seed_range.') + logger.warning( + f"Initialized only {len(scenes)=} despite {args.num_scenes=}. Likely due to --use_existing, --specific_seed or seed_range." 
+ ) return scenes + def update_symlink(scene_folder, scenes): for new_name, scene in scenes: - if scene == JOB_OBJ_SUCCEEDED: continue elif isinstance(scene, str): - raise ValueError(f'Failed due to {scene=}') + raise ValueError(f"Failed due to {scene=}") to = scene_folder / "logs" / f"{new_name}.out" std_out = scene_folder / "logs" / f"{scene.job_id}_0_log.out" @@ -322,15 +321,21 @@ def update_symlink(scene_folder, scenes): os.unlink(to) os.unlink(scene_folder / "logs" / f"{new_name}.err") os.symlink(std_out.resolve(), to) - os.symlink(std_out.with_suffix('.err').resolve(), scene_folder / "logs" / f"{new_name}.err") + os.symlink( + std_out.with_suffix(".err").resolve(), + scene_folder / "logs" / f"{new_name}.err", + ) + def get_disk_usage(folder): - out = subprocess.check_output(f"df -h {folder.resolve()}".replace(" (Princeton)", "").split()).decode() + out = subprocess.check_output( + f"df -h {folder.resolve()}".replace(" (Princeton)", "").split() + ).decode() return int(re.compile("[\s\S]* ([0-9]+)% [\s\S]*").fullmatch(out).group(1)) / 100 -def make_html_page(output_path, scenes, frame, camera_pair_id, **kwargs): - template_path = infinigen.core.init.repo_root()/"infinigen/datagen/util" +def make_html_page(output_path, scenes, frame, camera_pair_id, **kwargs): + template_path = infinigen.repo_root() / "infinigen/datagen/util" assert template_path.exists(), template_path env = Environment( loader=FileSystemLoader(template_path), @@ -338,56 +343,49 @@ def make_html_page(output_path, scenes, frame, camera_pair_id, **kwargs): ) template = env.get_template("template.html") - seeds = [scene['seed'] for scene in scenes] - html = template.render( + seeds = [scene["seed"] for scene in scenes] + html = template.render( seeds=seeds, **kwargs, frame=frame, camera_pair_id=camera_pair_id, ) - with output_path.open('a') as f: + with output_path.open("a") as f: f.write(html) -@gin.configurable -def run_task( - queue_func, - scene_folder, - scene_dict, - taskname, - dryrun=False -): +@gin.configurable +def run_task(queue_func, scene_folder, scene_dict, taskname, dryrun=False): assert scene_folder.parent.exists(), scene_folder scene_folder.mkdir(exist_ok=True) scene_folder = scene_folder.resolve() stage_scene_name = f"{scene_folder.parent.stem}_{scene_folder.stem}_{taskname}" - assert not scene_dict.get(f'{taskname}_submitted', False) + assert not scene_dict.get(f"{taskname}_submitted", False) if dryrun: - scene_dict[f'{taskname}_job_obj'] = JOB_OBJ_SUCCEEDED - scene_dict[f'{taskname}_submitted'] = 1 + scene_dict[f"{taskname}_job_obj"] = JOB_OBJ_SUCCEEDED + scene_dict[f"{taskname}_submitted"] = 1 return job_obj, output_folder = queue_func( - seed=scene_dict['seed'], + seed=scene_dict["seed"], folder=scene_folder, name=stage_scene_name, - taskname=taskname + taskname=taskname, ) - scene_dict[f'{taskname}_job_obj'] = job_obj - scene_dict[f'{taskname}_output_folder'] = output_folder - scene_dict[f'{taskname}_submitted'] = 1 # marked as submitted + scene_dict[f"{taskname}_job_obj"] = job_obj + scene_dict[f"{taskname}_output_folder"] = output_folder + scene_dict[f"{taskname}_submitted"] = 1 # marked as submitted update_symlink(scene_folder, [(taskname, job_obj)]) def infer_crash_reason(stdout_file, stderr_file: Path): - if not stderr_file.exists(): - return f'{stderr_file} not found' - + return f"{stderr_file} not found" + try: error_log = stderr_file.read_text() except UnicodeDecodeError: @@ -395,7 +393,7 @@ def infer_crash_reason(stdout_file, stderr_file: Path): if "System is out of GPU memory" in 
error_log: return "Out of GPU memory" - elif "this scene is timed-out" in error_log or 'DUE TO TIME LIMIT' in error_log: + elif "this scene is timed-out" in error_log or "DUE TO TIME LIMIT" in error_log: return "Timed out" elif "" in error_log: return "SIGKILL: 9 (out-of-memory, probably)" @@ -403,62 +401,57 @@ def infer_crash_reason(stdout_file, stderr_file: Path): return "SIGCONT (timeout?)" if not stdout_file.exists(): - return f'{stdout_file} not found' + return f"{stdout_file} not found" if not stderr_file.exists(): - return f'{stderr_file} not found' + return f"{stderr_file} not found" output_text = f"{stdout_file.read_text()}\n{stderr_file.read_text()}\n" matches = re.findall("([^\.\n]*[Ee]rror):(.*)\n", output_text) ignore_errors = { - # happens for every failed submitit job, not informative to report in summary - "FailedProcessError", + "FailedProcessError", "CalledProcessError", - # happens for every failed slurm job on IONIC "srun: error", "FailedJobError", } - ignore_messages = [ - "Not freed memory blocks" - ] + ignore_messages = ["Not freed memory blocks"] matches = [ - f'{m[0]}: {m[1]}' for m in matches if not ( - m[0] in ignore_errors - or any(x in m[1] for x in ignore_messages) - ) + f"{m[0]}: {m[1]}" + for m in matches + if not (m[0] in ignore_errors or any(x in m[1] for x in ignore_messages)) ] if len(matches): - return ','.join(matches) + return ",".join(matches) else: - return f"Could not summarize cause, check {stderr_file}" + return f"Could not summarize cause, check {stderr_file}" -def record_crashed_seed(scene, taskname, f, fatal=True): - seed = scene['seed'] +def record_crashed_seed(scene, taskname, f, fatal=True): + seed = scene["seed"] stdout_file = args.output_folder / seed / "logs" / f"{taskname}.out" stderr_file = args.output_folder / seed / "logs" / f"{taskname}.err" - scene_id, *_ = stderr_file.resolve().stem.split('_') + scene_id, *_ = stderr_file.resolve().stem.split("_") node = node_from_slurm_jobid(scene_id) time_str = datetime.now().strftime("%m/%d %I:%M%p") - + reason = infer_crash_reason(stdout_file, stderr_file) text = f"{time_str} {str(stderr_file)} {reason=} {node=} {fatal=}\n" - print('Crashed: ' + text) + print("Crashed: " + text) f.write(text) - scene[f'{taskname}_crash_recorded'] = True + scene[f"{taskname}_crash_recorded"] = True return reason - -def write_html_summary(all_scenes, output_folder, max_size=5000): + +def write_html_summary(all_scenes, output_folder, max_size=5000): names = [ "index" if (idx == 0) else f"index_{idx}" for idx in range(0, len(all_scenes), max_size) @@ -466,98 +459,104 @@ def write_html_summary(all_scenes, output_folder, max_size=5000): for name, idx in zip(names, range(0, len(all_scenes), max_size)): html_path = output_folder / f"{name}.html" if not html_path.exists(): - make_html_page(html_path, all_scenes[idx:idx+max_size], frame=100, - camera_pair_id=0, samples=[f"resmpl{i}" for i in range(5)], pages=names, - ) - -def monitor_existing_jobs(all_scenes, aggressive_cancel_on_crash=False): + make_html_page( + html_path, + all_scenes[idx : idx + max_size], + frame=100, + camera_pair_id=0, + samples=[f"resmpl{i}" for i in range(5)], + pages=names, + ) + +def monitor_existing_jobs(all_scenes, aggressive_cancel_on_crash=False): state_counts = defaultdict(int) for scene in all_scenes: - - seed = scene['seed'] - scene['num_running'], scene['num_done'] = 0, 0 + seed = scene["seed"] + scene["num_running"], scene["num_done"] = 0, 0 any_fatal = False - for state, taskname, _, fatal in iterate_scene_tasks(scene, args, 
monitor_all=True): - + for state, taskname, _, fatal in iterate_scene_tasks( + scene, args, monitor_all=True + ): if state == JobState.NotQueued: continue - taskname_stem = taskname.split('_')[0] + taskname_stem = taskname.split("_")[0] state_counts[(state, taskname_stem)] += 1 - scene['num_done'] += state in CONCLUDED_JOBSTATES - scene['num_running'] += state not in CONCLUDED_JOBSTATES - + scene["num_done"] += state in CONCLUDED_JOBSTATES + scene["num_running"] += state not in CONCLUDED_JOBSTATES + if state == JobState.Failed: - if not scene.get(f'{taskname}_crash_recorded', False): - logging.info(f'{seed} - recording crash for {taskname}') - with (args.output_folder / "crash_summaries.txt").open('a') as f: + if not scene.get(f"{taskname}_crash_recorded", False): + logging.info(f"{seed} - recording crash for {taskname}") + with (args.output_folder / "crash_summaries.txt").open("a") as f: record_crashed_seed(scene, taskname, f, fatal=fatal) if fatal: any_fatal = True if any_fatal: - logging.info(f'{seed} - recording fatally crashed') - scene['any_fatal_crash'] = True + logging.info(f"{seed} - recording fatally crashed") + scene["any_fatal_crash"] = True if aggressive_cancel_on_crash and any_fatal: - suffix = 'job_obj' + suffix = "job_obj" to_cancel = [k for k in scene.keys() if k.endswith(suffix)] for k in to_cancel: - cancel_key = k.replace(suffix, 'force_cancelled') + cancel_key = k.replace(suffix, "force_cancelled") if scene.get(cancel_key, False): continue - logging.info(f'{seed} - cancelling {k} due to fatal crash') + logging.info(f"{seed} - cancelling {k} due to fatal crash") scene[cancel_key] = True cancel_job(scene[k]) - + if ( - any_fatal and - scene['num_running'] == 0 and - scene['all_done'] == SceneState.NotDone + any_fatal + and scene["num_running"] == 0 + and scene["all_done"] == SceneState.NotDone ): - logging.info(f'{seed} - processing scene termination due to fatal crash') + logging.info(f"{seed} - processing scene termination due to fatal crash") on_scene_termination(args, scene, crashed=True) - return state_counts -def stats_summary(state_counts): +def stats_summary(state_counts): uniq_states = set(s for (s, _) in state_counts.keys()) - def get_count(state): + + def get_count(state): return sum(v for (s, _), v in state_counts.items() if s == state) + totals = {s: get_count(s) for s in uniq_states} - stats = {f'{s}/{t}': v for (s, t), v in state_counts.items()} + stats = {f"{s}/{t}": v for (s, t), v in state_counts.items()} return stats, totals + @gin.configurable def jobs_to_launch_next( scenes: list[dict], state_counts: dict[tuple[str, str], int], - greedy=True, - - # following kwargs are designed to help minimize over-eager starting new scenes, + greedy=True, + # following kwargs are designed to help minimize over-eager starting new scenes, # or limit paralellism to help greedily finish scenes / lower overall latency. 
# warning: may reduce throughput, especially if not using warmup_sec, or cluster capacity varies max_queued_task: int = None, max_queued_total: int = None, - max_stuck_at_task: int = None + max_stuck_at_task: int = None, ): - def is_candidate_for_launch(scene): - return ( - scene['all_done'] == SceneState.NotDone and - not scene.get('any_fatal_crash', False) + return scene["all_done"] == SceneState.NotDone and not scene.get( + "any_fatal_crash", False ) + scenes = [s for s in scenes if is_candidate_for_launch(s)] def inflight(s): - return s['num_running'] + s['num_done'] + return s["num_running"] + s["num_done"] + if greedy: scenes = sorted(copy(scenes), key=inflight, reverse=True) @@ -565,27 +564,21 @@ def inflight(s): started_uniq, curr_per_started = np.unique(started_counts, return_counts=True) started_uniq = list(started_uniq) - logging.debug(f'Pipeline state: {list(zip(started_uniq, curr_per_started))}') + logging.debug(f"Pipeline state: {list(zip(started_uniq, curr_per_started))}") + + total_queued = sum(v for (s, _), v in state_counts.items() if s == JobState.Queued) - total_queued = sum( - v for (s, _), v in state_counts.items() - if s == JobState.Queued - ) - for scene in scenes: + seed = scene["seed"] - seed = scene['seed'] - started_if_launch = inflight(scene) + 1 stuck_at_next = ( - curr_per_started[started_uniq.index(started_if_launch)] - if started_if_launch in started_uniq else 0 + curr_per_started[started_uniq.index(started_if_launch)] + if started_if_launch in started_uniq + else 0 ) - if ( - max_stuck_at_task is not None and - stuck_at_next >= max_stuck_at_task - ): + if max_stuck_at_task is not None and stuck_at_next >= max_stuck_at_task: logging.info( f"{seed} - Not launching due to {stuck_at_next=} >" f" {max_stuck_at_task} for {started_if_launch=}" @@ -598,13 +591,17 @@ def inflight(s): if state != JobState.NotQueued: continue - queued_key = (JobState.Queued, taskname.split('_')[0]) + queued_key = (JobState.Queued, taskname.split("_")[0]) queued = state_counts.get(queued_key, 0) if max_queued_task is not None and queued >= max_queued_task: - logging.info(f"{seed} - Not launching due to {queued=} > {max_queued_task} for {taskname}") + logging.info( + f"{seed} - Not launching due to {queued=} > {max_queued_task} for {taskname}" + ) continue if max_queued_total is not None and total_queued >= max_queued_total: - logging.info(f"{seed} - Not launching due to {total_queued=} > {max_queued_total} for {taskname}") + logging.info( + f"{seed} - Not launching due to {total_queued=} > {max_queued_total} for {taskname}" + ) continue yield scene, taskname, queue_func @@ -612,32 +609,36 @@ def inflight(s): state_counts[queued_key] += 1 total_queued += 1 -def compute_control_state(args, totals, elapsed, num_concurrent): - if num_concurrent == f'ENVVAR_{NUM_CONCURRENT_ENVVAR}': +def compute_control_state(args, totals, elapsed, num_concurrent): + if num_concurrent == f"ENVVAR_{NUM_CONCURRENT_ENVVAR}": num_concurrent = int(os.environ[NUM_CONCURRENT_ENVVAR]) control_state = {} - control_state['n_in_flight'] = totals.get(JobState.Running, 0) + totals.get(JobState.Queued, 0) - control_state['disk_usage'] = get_disk_usage(args.output_folder) + control_state["n_in_flight"] = totals.get(JobState.Running, 0) + totals.get( + JobState.Queued, 0 + ) + control_state["disk_usage"] = get_disk_usage(args.output_folder) warmup_pct = min(elapsed / args.warmup_sec, 1) if args.warmup_sec > 0 else 1 - control_state['curr_concurrent_max'] = math.ceil(warmup_pct * num_concurrent) + 
control_state["curr_concurrent_max"] = math.ceil(warmup_pct * num_concurrent) - if control_state['n_in_flight'] > control_state['curr_concurrent_max']: + if control_state["n_in_flight"] > control_state["curr_concurrent_max"]: raise ValueError( f"manage_datagen_jobs observed {control_state['n_in_flight']=}," f" which exceeds allowed {control_state['curr_concurrent_max']=}" ) - control_state['try_to_launch'] = max(control_state['curr_concurrent_max'] - control_state['n_in_flight'], 0) + control_state["try_to_launch"] = max( + control_state["curr_concurrent_max"] - control_state["n_in_flight"], 0 + ) return control_state + def record_states(stats, totals, control_state): - pretty_stats = copy(stats) - pretty_stats.update({f'control_state/{k}': v for k, v in control_state.items()}) - pretty_stats.update({f'{k}/total': v for k, v in totals.items()}) + pretty_stats.update({f"control_state/{k}": v for k, v in control_state.items()}) + pretty_stats.update({f"{k}/total": v for k, v in totals.items()}) if wandb is not None: wandb.log(pretty_stats) @@ -646,11 +647,11 @@ def record_states(stats, totals, control_state): print(f"{k.ljust(30)} : {v}") print("-" * 60) + @gin.configurable def manage_datagen_jobs(all_scenes, elapsed, num_concurrent, disk_sleep_threshold=0.95): - if LocalScheduleHandler._inst is not None: - sys.path = ORIG_SYS_PATH #hacky workaround because bpy module breaks with multiprocessing + sys.path = ORIG_SYS_PATH # hacky workaround because bpy module breaks with multiprocessing LocalScheduleHandler.instance().poll() sys.path = BPY_SYS_PATH @@ -659,31 +660,37 @@ def manage_datagen_jobs(all_scenes, elapsed, num_concurrent, disk_sleep_threshol control_state = compute_control_state(args, totals, elapsed, num_concurrent) new_jobs = jobs_to_launch_next(all_scenes, state_counts) - new_jobs = list(itertools.islice(new_jobs, control_state['try_to_launch'])) - control_state['will_launch'] = len(new_jobs) # may be less due to jobs_to_launch optional kwargs, or running out of num_jobs + new_jobs = list(itertools.islice(new_jobs, control_state["try_to_launch"])) + control_state["will_launch"] = len( + new_jobs + ) # may be less due to jobs_to_launch optional kwargs, or running out of num_jobs - pd.DataFrame.from_records(all_scenes).to_csv(args.output_folder/'scenes_db.csv') + pd.DataFrame.from_records(all_scenes).to_csv(args.output_folder / "scenes_db.csv") record_states(stats, totals, control_state) # Dont launch new scenes if disk is getting full - if control_state['disk_usage'] > disk_sleep_threshold: + if control_state["disk_usage"] > disk_sleep_threshold: message = f"{args.output_folder} is full ({100*control_state['disk_usage']}%). Sleeping." 
print(message) if wandb is not None: - wandb.alert(title=f'{args.output_folder} full', text=message, wait_duration=3*60*60) + wandb.alert( + title=f"{args.output_folder} full", + text=message, + wait_duration=3 * 60 * 60, + ) time.sleep(60) return - for scene, taskname, queue_func in new_jobs: + for scene, taskname, queue_func in new_jobs: logger.info(f"{scene['seed']} - running {taskname}") - run_task(queue_func, args.output_folder / str(scene['seed']), scene, taskname) + run_task(queue_func, args.output_folder / str(scene["seed"]), scene, taskname) -@gin.configurable -def main(args, shuffle=True, wandb_project='render', upload_commandfile_method=None): - command_path = args.output_folder/'datagen_command.sh' - with command_path.open('w') as f: - f.write(' '.join(sys.argv)) +@gin.configurable +def main(args, shuffle=True, wandb_project="render", upload_commandfile_method=None): + command_path = args.output_folder / "datagen_command.sh" + with command_path.open("w") as f: + f.write(" ".join(sys.argv)) if upload_commandfile_method is not None: upload = upload_util.get_upload_func(upload_commandfile_method) upload(command_path, upload_util.get_upload_destfolder(args.output_folder)) @@ -694,185 +701,189 @@ def main(args, shuffle=True, wandb_project='render', upload_commandfile_method=N if args.cleanup != all: write_html_summary(all_scenes, args.output_folder) - if args.wandb_mode != 'disabled': + if args.wandb_mode != "disabled": global wandb - wandb = importlib.import_module('wandb') + wandb = importlib.import_module("wandb") if wandb is not None: wandb.init( - name=scene_name, - config=vars(args), - project=wandb_project, - mode=args.wandb_mode + name=scene_name, + config=vars(args), + project=wandb_project, + mode=args.wandb_mode, ) logging.basicConfig( filename=str(args.output_folder / "jobs.log"), level=args.loglevel, - format='[%(asctime)s]: %(message)s', + format="[%(asctime)s]: %(message)s", ) - print(f'Using {get_slurm_banned_nodes()=}') + print(f"Using {get_slurm_banned_nodes()=}") if shuffle: np.random.shuffle(all_scenes) else: - all_scenes = sorted(all_scenes, key=lambda j: j['seed']) + all_scenes = sorted(all_scenes, key=lambda j: j["seed"]) start_time = datetime.now() - while any(j['all_done'] == SceneState.NotDone for j in all_scenes): + while any(j["all_done"] == SceneState.NotDone for j in all_scenes): now = datetime.now() - print(f'{args.output_folder} {start_time.strftime("%m/%d %I:%M%p")} -> {now.strftime("%m/%d %I:%M%p")}') - manage_datagen_jobs(all_scenes, elapsed=(now-start_time).total_seconds()) + print( + f'{args.output_folder} {start_time.strftime("%m/%d %I:%M%p")} -> {now.strftime("%m/%d %I:%M%p")}' + ) + manage_datagen_jobs(all_scenes, elapsed=(now - start_time).total_seconds()) time.sleep(2) + any_crashed = any(j.get("any_fatal_crash", False) for j in all_scenes) + sys.exit(1 if any_crashed else 0) + + +mandatory_exclusive_configs = [ + "infinigen/datagen/configs/compute_platform", + "infinigen/datagen/configs/data_schema", +] + if __name__ == "__main__": - - os.umask(0o007) + slurm_available = which("sbatch") is not None + parser = argparse.ArgumentParser() - slurm_available = (which("sbatch") is not None) - parser = argparse.ArgumentParser() # to guarantee that the render scenes finish, try render_image.time_limit=2000 - parser.add_argument( - '-o', - '--output_folder', - type=Path, - required=True - ) + parser.add_argument("-o", "--output_folder", type=Path, default=None) # parser.add_argument( - '--num_scenes', - type=int, + "--num_scenes", + type=int, 
default=1, - help="Number of scenes to attempt before terminating" + help="Number of scenes to attempt before terminating", ) parser.add_argument( - '--meta_seed', - type=int, + "--meta_seed", + type=int, default=None, help="What seed should be used to determine the random seeds of each scene? " - "Leave as None unless deliberately replicating past runs" + "Leave as None unless deliberately replicating past runs", ) parser.add_argument( - '--specific_seed', - default=None, - nargs='+', + "--specific_seed", + default=None, + nargs="+", help="The default, None, will choose a random seed per scene. Otherwise, all " - "scenes will have the specified seed. Interpreted as an integer if possible." + "scenes will have the specified seed. Interpreted as an integer if possible.", ) parser.add_argument( - '--use_existing', - action='store_true', + "--use_existing", + action="store_true", help="If set, then assume output_folder is an existing folder from a " "terminated run, and make a best-possible-effort to resume from where " - "it left off" + "it left off", ) parser.add_argument( - '--warmup_sec', - type=float, + "--warmup_sec", + type=float, default=0, help="Perform a staggered start over the specified period, so that jobs dont " - "sync up or all write to disk at similar times." + "sync up or all write to disk at similar times.", ) parser.add_argument( - '--cleanup', - type=str, - choices=['all', 'big_files', 'none', 'except_logs', 'except_crashed'], - default='none', - help="What files should be cleaned up by the manager as it runs?" + "--cleanup", + type=str, + choices=["all", "big_files", "none", "except_logs", "except_crashed"], + default="none", + help="What files should be cleaned up by the manager as it runs?", ) parser.add_argument( - '--configs', - nargs='*', + "--configs", + nargs="*", default=[], help="List of gin config names to pass through to all underlying " - "scene generation jobs." + "scene generation jobs.", ) parser.add_argument( - '-p', - '--overrides', - nargs='+', - type=str, - default=[], + "-p", + "--overrides", + nargs="+", + type=str, + default=[], help="List of gin overrides to pass through to all underlying " - "scene generation jobs" + "scene generation jobs", ) parser.add_argument( - '--wandb_mode', - type=str, - default='disabled', - choices=['online', 'offline', 'disabled'], - help="Mode kwarg for wandb.init(). Set up wandb before use." + "--wandb_mode", + type=str, + default="disabled", + choices=["online", "offline", "disabled"], + help="Mode kwarg for wandb.init(). 
Set up wandb before use.", ) parser.add_argument( - '--pipeline_configs', - type=str, - nargs='+', + "--pipeline_configs", + type=str, + nargs="+", help="List of gin config names from tools/pipeline_configs " - "to configure this execution" + "to configure this execution", ) parser.add_argument( - '--pipeline_overrides', - nargs='+', - type=str, - default=[], + "--pipeline_overrides", + nargs="+", + type=str, + default=[], help="List of gin overrides to configure this execution", ) + parser.add_argument("--overwrite", action="store_true") parser.add_argument( - '--overwrite', action='store_true' - ) - parser.add_argument( - '-d', - '--debug', - action="store_const", - dest="loglevel", - const=logging.DEBUG, - default=logging.INFO + "-d", + "--debug", + action="store_const", + dest="loglevel", + const=logging.DEBUG, + default=logging.INFO, ) parser.add_argument( - '-v', - '--verbose', - action="store_const", - dest="loglevel", - const=logging.INFO + "-v", "--verbose", action="store_const", dest="loglevel", const=logging.INFO ) args = parser.parse_args() - using_upload = any('upload' in x for x in args.pipeline_configs) + using_upload = any("upload" in x for x in args.pipeline_configs) - if not using_upload and args.cleanup in ['except_logs', 'except_crashed', 'all']: + if not using_upload and args.cleanup in ["except_logs", "except_crashed", "all"]: raise ValueError( - f'Pipeline is configured with {args.cleanup=}' - ' yet {args.upload=}! No output would be preserved!' + f"Pipeline is configured with {args.cleanup=}" + " yet {args.upload=}! No output would be preserved!" ) - if using_upload and args.cleanup == 'none': + if using_upload and args.cleanup == "none": logging.warning( - f'Upload performs some cleanup, so combining upload.gin with ' - '--cleanup none will not result in ALL files being preserved' + "Upload performs some cleanup, so combining upload.gin with " + "--cleanup none will not result in ALL files being preserved" ) - + assert args.specific_seed is None or args.num_scenes == 1 + if args.output_folder is None: + date_str = datetime.now().strftime("%y-%m-%d_%H-%M") + hostname = os.uname().nodename + + output_base = Path("outputs") + assert output_base.exists(), output_base + + args.output_folder = Path(f"outputs/{date_str}_{hostname}") + overwrite_ok = args.use_existing or args.overwrite if args.output_folder.exists() and not overwrite_ok: raise FileExistsError( - f'--output_folder {args.output_folder} already exists! Please delete it,' - ' specify a different --output_folder, or use --overwrite' + f"--output_folder {args.output_folder} already exists! 
Please delete it," + " specify a different --output_folder, or use --overwrite" ) + args.output_folder.mkdir(parents=True, exist_ok=overwrite_ok) if args.meta_seed is not None: random.seed(args.meta_seed) np.random.seed(args.meta_seed) - mandatory_exclusive = [ - 'infinigen/datagen/configs/compute_platform', - 'infinigen/datagen/configs/data_schema' - ] infinigen.core.init.apply_gin_configs( - configs_folder=Path('infinigen/datagen/configs'), + config_folders=[Path("infinigen/datagen/configs")], configs=args.pipeline_configs, overrides=args.pipeline_overrides, - mandatory_folders=mandatory_exclusive, - mutually_exclusive_folders=mandatory_exclusive, + mandatory_folders=mandatory_exclusive_configs, + mutually_exclusive_folders=mandatory_exclusive_configs, ) main(args) diff --git a/infinigen/datagen/monitor_tasks.py b/infinigen/datagen/monitor_tasks.py index 8b9560922..c74148d00 100644 --- a/infinigen/datagen/monitor_tasks.py +++ b/infinigen/datagen/monitor_tasks.py @@ -1,209 +1,211 @@ # Copyright (c) Princeton University. # This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. -# Authors: +# Authors: # - Alex Raistrick: refactor, local rendering, video rendering # - Lahav Lipson: stereo version, local rendering # - Hei Law: initial version import itertools -from functools import partial import logging -from shutil import rmtree import subprocess +from functools import partial +from shutil import rmtree import gin -from infinigen.datagen.util.cleanup import cleanup -from infinigen.datagen.util import upload_util from infinigen.datagen.states import ( - JobState, - SceneState, - CONCLUDED_JOBSTATES, - get_scene_state, - get_suffix, + CONCLUDED_JOBSTATES, + JobState, + SceneState, + get_scene_state, ) +from infinigen.datagen.util import upload_util +from infinigen.datagen.util.cleanup import cleanup +from infinigen.tools.suffixes import get_suffix logger = logging.getLogger(__name__) + def iterate_sequential_tasks( - task_list, - get_task_state, - overrides, - configs, - input_indices=None, - output_indices=None + task_list, + get_task_state, + overrides, + configs, + input_indices=None, + output_indices=None, ): - if len(task_list) == 0: return JobState.Succeeded prev_state = JobState.Succeeded - assert task_list[0].get('condition', 'prev_succeeded') == 'prev_succeeded' + assert task_list[0].get("condition", "prev_succeeded") == "prev_succeeded" for i, task_spec in enumerate(task_list): - # check that we should actually run this step, according to its condition - cond = task_spec.get('condition', 'prev_succeeded') - if cond == 'prev_succeeded' and prev_state != JobState.Succeeded: + cond = task_spec.get("condition", "prev_succeeded") + if cond == "prev_succeeded" and prev_state != JobState.Succeeded: return - elif cond == 'prev_failed' and prev_state != JobState.Failed: - continue # we wont run this scene, but skipping doesnt count as crashing - elif cond == 'prev_redundant': - pass # any outcome is fine + elif cond == "prev_failed" and prev_state != JobState.Failed: + continue # we wont run this scene, but skipping doesnt count as crashing + elif cond == "prev_redundant": + pass # any outcome is fine # determine whether the current step failing would be catastrophic fatal = ( - i + 1 >= len(task_list) or - task_list[i + 1].get('condition', 'prev_succeeded') != 'prev_failed' + i + 1 >= len(task_list) + or task_list[i + 1].get("condition", "prev_succeeded") != "prev_failed" + ) + + queue_func = partial( + 
task_spec["func"], + overrides=overrides, + configs=configs, + input_indices=input_indices, + output_indices=output_indices, ) - - queue_func = partial(task_spec['func'], overrides=overrides, configs=configs, - input_indices=input_indices, output_indices=output_indices) - taskname = task_spec['name'] + get_suffix(output_indices) + taskname = task_spec["name"] + get_suffix(output_indices) state = get_task_state(taskname=taskname) yield state, taskname, queue_func, fatal prev_state = state -def apply_cleanup_options(args, seed, crashed, scene_folder): - if args.cleanup == 'all' or (args.cleanup == 'except_crashed' and not crashed): +def apply_cleanup_options(args, seed, crashed, scene_folder): + if args.cleanup == "all" or (args.cleanup == "except_crashed" and not crashed): logger.info(f"{seed} - Removing entirety of {scene_folder}") rmtree(scene_folder) - elif args.cleanup == 'big_files': + elif args.cleanup == "big_files": logger.info(f"{seed} - Cleaning up any large files") cleanup(scene_folder, verbose=False) - elif args.cleanup == 'except_logs': + elif args.cleanup == "except_logs": logger.info(f"{seed} - Cleaning up everything except logs") for f in scene_folder.iterdir(): - if f.name == 'logs': + if f.name == "logs": continue if f.is_dir(): rmtree(f) else: f.unlink() - elif args.cleanup == 'none' or (args.cleanup == 'except_crashed' and crashed): + elif args.cleanup == "none" or (args.cleanup == "except_crashed" and crashed): pass else: - raise ValueError(f'Unrecognized {args.cleanup=} {crashed=}') + raise ValueError(f"Unrecognized {args.cleanup=} {crashed=}") + @gin.configurable def on_scene_termination( - args, - scene: dict, - crashed: bool, + args, + scene: dict, + crashed: bool, enforce_upload_manifest=False, - remove_write_permission=False # safeguard finished data against accidental meddling + remove_write_permission=False, # safeguard finished data against accidental meddling ): - seed = scene['seed'] + seed = scene["seed"] if crashed: - with (args.output_folder / "crashed_seeds.txt").open('a') as f: + with (args.output_folder / "crashed_seeds.txt").open("a") as f: f.write(f"{seed}\n") - scene['all_done'] = SceneState.Crashed + scene["all_done"] = SceneState.Crashed else: - with (args.output_folder / "finished_seeds.txt").open('a') as f: + with (args.output_folder / "finished_seeds.txt").open("a") as f: f.write(f"{seed}\n") - scene['all_done'] = SceneState.Done + scene["all_done"] = SceneState.Done - scene_folder = args.output_folder/seed + scene_folder = args.output_folder / seed apply_cleanup_options(args, seed, crashed, scene_folder) - + if scene_folder.exists() and ( - remove_write_permission is True or - (remove_write_permission == 'except_crashed' and not crashed) + remove_write_permission is True + or (remove_write_permission == "except_crashed" and not crashed) ): subprocess.check_output(f"chmod -R a-w {scene_folder}".split()) if enforce_upload_manifest: - scene_folder = args.output_folder/scene['seed'] + scene_folder = args.output_folder / scene["seed"] upload_util.check_files_covered(scene_folder, upload_util.UPLOAD_MANIFEST) - -def check_intermediate_cleanup(args, scene, idxs, stagetype_name, tasklist): - raise NotImplementedError # todo fix +def check_intermediate_cleanup(args, scene, idxs, stagetype_name, tasklist): + raise NotImplementedError # todo fix - idxs_str = '_'.join(idxs.values()) - key = f'{stagetype_name}_{idxs_str}_cleaned' - if ( - args.cleanup != 'none' and - not scene.get(key, False) - ): + idxs_str = "_".join(idxs.values()) + key = 
f"{stagetype_name}_{idxs_str}_cleaned" + if args.cleanup != "none" and not scene.get(key, False): for stage_rec in tasklist: - taskname = stage_rec['name'] - path = scene[f'{taskname}_output_folder'] - print(f'Doing end-of-{stagetype_name} cleanup for {path} for {taskname}') + taskname = stage_rec["name"] + path = scene[f"{taskname}_output_folder"] + print(f"Doing end-of-{stagetype_name} cleanup for {path} for {taskname}") if path is not None and path.exists(): rmtree(path) scene[key] = True + @gin.configurable def iterate_scene_tasks( - scene_dict, + scene_dict, args, - - # if True, enumerate scenes that we might have launched earlier, + # if True, enumerate scenes that we might have launched earlier, # even if we wouldnt launch them now (due to crashes etc) - monitor_all, - + monitor_all, # provided by gin - global_tasks, - view_dependent_tasks, - camera_dependent_tasks, - - frame_range, - cam_id_ranges, - num_resamples=1, + global_tasks, + view_dependent_tasks, + camera_dependent_tasks, + frame_range, + cam_id_ranges, + num_resamples=1, render_frame_range=None, - finalize_tasks = [], - view_block_size=1, # how many frames should share each `view_dependent_task` - cam_block_size=None, # how many frames should share each `camera_dependent_task` - #cleanup_viewdep=False, # TODO fix. Should cleanup the results of `view_dependent_tasks` once each view iter is done? - viewdep_paralell=True, # can we work on multiple view depenendent tasks (usually `fine`) in paralell? - camdep_paralell=True # can we work on multiple camera dependent tasks (usually render/gt) in paralell? + finalize_tasks=[], + view_block_size=1, # how many frames should share each `view_dependent_task` + cam_block_size=None, # how many frames should share each `camera_dependent_task` + # cleanup_viewdep=False, # TODO fix. Should cleanup the results of `view_dependent_tasks` once each view iter is done? + viewdep_paralell=True, # can we work on multiple view depenendent tasks (usually `fine`) in paralell? + camdep_paralell=True, # can we work on multiple camera dependent tasks (usually render/gt) in paralell? ): - - ''' - This function is a generator which yields all scenes we might want to consider + """ + This function is a generator which yields all scenes we might want to consider monitoring or running for a particular scene It `yield`s the available scenes, regardless of whether they are already running etc - ''' + """ for task in global_tasks + view_dependent_tasks + camera_dependent_tasks: - if '_' in task['name']: - raise ValueError(f'{task=} with {task["name"]=} is invalid, must not contain underscores') + if "_" in task["name"]: + raise ValueError( + f'{task=} with {task["name"]=} is invalid, must not contain underscores' + ) if cam_block_size is None: cam_block_size = view_block_size - + if cam_id_ranges[0] <= 0 or cam_id_ranges[1] <= 0: raise ValueError( - f'{cam_id_ranges=} is invalid, both num. rigs and ' - 'num subcams must be >= 1 or no work is done' + f"{cam_id_ranges=} is invalid, both num. 
rigs and " + "num subcams must be >= 1 or no work is done" ) assert view_block_size >= 1 assert cam_block_size >= 1 if cam_block_size > view_block_size: cam_block_size = view_block_size - seed = scene_dict['seed'] + seed = scene_dict["seed"] - scene_folder = args.output_folder/seed - get_task_state = partial(get_scene_state, scene=scene_dict, scene_folder=scene_folder) + scene_folder = args.output_folder / seed + get_task_state = partial( + get_scene_state, scene=scene_dict, scene_folder=scene_folder + ) global_overrides = [ - f'execute_tasks.frame_range={repr(list(frame_range))}', - 'execute_tasks.camera_id=[0, 0]' + f"execute_tasks.frame_range={repr(list(frame_range))}", + "execute_tasks.camera_id=[0, 0]", ] - global_configs = scene_dict.get('configs', []) + args.configs + global_configs = scene_dict.get("configs", []) + args.configs global_iter = iterate_sequential_tasks( - global_tasks, + global_tasks, get_task_state, - overrides=args.overrides+global_overrides, - configs=global_configs + overrides=args.overrides + global_overrides, + configs=global_configs, ) for state, *rest in global_iter: @@ -211,7 +213,7 @@ def iterate_scene_tasks( if not state == JobState.Succeeded: return - # blender frame_range is inclusive, but python's range is end-exclusive + # blender frame_range is inclusive, but python's range is end-exclusive view_range = render_frame_range if render_frame_range is not None else frame_range view_frames = range(view_range[0], view_range[1] + 1, view_block_size) resamples = range(num_resamples) @@ -220,18 +222,22 @@ def iterate_scene_tasks( running_views = 0 for cam_rig, view_frame in itertools.product(cam_rigs, view_frames): - - view_frame_range = [view_frame, min(frame_range[1], view_frame + view_block_size - 1)] + view_frame_range = [ + view_frame, + min(frame_range[1], view_frame + view_block_size - 1), + ] view_overrides = [ - f'execute_tasks.frame_range=[{view_frame_range[0]},{view_frame_range[1]}]', - f'execute_tasks.camera_id=[{cam_rig},{0}]' + f"execute_tasks.frame_range=[{view_frame_range[0]},{view_frame_range[1]}]", + f"execute_tasks.camera_id=[{cam_rig},{0}]", ] view_idxs = dict(cam_rig=cam_rig, frame=view_frame) view_tasks_iter = iterate_sequential_tasks( - view_dependent_tasks, get_task_state, - overrides=args.overrides+view_overrides, - configs=global_configs, output_indices=view_idxs + view_dependent_tasks, + get_task_state, + overrides=args.overrides + view_overrides, + configs=global_configs, + output_indices=view_idxs, ) for state, *rest in view_tasks_iter: yield state, *rest @@ -240,47 +246,44 @@ def iterate_scene_tasks( running_views += 1 continue else: - return + return elif state == JobState.Failed and not monitor_all: return running_blocks = 0 for subcam, resample_idx in itertools.product(subcams, resamples): for cam_frame in range( - view_frame_range[0], - view_frame_range[1] + 1, - cam_block_size + view_frame_range[0], view_frame_range[1] + 1, cam_block_size ): - cam_frame_range = [ - cam_frame, - min(view_frame_range[1], cam_frame + cam_block_size - 1) - ] # blender frame_end is INCLUSIVE + cam_frame, + min(view_frame_range[1], cam_frame + cam_block_size - 1), + ] # blender frame_end is INCLUSIVE cam_overrides = [ - f'execute_tasks.frame_range=[{cam_frame_range[0]},{cam_frame_range[1]}]', - f'execute_tasks.camera_id=[{cam_rig},{subcam}]', - f'execute_tasks.resample_idx={resample_idx}' + f"execute_tasks.frame_range=[{cam_frame_range[0]},{cam_frame_range[1]}]", + f"execute_tasks.camera_id=[{cam_rig},{subcam}]", + 
f"execute_tasks.resample_idx={resample_idx}", ] camdep_indices = dict( - cam_rig=cam_rig, - frame=cam_frame, - subcam=subcam, + cam_rig=cam_rig, + frame=cam_frame, + subcam=subcam, resample=resample_idx, ) # extra semi-redundant info needed for openglgt mostly extra_indices = dict( - view_first_frame=view_frame_range[0], - last_view_frame=view_frame_range[1], - last_cam_frame=cam_frame_range[1] - ) + view_first_frame=view_frame_range[0], + last_view_frame=view_frame_range[1], + last_cam_frame=cam_frame_range[1], + ) camera_dep_iter = iterate_sequential_tasks( - camera_dependent_tasks, + camera_dependent_tasks, get_task_state, - overrides=args.overrides+cam_overrides, + overrides=args.overrides + cam_overrides, configs=global_configs, input_indices=view_idxs if len(view_dependent_tasks) else None, - output_indices={**camdep_indices, **extra_indices} + output_indices={**camdep_indices, **extra_indices}, ) for state, *rest in camera_dep_iter: @@ -302,15 +305,15 @@ def iterate_scene_tasks( return finalize_iter = iterate_sequential_tasks( - finalize_tasks, + finalize_tasks, get_task_state, - overrides=args.overrides+global_overrides, - configs=global_configs + overrides=args.overrides + global_overrides, + configs=global_configs, ) for state, *rest in finalize_iter: yield state, *rest if not state == JobState.Succeeded: return - - if scene_dict['all_done'] == SceneState.NotDone: + + if scene_dict["all_done"] == SceneState.NotDone: on_scene_termination(args, scene_dict, crashed=False) diff --git a/infinigen/datagen/states.py b/infinigen/datagen/states.py index 6ce4f585f..e7cdf8ea3 100644 --- a/infinigen/datagen/states.py +++ b/infinigen/datagen/states.py @@ -1,21 +1,20 @@ # Copyright (c) Princeton University. # This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. -# Authors: +# Authors: # - Alex Raistrick: refactor, local rendering, video rendering # - Lahav Lipson: stereo version, local rendering # - Hei Law: initial version import subprocess import time -from copy import copy from pathlib import Path import gin import submitit from infinigen.datagen.util.submitit_emulator import LocalJob -from infinigen.tools.suffixes import SUFFIX_ORDERING, get_suffix, parse_suffix + class JobState: NotQueued = "notqueued" @@ -25,13 +24,15 @@ class JobState: Failed = "crashed" Cancelled = "cancelled" + class SceneState: NotDone = "notdone" Done = "done" Crashed = "crashed" + CONCLUDED_JOBSTATES = {JobState.Succeeded, JobState.Failed, JobState.Cancelled} -JOB_OBJ_SUCCEEDED = 'MARK_AS_SUCCEEDED' +JOB_OBJ_SUCCEEDED = "MARK_AS_SUCCEEDED" # Will throw exception if the scene was not found. 
Sometimes this happens if the scene was queued very very recently @@ -42,28 +43,30 @@ def seff(job_obj, retry_on_error=True): assert scene_id.isdigit() while True: try: - seff_out = subprocess.check_output(f"/usr/bin/seff -d {scene_id}".split()).decode() + seff_out = subprocess.check_output( + f"/usr/bin/seff -d {scene_id}".split() + ).decode() lines = seff_out.splitlines() - return dict(zip(lines[0].split(' ')[2:], lines[1].split(' ')[2:]))["State"] + return dict(zip(lines[0].split(" ")[2:], lines[1].split(" ")[2:]))["State"] except Exception as e: if not retry_on_error: raise e time.sleep(1) -def get_scene_state(scene: dict, taskname: str, scene_folder: Path): - if not scene.get(f'{taskname}_submitted', False): +def get_scene_state(scene: dict, taskname: str, scene_folder: Path): + if not scene.get(f"{taskname}_submitted", False): return JobState.NotQueued - elif scene.get(f'{taskname}_crash_recorded', False): + elif scene.get(f"{taskname}_crash_recorded", False): return JobState.Failed - elif scene.get(f'{taskname}_force_cancelled', False): + elif scene.get(f"{taskname}_force_cancelled", False): return JobState.Cancelled - - #if scene['all_done']: + + # if scene['all_done']: # return JobState.Succeeded # TODO Hacky / incorrect for nonfatal - job_obj = scene[f'{taskname}_job_obj'] - + job_obj = scene[f"{taskname}_job_obj"] + # for when both local and slurm scenes are being mixed if isinstance(job_obj, str): assert job_obj == JOB_OBJ_SUCCEEDED @@ -73,20 +76,20 @@ def get_scene_state(scene: dict, taskname: str, scene_folder: Path): elif isinstance(job_obj, submitit.Job): res = seff(job_obj) else: - raise TypeError(f'Unrecognized {job_obj=}') + raise TypeError(f"Unrecognized {job_obj=}") # map from submitit's scene state strings to our JobState enum if res in {"PENDING", "REQUEUED"}: return JobState.Queued - elif res == 'RUNNING': + elif res == "RUNNING": return JobState.Running - elif not (scene_folder/"logs"/f"FINISH_{taskname}").exists(): + elif not (scene_folder / "logs" / f"FINISH_{taskname}").exists(): return JobState.Failed - + return JobState.Succeeded + def cancel_job(job_obj): - if isinstance(job_obj, str): assert job_obj == JOB_OBJ_SUCCEEDED return JobState.Succeeded @@ -94,6 +97,6 @@ def cancel_job(job_obj): job_obj.kill() elif isinstance(job_obj, submitit.Job): # TODO: does submitit have a cancel? 
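# Editor's illustrative aside (not part of the patch): the FINISH_<taskname> marker
# convention that get_scene_state relies on above. A worker touches
# logs/FINISH_<taskname> as its final step (see the `touch` command queued for the
# opengl job earlier in this diff); once a job object reports completion, a missing
# marker is interpreted as a crash. The paths and task names below are hypothetical.
from pathlib import Path


def finished_ok(scene_folder: Path, taskname: str) -> bool:
    # True only if the task ran to completion and wrote its marker file
    return (scene_folder / "logs" / f"FINISH_{taskname}").exists()


scene_folder = Path("outputs/demo/1a2b3c")
(scene_folder / "logs").mkdir(parents=True, exist_ok=True)
(scene_folder / "logs" / "FINISH_coarse").touch()  # what a successful worker does
assert finished_ok(scene_folder, "coarse")
assert not finished_ok(scene_folder, "rendershort")  # concluded job, no marker -> Failed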
- subprocess.check_call(['/usr/bin/scancel', str(job_obj.job_id)]) + subprocess.check_call(["/usr/bin/scancel", str(job_obj.job_id)]) else: - raise TypeError(f'Unrecognized {job_obj=}') \ No newline at end of file + raise TypeError(f"Unrecognized {job_obj=}") diff --git a/infinigen/datagen/util/cancel_jobs.py b/infinigen/datagen/util/cancel_jobs.py index 940f8adf4..1999d774e 100644 --- a/infinigen/datagen/util/cancel_jobs.py +++ b/infinigen/datagen/util/cancel_jobs.py @@ -4,25 +4,30 @@ # Authors: Lahav Lipson -import subprocess import argparse -import re import os +import re +import subprocess + from tqdm import tqdm -if __name__ == '__main__': +if __name__ == "__main__": parser = argparse.ArgumentParser() - parser.add_argument('-w', "--with_substring", required=True) - parser.add_argument('-wo', "--without_substring", default=None) + parser.add_argument("-w", "--with_substring", required=True) + parser.add_argument("-wo", "--without_substring", default=None) parser.add_argument("--not_running", action="store_true") args = parser.parse_args() job_cmd = f'/usr/bin/squeue --user={os.environ["USER"]} -o %.24i%.40j%.14R -h' squeue_output = subprocess.check_output(job_cmd.split()).decode() matches = re.findall("([0-9]+) *([^ ]+) *([^ ]+) *\n", squeue_output) for job_id, job_name, job_status in tqdm(matches): - should_cancel = ((args.with_substring in job_name) and - ((args.without_substring is None) or args.without_substring not in job_name) and - ((not args.not_running) or 'node' not in job_status)) + should_cancel = ( + (args.with_substring in job_name) + and ( + (args.without_substring is None) + or args.without_substring not in job_name + ) + and ((not args.not_running) or "node" not in job_status) + ) if should_cancel: subprocess.check_output(["/usr/bin/scancel", job_id]) - \ No newline at end of file diff --git a/infinigen/datagen/util/cleanup.py b/infinigen/datagen/util/cleanup.py index eef97c5ba..2d4a82aa1 100644 --- a/infinigen/datagen/util/cleanup.py +++ b/infinigen/datagen/util/cleanup.py @@ -44,6 +44,7 @@ def check_delete(filepath): for file_path in sorted(folder.rglob(file_name_to_del)): check_delete(file_path) + if __name__ == "__main__": parser = argparse.ArgumentParser() parser.add_argument("folder", type=Path) diff --git a/infinigen/datagen/util/google_drive_client.py b/infinigen/datagen/util/google_drive_client.py index 7e3fbd921..e45197150 100644 --- a/infinigen/datagen/util/google_drive_client.py +++ b/infinigen/datagen/util/google_drive_client.py @@ -4,20 +4,30 @@ # Authors: Lahav Lipson -from pathlib import Path -import subprocess import os import shutil +import subprocess +from pathlib import Path + def listdir(remote_path): - stdout = subprocess.check_output(f"{shutil.which('rclone')} lsf infinigen_renders:{remote_path}/".split(), text=True) + stdout = subprocess.check_output( + f"{shutil.which('rclone')} lsf infinigen_renders:{remote_path}/".split(), + text=True, + ) return sorted((Path(remote_path) / l) for l in stdout.splitlines()) + def download(remote_path, local_folder): assert os.path.exists(local_folder) and os.path.isdir(local_folder) dest_path = os.path.join(local_folder, os.path.basename(remote_path)) print(f"Downloading to {dest_path}") - with Path('/dev/null').open('w') as devnull: - subprocess.run(f'{shutil.which("rclone")} copy infinigen_renders:{remote_path} {local_folder}/', - shell=True, check=True, stderr=devnull, stdout=devnull) - return dest_path \ No newline at end of file + with Path("/dev/null").open("w") as devnull: + subprocess.run( + 
f'{shutil.which("rclone")} copy infinigen_renders:{remote_path} {local_folder}/', + shell=True, + check=True, + stderr=devnull, + stdout=devnull, + ) + return dest_path diff --git a/infinigen/datagen/util/show_gpu_table.py b/infinigen/datagen/util/show_gpu_table.py index dca63f04c..0cf43d8f9 100644 --- a/infinigen/datagen/util/show_gpu_table.py +++ b/infinigen/datagen/util/show_gpu_table.py @@ -5,18 +5,19 @@ import re -import time import subprocess -from datetime import datetime +import time from collections import defaultdict +from datetime import datetime from itertools import chain from shutil import which gres_regex = re.compile(".*gpu:([^:]+):([0-9]+).*").fullmatch cpu_regex = re.compile(".+/([0-9]+)[^/]+").fullmatch + def sinfo(): - sinfo_command = f'/usr/bin/sinfo --Node --format=%12N%22P%C%30G%10m --noheader' + sinfo_command = "/usr/bin/sinfo --Node --format=%12N%22P%C%30G%10m --noheader" while True: try: return subprocess.check_output(sinfo_command.split()).decode() @@ -25,6 +26,7 @@ def sinfo(): print(f"[{current_time_str}] sinfo failed with error:\n{e}") time.sleep(60) + def get_gpu_nodes(): sinfo_output = sinfo() gpu_table = {} @@ -45,22 +47,26 @@ def get_gpu_nodes(): return gpu_table, dict(node_type_lookup), shared_node_mem + # e.g. nodes_with_gpus('gtx_1080', 'k80') def nodes_with_gpus(*gpu_names): - if not which('sinfo'): + if not which("sinfo"): return [] if len(gpu_names) == 0: return [] _, node_type_lookup, _ = get_gpu_nodes() - return sorted(chain.from_iterable(node_type_lookup.get(n, set()) for n in gpu_names)) + return sorted( + chain.from_iterable(node_type_lookup.get(n, set()) for n in gpu_names) + ) + -if __name__ == '__main__': +if __name__ == "__main__": gpu_table, node_type_lookup, shared_node_mem = get_gpu_nodes() for group, lookup in gpu_table.items(): print(f"{group.ljust(10)} {dict(lookup)} Total: {sum(lookup.values())}") print() - for k,v in sorted(node_type_lookup.items()): + for k, v in sorted(node_type_lookup.items()): print(f"{k.ljust(10)} {','.join(v)}") print() diff --git a/infinigen/datagen/util/smb_client.py b/infinigen/datagen/util/smb_client.py index 8d9932b6d..acd051430 100644 --- a/infinigen/datagen/util/smb_client.py +++ b/infinigen/datagen/util/smb_client.py @@ -1,48 +1,47 @@ # Copyright (c) Princeton University. # This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. -# Authors: +# Authors: # - Lahav Lipson: everything except noted below # - Alex Raistrick: dest_folder options, warnings, SMB_AUTH envvar change - -from pathlib import Path -import subprocess import argparse -from itertools import product -import types -import os -import re import logging -from multiprocessing import Pool +import os +import subprocess import time -import sys +import types +from itertools import product +from multiprocessing import Pool +from pathlib import Path -import gin import submitit from tqdm import tqdm logger = logging.getLogger(__file__) -SMB_AUTH_VARNAME = 'SMB_AUTH' +SMB_AUTH_VARNAME = "SMB_AUTH" if SMB_AUTH_VARNAME not in os.environ: logging.warning( - f'{SMB_AUTH_VARNAME} envvar is not set, smb_client upload ' - 'will not work. Ignore this message if not using upload' + f"{SMB_AUTH_VARNAME} envvar is not set, smb_client upload " + "will not work. 
Ignore this message if not using upload" ) _SMB_RATELIMIT_DELAY = 0.0 + def check_exists(folder_path: Path): folder_path = str(folder_path).strip("/") return run_command(f"ls {folder_path}", False).returncode == 0 + def mkdir(folder_path: Path): assert isinstance(folder_path, Path) for path in list(reversed(folder_path.parents))[1:] + [folder_path]: run_command(f"mkdir {path}") + def upload(local_path: Path, dest_folder: Path): assert isinstance(local_path, Path) and isinstance(dest_folder, Path) assert local_path.exists() @@ -50,18 +49,22 @@ def upload(local_path: Path, dest_folder: Path): data = run_command(f"put {local_path} {dest_folder / local_path.name}") assert data.returncode == 0 + def pathlib_to_smb(p: Path): - p = str(p).replace('/', '\\') - if not p.endswith('\\'): - p += '\\' + p = str(p).replace("/", "\\") + if not p.endswith("\\"): + p += "\\" return p + def remove(remote_path: Path): - run_command(f"recurse ON; cd {pathlib_to_smb(remote_path.parent)}; deltree {remote_path.name}") + run_command( + f"recurse ON; cd {pathlib_to_smb(remote_path.parent)}; deltree {remote_path.name}" + ) -def download(remote_path: Path, dest_folder=None, verbose=False): - assert ' ' not in str(remote_path), remote_path +def download(remote_path: Path, dest_folder=None, verbose=False): + assert " " not in str(remote_path), remote_path assert isinstance(remote_path, Path) if not check_exists(remote_path): @@ -75,21 +78,21 @@ def download(remote_path: Path, dest_folder=None, verbose=False): if dest_folder is not None: dest_folder.mkdir(exist_ok=True, parents=True) - statements.append(f'lcd {str(dest_folder)}') + statements.append(f"lcd {str(dest_folder)}") print(f"Downloading {remote_path} to {dest_folder}") else: - print(f'Downloading {remote_path} to working directory') + print(f"Downloading {remote_path} to working directory") statements.append(f"mget {remote_path.name}") - - command = str.join('; ', statements) + + command = str.join("; ", statements) if verbose: print(command) data = run_command(command, verbose=verbose) if dest_folder: - dest_path = dest_folder/remote_path.name + dest_path = dest_folder / remote_path.name else: dest_path = remote_path.name @@ -97,38 +100,39 @@ def download(remote_path: Path, dest_folder=None, verbose=False): return dest_path -def yield_dirfiles(data, extras, parent): +def yield_dirfiles(data, extras, parent): for line in data.splitlines(): - if 'blocks of size' in line: + if "blocks of size" in line: continue parts = line.split() if not len(parts): continue - if parts[0].startswith('.'): + if parts[0].startswith("."): continue - parts[0] = parent/parts[0] - + parts[0] = parent / parts[0] + if extras: yield parts else: yield parts[0] -def globdir(remote_path: Path, extras=False): +def globdir(remote_path: Path, extras=False): remote_path = Path(remote_path) - assert '*' in remote_path.parts[-1], remote_path + assert "*" in remote_path.parts[-1], remote_path search_path = str(remote_path).strip("/") - search_path = search_path.replace('/', '\\') + search_path = search_path.replace("/", "\\") try: - data = run_command_stdout(f'ls {search_path}') - except subprocess.CalledProcessError as e: + data = run_command_stdout(f"ls {search_path}") + except subprocess.CalledProcessError: return [] yield from yield_dirfiles(data, extras, parent=remote_path.parent) - + + def listdir(remote_path: Path, extras=False): """ Args: str or Path @@ -136,39 +140,49 @@ def listdir(remote_path: Path, extras=False): """ search_path = str(remote_path).strip("/") - search_path = 
search_path.replace('/', '\\') + search_path = search_path.replace("/", "\\") - if '*' in search_path: - raise ValueError(f'Found \"*\" in {search_path=}, use smb_client.globdir instead') + if "*" in search_path: + raise ValueError(f'Found "*" in {search_path=}, use smb_client.globdir instead') if len(search_path) > 0 and not check_exists(search_path): raise FileNotFoundError(search_path) - search_path += '\\*' + search_path += "\\*" - data = run_command_stdout(f'ls {search_path}') + data = run_command_stdout(f"ls {search_path}") yield from yield_dirfiles(data, extras, parent=remote_path) + def run_command_stdout(command: str): smb_str = os.environ[SMB_AUTH_VARNAME] time.sleep(_SMB_RATELIMIT_DELAY) - return subprocess.check_output(f'smbclient {smb_str} -c "{command}"', text=True, shell=True) + return subprocess.check_output( + f'smbclient {smb_str} -c "{command}"', text=True, shell=True + ) + def run_command(command: str, check=True, verbose=False): smb_str = os.environ[SMB_AUTH_VARNAME] time.sleep(_SMB_RATELIMIT_DELAY) - with Path('/dev/null').open('w') as devnull: + with Path("/dev/null").open("w") as devnull: outstream = None if verbose else devnull - return subprocess.run(f'smbclient {smb_str} -c "{command}"', - shell=True, stderr=outstream, stdout=outstream, check=check) + return subprocess.run( + f'smbclient {smb_str} -c "{command}"', + shell=True, + stderr=outstream, + stdout=outstream, + check=check, + ) + def list_files_recursive(base_path: Path): """ Args: str or Path Returns [path, ...] """ - all_paths=[] + all_paths = [] children = listdir(base_path) for child, is_dir in children: if is_dir: @@ -177,6 +191,7 @@ def list_files_recursive(base_path: Path): all_paths.append(child) return all_paths + def mapfunc(f, its, args): if args.n_workers == 1: return [f(i) for i in its] @@ -184,28 +199,26 @@ def mapfunc(f, its, args): with Pool(args.n_workers) as p: return list(tqdm(p.imap(f, its), total=len(its))) else: - executor = submitit.AutoExecutor( - folder=args.local_path/"logs" - ) + executor = submitit.AutoExecutor(folder=args.local_path / "logs") executor.update_parameters( name=args.local_path.name, - timeout_min=48*60, + timeout_min=48 * 60, cpus_per_task=8, mem_gb=8, - slurm_partition=os.environ['INFINIGEN_SLURMPARTITION'], - slurm_array_parallelism=args.n_workers + slurm_partition=os.environ["INFINIGEN_SLURMPARTITION"], + slurm_array_parallelism=args.n_workers, ) executor.map_array(f, its) -def process_one(p: list[Path]): +def process_one(p: list[Path]): res = commands[args.command](*p) - - p_summary = ' '.join(str(pi) for pi in p) + + p_summary = " ".join(str(pi) for pi in p) def result(r): if args.verbose: - print(f'{args.command} {p_summary}: {r}') + print(f"{args.command} {p_summary}: {r}") else: print(r) @@ -215,65 +228,60 @@ def result(r): else: result(res) -def resolve_globs(p: Path, args): +def resolve_globs(p: Path, args): def resolved(parts): - if any(x in str(p) for x in args.exclude): return - - first_glob = next((i for i, pp in enumerate(parts) if '*' in pp), None) + + first_glob = next((i for i, pp in enumerate(parts) if "*" in pp), None) if first_glob is None: yield p else: - curr_level = p.parts[:first_glob+1] - remainder = p.parts[first_glob+1:] + curr_level = p.parts[: first_glob + 1] + remainder = p.parts[first_glob + 1 :] for child in globdir(Path(*curr_level)): - yield from resolve_globs(child/Path(*remainder), args) + yield from resolve_globs(child / Path(*remainder), args) - - if args.command == 'glob': + if args.command == "glob": before, after = 
p.parts[:-1], p.parts[-1:] for f in resolved(before): - yield f/Path(*after) + yield f / Path(*after) else: yield from resolved(p.parts) - - + + commands = { - 'ls': listdir, - 'glob': globdir, - 'rm': remove, - 'download': download, - 'upload': upload, - 'mkdir': mkdir, - 'exists': check_exists, + "ls": listdir, + "glob": globdir, + "rm": remove, + "download": download, + "upload": upload, + "mkdir": mkdir, + "exists": check_exists, } -def main(args): - n_globs = len([x for x in args.paths if '*' in str(x)]) +def main(args): + n_globs = len([x for x in args.paths if "*" in str(x)]) if n_globs > 1: - raise ValueError(f'{args.paths=} had {n_globs=}, only equipped to handle 1') + raise ValueError(f"{args.paths=} had {n_globs=}, only equipped to handle 1") + + paths = [resolve_globs(p, args) for p in args.paths] - paths = [ - resolve_globs(p, args) - for p in args.paths - ] - targets = list(product(*paths)) mapfunc(process_one, targets, args) -if __name__ == "__main__": +if __name__ == "__main__": parser = argparse.ArgumentParser() - parser.add_argument('command', type=str, choices=list(commands.keys())) - parser.add_argument('paths', type=Path, nargs='+') - parser.add_argument('--exclude', type=str, nargs='+', default=[]) - parser.add_argument('--n_workers', type=int, default=1) - parser.add_argument('--slurm', action='store_true') - parser.add_argument('--verbose', action='store_true') + parser.add_argument("command", type=str, choices=list(commands.keys())) + parser.add_argument("paths", type=Path, nargs="+") + parser.add_argument("--exclude", type=str, nargs="+", default=[]) + parser.add_argument("--n_workers", type=int, default=1) + parser.add_argument("--slurm", action="store_true") + parser.add_argument("--verbose", action="store_true") args = parser.parse_args() - main(args) \ No newline at end of file + main(args) diff --git a/infinigen/datagen/util/submitit_emulator.py b/infinigen/datagen/util/submitit_emulator.py index 1dcaeaf2f..309f5c425 100644 --- a/infinigen/datagen/util/submitit_emulator.py +++ b/infinigen/datagen/util/submitit_emulator.py @@ -1,47 +1,40 @@ # Copyright (c) Princeton University. # This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
-# Authors: +# Authors: # - Lahav Lipson - LocalJob # - Alex Raistrick - Local queue handler # - David Yan - Bugfix -import re -import time -import sys -from pathlib import Path -from dataclasses import dataclass -import os -from functools import partial -import itertools import copy -import random +import itertools import logging - +import os +import re import subprocess +import sys +from dataclasses import dataclass from multiprocessing import Process -import threading -import submitit -import gin +from pathlib import Path +from shutil import which +import gin import numpy as np -from shutil import which logger = logging.getLogger(__name__) CUDA_VARNAME = "CUDA_VISIBLE_DEVICES" -NVIDIA_SMI_PATH = '/bin/nvidia-smi' +NVIDIA_SMI_PATH = "/bin/nvidia-smi" + @dataclass class LocalJob: - job_id: int process: Process finalized: bool = False def status(self): - if self.finalized: return "COMPLETED" @@ -66,43 +59,51 @@ def kill(self): return self.process.kill() + def get_fake_job_id(): # Lahav assures me these will never conflict return np.random.randint(int(1e10), int(1e11)) -def job_wrapper(func, inner_args, inner_kwargs, stdout_file: Path, stderr_file: Path, cuda_devices=None): - with stdout_file.open('w') as stdout, stderr_file.open('w') as stderr: +def job_wrapper( + func, + inner_args, + inner_kwargs, + stdout_file: Path, + stderr_file: Path, + cuda_devices=None, +): + with stdout_file.open("w") as stdout, stderr_file.open("w") as stderr: sys.stdout = stdout sys.stderr = stderr if cuda_devices is not None: - os.environ[CUDA_VARNAME] = ','.join([str(i) for i in cuda_devices]) + os.environ[CUDA_VARNAME] = ",".join([str(i) for i in cuda_devices]) else: - os.environ[CUDA_VARNAME] = '' + os.environ[CUDA_VARNAME] = "" return func(*inner_args, **inner_kwargs) + def launch_local(func, args, kwargs, job_id, log_folder, name, cuda_devices=None): - stderr_file = log_folder / f"{job_id}_0_log.err" stdout_file = log_folder / f"{job_id}_0_log.out" - with stdout_file.open('w') as f: + with stdout_file.open("w") as f: f.write(f"{func} {args}\n") kwargs = dict( func=func, inner_args=args, inner_kwargs=kwargs, - stdout_file=stdout_file, - stderr_file=stderr_file, - cuda_devices=cuda_devices + stdout_file=stdout_file, + stderr_file=stderr_file, + cuda_devices=cuda_devices, ) proc = Process(target=job_wrapper, kwargs=kwargs, name=name) proc.start() return proc -class ImmediateLocalExecutor: +class ImmediateLocalExecutor: def __init__(self, folder: str): self.log_folder = Path(folder).resolve() self.log_folder.mkdir(exist_ok=True) @@ -110,17 +111,18 @@ def __init__(self, folder: str): def update_parameters(self, **parameters): self.parameters.update(parameters) - + def submit(self, func, *args, **kwargs): job_id = get_fake_job_id() - name = self.parameters.get('name', None) - proc = launch_local(func, args, kwargs, job_id, - log_folder=self.log_folder, name=name) + name = self.parameters.get("name", None) + proc = launch_local( + func, args, kwargs, job_id, log_folder=self.log_folder, name=name + ) return LocalJob(job_id=job_id, process=proc) + @gin.configurable class LocalScheduleHandler: - _inst = None @classmethod @@ -135,101 +137,104 @@ def __init__(self, jobs_per_gpu=1, use_gpu=True): self.use_gpu = use_gpu def enqueue(self, func, args, kwargs, params, log_folder): - job = LocalJob(job_id=get_fake_job_id(), process=None) job_rec = dict( - func=func, args=args, kwargs=kwargs, - params=params, - job=job, log_folder=log_folder, - gpu_assignment=None + func=func, + args=args, + kwargs=kwargs, + params=params, + 
job=job, + log_folder=log_folder, + gpu_assignment=None, ) - + self.queue.append(job_rec) return job @gin.configurable def total_resources(self): - resources = {} if self.use_gpu: - if which(NVIDIA_SMI_PATH) is None: - raise ValueError(f'LocalScheduleHandler.use_gpu=True yet could not find {NVIDIA_SMI_PATH}') + raise ValueError( + f"LocalScheduleHandler.use_gpu=True yet could not find {NVIDIA_SMI_PATH}, " + "please use --pipeline_overrides LocalScheduleHandler.use_gpu=False if your machine does not have a supported GPU" + ) - result = subprocess.check_output(f'{NVIDIA_SMI_PATH} -L'.split()).decode() + result = subprocess.check_output(f"{NVIDIA_SMI_PATH} -L".split()).decode() gpus_uuids = set(i for i in range(len(result.splitlines()))) if CUDA_VARNAME in os.environ: - visible = [int(s.strip()) for s in os.environ[CUDA_VARNAME].split(',')] + visible = [int(s.strip()) for s in os.environ[CUDA_VARNAME].split(",")] gpus_uuids = gpus_uuids.intersection(visible) - logger.debug(f"Restricting to {gpus_uuids=} due to toplevel {CUDA_VARNAME} setting") + logger.debug( + f"Restricting to {gpus_uuids=} due to toplevel {CUDA_VARNAME} setting" + ) + + resources["gpus"] = set( + itertools.product(gpus_uuids, range(self.jobs_per_gpu)) + ) - resources['gpus'] = set(itertools.product( - gpus_uuids, - range(self.jobs_per_gpu) - )) - return resources def resources_available(self, total): - resources = copy.copy(total) for job_rec in self.queue: - if job_rec['job'].status() != 'RUNNING': + if job_rec["job"].status() != "RUNNING": continue - if (g := job_rec['gpu_assignment']) is not None: - resources['gpus'] -= g + if (g := job_rec["gpu_assignment"]) is not None: + resources["gpus"] -= g return resources - - def poll(self): + def poll(self): total = self.total_resources() available = self.resources_available(total) - logger.debug(f'Checked resources, {total=} {available=}') + logger.debug(f"Checked resources, {total=} {available=}") for job_rec in self.queue: - if job_rec['job'].status() != 'PENDING': + if job_rec["job"].status() != "PENDING": continue self.attempt_dispatch_job(job_rec, available, total) - - def dispatch(self, job_rec, resources): + def dispatch(self, job_rec, resources): gpu_assignment = resources.get("gpus", None) if gpu_assignment is None: gpu_idxs = None else: gpu_idxs = [g[0] for g in gpu_assignment] - job_rec['job'].process = launch_local( - func=job_rec["func"], args=job_rec["args"], kwargs=job_rec["kwargs"], - job_id=job_rec["job"].job_id, log_folder=job_rec["log_folder"], - name=job_rec["params"].get("name", None), - cuda_devices=gpu_idxs + job_rec["job"].process = launch_local( + func=job_rec["func"], + args=job_rec["args"], + kwargs=job_rec["kwargs"], + job_id=job_rec["job"].job_id, + log_folder=job_rec["log_folder"], + name=job_rec["params"].get("name", None), + cuda_devices=gpu_idxs, ) - job_rec['gpu_assignment'] = gpu_assignment - - def attempt_dispatch_job(self, job_rec, available, total, select_gpus='first'): - - n_gpus = job_rec['params'].get('gpus', 0) or 0 - + job_rec["gpu_assignment"] = gpu_assignment + + def attempt_dispatch_job(self, job_rec, available, total, select_gpus="first"): + n_gpus = job_rec["params"].get("gpus", 0) or 0 + if n_gpus == 0 or not self.use_gpu: return self.dispatch(job_rec, resources={}) - - if n_gpus <= len(available['gpus']): - if select_gpus == 'first': - gpus = set(itertools.islice(list(available['gpus']), n_gpus)) - elif select_gpus == 'random': - gpus = set(np.random.choice(list(available['gpus']), n_gpus)) + + if n_gpus <= 
len(available["gpus"]): + if select_gpus == "first": + gpus = set(itertools.islice(list(available["gpus"]), n_gpus)) + elif select_gpus == "random": + gpus = set(np.random.choice(list(available["gpus"]), n_gpus)) else: - raise ValueError(f'Unrecognized {select_gpus=}') - available['gpus'] -= gpus - return self.dispatch(job_rec, resources={'gpus': gpus}) + raise ValueError(f"Unrecognized {select_gpus=}") + available["gpus"] -= gpus + return self.dispatch(job_rec, resources={"gpus": gpus}) -class ScheduledLocalExecutor: +class ScheduledLocalExecutor: def __init__(self, folder: str): self.log_folder = Path(folder) self.log_folder.mkdir(exist_ok=True) @@ -240,14 +245,18 @@ def update_parameters(self, **parameters): def submit(self, func, *args, **kwargs): return LocalScheduleHandler.instance().enqueue( - func, args, kwargs, params=self.parameters, log_folder=self.log_folder) + func, args, kwargs, params=self.parameters, log_folder=self.log_folder + ) + """ key: pid value: command """ + + def get_all_processes(): psef_regex = re.compile(" *([0-9]+) +(.*)").fullmatch psef_out = subprocess.check_output("ps -e -o pid,cmd --no-headers".split()).decode() groups = (psef_regex(l).groups() for l in psef_out.splitlines()) - return {int(pid):cmd for pid, cmd in groups} \ No newline at end of file + return {int(pid): cmd for pid, cmd in groups} diff --git a/infinigen/datagen/util/upload_util.py b/infinigen/datagen/util/upload_util.py index 7679fec1a..b6040faa9 100644 --- a/infinigen/datagen/util/upload_util.py +++ b/infinigen/datagen/util/upload_util.py @@ -1,64 +1,57 @@ # Copyright (c) Princeton University. # This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. -# Authors: +# Authors: # - Alexander Raistrick: create_upload_payload, metadata # - Lahav Lipson: initial version import argparse +import json import os -from pathlib import Path import platform +import shutil +import subprocess import tarfile -import json -import itertools - import time from datetime import datetime -import gin -from tqdm import tqdm -import subprocess -import shutil +from pathlib import Path from infinigen.core.util.logging import Suppress -from . import smb_client, cleanup +from . 
import smb_client RCLONE_PREFIX_ENVVAR = "INFINIGEN_RCLONE_PREFIX" UPLOAD_MANIFEST = [ - ('frames*/*', 'KEEP'), - ('logs/*', 'KEEP'), - ('fine/scene.blend', 'KEEP'), - ('run_pipeline.sh', 'KEEP'), - - ('coarse/*.txt', 'KEEP'), - ('coarse/*.csv', 'KEEP'), - ('coarse/*.json', 'KEEP'), - ('fine*/*.txt', 'KEEP'), - ('fine*/*.csv', 'KEEP'), - ('fine*/*.json', 'KEEP'), - - ('savemesh*', 'DELETE'), - ('coarse/assets', 'DELETE'), - ('coarse/scene.blend*', 'DELETE'), - ('fine*/assets', 'DELETE'), - ('tmp', 'DELETE'), - ('*/*.b_displacement.npy', 'DELETE'), - + ("frames*/*", "KEEP"), + ("logs/*", "KEEP"), + ("fine/scene.blend", "KEEP"), + ("run_pipeline.sh", "KEEP"), + ("coarse/*.txt", "KEEP"), + ("coarse/*.csv", "KEEP"), + ("coarse/*.json", "KEEP"), + ("fine*/*.txt", "KEEP"), + ("fine*/*.csv", "KEEP"), + ("fine*/*.json", "KEEP"), + ("savemesh*", "DELETE"), + ("coarse/assets", "DELETE"), + ("coarse/scene.blend*", "DELETE"), + ("fine*/assets", "DELETE"), + ("tmp", "DELETE"), + ("*/*.b_displacement.npy", "DELETE"), # These two only show up during/after upload, we just specify them to prevent an error - ('*_thumbnail.png', 'KEEP'), - ('*_metadata.json', 'KEEP'), + ("*_thumbnail.png", "KEEP"), + ("*_metadata.json", "KEEP"), ] -def check_files_covered(scene_folder, manifest): +def check_files_covered(scene_folder, manifest): covered = set() for glob, _ in UPLOAD_MANIFEST: covered |= set(scene_folder.glob(glob)) - extant = set(scene_folder.glob('*')) + extant = set(scene_folder.glob("*")) not_covered = extant - covered @@ -66,39 +59,40 @@ def check_files_covered(scene_folder, manifest): if len(not_covered) == 0: return - + raise ValueError( - f'{scene_folder=} had {not_covered=}. Please modify {__file__}.UPLOAD_MANIFEST' - ' to explicitly say whether you want these files to be deleted or included in the final tarball' + f"{scene_folder=} had {not_covered=}. 
Please modify {__file__}.UPLOAD_MANIFEST" + " to explicitly say whether you want these files to be deleted or included in the final tarball" ) + def apply_manifest_cleanup(scene_folder, manifest): - check_files_covered(scene_folder, manifest) keep = set() delete = set() for glob, action in manifest: - affected = set() for p in scene_folder.glob(glob): affected.add(p) if p.is_dir(): affected |= set(p.rglob("*")) - print(f'{glob=} {action=} matched {len(affected)=}') + print(f"{glob=} {action=} matched {len(affected)=}") - if action == 'KEEP': + if action == "KEEP": keep |= affected - elif action == 'KEEP_MANDATORY': + elif action == "KEEP_MANDATORY": if len(affected) == 0: - raise ValueError(f'In {apply_manifest_cleanup.__name__} {glob=} had {action=} but failed to match any files') + raise ValueError( + f"In {apply_manifest_cleanup.__name__} {glob=} had {action=} but failed to match any files" + ) keep |= affected - elif action == 'DELETE': + elif action == "DELETE": delete |= set(affected) - keep else: - raise ValueError(f'Unrecognized {action=}') + raise ValueError(f"Unrecognized {action=}") assert delete.isdisjoint(keep) @@ -110,24 +104,27 @@ def apply_manifest_cleanup(scene_folder, manifest): for f in delete: if not f.exists() or not f.is_dir(): continue - if len([f1 for f1 in f.rglob('*') if not f.is_dir()]) == 0: + if len([f1 for f1 in f.rglob("*") if not f.is_dir()]) == 0: shutil.rmtree(f) -def rclone_upload_file(src_file, dst_folder): +def rclone_upload_file(src_file, dst_folder): prefix = os.environ.get(RCLONE_PREFIX_ENVVAR) if prefix is None: - raise ValueError(f'Please specify envvar {RCLONE_PREFIX_ENVVAR}') - if ':' not in prefix: - raise ValueError(f'Rclone prefix must contain ":" to separate remote from path prefix') + raise ValueError(f"Please specify envvar {RCLONE_PREFIX_ENVVAR}") + if ":" not in prefix: + raise ValueError( + 'Rclone prefix must contain ":" to separate remote from path prefix' + ) assert os.path.exists(src_file), src_file cmd = f"{shutil.which('rclone')} copy -P {src_file} {prefix}{dst_folder}" subprocess.check_output(cmd.split()) print(f"Uploaded {src_file}") + def get_commit_hash(): - git = shutil.which('git') + git = shutil.which("git") if git is None: return None try: @@ -137,93 +134,94 @@ def get_commit_hash(): except subprocess.CalledProcessError: return None + def write_metadata(parent_folder, seed, all_images): - try: version = (parent_folder / "coarse" / "version.txt").read_text().splitlines()[0] except FileNotFoundError: version = None metadata = { - 'original_directory': str(parent_folder.resolve()), - 'user': os.environ['USER'], - 'node': platform.node().split('.')[0], - 'timestamp': time.time(), - 'datetime': datetime.now().strftime("%m/%d/%Y, %H:%M:%S"), - 'version': version, - 'commit': get_commit_hash(), - 'n_frames': len(all_images) + "original_directory": str(parent_folder.resolve()), + "user": os.environ["USER"], + "node": platform.node().split(".")[0], + "timestamp": time.time(), + "datetime": datetime.now().strftime("%m/%d/%Y, %H:%M:%S"), + "version": version, + "commit": get_commit_hash(), + "n_frames": len(all_images), } - metadata_path = parent_folder/f'{seed}_metadata.json' - with metadata_path.open('w') as f: + metadata_path = parent_folder / f"{seed}_metadata.json" + with metadata_path.open("w") as f: json.dump(metadata, f, indent=4) return metadata_path + def write_thumbnail(parent_folder, seed, all_images): if len(all_images) > 0: - thumb_path = parent_folder/f'{seed}_thumbnail.png' + thumb_path = parent_folder / 
f"{seed}_thumbnail.png" shutil.copyfile(all_images[0], thumb_path) else: - thumb_path = None + thumb_path = None return thumb_path + def create_tarball(parent_folder): - tar_path = parent_folder.with_suffix('.tar.gz') + tar_path = parent_folder.with_suffix(".tar.gz") print(f"Tarring {parent_folder} to {tar_path}") with tarfile.open(tar_path, "w:gz") as tar: tar.add(parent_folder, os.path.sep) assert tar_path.exists() return tar_path -def get_upload_func(method='smbclient'): - if method == 'rclone': + +def get_upload_func(method="smbclient"): + if method == "rclone": return rclone_upload_file - elif method == 'smbclient': + elif method == "smbclient": return smb_client.upload else: - raise ValueError(f'Unrecognized {method=}') + raise ValueError(f"Unrecognized {method=}") + def get_upload_destfolder(job_folder): - return Path('infinigen')/'renders'/job_folder.name + return Path("infinigen") / "renders" / job_folder.name + # DO NOT make gin.configurable # this function gets submitted via pickle in some settings, and gin args are not preserved def upload_job_folder( - parent_folder, - task_uniqname, - dir_prefix_len=0, - method='smbclient' + parent_folder, task_uniqname, dir_prefix_len=0, method="smbclient" ): - parent_folder = Path(parent_folder) seed = parent_folder.name - print(f'Performing cleanup on {parent_folder}') + print(f"Performing cleanup on {parent_folder}") apply_manifest_cleanup(parent_folder, UPLOAD_MANIFEST) upload_func = get_upload_func(method) - + upload_dest_folder = get_upload_destfolder(parent_folder.parent) if dir_prefix_len > 0: - upload_dest_folder = upload_dest_folder/parent_folder.name[:dir_prefix_len] + upload_dest_folder = upload_dest_folder / parent_folder.name[:dir_prefix_len] all_images = sorted(list(parent_folder.rglob("**/Image*.png"))) upload_paths = [ write_thumbnail(parent_folder, seed, all_images), write_metadata(parent_folder, seed, all_images), - create_tarball(parent_folder) + create_tarball(parent_folder), ] - orig_fine_path = parent_folder/'fine'/'scene.blend' + orig_fine_path = parent_folder / "fine" / "scene.blend" if orig_fine_path.exists(): - dest_fine_path = parent_folder.parent / f'{seed}_fine.blend' + dest_fine_path = parent_folder.parent / f"{seed}_fine.blend" shutil.move(orig_fine_path, dest_fine_path) upload_paths.append(dest_fine_path) - + for f in upload_paths: if f is None: continue @@ -232,10 +230,11 @@ def upload_job_folder( (parent_folder / "logs" / f"FINISH_{task_uniqname}").touch() + if __name__ == "__main__": parser = argparse.ArgumentParser() - parser.add_argument('parent_folder', type=Path) - parser.add_argument('task_uniqname', type=str) + parser.add_argument("parent_folder", type=Path) + parser.add_argument("task_uniqname", type=str) args = parser.parse_args() - upload_job_folder(args.parent_folder, args.task_uniqname) \ No newline at end of file + upload_job_folder(args.parent_folder, args.task_uniqname) diff --git a/infinigen/launch_blender.py b/infinigen/launch_blender.py index 11db8205d..56fd83305 100644 --- a/infinigen/launch_blender.py +++ b/infinigen/launch_blender.py @@ -4,71 +4,65 @@ # Authors: Alexander Raistrick -import subprocess import argparse +import subprocess from pathlib import Path root = Path(__file__).parent.parent BLENDER_BINARY_RELATIVE = [ - root/"blender/blender", - root/"Blender.app/Contents/MacOS/Blender" + root / "blender/blender", + root / "Blender.app/Contents/MacOS/Blender", ] -IMPORT_INFINIGEN_SCRIPT = root/'infinigen/tools/blendscript_import_infinigen.py' -APPEND_SYSPATH_SCRIPT = 
root/'infinigen/tools/blendscript_path_append.py' +IMPORT_INFINIGEN_SCRIPT = root / "infinigen/tools/blendscript_import_infinigen.py" +APPEND_SYSPATH_SCRIPT = root / "infinigen/tools/blendscript_path_append.py" HEADLESS_ARGS = [ - '-noaudio', - '--background', + "-noaudio", + "--background", ] + def get_standalone_blender_path(): try: return next(x for x in BLENDER_BINARY_RELATIVE if x.exists()) except StopIteration: raise ValueError( "Could not find blender binary - please check you have completed " - "'Infinigen as a Blender-Python script' section of docs/Installation.md" + "'Infinigen as a Blender-Python script' section of docs/Installation.md" f" and that one of {BLENDER_BINARY_RELATIVE} exists" ) + if __name__ == "__main__": - parser = argparse.ArgumentParser() - parser.add_argument('-m', '--module', type=str, default=None) - parser.add_argument('-s', '--script', type=str, default=None) + parser.add_argument("-m", "--module", type=str, default=None) + parser.add_argument("-s", "--script", type=str, default=None) args, unknown_args = parser.parse_known_args() cmd_args = [str(get_standalone_blender_path())] if args.module is not None: - cmd_args += HEADLESS_ARGS - cmd_args += [ - '--python', - str(APPEND_SYSPATH_SCRIPT) - ] + cmd_args += ["--python", str(APPEND_SYSPATH_SCRIPT)] - relpath = '/'.join(args.module.split('.')) + '.py' - path = root/relpath + relpath = "/".join(args.module.split(".")) + ".py" + path = root / relpath if not path.exists(): - raise FileNotFoundError(f'Could not find python script {path}') - - cmd_args += ['--python', str(path)] - + raise FileNotFoundError(f"Could not find python script {path}") + + cmd_args += ["--python", str(path)] + elif args.script is not None: - cmd_args += HEADLESS_ARGS + ['--python', args.script] + cmd_args += HEADLESS_ARGS + ["--python", args.script] else: - cmd_args += [ - '--python', - str(IMPORT_INFINIGEN_SCRIPT) - ] + cmd_args += ["--python", str(IMPORT_INFINIGEN_SCRIPT)] if len(unknown_args): cmd_args += unknown_args - print(' '.join(cmd_args)) + print(" ".join(cmd_args)) - subprocess.run(cmd_args, cwd=root) \ No newline at end of file + subprocess.run(cmd_args, cwd=root) diff --git a/infinigen/terrain/__init__.py b/infinigen/terrain/__init__.py index bba71c859..5afd8b41c 100644 --- a/infinigen/terrain/__init__.py +++ b/infinigen/terrain/__init__.py @@ -4,4 +4,4 @@ # Authors: Zeyu Ma -from .core import Terrain, hidden_in_viewport \ No newline at end of file +from .core import Terrain, hidden_in_viewport diff --git a/infinigen/terrain/assets/caves/__init__.py b/infinigen/terrain/assets/caves/__init__.py index 7578e4700..13f8f332b 100644 --- a/infinigen/terrain/assets/caves/__init__.py +++ b/infinigen/terrain/assets/caves/__init__.py @@ -4,4 +4,4 @@ # Authors: Zeyu Ma -from .core import caves_asset, assets_to_data \ No newline at end of file +from .core import assets_to_data, caves_asset diff --git a/infinigen/terrain/assets/caves/core.py b/infinigen/terrain/assets/caves/core.py index f42ebb1f8..fdb400827 100644 --- a/infinigen/terrain/assets/caves/core.py +++ b/infinigen/terrain/assets/caves/core.py @@ -4,18 +4,15 @@ # Authors: Lahav Lipson, Zeyu Ma -from pathlib import Path - import bpy import gin import numpy as np from numpy import ascontiguousarray as AC import infinigen.terrain.mesh_to_sdf as mesh_to_sdf -from infinigen.terrain.utils import Mesh from infinigen.core.util.blender import SelectObjects, ViewportMode -from infinigen.core.util.math import FixedSeed from infinigen.core.util.organization import AssetFile +from 
infinigen.terrain.utils import Mesh from .geometry_utils import increment_step, pitch_up, yaw_clockwise from .pcfg import generate_string @@ -23,105 +20,103 @@ def get_all_verts(): if bpy.ops.mesh.select_all.poll(): - bpy.ops.mesh.select_all(action='DESELECT') - bpy.ops.object.mode_set(mode='OBJECT') + bpy.ops.mesh.select_all(action="DESELECT") + bpy.ops.object.mode_set(mode="OBJECT") return bpy.context.active_object.data.vertices def select_vert(idx: int): assert idx >= 0 obj = bpy.context.active_object - bpy.ops.object.mode_set(mode='EDIT') + bpy.ops.object.mode_set(mode="EDIT") bpy.ops.mesh.select_mode(type="VERT") - bpy.ops.mesh.select_all(action='DESELECT') - bpy.ops.object.mode_set(mode='OBJECT') + bpy.ops.mesh.select_all(action="DESELECT") + bpy.ops.object.mode_set(mode="OBJECT") # assert False, [len(bpy.context.active_object.data.vertices), len(get_all_verts())] - assert idx < len( - obj.data.vertices), f"There are only {len(obj.data.vertices)} {len(get_all_verts())} verts, cannot select {idx}" + assert ( + idx < len(obj.data.vertices) + ), f"There are only {len(obj.data.vertices)} {len(get_all_verts())} verts, cannot select {idx}" obj.data.vertices[idx].select = True - bpy.ops.object.mode_set(mode='EDIT') + bpy.ops.object.mode_set(mode="EDIT") def move_forward(current_dir): assert type(current_dir) == np.ndarray and current_dir.size == 3 - bpy.ops.mesh.extrude_region_move( - TRANSFORM_OT_translate={"value": current_dir}) + bpy.ops.mesh.extrude_region_move(TRANSFORM_OT_translate={"value": current_dir}) -def trace_string(symbols, num_verts=1, current_idx=-1, current_dir=(0.5, 0., 0.)): - bpy.ops.object.mode_set(mode='EDIT') +def trace_string(symbols, num_verts=1, current_idx=-1, current_dir=(0.5, 0.0, 0.0)): + bpy.ops.object.mode_set(mode="EDIT") current_dir = np.array(current_dir).flatten() angle_magnitude = 15 while len(symbols) > 0: symbol = symbols.pop(0) - if symbol == 'f': + if symbol == "f": move_forward(current_dir) num_verts += 1 current_idx = num_verts - 1 - elif symbol == 'r': + elif symbol == "r": current_dir = yaw_clockwise(current_dir, angle_magnitude) - elif symbol == 'l': + elif symbol == "l": current_dir = yaw_clockwise(current_dir, -angle_magnitude) - elif symbol == 'u': + elif symbol == "u": current_dir = pitch_up(current_dir, angle_magnitude) - elif symbol == 'd': + elif symbol == "d": current_dir = pitch_up(current_dir, -angle_magnitude) - elif symbol == 'o': + elif symbol == "o": angle_magnitude += 15 - elif symbol == 'a': + elif symbol == "a": angle_magnitude -= 15 - elif symbol == 'b': + elif symbol == "b": current_dir = increment_step(current_dir, 1) - elif symbol == 's': + elif symbol == "s": current_dir = increment_step(current_dir, -1) - elif symbol == 'n': # do nothing + elif symbol == "n": # do nothing pass - elif symbol == '[': - num_verts = trace_string( - symbols, num_verts, current_idx, current_dir) + elif symbol == "[": + num_verts = trace_string(symbols, num_verts, current_idx, current_dir) select_vert(current_idx) - elif symbol == ']': + elif symbol == "]": return num_verts else: raise Exception(f"Symbol not defined: {symbol}") - if symbol in list('rlud'): + if symbol in list("rlud"): angle_magnitude = 15 class Cave: - def scale_verts(self, random_scaling_factor=0.0): # 0.8 is a good number assert random_scaling_factor >= 0.0 vertices = get_all_verts() - bpy.ops.object.mode_set(mode='OBJECT') + bpy.ops.object.mode_set(mode="OBJECT") obj = bpy.context.active_object - radii = 2*np.ones((len(vertices), 2)) - urn = np.random.rand(*radii.shape)*2 - 1 
+ radii = 2 * np.ones((len(vertices), 2)) + urn = np.random.rand(*radii.shape) * 2 - 1 radii *= np.exp(urn * random_scaling_factor) assert radii.min() > 0.05 - obj.data.skin_vertices[0].data.foreach_set('radius', radii.flatten()) + obj.data.skin_vertices[0].data.foreach_set("radius", radii.flatten()) def add_subdivision(self, name: str, levels: int): assert name not in self.modifier_stack - bpy.ops.object.mode_set(mode='OBJECT') - bpy.ops.object.modifier_add(type='SUBSURF') + bpy.ops.object.mode_set(mode="OBJECT") + bpy.ops.object.modifier_add(type="SUBSURF") bpy.context.object.modifiers["Subdivision"].name = name bpy.context.object.modifiers[name].levels = levels self.modifier_stack.append(name) def remesh(self, name: str, voxel_size: float): assert name not in self.modifier_stack - bpy.ops.object.mode_set(mode='OBJECT') - bpy.ops.object.modifier_add(type='REMESH') + bpy.ops.object.mode_set(mode="OBJECT") + bpy.ops.object.modifier_add(type="REMESH") bpy.context.object.modifiers["Remesh"].name = name bpy.context.object.modifiers[name].voxel_size = voxel_size self.modifier_stack.append(name) def add_skin(self): - bpy.ops.object.mode_set(mode='EDIT') - bpy.ops.mesh.select_all(action='SELECT') - bpy.ops.object.modifier_add(type='SKIN') + bpy.ops.object.mode_set(mode="EDIT") + bpy.ops.mesh.select_all(action="SELECT") + bpy.ops.object.modifier_add(type="SKIN") self.modifier_stack.append("Skin") def apply_modifiers(self): @@ -131,19 +126,18 @@ def apply_modifiers(self): def add_to_collection(self, name): if bpy.data.collections.get(name) is None: bpy.data.collections.new(name=name) - bpy.context.scene.collection.children.link( - bpy.data.collections[name]) + bpy.context.scene.collection.children.link(bpy.data.collections[name]) obj = bpy.context.active_object bpy.ops.collection.objects_remove_all() bpy.data.collections[name].objects.link(obj) def add_lights(self, num_lights, power=100): - np.random.shuffle(self.path_verts) for single_vert in self.path_verts[:num_lights]: # print("LIGHT AT", single_vert) bpy.ops.object.light_add( - type='POINT', align='WORLD', location=single_vert, scale=(1, 1, 1)) + type="POINT", align="WORLD", location=single_vert, scale=(1, 1, 1) + ) bpy.context.object.data.energy = power self.add_to_collection("AllPointLights") @@ -158,13 +152,10 @@ def __init__(self, name="Cave") -> None: bpy.context.active_object.name = name generated_string = generate_string(max_len=5000) # print(f"Using String", ''.join(generated_string)) - trace_string(['f']*2 + generated_string) + trace_string(["f"] * 2 + generated_string) -def add_cave( - rescale, - cave_z -): +def add_cave(rescale, cave_z): cave = Cave("Cave") cave.add_skin() cave.scale_verts(0.1) @@ -173,13 +164,14 @@ def add_cave( cave.apply_modifiers() with SelectObjects(bpy.data.objects["Cave"]): bpy.data.objects["Cave"].scale = (rescale, rescale, rescale) - bpy.ops.object.origin_set(type='ORIGIN_GEOMETRY', center='MEDIAN') + bpy.ops.object.origin_set(type="ORIGIN_GEOMETRY", center="MEDIAN") bpy.data.objects["Cave"].location = (0, 0, cave_z) bpy.ops.object.transform_apply(scale=True) cave.path_verts = np.array([v.co for v in get_all_verts()]) # cave.add_lights(n_cave_light) return cave + @gin.configurable def caves_asset( folder, @@ -190,14 +182,14 @@ def caves_asset( name = "Cave" obj = bpy.data.objects[name] with ViewportMode(obj, "EDIT"): - bpy.ops.mesh.select_all(action='SELECT') - bpy.ops.mesh.quads_convert_to_tris(quad_method='BEAUTY', ngon_method='BEAUTY') + bpy.ops.mesh.select_all(action="SELECT") + 
bpy.ops.mesh.quads_convert_to_tris(quad_method="BEAUTY", ngon_method="BEAUTY") bounding_box = np.array([v[:] for v in obj.bound_box]) min_gen, max_gen = [0, 0, 0], [0, 0, 0] for j in range(3): - min_gen[j], max_gen[j] = bounding_box[:, j].min(), bounding_box[:,j].max() + min_gen[j], max_gen[j] = bounding_box[:, j].min(), bounding_box[:, j].max() bounding_box = np.array([min_gen, max_gen]) - dim = (bounding_box[1] - bounding_box[0]) + dim = bounding_box[1] - bounding_box[0] bounding_box[0] -= dim / 4 bounding_box[1] += dim / 4 cave_mesh = Mesh(obj=obj).to_trimesh() @@ -211,15 +203,18 @@ def caves_asset( query_points[:, j, :, 1] = y[j] query_points[:, :, j, 2] = z[j] query_points = query_points.reshape(-1, 3) - voxels = mesh_to_sdf.mesh_to_sdf(cave_mesh, query_points, surface_point_method='sample').reshape((N, N, N)) - np.save(folder/"occupancy.npy", voxels) - np.save(folder/"boundingbox.npy", bounding_box) + voxels = mesh_to_sdf.mesh_to_sdf( + cave_mesh, query_points, surface_point_method="sample" + ).reshape((N, N, N)) + np.save(folder / "occupancy.npy", voxels) + np.save(folder / "boundingbox.npy", bounding_box) (folder / AssetFile.Finish).touch() + def assets_to_data(folder): data = {} - occupancies = np.load(folder/"occupancy.npy") + occupancies = np.load(folder / "occupancy.npy") N = occupancies.shape[0] data["occupancy"] = AC(occupancies.reshape(-1)) - data["bounding_box"] = AC(np.load(folder/"boundingbox.npy").reshape(-1)) - return N, data \ No newline at end of file + data["bounding_box"] = AC(np.load(folder / "boundingbox.npy").reshape(-1)) + return N, data diff --git a/infinigen/terrain/assets/caves/geometry_utils.py b/infinigen/terrain/assets/caves/geometry_utils.py index c9dc7a4d6..c81b6e415 100644 --- a/infinigen/terrain/assets/caves/geometry_utils.py +++ b/infinigen/terrain/assets/caves/geometry_utils.py @@ -32,5 +32,5 @@ def pitch_up(current_dir, angle_magnitude): def increment_step(current_dir, amount): mag = np.sqrt(np.sum(np.power(current_dir, 2))) unit_dir = current_dir / mag - output = current_dir + unit_dir*amount + output = current_dir + unit_dir * amount return output if np.sum(np.power(output, 2)) > 0 else current_dir diff --git a/infinigen/terrain/assets/caves/pcfg.py b/infinigen/terrain/assets/caves/pcfg.py index e1d04800e..1518b3f16 100644 --- a/infinigen/terrain/assets/caves/pcfg.py +++ b/infinigen/terrain/assets/caves/pcfg.py @@ -4,19 +4,17 @@ # Authors: Lahav Lipson -import os -from random import random -import numpy as np -from pathlib import Path -from collections import defaultdict import re -import time +from collections import defaultdict from itertools import chain +from pathlib import Path -CONFIG_FILE = Path(__file__).parent/'cfg.txt' +import numpy as np + +CONFIG_FILE = Path(__file__).parent / "cfg.txt" assert CONFIG_FILE.exists(), CONFIG_FILE.resolve() -STARTING_SYMBOL = 'Q' +STARTING_SYMBOL = "Q" def create_pcfg(): @@ -27,38 +25,39 @@ def create_pcfg(): regex = rule.fullmatch(line) if regex is not None and len(regex.groups()) == 3: LHS, prob, RHS = regex.groups() - PCFG[LHS]['a'].append(RHS) - PCFG[LHS]['p'].append(float(prob)) + PCFG[LHS]["a"].append(RHS) + PCFG[LHS]["p"].append(float(prob)) for k, v in PCFG.items(): - assert abs(np.sum(v['p'])-1) < 1e-4, (k, v['p']) + assert abs(np.sum(v["p"]) - 1) < 1e-4, (k, v["p"]) return dict(PCFG) def generate_string(max_len=10000): - PCFG = create_pcfg() + # print(f"PCFG Keys: {' '.join(list(PCFG.keys()))}") - def expand(s): return list(np.random.choice( - **PCFG[s]).split()) if (s in PCFG) else s + def 
expand(s): + return list(np.random.choice(**PCFG[s]).split()) if (s in PCFG) else s - def terminate_expand(s): return ['n'] if (s in PCFG) else s + def terminate_expand(s): + return ["n"] if (s in PCFG) else s symbols = [STARTING_SYMBOL] for steps in range(1000): symbols = list(chain(*map(expand, symbols))) - assert all([(type(e) == str) for e in symbols]) + assert all([isinstance(e, str) for e in symbols]) if not any((s in PCFG for s in symbols)) and len(symbols) < max_len: symbols = [STARTING_SYMBOL] if len(symbols) >= max_len: # print(f"Done making symbols. There are {len(symbols)}") symbols = list(chain(*map(terminate_expand, symbols))) - assert 'P' not in symbols, terminate_expand('P') + assert "P" not in symbols, terminate_expand("P") return symbols raise Exception("Too many steps") -if __name__ == '__main__': +if __name__ == "__main__": print(generate_string()) diff --git a/infinigen/terrain/assets/landtiles/ant_landscape.py b/infinigen/terrain/assets/landtiles/ant_landscape.py index e20f44316..4662950b8 100644 --- a/infinigen/terrain/assets/landtiles/ant_landscape.py +++ b/infinigen/terrain/assets/landtiles/ant_landscape.py @@ -8,13 +8,13 @@ import bpy import cv2 -import numpy as np import gin +import numpy as np +from infinigen.core.util.organization import AssetFile, LandTile from infinigen.terrain.land_process.erosion import run_erosion from infinigen.terrain.land_process.snowfall import run_snowfall -from infinigen.terrain.utils import smooth, random_nat -from infinigen.core.util.organization import AssetFile, LandTile +from infinigen.terrain.utils import random_nat, smooth def create( @@ -23,25 +23,393 @@ def create( subdivision_y, ): def presets(**kwargs): - bpy.ops.mesh.landscape_add(ant_terrain_name="Landscape", land_material="", water_material="", texture_block="", at_cursor=True, smooth_mesh=True, tri_face=False, sphere_mesh=False, subdivision_x=subdivision_x, subdivision_y=subdivision_y, mesh_size=2, mesh_size_x=2, mesh_size_y=2, random_seed=random_nat(), water_plane=False, water_level=0.01, remove_double=False, show_main_settings=True, show_noise_settings=True, show_displace_settings=True, refresh=True, auto_refresh=True, **kwargs) + bpy.ops.mesh.landscape_add( + ant_terrain_name="Landscape", + land_material="", + water_material="", + texture_block="", + at_cursor=True, + smooth_mesh=True, + tri_face=False, + sphere_mesh=False, + subdivision_x=subdivision_x, + subdivision_y=subdivision_y, + mesh_size=2, + mesh_size_x=2, + mesh_size_y=2, + random_seed=random_nat(), + water_plane=False, + water_level=0.01, + remove_double=False, + show_main_settings=True, + show_noise_settings=True, + show_displace_settings=True, + refresh=True, + auto_refresh=True, + **kwargs, + ) if preset_name == LandTile.Canyon: strata = np.random.randint(6, 12) - presets(noise_offset_x=0, noise_offset_y=-0.25, noise_offset_z=0, noise_size_x=1, noise_size_y=1.25, noise_size_z=1, noise_size=1.5, noise_type='marble_noise', basis_type='BLENDER', vl_basis_type='BLENDER', distortion=2, hard_noise='1', noise_depth=12, amplitude=0.5, frequency=2, dimension=1, lacunarity=2, offset=1, gain=1, marble_bias='0', marble_sharp='0', marble_shape='4', height=0.6, height_invert=False, height_offset=0, fx_mixfactor=0, fx_mix_mode='8', fx_type='20', fx_bias='0', fx_turb=0, fx_depth=3, fx_amplitude=0.5, fx_frequency=1.65, fx_size=1.5, fx_loc_x=3, fx_loc_y=2, fx_height=0.25, fx_invert=False, fx_offset=0.05, edge_falloff='2', falloff_x=4, falloff_y=4, edge_level=0.15, maximum=0.5, minimum=-0.2, vert_group="", 
strata=strata, strata_type='2') + presets( + noise_offset_x=0, + noise_offset_y=-0.25, + noise_offset_z=0, + noise_size_x=1, + noise_size_y=1.25, + noise_size_z=1, + noise_size=1.5, + noise_type="marble_noise", + basis_type="BLENDER", + vl_basis_type="BLENDER", + distortion=2, + hard_noise="1", + noise_depth=12, + amplitude=0.5, + frequency=2, + dimension=1, + lacunarity=2, + offset=1, + gain=1, + marble_bias="0", + marble_sharp="0", + marble_shape="4", + height=0.6, + height_invert=False, + height_offset=0, + fx_mixfactor=0, + fx_mix_mode="8", + fx_type="20", + fx_bias="0", + fx_turb=0, + fx_depth=3, + fx_amplitude=0.5, + fx_frequency=1.65, + fx_size=1.5, + fx_loc_x=3, + fx_loc_y=2, + fx_height=0.25, + fx_invert=False, + fx_offset=0.05, + edge_falloff="2", + falloff_x=4, + falloff_y=4, + edge_level=0.15, + maximum=0.5, + minimum=-0.2, + vert_group="", + strata=strata, + strata_type="2", + ) elif preset_name == LandTile.Canyons: strata = np.random.randint(2, 8) - presets(noise_offset_x=0, noise_offset_y=0, noise_offset_z=0, noise_size_x=1, noise_size_y=1, noise_size_z=1, noise_size=0.5, noise_type='hetero_terrain', basis_type='PERLIN_NEW', vl_basis_type='CELLNOISE', distortion=1, hard_noise='0', noise_depth=8, amplitude=0.5, frequency=2, dimension=1.09, lacunarity=1.86, offset=0.77, gain=2, marble_bias='1', marble_sharp='0', marble_shape='7', height=0.5, height_invert=False, height_offset=-0, fx_mixfactor=0, fx_mix_mode='0', fx_type='0', fx_bias='0', fx_turb=0, fx_depth=0, fx_amplitude=0.5, fx_frequency=2, fx_size=1, fx_loc_x=0, fx_loc_y=0, fx_height=0.5, fx_invert=False, fx_offset=0, edge_falloff='3', falloff_x=8, falloff_y=8, edge_level=0, maximum=0.5, minimum=-0.5, vert_group="", strata=strata, strata_type='2') + presets( + noise_offset_x=0, + noise_offset_y=0, + noise_offset_z=0, + noise_size_x=1, + noise_size_y=1, + noise_size_z=1, + noise_size=0.5, + noise_type="hetero_terrain", + basis_type="PERLIN_NEW", + vl_basis_type="CELLNOISE", + distortion=1, + hard_noise="0", + noise_depth=8, + amplitude=0.5, + frequency=2, + dimension=1.09, + lacunarity=1.86, + offset=0.77, + gain=2, + marble_bias="1", + marble_sharp="0", + marble_shape="7", + height=0.5, + height_invert=False, + height_offset=-0, + fx_mixfactor=0, + fx_mix_mode="0", + fx_type="0", + fx_bias="0", + fx_turb=0, + fx_depth=0, + fx_amplitude=0.5, + fx_frequency=2, + fx_size=1, + fx_loc_x=0, + fx_loc_y=0, + fx_height=0.5, + fx_invert=False, + fx_offset=0, + edge_falloff="3", + falloff_x=8, + falloff_y=8, + edge_level=0, + maximum=0.5, + minimum=-0.5, + vert_group="", + strata=strata, + strata_type="2", + ) elif preset_name == LandTile.Cliff: - presets(noise_offset_x=0, noise_offset_y=-0.88, noise_offset_z=3.72529e-09, noise_size_x=2, noise_size_y=2, noise_size_z=1, noise_size=1, noise_type='marble_noise', basis_type='VORONOI_F2F1', vl_basis_type='BLENDER', distortion=0.5, hard_noise='0', noise_depth=7, amplitude=0.5, frequency=2, dimension=1, lacunarity=2, offset=1, gain=1, marble_bias='0', marble_sharp='0', marble_shape='6', height=1.8, height_invert=False, height_offset=-0.15, fx_mixfactor=0, fx_mix_mode='0', fx_type='0', fx_bias='0', fx_turb=0, fx_depth=0, fx_amplitude=0.5, fx_frequency=2, fx_size=1, fx_loc_x=0, fx_loc_y=0, fx_height=0.5, fx_invert=False, fx_offset=0, edge_falloff='0', falloff_x=25, falloff_y=25, edge_level=0, maximum=1.25, minimum=0, vert_group="", strata=11, strata_type='0') + presets( + noise_offset_x=0, + noise_offset_y=-0.88, + noise_offset_z=3.72529e-09, + noise_size_x=2, + noise_size_y=2, + 
noise_size_z=1, + noise_size=1, + noise_type="marble_noise", + basis_type="VORONOI_F2F1", + vl_basis_type="BLENDER", + distortion=0.5, + hard_noise="0", + noise_depth=7, + amplitude=0.5, + frequency=2, + dimension=1, + lacunarity=2, + offset=1, + gain=1, + marble_bias="0", + marble_sharp="0", + marble_shape="6", + height=1.8, + height_invert=False, + height_offset=-0.15, + fx_mixfactor=0, + fx_mix_mode="0", + fx_type="0", + fx_bias="0", + fx_turb=0, + fx_depth=0, + fx_amplitude=0.5, + fx_frequency=2, + fx_size=1, + fx_loc_x=0, + fx_loc_y=0, + fx_height=0.5, + fx_invert=False, + fx_offset=0, + edge_falloff="0", + falloff_x=25, + falloff_y=25, + edge_level=0, + maximum=1.25, + minimum=0, + vert_group="", + strata=11, + strata_type="0", + ) elif preset_name == LandTile.Mesa: noise_size = np.random.uniform(0.5, 1) - presets(noise_offset_x=0, noise_offset_y=0, noise_offset_z=0, noise_size_x=1, noise_size_y=1, noise_size_z=1, noise_size=noise_size, noise_type='shattered_hterrain', basis_type='VORONOI_F1', vl_basis_type='VORONOI_F2F1', distortion=1.15, hard_noise='1', noise_depth=8, amplitude=0.4, frequency=2, dimension=1, lacunarity=2, offset=1, gain=4, marble_bias='0', marble_sharp='0', marble_shape='0', height=0.5, height_invert=False, height_offset=0.2, fx_mixfactor=0, fx_mix_mode='0', fx_type='0', fx_bias='0', fx_turb=0, fx_depth=0, fx_amplitude=0.5, fx_frequency=1.5, fx_size=1, fx_loc_x=0, fx_loc_y=0, fx_height=0.5, fx_invert=False, fx_offset=0, edge_falloff='3', falloff_x=3, falloff_y=3, edge_level=0, maximum=0.25, minimum=0, vert_group="", strata=2.25, strata_type='2') + presets( + noise_offset_x=0, + noise_offset_y=0, + noise_offset_z=0, + noise_size_x=1, + noise_size_y=1, + noise_size_z=1, + noise_size=noise_size, + noise_type="shattered_hterrain", + basis_type="VORONOI_F1", + vl_basis_type="VORONOI_F2F1", + distortion=1.15, + hard_noise="1", + noise_depth=8, + amplitude=0.4, + frequency=2, + dimension=1, + lacunarity=2, + offset=1, + gain=4, + marble_bias="0", + marble_sharp="0", + marble_shape="0", + height=0.5, + height_invert=False, + height_offset=0.2, + fx_mixfactor=0, + fx_mix_mode="0", + fx_type="0", + fx_bias="0", + fx_turb=0, + fx_depth=0, + fx_amplitude=0.5, + fx_frequency=1.5, + fx_size=1, + fx_loc_x=0, + fx_loc_y=0, + fx_height=0.5, + fx_invert=False, + fx_offset=0, + edge_falloff="3", + falloff_x=3, + falloff_y=3, + edge_level=0, + maximum=0.25, + minimum=0, + vert_group="", + strata=2.25, + strata_type="2", + ) elif preset_name == LandTile.River: - presets(noise_offset_x=0, noise_offset_y=0, noise_offset_z=0, noise_size_x=1, noise_size_y=1, noise_size_z=1, noise_size=1, noise_type='marble_noise', basis_type='BLENDER', vl_basis_type='BLENDER', distortion=1, hard_noise='0', noise_depth=8, amplitude=0.5, frequency=2, dimension=1, lacunarity=2, offset=1, gain=1, marble_bias='2', marble_sharp='0', marble_shape='7', height=0.2, height_invert=False, height_offset=0, fx_mixfactor=0, fx_mix_mode='0', fx_type='0', fx_bias='0', fx_turb=0, fx_depth=0, fx_amplitude=0.5, fx_frequency=1.5, fx_size=1, fx_loc_x=0, fx_loc_y=0, fx_height=0.5, fx_invert=False, fx_offset=0, edge_falloff='0', falloff_x=40, falloff_y=40, edge_level=0, maximum=0.5, minimum=0, vert_group="", strata=1.25, strata_type='1') + presets( + noise_offset_x=0, + noise_offset_y=0, + noise_offset_z=0, + noise_size_x=1, + noise_size_y=1, + noise_size_z=1, + noise_size=1, + noise_type="marble_noise", + basis_type="BLENDER", + vl_basis_type="BLENDER", + distortion=1, + hard_noise="0", + noise_depth=8, + amplitude=0.5, + 
frequency=2, + dimension=1, + lacunarity=2, + offset=1, + gain=1, + marble_bias="2", + marble_sharp="0", + marble_shape="7", + height=0.2, + height_invert=False, + height_offset=0, + fx_mixfactor=0, + fx_mix_mode="0", + fx_type="0", + fx_bias="0", + fx_turb=0, + fx_depth=0, + fx_amplitude=0.5, + fx_frequency=1.5, + fx_size=1, + fx_loc_x=0, + fx_loc_y=0, + fx_height=0.5, + fx_invert=False, + fx_offset=0, + edge_falloff="0", + falloff_x=40, + falloff_y=40, + edge_level=0, + maximum=0.5, + minimum=0, + vert_group="", + strata=1.25, + strata_type="1", + ) elif preset_name == LandTile.Volcano: - presets(noise_offset_x=0, noise_offset_y=0, noise_offset_z=0, noise_size_x=1, noise_size_y=1, noise_size_z=1, noise_size=1, noise_type='marble_noise', basis_type='BLENDER', vl_basis_type='PERLIN_ORIGINAL', distortion=1.5, hard_noise='0', noise_depth=8, amplitude=0.5, frequency=1.8, dimension=1, lacunarity=2, offset=1, gain=2, marble_bias='2', marble_sharp='3', marble_shape='1', height=0.6, height_invert=False, height_offset=0, fx_mixfactor=0, fx_mix_mode='1', fx_type='14', fx_bias='0', fx_turb=0.5, fx_depth=2, fx_amplitude=0.38, fx_frequency=1.5, fx_size=1.15, fx_loc_x=-1, fx_loc_y=1, fx_height=0.5, fx_invert=False, fx_offset=0.06, edge_falloff='3', falloff_x=2, falloff_y=2, edge_level=0, maximum=1, minimum=-1, vert_group="", strata=5, strata_type='0') + presets( + noise_offset_x=0, + noise_offset_y=0, + noise_offset_z=0, + noise_size_x=1, + noise_size_y=1, + noise_size_z=1, + noise_size=1, + noise_type="marble_noise", + basis_type="BLENDER", + vl_basis_type="PERLIN_ORIGINAL", + distortion=1.5, + hard_noise="0", + noise_depth=8, + amplitude=0.5, + frequency=1.8, + dimension=1, + lacunarity=2, + offset=1, + gain=2, + marble_bias="2", + marble_sharp="3", + marble_shape="1", + height=0.6, + height_invert=False, + height_offset=0, + fx_mixfactor=0, + fx_mix_mode="1", + fx_type="14", + fx_bias="0", + fx_turb=0.5, + fx_depth=2, + fx_amplitude=0.38, + fx_frequency=1.5, + fx_size=1.15, + fx_loc_x=-1, + fx_loc_y=1, + fx_height=0.5, + fx_invert=False, + fx_offset=0.06, + edge_falloff="3", + falloff_x=2, + falloff_y=2, + edge_level=0, + maximum=1, + minimum=-1, + vert_group="", + strata=5, + strata_type="0", + ) elif preset_name == LandTile.Mountain: - presets(noise_offset_x=0, noise_offset_y=0, noise_offset_z=0, noise_size_x=1, noise_size_y=1, noise_size_z=1, noise_size=1, noise_type='hetero_terrain', basis_type='BLENDER', vl_basis_type='BLENDER', distortion=1, hard_noise='0', noise_depth=8, amplitude=0.5, frequency=2, dimension=1, lacunarity=2, offset=1, gain=1, marble_bias='0', marble_sharp='0', marble_shape='0', height=0.5, height_invert=False, height_offset=0, fx_mixfactor=0, fx_mix_mode='0', fx_type='0', fx_bias='0', fx_turb=0, fx_depth=0, fx_amplitude=0.5, fx_frequency=2, fx_size=1, fx_loc_x=0, fx_loc_y=0, fx_height=1, fx_invert=False, fx_offset=0, edge_falloff='3', falloff_x=4, falloff_y=4, edge_level=0, maximum=1, minimum=-1, vert_group="", strata=5, strata_type='0') + presets( + noise_offset_x=0, + noise_offset_y=0, + noise_offset_z=0, + noise_size_x=1, + noise_size_y=1, + noise_size_z=1, + noise_size=1, + noise_type="hetero_terrain", + basis_type="BLENDER", + vl_basis_type="BLENDER", + distortion=1, + hard_noise="0", + noise_depth=8, + amplitude=0.5, + frequency=2, + dimension=1, + lacunarity=2, + offset=1, + gain=1, + marble_bias="0", + marble_sharp="0", + marble_shape="0", + height=0.5, + height_invert=False, + height_offset=0, + fx_mixfactor=0, + fx_mix_mode="0", + fx_type="0", + fx_bias="0", + 
fx_turb=0, + fx_depth=0, + fx_amplitude=0.5, + fx_frequency=2, + fx_size=1, + fx_loc_x=0, + fx_loc_y=0, + fx_height=1, + fx_invert=False, + fx_offset=0, + edge_falloff="3", + falloff_x=4, + falloff_y=4, + edge_level=0, + maximum=1, + minimum=-1, + vert_group="", + strata=5, + strata_type="0", + ) + @gin.configurable def ant_landscape_asset( @@ -57,16 +425,22 @@ def ant_landscape_asset( create(preset_name, N, N) obj = bpy.context.active_object N = int(len(obj.data.vertices) ** 0.5) - mverts_co = np.zeros((len(obj.data.vertices)*3), dtype=float) + mverts_co = np.zeros((len(obj.data.vertices) * 3), dtype=float) obj.data.vertices.foreach_get("co", mverts_co) mverts_co = mverts_co.reshape((N, N, 3)) - heightmap = cv2.resize(np.float32(mverts_co[..., -1]), (resolution, resolution)) * tile_size / 2 + heightmap = ( + cv2.resize(np.float32(mverts_co[..., -1]), (resolution, resolution)) + * tile_size + / 2 + ) if preset_name == LandTile.Mesa: heightmap *= 2 heightmap = smooth(heightmap, 3) - cv2.imwrite(str(folder/f'{AssetFile.Heightmap}.exr'), heightmap) + cv2.imwrite(str(folder / f"{AssetFile.Heightmap}.exr"), heightmap) bpy.data.objects.remove(obj, do_unlink=True) - with open(folder/f'{AssetFile.TileSize}.txt', "w") as f: + with open(folder / f"{AssetFile.TileSize}.txt", "w") as f: f.write(f"{tile_size}\n") - if erosion: run_erosion(folder) - if snowfall: run_snowfall(folder) + if erosion: + run_erosion(folder) + if snowfall: + run_snowfall(folder) diff --git a/infinigen/terrain/assets/landtiles/core.py b/infinigen/terrain/assets/landtiles/core.py index 6f1c265c6..4a2f91768 100644 --- a/infinigen/terrain/assets/landtiles/core.py +++ b/infinigen/terrain/assets/landtiles/core.py @@ -6,14 +6,19 @@ import json -import cv2 import gin import numpy as np -from infinigen.terrain.utils import boundary_smooth, read, smooth + from infinigen.core.util.organization import AssetFile, LandTile, Process +from infinigen.terrain.utils import boundary_smooth, read, smooth from .ant_landscape import ant_landscape_asset -from .custom import coast_asset, multi_mountains_asset, coast_params, multi_mountains_params +from .custom import ( + coast_asset, + coast_params, + multi_mountains_asset, + multi_mountains_params, +) @gin.configurable @@ -79,51 +84,67 @@ def tile_directions( def assets_to_data( - folder, land_process, + folder, + land_process, N=2048, do_smooth=False, ): preset_name = str(folder).split("/")[-2] data = {} - if land_process is None: path = folder/f"{AssetFile.Heightmap}.exr" - elif land_process == Process.Snowfall: path = folder/f"{Process.Snowfall}.{AssetFile.Heightmap}.exr" - elif land_process == Process.Erosion: path = folder/f"{Process.Erosion}.{AssetFile.Heightmap}.exr" + if land_process is None: + path = folder / f"{AssetFile.Heightmap}.exr" + elif land_process == Process.Snowfall: + path = folder / f"{Process.Snowfall}.{AssetFile.Heightmap}.exr" + elif land_process == Process.Erosion: + path = folder / f"{Process.Erosion}.{AssetFile.Heightmap}.exr" heightmap = read(path) - assert(heightmap.shape[0] == N) - if do_smooth: heightmap = smooth(heightmap, 3) + assert heightmap.shape[0] == N + if do_smooth: + heightmap = smooth(heightmap, 3) if land_process is None: mask = np.zeros(N * N) else: - if land_process == Process.Snowfall: path = folder/f"{Process.Snowfall}.{AssetFile.Mask}.exr" - elif land_process == Process.Erosion: path = folder/f"{Process.Erosion}.{AssetFile.Mask}.exr" + if land_process == Process.Snowfall: + path = folder / f"{Process.Snowfall}.{AssetFile.Mask}.exr" + elif land_process 
== Process.Erosion: + path = folder / f"{Process.Erosion}.{AssetFile.Mask}.exr" mask = read(path) mask = mask.reshape(-1) data["mask"] = mask - + # compute direction of directional tiles (must be done before smoothing it) direction = tile_directions()[preset_name] if direction == "dependent": data["direction"] = np.arctan2( np.mean(heightmap[:, -1] - heightmap[:, 0]), - np.mean(heightmap[-1] - heightmap[0]) + np.mean(heightmap[-1] - heightmap[0]), ).reshape(-1) elif direction == "initial": data["direction"] = np.array([0.0]) - - if direction != "dependent": heightmap = boundary_smooth(heightmap) + + if direction != "dependent": + heightmap = boundary_smooth(heightmap) data["heightmap"] = heightmap.reshape(-1) - L = float(np.loadtxt(folder/f"{AssetFile.TileSize}.txt")) - if preset_name == LandTile.MultiMountains and (folder/f"{AssetFile.Params}.txt").exists(): - with open(folder/f"{AssetFile.Params}.txt", "r") as file: + L = float(np.loadtxt(folder / f"{AssetFile.TileSize}.txt")) + if ( + preset_name == LandTile.MultiMountains + and (folder / f"{AssetFile.Params}.txt").exists() + ): + with open(folder / f"{AssetFile.Params}.txt", "r") as file: params = json.load(file) - assert params == multi_mountains_params(raw=1), "asset should not be reused if you changed settings" - if preset_name == LandTile.Coast and (folder/f"{AssetFile.Params}.txt").exists(): - with open(folder/f"{AssetFile.Params}.txt", "r") as file: + assert params == multi_mountains_params( + raw=1 + ), "asset should not be reused if you changed settings" + if preset_name == LandTile.Coast and (folder / f"{AssetFile.Params}.txt").exists(): + with open(folder / f"{AssetFile.Params}.txt", "r") as file: params = json.load(file) - assert params == {"multi_mountains_params": multi_mountains_params(raw=1), "coast_params": coast_params(raw=1)}, "asset should not be reused if you changed settings" - + assert params == { + "multi_mountains_params": multi_mountains_params(raw=1), + "coast_params": coast_params(raw=1), + }, "asset should not be reused if you changed settings" + return L, N, data diff --git a/infinigen/terrain/assets/landtiles/custom.py b/infinigen/terrain/assets/landtiles/custom.py index baab0f92c..8037f3915 100644 --- a/infinigen/terrain/assets/landtiles/custom.py +++ b/infinigen/terrain/assets/landtiles/custom.py @@ -11,18 +11,18 @@ import gin import numpy as np +from infinigen.core.util.organization import AssetFile +from infinigen.core.util.random import random_general as rg from infinigen.terrain.elements.core import Element from infinigen.terrain.elements.mountains import Mountains from infinigen.terrain.land_process.erosion import run_erosion from infinigen.terrain.land_process.snowfall import run_snowfall from infinigen.terrain.utils import grid_distance, perlin_noise, random_int -from infinigen.core.util.organization import AssetFile -from infinigen.core.util.random import random_general as rg - coast_params_ = {} multi_mountains_params_ = {} + @gin.configurable def coast_params( coast_freq=("uniform", 0.00005, 0.00015), @@ -34,11 +34,11 @@ def coast_params( ): """_summary_ - coast_freq: base frequency of coast line - beach_size: size of beach - beach_slope: slope of beach - steep_slope_size: size of the steep part between beach and sea floor - sea_depth: sea depth + coast_freq: base frequency of coast line + beach_size: size of beach + beach_slope: slope of beach + steep_slope_size: size of the steep part between beach and sea floor + sea_depth: sea depth """ if raw: d = { @@ -59,7 +59,7 @@ def coast_params( 
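The `coast_params` defaults above use the `("uniform", lo, hi)` tuple convention that `random_general` (imported as `rg`) resolves at sampling time; other hunks in this patch also show a `("choice", values, weights)` form. The resolver below is a hypothetical stand-in written purely for illustration; the real helper lives in `infinigen.core.util.random` and may support more forms and differ in details.

```python
import numpy as np

def resolve_random_spec(spec):
    """Hypothetical stand-in for random_general: plain values pass through,
    tuples are interpreted as distribution specs."""
    if not isinstance(spec, tuple):
        return spec
    kind, *args = spec
    if kind == "uniform":
        lo, hi = args
        return np.random.uniform(lo, hi)
    if kind == "choice":
        values, weights = args
        weights = np.asarray(weights, dtype=float)
        return values[np.random.choice(len(values), p=weights / weights.sum())]
    raise ValueError(f"unsupported spec: {spec!r}")

print(resolve_random_spec(("uniform", 0.00005, 0.00015)))   # e.g. 9.3e-05
print(resolve_random_spec(0.01))                            # 0.01, unchanged
```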
"sea_depth": rg(sea_depth), } return coast_params_ - + @gin.configurable def multi_mountains_params( @@ -73,12 +73,12 @@ def multi_mountains_params( ): """_summary_ - min_freq: min base frequency of all mountains - max_freq: max base frequency of all mountains - height: mountain height - coverage: mountain coverage - slope_freq: base frequency of the slope the mountains sit on - slope_height: height of such slope + min_freq: min base frequency of all mountains + max_freq: max base frequency of all mountains + height: mountain height + coverage: mountain coverage + slope_freq: base frequency of the slope the mountains sit on + slope_height: height of such slope """ if raw: d = { @@ -111,14 +111,19 @@ def coast_heightmapping(heightmap): steep_slope_size = params["steep_slope_size"] sea_depth = params["sea_depth"] seafloor_loc = beach_size / 2 + steep_slope_size - mapped[(heightmap > -beach_size/2) & (heightmap < beach_size/2)] *= beach_slope - mapped[heightmap > beach_size/2] = beach_size/2 * beach_slope - steep_slope = (sea_depth - beach_size/2 * beach_slope) / (seafloor_loc - beach_size/2) - steep_mask = (heightmap < -beach_size/2) & (heightmap > -seafloor_loc) - mapped[steep_mask] = (-beach_size/2 * beach_slope - (-beach_size/2 - heightmap) * steep_slope)[steep_mask] + mapped[(heightmap > -beach_size / 2) & (heightmap < beach_size / 2)] *= beach_slope + mapped[heightmap > beach_size / 2] = beach_size / 2 * beach_slope + steep_slope = (sea_depth - beach_size / 2 * beach_slope) / ( + seafloor_loc - beach_size / 2 + ) + steep_mask = (heightmap < -beach_size / 2) & (heightmap > -seafloor_loc) + mapped[steep_mask] = ( + -beach_size / 2 * beach_slope - (-beach_size / 2 - heightmap) * steep_slope + )[steep_mask] mapped[heightmap < -seafloor_loc] = -sea_depth return mapped + @gin.configurable def multi_mountains_asset( folder, @@ -145,14 +150,17 @@ def multi_mountains_asset( heightmap = mountains.get_heightmap(X, Y) mountains.cleanup() Element.called_time.pop("mountains") - cv2.imwrite(str(folder / f'{AssetFile.Heightmap}.exr'), heightmap) - with open(folder/f'{AssetFile.TileSize}.txt', "w") as f: + cv2.imwrite(str(folder / f"{AssetFile.Heightmap}.exr"), heightmap) + with open(folder / f"{AssetFile.TileSize}.txt", "w") as f: f.write(f"{tile_size}\n") - with open(folder/f'{AssetFile.Params}.txt', "w") as f: + with open(folder / f"{AssetFile.Params}.txt", "w") as f: json.dump(multi_mountains_params(raw=1), f) - if erosion: run_erosion(folder) - if snowfall: run_snowfall(folder) - + if erosion: + run_erosion(folder) + if snowfall: + run_snowfall(folder) + + @gin.configurable def coast_asset( folder, @@ -181,22 +189,45 @@ def coast_asset( Element.called_time.pop("mountains") params2 = coast_params() - positions = np.stack((X.reshape(-1), Y.reshape(-1), np.zeros(resolution * resolution)), -1) - coast_mask = perlin_noise( - device=device, - positions=positions, - seed=random_int(), - freq=params2["coast_freq"], - octaves=9, - ).reshape((resolution, resolution)) > 0 - coast_distance = (grid_distance(~coast_mask, downsample=512) - grid_distance(coast_mask, downsample=512)) * tile_size - mask = np.clip((coast_distance - 0.2 * params2["beach_size"]) / (0.4 * params2["beach_size"]), a_min=0, a_max=1) + positions = np.stack( + (X.reshape(-1), Y.reshape(-1), np.zeros(resolution * resolution)), -1 + ) + coast_mask = ( + perlin_noise( + device=device, + positions=positions, + seed=random_int(), + freq=params2["coast_freq"], + octaves=9, + ).reshape((resolution, resolution)) + > 0 + ) + coast_distance = ( + 
grid_distance(~coast_mask, downsample=512) + - grid_distance(coast_mask, downsample=512) + ) * tile_size + mask = np.clip( + (coast_distance - 0.2 * params2["beach_size"]) / (0.4 * params2["beach_size"]), + a_min=0, + a_max=1, + ) coast_heightmap = coast_heightmapping(coast_distance) heightmap = (coast_heightmap + heightmap * mask).astype(np.float32) - cv2.imwrite(str(folder / f'{AssetFile.Heightmap}.exr'), heightmap) - with open(folder/f'{AssetFile.TileSize}.txt', "w") as f: + cv2.imwrite(str(folder / f"{AssetFile.Heightmap}.exr"), heightmap) + with open(folder / f"{AssetFile.TileSize}.txt", "w") as f: f.write(f"{tile_size}\n") - with open(folder/f'{AssetFile.Params}.txt', "w") as f: - json.dump({"multi_mountains_params": multi_mountains_params(raw=1), "coast_params": coast_params(raw=1)}, f) - if erosion: run_erosion(folder, mask_height_range=(0, 0.1 * params2["beach_size"] * params2["beach_slope"])) - if snowfall: run_snowfall(folder) + with open(folder / f"{AssetFile.Params}.txt", "w") as f: + json.dump( + { + "multi_mountains_params": multi_mountains_params(raw=1), + "coast_params": coast_params(raw=1), + }, + f, + ) + if erosion: + run_erosion( + folder, + mask_height_range=(0, 0.1 * params2["beach_size"] * params2["beach_slope"]), + ) + if snowfall: + run_snowfall(folder) diff --git a/infinigen/terrain/assets/ocean.py b/infinigen/terrain/assets/ocean.py index f669e9ff3..b5adabfa8 100644 --- a/infinigen/terrain/assets/ocean.py +++ b/infinigen/terrain/assets/ocean.py @@ -4,22 +4,23 @@ # Authors: Zeyu Ma +import os +import shutil from pathlib import Path import bpy import gin -import os -import shutil -from infinigen.terrain.utils import random_int + from infinigen.core.util.blender import ViewportMode from infinigen.core.util.logging import Timer from infinigen.core.util.random import random_general as rg - +from infinigen.terrain.utils import random_int spatial_size = 40 resolution = 64 buffered_frames = 10 + @gin.configurable def ocean_asset( folder, @@ -30,16 +31,19 @@ def ocean_asset( choppiness=("uniform", 0.5, 1), wave_alignment=("uniform", 0, 0.1), verbose=0, - spectrum="PHILLIPS", #("choice", ["PHILLIPS", "PIERSON_MOSKOWITZ", "JONSWAP", "TEXEL_MARSEN_ARSLOE"], [0.5, 0.5/3, 0.5/3, 0.5/3]), + spectrum="PHILLIPS", # ("choice", ["PHILLIPS", "PIERSON_MOSKOWITZ", "JONSWAP", "TEXEL_MARSEN_ARSLOE"], [0.5, 0.5/3, 0.5/3, 0.5/3]), bake_foam_fade=0.8, link_folder=None, ): tmp_start, tmp_end = bpy.context.scene.frame_start, bpy.context.scene.frame_end - bpy.context.scene.frame_start, bpy.context.scene.frame_end = frame_start, frame_end + buffered_frames + bpy.context.scene.frame_start, bpy.context.scene.frame_end = ( + frame_start, + frame_end + buffered_frames, + ) spectrum = rg(spectrum) - params={ + params = { "random_seed": max(0, random_int()), - "geometry_mode": 'DISPLACE', + "geometry_mode": "DISPLACE", "spatial_size": spatial_size, "wave_scale": wave_scale if spectrum == "PHILLIPS" else 0.5, "resolution": resolution, @@ -60,8 +64,10 @@ def ocean_asset( obj = bpy.context.active_object obj.name = "ocean" with ViewportMode(obj, "EDIT"): - bpy.ops.mesh.select_all(action='SELECT') - bpy.ops.mesh.quads_convert_to_tris(quad_method='BEAUTY', ngon_method='BEAUTY') + bpy.ops.mesh.select_all(action="SELECT") + bpy.ops.mesh.quads_convert_to_tris( + quad_method="BEAUTY", ngon_method="BEAUTY" + ) bpy.ops.mesh.subdivide(number_cuts=256) bpy.ops.mesh.subdivide(number_cuts=16) mod = obj.modifiers.new(name="ocean", type="OCEAN") @@ -73,16 +79,20 @@ def ocean_asset( shutil.rmtree(folder / "cache") 
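The `coast_asset` hunk above builds a signed distance to the coastline (positive inland, via `grid_distance` on the mask and its complement), converts it to height with the piecewise `coast_heightmapping` defined earlier in the same file, and fades the mountain heightmap in away from the beach. The sketch below restates both steps standalone: scipy's `distance_transform_edt` stands in for Infinigen's `grid_distance`, the `cell_size` scaling is an assumption, and boundary comparisons are made inclusive so the discrete example stays continuous.

```python
import numpy as np
from scipy.ndimage import distance_transform_edt

def coast_height(distance, beach_size, beach_slope, steep_slope_size, sea_depth):
    """Piecewise map of signed coast distance (positive = inland) to height."""
    mapped = distance.copy()
    half = beach_size / 2
    seafloor_loc = half + steep_slope_size
    mapped[(distance >= -half) & (distance < half)] *= beach_slope          # beach ramp
    mapped[distance >= half] = half * beach_slope                           # inland plateau
    steep_slope = (sea_depth - half * beach_slope) / (seafloor_loc - half)
    steep = (distance < -half) & (distance > -seafloor_loc)
    mapped[steep] = -half * beach_slope - (-half - distance[steep]) * steep_slope
    mapped[distance <= -seafloor_loc] = -sea_depth                          # flat sea floor
    return mapped

def blend_coast(land_mask, mountain_height, beach_size, beach_slope,
                steep_slope_size, sea_depth, cell_size=1.0):
    signed = (distance_transform_edt(land_mask)
              - distance_transform_edt(~land_mask)) * cell_size
    fade = np.clip((signed - 0.2 * beach_size) / (0.4 * beach_size), 0.0, 1.0)
    return coast_height(signed, beach_size, beach_slope,
                        steep_slope_size, sea_depth) + mountain_height * fade

land = np.zeros((64, 64), dtype=bool)
land[:, 32:] = True                                   # right half is land
out = blend_coast(land, np.full((64, 64), 3.0), beach_size=8.0,
                  beach_slope=0.1, steep_slope_size=6.0, sea_depth=2.0)
print(out.shape, float(out.min()), float(out.max()))
```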
(folder / "cache").mkdir(parents=True) mod.filepath = str(folder / "cache") - for t, f in [(time_scale * frame_start, frame_start), (time_scale * (frame_end + buffered_frames), frame_end + buffered_frames)]: + for t, f in [ + (time_scale * frame_start, frame_start), + (time_scale * (frame_end + buffered_frames), frame_end + buffered_frames), + ]: mod.time = t mod.keyframe_insert("time", frame=f) - obj.animation_data.action.fcurves[0].keyframe_points[0].interpolation = 'LINEAR' - obj.animation_data.action.fcurves[0].keyframe_points[1].interpolation = 'LINEAR' + obj.animation_data.action.fcurves[0].keyframe_points[0].interpolation = "LINEAR" + obj.animation_data.action.fcurves[0].keyframe_points[1].interpolation = "LINEAR" with Timer("bake ocean", disable_timer=not verbose): bpy.ops.object.ocean_bake(modifier="ocean") while True: - if (folder / f"cache/foam_{frame_end + buffered_frames:04d}.exr").exists(): break - + if (folder / f"cache/foam_{frame_end + buffered_frames:04d}.exr").exists(): + break + bpy.data.objects.remove(obj, do_unlink=True) bpy.context.scene.frame_start, bpy.context.scene.frame_end = tmp_start, tmp_end @@ -93,4 +103,4 @@ def ocean_asset( os.symlink(folder / f"cache/disp_{i + buffered_frames:04d}.exr", dst) dst = link_folder / f"cache/foam_{i:04d}.exr" if not dst.exists(): - os.symlink(folder / f"cache/foam_{i + buffered_frames:04d}.exr", dst) \ No newline at end of file + os.symlink(folder / f"cache/foam_{i + buffered_frames:04d}.exr", dst) diff --git a/infinigen/terrain/assets/upsidedown_mountains.py b/infinigen/terrain/assets/upsidedown_mountains.py index 09e359c50..c30c7bb49 100644 --- a/infinigen/terrain/assets/upsidedown_mountains.py +++ b/infinigen/terrain/assets/upsidedown_mountains.py @@ -9,16 +9,25 @@ import cv2 import gin import numpy as np -from landlab import RasterModelGrid -from landlab.components import FlowDirectorSteepest, TransportLengthHillslopeDiffuser from numpy import ascontiguousarray as AC from skimage.measure import label -from infinigen.terrain.elements.core import Element -from infinigen.terrain.elements.mountains import Mountains -from infinigen.terrain.utils import read from tqdm import tqdm + +try: + import landlab + from landlab import RasterModelGrid + from landlab.components import ( + FlowDirectorSteepest, + TransportLengthHillslopeDiffuser, + ) +except ImportError: + landlab = None + from infinigen.core.util.organization import AssetFile from infinigen.core.util.random import random_general as rg +from infinigen.terrain.elements.core import Element +from infinigen.terrain.elements.mountains import Mountains +from infinigen.terrain.utils import read @gin.configurable @@ -34,13 +43,20 @@ def upsidedown_mountains_asset( verbose=0, ): """_summary_ - min_freq: min base frequency of all upsidedown mountains - max_freq: max base frequency of all upsidedown mountains - height: upsidedown mountain height - coverage: upsidedown mountain coverage - tile_size: size of the upsidedown mountain tile + min_freq: min base frequency of all upsidedown mountains + max_freq: max base frequency of all upsidedown mountains + height: upsidedown mountain height + coverage: upsidedown mountain coverage + tile_size: size of the upsidedown mountain tile """ + + if landlab is None: + raise ImportError( + "landlab must be installed to use terrain mountain simulation " + "Please install optional terrain dependencies via `pip install .[terrain]`" + ) + Path(folder).mkdir(parents=True, exist_ok=True) N = resolution x = np.linspace(-tile_size / 2, tile_size / 2, N) @@ 
-68,19 +84,21 @@ def upsidedown_mountains_asset( ) heightmap = mountains1.get_heightmap(X, Y) x, y = np.meshgrid(np.linspace(-1, 1, N), np.linspace(-1, 1, N), indexing="ij") - radius = (x ** 2 + y ** 2) ** 0.5 + radius = (x**2 + y**2) ** 0.5 heightmap *= 1 - np.clip((radius - 0.8) * 5, a_min=0, a_max=1) mg = RasterModelGrid((N, N)) mg.set_closed_boundaries_at_grid_edges(False, False, False, False) _ = mg.add_field("topographic__elevation", heightmap.astype(float), at="node") fdir = FlowDirectorSteepest(mg) tl_diff = TransportLengthHillslopeDiffuser(mg, erodibility=0.001, slope_crit=0.6) - if verbose: range_t = tqdm(range(150)) - else: range_t = range(150) + if verbose: + range_t = tqdm(range(150)) + else: + range_t = range(150) for t in range_t: fdir.run_one_step() - tl_diff.run_one_step(1.) - res = mg.at_node['topographic__elevation'] + tl_diff.run_one_step(1.0) + res = mg.at_node["topographic__elevation"] heightmap = res.reshape((N, N)) - 2 peak = np.zeros((N, N)) mask = (heightmap > 0).astype(np.uint8) @@ -102,18 +120,20 @@ def upsidedown_mountains_asset( _ = mg.add_field("topographic__elevation", upside.astype(float), at="node") fdir = FlowDirectorSteepest(mg) tl_diff = TransportLengthHillslopeDiffuser(mg, erodibility=0.001, slope_crit=0.6) - if verbose: range_t = tqdm(range(150)) - else: range_t = range(150) + if verbose: + range_t = tqdm(range(150)) + else: + range_t = range(150) for t in range_t: fdir.run_one_step() - tl_diff.run_one_step(1.) - res = mg.at_node['topographic__elevation'] + tl_diff.run_one_step(1.0) + res = mg.at_node["topographic__elevation"] upside = res.reshape((N, N)) - - cv2.imwrite(str(folder/'upside.exr'), upside.astype(np.float32)) - cv2.imwrite(str(folder/'peak.exr'), peak.astype(np.float32)) - cv2.imwrite(str(folder/'downside.exr'), downside.astype(np.float32)) - with open(folder/f'{AssetFile.TileSize}.txt', "w") as f: + + cv2.imwrite(str(folder / "upside.exr"), upside.astype(np.float32)) + cv2.imwrite(str(folder / "peak.exr"), peak.astype(np.float32)) + cv2.imwrite(str(folder / "downside.exr"), downside.astype(np.float32)) + with open(folder / f"{AssetFile.TileSize}.txt", "w") as f: f.write(f"{tile_size}\n") mountains1.cleanup() @@ -122,13 +142,12 @@ def upsidedown_mountains_asset( (folder / AssetFile.Finish).touch() - def assets_to_data(folder): data = {} - upside = read(str(folder/'upside.exr')) + upside = read(str(folder / "upside.exr")) N = upside.shape[0] data["upside"] = AC(upside.reshape(-1)) - data["downside"] = AC(read(str(folder/'downside.exr')).reshape(-1)) - data["peak"] = AC(read(str(folder/'peak.exr')).reshape(-1)) + data["downside"] = AC(read(str(folder / "downside.exr")).reshape(-1)) + data["peak"] = AC(read(str(folder / "peak.exr")).reshape(-1)) L = float(np.loadtxt(f"{folder}/{AssetFile.TileSize}.txt")) return L, N, data diff --git a/infinigen/terrain/core.py b/infinigen/terrain/core.py index 91eb71e14..b9fc77bc6 100644 --- a/infinigen/terrain/core.py +++ b/infinigen/terrain/core.py @@ -4,27 +4,50 @@ # Authors: Zeyu Ma +import logging import os from pathlib import Path -import logging import bpy import gin import numpy as np from mathutils.bvhtree import BVHTree +from numpy import ascontiguousarray as AC -from infinigen.OcMesher.ocmesher import OcMesher as UntexturedOcMesher -from infinigen.terrain.mesher import OpaqueSphericalMesher, TransparentSphericalMesher, UniformMesher -from infinigen.terrain.scene import scene, transfer_scene_info -from infinigen.terrain.surface_kernel.core import SurfaceKernel -from infinigen.terrain.utils import 
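`upsidedown_mountains_asset` runs the same landlab loop twice (once on the heightmap, once on the flipped "upside"): steepest-descent flow routing followed by transport-length hillslope diffusion for 150 steps. The helper below condenses that loop; it requires the optional landlab dependency from the `[terrain]` extra, and the `erodibility` / `slope_crit` values are simply copied from the hunk above.

```python
import numpy as np
from landlab import RasterModelGrid
from landlab.components import FlowDirectorSteepest, TransportLengthHillslopeDiffuser

def hillslope_smooth(heightmap, steps=150, erodibility=0.001, slope_crit=0.6):
    """Run flow routing + transport-length hillslope diffusion on a square heightmap."""
    n = heightmap.shape[0]
    mg = RasterModelGrid((n, n))
    mg.set_closed_boundaries_at_grid_edges(False, False, False, False)
    mg.add_field("topographic__elevation", heightmap.astype(float).ravel(), at="node")
    fdir = FlowDirectorSteepest(mg)
    diffuser = TransportLengthHillslopeDiffuser(mg, erodibility=erodibility,
                                                slope_crit=slope_crit)
    for _ in range(steps):
        fdir.run_one_step()
        diffuser.run_one_step(1.0)
    return mg.at_node["topographic__elevation"].reshape((n, n))

smoothed = hillslope_smooth(np.random.rand(64, 64) * 5, steps=10)
print(smoothed.shape)  # (64, 64)
```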
Mesh, move_modifier, Vars, AttributeType, FieldsType, get_caminfo, write_attributes -from infinigen.terrain.assets.ocean import ocean_asset +from infinigen.core.tagging import tag_object, tag_system from infinigen.core.util.blender import SelectObjects, delete from infinigen.core.util.logging import Timer from infinigen.core.util.math import FixedSeed, int_hash -from infinigen.core.util.organization import SurfaceTypes, Attributes, Task, TerrainNames, ElementNames, Transparency, Materials, Assets, ElementTag, Tags, SelectionCriterions -from infinigen.core.tagging import tag_object, tag_system -from numpy import ascontiguousarray as AC +from infinigen.core.util.organization import ( + Assets, + Attributes, + ElementNames, + ElementTag, + Materials, + SelectionCriterions, + SurfaceTypes, + Tags, + TerrainNames, + Transparency, +) +from infinigen.OcMesher.ocmesher import OcMesher as UntexturedOcMesher +from infinigen.terrain.assets.ocean import ocean_asset +from infinigen.terrain.mesher import ( + OpaqueSphericalMesher, + TransparentSphericalMesher, + UniformMesher, +) +from infinigen.terrain.scene import scene, transfer_scene_info +from infinigen.terrain.surface_kernel.core import SurfaceKernel +from infinigen.terrain.utils import ( + AttributeType, + FieldsType, + Mesh, + Vars, + get_caminfo, + move_modifier, + write_attributes, +) logger = logging.getLogger(__name__) @@ -32,6 +55,7 @@ hidden_in_viewport = [ElementNames.Atmosphere] ASSET_ENV_VAR = "INFINIGEN_ASSET_FOLDER" + @gin.configurable def get_surface_type(surface, degrade_sdf_to_displacement=True): if not degrade_sdf_to_displacement: @@ -45,7 +69,7 @@ def get_surface_type(surface, degrade_sdf_to_displacement=True): class OcMesher(UntexturedOcMesher): def __init__(self, cameras, bounds, **kwargs): UntexturedOcMesher.__init__(self, get_caminfo(cameras)[0], bounds, **kwargs) - + def __call__(self, kernels): sdf_kernels = [(lambda x, k0=k: k0(x)[Vars.SDF]) for k in kernels] meshes, in_view_tags = UntexturedOcMesher.__call__(self, sdf_kernels) @@ -57,12 +81,15 @@ def __call__(self, kernels): mesh = Mesh.cat(meshes) return mesh + class CollectiveOcMesher(UntexturedOcMesher): def __init__(self, cameras, bounds, **kwargs): UntexturedOcMesher.__init__(self, get_caminfo(cameras)[0], bounds, **kwargs) - + def __call__(self, kernels): - sdf_kernels = [lambda x: np.stack([k(x)[Vars.SDF] for k in kernels], -1).min(axis=-1)] + sdf_kernels = [ + lambda x: np.stack([k(x)[Vars.SDF] for k in kernels], -1).min(axis=-1) + ] mesh, in_view_tag = UntexturedOcMesher.__call__(self, sdf_kernels) mesh = mesh[0] with Timer("compute attributes"): @@ -70,10 +97,12 @@ def __call__(self, kernels): mesh.vertex_attributes[Tags.OutOfView] = (~in_view_tag[0]).astype(np.int32) mesh = Mesh(mesh=mesh) return mesh - + + @gin.configurable class Terrain: instance = None + def __init__( self, seed, @@ -104,29 +133,41 @@ def __init__( self.__dict__ = Terrain.instance.__dict__.copy() return - with Timer('Create terrain'): + with Timer("Create terrain"): if asset_folder is None: - if not ASSET_ENV_VAR in os.environ: - raise ValueError(f'Terrain recieved {asset_folder=} yet {ASSET_ENV_VAR} was not set') + if ASSET_ENV_VAR not in os.environ: + raise ValueError( + f"Terrain recieved {asset_folder=} yet {ASSET_ENV_VAR} was not set" + ) asset_folder = os.environ[ASSET_ENV_VAR] if asset_folder != "": - if not os.path.exists(asset_folder): - raise ValueError(f'Could not find non-empty user-specified {asset_folder=}') - asset_path = Path(asset_folder)/asset_version + if not 
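`CollectiveOcMesher` above meshes several transparent elements as one surface by handing the mesher a single kernel: the pointwise minimum of each element's SDF, i.e. the union of the implicit surfaces. A toy standalone version follows; the real kernels return dicts keyed by `Vars.SDF`, while these callables return the SDF directly.

```python
import numpy as np

def union_sdf(kernels):
    """Combine SDF callables (each (N, 3) -> (N,)) into the SDF of their union."""
    return lambda x: np.stack([k(x) for k in kernels], axis=-1).min(axis=-1)

sphere = lambda p: np.linalg.norm(p, axis=-1) - 1.0    # unit sphere at the origin
halfspace = lambda p: p[:, 2] + 0.5                    # everything below z = -0.5
combined = union_sdf([sphere, halfspace])

pts = np.array([[0.0, 0.0, 0.0], [0.0, 0.0, 2.0]])
print(combined(pts))   # [-1.  1.] : inside the union, then outside both shapes
```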
os.path.exists(asset_folder): + raise ValueError( + f"Could not find non-empty user-specified {asset_folder=}" + ) + asset_path = Path(asset_folder) / asset_version if not asset_path.exists(): - raise ValueError(f'{asset_folder=} did not contain {asset_version=}, please download it') - logger.info(f'Terrain using pre-generated {asset_path=} and on the fly {on_the_fly_asset_folder=}') + raise ValueError( + f"{asset_folder=} did not contain {asset_version=}, please download it" + ) + logger.info( + f"Terrain using pre-generated {asset_path=} and on the fly {on_the_fly_asset_folder=}" + ) else: - logger.info(f'Terrain using only on the fly {on_the_fly_asset_folder=}') + logger.info(f"Terrain using only on the fly {on_the_fly_asset_folder=}") asset_path = Path("") self.on_the_fly_asset_folder = Path(on_the_fly_asset_folder) self.reused_asset_folder = asset_path - self.elements, scene_infos = scene(seed, Path(on_the_fly_asset_folder), asset_path, device) + self.elements, scene_infos = scene( + seed, Path(on_the_fly_asset_folder), asset_path, device + ) self.elements_list = list(self.elements.values()) - logger.info(f"Terrain elements: {[x.__class__.name for x in self.elements_list]}") + logger.info( + f"Terrain elements: {[x.__class__.name for x in self.elements_list]}" + ) transfer_scene_info(self, scene_infos) Terrain.instance = self @@ -136,16 +177,17 @@ def __init__( def __del__(self): self.cleanup() - + def cleanup(self): if hasattr(self, "elements"): for e in self.elements: self.elements[e].cleanup() @gin.configurable() - def export(self, + def export( + self, dynamic=False, - spherical=True, # false for OcMesher + spherical=True, # false for OcMesher cameras=None, main_terrain_only=False, remove_redundant_attrs=True, @@ -153,21 +195,33 @@ def export(self, meshes_dict = {} attributes_dict = {} if not main_terrain_only or TerrainNames.OpaqueTerrain == self.main_terrain: - opaque_elements = [element for element in self.elements_list if element.transparency == Transparency.Opaque] + opaque_elements = [ + element + for element in self.elements_list + if element.transparency == Transparency.Opaque + ] if opaque_elements != []: attributes_dict[TerrainNames.OpaqueTerrain] = set() if dynamic: - if spherical: mesher = OpaqueSphericalMesher(cameras, self.bounds) - else: mesher = OcMesher(cameras, self.bounds) + if spherical: + mesher = OpaqueSphericalMesher(cameras, self.bounds) + else: + mesher = OcMesher(cameras, self.bounds) else: mesher = UniformMesher(self.populated_bounds) with Timer(f"meshing {TerrainNames.OpaqueTerrain}"): mesh = mesher([element for element in opaque_elements]) meshes_dict[TerrainNames.OpaqueTerrain] = mesh for element in opaque_elements: - attributes_dict[TerrainNames.OpaqueTerrain].update(element.attributes) - - individual_transparent_elements = [element for element in self.elements_list if element.transparency == Transparency.IndividualTransparent] + attributes_dict[TerrainNames.OpaqueTerrain].update( + element.attributes + ) + + individual_transparent_elements = [ + element + for element in self.elements_list + if element.transparency == Transparency.IndividualTransparent + ] for element in individual_transparent_elements: if not main_terrain_only or element.__class__.name == self.main_terrain: if dynamic: @@ -175,28 +229,53 @@ def export(self, if element.__class__.name == ElementNames.Atmosphere: special_args["pixels_per_cube"] = 100 special_args["inv_scale"] = 1 - if spherical: mesher = TransparentSphericalMesher(cameras, self.bounds, **special_args) - else: mesher = 
OcMesher(cameras, self.bounds, simplify_occluded=False, **special_args) - else: mesher = UniformMesher(self.populated_bounds, enclosed=True) + if spherical: + mesher = TransparentSphericalMesher( + cameras, self.bounds, **special_args + ) + else: + mesher = OcMesher( + cameras, + self.bounds, + simplify_occluded=False, + **special_args, + ) + else: + mesher = UniformMesher(self.populated_bounds, enclosed=True) with Timer(f"meshing {element.__class__.name}"): mesh = mesher([element]) meshes_dict[element.__class__.name] = mesh attributes_dict[element.__class__.name] = element.attributes - - if not main_terrain_only or TerrainNames.CollectiveTransparentTerrain == self.main_terrain: - collective_transparent_elements = [element for element in self.elements_list if element.transparency == Transparency.CollectiveTransparent] + + if ( + not main_terrain_only + or TerrainNames.CollectiveTransparentTerrain == self.main_terrain + ): + collective_transparent_elements = [ + element + for element in self.elements_list + if element.transparency == Transparency.CollectiveTransparent + ] if collective_transparent_elements != []: attributes_dict[TerrainNames.CollectiveTransparentTerrain] = set() if dynamic: - if spherical: mesher = TransparentSphericalMesher(cameras, self.bounds) - else: mesher = CollectiveOcMesher(cameras, self.bounds, simplify_occluded=False) + if spherical: + mesher = TransparentSphericalMesher(cameras, self.bounds) + else: + mesher = CollectiveOcMesher( + cameras, self.bounds, simplify_occluded=False + ) else: mesher = UniformMesher(self.populated_bounds) with Timer(f"meshing {TerrainNames.CollectiveTransparentTerrain}"): - mesh = mesher([element for element in collective_transparent_elements]) + mesh = mesher( + [element for element in collective_transparent_elements] + ) meshes_dict[TerrainNames.CollectiveTransparentTerrain] = mesh for element in collective_transparent_elements: - attributes_dict[TerrainNames.CollectiveTransparentTerrain].update(element.attributes) + attributes_dict[TerrainNames.CollectiveTransparentTerrain].update( + element.attributes + ) if main_terrain_only or dynamic: for mesh_name in meshes_dict: @@ -207,26 +286,42 @@ def export(self, for attribute in sorted(attributes_dict[mesh_name]): surface = self.surfaces[attribute] if get_surface_type(surface) == SurfaceTypes.Displacement: - assert surface.mod_name in bpy.data.objects[mesh_name_unapplied].modifiers, "please make sure you include one of the scene config in your configs and the same in all tasks" - surface_kernel = SurfaceKernel(surface.name, attribute, bpy.data.objects[mesh_name_unapplied].modifiers[surface.mod_name], self.device) + assert ( + surface.mod_name + in bpy.data.objects[mesh_name_unapplied].modifiers + ), "please make sure you include one of the scene config in your configs and the same in all tasks" + surface_kernel = SurfaceKernel( + surface.name, + attribute, + bpy.data.objects[mesh_name_unapplied].modifiers[ + surface.mod_name + ], + self.device, + ) surface_kernel(meshes_dict[mesh_name]) meshes_dict[mesh_name].blender_displacements = [] for attribute in sorted(attributes_dict[mesh_name]): surface = self.surfaces[attribute] if get_surface_type(surface) == SurfaceTypes.BlenderDisplacement: - meshes_dict[mesh_name].blender_displacements.append(surface.mod_name) + meshes_dict[mesh_name].blender_displacements.append( + surface.mod_name + ) if dynamic: if remove_redundant_attrs: for mesh_name in meshes_dict: if len(attributes_dict[mesh_name]) == 1: - 
meshes_dict[mesh_name].vertex_attributes.pop(list(attributes_dict[mesh_name])[0]) + meshes_dict[mesh_name].vertex_attributes.pop( + list(attributes_dict[mesh_name])[0] + ) else: - self.bounding_box = np.array(self.populated_bounds)[::2], np.array(self.populated_bounds)[1::2] + self.bounding_box = ( + np.array(self.populated_bounds)[::2], + np.array(self.populated_bounds)[1::2], + ) return meshes_dict, attributes_dict - def sample_surface_templates(self): with FixedSeed(int_hash(["terrain surface", self.seed])): @@ -241,11 +336,25 @@ def sample_surface_templates(self): def apply_surface_templates(self, attributes_dict): for mesh_name in attributes_dict: for attribute in sorted(attributes_dict[mesh_name]): - with FixedSeed(int_hash(["terrain surface instantiate", self.seed, self.surfaces[attribute].__name__])): - if (len(attributes_dict[mesh_name]) == 1): - self.surfaces[attribute].apply(bpy.data.objects[mesh_name], selection=None, ocean_folder=self.on_the_fly_asset_folder/Assets.Ocean) + with FixedSeed( + int_hash( + [ + "terrain surface instantiate", + self.seed, + self.surfaces[attribute].__name__, + ] + ) + ): + if len(attributes_dict[mesh_name]) == 1: + self.surfaces[attribute].apply( + bpy.data.objects[mesh_name], + selection=None, + ocean_folder=self.on_the_fly_asset_folder / Assets.Ocean, + ) else: - self.surfaces[attribute].apply(bpy.data.objects[mesh_name], selection=attribute) + self.surfaces[attribute].apply( + bpy.data.objects[mesh_name], selection=attribute + ) def surfaces_into_sdf(self): for element in self.elements_list: @@ -262,8 +371,17 @@ def surfaces_into_sdf(self): for attribute in element.attributes: surface = self.surfaces[attribute] if get_surface_type(surface) == SurfaceTypes.SDFPerturb: - assert surface.mod_name in corresponding_mesh.modifiers, f"{surface.mod_name} not in {corresponding_mesh.modifiers.keys()} please make sure you include one of the scene config in your configs and the same in all tasks" - element.displacement.append(SurfaceKernel(surface.name, attribute, corresponding_mesh.modifiers[surface.mod_name], self.device)) + assert ( + surface.mod_name in corresponding_mesh.modifiers + ), f"{surface.mod_name} not in {corresponding_mesh.modifiers.keys()} please make sure you include one of the scene config in your configs and the same in all tasks" + element.displacement.append( + SurfaceKernel( + surface.name, + attribute, + corresponding_mesh.modifiers[surface.mod_name], + self.device, + ) + ) @gin.configurable def coarse_terrain(self): @@ -282,18 +400,25 @@ def coarse_terrain(self): # do second time to avoid surface application difference resulting in cloating rocks coarse_meshes, _ = self.export(main_terrain_only=True) main_mesh = coarse_meshes[self.main_terrain] - + # WaterCovered annotation if ElementNames.Liquid in self.elements: - main_mesh.vertex_attributes[Tags.LiquidCovered] = (self.elements[ElementNames.Liquid](main_mesh.vertices, sdf_only=1)[Vars.SDF] < 0).astype(np.float32) + main_mesh.vertex_attributes[Tags.LiquidCovered] = ( + self.elements[ElementNames.Liquid](main_mesh.vertices, sdf_only=1)[ + Vars.SDF + ] + < 0 + ).astype(np.float32) main_unapplied = bpy.data.objects[self.main_terrain] main_unapplied.name = self.main_terrain + "_unapplied" main_unapplied.hide_render = True main_unapplied.hide_viewport = True - terrain_objs[self.main_terrain] = main_obj = main_mesh.export_blender(self.main_terrain) + terrain_objs[self.main_terrain] = main_obj = main_mesh.export_blender( + self.main_terrain + ) mat = main_unapplied.data.materials[0] 
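`apply_surface_templates` (and several other spots in this file) wraps each randomized step in `FixedSeed(int_hash([...]))` so that the same scene seed and surface name always reproduce the same parameters. The stand-in below shows the idea with hashlib and numpy; `FixedSeed` and `int_hash` are the real Infinigen helpers and are not reproduced here, and "sandstone" is just an example surface name.

```python
import hashlib
from contextlib import contextmanager

import numpy as np

def stable_int_hash(parts):
    """Deterministically hash a list of descriptors down to a 32-bit seed."""
    digest = hashlib.sha256(repr(tuple(parts)).encode()).hexdigest()
    return int(digest[:8], 16)

@contextmanager
def fixed_seed(seed):
    """Temporarily pin numpy's global RNG, restoring its state afterwards."""
    state = np.random.get_state()
    np.random.seed(seed)
    try:
        yield
    finally:
        np.random.set_state(state)

with fixed_seed(stable_int_hash(["terrain surface instantiate", 42, "sandstone"])):
    print(np.random.rand())   # identical every run for the same inputs
```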
main_obj.data.materials.append(mat) - + self.terrain_objs = terrain_objs for name in self.terrain_objs: if name not in hidden_in_viewport: @@ -305,28 +430,47 @@ def fine_terrain(self, output_folder, cameras, optimize_terrain_diskusage=True): self.sample_surface_templates() if (self.on_the_fly_asset_folder / Assets.Ocean).exists(): with FixedSeed(int_hash(["Ocean", self.seed])): - ocean_asset(output_folder / Assets.Ocean, bpy.context.scene.frame_start, bpy.context.scene.frame_end, link_folder=self.on_the_fly_asset_folder / Assets.Ocean) + ocean_asset( + output_folder / Assets.Ocean, + bpy.context.scene.frame_start, + bpy.context.scene.frame_end, + link_folder=self.on_the_fly_asset_folder / Assets.Ocean, + ) self.surfaces_into_sdf() fine_meshes, _ = self.export(dynamic=True, cameras=cameras) for mesh_name in fine_meshes: obj = fine_meshes[mesh_name].export_blender(mesh_name + "_fine") - if mesh_name not in hidden_in_viewport: self.tag_terrain(obj) + if mesh_name not in hidden_in_viewport: + self.tag_terrain(obj) if not optimize_terrain_diskusage: object_to_copy_from = bpy.data.objects[mesh_name] - self.copy_materials_and_displacements(mesh_name, obj, object_to_copy_from, fine_meshes[mesh_name].blender_displacements) + self.copy_materials_and_displacements( + mesh_name, + obj, + object_to_copy_from, + fine_meshes[mesh_name].blender_displacements, + ) else: Mesh(obj=obj).save(output_folder / f"{mesh_name}.glb") - np.save(output_folder / f"{mesh_name}.b_displacement", fine_meshes[mesh_name].blender_displacements) + np.save( + output_folder / f"{mesh_name}.b_displacement", + fine_meshes[mesh_name].blender_displacements, + ) delete(obj) - - def copy_materials_and_displacements(self, mesh_name, object_to_copy_to, object_to_copy_from, displacements): + + def copy_materials_and_displacements( + self, mesh_name, object_to_copy_to, object_to_copy_from, displacements + ): mat = object_to_copy_from.data.materials[0] object_to_copy_to.data.materials.append(mat) mesh_name_unapplied = mesh_name if mesh_name + "_unapplied" in bpy.data.objects.keys(): mesh_name_unapplied = mesh_name + "_unapplied" for mod_name in displacements: - move_modifier(object_to_copy_to, bpy.data.objects[mesh_name_unapplied].modifiers[mod_name]) + move_modifier( + object_to_copy_to, + bpy.data.objects[mesh_name_unapplied].modifiers[mod_name], + ) object_to_copy_from.hide_render = True object_to_copy_from.hide_viewport = True if mesh_name in hidden_in_viewport: @@ -334,17 +478,23 @@ def copy_materials_and_displacements(self, mesh_name, object_to_copy_to, object_ def load_glb(self, output_folder): for mesh_name in os.listdir(output_folder): - if not mesh_name.endswith(".glb"): continue + if not mesh_name.endswith(".glb"): + continue mesh_name = mesh_name[:-4] - object_to_copy_to = Mesh(path=output_folder/f"{mesh_name}.glb").export_blender(mesh_name + "_fine") + object_to_copy_to = Mesh( + path=output_folder / f"{mesh_name}.glb" + ).export_blender(mesh_name + "_fine") object_to_copy_from = bpy.data.objects[mesh_name] displacements = np.load(output_folder / f"{mesh_name}.b_displacement.npy") - self.copy_materials_and_displacements(mesh_name, object_to_copy_to, object_to_copy_from, displacements) + self.copy_materials_and_displacements( + mesh_name, object_to_copy_to, object_to_copy_from, displacements + ) def compute_camera_space_sdf(self, XYZ): sdf = np.ones(len(XYZ), dtype=np.float32) * 1e9 for element in self.elements_list: - if element.__class__.name == ElementNames.Atmosphere: continue + if element.__class__.name == 
ElementNames.Atmosphere: + continue element_sdf = element(XYZ, sdf_only=1)["sdf"] if self.under_water and element.__class__.name == ElementNames.Liquid: element_sdf *= -1 @@ -362,26 +512,40 @@ def get_bounding_box(self): return min_gen, max_gen @gin.configurable - def build_terrain_bvh_and_attrs(self, terrain_tags_queries, avoid_border=False, looking_at_center_region_of_size=None): + def build_terrain_bvh_and_attrs( + self, + terrain_tags_queries, + avoid_border=False, + looking_at_center_region_of_size=None, + ): exclude_list = [ElementNames.Atmosphere, ElementNames.Clouds] - terrain_objs = [t for t in self.terrain_objs if not t in exclude_list] + terrain_objs = [t for t in self.terrain_objs if t not in exclude_list] for mesh in terrain_objs: with SelectObjects(bpy.data.objects[mesh]): - bpy.ops.object.duplicate(linked=0,mode='TRANSLATION') + bpy.ops.object.duplicate(linked=0, mode="TRANSLATION") for i, mesh in enumerate(terrain_objs): with SelectObjects(bpy.data.objects[f"{mesh}.001"]): for m in bpy.data.objects[f"{mesh}.001"].modifiers: bpy.ops.object.modifier_apply(modifier=m.name) - far_ocean = self.under_water and self.surfaces[Materials.LiquidCollection].info["is_ocean"] + far_ocean = ( + self.under_water + and self.surfaces[Materials.LiquidCollection].info["is_ocean"] + ) if far_ocean: obj = bpy.data.objects[f"{ElementNames.Liquid}.001"] - obj.data.attributes.new(name="vertexwise_min_dist", type=AttributeType.Float, domain='POINT') - obj.data.attributes["vertexwise_min_dist"].data.foreach_set(FieldsType.Value, np.zeros(len(obj.data.vertices), dtype=np.float32) + 20) - + obj.data.attributes.new( + name="vertexwise_min_dist", type=AttributeType.Float, domain="POINT" + ) + obj.data.attributes["vertexwise_min_dist"].data.foreach_set( + FieldsType.Value, + np.zeros(len(obj.data.vertices), dtype=np.float32) + 20, + ) + with SelectObjects(bpy.data.objects[f"{terrain_objs[0]}.001"]): for i, mesh in enumerate(terrain_objs): - if i != 0: bpy.data.objects[f"{mesh}.001"].select_set(True) + if i != 0: + bpy.data.objects[f"{mesh}.001"].select_set(True) bpy.ops.object.join() terrain_obj = bpy.context.view_layer.objects.active @@ -392,38 +556,71 @@ def build_terrain_bvh_and_attrs(self, terrain_tags_queries, avoid_border=False, q = (q0,) else: q = q0 - if q[0] in [SelectionCriterions.CloseUp]: continue + if q[0] in [SelectionCriterions.CloseUp]: + continue if q[0] == SelectionCriterions.Altitude: min_altitude, max_altitude = q[1:3] altitude = terrain_mesh.vertices[:, 2] - camera_selection_answers[q0] = terrain_mesh.facewise_mean((altitude > min_altitude) & (altitude < max_altitude)) + camera_selection_answers[q0] = terrain_mesh.facewise_mean( + (altitude > min_altitude) & (altitude < max_altitude) + ) else: - camera_selection_answers[q0] = np.zeros(len(terrain_mesh.faces), dtype=bool) + camera_selection_answers[q0] = np.zeros( + len(terrain_mesh.faces), dtype=bool + ) for key in self.tag_dict: - if set(q).issubset(set(key.split('.'))): - camera_selection_answers[q0] |= (terrain_mesh.face_attributes["MaskTag"] == self.tag_dict[key]).reshape(-1) - camera_selection_answers[q0] = camera_selection_answers[q0].astype(np.float64) + if set(q).issubset(set(key.split("."))): + camera_selection_answers[q0] |= ( + terrain_mesh.face_attributes["MaskTag"] + == self.tag_dict[key] + ).reshape(-1) + camera_selection_answers[q0] = camera_selection_answers[q0].astype( + np.float64 + ) if np.abs(np.asarray(terrain_obj.matrix_world) - np.eye(4)).max() > 1e-4: - raise ValueError(f"Not all transformations on 
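`build_terrain_bvh_and_attrs` above answers each camera-selection query by OR-ing together the face masks of every tag whose dot-separated key contains all parts of the query (the `set(q).issubset(key.split("."))` check). A standalone restatement with made-up tag keys and ids:

```python
def matching_tag_ids(query, tag_dict):
    """Return the ids of all tag keys whose '.'-separated parts cover the query."""
    parts = (query,) if isinstance(query, str) else tuple(query)
    return [tag_id for key, tag_id in tag_dict.items()
            if set(parts).issubset(key.split("."))]

tags = {"landscape.terrain.beach": 3, "landscape.terrain.mountain": 4, "landscape.liquid": 7}
print(matching_tag_ids(("terrain", "beach"), tags))   # [3]
print(matching_tag_ids("landscape", tags))            # [3, 4, 7]
```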
{terrain_obj.name} have been applied. This function won't work correctly.") + raise ValueError( + f"Not all transformations on {terrain_obj.name} have been applied. This function won't work correctly." + ) if "vertexwise_min_dist" not in terrain_mesh.vertex_attributes: - terrain_mesh.vertex_attributes["vertexwise_min_dist"] = np.zeros((len(terrain_mesh.vertices), 1), dtype=np.float32) - + terrain_mesh.vertex_attributes["vertexwise_min_dist"] = np.zeros( + (len(terrain_mesh.vertices), 1), dtype=np.float32 + ) + if avoid_border: min_gen, max_gen = self.bounding_box dist_to_bbox = np.zeros((len(terrain_mesh.vertices), 1)) + 1e9 - for i in range(3): dist_to_bbox[:, 0] = np.minimum(dist_to_bbox[:, 0], terrain_mesh.vertices[:, i] - min_gen[i], max_gen[i] - terrain_mesh.vertices[:, i]) + for i in range(3): + dist_to_bbox[:, 0] = np.minimum( + dist_to_bbox[:, 0], + terrain_mesh.vertices[:, i] - min_gen[i], + max_gen[i] - terrain_mesh.vertices[:, i], + ) dist_to_bbox = np.maximum(dist_to_bbox, 0) - terrain_mesh.vertex_attributes["vertexwise_min_dist"] = np.maximum(terrain_mesh.vertex_attributes["vertexwise_min_dist"], 30 / (dist_to_bbox + 1e-9)) + terrain_mesh.vertex_attributes["vertexwise_min_dist"] = np.maximum( + terrain_mesh.vertex_attributes["vertexwise_min_dist"], + 30 / (dist_to_bbox + 1e-9), + ) if looking_at_center_region_of_size is not None: center_region_dist = np.zeros((len(terrain_mesh.vertices), 1)) for i in range(2): - center_region_dist[terrain_mesh.vertices[:, i] > looking_at_center_region_of_size / 2, 0] = 1e9 - center_region_dist[terrain_mesh.vertices[:, i] < -looking_at_center_region_of_size / 2, 0] = 1e9 - terrain_mesh.vertex_attributes["vertexwise_min_dist"] = np.maximum(terrain_mesh.vertex_attributes["vertexwise_min_dist"], center_region_dist) - - vertexwise_min_dist = terrain_mesh.facewise_mean(terrain_mesh.vertex_attributes["vertexwise_min_dist"].reshape(-1)) + center_region_dist[ + terrain_mesh.vertices[:, i] > looking_at_center_region_of_size / 2, + 0, + ] = 1e9 + center_region_dist[ + terrain_mesh.vertices[:, i] < -looking_at_center_region_of_size / 2, + 0, + ] = 1e9 + terrain_mesh.vertex_attributes["vertexwise_min_dist"] = np.maximum( + terrain_mesh.vertex_attributes["vertexwise_min_dist"], + center_region_dist, + ) + + vertexwise_min_dist = terrain_mesh.facewise_mean( + terrain_mesh.vertex_attributes["vertexwise_min_dist"].reshape(-1) + ) depsgraph = bpy.context.evaluated_depsgraph_get() scene_bvh = BVHTree.FromObject(terrain_obj, depsgraph) @@ -431,23 +628,28 @@ def build_terrain_bvh_and_attrs(self, terrain_tags_queries, avoid_border=False, return scene_bvh, camera_selection_answers, vertexwise_min_dist - def tag_terrain(self, obj): - if len(obj.data.vertices) == 0: return - + if len(obj.data.vertices) == 0: + return mesh = Mesh(obj=obj) first_time = 1 - #initialize with element tag + # initialize with element tag element_tag = np.zeros(len(obj.data.vertices), dtype=np.int32) - obj.data.attributes[Attributes.ElementTag].data.foreach_get("value", element_tag) + obj.data.attributes[Attributes.ElementTag].data.foreach_get( + "value", element_tag + ) element_tag_f = mesh.facewise_intmax(element_tag) for i in range(ElementTag.total_cnt): mask_i = element_tag_f == i if mask_i.any(): - obj.data.attributes.new(name=f"TAG_{ElementTag.map[i]}", type="FLOAT", domain='FACE') - obj.data.attributes[f"TAG_{ElementTag.map[i]}"].data.foreach_set("value", AC(mask_i.astype(np.float32))) + obj.data.attributes.new( + name=f"TAG_{ElementTag.map[i]}", type="FLOAT", domain="FACE" + ) + 
obj.data.attributes[f"TAG_{ElementTag.map[i]}"].data.foreach_set( + "value", AC(mask_i.astype(np.float32)) + ) if first_time: # "landscape" is a collective name for terrain and water tag_object(obj, Tags.Landscape) @@ -466,7 +668,7 @@ def tag_terrain(self, obj): (Materials.Beach, 0.5, 0), (Tags.OutOfView, 0.5, 1), ] - + for tag_name, threshold, to_remove in tag_thresholds: if tag_name in obj.data.attributes.keys(): tag = np.zeros(len(obj.data.vertices), dtype=np.float32) @@ -476,8 +678,12 @@ def tag_terrain(self, obj): if to_remove: obj.data.attributes.remove(obj.data.attributes[tag_name]) if tag_f.any(): - obj.data.attributes.new(name=f"TAG_{tag_name}", type="FLOAT", domain='FACE') - obj.data.attributes[f"TAG_{tag_name}"].data.foreach_set("value", AC(tag_f.astype(np.float32))) + obj.data.attributes.new( + name=f"TAG_{tag_name}", type="FLOAT", domain="FACE" + ) + obj.data.attributes[f"TAG_{tag_name}"].data.foreach_set( + "value", AC(tag_f.astype(np.float32)) + ) tag_object(obj) self.tag_dict = tag_system.tag_dict diff --git a/infinigen/terrain/elements/atmosphere.py b/infinigen/terrain/elements/atmosphere.py index 054ac651b..2529cfc70 100644 --- a/infinigen/terrain/elements/atmosphere.py +++ b/infinigen/terrain/elements/atmosphere.py @@ -7,13 +7,16 @@ import gin import numpy as np from numpy import ascontiguousarray as AC -from infinigen.core.util.organization import Materials, Transparency, ElementNames + +from infinigen.core.util.organization import ElementNames, Materials, Transparency from .core import Element + @gin.configurable class Atmosphere(Element): name = ElementNames.Atmosphere + def __init__( self, device, @@ -26,10 +29,12 @@ def __init__( ): self.device = device self.int_params = AC(np.array([], dtype=np.int32)) - self.float_params = AC(np.array([height, spherical_radius, hacky_offset], dtype=np.float32)) + self.float_params = AC( + np.array([height, spherical_radius, hacky_offset], dtype=np.float32) + ) if waterbody is not None: self.int_params2 = waterbody.int_params self.float_params2 = waterbody.float_params self.meta_params = [waterbody is not None] - Element.__init__(self, "atmosphere", material, transparency) \ No newline at end of file + Element.__init__(self, "atmosphere", material, transparency) diff --git a/infinigen/terrain/elements/caves.py b/infinigen/terrain/elements/caves.py index 22d98a920..f2a578f90 100644 --- a/infinigen/terrain/elements/caves.py +++ b/infinigen/terrain/elements/caves.py @@ -9,11 +9,13 @@ import gin import numpy as np from numpy import ascontiguousarray as AC -from infinigen.terrain.assets.caves import assets_to_data, caves_asset -from infinigen.terrain.utils import random_int, random_int_large + from infinigen.core.util.math import FixedSeed, int_hash -from infinigen.core.util.random import random_general as rg from infinigen.core.util.organization import AssetFile +from infinigen.core.util.random import random_general as rg +from infinigen.terrain.assets.caves import assets_to_data, caves_asset +from infinigen.terrain.utils import random_int, random_int_large + from .core import Element @@ -42,15 +44,37 @@ def __init__( self.assets_seed = random_int_large() noise_freq = rg(noise_freq) n_instances, N, float_data = self.load_assets() - self.int_params = AC(np.array([ - nonpython_seed, n_lattice, is_horizontal, n_instances, N, - ]).astype(np.int32)) - self.float_params = AC(np.concatenate((np.array([ - randomness, frequency, deepest_level, rg(scale_increase), - noise_octaves, noise_freq, rg(noise_scale), height_offset, smoothness, - ]), 
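`tag_terrain` reduces per-vertex masks to per-face tags: a vertex attribute is averaged over each face (`facewise_mean`), compared against the thresholds in the `tag_thresholds` table above, and faces that pass receive a `TAG_*` attribute. A minimal standalone version of that reduction, assuming triangle faces stored as an (F, 3) index array:

```python
import numpy as np

def facewise_tag(vertex_mask, faces, threshold=0.5):
    """Average a per-vertex mask over each triangle and threshold it per face."""
    return vertex_mask[faces].mean(axis=1) > threshold

vertex_mask = np.array([1.0, 1.0, 0.0, 0.0])
faces = np.array([[0, 1, 2], [1, 2, 3]])
print(facewise_tag(vertex_mask, faces))   # [ True False]
```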
float_data)).astype(np.float32)) - - + self.int_params = AC( + np.array( + [ + nonpython_seed, + n_lattice, + is_horizontal, + n_instances, + N, + ] + ).astype(np.int32) + ) + self.float_params = AC( + np.concatenate( + ( + np.array( + [ + randomness, + frequency, + deepest_level, + rg(scale_increase), + noise_octaves, + noise_freq, + rg(noise_scale), + height_offset, + smoothness, + ] + ), + float_data, + ) + ).astype(np.float32) + ) @gin.configurable def load_assets( @@ -61,18 +85,29 @@ def load_assets( asset_paths = [] if on_the_fly_instances > 0: for i in range(on_the_fly_instances): - if not (self.on_the_fly_asset_folder / str(i) / AssetFile.Finish).exists(): + if not ( + self.on_the_fly_asset_folder / str(i) / AssetFile.Finish + ).exists(): with FixedSeed(int_hash(("Caves", self.assets_seed, i))): caves_asset(self.on_the_fly_asset_folder / f"{i}") for i in range(on_the_fly_instances): asset_paths.append(self.on_the_fly_asset_folder / f"{i}") if reused_instances > 0: - assert(self.reused_asset_folder is not None and self.reused_asset_folder.exists()) - all_instances = len([x for x in os.listdir(str(self.reused_asset_folder)) if x[0] != '.']) - sample = np.random.choice(all_instances, reused_instances, replace=reused_instances > all_instances) + assert ( + self.reused_asset_folder is not None + and self.reused_asset_folder.exists() + ) + all_instances = len( + [x for x in os.listdir(str(self.reused_asset_folder)) if x[0] != "."] + ) + sample = np.random.choice( + all_instances, + reused_instances, + replace=reused_instances > all_instances, + ) for i in range(reused_instances): asset_paths.append(self.reused_asset_folder / f"{sample[i]}") - + datas = {} for asset_path in asset_paths: N, data = assets_to_data(asset_path) @@ -83,5 +118,7 @@ def load_assets( datas[key] = [data[key]] for key in datas: datas[key] = np.concatenate(datas[key]) - float_params = np.concatenate((datas["bounding_box"], datas["occupancy"])).astype(np.float32) - return on_the_fly_instances + reused_instances, N, float_params \ No newline at end of file + float_params = np.concatenate( + (datas["bounding_box"], datas["occupancy"]) + ).astype(np.float32) + return on_the_fly_instances + reused_instances, N, float_params diff --git a/infinigen/terrain/elements/core.py b/infinigen/terrain/elements/core.py index 46f1870cb..cc2f61094 100644 --- a/infinigen/terrain/elements/core.py +++ b/infinigen/terrain/elements/core.py @@ -9,12 +9,15 @@ import gin import numpy as np from numpy import ascontiguousarray as AC -from infinigen.terrain.utils import ASFLOAT, ASINT, Vars, load_cdll, register_func + from infinigen.core.util.organization import Materials +from infinigen.terrain.utils import ASFLOAT, ASINT, Vars, load_cdll, register_func + @gin.configurable class Element: called_time = {} + def __init__(self, lib_name, material, transparency): if lib_name in Element.called_time: lib_name_X = f"{lib_name}_{Element.called_time[lib_name]}" @@ -33,12 +36,27 @@ def __init__(self, lib_name, material, transparency): self.attributes.append(aux_name) call_param_type.append(POINTER(c_float)) register_func(self, dll, "call", call_param_type) - register_func(self, dll, "init", [ - c_int32, c_int32, - c_size_t, POINTER(c_int32), c_size_t, POINTER(c_float), - c_size_t, POINTER(c_int32), c_size_t, POINTER(c_float), - c_size_t, POINTER(c_int32), c_size_t, POINTER(c_float), - ]) + register_func( + self, + dll, + "init", + [ + c_int32, + c_int32, + c_size_t, + POINTER(c_int32), + c_size_t, + POINTER(c_float), + c_size_t, + POINTER(c_int32), + 
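`Element` registers its compiled kernel's `init`/`call` symbols with explicit ctypes argtypes (the long `c_size_t` / `POINTER(c_float)` lists above) and passes numpy buffers through helpers like `ASFLOAT`. The snippet below only illustrates the buffer-aliasing part, on a Unix-like system with libm; `ASFLOAT` here is a guess at what the `infinigen.terrain.utils` helper does, and `cosf` merely stands in for the terrain kernel's `call` symbol.

```python
import ctypes
import ctypes.util
from ctypes import POINTER, c_float

import numpy as np

def ASFLOAT(arr):
    """Assumed behaviour of the helper: view a float32 numpy buffer as a float*."""
    return arr.ctypes.data_as(POINTER(c_float))

libm = ctypes.CDLL(ctypes.util.find_library("m"))
libm.cosf.argtypes = [c_float]
libm.cosf.restype = c_float

sdf = np.zeros(4, dtype=np.float32)   # preallocated output, as in Element.__call__
ptr = ASFLOAT(sdf)
ptr[0] = libm.cosf(0.0)               # writing through the pointer updates the array
print(sdf)                            # [1. 0. 0. 0.]
```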
c_size_t, + POINTER(c_float), + c_size_t, + POINTER(c_int32), + c_size_t, + POINTER(c_float), + ], + ) register_func(self, dll, "cleanup") self.material = material @@ -52,24 +70,39 @@ def __init__(self, lib_name, material, transparency): meta_param2 = 0 else: meta_param = meta_param2 = 0 - if not hasattr(self, "int_params2"): self.int_params2 = np.zeros(0, dtype=np.int32) - if not hasattr(self, "float_params2"): self.float_params2 = np.zeros(0, dtype=np.float32) - if not hasattr(self, "int_params3"): self.int_params3 = np.zeros(0, dtype=np.int32) - if not hasattr(self, "float_params3"): self.float_params3 = np.zeros(0, dtype=np.float32) + if not hasattr(self, "int_params2"): + self.int_params2 = np.zeros(0, dtype=np.int32) + if not hasattr(self, "float_params2"): + self.float_params2 = np.zeros(0, dtype=np.float32) + if not hasattr(self, "int_params3"): + self.int_params3 = np.zeros(0, dtype=np.int32) + if not hasattr(self, "float_params3"): + self.float_params3 = np.zeros(0, dtype=np.float32) self.init( - meta_param, meta_param2, - len(self.int_params), ASINT(self.int_params), len(self.float_params), ASFLOAT(self.float_params), - len(self.int_params2), ASINT(self.int_params2), len(self.float_params2), ASFLOAT(self.float_params2), - len(self.int_params3), ASINT(self.int_params3), len(self.float_params3), ASFLOAT(self.float_params3), + meta_param, + meta_param2, + len(self.int_params), + ASINT(self.int_params), + len(self.float_params), + ASFLOAT(self.float_params), + len(self.int_params2), + ASINT(self.int_params2), + len(self.float_params2), + ASFLOAT(self.float_params2), + len(self.int_params3), + ASINT(self.int_params3), + len(self.float_params3), + ASFLOAT(self.float_params3), ) self.displacement = [] self.height_offset = 0 self.whole_bbox = None - def __call__(self, positions, sdf_only=False): if self.whole_bbox is not None: - mask = (positions >= self.whole_bbox[0].reshape((1, 3))).all(axis=-1) & (positions <= self.whole_bbox[1].reshape((1, 3))).all(axis=-1) + mask = (positions >= self.whole_bbox[0].reshape((1, 3))).all(axis=-1) & ( + positions <= self.whole_bbox[1].reshape((1, 3)) + ).all(axis=-1) positions[:, 2] += self.height_offset N = len(positions) sdf = AC(np.zeros(N, dtype=np.float32)) @@ -81,11 +114,16 @@ def __call__(self, positions, sdf_only=False): auxs.append(AC(np.zeros(N * len(self.aux_names), dtype=np.float32))) else: auxs.append(None) - self.call(N, ASFLOAT(AC(positions.astype(np.float32))), ASFLOAT(sdf), *[POINTER(c_float)() if x is None else ASFLOAT(x) for x in auxs]) + self.call( + N, + ASFLOAT(AC(positions.astype(np.float32))), + ASFLOAT(sdf), + *[POINTER(c_float)() if x is None else ASFLOAT(x) for x in auxs], + ) if self.whole_bbox is not None: sdf[mask] = 1e6 - + ret = {} ret[Vars.SDF] = sdf @@ -108,4 +146,4 @@ def __call__(self, positions, sdf_only=False): def get_heightmap(self, X, Y): N = X.shape[0] positions = np.stack((X.reshape(-1), Y.reshape(-1), np.zeros(N * N)), -1) - return -self.__call__(positions)[Vars.SDF].reshape((N, N)) \ No newline at end of file + return -self.__call__(positions)[Vars.SDF].reshape((N, N)) diff --git a/infinigen/terrain/elements/ground.py b/infinigen/terrain/elements/ground.py index 4dfacc810..38fd8c802 100644 --- a/infinigen/terrain/elements/ground.py +++ b/infinigen/terrain/elements/ground.py @@ -7,9 +7,16 @@ import gin import numpy as np from numpy import ascontiguousarray as AC -from infinigen.terrain.utils import random_int -from infinigen.core.util.organization import Materials, Transparency, Tags, ElementNames, ElementTag 
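`get_heightmap` at the end of `Element` samples the element's SDF on a z = 0 grid and negates it; for a height-field style SDF of the form z - height(x, y), that negation recovers the height directly. A toy version with an analytic SDF in place of the compiled kernel:

```python
import numpy as np

def heightmap_from_sdf(sdf, X, Y):
    """Sample an SDF on the z = 0 plane and negate it to get a height field."""
    n = X.shape[0]
    positions = np.stack((X.reshape(-1), Y.reshape(-1), np.zeros(n * n)), -1)
    return -sdf(positions).reshape((n, n))

toy_sdf = lambda p: p[:, 2] - np.sin(p[:, 0]) * np.cos(p[:, 1])   # z - height(x, y)
x = np.linspace(0, np.pi, 32)
X, Y = np.meshgrid(x, x, indexing="ij")
print(heightmap_from_sdf(toy_sdf, X, Y).shape)   # (32, 32)
```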
+ +from infinigen.core.util.organization import ( + ElementNames, + ElementTag, + Materials, + Tags, + Transparency, +) from infinigen.core.util.random import random_general as rg +from infinigen.terrain.utils import random_int from .core import Element @@ -17,6 +24,7 @@ @gin.configurable class Ground(Element): name = ElementNames.Ground + def __init__( self, device, @@ -50,13 +58,24 @@ def __init__( self.float_params2 = caves.float_params self.int_params = AC(np.array([seed, is_3d, with_sand_dunes], dtype=np.int32)) - self.float_params = AC(np.array([ - spherical_radius, - freq, octaves, scale, height, - sand_dunes_warping_freq, sand_dunes_warping_octaves, sand_dunes_warping_scale, - sand_dunes_freq, sand_dunes_scale - ], dtype=np.float32)) + self.float_params = AC( + np.array( + [ + spherical_radius, + freq, + octaves, + scale, + height, + sand_dunes_warping_freq, + sand_dunes_warping_octaves, + sand_dunes_warping_scale, + sand_dunes_freq, + sand_dunes_scale, + ], + dtype=np.float32, + ) + ) self.meta_params = [caves is not None] Element.__init__(self, "ground", material, transparency) - self.tag = ElementTag.Terrain \ No newline at end of file + self.tag = ElementTag.Terrain diff --git a/infinigen/terrain/elements/landtiles.py b/infinigen/terrain/elements/landtiles.py index 7c128b01f..387a59176 100644 --- a/infinigen/terrain/elements/landtiles.py +++ b/infinigen/terrain/elements/landtiles.py @@ -10,27 +10,39 @@ import numpy as np from numpy import ascontiguousarray as AC -from infinigen.terrain.assets.landtiles import assets_to_data, landtile_asset -from infinigen.terrain.utils import random_int, random_int_large from infinigen.core.util.math import FixedSeed, int_hash -from infinigen.core.util.organization import Tags, Materials, LandTile, Process, Transparency, ElementNames, ElementTag, AssetFile +from infinigen.core.util.organization import ( + AssetFile, + ElementNames, + ElementTag, + LandTile, + Materials, + Process, + Tags, + Transparency, +) from infinigen.core.util.random import random_general as rg +from infinigen.terrain.assets.landtiles import assets_to_data, landtile_asset +from infinigen.terrain.utils import random_int, random_int_large from .core import Element def none_to_0(x): - if x is None: return 0 + if x is None: + return 0 return x + @gin.configurable class LandTiles(Element): name = ElementNames.LandTiles + def __init__( self, device, caves, - on_the_fly_asset_folder, # for tiledlandscape the folder is the containing folder not specific type folder + on_the_fly_asset_folder, # for tiledlandscape the folder is the containing folder not specific type folder reused_asset_folder, n_lattice=1, tiles=[LandTile.MultiMountains], @@ -41,7 +53,8 @@ def __init__( island_probability=0, tile_heights=[-0.1], land_process=("choice", [Process.Erosion, None], [0.65, 0.35]), - height_modification_start=None, height_modification_end=None, + height_modification_start=None, + height_modification_end=None, attribute_modification_start_height=None, attribute_modification_end_height=None, attribute_modification_distort_freq=1, @@ -61,8 +74,12 @@ def __init__( nonpython_seed = random_int() self.assets_seed = random_int_large() self.tiles = tiles - self.attribute_modification_start_height = attribute_modification_start_height = rg(attribute_modification_start_height) - self.attribute_modification_end_height = attribute_modification_end_height = rg(attribute_modification_end_height) + self.attribute_modification_start_height = ( + attribute_modification_start_height + ) = 
rg(attribute_modification_start_height) + self.attribute_modification_end_height = attribute_modification_end_height = rg( + attribute_modification_end_height + ) self.smooth = smooth self.aux_names = [] land_process = rg(land_process) @@ -98,19 +115,55 @@ def __init__( frequency = 1 / (tile_size * 0.67) * tile_density - self.int_params = AC(np.concatenate((np.array([ - nonpython_seed, n_lattice, len(tiles), height_modification_start is not None, - attribute_modification_start_height is not None, n_instances, N, use_cblerp, - ]), )).astype(np.int32)) - self.float_params = AC(np.concatenate((np.array([ - randomness, frequency, attribute_probability, attribute_distance_range, island_probability, tile_size, - none_to_0(height_modification_start), none_to_0(height_modification_end), - none_to_0(attribute_modification_start_height), none_to_0(attribute_modification_end_height), - attribute_modification_distort_freq, attribute_modification_distort_mag, empty_below, y_tilt, y_tilt_clip, sharpen, - mask_random_freq, direction_deg, - *tile_heights, - ]), float_data)).astype(np.float32)) - + self.int_params = AC( + np.concatenate( + ( + np.array( + [ + nonpython_seed, + n_lattice, + len(tiles), + height_modification_start is not None, + attribute_modification_start_height is not None, + n_instances, + N, + use_cblerp, + ] + ), + ) + ).astype(np.int32) + ) + self.float_params = AC( + np.concatenate( + ( + np.array( + [ + randomness, + frequency, + attribute_probability, + attribute_distance_range, + island_probability, + tile_size, + none_to_0(height_modification_start), + none_to_0(height_modification_end), + none_to_0(attribute_modification_start_height), + none_to_0(attribute_modification_end_height), + attribute_modification_distort_freq, + attribute_modification_distort_mag, + empty_below, + y_tilt, + y_tilt_clip, + sharpen, + mask_random_freq, + direction_deg, + *tile_heights, + ] + ), + float_data, + ) + ).astype(np.float32) + ) + self.meta_params = [caves is not None] Element.__init__(self, "landtiles", material, transparency) self.tag = ElementTag.Terrain @@ -125,23 +178,43 @@ def load_assets( if on_the_fly_instances > 0: for t, tile in enumerate(self.tiles): for i in range(on_the_fly_instances): - if not (self.on_the_fly_asset_folder / tile / str(i) / AssetFile.Finish).exists(): + if not ( + self.on_the_fly_asset_folder / tile / str(i) / AssetFile.Finish + ).exists(): with FixedSeed(int_hash(("LandTiles", self.assets_seed, t, i))): - landtile_asset(self.on_the_fly_asset_folder / tile / f"{i}", tile, device=self.device) + landtile_asset( + self.on_the_fly_asset_folder / tile / f"{i}", + tile, + device=self.device, + ) for tile in self.tiles: for i in range(on_the_fly_instances): asset_paths.append(self.on_the_fly_asset_folder / tile / f"{i}") if reused_instances > 0: assert self.reused_asset_folder is not None - assert (self.reused_asset_folder / tile).exists(), f"{self.reused_asset_folder / tile} does not exists" - all_instances = len([x for x in os.listdir(str(self.reused_asset_folder / tile)) if x[0] != '.']) - sample = np.random.choice(all_instances, reused_instances, replace=reused_instances > all_instances) + assert ( + self.reused_asset_folder / tile + ).exists(), f"{self.reused_asset_folder / tile} does not exists" + all_instances = len( + [ + x + for x in os.listdir(str(self.reused_asset_folder / tile)) + if x[0] != "." 
+ ] + ) + sample = np.random.choice( + all_instances, + reused_instances, + replace=reused_instances > all_instances, + ) for i in range(reused_instances): asset_paths.append(self.reused_asset_folder / tile / f"{sample[i]}") datas = {"direction": [np.zeros(0)]} for asset_path in asset_paths: - tile_size, N, data = assets_to_data(asset_path, self.land_process, do_smooth=self.smooth) + tile_size, N, data = assets_to_data( + asset_path, self.land_process, do_smooth=self.smooth + ) for key in data: if key in datas: datas[key].append(data[key]) @@ -149,12 +222,15 @@ def load_assets( datas[key] = [data[key]] for key in datas: datas[key] = np.concatenate(datas[key]) - float_params = np.concatenate((datas["heightmap"], datas["mask"], datas["direction"])).astype(np.float32) + float_params = np.concatenate( + (datas["heightmap"], datas["mask"], datas["direction"]) + ).astype(np.float32) return on_the_fly_instances + reused_instances, tile_size, N, float_params class Volcanos(LandTiles): name = ElementNames.Volcanos + def __init__( self, device, @@ -175,15 +251,18 @@ def __init__( attribute_probability=0.5, attribute_distance_range=150, land_process=Process.Eruption, - height_modification_start=-0.5, height_modification_end=-1.5, + height_modification_start=-0.5, + height_modification_end=-1.5, attribute_modification_start_height=None, attribute_modification_end_height=None, randomness=1, ) self.tag = ElementTag.Volcanos + class FloatingIce(LandTiles): name = ElementNames.FloatingIce + def __init__( self, device, @@ -203,10 +282,11 @@ def __init__( tile_heights=[-12.15], land_process=Process.IceErosion, transparency=Transparency.CollectiveTransparent, - height_modification_start=None, height_modification_end=None, + height_modification_start=None, + height_modification_end=None, attribute_modification_start_height=None, attribute_modification_end_height=None, empty_below=-0.4, randomness=1, ) - self.tag = ElementTag.FloatingIce \ No newline at end of file + self.tag = ElementTag.FloatingIce diff --git a/infinigen/terrain/elements/mountains.py b/infinigen/terrain/elements/mountains.py index 52992acdf..9fd4ec12c 100644 --- a/infinigen/terrain/elements/mountains.py +++ b/infinigen/terrain/elements/mountains.py @@ -7,23 +7,32 @@ import gin import numpy as np from numpy import ascontiguousarray as AC -from infinigen.terrain.utils import random_int -from infinigen.core.util.organization import Materials, Transparency, ElementNames, ElementTag + +from infinigen.core.util.organization import ( + ElementNames, + ElementTag, + Materials, + Transparency, +) from infinigen.core.util.random import random_general as rg +from infinigen.terrain.utils import random_int from .core import Element + # this element is used as auxiliary element @gin.configurable class Mountains(Element): name = ElementNames.Mountains + def __init__( self, device, - min_freq, max_freq, + min_freq, + max_freq, height, # i.e. scale (not base height) coverage, - slope_height, # i.e. slope_scale + slope_height, # i.e. 
slope_scale n_groups=3, is_3d=False, spherical_radius=-1, @@ -43,19 +52,31 @@ def __init__( mask_freq = min_freq * mask_freq_ratio mask_octaves = rg(mask_octaves) coverage = rg(coverage) - mask_ramp_min, mask_ramp_max = -1.1 - coverage*2, -0.9 - coverage*2 + mask_ramp_min, mask_ramp_max = -1.1 - coverage * 2, -0.9 - coverage * 2 slope_freq = rg(slope_freq) slope_octaves = rg(slope_octaves) slope_height = rg(slope_height) - + self.int_params = AC(np.array([random_int(), n_groups, is_3d], dtype=np.int32)) - self.float_params = AC(np.array([ - spherical_radius, - min_freq, max_freq, octaves, height * 2, - mask_freq, mask_octaves, - mask_ramp_min, mask_ramp_max, - slope_freq, slope_octaves, slope_height, - ], dtype=np.float32)) + self.float_params = AC( + np.array( + [ + spherical_radius, + min_freq, + max_freq, + octaves, + height * 2, + mask_freq, + mask_octaves, + mask_ramp_min, + mask_ramp_max, + slope_freq, + slope_octaves, + slope_height, + ], + dtype=np.float32, + ) + ) Element.__init__(self, "mountains", material, transparency) - self.tag = ElementTag.Terrain \ No newline at end of file + self.tag = ElementTag.Terrain diff --git a/infinigen/terrain/elements/upsidedown_mountains.py b/infinigen/terrain/elements/upsidedown_mountains.py index 7e73fadce..4d6db5563 100644 --- a/infinigen/terrain/elements/upsidedown_mountains.py +++ b/infinigen/terrain/elements/upsidedown_mountains.py @@ -9,16 +9,29 @@ import gin import numpy as np from numpy import ascontiguousarray as AC -from infinigen.terrain.assets.upsidedown_mountains import assets_to_data, upsidedown_mountains_asset -from infinigen.terrain.utils import random_int, random_int_large + from infinigen.core.util.math import FixedSeed, int_hash -from infinigen.core.util.organization import Materials, Transparency, ElementNames, ElementTag, Tags, AssetFile +from infinigen.core.util.organization import ( + AssetFile, + ElementNames, + ElementTag, + Materials, + Tags, + Transparency, +) +from infinigen.terrain.assets.upsidedown_mountains import ( + assets_to_data, + upsidedown_mountains_asset, +) +from infinigen.terrain.utils import random_int, random_int_large from .core import Element + @gin.configurable class UpsidedownMountains(Element): name = ElementNames.UpsidedownMountains + def __init__( self, device, @@ -40,9 +53,30 @@ def __init__( self.assets_seed = random_int_large() self.aux_names = [Tags.UpsidedownMountainsLowerPart] n_instances, L, N, float_data = self.load_assets() - self.int_params = AC(np.concatenate((np.array([nonpython_seed, n_instances, N]),)).astype(np.int32)) - self.float_params = AC(np.concatenate((np.array([L, floating_height, randomness, frequency, perturb_octaves, perturb_freq, perturb_scale]), float_data)).astype(np.float32)) - + self.int_params = AC( + np.concatenate((np.array([nonpython_seed, n_instances, N]),)).astype( + np.int32 + ) + ) + self.float_params = AC( + np.concatenate( + ( + np.array( + [ + L, + floating_height, + randomness, + frequency, + perturb_octaves, + perturb_freq, + perturb_scale, + ] + ), + float_data, + ) + ).astype(np.float32) + ) + Element.__init__(self, "upsidedown_mountains", material, transparency) self.tag = ElementTag.UpsidedownMountains @@ -55,18 +89,33 @@ def load_assets( asset_paths = [] if on_the_fly_instances > 0: for i in range(on_the_fly_instances): - if not (self.on_the_fly_asset_folder / str(i) / AssetFile.Finish).exists(): - with FixedSeed(int_hash(("UpsidedownMountains", self.assets_seed, i))): - upsidedown_mountains_asset(self.on_the_fly_asset_folder / f"{i}", 
device=self.device) + if not ( + self.on_the_fly_asset_folder / str(i) / AssetFile.Finish + ).exists(): + with FixedSeed( + int_hash(("UpsidedownMountains", self.assets_seed, i)) + ): + upsidedown_mountains_asset( + self.on_the_fly_asset_folder / f"{i}", device=self.device + ) for i in range(on_the_fly_instances): asset_paths.append(self.on_the_fly_asset_folder / f"{i}") if reused_instances > 0: - assert(self.reused_asset_folder is not None and self.reused_asset_folder.exists()) - all_instances = len([x for x in os.listdir(str(self.reused_asset_folder)) if x[0] != '.']) - sample = np.random.choice(all_instances, reused_instances, replace=reused_instances > all_instances) + assert ( + self.reused_asset_folder is not None + and self.reused_asset_folder.exists() + ) + all_instances = len( + [x for x in os.listdir(str(self.reused_asset_folder)) if x[0] != "."] + ) + sample = np.random.choice( + all_instances, + reused_instances, + replace=reused_instances > all_instances, + ) for i in range(reused_instances): asset_paths.append(self.reused_asset_folder / f"{sample[i]}") - + datas = {} for asset_path in asset_paths: L, N, data = assets_to_data(asset_path) @@ -77,5 +126,7 @@ def load_assets( datas[key] = [data[key]] for key in datas: datas[key] = np.concatenate(datas[key]) - float_params = np.concatenate((datas["upside"], datas["downside"], datas["peak"])).astype(np.float32) - return on_the_fly_instances + reused_instances, L, N, float_params \ No newline at end of file + float_params = np.concatenate( + (datas["upside"], datas["downside"], datas["peak"]) + ).astype(np.float32) + return on_the_fly_instances + reused_instances, L, N, float_params diff --git a/infinigen/terrain/elements/voronoi_rocks.py b/infinigen/terrain/elements/voronoi_rocks.py index 217832a0e..cacc77fc6 100644 --- a/infinigen/terrain/elements/voronoi_rocks.py +++ b/infinigen/terrain/elements/voronoi_rocks.py @@ -7,19 +7,30 @@ import gin import numpy as np from numpy import ascontiguousarray as AC + +from infinigen.core.util.organization import ( + ElementNames, + ElementTag, + Materials, + Tags, + Transparency, +) from infinigen.terrain.utils import random_int -from infinigen.core.util.organization import Materials, ElementNames, Transparency, ElementTag, Tags from .core import Element from .landtiles import LandTiles + def none_to_0(x): - if x is None: return 0 + if x is None: + return 0 return x + @gin.configurable class VoronoiRocks(Element): name = ElementNames.VoronoiRocks + def __init__( self, device, @@ -29,18 +40,42 @@ def __init__( variable_material=False, transparency=Transparency.Opaque, n_lattice=3, - min_freq=1, max_freq=10, - gap_min_freq=0.003, gap_max_freq=0.03, gap_scale=0.1, gap_octaves=2, gap_base=10, - warp_min_freq=0.1, warp_max_freq=0.5, warp_octaves=3, warp_prob=0.5, - warp_modu_sigmoidscale=3, warp_modu_scale=0.4, warp_modu_octaves=2, warp_modu_freq=0.01, - mask_octaves=11, mask_freq=0.05, mask_shift=-0.2, + min_freq=1, + max_freq=10, + gap_min_freq=0.003, + gap_max_freq=0.03, + gap_scale=0.1, + gap_octaves=2, + gap_base=10, + warp_min_freq=0.1, + warp_max_freq=0.5, + warp_octaves=3, + warp_prob=0.5, + warp_modu_sigmoidscale=3, + warp_modu_scale=0.4, + warp_modu_octaves=2, + warp_modu_freq=0.01, + mask_octaves=11, + mask_freq=0.05, + mask_shift=-0.2, ): self.device = device seed = random_int() - height_modification = hasattr(attachment, "attribute_modification_start_height") and attachment.attribute_modification_start_height is not None - attribute_modification_start_height = 
attachment.attribute_modification_start_height if height_modification else None - attribute_modification_end_height = attachment.attribute_modification_end_height if height_modification else None + height_modification = ( + hasattr(attachment, "attribute_modification_start_height") + and attachment.attribute_modification_start_height is not None + ) + attribute_modification_start_height = ( + attachment.attribute_modification_start_height + if height_modification + else None + ) + attribute_modification_end_height = ( + attachment.attribute_modification_end_height + if height_modification + else None + ) if height_modification and variable_material: self.aux_names = [Materials.Beach] else: @@ -50,35 +85,69 @@ def __init__( else: self.aux_names.append(Tags.Cave) - self.int_params = AC(np.array([seed, n_lattice, height_modification], dtype=np.int32)) - self.float_params = AC(np.array([ - min_freq, max_freq, - gap_min_freq, gap_max_freq, gap_scale, gap_octaves, gap_base, - warp_min_freq, warp_max_freq, warp_octaves, warp_prob, - warp_modu_sigmoidscale, warp_modu_scale, warp_modu_octaves, warp_modu_freq, - mask_octaves, mask_freq, mask_shift, - none_to_0(attribute_modification_start_height), none_to_0(attribute_modification_end_height) - ], dtype=np.float32)) + self.int_params = AC( + np.array([seed, n_lattice, height_modification], dtype=np.int32) + ) + self.float_params = AC( + np.array( + [ + min_freq, + max_freq, + gap_min_freq, + gap_max_freq, + gap_scale, + gap_octaves, + gap_base, + warp_min_freq, + warp_max_freq, + warp_octaves, + warp_prob, + warp_modu_sigmoidscale, + warp_modu_scale, + warp_modu_octaves, + warp_modu_freq, + mask_octaves, + mask_freq, + mask_shift, + none_to_0(attribute_modification_start_height), + none_to_0(attribute_modification_end_height), + ], + dtype=np.float32, + ) + ) self.int_params2 = attachment.int_params self.float_params2 = attachment.float_params - + if caves is not None: self.int_params3 = caves.int_params self.float_params3 = caves.float_params - + self.meta_params = [not isinstance(attachment, LandTiles), caves is not None] Element.__init__(self, "voronoi_rocks", material, transparency) self.tag = ElementTag.VoronoiRocks + class VoronoiGrains(VoronoiRocks): name = ElementNames.VoronoiGrains + def __init__( self, device, attachment, caves, - min_freq=30, max_freq=300, + min_freq=30, + max_freq=300, ): - VoronoiRocks.__init__(self, device, attachment, caves, min_freq=min_freq, max_freq=max_freq, mask_shift=9, warp_prob=0, variable_material=1) + VoronoiRocks.__init__( + self, + device, + attachment, + caves, + min_freq=min_freq, + max_freq=max_freq, + mask_shift=9, + warp_prob=0, + variable_material=1, + ) self.tag = ElementTag.VoronoiGrains diff --git a/infinigen/terrain/elements/warped_rocks.py b/infinigen/terrain/elements/warped_rocks.py index 75392915c..838bec534 100644 --- a/infinigen/terrain/elements/warped_rocks.py +++ b/infinigen/terrain/elements/warped_rocks.py @@ -7,14 +7,23 @@ import gin import numpy as np from numpy import ascontiguousarray as AC + +from infinigen.core.util.organization import ( + ElementNames, + ElementTag, + Materials, + Tags, + Transparency, +) from infinigen.terrain.utils import random_int -from infinigen.core.util.organization import Materials, ElementNames, Transparency, ElementTag, Tags + from .core import Element @gin.configurable class WarpedRocks(Element): name = ElementNames.WarpedRocks + def __init__( self, device, @@ -23,9 +32,18 @@ def __init__( transparency=Transparency.Opaque, slope_is_3d=False, 
supressing_param=3, - content_min_freq=0.06, content_max_freq=0.1, content_octaves=15, content_scale=40, - warp_min_freq=0.1, warp_max_freq=0.15, warp_octaves=3, warp_scale=5, - slope_freq=0.02, slope_octaves=5, slope_scale=20, slope_shift=0 + content_min_freq=0.06, + content_max_freq=0.1, + content_octaves=15, + content_scale=40, + warp_min_freq=0.1, + warp_max_freq=0.15, + warp_octaves=3, + warp_scale=5, + slope_freq=0.02, + slope_octaves=5, + slope_scale=20, + slope_shift=0, ): self.device = device seed = random_int() @@ -36,15 +54,29 @@ def __init__( self.aux_names.append(Tags.Cave) self.int_params2 = caves.int_params self.float_params2 = caves.float_params - + self.int_params = AC(np.array([seed, slope_is_3d], dtype=np.int32)) - self.float_params = AC(np.array([ - supressing_param, - content_min_freq, content_max_freq, content_octaves, content_scale, - warp_min_freq, warp_max_freq, warp_octaves, warp_scale, - slope_freq, slope_octaves, slope_scale, slope_shift, - ], dtype=np.float32)) + self.float_params = AC( + np.array( + [ + supressing_param, + content_min_freq, + content_max_freq, + content_octaves, + content_scale, + warp_min_freq, + warp_max_freq, + warp_octaves, + warp_scale, + slope_freq, + slope_octaves, + slope_scale, + slope_shift, + ], + dtype=np.float32, + ) + ) self.meta_params = [caves is not None] Element.__init__(self, "warped_rocks", material, transparency) - self.tag = ElementTag.WarpedRocks \ No newline at end of file + self.tag = ElementTag.WarpedRocks diff --git a/infinigen/terrain/elements/waterbody.py b/infinigen/terrain/elements/waterbody.py index cff581fe5..8a5de350b 100644 --- a/infinigen/terrain/elements/waterbody.py +++ b/infinigen/terrain/elements/waterbody.py @@ -7,7 +7,14 @@ import gin import numpy as np from numpy import ascontiguousarray as AC -from infinigen.core.util.organization import Materials, Transparency, ElementNames, ElementTag, Attributes + +from infinigen.core.util.organization import ( + Attributes, + ElementNames, + ElementTag, + Materials, + Transparency, +) from .core import Element @@ -15,6 +22,7 @@ @gin.configurable class Waterbody(Element): name = ElementNames.Liquid + def __init__( self, device, @@ -41,4 +49,4 @@ def __init__( self.aux_names = [None] Element.__init__(self, "waterbody", material, transparency) - self.tag = ElementTag.Liquid \ No newline at end of file + self.tag = ElementTag.Liquid diff --git a/infinigen/terrain/land_process/erosion.py b/infinigen/terrain/land_process/erosion.py index 712c39d97..7cf696932 100644 --- a/infinigen/terrain/land_process/erosion.py +++ b/infinigen/terrain/land_process/erosion.py @@ -11,9 +11,9 @@ import numpy as np from numpy import ascontiguousarray as AC -from infinigen.terrain.utils import ASFLOAT, load_cdll, read, smooth +import infinigen from infinigen.core.util.organization import AssetFile, Process -from infinigen.core.init import repo_root +from infinigen.terrain.utils import ASFLOAT, load_cdll, read, smooth @gin.configurable @@ -31,20 +31,33 @@ def run_erosion( dll = load_cdll("terrain/lib/cpu/soil_machine/SoilMachine.so") func = dll.run func.argtypes = [ - POINTER(c_float), POINTER(c_float), POINTER(c_float), - c_int32, c_int32, c_int32, c_int32, c_int32, c_float, c_float, c_char_p + POINTER(c_float), + POINTER(c_float), + POINTER(c_float), + c_int32, + c_int32, + c_int32, + c_int32, + c_int32, + c_float, + c_float, + c_char_p, ] func.restype = None - heightmap = read(str(folder/f'{AssetFile.Heightmap}.exr')).astype(np.float32) + heightmap = read(str(folder / 
f"{AssetFile.Heightmap}.exr")).astype(np.float32) tile_size = float(np.loadtxt(f"{folder}/{AssetFile.TileSize}.txt")) - soil_config_path = repo_root()/"infinigen/terrain/source/cpu/soil_machine/soil/sand.soil" + soil_config_path = ( + infinigen.repo_root() + / "infinigen/terrain/source/cpu/soil_machine/soil/sand.soil" + ) for i, N, n_iter in zip(list(range(len(Ns))), Ns, n_iters): M = heightmap.shape[0] heightmap = cv2.resize(heightmap, (N, N)) - if N > M: heightmap = smooth(heightmap, 3) + if N > M: + heightmap = smooth(heightmap, 3) original_heightmap = heightmap.copy() ground_level = heightmap.min() - ground_depth height_scale = 1 @@ -52,18 +65,37 @@ def run_erosion( result_heightmap = np.zeros_like(heightmap) watertrack = np.zeros_like(heightmap) func( - ASFLOAT(heightmap), ASFLOAT(result_heightmap), ASFLOAT(watertrack), - N, N, 0, n_iter, 0, spatial * tile_size, c_eq_factor[i], str(soil_config_path).encode('utf-8'), + ASFLOAT(heightmap), + ASFLOAT(result_heightmap), + ASFLOAT(watertrack), + N, + N, + 0, + n_iter, + 0, + spatial * tile_size, + c_eq_factor[i], + str(soil_config_path).encode("utf-8"), ) heightmap = result_heightmap / height_scale + ground_level watertrack = watertrack.reshape((N, N)) - watertrack = np.clip((watertrack - mask_range[0]) / (mask_range[1] - mask_range[0]), a_min=0, a_max=1) - watertrack = watertrack ** 0.2 + watertrack = np.clip( + (watertrack - mask_range[0]) / (mask_range[1] - mask_range[0]), + a_min=0, + a_max=1, + ) + watertrack = watertrack**0.2 if mask_height_range is not None: - mask = np.clip((heightmap - mask_height_range[0]) / (mask_height_range[1] - mask_height_range[0]), a_min=0, a_max=1) + mask = np.clip( + (heightmap - mask_height_range[0]) + / (mask_height_range[1] - mask_height_range[0]), + a_min=0, + a_max=1, + ) else: mask = np.ones_like(heightmap) - if i == 0 and len(Ns) > 1: heightmap -= watertrack * sinking_rate + if i == 0 and len(Ns) > 1: + heightmap -= watertrack * sinking_rate heightmap = heightmap * mask + original_heightmap * (1 - mask) if mask_height_range is not None: kernel = np.ones((5, 5), np.float32) / 25 @@ -72,7 +104,7 @@ def run_erosion( heightmap = cv2.filter2D(heightmap, -1, kernel) heightmap = heightmap * (1 - mask) + original_heightmap * mask - cv2.imwrite(str(folder/f'{Process.Erosion}.{AssetFile.Heightmap}.exr'), heightmap) - cv2.imwrite(str(folder/f'{Process.Erosion}.{AssetFile.Mask}.exr'), watertrack) + cv2.imwrite(str(folder / f"{Process.Erosion}.{AssetFile.Heightmap}.exr"), heightmap) + cv2.imwrite(str(folder / f"{Process.Erosion}.{AssetFile.Mask}.exr"), watertrack) del dll diff --git a/infinigen/terrain/land_process/snowfall.py b/infinigen/terrain/land_process/snowfall.py index fe95a46c6..c91c9a91c 100644 --- a/infinigen/terrain/land_process/snowfall.py +++ b/infinigen/terrain/land_process/snowfall.py @@ -7,19 +7,29 @@ import cv2 import gin import numpy as np -from landlab import RasterModelGrid -from landlab.components import FlowDirectorSteepest, TransportLengthHillslopeDiffuser - -from infinigen.terrain.utils import get_normal, read, smooth from tqdm import tqdm + +try: + import landlab + from landlab import RasterModelGrid + from landlab.components import ( + FlowDirectorSteepest, + TransportLengthHillslopeDiffuser, + ) +except ImportError: + landlab = None + from infinigen.core.util.organization import AssetFile, Process from infinigen.core.util.random import random_general as rg +from infinigen.terrain.utils import get_normal, read, smooth snowfall_params_ = {} + + @gin.configurable def snowfall_params( 
normal_params=[ - ((np.cos(np.pi/6), 0, np.sin(np.pi/6)), (0.80, 0.801)), + ((np.cos(np.pi / 6), 0, np.sin(np.pi / 6)), (0.80, 0.801)), ((0, 0, 1), (0.90, 0.901)), ], detailed_normal_params=[((0, 0, 1), (0.80, 0.801))], @@ -34,6 +44,7 @@ def snowfall_params( } return snowfall_params_ + @gin.configurable def run_snowfall( folder, @@ -41,6 +52,12 @@ def run_snowfall( diffussion_params=[(256, 10, 9), (1024, 10, 5)], verbose=0, ): + if landlab is None: + raise ImportError( + "landlab must be installed to use terrain snowfall " + "Please install optional terrain dependencies via `pip install .[terrain]`" + ) + heightmap_path = f"{folder}/{Process.Erosion}.{AssetFile.Heightmap}.exr" tile_size = float(np.loadtxt(f"{folder}/{AssetFile.TileSize}.txt")) rocks = read(heightmap_path) @@ -54,13 +71,17 @@ def run_snowfall( mg.set_closed_boundaries_at_grid_edges(False, False, False, False) _ = mg.add_field("topographic__elevation", snow, at="node") fdir = FlowDirectorSteepest(mg) - tl_diff = TransportLengthHillslopeDiffuser(mg, erodibility=0.001, slope_crit=0.6) - if verbose: range_t = tqdm(range(n_iters)) - else: range_t = range(n_iters) + tl_diff = TransportLengthHillslopeDiffuser( + mg, erodibility=0.001, slope_crit=0.6 + ) + if verbose: + range_t = tqdm(range(n_iters)) + else: + range_t = range(n_iters) for t in range_t: fdir.run_one_step() - tl_diff.run_one_step(1.) - snow = mg.at_node['topographic__elevation'] + tl_diff.run_one_step(1.0) + snow = mg.at_node["topographic__elevation"] snow = snow.reshape((N, N)) snow = cv2.resize(snow, (M, M)) snow = smooth(snow, smoothing_kernel) @@ -72,9 +93,35 @@ def run_snowfall( reference_snow = rocks * blending + snows * (1 - blending) normal_map = get_normal(reference_snow, tile_size / snows.shape[0]) mask_sharpening = 1 / (th1 - th0) - mask += np.clip(((normal_map * np.array(normal_preference).reshape((1, 1, 3))).sum(axis=-1) - th0) * mask_sharpening, a_min=0, a_max=1) - mask -= np.clip(((-normal_map * np.array(normal_preference).reshape((1, 1, 3))).sum(axis=-1) - th0) * mask_sharpening, a_min=0, a_max=1) + mask += np.clip( + ( + (normal_map * np.array(normal_preference).reshape((1, 1, 3))).sum( + axis=-1 + ) + - th0 + ) + * mask_sharpening, + a_min=0, + a_max=1, + ) + mask -= np.clip( + ( + (-normal_map * np.array(normal_preference).reshape((1, 1, 3))).sum( + axis=-1 + ) + - th0 + ) + * mask_sharpening, + a_min=0, + a_max=1, + ) mask = np.clip(mask, a_min=0, a_max=1) heightmap = snows * mask + rocks * (1 - mask) - cv2.imwrite(str(folder/f'{Process.Snowfall}.{AssetFile.Heightmap}.exr'), heightmap.astype(np.float32)) - cv2.imwrite(str(folder/f'{Process.Snowfall}.{AssetFile.Mask}.exr'), mask.astype(np.float32)) \ No newline at end of file + cv2.imwrite( + str(folder / f"{Process.Snowfall}.{AssetFile.Heightmap}.exr"), + heightmap.astype(np.float32), + ) + cv2.imwrite( + str(folder / f"{Process.Snowfall}.{AssetFile.Mask}.exr"), + mask.astype(np.float32), + ) diff --git a/infinigen/terrain/marching_cubes/_marching_cubes_lewiner_cy.pyx b/infinigen/terrain/marching_cubes/_marching_cubes_lewiner_cy.pyx index 067a1fd51..bbaf9b7ec 100644 --- a/infinigen/terrain/marching_cubes/_marching_cubes_lewiner_cy.pyx +++ b/infinigen/terrain/marching_cubes/_marching_cubes_lewiner_cy.pyx @@ -26,11 +26,14 @@ by Almar Klein in 2012. Adapted for scikit-image in 2016. 
# Cython specific imports import numpy as np + cimport numpy as cnp + cnp.import_array() # Enable low level memory management -from libc.stdlib cimport malloc, free +from libc.stdlib cimport free, malloc + # Define tiny winy number cdef cnp.float64_t FLT_EPSILON = np.spacing(1.0) #0.0000001 diff --git a/infinigen/terrain/mesh_to_sdf/__init__.py b/infinigen/terrain/mesh_to_sdf/__init__.py index 098618b1a..64e2606e3 100644 --- a/infinigen/terrain/mesh_to_sdf/__init__.py +++ b/infinigen/terrain/mesh_to_sdf/__init__.py @@ -3,12 +3,27 @@ # Original files authored by Marian Kleineberg: https://github.com/marian42/mesh_to_sdf/tree/master import numpy as np +import trimesh + from . import surface_point_cloud from .surface_point_cloud import BadMeshException -from .utils import scale_to_unit_cube, scale_to_unit_sphere, get_raster_points, check_voxels -import trimesh +from .utils import ( + check_voxels, + get_raster_points, + scale_to_unit_cube, + scale_to_unit_sphere, +) + -def get_surface_point_cloud(mesh, surface_point_method='scan', bounding_radius=None, scan_count=100, scan_resolution=400, sample_point_count=10000000, calculate_normals=True): +def get_surface_point_cloud( + mesh, + surface_point_method="scan", + bounding_radius=None, + scan_count=100, + scan_resolution=400, + sample_point_count=10000000, + calculate_normals=True, +): if isinstance(mesh, trimesh.Scene): mesh = mesh.dump().sum() if not isinstance(mesh, trimesh.Trimesh): @@ -16,50 +31,140 @@ def get_surface_point_cloud(mesh, surface_point_method='scan', bounding_radius=N if bounding_radius is None: bounding_radius = np.max(np.linalg.norm(mesh.vertices, axis=1)) * 1.1 - - if surface_point_method == 'scan': - return surface_point_cloud.create_from_scans(mesh, bounding_radius=bounding_radius, scan_count=scan_count, scan_resolution=scan_resolution, calculate_normals=calculate_normals) - elif surface_point_method == 'sample': - return surface_point_cloud.sample_from_mesh(mesh, sample_point_count=sample_point_count, calculate_normals=calculate_normals) + + if surface_point_method == "scan": + return surface_point_cloud.create_from_scans( + mesh, + bounding_radius=bounding_radius, + scan_count=scan_count, + scan_resolution=scan_resolution, + calculate_normals=calculate_normals, + ) + elif surface_point_method == "sample": + return surface_point_cloud.sample_from_mesh( + mesh, + sample_point_count=sample_point_count, + calculate_normals=calculate_normals, + ) else: - raise ValueError('Unknown surface point sampling method: {:s}'.format(surface_point_method)) + raise ValueError( + "Unknown surface point sampling method: {:s}".format(surface_point_method) + ) -def mesh_to_sdf(mesh, query_points, surface_point_method='scan', sign_method='normal', bounding_radius=None, scan_count=100, scan_resolution=400, sample_point_count=10000000, normal_sample_count=11): +def mesh_to_sdf( + mesh, + query_points, + surface_point_method="scan", + sign_method="normal", + bounding_radius=None, + scan_count=100, + scan_resolution=400, + sample_point_count=10000000, + normal_sample_count=11, +): if not isinstance(query_points, np.ndarray): - raise TypeError('query_points must be a numpy array.') + raise TypeError("query_points must be a numpy array.") if len(query_points.shape) != 2 or query_points.shape[1] != 3: - raise ValueError('query_points must be of shape N ✕ 3.') - - if surface_point_method == 'sample' and sign_method == 'depth': - print("Incompatible methods for sampling points and determining sign, using sign_method='normal' instead.") - sign_method 
= 'normal' + raise ValueError("query_points must be of shape N ✕ 3.") + + if surface_point_method == "sample" and sign_method == "depth": + print( + "Incompatible methods for sampling points and determining sign, using sign_method='normal' instead." + ) + sign_method = "normal" - point_cloud = get_surface_point_cloud(mesh, surface_point_method, bounding_radius, scan_count, scan_resolution, sample_point_count, calculate_normals=sign_method=='normal') + point_cloud = get_surface_point_cloud( + mesh, + surface_point_method, + bounding_radius, + scan_count, + scan_resolution, + sample_point_count, + calculate_normals=sign_method == "normal", + ) - if sign_method == 'normal': + if sign_method == "normal": return point_cloud.get_sdf_in_batches(query_points, use_depth_buffer=False) - elif sign_method == 'depth': - return point_cloud.get_sdf_in_batches(query_points, use_depth_buffer=True, sample_count=sample_point_count) + elif sign_method == "depth": + return point_cloud.get_sdf_in_batches( + query_points, use_depth_buffer=True, sample_count=sample_point_count + ) else: - raise ValueError('Unknown sign determination method: {:s}'.format(sign_method)) + raise ValueError("Unknown sign determination method: {:s}".format(sign_method)) -def mesh_to_voxels(mesh, voxel_resolution=64, surface_point_method='scan', sign_method='normal', scan_count=100, scan_resolution=400, sample_point_count=10000000, normal_sample_count=11, pad=False, check_result=False, return_gradients=False): +def mesh_to_voxels( + mesh, + voxel_resolution=64, + surface_point_method="scan", + sign_method="normal", + scan_count=100, + scan_resolution=400, + sample_point_count=10000000, + normal_sample_count=11, + pad=False, + check_result=False, + return_gradients=False, +): mesh = scale_to_unit_cube(mesh) - surface_point_cloud = get_surface_point_cloud(mesh, surface_point_method, 3**0.5, scan_count, scan_resolution, sample_point_count, sign_method=='normal') + surface_point_cloud = get_surface_point_cloud( + mesh, + surface_point_method, + 3**0.5, + scan_count, + scan_resolution, + sample_point_count, + sign_method == "normal", + ) + + return surface_point_cloud.get_voxels( + voxel_resolution, + sign_method == "depth", + normal_sample_count, + pad, + check_result, + return_gradients, + ) - return surface_point_cloud.get_voxels(voxel_resolution, sign_method=='depth', normal_sample_count, pad, check_result, return_gradients) # Sample some uniform points and some normally distributed around the surface as proposed in the DeepSDF paper -def sample_sdf_near_surface(mesh, number_of_points = 500000, surface_point_method='scan', sign_method='normal', scan_count=100, scan_resolution=400, sample_point_count=10000000, normal_sample_count=11, min_size=0, return_gradients=False): +def sample_sdf_near_surface( + mesh, + number_of_points=500000, + surface_point_method="scan", + sign_method="normal", + scan_count=100, + scan_resolution=400, + sample_point_count=10000000, + normal_sample_count=11, + min_size=0, + return_gradients=False, +): mesh = scale_to_unit_sphere(mesh) - - if surface_point_method == 'sample' and sign_method == 'depth': - print("Incompatible methods for sampling points and determining sign, using sign_method='normal' instead.") - sign_method = 'normal' - surface_point_cloud = get_surface_point_cloud(mesh, surface_point_method, 1, scan_count, scan_resolution, sample_point_count, calculate_normals=sign_method=='normal' or return_gradients) + if surface_point_method == "sample" and sign_method == "depth": + print( + "Incompatible 
methods for sampling points and determining sign, using sign_method='normal' instead." + ) + sign_method = "normal" + + surface_point_cloud = get_surface_point_cloud( + mesh, + surface_point_method, + 1, + scan_count, + scan_resolution, + sample_point_count, + calculate_normals=sign_method == "normal" or return_gradients, + ) - return surface_point_cloud.sample_sdf_near_surface(number_of_points, surface_point_method=='scan', sign_method, normal_sample_count, min_size, return_gradients) \ No newline at end of file + return surface_point_cloud.sample_sdf_near_surface( + number_of_points, + surface_point_method == "scan", + sign_method, + normal_sample_count, + min_size, + return_gradients, + ) diff --git a/infinigen/terrain/mesh_to_sdf/pyrender_wrapper.py b/infinigen/terrain/mesh_to_sdf/pyrender_wrapper.py index 80884cf8f..387128277 100644 --- a/infinigen/terrain/mesh_to_sdf/pyrender_wrapper.py +++ b/infinigen/terrain/mesh_to_sdf/pyrender_wrapper.py @@ -9,10 +9,15 @@ import os import sys -if 'pyrender' in sys.modules: - raise ImportError('The mesh_to_sdf package must be imported before pyrender is imported.') -if 'OpenGL' in sys.modules: - raise ImportError('The mesh_to_sdf package must be imported before OpenGL is imported.') + +if "pyrender" in sys.modules: + raise ImportError( + "The mesh_to_sdf package must be imported before pyrender is imported." + ) +if "OpenGL" in sys.modules: + raise ImportError( + "The mesh_to_sdf package must be imported before OpenGL is imported." + ) # ruff: noqa: E402 # Disable antialiasing: @@ -21,35 +26,50 @@ suppress_multisampling = False old_gl_enable = OpenGL.GL.glEnable + def new_gl_enable(value): if suppress_multisampling and value == OpenGL.GL.GL_MULTISAMPLE: OpenGL.GL.glDisable(value) else: old_gl_enable(value) + OpenGL.GL.glEnable = new_gl_enable old_glRenderbufferStorageMultisample = OpenGL.GL.glRenderbufferStorageMultisample -def new_glRenderbufferStorageMultisample(target, samples, internalformat, width, height): + +def new_glRenderbufferStorageMultisample( + target, samples, internalformat, width, height +): if suppress_multisampling: OpenGL.GL.glRenderbufferStorage(target, internalformat, width, height) else: - old_glRenderbufferStorageMultisample(target, samples, internalformat, width, height) + old_glRenderbufferStorageMultisample( + target, samples, internalformat, width, height + ) + OpenGL.GL.glRenderbufferStorageMultisample = new_glRenderbufferStorageMultisample import pyrender + # Render a normal buffer instead of a color buffer -class CustomShaderCache(): +class CustomShaderCache: def __init__(self): self.program = None - def get_program(self, vertex_shader, fragment_shader, geometry_shader=None, defines=None): + def get_program( + self, vertex_shader, fragment_shader, geometry_shader=None, defines=None + ): if self.program is None: - shaders_directory = os.path.join(os.path.dirname(__file__), 'shaders') - self.program = pyrender.shader_program.ShaderProgram(os.path.join(shaders_directory, 'mesh.vert'), os.path.join(shaders_directory, 'mesh.frag'), defines=defines) + shaders_directory = os.path.join(os.path.dirname(__file__), "shaders") + self.program = pyrender.shader_program.ShaderProgram( + os.path.join(shaders_directory, "mesh.vert"), + os.path.join(shaders_directory, "mesh.frag"), + defines=defines, + ) return self.program @@ -57,7 +77,7 @@ def render_normal_and_depth_buffers(mesh, camera, camera_transform, resolution): global suppress_multisampling suppress_multisampling = True scene = pyrender.Scene() - 
scene.add(pyrender.Mesh.from_trimesh(mesh, smooth = False)) + scene.add(pyrender.Mesh.from_trimesh(mesh, smooth=False)) scene.add(camera, pose=camera_transform) renderer = pyrender.OffscreenRenderer(resolution, resolution) @@ -65,4 +85,4 @@ def render_normal_and_depth_buffers(mesh, camera, camera_transform, resolution): color, depth = renderer.render(scene, flags=pyrender.RenderFlags.SKIP_CULL_FACES) suppress_multisampling = False - return color, depth \ No newline at end of file + return color, depth diff --git a/infinigen/terrain/mesh_to_sdf/scan.py b/infinigen/terrain/mesh_to_sdf/scan.py index ad4993ed3..c450f44da 100644 --- a/infinigen/terrain/mesh_to_sdf/scan.py +++ b/infinigen/terrain/mesh_to_sdf/scan.py @@ -3,29 +3,43 @@ # Original files authored by Marian Kleineberg: https://github.com/marian42/mesh_to_sdf/tree/master import numpy as np -from .pyrender_wrapper import render_normal_and_depth_buffers -import pyrender from scipy.spatial.transform import Rotation from skimage import io -if hasattr(Rotation, "as_matrix"): # scipy>=1.4.0 - def get_rotation_matrix(angle, axis='y'): +try: + import pyrender_wrapper + import pyrender # isort: skip +except ImportError: + pyrender_wrapper = None + pyrender = None + + +if hasattr(Rotation, "as_matrix"): # scipy>=1.4.0 + + def get_rotation_matrix(angle, axis="y"): matrix = np.identity(4) matrix[:3, :3] = Rotation.from_euler(axis, angle).as_matrix() return matrix -else: # scipy<1.4.0 - def get_rotation_matrix(angle, axis='y'): +else: # scipy<1.4.0 + + def get_rotation_matrix(angle, axis="y"): matrix = np.identity(4) matrix[:3, :3] = Rotation.from_euler(axis, angle).as_dcm() return matrix + def get_camera_transform_looking_at_origin(rotation_y, rotation_x, camera_distance=2): camera_transform = np.identity(4) camera_transform[2, 3] = camera_distance - camera_transform = np.matmul(get_rotation_matrix(rotation_x, axis='x'), camera_transform) - camera_transform = np.matmul(get_rotation_matrix(rotation_y, axis='y'), camera_transform) + camera_transform = np.matmul( + get_rotation_matrix(rotation_x, axis="x"), camera_transform + ) + camera_transform = np.matmul( + get_rotation_matrix(rotation_y, axis="y"), camera_transform + ) return camera_transform + # Camera transform from position and look direction def get_camera_transform(position, look_direction): camera_forward = -look_direction / np.linalg.norm(look_direction) @@ -48,37 +62,65 @@ def get_camera_transform(position, look_direction): return np.matmul(translation, rotation) -''' + +""" A virtual laser scan of an object from one point in space. This renders a normal and depth buffer and reprojects it into a point cloud. The resulting point cloud contains a point for every pixel in the buffer that hit the model. 
-''' -class Scan(): - def __init__(self, mesh, camera_transform, resolution=400, calculate_normals=True, fov=1, z_near=0.1, z_far=10): +""" + + +class Scan: + def __init__( + self, + mesh, + camera_transform, + resolution=400, + calculate_normals=True, + fov=1, + z_near=0.1, + z_far=10, + ): + if pyrender is None or pyrender_wrapper is None: + raise ImportError( + "pyrender must be installed to use the Scan class " + "Please install optional terrain dependencies via `pip install .[terrain]`" + ) + self.camera_transform = camera_transform - self.camera_position = np.matmul(self.camera_transform, np.array([0, 0, 0, 1]))[:3] + self.camera_position = np.matmul(self.camera_transform, np.array([0, 0, 0, 1]))[ + :3 + ] self.resolution = resolution - - camera = pyrender.PerspectiveCamera(yfov=fov, aspectRatio=1.0, znear = z_near, zfar = z_far) + + camera = pyrender.PerspectiveCamera( + yfov=fov, aspectRatio=1.0, znear=z_near, zfar=z_far + ) self.projection_matrix = camera.get_projection_matrix() - color, depth = render_normal_and_depth_buffers(mesh, camera, self.camera_transform, resolution) + color, depth = pyrender_wrapper.render_normal_and_depth_buffers( + mesh, camera, self.camera_transform, resolution + ) self.normal_buffer = color if calculate_normals else None self.depth_buffer = depth.copy() - + indices = np.argwhere(depth != 0) - depth[depth == 0] = float('inf') + depth[depth == 0] = float("inf") # This reverts the processing that pyrender does and calculates the original depth buffer in clipping space - self.depth = (z_far + z_near - (2.0 * z_near * z_far) / depth) / (z_far - z_near) - + self.depth = (z_far + z_near - (2.0 * z_near * z_far) / depth) / ( + z_far - z_near + ) + points = np.ones((indices.shape[0], 4)) - points[:, [1, 0]] = indices.astype(float) / (resolution -1) * 2 - 1 + points[:, [1, 0]] = indices.astype(float) / (resolution - 1) * 2 - 1 points[:, 1] *= -1 points[:, 2] = self.depth[indices[:, 0], indices[:, 1]] - - clipping_to_world = np.matmul(self.camera_transform, np.linalg.inv(self.projection_matrix)) + + clipping_to_world = np.matmul( + self.camera_transform, np.linalg.inv(self.projection_matrix) + ) points = np.matmul(points, clipping_to_world.transpose()) points /= points[:, 3][:, np.newaxis] @@ -87,7 +129,7 @@ def __init__(self, mesh, camera_transform, resolution=400, calculate_normals=Tru if calculate_normals: normals = color[indices[:, 0], indices[:, 1]] / 255 * 2 - 1 camera_to_points = self.camera_position - self.points - normal_orientation = np.einsum('ij,ij->i', camera_to_points, normals) + normal_orientation = np.einsum("ij,ij->i", camera_to_points, normals) normals[normal_orientation < 0] *= -1 self.normals = normals else: @@ -95,17 +137,23 @@ def __init__(self, mesh, camera_transform, resolution=400, calculate_normals=Tru def convert_world_space_to_viewport(self, points): half_viewport_size = 0.5 * self.resolution - clipping_to_viewport = np.array([ - [half_viewport_size, 0.0, 0.0, half_viewport_size], - [0.0, -half_viewport_size, 0.0, half_viewport_size], - [0.0, 0.0, 1.0, 0.0], - [0, 0, 0.0, 1.0] - ]) - - world_to_clipping = np.matmul(self.projection_matrix, np.linalg.inv(self.camera_transform)) + clipping_to_viewport = np.array( + [ + [half_viewport_size, 0.0, 0.0, half_viewport_size], + [0.0, -half_viewport_size, 0.0, half_viewport_size], + [0.0, 0.0, 1.0, 0.0], + [0, 0, 0.0, 1.0], + ] + ) + + world_to_clipping = np.matmul( + self.projection_matrix, np.linalg.inv(self.camera_transform) + ) world_to_viewport = np.matmul(clipping_to_viewport, 
world_to_clipping) - - world_space_points = np.concatenate([points, np.ones((points.shape[0], 1))], axis=1) + + world_space_points = np.concatenate( + [points, np.ones((points.shape[0], 1))], axis=1 + ) viewport_points = np.matmul(world_space_points, world_to_viewport.transpose()) viewport_points /= viewport_points[:, 3][:, np.newaxis] return viewport_points @@ -115,10 +163,19 @@ def is_visible(self, points): pixels = viewport_points[:, :2].astype(int) # This only has an effect if the camera is inside the model - in_viewport = (pixels[:, 0] >= 0) & (pixels[:, 1] >= 0) & (pixels[:, 0] < self.resolution) & (pixels[:, 1] < self.resolution) & (viewport_points[:, 2] > -1) + in_viewport = ( + (pixels[:, 0] >= 0) + & (pixels[:, 1] >= 0) + & (pixels[:, 0] < self.resolution) + & (pixels[:, 1] < self.resolution) + & (viewport_points[:, 2] > -1) + ) result = np.zeros(points.shape[0], dtype=bool) - result[in_viewport] = viewport_points[in_viewport, 2] < self.depth[pixels[in_viewport, 1], pixels[in_viewport, 0]] + result[in_viewport] = ( + viewport_points[in_viewport, 2] + < self.depth[pixels[in_viewport, 1], pixels[in_viewport, 0]] + ) return result @@ -129,11 +186,11 @@ def show(self): def save(self, filename_depth, filename_normals=None): if filename_normals is None and self.normal_buffer is not None: - items = filename_depth.split('.') - filename_normals = '.'.join(items[:-1]) + "_normals." + items[-1] - + items = filename_depth.split(".") + filename_normals = ".".join(items[:-1]) + "_normals." + items[-1] + depth = self.depth_buffer / np.max(self.depth_buffer) * 255 io.imsave(filename_depth, depth.astype(np.uint8)) if self.normal_buffer is not None: - io.imsave(filename_normals, self.normal_buffer.astype(np.uint8)) \ No newline at end of file + io.imsave(filename_normals, self.normal_buffer.astype(np.uint8)) diff --git a/infinigen/terrain/mesh_to_sdf/surface_point_cloud.py b/infinigen/terrain/mesh_to_sdf/surface_point_cloud.py index 932144bc6..7a11c1ff1 100644 --- a/infinigen/terrain/mesh_to_sdf/surface_point_cloud.py +++ b/infinigen/terrain/mesh_to_sdf/surface_point_cloud.py @@ -9,22 +9,28 @@ # Authors: Zeyu Ma -from .scan import Scan, get_camera_transform_looking_at_origin - -import trimesh import logging +import math + import numpy as np +import trimesh from sklearn.neighbors import KDTree -import math -import pyrender -from .utils import sample_uniform_points_in_unit_sphere -from .utils import get_raster_points, check_voxels + +from .scan import Scan, get_camera_transform_looking_at_origin +from .utils import check_voxels, get_raster_points, sample_uniform_points_in_unit_sphere + +try: + import pyrender # isort: skip +except ImportError: + pyrender = None logging.getLogger("trimesh").setLevel(9000) + class BadMeshException(Exception): pass + class SurfacePointCloud: def __init__(self, mesh, points, normals=None, scans=None): self.mesh = mesh @@ -39,10 +45,18 @@ def get_random_surface_points(self, count, use_scans=True): indices = np.random.choice(self.points.shape[0], count) return self.points[indices, :] else: - samples, index = trimesh.sample.sample_surface(mesh=self.mesh, count=count, face_weight=None, seed=0) + samples, index = trimesh.sample.sample_surface( + mesh=self.mesh, count=count, face_weight=None, seed=0 + ) return samples - def get_sdf(self, query_points, use_depth_buffer=False, sample_count=11, return_gradients=False): + def get_sdf( + self, + query_points, + use_depth_buffer=False, + sample_count=11, + return_gradients=False, + ): if use_depth_buffer: distances, indices = 
self.kd_tree.query(query_points) distances = distances.astype(np.float32).reshape(-1) @@ -59,7 +73,10 @@ def get_sdf(self, query_points, use_depth_buffer=False, sample_count=11, return_ closest_points = self.points[indices] direction_from_surface = query_points[:, np.newaxis, :] - closest_points - inside = np.einsum('ijk,ijk->ij', direction_from_surface, self.normals[indices]) < 0 + inside = ( + np.einsum("ijk,ijk->ij", direction_from_surface, self.normals[indices]) + < 0 + ) inside = np.sum(inside, axis=1) > sample_count * 0.5 distances = distances[:, 0] distances[inside] *= -1 @@ -69,20 +86,41 @@ def get_sdf(self, query_points, use_depth_buffer=False, sample_count=11, return_ gradients[inside] *= -1 if return_gradients: - near_surface = np.abs(distances) < math.sqrt(0.0025**2 * 3) * 3 # 3D 2-norm stdev * 3 - gradients = np.where(near_surface[:, np.newaxis], self.normals[indices[:, 0]], gradients) + near_surface = ( + np.abs(distances) < math.sqrt(0.0025**2 * 3) * 3 + ) # 3D 2-norm stdev * 3 + gradients = np.where( + near_surface[:, np.newaxis], self.normals[indices[:, 0]], gradients + ) gradients /= np.linalg.norm(gradients, axis=1)[:, np.newaxis] return distances, gradients else: return distances - def get_sdf_in_batches(self, query_points, use_depth_buffer=False, sample_count=11, batch_size=1000000, return_gradients=False): + def get_sdf_in_batches( + self, + query_points, + use_depth_buffer=False, + sample_count=11, + batch_size=1000000, + return_gradients=False, + ): if query_points.shape[0] <= batch_size: - return self.get_sdf(query_points, use_depth_buffer=use_depth_buffer, sample_count=sample_count, return_gradients=return_gradients) + return self.get_sdf( + query_points, + use_depth_buffer=use_depth_buffer, + sample_count=sample_count, + return_gradients=return_gradients, + ) n_batches = int(math.ceil(query_points.shape[0] / batch_size)) batches = [ - self.get_sdf(points, use_depth_buffer=use_depth_buffer, sample_count=sample_count, return_gradients=return_gradients) + self.get_sdf( + points, + use_depth_buffer=use_depth_buffer, + sample_count=sample_count, + return_gradients=return_gradients, + ) for points in np.array_split(query_points, n_batches) ] if return_gradients: @@ -90,54 +128,101 @@ def get_sdf_in_batches(self, query_points, use_depth_buffer=False, sample_count= gradients = np.concatenate([batch[1] for batch in batches]) return distances, gradients else: - return np.concatenate(batches) # distances - - def get_voxels(self, voxel_resolution, use_depth_buffer=False, sample_count=11, pad=False, check_result=False, return_gradients=False): - result = self.get_sdf_in_batches(get_raster_points(voxel_resolution), use_depth_buffer, sample_count, return_gradients=return_gradients) + return np.concatenate(batches) # distances + + def get_voxels( + self, + voxel_resolution, + use_depth_buffer=False, + sample_count=11, + pad=False, + check_result=False, + return_gradients=False, + ): + result = self.get_sdf_in_batches( + get_raster_points(voxel_resolution), + use_depth_buffer, + sample_count, + return_gradients=return_gradients, + ) if not return_gradients: sdf = result else: sdf, gradients = result - voxel_gradients = np.reshape(gradients, (voxel_resolution, voxel_resolution, voxel_resolution, 3)) - + voxel_gradients = np.reshape( + gradients, (voxel_resolution, voxel_resolution, voxel_resolution, 3) + ) + voxels = sdf.reshape((voxel_resolution, voxel_resolution, voxel_resolution)) if check_result and not check_voxels(voxels): raise BadMeshException() if pad: - voxels = 
np.pad(voxels, 1, mode='constant', constant_values=1) + voxels = np.pad(voxels, 1, mode="constant", constant_values=1) if return_gradients: if pad: - voxel_gradients = np.pad(voxel_gradients, ((1, 1), (1, 1), (1, 1), (0, 0)), mode='edge') + voxel_gradients = np.pad( + voxel_gradients, ((1, 1), (1, 1), (1, 1), (0, 0)), mode="edge" + ) return voxels, voxel_gradients else: return voxels - def sample_sdf_near_surface(self, number_of_points=500000, use_scans=True, sign_method='normal', normal_sample_count=11, min_size=0, return_gradients=False): + def sample_sdf_near_surface( + self, + number_of_points=500000, + use_scans=True, + sign_method="normal", + normal_sample_count=11, + min_size=0, + return_gradients=False, + ): query_points = [] surface_sample_count = int(number_of_points * 47 / 50) // 2 - surface_points = self.get_random_surface_points(surface_sample_count, use_scans=use_scans) - query_points.append(surface_points + np.random.normal(scale=0.0025, size=(surface_sample_count, 3))) - query_points.append(surface_points + np.random.normal(scale=0.00025, size=(surface_sample_count, 3))) - + surface_points = self.get_random_surface_points( + surface_sample_count, use_scans=use_scans + ) + query_points.append( + surface_points + + np.random.normal(scale=0.0025, size=(surface_sample_count, 3)) + ) + query_points.append( + surface_points + + np.random.normal(scale=0.00025, size=(surface_sample_count, 3)) + ) + unit_sphere_sample_count = number_of_points - surface_points.shape[0] * 2 - unit_sphere_points = sample_uniform_points_in_unit_sphere(unit_sphere_sample_count) + unit_sphere_points = sample_uniform_points_in_unit_sphere( + unit_sphere_sample_count + ) query_points.append(unit_sphere_points) query_points = np.concatenate(query_points).astype(np.float32) - if sign_method == 'normal': - sdf = self.get_sdf_in_batches(query_points, use_depth_buffer=False, sample_count=normal_sample_count, return_gradients=return_gradients) - elif sign_method == 'depth': - sdf = self.get_sdf_in_batches(query_points, use_depth_buffer=True, return_gradients=return_gradients) + if sign_method == "normal": + sdf = self.get_sdf_in_batches( + query_points, + use_depth_buffer=False, + sample_count=normal_sample_count, + return_gradients=return_gradients, + ) + elif sign_method == "depth": + sdf = self.get_sdf_in_batches( + query_points, use_depth_buffer=True, return_gradients=return_gradients + ) else: - raise ValueError('Unknown sign determination method: {:s}'.format(sign_method)) + raise ValueError( + "Unknown sign determination method: {:s}".format(sign_method) + ) if return_gradients: sdf, gradients = sdf if min_size > 0: - model_size = np.count_nonzero(sdf[-unit_sphere_sample_count:] < 0) / unit_sphere_sample_count + model_size = ( + np.count_nonzero(sdf[-unit_sphere_sample_count:] < 0) + / unit_sphere_sample_count + ) if model_size < min_size: raise BadMeshException() @@ -147,10 +232,13 @@ def sample_sdf_near_surface(self, number_of_points=500000, use_scans=True, sign_ return query_points, sdf def show(self): + if pyrender is None: + raise ImportError("pyrender is required to show the surface point cloud.") + scene = pyrender.Scene() scene.add(pyrender.Mesh.from_points(self.points, normals=self.normals)) pyrender.Viewer(scene, use_raymond_lighting=True, point_size=2) - + def is_outside(self, points): result = None for scan in self.scans: @@ -160,6 +248,7 @@ def is_outside(self, points): result = np.logical_or(result, scan.is_visible(points)) return result + def get_equidistant_camera_angles(count): increment = 
math.pi * (3 - math.sqrt(5)) for i in range(count): @@ -167,33 +256,45 @@ def get_equidistant_camera_angles(count): phi = ((i + 1) * increment) % (2 * math.pi) yield phi, theta -def create_from_scans(mesh, bounding_radius=1, scan_count=100, scan_resolution=400, calculate_normals=True): + +def create_from_scans( + mesh, bounding_radius=1, scan_count=100, scan_resolution=400, calculate_normals=True +): scans = [] for phi, theta in get_equidistant_camera_angles(scan_count): - camera_transform = get_camera_transform_looking_at_origin(phi, theta, camera_distance=2 * bounding_radius) - scans.append(Scan(mesh, - camera_transform=camera_transform, - resolution=scan_resolution, - calculate_normals=calculate_normals, - fov=1.0472, - z_near=bounding_radius * 1, - z_far=bounding_radius * 3 - )) - - return SurfacePointCloud(mesh, + camera_transform = get_camera_transform_looking_at_origin( + phi, theta, camera_distance=2 * bounding_radius + ) + scans.append( + Scan( + mesh, + camera_transform=camera_transform, + resolution=scan_resolution, + calculate_normals=calculate_normals, + fov=1.0472, + z_near=bounding_radius * 1, + z_far=bounding_radius * 3, + ) + ) + + return SurfacePointCloud( + mesh, points=np.concatenate([scan.points for scan in scans], axis=0), - normals=np.concatenate([scan.normals for scan in scans], axis=0) if calculate_normals else None, - scans=scans + normals=np.concatenate([scan.normals for scan in scans], axis=0) + if calculate_normals + else None, + scans=scans, ) + def sample_from_mesh(mesh, sample_point_count=10000000, calculate_normals=True): - points, face_indices = trimesh.sample.sample_surface(mesh=mesh, count=sample_point_count, face_weight=None, seed=0) + points, face_indices = trimesh.sample.sample_surface( + mesh=mesh, count=sample_point_count, face_weight=None, seed=0 + ) if calculate_normals: normals = mesh.face_normals[face_indices] - return SurfacePointCloud(mesh, - points=points, - normals=normals if calculate_normals else None, - scans=None - ) \ No newline at end of file + return SurfacePointCloud( + mesh, points=points, normals=normals if calculate_normals else None, scans=None + ) diff --git a/infinigen/terrain/mesh_to_sdf/utils.py b/infinigen/terrain/mesh_to_sdf/utils.py index 0f75d81d8..8f905e6d6 100644 --- a/infinigen/terrain/mesh_to_sdf/utils.py +++ b/infinigen/terrain/mesh_to_sdf/utils.py @@ -2,8 +2,9 @@ # Original files authored by Marian Kleineberg: https://github.com/marian42/mesh_to_sdf/tree/master -import trimesh import numpy as np +import trimesh + def scale_to_unit_sphere(mesh): if isinstance(mesh, trimesh.Scene): @@ -15,6 +16,7 @@ def scale_to_unit_sphere(mesh): return trimesh.Trimesh(vertices=vertices, faces=mesh.faces) + def scale_to_unit_cube(mesh): if isinstance(mesh, trimesh.Scene): mesh = mesh.dump().sum() @@ -24,16 +26,18 @@ def scale_to_unit_cube(mesh): return trimesh.Trimesh(vertices=vertices, faces=mesh.faces) + voxel_points = dict() + def get_raster_points(voxel_resolution): if voxel_resolution in voxel_points: return voxel_points[voxel_resolution] - + points = np.meshgrid( np.linspace(-1, 1, voxel_resolution), np.linspace(-1, 1, voxel_resolution), - np.linspace(-1, 1, voxel_resolution) + np.linspace(-1, 1, voxel_resolution), ) points = np.stack(points) points = np.swapaxes(points, 1, 2) @@ -42,6 +46,7 @@ def get_raster_points(voxel_resolution): voxel_points[voxel_resolution] = points return points + def check_voxels(voxels): block = voxels[:-1, :-1, :-1] d1 = (block - voxels[1:, :-1, :-1]).reshape(-1) @@ -51,16 +56,21 @@ def 
check_voxels(voxels): max_distance = max(np.max(d1), np.max(d2), np.max(d3)) return max_distance < 2.0 / voxels.shape[0] * 3**0.5 * 1.1 + def sample_uniform_points_in_unit_sphere(amount): unit_sphere_points = np.random.uniform(-1, 1, size=(amount * 2 + 20, 3)) - unit_sphere_points = unit_sphere_points[np.linalg.norm(unit_sphere_points, axis=1) < 1] + unit_sphere_points = unit_sphere_points[ + np.linalg.norm(unit_sphere_points, axis=1) < 1 + ] points_available = unit_sphere_points.shape[0] if points_available < amount: # This is a fallback for the rare case that too few points are inside the unit sphere result = np.zeros((amount, 3)) result[:points_available, :] = unit_sphere_points - result[points_available:, :] = sample_uniform_points_in_unit_sphere(amount - points_available) + result[points_available:, :] = sample_uniform_points_in_unit_sphere( + amount - points_available + ) return result else: - return unit_sphere_points[:amount, :] \ No newline at end of file + return unit_sphere_points[:amount, :] diff --git a/infinigen/terrain/mesher/__init__.py b/infinigen/terrain/mesher/__init__.py index 5d653672f..177aa037d 100644 --- a/infinigen/terrain/mesher/__init__.py +++ b/infinigen/terrain/mesher/__init__.py @@ -4,5 +4,5 @@ # Authors: Zeyu Ma +from .spherical_mesher import OpaqueSphericalMesher, TransparentSphericalMesher from .uniform_mesher import UniformMesher -from .spherical_mesher import OpaqueSphericalMesher, TransparentSphericalMesher \ No newline at end of file diff --git a/infinigen/terrain/mesher/_marching_cubes_lewiner.py b/infinigen/terrain/mesher/_marching_cubes_lewiner.py index 4ad515e17..9bfa68938 100644 --- a/infinigen/terrain/mesher/_marching_cubes_lewiner.py +++ b/infinigen/terrain/mesher/_marching_cubes_lewiner.py @@ -7,13 +7,22 @@ import numpy as np +from infinigen.terrain import marching_cubes as _marching_cubes_lewiner_cy + from . import _marching_cubes_lewiner_luts as mcluts -from infinigen.terrain import marching_cubes as _marching_cubes_lewiner_cy -def marching_cubes(volume, level=None, *, spacing=(1., 1., 1.), - gradient_direction='descent', step_size=1, - allow_degenerate=True, method='lewiner', mask=None): +def marching_cubes( + volume, + level=None, + *, + spacing=(1.0, 1.0, 1.0), + gradient_direction="descent", + step_size=1, + allow_degenerate=True, + method="lewiner", + mask=None, +): """Marching cubes algorithm to find surfaces in 3d volumetric data. In contrast with Lorensen et al. approach [2]_, Lewiner et @@ -129,19 +138,32 @@ def marching_cubes(volume, level=None, *, spacing=(1., 1., 1.), :DOI:`10.1145/37401.37422` """ use_classic = False - if method == 'lorensen': + if method == "lorensen": use_classic = True - elif method != 'lewiner': + elif method != "lewiner": raise ValueError("method should be either 'lewiner' or 'lorensen'") - return _marching_cubes_lewiner(volume, level, spacing, - gradient_direction, step_size, - allow_degenerate, use_classic=use_classic, - mask=mask) - - - -def _marching_cubes_lewiner(volume, level, spacing, gradient_direction, - step_size, allow_degenerate, use_classic, mask): + return _marching_cubes_lewiner( + volume, + level, + spacing, + gradient_direction, + step_size, + allow_degenerate, + use_classic=use_classic, + mask=mask, + ) + + +def _marching_cubes_lewiner( + volume, + level, + spacing, + gradient_direction, + step_size, + allow_degenerate, + use_classic, + mask, +): """Lewiner et al. algorithm for marching cubes. See marching_cubes_lewiner for documentation. 
@@ -149,11 +171,10 @@ def _marching_cubes_lewiner(volume, level, spacing, gradient_direction, # Check volume and ensure its in the format that the alg needs if not isinstance(volume, np.ndarray) or (volume.ndim != 3): - raise ValueError('Input volume should be a 3D numpy array.') + raise ValueError("Input volume should be a 3D numpy array.") if volume.shape[0] < 2 or volume.shape[1] < 2 or volume.shape[2] < 2: raise ValueError("Input array must be at least 2x2x2.") - volume = np.ascontiguousarray(volume, - np.float32) # no copy if not necessary + volume = np.ascontiguousarray(volume, np.float32) # no copy if not necessary # Check/convert other inputs: # level @@ -169,7 +190,7 @@ def _marching_cubes_lewiner(volume, level, spacing, gradient_direction, # step_size step_size = int(step_size) if step_size < 1: - raise ValueError('step_size must be at least one.') + raise ValueError("step_size must be at least one.") # use_classic use_classic = bool(use_classic) @@ -179,15 +200,16 @@ def _marching_cubes_lewiner(volume, level, spacing, gradient_direction, # Check if a mask array is passed if mask is not None: if not mask.shape == volume.shape: - raise ValueError('volume and mask must have the same shape.') + raise ValueError("volume and mask must have the same shape.") # Apply algorithm func = _marching_cubes_lewiner_cy.marching_cubes - vertices_integral, vertices_fractal, faces, normals, values = func(volume, level, L, - step_size, use_classic, mask) + vertices_integral, vertices_fractal, faces, normals, values = func( + volume, level, L, step_size, use_classic, mask + ) if not len(vertices_integral): - raise RuntimeError('No surface found at the given iso value.') + raise RuntimeError("No surface found at the given iso value.") # Output in z-y-x order, as is common in skimage vertices_integral = np.fliplr(vertices_integral) @@ -196,17 +218,17 @@ def _marching_cubes_lewiner(volume, level, spacing, gradient_direction, # Finishing touches to output faces.shape = -1, 3 - if gradient_direction == 'descent': + if gradient_direction == "descent": # MC implementation is right-handed, but gradient_direction is # left-handed faces = np.fliplr(faces) - elif not gradient_direction == 'ascent': + elif not gradient_direction == "ascent": raise ValueError( f"Incorrect input {gradient_direction} in `gradient_direction`, " "see docstring." 
) - assert(np.array_equal(spacing, (1, 1, 1))) - assert(allow_degenerate) + assert np.array_equal(spacing, (1, 1, 1)) + assert allow_degenerate # hacky fixing vertices_fractal[np.abs(vertices_fractal) < 1e-30] = 0 vertices_fractal[np.abs(vertices_fractal - 1) < 1e-30] = 1 @@ -215,8 +237,8 @@ def _marching_cubes_lewiner(volume, level, spacing, gradient_direction, def _to_array(args): shape, text = args - byts = base64.decodebytes(text.encode('utf-8')) - ar = np.frombuffer(byts, dtype='int8') + byts = base64.decodebytes(text.encode("utf-8")) + ar = np.frombuffer(byts, dtype="int8") ar.shape = shape return ar @@ -227,38 +249,115 @@ def _to_array(args): # 0 # 3 1 -> 0x # 2 xx -EDGETORELATIVEPOSX = np.array([ [0,1],[1,1],[1,0],[0,0], [0,1],[1,1],[1,0],[0,0], [0,0],[1,1],[1,1],[0,0] ], 'int8') -EDGETORELATIVEPOSY = np.array([ [0,0],[0,1],[1,1],[1,0], [0,0],[0,1],[1,1],[1,0], [0,0],[0,0],[1,1],[1,1] ], 'int8') -EDGETORELATIVEPOSZ = np.array([ [0,0],[0,0],[0,0],[0,0], [1,1],[1,1],[1,1],[1,1], [0,1],[0,1],[0,1],[0,1] ], 'int8') +EDGETORELATIVEPOSX = np.array( + [ + [0, 1], + [1, 1], + [1, 0], + [0, 0], + [0, 1], + [1, 1], + [1, 0], + [0, 0], + [0, 0], + [1, 1], + [1, 1], + [0, 0], + ], + "int8", +) +EDGETORELATIVEPOSY = np.array( + [ + [0, 0], + [0, 1], + [1, 1], + [1, 0], + [0, 0], + [0, 1], + [1, 1], + [1, 0], + [0, 0], + [0, 0], + [1, 1], + [1, 1], + ], + "int8", +) +EDGETORELATIVEPOSZ = np.array( + [ + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [1, 1], + [1, 1], + [1, 1], + [1, 1], + [0, 1], + [0, 1], + [0, 1], + [0, 1], + ], + "int8", +) def _get_mc_luts(): - """ Kind of lazy obtaining of the luts. - """ - if not hasattr(mcluts, 'THE_LUTS'): - + """Kind of lazy obtaining of the luts.""" + if not hasattr(mcluts, "THE_LUTS"): mcluts.THE_LUTS = _marching_cubes_lewiner_cy.LutProvider( - EDGETORELATIVEPOSX, EDGETORELATIVEPOSY, EDGETORELATIVEPOSZ, - - _to_array(mcluts.CASESCLASSIC), _to_array(mcluts.CASES), - - _to_array(mcluts.TILING1), _to_array(mcluts.TILING2), _to_array(mcluts.TILING3_1), _to_array(mcluts.TILING3_2), - _to_array(mcluts.TILING4_1), _to_array(mcluts.TILING4_2), _to_array(mcluts.TILING5), _to_array(mcluts.TILING6_1_1), - _to_array(mcluts.TILING6_1_2), _to_array(mcluts.TILING6_2), _to_array(mcluts.TILING7_1), - _to_array(mcluts.TILING7_2), _to_array(mcluts.TILING7_3), _to_array(mcluts.TILING7_4_1), - _to_array(mcluts.TILING7_4_2), _to_array(mcluts.TILING8), _to_array(mcluts.TILING9), - _to_array(mcluts.TILING10_1_1), _to_array(mcluts.TILING10_1_1_), _to_array(mcluts.TILING10_1_2), - _to_array(mcluts.TILING10_2), _to_array(mcluts.TILING10_2_), _to_array(mcluts.TILING11), - _to_array(mcluts.TILING12_1_1), _to_array(mcluts.TILING12_1_1_), _to_array(mcluts.TILING12_1_2), - _to_array(mcluts.TILING12_2), _to_array(mcluts.TILING12_2_), _to_array(mcluts.TILING13_1), - _to_array(mcluts.TILING13_1_), _to_array(mcluts.TILING13_2), _to_array(mcluts.TILING13_2_), - _to_array(mcluts.TILING13_3), _to_array(mcluts.TILING13_3_), _to_array(mcluts.TILING13_4), - _to_array(mcluts.TILING13_5_1), _to_array(mcluts.TILING13_5_2), _to_array(mcluts.TILING14), - - _to_array(mcluts.TEST3), _to_array(mcluts.TEST4), _to_array(mcluts.TEST6), - _to_array(mcluts.TEST7), _to_array(mcluts.TEST10), _to_array(mcluts.TEST12), - _to_array(mcluts.TEST13), _to_array(mcluts.SUBCONFIG13), - ) + EDGETORELATIVEPOSX, + EDGETORELATIVEPOSY, + EDGETORELATIVEPOSZ, + _to_array(mcluts.CASESCLASSIC), + _to_array(mcluts.CASES), + _to_array(mcluts.TILING1), + _to_array(mcluts.TILING2), + _to_array(mcluts.TILING3_1), + 
_to_array(mcluts.TILING3_2), + _to_array(mcluts.TILING4_1), + _to_array(mcluts.TILING4_2), + _to_array(mcluts.TILING5), + _to_array(mcluts.TILING6_1_1), + _to_array(mcluts.TILING6_1_2), + _to_array(mcluts.TILING6_2), + _to_array(mcluts.TILING7_1), + _to_array(mcluts.TILING7_2), + _to_array(mcluts.TILING7_3), + _to_array(mcluts.TILING7_4_1), + _to_array(mcluts.TILING7_4_2), + _to_array(mcluts.TILING8), + _to_array(mcluts.TILING9), + _to_array(mcluts.TILING10_1_1), + _to_array(mcluts.TILING10_1_1_), + _to_array(mcluts.TILING10_1_2), + _to_array(mcluts.TILING10_2), + _to_array(mcluts.TILING10_2_), + _to_array(mcluts.TILING11), + _to_array(mcluts.TILING12_1_1), + _to_array(mcluts.TILING12_1_1_), + _to_array(mcluts.TILING12_1_2), + _to_array(mcluts.TILING12_2), + _to_array(mcluts.TILING12_2_), + _to_array(mcluts.TILING13_1), + _to_array(mcluts.TILING13_1_), + _to_array(mcluts.TILING13_2), + _to_array(mcluts.TILING13_2_), + _to_array(mcluts.TILING13_3), + _to_array(mcluts.TILING13_3_), + _to_array(mcluts.TILING13_4), + _to_array(mcluts.TILING13_5_1), + _to_array(mcluts.TILING13_5_2), + _to_array(mcluts.TILING14), + _to_array(mcluts.TEST3), + _to_array(mcluts.TEST4), + _to_array(mcluts.TEST6), + _to_array(mcluts.TEST7), + _to_array(mcluts.TEST10), + _to_array(mcluts.TEST12), + _to_array(mcluts.TEST13), + _to_array(mcluts.SUBCONFIG13), + ) return mcluts.THE_LUTS @@ -300,4 +399,4 @@ def mesh_surface_area(verts, faces): del actual_verts # Area of triangle in 3D = 1/2 * Euclidean norm of cross product - return ((np.cross(a, b) ** 2).sum(axis=1) ** 0.5).sum() / 2. + return ((np.cross(a, b) ** 2).sum(axis=1) ** 0.5).sum() / 2.0 diff --git a/infinigen/terrain/mesher/_marching_cubes_lewiner_luts.py b/infinigen/terrain/mesher/_marching_cubes_lewiner_luts.py index e691a3195..ea8222015 100644 --- a/infinigen/terrain/mesher/_marching_cubes_lewiner_luts.py +++ b/infinigen/terrain/mesher/_marching_cubes_lewiner_luts.py @@ -8,8 +8,10 @@ # distributed with scikit-image, but are available in the # repository under tools/precompute/mc_meta. 
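# Each constant below is a (shape, base64-text) pair that _to_array in
# _marching_cubes_lewiner.py (shown earlier in this diff) decodes into an int8
# lookup table. A minimal decoding sketch, using a made-up 2x3 payload rather than
# one of the real marching-cubes tables:
import base64

import numpy as np

shape, text = (2, 3), base64.encodebytes(bytes(range(6))).decode("utf-8")
table = np.frombuffer(base64.decodebytes(text.encode("utf-8")), dtype="int8").reshape(shape)
# table is now [[0, 1, 2], [3, 4, 5]]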
-#static const char casesClassic[256][16] -CASESCLASSIC = (256, 16), """ +# static const char casesClassic[256][16] +CASESCLASSIC = ( + (256, 16), + """ /////////////////////wAIA/////////////////8AAQn/////////////////AQgDCQgB//// /////////wECCv////////////////8ACAMBAgr/////////////CQIKAAIJ/////////////wII AwIKCAoJCP////////8DCwL/////////////////AAsCCAsA/////////////wEJAAIDC/////// @@ -82,10 +84,13 @@ ////AAILCAAL/////////////wMCC/////////////////8CAwgCCAoKCAn/////////CQoCAAkC /////////////wIDCAIICgABCAEKCP////8BCgL/////////////////AQMICQEI//////////// /wAJAf////////////////8AAwj//////////////////////////////////////w== -""" +""", +) -#static const char cases[256][2] -CASES = (256, 2), """ +# static const char cases[256][2] +CASES = ( + (256, 2), + """ AP8BAAEBAgABAgMAAgMFAAEDAgEDAwUBAgUFBAUJCAABBAICAwQFAgQCBgIGCQsAAwgFBQcDCQEG EA4DDAwFGAEFAwECBAUDAwYHAAUKCQAEAwYEBgsOAQYRDAQLBgUZAggFBwUMCAEGEgwFDgcFHAYV CwQMDwUeCgUGIAYnAgwBBgQAAwUGAAIGBgMFCw4AAwkGBQcEDAEFDgsDCQQFGgMKBgYHBQwCBhMK @@ -95,51 +100,72 @@ BhwMFQcKBikDDQUVCQMLCAUhDBYHCwYqAw4OCwUkBiwCEQYvAxIEBwEJAgsGCAYPCgAFEQwICwcG GgUTDgQMEgYdCAQFIwUoAg8FFgsFDBMGHg4KBiQGKwQECQcFJQcPAxEFLAITAxYBCgUXDAsOCAYf CQYHDAUqAw8LCwYmBi0EBQUtAxMCFQELCAUFJgUrAhIFLgMUAhYBDAUvAhQDFwENAhcBDgEPAP8= -""" +""", +) -#static const char tiling1[16][3] -TILING1 = (16, 3), """ +# static const char tiling1[16][3] +TILING1 = ( + (16, 3), + """ AAgDAAEJAQIKAwsCBAcICQUECgYFBwYLBwsGCgUGCQQFBAgHAwILAQoCAAkBAAMI -""" +""", +) -#static const char tiling2[24][6] -TILING2 = (24, 6), """ +# static const char tiling2[24][6] +TILING2 = ( + (24, 6), + """ AQgDCQgBAAsCCAsABAMABwMECQIKAAIJAAUEAQUAAwoBCwoDAQYFAgYBBwIDBgIHCQcIBQcJBggE CwgGCgQJBgQKCwUKBwULCwoFBwsFCgkEBgoEBgQICwYICQgHBQkHBwMCBgcCAQUGAgEGAwEKCwMK AAQFAQAFCQoCAAkCBAADBwQDAAILCAALAQMICQEI -""" +""", +) -#static const char tiling3_1[24][6] -TILING3_1 = (24, 6), """ +# static const char tiling3_1[24][6] +TILING3_1 = ( + (24, 6), + """ AAgDAQIKCQUEAAgDAwAICwcGAQkAAgMLAAEJCAQHCQABBQoGAQIKCQUECgECBgsHCAQHAwsCAgML CgYFBQoGBAcIBAkFBwYLBQkECwYHBgoFCAcECwMCBQYKBwQIAgsDAgEKBwsGCgIBBAUJAQAJBgoF CQEABwQIAAkBCwMCCAADBgcLBAUJAwgAAwgACgIB -""" +""", +) -#static const char tiling3_2[24][12] -TILING3_2 = (24, 12), """ +# static const char tiling3_2[24][12] +TILING3_2 = ( + (24, 12), + """ CgMCCggDCgEACAoAAwQIAwUEAwAJBQMJBggHBgAIBgsDAAYDCwADCwkACwIBCQsBBwkEBwEJBwgA AQcABgEKBgABCQAGCQYFBAoFBAIKBAkBAgQBBwILBwECBwYKAQcKAgcLAgQHAgMIBAIIBQsGBQML BQoCAwUCCAYHCAoGCAQFCggFCwUGCwkFCwcECQsEBgULBQkLBAcLBAsJBwYIBgoIBQQIBQgKBgsF CwMFAgoFAgUDCwcCBwQCCAMCCAIECwIHAgEHCgYHCgcBBQoECgIEAQkEAQQCCgEGAQAGBgAJBQYJ BAkHCQEHAAgHAAcBAwALAAkLAQILAQsJBwgGCAAGAwsGAwYACAQDBAUDCQADCQMFAgMKAwgKAAEK AAoI -""" +""", +) -#static const char tiling4_1[8][6] -TILING4_1 = (8, 6), """ +# static const char tiling4_1[8][6] +TILING4_1 = ( + (8, 6), + """ AAgDBQoGAAEJCwcGAQIKCAQHCQUEAgMLBAUJCwMCCgIBBwQICQEABgcLAwgABgoF -""" +""", +) -#static const char tiling4_2[8][18] -TILING4_2 = (8, 18), """ +# static const char tiling4_2[8][18] +TILING4_2 = ( + (8, 18), + """ CAUABQgGAwYIBgMKAAoDCgAFCQYBBgkHAAcJBwALAQsACwEGCgcCBwoEAQQKBAEIAggBCAIHCwQD BAsFAgULBQIJAwkCCQMEAwQLBQsECwUCCQIFAgkDBAMJAgcKBAoHCgQBCAEEAQgCBwIIAQYJBwkG CQcACwAHAAsBBgELAAUIBggFCAYDCgMGAwoABQAK -""" +""", +) -#static const char tiling5[48][9] -TILING5 = (48, 9), """ +# static const char tiling5[48][9] +TILING5 = ( + (48, 9), + """ AggDAgoICgkIAQsCAQkLCQgLBAEJBAcBBwMBCAUECAMFAwEFAAoBAAgKCAsKCwQHCwIEAgAEBwAI BwYABgIACQMACQUDBQcDAwYLAwAGAAQGAwkAAwsJCwoJBQIKBQQCBAACCQYFCQAGAAIGAAcIAAEH 
AQUHCgABCgYABgQABgMLBgUDBQEDCgcGCgEHAQMHAQQJAQIEAgYECwECCwcBBwUBCAIDCAQCBAYC @@ -148,10 +174,13 @@ AgQGCgYHCgcBAQcDBgsDBgMFBQMBCgEACgAGBgAEAAgHAAcBAQcFCQUGCQYAAAYCBQoCBQIEBAIA AwAJAwkLCwkKAwsGAwYAAAYECQADCQMFBQMHBwgABwAGBgACCwcECwQCAgQAAAEKAAoICAoLCAQF CAUDAwUBBAkBBAEHBwEDAQILAQsJCQsIAgMIAggKCggJ -""" +""", +) -#static const char tiling6_1_1[48][9] -TILING6_1_1 = (48, 9), """ +# static const char tiling6_1_1[48][9] +TILING6_1_1 = ( + (48, 9), + """ BgUKAwEICQgBCwcGCQMBAwkIAQIKBwAEAAcDAwAIBQIGAgUBBQQJAgALCAsACgYFCAIAAggLCgYF AAQDBwMEAwAIBgQKCQoECAMACgcFBwoLCAQHCgACAAoJBwYLAAIJCgkCAgMLBAEFAQQAAAEJBgMH AwYCCQABCwQGBAsICwcGAQUABAAFAAEJBwULCgsFBAcIAQMKCwoDCQUECwEDAQsKCgECCAUHBQgJ @@ -160,10 +189,13 @@ AwoLCQEACwUHBQsKBgcLAAUBBQAEAQAJBgQLCAsECQEABwMGAgYDCwMCBQEEAAQBCwYHCQIAAgkK BwQIAgAKCQoAAAMIBQcKCwoHCAADCgQGBAoJBQYKAwQABAMHBQYKAAIICwgCCQQFCwACAAsICAAD BgIFAQUCCgIBBAAHAwcABgcLAQMJCAkDCgUGCAEDAQgJ -""" +""", +) -#static const char tiling6_1_2[48][27] -TILING6_1_2 = (48, 27), """ +# static const char tiling6_1_2[48][27] +TILING6_1_2 = ( + (48, 27), + """ AQwDDAoDBgMKAwYIBQgGCAUMDAkIAQkMDAUKAQwDAQsMCwEGCQYBBgkHDAcJCQgMDAgDCwcMBAwA BAEMAQQKBwoECgcCDAIHBwMMDAMAAQIMBgwCBgMMAwYIBQgGCAUADAAFBQEMDAECAwAMAAwCDAkC BQIJAgULBAsFCwQMDAgLAAgMDAQJAAwCAAoMCgAFCAUABQgGDAYICAsMDAsCCgYMBAwADAUACgAF @@ -187,10 +219,13 @@ BQAKAAUIBggFCAYMDAsIAgsMDAYKAgwAAgkMCQIFCwUCBQsEDAQLCwgMDAgACQQMAgwGDAMGCAYD BggFAAUIBQAMDAEFAgEMDAADAAwEDAEECgQBBAoHAgcKBwIMDAMHAAMMDAIBAwwBDAsBBgELAQYJ BwkGCQcMDAgJAwgMDAcLAwwBAwoMCgMGCAYDBggFDAUICAkMDAkBCgUM -""" +""", +) -#static const char tiling6_2[48][15] -TILING6_2 = (48, 15), """ +# static const char tiling6_2[48][15] +TILING6_2 = ( + (48, 15), + """ AQoDBgMKAwYIBQgGCAUJAQsDCwEGCQYBBgkHCAcJBAEAAQQKBwoECgcCAwIHBgMCAwYIBQgGCAUA AQAFAAkCBQIJAgULBAsFCwQIAAoCCgAFCAUABQgGCwYIBAUACgAFAAoDBgMKAwYHBAgGAwYIBgMK AAoDCgAJBQgHCAUACgAFAAoDCwMKAggACAIHCgcCBwoECQQKAgsABwALAAcJBgkHCQYKBQIBAgUL @@ -204,17 +239,23 @@ AAgCBwIIAgcKBAoHCgQJBwgFAAUIBQAKAwoACgMLBggECAYDCgMGAwoACQAKAAUEBQAKAwoACgMG BwYDAgoABQAKAAUIBggFCAYLAgkACQIFCwUCBQsECAQLAgMGCAYDBggFAAUIBQABAAEECgQBBAoH AgcKBwIDAwsBBgELAQYJBwkGCQcIAwoBCgMGCAYDBggFCQUI -""" +""", +) -#static const char tiling7_1[16][9] -TILING7_1 = (16, 9), """ +# static const char tiling7_1[16][9] +TILING7_1 = ( + (16, 9), + """ CQUECgECCAMACwcGCAMACgECAwAIBQQJBwYLCAQHCQABCwIDCgYFCwIDCQABAAEJBgUKBAcIAQIK BwYLBQQJAgMLBAcIBgUKCwMCCAcECgUGCgIBCwYHCQQFCQEACgUGCAcEBQYKAwILAQAJBwQIAQAJ AwILCAADCQQFCwYHBgcLAAMIAgEKBAUJAgEKAAMI -""" +""", +) -#static const char tiling7_2[16][3][15] -TILING7_2 = (16, 3, 15), """ +# static const char tiling7_2[16][3][15] +TILING7_2 = ( + (16, 3, 15), + """ AQIKAwQIBAMFAAUDBQAJAwAICQEEAgQBBAIFCgUCCQUEAAoBCgAICggCAwIIAwAIAQYKBgEHAgcB BwILAQIKCwMGAAYDBgAHCAcACwcGAggDCAIKCAoAAQAKCQUECwMGAAYDBgAHCAcACwcGAwQIBAMF AAUDBQAJAwAIBAkHCwcJBQsJCwUGAAEJAgcLBwIEAwQCBAMIAgMLCAAHAQcABwEECQQBCAQHAwkA @@ -228,10 +269,13 @@ BwQIAAkDCwMJAQsJCwECBAUJBgMLAwYABwAGAAcIBgcLCAQDBQMEAwUACQAFCAADBwkECQcLCQsF BgULCAADCgYBBwEGAQcCCwIHCgIBBgMLAwYABwAGAAcIBgcLAwgCCgIIAAoICgABCgIBCAQDBQME AwUACQAFCAADBAEJAQQCBQIEAgUKBAUJAQoACAAKAggKCAID -""" +""", +) -#static const char tiling7_3[16][3][27] -TILING7_3 = (16, 3, 27), """ +# static const char tiling7_3[16][3][27] +TILING7_3 = ( + (16, 3, 27), + """ DAIKDAoFDAUEDAQIDAgDDAMADAAJDAkBDAECDAUEDAQIDAgDDAMCDAIKDAoBDAEADAAJDAkFBQQM CgUMAgoMAwIMCAMMAAgMAQAMCQEMBAkMDAAIDAgHDAcGDAYKDAoBDAECDAILDAsDDAMADAcGDAYK DAoBDAEADAAIDAgDDAMCDAILDAsHBwYMCAcMAAgMAQAMCgEMAgoMAwIMCwMMBgsMCQUMAAkMAwAM @@ -255,19 +299,25 @@ 
BgcMCgYMAQoMAgEMCwIMAwsMAAMMBgcMCgYMAQoMAAEMCAAMAwgMAgMMCwIMBwsMDAYHDAcIDAgA DAABDAEKDAoCDAIDDAMLDAsGCgIMBQoMBAUMCAQMAwgMAAMMCQAMAQkMAgEMBAUMCAQMAwgMAgMM CgIMAQoMAAEMCQAMBQkMDAQFDAUKDAoCDAIDDAMIDAgADAABDAEJDAkE -""" +""", +) -#static const char tiling7_4_1[16][15] -TILING7_4_1 = (16, 15), """ +# static const char tiling7_4_1[16][15] +TILING7_4_1 = ( + (16, 15), + """ AwQIBAMKAgoDBAoFCQEAAQYKBgEIAAgBBggHCwMCCwMGCQYDBgkFAAkDBwQIAgcLBwIJAQkCBwkE CAADAAUJBQALAwsABQsGCgIBCAAHCgcABwoGAQoABAUJCQEECwQBBAsHAgsBBQYKCgIFCAUCBQgE AwgCBgcLBQIKAgUIBAgFAggDCwcGBAEJAQQLBwsEAQsCCgYFBwAIAAcKBgoHAAoBCQUECQUACwAF AAsDBgsFAQIKCwcCCQIHAgkBBAkHAwAIBgMLAwYJBQkGAwkACAQHCgYBCAEGAQgABwgGAgMLCAQD CgMEAwoCBQoEAAEJ -""" +""", +) -#static const char tiling7_4_2[16][27] -TILING7_4_2 = (16, 27), """ +# static const char tiling7_4_2[16][27] +TILING7_4_2 = ( + (16, 27), + """ CQQIBAkFCgUJAQoJCgECAAIBAgADCAMACQgACwYKBgsHCAcLAwgLCAMAAgADAAIBCgECCwoCCwMI AAgDCAAJCAkEBQQJBAUHBgcFBwYLBwsICAcLBwgECQQIAAkICQABAwEAAQMCCwIDCAsDCgUJBQoG CwYKAgsKCwIDAQMCAwEACQABCgkBCAAJAQkACQEKCQoFBgUKBQYEBwQGBAcIBAgJCQEKAgoBCgIL @@ -276,81 +326,114 @@ BAYFBgQHCAcECQgECQUKBgoFCgYLCgsCAwILAgMBAAEDAQAJAQkKCwcIBAgHCAQJCAkAAQAJAAED AgMBAwILAwsICAMLAwgACQAIBAkICQQFBwUEBQcGCwYHCAsHCgYLBwsGCwcICwgDAAMIAwACAQIA AgEKAgoLCAQJBQkECQUKCQoBAgEKAQIAAwACAAMIAAgJ -""" +""", +) -#static const char tiling8[6][6] -TILING8 = (6, 6), """ +# static const char tiling8[6][6] +TILING8 = ( + (6, 6), + """ CQgKCggLAQUDAwUHAAQCBAYCAAIEBAIGAQMFAwcFCQoICgsI -""" +""", +) -#static const char tiling9[8][12] -TILING9 = (8, 12), """ +# static const char tiling9[8][12] +TILING9 = ( + (8, 12), + """ AgoFAwIFAwUEAwQIBAcLCQQLCQsCCQIBCgcGAQcKAQgHAQAIAwYLAAYDAAUGAAkFAwsGAAMGAAYF AAUJCgYHAQoHAQcIAQgABAsHCQsECQILCQECAgUKAwUCAwQFAwgE -""" +""", +) -#static const char tiling10_1_1[6][12] -TILING10_1_1 = (6, 12), """ +# static const char tiling10_1_1[6][12] +TILING10_1_1 = ( + (6, 12), + """ BQoHCwcKCAEJAQgDAQIFBgUCBAMAAwQHCwAIAAsCBAkGCgYJCQAKAgoABggECAYLBwIDAgcGAAEE BQQBBwkFCQcICgELAwsB -""" +""", +) -#static const char tiling10_1_1_[6][12] -TILING10_1_1_ = (6, 12), """ +# static const char tiling10_1_1_[6][12] +TILING10_1_1_ = ( + (6, 12), + """ BQkHCAcJCwEKAQsDAwIHBgcCBAEAAQQFCgAJAAoCBAgGCwYICAALAgsABgkECQYKBQIBAgUGAAME BwQDBwoFCgcLCQEIAwgB -""" +""", +) -#static const char tiling10_1_2[6][24] -TILING10_1_2 = (6, 24), """ +# static const char tiling10_1_2[6][24] +TILING10_1_2 = ( + (6, 24), + """ AwsHAwcICQgHBQkHCQUKCQoBAwEKCwMKBwYFBwUEAAQFAQAFAAECAAIDBwMCBgcCCwIKBgsKCwYE CwQIAAgECQAEAAkKAAoCCwIKCwoGBAYKCQQKBAkABAAICwgAAgsABwYFBAcFBwQABwADAgMAAQIA AgEFAgUGBwgDCwcDBwsKBwoFCQUKAQkKCQEDCQMI -""" +""", +) -#static const char tiling10_2[6][24] -TILING10_2 = (6, 24), """ +# static const char tiling10_2[6][24] +TILING10_2 = ( + (6, 24), + """ DAUJDAkIDAgDDAMBDAEKDAoLDAsHDAcFDAEADAAEDAQHDAcDDAMCDAIGDAYFDAUBBAgMBgQMCgYM CQoMAAkMAgAMCwIMCAsMDAkEDAQGDAYLDAsIDAgADAACDAIKDAoJAAMMBAAMBQQMAQUMAgEMBgIM BwYMAwcMCgUMCwoMAwsMAQMMCQEMCAkMBwgMBQcM -""" +""", +) -#static const char tiling10_2_[6][24] -TILING10_2_ = (6, 24), """ +# static const char tiling10_2_[6][24] +TILING10_2_ = ( + (6, 24), + """ CAcMCQgMAQkMAwEMCwMMCgsMBQoMBwUMBAUMAAQMAwAMBwMMBgcMAgYMAQIMBQEMDAsGDAYEDAQJ DAkKDAoCDAIADAAIDAgLBgoMBAYMCAQMCwgMAgsMAAIMCQAMCgkMDAcEDAQADAABDAEFDAUGDAYC DAIDDAMHDAcLDAsKDAoBDAEDDAMIDAgJDAkFDAUH -""" +""", +) -#static const char tiling11[12][12] -TILING11 = (12, 12), """ +# static const char tiling11[12][12] +TILING11 = ( + (12, 12), + """ AgoJAgkHAgcDBwkEAQYCAQgGAQkICAcGCAMBCAEGCAYEBgEKAAgLAAsFAAUBBQsGCQUHCQcCCQIA 
AgcLBQAEBQsABQoLCwMABQQABQALBQsKCwADCQcFCQIHCQACAgsHAAsIAAULAAEFBQYLCAEDCAYB CAQGBgoBAQIGAQYIAQgJCAYHAgkKAgcJAgMHBwQJ -""" +""", +) -#static const char tiling12_1_1[24][12] -TILING12_1_1 = (24, 12), """ +# static const char tiling12_1_1[24][12] +TILING12_1_1 = ( + (24, 12), + """ BwYLCgMCAwoICQgKBgUKCQIBAgkLCAsJCgYFBwkECQcBAwEHBwYLBAgFAwUIBQMBBQQJCAEAAQgK CwoIAQIKAAkDBQMJAwUHCgECAAsDCwAGBAYACAMAAgkBCQIEBgQCAwAIAgsBBwELAQcFBgUKBwsE AgQLBAIACQUEBggHCAYAAgAGCAMABwQLCQsECwkKBAcICwADAAsJCgkLBAcIBQkGAAYJBgACCwcG BAoFCgQCAAIECwIDAQgACAEHBQcBAAEJAwgCBAIIAgQGAgMLAQoABgAKAAYECQABAwoCCgMFBwUD CQABBAUICggFCAoLCAQHBQsGCwUDAQMFBQQJBgoHAQcKBwEDCgECBQYJCwkGCQsICwIDBgcKCAoH CggJ -""" +""", +) -#static const char tiling12_1_1_[24][12] -TILING12_1_1_ = (24, 12), """ +# static const char tiling12_1_1_[24][12] +TILING12_1_1_ = ( + (24, 12), + """ AwILCgcGBwoICQgKAgEKCQYFBgkLCAsJCQQFBwoGCgcBAwEHBwQIBgsFAwULBQMBAQAJCAUEBQgK CwoIAQAJAgoDBQMKAwUHCwMCAAoBCgAGBAYACQEAAggDCAIEBgQCAwILAAgBBwEIAQcFBgcLBQoE AgQKBAIACAcEBgkFCQYAAgAGCAcEAwALCQsACwkKAAMICwQHBAsJCgkLBAUJBwgGAAYIBgACCgUG BAsHCwQCAAIECAADAQsCCwEHBQcBAAMIAQkCBAIJAgQGAgEKAwsABgALAAYECgIBAwkACQMFBwUD CQQFAAEICggBCAoLCwYHBQgECAUDAQMFBQYKBAkHAQcJBwEDCgUGAQIJCwkCCQsICwYHAgMKCAoD CggJ -""" +""", +) -#static const char tiling12_1_2[24][24] -TILING12_1_2 = (24, 24), """ +# static const char tiling12_1_2[24][24] +TILING12_1_2 = ( + (24, 24), + """ BwMLAwcICQgHBgkHCQYKAgoGCwIGAgsDBgIKAgYLCAsGBQgGCAUJAQkFCgEFAQoCCgkFCQoBAwEK BgMKAwYHBAcGBQQGBAUJBwgLAwsICwMBCwEGBQYBBgUEBgQHCAcEBQEJAQUKCwoFBAsFCwQIAAgE CQAEAAkBAQkKBQoJCgUHCgcCAwIHAgMAAgABCQEACgsCCwoGBAYKAQQKBAEAAwABAgMBAwILCAkA @@ -362,10 +445,13 @@ CQUBCgEFAQoLAQsACAALAAgEAAQJBQkECAsHCwgDAQMIBAEIAQQFBgUEBwYEBgcLBQoJAQkKCQED CQMEBwQDBAcGBAYFCgUGCgYCCwIGAgsIAggBCQEIAQkFAQUKBgoFCwcDCAMHAwgJAwkCCgIJAgoG AgYLBwsG -""" +""", +) -#static const char tiling12_2[24][24] -TILING12_2 = (24, 24), """ +# static const char tiling12_2[24][24] +TILING12_2 = ( + (24, 24), + """ CQgMCgkMAgoMAwIMCwMMBgsMBwYMCAcMCAsMCQgMAQkMAgEMCgIMBQoMBgUMCwYMAwEMBwMMBAcM CQQMBQkMBgUMCgYMAQoMDAMBDAEFDAUGDAYLDAsHDAcEDAQIDAgDCwoMCAsMAAgMAQAMCQEMBAkM BQQMCgUMDAUHDAcDDAMCDAIKDAoBDAEADAAJDAkFBAYMAAQMAQAMCgEMAgoMAwIMCwMMBgsMBgQM @@ -377,10 +463,13 @@ DAoLDAsIDAgADAABDAEJDAkEDAQFDAUKAQMMBQEMBgUMCwYMBwsMBAcMCAQMAwgMDAEDDAMHDAcE DAQJDAkFDAUGDAYKDAoBDAsIDAgJDAkBDAECDAIKDAoFDAUGDAYLDAgJDAkKDAoCDAIDDAMLDAsG DAYHDAcI -""" +""", +) -#static const char tiling12_2_[24][24] -TILING12_2_ = (24, 24), """ +# static const char tiling12_2_[24][24] +TILING12_2_ = ( + (24, 24), + """ DAILDAsHDAcGDAYKDAoJDAkIDAgDDAMCDAEKDAoGDAYFDAUJDAkIDAgLDAsCDAIBDAQFDAUKDAoG DAYHDAcDDAMBDAEJDAkEBwYMCAcMBAgMBQQMAQUMAwEMCwMMBgsMDAAJDAkFDAUEDAQIDAgLDAsK DAoBDAEAAQIMCQEMAAkMAwAMBwMMBQcMCgUMAgoMDAECDAILDAsDDAMADAAEDAQGDAYKDAoBDAMA @@ -392,36 +481,51 @@ CQAMBQkMBAUMCAQMCwgMCgsMAQoMAAEMDAYHDAcIDAgEDAQFDAUBDAEDDAMLDAsGBQQMCgUMBgoM BwYMAwcMAQMMCQEMBAkMCgEMBgoMBQYMCQUMCAkMCwgMAgsMAQIMCwIMBwsMBgcMCgYMCQoMCAkM AwgMAgMM -""" +""", +) -#static const char tiling13_1[2][12] -TILING13_1 = (2, 12), """ +# static const char tiling13_1[2][12] +TILING13_1 = ( + (2, 12), + """ CwcGAQIKCAMACQUECAQHAgMLCQABCgYF -""" +""", +) -#static const char tiling13_1_[2][12] -TILING13_1_ = (2, 12), """ +# static const char tiling13_1_[2][12] +TILING13_1_ = ( + (2, 12), + """ BwQICwMCAQAJBQYKBgcLCgIBAAMIBAUJ -""" +""", +) -#static const char tiling13_2[2][6][18] -TILING13_2 = (2, 6, 18), """ +# static const char tiling13_2[2][6][18] +TILING13_2 = ( + (2, 6, 18), + """ 
AQIKCwcGAwQIBAMFAAUDBQAJCAMACwcGCQEEAgQBBAIFCgUCCQUECAMAAQYKBgEHAgcBBwILCQUE AQIKCwMGAAYDBgAHCAcACQUECwcGAAoBCgAICggCAwIIAQIKAwAIBAkHCwcJBQsJCwUGAgMLCAQH AAUJBQAGAQYABgEKCQABCAQHCgIFAwUCBQMGCwYDBgUKCQABAgcLBwIEAwQCBAMIBgUKAgMLCAAH AQcABwEECQQBBgUKCAQHAQsCCwEJCwkDAAMJAgMLAAEJBQoECAQKBggKCAYH -""" +""", +) -#static const char tiling13_2_[2][6][18] -TILING13_2_ = (2, 6, 18), """ +# static const char tiling13_2_[2][6][18] +TILING13_2_ = ( + (2, 6, 18), + """ CgUGCwMCBwAIAAcBBAEHAQQJCwMCBwQICQUABgAFAAYBCgEGAQAJBwQIBQIKAgUDBgMFAwYLCgUG AQAJCwcCBAIHAgQDCAMECgUGBwQIAgsBCQELAwkLCQMACwMCCQEABAoFCgQICggGBwYIBgcLCAAD BAEJAQQCBQIEAgUKCAADBAUJCgYBBwEGAQcCCwIHAgEKBAUJBgMLAwYABwAGAAcIBgcLAgEKCAQD BQMEAwUACQAFBgcLBAUJAwgCCgIIAAoICgABCAADCgIBBQsGCwUJCwkHBAcJ -""" +""", +) -#static const char tiling13_3[2][12][30] -TILING13_3 = (2, 12, 30), """ +# static const char tiling13_3[2][12][30] +TILING13_3 = ( + (2, 12, 30), + """ CwcGDAIKDAoFDAUEDAQIDAgDDAMADAAJDAkBDAECAQIKCQUMAAkMAwAMCwMMBgsMBwYMCAcMBAgM BQQMCwcGDAUEDAQIDAgDDAMCDAIKDAoBDAEADAAJDAkFAQIKDAMADAAJDAkFDAUGDAYLDAsHDAcE DAQIDAgDCAMACwcMAgsMAQIMCQEMBAkMBQQMCgUMBgoMBwYMCwcGBQQMCgUMAgoMAwIMCAMMAAgM @@ -435,10 +539,13 @@ BgUKDAEJDAkEDAQHDAcLDAsCDAIDDAMIDAgADAABBgUKDAQHDAcLDAsCDAIBDAEJDAkADAADDAMI DAgECQABDAIDDAMIDAgEDAQFDAUKDAoGDAYHDAcLDAsCBgUKBAcMCQQMAQkMAgEMCwIMAwsMAAMM CAAMBwgMAgMLAAEMCAAMBwgMBgcMCgYMBQoMBAUMCQQMAQkM -""" +""", +) -#static const char tiling13_3_[2][12][30] -TILING13_3_ = (2, 12, 30), """ +# static const char tiling13_3_[2][12][30] +TILING13_3_ = ( + (2, 12, 30), + """ AwILCAcMAAgMAQAMCgEMBgoMBQYMCQUMBAkMBwQMBQYKDAILDAsHDAcEDAQJDAkBDAEADAAIDAgD DAMCCgUGDAcEDAQJDAkBDAECDAILDAsDDAMADAAIDAgHCwMCDAEADAAIDAgHDAcGDAYKDAoFDAUE DAQJDAkBBwQICwMMBgsMBQYMCQUMAAkMAQAMCgEMAgoMAwIMBwQIBQYMCQUMAAkMAwAMCwMMAgsM @@ -452,82 +559,119 @@ AgEKDAUJDAkADAADDAMLDAsGDAYHDAcIDAgEDAQFBAUJDAYHDAcIDAgADAABDAEKDAoCDAIDDAML DAsGAgEKDAADDAMLDAsGDAYFDAUJDAkEDAQHDAcIDAgABgcLBAUMCAQMAwgMAgMMCgIMAQoMAAEM CQAMBQkMCgIBAAMMCQAMBQkMBgUMCwYMBwsMBAcMCAQMAwgM -""" +""", +) -#static const char tiling13_4[2][4][36] -TILING13_4 = (2, 4, 36), """ +# static const char tiling13_4[2][4][36] +TILING13_4 = ( + (2, 4, 36), + """ DAIKDAoFDAUGDAYLDAsHDAcEDAQIDAgDDAMADAAJDAkBDAECCwMMBgsMBwYMCAcMBAgMBQQMCQUM AAkMAQAMCgEMAgoMAwIMCQEMBAkMBQQMCgUMBgoMBwYMCwcMAgsMAwIMCAMMAAgMAQAMDAAIDAgH DAcEDAQJDAkFDAUGDAYKDAoBDAECDAILDAsDDAMADAMLDAsGDAYHDAcIDAgEDAQFDAUJDAkADAAB DAEKDAoCDAIDCAAMBwgMBAcMCQQMBQkMBgUMCgYMAQoMAgEMCwIMAwsMAAMMCgIMBQoMBgUMCwYM BwsMBAcMCAQMAwgMAAMMCQAMAQkMAgEMDAEJDAkEDAQFDAUKDAoGDAYHDAcLDAsCDAIDDAMIDAgA DAAB -""" +""", +) -#static const char tiling13_5_1[2][4][18] -TILING13_5_1 = (2, 4, 18), """ +# static const char tiling13_5_1[2][4][18] +TILING13_5_1 = ( + (2, 4, 18), + """ BwYLAQAJCgMCAwoFAwUIBAgFAQIKBwQIAwALBgsACQYABgkFAwAIBQYKAQIJBAkCCwQCBAsHBQQJ AwILCAEAAQgHAQcKBgoHBAcIAgEKCwADAAsGAAYJBQkGAgMLBAUJAAEIBwgBCgcBBwoGAAEJBgcL AgMKBQoDCAUDBQgEBgUKAAMICQIBAgkEAgQLBwsE -""" +""", +) -#static const char tiling13_5_2[2][4][30] -TILING13_5_2 = (2, 4, 30), """ +# static const char tiling13_5_2[2][4][30] +TILING13_5_2 = ( + (2, 4, 30), + """ AQAJBwQIBwgDBwMLAgsDCwIKCwoGBQYKBgUHBAcFBwQICwMCBgsCCgYCBgoFCQUKAQkKCQEAAgAB AAIDBQYKCQEABAkACAQABAgHCwcIAwsICwMCAAIDAgABAwILBQYKBQoBBQEJAAkBCQAICQgEBAgH BAcFBgUHAgEKBAUJBAkABAAIAwgACAMLCAsHBgcLBwYEBQQGBAUJCAADBwgDCwcDBwsGCgYLAgoL CgIBAwECAQMABgcLCgIBBQoBCQUBBQkECAQJAAgJCAADAQMAAwECAAMIBgcLBgsCBgIKAQoCCgEJ CgkFBQkEBQQGBwYE -""" +""", +) -#static const char tiling14[12][12] -TILING14 = (12, 12), """ +# static const char tiling14[12][12] +TILING14 = ( + (12, 
12), + """ BQkIBQgCBQIGAwIIAgEFAgUIAggLBAgFCQQGCQYDCQMBCwMGAQsKAQQLAQAEBwsECAIACAUCCAcF CgIFAAcDAAoHAAkKBgcKAAMHAAcKAAoJBgoHCAACCAIFCAUHCgUCAQoLAQsEAQQABwQLCQYECQMG CQEDCwYDAgUBAggFAgsIBAUIBQgJBQIIBQYCAwgC -""" +""", +) -#static const char test3[24] -TEST3 = (24,), """ +# static const char test3[24] +TEST3 = ( + (24,), + """ BQEEBQECAgMEAwYG+vr9/P3+/v/7/P/7 -""" +""", +) -#static const char test4[8] -TEST4 = (8,), """ +# static const char test4[8] +TEST4 = ( + (8,), + """ BwcHB/n5+fk= -""" +""", +) -#static const char test6[48][3] -TEST6 = (48, 3), """ +# static const char test6[48][3] +TEST6 = ( + (48, 3), + """ AgcKBAcLBQcBBQcDAQcJAwcKBgcFAQcIBAcIAQcIAwcLBQcCBQcAAQcJBgcGAgcJBAcIAgcJAgcK BgcHAwcKBAcLAwcLBgcE+vkE/fkL/PkL/fkK+vkH/vkK/vkJ/PkI/vkJ+vkG//kJ+/kA+/kC/fkL //kI/PkI//kI+vkF/fkK//kJ+/kD+/kB/PkL/vkK -""" +""", +) -#static const char test7[16][5] -TEST7 = (16, 5), """ +# static const char test7[16][5] +TEST7 = ( + (16, 5), + """ AQIFBwEDBAUHAwQBBgcEBAEFBwACAwUHAgECBgcFAgMGBwYDBAYHB/38+vkH/v36+Qb//vr5Bf79 +/kC/P/7+QD8//r5BP38+/kD//77+QE= -""" +""", +) -#static const char test10[6][3] -TEST10 = (6, 3), """ +# static const char test10[6][3] +TEST10 = ( + (6, 3), + """ AgQHBQYHAQMHAQMHBQYHAgQH -""" +""", +) -#static const char test12[24][4] -TEST12 = (24, 4), """ +# static const char test12[24][4] +TEST12 = ( + (24, 4), + """ BAMHCwMCBwoCBgcFBgQHBwIBBwkFAgcBBQMHAgUBBwAFBAcDBgMHBgEGBwQBBAcIBAEHCAYBBwQD BgcGBAUHAwEFBwADBQcCAgUHAQECBwkEBgcHBgIHBQIDBwoDBAcL -""" +""", +) -#static const char test13[2][7] -TEST13 = (2, 7), """ +# static const char test13[2][7] +TEST13 = ( + (2, 7), + """ AQIDBAUGBwIDBAEFBgc= -""" +""", +) -#static const char subconfig13[64] -SUBCONFIG13 = (64,), """ +# static const char subconfig13[64] +SUBCONFIG13 = ( + (64,), + """ AAECBwP/C/8ECP//Dv///wUJDBcP/xUmERT/JBohHiwGCg0TEP8ZJRIY/yMWIB0r////Iv//HCr/ H/8pGygnLQ== -""" +""", +) diff --git a/infinigen/terrain/mesher/cube_spherical_mesher.py b/infinigen/terrain/mesher/cube_spherical_mesher.py index d3cdad686..14077fb02 100644 --- a/infinigen/terrain/mesher/cube_spherical_mesher.py +++ b/infinigen/terrain/mesher/cube_spherical_mesher.py @@ -4,23 +4,32 @@ # Authors: Zeyu Ma -from ctypes import POINTER, c_double, c_int32 import logging +from ctypes import POINTER, c_double, c_int32 import gin import numpy as np from numpy import ascontiguousarray as AC -from infinigen.terrain.utils import ASDOUBLE, ASINT, Mesh, write_attributes, register_func, load_cdll -from infinigen.terrain.utils import Timer as tTimer -from infinigen.core.util.organization import Tags from tqdm import tqdm +from infinigen.core.util.organization import Tags +from infinigen.terrain.utils import ( + ASDOUBLE, + ASINT, + Mesh, + load_cdll, + register_func, + write_attributes, +) +from infinigen.terrain.utils import Timer as tTimer + logger = logging.getLogger(__name__) try: from ._marching_cubes_lewiner import marching_cubes -except ImportError as e: - logger.warning(f'Could not import marching_cubes, terrain is likely not installed') +except ImportError: + logger.warning("Could not import marching_cubes, terrain is likely not installed") + @gin.configurable("CubeSphericalMesherTimer") class Timer(tTimer): @@ -30,14 +39,17 @@ def __init__(self, desc, verbose=False): @gin.configurable class CubeSphericalMesher: - def __init__( - self, cam_pose, - r_min, r_max, + self, + cam_pose, + r_min, + r_max, base_90d_resolution, base_R, - H_fov, W_fov, - N0, N1, + H_fov, + W_fov, + N0, + N1, complete_depth_test=False, inview_upscale=-1, test_downscale=5, @@ -62,173 
+74,353 @@ def __init__( self.verbose = verbose dll = load_cdll(f"terrain/lib/{device}/meshing/cube_spherical_mesher.so") - register_func(self, dll, "init_and_get_emptytest_queries", [ - POINTER(c_double), c_double, c_double, c_int32, c_int32, POINTER(c_double), c_int32, c_double, c_double, - c_int32, c_int32, c_int32 - ]) + register_func( + self, + dll, + "init_and_get_emptytest_queries", + [ + POINTER(c_double), + c_double, + c_double, + c_int32, + c_int32, + POINTER(c_double), + c_int32, + c_double, + c_double, + c_int32, + c_int32, + c_int32, + ], + ) register_func(self, dll, "initial_update", [POINTER(c_double)], c_int32) - register_func(self, dll, "get_coarse_queries", [POINTER(c_double), POINTER(c_int32)]) - register_func(self, dll, "update", [ - c_int32, POINTER(c_double), POINTER(c_int32), POINTER(c_double), c_int32, POINTER(c_int32), c_int32 - ]) + register_func( + self, dll, "get_coarse_queries", [POINTER(c_double), POINTER(c_int32)] + ) + register_func( + self, + dll, + "update", + [ + c_int32, + POINTER(c_double), + POINTER(c_int32), + POINTER(c_double), + c_int32, + POINTER(c_int32), + c_int32, + ], + ) register_func(self, dll, "get_cnt", restype=c_int32) register_func(self, dll, "get_mesh_cnt", [POINTER(c_int32)]) register_func(self, dll, "bisection_get_positions", [POINTER(c_double)]) register_func(self, dll, "bisection_update", [POINTER(c_double)]) register_func(self, dll, "finefront_init", restype=c_int32) register_func(self, dll, "finefront_get_queries", [POINTER(c_double)]) - register_func(self, dll, "finefront_update", [ - c_int32, POINTER(c_double), POINTER(c_double), c_int32, POINTER(c_int32), c_int32 - ]) + register_func( + self, + dll, + "finefront_update", + [ + c_int32, + POINTER(c_double), + POINTER(c_double), + c_int32, + POINTER(c_int32), + c_int32, + ], + ) register_func(self, dll, "finefront_get_cnt", restype=c_int32) register_func(self, dll, "finefront_cleanup") - register_func(self, dll, "complete_depth_test_get_queries", [c_int32, c_int32, POINTER(c_double)]) - register_func(self, dll, "complete_depth_test_update", [c_int32, c_int32, POINTER(c_double)]) - register_func(self, dll, "get_stitching_queries", [POINTER(c_double), POINTER(c_int32)]) - register_func(self, dll, "stitch_update", [POINTER(c_double), POINTER(c_int32), POINTER(c_double), c_int32, POINTER(c_int32), c_int32]) - register_func(self, dll, "get_final_mesh", [POINTER(c_double), POINTER(c_int32), POINTER(c_int32)]) + register_func( + self, + dll, + "complete_depth_test_get_queries", + [c_int32, c_int32, POINTER(c_double)], + ) + register_func( + self, + dll, + "complete_depth_test_update", + [c_int32, c_int32, POINTER(c_double)], + ) + register_func( + self, dll, "get_stitching_queries", [POINTER(c_double), POINTER(c_int32)] + ) + register_func( + self, + dll, + "stitch_update", + [ + POINTER(c_double), + POINTER(c_int32), + POINTER(c_double), + c_int32, + POINTER(c_int32), + c_int32, + ], + ) + register_func( + self, + dll, + "get_final_mesh", + [POINTER(c_double), POINTER(c_int32), POINTER(c_int32)], + ) def __call__(self, kernels): H = self.L - 2 * self.N0 W = self.L - 2 * self.N1 R = self.R with Timer("init_and_get_emptytest_queries"): - test_L = (self.L - 1) // self.test_downscale + 1 - test_R = (self.R - 1) // self.test_downscale + 1 - positions = AC(np.zeros((6 * (test_L + 1) ** 2 * (test_R + 1), 3), dtype=np.float64)) + test_L = (self.L - 1) // self.test_downscale + 1 + test_R = (self.R - 1) // self.test_downscale + 1 + positions = AC( + np.zeros((6 * (test_L + 1) ** 2 * (test_R + 1), 
3), dtype=np.float64) + ) self.init_and_get_emptytest_queries( ASDOUBLE(AC((self.cam_pose.reshape(-1).astype(np.float64)))), - self.r_min, self.r_max, self.L, self.R, + self.r_min, + self.r_max, + self.L, + self.R, ASDOUBLE(positions), - self.test_downscale, self.H_fov, self.W_fov, self.upscale, - self.N0, self.N1, + self.test_downscale, + self.H_fov, + self.W_fov, + self.upscale, + self.N0, + self.N1, + ) + + with Timer( + f"compute emptytest sdf of #{len(positions)} (6x{test_L + 1}^2x{test_R + 1})" + ): + sdf = AC( + self.kernel_caller(kernels, positions).min(axis=-1).astype(np.float64) ) - - with Timer(f"compute emptytest sdf of #{len(positions)} (6x{test_L + 1}^2x{test_R + 1})"): - sdf = AC(self.kernel_caller(kernels, positions).min(axis=-1).astype(np.float64)) with Timer("initial_update"): cnt = self.initial_update(ASDOUBLE(sdf)) - + iter = 0 while cnt > 0: - if self.verbose: print(f"{iter=}") + if self.verbose: + print(f"{iter=}") iter += 1 with Timer(f"get_coarse_queries of {cnt} blocks"): - positions = AC(np.zeros(((self.test_downscale + 1) ** 3 * cnt, 3), dtype=np.float64)) + positions = AC( + np.zeros( + ((self.test_downscale + 1) ** 3 * cnt, 3), dtype=np.float64 + ) + ) position_bounds = AC(np.zeros((cnt * 3,), dtype=np.int32)) self.get_coarse_queries(ASDOUBLE(positions), ASINT(position_bounds)) with Timer("compute coarse sdf"): - sdf = AC(self.kernel_caller(kernels, positions).min(axis=-1).astype(np.float64)) + sdf = AC( + self.kernel_caller(kernels, positions) + .min(axis=-1) + .astype(np.float64) + ) del positions with Timer("run marching cube"): S = self.test_downscale + 1 - block_size = (self.test_downscale+1) ** 3 - if self.verbose: range_cnt = tqdm(range(cnt)) - else: range_cnt = range(cnt) + block_size = (self.test_downscale + 1) ** 3 + if self.verbose: + range_cnt = tqdm(range(cnt)) + else: + range_cnt = range(cnt) for i in range_cnt: - S1, S2, S3 = position_bounds[i * 3: (i+1) * 3] - part_sdf = sdf[i * block_size: (i+1) * block_size].reshape(S, S, S)[:S1 + 1, :S2 + 1, :S3 + 1] - verts_i_int, verts_i_frac, faces_i, _, _ = marching_cubes(part_sdf, 0) + S1, S2, S3 = position_bounds[i * 3 : (i + 1) * 3] + part_sdf = sdf[i * block_size : (i + 1) * block_size].reshape( + S, S, S + )[: S1 + 1, : S2 + 1, : S3 + 1] + verts_i_int, verts_i_frac, faces_i, _, _ = marching_cubes( + part_sdf, 0 + ) verts_i = verts_i_int + verts_i_frac self.update( - i, ASDOUBLE(sdf), ASINT(position_bounds), - ASDOUBLE(AC(verts_i.astype(np.float64))), len(verts_i), - ASINT(AC(faces_i.astype(np.int32))), len(faces_i), + i, + ASDOUBLE(sdf), + ASINT(position_bounds), + ASDOUBLE(AC(verts_i.astype(np.float64))), + len(verts_i), + ASINT(AC(faces_i.astype(np.int32))), + len(faces_i), ) with Timer("collect new cnt"): cnt = self.get_cnt() - + if self.upscale != -1 and self.upscale != 1: U = self.upscale S = U + 1 cnt = self.finefront_init() iter = 0 while True: - if self.verbose: print(f"{iter=}") + if self.verbose: + print(f"{iter=}") iter += 1 if cnt == 0 and self.complete_depth_test: with Timer("complete_depth_test"): - self.complete_depth_test = False # one time use - if self.verbose: batch_range = tqdm(range(0, W * self.upscale, self.complete_depth_test_relax)) - else: batch_range = range(0, W * self.upscale, self.complete_depth_test_relax) + self.complete_depth_test = False # one time use + if self.verbose: + batch_range = tqdm( + range( + 0, W * self.upscale, self.complete_depth_test_relax + ) + ) + else: + batch_range = range( + 0, W * self.upscale, self.complete_depth_test_relax + ) for b in 
batch_range: - positions = AC(np.zeros((((H * self.upscale - 1) // self.complete_depth_test_relax + 1) * ((R * self.upscale - 1) // self.complete_depth_test_relax + 1), 3), dtype=np.float64)) - self.complete_depth_test_get_queries(self.complete_depth_test_relax, b, ASDOUBLE(positions)) - sdf = AC(self.kernel_caller(kernels, positions).min(axis=-1).astype(np.float64)) - self.complete_depth_test_update(self.complete_depth_test_relax, b, ASDOUBLE(sdf)) + positions = AC( + np.zeros( + ( + ( + (H * self.upscale - 1) + // self.complete_depth_test_relax + + 1 + ) + * ( + (R * self.upscale - 1) + // self.complete_depth_test_relax + + 1 + ), + 3, + ), + dtype=np.float64, + ) + ) + self.complete_depth_test_get_queries( + self.complete_depth_test_relax, b, ASDOUBLE(positions) + ) + sdf = AC( + self.kernel_caller(kernels, positions) + .min(axis=-1) + .astype(np.float64) + ) + self.complete_depth_test_update( + self.complete_depth_test_relax, b, ASDOUBLE(sdf) + ) cnt = self.finefront_get_cnt() - if cnt == 0: break + if cnt == 0: + break with Timer(f"get_finefront_queries of {cnt} blocks"): - positions = AC(np.zeros(((self.upscale + 1) ** 3 * cnt, 3), dtype=np.float64)) + positions = AC( + np.zeros(((self.upscale + 1) ** 3 * cnt, 3), dtype=np.float64) + ) self.finefront_get_queries(ASDOUBLE(positions)) with Timer("compute finefront sdf"): - sdf = AC(self.kernel_caller(kernels, positions).min(axis=-1).astype(np.float64)) + sdf = AC( + self.kernel_caller(kernels, positions) + .min(axis=-1) + .astype(np.float64) + ) del positions with Timer("run marching cube"): - block_size = S ** 3 - if self.verbose: range_cnt = tqdm(range(cnt)) - else: range_cnt = range(cnt) + block_size = S**3 + if self.verbose: + range_cnt = tqdm(range(cnt)) + else: + range_cnt = range(cnt) for i in range_cnt: - part_sdf = sdf[i * block_size: (i+1) * block_size].reshape(S, S, S) + part_sdf = sdf[i * block_size : (i + 1) * block_size].reshape( + S, S, S + ) if (part_sdf > 0).any() and (part_sdf <= 0).any(): - verts_i_int, verts_i_frac, faces_i, _, _ = marching_cubes(part_sdf, 0) + verts_i_int, verts_i_frac, faces_i, _, _ = marching_cubes( + part_sdf, 0 + ) verts_i = verts_i_int + verts_i_frac self.finefront_update( - i, ASDOUBLE(sdf), - ASDOUBLE(AC(verts_i.astype(np.float64))), len(verts_i), - ASINT(AC(faces_i.astype(np.int32))), len(faces_i), + i, + ASDOUBLE(sdf), + ASDOUBLE(AC(verts_i.astype(np.float64))), + len(verts_i), + ASINT(AC(faces_i.astype(np.int32))), + len(faces_i), ) with Timer("collect new cnt"): cnt = self.finefront_get_cnt() self.finefront_cleanup() - - with Timer(f"get_stitching_queries of {((2 * (W + H) * R * (U * U + 2 * U + 1) + 4 * R * (U + 2)) * 8, 3)=}"): - positions = AC(np.zeros(((2 * (W + H) * R * (U * U + 2 * U + 1) + 4 * R * (U + 2)) * 8, 3), dtype=np.float64)) + + with Timer( + f"get_stitching_queries of {((2 * (W + H) * R * (U * U + 2 * U + 1) + 4 * R * (U + 2)) * 8, 3)=}" + ): + positions = AC( + np.zeros( + ( + (2 * (W + H) * R * (U * U + 2 * U + 1) + 4 * R * (U + 2)) + * 8, + 3, + ), + dtype=np.float64, + ) + ) self.get_stitching_queries(ASDOUBLE(positions), POINTER(c_int32)()) with Timer("compute stitching sdf"): - sdf = AC(self.kernel_caller(kernels, positions).min(axis=-1).astype(np.float64)) + sdf = AC( + self.kernel_caller(kernels, positions) + .min(axis=-1) + .astype(np.float64) + ) del positions sdf = sdf.reshape((-1, 2, 2, 2)) with Timer("marching stiches"): sdf_repeated1 = sdf[:, 1:, :, :] - sdf_repeated2 = np.concatenate((sdf[1:, :1, :, :], np.zeros((1, 1, 2, 2)))) - sdf = 
AC(np.concatenate((sdf, sdf_repeated1, sdf_repeated2), 1).reshape((-1, 2, 2))) + sdf_repeated2 = np.concatenate( + (sdf[1:, :1, :, :], np.zeros((1, 1, 2, 2))) + ) + sdf = AC( + np.concatenate((sdf, sdf_repeated1, sdf_repeated2), 1).reshape( + (-1, 2, 2) + ) + ) if (sdf > 0).any() and (sdf <= 0).any(): verts_int, verts_frac, faces, _, _ = marching_cubes(sdf, 0) self.stitch_update( ASDOUBLE(sdf), ASINT(AC(verts_int.astype(np.int32))), - ASDOUBLE(AC(verts_frac.astype(np.float64))), len(verts_int), - ASINT(AC(faces.astype(np.int32))), len(faces), + ASDOUBLE(AC(verts_frac.astype(np.float64))), + len(verts_int), + ASINT(AC(faces.astype(np.int32))), + len(faces), ) with Timer("merge identifiers and get coarse vert counts"): NM = AC(np.zeros(2, dtype=np.int32)) self.get_mesh_cnt(ASINT(NM)) - - if self.verbose: print(f"mesh has {NM[0]} vertices and {NM[1]} faces") - + + if self.verbose: + print(f"mesh has {NM[0]} vertices and {NM[1]} faces") + with Timer("bisection"): positions = AC(np.zeros((NM[0] * 3,), dtype=np.float64)) - if self.verbose: range_it = tqdm(range(self.bisection_iters)) - else: range_it = range(self.bisection_iters) + if self.verbose: + range_it = tqdm(range(self.bisection_iters)) + else: + range_it = range(self.bisection_iters) for it in range_it: self.bisection_get_positions(ASDOUBLE(positions)) - sdf = np.ascontiguousarray(self.kernel_caller(kernels, positions.reshape((-1, 3))).min(axis=-1).astype(np.float64)) + sdf = np.ascontiguousarray( + self.kernel_caller(kernels, positions.reshape((-1, 3))) + .min(axis=-1) + .astype(np.float64) + ) self.bisection_update(ASDOUBLE(sdf)) with Timer("get final results"): vertices = AC(np.zeros((NM[0], 3), dtype=np.float64)) outview_annotation = AC(np.zeros(NM[0], dtype=np.int32)) faces = AC(np.zeros((NM[1], 3), dtype=np.int32)) - self.get_final_mesh(ASDOUBLE(vertices), ASINT(faces), ASINT(outview_annotation)) + self.get_final_mesh( + ASDOUBLE(vertices), ASINT(faces), ASINT(outview_annotation) + ) mesh = Mesh(vertices=vertices, faces=faces) - with Timer("compute attributes"): write_attributes(kernels, mesh) mesh.vertex_attributes[Tags.OutOfView] = outview_annotation - return mesh \ No newline at end of file + return mesh diff --git a/infinigen/terrain/mesher/frontview_spherical_mesher.py b/infinigen/terrain/mesher/frontview_spherical_mesher.py index af52fb2e1..1fd45def2 100644 --- a/infinigen/terrain/mesher/frontview_spherical_mesher.py +++ b/infinigen/terrain/mesher/frontview_spherical_mesher.py @@ -4,22 +4,31 @@ # Authors: Zeyu Ma -from ctypes import POINTER, c_double, c_int32 import logging +from ctypes import POINTER, c_double, c_int32 import gin import numpy as np from numpy import ascontiguousarray as AC -from infinigen.terrain.utils import Mesh, ASDOUBLE, ASINT, write_attributes, register_func, load_cdll -from infinigen.terrain.utils import Timer as tTimer from tqdm import tqdm +from infinigen.terrain.utils import ( + ASDOUBLE, + ASINT, + Mesh, + load_cdll, + register_func, + write_attributes, +) +from infinigen.terrain.utils import Timer as tTimer + logger = logging.getLogger(__name__) try: from ._marching_cubes_lewiner import marching_cubes -except ImportError as e: - logger.warning(f'Could not import marching_cubes, terrain is likely not installed') +except ImportError: + logger.warning("Could not import marching_cubes, terrain is likely not installed") + @gin.configurable("FrontviewSphericalMesherTimer") class Timer(tTimer): @@ -29,12 +38,16 @@ def __init__(self, desc, verbose=False): @gin.configurable class FrontviewSphericalMesher: 
- - def __init__(self, + def __init__( + self, cam_pose, - H_fov, W_fov, - r_min, r_max, - H, W, R, + H_fov, + W_fov, + r_min, + r_max, + H, + W, + R, upscale, complete_depth_test, test_downscale=5, @@ -59,185 +72,337 @@ def __init__(self, self.verbose = verbose dll = load_cdll(f"terrain/lib/{device}/meshing/frontview_spherical_mesher.so") - register_func(self, dll, "init_and_get_emptytest_queries", [ - POINTER(c_double), c_double, c_double, c_double, c_double, c_int32, c_int32, c_int32, - POINTER(c_double), c_int32, c_int32, - ]) + register_func( + self, + dll, + "init_and_get_emptytest_queries", + [ + POINTER(c_double), + c_double, + c_double, + c_double, + c_double, + c_int32, + c_int32, + c_int32, + POINTER(c_double), + c_int32, + c_int32, + ], + ) register_func(self, dll, "initial_update", [POINTER(c_double)], c_int32) - register_func(self, dll, "get_coarse_queries", [POINTER(c_double), POINTER(c_int32)]) - register_func(self, dll, "update", [ - c_int32, POINTER(c_double), POINTER(c_int32), POINTER(c_int32), POINTER(c_double), c_int32, POINTER(c_int32), c_int32, - ]) + register_func( + self, dll, "get_coarse_queries", [POINTER(c_double), POINTER(c_int32)] + ) + register_func( + self, + dll, + "update", + [ + c_int32, + POINTER(c_double), + POINTER(c_int32), + POINTER(c_int32), + POINTER(c_double), + c_int32, + POINTER(c_int32), + c_int32, + ], + ) register_func(self, dll, "get_cnt", restype=c_int32) register_func(self, dll, "get_coarse_mesh_cnt", [POINTER(c_int32)]) - register_func(self, dll, "bisection_get_positions", [c_int32, POINTER(c_double)]) + register_func( + self, dll, "bisection_get_positions", [c_int32, POINTER(c_double)] + ) register_func(self, dll, "bisection_update", [c_int32, POINTER(c_double)]) register_func(self, dll, "init_fine", [c_int32, c_int32], c_int32) register_func(self, dll, "get_fine_queries", [POINTER(c_double)]) - register_func(self, dll, "update_fine_small", [ - c_int32, c_int32, POINTER(c_double), POINTER(c_int32), POINTER(c_double), c_int32, POINTER(c_int32), c_int32, - ]) + register_func( + self, + dll, + "update_fine_small", + [ + c_int32, + c_int32, + POINTER(c_double), + POINTER(c_int32), + POINTER(c_double), + c_int32, + POINTER(c_int32), + c_int32, + ], + ) register_func(self, dll, "update_fine", restype=c_int32) - register_func(self, dll, "complete_depth_test_get_query_cnt", [c_int32, c_int32], c_int32) - register_func(self, dll, "complete_depth_test_get_queries", [c_int32, c_int32, POINTER(c_double)]) - register_func(self, dll, "complete_depth_test_update", [c_int32, c_int32, POINTER(c_double)]) + register_func( + self, dll, "complete_depth_test_get_query_cnt", [c_int32, c_int32], c_int32 + ) + register_func( + self, + dll, + "complete_depth_test_get_queries", + [c_int32, c_int32, POINTER(c_double)], + ) + register_func( + self, + dll, + "complete_depth_test_update", + [c_int32, c_int32, POINTER(c_double)], + ) register_func(self, dll, "complete_depth_test_get_cnt", restype=c_int32) - - register_func(self, dll, "get_final_mesh_statistics", [POINTER(c_int32), POINTER(c_int32), POINTER(c_int32)]) - register_func(self, dll, "get_final_mesh", [POINTER(c_double), POINTER(c_int32)]) + register_func( + self, + dll, + "get_final_mesh_statistics", + [POINTER(c_int32), POINTER(c_int32), POINTER(c_int32)], + ) + register_func( + self, dll, "get_final_mesh", [POINTER(c_double), POINTER(c_int32)] + ) def __call__(self, kernels): n_elements = len(kernels) - + with Timer("init_and_get_emptytest_queries"): - test_H = (self.H - 1) // self.test_downscale + 1 - 
test_W = (self.W - 1) // self.test_downscale + 1 - test_R = (self.R - 1) // self.test_downscale + 1 - positions = AC(np.zeros(((test_H + 1) * (test_W + 1) * (test_R + 1), 3), dtype=np.float64)) + test_H = (self.H - 1) // self.test_downscale + 1 + test_W = (self.W - 1) // self.test_downscale + 1 + test_R = (self.R - 1) // self.test_downscale + 1 + positions = AC( + np.zeros( + ((test_H + 1) * (test_W + 1) * (test_R + 1), 3), dtype=np.float64 + ) + ) self.init_and_get_emptytest_queries( ASDOUBLE(AC((self.cam_pose.reshape(-1).astype(np.float64)))), - self.H_fov, self.W_fov, self.r_min, self.r_max, self.H, self.W, self.R, + self.H_fov, + self.W_fov, + self.r_min, + self.r_max, + self.H, + self.W, + self.R, ASDOUBLE(positions), self.test_downscale, self.upscale, ) - - with Timer(f"compute emptytest sdf of #{(test_H + 1) * (test_W + 1) * (test_R + 1)}"): - sdf = AC(self.kernel_caller(kernels, positions).min(axis=-1).astype(np.float64)) - + + with Timer( + f"compute emptytest sdf of #{(test_H + 1) * (test_W + 1) * (test_R + 1)}" + ): + sdf = AC( + self.kernel_caller(kernels, positions).min(axis=-1).astype(np.float64) + ) + with Timer("initial_update"): cnt = self.initial_update(ASDOUBLE(sdf)) S = self.test_downscale + 1 - block_size = S ** 3 + block_size = S**3 it = 0 while True: - if self.verbose: print(f"{it=}") + if self.verbose: + print(f"{it=}") it += 1 - if cnt == 0: break + if cnt == 0: + break with Timer(f"get_coarse_queries of {cnt} blocks"): - positions = AC(np.zeros((S ** 3 * cnt, 3), dtype=np.float64)) + positions = AC(np.zeros((S**3 * cnt, 3), dtype=np.float64)) position_bounds = AC(np.zeros((cnt * 3,), dtype=np.int32)) self.get_coarse_queries(ASDOUBLE(positions), ASINT(position_bounds)) with Timer("compute coarse sdf"): - sdf = AC(self.kernel_caller(kernels, positions).min(axis=-1).astype(np.float64)) + sdf = AC( + self.kernel_caller(kernels, positions) + .min(axis=-1) + .astype(np.float64) + ) with Timer("run marching cube"): - if self.verbose: range_cnt = tqdm(range(cnt)) - else: range_cnt = range(cnt) + if self.verbose: + range_cnt = tqdm(range(cnt)) + else: + range_cnt = range(cnt) for i in range_cnt: - S1, S2, S3 = position_bounds[i * 3: (i+1) * 3] - part_sdf = sdf[i * block_size: (i+1) * block_size].reshape(S, S, S)[:S1 + 1, :S2 + 1, :S3 + 1] + S1, S2, S3 = position_bounds[i * 3 : (i + 1) * 3] + part_sdf = sdf[i * block_size : (i + 1) * block_size].reshape( + S, S, S + )[: S1 + 1, : S2 + 1, : S3 + 1] verts_int, verts_frac, faces, _, _ = marching_cubes(part_sdf, 0) self.update( - i, ASDOUBLE(sdf), ASINT(position_bounds), + i, + ASDOUBLE(sdf), + ASINT(position_bounds), ASINT(AC(verts_int.astype(np.int32))), - ASDOUBLE(AC(verts_frac.astype(np.float64))), len(verts_int), - ASINT(AC(faces.astype(np.int32))), len(faces), + ASDOUBLE(AC(verts_frac.astype(np.float64))), + len(verts_int), + ASINT(AC(faces.astype(np.int32))), + len(faces), ) with Timer("collect new cnt"): cnt = self.get_cnt() - + with Timer("merge identifiers and get coarse vert counts"): NM = AC(np.zeros(2, dtype=np.int32)) self.get_coarse_mesh_cnt(ASINT(NM)) N = NM[0] M = NM[1] - - if self.verbose: print(f"Entire in view coarse mesh (without visibility face removal) has {N} vertices and {M} faces") - + + if self.verbose: + print( + f"Entire in view coarse mesh (without visibility face removal) has {N} vertices and {M} faces" + ) + with Timer("bisection on in view coarse mesh"): positions = AC(np.zeros((N * 3,), dtype=np.float64)) - if self.verbose: range_it = tqdm(range(self.bisection_iters_coarse)) - else: range_it = 
range(self.bisection_iters_coarse) + if self.verbose: + range_it = tqdm(range(self.bisection_iters_coarse)) + else: + range_it = range(self.bisection_iters_coarse) for it in range_it: self.bisection_get_positions(-1, ASDOUBLE(positions)) - sdf = np.ascontiguousarray(self.kernel_caller(kernels, positions.reshape((-1, 3))).min(axis=-1).astype(np.float64)) + sdf = np.ascontiguousarray( + self.kernel_caller(kernels, positions.reshape((-1, 3))) + .min(axis=-1) + .astype(np.float64) + ) self.bisection_update(-1, ASDOUBLE(sdf)) with Timer("visibility test for coarse mesh and init_fine_solids"): cnt = self.init_fine(self.relax1, n_elements) - + S = self.upscale + 1 - block_size = S ** 3 + block_size = S**3 it = 0 while True: - if self.verbose: print(f"{it=}") + if self.verbose: + print(f"{it=}") it += 1 if cnt == 0 and self.complete_depth_test: with Timer("complete_depth_test"): - self.complete_depth_test = False # one time use - if self.verbose: batch_range = tqdm(range(0, self.W * self.upscale, self.complete_depth_test_relax)) - else: batch_range = range(0, self.W * self.upscale, self.complete_depth_test_relax) + self.complete_depth_test = False # one time use + if self.verbose: + batch_range = tqdm( + range( + 0, self.W * self.upscale, self.complete_depth_test_relax + ) + ) + else: + batch_range = range( + 0, self.W * self.upscale, self.complete_depth_test_relax + ) for b in batch_range: - cnt = self.complete_depth_test_get_query_cnt(self.complete_depth_test_relax, b) + cnt = self.complete_depth_test_get_query_cnt( + self.complete_depth_test_relax, b + ) positions = AC(np.zeros((cnt, 3), dtype=np.float64)) - self.complete_depth_test_get_queries(self.complete_depth_test_relax, b, ASDOUBLE(positions)) - sdf = AC(self.kernel_caller(kernels, positions).min(axis=-1).astype(np.float64)) - self.complete_depth_test_update(self.complete_depth_test_relax, b, ASDOUBLE(sdf)) + self.complete_depth_test_get_queries( + self.complete_depth_test_relax, b, ASDOUBLE(positions) + ) + sdf = AC( + self.kernel_caller(kernels, positions) + .min(axis=-1) + .astype(np.float64) + ) + self.complete_depth_test_update( + self.complete_depth_test_relax, b, ASDOUBLE(sdf) + ) cnt = self.complete_depth_test_get_cnt() - if cnt == 0: break + if cnt == 0: + break with Timer(f"get_fine_positions of #{cnt}"): - positions = AC(np.zeros((cnt, (self.upscale + 1) ** 3, 3), dtype=np.float64)) + positions = AC( + np.zeros((cnt, (self.upscale + 1) ** 3, 3), dtype=np.float64) + ) self.get_fine_queries(ASDOUBLE(positions)) with Timer("compute fine sdf"): - sdf = np.ascontiguousarray(self.kernel_caller(kernels, positions.reshape((-1, 3))).astype(np.float64)) + sdf = np.ascontiguousarray( + self.kernel_caller(kernels, positions.reshape((-1, 3))).astype( + np.float64 + ) + ) del positions with Timer("run marching cube"): - if self.verbose: range_cnt = tqdm(range(cnt)) - else: range_cnt = range(cnt) + if self.verbose: + range_cnt = tqdm(range(cnt)) + else: + range_cnt = range(cnt) for i in range_cnt: for e in range(n_elements): - part_sdf = sdf[i * block_size: (i+1) * block_size].reshape(S, S, S, n_elements)[..., e] + part_sdf = sdf[i * block_size : (i + 1) * block_size].reshape( + S, S, S, n_elements + )[..., e] if (part_sdf >= 0).all() or (part_sdf <= 0).all(): continue verts_int, verts_frac, faces, _, _ = marching_cubes(part_sdf, 0) self.update_fine_small( - i, e, ASDOUBLE(sdf), + i, + e, + ASDOUBLE(sdf), ASINT(AC(verts_int.astype(np.int32))), - ASDOUBLE(AC(verts_frac.astype(np.float64))), len(verts_int), - ASINT(AC(faces.astype(np.int32))), 
len(faces), + ASDOUBLE(AC(verts_frac.astype(np.float64))), + len(verts_int), + ASINT(AC(faces.astype(np.int32))), + len(faces), ) with Timer("update"): cnt = self.update_fine() - with Timer("merge identifiers and get fine vert counts"): Ns = AC(np.zeros(n_elements, dtype=np.int32)) Ms = AC(np.zeros(n_elements, dtype=np.int32)) NM[:] = 0 self.get_final_mesh_statistics(ASINT(NM), ASINT(Ns), ASINT(Ms)) - if self.verbose: print(f"Invisible cleaned coarse mesh has {NM[0]} vertices and {NM[1]} faces") + if self.verbose: + print( + f"Invisible cleaned coarse mesh has {NM[0]} vertices and {NM[1]} faces" + ) for e in range(n_elements): - if self.verbose: print(f"In view fine mesh (element {e}) has {Ns[e]} vertices and {Ms[e]} faces") + if self.verbose: + print( + f"In view fine mesh (element {e}) has {Ns[e]} vertices and {Ms[e]} faces" + ) with Timer("fine bisection"): for e in range(n_elements): positions = AC(np.zeros((Ns[e] * 3,), dtype=np.float64)) - if self.verbose: range_it = tqdm(range(self.bisection_iters_fine)) - else: range_it = range(self.bisection_iters_fine) + if self.verbose: + range_it = tqdm(range(self.bisection_iters_fine)) + else: + range_it = range(self.bisection_iters_fine) for it in range_it: self.bisection_get_positions(e, ASDOUBLE(positions)) - sdf = np.ascontiguousarray(self.kernel_caller([kernels[e]], positions.reshape((-1, 3))).astype(np.float64)) + sdf = np.ascontiguousarray( + self.kernel_caller( + [kernels[e]], positions.reshape((-1, 3)) + ).astype(np.float64) + ) self.bisection_update(e, ASDOUBLE(sdf)) with Timer("get final results"): vertices = AC(np.zeros((NM[0] + np.sum(Ns)) * 3, dtype=np.float64)) faces = AC(np.zeros((NM[1] + np.sum(Ms)) * 3, dtype=np.int32)) self.get_final_mesh(ASDOUBLE(vertices), ASINT(faces)) - mesh = Mesh(vertices=vertices[:NM[0] * 3].reshape((-1, 3)), faces=faces[:NM[1] * 3].reshape((-1, 3))) + mesh = Mesh( + vertices=vertices[: NM[0] * 3].reshape((-1, 3)), + faces=faces[: NM[1] * 3].reshape((-1, 3)), + ) meshes = [] cnt_N = NM[0] cnt_M = NM[1] for e in range(n_elements): - meshes.append(Mesh(vertices=vertices[cnt_N * 3: (cnt_N+Ns[e]) * 3].reshape((-1, 3)), faces=faces[cnt_M * 3: (cnt_M+Ms[e]) * 3].reshape((-1, 3)))) + meshes.append( + Mesh( + vertices=vertices[cnt_N * 3 : (cnt_N + Ns[e]) * 3].reshape( + (-1, 3) + ), + faces=faces[cnt_M * 3 : (cnt_M + Ms[e]) * 3].reshape((-1, 3)), + ) + ) cnt_N += Ns[e] cnt_M += Ms[e] - + with Timer("compute attributes"): write_attributes(kernels, mesh, meshes) - + with Timer("concat meshes"): catted_mesh = Mesh.cat([mesh, *meshes]) - + return catted_mesh diff --git a/infinigen/terrain/mesher/spherical_mesher.py b/infinigen/terrain/mesher/spherical_mesher.py index ec36f199c..4809f451b 100644 --- a/infinigen/terrain/mesher/spherical_mesher.py +++ b/infinigen/terrain/mesher/spherical_mesher.py @@ -4,16 +4,18 @@ # Authors: Zeyu Ma -import numpy as np import gin +import numpy as np from infinigen.core.util.logging import Timer -from infinigen.terrain.utils import Mesh, get_caminfo, write_attributes, Vars +from infinigen.terrain.utils import Mesh, Vars, get_caminfo + from .cube_spherical_mesher import CubeSphericalMesher from .frontview_spherical_mesher import FrontviewSphericalMesher magnifier = 1e6 + @gin.configurable def kernel_caller(kernels, XYZ, bounds=None): sdfs = [] @@ -23,16 +25,20 @@ def kernel_caller(kernels, XYZ, bounds=None): if bounds is not None: out_bound = np.zeros(len(XYZ), dtype=bool) for i in range(3): - out_bound |= XYZ[:, i] <= bounds[i*2] - out_bound |= XYZ[:, i] >= bounds[i*2+1] - 
sdf[out_bound] = 1e6 # because of skimage mc only provides coords, which is has precision limit + out_bound |= XYZ[:, i] <= bounds[i * 2] + out_bound |= XYZ[:, i] >= bounds[i * 2 + 1] + sdf[out_bound] = ( + 1e6 # because of skimage mc only provides coords, which is has precision limit + ) sdfs.append(sdf) ret = np.stack(sdfs, -1) return ret + @gin.configurable class SphericalMesher: - def __init__(self, + def __init__( + self, cameras, bounds, r_min=1, @@ -40,7 +46,9 @@ def __init__(self, ): full_info, self.cam_pose, self.fov, self.H, self.W, _ = get_caminfo(cameras) cams = full_info[0] - assert self.fov[0] < np.pi / 2 and self.fov[1] < np.pi / 2, "the algorithm does not support larger-than-90-degree fov yet" + assert ( + self.fov[0] < np.pi / 2 and self.fov[1] < np.pi / 2 + ), "the algorithm does not support larger-than-90-degree fov yet" self.r_min = r_min self.complete_depth_test = complete_depth_test self.bounds = bounds @@ -49,13 +57,20 @@ def __init__(self, for i in range(2): for j in range(2): for k in range(2): - r_max = np.linalg.norm(np.array([self.bounds[i], self.bounds[2+j], self.bounds[4+k]]) - cam[:3, 3]) + r_max = np.linalg.norm( + np.array( + [self.bounds[i], self.bounds[2 + j], self.bounds[4 + k]] + ) + - cam[:3, 3] + ) self.r_max = max(self.r_max, r_max) self.r_max *= 1.1 + @gin.configurable class OpaqueSphericalMesher(SphericalMesher): - def __init__(self, + def __init__( + self, cameras, bounds, base_90d_resolution=None, @@ -67,46 +82,81 @@ def __init__(self, ): SphericalMesher.__init__(self, cameras, bounds) inview_upscale_coarse = upscale1 - inview_upscale_fine = upscale1 * upscale2 + inview_upscale_fine = upscale1 * upscale2 outview_upscale = 1 assert bool(base_90d_resolution is None) ^ bool(pixels_per_cube is None) - if base_90d_resolution is None: base_90d_resolution = int(1 / (pixels_per_cube * inview_upscale_fine * self.fov[0] / np.pi * 2 / self.H)) + if base_90d_resolution is None: + base_90d_resolution = int( + 1 + / ( + pixels_per_cube + * inview_upscale_fine + * self.fov[0] + / np.pi + * 2 + / self.H + ) + ) base_90d_resolution = base_90d_resolution // test_downscale * test_downscale - print(f"In view visible mesh angle resolution 90d/{base_90d_resolution * inview_upscale_fine}, about {base_90d_resolution * inview_upscale_fine * self.fov[0] / np.pi * 2 / self.H: .2f} marching cube per pixel") - print(f"In view invisible mesh angle resolution 90d/{base_90d_resolution * inview_upscale_coarse}, about {base_90d_resolution * inview_upscale_coarse * self.fov[0] / np.pi * 2 / self.H: .2f} marching cube per pixel") - print(f"Out view mesh angle resolution 90d/{base_90d_resolution * outview_upscale}, about {base_90d_resolution * outview_upscale * self.fov[0] / np.pi * 2 / self.H: .2f} marching cube per pixel") + print( + f"In view visible mesh angle resolution 90d/{base_90d_resolution * inview_upscale_fine}, about {base_90d_resolution * inview_upscale_fine * self.fov[0] / np.pi * 2 / self.H: .2f} marching cube per pixel" + ) + print( + f"In view invisible mesh angle resolution 90d/{base_90d_resolution * inview_upscale_coarse}, about {base_90d_resolution * inview_upscale_coarse * self.fov[0] / np.pi * 2 / self.H: .2f} marching cube per pixel" + ) + print( + f"Out view mesh angle resolution 90d/{base_90d_resolution * outview_upscale}, about {base_90d_resolution * outview_upscale * self.fov[0] / np.pi * 2 / self.H: .2f} marching cube per pixel" + ) fov = self.fov base_angle_resolution = np.pi / 2 / base_90d_resolution - base_R = int((np.log(self.r_max) - 
np.log(self.r_min)) / (np.pi/2 / base_90d_resolution) / r_lengthen) + base_R = int( + (np.log(self.r_max) - np.log(self.r_min)) + / (np.pi / 2 / base_90d_resolution) + / r_lengthen + ) N0 = int(np.floor((1 - fov[0] * 2 / np.pi) / 2 * base_90d_resolution)) N1 = int(np.floor((1 - fov[1] * 2 / np.pi) / 2 * base_90d_resolution)) - rounded_fov = (2 * (np.pi / 4 - N0 * base_angle_resolution), 2 * (np.pi / 4 - N1 * base_angle_resolution)) + rounded_fov = ( + 2 * (np.pi / 4 - N0 * base_angle_resolution), + 2 * (np.pi / 4 - N1 * base_angle_resolution), + ) H = (base_90d_resolution - N0 * 2) * upscale1 W = (base_90d_resolution - N1 * 2) * upscale1 R = base_R * upscale1 print(f"In view invisible mesh marching cube resolution {H}x{W}x{R}") self.frontview_mesher = FrontviewSphericalMesher( self.cam_pose, - rounded_fov[0], rounded_fov[1], - self.r_min, self.r_max, - H, W, R, + rounded_fov[0], + rounded_fov[1], + self.r_min, + self.r_max, + H, + W, + R, upscale2, test_downscale=test_downscale, complete_depth_test=self.complete_depth_test, ) - self.frontview_mesher.kernel_caller = lambda k, xyz: kernel_caller(k, xyz, self.bounds) + self.frontview_mesher.kernel_caller = lambda k, xyz: kernel_caller( + k, xyz, self.bounds + ) self.background_mesher = CubeSphericalMesher( self.cam_pose, - self.r_min, self.r_max, + self.r_min, + self.r_max, base_90d_resolution * outview_upscale, base_R * outview_upscale, test_downscale=test_downscale, - H_fov=rounded_fov[0], W_fov=rounded_fov[1], - N0=N0, N1=N1, + H_fov=rounded_fov[0], + W_fov=rounded_fov[1], + N0=N0, + N1=N1, + ) + self.background_mesher.kernel_caller = lambda k, xyz: kernel_caller( + k, xyz, self.bounds ) - self.background_mesher.kernel_caller = lambda k, xyz: kernel_caller(k, xyz, self.bounds) def __call__(self, kernels): with Timer("OpaqueSphericalMesher: frontview_mesher"): @@ -117,10 +167,10 @@ def __call__(self, kernels): return mesh - @gin.configurable class TransparentSphericalMesher(SphericalMesher): - def __init__(self, + def __init__( + self, cameras, bounds, base_90d_resolution=None, @@ -134,32 +184,48 @@ def __init__(self, self.cameras = cameras self.camera_annotation_frames = camera_annotation_frames assert bool(base_90d_resolution is None) ^ bool(pixels_per_cube is None) - if base_90d_resolution is None: base_90d_resolution = int(1 / (pixels_per_cube * inv_scale * self.fov[0] / np.pi * 2 / self.H)) + if base_90d_resolution is None: + base_90d_resolution = int( + 1 / (pixels_per_cube * inv_scale * self.fov[0] / np.pi * 2 / self.H) + ) base_90d_resolution = base_90d_resolution // test_downscale * test_downscale - base_R = int((np.log(self.r_max) - np.log(self.r_min)) / (np.pi/2 / base_90d_resolution) / r_lengthen) - print(f"In view mesh angle resolution 90d/{base_90d_resolution * inv_scale}, about {base_90d_resolution * inv_scale * self.fov[0] / np.pi * 2 / self.H: .2f} marching cube per pixel") - print(f"Out view mesh angle resolution 90d/{base_90d_resolution}, about {base_90d_resolution * self.fov[0] / np.pi * 2 / self.H: .2f} marching cube per pixel") + base_R = int( + (np.log(self.r_max) - np.log(self.r_min)) + / (np.pi / 2 / base_90d_resolution) + / r_lengthen + ) + print( + f"In view mesh angle resolution 90d/{base_90d_resolution * inv_scale}, about {base_90d_resolution * inv_scale * self.fov[0] / np.pi * 2 / self.H: .2f} marching cube per pixel" + ) + print( + f"Out view mesh angle resolution 90d/{base_90d_resolution}, about {base_90d_resolution * self.fov[0] / np.pi * 2 / self.H: .2f} marching cube per pixel" + ) fov = self.fov 
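> Reviewer note, not part of the patch: the `pixels_per_cube` branch in both meshers solves for `base_90d_resolution` so that the final upscaled angular grid lands at the requested pixel density (the "marching cube per pixel" figure in the prints is then roughly `1 / pixels_per_cube`). A minimal numeric sketch of that arithmetic, with made-up camera values:

```python
import numpy as np

# Illustrative numbers only (not taken from the patch): a 1080-pixel-tall image
# with a 60-degree vertical fov, asking for ~2 pixels per marching-cube cell at
# an upscale factor of 3.
H, fov0, pixels_per_cube, upscale = 1080, np.radians(60), 2.0, 3

# Same arithmetic as the meshers above: one cell spans pixels_per_cube pixels
# on the *final* (upscaled) grid, so the base resolution is its reciprocal.
base_90d_resolution = int(1 / (pixels_per_cube * upscale * fov0 / np.pi * 2 / H))

cells_per_pixel = base_90d_resolution * upscale * fov0 / np.pi * 2 / H
assert abs(cells_per_pixel - 1 / pixels_per_cube) < 0.01  # ~0.5 cells per pixel
```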
base_angle_resolution = np.pi / 2 / base_90d_resolution N0 = int(np.floor((1 - fov[0] * 2 / np.pi) / 2 * base_90d_resolution)) N1 = int(np.floor((1 - fov[1] * 2 / np.pi) / 2 * base_90d_resolution)) - rounded_fov = (2 * (np.pi / 4 - N0 * base_angle_resolution), 2 * (np.pi / 4 - N1 * base_angle_resolution)) + rounded_fov = ( + 2 * (np.pi / 4 - N0 * base_angle_resolution), + 2 * (np.pi / 4 - N1 * base_angle_resolution), + ) self.mesher = CubeSphericalMesher( self.cam_pose, - self.r_min, self.r_max, + self.r_min, + self.r_max, base_90d_resolution, base_R, test_downscale=test_downscale, inview_upscale=inv_scale, - H_fov=rounded_fov[0], W_fov=rounded_fov[1], - N0=N0, N1=N1, + H_fov=rounded_fov[0], + W_fov=rounded_fov[1], + N0=N0, + N1=N1, complete_depth_test=self.complete_depth_test, ) self.mesher.kernel_caller = lambda k, xyz: kernel_caller(k, xyz, self.bounds) - def __call__(self, kernels): with Timer("TransparentSphericalMesher"): mesh = self.mesher(kernels) diff --git a/infinigen/terrain/mesher/uniform_mesher.py b/infinigen/terrain/mesher/uniform_mesher.py index 4c91b5989..3966f3201 100644 --- a/infinigen/terrain/mesher/uniform_mesher.py +++ b/infinigen/terrain/mesher/uniform_mesher.py @@ -9,18 +9,27 @@ import gin import numpy as np from numpy import ascontiguousarray as AC -from infinigen.terrain.utils import ASDOUBLE, ASINT, Mesh + +from infinigen.terrain.utils import ( + ASDOUBLE, + ASINT, + Mesh, + Vars, + load_cdll, + register_func, + write_attributes, +) from infinigen.terrain.utils import Timer as tTimer -from infinigen.terrain.utils import Vars, load_cdll, register_func, write_attributes logger = logging.getLogger(__name__) try: from ._marching_cubes_lewiner import marching_cubes -except ImportError as e: - logger.warning(f'Could not import marching_cubes, terrain is likely not installed') +except ImportError: + logger.warning("Could not import marching_cubes, terrain is likely not installed") marching_cubes = None + @gin.configurable("UniformMesherTimer") class Timer(tTimer): def __init__(self, desc, verbose=False): @@ -29,9 +38,10 @@ def __init__(self, desc, verbose=False): @gin.configurable class UniformMesher: - def __init__(self, + def __init__( + self, bounds, - subdivisions=(64, -1, -1), # -1 means automatic + subdivisions=(64, -1, -1), # -1 means automatic upscale=3, enclosed=False, bisection_iters=10, @@ -42,11 +52,11 @@ def __init__(self, self.upscale = upscale self.bounds = bounds - assert(np.sum(subdivisions == -1) in [0, 2]) + assert np.sum(subdivisions == -1) in [0, 2] for i, s in enumerate(subdivisions): if s != -1: coarse_voxel_size = (bounds[i * 2 + 1] - bounds[i * 2]) / s - + if subdivisions[0] != -1: self.x_N = subdivisions[0] else: @@ -68,22 +78,47 @@ def __init__(self, self.bisection_iters = bisection_iters dll = load_cdll(f"terrain/lib/{device}/meshing/uniform_mesher.so") - register_func(self, dll, "init_and_get_coarse_queries", [ - c_double, c_double, c_int32, c_double, c_double, c_int32, - c_double, c_double, c_int32, c_int32, POINTER(c_double), - ]) + register_func( + self, + dll, + "init_and_get_coarse_queries", + [ + c_double, + c_double, + c_int32, + c_double, + c_double, + c_int32, + c_double, + c_double, + c_int32, + c_int32, + POINTER(c_double), + ], + ) register_func(self, dll, "initial_update", [POINTER(c_double)], c_int32) register_func(self, dll, "get_fine_queries", [POINTER(c_double)]) - register_func(self, dll, "update", [ - c_int32, POINTER(c_double), POINTER(c_int32), POINTER(c_double), c_int32, POINTER(c_int32), c_int32, - ]) + register_func( + 
self, + dll, + "update", + [ + c_int32, + POINTER(c_double), + POINTER(c_int32), + POINTER(c_double), + c_int32, + POINTER(c_int32), + c_int32, + ], + ) register_func(self, dll, "get_cnt", restype=c_int32) register_func(self, dll, "get_coarse_mesh_cnt", [POINTER(c_int32)]) register_func(self, dll, "bisection_get_positions", [POINTER(c_double)]) register_func(self, dll, "bisection_update", [POINTER(c_double)]) - register_func(self, dll, "get_final_mesh", [POINTER(c_double), POINTER(c_int32)]) - - + register_func( + self, dll, "get_final_mesh", [POINTER(c_double), POINTER(c_int32)] + ) def kernel_caller(self, kernels, XYZ): sdfs = [] @@ -91,74 +126,109 @@ def kernel_caller(self, kernels, XYZ): ret = kernel(XYZ, sdf_only=1) sdf = ret[Vars.SDF] if self.enclosed: - out_bound = (XYZ[:, 0] < self.x_min + self.closing_margin) | (XYZ[:, 0] > self.x_max - self.closing_margin) \ - | (XYZ[:, 1] < self.y_min + self.closing_margin) | (XYZ[:, 1] > self.y_max - self.closing_margin) \ - | (XYZ[:, 2] < self.z_min + self.closing_margin) | (XYZ[:, 2] > self.z_max - self.closing_margin) + out_bound = ( + (XYZ[:, 0] < self.x_min + self.closing_margin) + | (XYZ[:, 0] > self.x_max - self.closing_margin) + | (XYZ[:, 1] < self.y_min + self.closing_margin) + | (XYZ[:, 1] > self.y_max - self.closing_margin) + | (XYZ[:, 2] < self.z_min + self.closing_margin) + | (XYZ[:, 2] > self.z_max - self.closing_margin) + ) sdf[out_bound] = 1e6 sdfs.append(sdf) return np.stack(sdfs, -1) def __call__(self, kernels): - if marching_cubes is None: raise ValueError( - f'Attempted to run {self.__class__.__name__} but marching_cubes was not imported. ' - 'Either the user opted out of installing terrain (e.g. via INFINIGEN_MINIMAL_INSTALL), or there was an error during installation' + f"Attempted to run {self.__class__.__name__} but marching_cubes was not imported. " + "Either the user opted out of installing terrain (e.g. 
via INFINIGEN_MINIMAL_INSTALL), or there was an error during installation" ) with Timer("get_coarse_queries"): - positions = AC(np.zeros(((self.x_N + 1) * (self.y_N + 1) * (self.z_N + 1), 3), dtype=np.float64)) + positions = AC( + np.zeros( + ((self.x_N + 1) * (self.y_N + 1) * (self.z_N + 1), 3), + dtype=np.float64, + ) + ) self.init_and_get_coarse_queries( - self.x_min, self.x_max, self.x_N, - self.y_min, self.y_max, self.y_N, - self.z_min, self.z_max, self.z_N, + self.x_min, + self.x_max, + self.x_N, + self.y_min, + self.y_max, + self.y_N, + self.z_min, + self.z_max, + self.z_N, self.upscale, ASDOUBLE(positions), ) with Timer("compute sdf"): - sdf = AC(self.kernel_caller(kernels, positions).min(axis=-1).astype(np.float64)) + sdf = AC( + self.kernel_caller(kernels, positions).min(axis=-1).astype(np.float64) + ) with Timer("initial_update"): cnt = self.initial_update(ASDOUBLE(sdf)) - + S = self.upscale + 1 - block_size = (self.upscale+1) ** 3 + block_size = (self.upscale + 1) ** 3 while True: - if cnt == 0: break + if cnt == 0: + break with Timer(f"get_fine_queries of {cnt} blocks"): - positions = AC(np.zeros(((self.upscale + 1) ** 3 * cnt, 3), dtype=np.float64)) + positions = AC( + np.zeros(((self.upscale + 1) ** 3 * cnt, 3), dtype=np.float64) + ) self.get_fine_queries(ASDOUBLE(positions)) with Timer("compute fine sdf and run marching cube"): - sdf = np.ascontiguousarray(self.kernel_caller(kernels, positions.reshape((-1, 3))).min(axis=-1).astype(np.float64)) + sdf = np.ascontiguousarray( + self.kernel_caller(kernels, positions.reshape((-1, 3))) + .min(axis=-1) + .astype(np.float64) + ) for i in range(cnt): - verts_int, verts_frac, faces, _, _ = marching_cubes(sdf[i * block_size: (i+1) * block_size].reshape(S, S, S), 0) + verts_int, verts_frac, faces, _, _ = marching_cubes( + sdf[i * block_size : (i + 1) * block_size].reshape(S, S, S), 0 + ) self.update( - i, ASDOUBLE(sdf), + i, + ASDOUBLE(sdf), ASINT(AC(verts_int.astype(np.int32))), - ASDOUBLE(AC(verts_frac.astype(np.float64))), len(verts_frac), - ASINT(AC(faces.astype(np.int32))), len(faces), + ASDOUBLE(AC(verts_frac.astype(np.float64))), + len(verts_frac), + ASINT(AC(faces.astype(np.int32))), + len(faces), ) with Timer("update"): cnt = self.get_cnt() - + with Timer("merge identifiers and get coarse vert counts"): NM = AC(np.zeros(2, dtype=np.int32)) self.get_coarse_mesh_cnt(ASINT(NM)) N = NM[0] M = NM[1] - - if N == 0: return Mesh() - - if self.verbose: print(f"Coarse mesh has {N} vertices and {M} faces") - + + if N == 0: + return Mesh() + + if self.verbose: + print(f"Coarse mesh has {N} vertices and {M} faces") + with Timer("bisection on in view coarse mesh"): positions = AC(np.zeros((N * 3,), dtype=np.float64)) range_it = range(self.bisection_iters) for it in range_it: self.bisection_get_positions(ASDOUBLE(positions)) - sdf = np.ascontiguousarray(self.kernel_caller(kernels, positions.reshape((-1, 3))).min(axis=-1).astype(np.float64)) + sdf = np.ascontiguousarray( + self.kernel_caller(kernels, positions.reshape((-1, 3))) + .min(axis=-1) + .astype(np.float64) + ) self.bisection_update(ASDOUBLE(sdf)) with Timer("get final results"): diff --git a/infinigen/terrain/scene.py b/infinigen/terrain/scene.py index 4a0385ff7..86e291be8 100644 --- a/infinigen/terrain/scene.py +++ b/infinigen/terrain/scene.py @@ -5,18 +5,19 @@ import gin + +from infinigen.core.util.math import FixedSeed, int_hash +from infinigen.core.util.organization import Assets, ElementNames +from infinigen.terrain.elements.atmosphere import Atmosphere from 
infinigen.terrain.elements.caves import Caves from infinigen.terrain.elements.ground import Ground -from infinigen.terrain.elements.landtiles import LandTiles, Volcanos, FloatingIce +from infinigen.terrain.elements.landtiles import FloatingIce, LandTiles, Volcanos from infinigen.terrain.elements.upsidedown_mountains import UpsidedownMountains -from infinigen.terrain.elements.voronoi_rocks import VoronoiRocks, VoronoiGrains +from infinigen.terrain.elements.voronoi_rocks import VoronoiGrains, VoronoiRocks from infinigen.terrain.elements.warped_rocks import WarpedRocks from infinigen.terrain.elements.waterbody import Waterbody -from infinigen.terrain.elements.atmosphere import Atmosphere - from infinigen.terrain.utils import chance -from infinigen.core.util.organization import ElementNames, Assets -from infinigen.core.util.math import FixedSeed, int_hash + @gin.configurable def scene( @@ -40,23 +41,28 @@ def scene( with FixedSeed(int_hash([seed, "caves"])): if chance(caves_chance): - caves = Caves(on_the_fly_asset_folder / Assets.Caves, reused_asset_folder / Assets.Caves) + caves = Caves( + on_the_fly_asset_folder / Assets.Caves, + reused_asset_folder / Assets.Caves, + ) else: caves = None last_ground_element = None - + with FixedSeed(int_hash([seed, "ground"])): if chance(ground_chance): elements[ElementNames.Ground] = Ground(device, caves) last_ground_element = elements[ElementNames.Ground] - + with FixedSeed(int_hash([seed, "landtiles"])): if chance(landtiles_chance): - elements[ElementNames.LandTiles] = LandTiles(device, caves, on_the_fly_asset_folder, reused_asset_folder) + elements[ElementNames.LandTiles] = LandTiles( + device, caves, on_the_fly_asset_folder, reused_asset_folder + ) last_ground_element = elements[ElementNames.LandTiles] - assert(last_ground_element is not None) + assert last_ground_element is not None with FixedSeed(int_hash([seed, "warped_rocks"])): if chance(warped_rocks_chance): @@ -64,24 +70,30 @@ def scene( with FixedSeed(int_hash([seed, "voronoi_rocks"])): if chance(voronoi_rocks_chance): - elements[ElementNames.VoronoiRocks] = VoronoiRocks(device, last_ground_element, caves) + elements[ElementNames.VoronoiRocks] = VoronoiRocks( + device, last_ground_element, caves + ) with FixedSeed(int_hash([seed, "voronoi_grains"])): if chance(voronoi_grains_chance): - elements[ElementNames.VoronoiGrains] = VoronoiGrains(device, last_ground_element, caves) - + elements[ElementNames.VoronoiGrains] = VoronoiGrains( + device, last_ground_element, caves + ) + with FixedSeed(int_hash([seed, "upsidedown_mountains"])): if chance(upsidedown_mountains_chance): elements[ElementNames.UpsidedownMountains] = UpsidedownMountains( - device, on_the_fly_asset_folder / Assets.UpsidedownMountains, reused_asset_folder / Assets.UpsidedownMountains + device, + on_the_fly_asset_folder / Assets.UpsidedownMountains, + reused_asset_folder / Assets.UpsidedownMountains, ) - + with FixedSeed(int_hash([seed, "volcanos"])): if chance(volcanos_chance): elements[ElementNames.Volcanos] = Volcanos( device, None, on_the_fly_asset_folder, reused_asset_folder ) - + with FixedSeed(int_hash([seed, "ground_ice"])): if chance(ground_ice_chance): elements[ElementNames.FloatingIce] = FloatingIce( @@ -90,7 +102,7 @@ def scene( scene_infos["water_plane"] = -1e5 waterbody = None - + with FixedSeed(int_hash([seed, "waterbody"])): if chance(waterbody_chance): waterbody = Waterbody(device, elements.get(ElementNames.LandTiles, None)) @@ -101,6 +113,7 @@ def scene( return elements, scene_infos + def transfer_scene_info(terrain, 
scene_info): for key in scene_info: - setattr(terrain, key, scene_info[key]) \ No newline at end of file + setattr(terrain, key, scene_info[key]) diff --git a/infinigen/terrain/surface_kernel/core.py b/infinigen/terrain/surface_kernel/core.py index 0c5fa028f..8f63faad6 100644 --- a/infinigen/terrain/surface_kernel/core.py +++ b/infinigen/terrain/surface_kernel/core.py @@ -9,7 +9,17 @@ import numpy as np from numpy import ascontiguousarray as AC -from infinigen.terrain.utils import KernelDataType, Vars, load_cdll, register_func, ASFLOAT, ASINT, KERNELDATATYPE_DIMS, KERNELDATATYPE_NPTYPE, Mesh +from infinigen.terrain.utils import ( + ASFLOAT, + ASINT, + KERNELDATATYPE_DIMS, + KERNELDATATYPE_NPTYPE, + KernelDataType, + Mesh, + Vars, + load_cdll, + register_func, +) from .kernelizer import Kernelizer @@ -30,7 +40,8 @@ def __init__(self, name, attribute, modifier, device): call_param_type.append(POINTER(c_float)) imp_values_of_type = {} for var in sorted(inputs.keys()): - if var in [Vars.Position, Vars.Normal]: continue + if var in [Vars.Position, Vars.Normal]: + continue dtype, value = inputs[var] if dtype in imp_values_of_type: imp_values_of_type[dtype].append(value) @@ -58,7 +69,9 @@ def __call__(self, params): ret = {} values = [] for dtype in sorted(self.imp_values_of_type.keys()): - M = len(self.imp_values_of_type[dtype]) // int(np.product(KERNELDATATYPE_DIMS[dtype])) + M = len(self.imp_values_of_type[dtype]) // int( + np.product(KERNELDATATYPE_DIMS[dtype]) + ) values.append(M) if dtype != KernelDataType.int: values.append(ASFLOAT(self.imp_values_of_type[dtype])) @@ -72,16 +85,30 @@ def __call__(self, params): N = len(positions) for var in self.outputs: dtype = self.outputs[var] - ret[var] = AC(np.zeros((N, *KERNELDATATYPE_DIMS[dtype]), dtype=KERNELDATATYPE_NPTYPE[dtype])) + ret[var] = AC( + np.zeros( + (N, *KERNELDATATYPE_DIMS[dtype]), dtype=KERNELDATATYPE_NPTYPE[dtype] + ) + ) if dtype != KernelDataType.int: values.append(ASFLOAT(ret[var])) else: values.append(ASINT(ret[var])) if isinstance(params, dict): - normals = AC(np.concatenate((np.zeros((N, 2), dtype=np.float32), np.ones((N, 1), dtype=np.float32)), -1)) + normals = AC( + np.concatenate( + ( + np.zeros((N, 2), dtype=np.float32), + np.ones((N, 1), dtype=np.float32), + ), + -1, + ) + ) pvalues = [N] - if self.use_position: pvalues.append(ASFLOAT(positions)) - if self.use_normal: pvalues.append(ASFLOAT(normals)) + if self.use_position: + pvalues.append(ASFLOAT(positions)) + if self.use_normal: + pvalues.append(ASFLOAT(normals)) values = pvalues + values self.call(*values) @@ -95,16 +122,19 @@ def __call__(self, params): elif isinstance(params, Mesh): normals = AC(params.vertex_normals.astype(np.float32)) pvalues = [N] - if self.use_position: pvalues.append(ASFLOAT(positions)) - if self.use_normal: pvalues.append(ASFLOAT(normals)) + if self.use_position: + pvalues.append(ASFLOAT(positions)) + if self.use_normal: + pvalues.append(ASFLOAT(normals)) values = pvalues + values self.call(*values) for var in self.outputs: dtype = self.outputs[var] shape = [1] * len(KERNELDATATYPE_DIMS[dtype]) - ret[var] *= params.vertex_attributes[self.attribute].reshape((N, *shape)) + ret[var] *= params.vertex_attributes[self.attribute].reshape( + (N, *shape) + ) if var == Vars.Offset: params.vertices += ret[var] else: params.vertex_attributes[var] = ret[var] - \ No newline at end of file diff --git a/infinigen/terrain/surface_kernel/kernelizer.py b/infinigen/terrain/surface_kernel/kernelizer.py index 66d3eb100..06d40f945 100644 --- 
a/infinigen/terrain/surface_kernel/kernelizer.py +++ b/infinigen/terrain/surface_kernel/kernelizer.py @@ -4,33 +4,53 @@ # Authors: Zeyu Ma - import re from collections import OrderedDict import numpy as np -from infinigen.terrain.utils import SocketType, Vars, KernelDataType, usable_name, Nodes, NODE_ATTRS_AVAILABLE, SOCKETTYPE_KERNEL, \ - sanitize, special_sanitize, special_sanitize_float_curve, concat_string, value_string, var_list, NODE_FUNCTIONS, \ - collecting_vars, get_imp_var_name, special_sanitize_constant + +from infinigen.terrain.utils import ( + NODE_ATTRS_AVAILABLE, + NODE_FUNCTIONS, + SOCKETTYPE_KERNEL, + KernelDataType, + Nodes, + SocketType, + Vars, + collecting_vars, + concat_string, + get_imp_var_name, + sanitize, + special_sanitize, + special_sanitize_constant, + special_sanitize_float_curve, + usable_name, + value_string, + var_list, +) functional_nodes = [ - Nodes.SetPosition, Nodes.InputPosition, Nodes.InputNormal, - Nodes.GroupOutput, Nodes.GroupInput, + Nodes.SetPosition, + Nodes.InputPosition, + Nodes.InputNormal, + Nodes.GroupOutput, + Nodes.GroupInput, ] + def my_getattr(x, a): if "." not in a: return getattr(x, a) else: return getattr(my_getattr(x, ".".join(a.split(".")[:-1])), a.split(".")[-1]) -class Kernelizer: +class Kernelizer: def get_inputs(self, node_tree): inputs = OrderedDict() for node_input in node_tree.inputs: if node_input.type != SocketType.Geometry: - assert(node_input.type != SocketType.Image) + assert node_input.type != SocketType.Image inputs[node_input.identifier] = SOCKETTYPE_KERNEL[node_input.type] return inputs @@ -60,7 +80,7 @@ def regularize(self, node_tree): elif node.bl_idname == Nodes.InputNormal: use_normal = 1 # only accept a single set position node, please add multiple ones together - assert(n_set_position <= 1) + assert n_set_position <= 1 for link in node_tree.links: from_node = link.from_node @@ -125,10 +145,11 @@ def regularize(self, node_tree): elif i != head_link: from_node, from_socket, to_node, to_socket = link new_links.append( - (links[head_link][0], links[head_link][1], to_node, to_socket)) + (links[head_link][0], links[head_link][1], to_node, to_socket) + ) links = new_links - nodes = nodes[:capture_node] + nodes[capture_node + 1:] - + nodes = nodes[:capture_node] + nodes[capture_node + 1 :] + outlets_count = {} for node in nodes: outlets_count[node.name] = 0 @@ -151,7 +172,8 @@ def regularize(self, node_tree): new_links = [] for link in links: from_node, from_socket, to_node, to_socket = link - if to_node is not None and to_node.name == node: continue + if to_node is not None and to_node.name == node: + continue new_links.append(link) links = new_links @@ -170,7 +192,15 @@ def regularize(self, node_tree): return nodes, links, use_position, use_normal - def code(self, nodes, links, kernel_inputs, kernel_impl_inputs, kernel_outputs, node_tree_name): + def code( + self, + nodes, + links, + kernel_inputs, + kernel_impl_inputs, + kernel_outputs, + node_tree_name, + ): nodes = {node.name: node for node in nodes} dependency_count = {} for node in nodes: @@ -180,27 +210,36 @@ def code(self, nodes, links, kernel_inputs, kernel_impl_inputs, kernel_outputs, if from_node is not None and to_node is not None: dependency_count[to_node.name] += 1 code = "" - + while len(dependency_count) > 0: for node in dependency_count: if dependency_count[node] == 0: head_node = nodes[node] break - params = [str(sanitize(my_getattr(head_node, p), head_node, p)) - for p in NODE_ATTRS_AVAILABLE[head_node.bl_idname]] + params = [ + 
str(sanitize(my_getattr(head_node, p), head_node, p)) + for p in NODE_ATTRS_AVAILABLE[head_node.bl_idname] + ] inputs = [] outputs = [] for socket in head_node.inputs: var_input = False for link in links: from_node, from_socket, to_node, to_socket = link - if to_node is not None and to_socket.identifier == socket.identifier and to_node.name == head_node.name: + if ( + to_node is not None + and to_socket.identifier == socket.identifier + and to_node.name == head_node.name + ): var_input = True if from_node is None: input_raw = from_socket else: - input_raw = usable_name( - from_node.name) + "__" + from_socket.identifier + input_raw = ( + usable_name(from_node.name) + + "__" + + from_socket.identifier + ) if from_node is None: if from_socket in kernel_inputs: from_type = kernel_inputs[from_socket] @@ -218,21 +257,35 @@ def code(self, nodes, links, kernel_inputs, kernel_impl_inputs, kernel_outputs, used = False for link in links: from_node, from_socket, to_node, to_socket = link - if from_node is not None and from_node.name == head_node.name and from_socket.identifier == socket.identifier: + if ( + from_node is not None + and from_node.name == head_node.name + and from_socket.identifier == socket.identifier + ): used = True break if used: - varname = usable_name(head_node.name) + \ - "__" + socket.identifier + varname = usable_name(head_node.name) + "__" + socket.identifier code += f"{SOCKETTYPE_KERNEL[socket.type]} {varname};\n" outputs.append("&" + varname) else: outputs.append("NULL") if head_node.bl_idname == Nodes.ColorRamp: if head_node.name.endswith("_VAR"): - code += special_sanitize(usable_name(head_node.name), my_getattr(head_node, NODE_ATTRS_AVAILABLE[head_node.bl_idname][0]), node_tree_name) + code += special_sanitize( + usable_name(head_node.name), + my_getattr( + head_node, NODE_ATTRS_AVAILABLE[head_node.bl_idname][0] + ), + node_tree_name, + ) else: - code += special_sanitize_constant(usable_name(head_node.name), my_getattr(head_node, NODE_ATTRS_AVAILABLE[head_node.bl_idname][0])) + code += special_sanitize_constant( + usable_name(head_node.name), + my_getattr( + head_node, NODE_ATTRS_AVAILABLE[head_node.bl_idname][0] + ), + ) elif head_node.bl_idname == Nodes.FloatCurve: code += special_sanitize_float_curve( usable_name(head_node.name), @@ -243,9 +296,9 @@ def code(self, nodes, links, kernel_inputs, kernel_impl_inputs, kernel_outputs, # code += f''' # {varname} = {value_string(socket.default_value)}; # ''' - code += f''' + code += f""" {varname} = {get_imp_var_name(node_tree_name, head_node.name)}; - ''' + """ else: if head_node.bl_idname == Nodes.Group: func = usable_name(head_node.node_tree.name) @@ -253,15 +306,19 @@ def code(self, nodes, links, kernel_inputs, kernel_impl_inputs, kernel_outputs, inputs.extend(sorted(list(imp_inputs.keys()))) else: func = NODE_FUNCTIONS[head_node.bl_idname] - code += f'''{func}( + code += f"""{func}( {concat_string(params)} {','.join(inputs)}, {','.join(outputs)} ); - ''' + """ for link in links: from_node, from_socket, to_node, to_socket = link - if from_node is not None and to_node is not None and from_node.name == head_node.name: + if ( + from_node is not None + and to_node is not None + and from_node.name == head_node.name + ): dependency_count[to_node.name] -= 1 del dependency_count[head_node.name] for link in links: @@ -269,18 +326,19 @@ def code(self, nodes, links, kernel_inputs, kernel_impl_inputs, kernel_outputs, if from_node is None: from_socket_name = from_socket else: - from_socket_name = usable_name( - from_node.name) + "__" + 
from_socket.identifier + from_socket_name = ( + usable_name(from_node.name) + "__" + from_socket.identifier + ) if to_node is None: to_type = kernel_outputs[to_socket][0] if SOCKETTYPE_KERNEL[from_socket.type] != to_type: - code += f''' + code += f""" if ({to_socket} != NULL) *{to_socket} = {from_socket_name}; - ''' + """ else: - code += f''' + code += f""" if ({to_socket} != NULL) *{to_socket} = {to_type}({from_socket_name}); - ''' + """ return code def execute_node_tree(self, node_tree, collective_style=False): @@ -295,29 +353,56 @@ def execute_node_tree(self, node_tree, collective_style=False): for node in nodes: if node.bl_idname == Nodes.Value: - imp_inputs[get_imp_var_name(node_tree.name, node.name)] = KernelDataType.float, np.array([node.outputs[0].default_value], dtype=np.float32) + imp_inputs[get_imp_var_name(node_tree.name, node.name)] = ( + KernelDataType.float, + np.array([node.outputs[0].default_value], dtype=np.float32), + ) elif node.bl_idname == Nodes.InputColor: - imp_inputs[get_imp_var_name(node_tree.name, node.name)] = KernelDataType.float4, np.array([node.color[i] for i in range(4)], dtype=np.float32) + imp_inputs[get_imp_var_name(node_tree.name, node.name)] = ( + KernelDataType.float4, + np.array([node.color[i] for i in range(4)], dtype=np.float32), + ) elif node.bl_idname == Nodes.Vector: - imp_inputs[get_imp_var_name(node_tree.name, node.name)] = KernelDataType.float3, np.array([node.vector[i] for i in range(3)], dtype=np.float32) + imp_inputs[get_imp_var_name(node_tree.name, node.name)] = ( + KernelDataType.float3, + np.array([node.vector[i] for i in range(3)], dtype=np.float32), + ) elif node.bl_idname == Nodes.ColorRamp and node.name.endswith("_VAR"): for i in range(len(node.color_ramp.elements)): - imp_inputs[get_imp_var_name(node_tree.name, node.name) + f"_pos{i}"] = (KernelDataType.float, np.array([node.color_ramp.elements[i].position], dtype=np.float32)) - imp_inputs[get_imp_var_name(node_tree.name, node.name) + f"_color{i}"] = (KernelDataType.float4, np.array([node.color_ramp.elements[i].color[j] for j in range(4)], dtype=np.float32)) + imp_inputs[ + get_imp_var_name(node_tree.name, node.name) + f"_pos{i}" + ] = ( + KernelDataType.float, + np.array( + [node.color_ramp.elements[i].position], dtype=np.float32 + ), + ) + imp_inputs[ + get_imp_var_name(node_tree.name, node.name) + f"_color{i}" + ] = ( + KernelDataType.float4, + np.array( + [node.color_ramp.elements[i].color[j] for j in range(4)], + dtype=np.float32, + ), + ) for node in nodes: - if node.bl_idname == Nodes.Group and node.node_tree.name not in self.node_tree_dict: + if ( + node.bl_idname == Nodes.Group + and node.node_tree.name not in self.node_tree_dict + ): subcode, sub_imp_inputs, _ = self.execute_node_tree(node.node_tree) imp_inputs.update(sub_imp_inputs) code += subcode outputs = self.get_output(node_tree) - code += f''' + code += f""" DEVICE_FUNC void {usable_name(node_tree.name)}( {var_list(inputs, imp_inputs, outputs, collective_style=collective_style)} ) {{ - ''' + """ if collective_style: code += collecting_vars(imp_inputs) code += self.code(nodes, links, inputs, imp_inputs, outputs, node_tree.name) @@ -328,10 +413,12 @@ def execute_node_tree(self, node_tree, collective_style=False): def __call__(self, modifier): node_tree = modifier.node_group self.node_tree_dict = {} - code, imp_inputs, outputs = self.execute_node_tree(node_tree, collective_style=True) + code, imp_inputs, outputs = self.execute_node_tree( + node_tree, collective_style=True + ) for nodeoutput in node_tree.outputs: id = 
nodeoutput.identifier - if id != 'Output_1': # not Geometry - code = re.sub(rf"\b{id}\b", modifier[f'{id}_attribute_name'], code) - outputs[modifier[f'{id}_attribute_name']] = outputs.pop(id) - return code, imp_inputs, outputs \ No newline at end of file + if id != "Output_1": # not Geometry + code = re.sub(rf"\b{id}\b", modifier[f"{id}_attribute_name"], code) + outputs[modifier[f"{id}_attribute_name"]] = outputs.pop(id) + return code, imp_inputs, outputs diff --git a/infinigen/terrain/utils/__init__.py b/infinigen/terrain/utils/__init__.py index 1f1862aef..e1009436c 100644 --- a/infinigen/terrain/utils/__init__.py +++ b/infinigen/terrain/utils/__init__.py @@ -4,20 +4,48 @@ # Authors: Zeyu Ma -from .mesh import Mesh, write_attributes, Vars, move_modifier -from .ctype_util import ASINT, ASDOUBLE, ASFLOAT, register_func, load_cdll -from .logging import Timer from .camera import get_caminfo +from .ctype_util import ASDOUBLE, ASFLOAT, ASINT, load_cdll, register_func from .image_processing import ( - boundary_smooth, smooth, read, sharpen, grid_distance, get_normal + boundary_smooth, + get_normal, + grid_distance, + read, + sharpen, + smooth, ) -from .random import perlin_noise, chance, drive_param, random_int, random_int_large, random_nat - from .kernelizer_util import ( - ATTRTYPE_DIMS, ATTRTYPE_FIELDS, ATTRTYPE_NP, NODE_ATTRS_AVAILABLE, - AttributeType, FieldsType, Nodes, SocketType, KernelDataType, - usable_name, SOCKETTYPE_KERNEL, sanitize, special_sanitize, - special_sanitize_float_curve, NODE_FUNCTIONS, concat_string, var_list, - value_string, collecting_vars, get_imp_var_name, special_sanitize_constant, - KERNELDATATYPE_NPTYPE, KERNELDATATYPE_DIMS + ATTRTYPE_DIMS, + ATTRTYPE_FIELDS, + ATTRTYPE_NP, + KERNELDATATYPE_DIMS, + KERNELDATATYPE_NPTYPE, + NODE_ATTRS_AVAILABLE, + NODE_FUNCTIONS, + SOCKETTYPE_KERNEL, + AttributeType, + FieldsType, + KernelDataType, + Nodes, + SocketType, + collecting_vars, + concat_string, + get_imp_var_name, + sanitize, + special_sanitize, + special_sanitize_constant, + special_sanitize_float_curve, + usable_name, + value_string, + var_list, +) +from .logging import Timer +from .mesh import Mesh, Vars, move_modifier, write_attributes +from .random import ( + chance, + drive_param, + perlin_noise, + random_int, + random_int_large, + random_nat, ) diff --git a/infinigen/terrain/utils/camera.py b/infinigen/terrain/utils/camera.py index 59b7bd58c..1c2932555 100644 --- a/infinigen/terrain/utils/camera.py +++ b/infinigen/terrain/utils/camera.py @@ -7,7 +7,6 @@ import bpy import gin import numpy as np -from infinigen.core.placement.camera import get_camera from scipy.spatial.transform import Rotation as R @@ -16,6 +15,7 @@ def getK(fov, H, W): fy = fx return np.array([[fx, 0, W / 2], [0, fy, H / 2], [0, 0, 1]]) + def pose_average(poses): translation = poses[:, :3, 3].mean(axis=0) quats = [] @@ -30,6 +30,7 @@ def pose_average(poses): res[:3, 3] = translation return res + def get_expanded_fov(cam_pose0, cam_poses, fov): rot0 = cam_pose0[:3, :3] bounds = np.array([1e9, -1e9, 1e9, -1e9]) @@ -43,7 +44,10 @@ def get_expanded_fov(cam_pose0, cam_poses, fov): bounds[1] = max(bounds[1], p[0] / p[2]) bounds[2] = min(bounds[2], p[1] / p[2]) bounds[3] = max(bounds[3], p[1] / p[2]) - return (np.arctan(max(-bounds[2], bounds[3])) * 2, np.arctan(max(-bounds[0], bounds[1])) * 2) + return ( + np.arctan(max(-bounds[2], bounds[3])) * 2, + np.arctan(max(-bounds[0], bounds[1])) * 2, + ) @gin.configurable @@ -53,7 +57,9 @@ def get_caminfo(cameras, relax=1.05): Ks = [] Hs = [] Ws = [] - 
coords_trans_matrix = np.array([[1, 0, 0, 0], [0, -1, 0, 0], [0, 0, -1, 0], [0, 0, 0, 1]]) + coords_trans_matrix = np.array( + [[1, 0, 0, 0], [0, -1, 0, 0], [0, 0, -1, 0], [0, 0, 0, 1]] + ) fs, fe = bpy.context.scene.frame_start, bpy.context.scene.frame_end fc = bpy.context.scene.frame_current for f in range(fs, fe + 1): @@ -62,9 +68,12 @@ def get_caminfo(cameras, relax=1.05): cam_pose = np.array(c.matrix_world) cam_pose = np.dot(np.array(cam_pose), coords_trans_matrix) cam_poses.append(cam_pose) - fov_rad = c.data.angle + fov_rad = c.data.angle fov_rad *= relax - H, W = bpy.context.scene.render.resolution_y, bpy.context.scene.render.resolution_x + H, W = ( + bpy.context.scene.render.resolution_y, + bpy.context.scene.render.resolution_x, + ) fov0 = np.arctan(H / 2 / (W / 2 / np.tan(fov_rad / 2))) * 2 fov = np.array([fov0, fov_rad]) fovs.append(fov) diff --git a/infinigen/terrain/utils/ctype_util.py b/infinigen/terrain/utils/ctype_util.py index 1abb8db15..b5598d834 100644 --- a/infinigen/terrain/utils/ctype_util.py +++ b/infinigen/terrain/utils/ctype_util.py @@ -4,26 +4,32 @@ # Authors: Zeyu Ma -import sys -from ctypes import CDLL, POINTER, c_double, c_float, c_int32, RTLD_LOCAL +from ctypes import CDLL, POINTER, RTLD_LOCAL, c_double, c_float, c_int32 from pathlib import Path # note: size of x should not exceed maximum def ASINT(x): return x.ctypes.data_as(POINTER(c_int32)) + + def ASDOUBLE(x): return x.ctypes.data_as(POINTER(c_double)) + + def ASFLOAT(x): return x.ctypes.data_as(POINTER(c_float)) + def register_func(me, dll, name, argtypes=[], restype=None, caller_name=None): - if caller_name is None: caller_name = name + if caller_name is None: + caller_name = name setattr(me, caller_name, getattr(dll, name)) func = getattr(me, caller_name) func.argtypes = argtypes func.restype = restype + def load_cdll(path): root = Path(__file__).parent.parent.parent - return CDLL(root/path, mode=RTLD_LOCAL) + return CDLL(root / path, mode=RTLD_LOCAL) diff --git a/infinigen/terrain/utils/image_processing.py b/infinigen/terrain/utils/image_processing.py index 6789b5dfe..da3b0a96c 100644 --- a/infinigen/terrain/utils/image_processing.py +++ b/infinigen/terrain/utils/image_processing.py @@ -13,6 +13,7 @@ import cv2 import numpy as np + def boundary_smooth(ar, p=0.1): N = ar.shape[0] P = int(N * p) @@ -33,8 +34,14 @@ def smooth(arr, k): def read(input_heightmap_path): input_heightmap_path = str(input_heightmap_path) - assert os.path.exists(input_heightmap_path), f"{input_heightmap_path} does not exists" - heightmap = cv2.imread(input_heightmap_path, cv2.IMREAD_ANYCOLOR | cv2.IMREAD_ANYDEPTH).copy().astype(float) + assert os.path.exists( + input_heightmap_path + ), f"{input_heightmap_path} does not exists" + heightmap = ( + cv2.imread(input_heightmap_path, cv2.IMREAD_ANYCOLOR | cv2.IMREAD_ANYDEPTH) + .copy() + .astype(float) + ) return heightmap @@ -53,7 +60,9 @@ def grid_distance(source, downsample): for i in range(N): for j in range(N): if boundary[i, j]: - dist = np.minimum(dist, (((I - i) / N) ** 2 + ((J - j) / N) ** 2) ** 0.5) + dist = np.minimum( + dist, (((I - i) / N) ** 2 + ((J - j) / N) ** 2) ** 0.5 + ) dist[source] = 0 dist = cv2.resize(dist, (M, M)) return dist @@ -72,4 +81,4 @@ def get_normal(z, grid_size): dzdy[:, 0] = dzdx[:, 1] n = np.stack((-dzdy, -dzdx, grid_size * np.ones_like(z)), -1) n /= np.linalg.norm(n, axis=-1).reshape((n.shape[0], n.shape[1], 1)) - return n \ No newline at end of file + return n diff --git a/infinigen/terrain/utils/kernelizer_util.py 
b/infinigen/terrain/utils/kernelizer_util.py index 27b5f998d..a04ffe747 100644 --- a/infinigen/terrain/utils/kernelizer_util.py +++ b/infinigen/terrain/utils/kernelizer_util.py @@ -5,9 +5,12 @@ import numpy as np + +from infinigen.core.nodes.node_info import ( + NODE_ATTRS_AVAILABLE as o_NODE_ATTRS_AVAILABLE, +) from infinigen.core.nodes.node_info import Nodes as oNodes -from infinigen.core.nodes.node_info import NODE_ATTRS_AVAILABLE as o_NODE_ATTRS_AVAILABLE -from infinigen.core.surface import Registry + class Vars: Position = "position" @@ -15,23 +18,44 @@ class Vars: Offset = "offset" SDF = "sdf" + class Nodes(oNodes): Group = "GeometryNodeGroup" + NODE_ATTRS_AVAILABLE = o_NODE_ATTRS_AVAILABLE.copy() -NODE_ATTRS_AVAILABLE.update({ - Nodes.ColorRamp: ["color_ramp.elements", "color_ramp.color_mode", "color_ramp.interpolation", "color_ramp.hue_interpolation"], - Nodes.MixRGB: ['use_clamp', 'blend_type'], - Nodes.Mix: ['data_type', 'blend_type', 'clamp_result', 'clamp_factor', 'factor_mode'], - Nodes.FloatCurve: ["mapping"], - Nodes.Value: [], - Nodes.Vector: [], - Nodes.InputColor: [], - Nodes.WaveTexture: ["wave_type", "bands_direction", "rings_direction", "wave_profile"], - Nodes.SeparateXYZ: [], - Nodes.Group: [], -}) +NODE_ATTRS_AVAILABLE.update( + { + Nodes.ColorRamp: [ + "color_ramp.elements", + "color_ramp.color_mode", + "color_ramp.interpolation", + "color_ramp.hue_interpolation", + ], + Nodes.MixRGB: ["use_clamp", "blend_type"], + Nodes.Mix: [ + "data_type", + "blend_type", + "clamp_result", + "clamp_factor", + "factor_mode", + ], + Nodes.FloatCurve: ["mapping"], + Nodes.Value: [], + Nodes.Vector: [], + Nodes.InputColor: [], + Nodes.WaveTexture: [ + "wave_type", + "bands_direction", + "rings_direction", + "wave_profile", + ], + Nodes.SeparateXYZ: [], + Nodes.Group: [], + } +) + class SocketType: Boolean = "BOOLEAN" @@ -42,6 +66,7 @@ class SocketType: RGBA = "RGBA" Image = "IMAGE" + class AttributeType: Float = "FLOAT" Int = "INT" @@ -49,12 +74,14 @@ class AttributeType: FloatColor = "FLOAT_COLOR" Boolean = "BOOLEAN" + class FieldsType: Value = "value" Vector = "vector" Color = "color" Boolean = "boolean" + ATTRTYPE_DIMS = { AttributeType.Float: 1, AttributeType.Int: 1, @@ -91,6 +118,7 @@ class FieldsType: # Pointwise = "Pointwise" # Constant = "Constant" + class KernelDataType: float = "float" float2 = "float2_nonbuiltin" @@ -98,6 +126,7 @@ class KernelDataType: float4 = "float4_nonbuiltin" int = "int" + KERNELDATATYPE_DIMS = { KernelDataType.float: [], KernelDataType.float2: [2], @@ -144,20 +173,35 @@ class KernelDataType: def special_sanitize_constant(node_name, x): positions = ",".join([str(x[i].position) for i in range(len(x))]) colors = ",".join( - [f"float4_nonbuiltin({x[i].color[0]}, {x[i].color[1]}, {x[i].color[2]}, {x[i].color[3]})" for i in range(len(x))]) - return f''' + [ + f"float4_nonbuiltin({x[i].color[0]}, {x[i].color[1]}, {x[i].color[2]}, {x[i].color[3]})" + for i in range(len(x)) + ] + ) + return f""" float {node_name}_positions[{len(x)}]{{{positions}}}; float4_nonbuiltin {node_name}_colors[{len(x)}]{{{colors}}}; - ''' + """ + def special_sanitize(node_name, x, node_tree_name): - positions = ",".join([get_imp_var_name(node_tree_name, node_name) + f"_pos{i}" for i in range(len(x))]) + positions = ",".join( + [ + get_imp_var_name(node_tree_name, node_name) + f"_pos{i}" + for i in range(len(x)) + ] + ) colors = ",".join( - [get_imp_var_name(node_tree_name, node_name) + f"_color{i}" for i in range(len(x))]) - return f''' + [ + get_imp_var_name(node_tree_name, 
node_name) + f"_color{i}" + for i in range(len(x)) + ] + ) + return f""" float {node_name}_positions[{len(x)}]{{{positions}}}; float4_nonbuiltin {node_name}_colors[{len(x)}]{{{colors}}}; - ''' + """ + def get_imp_var_name(node_tree_name, node_name): return usable_name(node_name) + "_FROM_" + usable_name(node_tree_name) @@ -169,13 +213,21 @@ def special_sanitize_float_curve(node_name, mapping, N=256): for p in positions: values.append(mapping.evaluate(mapping.curves[0], p)) values = ",".join([str(v) for v in values]) - return f''' + return f""" float {node_name}_values[{N}]{{{values}}}; int {node_name}_table_size = {N}; - ''' + """ + def usable_name(x): - return x.replace(".", "_DOT_").replace(" ", "_SPACE_").replace("~", "_WAVE_").replace("(", "_LBR_").replace(")", "_RBR_").replace(",", "_COMMA_") + return ( + x.replace(".", "_DOT_") + .replace(" ", "_SPACE_") + .replace("~", "_WAVE_") + .replace("(", "_LBR_") + .replace(")", "_RBR_") + .replace(",", "_COMMA_") + ) def sanitize(x, node, param): @@ -188,11 +240,20 @@ def sanitize(x, node, param): return int(x) elif node_type == Nodes.VectorMath and param == NODE_ATTRS_AVAILABLE[node_type][0]: return "NODE_VECTOR_MATH_" + x - elif node_type == Nodes.VoronoiTexture and param == NODE_ATTRS_AVAILABLE[node_type][0]: + elif ( + node_type == Nodes.VoronoiTexture + and param == NODE_ATTRS_AVAILABLE[node_type][0] + ): return x[:-1] - elif node_type == Nodes.VoronoiTexture and param == NODE_ATTRS_AVAILABLE[node_type][1]: + elif ( + node_type == Nodes.VoronoiTexture + and param == NODE_ATTRS_AVAILABLE[node_type][1] + ): return "SHD_VORONOI_" + x - elif node_type == Nodes.VoronoiTexture and param == NODE_ATTRS_AVAILABLE[node_type][2]: + elif ( + node_type == Nodes.VoronoiTexture + and param == NODE_ATTRS_AVAILABLE[node_type][2] + ): return "SHD_VORONOI_" + x elif node_type == Nodes.MapRange and param == NODE_ATTRS_AVAILABLE[node_type][0]: return x @@ -200,9 +261,15 @@ def sanitize(x, node, param): return "NODE_MAP_RANGE_" + x elif node_type == Nodes.MapRange and param == NODE_ATTRS_AVAILABLE[node_type][2]: return int(x) - elif node_type == Nodes.MusgraveTexture and param == NODE_ATTRS_AVAILABLE[node_type][0]: + elif ( + node_type == Nodes.MusgraveTexture + and param == NODE_ATTRS_AVAILABLE[node_type][0] + ): return x[:-1] - elif node_type == Nodes.MusgraveTexture and param == NODE_ATTRS_AVAILABLE[node_type][1]: + elif ( + node_type == Nodes.MusgraveTexture + and param == NODE_ATTRS_AVAILABLE[node_type][1] + ): return "SHD_MUSGRAVE_" + x elif node_type == Nodes.MixRGB and param == NODE_ATTRS_AVAILABLE[node_type][0]: return int(x) @@ -239,12 +306,14 @@ def sanitize(x, node, param): else: return x + def concat_string(strs): ret = "" for s in strs: ret += s + ", " return ret + def var_list(in_vars, imp_vars, out_vars, collective_style): code = [] for var in in_vars: @@ -253,7 +322,8 @@ def var_list(in_vars, imp_vars, out_vars, collective_style): if collective_style: imp_vars_of_type = {} for var in sorted(imp_vars.keys()): - if var in [Vars.Position, Vars.Normal]: continue + if var in [Vars.Position, Vars.Normal]: + continue dtype = imp_vars[var][0] if dtype in imp_vars_of_type: imp_vars_of_type[dtype].append(var) @@ -274,11 +344,13 @@ def var_list(in_vars, imp_vars, out_vars, collective_style): code.append(f"POINTER_OR_REFERENCE_ARG {dtype} *{var}") return ",".join(code) + def collecting_vars(imp_vars): code = "" imp_vars_count = {} for var in sorted(imp_vars.keys()): - if var in [Vars.Position, Vars.Normal]: continue + if var in [Vars.Position, 
Vars.Normal]: + continue dtype = imp_vars[var][0] if dtype in imp_vars_count: imp_vars_count[dtype] += 1 @@ -287,6 +359,7 @@ def collecting_vars(imp_vars): code += f"{dtype} {var} = {dtype}_vars[{imp_vars_count[dtype]}];\n" return code + def value_string(value): if isinstance(value, float): return str(value) diff --git a/infinigen/terrain/utils/logging.py b/infinigen/terrain/utils/logging.py index f8f44f674..670c1224c 100644 --- a/infinigen/terrain/utils/logging.py +++ b/infinigen/terrain/utils/logging.py @@ -4,9 +4,11 @@ # Authors: Zeyu Ma -import psutil import os + import gin +import psutil + from infinigen.core.util.logging import Timer as oTimer diff --git a/infinigen/terrain/utils/mesh.py b/infinigen/terrain/utils/mesh.py index 77715d08a..4dfd372b7 100644 --- a/infinigen/terrain/utils/mesh.py +++ b/infinigen/terrain/utils/mesh.py @@ -11,12 +11,13 @@ import numpy as np import trimesh from numpy import ascontiguousarray as AC + from infinigen.core.util import blender as butil from infinigen.core.util.logging import Timer from infinigen.core.util.organization import Attributes from .camera import getK -from .ctype_util import ASDOUBLE, ASINT, load_cdll, register_func +from .ctype_util import ASDOUBLE, ASINT, load_cdll from .kernelizer_util import ATTRTYPE_DIMS, ATTRTYPE_FIELDS, NPTYPEDIM_ATTR, Vars @@ -30,25 +31,35 @@ def object_to_vertex_attributes(obj, specified=None, skip_internal=True): for attr in obj.data.attributes.keys(): if skip_internal and butil.blender_internal_attr(attr): continue - if ((specified is None) or (specified is not None and attr in specified)) and obj.data.attributes[attr].domain == "POINT": + if ( + (specified is None) or (specified is not None and attr in specified) + ) and obj.data.attributes[attr].domain == "POINT": type_key = obj.data.attributes[attr].data_type - tmp = np.zeros(len(obj.data.vertices) * ATTRTYPE_DIMS[type_key], dtype=np.float32) + tmp = np.zeros( + len(obj.data.vertices) * ATTRTYPE_DIMS[type_key], dtype=np.float32 + ) obj.data.attributes[attr].data.foreach_get(ATTRTYPE_FIELDS[type_key], tmp) vertex_attributes[attr] = tmp.reshape((len(obj.data.vertices), -1)) return vertex_attributes + def object_to_face_attributes(obj, specified=None, skip_internal=True): face_attributes = {} for attr in obj.data.attributes.keys(): if skip_internal and butil.blender_internal_attr(attr): continue - if ((specified is None) or (specified is not None and attr in specified)) and obj.data.attributes[attr].domain == "FACE": + if ( + (specified is None) or (specified is not None and attr in specified) + ) and obj.data.attributes[attr].domain == "FACE": type_key = obj.data.attributes[attr].data_type - tmp = np.zeros(len(obj.data.polygons) * ATTRTYPE_DIMS[type_key], dtype=np.float32) + tmp = np.zeros( + len(obj.data.polygons) * ATTRTYPE_DIMS[type_key], dtype=np.float32 + ) obj.data.attributes[attr].data.foreach_get(ATTRTYPE_FIELDS[type_key], tmp) face_attributes[attr] = tmp.reshape((len(obj.data.polygons), -1)) return face_attributes + def objectdata_from_VF(vertices, faces): new_mesh = bpy.data.meshes.new("") new_mesh.vertices.add(len(vertices)) @@ -58,18 +69,22 @@ def objectdata_from_VF(vertices, faces): loop_total = np.ones(len(faces), np.int32) * 3 new_mesh.polygons.foreach_set("loop_total", loop_total) if len(loop_total) >= 1: - loop_start = np.concatenate((np.zeros(1, dtype=np.int32), np.cumsum(loop_total[:-1]))) + loop_start = np.concatenate( + (np.zeros(1, dtype=np.int32), np.cumsum(loop_total[:-1])) + ) new_mesh.polygons.foreach_set("loop_start", loop_start) 
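> Side note for reviewers (illustrative only, no `bpy` involved): for the triangle-only meshes built by `objectdata_from_VF`, the `loop_start` array computed above is just the prefix sum of a constant 3, i.e. face `k` starts at loop index `3 * k`:

```python
import numpy as np

# Every polygon is a triangle, so each face consumes exactly 3 loops.
n_faces = 5
loop_total = np.full(n_faces, 3, dtype=np.int32)
loop_start = np.concatenate((np.zeros(1, dtype=np.int32), np.cumsum(loop_total[:-1])))
assert loop_start.tolist() == [0, 3, 6, 9, 12]
```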
new_mesh.polygons.foreach_set("vertices", faces.reshape(-1)) new_mesh.update(calc_edges=True) return new_mesh + def object_from_VF(name, vertices, faces): new_mesh = objectdata_from_VF(vertices, faces) new_object = bpy.data.objects.new(name, new_mesh) new_object.rotation_euler = (0, 0, 0) return new_object + def convert_face_array(face_array): l = face_array.shape[0] min_indices = np.argmin(face_array, axis=1) @@ -78,13 +93,22 @@ def convert_face_array(face_array): w = face_array[list(np.arange(l)), (min_indices + 2) % 3] return np.stack([u, v, w], -1) + class Mesh: - def __init__(self, normal_mode=NormalMode.Mean, + def __init__( + self, + normal_mode=NormalMode.Mean, path=None, - heightmap=None, L=None, downsample=1, - vertices=None, faces=None, vertex_attributes=None, + heightmap=None, + L=None, + downsample=1, + vertices=None, + faces=None, + vertex_attributes=None, mesh=None, - obj=None, mesh_only=False, **kwargs + obj=None, + mesh_only=False, + **kwargs, ): self.normal_mode = normal_mode self.face_attributes = {} @@ -100,29 +124,34 @@ def __init__(self, normal_mode=NormalMode.Mean, for i in range(N): verts[i, :, 0] = (-1 + 2 * i / (N - 1)) * L / 2 for j in range(N): - verts[:, j, 1] = (-1 + 2 * j / (N - 1)) * L / 2 + verts[:, j, 1] = (-1 + 2 * j / (N - 1)) * L / 2 verts[:, :, 2] = heightmap verts = verts.reshape((-1, 3)) faces = np.zeros((2, N - 1, N - 1, 3), np.int32) for i in range(N - 1): - faces[0, i, :, :] += [i * N, (i+1) * N, i * N] - faces[1, i, :, :] += [i * N, (i+1) * N, (i+1) * N] - for j in range(N - 1): - faces[0, :, j, :] += [j, j, j+1] - faces[1, :, j, :] += [j+1, j, j+1] + faces[0, i, :, :] += [i * N, (i + 1) * N, i * N] + faces[1, i, :, :] += [i * N, (i + 1) * N, (i + 1) * N] + for j in range(N - 1): + faces[0, :, j, :] += [j, j, j + 1] + faces[1, :, j, :] += [j + 1, j, j + 1] faces = faces.reshape((-1, 3)) _trimesh = trimesh.Trimesh(verts, faces) elif vertices is not None: - _trimesh = trimesh.Trimesh(vertices=vertices, faces=faces.astype(np.int32), vertex_attributes=vertex_attributes, process=False) + _trimesh = trimesh.Trimesh( + vertices=vertices, + faces=faces.astype(np.int32), + vertex_attributes=vertex_attributes, + process=False, + ) elif mesh is not None: _trimesh = mesh elif obj is not None: verts_bpy = obj.data.vertices faces_bpy = obj.data.polygons - verts = np.zeros((len(verts_bpy)*3), dtype=float) + verts = np.zeros((len(verts_bpy) * 3), dtype=float) verts_bpy.foreach_get("co", verts) verts = verts.reshape((-1, 3)) - faces = np.zeros((len(faces_bpy)*3), dtype=np.int32) + faces = np.zeros((len(faces_bpy) * 3), dtype=np.int32) faces_bpy.foreach_get("vertices", faces) faces = faces.reshape((-1, 3)) _trimesh = trimesh.Trimesh(vertices=verts, faces=faces, process=False) @@ -133,30 +162,31 @@ def __init__(self, normal_mode=NormalMode.Mean, for key in kwargs: setattr(self, key, kwargs[key]) else: - _trimesh = trimesh.Trimesh(vertices=np.zeros((0, 3)), faces=np.zeros((0, 3), np.int32)) - - self._trimesh = _trimesh + _trimesh = trimesh.Trimesh( + vertices=np.zeros((0, 3)), faces=np.zeros((0, 3), np.int32) + ) + self._trimesh = _trimesh def to_trimesh(self): return self._trimesh - + @property def vertex_attributes(self): return self._trimesh.vertex_attributes - + @vertex_attributes.setter def vertex_attributes(self, value): self._trimesh.vertex_attributes = value - + @property def vertices(self): return self._trimesh.vertices - + @vertices.setter def vertices(self, value): self._trimesh.vertices = value - + @property def faces(self): return self._trimesh.faces @@ 
-167,7 +197,9 @@ def faces(self, value): def save(self, path): for attr in self._trimesh.vertex_attributes: - self._trimesh.vertex_attributes[attr] = self._trimesh.vertex_attributes[attr].astype(np.float32) + self._trimesh.vertex_attributes[attr] = self._trimesh.vertex_attributes[ + attr + ].astype(np.float32) self._trimesh.export(path) def make_unique(self): @@ -176,7 +208,9 @@ def make_unique(self): self.vertices = self.vertices[sorted_indices] self.faces = np.argsort(sorted_indices)[self.faces] for attr_name in self.vertex_attributes: - self.vertex_attributes[attr_name] = self.vertex_attributes[attr_name][sorted_indices] + self.vertex_attributes[attr_name] = self.vertex_attributes[attr_name][ + sorted_indices + ] self.faces = convert_face_array(self.faces) transposed_array = self.faces.T sorted_indices = np.lexsort(transposed_array) @@ -186,46 +220,89 @@ def export_blender(self, name, collection="Collection", material=None): self.make_unique() new_object = object_from_VF(name, self.vertices, self.faces) for attr_name in self.vertex_attributes: - attr_name_ls = attr_name.lstrip("_") # this is because of trimesh bug - dim = self.vertex_attributes[attr_name].shape[1] if self.vertex_attributes[attr_name].ndim != 1 else 1 - type_key = NPTYPEDIM_ATTR[(str(self.vertex_attributes[attr_name].dtype), dim)] - new_object.data.attributes.new(name=attr_name_ls, type=type_key, domain='POINT') - new_object.data.attributes[attr_name_ls].data.foreach_set(ATTRTYPE_FIELDS[type_key], AC(self.vertex_attributes[attr_name].reshape(-1))) + attr_name_ls = attr_name.lstrip("_") # this is because of trimesh bug + dim = ( + self.vertex_attributes[attr_name].shape[1] + if self.vertex_attributes[attr_name].ndim != 1 + else 1 + ) + type_key = NPTYPEDIM_ATTR[ + (str(self.vertex_attributes[attr_name].dtype), dim) + ] + new_object.data.attributes.new( + name=attr_name_ls, type=type_key, domain="POINT" + ) + new_object.data.attributes[attr_name_ls].data.foreach_set( + ATTRTYPE_FIELDS[type_key], + AC(self.vertex_attributes[attr_name].reshape(-1)), + ) if material is not None: new_object.data.materials.append(material) - butil.put_in_collection(bpy.data.objects[name], butil.get_collection('terrain')) + butil.put_in_collection(bpy.data.objects[name], butil.get_collection("terrain")) with butil.SelectObjects(new_object): bpy.ops.object.shade_flat() return new_object - + @property def vertex_normals(self): if self.normal_mode == NormalMode.Mean: - mean_normals = trimesh.geometry.weighted_vertex_normals(len(self.vertices), self.faces, self.face_normals, np.ones((len(self.faces), 3)), use_loop=False) + mean_normals = trimesh.geometry.weighted_vertex_normals( + len(self.vertices), + self.faces, + self.face_normals, + np.ones((len(self.faces), 3)), + use_loop=False, + ) return mean_normals elif self.normal_mode == NormalMode.AngleWeighted: - w_normals = trimesh.geometry.weighted_vertex_normals(len(self.vertices), self.faces, self.face_normals, self._trimesh.face_angles, use_loop=False) + w_normals = trimesh.geometry.weighted_vertex_normals( + len(self.vertices), + self.faces, + self.face_normals, + self._trimesh.face_angles, + use_loop=False, + ) return w_normals - + def facewise_mean(self, attr): dll = load_cdll("terrain/lib/cpu/meshing/utils.so") facewise_mean = dll.facewise_mean - facewise_mean.argtypes = [POINTER(c_double), POINTER(c_int32), c_int32, POINTER(c_double)] + facewise_mean.argtypes = [ + POINTER(c_double), + POINTER(c_int32), + c_int32, + POINTER(c_double), + ] facewise_mean.restype = None result = 
AC(np.zeros(len(self.faces), dtype=np.float64)) - facewise_mean(ASDOUBLE(AC(attr.astype(np.float64))), ASINT(AC(self.faces.astype(np.int32))), len(self.faces), ASDOUBLE(result)) + facewise_mean( + ASDOUBLE(AC(attr.astype(np.float64))), + ASINT(AC(self.faces.astype(np.int32))), + len(self.faces), + ASDOUBLE(result), + ) return result - + def facewise_intmax(self, attr): dll = load_cdll("terrain/lib/cpu/meshing/utils.so") facewise_intmax = dll.facewise_intmax - facewise_intmax.argtypes = [POINTER(c_int32), POINTER(c_int32), c_int32, POINTER(c_int32)] + facewise_intmax.argtypes = [ + POINTER(c_int32), + POINTER(c_int32), + c_int32, + POINTER(c_int32), + ] facewise_intmax.restype = None result = AC(np.zeros(len(self.faces), dtype=np.int32)) - facewise_intmax(ASINT(AC(attr.astype(np.int32))), ASINT(AC(self.faces.astype(np.int32))), len(self.faces), ASINT(result)) + facewise_intmax( + ASINT(AC(attr.astype(np.int32))), + ASINT(AC(self.faces.astype(np.int32))), + len(self.faces), + ASINT(result), + ) return result def get_adjacency(self): @@ -240,12 +317,22 @@ def get_adjacency(self): @property def face_normals(self): - dll = load_cdll(f"terrain/lib/cpu/meshing/utils.so") + dll = load_cdll("terrain/lib/cpu/meshing/utils.so") compute_face_normals = dll.compute_face_normals - compute_face_normals.argtypes = [POINTER(c_double), POINTER(c_int32), c_int32, POINTER(c_double)] + compute_face_normals.argtypes = [ + POINTER(c_double), + POINTER(c_int32), + c_int32, + POINTER(c_double), + ] compute_face_normals.restype = None normals = AC(np.zeros((len(self.faces), 3), dtype=np.float64)) - compute_face_normals(ASDOUBLE(AC(self.vertices)), ASINT(AC(self.faces.astype(np.int32))), len(self.faces), ASDOUBLE(normals)) + compute_face_normals( + ASDOUBLE(AC(self.vertices)), + ASINT(AC(self.faces.astype(np.int32))), + len(self.faces), + ASDOUBLE(normals), + ) return normals def cat(meshes): @@ -259,24 +346,39 @@ def cat(meshes): for attr in mesh.vertex_attributes: if mesh.vertex_attributes[attr].ndim == 1: - mesh.vertex_attributes[attr] = mesh.vertex_attributes[attr].reshape((-1, 1)) + mesh.vertex_attributes[attr] = mesh.vertex_attributes[attr].reshape( + (-1, 1) + ) mesh_va = mesh.vertex_attributes[attr] if attr not in vertex_attributes: - va = np.zeros((lenv, mesh.vertex_attributes[attr].shape[1]), dtype=mesh.vertex_attributes[attr].dtype) + va = np.zeros( + (lenv, mesh.vertex_attributes[attr].shape[1]), + dtype=mesh.vertex_attributes[attr].dtype, + ) else: va = vertex_attributes[attr] vertex_attributes[attr] = np.concatenate((va, mesh_va)) lenv += len(mesh.vertices) - + for attr in vertex_attributes: if len(vertex_attributes[attr]) != lenv: - fillup = np.zeros((lenv - len(vertex_attributes[attr]), vertex_attributes[attr].shape[1]), dtype=vertex_attributes[attr].dtype) - vertex_attributes[attr] = np.concatenate((vertex_attributes[attr], fillup)) + fillup = np.zeros( + ( + lenv - len(vertex_attributes[attr]), + vertex_attributes[attr].shape[1], + ), + dtype=vertex_attributes[attr].dtype, + ) + vertex_attributes[attr] = np.concatenate( + (vertex_attributes[attr], fillup) + ) return Mesh(vertices=verts, faces=faces, vertex_attributes=vertex_attributes) def camera_annotation(self, cameras, fs, fe, relax=0.01): cam_poses = [] - coords_trans_matrix = np.array([[1, 0, 0, 0], [0, -1, 0, 0], [0, 0, -1, 0], [0, 0, 0, 1]]) + coords_trans_matrix = np.array( + [[1, 0, 0, 0], [0, -1, 0, 0], [0, 0, -1, 0], [0, 0, 0, 1]] + ) fc = bpy.context.scene.frame_current for f in range(fs, fe + 1): bpy.context.scene.frame_set(f) @@ 
-284,23 +386,42 @@ def camera_annotation(self, cameras, fs, fe, relax=0.01): cam_pose = np.array(cam.matrix_world) cam_pose = np.dot(np.array(cam_pose), coords_trans_matrix) cam_poses.append(cam_pose) - fov_rad = cam.data.angle + fov_rad = cam.data.angle bpy.context.scene.frame_set(fc) - - H, W = bpy.context.scene.render.resolution_y, bpy.context.scene.render.resolution_x + + H, W = ( + bpy.context.scene.render.resolution_y, + bpy.context.scene.render.resolution_x, + ) fov0 = np.arctan(H / 2 / (W / 2 / np.tan(fov_rad / 2))) * 2 fov = (fov0, fov_rad) K = getK(fov, H, W) - + self.vertex_attributes["invisible"] = np.zeros(len(self.vertices), bool) - + for cam_pose in cam_poses: - coords = np.matmul(K, np.matmul(np.linalg.inv(cam_pose), np.concatenate((self.vertices.transpose(), np.ones((1, len(self.vertices)))), 0))[:3, :]) + coords = np.matmul( + K, + np.matmul( + np.linalg.inv(cam_pose), + np.concatenate( + (self.vertices.transpose(), np.ones((1, len(self.vertices)))), 0 + ), + )[:3, :], + ) coords[:2, :] /= coords[2] - self.vertex_attributes["invisible"] |= ((coords[2] > 0) & (coords[0] > -relax * W) & (coords[0] < (1 + relax) * W) & (coords[1] > -relax * H) & (coords[1] < (1 + relax) * H)) - - self.vertex_attributes["invisible"] = (~self.vertex_attributes["invisible"]).astype(np.float32) - + self.vertex_attributes["invisible"] |= ( + (coords[2] > 0) + & (coords[0] > -relax * W) + & (coords[0] < (1 + relax) * W) + & (coords[1] > -relax * H) + & (coords[1] < (1 + relax) * H) + ) + + self.vertex_attributes["invisible"] = ( + ~self.vertex_attributes["invisible"] + ).astype(np.float32) + def move_modifier(target_obj, m): with Timer(f"copying {m.name}"): @@ -309,12 +430,13 @@ def move_modifier(target_obj, m): for i, inp in enumerate(modifier.node_group.inputs): if i > 0: id = inp.identifier - modifier[f'{id}_attribute_name'] = inp.name - modifier[f'{id}_use_attribute'] = True + modifier[f"{id}_attribute_name"] = inp.name + modifier[f"{id}_use_attribute"] = True for i, outp in enumerate(modifier.node_group.outputs): if i > 0: id = outp.identifier - modifier[f'{id}_attribute_name'] = m[f'{id}_attribute_name'] + modifier[f"{id}_attribute_name"] = m[f"{id}_attribute_name"] + def write_attributes(elements, mesh=None, meshes=[]): n_elements = len(elements) @@ -324,16 +446,21 @@ def write_attributes(elements, mesh=None, meshes=[]): for element in elements: ret = element(mesh.vertices) returns.append(ret) - surface_element = np.stack([ret[Vars.SDF] for ret in returns], -1).argmin(axis=-1) + surface_element = np.stack([ret[Vars.SDF] for ret in returns], -1).argmin( + axis=-1 + ) attributes = {} for i in range(n_elements): if hasattr(elements[i], "tag"): - returns[i][Attributes.ElementTag] = np.zeros(N, dtype=np.int32) + elements[i].tag + returns[i][Attributes.ElementTag] = ( + np.zeros(N, dtype=np.int32) + elements[i].tag + ) for output in returns[i]: - if output == Vars.SDF or output == Vars.Offset: continue + if output == Vars.SDF or output == Vars.Offset: + continue if returns[i][output].ndim == 1: - returns[i][output] *= (surface_element == i) + returns[i][output] *= surface_element == i else: returns[i][output] *= (surface_element == i).reshape((-1, 1)) @@ -343,16 +470,19 @@ def write_attributes(elements, mesh=None, meshes=[]): attributes[output] += returns[i][output] mesh.vertex_attributes = attributes if meshes != []: - assert(len(meshes) == n_elements) + assert len(meshes) == n_elements for i in range(n_elements): N = len(meshes[i].vertices) - if N == 0: continue + if N == 0: + continue returns 
= elements[i](meshes[i].vertices) attributes = {} for output in returns: - if output == Vars.SDF or output == Vars.Offset: continue + if output == Vars.SDF or output == Vars.Offset: + continue attributes[output] = returns[output] if hasattr(elements[i], "tag"): - attributes[Attributes.ElementTag] = np.zeros(N, dtype=np.int32) + elements[i].tag + attributes[Attributes.ElementTag] = ( + np.zeros(N, dtype=np.int32) + elements[i].tag + ) meshes[i].vertex_attributes = attributes - diff --git a/infinigen/terrain/utils/random.py b/infinigen/terrain/utils/random.py index 608664902..f3940b59c 100644 --- a/infinigen/terrain/utils/random.py +++ b/infinigen/terrain/utils/random.py @@ -4,15 +4,16 @@ # Authors: Zeyu Ma +import random from ctypes import POINTER, c_float, c_int32, c_size_t import bpy -import random import numpy as np from numpy import ascontiguousarray as AC from .ctype_util import ASFLOAT, load_cdll + def random_int(): return np.random.randint(np.iinfo(np.int32).min, np.iinfo(np.int32).max) @@ -29,23 +30,26 @@ def chance(x): return np.random.uniform() < x -def perlin_noise( - positions, - device, - freq, - octaves, - seed -): +def perlin_noise(positions, device, freq, octaves, seed): dll = load_cdll(f"terrain/lib/{device}/utils/FastNoiseLite.so") func = dll.perlin_call - func.argtypes = [c_size_t, POINTER(c_float), POINTER(c_float), c_int32, c_int32, c_float] + func.argtypes = [ + c_size_t, + POINTER(c_float), + POINTER(c_float), + c_int32, + c_int32, + c_float, + ] func.restype = None values = np.zeros(len(positions), dtype=np.float32) func( len(positions), ASFLOAT(AC(positions.astype(np.float32))), ASFLOAT(values), - seed, octaves, freq, + seed, + octaves, + freq, ) del dll return values @@ -53,6 +57,7 @@ def perlin_noise( def drive_param(parameter, scale=1, offset=0, index=None, name="default_value"): driver = parameter.driver_add(name) - if index is not None: driver = driver[index] - driver.driver.expression = f'frame*{scale}+{offset}' + if index is not None: + driver = driver[index] + driver.driver.expression = f"frame*{scale}+{offset}" bpy.context.view_layer.update() diff --git a/infinigen/tools/blendscript_import_infinigen.py b/infinigen/tools/blendscript_import_infinigen.py index 428ee5e14..a7a8d0a43 100644 --- a/infinigen/tools/blendscript_import_infinigen.py +++ b/infinigen/tools/blendscript_import_infinigen.py @@ -5,37 +5,42 @@ # Authors: Alexander Raistrick -''' +""" Copy this file into blender's scripting window and run it whenever you open a new blender instance. It will configure the sys.path and load gin. This is necessary before any other procgen files can be imported/used within blender. 
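For context on the `drive_param` helper touched in `infinigen/terrain/utils/random.py` above: it installs a Blender driver whose expression is `frame*scale+offset`, i.e. it animates a parameter as a linear function of the current frame. A minimal usage sketch follows; it assumes Blender's `bpy` is available and that a material named "Water" with a Principled BSDF node exists — both are illustrative assumptions, not anything shipped by this patch.

```python
import bpy

from infinigen.terrain.utils.random import drive_param

# Hypothetical material/node, used purely for illustration.
mat = bpy.data.materials["Water"]
bsdf = mat.node_tree.nodes["Principled BSDF"]

# After this call, the socket's value evaluates to `frame*0.01+0.2`,
# which is exactly the driver expression drive_param() writes.
drive_param(bsdf.inputs["Roughness"], scale=0.01, offset=0.2)
```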
-Once this is done, you can do things like `from infinigen.assets.creatures.util.genomes.carnivore import CarnivoreFactory` then `CarnivoreFactory(0).spawn_asset(0)` directly in the blender commandline -''' +Once this is done, you can do things like `from infinigen.assets.objects.creatures.util.genomes.carnivore import CarnivoreFactory` then `CarnivoreFactory(0).spawn_asset(0)` directly in the blender commandline +""" # ruff: noqa -import bpy -from pathlib import Path -import sys, os import logging +import os +import sys +from pathlib import Path + +import bpy pwd = os.getcwd() sys.path.append(str(Path(__file__).parent.parent.parent)) - + import gin + gin.clear_config() gin.enter_interactive_mode() from infinigen.core import init, surface -from infinigen_examples import generate_nature +from infinigen_examples import generate_nature -init.apply_gin_configs(Path(pwd)/'infinigen_examples/configs_nature', ['base.gin'], skip_unknown=True) +init.apply_gin_configs( + Path(pwd) / "infinigen_examples/configs_nature", ["base.gin"], skip_unknown=True +) surface.registry.initialize_from_gin() logging.basicConfig( - format='[%(asctime)s.%(msecs)03d] [%(name)s] [%(levelname)s] | %(message)s', - datefmt='%H:%M:%S', - level=logging.WARNING + format="[%(asctime)s.%(msecs)03d] [%(name)s] [%(levelname)s] | %(message)s", + datefmt="%H:%M:%S", + level=logging.WARNING, ) -logging.getLogger("infinigen").setLevel(logging.DEBUG) \ No newline at end of file +logging.getLogger("infinigen").setLevel(logging.DEBUG) diff --git a/infinigen/tools/blendscript_path_append.py b/infinigen/tools/blendscript_path_append.py index 06cae49a5..c85fa7f37 100644 --- a/infinigen/tools/blendscript_path_append.py +++ b/infinigen/tools/blendscript_path_append.py @@ -4,6 +4,8 @@ # Authors: Alexander Raistrick -import os, sys +import os +import sys + pwd = os.getcwd() -sys.path.append(pwd) \ No newline at end of file +sys.path.append(pwd) diff --git a/infinigen/tools/compress_masks.py b/infinigen/tools/compress_masks.py index 36432d59c..ef2be379c 100644 --- a/infinigen/tools/compress_masks.py +++ b/infinigen/tools/compress_masks.py @@ -12,10 +12,13 @@ def show(x): return f"({x.shape} {x.dtype} {x.max()})" + def compress(arr): H, W, *_ = arr.shape - vals, indices = np.unique(np.squeeze(arr.reshape((H * W, -1))), return_inverse=True, axis=0) - max_ind = (vals.shape[0] - 1) + vals, indices = np.unique( + np.squeeze(arr.reshape((H * W, -1))), return_inverse=True, axis=0 + ) + max_ind = vals.shape[0] - 1 if max_ind < 2**8: indices = indices.astype(np.uint8) elif max_ind < 2**16: @@ -24,8 +27,10 @@ def compress(arr): indices = indices.astype(np.uint32) return dict(vals=vals, indices=indices, shape=np.asarray(arr.shape)) + def recover(d): - return d['vals'][d['indices']].reshape(d['shape']) + return d["vals"][d["indices"]].reshape(d["shape"]) + if __name__ == "__main__": parser = argparse.ArgumentParser() @@ -40,4 +45,4 @@ def recover(d): assert show(arr) == show(recover(d)) np.savez(file_path.with_suffix(".npz"), **d) print(f"{file_path} -> {file_path.with_suffix('.npz')}") - file_path.unlink() \ No newline at end of file + file_path.unlink() diff --git a/infinigen/tools/convert_displacement.py b/infinigen/tools/convert_displacement.py index 2e28c108d..685c99c26 100644 --- a/infinigen/tools/convert_displacement.py +++ b/infinigen/tools/convert_displacement.py @@ -4,11 +4,13 @@ # Authors: David Yan import bpy + from infinigen.core.nodes.node_wrangler import geometry_node_group_empty_new visited_nodes = [] -def find_connected(node, 
origin_socket_index, tree_origin_socket = False): #WIP, unused + +def find_connected(node, origin_socket_index, tree_origin_socket=False): # WIP, unused if node in visited_nodes: return visited_nodes.append(node) @@ -25,8 +27,9 @@ def find_connected(node, origin_socket_index, tree_origin_socket = False): #WIP, for link in node.inputs[index].links: from_node = link.from_node find_connected(from_node, from_node.outputs[:].index(link.from_socket)) - -def remove_unconnected(node_tree): #WIP, unused + + +def remove_unconnected(node_tree): # WIP, unused nodes = node_tree.nodes for node in nodes: if node not in visited_nodes: @@ -35,13 +38,14 @@ def remove_unconnected(node_tree): #WIP, unused if node.type == "GROUP": remove_unconnected(node.node_tree) -def copy_nodes(shader_node_tree, geo_node_tree): #WIP, unused + +def copy_nodes(shader_node_tree, geo_node_tree): # WIP, unused shader_nodes = shader_node_tree.nodes for shader_node in shader_nodes: if shader_node.type == "GROUP": geo_node = geo_node_tree.nodes.new("GeometryNodeGroup") copy_nodes(geo_node.node_tree, shader_node.node_tree) - else: + else: try: geo_node = geo_node_tree.nodes.new(shader_node.bl_idname) geo_node.location = shader_node.location @@ -51,47 +55,55 @@ def copy_nodes(shader_node_tree, geo_node_tree): #WIP, unused def bake_vertex_colors(obj): - bpy.context.scene.render.engine = 'CYCLES' + bpy.context.scene.render.engine = "CYCLES" bpy.context.scene.cycles.device = "GPU" bpy.context.scene.cycles.samples = 1 obj.select_set(True) - bpy.context.view_layer.objects.active = obj - vertColor = bpy.context.object.data.color_attributes.new(name="Displacement",domain='POINT',type='FLOAT_COLOR') + bpy.context.view_layer.objects.active = obj + vertColor = bpy.context.object.data.color_attributes.new( + name="Displacement", domain="POINT", type="FLOAT_COLOR" + ) bpy.context.object.data.attributes.active_color = vertColor - bpy.ops.object.bake(type='EMIT', pass_filter={'COLOR'}, target ='VERTEX_COLORS') + bpy.ops.object.bake(type="EMIT", pass_filter={"COLOR"}, target="VERTEX_COLORS") obj.select_set(False) -def create_modifier(obj, scale_val, apply_geo_modifier ): + +def create_modifier(obj, scale_val, apply_geo_modifier): modifier = obj.modifiers.new("Displacement", "NODES") modifier.node_group = geometry_node_group_empty_new() nodes = modifier.node_group.nodes - normal = nodes.new(type = 'GeometryNodeInputNormal') - attribute = nodes.new(type = 'GeometryNodeInputNamedAttribute') + normal = nodes.new(type="GeometryNodeInputNormal") + attribute = nodes.new(type="GeometryNodeInputNamedAttribute") attribute.data_type = "FLOAT_COLOR" attribute.inputs[0].default_value = "Displacement" - set_pos = nodes.new(type = 'GeometryNodeSetPosition') - mult = nodes.new(type = 'ShaderNodeVectorMath') - mult.operation = 'MULTIPLY' - scale = nodes.new(type = 'ShaderNodeVectorMath') - scale.operation = 'SCALE' + set_pos = nodes.new(type="GeometryNodeSetPosition") + mult = nodes.new(type="ShaderNodeVectorMath") + mult.operation = "MULTIPLY" + scale = nodes.new(type="ShaderNodeVectorMath") + scale.operation = "SCALE" scale.inputs["Scale"].default_value = scale_val output = nodes["Group Output"] input = nodes["Group Input"] modifier.node_group.links.new(input.outputs["Geometry"], set_pos.inputs["Geometry"]) - modifier.node_group.links.new(attribute.outputs[2], mult.inputs[0]) # index 2 must be hardcoded + modifier.node_group.links.new( + attribute.outputs[2], mult.inputs[0] + ) # index 2 must be hardcoded modifier.node_group.links.new(normal.outputs["Normal"], 
mult.inputs[1]) modifier.node_group.links.new(mult.outputs["Vector"], scale.inputs["Vector"]) modifier.node_group.links.new(scale.outputs["Vector"], set_pos.inputs["Offset"]) - modifier.node_group.links.new(set_pos.outputs["Geometry"], output.inputs["Geometry"]) + modifier.node_group.links.new( + set_pos.outputs["Geometry"], output.inputs["Geometry"] + ) if apply_geo_modifier: obj.select_set(True) - bpy.context.view_layer.objects.active = obj + bpy.context.view_layer.objects.active = obj bpy.ops.object.modifier_apply(modifier="Displacement") obj.select_set(False) -def convert_shader_displacement(obj, apply_geo_modifier = True): + +def convert_shader_displacement(obj, apply_geo_modifier=True): displaced_materials = {} for slot in obj.material_slots: @@ -104,9 +116,11 @@ def convert_shader_displacement(obj, apply_geo_modifier = True): bsdf_link = nodes["Material Output"].inputs["Surface"].links[0] bsdf_socket = bsdf_link.from_socket mat.node_tree.links.remove(displacement_link) - mat.node_tree.links.new(displace_socket, nodes["Material Output"].inputs["Surface"]) + mat.node_tree.links.new( + displace_socket, nodes["Material Output"].inputs["Surface"] + ) displaced_materials[mat] = bsdf_socket - + if len(displaced_materials) != 0: bake_vertex_colors(obj) create_modifier(obj, scale_val, apply_geo_modifier) @@ -114,11 +128,6 @@ def convert_shader_displacement(obj, apply_geo_modifier = True): for mat in displaced_materials: mat = slot.material mat.node_tree.links.remove(nodes["Material Output"].inputs["Surface"].links[0]) - mat.node_tree.links.new(displaced_materials[mat], nodes["Material Output"].inputs["Surface"]) - - - - - - - + mat.node_tree.links.new( + displaced_materials[mat], nodes["Material Output"].inputs["Surface"] + ) diff --git a/infinigen/tools/datarelease_toolkit.py b/infinigen/tools/datarelease_toolkit.py index 3bb5a2b12..fe15df2d5 100644 --- a/infinigen/tools/datarelease_toolkit.py +++ b/infinigen/tools/datarelease_toolkit.py @@ -3,73 +3,62 @@ # Authors: Alexander Raistrick -import os -from pathlib import Path import argparse -from tqdm import tqdm -from multiprocessing import Pool -from functools import partial -import subprocess -import shutil import json -from itertools import product -from copy import copy, deepcopy -import pdb +import shutil +import subprocess import tarfile +from copy import deepcopy +from functools import partial +from multiprocessing import Pool +from pathlib import Path +import cv2 import imageio import numpy as np -import cv2 +from tqdm import tqdm from infinigen.datagen.util import smb_client -from infinigen.datagen.states import parse_suffix, get_suffix +from infinigen.tools.suffixes import get_suffix, parse_suffix -from . import dataset_loader -from . import compress_masks +from . 
import compress_masks, dataset_loader -TOOLKIT_VERSION = '0.2.0' +TOOLKIT_VERSION = "0.2.0" IMAGE_RESIZE_ACTIONS = { - - ('Image', '.png', 'SINGLE_RES', 'INTER_LINEAR'), - - ('camview', '.npz', 'NO_ACTION', 'NO_INTERP'), - - ('Depth', '.npy', 'DOUBLE_RES', 'INTER_NEAREST'), - ('Depth', '.png', 'DOUBLE_RES', 'INTER_LINEAR'), - ('SurfaceNormal', '.npy', 'SINGLE_RES', 'INTER_NEAREST'), - ('SurfaceNormal', '.png', 'SINGLE_RES', 'INTER_LINEAR'), - - ('InstanceSegmentation', '.npz', 'SINGLE_RES', 'NPZ_INTER_NEAREST'), - ('InstanceSegmentation', '.png', 'SINGLE_RES', 'INTER_NEAREST'), - ('ObjectSegmentation', '.npz', 'SINGLE_RES', 'NPZ_INTER_NEAREST'), - ('ObjectSegmentation', '.png', 'SINGLE_RES', 'INTER_NEAREST'), - ('TagSegmentation', '.npz', 'SINGLE_RES', 'NPZ_INTER_NEAREST'), - ('TagSegmentation', '.png', 'SINGLE_RES', 'INTER_NEAREST'), - - ('Objects', '.json', 'COMPRESS_JSON', 'NO_INTERP'), - - ('Flow3D_', '.npy', 'SINGLE_RES', 'INTER_NEAREST'), - ('Flow3D_', '.png', 'SINGLE_RES', 'INTER_LINEAR'), - ('Flow3DMask', '.png', 'SINGLE_RES', 'MASK_POOL'), - - ('OcclusionBoundaries', '.png', 'ORIG_RES', 'NO_INTERP'), - - ('AO', '.png', 'SINGLE_RES', 'NO_INTERP'), - ('DiffCol', '.png', 'SINGLE_RES', 'NO_INTERP'), - ('DiffDir', '.png', 'SINGLE_RES', 'NO_INTERP'), - ('DiffInd', '.png', 'SINGLE_RES', 'NO_INTERP'), - ('Emit', '.png', 'SINGLE_RES', 'NO_INTERP'), - ('Env', '.png', 'SINGLE_RES', 'NO_INTERP'), - ('GlossCol', '.png', 'SINGLE_RES', 'NO_INTERP'), - ('GlossDir', '.png', 'SINGLE_RES', 'NO_INTERP'), - ('GlossInd', '.png', 'SINGLE_RES', 'NO_INTERP'), - ('TransCol', '.png', 'SINGLE_RES', 'NO_INTERP'), - ('TransDir', '.png', 'SINGLE_RES', 'NO_INTERP'), - ('TransInd', '.png', 'SINGLE_RES', 'NO_INTERP'), - ('VolumeDir', '.png', 'SINGLE_RES', 'NO_INTERP'), + ("Image", ".png", "SINGLE_RES", "INTER_LINEAR"), + ("camview", ".npz", "NO_ACTION", "NO_INTERP"), + ("Depth", ".npy", "DOUBLE_RES", "INTER_NEAREST"), + ("Depth", ".png", "DOUBLE_RES", "INTER_LINEAR"), + ("SurfaceNormal", ".npy", "SINGLE_RES", "INTER_NEAREST"), + ("SurfaceNormal", ".png", "SINGLE_RES", "INTER_LINEAR"), + ("InstanceSegmentation", ".npz", "SINGLE_RES", "NPZ_INTER_NEAREST"), + ("InstanceSegmentation", ".png", "SINGLE_RES", "INTER_NEAREST"), + ("ObjectSegmentation", ".npz", "SINGLE_RES", "NPZ_INTER_NEAREST"), + ("ObjectSegmentation", ".png", "SINGLE_RES", "INTER_NEAREST"), + ("TagSegmentation", ".npz", "SINGLE_RES", "NPZ_INTER_NEAREST"), + ("TagSegmentation", ".png", "SINGLE_RES", "INTER_NEAREST"), + ("Objects", ".json", "COMPRESS_JSON", "NO_INTERP"), + ("Flow3D_", ".npy", "SINGLE_RES", "INTER_NEAREST"), + ("Flow3D_", ".png", "SINGLE_RES", "INTER_LINEAR"), + ("Flow3DMask", ".png", "SINGLE_RES", "MASK_POOL"), + ("OcclusionBoundaries", ".png", "ORIG_RES", "NO_INTERP"), + ("AO", ".png", "SINGLE_RES", "NO_INTERP"), + ("DiffCol", ".png", "SINGLE_RES", "NO_INTERP"), + ("DiffDir", ".png", "SINGLE_RES", "NO_INTERP"), + ("DiffInd", ".png", "SINGLE_RES", "NO_INTERP"), + ("Emit", ".png", "SINGLE_RES", "NO_INTERP"), + ("Env", ".png", "SINGLE_RES", "NO_INTERP"), + ("GlossCol", ".png", "SINGLE_RES", "NO_INTERP"), + ("GlossDir", ".png", "SINGLE_RES", "NO_INTERP"), + ("GlossInd", ".png", "SINGLE_RES", "NO_INTERP"), + ("TransCol", ".png", "SINGLE_RES", "NO_INTERP"), + ("TransDir", ".png", "SINGLE_RES", "NO_INTERP"), + ("TransInd", ".png", "SINGLE_RES", "NO_INTERP"), + ("VolumeDir", ".png", "SINGLE_RES", "NO_INTERP"), } + def mapfunc(f, its, n_workers): if n_workers == 1: return [f(i) for i in its] @@ -77,131 +66,130 @@ def mapfunc(f, its, n_workers): 
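As a point of reference for the `NPZ_INTER_NEAREST` entries above: the segmentation `.npz` files are stored with the lookup-table compression from `infinigen/tools/compress_masks.py` (shown earlier in this patch) and round-trip losslessly through `compress`/`recover`. A minimal sketch, assuming `infinigen` is pip-installed; the random mask and the output filename are illustrative only.

```python
import numpy as np

from infinigen.tools import compress_masks

# Illustrative stand-in for an (H, W, 2) instance-segmentation mask.
mask = np.random.randint(0, 7, size=(480, 640, 2), dtype=np.int64)

d = compress_masks.compress(mask)  # dict with 'vals', 'indices', 'shape'
np.savez("InstanceSegmentation_example.npz", **d)

restored = compress_masks.recover(dict(np.load("InstanceSegmentation_example.npz")))
assert (restored == mask).all()
```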
with Pool(n_workers) as p: return list(tqdm(p.imap(f, its), total=len(its))) -def download_except_already_present(smb_path, local_path, min_date_str, n_workers=1, verbose=False): - + +def download_except_already_present( + smb_path, local_path, min_date_str, n_workers=1, verbose=False +): remote_paths = list(smb_client.listdir(smb_path)) local_names = set(f.name for f in local_path.iterdir()) to_download = [f for f in remote_paths if f.name not in local_names] - + download_func = partial( - smb_client.download, - dest_folder=local_path, - verbose=verbose + smb_client.download, dest_folder=local_path, verbose=verbose ) mapfunc(download_func, to_download, n_workers=n_workers) + def untar(p): - command = f'tar -xf {str(p)} -C {str(p.parent)} --one-top-level' - dest_path = p.parent/(p.name.split('.')[0]) + command = f"tar -xf {str(p)} -C {str(p.parent)} --one-top-level" + dest_path = p.parent / (p.name.split(".")[0]) assert not dest_path.exists() - print('Untarring', p, ' --> ', dest_path) + print("Untarring", p, " --> ", dest_path) subprocess.run(command, check=True, shell=True) return dest_path -def cleanup(p): - for folder in p.glob('frames_*'): +def cleanup(p): + for folder in p.glob("frames_*"): for path in folder.iterdir(): - if path.name.endswith('_displacement.npy'): + if path.name.endswith("_displacement.npy"): path.unlink() - if path.name == 'assets': + if path.name == "assets": path.unlink() - if path.suffix == '.glb': + if path.suffix == ".glb": path.unlink() -def fix_scene_structure(p, n_subcams): +def fix_scene_structure(p, n_subcams): def parse_pre_1_1_suffix(suffix): - stem, ext = suffix.split('.') - parts = stem.split('_') + stem, ext = suffix.split(".") + parts = stem.split("_") assert len(parts) == 3 return dict( - frame=int(parts[0]), - cam_rig=int(parts[1]), - subcam=int(parts[2]), - resample=0 + frame=int(parts[0]), cam_rig=int(parts[1]), subcam=int(parts[2]), resample=0 ) - + def move(a, b): assert b.parent.exists() - #print(a, ' --> ', b) + # print(a, ' --> ', b) shutil.move(a, b) def postprocess_image(img_path, frame_folder, subcam): - dtype, *parts = img_path.name.split('_') - rest = '_'.join(parts) + dtype, *parts = img_path.name.split("_") + rest = "_".join(parts) - if len(rest.split('_')) == 3: + if len(rest.split("_")) == 3: keys = parse_pre_1_1_suffix(rest) new_img_name = dtype + get_suffix(keys) - new_img_path = img_path.parent/(new_img_name + img_path.suffix) + new_img_path = img_path.parent / (new_img_name + img_path.suffix) move(img_path, new_img_path) img_path = new_img_path img_keys = parse_suffix(img_path) - if img_keys['subcam'] != subcam: + if img_keys["subcam"] != subcam: keys = parse_suffix(frame_folder) - keys['subcam'] = img_keys['subcam'] - new_folder_name = 'frames' + get_suffix(keys) - new_frames_folder = frame_folder.parent/new_folder_name - new_img_path = new_frames_folder/img_path.name + keys["subcam"] = img_keys["subcam"] + new_folder_name = "frames" + get_suffix(keys) + new_frames_folder = frame_folder.parent / new_folder_name + new_img_path = new_frames_folder / img_path.name move(img_path, new_img_path) img_path = new_img_path cleanup(p) - + for subcam in range(n_subcams): - cam_frames_folders = sorted(list(p.glob(f'frames*_{subcam}'))) + cam_frames_folders = sorted(list(p.glob(f"frames*_{subcam}"))) for frame_folder in cam_frames_folders: for img_path in frame_folder.iterdir(): postprocess_image(img_path, frame_folder, subcam) - -def fix_metadata(p, override_version='1.0.4b'): - metadata_path = p.parent/(p.name+'_metadata.json') + +def 
fix_metadata(p, override_version="1.0.4b"): + metadata_path = p.parent / (p.name + "_metadata.json") if not metadata_path.exists(): - print(f'{p} is missing {metadata_path=}') + print(f"{p} is missing {metadata_path=}") return False - - with metadata_path.open('r') as f: + + with metadata_path.open("r") as f: metadata = json.load(f) - + if override_version is not None: - metadata['version'] = override_version + metadata["version"] = override_version - with metadata_path.open('w') as f: + with metadata_path.open("w") as f: json.dump(metadata, f) + def optimize_json_inplace(json_path): - with json_path.open('r') as f: + with json_path.open("r") as f: data = json.load(f, parse_float=lambda x: round(float(x), 6)) json_path.unlink() - with json_path.open('w') as f: + with json_path.open("w") as f: data = json.dump(data, f, indent=0) -def resize_inplace(img_path, target_shape, interp_method, npz_prefix='NPZ_'): +def resize_inplace(img_path, target_shape, interp_method, npz_prefix="NPZ_"): match img_path.suffix: - case '.png': + case ".png": img = imageio.imread(img_path) - case '.npy': + case ".npy": img = np.load(img_path) - case '.npz': + case ".npz": assert interp_method.startswith(npz_prefix) img = np.load(img_path) case suffix: - raise ValueError(f'Unrecognized {suffix=}') + raise ValueError(f"Unrecognized {suffix=}") using_npz_compression = interp_method.startswith(npz_prefix) if using_npz_compression: - interp_method = interp_method[len(npz_prefix):] + interp_method = interp_method[len(npz_prefix) :] img = compress_masks.recover(img) curr_shape = np.array(img.shape[:2]) - + if np.any(target_shape > curr_shape): raise ValueError(img_path, curr_shape, target_shape) @@ -209,120 +197,128 @@ def resize_inplace(img_path, target_shape, interp_method, npz_prefix='NPZ_'): return match interp_method: - case 'INTER_LINEAR' | 'INTER_NEAREST': - img = cv2.resize(img, target_shape[::-1], interpolation=getattr(cv2, interp_method)) - case 'MASK_POOL': - interp = cv2.resize((img.astype('float') / 255), target_shape[::-1], cv2.INTER_LINEAR) + case "INTER_LINEAR" | "INTER_NEAREST": + img = cv2.resize( + img, target_shape[::-1], interpolation=getattr(cv2, interp_method) + ) + case "MASK_POOL": + interp = cv2.resize( + (img.astype("float") / 255), target_shape[::-1], cv2.INTER_LINEAR + ) img = (255 * (interp > 0.01)).astype(img.dtype) case _: - raise ValueError(f'Unrecognized {interp_method=}') - + raise ValueError(f"Unrecognized {interp_method=}") + if using_npz_compression: img = compress_masks.compress(img) match img_path.suffix: - case '.png': + case ".png": imageio.imwrite(img_path, img) - case '.npy': + case ".npy": np.save(img_path, img) - case '.npz': + case ".npz": np.savez(img_path, **dict(img)) case _: - raise ValueError(f'{img_path.suffix=}') + raise ValueError(f"{img_path.suffix=}") -def optimize_groundtruth_filesize(scene_folder): - frames_folders = sorted(list(scene_folder.glob('frames_*'))) +def optimize_groundtruth_filesize(scene_folder): + frames_folders = sorted(list(scene_folder.glob("frames_*"))) if len(frames_folders) == 0: - raise ValueError(f'Couldnt find frames_* in {scene_folder}') + raise ValueError(f"Couldnt find frames_* in {scene_folder}") - first_folder_image_paths = list(frames_folders[0].glob('Image*.png')) + first_folder_image_paths = list(frames_folders[0].glob("Image*.png")) base_img_res = np.array(imageio.imread(first_folder_image_paths[0]).shape[:2]) for frames_folder in frames_folders: for dtype, ext, action, interp_method in IMAGE_RESIZE_ACTIONS: - targets = 
sorted(list(frames_folder.glob(f'{dtype}*{ext}'))) - + targets = sorted(list(frames_folder.glob(f"{dtype}*{ext}"))) + if len(targets) != len(first_folder_image_paths): - raise ValueError(f'Found incorrect {len(targets)=} for {dtype=} {ext=} in {frames_folder=}, expected {len(first_folder_image_paths)}') + raise ValueError( + f"Found incorrect {len(targets)=} for {dtype=} {ext=} in {frames_folder=}, expected {len(first_folder_image_paths)}" + ) for target_path in targets: - print(target_path.relative_to(scene_folder.parent), action, interp_method) + print( + target_path.relative_to(scene_folder.parent), action, interp_method + ) match action: - case 'SINGLE_RES': + case "SINGLE_RES": resize_inplace(target_path, base_img_res, interp_method) - case 'DOUBLE_RES': + case "DOUBLE_RES": resize_inplace(target_path, base_img_res * 2, interp_method) - case 'ORIG_RES': + case "ORIG_RES": pass - case 'COMPRESS_JSON': - assert interp_method == 'NO_INTERP' + case "COMPRESS_JSON": + assert interp_method == "NO_INTERP" optimize_json_inplace(target_path) - case 'NO_ACTION': - assert interp_method == 'NO_INTERP' + case "NO_ACTION": + assert interp_method == "NO_INTERP" case _: - raise ValueError(f'Unrecognized {action=}') + raise ValueError(f"Unrecognized {action=}") -def parse_jobscene_path(args): +def parse_jobscene_path(args): root = args.smb_root if args.jobscene_path is None: for d in smb_client.listdir(root): - for f in smb_client.globdir(d/'*.tar.gz'): + for f in smb_client.globdir(d / "*.tar.gz"): yield f.relative_to(root) - elif len(args.jobscene_path.parts) == 1: - d = root/args.jobscene_path - for f in smb_client.globdir(d/'*.tar.gz'): + elif len(args.jobscene_path.parts) == 1: + d = root / args.jobscene_path + for f in smb_client.globdir(d / "*.tar.gz"): yield f.relative_to(root) elif len(args.jobscene_path.parts) == 2: yield args.jobscene_path else: - raise ValueError(f'Unrecognized {args.jobscene_path=}') + raise ValueError(f"Unrecognized {args.jobscene_path=}") -def cleanup_smb(smb_parent_folder): +def cleanup_smb(smb_parent_folder): for f1, ftype, *_ in smb_client.listdir(smb_parent_folder, extras=True): - if ftype != 'D': - print(f'Ignoring {f1}, not a directory') + if ftype != "D": + print(f"Ignoring {f1}, not a directory") continue files = list(smb_client.listdir(f1)) n_files = len(files) if n_files <= 1: - print(f'Removing {f1} {n_files}') + print(f"Removing {f1} {n_files}") smb_client.remove(f1) else: - print(f'Keeping {f1} {n_files}') + print(f"Keeping {f1} {n_files}") + def fix_missing_camviewdata(local_folder, dummy): - camdata = deepcopy(dummy) - del camdata['T'] - camdata['baseline'] = 0.075 + del camdata["T"] + camdata["baseline"] = 0.075 - for frames_folder in local_folder.glob('frames*'): - for image_path in frames_folder.glob('Image*.png'): + for frames_folder in local_folder.glob("frames*"): + for image_path in frames_folder.glob("Image*.png"): idxs = parse_suffix(image_path.name) - outpath = image_path.parent/('camview' + get_suffix(idxs) + '.npz') + outpath = image_path.parent / ("camview" + get_suffix(idxs) + ".npz") if outpath.exists(): continue - with outpath.open('wb') as f: + with outpath.open("wb") as f: np.savez(f, camdata) + def reorganize_old_framesfolder(frames_old): - frames_old = Path(frames_old) for p in frames_old.iterdir(): if p.is_symlink(): p.unlink() - frames_dest = frames_old.parent/"frames" + frames_dest = frames_old.parent / "frames" excludes = [ "version.txt", @@ -336,98 +332,104 @@ def reorganize_old_framesfolder(frames_old): for img_path in 
frames_old.iterdir(): if img_path.is_dir() or img_path.name in excludes: continue - dtype, *_ = img_path.name.split('_') + dtype, *_ = img_path.name.split("_") idxs = parse_suffix(img_path.name) - new_path = frames_dest/dtype/f"camera_{idxs['subcam']}"/img_path.name + new_path = frames_dest / dtype / f"camera_{idxs['subcam']}" / img_path.name new_path.parent.mkdir(exist_ok=True, parents=True) shutil.move(img_path, new_path) if frames_dest != frames_old: frames_old.rmdir() -def fix_frames_folderstructure(p): +def fix_frames_folderstructure(p): p = args.local_path / p - for frames_old in p.glob('frames_*'): - reorganize_old_framesfolder(frames_old) + for frames_old in p.glob("frames_*"): + reorganize_old_framesfolder(frames_old) - for savemesh in p.glob('savemesh_*'): + for savemesh in p.glob("savemesh_*"): shutil.rmtree(savemesh) for folder in p.iterdir(): if not folder.is_dir(): continue - for f in folder.glob('*b_displacement.npy'): + for f in folder.glob("*b_displacement.npy"): f.unlink() -def retar_for_distribution(local_folder, distrib_path): +def retar_for_distribution(local_folder, distrib_path): seed = local_folder.name - frames_folder = (local_folder/'frames') + frames_folder = local_folder / "frames" for dtype_folder in frames_folder.iterdir(): for camera_folder in dtype_folder.iterdir(): exts = set(p.suffix for p in camera_folder.iterdir()) for ext in exts: - tar_path = distrib_path/seed/f"{seed}_{dtype_folder.name}_{ext.strip('.')}_{camera_folder.name}.tar.gz" + tar_path = ( + distrib_path + / seed + / f"{seed}_{dtype_folder.name}_{ext.strip('.')}_{camera_folder.name}.tar.gz" + ) if tar_path.exists(): continue tar_path.parent.mkdir(exist_ok=True, parents=True) - print(f'Creating {tar_path}') - with tarfile.open(tar_path, 'w:gz') as f: - for img in camera_folder.glob(f'*{ext}'): + print(f"Creating {tar_path}") + with tarfile.open(tar_path, "w:gz") as f: + for img in camera_folder.glob(f"*{ext}"): f.add(img, arcname=img.relative_to(local_folder.parent)) -def process_one_scene(p, args): - stem = p.name.split('.')[0] +def process_one_scene(p, args): + stem = p.name.split(".")[0] scene_files = [ p, - p.parent/f'{stem}_metadata.json', - p.parent/f'{stem}_thumbnail.png', + p.parent / f"{stem}_metadata.json", + p.parent / f"{stem}_thumbnail.png", ] local_tar = args.local_path / p - local_folder = local_tar.parent/(local_tar.name.split('.')[0]) - assert local_tar.name.endswith('.tar.gz'), local_tar + local_folder = local_tar.parent / (local_tar.name.split(".")[0]) + assert local_tar.name.endswith(".tar.gz"), local_tar if False: for s in scene_files: - local_s = args.local_path/s + local_s = args.local_path / s local_s.parent.mkdir(exist_ok=True, parents=True) if local_s == local_tar and local_folder.exists(): - print(f'Skipping {s} as {local_folder} exists') + print(f"Skipping {s} as {local_folder} exists") continue if not local_s.exists(): - print(f'Downloading {s}') - smb_client.download(args.smb_root/s, dest_folder=local_s.parent) + print(f"Downloading {s}") + smb_client.download(args.smb_root / s, dest_folder=local_s.parent) assert local_s.exists() if not local_folder.exists(): - print(f'Untarring {local_folder}') + print(f"Untarring {local_folder}") assert local_tar.exists() untar_folder = untar(local_tar) assert untar_folder == local_folder else: - print(f'Skipping untar {local_tar}') + print(f"Skipping untar {local_tar}") assert local_folder.exists() - if not (local_folder/'PREPROCESSED.txt').exists(): - print(f'Postprocessing {local_folder=}') + if not (local_folder / 
"PREPROCESSED.txt").exists(): + print(f"Postprocessing {local_folder=}") fix_scene_structure(local_folder, n_subcams=2) fix_metadata(local_folder) optimize_groundtruth_filesize(local_folder) fix_frames_folderstructure(local_folder) - with (local_folder/'PREPROCESSED.txt').open('w') as f: - f.write(f'{TOOLKIT_VERSION=}') + with (local_folder / "PREPROCESSED.txt").open("w") as f: + f.write(f"{TOOLKIT_VERSION=}") if not local_folder.exists(): return - print(f'Validating {local_folder=}') - dset = dataset_loader.InfinigenSceneDataset(local_folder, data_types=dataset_loader.ALLOWED_IMAGE_TYPES) + print(f"Validating {local_folder=}") + dset = dataset_loader.InfinigenSceneDataset( + local_folder, data_types=dataset_loader.ALLOWED_IMAGE_TYPES + ) dset.validate() if local_tar.exists(): @@ -436,27 +438,27 @@ def process_one_scene(p, args): if args.distrib_path is not None: retar_for_distribution(local_folder, args.distrib_path) + def try_process(p, args): process_one_scene(p, args) try: pass except Exception as e: - print('FAILED', p, e) - with (Path()/'failures.txt').open('a') as f: - f.write(f'{p} | {e}\n') - folder_name = p.parent/(p.name.split('.')[0]) + print("FAILED", p, e) + with (Path() / "failures.txt").open("a") as f: + f.write(f"{p} | {e}\n") + folder_name = p.parent / (p.name.split(".")[0]) if folder_name.exists(): shutil.rmtree(folder_name) - -def main(args): +def main(args): job_scene_paths = list(parse_jobscene_path(args)) - #cleanup_smb(args.smb_root) + # cleanup_smb(args.smb_root) def sort_key(p): - folder_name = p.parent/(p.name.split('.')[0]) + folder_name = p.parent / (p.name.split(".")[0]) if folder_name.exists(): return 0 if p.exists(): @@ -468,21 +470,21 @@ def sort_key(p): key=sort_key, ) - print(f'Found {len(job_scene_paths)=}') + print(f"Found {len(job_scene_paths)=}") mapfunc(partial(try_process, args=args), job_scene_paths, n_workers=args.n_workers) -if __name__ == "__main__": +if __name__ == "__main__": parser = argparse.ArgumentParser() - parser.add_argument('local_path', type=Path) - parser.add_argument('smb_root', type=Path) - parser.add_argument('--jobscene_path', type=Path, default=None) - parser.add_argument('--distrib_path', type=Path, default=None) - parser.add_argument('--n_workers', type=int, default=1) - parser.add_argument('--verbose', '-v', action='store_true') + parser.add_argument("local_path", type=Path) + parser.add_argument("smb_root", type=Path) + parser.add_argument("--jobscene_path", type=Path, default=None) + parser.add_argument("--distrib_path", type=Path, default=None) + parser.add_argument("--n_workers", type=int, default=1) + parser.add_argument("--verbose", "-v", action="store_true") args = parser.parse_args() - #cleanup_smb(args.smb_root) + # cleanup_smb(args.smb_root) - main(args) \ No newline at end of file + main(args) diff --git a/infinigen/tools/dataset_loader.py b/infinigen/tools/dataset_loader.py index bbfdc6e71..3524a3910 100644 --- a/infinigen/tools/dataset_loader.py +++ b/infinigen/tools/dataset_loader.py @@ -1,113 +1,118 @@ # Copyright (c) Princeton University. # This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
-# Authors: +# Authors: -from pathlib import Path import json +import logging +from pathlib import Path + import imageio import numpy as np -import logging -#import torch.utils.data IMPORTED ONLY IF USING get_infinigen_dataset +from .suffixes import parse_suffix + +# import torch.utils.data IMPORTED ONLY IF USING get_infinigen_dataset -from .suffixes import parse_suffix, get_suffix logger = logging.getLogger(__name__) ALLOWED_IMAGE_TYPES = { - # Read docs/GroundTruthAnnotations.md for more explanations - - 'Image_png', - #('Image', '.exr'), # NOT IMPLEMENTED - - 'camview_npz', # intrinisic, extrinsic, etc - + "Image_png", + # ('Image', '.exr'), # NOT IMPLEMENTED + "camview_npz", # intrinisic, extrinsic, etc # names available via EITHER blender_gt.gin and opengl_gt.gin - 'Depth_npy', - 'Depth_png', - 'InstanceSegmentation_npz', - 'InstanceSegmentation_png', - 'ObjectSegmentation_npz', - 'ObjectSegmentation_png', - 'SurfaceNormal_npy', - 'SurfaceNormal_png', - 'Objects_json', - + "Depth_npy", + "Depth_png", + "InstanceSegmentation_npz", + "InstanceSegmentation_png", + "ObjectSegmentation_npz", + "ObjectSegmentation_png", + "SurfaceNormal_npy", + "SurfaceNormal_png", + "Objects_json", # blender_gt.gin only provides 2D flow. opengl_gt.gin produces Flow3D instead - 'Flow3D_npy', - 'Flow3D_png', - + "Flow3D_npy", + "Flow3D_png", # names available ONLY from opengl_gt.gin - 'OcclusionBoundaries_png', - 'TagSegmentation_npz', - 'TagSegmentation_png', - 'Flow3DMask_png', - + "OcclusionBoundaries_png", + "TagSegmentation_npz", + "TagSegmentation_png", + "Flow3DMask_png", # info from blender image rendering passes, usually enabled regardless of GT method - 'AO_png', - 'DiffCol_png', - 'DiffDir_png', - 'DiffInd_png', - 'Emit_png', - 'Env_png', - 'GlossCol_png', - 'GlossDir_png', - 'GlossInd_png', - 'TransCol_png', - 'TransDir_png', - 'TransInd_png', - 'VolumeDir_png', + "AO_png", + "DiffCol_png", + "DiffDir_png", + "DiffInd_png", + "Emit_png", + "Env_png", + "GlossCol_png", + "GlossDir_png", + "GlossInd_png", + "TransCol_png", + "TransDir_png", + "TransInd_png", + "VolumeDir_png", } + def get_blocksize(scene_folder): - first, second, *_ = sorted(scene_folder.glob('frames*_0')) - return parse_suffix(second)['frame'] - parse_suffix(first)['frame'] + first, second, *_ = sorted(scene_folder.glob("frames*_0")) + return parse_suffix(second)["frame"] - parse_suffix(first)["frame"] + def get_framebounds_inclusive(scene_folder): - rgb = scene_folder/'frames'/'Image'/'camera_0' - first, *_, last = sorted(rgb.glob('*.png')) - return ( - parse_suffix(first)['frame'], - parse_suffix(last)['frame'] - ) + rgb = scene_folder / "frames" / "Image" / "camera_0" + first, *_, last = sorted(rgb.glob("*.png")) + return (parse_suffix(first)["frame"], parse_suffix(last)["frame"]) + def get_cameras_available(scene_folder): - return [int(p.name.split('_')[-1]) for p in (scene_folder/'frames'/'Image').iterdir()] + return [ + int(p.name.split("_")[-1]) + for p in (scene_folder / "frames" / "Image").iterdir() + ] + def get_imagetypes_available(scene_folder): dtypes = [] - for dtype_folder in (scene_folder/'frames').iterdir(): - frames = dtype_folder/'camera_0' + for dtype_folder in (scene_folder / "frames").iterdir(): + frames = dtype_folder / "camera_0" uniq = set(p.suffix for p in frames.iterdir()) dtypes += [f'{dtype_folder.name}_{u.strip(".")}' for u in uniq] return dtypes + def get_frame_path(scene_folder, cam: int, frame_idx, data_type) -> Path: - data_type_name, data_type_ext = data_type.split('_') - imgname = 
f'{data_type_name}_0_0_{frame_idx:04d}_{cam}.{data_type_ext}' - return Path(scene_folder)/'frames'/data_type_name/f'camera_{cam}'/imgname + data_type_name, data_type_ext = data_type.split("_") + imgname = f"{data_type_name}_0_0_{frame_idx:04d}_{cam}.{data_type_ext}" + return Path(scene_folder) / "frames" / data_type_name / f"camera_{cam}" / imgname -class InfinigenSceneDataset: +class InfinigenSceneDataset: def __init__( - self, + self, scene_folder: Path, - data_types: list[str] = None, # see ALLOWED_IMAGE_KEYS above. Use 'None' to retrieve all available PNG datatypes + data_types: list[ + str + ] = None, # see ALLOWED_IMAGE_KEYS above. Use 'None' to retrieve all available PNG datatypes cameras=None, gt_for_first_camera_only=True, ): - self.scene_folder = Path(scene_folder) self.gt_for_first_camera_only = gt_for_first_camera_only if data_types is None: data_types = get_imagetypes_available(self.scene_folder) - logging.info(f'{self.__class__.__name__} recieved data_types=None, using whats available in {scene_folder}: {data_types}') + logging.info( + f"{self.__class__.__name__} recieved data_types=None, using whats available in {scene_folder}: {data_types}" + ) for t in data_types: if t not in ALLOWED_IMAGE_TYPES: - raise ValueError(f'Recieved data_types containing {t} which is not in ALLOWED_IMAGE_TYPES') + raise ValueError( + f"Recieved data_types containing {t} which is not in ALLOWED_IMAGE_TYPES" + ) self.data_types = data_types if cameras is None: @@ -119,33 +124,32 @@ def __init__( def __len__(self): first, last = self.framebounds_inclusive return last - first - + @staticmethod def load_any_filetype(path): - match path.suffix: - case '.png': + case ".png": return imageio.imread(path) - case '.exr': + case ".exr": raise NotImplementedError - case '.npy': + case ".npy": return np.load(path) - case '.npz': + case ".npz": return dict(np.load(path)) - case 'json': - with path.open('r') as f: + case "json": + with path.open("r") as f: return json.load(f) case _: - raise ValueError(f'Unhandled {path.suffix=} for {path=}') + raise ValueError(f"Unhandled {path.suffix=} for {path=}") def _imagetypes_to_load(self, cam: int): for data_type in self.data_types: dtypename = data_type[0] if ( - self.gt_for_first_camera_only and - cam != 0 and - dtypename != 'Image' and - dtypename != 'camview' + self.gt_for_first_camera_only + and cam != 0 + and dtypename != "Image" + and dtypename != "camview" ): continue yield data_type @@ -157,21 +161,22 @@ def validate(self): frame = self.framebounds_inclusive[0] p = self.frame_path(frame + i, cam, dtype) if not p.exists(): - raise ValueError(f'validate() failed for {self.scene_folder}, could not find {p}') + raise ValueError( + f"validate() failed for {self.scene_folder}, could not find {p}" + ) def frame_path(self, i: int, cam: int, dtype: str): frame_num = self.framebounds_inclusive[0] + i return get_frame_path(self.scene_folder, cam, frame_num, dtype) def __getitem__(self, i): - def get_camera_images(cam: int): imgs = {} for dtype in self._imagetypes_to_load(cam): path = self.frame_path(i, cam, dtype) imgs[dtype] = self.load_any_filetype(path) return imgs - + per_camera_data = [get_camera_images(i) for i in self.cameras] if len(self.cameras) == 1: @@ -179,16 +184,14 @@ def get_camera_images(cam: int): else: return per_camera_data -def get_infinigen_dataset(data_folder: Path, mode='concat', validate=False, **kwargs): - + +def get_infinigen_dataset(data_folder: Path, mode="concat", validate=False, **kwargs): import torch.utils.data data_folder = 
Path(data_folder) scene_datasets = [ - InfinigenSceneDataset(f, **kwargs) - for f in data_folder.iterdir() - if f.is_dir() + InfinigenSceneDataset(f, **kwargs) for f in data_folder.iterdir() if f.is_dir() ] if validate: @@ -196,9 +199,9 @@ def get_infinigen_dataset(data_folder: Path, mode='concat', validate=False, **kw d.validate() match mode: - case 'concat': + case "concat": return torch.utils.data.ConcatDataset(scene_datasets) - case 'chain': + case "chain": return torch.utils.data.ChainDataset(scene_datasets) case _: raise ValueError(mode) diff --git a/infinigen/tools/download_pregenerated_data.py b/infinigen/tools/download_pregenerated_data.py index 7c764292c..b0f9b7e3a 100644 --- a/infinigen/tools/download_pregenerated_data.py +++ b/infinigen/tools/download_pregenerated_data.py @@ -4,232 +4,276 @@ # Authors: Alexander Raistrick import argparse -from collections import OrderedDict -from pathlib import Path -import urllib.request -import re -import subprocess import json -from multiprocessing import Pool +import subprocess +import urllib.request +from collections import OrderedDict from functools import partial +from multiprocessing import Pool +from pathlib import Path SUPPORTED_DATARELEASE_FORMATS = {"0.2.0"} TEXT_SEPARATOR_LINE = "=" * 60 -ANNOT_DESCRIPTIONS = OrderedDict([ - - ("Image_png", "RGB image as a .png."), - ("Image_exr", "RGB image as an .exr"), - - ("camview_npz", "Camera intrinsic & extrinsic matricies, IE, camera calibration and poses."), - - ("Depth_npy", "Depth"), - ("Depth_png", "Color-mapped PNG of Depth_npy. FOR VISUALIZATION ONLY"), - - ("SurfaceNormal_npy", "Surface Normals"), - ("SurfaceNormal_png", "Color-mapped PNG of SurfaceNormal_npy. FOR VISUALIZATION ONLY"), - - ("Flow3D_npy", "Optical Flow and Depth change."), - ("Flow3D_png", "Color-wheel visualization of the 2D part of Flow3D_npy. FOR VISUALIZATION ONLY"), +ANNOT_DESCRIPTIONS = OrderedDict( + [ + ("Image_png", "RGB image as a .png."), + ("Image_exr", "RGB image as an .exr"), + ( + "camview_npz", + "Camera intrinsic & extrinsic matricies, IE, camera calibration and poses.", + ), + ("Depth_npy", "Depth"), + ("Depth_png", "Color-mapped PNG of Depth_npy. FOR VISUALIZATION ONLY"), + ("SurfaceNormal_npy", "Surface Normals"), + ( + "SurfaceNormal_png", + "Color-mapped PNG of SurfaceNormal_npy. FOR VISUALIZATION ONLY", + ), + ("Flow3D_npy", "Optical Flow and Depth change."), + ( + "Flow3D_png", + "Color-wheel visualization of the 2D part of Flow3D_npy. FOR VISUALIZATION ONLY", + ), + ("Flow3DMask_png", "Flow Occlusion mask."), + ("OcclusionBoundaries_png", "Occlusion Boundaries."), + ( + "ObjectSegmentation_npz", + "Semantic Segmentation mask. Compressed using a lookup table - see docs for more info.", + ), + ( + "ObjectSegmentation_png", + "Color-mapped PNG of ObjectSegmentation.npz. FOR VISUALIZATION ONLY", + ), + ( + "InstanceSegmentation_npz", + "Instance Segmentation mask. Compressed using a lookup table - see docs for more info.", + ), + ( + "InstanceSegmentation_png", + "Color-mapped PNG of InstanceSegmentation.npz. FOR VISUALIZATION ONLY", + ), + ( + "TagSegmentation_npz", + "Segmentation mask to help distinguish different parts of the same object. Compressed using a lookup table - see docs for more info.", + ), + ( + "TagSegmentation_png", + "Color-mapped PNG of TagSegmentation_npz. FOR VISUALIZATION ONLY", + ), + ( + "Objects_json", + "LARGE json object specifying names, poses and bounding boxes of objects in the scene. 
Required for 2D/3D BBox.", + ), + ("AO_png", "Ambient Occlusion."), + ("DiffCol_png", "Diffuse Color, a.k.a Albedo."), + ("DiffDir_png", "Diffuse Direct pass."), + ("DiffInd_png", "Diffuse Indirect pass."), + ("Emit_png", "Emission pass."), + ("Env_png", "Environment pass."), + ("GlossCol_png", "Glossy color."), + ("GlossDir_png", "Glossy direct pass."), + ("GlossInd_png", "Glossy indirect pass."), + ("TransCol_png", "Transmission color."), + ("TransDir_png", "Transmission direct pass."), + ("TransInd_png", "Transmission indirect pass."), + ("VolumeDir_png", "Volume direct pass."), + ] +) + +CAMERA_DESCRIPTIONS = OrderedDict( + { + "camera_0": "The default camera; select only this if you just want monocular data.", + "camera_1": "Select both this camera and the above if you want stereo data.", + } +) - ("Flow3DMask_png", "Flow Occlusion mask."), - ("OcclusionBoundaries_png", "Occlusion Boundaries."), - - ("ObjectSegmentation_npz", "Semantic Segmentation mask. Compressed using a lookup table - see docs for more info."), - ("ObjectSegmentation_png", "Color-mapped PNG of ObjectSegmentation.npz. FOR VISUALIZATION ONLY"), - - ("InstanceSegmentation_npz", "Instance Segmentation mask. Compressed using a lookup table - see docs for more info."), - ("InstanceSegmentation_png", "Color-mapped PNG of InstanceSegmentation.npz. FOR VISUALIZATION ONLY"), - - ("TagSegmentation_npz", "Segmentation mask to help distinguish different parts of the same object. Compressed using a lookup table - see docs for more info."), - ("TagSegmentation_png", "Color-mapped PNG of TagSegmentation_npz. FOR VISUALIZATION ONLY"), - - ("Objects_json", "LARGE json object specifying names, poses and bounding boxes of objects in the scene. Required for 2D/3D BBox."), - - ("AO_png", "Ambient Occlusion."), - ("DiffCol_png", "Diffuse Color, a.k.a Albedo."), - ("DiffDir_png", "Diffuse Direct pass."), - ("DiffInd_png", "Diffuse Indirect pass."), - ("Emit_png", "Emission pass."), - ("Env_png", "Environment pass."), - ("GlossCol_png", "Glossy color."), - ("GlossDir_png", "Glossy direct pass."), - ("GlossInd_png", "Glossy indirect pass."), - ("TransCol_png", "Transmission color."), - ("TransDir_png", "Transmission direct pass."), - ("TransInd_png", "Transmission indirect pass."), - ("VolumeDir_png", "Volume direct pass."), -]) - -CAMERA_DESCRIPTIONS = OrderedDict({ - "camera_0": "The default camera; select only this if you just want monocular data.", - "camera_1": "Select both this camera and the above if you want stereo data." 
-}) def wget_path(args, path): url = args.repo_url + str(path) - cmd = f'wget -q -N --show-progress {url} -P {str(args.output_folder)}' + cmd = f"wget -q -N --show-progress {url} -P {str(args.output_folder)}" subprocess.check_call(cmd.split()) + def untar_path(args, tarfile): assert tarfile.exists() - cmd = f'tar -xzf {tarfile} -C {args.output_folder}' + cmd = f"tar -xzf {tarfile} -C {args.output_folder}" print(cmd) subprocess.check_call(cmd.split()) tarfile.unlink() + def url_to_text(url): with urllib.request.urlopen(url) as f: - return f.read().decode('utf-8') + return f.read().decode("utf-8") -def user_select_string_list(values, descriptions_dict=None, extra_msg=None): +def user_select_string_list(values, descriptions_dict=None, extra_msg=None): if descriptions_dict is not None: + def sort_by_description_order(vinp): try: - return next(i for i, k in enumerate(descriptions_dict.keys()) if k == vinp) + return next( + i for i, k in enumerate(descriptions_dict.keys()) if k == vinp + ) except StopIteration: return len(values) + values = sorted(values, key=sort_by_description_order) print(TEXT_SEPARATOR_LINE) for i, v in enumerate(values): - prompt = f'({i}) {v:<25}' + prompt = f"({i}) {v:<25}" if descriptions_dict is not None and v in descriptions_dict: desc = descriptions_dict[v] - prompt += f' - {desc}' + prompt += f" - {desc}" print(prompt) - + if extra_msg is not None: print(extra_msg) print(TEXT_SEPARATOR_LINE) - print('Please enter your choices from above, as a space-separated list of integers or strings, or type \"ALL\"') - selections = input('Enter your selection: ') + print( + 'Please enter your choices from above, as a space-separated list of integers or strings, or type "ALL"' + ) + selections = input("Enter your selection: ") - print('\n') + print("\n") - if selections == 'ALL': + if selections == "ALL": return values def postprocess(x): - try: x = int(x) except ValueError: pass - if ( - (isinstance(x, str) and x not in values) or - (isinstance(x, int) and x not in range(len(values))) + if (isinstance(x, str) and x not in values) or ( + isinstance(x, int) and x not in range(len(values)) ): - raise ValueError(f'User provided input \"{x}\" was not recognized, expected integer 0 to {len(values)-1} or a shorthand string') + raise ValueError( + f'User provided input "{x}" was not recognized, expected integer 0 to {len(values)-1} or a shorthand string' + ) if isinstance(x, int): x = values[x] return x - - selections = [x.strip().strip(',') for x in selections.split()] + + selections = [x.strip().strip(",") for x in selections.split()] selections = [postprocess(x) for x in selections] - print('Selected: ', selections) + print("Selected: ", selections) return selections -def check_and_preprocess_args(args, metadata): - datarelease_format_version = metadata['datarelease_format_version'] +def check_and_preprocess_args(args, metadata): + datarelease_format_version = metadata["datarelease_format_version"] if datarelease_format_version not in SUPPORTED_DATARELEASE_FORMATS: raise ValueError( - f'{args.release_name} uses {datarelease_format_version=} which is not ' - ' supported by this download script. Please download a newer version of the code.' + f"{args.release_name} uses {datarelease_format_version=} which is not " + " supported by this download script. Please download a newer version of the code." 
         )
-
+
     if args.data_types is None:
         args.data_types = user_select_string_list(
-            metadata['data_types'],
-            ANNOT_DESCRIPTIONS,
-            extra_msg="\nNote: See https://docs.blender.org/manual/en/latest/render/layers/passes.html for a description of Blender-Cycles' render passes"
+            metadata["data_types"],
+            ANNOT_DESCRIPTIONS,
+            extra_msg="\nNote: See https://docs.blender.org/manual/en/latest/render/layers/passes.html for a description of Blender-Cycles' render passes",
        )
        if not any("Image" in x for x in args.data_types):
-            print('WARNING: User did not request Image_png or Image_exr, this is unusual. Please restart if this was not intended.')
+            print(
+                "WARNING: User did not request Image_png or Image_exr, this is unusual. Please restart if this was not intended."
+            )
     else:
-        missing = set(args.data_types) - set(metadata['data_types'])
+        missing = set(args.data_types) - set(metadata["data_types"])
         if len(missing):
-            raise ValueError(f"In user-provided --seeds, {missing} could not be found in {args.release_name} metadata.json")
+            raise ValueError(
+                f"In user-provided --data_types, {missing} could not be found in {args.release_name} metadata.json"
+            )
     if args.seeds is None:
-        n = len(metadata['seeds'])
+        n = len(metadata["seeds"])
         print(
             f"How many videos do you wish to download? "
             f"Enter a quantity from 1 to {n}, or type SELECT to pick specific seeds"
         )
         selection = input("Enter your selection: ")
-        if selection == 'SELECT':
-            args.seeds = user_select_string_list(metadata['seeds'])
+        if selection == "SELECT":
+            args.seeds = user_select_string_list(metadata["seeds"])
         else:
             num_select = int(selection)
-            args.seeds = metadata['seeds'][:num_select]
-
-
-
-    missing = set(args.seeds) - set(metadata['seeds'])
+            args.seeds = metadata["seeds"][:num_select]
+
+    missing = set(args.seeds) - set(metadata["seeds"])
     if len(missing):
-        raise ValueError(f"In user-provided --seeds, {missing} could not be found in {args.release_name} metadata.json")
+        raise ValueError(
+            f"In user-provided --seeds, {missing} could not be found in {args.release_name} metadata.json"
+        )
     if args.cameras is None:
-        args.cameras = user_select_string_list(metadata['cameras'], CAMERA_DESCRIPTIONS)
+        args.cameras = user_select_string_list(metadata["cameras"], CAMERA_DESCRIPTIONS)
     else:
-        missing = set(args.cameras) - set(metadata['cameras'])
+        missing = set(args.cameras) - set(metadata["cameras"])
         if len(missing):
-            raise ValueError(f"In user-provided --cameras, {missing} are not supported acording {args.release_name} metadata.json")
+            raise ValueError(
+                f"In user-provided --cameras, {missing} are not supported according to {args.release_name} metadata.json"
+            )
+
 def process_path(args, path):
     wget_path(args, path)
-    untar_path(args, tarfile=args.output_folder/path.name)
+    untar_path(args, tarfile=args.output_folder / path.name)
-def main(args):
+def main(args):
     if args.release_name is None:
-        print(f'Please specify a --release_name. Go to {args.repo_url} in your browser to see what folders are available.')
+        print(
+            f"Please specify a --release_name. Go to {args.repo_url} in your browser to see what folders are available."
+        )
         exit()
-    metadata_url = f'{args.repo_url}/{args.release_name}/metadata.json'
+    metadata_url = f"{args.repo_url}/{args.release_name}/metadata.json"
     metadata = json.loads(url_to_text(metadata_url))
     print(TEXT_SEPARATOR_LINE)
     print(f"Description for release {repr(args.release_name)}:")
-    print(metadata['description'])
+    print(metadata["description"])
     print(TEXT_SEPARATOR_LINE)
     input("Press Enter to continue...")
-    print('\n')
+    print("\n")
     check_and_preprocess_args(args, metadata)
-    toplevel = Path(args.release_name)/'renders'
+    toplevel = Path(args.release_name) / "renders"
     paths = []
     for seed in args.seeds:
         for camera in args.cameras:
             for imgtype in args.data_types:
-                name = f'{seed}_{imgtype}_{camera}.tar.gz'
-                paths.append(toplevel/seed/name)
+                name = f"{seed}_{imgtype}_{camera}.tar.gz"
+                paths.append(toplevel / seed / name)
-    print(f'User requested {len(args.seeds)} seeds x {len(args.cameras)} cameras x {len(args.data_types)} data types')
-    print(f'This script will download and untar {len(paths)} tarballs from {args.repo_url}')
-    choice = input('Do you wish to proceed? [y/n]: ')
+    print(
+        f"User requested {len(args.seeds)} seeds x {len(args.cameras)} cameras x {len(args.data_types)} data types"
+    )
+    print(
+        f"This script will download and untar {len(paths)} tarballs from {args.repo_url}"
+    )
+    choice = input("Do you wish to proceed? [y/n]: ")
     if not (choice == "" or choice in " yY1"):
         exit()
     with Pool(args.n_workers) as pool:
         pool.map(partial(process_path, args), paths)
-
-
+
+
 if __name__ == "__main__":
     parser = argparse.ArgumentParser()
-    parser.add_argument('output_folder', type=Path)
+    parser.add_argument("output_folder", type=Path)
     parser.add_argument(
         "--repo_url",
         type=str,
@@ -252,23 +296,18 @@ def main(args):
     parser.add_argument(
         "--cameras",
         type=str,
-        nargs='+',
+        nargs="+",
         default=None,
         help="What cameras should we download data for? Omit to download all available in this release",
     )
     parser.add_argument(
         "--data_types",
         type=str,
-        nargs='+',
+        nargs="+",
         default=None,
         help="What data types (e.g Image, Depth, etc) should we download data for? 
Omit to download all available in this release", ) - parser.add_argument( - '--n_workers', - type=int, - default=1 - ) + parser.add_argument("--n_workers", type=int, default=1) args = parser.parse_args() main(args) - diff --git a/infinigen/tools/export.py b/infinigen/tools/export.py index c6ae6080b..f6c0fa5a5 100644 --- a/infinigen/tools/export.py +++ b/infinigen/tools/export.py @@ -3,61 +3,66 @@ # Authors: David Yan -import bpy -import os -import sys import argparse -import shutil -import subprocess import logging -import gin import math - +import shutil +import subprocess from pathlib import Path +import bpy import gin FORMAT_CHOICES = ["fbx", "obj", "usdc", "usda", "stl", "ply"] -BAKE_TYPES = {'DIFFUSE': 'Base Color', 'ROUGHNESS': 'Roughness'} # 'EMIT':'Emission' # "GLOSSY": 'Specular', 'TRANSMISSION':'Transmission' don't export -SPECIAL_BAKE = {'METAL': 'Metallic', 'NORMAL': 'Normal'} +BAKE_TYPES = { + "DIFFUSE": "Base Color", + "ROUGHNESS": "Roughness", +} # 'EMIT':'Emission' # "GLOSSY": 'Specular', 'TRANSMISSION':'Transmission' don't export +SPECIAL_BAKE = {"METAL": "Metallic", "NORMAL": "Normal"} ALL_BAKE = BAKE_TYPES | SPECIAL_BAKE + def apply_all_modifiers(obj): for mod in obj.modifiers: - if (mod is None): continue + if mod is None: + continue try: obj.select_set(True) - bpy.context.view_layer.objects.active = obj + bpy.context.view_layer.objects.active = obj bpy.ops.object.modifier_apply(modifier=mod.name) logging.info(f"Applied modifier {mod} on {obj}") obj.select_set(False) except RuntimeError: logging.info(f"Can't apply {mod} on {obj}") obj.select_set(False) - return + return + def realizeInstances(obj): for mod in obj.modifiers: - if (mod is None or mod.type != 'NODES'): continue + if mod is None or mod.type != "NODES": + continue geo_group = mod.node_group - outputNode = geo_group.nodes['Group Output'] + outputNode = geo_group.nodes["Group Output"] logging.info(f"Realizing instances on {mod}") link = outputNode.inputs[0].links[0] from_socket = link.from_socket geo_group.links.remove(link) - realizeNode = geo_group.nodes.new(type = 'GeometryNodeRealizeInstances') + realizeNode = geo_group.nodes.new(type="GeometryNodeRealizeInstances") geo_group.links.new(realizeNode.inputs[0], from_socket) geo_group.links.new(outputNode.inputs[0], realizeNode.outputs[0]) + def remove_shade_smooth(obj): for mod in obj.modifiers: - if (mod is None or mod.type != 'NODES'): continue + if mod is None or mod.type != "NODES": + continue geo_group = mod.node_group - outputNode = geo_group.nodes['Group Output'] - if geo_group.nodes.get('Set Shade Smooth'): + outputNode = geo_group.nodes["Group Output"] + if geo_group.nodes.get("Set Shade Smooth"): logging.info("Removing shade smooth on " + obj.name) - smooth_node = geo_group.nodes['Set Shade Smooth'] + smooth_node = geo_group.nodes["Set Shade Smooth"] else: continue @@ -66,23 +71,26 @@ def remove_shade_smooth(obj): geo_group.links.remove(link) geo_group.links.new(outputNode.inputs[0], from_socket) + def check_material_geonode(node_tree): if node_tree.nodes.get("Set Material"): logging.info("Found set material!") return True for node in node_tree.nodes: - if node.type == 'GROUP' and check_material_geonode(node.node_tree): + if node.type == "GROUP" and check_material_geonode(node.node_tree): return True - + return False + def handle_geo_modifiers(obj, export_usd): has_geo_nodes = False for mod in obj.modifiers: - if (mod is None or mod.type != 'NODES'): continue + if mod is None or mod.type != "NODES": + continue has_geo_nodes = True - + if 
has_geo_nodes and not obj.data.materials: mat = bpy.data.materials.new(name=f"{mod.name} shader") obj.data.materials.append(mat) @@ -92,81 +100,99 @@ def handle_geo_modifiers(obj, export_usd): if not export_usd: realizeInstances(obj) + def split_glass_mats(): split_objs = [] for obj in bpy.data.objects: - if any(exclude in obj.name for exclude in ['BowlFactory', 'CupFactory', 'OvenFactory', 'BottleFactory']): + if any( + exclude in obj.name + for exclude in ["BowlFactory", "CupFactory", "OvenFactory", "BottleFactory"] + ): continue for slot in obj.material_slots: mat = slot.material if mat is None: continue - if ('shader_glass' in mat.name or 'shader_lamp_bulb' in mat.name) and len(obj.material_slots) >= 2: - logging.info(f'Splitting {obj}') + if ("shader_glass" in mat.name or "shader_lamp_bulb" in mat.name) and len( + obj.material_slots + ) >= 2: + logging.info(f"Splitting {obj}") obj.select_set(True) - bpy.context.view_layer.objects.active = obj - bpy.ops.object.mode_set(mode='EDIT') - bpy.ops.mesh.separate(type='MATERIAL') - bpy.ops.object.mode_set(mode='OBJECT') + bpy.context.view_layer.objects.active = obj + bpy.ops.object.mode_set(mode="EDIT") + bpy.ops.mesh.separate(type="MATERIAL") + bpy.ops.object.mode_set(mode="OBJECT") obj.select_set(False) split_objs.append(obj.name) break - - matches = [obj for split_obj in split_objs for obj in bpy.data.objects if split_obj in obj.name] + + matches = [ + obj + for split_obj in split_objs + for obj in bpy.data.objects + if split_obj in obj.name + ] for match in matches: mat = match.material_slots[0].material if mat is None: continue - if ('shader_glass' in mat.name or 'shader_lamp_bulb' in mat.name): - match.name = f'{match.name}_SPLIT_GLASS' - -def clean_names(obj = None): + if "shader_glass" in mat.name or "shader_lamp_bulb" in mat.name: + match.name = f"{match.name}_SPLIT_GLASS" + + +def clean_names(obj=None): if obj is not None: - obj.name = (obj.name).replace(' ','_') - obj.name = (obj.name).replace('.','_') + obj.name = (obj.name).replace(" ", "_") + obj.name = (obj.name).replace(".", "_") - if obj.type == 'MESH': + if obj.type == "MESH": for uv_map in obj.data.uv_layers: - uv_map.name = uv_map.name.replace('.', '_') + uv_map.name = uv_map.name.replace(".", "_") for mat in bpy.data.materials: - if (mat is None): continue - mat.name = (mat.name).replace(' ','_') - mat.name = (mat.name).replace('.','_') + if mat is None: + continue + mat.name = (mat.name).replace(" ", "_") + mat.name = (mat.name).replace(".", "_") for slot in obj.material_slots: mat = slot.material - if (mat is None): + if mat is None: continue - mat.name = (mat.name).replace(' ','_') - mat.name = (mat.name).replace('.','_') + mat.name = (mat.name).replace(" ", "_") + mat.name = (mat.name).replace(".", "_") return - + for obj in bpy.data.objects: - obj.name = (obj.name).replace(' ','_') - obj.name = (obj.name).replace('.','_') + obj.name = (obj.name).replace(" ", "_") + obj.name = (obj.name).replace(".", "_") - if obj.type == 'MESH': + if obj.type == "MESH": for uv_map in obj.data.uv_layers: - uv_map.name = uv_map.name.replace('.', '_') # if uv has '.' in name the node will export wrong in USD + uv_map.name = uv_map.name.replace( + ".", "_" + ) # if uv has '.' 
in name the node will export wrong in USD for mat in bpy.data.materials: - if (mat is None): continue - mat.name = (mat.name).replace(' ','_') - mat.name = (mat.name).replace('.','_') + if mat is None: + continue + mat.name = (mat.name).replace(" ", "_") + mat.name = (mat.name).replace(".", "_") -def remove_obj_parents(obj = None): - if obj is not None : + +def remove_obj_parents(obj=None): + if obj is not None: old_location = obj.matrix_world.to_translation() obj.parent = None obj.matrix_world.translation = old_location return - - for obj in bpy.data.objects: + + for obj in bpy.data.objects: old_location = obj.matrix_world.to_translation() obj.parent = None obj.matrix_world.translation = old_location + def delete_objects(): logging.info("Deleting placeholders collection") collection_name = "placeholders" @@ -176,48 +202,50 @@ def delete_objects(): for scene in bpy.data.scenes: if collection.name in scene.collection.children: scene.collection.children.unlink(collection) - + for obj in collection.objects: bpy.data.objects.remove(obj, do_unlink=True) - + def delete_child_collections(parent_collection): for child_collection in parent_collection.children: delete_child_collections(child_collection) bpy.data.collections.remove(child_collection) - + delete_child_collections(collection) bpy.data.collections.remove(collection) - + if bpy.data.objects.get("Grid"): bpy.data.objects.remove(bpy.data.objects["Grid"], do_unlink=True) - + if bpy.data.objects.get("atmosphere"): bpy.data.objects.remove(bpy.data.objects["atmosphere"], do_unlink=True) - + if bpy.data.objects.get("KoleClouds"): bpy.data.objects.remove(bpy.data.objects["KoleClouds"], do_unlink=True) -def rename_all_meshes(obj = None): + +def rename_all_meshes(obj=None): if obj is not None: if obj.data and obj.data.users == 1: - obj.data.name = obj.name + obj.data.name = obj.name return for obj in bpy.data.objects: if obj.data and obj.data.users == 1: - obj.data.name = obj.name + obj.data.name = obj.name + def update_visibility(): - outliner_area = next(a for a in bpy.context.screen.areas if a.type == 'OUTLINER') + outliner_area = next(a for a in bpy.context.screen.areas if a.type == "OUTLINER") space = outliner_area.spaces[0] space.show_restrict_column_viewport = True # Global visibility (Monitor icon) collection_view = {} obj_view = {} for collection in bpy.data.collections: collection_view[collection] = collection.hide_render - collection.hide_viewport = False #reenables viewports for all - collection.hide_render = False # enables renders for all collections - + collection.hide_viewport = False # reenables viewports for all + collection.hide_render = False # enables renders for all collections + # disables viewports and renders for all objs for obj in bpy.data.objects: obj_view[obj] = obj.hide_render @@ -225,167 +253,191 @@ def update_visibility(): obj.hide_render = True return collection_view, obj_view - + + def uv_unwrap(obj): obj.select_set(True) - bpy.context.view_layer.objects.active = obj + bpy.context.view_layer.objects.active = obj - obj.data.uv_layers.new(name='ExportUV') - bpy.context.object.data.uv_layers['ExportUV'].active = True + obj.data.uv_layers.new(name="ExportUV") + bpy.context.object.data.uv_layers["ExportUV"].active = True logging.info("UV Unwrapping") - bpy.ops.object.mode_set(mode='EDIT') - bpy.ops.mesh.select_all(action='SELECT') + bpy.ops.object.mode_set(mode="EDIT") + bpy.ops.mesh.select_all(action="SELECT") try: bpy.ops.uv.smart_project(angle_limit=0.7) - except RuntimeError: + except RuntimeError: logging.info("UV 
Unwrap failed, skipping mesh") - bpy.ops.object.mode_set(mode='OBJECT') + bpy.ops.object.mode_set(mode="OBJECT") obj.select_set(False) return False - bpy.ops.object.mode_set(mode='OBJECT') + bpy.ops.object.mode_set(mode="OBJECT") obj.select_set(False) return True + def bakeVertexColors(obj): logging.info(f"Baking vertex color on {obj}") - bpy.ops.object.select_all(action='DESELECT') + bpy.ops.object.select_all(action="DESELECT") obj.select_set(True) - bpy.context.view_layer.objects.active = obj - vertColor = bpy.context.object.data.color_attributes.new(name='VertColor',domain='CORNER',type='BYTE_COLOR') + bpy.context.view_layer.objects.active = obj + vertColor = bpy.context.object.data.color_attributes.new( + name="VertColor", domain="CORNER", type="BYTE_COLOR" + ) bpy.context.object.data.attributes.active_color = vertColor - bpy.ops.object.bake(type='DIFFUSE', pass_filter={'COLOR'}, target ='VERTEX_COLORS') + bpy.ops.object.bake(type="DIFFUSE", pass_filter={"COLOR"}, target="VERTEX_COLORS") obj.select_set(False) + def apply_baked_tex(obj, paramDict={}): - bpy.context.view_layer.objects.active = obj - bpy.context.object.data.uv_layers['ExportUV'].active_render = True + bpy.context.view_layer.objects.active = obj + bpy.context.object.data.uv_layers["ExportUV"].active_render = True for uv_layer in reversed(obj.data.uv_layers): if "ExportUV" not in uv_layer.name: logging.info(f"Removed extraneous UV Layer {uv_layer}") obj.data.uv_layers.remove(uv_layer) - + for slot in obj.material_slots: mat = slot.material - if (mat is None): + if mat is None: continue mat.use_nodes = True nodes = mat.node_tree.nodes logging.info("Reapplying baked texs on " + mat.name) # delete all nodes except baked nodes and bsdf - excludedNodes = [type + '_node' for type in ALL_BAKE] - excludedNodes.extend(['Material Output','Principled BSDF']) - for n in nodes: + excludedNodes = [type + "_node" for type in ALL_BAKE] + excludedNodes.extend(["Material Output", "Principled BSDF"]) + for n in nodes: if n.name not in excludedNodes: - nodes.remove(n) # deletes an arbitrary principled BSDF in the case of a mix, which is handled below + nodes.remove( + n + ) # deletes an arbitrary principled BSDF in the case of a mix, which is handled below + + output = nodes["Material Output"] - output = nodes['Material Output'] - # stick baked texture in material - if nodes.get('Principled BSDF') is None: # no bsdf + if nodes.get("Principled BSDF") is None: # no bsdf logging.info("No BSDF, creating new one") - principled_bsdf_node = nodes.new('ShaderNodeBsdfPrincipled') - elif len(output.inputs[0].links) != 0 and output.inputs[0].links[0].from_node.bl_idname == 'ShaderNodeBsdfPrincipled': # trivial bsdf graph + principled_bsdf_node = nodes.new("ShaderNodeBsdfPrincipled") + elif ( + len(output.inputs[0].links) != 0 + and output.inputs[0].links[0].from_node.bl_idname + == "ShaderNodeBsdfPrincipled" + ): # trivial bsdf graph logging.info("Trivial shader graph, using old BSDF") - principled_bsdf_node = nodes['Principled BSDF'] + principled_bsdf_node = nodes["Principled BSDF"] else: logging.info("Non-trivial shader graph, creating new BSDF") - nodes.remove(nodes['Principled BSDF']) # shader graph was a mix of bsdfs - principled_bsdf_node = nodes.new('ShaderNodeBsdfPrincipled') + nodes.remove(nodes["Principled BSDF"]) # shader graph was a mix of bsdfs + principled_bsdf_node = nodes.new("ShaderNodeBsdfPrincipled") links = mat.node_tree.links - + # create the new shader node links - links.new(output.inputs[0], principled_bsdf_node.outputs[0]) + 
links.new(output.inputs[0], principled_bsdf_node.outputs[0]) for type in ALL_BAKE: - if not nodes.get(type + '_node'): continue - tex_node = nodes[type + '_node'] - if type == 'NORMAL': - normal_node = nodes.new('ShaderNodeNormalMap') - links.new(normal_node.inputs['Color'], tex_node.outputs[0]) - links.new(principled_bsdf_node.inputs[ALL_BAKE[type]], normal_node.outputs[0]) + if not nodes.get(type + "_node"): + continue + tex_node = nodes[type + "_node"] + if type == "NORMAL": + normal_node = nodes.new("ShaderNodeNormalMap") + links.new(normal_node.inputs["Color"], tex_node.outputs[0]) + links.new( + principled_bsdf_node.inputs[ALL_BAKE[type]], normal_node.outputs[0] + ) continue links.new(principled_bsdf_node.inputs[ALL_BAKE[type]], tex_node.outputs[0]) - + # bring back cleared param values if mat.name in paramDict: - principled_bsdf_node.inputs['Metallic'].default_value = paramDict[mat.name]['Metallic'] - principled_bsdf_node.inputs['Sheen'].default_value = paramDict[mat.name]['Sheen'] - principled_bsdf_node.inputs['Clearcoat'].default_value = paramDict[mat.name]['Clearcoat'] + principled_bsdf_node.inputs["Metallic"].default_value = paramDict[mat.name][ + "Metallic" + ] + principled_bsdf_node.inputs["Sheen"].default_value = paramDict[mat.name][ + "Sheen" + ] + principled_bsdf_node.inputs["Clearcoat"].default_value = paramDict[ + mat.name + ]["Clearcoat"] + def create_glass_shader(node_tree, export_usd): nodes = node_tree.nodes - if nodes.get('Glass BSDF'): - color = nodes['Glass BSDF'].inputs[0].default_value - roughness = nodes['Glass BSDF'].inputs[1].default_value - ior = nodes['Glass BSDF'].inputs[2].default_value - - if nodes.get('Principled BSDF'): - nodes.remove(nodes['Principled BSDF']) - - principled_bsdf_node = nodes.new('ShaderNodeBsdfPrincipled') - - if nodes.get('Glass BSDF'): - principled_bsdf_node.inputs['Base Color'].default_value = color - principled_bsdf_node.inputs['Roughness'].default_value = roughness - principled_bsdf_node.inputs['IOR'].default_value = ior + if nodes.get("Glass BSDF"): + color = nodes["Glass BSDF"].inputs[0].default_value + roughness = nodes["Glass BSDF"].inputs[1].default_value + ior = nodes["Glass BSDF"].inputs[2].default_value + + if nodes.get("Principled BSDF"): + nodes.remove(nodes["Principled BSDF"]) + + principled_bsdf_node = nodes.new("ShaderNodeBsdfPrincipled") + + if nodes.get("Glass BSDF"): + principled_bsdf_node.inputs["Base Color"].default_value = color + principled_bsdf_node.inputs["Roughness"].default_value = roughness + principled_bsdf_node.inputs["IOR"].default_value = ior else: - principled_bsdf_node.inputs['Roughness'].default_value = 0 + principled_bsdf_node.inputs["Roughness"].default_value = 0 - principled_bsdf_node.inputs['Transmission'].default_value = 1 + principled_bsdf_node.inputs["Transmission"].default_value = 1 if export_usd: - principled_bsdf_node.inputs['Alpha'].default_value = 0 - node_tree.links.new(principled_bsdf_node.outputs[0], nodes['Material Output'].inputs[0]) + principled_bsdf_node.inputs["Alpha"].default_value = 0 + node_tree.links.new( + principled_bsdf_node.outputs[0], nodes["Material Output"].inputs[0] + ) + def process_glass_materials(obj, export_usd): for slot in obj.material_slots: mat = slot.material - if (mat is None or not mat.use_nodes): continue + if mat is None or not mat.use_nodes: + continue nodes = mat.node_tree.nodes - outputNode = nodes['Material Output'] - if nodes.get('Glass BSDF'): - if outputNode.inputs[0].links[0].from_node.bl_idname == 'ShaderNodeBsdfGlass': + outputNode = 
nodes["Material Output"] + if nodes.get("Glass BSDF"): + if ( + outputNode.inputs[0].links[0].from_node.bl_idname + == "ShaderNodeBsdfGlass" + ): logging.info(f"Creating glass material on {obj.name}") else: - logging.info(f"Non-trivial glass material on {obj.name}, material export will be inaccurate") + logging.info( + f"Non-trivial glass material on {obj.name}, material export will be inaccurate" + ) create_glass_shader(mat.node_tree, export_usd) - elif 'glass' in mat.name or 'shader_lamp_bulb' in mat.name: + elif "glass" in mat.name or "shader_lamp_bulb" in mat.name: logging.info(f"Creating glass material on {obj.name}") create_glass_shader(mat.node_tree, export_usd) - -def bake_pass( - obj, - dest: Path, - img_size, - bake_type, - export_usd -): - - img = bpy.data.images.new(f'{obj.name}_{bake_type}',img_size,img_size) - clean_name = (obj.name).replace(' ','_').replace('.','_') - file_path = dest/f'{clean_name}_{bake_type}.png' - dest = dest/'textures' + + +def bake_pass(obj, dest: Path, img_size, bake_type, export_usd): + img = bpy.data.images.new(f"{obj.name}_{bake_type}", img_size, img_size) + clean_name = (obj.name).replace(" ", "_").replace(".", "_") + file_path = dest / f"{clean_name}_{bake_type}.png" + dest = dest / "textures" bake_obj = False bake_exclude_mats = {} - # materials are stored as stack so when removing traverse the reversed list + # materials are stored as stack so when removing traverse the reversed list for index, slot in reversed(list(enumerate(obj.material_slots))): mat = slot.material - if mat is None: + if mat is None: bpy.context.object.active_material_index = index bpy.ops.object.material_slot_remove() continue - + logging.info(mat.name) mat.use_nodes = True nodes = mat.node_tree.nodes output = nodes["Material Output"] - img_node = nodes.new('ShaderNodeTexImage') - img_node.name = f'{bake_type}_node' + img_node = nodes.new("ShaderNodeTexImage") + img_node.name = f"{bake_type}_node" img_node.image = img img_node.select = True nodes.active = img_node @@ -397,23 +449,29 @@ def bake_pass( continue surface_node = output.inputs[0].links[0].from_node - if (bake_type in ALL_BAKE and surface_node.bl_idname == 'ShaderNodeBsdfPrincipled' and - len(surface_node.inputs[ALL_BAKE[bake_type]].links) == 0): # trivial bsdf graph - - logging.info(f"{mat.name} has no procedural input for {bake_type}, not using baked textures") + if ( + bake_type in ALL_BAKE + and surface_node.bl_idname == "ShaderNodeBsdfPrincipled" + and len(surface_node.inputs[ALL_BAKE[bake_type]].links) == 0 + ): # trivial bsdf graph + logging.info( + f"{mat.name} has no procedural input for {bake_type}, not using baked textures" + ) bake_exclude_mats[mat] = img_node continue - - bake_obj = True - if (bake_type == 'METAL'): - internal_bake_type = 'EMIT' + bake_obj = True + + if bake_type == "METAL": + internal_bake_type = "EMIT" else: internal_bake_type = bake_type - if bake_obj: - logging.info(f'Baking {bake_type} pass') - bpy.ops.object.bake(type=internal_bake_type, pass_filter={'COLOR'}, save_mode='EXTERNAL') + if bake_obj: + logging.info(f"Baking {bake_type} pass") + bpy.ops.object.bake( + type=internal_bake_type, pass_filter={"COLOR"}, save_mode="EXTERNAL" + ) img.filepath_raw = str(file_path) if not export_usd: img.save() @@ -424,128 +482,153 @@ def bake_pass( for mat, img_node in bake_exclude_mats.items(): mat.node_tree.nodes.remove(img_node) -def bake_metal(obj, dest, img_size, export_usd): # metal baking is not really set up for node graphs w/ 2 mixed BSDFs. 
+ +def bake_metal( + obj, dest, img_size, export_usd +): # metal baking is not really set up for node graphs w/ 2 mixed BSDFs. metal_map_mats = [] for slot in obj.material_slots: mat = slot.material - if (mat is None or not mat.use_nodes): continue + if mat is None or not mat.use_nodes: + continue nodes = mat.node_tree.nodes - if nodes.get('Principled BSDF') and nodes.get('Material Output'): - principled_bsdf_node = nodes['Principled BSDF'] - outputNode = nodes['Material Output'] - else: continue - + if nodes.get("Principled BSDF") and nodes.get("Material Output"): + principled_bsdf_node = nodes["Principled BSDF"] + outputNode = nodes["Material Output"] + else: + continue + links = mat.node_tree.links - if len(principled_bsdf_node.inputs['Metallic'].links) != 0: - link = principled_bsdf_node.inputs['Metallic'].links[0] + if len(principled_bsdf_node.inputs["Metallic"].links) != 0: + link = principled_bsdf_node.inputs["Metallic"].links[0] from_socket = link.from_socket links.remove(link) links.new(outputNode.inputs[0], from_socket) metal_map_mats.append(mat) if len(metal_map_mats) != 0: - bake_pass(obj, dest, img_size, 'METAL', export_usd) - + bake_pass(obj, dest, img_size, "METAL", export_usd) + for mat in metal_map_mats: nodes = mat.node_tree.nodes outputNode = nodes["Material Output"] - principled_bsdf_node = nodes['Principled BSDF'] + principled_bsdf_node = nodes["Principled BSDF"] links.remove(outputNode.inputs[0].links[0]) links.new(outputNode.inputs[0], principled_bsdf_node.outputs[0]) -def bake_normals(obj, dest, img_size, export_usd): +def bake_normals(obj, dest, img_size, export_usd): bake_obj = False for slot in obj.material_slots: mat = slot.material - if (mat is None or not mat.use_nodes): continue + if mat is None or not mat.use_nodes: + continue nodes = mat.node_tree.nodes - if nodes.get('Material Output'): - outputNode = nodes['Material Output'] - else: continue - - if len(outputNode.inputs['Displacement'].links) != 0: + if nodes.get("Material Output"): + outputNode = nodes["Material Output"] + else: + continue + + if len(outputNode.inputs["Displacement"].links) != 0: bake_obj = True if bake_obj: - bake_pass(obj, dest, img_size, 'NORMAL', export_usd) - + bake_pass(obj, dest, img_size, "NORMAL", export_usd) + + def remove_params(mat, node_tree): nodes = node_tree.nodes paramDict = {} - if nodes.get('Material Output'): - output = nodes['Material Output'] - elif nodes.get('Group Output'): - output = nodes['Group Output'] + if nodes.get("Material Output"): + output = nodes["Material Output"] + elif nodes.get("Group Output"): + output = nodes["Group Output"] else: raise ValueError("Could not find material output node") - - if nodes.get('Principled BSDF') and output.inputs[0].links[0].from_node.bl_idname == 'ShaderNodeBsdfPrincipled': - principled_bsdf_node = nodes['Principled BSDF'] - metal = principled_bsdf_node.inputs['Metallic'].default_value # store metallic value and set to 0 - sheen = principled_bsdf_node.inputs['Sheen'].default_value - clearcoat = principled_bsdf_node.inputs['Clearcoat'].default_value - paramDict[mat.name] = {'Metallic': metal, 'Sheen': sheen, 'Clearcoat': clearcoat} - principled_bsdf_node.inputs['Metallic'].default_value = 0 - principled_bsdf_node.inputs['Sheen'].default_value = 0 - principled_bsdf_node.inputs['Clearcoat'].default_value = 0 + + if ( + nodes.get("Principled BSDF") + and output.inputs[0].links[0].from_node.bl_idname == "ShaderNodeBsdfPrincipled" + ): + principled_bsdf_node = nodes["Principled BSDF"] + metal = principled_bsdf_node.inputs[ + 
"Metallic" + ].default_value # store metallic value and set to 0 + sheen = principled_bsdf_node.inputs["Sheen"].default_value + clearcoat = principled_bsdf_node.inputs["Clearcoat"].default_value + paramDict[mat.name] = { + "Metallic": metal, + "Sheen": sheen, + "Clearcoat": clearcoat, + } + principled_bsdf_node.inputs["Metallic"].default_value = 0 + principled_bsdf_node.inputs["Sheen"].default_value = 0 + principled_bsdf_node.inputs["Clearcoat"].default_value = 0 return paramDict for node in nodes: - if node.type == 'GROUP': + if node.type == "GROUP": paramDict = remove_params(mat, node.node_tree) if len(paramDict) != 0: return paramDict - + return paramDict - + + def process_interfering_params(obj): for slot in obj.material_slots: mat = slot.material - if (mat is None or not mat.use_nodes): continue - paramDict = remove_params(mat, mat.node_tree) + if mat is None or not mat.use_nodes: + continue + paramDict = remove_params(mat, mat.node_tree) return paramDict + def skipBake(obj): if not obj.data.materials: logging.info("No material on mesh, skipping...") - return True - + return True + if len(obj.data.vertices) == 0: logging.info("Mesh has no vertices, skipping ...") return True return False + def triangulate_meshes(): logging.debug("Triangulating Meshes") for obj in bpy.context.scene.objects: - if obj.type == 'MESH': + if obj.type == "MESH": view_state = obj.hide_viewport obj.hide_viewport = False bpy.context.view_layer.objects.active = obj obj.select_set(True) - bpy.ops.object.mode_set(mode='EDIT') - bpy.ops.mesh.select_all(action='SELECT') + bpy.ops.object.mode_set(mode="EDIT") + bpy.ops.mesh.select_all(action="SELECT") logging.debug(f"Triangulating {obj}") bpy.ops.mesh.quads_convert_to_tris() - bpy.ops.object.mode_set(mode='OBJECT') + bpy.ops.object.mode_set(mode="OBJECT") obj.select_set(False) obj.hide_viewport = view_state + def adjust_wattages(): logging.info("Adjusting light wattage") for obj in bpy.context.scene.objects: - if obj.type == 'LIGHT' and obj.data.type == 'POINT': + if obj.type == "LIGHT" and obj.data.type == "POINT": light = obj.data - if hasattr(light, 'energy') and hasattr(light, 'shadow_soft_size'): + if hasattr(light, "energy") and hasattr(light, "shadow_soft_size"): X = light.energy r = light.shadow_soft_size # candelas * 1000 / (4 * math.pi * r**2). 
additionally units come out of blender at 1/100 scale - new_wattage = (X * 20 / (4 * math.pi)) * 1000 / (4 * math.pi * r**2) * 100 + new_wattage = ( + (X * 20 / (4 * math.pi)) * 1000 / (4 * math.pi * r**2) * 100 + ) light.energy = new_wattage + def set_center_of_mass(): logging.info("Resetting center of mass of objects") for obj in bpy.context.scene.objects: @@ -554,24 +637,27 @@ def set_center_of_mass(): obj.hide_viewport = False obj.select_set(True) bpy.context.view_layer.objects.active = obj - bpy.ops.object.origin_set(type='ORIGIN_GEOMETRY', center='MEDIAN') - obj.select_set(False) + bpy.ops.object.origin_set(type="ORIGIN_GEOMETRY", center="MEDIAN") + obj.select_set(False) obj.hide_viewport = view_state + def bake_object(obj, dest, img_size, export_usd): if not uv_unwrap(obj): return - bpy.ops.object.select_all(action='DESELECT') - obj.select_set(True) + bpy.ops.object.select_all(action="DESELECT") + obj.select_set(True) for slot in obj.material_slots: mat = slot.material if mat is not None: - slot.material = mat.copy() # we duplicate in the case of distinct meshes sharing materials + slot.material = ( + mat.copy() + ) # we duplicate in the case of distinct meshes sharing materials + + process_glass_materials(obj, export_usd) - process_glass_materials(obj, export_usd) - bake_metal(obj, dest, img_size, export_usd) bake_normals(obj, dest, img_size, export_usd) @@ -581,29 +667,29 @@ def bake_object(obj, dest, img_size, export_usd): bake_pass(obj, dest, img_size, bake_type, export_usd) apply_baked_tex(obj, paramDict) - + obj.select_set(False) -def bake_scene(folderPath: Path, image_res, vertex_colors, export_usd): +def bake_scene(folderPath: Path, image_res, vertex_colors, export_usd): for obj in bpy.data.objects: logging.info("---------------------------") logging.info(obj.name) - - if obj.type != 'MESH' or obj not in list(bpy.context.view_layer.objects): + + if obj.type != "MESH" or obj not in list(bpy.context.view_layer.objects): logging.info("Not mesh, skipping ...") continue - if skipBake(obj): + if skipBake(obj): continue - - if format == "stl": + + if format == "stl": continue - obj.hide_render = False + obj.hide_render = False obj.hide_viewport = False - - if vertex_colors: + + if vertex_colors: bakeVertexColors(obj) else: bake_object(obj, folderPath, image_res, export_usd) @@ -611,148 +697,182 @@ def bake_scene(folderPath: Path, image_res, vertex_colors, export_usd): obj.hide_render = True obj.hide_viewport = True -def run_blender_export(exportPath: Path, format: str, vertex_colors: bool, individual_export: bool): +def run_blender_export( + exportPath: Path, format: str, vertex_colors: bool, individual_export: bool +): assert exportPath.parent.exists() - exportPath = str(exportPath) - + exportPath = str(exportPath) + if format == "obj": if vertex_colors: - bpy.ops.wm.obj_export(filepath = exportPath, export_colors=True, export_eval_mode='DAG_EVAL_RENDER', export_selected_objects=individual_export) - else: - bpy.ops.wm.obj_export(filepath = exportPath, path_mode='COPY', export_materials=True, export_pbr_extensions=True, export_eval_mode='DAG_EVAL_RENDER', export_selected_objects=individual_export) + bpy.ops.wm.obj_export( + filepath=exportPath, + export_colors=True, + export_eval_mode="DAG_EVAL_RENDER", + export_selected_objects=individual_export, + ) + else: + bpy.ops.wm.obj_export( + filepath=exportPath, + path_mode="COPY", + export_materials=True, + export_pbr_extensions=True, + export_eval_mode="DAG_EVAL_RENDER", + export_selected_objects=individual_export, + ) if format == 
"fbx": if vertex_colors: - bpy.ops.export_scene.fbx(filepath = exportPath, colors_type='SRGB', use_selection = individual_export) + bpy.ops.export_scene.fbx( + filepath=exportPath, colors_type="SRGB", use_selection=individual_export + ) else: - bpy.ops.export_scene.fbx(filepath = exportPath, path_mode='COPY', embed_textures = True, use_selection=individual_export) - - if format == "stl": - bpy.ops.export_mesh.stl(filepath = exportPath, use_selection = individual_export) - - if format == "ply": - bpy.ops.wm.ply_export(filepath = exportPath, export_selected_objects = individual_export) + bpy.ops.export_scene.fbx( + filepath=exportPath, + path_mode="COPY", + embed_textures=True, + use_selection=individual_export, + ) + + if format == "stl": + bpy.ops.export_mesh.stl(filepath=exportPath, use_selection=individual_export) + + if format == "ply": + bpy.ops.wm.ply_export( + filepath=exportPath, export_selected_objects=individual_export + ) - if format in ["usda", "usdc"]: + if format in ["usda", "usdc"]: bpy.ops.wm.usd_export( - filepath=exportPath, - export_textures=True, - #use_instancing=True, + filepath=exportPath, + export_textures=True, + # use_instancing=True, overwrite_textures=True, - selected_objects_only=individual_export, - root_prim_path='/World' + selected_objects_only=individual_export, + root_prim_path="/World", ) + def export_scene( - input_blend: Path, - output_folder: Path, - pipeline_folder=None, + input_blend: Path, + output_folder: Path, + pipeline_folder=None, task_uniqname=None, **kwargs, ): bpy.ops.wm.open_mainfile(filepath=str(input_blend)) - folder = output_folder/f"export_{input_blend.name}" + folder = output_folder / f"export_{input_blend.name}" folder.mkdir(exist_ok=True, parents=True) result = export_curr_scene(folder, **kwargs) - if pipeline_folder is not None and task_uniqname is not None : + if pipeline_folder is not None and task_uniqname is not None: (pipeline_folder / "logs" / f"FINISH_{task_uniqname}").touch() return result -# side effects: will remove parents of inputted obj and clean its name, hides viewport of all objects -def export_single_obj(obj: bpy.types.Object, output_folder: Path, format='usdc', image_res= 1024, vertex_colors=False): +# side effects: will remove parents of inputted obj and clean its name, hides viewport of all objects +def export_single_obj( + obj: bpy.types.Object, + output_folder: Path, + format="usdc", + image_res=1024, + vertex_colors=False, +): export_usd = format in ["usda", "usdc"] - + export_folder = output_folder export_folder.mkdir(exist_ok=True) - export_file = export_folder/output_folder.with_suffix(f'.{format}').name + export_file = export_folder / output_folder.with_suffix(f".{format}").name logging.info(f"Exporting to directory {export_folder=}") - - remove_obj_parents(obj) + + remove_obj_parents(obj) rename_all_meshes(obj) collection_views, obj_views = update_visibility() - bpy.context.scene.render.engine = 'CYCLES' - bpy.context.scene.cycles.device = 'GPU' - bpy.context.scene.cycles.samples = 1 # choose render sample + bpy.context.scene.render.engine = "CYCLES" + bpy.context.scene.cycles.device = "GPU" + bpy.context.scene.cycles.samples = 1 # choose render sample # Set the tile size bpy.context.scene.cycles.tile_x = image_res bpy.context.scene.cycles.tile_y = image_res - if obj.type != 'MESH' or obj not in list(bpy.context.view_layer.objects): + if obj.type != "MESH" or obj not in list(bpy.context.view_layer.objects): raise ValueError("Object not mesh") - + if export_usd: apply_all_modifiers(obj) else: 
realizeInstances(obj) apply_all_modifiers(obj) - if not skipBake(obj) and format != "stl": - if vertex_colors: + if not skipBake(obj) and format != "stl": + if vertex_colors: bakeVertexColors(obj) else: - obj.hide_render = False + obj.hide_render = False obj.hide_viewport = False - bake_object(obj, export_folder/'textures', image_res, export_usd) + bake_object(obj, export_folder / "textures", image_res, export_usd) obj.hide_render = True obj.hide_viewport = True - + for collection, status in collection_views.items(): collection.hide_render = status for obj, status in obj_views.items(): obj.hide_render = status - + clean_names(obj) - + old_loc = obj.location.copy() obj.location = (0, 0, 0) - if obj.type != 'MESH' or obj.hide_render or len(obj.data.vertices) == 0 or obj not in list(bpy.context.view_layer.objects): + if ( + obj.type != "MESH" + or obj.hide_render + or len(obj.data.vertices) == 0 + or obj not in list(bpy.context.view_layer.objects) + ): raise ValueError("Object is not mesh or hidden from render") - export_subfolder = export_folder/obj.name + export_subfolder = export_folder / obj.name export_subfolder.mkdir(exist_ok=True) - export_file = export_subfolder/f'{obj.name}.{format}' + export_file = export_subfolder / f"{obj.name}.{format}" logging.info(f"Exporting file to {export_file=}") obj.hide_viewport = False obj.select_set(True) run_blender_export(export_file, format, vertex_colors, individual_export=True) - obj.select_set(False) + obj.select_set(False) obj.location = old_loc return export_file + @gin.configurable def export_curr_scene( output_folder: Path, - format='usdc', - image_res= 1024, - vertex_colors=False, + format="usdc", + image_res=1024, + vertex_colors=False, individual_export=False, omniverse_export=False, - pipeline_folder=None, - task_uniqname=None + pipeline_folder=None, + task_uniqname=None, ) -> Path: - - export_usd = format in ["usda", "usdc"] - + export_folder = output_folder export_folder.mkdir(exist_ok=True) - export_file = export_folder/output_folder.with_suffix(f'.{format}').name + export_file = export_folder / output_folder.with_suffix(f".{format}").name logging.info(f"Exporting to directory {export_folder=}") - + remove_obj_parents() - delete_objects() + delete_objects() triangulate_meshes() if omniverse_export: split_glass_mats() @@ -760,28 +880,28 @@ def export_curr_scene( scatter_cols = [] if export_usd: - if bpy.data.collections.get("scatter"): + if bpy.data.collections.get("scatter"): scatter_cols.append(bpy.data.collections["scatter"]) if bpy.data.collections.get("scatters"): scatter_cols.append(bpy.data.collections["scatters"]) for col in scatter_cols: for obj in col.all_objects: - remove_shade_smooth(obj) + remove_shade_smooth(obj) # remove 0 polygon meshes except for scatters # if export_usd: # for obj in bpy.data.objects: # if obj.type == 'MESH' and len(obj.data.polygons) == 0: # if scatter_cols is not None: - # if any(x in scatter_cols for x in obj.users_collection): + # if any(x in scatter_cols for x in obj.users_collection): # continue - # logging.info(f"{obj.name} has no faces, removing...") - # bpy.data.objects.remove(obj, do_unlink=True) - + # logging.info(f"{obj.name} has no faces, removing...") + # bpy.data.objects.remove(obj, do_unlink=True) + collection_views, obj_views = update_visibility() for obj in bpy.data.objects: - if obj.type != 'MESH' or obj not in list(bpy.context.view_layer.objects): + if obj.type != "MESH" or obj not in list(bpy.context.view_layer.objects): continue if export_usd: apply_all_modifiers(obj) @@ -789,19 
+909,19 @@ def export_curr_scene( realizeInstances(obj) apply_all_modifiers(obj) - bpy.context.scene.render.engine = 'CYCLES' - bpy.context.scene.cycles.device = 'GPU' - bpy.context.scene.cycles.samples = 1 # choose render sample + bpy.context.scene.render.engine = "CYCLES" + bpy.context.scene.cycles.device = "GPU" + bpy.context.scene.cycles.samples = 1 # choose render sample # Set the tile size bpy.context.scene.cycles.tile_x = image_res bpy.context.scene.cycles.tile_y = image_res # iterate through all objects and bake them bake_scene( - folderPath=export_folder/'textures', - image_res=image_res, - vertex_colors=vertex_colors, - export_usd=export_usd + folderPath=export_folder / "textures", + image_res=image_res, + vertex_colors=vertex_colors, + export_usd=export_usd, ) for collection, status in collection_views.items(): @@ -809,85 +929,95 @@ def export_curr_scene( for obj, status in obj_views.items(): obj.hide_render = status - + clean_names() for obj in bpy.data.objects: obj.hide_viewport = obj.hide_render - + if omniverse_export: adjust_wattages() set_center_of_mass() - # remove 0 polygon meshes + # remove 0 polygon meshes for obj in bpy.data.objects: - if obj.type == 'MESH' and len(obj.data.polygons) == 0: - logging.info(f"{obj.name} has no faces, removing...") + if obj.type == "MESH" and len(obj.data.polygons) == 0: + logging.info(f"{obj.name} has no faces, removing...") bpy.data.objects.remove(obj, do_unlink=True) if individual_export: - bpy.ops.object.select_all(action='SELECT') - bpy.ops.object.location_clear() # send all objects to (0,0,0) - bpy.ops.object.select_all(action='DESELECT') + bpy.ops.object.select_all(action="SELECT") + bpy.ops.object.location_clear() # send all objects to (0,0,0) + bpy.ops.object.select_all(action="DESELECT") for obj in bpy.data.objects: - if obj.type != 'MESH' or obj.hide_render or len(obj.data.vertices) == 0 or obj not in list(bpy.context.view_layer.objects): + if ( + obj.type != "MESH" + or obj.hide_render + or len(obj.data.vertices) == 0 + or obj not in list(bpy.context.view_layer.objects) + ): continue - export_subfolder = export_folder/obj.name + export_subfolder = export_folder / obj.name export_subfolder.mkdir(exist_ok=True) - export_file = export_subfolder/f'{obj.name}.{format}' + export_file = export_subfolder / f"{obj.name}.{format}" logging.info(f"Exporting file to {export_file=}") obj.hide_viewport = False obj.select_set(True) run_blender_export(export_file, format, vertex_colors, individual_export) - obj.select_set(False) + obj.select_set(False) else: logging.info(f"Exporting file to {export_file=}") run_blender_export(export_file, format, vertex_colors, individual_export) return export_file + def main(args): args.output_folder.mkdir(exist_ok=True) - logging.basicConfig(filename= args.output_folder/'export_logs.log', level=logging.DEBUG, filemode = "w+") - + logging.basicConfig( + filename=args.output_folder / "export_logs.log", + level=logging.DEBUG, + filemode="w+", + ) + targets = sorted(list(args.input_folder.iterdir())) for blendfile in targets: + if blendfile.stem == "solve_state": + shutil.copy(blendfile, args.output_folder / "solve_state.json") - if blendfile.stem == 'solve_state': - shutil.copy(blendfile, args.output_folder/'solve_state.json') - - if not blendfile.suffix == '.blend': - print(f'Skipping non-blend file {blendfile}') + if not blendfile.suffix == ".blend": + print(f"Skipping non-blend file {blendfile}") continue - + folder = export_scene( - blendfile, - args.output_folder, - format=args.format, - 
image_res=args.resolution, + blendfile, + args.output_folder, + format=args.format, + image_res=args.resolution, vertex_colors=args.vertex_colors, individual_export=args.individual, - omniverse_export=args.omniverse + omniverse_export=args.omniverse, ) # wanted to use shutil here but kept making corrupted files - subprocess.call(['zip', '-r', str(folder.with_suffix('.zip')), str(folder)]) + subprocess.call(["zip", "-r", str(folder.with_suffix(".zip")), str(folder)]) bpy.ops.wm.quit_blender() + def make_args(): parser = argparse.ArgumentParser() - parser.add_argument('--input_folder', type=Path) - parser.add_argument('--output_folder', type=Path) + parser.add_argument("--input_folder", type=Path) + parser.add_argument("--output_folder", type=Path) - parser.add_argument('-f', '--format', type=str, choices=FORMAT_CHOICES) + parser.add_argument("-f", "--format", type=str, choices=FORMAT_CHOICES) + + parser.add_argument("-v", "--vertex_colors", action="store_true") + parser.add_argument("-r", "--resolution", default=1024, type=int) + parser.add_argument("-i", "--individual", action="store_true") + parser.add_argument("-o", "--omniverse", action="store_true") - parser.add_argument('-v', '--vertex_colors', action = 'store_true') - parser.add_argument('-r', '--resolution', default= 1024, type=int) - parser.add_argument('-i', '--individual', action = 'store_true') - parser.add_argument('-o', '--omniverse', action = 'store_true') - args = parser.parse_args() if args.format not in FORMAT_CHOICES: @@ -896,11 +1026,12 @@ def make_args(): if args.vertex_colors and args.format not in ["ply", "fbx", "obj"]: raise ValueError("File format does not support vertex colors.") - if (args.format == "ply" and not args.vertex_colors): + if args.format == "ply" and not args.vertex_colors: raise ValueError(".ply export must use vertex colors.") return args -if __name__ == '__main__': + +if __name__ == "__main__": args = make_args() main(args) diff --git a/infinigen/tools/ground_truth/bounding_boxes_3d.py b/infinigen/tools/ground_truth/bounding_boxes_3d.py index 8a28bd8aa..eb97b9dac 100644 --- a/infinigen/tools/ground_truth/bounding_boxes_3d.py +++ b/infinigen/tools/ground_truth/bounding_boxes_3d.py @@ -11,11 +11,17 @@ import cv2 import numpy as np -from einops import pack, rearrange from imageio.v3 import imread, imwrite from numpy.linalg import inv from tqdm import tqdm +try: + from einops import pack, rearrange +except ImportError: + raise ImportError( + "GT visualization requires `einops`. Please install optional extras via `pip install .[vis]`." 
+ ) + from ..compress_masks import recover from ..dataset_loader import get_frame_path @@ -27,113 +33,152 @@ - B.png # Original image + 3D-bounding-boxes for the provided query """ + def transform(T, p): - assert T.shape == (4,4) - p = T[:3,:3] @ p.T + assert T.shape == (4, 4) + p = T[:3, :3] @ p.T return (p + T[:3, [3]]).T + def calc_bbox_pts(min_pt, max_pt): min_x, min_y, min_z = min_pt max_x, max_y, max_z = max_pt - points = np.asarray([ # 8 x 2 - [min_x, min_y, min_z], - [max_x, min_y, min_z], - [min_x, max_y, min_z], - [max_x, max_y, min_z], - [min_x, min_y, max_z], - [max_x, min_y, max_z], - [min_x, max_y, max_z], - [max_x, max_y, max_z], - ]) - - faces = np.asarray([ # 6 x 4 - [0, 1, 2, 3], - [4, 5, 6, 7], - [0, 1, 4, 5], - [2, 3, 6, 7], - [0, 2, 4, 6], - [1, 3, 5, 7], - ]) - faces = faces[:,[0,1,3,2]] + points = np.asarray( + [ # 8 x 2 + [min_x, min_y, min_z], + [max_x, min_y, min_z], + [min_x, max_y, min_z], + [max_x, max_y, min_z], + [min_x, min_y, max_z], + [max_x, min_y, max_z], + [min_x, max_y, max_z], + [max_x, max_y, max_z], + ] + ) + + faces = np.asarray( + [ # 6 x 4 + [0, 1, 2, 3], + [4, 5, 6, 7], + [0, 1, 4, 5], + [2, 3, 6, 7], + [0, 2, 4, 6], + [1, 3, 5, 7], + ] + ) + faces = faces[:, [0, 1, 3, 2]] return points, faces + # Deterministic, but probably slow. Good enough for visualization. def arr2color(e): s = np.random.RandomState(np.array(e, dtype=np.uint32)) - return (np.asarray(colorsys.hsv_to_rgb(s.uniform(0, 1), s.uniform(0.1, 1), 1)) * 255).astype(np.uint8) + return ( + np.asarray(colorsys.hsv_to_rgb(s.uniform(0, 1), s.uniform(0.1, 1), 1)) * 255 + ).astype(np.uint8) -if __name__ == "__main__": +if __name__ == "__main__": parser = argparse.ArgumentParser() - parser.add_argument('folder', type=Path) - parser.add_argument('frame', type=int) - parser.add_argument('--query', type=str, default=None) - parser.add_argument('--output', type=Path, default=Path("testbed")) + parser.add_argument("folder", type=Path) + parser.add_argument("frame", type=int) + parser.add_argument("--query", type=str, default=None) + parser.add_argument("--output", type=Path, default=Path("testbed")) args = parser.parse_args() - object_segmentation_mask = recover(np.load(get_frame_path(args.folder, 0, args.frame, 'ObjectSegmentation_npz'))) - instance_segmentation_mask = recover(np.load(get_frame_path(args.folder, 0, args.frame, 'InstanceSegmentation_npz'))) + object_segmentation_mask = recover( + np.load(get_frame_path(args.folder, 0, args.frame, "ObjectSegmentation_npz")) + ) + instance_segmentation_mask = recover( + np.load(get_frame_path(args.folder, 0, args.frame, "InstanceSegmentation_npz")) + ) image = imread(get_frame_path(args.folder, 0, args.frame, "Image_png")) - object_json = json.loads(get_frame_path(args.folder, 0, args.frame, 'Objects_json').read_text()) - camview = np.load(get_frame_path(args.folder, 0, args.frame, 'camview_npz')) + object_json = json.loads( + get_frame_path(args.folder, 0, args.frame, "Objects_json").read_text() + ) + camview = np.load(get_frame_path(args.folder, 0, args.frame, "camview_npz")) # Identify objects visible in the image unique_object_idxs = set(np.unique(object_segmentation_mask)) - present_objects = [obj for obj in object_json if (obj['object_index'] in unique_object_idxs)] + present_objects = [ + obj for obj in object_json if (obj["object_index"] in unique_object_idxs) + ] # Complain if the query isn't valid/present - unique_names = sorted({q['name'] for q in present_objects}) + unique_names = sorted({q["name"] for q in present_objects}) if 
args.query is None: - print('`--query` not specified. Choices are:') + print("`--query` not specified. Choices are:") for qn in unique_names: print(f"- {qn}") sys.exit(0) elif not any((args.query.lower() in name.lower()) for name in unique_names): - print(f'"{args.query}" doesn\'t match any object names in this image. Choices are:') + print( + f'"{args.query}" doesn\'t match any object names in this image. Choices are:' + ) for qn in unique_names: print(f"- {qn}") sys.exit(0) H, W, _ = image.shape - camera_pose = camview['T'] - K = camview['K'] + camera_pose = camview["T"] + K = camview["K"] # Assign unique colors to each object instance - combined_mask, _ = pack([object_segmentation_mask, instance_segmentation_mask], 'h w *') - combined_mask = rearrange(combined_mask, 'h w d -> (h w) d') - visible_instances = np.unique(combined_mask, axis=0) # this line is a bottleneck + combined_mask, _ = pack( + [object_segmentation_mask, instance_segmentation_mask], "h w *" + ) + combined_mask = rearrange(combined_mask, "h w d -> (h w) d") + visible_instances = np.unique(combined_mask, axis=0) # this line is a bottleneck visible_instances = {tuple(row) for row in visible_instances} boxes_to_draw = [] - for obj in tqdm(present_objects, desc='Identifying boxes to draw'): - if args.query.lower() in obj['name'].lower(): - for instance_id, model_mat in zip(obj['instance_ids'], np.asarray(obj['model_matrices'])): - if ((obj['object_index'],) + tuple(instance_id)) in visible_instances: - boxes_to_draw.append(dict(model_mat=model_mat, min=obj['min'], max=obj['max'], color=arr2color(instance_id).tolist())) + for obj in tqdm(present_objects, desc="Identifying boxes to draw"): + if args.query.lower() in obj["name"].lower(): + for instance_id, model_mat in zip( + obj["instance_ids"], np.asarray(obj["model_matrices"]) + ): + if ((obj["object_index"],) + tuple(instance_id)) in visible_instances: + boxes_to_draw.append( + dict( + model_mat=model_mat, + min=obj["min"], + max=obj["max"], + color=arr2color(instance_id).tolist(), + ) + ) canvas = np.copy(image) - for bbox in tqdm(boxes_to_draw, desc='Drawing boxes'): - if bbox['min'] is None: # Object has no volume (e.g. a light/camera) + for bbox in tqdm(boxes_to_draw, desc="Drawing boxes"): + if bbox["min"] is None: # Object has no volume (e.g. 
a light/camera) continue - min_pt = np.asarray(bbox['min']) - max_pt = np.asarray(bbox['max']) + min_pt = np.asarray(bbox["min"]) + max_pt = np.asarray(bbox["max"]) size = np.linalg.norm(max_pt - min_pt) bbox_points, faces = calc_bbox_pts(min_pt, max_pt) - bbox_points_wc = transform(bbox['model_mat'], bbox_points) + bbox_points_wc = transform(bbox["model_mat"], bbox_points) bbox_points_cc = transform(inv(camera_pose), bbox_points_wc) bbox_points_h = (K @ bbox_points_cc.T).T - bbox_points_uv = (bbox_points_h[:,:2] / bbox_points_h[:,[2]]).astype(int) - if bbox_points_h[:,2].min() < 0: # bbox goes behind the camera + bbox_points_uv = (bbox_points_h[:, :2] / bbox_points_h[:, [2]]).astype(int) + if bbox_points_h[:, 2].min() < 0: # bbox goes behind the camera continue points_in_faces_uv = bbox_points_uv[faces.flatten()].reshape((6, 4, 2)) - sign = np.cross(points_in_faces_uv[:, 1] - points_in_faces_uv[:, 0], points_in_faces_uv[:, 2] - points_in_faces_uv[:, 0]) + sign = np.cross( + points_in_faces_uv[:, 1] - points_in_faces_uv[:, 0], + points_in_faces_uv[:, 2] - points_in_faces_uv[:, 0], + ) sign = sign * np.array([-1, 1, 1, -1, -1, 1]) for is_visible, indices in zip(sign < 0, faces): if is_visible: for i in range(4): - canvas = cv2.line(canvas, bbox_points_uv[indices[i]], bbox_points_uv[indices[(i+1)%4]], color=bbox['color'], thickness=1) + canvas = cv2.line( + canvas, + bbox_points_uv[indices[i]], + bbox_points_uv[indices[(i + 1) % 4]], + color=bbox["color"], + thickness=1, + ) args.output.mkdir(exist_ok=True) imwrite(args.output / "A.png", image) diff --git a/infinigen/tools/ground_truth/depth_to_normals.py b/infinigen/tools/ground_truth/depth_to_normals.py index bda0407ba..545b89598 100644 --- a/infinigen/tools/ground_truth/depth_to_normals.py +++ b/infinigen/tools/ground_truth/depth_to_normals.py @@ -8,14 +8,20 @@ from pathlib import Path import cv2 +import imageio import numpy as np -from einops import einsum from imageio.v3 import imread, imwrite from numpy.linalg import inv -import imageio from ..dataset_loader import get_frame_path +try: + from einops import einsum +except ImportError: + raise ImportError( + "GT visualization requires `einops`. Please install optional extras via `pip install .[vis]`." 
+ ) + """ Usage: python -m tools.ground_truth.depth_to_normals Output: @@ -25,29 +31,31 @@ - C.png # Surface normals from geometry """ + def unproject(depth, K): H, W = depth.shape - x, y = np.meshgrid(np.arange(W), np.arange(H), indexing='xy') + x, y = np.meshgrid(np.arange(W), np.arange(H), indexing="xy") img_coords = np.stack((x, y, np.ones_like(x)), axis=-1).astype(np.float64) - return einsum(depth, img_coords, inv(K), 'H W, H W j, i j -> H W i') + return einsum(depth, img_coords, inv(K), "H W, H W j, i j -> H W i") + def normalize(v): return v / np.linalg.norm(v, axis=-1, keepdims=True) -if __name__ == "__main__": +if __name__ == "__main__": parser = argparse.ArgumentParser() - parser.add_argument('folder', type=Path) - parser.add_argument('frame', type=int) - parser.add_argument('--output', type=Path, default=Path("testbed")) + parser.add_argument("folder", type=Path) + parser.add_argument("frame", type=int) + parser.add_argument("--output", type=Path, default=Path("testbed")) args = parser.parse_args() args.output.mkdir(exist_ok=True) - depth_path = get_frame_path(args.folder, 0, args.frame, 'Depth_npy') - normal_path = get_frame_path(args.folder, 0, args.frame, 'SurfaceNormal_png') - image_path = get_frame_path(args.folder, 0, args.frame, 'Image_png') - camview_path = get_frame_path(args.folder, 0, args.frame, 'camview_npz') + depth_path = get_frame_path(args.folder, 0, args.frame, "Depth_npy") + normal_path = get_frame_path(args.folder, 0, args.frame, "SurfaceNormal_png") + image_path = get_frame_path(args.folder, 0, args.frame, "Image_png") + camview_path = get_frame_path(args.folder, 0, args.frame, "camview_npz") assert depth_path.exists(), depth_path assert image_path.exists(), image_path assert camview_path.exists(), camview_path @@ -55,21 +63,21 @@ def normalize(v): image = imread(image_path) depth = np.load(depth_path) - K = np.load(camview_path)['K'] + K = np.load(camview_path)["K"] cam_coords = unproject(depth, K) - cam_coords = cam_coords * np.array([1., -1., -1]) + cam_coords = cam_coords * np.array([1.0, -1.0, -1]) mask = ~np.isinf(depth) depth[~mask] = -1 - vy = normalize(cam_coords[1:,1:] - cam_coords[:-1,1:]) - vx = normalize(cam_coords[1:,1:] - cam_coords[1:,:-1]) + vy = normalize(cam_coords[1:, 1:] - cam_coords[:-1, 1:]) + vx = normalize(cam_coords[1:, 1:] - cam_coords[1:, :-1]) cross_prod = np.cross(vy, vx) normals = normalize(cross_prod) - normals[~mask[1:,1:]] = 0 + normals[~mask[1:, 1:]] = 0 - normals_color = np.round((normals + 1) * (255/2)).astype(np.uint8) + normals_color = np.round((normals + 1) * (255 / 2)).astype(np.uint8) target_shape = imageio.imread(normal_path).shape[:2][::-1] normals_color = cv2.resize(normals_color, target_shape) @@ -78,4 +86,4 @@ def normalize(v): imwrite(args.output / "B.png", normals_color) print(f'Wrote {args.output / "B.png"}') shutil.copyfile(normal_path, args.output / "C.png") - print(f'Wrote {args.output / "C.png"}') \ No newline at end of file + print(f'Wrote {args.output / "C.png"}') diff --git a/infinigen/tools/ground_truth/optical_flow_warp.py b/infinigen/tools/ground_truth/optical_flow_warp.py index 6be8ac2bd..330ff4e14 100644 --- a/infinigen/tools/ground_truth/optical_flow_warp.py +++ b/infinigen/tools/ground_truth/optical_flow_warp.py @@ -22,15 +22,14 @@ """ if __name__ == "__main__": - parser = argparse.ArgumentParser() - parser.add_argument('folder', type=Path) - parser.add_argument('frame', type=int) - parser.add_argument('--output', type=Path, default=Path("testbed")) + parser.add_argument("folder", type=Path) + 
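For reference, a minimal NumPy-only sketch of the pinhole unprojection and cross-product normal estimation that depth_to_normals.py performs above; plain matrix products stand in for einops.einsum, and the depth map and intrinsics below are hypothetical placeholders rather than values produced by the tool.

# Sketch: depth -> camera-space points -> surface normals (NumPy only).
import numpy as np

def unproject(depth, K):
    # Back-project every pixel: X_cam = depth * K^-1 @ [u, v, 1]^T
    H, W = depth.shape
    u, v = np.meshgrid(np.arange(W), np.arange(H), indexing="xy")
    pix = np.stack((u, v, np.ones_like(u)), axis=-1).astype(np.float64)  # (H, W, 3)
    return depth[..., None] * (pix @ np.linalg.inv(K).T)

def normalize(v):
    return v / np.linalg.norm(v, axis=-1, keepdims=True)

if __name__ == "__main__":
    # Hypothetical inputs: a gently sloping depth map and a simple intrinsic matrix.
    H, W = 64, 64
    K = np.array([[50.0, 0.0, W / 2], [0.0, 50.0, H / 2], [0.0, 0.0, 1.0]])
    depth = 2.0 + 0.01 * np.arange(W)[None, :].repeat(H, axis=0)

    cam = unproject(depth, K) * np.array([1.0, -1.0, -1.0])  # same axis flip as the tool
    vy = normalize(cam[1:, 1:] - cam[:-1, 1:])  # finite difference along rows
    vx = normalize(cam[1:, 1:] - cam[1:, :-1])  # finite difference along columns
    normals = normalize(np.cross(vy, vx))
    normals_color = np.round((normals + 1) * (255 / 2)).astype(np.uint8)
    print(normals_color.shape)  # (63, 63, 3)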
parser.add_argument("frame", type=int) + parser.add_argument("--output", type=Path, default=Path("testbed")) args = parser.parse_args() flow3d_path = get_frame_path(args.folder, 0, args.frame, "Flow3D_npy") image1_path = get_frame_path(args.folder, 0, args.frame, "Image_png") - image2_path = get_frame_path(args.folder, 0, args.frame+1, "Image_png") + image2_path = get_frame_path(args.folder, 0, args.frame + 1, "Image_png") assert flow3d_path.exists() assert image1_path.exists() assert image2_path.exists() @@ -39,9 +38,15 @@ image1 = imread(image1_path) H, W, _ = image1.shape - flow2d = cv2.resize(np.load(flow3d_path), dsize=(W, H), interpolation=cv2.INTER_LINEAR)[...,:2] - new_coords = flow2d + np.stack(np.meshgrid(np.arange(W), np.arange(H), indexing='xy'), axis=-1) - warped_image = cv2.remap(image2, new_coords.astype(np.float32), None, interpolation=cv2.INTER_LINEAR) + flow2d = cv2.resize( + np.load(flow3d_path), dsize=(W, H), interpolation=cv2.INTER_LINEAR + )[..., :2] + new_coords = flow2d + np.stack( + np.meshgrid(np.arange(W), np.arange(H), indexing="xy"), axis=-1 + ) + warped_image = cv2.remap( + image2, new_coords.astype(np.float32), None, interpolation=cv2.INTER_LINEAR + ) args.output.mkdir(exist_ok=True) imwrite(args.output / "A.png", image1) @@ -49,4 +54,4 @@ imwrite(args.output / "C.png", image2) print(f'Wrote {args.output / "C.png"}') imwrite(args.output / "B.png", warped_image) - print(f'Wrote {args.output / "B.png"}') \ No newline at end of file + print(f'Wrote {args.output / "B.png"}') diff --git a/infinigen/tools/ground_truth/rigid_warp.py b/infinigen/tools/ground_truth/rigid_warp.py index 2fcbdd30b..517297d10 100644 --- a/infinigen/tools/ground_truth/rigid_warp.py +++ b/infinigen/tools/ground_truth/rigid_warp.py @@ -8,12 +8,18 @@ import cv2 import numpy as np -from einops import einsum from imageio.v3 import imread, imwrite from numpy.linalg import inv from ..dataset_loader import get_frame_path +try: + from einops import einsum +except ImportError: + raise ImportError( + "GT visualization requires `einops`. Please install optional extras via `pip install .[vis]`." 
+ ) + """ Usage: python -m tools.ground_truth.rigid_warp Output: @@ -23,52 +29,58 @@ - C.png # Image at frame j """ + def transform(T, p): - assert T.shape == (4,4) - return einsum(p, T[:3,:3], 'H W j, i j -> H W i') + T[:3, 3] + assert T.shape == (4, 4) + return einsum(p, T[:3, :3], "H W j, i j -> H W i") + T[:3, 3] + def from_homog(x): - return x[...,:-1] / x[...,[-1]] + return x[..., :-1] / x[..., [-1]] + def reproject(depth1, pose1, pose2, K1, K2): H, W = depth1.shape - x, y = np.meshgrid(np.arange(W), np.arange(H), indexing='xy') + x, y = np.meshgrid(np.arange(W), np.arange(H), indexing="xy") img_1_coords = np.stack((x, y, np.ones_like(x)), axis=-1).astype(np.float64) - cam1_coords = einsum(depth1, img_1_coords, inv(K1), 'H W, H W j, i j -> H W i') + cam1_coords = einsum(depth1, img_1_coords, inv(K1), "H W, H W j, i j -> H W i") rel_pose = inv(pose2) @ pose1 cam2_coords = transform(rel_pose, cam1_coords) - return from_homog(einsum(cam2_coords, K2, 'H W j, i j -> H W i')) + return from_homog(einsum(cam2_coords, K2, "H W j, i j -> H W i")) if __name__ == "__main__": - parser = argparse.ArgumentParser() - parser.add_argument('folder', type=Path) - parser.add_argument('frame_1', type=int) - parser.add_argument('frame_2', type=int) - parser.add_argument('--output', type=Path, default=Path("testbed")) + parser.add_argument("folder", type=Path) + parser.add_argument("frame_1", type=int) + parser.add_argument("frame_2", type=int) + parser.add_argument("--output", type=Path, default=Path("testbed")) args = parser.parse_args() - depth_path = get_frame_path(args.folder, 0, args.frame_1, 'Depth_npy') - image1_path = get_frame_path(args.folder, 0, args.frame_1, 'Image_png') - image2_path = get_frame_path(args.folder, 0, args.frame_2, 'Image_png') - camview1_path = get_frame_path(args.folder, 0, args.frame_1, 'camview_npz') - camview2_path = get_frame_path(args.folder, 0, args.frame_2, 'camview_npz') + depth_path = get_frame_path(args.folder, 0, args.frame_1, "Depth_npy") + image1_path = get_frame_path(args.folder, 0, args.frame_1, "Image_png") + image2_path = get_frame_path(args.folder, 0, args.frame_2, "Image_png") + camview1_path = get_frame_path(args.folder, 0, args.frame_1, "camview_npz") + camview2_path = get_frame_path(args.folder, 0, args.frame_2, "camview_npz") image2 = imread(image2_path) image1 = imread(image1_path) depth1 = np.load(depth_path) - pose1 = np.load(camview1_path)['T'] - pose2 = np.load(camview2_path)['T'] - K1 = np.load(camview1_path)['K'] - K2 = np.load(camview2_path)['K'] + pose1 = np.load(camview1_path)["T"] + pose2 = np.load(camview2_path)["T"] + K1 = np.load(camview1_path)["K"] + K2 = np.load(camview2_path)["K"] H, W, _ = image1.shape - depth1 = cv2.resize(np.load(depth_path), dsize=(W, H), interpolation=cv2.INTER_LINEAR) + depth1 = cv2.resize( + np.load(depth_path), dsize=(W, H), interpolation=cv2.INTER_LINEAR + ) img2_coords = reproject(depth1, pose1, pose2, K1, K2) - warped_image = cv2.remap(image2, img2_coords.astype(np.float32), None, interpolation=cv2.INTER_LINEAR) + warped_image = cv2.remap( + image2, img2_coords.astype(np.float32), None, interpolation=cv2.INTER_LINEAR + ) args.output.mkdir(exist_ok=True) imwrite(args.output / "A.png", image1) @@ -76,4 +88,4 @@ def reproject(depth1, pose1, pose2, K1, K2): imwrite(args.output / "C.png", image2) print(f'Wrote {args.output / "C.png"}') imwrite(args.output / "B.png", warped_image) - print(f'Wrote {args.output / "B.png"}') \ No newline at end of file + print(f'Wrote {args.output / "B.png"}') diff --git 
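For reference, a minimal sketch of the depth-based reprojection that rigid_warp.py implements above: unproject frame-1 pixels with K1, carry them through the relative pose inv(pose2) @ pose1, project with K2, and resample frame 2 with cv2.remap (the same remap pattern optical_flow_warp.py uses with precomputed flow). Plain NumPy replaces einops, and every input below is a hypothetical placeholder rather than real Infinigen output.

# Sketch: warp image2 into the viewpoint of image1 using depth1 and the two poses.
import cv2
import numpy as np
from numpy.linalg import inv

def reproject(depth1, pose1, pose2, K1, K2):
    """Map each pixel of camera 1 to pixel coordinates in camera 2."""
    H, W = depth1.shape
    u, v = np.meshgrid(np.arange(W), np.arange(H), indexing="xy")
    pix1 = np.stack((u, v, np.ones_like(u)), axis=-1).astype(np.float64)
    cam1 = depth1[..., None] * (pix1 @ inv(K1).T)   # camera-1 coordinates
    rel = inv(pose2) @ pose1                        # camera 1 -> camera 2
    cam2 = cam1 @ rel[:3, :3].T + rel[:3, 3]
    proj = cam2 @ K2.T
    return proj[..., :2] / proj[..., [2]]           # inhomogeneous pixel coordinates

if __name__ == "__main__":
    H, W = 48, 64
    K = np.array([[40.0, 0.0, W / 2], [0.0, 40.0, H / 2], [0.0, 0.0, 1.0]])
    depth1 = np.full((H, W), 3.0)                   # fronto-parallel plane at 3 m
    pose1, pose2 = np.eye(4), np.eye(4)
    pose2[0, 3] = 0.1                               # small sideways camera motion
    image2 = np.random.randint(0, 255, (H, W, 3), dtype=np.uint8)

    coords = reproject(depth1, pose1, pose2, K, K).astype(np.float32)
    warped = cv2.remap(image2, coords, None, interpolation=cv2.INTER_LINEAR)
    print(warped.shape)  # (48, 64, 3)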
a/infinigen/tools/ground_truth/segmentation_lookup.py b/infinigen/tools/ground_truth/segmentation_lookup.py index d25776711..3db0825b9 100644 --- a/infinigen/tools/ground_truth/segmentation_lookup.py +++ b/infinigen/tools/ground_truth/segmentation_lookup.py @@ -12,13 +12,19 @@ import cv2 import numba as nb import numpy as np -from einops import pack, rearrange, repeat from imageio.v3 import imread, imwrite from numba.types import bool_ from ..compress_masks import recover from ..dataset_loader import get_frame_path +try: + from einops import pack, rearrange, repeat +except ImportError: + raise ImportError( + "GT visualization requires `einops`. Please install optional extras via `pip install .[vis]`." + ) + """ Usage: python -m tools.ground_truth.segmentation_lookup [--query ] [--boxes] Output: @@ -27,6 +33,7 @@ - B.png # Original image + mask/2D-bounding-boxes for the provided query """ + @nb.njit def should_highlight_pixel(arr2d, set1d): """Compute boolean mask for items in arr2d that are also in set1d""" @@ -35,16 +42,17 @@ def should_highlight_pixel(arr2d, set1d): for i in range(H): for j in range(W): for n in range(set1d.size): - output[i,j] = output[i,j] or (arr2d[i,j] == set1d[n]) + output[i, j] = output[i, j] or (arr2d[i, j] == set1d[n]) return output + @nb.njit def compute_boxes(indices, binary_tag_mask): """Compute 2d bounding boxes for highlighted pixels""" H, W = binary_tag_mask.shape num_u = indices.max() + 1 - x_min = np.full(num_u, W-1, dtype=np.int32) - y_min = np.full(num_u, H-1, dtype=np.int32) + x_min = np.full(num_u, W - 1, dtype=np.int32) + y_min = np.full(num_u, H - 1, dtype=np.int32) x_max = np.full(num_u, -1, dtype=np.int32) y_max = np.full(num_u, -1, dtype=np.int32) for y in range(H): @@ -58,73 +66,103 @@ def compute_boxes(indices, binary_tag_mask): y_max[idx] = max(y_max[idx], y) return np.stack((x_min, y_min, x_max, y_max), axis=-1) + # Deterministic, but probably slow. Good enough for visualization. 
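For reference, a minimal NumPy sketch of what compute_boxes above produces for a single instance: the extreme coordinates of that instance's highlighted pixels form its (x_min, y_min, x_max, y_max) box. The mask here is a hypothetical binary segmentation, and numba is deliberately left out.

# Sketch: 2D bounding box of the True pixels in a binary mask.
import numpy as np

def box_from_mask(mask):
    ys, xs = np.nonzero(mask)
    if xs.size == 0:
        return None  # instance not visible
    return int(xs.min()), int(ys.min()), int(xs.max()), int(ys.max())

if __name__ == "__main__":
    mask = np.zeros((8, 10), dtype=bool)
    mask[2:5, 3:7] = True
    print(box_from_mask(mask))  # (3, 2, 6, 4)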
def arr2color(e): s = np.random.RandomState(np.array(e, dtype=np.uint32)) - return (np.asarray(colorsys.hsv_to_rgb(s.uniform(0, 1), s.uniform(0.1, 1), 1)) * 255).astype(np.uint8) + return ( + np.asarray(colorsys.hsv_to_rgb(s.uniform(0, 1), s.uniform(0.1, 1), 1)) * 255 + ).astype(np.uint8) -if __name__ == "__main__": +if __name__ == "__main__": parser = argparse.ArgumentParser() - parser.add_argument('folder', type=Path) - parser.add_argument('frame', type=int) - parser.add_argument('--query', type=str, default=None) - parser.add_argument('--boxes', action='store_true') - parser.add_argument('--output', type=Path, default=Path("testbed")) + parser.add_argument("folder", type=Path) + parser.add_argument("frame", type=int) + parser.add_argument("--query", type=str, default=None) + parser.add_argument("--boxes", action="store_true") + parser.add_argument("--output", type=Path, default=Path("testbed")) args = parser.parse_args() # Load images & masks - object_segmentation_mask = recover(np.load(get_frame_path(args.folder, 0, args.frame, 'ObjectSegmentation_npz'))) - instance_segmentation_mask = recover(np.load(get_frame_path(args.folder, 0, args.frame, 'InstanceSegmentation_npz'))) + object_segmentation_mask = recover( + np.load(get_frame_path(args.folder, 0, args.frame, "ObjectSegmentation_npz")) + ) + instance_segmentation_mask = recover( + np.load(get_frame_path(args.folder, 0, args.frame, "InstanceSegmentation_npz")) + ) image = imread(get_frame_path(args.folder, 0, args.frame, "Image_png")) - object_json = json.loads(get_frame_path(args.folder, 0, args.frame, 'Objects_json').read_text()) + object_json = json.loads( + get_frame_path(args.folder, 0, args.frame, "Objects_json").read_text() + ) H, W = object_segmentation_mask.shape image = cv2.resize(image, dsize=(W, H), interpolation=cv2.INTER_LINEAR) # Identify objects visible in the image unique_object_idxs = set(np.unique(object_segmentation_mask)) - present_objects = [obj for obj in object_json if (obj['object_index'] in unique_object_idxs)] + present_objects = [ + obj for obj in object_json if (obj["object_index"] in unique_object_idxs) + ] # Complain if the query isn't valid/present - unique_names = sorted({q['name'] for q in present_objects}) + unique_names = sorted({q["name"] for q in present_objects}) if args.query is None: - print('`--query` not specified. Choices are:') + print("`--query` not specified. Choices are:") for qn in unique_names: print(f"- {qn}") sys.exit(0) elif not any((args.query.lower() in name.lower()) for name in unique_names): - print(f'"{args.query}" doesn\'t match any object names in this image. Choices are:') + print( + f'"{args.query}" doesn\'t match any object names in this image. 
Choices are:' + ) for qn in unique_names: print(f"- {qn}") sys.exit(0) # Mask the pixels with any relevant object - objects_to_highlight = [obj for obj in present_objects if (args.query.lower() in obj['name'].lower())] - highlighted_pixels = should_highlight_pixel(object_segmentation_mask, np.array([o['object_index'] for o in objects_to_highlight])) + objects_to_highlight = [ + obj for obj in present_objects if (args.query.lower() in obj["name"].lower()) + ] + highlighted_pixels = should_highlight_pixel( + object_segmentation_mask, + np.array([o["object_index"] for o in objects_to_highlight]), + ) assert highlighted_pixels.dtype == bool # Assign unique colors to each object instance - combined_mask, _ = pack([object_segmentation_mask, instance_segmentation_mask], 'h w *') - combined_mask = rearrange(combined_mask, 'h w d -> (h w) d') - uniq_instances, indices = np.unique(combined_mask, return_inverse=True, axis=0) # this line is the bottleneck + combined_mask, _ = pack( + [object_segmentation_mask, instance_segmentation_mask], "h w *" + ) + combined_mask = rearrange(combined_mask, "h w d -> (h w) d") + uniq_instances, indices = np.unique( + combined_mask, return_inverse=True, axis=0 + ) # this line is the bottleneck unique_colors = np.stack([arr2color(row) for row in uniq_instances]) if args.boxes: bbox = compute_boxes(indices.reshape((H, W)), highlighted_pixels) - m = bbox[:,3] >= 0 # Ignore objects which weren't queried + m = bbox[:, 3] >= 0 # Ignore objects which weren't queried bbox = bbox[m] unique_colors = unique_colors[m] uniq_instances = uniq_instances[m] canvas = np.copy(image) - for (x_min, y_min, x_max, y_max), color, idx, ui in zip(bbox, unique_colors, np.arange(m.size)[m], uniq_instances): + for (x_min, y_min, x_max, y_max), color, idx, ui in zip( + bbox, unique_colors, np.arange(m.size)[m], uniq_instances + ): points = [(x_min, y_min), (x_min, y_max), (x_max, y_max), (x_max, y_min)] for i in range(4): - canvas = cv2.line(canvas, points[i], points[(i+1)%4], color=color.tolist(), thickness=2) + canvas = cv2.line( + canvas, + points[i], + points[(i + 1) % 4], + color=color.tolist(), + thickness=2, + ) else: colors_for_instances = unique_colors[indices].reshape((H, W, 3)) canvas = np.zeros((H, W, 3), dtype=np.uint8) for obj in objects_to_highlight: - m = repeat(object_segmentation_mask == obj['object_index'], 'H W -> H W 3') + m = repeat(object_segmentation_mask == obj["object_index"], "H W -> H W 3") canvas[m] = colors_for_instances[m] args.output.mkdir(exist_ok=True) diff --git a/infinigen/tools/indoor_profile.py b/infinigen/tools/indoor_profile.py index b55ad020e..f649c8772 100644 --- a/infinigen/tools/indoor_profile.py +++ b/infinigen/tools/indoor_profile.py @@ -3,61 +3,75 @@ # Authors: David Yan -from pathlib import Path +import argparse import re -from datetime import timedelta from collections import defaultdict -import argparse -import pandas as pd -from tabulate import tabulate +from datetime import timedelta +from pathlib import Path +import pandas as pd -''' +""" The following function s attributed to FObersteiner from Stack Overflow at https://stackoverflow.com/a/64662985 and is licensed under CC-BY-SA 4.0 (https://creativecommons.org/licenses/by-sa/4.0/deed.en#ref-appropriate-credit). David Yan used this code WITHOUT modification. -''' -def td_to_str(td): +""" + + +def td_to_str(td): """ convert a timedelta object td to a string in HH:MM:SS format. 
""" - if (pd.isnull(td)): + if pd.isnull(td): return td hours, remainder = divmod(td.total_seconds(), 3600) minutes, seconds = divmod(remainder, 60) - return f'{int(hours):02}:{int(minutes):02}:{int(seconds):02}' + return f"{int(hours):02}:{int(minutes):02}:{int(seconds):02}" -def main(dir : Path): + +def main(dir: Path): coarse_data = defaultdict(list) render_data = defaultdict(list) - with open(dir/"finished_seeds.txt") as f: - seeds = f.read().splitlines() + with open(dir / "finished_seeds.txt") as f: + seeds = f.read().splitlines() for seed in seeds: try: - coarse_log = open(dir/seed/'logs'/'coarse.err').read() - render_log = open(next((dir/seed/"logs").glob('shortrender*.err'))).read() - except: + coarse_log = open(dir / seed / "logs" / "coarse.err").read() + render_log = open( + next((dir / seed / "logs").glob("shortrender*.err")) + ).read() + except FileNotFoundError: continue - for name, h, m, s in re.findall(r'\[INFO\] \| \[(.*?)\] finished in ([0-9]+):([0-9]+):([0-9]+)', coarse_log): + for name, h, m, s in re.findall( + r"\[INFO\] \| \[(.*?)\] finished in ([0-9]+):([0-9]+):([0-9]+)", coarse_log + ): timedelta_obj = timedelta(hours=int(h), minutes=int(m), seconds=int(s)) - if (timedelta_obj.total_seconds() < 1): continue + if timedelta_obj.total_seconds() < 1: + continue coarse_data[name].append(timedelta_obj) - - for name, h, m, s in re.findall(r'\[INFO\] \| \[(.*?)\] finished in ([0-9]+):([0-9]+):([0-9]+)', render_log): + + for name, h, m, s in re.findall( + r"\[INFO\] \| \[(.*?)\] finished in ([0-9]+):([0-9]+):([0-9]+)", render_log + ): timedelta_obj = timedelta(hours=int(h), minutes=int(m), seconds=int(s)) - if (timedelta_obj.total_seconds() < 1): continue + if timedelta_obj.total_seconds() < 1: + continue render_data[name].append(timedelta_obj) - coarse_stats = make_stats(pd.DataFrame.from_dict(coarse_data, orient='index')) - render_stats = make_stats(pd.DataFrame.from_dict(render_data, orient='index')) + coarse_stats = make_stats(pd.DataFrame.from_dict(coarse_data, orient="index")) + render_stats = make_stats(pd.DataFrame.from_dict(render_data, orient="index")) for column in coarse_stats: - coarse_stats[column] = coarse_stats[column].dt.round('1s').map(lambda x: td_to_str(x)) + coarse_stats[column] = ( + coarse_stats[column].dt.round("1s").map(lambda x: td_to_str(x)) + ) for column in coarse_stats: - render_stats[column] = render_stats[column].dt.round('1s').map(lambda x: td_to_str(x)) + render_stats[column] = ( + render_stats[column].dt.round("1s").map(lambda x: td_to_str(x)) + ) print(coarse_stats.sort_values("median", ascending=False)) print(render_stats.sort_values("median", ascending=False)) @@ -65,19 +79,21 @@ def main(dir : Path): def make_stats(data_df): stats = pd.DataFrame() - stats['mean'] = data_df.mean(axis=1) - stats['median'] = data_df.median(axis=1) - stats['90%'] = data_df.quantile(0.9, axis=1) - stats['95%'] = data_df.quantile(0.95, axis=1) - stats['99%'] = data_df.quantile(0.99, axis=1) + stats["mean"] = data_df.mean(axis=1) + stats["median"] = data_df.median(axis=1) + stats["90%"] = data_df.quantile(0.9, axis=1) + stats["95%"] = data_df.quantile(0.95, axis=1) + stats["99%"] = data_df.quantile(0.99, axis=1) return stats - + + def make_args(): parser = argparse.ArgumentParser() - parser.add_argument('-d', '--dir', type=Path) + parser.add_argument("-d", "--dir", type=Path) args = parser.parse_args() return args -if __name__ == '__main__': + +if __name__ == "__main__": args = make_args() main(args.dir) diff --git a/infinigen/tools/isaac_sim.py 
b/infinigen/tools/isaac_sim.py index 0f704b2a1..551cb5921 100644 --- a/infinigen/tools/isaac_sim.py +++ b/infinigen/tools/isaac_sim.py @@ -5,39 +5,44 @@ # Acknowledgement: This file draws inspiration from https://docs.omniverse.nvidia.com/isaacsim/latest/index.html -import numpy as np - -from omni.isaac.kit import SimulationApp -CONFIG = {"renderer": "RayTracedLighting", "headless": False} -simulation_app = SimulationApp(launch_config=CONFIG) +import json +import numpy as np import omni -import json from omni.isaac.core import World -from pxr import Usd,UsdGeom, UsdLux, Sdf -from omni.isaac.core.utils.prims import create_prim +from omni.isaac.core.prims import XFormPrim +from omni.isaac.core.utils.extensions import enable_extension from omni.isaac.core.utils.nucleus import get_assets_root_path -from omni.isaac.core.prims import XFormPrim +from omni.isaac.core.utils.prims import create_prim +from omni.isaac.kit import SimulationApp from omni.kit.commands import execute as omni_exec -from omni.isaac.core.utils.extensions import enable_extension +from pxr import Sdf, Usd, UsdGeom, UsdLux + +# ruff: noqa: E402 enable_extension("omni.isaac.examples") -from omni.isaac.core.utils.nucleus import get_assets_root_path -from omni.isaac.core.utils.types import ArticulationAction from omni.isaac.core.controllers import BaseController +from omni.isaac.core.utils.types import ArticulationAction from omni.isaac.wheeled_robots.robots import WheeledRobot from omni.physx.scripts import utils +CONFIG = {"renderer": "RayTracedLighting", "headless": False} +simulation_app = SimulationApp(launch_config=CONFIG) + + class RobotController(BaseController): def __init__(self): super().__init__(name="robot_controller") def forward(self): - return ArticulationAction(joint_velocities=[2,2]) + return ArticulationAction(joint_velocities=[2, 2]) + class InfinigenIsaacScene(object): def __init__(self, cfg): self.cfg = cfg - self.world = World(stage_units_in_meters=1.0, backend='numpy', physics_dt=1/400.) 
+ self.world = World( + stage_units_in_meters=1.0, backend="numpy", physics_dt=1 / 400.0 + ) self.world._physics_context.set_gravity(-9.8) self.scene = self.world.scene self._support = None @@ -49,14 +54,16 @@ def setup_scene(self): self._add_robot() def _add_infinigen_scene(self): - create_prim(prim_path="/World/Support", - usd_path=self.cfg.scene_path, - semantic_label='scene') + create_prim( + prim_path="/World/Support", + usd_path=self.cfg.scene_path, + semantic_label="scene", + ) self._support = XFormPrim(prim_path="/World/Support", name="Support") stage = omni.usd.get_context().get_stage() - - prims = [prim for prim in stage.Traverse() if prim.IsA(UsdGeom.Mesh)] + + prims = [prim for prim in stage.Traverse() if prim.IsA(UsdGeom.Mesh)] if self.cfg.json_path is None: for prim in prims: utils.setStaticCollider(prim) @@ -70,50 +77,57 @@ def _add_infinigen_scene(self): for key, value in relations.items(): obj = value.get("obj") if obj: - obj_to_target[obj.replace('(', '_').replace(')', '_').replace('.', '_')] = key + obj_to_target[ + obj.replace("(", "_").replace(")", "_").replace(".", "_") + ] = key for prim in prims: prim_name = prim.GetName() target = obj_to_target.get(prim_name) - - if 'SPLIT' in prim_name: - do_not_cast_shadows = prim.CreateAttribute('primvars:doNotCastShadows', Sdf.ValueTypeNames.Bool) - do_not_cast_shadows.Set(True) - if 'terrain' in prim_name: + if "SPLIT" in prim_name: + do_not_cast_shadows = prim.CreateAttribute( + "primvars:doNotCastShadows", Sdf.ValueTypeNames.Bool + ) + do_not_cast_shadows.Set(True) + + if "terrain" in prim_name: continue if not target: utils.setStaticCollider(prim) continue - if any(x["relation"]["relation_type"] == "StableAgainst" and "Subpart(wall)" in x["relation"].get("parent_tags") or "Subpart(ceiling)" in x["relation"].get("parent_tags") for x in relations[target]["relations"]): + if any( + x["relation"]["relation_type"] == "StableAgainst" + and "Subpart(wall)" in x["relation"].get("parent_tags") + or "Subpart(ceiling)" in x["relation"].get("parent_tags") + for x in relations[target]["relations"] + ): utils.setStaticCollider(prim) else: - utils.setRigidBody(prim, 'convexDecomposition', False) + utils.setRigidBody(prim, "convexDecomposition", False) self.scene.add(self._support) def _add_lighting(self): omni_exec( "CreatePrim", - prim_path='/World/DomeLight', + prim_path="/World/DomeLight", prim_type="DomeLight", select_new_prim=False, attributes={ UsdLux.Tokens.inputsIntensity: 5000, - UsdLux.Tokens.inputsColor: (0.7, 0.88, 1.0) + UsdLux.Tokens.inputsColor: (0.7, 0.88, 1.0), }, create_default_xform=True, ) omni_exec( "CreatePrim", - prim_path='/World/DistantLight', + prim_path="/World/DistantLight", prim_type="DistantLight", select_new_prim=False, - attributes={ - UsdLux.Tokens.inputsIntensity: 8000 - }, + attributes={UsdLux.Tokens.inputsIntensity: 8000}, create_default_xform=True, ) @@ -137,7 +151,7 @@ def _add_robot(self): wheel_dof_names=["left_wheel_joint", "right_wheel_joint"], create_robot=True, usd_path=robot_path, - position=init_pos + position=init_pos, ) ) self.robot.set_local_scale(np.array([4, 4, 4])) @@ -156,11 +170,13 @@ def run(self): self.apply_action() self.world.step(render=True) -if __name__ == '__main__': + +if __name__ == "__main__": import argparse + parser = argparse.ArgumentParser() - parser.add_argument('--scene-path', type=str) - parser.add_argument('--json-path', type=str) + parser.add_argument("--scene-path", type=str) + parser.add_argument("--json-path", type=str) args = parser.parse_args() scene = 
InfinigenIsaacScene(args) @@ -168,4 +184,3 @@ def run(self): scene.reset() scene.run() simulation_app.close() - diff --git a/infinigen/tools/perceptual/create_pairs.py b/infinigen/tools/perceptual/create_pairs.py index 4f86e65ac..3c5404e4c 100644 --- a/infinigen/tools/perceptual/create_pairs.py +++ b/infinigen/tools/perceptual/create_pairs.py @@ -3,24 +3,17 @@ # Authors: Karhan Kayan -import sys -import cv2 import os -import numpy as np -import matplotlib.pyplot as plt - +import random import sys - -import pandas as pd -from PIL import Image +from PIL import Image, ImageDraw, ImageFont from tqdm import tqdm -from PIL import Image -from PIL import Image, ImageDraw, ImageFont -import random -def merge_images(image_path1, image_path2, text1='Program A', text2='Program B', strip_width=5): +def merge_images( + image_path1, image_path2, text1="Program A", text2="Program B", strip_width=5 +): # Open the images image1 = Image.open(image_path1) image2 = Image.open(image_path2) @@ -40,7 +33,7 @@ def merge_images(image_path1, image_path2, text1='Program A', text2='Program B', # Create a new image with the combined width plus the strip width and the max height combined_width = image1.width + image2.width + strip_width - combined_image = Image.new('RGB', (combined_width, max_height), 'black') + combined_image = Image.new("RGB", (combined_width, max_height), "black") # Paste the two images into the new image # Adjust the position if one image is shorter than the other @@ -55,10 +48,12 @@ def merge_images(image_path1, image_path2, text1='Program A', text2='Program B', draw = ImageDraw.Draw(combined_image) try: # Load a specific TrueType or OpenType font file - font = ImageFont.truetype("/System/Library/Fonts/Supplemental/Arial Black.ttf", font_size) + font = ImageFont.truetype( + "/System/Library/Fonts/Supplemental/Arial Black.ttf", font_size + ) except IOError: # If the specific font file is not found, load the default font - print('Font not found, using default font.') + print("Font not found, using default font.") font = ImageFont.load_default() text_color = (255, 0, 0) # White color @@ -75,9 +70,10 @@ def merge_images(image_path1, image_path2, text1='Program A', text2='Program B', # Save the combined image return combined_image -from PIL import Image, ImageDraw, ImageFont -def merge_images2(image_path1, image_path2, text1='Program A', text2='Program B', strip_width=5): +def merge_images2( + image_path1, image_path2, text1="Program A", text2="Program B", strip_width=5 +): # Open the images image1 = Image.open(image_path1) image2 = Image.open(image_path2) @@ -96,7 +92,7 @@ def merge_images2(image_path1, image_path2, text1='Program A', text2='Program B' max_width = image1.width + image2.width + strip_width # Create a new image with the max width and the combined height - combined_image = Image.new('RGB', (max_width, image1.height), 'black') + combined_image = Image.new("RGB", (max_width, image1.height), "black") # Paste the two images into the new image image1_x = (max_width - image1.width - image2.width - strip_width) // 2 @@ -110,10 +106,12 @@ def merge_images2(image_path1, image_path2, text1='Program A', text2='Program B' draw = ImageDraw.Draw(combined_image) try: # Load a specific TrueType or OpenType font file - font = ImageFont.truetype("/System/Library/Fonts/Supplemental/Arial.ttf", font_size) + font = ImageFont.truetype( + "/System/Library/Fonts/Supplemental/Arial.ttf", font_size + ) except IOError: # If the specific font file is not found, load the default font - print('Font not found, using 
default font.') + print("Font not found, using default font.") font = ImageFont.load_default() text_color = (255, 0, 0) # Red color @@ -124,7 +122,6 @@ def merge_images2(image_path1, image_path2, text1='Program A', text2='Program B' text2_x = image1.width + strip_width + 10 text2_y = 10 - draw.text((text1_x, text1_y), text1, fill=text_color, font=font) draw.text((text2_x, text2_y), text2, fill=text_color, font=font) @@ -132,8 +129,7 @@ def merge_images2(image_path1, image_path2, text1='Program A', text2='Program B' return combined_image - -if __name__ == '__main__': +if __name__ == "__main__": # methods = ['eevee', 'fastsynth'] # perspective = 'first_person' main_directory = sys.argv[1] @@ -147,7 +143,6 @@ def merge_images2(image_path1, image_path2, text1='Program A', text2='Program B' k = 50 # Set your main directory, methods, and perspective here - if not os.path.exists(output_directory): os.makedirs(output_directory) @@ -169,19 +164,21 @@ def merge_images2(image_path1, image_path2, text1='Program A', text2='Program B' image_path_1 = os.path.join(path_method1, img1) image_path_2 = os.path.join(path_method2, img2) # Extracting image identifiers - img_0_id = img1.split('.')[0] - img_1_id = img2.split('.')[0] + img_0_id = img1.split(".")[0] + img_1_id = img2.split(".")[0] # skip if not image - if not (image_path_1.endswith('.png') or image_path_1.endswith('.jpg')): + if not (image_path_1.endswith(".png") or image_path_1.endswith(".jpg")): continue - if not (image_path_2.endswith('.png') or image_path_2.endswith('.jpg')): + if not (image_path_2.endswith(".png") or image_path_2.endswith(".jpg")): continue # Creating a unique filename for the merged image - merged_filename = f'{perspective}-{methods[0]}-{img_0_id}-{methods[1]}-{img_1_id}.jpg' + merged_filename = ( + f"{perspective}-{methods[0]}-{img_0_id}-{methods[1]}-{img_1_id}.jpg" + ) merged_image_path = os.path.join(output_directory, merged_filename) # Merge and save images merged_img = merge_images2(image_path_1, image_path_2) - merged_img.save(merged_image_path, 'JPEG') + merged_img.save(merged_image_path, "JPEG") diff --git a/infinigen/tools/perceptual/create_submission.py b/infinigen/tools/perceptual/create_submission.py index b3245051c..8c8f7fda3 100644 --- a/infinigen/tools/perceptual/create_submission.py +++ b/infinigen/tools/perceptual/create_submission.py @@ -3,33 +3,38 @@ # Authors: Karhan Kayan +import csv import os import random -import csv import sys + def select_random_files_to_csv(folder_path, k, output_directory): # Get all files in the folder - files = [f for f in os.listdir(folder_path) if os.path.isfile(os.path.join(folder_path, f))] + files = [ + f + for f in os.listdir(folder_path) + if os.path.isfile(os.path.join(folder_path, f)) + ] # Select k random files selected_files = random.sample(files, min(k, len(files))) # Create CSV file with the name of the folder folder_name = os.path.basename(folder_path) - csv_file_path = os.path.join(output_directory, f'{folder_name}-{k}.csv') + csv_file_path = os.path.join(output_directory, f"{folder_name}-{k}.csv") # Write the selected file names to the CSV - with open(csv_file_path, 'w', newline='') as csvfile: + with open(csv_file_path, "w", newline="") as csvfile: writer = csv.writer(csvfile) - writer.writerow(['image_url']) # Header + writer.writerow(["image_url"]) # Header for file in selected_files: writer.writerow([file]) - print(f'CSV file created at {csv_file_path}') + print(f"CSV file created at {csv_file_path}") -if __name__ == '__main__': +if __name__ == "__main__": k = 
50 output_directory = sys.argv[1] folder_path = sys.argv[2] diff --git a/infinigen/tools/perceptual/perceptual_extract.py b/infinigen/tools/perceptual/perceptual_extract.py index 563fe59a1..a9a9405e5 100644 --- a/infinigen/tools/perceptual/perceptual_extract.py +++ b/infinigen/tools/perceptual/perceptual_extract.py @@ -7,26 +7,27 @@ import shutil import sys - -if __name__ == '__main__': +if __name__ == "__main__": input_directory = sys.argv[1] output_directory = sys.argv[2] # Supported image formats - image_formats = ['.png', '.jpg', '.jpeg', '.bmp', '.gif', '.tiff'] + image_formats = [".png", ".jpg", ".jpeg", ".bmp", ".gif", ".tiff"] if not os.path.exists(output_directory): os.makedirs(output_directory) for folder_name in os.listdir(input_directory): folder_path = os.path.join(input_directory, folder_name) - + if os.path.isdir(folder_path): # Find the image file inside the folder for file_name in os.listdir(folder_path): if any(file_name.lower().endswith(ext) for ext in image_formats): old_file_path = os.path.join(folder_path, file_name) - new_file_name = f'{folder_name}.png' # Change the extension if needed + new_file_name = ( + f"{folder_name}.png" # Change the extension if needed + ) new_file_path = os.path.join(output_directory, new_file_name) # Move and rename the image file diff --git a/infinigen/tools/perceptual/rename.py b/infinigen/tools/perceptual/rename.py index 8f51cd65a..c5c8776cd 100644 --- a/infinigen/tools/perceptual/rename.py +++ b/infinigen/tools/perceptual/rename.py @@ -6,8 +6,7 @@ import os import sys - -if __name__ == '__main__': +if __name__ == "__main__": # Set the directory containing your files directory = sys.argv[1] @@ -21,5 +20,5 @@ for i, filename in enumerate(files, start=1): old_path = os.path.join(directory, filename) _, file_extension = os.path.splitext(filename) - new_path = os.path.join(directory, f'{i}{file_extension}') - os.rename(old_path, new_path) \ No newline at end of file + new_path = os.path.join(directory, f"{i}{file_extension}") + os.rename(old_path, new_path) diff --git a/infinigen/tools/results/aggregate_job_stats.py b/infinigen/tools/results/aggregate_job_stats.py index 3d0bd7cd6..68a33d7a7 100644 --- a/infinigen/tools/results/aggregate_job_stats.py +++ b/infinigen/tools/results/aggregate_job_stats.py @@ -32,7 +32,7 @@ def end_time(self): return self.start_time + self.time_elapsed def __lt__(self, other): - return (int(self.job_id) < int(other.job_id)) + return int(self.job_id) < int(other.job_id) def __str__(self): if self.memory is not None: @@ -40,62 +40,107 @@ def __str__(self): else: return f"{self.job_id} {self.name.ljust(40)} {self.current_status.ljust(10)}" -sacct_line_regex = re.compile("([0-9]+) +([^ ]+) +([^ ]+) +([0-9]+) +([A-Z]+) +(node[0-9]+) +([^ ]+).*").fullmatch + +sacct_line_regex = re.compile( + "([0-9]+) +([^ ]+) +([^ ]+) +([0-9]+) +([A-Z]+) +(node[0-9]+) +([^ ]+).*" +).fullmatch + def parse_sacct_line(line): if sacct_line_regex(line) is None: return - job_id, job_name, resources, elapsed_raw, current_status, node, start_time = sacct_line_regex(line).groups() - request = dict(e.split('=') for e in resources.split(',')) - start_time = datetime.strptime(start_time, '%Y-%m-%dT%H:%M:%S') + job_id, job_name, resources, elapsed_raw, current_status, node, start_time = ( + sacct_line_regex(line).groups() + ) + request = dict(e.split("=") for e in resources.split(",")) + start_time = datetime.strptime(start_time, "%Y-%m-%dT%H:%M:%S") elapsed = timedelta(seconds=int(elapsed_raw)) - return Job(job_id=job_id, name=job_name, 
memory=request['mem'], cpu=request['cpu'], gpu=request.get('gpu', '0'), current_status=current_status, node=node, start_time=start_time, time_elapsed=elapsed) + return Job( + job_id=job_id, + name=job_name, + memory=request["mem"], + cpu=request["cpu"], + gpu=request.get("gpu", "0"), + current_status=current_status, + node=node, + start_time=start_time, + time_elapsed=elapsed, + ) + -if __name__ == '__main__': +if __name__ == "__main__": parser = argparse.ArgumentParser() - parser.add_argument('-d', '--days_since', type=int, default=14) - parser.add_argument('-o', '--output_folder', type=Path, required=True) + parser.add_argument("-d", "--days_since", type=int, default=14) + parser.add_argument("-o", "--output_folder", type=Path, required=True) args = parser.parse_args() assert args.output_folder.exists() - squeue_out = subprocess.check_output('squeue --user=llipson -o "%.24i%.40j%.14R" -h'.split()).decode() - non_started_jobs = [Job(job_id=i, name=n, current_status=s) for i,n,s in re.findall("([0-9]+) +([^ ]+) +(\([a-zA-Z]+\))", squeue_out) if n.startswith(args.output_folder.stem)] - sacct_start_date = (datetime.now() - timedelta(days=args.days_since)).strftime('%Y-%m-%d')# 2022-05-07 + squeue_out = subprocess.check_output( + 'squeue --user=llipson -o "%.24i%.40j%.14R" -h'.split() + ).decode() + non_started_jobs = [ + Job(job_id=i, name=n, current_status=s) + for i, n, s in re.findall("([0-9]+) +([^ ]+) +(\([a-zA-Z]+\))", squeue_out) + if n.startswith(args.output_folder.stem) + ] + sacct_start_date = (datetime.now() - timedelta(days=args.days_since)).strftime( + "%Y-%m-%d" + ) # 2022-05-07 sacct_command = f"sacct --starttime {sacct_start_date} -u {os.environ['USER']} --noheader -o jobid,jobname%40,AllocTRES%80,ElapsedRaw,stat,NodeList,Start" sacct_output = subprocess.check_output(sacct_command.split()).decode() relevant_started_jobs = [] for sacct_line in sacct_output.splitlines(): parsed_job = parse_sacct_line(sacct_line) - if (parsed_job is not None) and (parsed_job.name.startswith(args.output_folder.stem)): + if (parsed_job is not None) and ( + parsed_job.name.startswith(args.output_folder.stem) + ): relevant_started_jobs.append(parsed_job) - all_jobs = (relevant_started_jobs + non_started_jobs) + all_jobs = relevant_started_jobs + non_started_jobs seed_dict = defaultdict(list) for j in all_jobs: - seed, = re.compile(f"{args.output_folder.stem}_([^ _]+)_.*").fullmatch(j.name).groups() + (seed,) = ( + re.compile(f"{args.output_folder.stem}_([^ _]+)_.*") + .fullmatch(j.name) + .groups() + ) seed_dict[seed].append(j) all_times = {"fine": defaultdict(list), "coarse": defaultdict(list), "full": []} - for k,v in seed_dict.items(): + for k, v in seed_dict.items(): coarse_job, fine_job, *render_jobs = sorted(v) pipeline_start = coarse_job.start_time pipeline_end = max(j.end_time() for j in render_jobs) total_pipeline_time = pipeline_end - pipeline_start assert pipeline_end > fine_job.end_time() assert pipeline_start < fine_job.start_time - assert fine_job.time_elapsed.total_seconds() <= total_pipeline_time.total_seconds(), (pipeline_start, pipeline_end, total_pipeline_time) - coarse_job_percentage = (100 * coarse_job.time_elapsed.total_seconds()) / total_pipeline_time.total_seconds() - fine_job_percentage = (100 * fine_job.time_elapsed.total_seconds()) / total_pipeline_time.total_seconds() + assert ( + fine_job.time_elapsed.total_seconds() <= total_pipeline_time.total_seconds() + ), ( + pipeline_start, + pipeline_end, + total_pipeline_time, + ) + coarse_job_percentage = ( + 100 * 
coarse_job.time_elapsed.total_seconds() + ) / total_pipeline_time.total_seconds() + fine_job_percentage = ( + 100 * fine_job.time_elapsed.total_seconds() + ) / total_pipeline_time.total_seconds() all_times["fine"]["elapsed"].append(fine_job.time_elapsed.total_seconds()) all_times["fine"]["percentage"].append(fine_job_percentage) all_times["coarse"]["elapsed"].append(coarse_job.time_elapsed.total_seconds()) all_times["coarse"]["percentage"].append(coarse_job_percentage) all_times["full"].append(total_pipeline_time.total_seconds()) - fine_status_freq = Counter([j.current_status for j in all_jobs if j.name.endswith("_fine")]) + fine_status_freq = Counter( + [j.current_status for j in all_jobs if j.name.endswith("_fine")] + ) print(fine_status_freq) avg_fine_elapsed = timedelta(seconds=np.mean(all_times["fine"]["elapsed"])) print(f'{avg_fine_elapsed} {round(np.mean(all_times["fine"]["percentage"])):02d}%') avg_coarse_elapsed = timedelta(seconds=np.mean(all_times["coarse"]["elapsed"])) - print(f'{avg_fine_elapsed} {round(np.mean(all_times["coarse"]["percentage"])):02d}%') + print( + f'{avg_fine_elapsed} {round(np.mean(all_times["coarse"]["percentage"])):02d}%' + ) vg_full_elapsed = timedelta(seconds=np.mean(all_times["full"])) print(vg_full_elapsed) diff --git a/infinigen/tools/results/job_stats.py b/infinigen/tools/results/job_stats.py index dd52a9437..40ec1385c 100644 --- a/infinigen/tools/results/job_stats.py +++ b/infinigen/tools/results/job_stats.py @@ -8,7 +8,7 @@ import os import re import subprocess -from collections import Counter, defaultdict +from collections import defaultdict from dataclasses import dataclass from datetime import datetime, timedelta from pathlib import Path @@ -31,7 +31,7 @@ def end_time(self): return self.start_time + self.time_elapsed def __lt__(self, other): - return (int(self.job_id) < int(other.job_id)) + return int(self.job_id) < int(other.job_id) def __str__(self): if self.req_memory is not None: @@ -39,43 +39,96 @@ def __str__(self): else: return f"{self.job_id} {self.name.ljust(60+73)} {self.current_status.ljust(10)}" -sacct_line_regex = re.compile("([0-9]+) +(\S+) +(\S+) +([0-9]+) +([A-Z_]+) +(node[0-9]+) +(\S+).*").fullmatch + +sacct_line_regex = re.compile( + "([0-9]+) +(\S+) +(\S+) +([0-9]+) +([A-Z_]+) +(node[0-9]+) +(\S+).*" +).fullmatch HEADER = "Job ID Job name GPU CPU Max Mem Start date, time Elapsed Status Node" MEM_FACTOR = {"G": 1, "M": 1e3, "K": 1e6} + def parse_sacct_line(line): if sacct_line_regex(line) is None: return - job_id, job_name, resources, elapsed_raw, current_status, node, start_time = sacct_line_regex(line).groups() - request = dict(e.split('=') for e in resources.split(',')) - start_time = datetime.strptime(start_time, '%Y-%m-%dT%H:%M:%S') + job_id, job_name, resources, elapsed_raw, current_status, node, start_time = ( + sacct_line_regex(line).groups() + ) + request = dict(e.split("=") for e in resources.split(",")) + start_time = datetime.strptime(start_time, "%Y-%m-%dT%H:%M:%S") elapsed = timedelta(seconds=int(elapsed_raw)) - return Job(job_id=job_id, name=job_name, req_memory=request['mem'], cpu=request['cpu'], gpu=request.get('gpu', '0'), current_status=current_status, node=node, start_time=start_time, time_elapsed=elapsed) + return Job( + job_id=job_id, + name=job_name, + req_memory=request["mem"], + cpu=request["cpu"], + gpu=request.get("gpu", "0"), + current_status=current_status, + node=node, + start_time=start_time, + time_elapsed=elapsed, + ) + def get_node_info(): - sinfo_out = subprocess.check_output('/usr/bin/sinfo 
--Node --format=%12N%12P%C --noheader'.split()).decode() + sinfo_out = subprocess.check_output( + "/usr/bin/sinfo --Node --format=%12N%12P%C --noheader".split() + ).decode() nodes_info = defaultdict(dict) - for node, group, allocated_cpus, total_cpus in re.findall("(\S+) +(\S+) +([0-9]+)/[0-9]+/[0-9]+/([0-9]+)", sinfo_out): + for node, group, allocated_cpus, total_cpus in re.findall( + "(\S+) +(\S+) +([0-9]+)/[0-9]+/[0-9]+/([0-9]+)", sinfo_out + ): if group != "all": assert node not in nodes_info - nodes_info[node]['cpus'] = int(total_cpus) - nodes_info[node]['allocated_cpus'] = int(allocated_cpus) - nodes_info[node]['group'] = group + nodes_info[node]["cpus"] = int(total_cpus) + nodes_info[node]["allocated_cpus"] = int(allocated_cpus) + nodes_info[node]["group"] = group return dict(nodes_info) -if __name__ == '__main__': - *_, pvl_users = subprocess.check_output("/usr/bin/getent group pvl".split()).decode().rstrip('\n').split(":") + +if __name__ == "__main__": + *_, pvl_users = ( + subprocess.check_output("/usr/bin/getent group pvl".split()) + .decode() + .rstrip("\n") + .split(":") + ) parser = argparse.ArgumentParser() - parser.add_argument('-d', '--days_since', type=int, default=14, help="At least how long ago were the jobs run? Smaller values are faster.") - parser.add_argument('-o', '--output_folder', type=Path, required=True, help="Output directory of experiments.") - parser.add_argument('--user', type=str, default=os.environ['USER'], choices=pvl_users.split(','), help="User who ran the jobs.") + parser.add_argument( + "-d", + "--days_since", + type=int, + default=14, + help="At least how long ago were the jobs run? Smaller values are faster.", + ) + parser.add_argument( + "-o", + "--output_folder", + type=Path, + required=True, + help="Output directory of experiments.", + ) + parser.add_argument( + "--user", + type=str, + default=os.environ["USER"], + choices=pvl_users.split(","), + help="User who ran the jobs.", + ) args = parser.parse_args() assert args.output_folder.exists() - squeue_out = subprocess.check_output(f'squeue --user={args.user} -o "%.24i%.40j%.14R" -h'.split()).decode() - non_started_jobs = [Job(job_id=i, name=n, current_status=s) for i,n,s in re.findall("([0-9]+) +(\S+) +(\([a-zA-Z]+\))", squeue_out) if n.startswith(args.output_folder.stem)] - sacct_start_date = (datetime.now() - timedelta(days=args.days_since)).strftime('%Y-%m-%d')# 2022-05-07 + squeue_out = subprocess.check_output( + f'squeue --user={args.user} -o "%.24i%.40j%.14R" -h'.split() + ).decode() + non_started_jobs = [ + Job(job_id=i, name=n, current_status=s) + for i, n, s in re.findall("([0-9]+) +(\S+) +(\([a-zA-Z]+\))", squeue_out) + if n.startswith(args.output_folder.stem) + ] + sacct_start_date = (datetime.now() - timedelta(days=args.days_since)).strftime( + "%Y-%m-%d" + ) # 2022-05-07 sacct_command = f"sacct --starttime {sacct_start_date} -u {args.user} --noheader -o jobid,jobname%80,AllocTRES%80,ElapsedRaw,stat%30,NodeList,Start,MaxRSS" print(f"Running command: {sacct_command}") sacct_output = subprocess.check_output(sacct_command.split()).decode() @@ -84,20 +137,28 @@ def get_node_info(): for sacct_line in sacct_output.splitlines(): parsed_job = parse_sacct_line(sacct_line) - if (parsed_job is not None) and (parsed_job.name.startswith(args.output_folder.stem)): + if (parsed_job is not None) and ( + parsed_job.name.startswith(args.output_folder.stem) + ): if parsed_job.job_id in mem_dict: max_memory = mem_dict[parsed_job.job_id] - parsed_job.max_memory_gb = float(max_memory[:-1]) / 
MEM_FACTOR[max_memory[-1]] + parsed_job.max_memory_gb = ( + float(max_memory[:-1]) / MEM_FACTOR[max_memory[-1]] + ) relevant_started_jobs.append(parsed_job) - all_jobs = (relevant_started_jobs + non_started_jobs) + all_jobs = relevant_started_jobs + non_started_jobs seed_dict = defaultdict(list) for j in all_jobs: - seed, = re.compile(f"{args.output_folder.stem}_([^ _]+)_.*").fullmatch(j.name).groups() + (seed,) = ( + re.compile(f"{args.output_folder.stem}_([^ _]+)_.*") + .fullmatch(j.name) + .groups() + ) seed_dict[seed].append(j) node_info = get_node_info() - for k,v in sorted(seed_dict.items()): - print("-"*len(HEADER) + "\n" + HEADER) + for k, v in sorted(seed_dict.items()): + print("-" * len(HEADER) + "\n" + HEADER) for j in v: if j.node is not None: print(f"{j}({node_info[j.node]['group']})") diff --git a/infinigen/tools/results/make_grid_figure.py b/infinigen/tools/results/make_grid_figure.py index c753dfceb..8219c76ab 100644 --- a/infinigen/tools/results/make_grid_figure.py +++ b/infinigen/tools/results/make_grid_figure.py @@ -5,8 +5,9 @@ import os -import numpy as np + import cv2 +import numpy as np root_folder = "outputs_scratch/fig1_v6" scene_types = [ @@ -20,13 +21,13 @@ "mountain", "plain", ] -titles=scene_types +titles = scene_types level0_layout = (3, 3) sublevel_mode = "below" sublevel_layout = (2, 6) margin = 0 -H, W = (1080, 1920) # resized resolution +H, W = (1080, 1920) # resized resolution with_txt = True if sublevel_mode == "below": @@ -40,32 +41,40 @@ block_W = W + (margin + subfigure_W) * sublevel_layout[1] + margin block_H = H + margin -canvas = np.zeros((block_H * level0_layout[0] - margin, block_W * level0_layout[1] - margin, 3)) + 255 +canvas = ( + np.zeros( + (block_H * level0_layout[0] - margin, block_W * level0_layout[1] - margin, 3) + ) + + 255 +) for i, scene_type, title in zip(range(len(scene_types)), scene_types, titles): y, x = i // level0_layout[1], i % level0_layout[1] for j in range(sublevel_layout[0] * sublevel_layout[1] + 1): print(scene_type, j) - folder = f'{scene_type}_{j}' + folder = f"{scene_type}_{j}" path = f"{root_folder}/{folder}/frames_{folder}_resmpl0" - if not os.path.exists(path): - print(f'{path} did not exist') + if not os.path.exists(path): + print(f"{path} did not exist") + continue + image_path = [ + x for x in os.listdir(path) if x.startswith("Noisy") and x.endswith(".png") + ] + if image_path == []: continue - image_path = [x for x in os.listdir(path) if x.startswith("Noisy") and x.endswith(".png")] - if image_path == []: continue image_path = f"{path}/{image_path[0]}" image = cv2.imread(image_path) if j == 0: image = cv2.resize(image, (W, H)) - canvas[y * block_H: y * block_H + H, x * block_W: x * block_W + W] = image + canvas[y * block_H : y * block_H + H, x * block_W : x * block_W + W] = image else: y_j, x_j = (j - 1) // sublevel_layout[1], (j - 1) % sublevel_layout[1] subfigure_W0, subfigure_H0 = subfigure_W, subfigure_H if sublevel_mode == "below": - assert(sublevel_layout[1] > 1) + assert sublevel_layout[1] > 1 if x_j == sublevel_layout[1] - 1: subfigure_W0 = W - (subfigure_W + margin) * (sublevel_layout[1] - 1) else: - assert(sublevel_layout[0] > 1) + assert sublevel_layout[0] > 1 if y_j == sublevel_layout[0] - 1: subfigure_H0 = H - (subfigure_H + margin) * (sublevel_layout[0] - 1) image = cv2.resize(image, (subfigure_W0, subfigure_H0)) @@ -75,11 +84,22 @@ else: H_offset = (margin + subfigure_H) * y_j W_offset = W + margin + (margin + subfigure_W) * x_j - canvas[y * block_H + H_offset: y * block_H + H_offset + 
subfigure_H0, x * block_W + W_offset: x * block_W + W_offset + subfigure_W0] = image + canvas[ + y * block_H + H_offset : y * block_H + H_offset + subfigure_H0, + x * block_W + W_offset : x * block_W + W_offset + subfigure_W0, + ] = image if with_txt: if np.sum(canvas[y * block_H + 5, x * block_W + 5]) > 255 * 1.5: color = (0, 0, 0) else: color = (255, 255, 255) - cv2.putText(canvas, title, (x * block_W, y * block_H + 50), cv2.FONT_HERSHEY_SIMPLEX, fontScale=2, color=color, thickness=4) + cv2.putText( + canvas, + title, + (x * block_W, y * block_H + 50), + cv2.FONT_HERSHEY_SIMPLEX, + fontScale=2, + color=color, + thickness=4, + ) cv2.imwrite(f"{root_folder}/figure.png", canvas) diff --git a/infinigen/tools/results/parse_times.py b/infinigen/tools/results/parse_times.py index f1d222bbf..6e089c8ae 100644 --- a/infinigen/tools/results/parse_times.py +++ b/infinigen/tools/results/parse_times.py @@ -10,32 +10,73 @@ import re import subprocess from collections import defaultdict -from datetime import timedelta from pathlib import Path import numpy as np -REGEX_PATTERN = f'(\[.*\]) *([^ ]+) -> ([^ ]+) \| ([0-9\.]+)h:([0-9\.]+)m:([0-9\.]+)s' +REGEX_PATTERN = "(\[.*\]) *([^ ]+) -> ([^ ]+) \| ([0-9\.]+)h:([0-9\.]+)m:([0-9\.]+)s" if __name__ == "__main__": - *_, pvl_users = subprocess.check_output("/usr/bin/getent group pvl".split()).decode().rstrip('\n').split(":") + *_, pvl_users = ( + subprocess.check_output("/usr/bin/getent group pvl".split()) + .decode() + .rstrip("\n") + .split(":") + ) parser = argparse.ArgumentParser() - parser.add_argument('-s', '--stage', required=True, choices=['coarse', 'fine', 'fine_terrain'], type=str) - parser.add_argument('-o', '--output_folder', type=Path, required=True, help="Output directory of experiments.") - parser.add_argument('--user', type=str, default=os.environ['USER'], choices=pvl_users.split(','), help="User who ran the jobs.") - parser.add_argument('-d', '--days_since', type=int, default=14, help="At least how long ago were the jobs run? Smaller values are faster.") + parser.add_argument( + "-s", + "--stage", + required=True, + choices=["coarse", "fine", "fine_terrain"], + type=str, + ) + parser.add_argument( + "-o", + "--output_folder", + type=Path, + required=True, + help="Output directory of experiments.", + ) + parser.add_argument( + "--user", + type=str, + default=os.environ["USER"], + choices=pvl_users.split(","), + help="User who ran the jobs.", + ) + parser.add_argument( + "-d", + "--days_since", + type=int, + default=14, + help="At least how long ago were the jobs run? 
Smaller values are faster.", + ) args = parser.parse_args() - date_since = (datetime.datetime.now() - datetime.timedelta(days=args.days_since)).strftime("%Y-%m-%d") - cmd = ['sacct', '--noheader', '--starttime', date_since, '-u', args.user, '-o', 'jobname%50,ElapsedRaw%50,stat%50'] - out = subprocess.check_output(cmd).decode('utf-8') + date_since = ( + datetime.datetime.now() - datetime.timedelta(days=args.days_since) + ).strftime("%Y-%m-%d") + cmd = [ + "sacct", + "--noheader", + "--starttime", + date_since, + "-u", + args.user, + "-o", + "jobname%50,ElapsedRaw%50,stat%50", + ] + out = subprocess.check_output(cmd).decode("utf-8") job_times = {} - for l in out.splitlines(): - job_name, job_sec, status, *_ = l.strip().split() - regex = re.compile(f"{args.output_folder.stem}_({'[A-Z]'*8}_.+)").fullmatch(job_name) + for line in out.splitlines(): + job_name, job_sec, status, *_ = line.strip().split() + regex = re.compile(f"{args.output_folder.stem}_({'[A-Z]'*8}_.+)").fullmatch( + job_name + ) if regex is not None: - seed_stage, = regex.groups() - job_times[seed_stage] = (int(job_sec), (status == "COMPLETED")) + (seed_stage,) = regex.groups() + job_times[seed_stage] = (int(job_sec), (status == "COMPLETED")) data_dict = defaultdict(list) all_time_logs = list(args.output_folder.rglob(f"{args.stage}_times.txt")) @@ -45,16 +86,20 @@ if not finished: continue for name, start, end, h, m, s in re.findall(REGEX_PATTERN, log.read_text()): - chunk_time = float(h)*3600 + float(m)*60 + float(s) + chunk_time = float(h) * 3600 + float(m) * 60 + float(s) percent = chunk_time * 100 / job_time data_dict[name].append((chunk_time, percent)) to_print = [] for key, data_list in data_dict.items(): - average_time = round(np.mean([t for t,_ in data_list])) - average_percent = round(np.mean([p for _,p in data_list])) - to_print.append((average_percent, f"{key.ljust(40)} {average_time//3600:02d}h:{((average_time%3600)//60):02d}m ({average_percent}%) [#{len(data_list)}]")) - - for _,s in sorted(to_print): + average_time = round(np.mean([t for t, _ in data_list])) + average_percent = round(np.mean([p for _, p in data_list])) + to_print.append( + ( + average_percent, + f"{key.ljust(40)} {average_time//3600:02d}h:{((average_time%3600)//60):02d}m ({average_percent}%) [#{len(data_list)}]", + ) + ) + + for _, s in sorted(to_print): print(s) - \ No newline at end of file diff --git a/infinigen/tools/results/parse_videos.py b/infinigen/tools/results/parse_videos.py index b0850060b..19bd17eaf 100644 --- a/infinigen/tools/results/parse_videos.py +++ b/infinigen/tools/results/parse_videos.py @@ -9,18 +9,17 @@ from pathlib import Path parser = argparse.ArgumentParser() -parser.add_argument('input_folder', type=Path, nargs='+') -parser.add_argument('--output_folder', type=Path, default=None) -parser.add_argument('--image_type', default='Image') -parser.add_argument('--camera', type=int, default=0) -parser.add_argument('--overlay', type=int, default=1) -parser.add_argument('--join', type=int, default=1) -parser.add_argument('--fps', type=int, default=24) -parser.add_argument('--resize', type=int, nargs='+', default=[720, 1280]) +parser.add_argument("input_folder", type=Path, nargs="+") +parser.add_argument("--output_folder", type=Path, default=None) +parser.add_argument("--image_type", default="Image") +parser.add_argument("--camera", type=int, default=0) +parser.add_argument("--overlay", type=int, default=1) +parser.add_argument("--join", type=int, default=1) +parser.add_argument("--fps", type=int, default=24) 
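For reference, a minimal sketch of the two ffmpeg invocations this script builds in the loop below: per-frame PNGs are globbed into one mp4 per seed, then the per-seed videos are concatenated through a `videos.txt` list file. The paths and output names here are hypothetical; only flags the script already uses appear.

# Sketch: frames -> per-seed mp4, then concatenate the per-seed videos.
import subprocess
from pathlib import Path

frames = Path("outputs/example_seed/frames/Image/camera_0")    # hypothetical
video = Path("outputs/videos/example_seed_Image_0.mp4")        # hypothetical

subprocess.run(
    ["ffmpeg", "-y", "-r", "24", "-pattern_type", "glob",
     "-i", f"{frames}/*.png", "-pix_fmt", "yuv420p", str(video)]
)

listing = Path("outputs/videos/videos.txt")
listing.write_text(f"file '{video.resolve()}'\n")               # one line per video
subprocess.run(
    ["ffmpeg", "-y", "-f", "concat", "-safe", "0", "-i", str(listing),
     "-c", "copy", "outputs/videos/all_Image.mp4"]
)
listing.unlink()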
+parser.add_argument("--resize", type=int, nargs="+", default=[720, 1280]) args = parser.parse_args() for input_folder in args.input_folder: - if not input_folder.is_dir(): continue @@ -33,32 +32,34 @@ for seed_folder in input_folder.iterdir(): if not seed_folder.is_dir(): continue - if len(list(seed_folder.glob('frames*'))) == 0: - print(f'{seed_folder=} has no frames*') + if len(list(seed_folder.glob("frames*"))) == 0: + print(f"{seed_folder=} has no frames*") continue filters = [] if args.resize is not None: filters += ["-s", f"{args.resize[0]}x{args.resize[1]}"] if args.overlay: - text = f'{seed_folder.name} {args.image_type} camera_{args.camera}' + text = f"{seed_folder.name} {args.image_type} camera_{args.camera}" filters += ["-vf", f"drawtext='text={text}'"] cmd = ( - f'ffmpeg -y -r {args.fps} -pattern_type glob '.split() + - f'-i {seed_folder.absolute()}/frames/{args.image_type}/camera_{args.camera}/*.png'.split() + - filters + - '-pix_fmt yuv420p '.split() + - f'{output_folder}/{seed_folder.name}_{args.image_type}_{args.camera}.mp4'.split() + f"ffmpeg -y -r {args.fps} -pattern_type glob ".split() + + f"-i {seed_folder.absolute()}/frames/{args.image_type}/camera_{args.camera}/*.png".split() + + filters + + "-pix_fmt yuv420p ".split() + + f"{output_folder}/{seed_folder.name}_{args.image_type}_{args.camera}.mp4".split() ) print(cmd) subprocess.run(cmd) if args.join: - cat_output = output_folder/f'{output_folder.name}_{args.image_type}.mp4' - videos = [x for x in output_folder.glob(f'*_{args.image_type}.mp4') if x != cat_output ] + cat_output = output_folder / f"{output_folder.name}_{args.image_type}.mp4" + videos = [ + x for x in output_folder.glob(f"*_{args.image_type}.mp4") if x != cat_output + ] + + instructions = output_folder / "videos.txt" + instructions.write_text("\n".join([f"file '{x.absolute()}'" for x in videos])) - instructions = (output_folder/'videos.txt') - instructions.write_text('\n'.join([f"file '{x.absolute()}'" for x in videos])) - cmd = f"ffmpeg -y -f concat -safe 0 -i {instructions} -c copy {cat_output}" subprocess.run(cmd.split()) instructions.unlink() diff --git a/infinigen/tools/results/resource_stats.py b/infinigen/tools/results/resource_stats.py index bb2e92f47..9c1d88aca 100644 --- a/infinigen/tools/results/resource_stats.py +++ b/infinigen/tools/results/resource_stats.py @@ -16,20 +16,22 @@ plt = sns.mpl.pyplot + def parse_mem(s: str): - for c, m in [('K', 1e6), ('M', 1e3), ('G', 1)]: + for c, m in [("K", 1e6), ("M", 1e3), ("G", 1)]: if s.endswith(c): return (float(s.rstrip(c))) / m raise Exception() + def make_plot(data, bin_width, path: os.PathLike): data = np.asarray(data) data = data[data < np.quantile(data, 0.99)] - suffix = '' + suffix = "" if data.max() > 1e6: data /= 1e6 - suffix = 'M' + suffix = "M" ax = sns.histplot(data, bins=int(data.max() / bin_width)) plt.xticks(fontsize=16) @@ -46,15 +48,21 @@ def make_plot(data, bin_width, path: os.PathLike): plt.savefig(str(path)) plt.clf() -if __name__ == "__main__": +if __name__ == "__main__": parser = argparse.ArgumentParser() - parser.add_argument('-p', '--pipeline_dirs', type=Path, nargs='+', required=True) - parser.add_argument('--assume_resample', action='store_true') + parser.add_argument("-p", "--pipeline_dirs", type=Path, nargs="+", required=True) + parser.add_argument("--assume_resample", action="store_true") args = parser.parse_args() print("Running sacct...") - sacct = subprocess.check_output('sacct --allusers --starttime 2022-10-31 --endtime 2022-11-07 -o 
JobID,JobName%100,MaxRSS,Elapsed,AllocTres%100,State%30'.split()).decode().splitlines() + sacct = ( + subprocess.check_output( + "sacct --allusers --starttime 2022-10-31 --endtime 2022-11-07 -o JobID,JobName%100,MaxRSS,Elapsed,AllocTres%100,State%30".split() + ) + .decode() + .splitlines() + ) # sacct = Path("sacct_saved.txt").read_text().splitlines() print("Processing") @@ -68,33 +76,43 @@ def make_plot(data, bin_width, path: os.PathLike): print(f"Total number of scenes: {len(finished_seeds)}") for line in sacct: - match = re.fullmatch(f"(\d+) +(\S*{run_name}\S*) +" + "(\d{2}:\d{2}:\d{2}) +\S*cpu=(.*),mem=\S+ +COMPLETED *", line) + match = re.fullmatch( + f"(\d+) +(\S*{run_name}\S*) +" + + "(\d{2}:\d{2}:\d{2}) +\S*cpu=(.*),mem=\S+ +COMPLETED *", + line, + ) if match: jobid, name, elapsed, tres = match.groups() - cpus, *_ = tres.replace('gres/gpu:', '').split(',') + cpus, *_ = tres.replace("gres/gpu:", "").split(",") gpu = re.fullmatch(".*gres/gpu:(.*=\d+),gres/gpu=.*", tres) - hours, mins, secs = map(float, elapsed.split(':')) - relevant_jobs[jobid] = {"name": name, "elapsed": (hours + mins/60 + secs/3600), "cpus": int(cpus), "gpus": gpu.group(1).split('=') if gpu else None} + hours, mins, secs = map(float, elapsed.split(":")) + relevant_jobs[jobid] = { + "name": name, + "elapsed": (hours + mins / 60 + secs / 3600), + "cpus": int(cpus), + "gpus": gpu.group(1).split("=") if gpu else None, + } for line in sacct: - memory_match = re.fullmatch("(\d+).0 +python +([\.\d]+[K|M|G]) .*COMPLETED *", line) + memory_match = re.fullmatch( + "(\d+).0 +python +([\.\d]+[K|M|G]) .*COMPLETED *", line + ) if memory_match: jobid, mem = memory_match.groups() if jobid in relevant_jobs: - relevant_jobs[jobid]['max_memory'] = parse_mem(mem) + relevant_jobs[jobid]["max_memory"] = parse_mem(mem) - for k,v in relevant_jobs.items(): + for k, v in relevant_jobs.items(): seed = re.fullmatch(".*_([A-Z]+)_.*", v["name"]).group(1) if seed in finished_seeds: scene_lookup[seed].add(k) for seed in finished_seeds: stats_txt = folder / seed / "logs" / "fine_polycounts.txt" - text = stats_txt.read_text().replace(',','') - num_tris, = map(int, re.findall("Tris:(\d+)\n", text)) + text = stats_txt.read_text().replace(",", "") + (num_tris,) = map(int, re.findall("Tris:(\d+)\n", text)) num_tri_counts.append(float(num_tris)) - all_max_mems = [] all_elapsed = [] all_cpu_hours = [] @@ -103,21 +121,23 @@ def make_plot(data, bin_width, path: os.PathLike): for scene_seed, job_id_list in scene_lookup.items(): jobs = [relevant_jobs[j] for j in sorted(job_id_list)] if args.assume_resample: - while 'render_2' in jobs[-1]["name"]: + while "render_2" in jobs[-1]["name"]: jobs.pop() - while 'render_1' in jobs[-1]["name"]: + while "render_1" in jobs[-1]["name"]: jobs.pop() - max_mem = max(j['max_memory'] for j in jobs) + max_mem = max(j["max_memory"] for j in jobs) all_max_mems.append(max_mem) - total_elapsed = sum(j['elapsed'] for j in jobs) + total_elapsed = sum(j["elapsed"] for j in jobs) all_elapsed.append(total_elapsed) - cpu_hours = sum((j['elapsed']*j["cpus"]) for j in jobs) + cpu_hours = sum((j["elapsed"] * j["cpus"]) for j in jobs) all_cpu_hours.append(cpu_hours) - gpu_hours = sum((j['elapsed']*int(j["gpus"][1])) for j in jobs if (j["gpus"] is not None)) + gpu_hours = sum( + (j["elapsed"] * int(j["gpus"][1])) for j in jobs if (j["gpus"] is not None) + ) all_gpu_hours.append(gpu_hours) - make_plot(all_max_mems, 3, 'plots/all_max_mems.png') - make_plot(all_elapsed, 0.5, 'plots/all_elapsed.png') - make_plot(all_cpu_hours, 1, 
'plots/all_cpu_hours.png') - make_plot(all_gpu_hours, 0.1, 'plots/all_gpu_hours.png') - make_plot(num_tri_counts, 2, 'plots/all_tricounts.png') + make_plot(all_max_mems, 3, "plots/all_max_mems.png") + make_plot(all_elapsed, 0.5, "plots/all_elapsed.png") + make_plot(all_cpu_hours, 1, "plots/all_cpu_hours.png") + make_plot(all_gpu_hours, 0.1, "plots/all_gpu_hours.png") + make_plot(num_tri_counts, 2, "plots/all_tricounts.png") diff --git a/infinigen/tools/results/scatter_figure.py b/infinigen/tools/results/scatter_figure.py index 895029f89..c1723abca 100644 --- a/infinigen/tools/results/scatter_figure.py +++ b/infinigen/tools/results/scatter_figure.py @@ -4,105 +4,128 @@ # Authors: Hongyu Wen +import argparse +import os + import bpy +import gin import mathutils -from pathlib import Path -import sys, importlib -import numpy as np -import os -import argparse import numpy as np from numpy.random import uniform as U - -import gin -from infinigen.core.surface import registry +from infinigen.assets.lighting import sky_lighting +from infinigen.assets.materials import dirt, mud from infinigen.assets.scatters import ( - grass, - chopped_trees, - pine_needle, - flowerplant, - fern, - pinecone, - urchin, + chopped_trees, + grass, + pine_needle, + pinecone, + seashells, seaweed, - seashells + urchin, ) -from infinigen.assets.materials import dirt, sand, mud -from infinigen.core.placement import density -import math -from infinigen.core.util import blender as butil -from infinigen.assets.lighting import sky_lighting from infinigen.core import init +from infinigen.core.placement import density +from infinigen.core.surface import registry +from infinigen.core.util import blender as butil gin.clear_config() gin.enter_interactive_mode() -gin.parse_config_files_and_bindings(['config/base.gin'], []) +gin.parse_config_files_and_bindings(["config/base.gin"], []) registry.initialize_from_gin() parser = argparse.ArgumentParser() -parser.add_argument('-s', '--save', type=str, default='stable') -parser.add_argument('-m', '--mode', type=str, default='grass') -parser.add_argument('-d', '--debug', type=bool, default=False) -parser.add_argument('-v', '--view', type=int, default=0.5) -parser.add_argument('-ix', '--index_x', type=int, default=0) -parser.add_argument('-iy', '--index_y', type=int, default=0) +parser.add_argument("-s", "--save", type=str, default="stable") +parser.add_argument("-m", "--mode", type=str, default="grass") +parser.add_argument("-d", "--debug", type=bool, default=False) +parser.add_argument("-v", "--view", type=int, default=0.5) +parser.add_argument("-ix", "--index_x", type=int, default=0) +parser.add_argument("-iy", "--index_y", type=int, default=0) args = init.parse_args_blender() def apply_scatters(obj, mode, index): all_objects = [obj] path = f"outputs/scatter_figure/{mode}/{mode}_{index[0]}_{index[1]}.png" - if os.path.exists(path) and args.save == 'stable': + if os.path.exists(path) and args.save == "stable": butil.delete(all_objects) return - - if mode == 'grass': + + if mode == "grass": mud.apply(obj) - selection = density.placement_mask(normal_dir=(0, 0, 1), scale=3, - return_scalar=True, select_thresh=U(0, 0.2)) + selection = density.placement_mask( + normal_dir=(0, 0, 1), scale=3, return_scalar=True, select_thresh=U(0, 0.2) + ) go, _ = grass.apply(obj, selection=selection, density=15) all_objects.append(go) # if U() < 0.3: - # fo, _ = flowerplant.apply(obj, - # selection=density.get_placement_distribution(normal_dir=(0, 0, 1), scale=U(1, 3), select_thresh=0.1, return_scalar=True), + # 
fo, _ = flowerplant.apply(obj, + # selection=density.get_placement_distribution(normal_dir=(0, 0, 1), scale=U(1, 3), select_thresh=0.1, return_scalar=True), # density=U(0.3, 2)) # all_objects.append(fo) # elif mode == 'fern': # dirt.apply(obj) # fern.apply(obj, density=3.5, selection=density.get_placement_distribution(normal_dir=(0, 0, 1), scale=0.1, return_scalar=True)) - elif mode == 'seafloor': + elif mode == "seafloor": mud.apply(obj) dirt.apply(obj) - uo, _ = urchin.apply(obj, selection=density.placement_mask(scale=U(1, 3), select_thresh=U(0.8, 1.2)), density=U(0.8, 1.2), n=int(U(3, 10))) - mo, _ = seashells.apply(obj, selection=density.placement_mask(scale=U(1, 3), select_thresh=U(0.8, 1.2)), density=U(1.5, 2.5), n=int(U(5, 15))) - so, _ = seaweed.apply(obj, selection=density.placement_mask(scale=U(1, 3), select_thresh=U(0.8, 1.2), normal_thresh=0.4), density=U(1, 2), n=int(U(3, 10))) + uo, _ = urchin.apply( + obj, + selection=density.placement_mask(scale=U(1, 3), select_thresh=U(0.8, 1.2)), + density=U(0.8, 1.2), + n=int(U(3, 10)), + ) + mo, _ = seashells.apply( + obj, + selection=density.placement_mask(scale=U(1, 3), select_thresh=U(0.8, 1.2)), + density=U(1.5, 2.5), + n=int(U(5, 15)), + ) + so, _ = seaweed.apply( + obj, + selection=density.placement_mask( + scale=U(1, 3), select_thresh=U(0.8, 1.2), normal_thresh=0.4 + ), + density=U(1, 2), + n=int(U(3, 10)), + ) all_objects.append(uo) all_objects.append(mo) all_objects.append(so) - elif mode == 'fallen_trees': - po, _ = pine_needle.apply(obj, - selection=density.placement_mask(scale=U(0.2, 1), select_thresh=U(0.4, 0.6), return_scalar=True), - density=U(1000, 3000)) - pio, _ = pinecone.apply(obj, - selection=density.placement_mask(scale=U(0.1, 0.4), select_thresh=U(0.4, 0.6)), - density=U(0.4, 0.6)) - selection = density.placement_mask(scale=U(0.1, 0.4), select_thresh=U(0.4, 0.6), density=U(0.4, 0.6)) + elif mode == "fallen_trees": + po, _ = pine_needle.apply( + obj, + selection=density.placement_mask( + scale=U(0.2, 1), select_thresh=U(0.4, 0.6), return_scalar=True + ), + density=U(1000, 3000), + ) + pio, _ = pinecone.apply( + obj, + selection=density.placement_mask( + scale=U(0.1, 0.4), select_thresh=U(0.4, 0.6) + ), + density=U(0.4, 0.6), + ) + selection = density.placement_mask( + scale=U(0.1, 0.4), select_thresh=U(0.4, 0.6), density=U(0.4, 0.6) + ) co, _ = chopped_trees.apply(obj, selection=selection) all_objects.append(po) all_objects.append(pio) all_objects.append(co) else: assert False - + bpy.context.scene.render.film_transparent = True - bpy.context.scene.render.image_settings.color_mode = 'RGBA' + bpy.context.scene.render.image_settings.color_mode = "RGBA" # bpy.context.scene.render.engine = 'CYCLES' bpy.context.scene.render.filepath = path - bpy.ops.render.render(scene=bpy.context.scene.name, write_still=True) + bpy.ops.render.render(scene=bpy.context.scene.name, write_still=True) # butil.delete(all_objects) - + + s = 3 margin = 0 n = 100 @@ -113,25 +136,56 @@ def apply_scatters(obj, mode, index): sky_lighting.add_lighting() -bpy.ops.object.camera_add(location=mathutils.Vector(args.view * np.array([-10,-10,10])), rotation=(np.deg2rad(70), 0, np.deg2rad(-45))) +bpy.ops.object.camera_add( + location=mathutils.Vector(args.view * np.array([-10, -10, 10])), + rotation=(np.deg2rad(70), 0, np.deg2rad(-45)), +) cam = bpy.context.active_object bpy.context.scene.camera = cam bpy.context.scene.render.resolution_x = 2048 bpy.context.scene.render.resolution_y = 1024 seen_list = [ - (0, 0), (0, 1), (0, 2), (0, 3), (0, 4), - (1, 0), 
(1, 1), (1, 2), (1, 3), (1, 4), - (2, 0), (2, 1), (2, 2), (2, 3), (2, 4), - (3, 0), (3, 1), (3, 2), (3, 3), (3, 4), - (4, 0), (4, 1), (4, 2), (4, 3), (4, 4), - (5, 1), (5, 2), (5, 3), (5, 4), (5, 5), - (6, 1), (6, 2), (6, 3), (6, 4), (6, 5), + (0, 0), + (0, 1), + (0, 2), + (0, 3), + (0, 4), + (1, 0), + (1, 1), + (1, 2), + (1, 3), + (1, 4), + (2, 0), + (2, 1), + (2, 2), + (2, 3), + (2, 4), + (3, 0), + (3, 1), + (3, 2), + (3, 3), + (3, 4), + (4, 0), + (4, 1), + (4, 2), + (4, 3), + (4, 4), + (5, 1), + (5, 2), + (5, 3), + (5, 4), + (5, 5), + (6, 1), + (6, 2), + (6, 3), + (6, 4), + (6, 5), ] try_list = [(1, 1)] -if (args.debug): +if args.debug: enum_list = try_list else: enum_list = seen_list @@ -144,11 +198,13 @@ def apply_scatters(obj, mode, index): np.random.seed(x * prime1 + y + prime2) # x, y = i // rowsize, i % rowsize pos = (s + margin) * mathutils.Vector((x, y, 0)) -bpy.ops.mesh.primitive_grid_add(size=s, location=pos, x_subdivisions=planeres, y_subdivisions=planeres) +bpy.ops.mesh.primitive_grid_add( + size=s, location=pos, x_subdivisions=planeres, y_subdivisions=planeres +) plane = bpy.context.active_object apply_scatters(plane, mode=mode, index=(x, y)) - # butil.delete(plane) +# butil.delete(plane) # path = f"outputs/scatter_figure/grass.png" # bpy.context.scene.render.filepath = path -# bpy.ops.render.render(scene=bpy.context.scene.name, write_still=True) \ No newline at end of file +# bpy.ops.render.render(scene=bpy.context.scene.name, write_still=True) diff --git a/infinigen/tools/results/strip_alpha_background.py b/infinigen/tools/results/strip_alpha_background.py index cf34ba76d..4b989d615 100644 --- a/infinigen/tools/results/strip_alpha_background.py +++ b/infinigen/tools/results/strip_alpha_background.py @@ -4,27 +4,31 @@ # Authors: Alexander Raistrick, Lingjie Mei -import imageio -from pathlib import Path import argparse -import pdb +import logging +from pathlib import Path + +import imageio + +logger = logging.getLogger(__name__) parser = argparse.ArgumentParser() -parser.add_argument('folder', type=Path, nargs='+') -parser.add_argument('thresh', type=float, default=0.05) +parser.add_argument("folder", type=Path, nargs="+") +parser.add_argument("thresh", type=float, default=0.05) def main(thresh, folder): - assert thresh > 1, f'Images are 0-255 you probably didnt want {thresh=}' + assert thresh > 1, f"Images are 0-255 you probably didnt want {thresh=}" for folder in folder: - out_folder = folder.parent / (folder.stem + f'_thresh_{thresh}') + out_folder = folder.parent / (folder.stem + f"_thresh_{thresh}") out_folder.mkdir(exist_ok=True, parents=True) for imgpath in folder.iterdir(): try: img = imageio.imread(imgpath) - except: + except FileNotFoundError: + logger.warning(f"Could not read {imgpath}") continue pixs = img.reshape(-1, 4) @@ -32,7 +36,7 @@ def main(thresh, folder): pixs[mask] = 0 img = pixs.reshape(img.shape) - print(f'Stripped {100 * mask.mean()}% from {imgpath}') + print(f"Stripped {100 * mask.mean()}% from {imgpath}") imageio.imwrite(out_folder / imgpath.name, img) diff --git a/infinigen/tools/results/summarize.py b/infinigen/tools/results/summarize.py index 5fa4dc225..b74bb7217 100644 --- a/infinigen/tools/results/summarize.py +++ b/infinigen/tools/results/summarize.py @@ -13,38 +13,52 @@ from pathlib import Path import cv2 -import flow_vis # run pip install flow_vis import numpy as np import skimage.measure -from einops import repeat from imageio.v3 import imread from matplotlib import pyplot as plt from tqdm import tqdm +try: + import flow_vis # run pip 
install flow_vis + from einops import repeat +except ImportError: + raise ImportError( + "GT visualization requires extra dependencies. Please install optional extras via `pip install .[vis]`." + ) + def make_defaultdict(inner): - return (lambda: defaultdict(inner)) + return lambda: defaultdict(inner) + def parse_mask_tag_jsons(base_folder): - for file_path in base_folder.rglob('MaskTag.json'): - if match := re.fullmatch("fine_([0-9])_([0-9])_([0-9]{4})_([0-9])", file_path.parent.name): + for file_path in base_folder.rglob("MaskTag.json"): + if match := re.fullmatch( + "fine_([0-9])_([0-9])_([0-9]{4})_([0-9])", file_path.parent.name + ): _, _, frame_str, _ = match.groups() yield (int(frame_str), file_path) - for file_path in base_folder.rglob('MaskTag.json'): + for file_path in base_folder.rglob("MaskTag.json"): if match := re.fullmatch("fine.*", file_path.parent.name): yield (0, file_path) + def summarize_folder(base_folder): base_folder = Path(base_folder) output = defaultdict(make_defaultdict(make_defaultdict(make_defaultdict(dict)))) max_frame = -1 - for file_path in base_folder.rglob('*'): + for file_path in base_folder.rglob("*"): if (not file_path.is_file) or ("saved_mesh" in file_path.parts): continue - if match := re.fullmatch("(.*)_([0-9]{4})_([0-9]{2})_([0-9]{2})\.([a-z]+)", file_path.name): + if match := re.fullmatch( + "(.*)_([0-9]{4})_([0-9]{2})_([0-9]{2})\.([a-z]+)", file_path.name + ): data_type, frame_str, rig, subcam, suffix = match.groups() - output[data_type][suffix][rig][subcam][frame_str] = str(file_path.relative_to(base_folder)) + output[data_type][suffix][rig][subcam][frame_str] = str( + file_path.relative_to(base_folder) + ) max_frame = max(max_frame, int(frame_str)) # Rename keys @@ -52,8 +66,8 @@ def summarize_folder(base_folder): output["Camera Intrinsics"] = output.pop("K") mask_tag_jsons = sorted(parse_mask_tag_jsons(base_folder)) - for frame in range(1, max_frame+1): - _, closest = max((f,p) for f,p in mask_tag_jsons if (int(f) <= frame)) + for frame in range(1, max_frame + 1): + _, closest = max((f, p) for f, p in mask_tag_jsons if (int(f) <= frame)) output["Mask Tags"][f"{frame:04d}"] = str(closest.relative_to(base_folder)) output["stats"] = {"Max Frame": max_frame} @@ -61,10 +75,21 @@ def summarize_folder(base_folder): (base_folder / "summary.json").write_text(json.dumps(output, indent=4)) return base_folder / "summary.json" + def what_is_missing(summary): max_frame = summary["stats"]["Max Frame"] - all_rigs = set(chain((summary["SurfaceNormal"]["png"].keys()), (summary["SurfaceNormal"]["png"].keys()))) - all_subcams = set(chain((summary["SurfaceNormal"]["png"]["00"].keys()), (summary["SurfaceNormal"]["png"]["00"].keys()))) + all_rigs = set( + chain( + (summary["SurfaceNormal"]["png"].keys()), + (summary["SurfaceNormal"]["png"].keys()), + ) + ) + all_subcams = set( + chain( + (summary["SurfaceNormal"]["png"]["00"].keys()), + (summary["SurfaceNormal"]["png"]["00"].keys()), + ) + ) logs = [] for rig in all_rigs: for subcam in all_subcams: @@ -77,34 +102,43 @@ def what_is_missing(summary): logs.append(f"Image is missing for frame {frame}") return logs + def process_flow_frame(path, shape): flow3d = np.load(path) - flow2d_resized = cv2.resize(flow3d, dsize=shape, interpolation=cv2.INTER_LINEAR)[...,:2] + flow2d_resized = cv2.resize(flow3d, dsize=shape, interpolation=cv2.INTER_LINEAR)[ + ..., :2 + ] flow2d_resized[(np.abs(flow2d_resized) > 1e3) | np.isnan(flow2d_resized)] = -1 flow_color = flow_vis.flow_to_color(flow2d_resized, convert_to_bgr=False) return 
flow_color + def process_depth_frame(path, shape): depth = np.load(path) return cv2.resize(depth, dsize=shape, interpolation=cv2.INTER_LINEAR) + def process_mask(path, shape): mask = imread(path) H, W = mask.shape scale = (W // shape[0], H // shape[1]) out = skimage.measure.block_reduce(mask, scale, np.max) - return repeat(out, 'H W -> H W 3') + return repeat(out, "H W -> H W 3") + def frames_to_video(file_path, frames: list, fps=24): - assert Path(file_path).suffix == '.avi' + assert Path(file_path).suffix == ".avi" H, W, _ = frames[0].shape - video = cv2.VideoWriter(str(file_path), cv2.VideoWriter_fourcc(*'DIVX'),frameSize=(W, H), fps=fps) + video = cv2.VideoWriter( + str(file_path), cv2.VideoWriter_fourcc(*"DIVX"), frameSize=(W, H), fps=fps + ) for img in frames: video.write(img) video.release() assert os.path.exists(file_path) print(f"Wrote {file_path}") + def depth_to_jet(depth, scale_vmin=1.0): valid = (depth > 1e-3) & (depth < 1e4) vmin = depth[valid].min() * scale_vmin @@ -113,7 +147,8 @@ def depth_to_jet(depth, scale_vmin=1.0): norm = plt.Normalize(vmin=vmin, vmax=vmax) depth = cmap(norm(depth)) depth[~valid] = 1 - return np.ascontiguousarray(depth[...,:3] * 255, dtype=np.uint8) + return np.ascontiguousarray(depth[..., :3] * 255, dtype=np.uint8) + def process_scene_folder(folder, preview): summary_json = summarize_folder(folder) @@ -125,35 +160,52 @@ def process_scene_folder(folder, preview): if not preview: return - depth_paths = folder_data["Depth"]['npy']["00"]["00"] - flow3d_paths = folder_data["Flow3D"]['npy']["00"]["00"] - image_paths = folder_data["Image"]['png']["00"]["00"] - occlusion_boundary_paths = folder_data["OcclusionBoundaries"]['png']["00"]["00"] - flow_mask_paths = folder_data["Flow3D_Mask"]['png']["00"]["00"] + depth_paths = folder_data["Depth"]["npy"]["00"]["00"] + flow3d_paths = folder_data["Flow3D"]["npy"]["00"]["00"] + image_paths = folder_data["Image"]["png"]["00"]["00"] + occlusion_boundary_paths = folder_data["OcclusionBoundaries"]["png"]["00"]["00"] + flow_mask_paths = folder_data["Flow3D_Mask"]["png"]["00"]["00"] all_flow_frames = sorted(image_paths.keys()) shape = (1280, 720) with mp.Pool() as pool: - all_flow_frames = pool.starmap(process_flow_frame, tqdm([(folder / path, shape) for _, path in sorted(flow3d_paths.items())])) - all_depth_frames = pool.starmap(process_depth_frame, tqdm([(folder / path, shape) for _, path in sorted(depth_paths.items())])) - all_occlusion_frames = pool.starmap(process_mask, tqdm([(folder / path, shape) for _, path in sorted(occlusion_boundary_paths.items())])) - all_flow_mask_frames = pool.starmap(process_mask, tqdm([(folder / path, shape) for _, path in sorted(flow_mask_paths.items())])) + all_flow_frames = pool.starmap( + process_flow_frame, + tqdm([(folder / path, shape) for _, path in sorted(flow3d_paths.items())]), + ) + all_depth_frames = pool.starmap( + process_depth_frame, + tqdm([(folder / path, shape) for _, path in sorted(depth_paths.items())]), + ) + all_occlusion_frames = pool.starmap( + process_mask, + tqdm( + [ + (folder / path, shape) + for _, path in sorted(occlusion_boundary_paths.items()) + ] + ), + ) + all_flow_mask_frames = pool.starmap( + process_mask, + tqdm( + [(folder / path, shape) for _, path in sorted(flow_mask_paths.items())] + ), + ) previews: Path = folder / "previews" previews.mkdir(exist_ok=True) - frames_to_video(previews / 'occlusion_boundaries.avi', all_occlusion_frames) - frames_to_video(previews / 'flow_mask.avi', all_flow_mask_frames) + frames_to_video(previews / 
"occlusion_boundaries.avi", all_occlusion_frames) + frames_to_video(previews / "flow_mask.avi", all_flow_mask_frames) depth_visualization = depth_to_jet(np.asarray(all_depth_frames)) - frames_to_video(previews / 'video_depth.avi', depth_visualization) - frames_to_video(previews / 'flow_video.avi', all_flow_frames) + frames_to_video(previews / "video_depth.avi", depth_visualization) + frames_to_video(previews / "flow_video.avi", all_flow_frames) -if __name__ == "__main__": +if __name__ == "__main__": parser = argparse.ArgumentParser() - parser.add_argument('folder', type=Path) - parser.add_argument('--preview', action='store_true') + parser.add_argument("folder", type=Path) + parser.add_argument("--preview", action="store_true") args = parser.parse_args() process_scene_folder(args.folder, preview=args.preview) - - diff --git a/infinigen/tools/results/visualize_planar_graph.py b/infinigen/tools/results/visualize_planar_graph.py index b88480b8b..ef84388af 100644 --- a/infinigen/tools/results/visualize_planar_graph.py +++ b/infinigen/tools/results/visualize_planar_graph.py @@ -12,18 +12,20 @@ sys.path.insert(0, os.getcwd()) from PIL import Image -from infinigen.core.util.math import FixedSeed + +from infinigen.core.constraints.example_solver.room import GraphMaker + # noinspection PyUnresolvedReferences -from infinigen.core.util import blender as butil +from infinigen.core.util.math import FixedSeed from infinigen_examples.generate_individual_assets import make_args -from infinigen.core.constraints.example_solver.room import GraphMaker + def build_scene(idx, path): with FixedSeed(idx): factory = GraphMaker(idx) graph = factory.make_graph(idx) factory.draw(graph) - (path / 'images').mkdir(exist_ok=True) + (path / "images").mkdir(exist_ok=True) imgpath = path / f"images/image_{idx:03d}.png" plt.savefig(imgpath) plt.clf() @@ -31,26 +33,30 @@ def build_scene(idx, path): def make_grid(args, path, n): files = [] - for filename in sorted(os.listdir(f'{path}/images')): - if filename.endswith('.png'): - files.append(f'{path}/images/{filename}') + for filename in sorted(os.listdir(f"{path}/images")): + if filename.endswith(".png"): + files.append(f"{path}/images/{filename}") files = files[:n] if len(files) == 0: - print('No images found') + print("No images found") return with Image.open(files[0]) as i: x, y = i.size - sz_x = list(sorted(range(1, n + 1), key=lambda x: abs(math.ceil(n / x) / x - args.best_ratio)))[0] + sz_x = list( + sorted( + range(1, n + 1), key=lambda x: abs(math.ceil(n / x) / x - args.best_ratio) + ) + )[0] sz_y = math.ceil(n / sz_x) - img = Image.new('RGBA', (sz_x * x, sz_y * y)) + img = Image.new("RGBA", (sz_x * x, sz_y * y)) for idx, file in enumerate(files): with Image.open(file) as i: img.paste(i, (idx % sz_x * x, idx // sz_x * y)) - img.save(f'{path}/grid.png') + img.save(f"{path}/grid.png") def main(args): - path = Path(os.getcwd()) / 'outputs' + path = Path(os.getcwd()) / "outputs" path.mkdir(exist_ok=True) fac_path = path / GraphMaker.__name__ if fac_path.exists() and args.skip_existing: @@ -64,7 +70,7 @@ def main(args): make_grid(args, fac_path, n_images) -if __name__ == '__main__': +if __name__ == "__main__": args = make_args() args.no_mod = args.no_mod or args.fire with FixedSeed(1): diff --git a/infinigen/tools/submit_asset_cache.py b/infinigen/tools/submit_asset_cache.py index 5e10c0b4c..3bf548b6a 100644 --- a/infinigen/tools/submit_asset_cache.py +++ b/infinigen/tools/submit_asset_cache.py @@ -3,12 +3,12 @@ # Authors: Karhan Kayan -import submitit import argparse -from 
pathlib import Path -import sys import os -import time +import sys +from pathlib import Path + +import submitit sys.path.append(str(Path(os.path.split(os.path.abspath(__file__))[0]) / "..")) @@ -16,22 +16,27 @@ def get_slurm_banned_nodes(config_path=None): if config_path is None: return [] - with Path(config_path).open('r') as f: + with Path(config_path).open("r") as f: return list(f.read().split()) - - + + parser = argparse.ArgumentParser() -parser.add_argument('-f', '--asset_folder', type=str) -parser.add_argument('-a', '--assets', nargs='+', default=[ - 'CachedBushFactory', - 'CachedTreeFactory', - 'CachedCactusFactory', - 'CachedCreatureFactory', - 'CachedBoulderFactory' -]) -parser.add_argument('-n', '--number', type=int, default=1) -parser.add_argument('-s', '--start_frame', type=int, default=-20) -parser.add_argument('-d', '--simulation_duration', type=int, default=24*20+20) +parser.add_argument("-f", "--asset_folder", type=str) +parser.add_argument( + "-a", + "--assets", + nargs="+", + default=[ + "CachedBushFactory", + "CachedTreeFactory", + "CachedCactusFactory", + "CachedCreatureFactory", + "CachedBoulderFactory", + ], +) +parser.add_argument("-n", "--number", type=int, default=1) +parser.add_argument("-s", "--start_frame", type=int, default=-20) +parser.add_argument("-d", "--simulation_duration", type=int, default=24 * 20 + 20) # parser.add_argument('-r', '--resolution', type=int) # parser.add_argument('--dissolve_speed', type=int, default=25) # parser.add_argument('--dom_scale', type=int, default=1) @@ -42,16 +47,18 @@ def get_slurm_banned_nodes(config_path=None): for asset in args.assets: for i in range(args.number): - cmd = f"{sys.executable} -m infinigen.assets.fluid.run_asset_cache -f {args.asset_folder}/ -a {asset} -s {args.start_frame} -d {args.simulation_duration}".split(" ") + cmd = f"{sys.executable} -m infinigen.assets.fluid.run_asset_cache -f {args.asset_folder}/ -a {asset} -s {args.start_frame} -d {args.simulation_duration}".split( + " " + ) print(cmd) executor = submitit.AutoExecutor(folder=str(Path(args.asset_folder) / "logs")) executor.update_parameters( mem_gb=16, name=f"{asset}_{i}", cpus_per_task=4, - timeout_min=60*24, + timeout_min=60 * 24, slurm_account="pvl", - slurm_exclude= "node408,node409", + slurm_exclude="node408,node409", ) render_fn = submitit.helpers.CommandFunction(cmd) executor.submit(render_fn) diff --git a/infinigen/tools/suffixes.py b/infinigen/tools/suffixes.py index 3a8ce7bbe..0cdb59750 100644 --- a/infinigen/tools/suffixes.py +++ b/infinigen/tools/suffixes.py @@ -3,45 +3,45 @@ # Authors: Alexander Raistrick -from pathlib import Path from copy import copy +from pathlib import Path -SUFFIX_ORDERING = ['cam_rig', 'resample', 'frame', 'subcam'] +SUFFIX_ORDERING = ["cam_rig", "resample", "frame", "subcam"] -def get_suffix(indices): - suffix = '' +def get_suffix(indices): + suffix = "" if indices is None: return suffix - + indices = copy(indices) for key in SUFFIX_ORDERING: val = indices.get(key, 0) - if key == 'frame' and isinstance(val, int): - suffix += '_' + f'{val:04d}' + if key == "frame" and isinstance(val, int): + suffix += "_" + f"{val:04d}" else: - suffix += '_' + str(val) + suffix += "_" + str(val) return suffix -def parse_suffix(s): +def parse_suffix(s): if isinstance(s, Path): s = s.name - if '.' in s: - s = s[:s.index('.')] + if "." 
in s: + s = s[: s.index(".")] - s = s.strip('_') - - s_parts = s.split('_') + s = s.strip("_") + + s_parts = s.split("_") if len(s_parts) > len(SUFFIX_ORDERING) + 1: - raise ValueError(f'Couldnt parse {s=} with {len(s_parts)=}') - + raise ValueError(f"Couldnt parse {s=} with {len(s_parts)=}") + if len(s_parts) == len(SUFFIX_ORDERING) + 1: - s_parts = s_parts[1:] # discard leading filename / description etc + s_parts = s_parts[1:] # discard leading filename / description etc if len(s_parts) != len(SUFFIX_ORDERING): return None diff --git a/infinigen/tools/terrain/generate_terrain_assets.py b/infinigen/tools/terrain/generate_terrain_assets.py index 45fa00f8d..4c814ee7d 100644 --- a/infinigen/tools/terrain/generate_terrain_assets.py +++ b/infinigen/tools/terrain/generate_terrain_assets.py @@ -4,20 +4,19 @@ # Authors: Zeyu Ma - -import os -import sys import argparse from pathlib import Path import bpy + +from infinigen.core import init +from infinigen.core.util import blender as butil +from infinigen.core.util.math import FixedSeed, int_hash +from infinigen.core.util.organization import AssetFile, Assets, LandTile from infinigen.terrain.assets.caves import caves_asset from infinigen.terrain.assets.landtiles import landtile_asset from infinigen.terrain.assets.upsidedown_mountains import upsidedown_mountains_asset -from infinigen.core.util import blender as butil -from infinigen.core.util.math import int_hash, FixedSeed -from infinigen.core.util.organization import Assets, LandTile, AssetFile -from infinigen.core import init + def asset_generation( output_folder, @@ -29,51 +28,78 @@ def asset_generation( ): for i in instance_ids: for asset in assets: - if asset in [LandTile.Mesa, LandTile.Canyon, LandTile.Canyons, LandTile.Cliff, LandTile.Mountain, LandTile.River, LandTile.Volcano, LandTile.MultiMountains, LandTile.Coast]: - if not (output_folder/asset/f"{i}"/AssetFile.Finish).exists(): + if asset in [ + LandTile.Mesa, + LandTile.Canyon, + LandTile.Canyons, + LandTile.Cliff, + LandTile.Mountain, + LandTile.River, + LandTile.Volcano, + LandTile.MultiMountains, + LandTile.Coast, + ]: + if not (output_folder / asset / f"{i}" / AssetFile.Finish).exists(): print(asset, i) if not check_only: with FixedSeed(int_hash([asset, seed, i])): - landtile_asset(output_folder/asset/f"{i}", asset, device=device) + landtile_asset( + output_folder / asset / f"{i}", asset, device=device + ) if asset == Assets.UpsidedownMountains: - if not (output_folder/asset/f"{i}"/AssetFile.Finish).exists(): + if not (output_folder / asset / f"{i}" / AssetFile.Finish).exists(): print(asset, i) if not check_only: with FixedSeed(int_hash([asset, seed, i])): - upsidedown_mountains_asset(output_folder/Assets.UpsidedownMountains/f"{i}", device=device) + upsidedown_mountains_asset( + output_folder / Assets.UpsidedownMountains / f"{i}", + device=device, + ) if asset == Assets.Caves: - if not (output_folder/asset/f"{i}"/AssetFile.Finish).exists(): + if not (output_folder / asset / f"{i}" / AssetFile.Finish).exists(): print(asset, i) if not check_only: with FixedSeed(int_hash([asset, seed, i])): - caves_asset(output_folder/Assets.Caves/f"{i}") + caves_asset(output_folder / Assets.Caves / f"{i}") if __name__ == "__main__": # by default infinigen does on-the-fly terrain asset generation, but if you want to pre-generate a pool of assets, run this code parser = argparse.ArgumentParser() - parser.add_argument('-a', '--assets', nargs='+', default=[ - LandTile.MultiMountains, - LandTile.Coast, - LandTile.Mesa, - LandTile.Canyon, - 
LandTile.Canyons, - LandTile.Cliff, - LandTile.Mountain, - LandTile.River, - LandTile.Volcano, - Assets.UpsidedownMountains, - Assets.Caves, - ]) - parser.add_argument('-s', '--start', type=int, default=0) - parser.add_argument('-e', '--end', type=int, default=1) - parser.add_argument('-f', '--folder') - parser.add_argument('--seed', type=int, default=0) - parser.add_argument('--check_only', type=int, default=0) - parser.add_argument('--device', type=str, default="cpu") + parser.add_argument( + "-a", + "--assets", + nargs="+", + default=[ + LandTile.MultiMountains, + LandTile.Coast, + LandTile.Mesa, + LandTile.Canyon, + LandTile.Canyons, + LandTile.Cliff, + LandTile.Mountain, + LandTile.River, + LandTile.Volcano, + Assets.UpsidedownMountains, + Assets.Caves, + ], + ) + parser.add_argument("-s", "--start", type=int, default=0) + parser.add_argument("-e", "--end", type=int, default=1) + parser.add_argument("-f", "--folder") + parser.add_argument("--seed", type=int, default=0) + parser.add_argument("--check_only", type=int, default=0) + parser.add_argument("--device", type=str, default="cpu") args = init.parse_args_blender(parser) - bpy.ops.preferences.addon_enable(module='add_mesh_extra_objects') - bpy.ops.preferences.addon_enable(module='ant_landscape') + bpy.ops.preferences.addon_enable(module="add_mesh_extra_objects") + bpy.ops.preferences.addon_enable(module="ant_landscape") butil.clear_scene(targets=[bpy.data.objects]) - asset_generation(Path(args.folder), args.assets, list(range(args.start, args.end)), args.seed, args.device, check_only=args.check_only) + asset_generation( + Path(args.folder), + args.assets, + list(range(args.start, args.end)), + args.seed, + args.device, + check_only=args.check_only, + ) diff --git a/infinigen/tools/terrain/kernelize_surfaces.py b/infinigen/tools/terrain/kernelize_surfaces.py index dde488010..ffd6fa14d 100644 --- a/infinigen/tools/terrain/kernelize_surfaces.py +++ b/infinigen/tools/terrain/kernelize_surfaces.py @@ -4,21 +4,45 @@ # Authors: Zeyu Ma -import os -import sys - import subprocess from pathlib import Path import bpy + from infinigen import __version__ -from infinigen.assets.materials import chunkyrock, cobble_stone, cracked_ground, dirt, ice, mountain, mud, sand, sandstone, snow, soil, stone -from infinigen.terrain.surface_kernel.kernelizer import Kernelizer +from infinigen.assets.materials import ( + chunkyrock, + cobble_stone, + cracked_ground, + dirt, + ice, + mountain, + mud, + sand, + sandstone, + snow, + soil, + stone, +) from infinigen.core.util.blender import clear_scene +from infinigen.terrain.surface_kernel.kernelizer import Kernelizer if __name__ == "__main__": parser = Kernelizer() - for surface in [chunkyrock, cobble_stone, cracked_ground, dirt, ice, mountain, mud, sand, sandstone, snow, soil, stone]: + for surface in [ + chunkyrock, + cobble_stone, + cracked_ground, + dirt, + ice, + mountain, + mud, + sand, + sandstone, + snow, + soil, + stone, + ]: clear_scene() bpy.ops.mesh.primitive_cube_add() obj = bpy.context.active_object @@ -26,10 +50,14 @@ code, _, _ = parser(obj.modifiers[surface.mod_name]) folder = Path("terrain/source/common/surfaces") folder.mkdir(exist_ok=1) - dst = folder/f"{surface.name}.h" + dst = folder / f"{surface.name}.h" with open(dst, "w") as f: - f.write(f'''// Code generated using version {__version__} of infinigen/tools/kernelize_surfaces.py; refer to infinigen/assets/materials/{surface.name}.py which has the copyright and authors''') + f.write( + f"""// Code generated using version {__version__} of 
infinigen/tools/kernelize_surfaces.py; refer to infinigen/assets/materials/{surface.name}.py which has the copyright and authors""" + ) f.write(code) f.write("\n") # optional: clang-format needed to format output code - subprocess.call(f"clang-format -style=\"{{IndentWidth: 4}}\" -i '{dst}'", shell=True) \ No newline at end of file + subprocess.call( + f"clang-format -style=\"{{IndentWidth: 4}}\" -i '{dst}'", shell=True + ) diff --git a/infinigen/tools/terrain/landtile_viewer.py b/infinigen/tools/terrain/landtile_viewer.py index a1e56df30..3e9f44d3d 100644 --- a/infinigen/tools/terrain/landtile_viewer.py +++ b/infinigen/tools/terrain/landtile_viewer.py @@ -4,28 +4,27 @@ # Authors: Zeyu Ma -import os -import sys - -#sys.path.append(f"{os.path.split(os.path.abspath(__file__))[0]}/../..") +# sys.path.append(f"{os.path.split(os.path.abspath(__file__))[0]}/../..") import argparse +import os import bpy import numpy as np + +from infinigen.core import init from infinigen.core.nodes.node_wrangler import Nodes, NodeWrangler -from infinigen.terrain.utils import Mesh, read from infinigen.core.util.blender import clear_scene from infinigen.core.util.organization import AssetFile -from infinigen.core import init +from infinigen.terrain.utils import Mesh, read if __name__ == "__main__": parser = argparse.ArgumentParser() - parser.add_argument('-i', '--input', type=str) - parser.add_argument('-o', '--overlay', type=int, default=False) - parser.add_argument('--mesh', type=str, default="") - parser.add_argument('--scene', type=str, default="") + parser.add_argument("-i", "--input", type=str) + parser.add_argument("-o", "--overlay", type=int, default=False) + parser.add_argument("--mesh", type=str, default="") + parser.add_argument("--scene", type=str, default="") args = init.parse_args_blender(parser) - + folder = os.path.dirname(args.input) tile_size = float(np.loadtxt(f"{folder}/{AssetFile.TileSize}.txt")) image = read(args.input) @@ -35,13 +34,19 @@ mesh.vertex_attributes["attribute"] = image.reshape(-1).astype(np.float32) clear_scene() obj = mesh.export_blender("preview") - if args.mesh != "": mesh.save(args.mesh) + if args.mesh != "": + mesh.save(args.mesh) if args.overlay: material = bpy.data.materials.new(name="preview_material") material.use_nodes = True nw = NodeWrangler(material.node_tree) - new_attribute_node = nw.new_node(Nodes.Attribute, [], {"attribute_name": "attribute"}) - material.node_tree.links.new(new_attribute_node.outputs['Color'], material.node_tree.nodes['Principled BSDF'].inputs['Base Color']) + new_attribute_node = nw.new_node( + Nodes.Attribute, [], {"attribute_name": "attribute"} + ) + material.node_tree.links.new( + new_attribute_node.outputs["Color"], + material.node_tree.nodes["Principled BSDF"].inputs["Base Color"], + ) obj.active_material = material - if args.scene != "": bpy.ops.wm.save_mainfile(filepath=args.scene) - + if args.scene != "": + bpy.ops.wm.save_mainfile(filepath=args.scene) diff --git a/infinigen/tools/terrain/palette/palette.py b/infinigen/tools/terrain/palette/palette.py index a87532f6a..4071ac48a 100644 --- a/infinigen/tools/terrain/palette/palette.py +++ b/infinigen/tools/terrain/palette/palette.py @@ -4,15 +4,16 @@ # Authors: Zeyu Ma, Lingjie Mei -from google_images_search import GoogleImagesSearch -from sklearn.mixture import GaussianMixture import argparse -import numpy as np +import colorsys import os +from pathlib import Path + import cv2 import matplotlib.pyplot as plt -import colorsys -from pathlib import Path +import numpy as np +from 
google_images_search import GoogleImagesSearch +from sklearn.mixture import GaussianMixture def make_palette(keyword, num_images, num_colors, overwrite=False): @@ -22,13 +23,13 @@ def make_palette(keyword, num_images, num_colors, overwrite=False): # - Multiselect is currently not feasible. Choose ONE option only # - This param can also be omitted from _search_params if you do not wish to define any value _search_params = { - 'q': keyword, - 'num': num_images, - 'fileType': 'jpg|png', + "q": keyword, + "num": num_images, + "fileType": "jpg|png", } # this will search and download: - folder = f'{os.path.split(os.path.abspath(__file__))[0]}/images/{keyword}' + folder = f"{os.path.split(os.path.abspath(__file__))[0]}/images/{keyword}" if os.path.exists(folder) and not overwrite: print("folder existing, skip") else: @@ -38,7 +39,8 @@ def make_palette(keyword, num_images, num_colors, overwrite=False): colors = np.zeros((0, 3)) for image_name in os.listdir(folder): - if image_name.endswith("svg"): continue + if image_name.endswith("svg"): + continue image = cv2.imread(f"{folder}/{image_name}") image = cv2.resize(image, (128, 128)) image = image[:, :, :3] @@ -77,15 +79,21 @@ def make_palette(keyword, num_images, num_colors, overwrite=False): diagrams = np.clip(diagrams * 256, a_min=0, a_max=255).astype(np.int32) diagrams = diagrams.reshape((2 * S, num_colors * S, 3)) - Path(f'{os.path.split(os.path.abspath(__file__))[0]}/images').mkdir(parents=True, exist_ok=True) - Path(f'{os.path.split(os.path.abspath(__file__))[0]}/json').mkdir(parents=True, exist_ok=True) + Path(f"{os.path.split(os.path.abspath(__file__))[0]}/images").mkdir( + parents=True, exist_ok=True + ) + Path(f"{os.path.split(os.path.abspath(__file__))[0]}/json").mkdir( + parents=True, exist_ok=True + ) plt.figure(figsize=(20, 5)) plt.imshow(diagrams) - plt.savefig(f'{os.path.split(os.path.abspath(__file__))[0]}/images/{keyword}.png') + plt.savefig(f"{os.path.split(os.path.abspath(__file__))[0]}/images/{keyword}.png") colors_rgb = np.clip(colors_rgb * 256, a_min=0, a_max=255).astype(np.int32) - with open(f"{os.path.split(os.path.abspath(__file__))[0]}/json/{keyword}.json", "w") as f: + with open( + f"{os.path.split(os.path.abspath(__file__))[0]}/json/{keyword}.json", "w" + ) as f: f.write("{\n") f.write(' "color": {\n') for i, color in enumerate(colors_rgb): @@ -93,25 +101,25 @@ def make_palette(keyword, num_images, num_colors, overwrite=False): f.write(" },\n") f.write(' "hsv": [\n') for color_hsv in colors_hsv: - f.write(f' [{color_hsv[0]}, {color_hsv[1]}, {color_hsv[2]}],\n') + f.write(f" [{color_hsv[0]}, {color_hsv[1]}, {color_hsv[2]}],\n") f.write(" ],\n") f.write(' "std": [\n') for std in cov: - covs = ','.join([str(x) for x in std.reshape(-1)]) - f.write(f' [{covs}],\n') + covs = ",".join([str(x) for x in std.reshape(-1)]) + f.write(f" [{covs}],\n") f.write(" ],\n") f.write(' "prob": [\n') for i in range(num_colors): - f.write(f' {weights[i]},\n') + f.write(f" {weights[i]},\n") f.write(" ]\n") f.write("}\n") if __name__ == "__main__": parser = argparse.ArgumentParser() - parser.add_argument('-k', '--keyword', type=str) - parser.add_argument('-i', '--num_images', default=10) - parser.add_argument('-c', '--num_colors', default=10) - parser.add_argument('-o', '--overwrite', action='store_true') + parser.add_argument("-k", "--keyword", type=str) + parser.add_argument("-i", "--num_images", default=10) + parser.add_argument("-c", "--num_colors", default=10) + parser.add_argument("-o", "--overwrite", action="store_true") args = 
parser.parse_args() make_palette(args.keyword, args.num_images, args.num_colors, args.overwrite) diff --git a/infinigen/tools/terrain/params_parser.py b/infinigen/tools/terrain/params_parser.py index 682986181..fe3fd70c8 100644 --- a/infinigen/tools/terrain/params_parser.py +++ b/infinigen/tools/terrain/params_parser.py @@ -7,7 +7,7 @@ import argparse parser = argparse.ArgumentParser() -parser.add_argument('-f', '--file_path', type=str) +parser.add_argument("-f", "--file_path", type=str) args = parser.parse_args() output = "" @@ -16,12 +16,14 @@ code = "" + def get_code(current_type, variables): code = "" for i, v in enumerate(variables): code += f" {current_type} {v} = {current_type[0]}_params[{i}];\n" return code + with open(args.file_path, "r") as f: lines = f.readlines() i = 0 @@ -43,7 +45,13 @@ def get_code(current_type, variables): code += get_code(current_type, current_vars) break else: - current_vars.extend([x.lstrip().rstrip() for x in lines[i].lstrip().rstrip().rstrip(',').split(",") if x.lstrip().rstrip() != ""]) + current_vars.extend( + [ + x.lstrip().rstrip() + for x in lines[i].lstrip().rstrip().rstrip(",").split(",") + if x.lstrip().rstrip() != "" + ] + ) i += 1 - + print(code) diff --git a/infinigen_examples/asset_parameters.py b/infinigen_examples/asset_parameters.py index dd3d9d1ae..f276cf151 100644 --- a/infinigen_examples/asset_parameters.py +++ b/infinigen_examples/asset_parameters.py @@ -2,44 +2,62 @@ # This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. # Authors: Lingjie Mei -import numpy as np -from infinigen.assets.clothes import blanket -from infinigen.assets.materials import metal, fabrics, ceramic -from infinigen.assets.materials.woods import wood -from infinigen.assets.scatters.clothes import ClothesCover -from infinigen.assets.seating import ChairFactory -from infinigen.assets.tableware import PotFactory, PanFactory, FruitContainerFactory -from infinigen.core.surface import NoApply +from infinigen.assets.objects.seating import ChairFactory +from infinigen.assets.objects.tableware import ( + FruitContainerFactory, + PanFactory, + PotFactory, +) parameters = { - 'ChairFactory': { - 'factories': [ChairFactory] * 16, - 'globals': { - }, - 'individuals': [{}, {'arm_mid': [-.03, -.03, .09], 'leg_height': .5, 'leg_x_offset': 0}, - {'arm_mid': [0, 0, 0], 'leg_height': .6, 'leg_x_offset': .02}, - {'arm_mid': [.03, .09, -.03], 'leg_height': .7, 'leg_x_offset': .05}, {}, - {'leg_offset_bar': (.2, .4), 'seat_front': 1., 'back_vertical_cuts': 1}, - {'leg_offset_bar': (.4, .6), 'seat_front': 1.1, 'back_vertical_cuts': 2}, - {'leg_offset_bar': (.6, .8), 'seat_front': 1.2, 'back_vertical_cuts': 3}, {}] + [{}] * 7, - 'repeat': 12, - 'indices': [0] * 9 + list(range(1, 8)), - 'scene_idx': 4, + "ChairFactory": { + "factories": [ChairFactory] * 16, + "globals": {}, + "individuals": [ + {}, + {"arm_mid": [-0.03, -0.03, 0.09], "leg_height": 0.5, "leg_x_offset": 0}, + {"arm_mid": [0, 0, 0], "leg_height": 0.6, "leg_x_offset": 0.02}, + {"arm_mid": [0.03, 0.09, -0.03], "leg_height": 0.7, "leg_x_offset": 0.05}, + {}, + {"leg_offset_bar": (0.2, 0.4), "seat_front": 1.0, "back_vertical_cuts": 1}, + {"leg_offset_bar": (0.4, 0.6), "seat_front": 1.1, "back_vertical_cuts": 2}, + {"leg_offset_bar": (0.6, 0.8), "seat_front": 1.2, "back_vertical_cuts": 3}, + {}, + ] + + [{}] * 7, + "repeat": 12, + "indices": [0] * 9 + list(range(1, 8)), + "scene_idx": 4, }, - - 'PanFactory': { - 'factories': [PanFactory] * 8 + 
[PanFactory] * 2 + [PotFactory] * 3 + [FruitContainerFactory] * 3, - 'globals': { - }, - 'individuals': [{}, {'scale': .1, 'depth': .3, 'x_handle': 2, }, {'scale': .12, 'depth': .5, 'x_handle': 1.5}, - {'scale': .15, 'depth': .8, 'x_handle': 1.2}, {}, - {'s_handle': .8, 'r_expand': 1, 'x_guard': 1, }, - {'s_handle': 1., 'r_expand': 1.15, 'x_guard': 1.3}, - {'s_handle': 1.2, 'r_expand': 1.3, 'x_guard': 1.6}, {}] + [{}] * 7, - 'repeat': 12, - 'indices': [0] * 9 + list(range(1, 8)), - 'scene_idx': 2, + "PanFactory": { + "factories": [PanFactory] * 8 + + [PanFactory] * 2 + + [PotFactory] * 3 + + [FruitContainerFactory] * 3, + "globals": {}, + "individuals": [ + {}, + { + "scale": 0.1, + "depth": 0.3, + "x_handle": 2, + }, + {"scale": 0.12, "depth": 0.5, "x_handle": 1.5}, + {"scale": 0.15, "depth": 0.8, "x_handle": 1.2}, + {}, + { + "s_handle": 0.8, + "r_expand": 1, + "x_guard": 1, + }, + {"s_handle": 1.0, "r_expand": 1.15, "x_guard": 1.3}, + {"s_handle": 1.2, "r_expand": 1.3, "x_guard": 1.6}, + {}, + ] + + [{}] * 7, + "repeat": 12, + "indices": [0] * 9 + list(range(1, 8)), + "scene_idx": 2, }, - } diff --git a/infinigen_examples/configs_indoor/base.gin b/infinigen_examples/configs_indoor/base_indoors.gin similarity index 95% rename from infinigen_examples/configs_indoor/base.gin rename to infinigen_examples/configs_indoor/base_indoors.gin index b3130c227..4b69c72ec 100644 --- a/infinigen_examples/configs_indoor/base.gin +++ b/infinigen_examples/configs_indoor/base_indoors.gin @@ -1,6 +1,4 @@ include 'infinigen_examples/configs_nature/base.gin' -include 'infinigen_examples/configs_nature/base_surface_registry.gin' -include 'infinigen_examples/configs_nature/natural.gin' include 'infinigen_examples/configs_nature/performance/fast_terrain_assets.gin' # overriden in fast_solve.gin if present diff --git a/infinigen_examples/configs_indoor/fast_solve.gin b/infinigen_examples/configs_indoor/fast_solve.gin index db561577c..1a2a5797f 100644 --- a/infinigen_examples/configs_indoor/fast_solve.gin +++ b/infinigen_examples/configs_indoor/fast_solve.gin @@ -3,6 +3,8 @@ MultistoryRoomSolver.n_divide_trials = 60 RoomSolver.iters_mult = 120 MultistoryRoomSolver.iters_mult = 120 +Solver.addition_weight_scalar = 3.0 + compose_indoors.solve_steps_large = 150 compose_indoors.solve_steps_medium = 40 compose_indoors.solve_steps_small = 10 diff --git a/infinigen_examples/configs_indoor/natural.gin b/infinigen_examples/configs_indoor/natural.gin deleted file mode 100644 index 67ce4ac03..000000000 --- a/infinigen_examples/configs_indoor/natural.gin +++ /dev/null @@ -1,3 +0,0 @@ -# assets.materials.water.shader.color = ("palette", "water") -assets.materials.mountain.shader.color = ("palette", "mountain soil") -assets.materials.sandstone.shader.color = ("palette", "sandstone") \ No newline at end of file diff --git a/infinigen_examples/configs_nature/base.gin b/infinigen_examples/configs_nature/base.gin index b10263a26..9f4a58f51 100644 --- a/infinigen_examples/configs_nature/base.gin +++ b/infinigen_examples/configs_nature/base.gin @@ -1,3 +1,5 @@ +include 'infinigen_examples/configs_nature/surface_registry.gin' + OVERALL_SEED = 0 LOG_DIR = '.' 
@@ -13,103 +15,6 @@ save_obj_and_instances.output_folder="saved_mesh.obj" util.logging.create_text_file.log_dir = %LOG_DIR -placement.populate_all.dist_cull = 70 -compose_nature.inview_distance = 70 -compose_nature.near_distance = 20 -compose_nature.center_distance = 35 - -compose_nature.land_domain_tags = 'landscape,-liquid_covered,-cave,-beach' -compose_nature.nonliving_domain_tags = 'landscape,-cave' -compose_nature.underwater_domain_tags = 'landscape,liquid_covered,-cave' - -compose_nature.terrain_enabled = True -compose_nature.lighting_enabled = True -compose_nature.coarse_terrain_enabled = True -compose_nature.terrain_surface_enabled = True - -compose_nature.simulated_river_enabled=False -compose_nature.tilted_river_enabled=False - -compose_nature.fancy_clouds_chance = 0.6 - -compose_nature.trees_chance = 0.85 -compose_nature.bushes_chance = 0.7 -compose_nature.clouds_chance = 0.0 -compose_nature.boulders_chance = 0.7 - -compose_nature.glowing_rocks_chance = 0.0 -compose_nature.rocks_chance = 0.9 - -compose_nature.ground_leaves_chance = 0.7 -compose_nature.ground_twigs_chance = 0.7 -compose_nature.chopped_trees_chance = 0.7 - -compose_nature.grass_chance = 0.8 -compose_nature.ferns_chance = 0.25 -compose_nature.monocots_chance = 0.15 - -compose_nature.flowers_chance = 0.2 -compose_nature.kelp_chance = 0.0 -compose_nature.cactus_chance = 0.0 -compose_nature.coconut_trees_chance = 0.0 -compose_nature.palm_trees_chance = 0.0 - -compose_nature.instanced_trees_chance = 0.0 # conditioned on trees_chance as prereq - -compose_nature.fish_school_chance = 0.0 -compose_nature.bug_swarm_chance = 0.0 - -compose_nature.rain_particles_chance = 0.0 -compose_nature.snow_particles_chance = 0.0 -compose_nature.leaf_particles_chance = 0.0 -compose_nature.dust_particles_chance = 0.0 -compose_nature.marine_snow_particles_chance = 0.0 -compose_nature.camera_based_lighting_chance = 0.0 - -compose_nature.wind_chance = 0.5 -compose_nature.turbulence_chance = 0.3 -wind_effector.strength = ('uniform', 0, 0.02) -turbulence_effector.strength = ('uniform', 0, 0.02) -turbulence_effector.noise = ('uniform', 0, 0.015) - -compose_nature.corals_chance = 0.0 -compose_nature.seaweed_chance = 0.0 -compose_nature.seashells_chance = 0.0 -compose_nature.urchin_chance = 0.0 -compose_nature.jellyfish_chance = 0.0 - -compose_nature.mushroom_chance = 0 # TEMP -compose_nature.pinecone_chance = 0.1 -compose_nature.pine_needle_chance = 0.1 -compose_nature.caustics_chance = 0.0 -compose_nature.decorative_plants_chance = 0.1 - -compose_nature.cached_fire = False -populate_scene.cached_fire = False - -compose_nature.cached_fire_trees_chance= 0 -compose_nature.cached_fire_bushes_chance = 0 -compose_nature.cached_fire_boulders_chance = 0.0 -compose_nature.cached_fire_cactus_chance = 0 - - - -populate_scene.slime_mold_chance = 0.0 -populate_scene.ivy_chance = 0.0 -populate_scene.lichen_chance = 0.0 -populate_scene.mushroom_chance = 0.0 -populate_scene.moss_chance = 0.0 -populate_scene.snow_layer_chance = 0 - -populate_scene.snow_layer_chance=0.0 - -populate_scene.fire_warmup = 50 -populate_scene.trees_fire_on_the_fly_chance = 0 -populate_scene.bushes_fire_on_the_fly_chance = 0 -populate_scene.creatures_fire_on_the_fly_chance = 0 -populate_scene.boulders_fire_on_the_fly_chance = 0 -populate_scene.cactus_fire_on_the_fly_chance = 0 - target_face_size.global_multiplier = 2 scatter_res_distance.dist = 4 @@ -181,50 +86,4 @@ camera.spawn_camera_rigs.n_camera_rigs = 1 camera.spawn_camera_rigs.camera_rig_config = [ {'loc': (0, 0, 0), 'rot_euler': 
(0, 0, 0)}, {'loc': (0.075, 0, 0), 'rot_euler': (0, 0, 0)} -] - -compose_nature.camera_selection_tags_ratio = {"liquid": (0, 0.5)} # often overridden by scenetypes -compose_nature.camera_selection_anim_criterion_keys = {"liquid": True} - -# TERRAIN SEED # -assets.materials.ice.geo_ice.random_seed = %OVERALL_SEED -assets.materials.lava.lava_geo.random_seed = %OVERALL_SEED -assets.materials.mud.geo_mud.random_seed = %OVERALL_SEED -assets.materials.cobble_stone.geo_cobblestone.random_seed = %OVERALL_SEED -assets.materials.dirt.geo_dirt.random_seed = %OVERALL_SEED -assets.materials.stone.geo_stone.random_seed = %OVERALL_SEED -assets.materials.cracked_ground.geo_cracked_ground.random_seed = %OVERALL_SEED -assets.materials.soil.geometry_soil.random_seed = %OVERALL_SEED -assets.materials.chunkyrock.geo_rocks.random_seed = %OVERALL_SEED - -assets.materials.mountain.shader.random_seed = %OVERALL_SEED -assets.materials.sand.shader.random_seed = %OVERALL_SEED -assets.materials.water.shader.random_seed = %OVERALL_SEED - -compose_nature.ground_creatures_chance = 0.0 -compose_nature.ground_creature_registry = [ - (@CarnivoreFactory, 1), - (@HerbivoreFactory, 1), - (@BirdFactory, 1), - (@SnakeFactory, 1) -] - -compose_nature.flying_creatures_chance=0.1 -compose_nature.flying_creature_registry = [ - (@FlyingBirdFactory, 1), - (@DragonflyFactory, 0.1), -] - -group_collections.config = [ - {'name': 'assets', 'hide_viewport': True, 'hide_render': True}, # collections of assets used by scatters - {'name': 'scatter', 'hide_viewport': True, 'hide_render': False}, # actual instanced objects for scatters - {'name': 'placeholders', 'hide_viewport': False, 'hide_render': True}, # low-res markers / proxies for where assets will be spawned - {'name': 'unique_assets', 'hide_viewport': True, 'hide_render': False}, # actual hi-res assets spawned at each placeholder location - {'name': 'particleassets', 'hide_viewport': True, 'hide_render': False}, # actual hi-res assets spawned at each placeholder location - {'name': 'particles', 'hide_viewport': True, 'hide_render': False}, # actual particle emitters / particle systems - {'name': 'animhelper', 'hide_viewport': False, 'hide_render': True}, # curves and iks -] - -include 'infinigen_examples/configs_nature/base_surface_registry.gin' -include 'infinigen_examples/configs_nature/natural.gin' - +] \ No newline at end of file diff --git a/infinigen_examples/configs_nature/base_nature.gin b/infinigen_examples/configs_nature/base_nature.gin new file mode 100644 index 000000000..f0309ce52 --- /dev/null +++ b/infinigen_examples/configs_nature/base_nature.gin @@ -0,0 +1,138 @@ +include 'infinigen_examples/configs_nature/base.gin' + +placement.populate_all.dist_cull = 70 +compose_nature.inview_distance = 70 +compose_nature.near_distance = 20 +compose_nature.center_distance = 35 + +compose_nature.land_domain_tags = 'landscape,-liquid_covered,-cave,-beach' +compose_nature.nonliving_domain_tags = 'landscape,-cave' +compose_nature.underwater_domain_tags = 'landscape,liquid_covered,-cave' + +compose_nature.terrain_enabled = True +compose_nature.lighting_enabled = True +compose_nature.coarse_terrain_enabled = True +compose_nature.terrain_surface_enabled = True + +compose_nature.simulated_river_enabled=False +compose_nature.tilted_river_enabled=False + +compose_nature.fancy_clouds_chance = 0.6 + +compose_nature.trees_chance = 0.85 +compose_nature.bushes_chance = 0.7 +compose_nature.clouds_chance = 0.0 +compose_nature.boulders_chance = 0.7 + +compose_nature.glowing_rocks_chance = 0.0 
+compose_nature.rocks_chance = 0.9 + +compose_nature.ground_leaves_chance = 0.7 +compose_nature.ground_twigs_chance = 0.7 +compose_nature.chopped_trees_chance = 0.7 + +compose_nature.grass_chance = 0.8 +compose_nature.ferns_chance = 0.25 +compose_nature.monocots_chance = 0.15 + +compose_nature.flowers_chance = 0.2 +compose_nature.kelp_chance = 0.0 +compose_nature.cactus_chance = 0.0 +compose_nature.coconut_trees_chance = 0.0 +compose_nature.palm_trees_chance = 0.0 + +compose_nature.instanced_trees_chance = 0.0 # conditioned on trees_chance as prereq + +compose_nature.fish_school_chance = 0.0 +compose_nature.bug_swarm_chance = 0.0 + +compose_nature.rain_particles_chance = 0.0 +compose_nature.snow_particles_chance = 0.0 +compose_nature.leaf_particles_chance = 0.0 +compose_nature.dust_particles_chance = 0.0 +compose_nature.marine_snow_particles_chance = 0.0 +compose_nature.camera_based_lighting_chance = 0.0 + +compose_nature.wind_chance = 0.5 +compose_nature.turbulence_chance = 0.3 +WindEffector.strength = ('uniform', 0, 0.02) +TurbulenceEffector.strength = ('uniform', 0, 0.02) +TurbulenceEffector.noise = ('uniform', 0, 0.015) + +compose_nature.corals_chance = 0.0 +compose_nature.seaweed_chance = 0.0 +compose_nature.seashells_chance = 0.0 +compose_nature.urchin_chance = 0.0 +compose_nature.jellyfish_chance = 0.0 + +compose_nature.mushroom_chance = 0 # TEMP +compose_nature.pinecone_chance = 0.1 +compose_nature.pine_needle_chance = 0.1 +compose_nature.caustics_chance = 0.0 +compose_nature.decorative_plants_chance = 0.1 + +compose_nature.cached_fire = False +populate_scene.cached_fire = False + +compose_nature.cached_fire_trees_chance= 0 +compose_nature.cached_fire_bushes_chance = 0 +compose_nature.cached_fire_boulders_chance = 0.0 +compose_nature.cached_fire_cactus_chance = 0 + +populate_scene.slime_mold_chance = 0.0 +populate_scene.ivy_chance = 0.0 +populate_scene.lichen_chance = 0.0 +populate_scene.mushroom_chance = 0.0 +populate_scene.moss_chance = 0.0 +populate_scene.snow_layer_chance = 0 + +populate_scene.snow_layer_chance=0.0 + +populate_scene.fire_warmup = 50 +populate_scene.trees_fire_on_the_fly_chance = 0 +populate_scene.bushes_fire_on_the_fly_chance = 0 +populate_scene.creatures_fire_on_the_fly_chance = 0 +populate_scene.boulders_fire_on_the_fly_chance = 0 +populate_scene.cactus_fire_on_the_fly_chance = 0 + +compose_nature.camera_selection_tags_ratio = {"liquid": (0, 0.5)} # often overridden by scenetypes +compose_nature.camera_selection_anim_criterion_keys = {"liquid": True} + +# TERRAIN SEED # +assets.materials.ice.geo_ice.random_seed = %OVERALL_SEED +assets.materials.lava.lava_geo.random_seed = %OVERALL_SEED +assets.materials.mud.geo_mud.random_seed = %OVERALL_SEED +assets.materials.cobble_stone.geo_cobblestone.random_seed = %OVERALL_SEED +assets.materials.dirt.geo_dirt.random_seed = %OVERALL_SEED +assets.materials.stone.geo_stone.random_seed = %OVERALL_SEED +assets.materials.cracked_ground.geo_cracked_ground.random_seed = %OVERALL_SEED +assets.materials.soil.geometry_soil.random_seed = %OVERALL_SEED +assets.materials.chunkyrock.geo_rocks.random_seed = %OVERALL_SEED + +assets.materials.mountain.shader.random_seed = %OVERALL_SEED +assets.materials.sand.shader.random_seed = %OVERALL_SEED +assets.materials.water.shader.random_seed = %OVERALL_SEED + +compose_nature.ground_creatures_chance = 0.0 +compose_nature.ground_creature_registry = [ + (@CarnivoreFactory, 1), + (@HerbivoreFactory, 1), + (@BirdFactory, 1), + (@SnakeFactory, 1) +] + +compose_nature.flying_creatures_chance=0.1 
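The `('uniform', 0, 0.02)` tuples bound to `WindEffector.strength` and `TurbulenceEffector.strength` above are distribution specifications rather than literal values; Infinigen resolves them into sampled numbers at runtime. The exact helper is not shown in this diff, so the sketch below is a generic illustration of the idea, not Infinigen's implementation.

```python
# Generic sketch of resolving ('uniform', lo, hi) specs like the ones bound to
# WindEffector.strength above. This is NOT Infinigen's actual helper.
import numpy as np


def sample_spec(spec):
    """Return a concrete value for a plain number or a ('dist_name', *args) tuple."""
    if isinstance(spec, (int, float)):
        return spec
    name, *args = spec
    if name == "uniform":
        lo, hi = args
        return np.random.uniform(lo, hi)
    raise ValueError(f"unknown distribution {name!r}")


print(sample_spec(("uniform", 0, 0.02)))  # e.g. 0.0137...
print(sample_spec(0.3))                   # plain numbers pass through unchanged
```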
+compose_nature.flying_creature_registry = [ + (@FlyingBirdFactory, 1), + (@DragonflyFactory, 0.1), +] + +group_collections.config = [ + {'name': 'assets', 'hide_viewport': True, 'hide_render': True}, # collections of assets used by scatters + {'name': 'scatter', 'hide_viewport': True, 'hide_render': False}, # actual instanced objects for scatters + {'name': 'placeholders', 'hide_viewport': False, 'hide_render': True}, # low-res markers / proxies for where assets will be spawned + {'name': 'unique_assets', 'hide_viewport': True, 'hide_render': False}, # actual hi-res assets spawned at each placeholder location + {'name': 'particleassets', 'hide_viewport': True, 'hide_render': False}, # actual hi-res assets spawned at each placeholder location + {'name': 'particles', 'hide_viewport': True, 'hide_render': False}, # actual particle emitters / particle systems + {'name': 'animhelper', 'hide_viewport': False, 'hide_render': True}, # curves and iks +] diff --git a/infinigen_examples/configs_nature/base_surface_registry.gin b/infinigen_examples/configs_nature/base_surface_registry.gin deleted file mode 100644 index d224ddcf0..000000000 --- a/infinigen_examples/configs_nature/base_surface_registry.gin +++ /dev/null @@ -1,67 +0,0 @@ -surface.registry.ground_collection = [ - ('mud', 2), - ('sand', 1), - ('cobble_stone', 1), - ('cracked_ground', 1), - ('dirt', 1), - ('stone', 1), - ('soil', 1), - ('chunkyrock', 0), -] - -surface.registry.beach = [ - ('sand', 10), - ('cracked_ground', 1), - ('dirt', 1), - ('stone', 1), - ('soil', 1), -] - -surface.registry.eroded = [ - ('sand', 1), - ('cracked_ground', 1), - ('dirt', 1), - ('stone', 1), - ('soil', 1), -] - -surface.registry.mountain_collection = [ - ('mountain', 10), - ("sandstone", 2), -] - -surface.registry.rock_collection = [ - # ('aluminumdisp2tut', 0.5), - ('stone', 1), - ('mountain', 5), - # ('ice', 1), -] - -surface.registry.liquid_collection = [ - ('water', 0.95), - # ('lava', 0.05), -] - -surface.registry.lava = [ - ('lava', 1), -] - -surface.registry.snow = [ - ('snow', 1), -] - -surface.registry.atmosphere = [ - ('atmosphere_light_haze', 1), -] - -surface.registry.bark = [ - ('bark_birch', 0.1), - ('bark_random', 0.9), - #('wood', 0.01), -] - -surface.registry.greenery = [ - ('simple_greenery', 1), -] - -surface.registry.smooth_categories = 0 diff --git a/infinigen_examples/configs_nature/natural.gin b/infinigen_examples/configs_nature/natural.gin deleted file mode 100644 index 67ce4ac03..000000000 --- a/infinigen_examples/configs_nature/natural.gin +++ /dev/null @@ -1,3 +0,0 @@ -# assets.materials.water.shader.color = ("palette", "water") -assets.materials.mountain.shader.color = ("palette", "mountain soil") -assets.materials.sandstone.shader.color = ("palette", "sandstone") \ No newline at end of file diff --git a/infinigen_examples/configs_nature/scene_types/desert.gin b/infinigen_examples/configs_nature/scene_types/desert.gin index 80fca8ff8..6218d7be7 100644 --- a/infinigen_examples/configs_nature/scene_types/desert.gin +++ b/infinigen_examples/configs_nature/scene_types/desert.gin @@ -16,8 +16,8 @@ compose_nature.snow_particles_chance = 0 compose_nature.leaf_particles_chance = 0.05 compose_nature.dust_particles_chance = 0.0 -atmosphere_light_haze.shader_atmosphere.density = ("uniform", 0, 0.0015) -atmosphere_light_haze.shader_atmosphere.anisotropy = 0 +assets.materials.atmosphere_light_haze.shader_atmosphere.density = ("uniform", 0, 0.0015) +assets.materials.atmosphere_light_haze.shader_atmosphere.anisotropy = 0 
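The overrides renamed in the desert.gin hunk above (and the similar `turbulence_effector` → `TurbulenceEffector` renames that follow) qualify the old target names with their registered paths, e.g. `assets.materials.atmosphere_light_haze.shader_atmosphere.*`. This matches gin's module-qualified naming: a configurable registered under an explicit module namespace is addressed by its qualified path. The example below is a guess at the mechanism, not Infinigen's actual registration code.

```python
# Illustrative example of gin's module-qualified binding names, suggesting why the
# override above now carries the full "assets.materials." prefix. Assumed, not
# copied from Infinigen.
import gin


@gin.configurable(module="assets.materials.atmosphere_light_haze")
def shader_atmosphere(density=0.0, anisotropy=1.0):
    return density, anisotropy


gin.parse_config(
    "assets.materials.atmosphere_light_haze.shader_atmosphere.anisotropy = 0"
)

assert shader_atmosphere() == (0.0, 0)
```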
animate_cameras.follow_poi_chance=0.5 diff --git a/infinigen_examples/configs_nature/scene_types/under_water.gin b/infinigen_examples/configs_nature/scene_types/under_water.gin index 551914e95..105b45b2a 100644 --- a/infinigen_examples/configs_nature/scene_types/under_water.gin +++ b/infinigen_examples/configs_nature/scene_types/under_water.gin @@ -76,7 +76,7 @@ scene.waterbody_chance = 1 Terrain.under_water = 1 compose_nature.turbulence_chance = 0.7 -turbulence_effector.strength = ("uniform", 0, 7) -turbulence_effector.size = ("uniform", 1.5, 4.5) -turbulence_effector.flow = 1 -turbulence_effector.noise = 10 \ No newline at end of file +TurbulenceEffector.strength = ("uniform", 0, 7) +TurbulenceEffector.size = ("uniform", 1.5, 4.5) +TurbulenceEffector.flow = 1 +TurbulenceEffector.noise = 10 \ No newline at end of file diff --git a/infinigen_examples/configs_indoor/base_surface_registry.gin b/infinigen_examples/configs_nature/surface_registry.gin similarity index 100% rename from infinigen_examples/configs_indoor/base_surface_registry.gin rename to infinigen_examples/configs_nature/surface_registry.gin diff --git a/infinigen_examples/generate_asset_demo.py b/infinigen_examples/generate_asset_demo.py index e7374933c..c08e49ca4 100644 --- a/infinigen_examples/generate_asset_demo.py +++ b/infinigen_examples/generate_asset_demo.py @@ -4,52 +4,52 @@ # Authors: Alexander Raistrick import argparse -import os -import sys -from pathlib import Path import logging from copy import copy - -logging.basicConfig( - format='[%(asctime)s.%(msecs)03d] [%(name)s] [%(levelname)s] | %(message)s', - datefmt='%H:%M:%S', - level=logging.WARNING -) +from pathlib import Path import bpy -from mathutils import Vector, Matrix, bvhtree import gin import numpy as np -from tqdm import tqdm, trange +from mathutils import Matrix, Vector, bvhtree +from tqdm import trange +# ruff: noqa: E402 +# NOTE: logging config has to be before imports that use logging +logging.basicConfig( + format="[%(asctime)s.%(msecs)03d] [%(module)s] [%(levelname)s] | %(message)s", + datefmt="%H:%M:%S", + level=logging.INFO, +) -from infinigen.terrain import Terrain -from infinigen.assets.small_plants.fern import FernFactory -from infinigen.assets.creatures.util.animation.run_cycle import follow_path from infinigen.assets.lighting import sky_lighting -from infinigen.assets.weather import kole_clouds +from infinigen.assets.objects.creatures.util.animation.run_cycle import follow_path from infinigen.assets.scatters import grass, pebbles, pine_needle, pinecone -from infinigen.assets.materials import ( - mountain, sand, water, atmosphere_light_haze, sandstone, cracked_ground, \ - soil, dirt, cobble_stone, chunkyrock, stone, lava, ice, mud, snow -) -from infinigen.core.placement import placement, density, camera as cam_util +from infinigen.assets.weather import kole_clouds +from infinigen.core import execute_tasks, init, surface +from infinigen.core.placement import camera as cam_util +from infinigen.core.placement import placement from infinigen.core.placement.split_in_view import split_inview from infinigen.core.util import blender as butil +from infinigen.terrain import Terrain + +logging.basicConfig( + format="[%(asctime)s.%(msecs)03d] [%(name)s] [%(levelname)s] | %(message)s", + datefmt="%H:%M:%S", + level=logging.WARNING, +) -from infinigen.core import execute_tasks, init, surface def find_flat_location( - mesh, - bvh: bvhtree.BVHTree, - rad: float, - alt: float, - retries=100, margin_pct=0.2, - ang_samples=36 + mesh, + bvh: bvhtree.BVHTree, + rad: 
float, + alt: float, + retries=100, + margin_pct=0.2, + ang_samples=36, ): - for i in trange(retries): - ground_loc = copy(np.random.choice(mesh.data.vertices).co) origin = ground_loc + Vector((0, 0, alt)) @@ -57,24 +57,27 @@ def find_flat_location( if center_alt is None: continue - for j, ang in enumerate(np.linspace(0, 2*np.pi, ang_samples)): - sample_loc = origin + Matrix.Rotation(ang, 4, 'Z') @ Vector((rad, 0, 0)) + for j, ang in enumerate(np.linspace(0, 2 * np.pi, ang_samples)): + sample_loc = origin + Matrix.Rotation(ang, 4, "Z") @ Vector((rad, 0, 0)) *_, dist = bvh.ray_cast(origin, sample_loc - origin) if dist is not None and dist < rad: break *_, sample_alt = bvh.ray_cast(sample_loc, Vector((0, 0, -1))) - if sample_alt is None or abs(center_alt - sample_alt) > margin_pct * center_alt: + if ( + sample_alt is None + or abs(center_alt - sample_alt) > margin_pct * center_alt + ): break - else: # triggered if no `break` statement + else: # triggered if no `break` statement return ground_loc - - raise ValueError(f'Failed to find flat area {retries=}') + + raise ValueError(f"Failed to find flat area {retries=}") + def circular_camera_path(camera_rig, target_obj, rad, alt, duration): - bpy.ops.curve.primitive_bezier_circle_add( location=target_obj.location + Vector((0, 0, alt)), ) @@ -82,57 +85,65 @@ def circular_camera_path(camera_rig, target_obj, rad, alt, duration): circle.scale = (rad,) * 3 follow_path(camera_rig, circle, duration=duration) - circle.data.driver_add('eval_time').driver.expression = 'frame' + circle.data.driver_add("eval_time").driver.expression = "frame" + + butil.constrain_object(camera_rig, "TRACK_TO", target=target_obj) - butil.constrain_object(camera_rig, 'TRACK_TO', target=target_obj) @gin.configurable def compose_scene( - output_folder: Path, + output_folder: Path, scene_seed: int, - - asset_factory=None, # provided via gin - grid_rad=1.2, - grid_dim=3, #NxN grid - background='grass', + asset_factory=None, # provided via gin + grid_rad=1.2, + grid_dim=3, # NxN grid + background="grass", camera_circle_radius=8, camera_altitude=2, circle_duration_sec=25, fstop=2, - - asset_scale=(1,1,1), - asset_offset=(0,0,0), - - **params + asset_scale=(1, 1, 1), + asset_offset=(0, 0, 0), + **params, ): - sky_lighting.add_lighting() if params.get("fancy_clouds", 0): kole_clouds.add_kole_clouds() - + camera_rigs = cam_util.spawn_camera_rigs() cam = cam_util.get_camera(0, 0) # find a flat spot on the terrain to do the demo\ - terrain = Terrain(scene_seed, surface.registry, task='coarse', on_the_fly_asset_folder=output_folder/"assets") + terrain = Terrain( + scene_seed, + surface.registry, + task="coarse", + on_the_fly_asset_folder=output_folder / "assets", + ) terrain_mesh = terrain.coarse_terrain() - scene_bvh = bvhtree.BVHTree.FromObject(terrain_mesh, bpy.context.evaluated_depsgraph_get()) + scene_bvh = bvhtree.BVHTree.FromObject( + terrain_mesh, bpy.context.evaluated_depsgraph_get() + ) if asset_factory is not None: center = find_flat_location( - terrain_mesh, - scene_bvh, - rad=camera_circle_radius * 1.5, - alt=camera_altitude * 1.5 + terrain_mesh, + scene_bvh, + rad=camera_circle_radius * 1.5, + alt=camera_altitude * 1.5, ) else: center = (0, 0, 0) # move camera in a circle around that location - center_obj = butil.spawn_empty('center') + center_obj = butil.spawn_empty("center") center_obj.location = center - circular_camera_path(camera_rigs[0], center_obj, - camera_circle_radius, camera_altitude, - duration=circle_duration_sec*bpy.context.scene.render.fps) + 
circular_camera_path( + camera_rigs[0], + center_obj, + camera_circle_radius, + camera_altitude, + duration=circle_duration_sec * bpy.context.scene.render.fps, + ) cam.data.dof.use_dof = True cam.data.dof.aperture_fstop = fstop cam.data.dof.focus_object = center_obj @@ -147,7 +158,7 @@ def compose_scene( for i, l in enumerate(locs): floorloc, *_ = scene_bvh.ray_cast(Vector(l), Vector((0, 0, -1))) if floorloc is None: - raise ValueError('Found a hole in the terain') + raise ValueError("Found a hole in the terain") locs[i] = np.array(floorloc + Vector(asset_offset)) if asset_factory is not None: @@ -160,46 +171,81 @@ def compose_scene( o.scale = asset_scale # apply a procedural backdrop on all visible parts of the terrain - terrain_inview, *_ = split_inview(terrain_mesh, cam=cam, dist_max=params['inview_distance'], vis_margin=2) + terrain_inview, *_ = split_inview( + terrain_mesh, cam=cam, dist_max=params["inview_distance"], vis_margin=2 + ) if background is None: pass - elif background == 'grass': + elif background == "grass": grass.apply(terrain_inview) pebbles.apply(terrain_inview) - elif background == 'pine_forest': + elif background == "pine_forest": pine_needle.apply(terrain_inview) pinecone.apply(terrain_inview) pebbles.apply(terrain_inview) - elif background == 'TODO ADD MORE OPTIONS HERE': + elif background == "TODO ADD MORE OPTIONS HERE": pass else: - raise ValueError(f'Unrecognized {background=}') + raise ValueError(f"Unrecognized {background=}") + def main(): - parser = argparse.ArgumentParser() - parser.add_argument('--output_folder', type=Path, required=True) - parser.add_argument('--input_folder', type=Path, default=None) - parser.add_argument('-s', '--seed', default=None, help="The seed used to generate the scene") - parser.add_argument('-t', '--task', nargs='+', default=['coarse'], - choices=['coarse', 'populate', 'fine_terrain', 'ground_truth', 'render', 'mesh_save']) - parser.add_argument('-g', '--configs', nargs='+', default=['base'], - help='Set of config files for gin (separated by spaces) ' - 'e.g. --configs file1 file2 (exclude .gin from path)') - parser.add_argument('-p', '--overrides', nargs='+', default=[], - help='Parameter settings that override config defaults ' - 'e.g. --overrides module_1.a=2 module_2.b=3') - parser.add_argument('--task_uniqname', type=str, default=None) - parser.add_argument('-d', '--debug', action="store_const", dest="loglevel", const=logging.DEBUG, default=logging.INFO) - parser.add_argument( '-v', '--verbose', action="store_const", dest="loglevel", const=logging.INFO) + parser.add_argument("--output_folder", type=Path, required=True) + parser.add_argument("--input_folder", type=Path, default=None) + parser.add_argument( + "-s", "--seed", default=None, help="The seed used to generate the scene" + ) + parser.add_argument( + "-t", + "--task", + nargs="+", + default=["coarse"], + choices=[ + "coarse", + "populate", + "fine_terrain", + "ground_truth", + "render", + "mesh_save", + ], + ) + parser.add_argument( + "-g", + "--configs", + nargs="+", + default=["base"], + help="Set of config files for gin (separated by spaces) " + "e.g. --configs file1 file2 (exclude .gin from path)", + ) + parser.add_argument( + "-p", + "--overrides", + nargs="+", + default=[], + help="Parameter settings that override config defaults " + "e.g. 
--overrides module_1.a=2 module_2.b=3", + ) + parser.add_argument("--task_uniqname", type=str, default=None) + parser.add_argument( + "-d", + "--debug", + action="store_const", + dest="loglevel", + const=logging.DEBUG, + default=logging.INFO, + ) + parser.add_argument( + "-v", "--verbose", action="store_const", dest="loglevel", const=logging.INFO + ) args = init.parse_args_blender(parser) - extras = '[%(filename)s:%(lineno)d] ' if args.loglevel == logging.DEBUG else '' + extras = "[%(filename)s:%(lineno)d] " if args.loglevel == logging.DEBUG else "" logging.basicConfig( - format=f'[%(asctime)s.%(msecs)03d] [%(name)s] [%(levelname)s] {extras}| %(message)s', + format=f"[%(asctime)s.%(msecs)03d] [%(name)s] [%(levelname)s] {extras}| %(message)s", level=args.loglevel, - datefmt='%H:%M:%S' + datefmt="%H:%M:%S", ) logging.getLogger("infinigen").setLevel(args.loglevel) @@ -207,19 +253,20 @@ def main(): init.apply_gin_configs( configs=args.configs, overrides=args.overrides, - configs_folder='infinigen_examples/configs_nature', - mandatory_folders=['infinigen_examples/configs_nature/scene_types'], - skip_unknown=True + config_folders=["infinigen_examples/configs_nature"], + mandatory_folders=["infinigen_examples/configs_nature/scene_types"], + skip_unknown=True, ) - + execute_tasks.main( compose_scene_func=compose_scene, - input_folder=args.input_folder, - output_folder=args.output_folder, - task=args.task, - task_uniqname=args.task_uniqname, - scene_seed=scene_seed + input_folder=args.input_folder, + output_folder=args.output_folder, + task=args.task, + task_uniqname=args.task_uniqname, + scene_seed=scene_seed, ) + if __name__ == "__main__": - main() \ No newline at end of file + main() diff --git a/infinigen_examples/generate_asset_parameters.py b/infinigen_examples/generate_asset_parameters.py index eb40bd499..bb3c32087 100644 --- a/infinigen_examples/generate_asset_parameters.py +++ b/infinigen_examples/generate_asset_parameters.py @@ -8,7 +8,7 @@ # - Alex Raistrick # - Karhan Kayan - add fire option -import importlib +import logging import math import os import random @@ -17,61 +17,72 @@ from collections.abc import Callable from itertools import product from pathlib import Path -import logging - -from infinigen.assets.materials.woods import non_wood_tile, wood_tile -from infinigen.assets.utils.object import new_cube, origin2lowest, center -from infinigen.core.init import configure_cycles_devices -from infinigen.core.surface import write_attr_data -from infinigen.core.util.blender import deep_clone_obj -from infinigen_examples.asset_parameters import parameters -from infinigen_examples.generate_individual_assets import make_args, setup_camera -from infinigen_examples.util.test_utils import load_txt_list +# ruff: noqa: E402 +# NOTE: logging config has to be before imports that use logging logging.basicConfig( - format='[%(asctime)s.%(msecs)03d] [%(name)s] [%(levelname)s] | %(message)s', - datefmt='%H:%M:%S', level=logging.WARNING + format="[%(asctime)s.%(msecs)03d] [%(module)s] [%(levelname)s] | %(message)s", + datefmt="%H:%M:%S", + level=logging.INFO, ) import bpy -import gin import numpy as np from PIL import Image -from infinigen.assets.fluid.fluid import set_obj_on_fire -from infinigen.core.tagging import tag_system from infinigen.assets.lighting import ( - sky_lighting, hdri_lighting, three_point_lighting, holdout_lighting, CeilingLightFactory, + hdri_lighting, + holdout_lighting, + sky_lighting, + three_point_lighting, ) - -from infinigen.core import surface, init -from 
infinigen.core.placement import factory -from infinigen.core.util.camera import points_inview - -from infinigen.assets.utils.misc import subclasses +from infinigen.assets.materials.woods import non_wood_tile, wood_tile from infinigen.assets.utils.decorate import read_base_co, read_co, read_normal +from infinigen.assets.utils.misc import subclasses +from infinigen.assets.utils.object import center, new_cube, origin2lowest +from infinigen.core import init, surface +from infinigen.core.init import configure_cycles_devices +from infinigen.core.placement import factory +from infinigen.core.surface import write_attr_data +from infinigen.core.tagging import tag_system -from infinigen.core.util.math import FixedSeed # noinspection PyUnresolvedReferences from infinigen.core.util import blender as butil +from infinigen.core.util.blender import deep_clone_obj +from infinigen.core.util.camera import points_inview +from infinigen.core.util.math import FixedSeed +from infinigen_examples.asset_parameters import parameters +from infinigen_examples.generate_individual_assets import make_args, setup_camera +from infinigen_examples.util.test_utils import load_txt_list + +logging.basicConfig( + format="[%(asctime)s.%(msecs)03d] [%(name)s] [%(levelname)s] | %(message)s", + datefmt="%H:%M:%S", + level=logging.WARNING, +) def build_scene_asset(args, factory_name, idx): - params = parameters[factory_name]['globals'].copy() - i = idx // parameters[factory_name]['repeat'] - params.update(parameters[factory_name]['individuals'].copy()[i]) - factory = parameters[factory_name]['factories'][i] - idx = parameters[factory_name]['indices'][i] + params = parameters[factory_name]["globals"].copy() + i = idx // parameters[factory_name]["repeat"] + params.update(parameters[factory_name]["individuals"].copy()[i]) + factory = parameters[factory_name]["factories"][i] + idx = parameters[factory_name]["indices"][i] with FixedSeed(idx): factory = factory(idx) for k, v in params.items(): setattr( - factory, k, - v() if isinstance(v, Callable) and hasattr(v, '__name__') and v.__name__ == '' else v + factory, + k, + v() + if isinstance(v, Callable) + and hasattr(v, "__name__") + and v.__name__ == "" + else v, ) with FixedSeed(idx): - if hasattr(factory, 'post_init'): + if hasattr(factory, "post_init"): factory.post_init() with FixedSeed(idx): try: @@ -82,15 +93,15 @@ def build_scene_asset(args, factory_name, idx): asset = factory.spawn_asset(idx) except Exception as e: traceback.print_exc() - print(f'{factory}.spawn_asset({idx=}) FAILED!! {e}') + print(f"{factory}.spawn_asset({idx=}) FAILED!! 
{e}") raise e with FixedSeed(idx): factory.finalize_assets(asset) origin2lowest(asset, True) bpy.context.view_layer.objects.active = asset parent = asset - if asset.type == 'EMPTY': - meshes = [o for o in asset.children_recursive if o.type == 'MESH'] + if asset.type == "EMPTY": + meshes = [o for o in asset.children_recursive if o.type == "MESH"] sizes = [] for m in meshes: co = read_co(m) @@ -113,35 +124,45 @@ def build_scene_asset(args, factory_name, idx): plane.location[-1] += np.min(co[:, -1]) + 2.5 butil.apply_transform(plane, True) plane_ = deep_clone_obj(plane) - plane_.location[-1] -= .1 + plane_.location[-1] -= 0.1 plane_.scale = (1.5,) * 3 normal = read_normal(plane) - write_attr_data(plane, 'ground', normal[:, -1] < -.5, 'INT', 'FACE') - idx = parameters[factory_name]['scene_idx'] + write_attr_data(plane, "ground", normal[:, -1] < -0.5, "INT", "FACE") + idx = parameters[factory_name]["scene_idx"] with FixedSeed(idx): - wood_tile.apply(plane, selection='ground') - non_wood_tile.apply(plane, selection='!ground', vertical=True) + wood_tile.apply(plane, selection="ground") + non_wood_tile.apply(plane, selection="!ground", vertical=True) factory = CeilingLightFactory(0) - factory.light_factory.params['Wattage'] = factory.light_factory.params['Wattage'] * 20 + factory.light_factory.params["Wattage"] = ( + factory.light_factory.params["Wattage"] * 20 + ) light = factory.spawn_asset(0) - light.location[-1] = np.min(co[:, -1]) + 5 - .5 + light.location[-1] = np.min(co[:, -1]) + 5 - 0.5 return asset def build_scene(path, idx, factory_name, args): scene = bpy.context.scene - scene.render.engine = 'CYCLES' - scene.render.resolution_x, scene.render.resolution_y = map(int, args.resolution.split('x')) + scene.render.engine = "CYCLES" + scene.render.resolution_x, scene.render.resolution_y = map( + int, args.resolution.split("x") + ) scene.cycles.samples = args.samples configure_cycles_devices(True) t = idx / (args.frame_end / args.cycles) - args.cam_angle = args.cam_angle[0], args.cam_angle[1], (np.abs(t - np.round(t)) * 2) * 180 + args.cam_angle = ( + args.cam_angle[0], + args.cam_angle[1], + (np.abs(t - np.round(t)) * 2) * 180, + ) if not args.fire: bpy.context.scene.render.film_transparent = args.film_transparent - bpy.context.scene.world.node_tree.nodes['Background'].inputs[0].default_value[-1] = 0 + bpy.context.scene.world.node_tree.nodes["Background"].inputs[0].default_value[ + -1 + ] = 0 - if idx % parameters[factory_name]['repeat'] == 0: + if idx % parameters[factory_name]["repeat"] == 0: butil.clear_scene() camera, center = setup_camera(args) asset = build_scene_asset(args, factory_name, idx) @@ -154,17 +175,21 @@ def build_scene(path, idx, factory_name, args): three_point_lighting.add_lighting(asset) else: sky_lighting.add_lighting(camera) - nodes = bpy.data.worlds['World'].node_tree.nodes - sky_texture = [n for n in nodes if n.name.startswith('Sky Texture')][-1] + nodes = bpy.data.worlds["World"].node_tree.nodes + sky_texture = [n for n in nodes if n.name.startswith("Sky Texture")][-1] sky_texture.sun_elevation = np.deg2rad(args.elevation) - sky_texture.sun_rotation = np.pi * .75 + sky_texture.sun_rotation = np.pi * 0.75 else: camera, center = setup_camera(args) - asset = list(o for o in bpy.data.objects if 'Factory' in o.name and o.parent is None)[0] + asset = list( + o for o in bpy.data.objects if "Factory" in o.name and o.parent is None + )[0] if args.scale_reference: - bpy.ops.mesh.primitive_cylinder_add(radius=0.3, depth=1.8, location=(4.9, 4.9, 1.8 / 2)) + 
bpy.ops.mesh.primitive_cylinder_add( + radius=0.3, depth=1.8, location=(4.9, 4.9, 1.8 / 2) + ) if args.cam_center > 0 and asset: co = read_base_co(asset) + asset.location @@ -172,34 +197,38 @@ def build_scene(path, idx, factory_name, args): center.location[-1] += args.cam_zoff if args.cam_dist <= 0 and asset: - if 'Factory' in factory_name: + if "Factory" in factory_name: adjust_cam_distance(asset, camera, args.margin) else: - adjust_cam_distance(asset, camera, args.margin, .75) + adjust_cam_distance(asset, camera, args.margin, 0.75) - cam_info_ng = bpy.data.node_groups.get('nodegroup_active_cam_info') + cam_info_ng = bpy.data.node_groups.get("nodegroup_active_cam_info") if cam_info_ng is not None: - cam_info_ng.nodes['Object Info'].inputs['Object'].default_value = camera + cam_info_ng.nodes["Object Info"].inputs["Object"].default_value = camera if args.save_blend: - (path / 'scenes').mkdir(exist_ok=True) + (path / "scenes").mkdir(exist_ok=True) butil.save_blend(f"{path}/scenes/scene_{idx:03d}.blend", autopack=True) tag_system.save_tag(f"{path}/MaskTag.json") if args.fire: - bpy.data.worlds['World'].node_tree.nodes["Background.001"].inputs[1].default_value = 0.04 + bpy.data.worlds["World"].node_tree.nodes["Background.001"].inputs[ + 1 + ].default_value = 0.04 bpy.context.scene.view_settings.exposure = -2 - if args.render == 'image': - (path / 'images').mkdir(exist_ok=True) + if args.render == "image": + (path / "images").mkdir(exist_ok=True) imgpath = path / f"images/image_{idx:03d}.png" scene.render.filepath = str(imgpath) bpy.ops.render.render(write_still=True) - elif args.render == 'video': + elif args.render == "video": bpy.context.scene.frame_end = args.frame_end t = f"frame / {args.frame_end / args.cycles}" - parent(asset).driver_add('rotation_euler')[-1].driver.expression = f'(abs({t}-round({t}))*2-.5)*{np.pi}' - (path / 'frames' / f'scene_{idx:03d}').mkdir(parents=True, exist_ok=True) + parent(asset).driver_add("rotation_euler")[ + -1 + ].driver.expression = f"(abs({t}-round({t}))*2-.5)*{np.pi}" + (path / "frames" / f"scene_{idx:03d}").mkdir(parents=True, exist_ok=True) imgpath = path / f"frames/scene_{idx:03d}/frame_###.png" scene.render.filepath = str(imgpath) bpy.ops.render.render(animation=True) @@ -209,14 +238,14 @@ def parent(obj): return obj if obj.parent is None else obj.parent -def adjust_cam_distance(asset, camera, margin, percent=.999): +def adjust_cam_distance(asset, camera, margin, percent=0.999): co = read_base_co(asset) * asset.scale co += asset.location lowest = np.amin(co, 0) highest = np.amax(co, 0) interp = np.linspace(lowest, highest, 11) bbox = np.array(list(product(*zip(*interp)))) - for cam_dist in np.exp(np.linspace(-1., 5.5, 500)): + for cam_dist in np.exp(np.linspace(-1.0, 5.5, 500)): camera.location[1] = -cam_dist bpy.context.view_layer.update() inview = points_inview(bbox, camera) @@ -230,71 +259,82 @@ def adjust_cam_distance(asset, camera, margin, percent=.999): def make_grid(args, path, n): files = [] - for filename in sorted(os.listdir(f'{path}/images')): - if filename.endswith('.png'): - files.append(f'{path}/images/{filename}') + for filename in sorted(os.listdir(f"{path}/images")): + if filename.endswith(".png"): + files.append(f"{path}/images/{filename}") files = files[:n] if len(files) == 0: - print('No images found') + print("No images found") return with Image.open(files[0]) as i: x, y = i.size - for i, name in enumerate([path.stem, f'{path.stem}_']): + for i, name in enumerate([path.stem, f"{path.stem}_"]): if args.zoom: - img = 
Image.new('RGBA', (2 * x, y)) - sz = int(np.floor(np.sqrt(n - .9))) + img = Image.new("RGBA", (2 * x, y)) + sz = int(np.floor(np.sqrt(n - 0.9))) if i > 0: random.shuffle(files) with Image.open(files[0]) as i: img.paste(i, (0, 0)) - for idx in range(sz ** 2): + for idx in range(sz**2): with Image.open(files[min(idx + 1, len(files) - 1)]) as i: - img.paste(i.resize((x // sz, y // sz)), (x + (idx % sz) * (x // sz), idx // sz * (y // sz))) - img.save(f'{path}/{name}.png') + img.paste( + i.resize((x // sz, y // sz)), + (x + (idx % sz) * (x // sz), idx // sz * (y // sz)), + ) + img.save(f"{path}/{name}.png") else: - sz_x = list(sorted(range(1, n + 1), key=lambda x: abs(math.ceil(n / x) / x - args.best_ratio)))[0] + sz_x = list( + sorted( + range(1, n + 1), + key=lambda x: abs(math.ceil(n / x) / x - args.best_ratio), + ) + )[0] sz_y = math.ceil(n / sz_x) - img = Image.new('RGBA', (sz_x * x, sz_y * y)) + img = Image.new("RGBA", (sz_x * x, sz_y * y)) if i > 0: random.shuffle(files) for idx, file in enumerate(files): with Image.open(file) as i: img.paste(i, (idx % sz_x * x, idx // sz_x * y)) - img.save(f'{path}/{name}.png') + img.save(f"{path}/{name}.png") def main(args): - bpy.context.window.workspace = bpy.data.workspaces['Geometry Nodes'] + bpy.context.window.workspace = bpy.data.workspaces["Geometry Nodes"] - init.apply_gin_configs('infinigen_examples/configs_indoor', skip_unknown=True) + init.apply_gin_configs("infinigen_examples/configs_indoor", skip_unknown=True) surface.registry.initialize_from_gin() - extras = '[%(filename)s:%(lineno)d] ' if args.loglevel == logging.DEBUG else '' + extras = "[%(filename)s:%(lineno)d] " if args.loglevel == logging.DEBUG else "" logging.basicConfig( - format=f'[%(asctime)s.%(msecs)03d] [%(name)s] [%(levelname)s] {extras}| %(message)s', - level=args.loglevel, datefmt='%H:%M:%S' + format=f"[%(asctime)s.%(msecs)03d] [%(name)s] [%(levelname)s] {extras}| %(message)s", + level=args.loglevel, + datefmt="%H:%M:%S", ) logging.getLogger("infinigen").setLevel(args.loglevel) - if '.txt' in args.factories[0]: - name = args.factories[0].split('.')[-2].split('/')[-1] + if ".txt" in args.factories[0]: + name = args.factories[0].split(".")[-2].split("/")[-1] else: - name = '_'.join(args.factories) - path = Path(os.getcwd()) / 'outputs' / name + name = "_".join(args.factories) + path = Path(os.getcwd()) / "outputs" / name path.mkdir(exist_ok=True) factories = list(args.factories) - if 'ALL_ASSETS' in factories: + if "ALL_ASSETS" in factories: factories += [f.__name__ for f in subclasses(factory.AssetFactory)] - factories.remove('ALL_ASSETS') - if 'ALL_SCATTERS' in factories: - factories += [f.stem for f in Path('surfaces/scatters').iterdir()] - factories.remove('ALL_SCATTERS') - if 'ALL_MATERIALS' in factories: - factories += [f.stem for f in Path('infinigen/assets/materials').iterdir()] - factories.remove('ALL_MATERIALS') - if '.txt' in factories[0]: - factories = [f.split('.')[-1] for f in load_txt_list(factories[0], skip_sharp=False)] + factories.remove("ALL_ASSETS") + if "ALL_SCATTERS" in factories: + factories += [f.stem for f in Path("surfaces/scatters").iterdir()] + factories.remove("ALL_SCATTERS") + if "ALL_MATERIALS" in factories: + factories += [f.stem for f in Path("infinigen/assets/materials").iterdir()] + factories.remove("ALL_MATERIALS") + if ".txt" in factories[0]: + factories = [ + f.split(".")[-1] for f in load_txt_list(factories[0], skip_sharp=False) + ] for fac in factories: fac_path = path / fac @@ -309,21 +349,25 @@ def main(args): except Exception as e: 
print(e) continue - if args.render == 'image': + if args.render == "image": make_grid(args, fac_path, n_images) - if args.render == 'video': - (fac_path / 'videos').mkdir(exist_ok=True) + if args.render == "video": + (fac_path / "videos").mkdir(exist_ok=True) for i in range(n_images): subprocess.run( f'ffmpeg -y -r 24 -pattern_type glob -i "{fac_path}/frames/scene_{i:03d}/frame*.png" ' - f'{fac_path}/videos/video_{i:03d}.mp4', shell=True + f"{fac_path}/videos/video_{i:03d}.mp4", + shell=True, ) -if __name__ == '__main__': +if __name__ == "__main__": args = make_args() args.no_mod = args.no_mod or args.fire args.film_transparent = args.film_transparent and not args.hdri - args.n_images = len(parameters[args.factories[0]]['factories']) * parameters[args.factories[0]]['repeat'] + args.n_images = ( + len(parameters[args.factories[0]]["factories"]) + * parameters[args.factories[0]]["repeat"] + ) with FixedSeed(1): main(args) diff --git a/infinigen_examples/generate_individual_assets.py b/infinigen_examples/generate_individual_assets.py index 6f8e8f9f0..89a0c50a0 100644 --- a/infinigen_examples/generate_individual_assets.py +++ b/infinigen_examples/generate_individual_assets.py @@ -10,6 +10,7 @@ import argparse import importlib +import logging import math import os import random @@ -17,52 +18,62 @@ import subprocess import traceback from itertools import product -from pathlib import Path -import logging from multiprocessing import Pool - -from infinigen.core.init import configure_cycles_devices - -logging.basicConfig(format='[%(asctime)s.%(msecs)03d] [%(name)s] [%(levelname)s] | %(message)s', - datefmt='%H:%M:%S', level=logging.WARNING) +from pathlib import Path import bpy import gin import numpy as np -from PIL import Image - import submitit +from PIL import Image -from infinigen.assets.fluid.fluid import set_obj_on_fire -from infinigen.core.tagging import tag_system -from infinigen.assets.lighting import sky_lighting, hdri_lighting, three_point_lighting, holdout_lighting +# ruff: noqa: E402 +# NOTE: logging config has to be before imports that use logging +logging.basicConfig( + format="[%(asctime)s.%(msecs)03d] [%(module)s] [%(levelname)s] | %(message)s", + datefmt="%H:%M:%S", + level=logging.INFO, +) + +from infinigen.assets.lighting import ( + hdri_lighting, + holdout_lighting, + sky_lighting, + three_point_lighting, +) -from infinigen.core import surface, init -from infinigen.core.placement import density, factory -from infinigen.core.util.camera import points_inview - -from infinigen.assets.utils.misc import assign_material, subclasses # from infinigen.core.rendering.render import enable_gpu from infinigen.assets.utils.decorate import read_base_co, read_co +from infinigen.assets.utils.misc import assign_material, subclasses +from infinigen.core import init, surface +from infinigen.core.init import configure_cycles_devices +from infinigen.core.placement import density, factory +from infinigen.core.tagging import tag_system -from infinigen.core.util.math import FixedSeed # noinspection PyUnresolvedReferences from infinigen.core.util import blender as butil - +from infinigen.core.util.camera import points_inview +from infinigen.core.util.math import FixedSeed from infinigen.tools import export - from infinigen_examples.util.test_utils import load_txt_list +logging.basicConfig( + format="[%(asctime)s.%(msecs)03d] [%(name)s] [%(levelname)s] | %(message)s", + datefmt="%H:%M:%S", + level=logging.WARNING, +) + + def build_scene_asset(args, factory_name, idx): factory = None - for subdir in 
os.listdir('infinigen/assets'): + for subdir in os.listdir("infinigen/assets"): with gin.unlock_config(): module = importlib.import_module(f'infinigen.assets.{subdir.split(".")[0]}') if hasattr(module, factory_name): factory = getattr(module, factory_name) break if factory is None: - raise ModuleNotFoundError(f'{factory_name} not Found.') + raise ModuleNotFoundError(f"{factory_name} not Found.") with FixedSeed(idx): factory = factory(idx) try: @@ -73,21 +84,31 @@ def build_scene_asset(args, factory_name, idx): asset = factory.spawn_asset(idx) except Exception as e: traceback.print_exc() - print(f'{factory}.spawn_asset({idx=}) FAILED!! {e}') + print(f"{factory}.spawn_asset({idx=}) FAILED!! {e}") raise e factory.finalize_assets(asset) if args.fire: from infinigen.assets.fluid.fluid import set_obj_on_fire - set_obj_on_fire(asset, 0, resolution=args.fire_res, simulation_duration=args.fire_duration, - noise_scale=2, add_turbulence=True, adaptive_domain=False) + + set_obj_on_fire( + asset, + 0, + resolution=args.fire_res, + simulation_duration=args.fire_duration, + noise_scale=2, + add_turbulence=True, + adaptive_domain=False, + ) bpy.context.scene.frame_set(args.fire_duration) bpy.context.scene.frame_end = args.fire_duration - bpy.data.worlds['World'].node_tree.nodes["Background.001"].inputs[1].default_value = 0.04 + bpy.data.worlds["World"].node_tree.nodes["Background.001"].inputs[ + 1 + ].default_value = 0.04 bpy.context.scene.view_settings.exposure = -1 bpy.context.view_layer.objects.active = asset parent = asset - if asset.type == 'EMPTY': - meshes = [o for o in asset.children_recursive if o.type == 'MESH'] + if asset.type == "EMPTY": + meshes = [o for o in asset.children_recursive if o.type == "MESH"] sizes = [] for m in meshes: co = read_co(m) @@ -104,13 +125,20 @@ def build_scene_asset(args, factory_name, idx): parent.location = -(x_min[0] + x_max[0]) / 2, -(x_min[1] + x_max[1]) / 2, 0 butil.apply_transform(parent, loc=True) if not args.no_ground: - bpy.ops.mesh.primitive_grid_add(size=5, x_subdivisions=400, y_subdivisions=400) + bpy.ops.mesh.primitive_grid_add( + size=5, x_subdivisions=400, y_subdivisions=400 + ) plane = bpy.context.active_object plane.location[-1] = x_min[-1] plane.is_shadow_catcher = True - material = bpy.data.materials.new('plane') + material = bpy.data.materials.new("plane") material.use_nodes = True - material.node_tree.nodes['Principled BSDF'].inputs[0].default_value = .015, .009, .003, 1 + material.node_tree.nodes["Principled BSDF"].inputs[0].default_value = ( + 0.015, + 0.009, + 0.003, + 1, + ) assign_material(plane, material) return asset @@ -119,80 +147,95 @@ def build_scene_asset(args, factory_name, idx): def build_scene_surface(factory_name, idx): try: with gin.unlock_config(): - scatter = importlib.import_module(f'infinigen.assets.scatters.{factory_name}') + scatter = importlib.import_module( + f"infinigen.assets.scatters.{factory_name}" + ) - if not hasattr(scatter, 'apply'): - raise ValueError(f'{scatter} has no apply()') + if not hasattr(scatter, "apply"): + raise ValueError(f"{scatter} has no apply()") - bpy.ops.mesh.primitive_grid_add(size=10, x_subdivisions=400, y_subdivisions=400) + bpy.ops.mesh.primitive_grid_add( + size=10, x_subdivisions=400, y_subdivisions=400 + ) plane = bpy.context.active_object - material = bpy.data.materials.new('plane') + material = bpy.data.materials.new("plane") material.use_nodes = True - material.node_tree.nodes['Principled BSDF'].inputs[0].default_value = .015, .009, .003, 1 + material.node_tree.nodes["Principled 
BSDF"].inputs[0].default_value = ( + 0.015, + 0.009, + 0.003, + 1, + ) assign_material(plane, material) if type(scatter) is type: scatter = scatter(idx) - scatter.apply(plane, selection=density.placement_mask(.15, .45)) + scatter.apply(plane, selection=density.placement_mask(0.15, 0.45)) asset = plane except ModuleNotFoundError: try: with gin.unlock_config(): try: - template = importlib.import_module(f'infinigen.assets.materials.{factory_name}') - except: - for subdir in os.listdir('infinigen/assets/materials'): + template = importlib.import_module( + f"infinigen.assets.materials.{factory_name}" + ) + except ImportError: + for subdir in os.listdir("infinigen/assets/materials"): with gin.unlock_config(): module = importlib.import_module( - f'infinigen.assets.materials.{subdir.split(".")[0]}') + f'infinigen.assets.materials.{subdir.split(".")[0]}' + ) if hasattr(module, factory_name): template = getattr(module, factory_name) break else: - raise Exception(f'{factory_name} not Found.') - if hasattr(template, 'make_sphere'): + raise Exception(f"{factory_name} not Found.") + if hasattr(template, "make_sphere"): asset = template.make_sphere() else: - bpy.ops.mesh.primitive_ico_sphere_add(radius=.8, subdivisions=9) + bpy.ops.mesh.primitive_ico_sphere_add(radius=0.8, subdivisions=9) asset = bpy.context.active_object if type(template) is type: template = template(idx) template.apply(asset) except ModuleNotFoundError: - raise Exception(f'{factory_name} not Found.') + raise Exception(f"{factory_name} not Found.") return asset def build_and_save_asset(payload: dict): - # unpack payload - args are packed into payload for compatibility with slurm/multiprocessing - factory_name = payload['fac'] - args = payload['args'] - idx = payload['idx'] + factory_name = payload["fac"] + args = payload["args"] + idx = payload["idx"] if args.seed > 0: idx = args.seed path = args.output_folder / factory_name if (path / f"images/image_{idx:03d}.png").exists() and args.skip_existing: - print(f'Skipping {path}') + print(f"Skipping {path}") return path.mkdir(exist_ok=True) scene = bpy.context.scene - scene.render.engine = 'CYCLES' - scene.render.resolution_x, scene.render.resolution_y = map(int, args.resolution.split('x')) + scene.render.engine = "CYCLES" + scene.render.resolution_x, scene.render.resolution_y = map( + int, args.resolution.split("x") + ) scene.cycles.samples = args.samples butil.clear_scene() configure_cycles_devices() if not args.fire: bpy.context.scene.render.film_transparent = args.film_transparent - bpy.context.scene.world.node_tree.nodes['Background'].inputs[0].default_value[-1] = 0 + bpy.context.scene.world.node_tree.nodes["Background"].inputs[0].default_value[ + -1 + ] = 0 camera, center = setup_camera(args) - if 'Factory' in factory_name: + if "Factory" in factory_name: asset = build_scene_asset(args, factory_name, idx) else: asset = build_scene_surface(factory_name, idx) @@ -205,13 +248,15 @@ def build_and_save_asset(payload: dict): three_point_lighting.add_lighting(asset) else: sky_lighting.add_lighting(camera) - nodes = bpy.data.worlds['World'].node_tree.nodes - sky_texture = [n for n in nodes if n.name.startswith('Sky Texture')][-1] + nodes = bpy.data.worlds["World"].node_tree.nodes + sky_texture = [n for n in nodes if n.name.startswith("Sky Texture")][-1] sky_texture.sun_elevation = np.deg2rad(args.elevation) - sky_texture.sun_rotation = np.pi * .75 + sky_texture.sun_rotation = np.pi * 0.75 if args.scale_reference: - bpy.ops.mesh.primitive_cylinder_add(radius=0.3, depth=1.8, location=(4.9, 4.9, 
1.8 / 2)) + bpy.ops.mesh.primitive_cylinder_add( + radius=0.3, depth=1.8, location=(4.9, 4.9, 1.8 / 2) + ) if args.cam_center > 0 and asset: co = read_base_co(asset) + asset.location @@ -219,62 +264,65 @@ def build_and_save_asset(payload: dict): center.location[-1] += args.cam_zoff if args.cam_dist <= 0 and asset: - if 'Factory' in factory_name: + if "Factory" in factory_name: adjust_cam_distance(asset, camera, args.margin) else: - adjust_cam_distance(asset, camera, args.margin, .75) + adjust_cam_distance(asset, camera, args.margin, 0.75) - cam_info_ng = bpy.data.node_groups.get('nodegroup_active_cam_info') + cam_info_ng = bpy.data.node_groups.get("nodegroup_active_cam_info") if cam_info_ng is not None: - cam_info_ng.nodes['Object Info'].inputs['Object'].default_value = camera + cam_info_ng.nodes["Object Info"].inputs["Object"].default_value = camera if args.save_blend: - (path / 'scenes').mkdir(exist_ok=True) + (path / "scenes").mkdir(exist_ok=True) butil.save_blend(f"{path}/scenes/scene_{idx:03d}.blend", autopack=True) tag_system.save_tag(f"{path}/MaskTag.json") if args.fire: - bpy.data.worlds['World'].node_tree.nodes["Background.001"].inputs[1].default_value = 0.04 + bpy.data.worlds["World"].node_tree.nodes["Background.001"].inputs[ + 1 + ].default_value = 0.04 bpy.context.scene.view_settings.exposure = -2 - if args.render == 'image': - (path / 'images').mkdir(exist_ok=True) + if args.render == "image": + (path / "images").mkdir(exist_ok=True) imgpath = path / f"images/image_{idx:03d}.png" scene.render.filepath = str(imgpath) bpy.ops.render.render(write_still=True) - elif args.render == 'video': + elif args.render == "video": bpy.context.scene.frame_end = args.frame_end - parent(asset).driver_add('rotation_euler')[ - -1].driver.expression = f"frame/{args.frame_end / (2 * np.pi * args.cycles)}" - (path / 'frames' / f'scene_{idx:03d}').mkdir(parents=True, exist_ok=True) + parent(asset).driver_add("rotation_euler")[ + -1 + ].driver.expression = f"frame/{args.frame_end / (2 * np.pi * args.cycles)}" + (path / "frames" / f"scene_{idx:03d}").mkdir(parents=True, exist_ok=True) imgpath = path / f"frames/scene_{idx:03d}/frame_###.png" scene.render.filepath = str(imgpath) bpy.ops.render.render(animation=True) - elif args.render == 'none': + elif args.render == "none": pass else: - raise ValueError(f'Unrecognized {args.render=}') + raise ValueError(f"Unrecognized {args.render=}") if args.export is not None: - export_path = path/'export'/f'export_{idx:03d}' + export_path = path / "export" / f"export_{idx:03d}" export_path.mkdir(exist_ok=True, parents=True) export.export_curr_scene( - export_path, - format=args.export, - image_res=args.export_texture_res + export_path, format=args.export, image_res=args.export_texture_res ) + def parent(obj): return obj if obj.parent is None else obj.parent -def adjust_cam_distance(asset, camera, margin, percent=.999): + +def adjust_cam_distance(asset, camera, margin, percent=0.999): co = read_base_co(asset) * asset.scale co += asset.location lowest = np.amin(co, 0) highest = np.amax(co, 0) interp = np.linspace(lowest, highest, 11) bbox = np.array(list(product(*zip(*interp)))) - for cam_dist in np.exp(np.linspace(-1., 5.5, 500)): + for cam_dist in np.exp(np.linspace(-1.0, 5.5, 500)): camera.location[1] = -cam_dist bpy.context.view_layer.update() inview = points_inview(bbox, camera) @@ -288,61 +336,69 @@ def adjust_cam_distance(asset, camera, margin, percent=.999): def make_grid(args, path, n): files = [] - for filename in sorted(os.listdir(f'{path}/images')): - if 
filename.endswith('.png'): - files.append(f'{path}/images/{filename}') + for filename in sorted(os.listdir(f"{path}/images")): + if filename.endswith(".png"): + files.append(f"{path}/images/{filename}") files = files[:n] if len(files) == 0: - print('No images found') + print("No images found") return with Image.open(files[0]) as i: x, y = i.size - for i, name in enumerate([path.stem, f'{path.stem}_']): + for i, name in enumerate([path.stem, f"{path.stem}_"]): if args.zoom: - img = Image.new('RGBA', (2 * x, y)) - sz = int(np.floor(np.sqrt(n - .9))) + img = Image.new("RGBA", (2 * x, y)) + sz = int(np.floor(np.sqrt(n - 0.9))) if i > 0: random.shuffle(files) with Image.open(files[0]) as i: img.paste(i, (0, 0)) - for idx in range(sz ** 2): + for idx in range(sz**2): with Image.open(files[min(idx + 1, len(files) - 1)]) as i: - img.paste(i.resize((x // sz, y // sz)), (x + (idx % sz) * (x // sz), idx // sz * (y // sz))) - img.save(f'{path}/{name}.png') + img.paste( + i.resize((x // sz, y // sz)), + (x + (idx % sz) * (x // sz), idx // sz * (y // sz)), + ) + img.save(f"{path}/{name}.png") else: - sz_x = list(sorted(range(1, n + 1), key=lambda x: abs(math.ceil(n / x) / x - args.best_ratio)))[0] + sz_x = list( + sorted( + range(1, n + 1), + key=lambda x: abs(math.ceil(n / x) / x - args.best_ratio), + ) + )[0] sz_y = math.ceil(n / sz_x) - img = Image.new('RGBA', (sz_x * x, sz_y * y)) + img = Image.new("RGBA", (sz_x * x, sz_y * y)) if i > 0: random.shuffle(files) for idx, file in enumerate(files): with Image.open(file) as i: img.paste(i, (idx % sz_x * x, idx // sz_x * y)) - img.save(f'{path}/{name}.png') - print(f'{path}/{name}.png generated') + img.save(f"{path}/{name}.png") + print(f"{path}/{name}.png generated") def setup_camera(args): cam_dist = args.cam_dist if args.cam_dist > 0 else 6 bpy.ops.object.camera_add(location=(0, -cam_dist, 0), rotation=(np.pi / 2, 0, 0)) camera = bpy.context.active_object - camera.parent = butil.spawn_empty('Camera parent') + camera.parent = butil.spawn_empty("Camera parent") camera.parent.location = (0, 0, args.cam_zoff) camera.parent.rotation_euler = np.deg2rad(np.array(args.cam_angle)) - bpy.data.scenes['Scene'].camera = camera + bpy.data.scenes["Scene"].camera = camera scene = bpy.context.scene - camera.data.sensor_height = camera.data.sensor_width * scene.render.resolution_y / scene.render.resolution_x + camera.data.sensor_height = ( + camera.data.sensor_width * scene.render.resolution_y / scene.render.resolution_x + ) for area in bpy.context.screen.areas: - if area.type == 'VIEW_3D': - area.spaces.active.region_3d.view_perspective = 'CAMERA' + if area.type == "VIEW_3D": + area.spaces.active.region_3d.view_perspective = "CAMERA" break - cam_info_ng = bpy.data.node_groups.get('nodegroup_active_cam_info') + cam_info_ng = bpy.data.node_groups.get("nodegroup_active_cam_info") if cam_info_ng is not None: - cam_info_ng.nodes['Object Info'].inputs['Object'].default_value = camera + cam_info_ng.nodes["Object Info"].inputs["Object"].default_value = camera return camera, camera.parent -def subclasses(cls): - return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in subclasses(c)]) def mapfunc(f, its, args): if args.n_workers == 1: @@ -351,25 +407,24 @@ def mapfunc(f, its, args): with Pool(args.n_workers) as p: return list(p.imap(f, its)) else: - executor = submitit.AutoExecutor( - folder=args.output_folder/'logs' - ) + executor = submitit.AutoExecutor(folder=args.output_folder / "logs") executor.update_parameters( name=args.output_folder.name, 
timeout_min=60, cpus_per_task=2, mem_gb=8, - slurm_partition=os.environ['INFINIGEN_SLURMPARTITION'], - slurm_array_parallelism=args.n_workers + slurm_partition=os.environ["INFINIGEN_SLURMPARTITION"], + slurm_array_parallelism=args.n_workers, ) jobs = executor.map_array(f, its) for j in jobs: - print(f'Job finished {j.wait()}') + print(f"Job finished {j.wait()}") + def main(args): - bpy.context.window.workspace = bpy.data.workspaces['Geometry Nodes'] + bpy.context.window.workspace = bpy.data.workspaces["Geometry Nodes"] - init.apply_gin_configs('infinigen_examples/configs_indoor', skip_unknown=True) + init.apply_gin_configs("infinigen_examples/configs_indoor", skip_unknown=True) surface.registry.initialize_from_gin() init.configure_blender() @@ -377,133 +432,234 @@ def main(args): if args.gpu: init.configure_render_cycles() - extras = '[%(filename)s:%(lineno)d] ' if args.loglevel == logging.DEBUG else '' - logging.basicConfig(format=f'[%(asctime)s.%(msecs)03d] [%(name)s] [%(levelname)s] {extras}| %(message)s', - level=args.loglevel, datefmt='%H:%M:%S') + extras = "[%(filename)s:%(lineno)d] " if args.loglevel == logging.DEBUG else "" + logging.basicConfig( + format=f"[%(asctime)s.%(msecs)03d] [%(name)s] [%(levelname)s] {extras}| %(message)s", + level=args.loglevel, + datefmt="%H:%M:%S", + ) logging.getLogger("infinigen").setLevel(args.loglevel) - if '.txt' in args.factories[0]: - name = args.factories[0].split('.')[-2].split('/')[-1] + if ".txt" in args.factories[0]: + name = args.factories[0].split(".")[-2].split("/")[-1] else: - name = '_'.join(args.factories) + name = "_".join(args.factories) if args.output_folder is None: - args.output_folder = Path(os.getcwd()) / 'outputs' + args.output_folder = Path(os.getcwd()) / "outputs" path = Path(args.output_folder) / name path.mkdir(exist_ok=True) factories = list(args.factories) - if 'ALL_ASSETS' in factories: + if "ALL_ASSETS" in factories: factories += [f.__name__ for f in subclasses(factory.AssetFactory)] - factories.remove('ALL_ASSETS') - if 'ALL_SCATTERS' in factories: - factories += [f.stem for f in Path('surfaces/scatters').iterdir()] - factories.remove('ALL_SCATTERS') - if 'ALL_MATERIALS' in factories: - factories += [f.stem for f in Path('infinigen/assets/materials').iterdir()] - factories.remove('ALL_MATERIALS') - has_txt = '.txt' in factories[0] + factories.remove("ALL_ASSETS") + if "ALL_SCATTERS" in factories: + factories += [f.stem for f in Path("surfaces/scatters").iterdir()] + factories.remove("ALL_SCATTERS") + if "ALL_MATERIALS" in factories: + factories += [f.stem for f in Path("infinigen/assets/materials").iterdir()] + factories.remove("ALL_MATERIALS") + has_txt = ".txt" in factories[0] if has_txt: - factories = [f.split('.')[-1] for f in load_txt_list(factories[0], skip_sharp=False)] + factories = [ + f.split(".")[-1] for f in load_txt_list(factories[0], skip_sharp=False) + ] if not args.postprocessing_only: for fac in factories: targets = [ - {'args': args, 'fac': fac, 'idx': idx} - for idx in range(args.n_images) + {"args": args, "fac": fac, "idx": idx} for idx in range(args.n_images) ] mapfunc(build_and_save_asset, targets, args) for j, fac in enumerate(factories): - fac_path = args.output_folder/fac - assert fac_path.exists(); f'{fac_path} does not exist' + fac_path = args.output_folder / fac + assert fac_path.exists() + f"{fac_path} does not exist" if has_txt: for i in range(args.n_images): - img_path = fac_path / 'images' / f'image_{i:03d}.png' + img_path = fac_path / "images" / f"image_{i:03d}.png" if 
img_path.exists(): subprocess.run( - f'cp -f {img_path} {path}/{fac}_{i:03d}.png', shell=True + f"cp -f {img_path} {path}/{fac}_{i:03d}.png", shell=True ) else: - print(f'{img_path} does not exist') - elif args.render == 'image': + print(f"{img_path} does not exist") + elif args.render == "image": make_grid(args, fac_path, args.n_images) - elif args.render == 'video': - (fac_path / 'videos').mkdir(exist_ok=True) + elif args.render == "video": + (fac_path / "videos").mkdir(exist_ok=True) for i in range(args.n_images): subprocess.run( f'ffmpeg -y -r 24 -pattern_type glob -i "{fac_path}/frames/scene_{i:03d}/frame*.png" ' - f'{fac_path}/videos/video_{i:03d}.mp4', shell=True) - + f"{fac_path}/videos/video_{i:03d}.mp4", + shell=True, + ) def snake_case(s): - return '_'.join( - re.sub('([A-Z][a-z]+)', r' \1', re.sub('([A-Z]+)', r' \1', s.replace('-', ' '))).split()).lower() + return "_".join( + re.sub( + "([A-Z][a-z]+)", r" \1", re.sub("([A-Z]+)", r" \1", s.replace("-", " ")) + ).split() + ).lower() + def make_args(): parser = argparse.ArgumentParser() - parser.add_argument('-o', '--output_folder', type=Path, default=None) - parser.add_argument('-f', '--factories', default=[], nargs='+', - help="List factories/surface scatters/surface materials you want to render") - parser.add_argument('-n', '--n_images', default=1, type=int, help="Number of scenes to render") - parser.add_argument("-m", '--margin', default=.01, - help="Margin between the asset the boundary of the image when automatically adjusting " - "the camera") - parser.add_argument('-R', '--resolution', default='1024x1024', type=str, - help="Image resolution widthxheight") - parser.add_argument('-p', '--samples', default=200, type=int, help="Blender cycles samples") - parser.add_argument('-l', '--lighting', default=0, type=int, help="Lighting seed") - parser.add_argument('-Z', '--cam_zoff', '--z_offset', type=float, default=.0, - help="Additional offset on Z axis for camera look-at positions") - parser.add_argument('-s', '--save_blend', action='store_true', help="Whether to save .blend file") - parser.add_argument('-e', '--elevation', default=60, type=float, help="Elevation of the sun") - parser.add_argument('--cam_dist', default=0, type=float, - help="Distance from the camera to the look-at position" + parser.add_argument("-o", "--output_folder", type=Path, default=None) + parser.add_argument( + "-f", + "--factories", + default=[], + nargs="+", + help="List factories/surface scatters/surface materials you want to render", + ) + parser.add_argument( + "-n", "--n_images", default=1, type=int, help="Number of scenes to render" + ) + parser.add_argument( + "-m", + "--margin", + default=0.01, + help="Margin between the asset the boundary of the image when automatically adjusting " + "the camera", ) parser.add_argument( - '-a', '--cam_angle', default=(-30, 0, 45), type=float, nargs='+', - help="Camera rotation in XYZ" + "-R", + "--resolution", + default="1024x1024", + type=str, + help="Image resolution widthxheight", ) - parser.add_argument('-O', '--offset', default=(0, 0, 0), type=float, nargs='+', help='asset location') - parser.add_argument('-c', '--cam_center', default=1, type=int, help="Camera rotation in XYZ") parser.add_argument( - '-r', '--render', default='image', type=str, choices=['image', 'video', 'none'], - help="Whether to render the scene in images or video") - parser.add_argument('-b', '--best_ratio', default=9 / 16, type=float, - help="Best aspect ratio for compiling the images into asset grid") - parser.add_argument('-F', 
'--fire', action='store_true') - parser.add_argument('-I', '--fire_res', default=100, type=int) - parser.add_argument('-U', '--fire_duration', default=30, type=int) - parser.add_argument('-t', '--film_transparent', default=1, type=int, - help="Whether the background is transparent") - parser.add_argument('-E', '--frame_end', type=int, default=120, help="End of frame in videos") - parser.add_argument('-g', '--gpu', action='store_true', help="Whether to use gpu in rendering") - parser.add_argument('-C', '--cycles', type=float, default=1, help="render video cycles") - parser.add_argument('-A', '--scale_reference', action='store_true', help="Add the scale reference") - parser.add_argument('-S', '--skip_existing', action='store_true', help="Skip existing scenes and renders") - parser.add_argument('-P', '--postprocessing_only', action='store_true', help="Only run postprocessing") - parser.add_argument('-D', '--seed', type=int, default=-1, help="Run a specific seed.") - parser.add_argument('-N', '--no-mod', action='store_true', help="No modification") - parser.add_argument('-d', '--debug', action="store_const", dest="loglevel", const=logging.DEBUG, - default=logging.INFO) - parser.add_argument('-H', '--hdri', action='store_true', help="add_hdri") - parser.add_argument('-T', '--three_point', action='store_true', help="add three-point lighting") - parser.add_argument('-G', '--no_ground', action='store_true', help="no ground") - parser.add_argument('-W', '--spawn_placeholder', action='store_true', help="spawn placeholder") - parser.add_argument('-z', '--zoom', action='store_true', help="zoom first figure") - - parser.add_argument('--n_workers', type=int, default=1) - parser.add_argument('--slurm', action='store_true') - - parser.add_argument('--export', type=str, default=None, choices=export.FORMAT_CHOICES) - parser.add_argument('--export_texture_res', type=int, default=1024) + "-p", "--samples", default=200, type=int, help="Blender cycles samples" + ) + parser.add_argument("-l", "--lighting", default=0, type=int, help="Lighting seed") + parser.add_argument( + "-Z", + "--cam_zoff", + "--z_offset", + type=float, + default=0.0, + help="Additional offset on Z axis for camera look-at positions", + ) + parser.add_argument( + "-s", "--save_blend", action="store_true", help="Whether to save .blend file" + ) + parser.add_argument( + "-e", "--elevation", default=60, type=float, help="Elevation of the sun" + ) + parser.add_argument( + "--cam_dist", + default=0, + type=float, + help="Distance from the camera to the look-at position", + ) + parser.add_argument( + "-a", + "--cam_angle", + default=(-30, 0, 45), + type=float, + nargs="+", + help="Camera rotation in XYZ", + ) + parser.add_argument( + "-O", + "--offset", + default=(0, 0, 0), + type=float, + nargs="+", + help="asset location", + ) + parser.add_argument( + "-c", "--cam_center", default=1, type=int, help="Camera rotation in XYZ" + ) + parser.add_argument( + "-r", + "--render", + default="image", + type=str, + choices=["image", "video", "none"], + help="Whether to render the scene in images or video", + ) + parser.add_argument( + "-b", + "--best_ratio", + default=9 / 16, + type=float, + help="Best aspect ratio for compiling the images into asset grid", + ) + parser.add_argument("-F", "--fire", action="store_true") + parser.add_argument("-I", "--fire_res", default=100, type=int) + parser.add_argument("-U", "--fire_duration", default=30, type=int) + parser.add_argument( + "-t", + "--film_transparent", + default=1, + type=int, + help="Whether the 
background is transparent", + ) + parser.add_argument( + "-E", "--frame_end", type=int, default=120, help="End of frame in videos" + ) + parser.add_argument( + "-g", "--gpu", action="store_true", help="Whether to use gpu in rendering" + ) + parser.add_argument( + "-C", "--cycles", type=float, default=1, help="render video cycles" + ) + parser.add_argument( + "-A", "--scale_reference", action="store_true", help="Add the scale reference" + ) + parser.add_argument( + "-S", + "--skip_existing", + action="store_true", + help="Skip existing scenes and renders", + ) + parser.add_argument( + "-P", + "--postprocessing_only", + action="store_true", + help="Only run postprocessing", + ) + parser.add_argument( + "-D", "--seed", type=int, default=-1, help="Run a specific seed." + ) + parser.add_argument("-N", "--no-mod", action="store_true", help="No modification") + parser.add_argument( + "-d", + "--debug", + action="store_const", + dest="loglevel", + const=logging.DEBUG, + default=logging.INFO, + ) + parser.add_argument("-H", "--hdri", action="store_true", help="add_hdri") + parser.add_argument( + "-T", "--three_point", action="store_true", help="add three-point lighting" + ) + parser.add_argument("-G", "--no_ground", action="store_true", help="no ground") + parser.add_argument( + "-W", "--spawn_placeholder", action="store_true", help="spawn placeholder" + ) + parser.add_argument("-z", "--zoom", action="store_true", help="zoom first figure") + + parser.add_argument("--n_workers", type=int, default=1) + parser.add_argument("--slurm", action="store_true") + + parser.add_argument( + "--export", type=str, default=None, choices=export.FORMAT_CHOICES + ) + parser.add_argument("--export_texture_res", type=int, default=1024) return init.parse_args_blender(parser) -if __name__ == '__main__': +if __name__ == "__main__": args = make_args() args.no_mod = args.no_mod or args.fire args.film_transparent = args.film_transparent and not args.hdri diff --git a/infinigen_examples/generate_indoors.py b/infinigen_examples/generate_indoors.py index 80fac6cd8..a557f4232 100644 --- a/infinigen_examples/generate_indoors.py +++ b/infinigen_examples/generate_indoors.py @@ -3,92 +3,65 @@ # of this source tree. 
import argparse -from pathlib import Path import logging -from time import time +from pathlib import Path + from numpy import deg2rad -import pprint -import copy +# ruff: noqa: E402 +# NOTE: logging config has to be before imports that use logging logging.basicConfig( - format='[%(asctime)s.%(msecs)03d] [%(module)s] [%(levelname)s] | %(message)s', - datefmt='%H:%M:%S', - level=logging.INFO + format="[%(asctime)s.%(msecs)03d] [%(module)s] [%(levelname)s] | %(message)s", + datefmt="%H:%M:%S", + level=logging.INFO, ) import bpy -from mathutils import Vector import gin import numpy as np -import trimesh -from numpy import deg2rad -from infinigen.assets import (lighting) -from infinigen.assets.wall_decorations.skirting_board import make_skirting_board -from infinigen.assets.utils.decorate import read_co -from infinigen.terrain import Terrain +from infinigen.assets import lighting from infinigen.assets.materials import invisible_to_camera - -from infinigen.core.constraints import ( - constraint_language as cl, - reasoning as r, - example_solver, - checks, - usage_lookup -) - -from infinigen.assets import ( - fluid, - cactus, - cactus, - trees, - monocot, - rocks, - underwater, - creatures, - lighting, - weather -) -from infinigen.assets.scatters import grass, pebbles +from infinigen.assets.objects.wall_decorations.skirting_board import make_skirting_board from infinigen.assets.utils.decorate import read_co - -from infinigen.core.placement import density, camera as cam_util, split_in_view - -from infinigen_examples.indoor_constraint_examples import home_constraints -from infinigen.core import ( - execute_tasks, - surface, - init, - placement, - tags as t, - tagging +from infinigen.core import execute_tasks, init, placement, surface, tagging +from infinigen.core import tags as t +from infinigen.core.constraints import checks +from infinigen.core.constraints import constraint_language as cl +from infinigen.core.constraints import reasoning as r +from infinigen.core.constraints.example_solver import ( + Solver, + greedy, + populate, + state_def, ) -from infinigen.core.util import (blender as butil, pipeline) - -from infinigen.core.constraints.example_solver.room import decorate as room_dec, constants -from infinigen.core.constraints.example_solver import state_def, greedy, populate, Solver - +from infinigen.core.constraints.example_solver.room import constants +from infinigen.core.constraints.example_solver.room import decorate as room_dec from infinigen.core.constraints.example_solver.room.constants import WALL_HEIGHT +from infinigen.core.placement import camera as cam_util +from infinigen.core.util import blender as butil +from infinigen.core.util import pipeline from infinigen.core.util.camera import points_inview - -from infinigen_examples.generate_nature import compose_nature # so gin can find it +from infinigen.terrain import Terrain +from infinigen_examples.indoor_constraint_examples import home_constraints from infinigen_examples.util import constraint_util as cu from infinigen_examples.util.generate_indoors_util import ( + apply_greedy_restriction, create_outdoor_backdrop, - place_cam_overhead, - overhead_view, hide_other_rooms, + place_cam_overhead, restrict_solving, - apply_greedy_restriction ) +from . import generate_nature # noqa F401 # needed for nature gin configs to load + logger = logging.getLogger(__name__) -def default_greedy_stages(): +def default_greedy_stages(): """Returns descriptions of what will be covered by each greedy stage of the solver. 
- Any domain containing one or more VariableTags is greedy: it produces many separate domains, + Any domain containing one or more VariableTags is greedy: it produces many separate domains, one for each possible assignment of the unresolved variables. """ @@ -100,39 +73,47 @@ def default_greedy_stages(): all_room = r.Domain({t.Semantics.Room, -t.Semantics.Object}) all_obj = r.Domain({t.Semantics.Object, -t.Semantics.Room}) - all_obj_in_room = all_obj.with_relation(cl.AnyRelation(), all_room.with_tags(cu.variable_room)) + all_obj_in_room = all_obj.with_relation( + cl.AnyRelation(), all_room.with_tags(cu.variable_room) + ) primary = all_obj_in_room.with_relation(-cl.AnyRelation(), all_obj) greedy_stages = {} - greedy_stages['rooms'] = all_room - - greedy_stages['on_floor'] = primary.with_relation(on_floor, all_room) - greedy_stages['on_wall'] = ( + greedy_stages["rooms"] = all_room + + greedy_stages["on_floor"] = primary.with_relation(on_floor, all_room) + greedy_stages["on_wall"] = ( primary.with_relation(-on_floor, all_room) .with_relation(-on_ceiling, all_room) .with_relation(on_wall, all_room) ) - greedy_stages['on_ceiling'] = ( + greedy_stages["on_ceiling"] = ( primary.with_relation(-on_floor, all_room) .with_relation(on_ceiling, all_room) .with_relation(-on_wall, all_room) ) - secondary = all_obj.with_relation(cl.AnyRelation(), primary.with_tags(cu.variable_obj)) + secondary = all_obj.with_relation( + cl.AnyRelation(), primary.with_tags(cu.variable_obj) + ) - greedy_stages['side_obj'] = secondary.with_relation(side, all_obj) + greedy_stages["side_obj"] = secondary.with_relation(side, all_obj) nonside = secondary.with_relation(-side, all_obj) - greedy_stages['obj_ontop_obj'] = nonside.with_relation(cu.ontop, all_obj).with_relation(-cu.on, all_obj) - greedy_stages['obj_on_support'] = nonside.with_relation(cu.on, all_obj).with_relation(-cu.ontop, all_obj) - + greedy_stages["obj_ontop_obj"] = nonside.with_relation( + cu.ontop, all_obj + ).with_relation(-cu.on, all_obj) + greedy_stages["obj_on_support"] = nonside.with_relation( + cu.on, all_obj + ).with_relation(-cu.ontop, all_obj) return greedy_stages all_vars = [cu.variable_room, cu.variable_obj] + @gin.configurable def compose_indoors(output_folder: Path, scene_seed: int, **overrides): p = pipeline.RandomStageExecutor(scene_seed, output_folder, overrides) @@ -141,17 +122,19 @@ def compose_indoors(output_folder: Path, scene_seed: int, **overrides): def add_coarse_terrain(): terrain = Terrain( - scene_seed, - surface.registry, - task='coarse', - on_the_fly_asset_folder=output_folder / "assets" + scene_seed, + surface.registry, + task="coarse", + on_the_fly_asset_folder=output_folder / "assets", ) terrain_mesh = terrain.coarse_terrain() # placement.density.set_tag_dict(terrain.tag_dict) return terrain, terrain_mesh - terrain, terrain_mesh = p.run_stage('terrain', add_coarse_terrain, use_chance=False, default=(None, None)) - p.run_stage('sky_lighting', lighting.sky_lighting.add_lighting, use_chance=False) + terrain, terrain_mesh = p.run_stage( + "terrain", add_coarse_terrain, use_chance=False, default=(None, None) + ) + p.run_stage("sky_lighting", lighting.sky_lighting.add_lighting, use_chance=False) consgraph = home_constraints() stages = default_greedy_stages() @@ -159,258 +142,390 @@ def add_coarse_terrain(): stages, consgraph, limits = restrict_solving(stages, consgraph) - if overrides.get('restrict_single_supported_roomtype', False): + if overrides.get("restrict_single_supported_roomtype", False): restrict_parent_rooms = { - 
np.random.choice([ - - # Only these roomtypes have constraints written in home_constraints. - # Others will be empty-ish besides maybe storage and plants - # TODO: add constraints to home_constraints for garages, offices, balconies, etc - - t.Semantics.Bedroom, - t.Semantics.LivingRoom, - t.Semantics.Kitchen, - t.Semantics.Bathroom, - t.Semantics.DiningRoom - ]) + np.random.choice( + [ + # Only these roomtypes have constraints written in home_constraints. + # Others will be empty-ish besides maybe storage and plants + # TODO: add constraints to home_constraints for garages, offices, balconies, etc + t.Semantics.Bedroom, + t.Semantics.LivingRoom, + t.Semantics.Kitchen, + t.Semantics.Bathroom, + t.Semantics.DiningRoom, + ] + ) } - logger.info(f'Restricting to {restrict_parent_rooms}') + logger.info(f"Restricting to {restrict_parent_rooms}") apply_greedy_restriction(stages, restrict_parent_rooms, cu.variable_room) solver = Solver(output_folder=output_folder) + def solve_rooms(): - return solver.solve_rooms(scene_seed, consgraph, stages['rooms']) - state: state_def.State = p.run_stage('solve_rooms', solve_rooms, use_chance=False) + return solver.solve_rooms(scene_seed, consgraph, stages["rooms"]) + + state: state_def.State = p.run_stage("solve_rooms", solve_rooms, use_chance=False) def solve_large(): assignments = greedy.iterate_assignments( - stages['on_floor'], state, all_vars, limits, nonempty=True + stages["on_floor"], state, all_vars, limits, nonempty=True ) for i, vars in enumerate(assignments): solver.solve_objects( - consgraph, - stages['on_floor'], - var_assignments=vars, - n_steps=overrides['solve_steps_large'], - desc=f"on_floor_{i}", - abort_unsatisfied=overrides.get('abort_unsatisfied_large', False) + consgraph, + stages["on_floor"], + var_assignments=vars, + n_steps=overrides["solve_steps_large"], + desc=f"on_floor_{i}", + abort_unsatisfied=overrides.get("abort_unsatisfied_large", False), ) return solver.state - state = p.run_stage('solve_large', solve_large, use_chance=False, default=state) + + state = p.run_stage("solve_large", solve_large, use_chance=False, default=state) solved_rooms = [ state.objs[assignment[cu.variable_room]].obj for assignment in greedy.iterate_assignments( - stages['on_floor'], state, [cu.variable_room], limits + stages["on_floor"], state, [cu.variable_room], limits ) - ] + ] solved_bound_points = np.concatenate([butil.bounds(r) for r in solved_rooms]) - solved_bbox = (np.min(solved_bound_points, axis=0), np.max(solved_bound_points, axis=0)) + solved_bbox = ( + np.min(solved_bound_points, axis=0), + np.max(solved_bound_points, axis=0), + ) - house_bbox = np.concatenate([butil.bounds(obj) for obj in solver.get_bpy_objects(r.Domain({t.Semantics.Room}))]) + house_bbox = np.concatenate( + [ + butil.bounds(obj) + for obj in solver.get_bpy_objects(r.Domain({t.Semantics.Room})) + ] + ) house_bbox = (np.min(house_bbox, axis=0), np.max(house_bbox, axis=0)) camera_rigs = placement.camera.spawn_camera_rigs() def pose_cameras(): - nonroom_objs = [ o.obj for o in state.objs.values() if t.Semantics.Room not in o.tags ] scene_objs = solved_rooms + nonroom_objs scene_preprocessed = placement.camera.camera_selection_preprocessing( - terrain=None, - scene_objs=scene_objs + terrain=None, scene_objs=scene_objs + ) + + solved_floor_surface = butil.join_objects( + [ + tagging.extract_tagged_faces(o, {t.Subpart.SupportSurface}) + for o in solved_rooms + ] ) - solved_floor_surface = butil.join_objects([ - tagging.extract_tagged_faces(o, {t.Subpart.SupportSurface}) - for o in 
solved_rooms - ]) - placement.camera.configure_cameras( camera_rigs, scene_preprocessed=scene_preprocessed, - init_surfaces=solved_floor_surface + init_surfaces=solved_floor_surface, ) return scene_preprocessed - - scene_preprocessed = p.run_stage('pose_cameras', pose_cameras, use_chance=False) + + scene_preprocessed = p.run_stage("pose_cameras", pose_cameras, use_chance=False) def animate_cameras(): cam_util.animate_cameras(camera_rigs, solved_bbox, scene_preprocessed, pois=[]) - p.run_stage('animate_cameras', animate_cameras, use_chance=False, prereq='pose_cameras') p.run_stage( - 'populate_intermediate_pholders', - populate.populate_state_placeholders, + "animate_cameras", animate_cameras, use_chance=False, prereq="pose_cameras" + ) + + p.run_stage( + "populate_intermediate_pholders", + populate.populate_state_placeholders, solver.state, - filter=t.Semantics.AssetPlaceholderForChildren, + filter=t.Semantics.AssetPlaceholderForChildren, final=False, - use_chance=False + use_chance=False, ) - + def solve_medium(): - n_steps = overrides['solve_steps_medium'] - for i, vars in enumerate(greedy.iterate_assignments(stages['on_wall'], state, all_vars, limits)): - solver.solve_objects(consgraph, stages['on_wall'], vars, n_steps, desc=f"on_wall_{i}") - for i, vars in enumerate(greedy.iterate_assignments(stages['on_ceiling'], state, all_vars, limits)): - solver.solve_objects(consgraph, stages['on_ceiling'], vars, n_steps, desc=f"on_ceiling_{i}") - for i, vars in enumerate(greedy.iterate_assignments(stages['side_obj'], state, all_vars, limits)): - solver.solve_objects(consgraph, stages['side_obj'], vars, n_steps, desc=f"side_obj_{i}") + n_steps = overrides["solve_steps_medium"] + for i, vars in enumerate( + greedy.iterate_assignments(stages["on_wall"], state, all_vars, limits) + ): + solver.solve_objects( + consgraph, stages["on_wall"], vars, n_steps, desc=f"on_wall_{i}" + ) + for i, vars in enumerate( + greedy.iterate_assignments(stages["on_ceiling"], state, all_vars, limits) + ): + solver.solve_objects( + consgraph, stages["on_ceiling"], vars, n_steps, desc=f"on_ceiling_{i}" + ) + for i, vars in enumerate( + greedy.iterate_assignments(stages["side_obj"], state, all_vars, limits) + ): + solver.solve_objects( + consgraph, stages["side_obj"], vars, n_steps, desc=f"side_obj_{i}" + ) return solver.state - state = p.run_stage('solve_medium', solve_medium, use_chance=False, default=state) + + state = p.run_stage("solve_medium", solve_medium, use_chance=False, default=state) def solve_small(): - n_steps = overrides['solve_steps_small'] - for i, vars in enumerate(greedy.iterate_assignments(stages['obj_ontop_obj'], state, all_vars, limits)): - solver.solve_objects(consgraph, stages['obj_ontop_obj'], vars, n_steps, desc=f"obj_ontop_obj_{i}") - for i, vars in enumerate(greedy.iterate_assignments(stages['obj_on_support'], state, all_vars, limits)): - solver.solve_objects(consgraph, stages['obj_on_support'], vars, n_steps, desc=f"obj_on_support_{i}") - #for i, vars in enumerate(greedy.iterate_assignments(stages['tertiary'], state, all_vars, limits)): + n_steps = overrides["solve_steps_small"] + for i, vars in enumerate( + greedy.iterate_assignments(stages["obj_ontop_obj"], state, all_vars, limits) + ): + solver.solve_objects( + consgraph, + stages["obj_ontop_obj"], + vars, + n_steps, + desc=f"obj_ontop_obj_{i}", + ) + for i, vars in enumerate( + greedy.iterate_assignments( + stages["obj_on_support"], state, all_vars, limits + ) + ): + solver.solve_objects( + consgraph, + stages["obj_on_support"], + vars, + 
n_steps, + desc=f"obj_on_support_{i}", + ) + # for i, vars in enumerate(greedy.iterate_assignments(stages['tertiary'], state, all_vars, limits)): # solver.solve_objects(consgraph, stages['tertiary'], vars, n_steps, desc=f"tertiary_{i}") return solver.state - state = p.run_stage('solve_small', solve_small, use_chance=False, default=state) - p.run_stage('populate_assets', populate.populate_state_placeholders, state, use_chance=False) - - door_filter = r.Domain({t.Semantics.Door}, [(cl.AnyRelation(), stages['rooms'])]) - window_filter = r.Domain({t.Semantics.Window}, [(cl.AnyRelation(), stages['rooms'])]) - p.run_stage('room_doors', lambda: room_dec.populate_doors(solver.get_bpy_objects(door_filter)), use_chance=False) - p.run_stage('room_windows', lambda: room_dec.populate_windows(solver.get_bpy_objects(window_filter)), use_chance=False) + state = p.run_stage("solve_small", solve_small, use_chance=False, default=state) + + p.run_stage( + "populate_assets", populate.populate_state_placeholders, state, use_chance=False + ) + + door_filter = r.Domain({t.Semantics.Door}, [(cl.AnyRelation(), stages["rooms"])]) + window_filter = r.Domain( + {t.Semantics.Window}, [(cl.AnyRelation(), stages["rooms"])] + ) + p.run_stage( + "room_doors", + lambda: room_dec.populate_doors(solver.get_bpy_objects(door_filter)), + use_chance=False, + ) + p.run_stage( + "room_windows", + lambda: room_dec.populate_windows(solver.get_bpy_objects(window_filter)), + use_chance=False, + ) room_meshes = solver.get_bpy_objects(r.Domain({t.Semantics.Room})) - p.run_stage('room_stairs', lambda: room_dec.room_stairs(state, room_meshes), use_chance=False) - p.run_stage('skirting_floor', lambda: make_skirting_board(room_meshes, t.Subpart.SupportSurface)) - p.run_stage('skirting_ceiling', lambda: make_skirting_board(room_meshes, t.Subpart.Ceiling)) + p.run_stage( + "room_stairs", + lambda: room_dec.room_stairs(state, room_meshes), + use_chance=False, + ) + p.run_stage( + "skirting_floor", + lambda: make_skirting_board(room_meshes, t.Subpart.SupportSurface), + ) + p.run_stage( + "skirting_ceiling", lambda: make_skirting_board(room_meshes, t.Subpart.Ceiling) + ) - rooms_meshed = butil.get_collection('placeholders:room_meshes') + rooms_meshed = butil.get_collection("placeholders:room_meshes") rooms_split = room_dec.split_rooms(list(rooms_meshed.objects)) - p.run_stage('room_walls', room_dec.room_walls, rooms_split['wall'].objects, use_chance=False) - p.run_stage('room_pillars', room_dec.room_pillars, state, rooms_split['wall'].objects, use_chance=False) - p.run_stage('room_floors', room_dec.room_floors, rooms_split['floor'].objects, use_chance=False) - p.run_stage('room_ceilings', room_dec.room_ceilings, rooms_split['ceiling'].objects, use_chance=False) + p.run_stage( + "room_walls", room_dec.room_walls, rooms_split["wall"].objects, use_chance=False + ) + p.run_stage( + "room_pillars", + room_dec.room_pillars, + state, + rooms_split["wall"].objects, + use_chance=False, + ) + p.run_stage( + "room_floors", + room_dec.room_floors, + rooms_split["floor"].objects, + use_chance=False, + ) + p.run_stage( + "room_ceilings", + room_dec.room_ceilings, + rooms_split["ceiling"].objects, + use_chance=False, + ) - #state.print() - state.to_json(output_folder / 'solve_state.json') + # state.print() + state.to_json(output_folder / "solve_state.json") cam = cam_util.get_camera(0, 0) - + def turn_off_lights(): for o in bpy.data.objects: - if o.type == 'LIGHT' and not o.data.cycles.is_portal: - print(f'Deleting {o.name}') + if o.type == "LIGHT" and not 
o.data.cycles.is_portal: + print(f"Deleting {o.name}") butil.delete(o) - p.run_stage('lights_off', turn_off_lights) + + p.run_stage("lights_off", turn_off_lights) def invisible_room_ceilings(): - rooms_split['exterior'].hide_viewport = True - rooms_split['exterior'].hide_render = True - invisible_to_camera.apply(list(rooms_split['ceiling'].objects)) - invisible_to_camera.apply([o for o in bpy.data.objects if 'CeilingLight' in o.name]) - p.run_stage('invisible_room_ceilings', invisible_room_ceilings, use_chance=False) + rooms_split["exterior"].hide_viewport = True + rooms_split["exterior"].hide_render = True + invisible_to_camera.apply(list(rooms_split["ceiling"].objects)) + invisible_to_camera.apply( + [o for o in bpy.data.objects if "CeilingLight" in o.name] + ) + + p.run_stage("invisible_room_ceilings", invisible_room_ceilings, use_chance=False) p.run_stage( - 'overhead_cam', - place_cam_overhead, - cam=camera_rigs[0], + "overhead_cam", + place_cam_overhead, + cam=camera_rigs[0], bbox=solved_bbox, - use_chance=False + use_chance=False, ) p.run_stage( - 'hide_other_rooms', + "hide_other_rooms", hide_other_rooms, - state, - rooms_split, + state, + rooms_split, keep_rooms=[r.name for r in solved_rooms], - use_chance=False + use_chance=False, ) height = p.run_stage( - 'nature_backdrop', - create_outdoor_backdrop, - terrain, + "nature_backdrop", + create_outdoor_backdrop, + terrain, house_bbox=house_bbox, cam=cam, - p=p, + p=p, params=overrides, use_chance=False, - prereq='terrain', + prereq="terrain", default=0, ) - if overrides.get('topview', False): - rooms_split['exterior'].hide_viewport = True - rooms_split['ceiling'].hide_viewport = True - rooms_split['exterior'].hide_render = True - rooms_split['ceiling'].hide_render = True - for group in ['wall', 'floor']: + if overrides.get("topview", False): + rooms_split["exterior"].hide_viewport = True + rooms_split["ceiling"].hide_viewport = True + rooms_split["exterior"].hide_render = True + rooms_split["ceiling"].hide_render = True + for group in ["wall", "floor"]: for wall in rooms_split[group].objects: for mat in wall.data.materials: for n in mat.node_tree.nodes: - if n.type == 'BSDF_PRINCIPLED': - n.inputs['Alpha'].default_value = overrides.get('alpha_walls', 1.) 
-        bbox = np.concatenate([read_co(r) + np.array(r.location)[np.newaxis, :] for r in rooms_meshed.objects])
+                        if n.type == "BSDF_PRINCIPLED":
+                            n.inputs["Alpha"].default_value = overrides.get(
+                                "alpha_walls", 1.0
+                            )
+        bbox = np.concatenate(
+            [
+                read_co(r) + np.array(r.location)[np.newaxis, :]
+                for r in rooms_meshed.objects
+            ]
+        )
         camera = camera_rigs[0].children[0]
         camera_rigs[0].location = 0, 0, 0
         camera_rigs[0].rotation_euler = 0, 0, 0
         bpy.context.scene.camera = camera
-        rot_x = deg2rad(overrides.get('topview_rot_x', 0))
-        rot_z = deg2rad(overrides.get('topview_rot_z', 0))
+        rot_x = deg2rad(overrides.get("topview_rot_x", 0))
+        rot_z = deg2rad(overrides.get("topview_rot_z", 0))
         camera.rotation_euler = rot_x, 0, rot_z
         mean = np.mean(bbox, 0)
-        for cam_dist in np.exp(np.linspace(1., 5., 500)):
-            camera.location = mean[0] + cam_dist * np.sin(rot_x) * np.sin(rot_z), mean[1] - cam_dist * np.sin(
-                rot_x) * np.cos(rot_z), mean[2] - WALL_HEIGHT / 2 + cam_dist * np.cos(rot_x)
+        for cam_dist in np.exp(np.linspace(1.0, 5.0, 500)):
+            camera.location = (
+                mean[0] + cam_dist * np.sin(rot_x) * np.sin(rot_z),
+                mean[1] - cam_dist * np.sin(rot_x) * np.cos(rot_z),
+                mean[2] - WALL_HEIGHT / 2 + cam_dist * np.cos(rot_x),
+            )
             bpy.context.view_layer.update()
             inview = points_inview(bbox, camera)
            if inview.all():
                for area in bpy.context.screen.areas:
-                    if area.type == 'VIEW_3D':
-                        area.spaces.active.region_3d.view_perspective = 'CAMERA'
+                    if area.type == "VIEW_3D":
+                        area.spaces.active.region_3d.view_perspective = "CAMERA"
                        break
                break
-
+
     return {
         "height_offset": height,
         "whole_bbox": house_bbox,
     }
-
 def main(args):
     scene_seed = init.apply_scene_seed(args.seed)
     init.apply_gin_configs(
-        configs=args.configs,
+        configs=["base_indoors.gin"] + args.configs,
         overrides=args.overrides,
-        configs_folder='infinigen_examples/configs_indoor'
+        config_folders=[
+            "infinigen_examples/configs_indoor",
+            "infinigen_examples/configs_nature",
+        ],
     )
     constants.initialize_constants()
-    execute_tasks.main(compose_scene_func=compose_indoors, input_folder=args.input_folder,
-        output_folder=args.output_folder, task=args.task, task_uniqname=args.task_uniqname,
-        scene_seed=scene_seed)
+    execute_tasks.main(
+        compose_scene_func=compose_indoors,
+        populate_scene_func=None,
+        input_folder=args.input_folder,
+        output_folder=args.output_folder,
+        task=args.task,
+        task_uniqname=args.task_uniqname,
+        scene_seed=scene_seed,
+    )
 if __name__ == "__main__":
     parser = argparse.ArgumentParser()
-    parser.add_argument('--output_folder', type=Path)
-    parser.add_argument('--input_folder', type=Path, default=None)
-    parser.add_argument('-s', '--seed', default=None, help="The seed used to generate the scene")
-    parser.add_argument('-t', '--task', nargs='+', default=['coarse'],
-                        choices=['coarse', 'populate', 'fine_terrain', 'ground_truth', 'render', 'mesh_save', 'export'])
-    parser.add_argument('-g', '--configs', nargs='+', default=['base'],
-                        help='Set of config files for gin (separated by spaces) '
-                        'e.g. --gin_config file1 file2 (exclude .gin from path)')
-    parser.add_argument('-p', '--overrides', nargs='+', default=[],
-                        help='Parameter settings that override config defaults '
-                        'e.g. 
--gin_param module_1.a=2 module_2.b=3') - parser.add_argument('--task_uniqname', type=str, default=None) - parser.add_argument('-d', '--debug', type=str, nargs='*', default=None) + parser.add_argument("--output_folder", type=Path) + parser.add_argument("--input_folder", type=Path, default=None) + parser.add_argument( + "-s", "--seed", default=None, help="The seed used to generate the scene" + ) + parser.add_argument( + "-t", + "--task", + nargs="+", + default=["coarse"], + choices=[ + "coarse", + "populate", + "fine_terrain", + "ground_truth", + "render", + "mesh_save", + "export", + ], + ) + parser.add_argument( + "-g", + "--configs", + nargs="+", + default=["base"], + help="Set of config files for gin (separated by spaces) " + "e.g. --gin_config file1 file2 (exclude .gin from path)", + ) + parser.add_argument( + "-p", + "--overrides", + nargs="+", + default=[], + help="Parameter settings that override config defaults " + "e.g. --gin_param module_1.a=2 module_2.b=3", + ) + parser.add_argument("--task_uniqname", type=str, default=None) + parser.add_argument("-d", "--debug", type=str, nargs="*", default=None) args = init.parse_args_blender(parser) logging.getLogger("infinigen").setLevel(logging.INFO) @@ -418,9 +533,9 @@ def main(args): if args.debug is not None: for name in logging.root.manager.loggerDict: - if not name.startswith('infinigen'): + if not name.startswith("infinigen"): continue if len(args.debug) == 0 or any(name.endswith(x) for x in args.debug): logging.getLogger(name).setLevel(logging.DEBUG) - main(args) \ No newline at end of file + main(args) diff --git a/infinigen_examples/generate_material_balls.py b/infinigen_examples/generate_material_balls.py index 6a6c4be8c..4e5145958 100644 --- a/infinigen_examples/generate_material_balls.py +++ b/infinigen_examples/generate_material_balls.py @@ -8,100 +8,113 @@ # - Alex Raistrick # - Karhan Kayan - add fire option -import argparse import importlib -import math +import logging import os -import random -import re -import subprocess -import traceback -from itertools import product from pathlib import Path -import logging from numpy.random import uniform from tqdm import tqdm -from infinigen.assets.materials import metal_shader_list -from infinigen.assets.materials.woods import tiled_wood -from infinigen_examples.generate_individual_assets import adjust_cam_distance, make_args, parent, setup_camera -from infinigen_examples.util.test_utils import load_txt_list - -logging.basicConfig(format='[%(asctime)s.%(msecs)03d] [%(name)s] [%(levelname)s] | %(message)s', - datefmt='%H:%M:%S', level=logging.WARNING) +# ruff: noqa: E402 +# NOTE: logging config has to be before imports that use logging +logging.basicConfig( + format="[%(asctime)s.%(msecs)03d] [%(module)s] [%(levelname)s] | %(message)s", + datefmt="%H:%M:%S", + level=logging.INFO, +) import bpy import gin import numpy as np -from PIL import Image - -from infinigen.assets.fluid.fluid import set_obj_on_fire -from infinigen.core.tagging import tag_system -from infinigen.assets.lighting import sky_lighting, hdri_lighting, three_point_lighting, holdout_lighting -from infinigen.core import surface, init -from infinigen.core.placement import density, factory -from infinigen.core.util.camera import points_inview - -from infinigen.assets.utils.misc import assign_material, subclasses +from infinigen.assets.lighting import ( + hdri_lighting, + holdout_lighting, + sky_lighting, + three_point_lighting, +) +from infinigen.assets.materials.woods import tiled_wood +from infinigen.assets.utils.decorate 
import read_base_co +from infinigen.assets.utils.misc import subclasses +from infinigen.core import init, surface +from infinigen.core.placement import factory from infinigen.core.rendering.render import enable_gpu -from infinigen.assets.utils.decorate import read_base_co, read_co -from infinigen.core.util.math import FixedSeed # noinspection PyUnresolvedReferences from infinigen.core.util import blender as butil +from infinigen.core.util.math import FixedSeed +from infinigen_examples.generate_individual_assets import ( + adjust_cam_distance, + make_args, + setup_camera, +) +from infinigen_examples.util.test_utils import load_txt_list + +logging.basicConfig( + format="[%(asctime)s.%(msecs)03d] [%(name)s] [%(levelname)s] | %(message)s", + datefmt="%H:%M:%S", + level=logging.WARNING, +) def build_scene_surface(factory_name, idx): try: with gin.unlock_config(): try: - template = importlib.import_module(f'infinigen.assets.materials.{factory_name}') - except: - for subdir in os.listdir('infinigen/assets/materials'): + template = importlib.import_module( + f"infinigen.assets.materials.{factory_name}" + ) + except ImportError: + for subdir in os.listdir("infinigen/assets/materials"): with gin.unlock_config(): - module = importlib.import_module(f'infinigen.assets.materials.{subdir.split(".")[0]}') + module = importlib.import_module( + f'infinigen.assets.materials.{subdir.split(".")[0]}' + ) if hasattr(module, factory_name): template = getattr(module, factory_name) break else: - raise Exception(f'{factory_name} not Found.') + raise Exception(f"{factory_name} not Found.") if type(template) is type: template = template(idx) - if hasattr(template, 'make_sphere'): + if hasattr(template, "make_sphere"): asset = template.make_sphere() else: bpy.ops.mesh.primitive_ico_sphere_add(radius=1, subdivisions=7) asset = bpy.context.active_object with FixedSeed(idx): - if 'metal' in factory_name or 'sofa_fabric' in factory_name: - template.apply(asset, scale=.1) - elif 'hardwood' in factory_name: + if "metal" in factory_name or "sofa_fabric" in factory_name: + template.apply(asset, scale=0.1) + elif "hardwood" in factory_name: template.apply(asset, rotation=(np.pi / 2, 0, 0)) - elif 'brick' in factory_name: - template.apply(asset, height=uniform(.25, .3)) + elif "brick" in factory_name: + template.apply(asset, height=uniform(0.25, 0.3)) else: template.apply(asset) except ModuleNotFoundError: - raise Exception(f'{factory_name} not Found.') + raise Exception(f"{factory_name} not Found.") return asset def build_scene(path, factory_names, args): scene = bpy.context.scene - scene.render.engine = 'CYCLES' - scene.render.resolution_x, scene.render.resolution_y = map(int, args.resolution.split('x')) + scene.render.engine = "CYCLES" + scene.render.resolution_x, scene.render.resolution_y = map( + int, args.resolution.split("x") + ) scene.cycles.samples = args.samples butil.clear_scene() if not args.fire: bpy.context.scene.render.film_transparent = args.film_transparent - bpy.context.scene.world.node_tree.nodes['Background'].inputs[0].default_value[-1] = 0 + bpy.context.scene.world.node_tree.nodes["Background"].inputs[0].default_value[ + -1 + ] = 0 camera, center = setup_camera(args) - scale = .3 + scale = 0.3 assets = [] with tqdm(total=len(factory_names)) as pbar: for idx, factory_name in enumerate(factory_names): @@ -119,7 +132,7 @@ def build_scene(path, factory_names, args): bpy.ops.mesh.primitive_grid_add(size=1, x_subdivisions=400, y_subdivisions=400) asset = bpy.context.active_object asset.scale = [scale * len(assets) 
/ size * 4] * 3 - asset.location = (len(assets) // size - 1) * margin / 2, size // 2 * margin * .8, 0 + asset.location = (len(assets) // size - 1) * margin / 2, size // 2 * margin * 0.8, 0 tiled_wood.apply(asset, hscale=10, vscale=3) with FixedSeed(args.lighting): @@ -130,13 +143,15 @@ def build_scene(path, factory_names, args): three_point_lighting.add_lighting(asset) else: sky_lighting.add_lighting(camera) - nodes = bpy.data.worlds['World'].node_tree.nodes - sky_texture = [n for n in nodes if n.name.startswith('Sky Texture')][-1] + nodes = bpy.data.worlds["World"].node_tree.nodes + sky_texture = [n for n in nodes if n.name.startswith("Sky Texture")][-1] sky_texture.sun_elevation = np.deg2rad(args.elevation) - sky_texture.sun_rotation = np.pi * .75 + sky_texture.sun_rotation = np.pi * 0.75 if args.scale_reference: - bpy.ops.mesh.primitive_cylinder_add(radius=0.3, depth=1.8, location=(4.9, 4.9, 1.8 / 2)) + bpy.ops.mesh.primitive_cylinder_add( + radius=0.3, depth=1.8, location=(4.9, 4.9, 1.8 / 2) + ) if args.cam_center > 0 and asset: co = read_base_co(asset) + asset.location @@ -144,50 +159,55 @@ def build_scene(path, factory_names, args): center.location[-1] += args.cam_zoff if args.cam_dist <= 0 and asset: - adjust_cam_distance(asset, camera, args.margin, .6) + adjust_cam_distance(asset, camera, args.margin, 0.6) - cam_info_ng = bpy.data.node_groups.get('nodegroup_active_cam_info') + cam_info_ng = bpy.data.node_groups.get("nodegroup_active_cam_info") if cam_info_ng is not None: - cam_info_ng.nodes['Object Info'].inputs['Object'].default_value = camera + cam_info_ng.nodes["Object Info"].inputs["Object"].default_value = camera if args.save_blend: - (path / 'scenes').mkdir(exist_ok=True) + (path / "scenes").mkdir(exist_ok=True) butil.save_blend(f"{path}/scenes/scene_{idx:03d}.blend", autopack=True) def main(args): - bpy.context.window.workspace = bpy.data.workspaces['Geometry Nodes'] + bpy.context.window.workspace = bpy.data.workspaces["Geometry Nodes"] - init.apply_gin_configs('infinigen_examples/configs_indoor', skip_unknown=True) + init.apply_gin_configs("infinigen_examples/configs_indoor", skip_unknown=True) surface.registry.initialize_from_gin() - extras = '[%(filename)s:%(lineno)d] ' if args.loglevel == logging.DEBUG else '' - logging.basicConfig(format=f'[%(asctime)s.%(msecs)03d] [%(name)s] [%(levelname)s] {extras}| %(message)s', - level=args.loglevel, datefmt='%H:%M:%S') + extras = "[%(filename)s:%(lineno)d] " if args.loglevel == logging.DEBUG else "" + logging.basicConfig( + format=f"[%(asctime)s.%(msecs)03d] [%(name)s] [%(levelname)s] {extras}| %(message)s", + level=args.loglevel, + datefmt="%H:%M:%S", + ) logging.getLogger("infinigen").setLevel(args.loglevel) - if '.txt' in args.factories[0]: - name = args.factories[0].split('.')[-2].split('/')[-1] + if ".txt" in args.factories[0]: + name = args.factories[0].split(".")[-2].split("/")[-1] else: - name = '_'.join(args.factories) - path = Path(os.getcwd()) / 'outputs' / name + name = "_".join(args.factories) + path = Path(os.getcwd()) / "outputs" / name path.mkdir(exist_ok=True) if args.gpu: enable_gpu() factories = list(args.factories) - if 'ALL_ASSETS' in factories: + if "ALL_ASSETS" in factories: factories += [f.__name__ for f in subclasses(factory.AssetFactory)] - factories.remove('ALL_ASSETS') - if 'ALL_SCATTERS' in factories: - factories += [f.stem for f in Path('surfaces/scatters').iterdir()] - factories.remove('ALL_SCATTERS') - if 'ALL_MATERIALS' in factories: - factories += [f.stem for f in 
Path('infinigen/assets/materials').iterdir()] - factories.remove('ALL_MATERIALS') - if '.txt' in factories[0]: - factories = [f.split('.')[-1] for f in load_txt_list(factories[0], skip_sharp=False)] + factories.remove("ALL_ASSETS") + if "ALL_SCATTERS" in factories: + factories += [f.stem for f in Path("surfaces/scatters").iterdir()] + factories.remove("ALL_SCATTERS") + if "ALL_MATERIALS" in factories: + factories += [f.stem for f in Path("infinigen/assets/materials").iterdir()] + factories.remove("ALL_MATERIALS") + if ".txt" in factories[0]: + factories = [ + f.split(".")[-1] for f in load_txt_list(factories[0], skip_sharp=False) + ] try: build_scene(path, factories, args) @@ -195,7 +215,7 @@ def main(args): print(e) -if __name__ == '__main__': +if __name__ == "__main__": args = make_args() args.no_mod = args.no_mod or args.fire args.film_transparent = args.film_transparent and not args.hdri diff --git a/infinigen_examples/generate_nature.py b/infinigen_examples/generate_nature.py index 0a4d7cfee..e1b164060 100644 --- a/infinigen_examples/generate_nature.py +++ b/infinigen_examples/generate_nature.py @@ -2,446 +2,1019 @@ # This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. import argparse -import os -import sys -from pathlib import Path import itertools import logging - -logging.basicConfig( - format='[%(asctime)s.%(msecs)03d] [%(name)s] [%(levelname)s] | %(message)s', - datefmt='%H:%M:%S', - level=logging.WARNING -) +from pathlib import Path import bpy -import mathutils -from mathutils import Vector import gin +import mathutils import numpy as np -from numpy.random import uniform, normal, randint - -logging.basicConfig(level=logging.INFO) - -from infinigen.core.placement import ( - particles, placement, density, - camera as cam_util, - split_in_view, factory, - animation_policy, instance_scatter, detail, -) +from mathutils import Vector +from numpy.random import randint, uniform -from infinigen.assets.scatters import ( - pebbles, grass, ground_leaves, ground_twigs, \ - chopped_trees, pinecone, fern, flowerplant, monocot as monocots, ground_mushroom, \ - slime_mold, moss, ivy, lichen, mushroom, decorative_plants, seashells, \ - pine_needle, seaweed, coral_reef, jellyfish, urchin +# ruff: noqa: E402 +# NOTE: logging config has to be before imports that use logging +logging.basicConfig( + format="[%(asctime)s.%(msecs)03d] [%(module)s] [%(levelname)s] | %(message)s", + datefmt="%H:%M:%S", + level=logging.INFO, ) +# unused imports required for gin to find modules currently, # TODO remove +# ruff: noqa: F401 +from infinigen.assets import fluid, lighting, weather from infinigen.assets.materials import ( - mountain, sand, water, atmosphere_light_haze, sandstone, cracked_ground, \ - soil, dirt, cobble_stone, chunkyrock, stone, lava, ice, mud, snow + atmosphere_light_haze, + chunkyrock, + cobble_stone, + cracked_ground, + dirt, + ice, + lava, + mountain, + mud, + sand, + sandstone, + snow, + soil, + stone, + water, ) - -from infinigen.assets import ( - fluid, - cactus, - cactus, - trees, - monocot, - rocks, - underwater, - creatures, - lighting, - weather +from infinigen.assets.objects import ( + cactus, + cloud, + creatures, + leaves, + monocot, + particles, + rocks, + trees, ) -from infinigen.terrain import Terrain - -from infinigen.core.util import ( - blender as butil, - logging as logging_util, - pipeline, +from infinigen.assets.scatters import ( + chopped_trees, + coral_reef, + decorative_plants, + fern, + 
flowerplant, + grass, + ground_leaves, + ground_mushroom, + ground_twigs, + ivy, + jellyfish, + lichen, + monocots, + moss, + pebbles, + pine_needle, + pinecone, + seashells, + seaweed, + slime_mold, + snow_layer, + urchin, ) -from infinigen.core.util.organization import Tags -from infinigen.core.util.random import sample_registry, random_general +from infinigen.assets.scatters.utils.selection import scatter_lower, scatter_upward +from infinigen.core import execute_tasks, init, surface +from infinigen.core.placement import camera as cam_util +from infinigen.core.placement import density, placement, split_in_view +from infinigen.core.util import blender as butil +from infinigen.core.util import logging as logging_util +from infinigen.core.util import pipeline from infinigen.core.util.math import FixedSeed, int_hash -from infinigen.core import execute_tasks, surface, init +from infinigen.core.util.organization import Tags, Task +from infinigen.core.util.pipeline import RandomStageExecutor +from infinigen.core.util.random import random_general, sample_registry +from infinigen.terrain import Terrain + +logger = logging.getLogger(__name__) + @gin.configurable def compose_nature(output_folder, scene_seed, **params): - p = pipeline.RandomStageExecutor(scene_seed, output_folder, params) def add_coarse_terrain(): - terrain = Terrain(scene_seed, surface.registry, task='coarse', on_the_fly_asset_folder=output_folder/"assets") + terrain = Terrain( + scene_seed, + surface.registry, + task="coarse", + on_the_fly_asset_folder=output_folder / "assets", + ) terrain_mesh = terrain.coarse_terrain() density.set_tag_dict(terrain.tag_dict) return terrain, terrain_mesh - terrain, terrain_mesh = p.run_stage('terrain', add_coarse_terrain, use_chance=False, default=(None, None)) - + + terrain, terrain_mesh = p.run_stage( + "terrain", add_coarse_terrain, use_chance=False, default=(None, None) + ) + if terrain_mesh is None: terrain_mesh = butil.create_noise_plane() density.set_tag_dict({}) - scene_bvh = mathutils.bvhtree.BVHTree.FromObject(terrain_mesh, bpy.context.evaluated_depsgraph_get()) + scene_bvh = mathutils.bvhtree.BVHTree.FromObject( + terrain_mesh, bpy.context.evaluated_depsgraph_get() + ) - land_domain = params.get('land_domain_tags') - underwater_domain = params.get('underwater_domain_tags') - nonliving_domain = params.get('nonliving_domain_tags') + land_domain = params.get("land_domain_tags") + underwater_domain = params.get("underwater_domain_tags") + nonliving_domain = params.get("nonliving_domain_tags") - p.run_stage('fancy_clouds', weather.kole_clouds.add_kole_clouds) + p.run_stage("fancy_clouds", weather.kole_clouds.add_kole_clouds) - season = p.run_stage('season', trees.random_season, use_chance=False) - logging.info(f'{season=}') + season = p.run_stage("season", trees.random_season, use_chance=False) + logging.info(f"{season=}") def choose_forest_params(): # params to be shared between unique and instanced trees n_tree_species = randint(1, params.get("max_tree_species", 3) + 1) - tree_params = lambda: { - 'density': params.get("tree_density", uniform(0.045, 0.15)) / n_tree_species, - 'distance_min': uniform(1, 2.5), - 'select_scale': uniform(0.03, 0.3) - } + + def tree_params(): + return { + "density": params.get("tree_density", uniform(0.045, 0.15)) + / n_tree_species, + "distance_min": uniform(1, 2.5), + "select_scale": uniform(0.03, 0.3), + } + return [tree_params() for _ in range(n_tree_species)] - tree_species_params = p.run_stage('forest_params', choose_forest_params, use_chance=False) + + 
tree_species_params = p.run_stage( + "forest_params", choose_forest_params, use_chance=False + ) def add_trees(terrain_mesh): for i, params in enumerate(tree_species_params): fac = trees.TreeFactory(np.random.randint(1e7), coarse=True) - selection = density.placement_mask(params['select_scale'], tag=land_domain) - placement.scatter_placeholders_mesh(terrain_mesh, fac, selection=selection, altitude=-0.1, - overall_density=params['density'], distance_min=params['distance_min']) - p.run_stage('trees', add_trees, terrain_mesh) + selection = density.placement_mask(params["select_scale"], tag=land_domain) + placement.scatter_placeholders_mesh( + terrain_mesh, + fac, + selection=selection, + altitude=-0.1, + overall_density=params["density"], + distance_min=params["distance_min"], + ) + + p.run_stage("trees", add_trees, terrain_mesh) def add_bushes(terrain_mesh): n_bush_species = randint(1, params.get("max_bush_species", 2) + 1) for i in range(n_bush_species): - spec_density = params.get("bush_density", uniform(0.03, 0.12)) / n_bush_species + spec_density = ( + params.get("bush_density", uniform(0.03, 0.12)) / n_bush_species + ) fac = trees.BushFactory(int_hash((scene_seed, i)), coarse=True) - selection = density.placement_mask(uniform(0.015, 0.2), normal_thresh=0.3, - select_thresh=uniform(0.5, 0.6), tag=land_domain) - placement.scatter_placeholders_mesh(terrain_mesh, fac, altitude=-0.05, - overall_density=spec_density, distance_min=uniform(0.05, 0.3), - selection=selection) - p.run_stage('bushes', add_bushes, terrain_mesh) + selection = density.placement_mask( + uniform(0.015, 0.2), + normal_thresh=0.3, + select_thresh=uniform(0.5, 0.6), + tag=land_domain, + ) + placement.scatter_placeholders_mesh( + terrain_mesh, + fac, + altitude=-0.05, + overall_density=spec_density, + distance_min=uniform(0.05, 0.3), + selection=selection, + ) + + p.run_stage("bushes", add_bushes, terrain_mesh) def add_clouds(terrain_mesh): - cloud_factory = weather.CloudFactory(int_hash((scene_seed, 0)), coarse=True, terrain_mesh=terrain_mesh) + cloud_factory = cloud.CloudFactory( + int_hash((scene_seed, 0)), coarse=True, terrain_mesh=terrain_mesh + ) placement.scatter_placeholders(cloud_factory.spawn_locations(), cloud_factory) - p.run_stage('clouds', add_clouds, terrain_mesh) + + p.run_stage("clouds", add_clouds, terrain_mesh) def add_boulders(terrain_mesh): n_boulder_species = randint(1, params.get("max_boulder_species", 5)) for i in range(n_boulder_species): - selection = density.placement_mask(0.05, tag=nonliving_domain, select_thresh=uniform(0.55, 0.6)) + selection = density.placement_mask( + 0.05, tag=nonliving_domain, select_thresh=uniform(0.55, 0.6) + ) fac = rocks.BoulderFactory(int_hash((scene_seed, i)), coarse=True) - placement.scatter_placeholders_mesh(terrain_mesh, fac, - overall_density=params.get("boulder_density", uniform(.02, .05)) / n_boulder_species, - selection=selection, altitude=-0.25) - p.run_stage('boulders', add_boulders, terrain_mesh) + placement.scatter_placeholders_mesh( + terrain_mesh, + fac, + overall_density=params.get("boulder_density", uniform(0.02, 0.05)) + / n_boulder_species, + selection=selection, + altitude=-0.25, + ) + + p.run_stage("boulders", add_boulders, terrain_mesh) fluid.cached_fire_scenecomp_options(p, terrain_mesh, params, tree_species_params) def add_glowing_rocks(terrain_mesh): - selection = density.placement_mask(uniform(0.03, 0.3), normal_thresh=-1.1, select_thresh=0, tag=Tags.Cave) + selection = density.placement_mask( + uniform(0.03, 0.3), normal_thresh=-1.1, 
select_thresh=0, tag=Tags.Cave + ) fac = rocks.GlowingRocksFactory(int_hash((scene_seed, 0)), coarse=True) - placement.scatter_placeholders_mesh(terrain_mesh, fac, - overall_density=params.get("glow_rock_density", 0.025), selection=selection) - p.run_stage('glowing_rocks', add_glowing_rocks, terrain_mesh) + placement.scatter_placeholders_mesh( + terrain_mesh, + fac, + overall_density=params.get("glow_rock_density", 0.025), + selection=selection, + ) + + p.run_stage("glowing_rocks", add_glowing_rocks, terrain_mesh) def add_kelp(terrain_mesh): fac = monocot.KelpMonocotFactory(int_hash((scene_seed, 0)), coarse=True) selection = density.placement_mask(scale=0.05, tag=underwater_domain) - placement.scatter_placeholders_mesh(terrain_mesh, fac, altitude=-0.05, - overall_density=params.get('kelp_density', uniform(.2, 1)), - selection=selection, distance_min=3) - p.run_stage('kelp', add_kelp, terrain_mesh) + placement.scatter_placeholders_mesh( + terrain_mesh, + fac, + altitude=-0.05, + overall_density=params.get("kelp_density", uniform(0.2, 1)), + selection=selection, + distance_min=3, + ) + + p.run_stage("kelp", add_kelp, terrain_mesh) def add_cactus(terrain_mesh): n_cactus_species = randint(2, params.get("max_cactus_species", 4)) for i in range(n_cactus_species): fac = cactus.CactusFactory(int_hash((scene_seed, i)), coarse=True) - selection = density.placement_mask(scale=.05, tag=land_domain, select_thresh=0.57) - placement.scatter_placeholders_mesh(terrain_mesh, fac, altitude=-0.05, - overall_density=params.get('cactus_density', uniform(.02, .1) / n_cactus_species), - selection=selection, distance_min=1) - p.run_stage('cactus', add_cactus, terrain_mesh) + selection = density.placement_mask( + scale=0.05, tag=land_domain, select_thresh=0.57 + ) + placement.scatter_placeholders_mesh( + terrain_mesh, + fac, + altitude=-0.05, + overall_density=params.get( + "cactus_density", uniform(0.02, 0.1) / n_cactus_species + ), + selection=selection, + distance_min=1, + ) + + p.run_stage("cactus", add_cactus, terrain_mesh) def camera_preprocess(): camera_rigs = cam_util.spawn_camera_rigs() scene_preprocessed = cam_util.camera_selection_preprocessing( - terrain, + terrain, terrain_mesh, - tags_ratio=params.get('camera_selection_tags_ratio'), - ranges_ratio=params.get('camera_selection_ranges_ratio'), - anim_criterion_keys=params.get('camera_selection_anim_criterion_keys', False), - ) - return camera_rigs, scene_preprocessed - camera_rigs, scene_preprocessed = p.run_stage('camera_preprocess', camera_preprocess, use_chance=False) - - bbox = terrain.get_bounding_box() if terrain is not None else butil.bounds(terrain_mesh) + tags_ratio=params.get("camera_selection_tags_ratio"), + ranges_ratio=params.get("camera_selection_ranges_ratio"), + anim_criterion_keys=params.get( + "camera_selection_anim_criterion_keys", False + ), + ) + return camera_rigs, scene_preprocessed + + camera_rigs, scene_preprocessed = p.run_stage( + "camera_preprocess", camera_preprocess, use_chance=False + ) + + bbox = ( + terrain.get_bounding_box() + if terrain is not None + else butil.bounds(terrain_mesh) + ) p.run_stage( - 'pose_cameras', - lambda: cam_util.configure_cameras(camera_rigs, scene_preprocessed, init_bounding_box=bbox), - use_chance=False + "pose_cameras", + lambda: cam_util.configure_cameras( + camera_rigs, scene_preprocessed, init_bounding_box=bbox + ), + use_chance=False, ) cam = cam_util.get_camera(0, 0) - p.run_stage('lighting', lighting.sky_lighting.add_lighting, cam, use_chance=False) - + p.run_stage("lighting", 
lighting.sky_lighting.add_lighting, cam, use_chance=False) + # determine a small area of the terrain for the creatures to run around on # must happen before camera is animated, as camera may want to follow them around - terrain_center, *_ = split_in_view.split_inview(terrain_mesh, cam=cam, - start=0, end=0, outofview=False, vis_margin=5, dist_max=params["center_distance"], - hide_render=True, suffix='center') + terrain_center, *_ = split_in_view.split_inview( + terrain_mesh, + cam=cam, + start=0, + end=0, + outofview=False, + vis_margin=5, + dist_max=params["center_distance"], + hide_render=True, + suffix="center", + ) deps = bpy.context.evaluated_depsgraph_get() - terrain_center_bvh = mathutils.bvhtree.BVHTree.FromObject(terrain_center, deps) - - pois = [] # objects / points of interest, for the camera to look at + mathutils.bvhtree.BVHTree.FromObject(terrain_center, deps) + + pois = [] # objects / points of interest, for the camera to look at def add_ground_creatures(target): - fac_class = sample_registry(params['ground_creature_registry']) - fac = fac_class(int_hash((scene_seed, 0)), bvh=scene_bvh, animation_mode='idle') - n = params.get('max_ground_creatures', randint(1, 4)) - selection = density.placement_mask(select_thresh=0, tag='beach', altitude_range=(-0.5, 0.5)) \ - if fac_class is creatures.CrabFactory else 1 - col = placement.scatter_placeholders_mesh(target, fac, num_placeholders=n, overall_density=1, selection=selection, altitude=0.2) + fac_class = sample_registry(params["ground_creature_registry"]) + fac = fac_class(int_hash((scene_seed, 0)), bvh=scene_bvh, animation_mode="idle") + n = params.get("max_ground_creatures", randint(1, 4)) + selection = ( + density.placement_mask( + select_thresh=0, tag="beach", altitude_range=(-0.5, 0.5) + ) + if fac_class is creatures.CrabFactory + else 1 + ) + col = placement.scatter_placeholders_mesh( + target, + fac, + num_placeholders=n, + overall_density=1, + selection=selection, + altitude=0.2, + ) return list(col.objects) - pois += p.run_stage('ground_creatures', add_ground_creatures, target=terrain_center, default=[]) + + pois += p.run_stage( + "ground_creatures", add_ground_creatures, target=terrain_center, default=[] + ) def flying_creatures(): - fac_class = sample_registry(params['flying_creature_registry']) - fac = fac_class(randint(1e7), bvh=scene_bvh, animation_mode='idle') - n = params.get('max_flying_creatures', randint(2, 7)) - col = placement.scatter_placeholders_mesh(terrain_center, fac, num_placeholders=n, overall_density=1, altitude=0.2) + fac_class = sample_registry(params["flying_creature_registry"]) + fac = fac_class(randint(1e7), bvh=scene_bvh, animation_mode="idle") + n = params.get("max_flying_creatures", randint(2, 7)) + col = placement.scatter_placeholders_mesh( + terrain_center, fac, num_placeholders=n, overall_density=1, altitude=0.2 + ) return list(col.objects) - pois += p.run_stage('flying_creatures', flying_creatures, default=[]) - p.run_stage('animate_cameras', lambda: cam_util.animate_cameras( - camera_rigs, bbox, scene_preprocessed, pois=pois), use_chance=False) + pois += p.run_stage("flying_creatures", flying_creatures, default=[]) - with logging_util.Timer('Compute coarse terrain frustrums'): + p.run_stage( + "animate_cameras", + lambda: cam_util.animate_cameras( + camera_rigs, bbox, scene_preprocessed, pois=pois + ), + use_chance=False, + ) + + with logging_util.Timer("Compute coarse terrain frustrums"): terrain_inview, *_ = split_in_view.split_inview( - terrain_mesh, verbose=True, outofview=False, 
print_areas=True, - cam=cam, vis_margin=2, dist_max=params['inview_distance'], hide_render=True, suffix='inview' + terrain_mesh, + verbose=True, + outofview=False, + print_areas=True, + cam=cam, + vis_margin=2, + dist_max=params["inview_distance"], + hide_render=True, + suffix="inview", ) terrain_near, *_ = split_in_view.split_inview( - terrain_mesh, verbose=True, outofview=False, print_areas=True, - cam=cam, vis_margin=2, dist_max=params['near_distance'], hide_render=True, suffix='near' + terrain_mesh, + verbose=True, + outofview=False, + print_areas=True, + cam=cam, + vis_margin=2, + dist_max=params["near_distance"], + hide_render=True, + suffix="near", ) - collider = butil.modify_mesh(butil.deep_clone_obj(terrain_near), 'COLLISION', apply=False, show_viewport=True) - collider.name = collider.name + '.collider' + collider = butil.modify_mesh( + butil.deep_clone_obj(terrain_near), + "COLLISION", + apply=False, + show_viewport=True, + ) + collider.name = collider.name + ".collider" collider.collision.use_culling = False - collider_col = butil.get_collection('colliders') + collider_col = butil.get_collection("colliders") butil.put_in_collection(collider, collider_col) - butil.modify_mesh(terrain_near, 'SUBSURF', levels=2, apply=True) + butil.modify_mesh(terrain_near, "SUBSURF", levels=2, apply=True) deps = bpy.context.evaluated_depsgraph_get() terrain_inview_bvh = mathutils.bvhtree.BVHTree.FromObject(terrain_inview, deps) - p.run_stage('caustics', lambda: lighting.caustics_lamp.add_caustics(terrain_near)) + p.run_stage("caustics", lambda: lighting.caustics_lamp.add_caustics(terrain_near)) def add_fish_school(): n = random_general(params.get("max_fish_schools", 3)) for i in range(n): - selection = density.placement_mask(0.1, select_thresh=0, tag=underwater_domain) + selection = density.placement_mask( + 0.1, select_thresh=0, tag=underwater_domain + ) fac = creatures.FishSchoolFactory(randint(1e7), bvh=terrain_inview_bvh) - col = placement.scatter_placeholders_mesh(terrain_near, fac, selection=selection, - overall_density=1, num_placeholders=1, altitude=2) + col = placement.scatter_placeholders_mesh( + terrain_near, + fac, + selection=selection, + overall_density=1, + num_placeholders=1, + altitude=2, + ) placement.populate_collection(fac, col) - p.run_stage('fish_school', add_fish_school, default=[]) + + p.run_stage("fish_school", add_fish_school, default=[]) def add_bug_swarm(): n = randint(1, params.get("max_bug_swarms", 3) + 1) selection = density.placement_mask(0.1, select_thresh=0, tag=land_domain) - fac = creatures.AntSwarmFactory(randint(1e7), bvh=terrain_inview_bvh, coarse=True) - col = placement.scatter_placeholders_mesh(terrain_inview, fac, - selection=selection, overall_density=1, num_placeholders=n, altitude=2) + fac = creatures.AntSwarmFactory( + randint(1e7), bvh=terrain_inview_bvh, coarse=True + ) + col = placement.scatter_placeholders_mesh( + terrain_inview, + fac, + selection=selection, + overall_density=1, + num_placeholders=n, + altitude=2, + ) placement.populate_collection(fac, col) - p.run_stage('bug_swarm', add_bug_swarm) + + p.run_stage("bug_swarm", add_bug_swarm) def add_rocks(target): - selection = density.placement_mask(scale=0.15, select_thresh=0.5, - normal_thresh=0.7, return_scalar=True, tag=nonliving_domain) + selection = density.placement_mask( + scale=0.15, + select_thresh=0.5, + normal_thresh=0.7, + return_scalar=True, + tag=nonliving_domain, + ) _, rock_col = pebbles.apply(target, selection=selection) return rock_col - p.run_stage('rocks', add_rocks, 
terrain_inview) + + p.run_stage("rocks", add_rocks, terrain_inview) def add_ground_leaves(target): - selection = density.placement_mask(scale=0.1, select_thresh=0.52, normal_thresh=0.7, return_scalar=True, tag=land_domain) + selection = density.placement_mask( + scale=0.1, + select_thresh=0.52, + normal_thresh=0.7, + return_scalar=True, + tag=land_domain, + ) ground_leaves.apply(target, selection=selection, season=season) - p.run_stage('ground_leaves', add_ground_leaves, terrain_near, prereq='trees') - + + p.run_stage("ground_leaves", add_ground_leaves, terrain_near, prereq="trees") + def add_ground_twigs(target): use_leaves = uniform() < 0.5 - selection = density.placement_mask(scale=0.15, select_thresh=0.55, normal_thresh=0.7, return_scalar=True, tag=nonliving_domain) + selection = density.placement_mask( + scale=0.15, + select_thresh=0.55, + normal_thresh=0.7, + return_scalar=True, + tag=nonliving_domain, + ) ground_twigs.apply(target, selection=selection, use_leaves=use_leaves) - p.run_stage('ground_twigs', add_ground_twigs, terrain_near) + + p.run_stage("ground_twigs", add_ground_twigs, terrain_near) def add_chopped_trees(target): - selection = density.placement_mask(scale=0.15, select_thresh=uniform(0.55, 0.6), - normal_thresh=0.7, return_scalar=True, tag=nonliving_domain) + selection = density.placement_mask( + scale=0.15, + select_thresh=uniform(0.55, 0.6), + normal_thresh=0.7, + return_scalar=True, + tag=nonliving_domain, + ) chopped_trees.apply(target, selection=selection) - p.run_stage('chopped_trees', add_chopped_trees, terrain_inview) + + p.run_stage("chopped_trees", add_chopped_trees, terrain_inview) def add_grass(target): - select_max = params.get('grass_select_max', 0.5) + select_max = params.get("grass_select_max", 0.5) selection = density.placement_mask( - normal_dir=(0, 0, 1), scale=0.1, tag=land_domain, - return_scalar=True, select_thresh=uniform(select_max/2, select_max)) + normal_dir=(0, 0, 1), + scale=0.1, + tag=land_domain, + return_scalar=True, + select_thresh=uniform(select_max / 2, select_max), + ) grass.apply(target, selection=selection) - p.run_stage('grass', add_grass, terrain_inview) + + p.run_stage("grass", add_grass, terrain_inview) def add_monocots(target): selection = density.placement_mask( - normal_dir=(0, 0, 1), scale=0.2, tag=land_domain) + normal_dir=(0, 0, 1), scale=0.2, tag=land_domain + ) monocots.apply(terrain_inview, grass=True, selection=selection) selection = density.placement_mask( - normal_dir=(0, 0, 1), scale=0.2, select_thresh=0.55, - tag=params.get("grass_habitats", None)) + normal_dir=(0, 0, 1), + scale=0.2, + select_thresh=0.55, + tag=params.get("grass_habitats", None), + ) monocots.apply(target, grass=False, selection=selection) - p.run_stage('monocots', add_monocots, terrain_inview) + + p.run_stage("monocots", add_monocots, terrain_inview) def add_ferns(target): - selection = density.placement_mask(normal_dir=(0, 0, 1), scale=0.1, - select_thresh=0.6, return_scalar=True, tag=land_domain) + selection = density.placement_mask( + normal_dir=(0, 0, 1), + scale=0.1, + select_thresh=0.6, + return_scalar=True, + tag=land_domain, + ) fern.apply(target, selection=selection) - p.run_stage('ferns', add_ferns, terrain_inview) + + p.run_stage("ferns", add_ferns, terrain_inview) def add_flowers(target): - selection = density.placement_mask(normal_dir=(0, 0, 1), scale=0.01, - select_thresh=0.6, return_scalar=True, tag=land_domain) + selection = density.placement_mask( + normal_dir=(0, 0, 1), + scale=0.01, + select_thresh=0.6, + return_scalar=True, + 
tag=land_domain, + ) flowerplant.apply(target, selection=selection) - p.run_stage('flowers', add_flowers, terrain_inview) + + p.run_stage("flowers", add_flowers, terrain_inview) def add_corals(target): - vertical_faces = density.placement_mask(scale=0.15, select_thresh=uniform(.44, .48)) - coral_reef.apply(target, selection=vertical_faces, tag=underwater_domain, - density=params.get('coral_density', 2.5)) - horizontal_faces = density.placement_mask(scale=.15, normal_thresh=-.4, normal_thresh_high=.4) - coral_reef.apply(target, selection=horizontal_faces, n=5, horizontal=True, tag=underwater_domain, - density=params.get('horizontal_coral_density', 2.5)) - p.run_stage('corals', add_corals, terrain_inview) - - p.run_stage('mushroom', lambda: ground_mushroom.Mushrooms().apply(terrain_near, - selection=density.placement_mask(scale=.1, select_thresh=.65, return_scalar=True, tag=land_domain), - density=params.get('mushroom_density', 2))) - - p.run_stage('seaweed', lambda: seaweed.apply(terrain_inview, - selection=density.placement_mask(scale=0.05, select_thresh=.5, normal_thresh=0.4, tag=underwater_domain))) - p.run_stage('urchin', lambda: urchin.apply(terrain_inview, - selection=density.placement_mask(scale=0.05, select_thresh=.5, tag=underwater_domain))) - p.run_stage('jellyfish', lambda: jellyfish.apply(terrain_inview, - selection=density.placement_mask(scale=0.05, select_thresh=.5, tag=underwater_domain))) - - p.run_stage('seashells', lambda: seashells.apply(terrain_near, - selection=density.placement_mask(scale=0.05, select_thresh=.5, tag='landscape,', return_scalar=True))) - p.run_stage('pinecone', lambda: pinecone.apply(terrain_near, - selection=density.placement_mask(scale=.1, select_thresh=.63, tag=land_domain))) - p.run_stage('pine_needle', lambda: pine_needle.apply(terrain_near, - selection=density.placement_mask(scale=uniform(0.05, 0.2), select_thresh=uniform(0.4, 0.55), tag=land_domain, return_scalar=True))) - p.run_stage('decorative_plants', lambda: decorative_plants.apply(terrain_near, - selection=density.placement_mask(scale=uniform(0.05, 0.2), select_thresh=uniform(0.5, 0.65), tag=land_domain, return_scalar=True))) - - p.run_stage('wind', weather.particles.wind_effector) - p.run_stage('turbulence', weather.particles.turbulence_effector) - emitter_off = Vector((0, 0, 5)) # to allow space to fall into frame from off screen - - def add_leaf_particles(): - return particles.particle_system( - emitter=butil.spawn_plane(location=emitter_off, size=60), - subject=trees.random_leaf_collection(n=5, season=season), - settings=particles.falling_leaf_settings()) - def add_rain_particles(): - return particles.particle_system( - emitter=butil.spawn_plane(location=emitter_off, size=30), - subject=factory.make_asset_collection(weather.particles.RaindropFactory(scene_seed), 5), - settings=particles.rain_settings()) - def add_dust_particles(): - return particles.particle_system( - emitter=butil.spawn_cube(location=Vector(), size=30), - subject=factory.make_asset_collection(weather.particles.DustMoteFactory(scene_seed), 5), - settings=particles.floating_dust_settings()) - def add_marine_snow_particles(): - return particles.particle_system( - emitter=butil.spawn_cube(location=Vector(), size=30), - subject=factory.make_asset_collection(weather.particles.DustMoteFactory(scene_seed), 5), - settings=particles.marine_snow_setting()) - def add_snow_particles(): - return particles.particle_system( - emitter=butil.spawn_plane(location=emitter_off, size=60), - 
subject=factory.make_asset_collection(weather.particles.SnowflakeFactory(scene_seed), 5), - settings=particles.snow_settings()) - - particle_systems = [ - p.run_stage('leaf_particles', add_leaf_particles, prereq='trees'), - p.run_stage('rain_particles', add_rain_particles), - p.run_stage('dust_particles', add_dust_particles), - p.run_stage('marine_snow_particles', add_marine_snow_particles), - p.run_stage('snow_particles', add_snow_particles), - ] - - for emitter, system in filter(lambda s: s is not None, particle_systems): - with logging_util.Timer(f"Baking particle system"): - butil.constrain_object(emitter, "COPY_LOCATION", use_offset=True, target=cam.parent) - particles.bake(emitter, system) - butil.put_in_collection(emitter, butil.get_collection('particles')) - - - placeholders = list(itertools.chain.from_iterable( - c.all_objects for c in bpy.data.collections if c.name.startswith('placeholders:') - )) - - add_simulated_river = lambda: fluid.make_river(terrain_mesh, placeholders, output_folder=output_folder) - p.run_stage('simulated_river', add_simulated_river, use_chance=False) - - add_tilted_river = lambda: fluid.make_tilted_river(terrain_mesh, placeholders, output_folder=output_folder) - p.run_stage('tilted_river', add_tilted_river, use_chance=False) - - p.save_results(output_folder/'pipeline_coarse.csv') + vertical_faces = density.placement_mask( + scale=0.15, select_thresh=uniform(0.44, 0.48) + ) + coral_reef.apply( + target, + selection=vertical_faces, + tag=underwater_domain, + density=params.get("coral_density", 2.5), + ) + horizontal_faces = density.placement_mask( + scale=0.15, normal_thresh=-0.4, normal_thresh_high=0.4 + ) + coral_reef.apply( + target, + selection=horizontal_faces, + n=5, + horizontal=True, + tag=underwater_domain, + density=params.get("horizontal_coral_density", 2.5), + ) + + p.run_stage("corals", add_corals, terrain_inview) + + p.run_stage( + "mushroom", + lambda: ground_mushroom.Mushrooms().apply( + terrain_near, + selection=density.placement_mask( + scale=0.1, select_thresh=0.65, return_scalar=True, tag=land_domain + ), + density=params.get("mushroom_density", 2), + ), + ) + + p.run_stage( + "seaweed", + lambda: seaweed.apply( + terrain_inview, + selection=density.placement_mask( + scale=0.05, select_thresh=0.5, normal_thresh=0.4, tag=underwater_domain + ), + ), + ) + p.run_stage( + "urchin", + lambda: urchin.apply( + terrain_inview, + selection=density.placement_mask( + scale=0.05, select_thresh=0.5, tag=underwater_domain + ), + ), + ) + p.run_stage( + "jellyfish", + lambda: jellyfish.apply( + terrain_inview, + selection=density.placement_mask( + scale=0.05, select_thresh=0.5, tag=underwater_domain + ), + ), + ) + + p.run_stage( + "seashells", + lambda: seashells.apply( + terrain_near, + selection=density.placement_mask( + scale=0.05, select_thresh=0.5, tag="landscape,", return_scalar=True + ), + ), + ) + p.run_stage( + "pinecone", + lambda: pinecone.apply( + terrain_near, + selection=density.placement_mask( + scale=0.1, select_thresh=0.63, tag=land_domain + ), + ), + ) + p.run_stage( + "pine_needle", + lambda: pine_needle.apply( + terrain_near, + selection=density.placement_mask( + scale=uniform(0.05, 0.2), + select_thresh=uniform(0.4, 0.55), + tag=land_domain, + return_scalar=True, + ), + ), + ) + p.run_stage( + "decorative_plants", + lambda: decorative_plants.apply( + terrain_near, + selection=density.placement_mask( + scale=uniform(0.05, 0.2), + select_thresh=uniform(0.5, 0.65), + tag=land_domain, + return_scalar=True, + ), + ), + ) + + 
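# --- Editorial note (not part of the patch): the scatter stages above and the particle/fluid stages below all go through the same RandomStageExecutor pattern. Summarizing only from the call sites visible in this file: positional arguments after the stage name are forwarded to the stage function, `use_chance=False` marks stages that must always run, `prereq="trees"` gates a stage on another stage having run, and `default=[]` is returned when a stage is skipped. A hypothetical usage sketch under those assumptions:
#
#     p = RandomStageExecutor(scene_seed, output_folder, params)
#     p.run_stage("rocks", add_rocks, terrain_inview)  # may be skipped, depending on config/chance
#     p.run_stage("ground_leaves", add_ground_leaves, terrain_near, prereq="trees")
#     camera_rigs, scene_preprocessed = p.run_stage("camera_preprocess", camera_preprocess, use_chance=False)  # always runs
#     pois = p.run_stage("flying_creatures", flying_creatures, default=[])  # returns [] if skipped
#
# The executor itself is defined elsewhere in infinigen; the sketch above only restates the usage already shown in this diff.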
p.run_stage("wind", lambda: weather.WindEffector(np.randint(1e7))(0)) + p.run_stage("turbulence", lambda: weather.TurbulenceEffector(np.randint(1e7))(0)) + + overhead_emitter = butil.spawn_plane(location=Vector((0, 0, 5)), size=60) + butil.constrain_object( + overhead_emitter, "COPY_LOCATION", use_offset=True, target=camera_rigs[0] + ) + + cube_emitter = butil.spawn_cube(location=Vector(), size=30) + butil.constrain_object( + cube_emitter, "COPY_LOCATION", use_offset=True, target=camera_rigs[0] + ) + + def leaf_particles(): + gen = weather.FallingParticles( + leaves.LeafFactoryV2(randint(1e7)), + distribution=weather.falling_leaf_param_distribution, + ) + return gen(overhead_emitter) + + p.run_stage("leaf_particles", leaf_particles, prereq="trees") + + def rain_particles(): + gen = weather.FallingParticles( + particles.RaindropFactory(randint(1e7)), + distribution=weather.rain_param_distribution, + ) + return gen(overhead_emitter) + + p.run_stage("rain_particles", rain_particles) + + def dust_particles(): + gen = weather.FallingParticles( + particles.DustMoteFactory(randint(1e7)), + distribution=weather.floating_dust_param_distribution, + ) + return gen(cube_emitter) + + p.run_stage("dust_particles", dust_particles) + + def marine_snow_particles(): + gen = weather.FallingParticles( + particles.MarineSnowFactory(randint(1e7)), + distribution=weather.marine_snow_param_distribution, + ) + return gen(cube_emitter) + + p.run_stage("marine_snow_particles", marine_snow_particles) + + def snow_particles(): + gen = weather.FallingParticles( + particles.SnowflakeFactory(randint(1e7)), + distribution=weather.snow_param_distribution, + ) + return gen(overhead_emitter) + + p.run_stage("snow_particles", snow_particles) + + placeholders = list( + itertools.chain.from_iterable( + c.all_objects + for c in bpy.data.collections + if c.name.startswith("placeholders:") + ) + ) + + def add_simulated_river(): + return fluid.make_river(terrain_mesh, placeholders, output_folder=output_folder) + + p.run_stage("simulated_river", add_simulated_river, use_chance=False) + + def add_tilted_river(): + return fluid.make_tilted_river( + terrain_mesh, placeholders, output_folder=output_folder + ) + + p.run_stage("tilted_river", add_tilted_river, use_chance=False) + + p.save_results(output_folder / "pipeline_coarse.csv") return { "height_offset": 0, "whole_bbox": None, } -def main(args): +@gin.configurable +def populate_scene(output_folder, scene_seed, **params): + p = RandomStageExecutor(scene_seed, output_folder, params) + camera = [cam_util.get_camera(i, j) for i, j in cam_util.get_cameras_ids()] + + season = p.run_stage( + "choose_season", trees.random_season, use_chance=False, default=[] + ) + + fire_cache_system = fluid.FireCachingSystem() if params.get("cached_fire") else None + + populated = {} + populated["trees"] = p.run_stage( + "populate_trees", + use_chance=False, + default=[], + fn=lambda: placement.populate_all( + trees.TreeFactory, camera, season=season, vis_cull=4 + ), + ) # , + # meshing_camera=camera, adapt_mesh_method='subdivide', cam_meshing_max_dist=8)) + populated["boulders"] = p.run_stage( + "populate_boulders", + use_chance=False, + default=[], + fn=lambda: placement.populate_all(rocks.BoulderFactory, camera, vis_cull=3), + ) # , + # meshing_camera=camera, adapt_mesh_method='subdivide', cam_meshing_max_dist=8)) + populated["bushes"] = p.run_stage( + "populate_bushes", + use_chance=False, + fn=lambda: placement.populate_all( + trees.BushFactory, camera, vis_cull=1, adapt_mesh_method="subdivide" + ), + 
) + p.run_stage( + "populate_kelp", + use_chance=False, + fn=lambda: placement.populate_all( + monocot.KelpMonocotFactory, camera, vis_cull=5 + ), + ) + populated["cactus"] = p.run_stage( + "populate_cactus", + use_chance=False, + fn=lambda: placement.populate_all(cactus.CactusFactory, camera, vis_cull=6), + ) + p.run_stage( + "populate_clouds", + use_chance=False, + fn=lambda: placement.populate_all( + cloud.CloudFactory, camera, dist_cull=None, vis_cull=None + ), + ) + p.run_stage( + "populate_glowing_rocks", + use_chance=False, + fn=lambda: placement.populate_all( + rocks.GlowingRocksFactory, camera, dist_cull=None, vis_cull=None + ), + ) + + populated["cached_fire_trees"] = p.run_stage( + "populate_cached_fire_trees", + use_chance=False, + default=[], + fn=lambda: placement.populate_all( + fluid.CachedTreeFactory, + camera, + season=season, + vis_cull=4, + dist_cull=70, + cache_system=fire_cache_system, + ), + ) + populated["cached_fire_boulders"] = p.run_stage( + "populate_cached_fire_boulders", + use_chance=False, + default=[], + fn=lambda: placement.populate_all( + fluid.CachedBoulderFactory, + camera, + vis_cull=3, + dist_cull=70, + cache_system=fire_cache_system, + ), + ) + populated["cached_fire_bushes"] = p.run_stage( + "populate_cached_fire_bushes", + use_chance=False, + fn=lambda: placement.populate_all( + fluid.CachedBushFactory, + camera, + vis_cull=1, + adapt_mesh_method="subdivide", + cache_system=fire_cache_system, + ), + ) + populated["cached_fire_cactus"] = p.run_stage( + "populate_cached_fire_cactus", + use_chance=False, + fn=lambda: placement.populate_all( + fluid.CachedCactusFactory, + camera, + vis_cull=6, + cache_system=fire_cache_system, + ), + ) + + grime_selection_funcs = { + "trees": scatter_lower, + "boulders": scatter_upward, + } + grime_types = { + "slime_mold": slime_mold.SlimeMold, + "lichen": lichen.Lichen, + "ivy": ivy.Ivy, + "mushroom": ground_mushroom.Mushrooms, + "moss": moss.MossCover, + } + + def apply_grime(grime_type, surface_cls): + surface_fac = surface_cls() + for ( + target_type, + results, + ) in populated.items(): + selection_func = grime_selection_funcs.get(target_type, None) + for fac_seed, fac_pholders, fac_assets in results: + if len(fac_pholders) == 0: + continue + for inst_seed, obj in fac_assets: + with FixedSeed(int_hash((grime_type, fac_seed, inst_seed))): + p_k = f"{grime_type}_on_{target_type}_per_instance_chance" + if uniform() > params.get(p_k, 0.4): + continue + logger.debug("Applying {surface_fac} on {obj}") + surface_fac.apply(obj, selection=selection_func) + + for grime_type, surface_cls in grime_types.items(): + p.run_stage(grime_type, lambda: apply_grime(grime_type, surface_cls)) + + def apply_snow_layer(surface_cls): + surface_fac = surface_cls() + for ( + target_type, + results, + ) in populated.items(): + selection_func = grime_selection_funcs.get(target_type, None) + for fac_seed, fac_pholders, fac_assets in results: + if len(fac_pholders) == 0: + continue + for inst_seed, obj in fac_assets: + tmp = obj.users_collection[0].hide_viewport + obj.users_collection[0].hide_viewport = False + surface_fac.apply(obj, selection=selection_func) + obj.users_collection[0].hide_viewport = tmp + + p.run_stage("snow_layer", lambda: apply_snow_layer(snow_layer.Snowlayer)) + + creature_facs = { + "beetles": creatures.BeetleFactory, + "bird": creatures.BirdFactory, + "carnivore": creatures.CarnivoreFactory, + "crab": creatures.CrabFactory, + "crustacean": creatures.CrustaceanFactory, + "dragonfly": creatures.DragonflyFactory, + "fish": 
creatures.FishFactory, + "flyingbird": creatures.FlyingBirdFactory, + "herbivore": creatures.HerbivoreFactory, + "snake": creatures.SnakeFactory, + } + for k, fac in creature_facs.items(): + p.run_stage( + f"populate_{k}", + use_chance=False, + fn=lambda: placement.populate_all(fac, camera=None), + ) + + fire_warmup = params.get("fire_warmup", 50) + simulation_duration = ( + bpy.context.scene.frame_end - bpy.context.scene.frame_start + fire_warmup + ) + + def set_fire(assets): + objs = [o for *_, a in assets for _, o in a] + with butil.EnableParentCollections(objs): + fluid.set_fire_to_assets( + assets, + bpy.context.scene.frame_start - fire_warmup, + simulation_duration, + output_folder, + ) + + p.run_stage( + "trees_fire_on_the_fly", set_fire, populated["trees"], prereq="populate_trees" + ) + p.run_stage( + "bushes_fire_on_the_fly", + set_fire, + populated["bushes"], + prereq="populate_bushes", + ) + p.run_stage( + "boulders_fire_on_the_fly", + set_fire, + populated["boulders"], + prereq="populate_boulders", + ) + p.run_stage( + "cactus_fire_on_the_fly", + set_fire, + populated["cactus"], + prereq="populate_cactus", + ) + + p.save_results(output_folder / "pipeline_fine.csv") + + +def main(args): scene_seed = init.apply_scene_seed(args.seed) - mandatory_exclusive = [Path('infinigen_examples/configs_nature/scene_types')] + mandatory_exclusive = [Path("infinigen_examples/configs_nature/scene_types")] init.apply_gin_configs( - configs=args.configs, + configs=["base_nature.gin"] + args.configs, overrides=args.overrides, - configs_folder='infinigen_examples/configs_nature', + config_folders="infinigen_examples/configs_nature", mandatory_folders=mandatory_exclusive, - mutually_exclusive_folders=mandatory_exclusive, + mutually_exclusive_folders=mandatory_exclusive, ) - + execute_tasks.main( compose_scene_func=compose_nature, - input_folder=args.input_folder, - output_folder=args.output_folder, - task=args.task, - task_uniqname=args.task_uniqname, - scene_seed=scene_seed + populate_scene_func=populate_scene, + input_folder=args.input_folder, + output_folder=args.output_folder, + task=args.task, + task_uniqname=args.task_uniqname, + scene_seed=scene_seed, ) -if __name__ == "__main__": +if __name__ == "__main__": parser = argparse.ArgumentParser() - parser.add_argument('--output_folder', type=Path) - parser.add_argument('--input_folder', type=Path, default=None) - parser.add_argument('-s', '--seed', default=None, help="The seed used to generate the scene") - parser.add_argument('-t', '--task', nargs='+', default=['coarse'], - choices=['coarse', 'populate', 'fine_terrain', 'ground_truth', 'render', 'mesh_save', 'export']) - parser.add_argument('-g', '--configs', nargs='+', default=['base'], - help='Set of config files for gin (separated by spaces) ' - 'e.g. --gin_config file1 file2 (exclude .gin from path)') - parser.add_argument('-p', '--overrides', nargs='+', default=[], - help='Parameter settings that override config defaults ' - 'e.g. 
--gin_param module_1.a=2 module_2.b=3') - parser.add_argument('--task_uniqname', type=str, default=None) - parser.add_argument('-d', '--debug', action="store_const", dest="loglevel", const=logging.DEBUG, default=logging.INFO) + parser.add_argument("--output_folder", type=Path) + parser.add_argument("--input_folder", type=Path, default=None) + parser.add_argument( + "-s", "--seed", default=None, help="The seed used to generate the scene" + ) + parser.add_argument( + "-t", + "--task", + nargs="+", + default=["coarse"], + choices=[ + "coarse", + "populate", + "fine_terrain", + "ground_truth", + "render", + "mesh_save", + "export", + ], + ) + parser.add_argument( + "-g", + "--configs", + nargs="+", + default=["base"], + help="Set of config files for gin (separated by spaces) " + "e.g. --gin_config file1 file2 (exclude .gin from path)", + ) + parser.add_argument( + "-p", + "--overrides", + nargs="+", + default=[], + help="Parameter settings that override config defaults " + "e.g. --gin_param module_1.a=2 module_2.b=3", + ) + parser.add_argument("--task_uniqname", type=str, default=None) + parser.add_argument( + "-d", + "--debug", + action="store_const", + dest="loglevel", + const=logging.DEBUG, + default=logging.INFO, + ) args = init.parse_args_blender(parser) logging.getLogger("infinigen").setLevel(args.loglevel) diff --git a/infinigen_examples/indoor_asset_semantics.py b/infinigen_examples/indoor_asset_semantics.py index 9ae3196c2..083bf3264 100644 --- a/infinigen_examples/indoor_asset_semantics.py +++ b/infinigen_examples/indoor_asset_semantics.py @@ -3,37 +3,36 @@ # Authors: Alexander Raistrick -from infinigen.assets import ( - appliances, - bathroom, - decor, - elements, - lighting, - seating, +from infinigen.assets.objects import ( + appliances, + bathroom, + clothes, + decor, + elements, + lamp, + seating, shelves, - table_decorations, - tables, - tableware, - wall_decorations, - windows, - clothes + table_decorations, + tables, + tableware, + wall_decorations, + windows, ) +from infinigen.core.tags import Semantics -from infinigen.core.tags import Semantics, Subpart, FromGenerator def home_asset_usage(): - - """ Defines what generators are consider to fulfill what roles in a home setting. + """Defines what generators are considered to fulfill what roles in a home setting. The primary effect of this is to determine what types of objects are returned by the square brackets [ ] operator in home_constraints - You can define these however you like - use + You can define these however you like. See the `Semantics` class in `infinigen.core.tags` for a list of possible semantics, or add your own. """ - # TODO: this whole used_as will be integrated into the constraint language. Currently there are two paralell semantics trees, one to define the tags and one to use them. + # TODO: this whole used_as will be integrated into the constraint language. Currently there are two parallel semantics trees, one to define the tags and one to use them.
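# --- Editorial note (illustrative only, not part of the patch): home_asset_usage() builds and returns a plain dict mapping Semantics tags to sets of asset factory classes. Downstream code such as home_constraints() resolves a tag back to its registered generators, conceptually:
#
#     used_as = home_asset_usage()
#     used_as[Semantics.Chair]
#     # -> {seating.BarChairFactory, seating.ChairFactory, seating.OfficeChairFactory}
#
# The actual bracket-operator lookup on object sets is implemented via infinigen.core.constraints (see the usage_lookup import in the next file); this sketch only previews the data structure assembled below.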
used_as = {} @@ -47,10 +46,7 @@ def home_asset_usage(): tableware.PotFactory, tableware.CupFactory, } - used_as[Semantics.Cookware] = { - tableware.PotFactory, - tableware.PanFactory - } + used_as[Semantics.Cookware] = {tableware.PotFactory, tableware.PanFactory} used_as[Semantics.Utensils] = { tableware.SpoonFactory, tableware.KnifeFactory, @@ -63,14 +59,14 @@ def home_asset_usage(): tableware.FoodBagFactory, tableware.FoodBoxFactory, tableware.JarFactory, - tableware.BottleFactory + tableware.BottleFactory, } used_as[Semantics.TableDisplayItem] = { tableware.FruitContainerFactory, table_decorations.VaseFactory, tableware.BowlFactory, - tableware.PotFactory + tableware.PotFactory, } used_as[Semantics.OfficeShelfItem] = { @@ -85,7 +81,7 @@ def home_asset_usage(): { table_decorations.BookColumnFactory, tableware.JarFactory, - } + }, ) used_as[Semantics.BathroomItem] = { @@ -96,7 +92,7 @@ def home_asset_usage(): used_as[Semantics.ClothDrapeItem] = { # objects that can be strewn about / draped over furniture - #clothes.BlanketFactory, + # clothes.BlanketFactory, clothes.PantsFactory, clothes.ShirtFactory, } @@ -117,7 +113,7 @@ def home_asset_usage(): used_as[Semantics.Sink] = { table_decorations.SinkFactory, bathroom.BathroomSinkFactory, - bathroom.StandingSinkFactory + bathroom.StandingSinkFactory, } used_as[Semantics.Storage] = { @@ -125,12 +121,12 @@ def home_asset_usage(): shelves.CellShelfFactory, shelves.LargeShelfFactory, shelves.KitchenCabinetFactory, - shelves.SingleCabinetFactory + shelves.SingleCabinetFactory, } used_as[Semantics.SideTable] = { shelves.SidetableDeskFactory, - tables.SideTableFactory + tables.SideTableFactory, } used_as[Semantics.Table] = set.union( @@ -138,15 +134,15 @@ def home_asset_usage(): { tables.TableDiningFactory, tables.TableCocktailFactory, - shelves.SimpleDeskFactory, + shelves.SimpleDeskFactory, tables.CoffeeTableFactory, - } + }, ) used_as[Semantics.Chair] = { seating.BarChairFactory, seating.ChairFactory, - seating.OfficeChairFactory + seating.OfficeChairFactory, } used_as[Semantics.LoungeSeating] = { @@ -163,7 +159,7 @@ def home_asset_usage(): appliances.DishwasherFactory, appliances.OvenFactory, appliances.BeverageFridgeFactory, - appliances.MicrowaveFactory + appliances.MicrowaveFactory, } used_as[Semantics.KitchenCounter] = { @@ -186,10 +182,9 @@ def home_asset_usage(): bathroom.StandingSinkFactory, bathroom.ToiletFactory, bathroom.BathtubFactory, - seating.SofaFactory, shelves.TVStandFactory, - } + }, ) # endregion furniture @@ -197,7 +192,7 @@ def home_asset_usage(): used_as[Semantics.WallDecoration] = { wall_decorations.WallArtFactory, wall_decorations.MirrorFactory, - wall_decorations.BalloonFactory + wall_decorations.BalloonFactory, } used_as[Semantics.Door] = { @@ -207,25 +202,22 @@ def home_asset_usage(): elements.doors.PanelDoorFactory, } - used_as[Semantics.Window] = { - windows.WindowFactory - } + used_as[Semantics.Window] = {windows.WindowFactory} used_as[Semantics.CeilingLight] = { - lighting.CeilingLightFactory, + lamp.CeilingLightFactory, } used_as[Semantics.Lighting] = set().union( used_as[Semantics.CeilingLight], { - lighting.LampFactory, - lighting.FloorLampFactory, - lighting.DeskLampFactory, - } + lamp.LampFactory, + lamp.FloorLampFactory, + lamp.DeskLampFactory, + }, ) used_as[Semantics.Object] = set().union( - used_as[Semantics.Furniture], used_as[Semantics.Sink], used_as[Semantics.Door], @@ -237,13 +229,11 @@ def home_asset_usage(): tableware.PlantContainerFactory, tableware.LargePlantContainerFactory, 
decor.AquariumTankFactory, - appliances.TVFactory, appliances.MonitorFactory, - elements.RugFactory, bathroom.HardwareFactory, - } + }, ) # region Extra metadata about assets @@ -252,42 +242,34 @@ def home_asset_usage(): used_as[Semantics.RealPlaceholder] = { appliances.MonitorFactory, appliances.TVFactory, - bathroom.BathroomSinkFactory, bathroom.StandingSinkFactory, bathroom.ToiletFactory, - decor.AquariumTankFactory, elements.RackFactory, elements.RugFactory, - seating.BedFrameFactory, seating.BedFactory, seating.ChairFactory, - shelves.KitchenSpaceFactory, - tables.TableCocktailFactory, - table_decorations.BookColumnFactory, table_decorations.BookFactory, table_decorations.BookStackFactory, table_decorations.SinkFactory, - tableware.BowlFactory, tableware.FoodBoxFactory, tableware.FruitContainerFactory, tableware.LargePlantContainerFactory, tableware.PlantContainerFactory, tableware.PotFactory, - wall_decorations.BalloonFactory, wall_decorations.MirrorFactory, wall_decorations.WallArtFactory, shelves.SingleCabinetFactory, shelves.KitchenCabinetFactory, shelves.CellShelfFactory, - elements.NatureShelfTrinketsFactory + elements.NatureShelfTrinketsFactory, } used_as[Semantics.AssetAsPlaceholder] = set() @@ -299,7 +281,7 @@ def home_asset_usage(): shelves.KitchenCabinetFactory, shelves.LargeShelfFactory, table_decorations.SinkFactory, - tables.TableCocktailFactory + tables.TableCocktailFactory, } used_as[Semantics.PlaceholderBBox] = { @@ -307,26 +289,28 @@ def home_asset_usage(): appliances.OvenFactory, } - used_as[Semantics.SingleGenerator] = set().union( - used_as[Semantics.Dishware], - used_as[Semantics.Utensils], - { - lighting.CeilingLightFactory, - lighting.CeilingClassicLampFactory, - seating.ChairFactory, - seating.BarChairFactory, - seating.OfficeChairFactory - } - ).difference({ - tableware.CupFactory - }) + used_as[Semantics.SingleGenerator] = ( + set() + .union( + used_as[Semantics.Dishware], + used_as[Semantics.Utensils], + { + lamp.CeilingLightFactory, + lamp.CeilingClassicLampFactory, + seating.ChairFactory, + seating.BarChairFactory, + seating.OfficeChairFactory, + }, + ) + .difference({tableware.CupFactory}) + ) used_as[Semantics.NoRotation] = set().union( used_as[Semantics.WallDecoration], { bathroom.HardwareFactory, - lighting.CeilingLightFactory, # rotationally symetric - } + lamp.CeilingLightFactory, # rotationally symetric + }, ) used_as[Semantics.NoCollision] = { @@ -337,9 +321,9 @@ def home_asset_usage(): elements.RugFactory, wall_decorations.MirrorFactory, wall_decorations.WallArtFactory, - lighting.CeilingLightFactory, + lamp.CeilingLightFactory, } # endregion - return used_as \ No newline at end of file + return used_as diff --git a/infinigen_examples/indoor_constraint_examples.py b/infinigen_examples/indoor_constraint_examples.py index cda7d78b3..4b6834a06 100644 --- a/infinigen_examples/indoor_constraint_examples.py +++ b/infinigen_examples/indoor_constraint_examples.py @@ -6,63 +6,60 @@ from collections import OrderedDict -import numpy as np -import random -from numpy.random import uniform, normal, randint - -import infinigen -import gin - -from infinigen.assets import ( - appliances, - bathroom, - decor, - elements, - lighting, - seating, +from numpy.random import uniform + +from infinigen.assets.objects import ( + appliances, + bathroom, + decor, + elements, + lamp, + seating, shelves, - table_decorations, - tables, - tableware, - wall_decorations, - windows, - clothes + table_decorations, + tables, + tableware, + wall_decorations, ) - -from 
infinigen.core.constraints import ( - constraint_language as cl, - example_solver, - usage_lookup -) - -from infinigen import assets - -from infinigen.core.util.math import clip_gaussian -from infinigen.core.tags import Semantics, Subpart, FromGenerator +from infinigen.core.constraints import constraint_language as cl +from infinigen.core.constraints import usage_lookup +from infinigen.core.tags import Semantics, Subpart from .indoor_asset_semantics import home_asset_usage from .util import constraint_util as cu + def sample_home_constraint_params(): return dict( - has_tv = uniform() < 0.5, - has_aquarium_tank = uniform() < 0.15, - has_birthday_balloons = uniform() < 0.15, - has_cocktail_tables = uniform() < 0.15, - has_kitchen_barstools = uniform() < 0.15, + # what pct of the room floorplan should we try to fill with furniture? + furniture_fullness_pct=uniform(0.6, 0.9), + # how many objects in each shelving per unit of volume + obj_interior_obj_pct=uniform(0.5, 1), # uniform(0.6, 0.9), + # what pct of top surface of storage furniture should be filled with objects? e.g pct of top surface of shelf + obj_on_storage_pct=uniform(0.1, 0.9), + # what pct of top surface of NON-STORAGE objects should be filled with objects? e.g pct of countertop/diningtable covered in stuff + obj_on_nonstorage_pct=uniform(0.1, 0.6), + # meters squared of wall art per approx meters squared of FLOOR area. TODO cant measure wall area currently. + painting_area_per_room_area=uniform(20, 60) / 40, + # rare objects wont even be added to the constraint graph in most homes + has_tv=uniform() < 0.5, + has_aquarium_tank=uniform() < 0.15, + has_birthday_balloons=uniform() < 0.15, + has_cocktail_tables=uniform() < 0.15, + has_kitchen_barstools=uniform() < 0.15, ) -def home_constraints(): +def home_constraints(): """Construct a constraint graph which incentivizes realistic home layouts. - + Result will contain both hard constraints (`constraints`) and soft constraints (`score_terms`). Notes for developers: - This function is typically evaluated ONCE. It is not called repeatedly during the optimization process. - To debug values you will need to inject print statements into impl_bindings.py or evaluate.py. Better debugging tools will come soon. - Similarly, most `lambda:` statements below will only be evaluated once to construct the graph - do not assume they will be re-evaluated during optimization. - - Available constraint options are in `infinigen/core/constraints/constraint_language/__init__.py`. + - Available constraint options are in `infinigen/core/constraints/constraint_language/__init__.py`. 
- You can easily add new constraint functions by adding them here, and defining evaluator functions for them in `impl_bindings.py` - Using newly added constraint types as hard constraints may be rejected by our hard constraint solver - It is quite easy to specify an impossible constraint program, or one that our solver cannot solve: @@ -85,7 +82,7 @@ def home_constraints(): constraints = OrderedDict() score_terms = OrderedDict() - #region overall fullness + # region overall fullness furniture = obj[Semantics.Furniture].related_to(rooms, cu.on_floor) wallfurn = furniture.related_to(rooms, cu.against_wall) @@ -96,396 +93,558 @@ def home_constraints(): for k, v in params.items(): print(f"{home_constraints.__name__} params - {k}: {v}") - score_terms['fullness'] = rooms.sum(lambda r: ( - obj.count().maximize(weight=4) # TODO re-incorporate more precise fullness scores above - + obj.volume().maximize(weight=1) - )) - - #endregion - - #region furniture - - score_terms['furniture_aesthetics'] = wallfurn.sum(lambda t: ( - t.distance(wallfurn).hinge(0.2, 0.6).maximize(weight=0.6) + - cl.accessibility_cost(t, furniture).minimize(weight=5) + - cl.accessibility_cost(t, rooms).minimize(weight=10) - )) - - - constraints['storage'] = rooms.all(lambda r: ( - storage.related_to(r).count().in_range(1, 7) - )) - score_terms['storage'] = rooms.sum(lambda r: ( - cl.accessibility_cost(storage.related_to(r), furniture.related_to(r), dist=0.5).minimize(weight=5) - + cl.accessibility_cost(storage.related_to(r), r, dist=0.5).minimize(weight=5) - )) - - #endregion furntiure - - score_terms['portal_accessibility'] = ( + score_terms["furniture_fullness"] = rooms.mean( + lambda r: ( + furniture.related_to(r) + .volume(dims=(0, 1)) + .safediv(r.volume(dims=(0, 1))) + .sub(params["furniture_fullness_pct"]) + .abs() + .minimize(weight=15) + ) + ) + + score_terms["obj_in_obj_fullness"] = rooms.mean( + lambda r: ( + furniture.related_to(r).mean( + lambda f: ( + obj.related_to(f, cu.on) + .volume() + .safediv(f.volume()) + .sub(params["obj_interior_obj_pct"]) + .abs() + .minimize(weight=10) + ) + ) + ) + ) + + def top_fullness_pct(f): + return ( + obj.related_to(f, cu.ontop) + .volume(dims=(0, 1)) + .safediv(f.volume(dims=(0, 1))) + ) + + score_terms["obj_ontop_storage_fullness"] = rooms.mean( + lambda r: ( + storage.related_to(r).mean( + lambda f: ( + top_fullness_pct(f) + .sub(params["obj_on_storage_pct"]) + .abs() + .minimize(weight=10) + ) + ) + ) + ) + + score_terms["obj_ontop_nonstorage_fullness"] = rooms.mean( + lambda r: ( + furniture[-Semantics.Storage] + .related_to(r) + .mean( + lambda f: ( + top_fullness_pct(f) + .sub(params["obj_on_nonstorage_pct"]) + .abs() + .minimize(weight=10) + ) + ) + ) + ) + + # endregion + + # region furniture + + score_terms["furniture_aesthetics"] = wallfurn.mean( + lambda t: ( + t.distance(wallfurn).hinge(0.2, 0.6).maximize(weight=0.6) + + cl.accessibility_cost(t, furniture).minimize(weight=5) + + cl.accessibility_cost(t, rooms).minimize(weight=10) + ) + ) + + constraints["storage"] = rooms.all( + lambda r: (storage.related_to(r).count().in_range(1, 7)) + ) + score_terms["storage"] = rooms.mean( + lambda r: ( + cl.accessibility_cost( + storage.related_to(r), furniture.related_to(r), dist=0.5 + ).minimize(weight=5) + + cl.accessibility_cost(storage.related_to(r), r, dist=0.5).minimize( + weight=5 + ) + ) + ) + + # endregion furntiure + + score_terms["portal_accessibility"] = ( # make sure the fronts of objects are accessible where applicable - #### disabled since its generally fine to 
block floor-to-ceiling windows a little - #window.sum(lambda t: ( + # window.mean(lambda t: ( # cl.accessibility_cost(t, furniture, np.array([0, -1, 0])) - #)).minimize(weight=2) + - - doors.sum(lambda t: ( - cl.accessibility_cost(t, furniture, cu.front_dir, dist=4) + - cl.accessibility_cost(t, furniture, cu.back_dir, dist=4) - )).minimize(weight=5) + # )).minimize(weight=2) + + doors.mean( + lambda t: ( + cl.accessibility_cost(t, furniture, cu.front_dir, dist=4) + + cl.accessibility_cost(t, furniture, cu.back_dir, dist=4) + ) + ).minimize(weight=5) ) - #region WALL/FLOOR COVERINGS + # region WALL/FLOOR COVERINGS walldec = obj[Semantics.WallDecoration].related_to(rooms, cu.flush_wall) wall_art = walldec[wall_decorations.WallArtFactory] mirror = walldec[wall_decorations.MirrorFactory] rugs = obj[elements.RugFactory].related_to(rooms, cu.on_floor) - constraints['rugs'] = rooms.all(lambda r: ( - rugs.related_to(r).distance(rugs) >= 1 - )) - - score_terms['rugs'] = rooms.all(lambda r: ( - cl.center_stable_surface_dist(rugs.related_to(r)).minimize(weight=1) - )) - - vertical_diff = lambda o, r: (o.distance(r, cu.floortags) - o.distance(r, cu.ceilingtags)).abs() - - constraints['wall_decorations'] = rooms.all(lambda r: ( - wall_art.related_to(r).count().in_range(0, 6) - * mirror.related_to(r).count().in_range(0, 1) - * walldec.related_to(r).all(lambda t: t.distance(r, cu.floortags) > 0.6) - #walldec.all(lambda t: ( - # (vertical_diff(t, r).abs() < 1.5) * - # (t.distance(cutters) > 0.1) - #)) - )) - score_terms['wall_decorations'] = rooms.sum(lambda r: ( - - walldec.related_to(r).sum(lambda w: ( - - vertical_diff(w, r).abs().minimize(weight=1) - + w.distance(walldec).maximize(weight=1) - + w.distance(window).hinge(0.25, 10).maximize(weight=1) - - + cl.angle_alignment_cost(w, r, cu.floortags).minimize(weight=5) - + cl.accessibility_cost(w, furniture, dist=1).minimize(weight=5) - + cl.center_stable_surface_dist(w).minimize(weight=1) - )) - )) - - score_terms['floor_covering'] = ( - rugs.sum(lambda rug: ( - rug.distance(rooms, cu.walltags).maximize(weight=3) + - cl.angle_alignment_cost(rug, rooms, cu.walltags).minimize(weight=3) - )) - ) - #endregion - - #region PLANTS + constraints["rugs"] = rooms.all(lambda r: (rugs.related_to(r).distance(rugs) >= 1)) + + score_terms["rugs"] = rooms.all( + lambda r: (cl.center_stable_surface_dist(rugs.related_to(r)).minimize(weight=1)) + ) + + def vertical_diff(o, r): + return (o.distance(r, cu.floortags) - o.distance(r, cu.ceilingtags)).abs() + + constraints["wall_decorations"] = rooms.all( + lambda r: ( + wall_art.related_to(r).count().in_range(0, 6) + * mirror.related_to(r).count().in_range(0, 1) + * walldec.related_to(r).all(lambda t: t.distance(r, cu.floortags) > 0.6) + # walldec.all(lambda t: ( + # (vertical_diff(t, r).abs() < 1.5) * + # (t.distance(cutters) > 0.1) + # )) + ) + ) + score_terms["wall_decorations"] = rooms.mean( + lambda r: ( + walldec.related_to(r).mean( + lambda w: ( + vertical_diff(w, r).abs().minimize(weight=1) + + w.distance(walldec).maximize(weight=1) + + w.distance(window).hinge(0.25, 10).maximize(weight=1) + + cl.angle_alignment_cost(w, r, cu.floortags).minimize(weight=5) + + cl.accessibility_cost(w, furniture, dist=1).minimize(weight=5) + + cl.center_stable_surface_dist(w).minimize(weight=1) + ) + ) + ) + ) + + score_terms["floor_covering"] = rugs.mean( + lambda rug: ( + rug.distance(rooms, cu.walltags).maximize(weight=3) + + cl.angle_alignment_cost(rug, rooms, cu.walltags).minimize(weight=3) + ) + ) + # endregion + + # region PLANTS 
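# --- Editorial note (applies to every region of home_constraints, not part of the patch): per the docstring above, hard requirements accumulate in the `constraints` OrderedDict and soft preferences in `score_terms`. A minimal hypothetical pair, built only from operators that already appear in this diff:
#
#     constraints["storage_example"] = rooms.all(
#         lambda r: storage.related_to(r).count().in_range(1, 7)  # must hold in every room
#     )
#     score_terms["storage_example"] = rooms.mean(
#         lambda r: storage.related_to(r).count().maximize(weight=2)  # weighted soft objective
#     )
#
# The hard-constraint solver rejects layouts that violate `constraints`, while `score_terms` are weighted terms it optimizes; the key names and weights here are illustrative only.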
small_plants = obj[tableware.PlantContainerFactory].related_to(storage, cu.ontop) big_plants = ( obj[tableware.LargePlantContainerFactory] .related_to(rooms, cu.on_floor) .related_to(rooms, cu.against_wall) ) - constraints['plants'] = rooms.all(lambda r: ( - big_plants.related_to(r).count().in_range(0, 1) * - small_plants.related_to(storage.related_to(r)).count().in_range(0, 5) - )) - score_terms['plants'] = rooms.sum(lambda r: ( - - big_plants.related_to(r).sum(lambda p: p.distance(doors)).maximize(weight=5) - - + ( # small plants should be near window for sunlight - small_plants - .related_to(storage.related_to(r)) - .sum(lambda p: p.distance(window.related_to(r))) - ).minimize(weight=1) - )) - #endregion - - #region SIDETABLE + constraints["plants"] = rooms.all( + lambda r: ( + big_plants.related_to(r).count().in_range(0, 1) + * small_plants.related_to(storage.related_to(r)).count().in_range(0, 5) + ) + ) + score_terms["plants"] = rooms.mean( + lambda r: ( + big_plants.related_to(r) + .mean(lambda p: p.distance(doors)) + .maximize(weight=5) + + ( # small plants should be near window for sunlight + small_plants.related_to(storage.related_to(r)).mean( + lambda p: p.distance(window.related_to(r)) + ) + ).minimize(weight=1) + ) + ) + # endregion + + # region SIDETABLE sidetable = furniture[Semantics.SideTable].related_to(furniture, cu.side_by_side) - score_terms['sidetable'] = rooms.sum(lambda r: ( - sidetable.related_to(r).sum(lambda t: ( - t.distance(r, cu.walltags).minimize(weight=1) - )) - )) - #endregion + score_terms["sidetable"] = rooms.mean( + lambda r: ( + sidetable.related_to(r).mean( + lambda t: (t.distance(r, cu.walltags).minimize(weight=1)) + ) + ) + ) + # endregion - #region DESKS + # region DESKS desks = wallfurn[shelves.SimpleDeskFactory] - deskchair = furniture[seating.OfficeChairFactory].related_to(desks, cu.front_against) + deskchair = furniture[seating.OfficeChairFactory].related_to( + desks, cu.front_against + ) monitors = obj[appliances.MonitorFactory] - constraints['desk'] = rooms.all(lambda r: ( - desks.related_to(r).all(lambda t: ( - deskchair.related_to(r).related_to(t).count().in_range(0, 1) * - monitors.related_to(t, cu.ontop).count().equals(1) * - (obj[Semantics.OfficeShelfItem].related_to(t, cu.on).count() >= 0) * - (deskchair.related_to(r).related_to(t).count() == 1) - )) - )) - - score_terms['desk'] = rooms.sum(lambda r: desks.sum(lambda d: ( - - obj.related_to(d).count().maximize(weight=3) - - + d.distance(doors.related_to(r)).maximize(weight=0.1) - - + cl.accessibility_cost(d, furniture.related_to(r)).minimize(weight=3) - + cl.accessibility_cost(d, r).minimize(weight=3) - - + monitors.related_to(d).sum(lambda m: ( - cl.accessibility_cost(m, r, dist=2).minimize(weight=3) + - cl.accessibility_cost(m, obj.related_to(r), dist=0.5).minimize(weight=3) + - m.distance(r, cu.walltags).hinge(0.1, 1e7).minimize(weight=1) - )) + constraints["desk"] = rooms.all( + lambda r: ( + desks.related_to(r).all( + lambda t: ( + deskchair.related_to(r).related_to(t).count().in_range(0, 1) + * monitors.related_to(t, cu.ontop).count().equals(1) + * (obj[Semantics.OfficeShelfItem].related_to(t, cu.on).count() >= 0) + * (deskchair.related_to(r).related_to(t).count() == 1) + ) + ) + ) + ) - + deskchair.distance(rooms, cu.walltags).maximize(weight=1) - ))) + score_terms["desk"] = rooms.mean( + lambda r: desks.mean( + lambda d: ( + obj.related_to(d).count().maximize(weight=3) + + d.distance(doors.related_to(r)).maximize(weight=0.1) + + cl.accessibility_cost(d, 
furniture.related_to(r)).minimize(weight=3) + + cl.accessibility_cost(d, r).minimize(weight=3) + + monitors.related_to(d).mean( + lambda m: ( + cl.accessibility_cost(m, r, dist=2).minimize(weight=3) + + cl.accessibility_cost( + m, obj.related_to(r), dist=0.5 + ).minimize(weight=3) + + m.distance(r, cu.walltags).hinge(0.1, 1e7).minimize(weight=1) + ) + ) + + deskchair.distance(rooms, cu.walltags).maximize(weight=1) + ) + ) + ) - #endregion + # endregion - #region ALL LIGHTING RULES + # region ALL LIGHTING RULES lights = obj[Semantics.Lighting] - floor_lamps = lights[lighting.FloorLampFactory].related_to(rooms, cu.on_floor).related_to(rooms, cu.against_wall) - #constraints['lighting'] = rooms.all(lambda r: ( + floor_lamps = ( + lights[lamp.FloorLampFactory] + .related_to(rooms, cu.on_floor) + .related_to(rooms, cu.against_wall) + ) + # constraints['lighting'] = rooms.all(lambda r: ( # # dont put redundant lights close to eachother (including lamps, ceiling lights, etc) # lights.related_to(r).all(lambda l: l.distance(lights.related_to(r)) >= 2) - #)) - - #endregion + # )) - #region CEILING LIGHTS - ceillights = lights[lighting.CeilingLightFactory] - - constraints['ceiling_lights'] = rooms.all(lambda r: ( - ceillights.related_to(r, cu.hanging).count().in_range(1, 4) - )) - score_terms['ceiling_lights'] = rooms.sum(lambda r: ( - (ceillights.count() / r.volume(dims=2)).hinge(0.08, 0.15).minimize(weight=5) + - ceillights.mean(lambda t: ( - t.distance(r, cu.walltags).pow(0.5) * 1.5 + - t.distance(ceillights).pow(0.2) * 2 - )).maximize(weight=1) - )) - #endregion + # endregion - #region LAMPS - lamps = lights[lighting.DeskLampFactory].related_to(furniture, cu.ontop) - constraints['lamps'] = rooms.all(lambda r: ( + # region CEILING LIGHTS + ceillights = lights[lamp.CeilingLightFactory] - # allow 0-2 lamps per room, placed on any sensible object - lamps.related_to(storage.related_to(r)).count().in_range(0, 2) - #* lamps.related_to(sidetable.related_to(r)).count().in_range(0, 2) - #* lamps.related_to(desks.related_to(r, cu.on), cu.ontop).count().in_range(0, 1) + constraints["ceiling_lights"] = rooms.all( + lambda r: (ceillights.related_to(r, cu.hanging).count().in_range(1, 4)) + ) + score_terms["ceiling_lights"] = rooms.mean( + lambda r: ( + (ceillights.count() / r.volume(dims=2)).hinge(0.08, 0.15).minimize(weight=5) + + ceillights.mean( + lambda t: ( + t.distance(r, cu.walltags).pow(0.5) * 1.5 + + t.distance(ceillights).pow(0.2) * 2 + ) + ).maximize(weight=1) + ) + ) + # endregion - * ( # pull-string lamps look extremely unnatural when too far off the ground - lamps.related_to(storage.related_to(r)) - .all(lambda l: - l.distance(r, cu.floortags).in_range(0.5, 1.5) + # region LAMPS + lamps = lights[lamp.DeskLampFactory].related_to(furniture, cu.ontop) + constraints["lamps"] = rooms.all( + lambda r: ( + # allow 0-2 lamps per room, placed on any sensible object + lamps.related_to(storage.related_to(r)).count().in_range(0, 2) + # * lamps.related_to(sidetable.related_to(r)).count().in_range(0, 2) + # * lamps.related_to(desks.related_to(r, cu.on), cu.ontop).count().in_range(0, 1) + * ( # pull-string lamps look extremely unnatural when too far off the ground + lamps.related_to(storage.related_to(r)).all( + lambda l: l.distance(r, cu.floortags).in_range(0.5, 1.5) + ) ) ) + ) - )) - - score_terms['lamps'] = lamps.sum(lambda l: ( - cl.center_stable_surface_dist(l.related_to(sidetable)).minimize(weight=1) + - l.distance(lamps).maximize(weight=1) - )) - #endregion + score_terms["lamps"] = lamps.mean( + lambda l: 
( + cl.center_stable_surface_dist(l.related_to(sidetable)).minimize(weight=1) + + l.distance(lamps).maximize(weight=1) + ) + ) + # endregion # region CLOSETS closets = rooms[Semantics.Closet].excludes(cu.room_types) - constraints['closets'] = closets.all(lambda r: ( - (storage.related_to(r).count() >= 1) * - ceillights.related_to(r, cu.hanging).count().in_range(0, 1) * - (walldec.related_to(r).count() == 0) # special case exclusion - no paintings etc in closets - )) - score_terms['closets'] = closets.all(lambda r: ( - storage.related_to(r).count().maximize(weight=2) * - obj.related_to(storage.related_to(r)).count().maximize(weight=2) - )) + constraints["closets"] = closets.all( + lambda r: ( + (storage.related_to(r).count() >= 1) + * ceillights.related_to(r, cu.hanging).count().in_range(0, 1) + * ( + walldec.related_to(r).count() == 0 + ) # special case exclusion - no paintings etc in closets + ) + ) + score_terms["closets"] = closets.all( + lambda r: ( + storage.related_to(r).count().maximize(weight=2) + * obj.related_to(storage.related_to(r)).count().maximize(weight=2) + ) + ) # NOTE: closets also have special-case behavior below depending on what room they are adjacent to # endregion - #region BEDROOMS + # region BEDROOMS bedrooms = rooms[Semantics.Bedroom].excludes(cu.room_types) beds = wallfurn[Semantics.Bed][seating.BedFactory] - constraints['bedroom'] = bedrooms.all(lambda r: ( - - beds.related_to(r).count().in_range(1, 2) * - - ( - sidetable.related_to(r) - .related_to(beds.related_to(r), cu.leftright_leftright) - .count().in_range(0, 2) - ) * - - rugs.related_to(r).count().in_range(0, 2) * - - desks.related_to(r).count().in_range(0, 1) * - storage.related_to(r).count().in_range(2, 5) * - - floor_lamps.related_to(r).count().in_range(0, 1) * - - storage.related_to(r).all(lambda s: ( - (obj[Semantics.OfficeShelfItem].related_to(s, cu.on).count() >= 0) - )) - )) + constraints["bedroom"] = bedrooms.all( + lambda r: ( + beds.related_to(r).count().in_range(1, 2) + * ( + sidetable.related_to(r) + .related_to(beds.related_to(r), cu.leftright_leftright) + .count() + .in_range(0, 2) + ) + * rugs.related_to(r).count().in_range(0, 1) + * desks.related_to(r).count().in_range(0, 1) + * storage.related_to(r).count().in_range(2, 5) + * floor_lamps.related_to(r).count().in_range(0, 1) + * storage.related_to(r).all( + lambda s: ( + obj[Semantics.OfficeShelfItem].related_to(s, cu.on).count() >= 0 + ) + ) + ) + ) - score_terms['bedroom'] = bedrooms.sum(lambda r: ( - beds.related_to(r).count().maximize(weight=3) + - beds.related_to(r).sum(lambda t: cl.distance(r, doors)).maximize(weight=0.5) + - sidetable.related_to(r).sum(lambda t: t.distance(beds.related_to(r))).minimize(weight=3) - )) + score_terms["bedroom"] = bedrooms.mean( + lambda r: ( + beds.related_to(r).count().maximize(weight=3) + + beds.related_to(r) + .mean(lambda t: cl.distance(r, doors)) + .maximize(weight=0.5) + + sidetable.related_to(r) + .mean(lambda t: t.distance(beds.related_to(r))) + .minimize(weight=3) + ) + ) - #endregion + # endregion - #region KITCHENS + # region KITCHENS kitchens = rooms[Semantics.Kitchen].excludes(cu.room_types) - + countertops = furniture[Semantics.KitchenCounter] - wallcounter = countertops[shelves.KitchenSpaceFactory].related_to(rooms, cu.against_wall) + wallcounter = countertops[shelves.KitchenSpaceFactory].related_to( + rooms, cu.against_wall + ) island = countertops[shelves.KitchenIslandFactory] barchairs = furniture[seating.BarChairFactory] - - constraints['kitchen_counters'] = kitchens.all(lambda r: ( 
- wallcounter.related_to(r).count().in_range(1, 2) * - island.related_to(r).count().in_range(0, 1) - )) - - if params['has_kitchen_barstools']: - constraints['kitchen_barchairs'] = kitchens.all(lambda r: ( - barchairs.related_to(island.related_to(r), cu.front_against).count().in_range(0, 4) - )) - score_terms['kitchen_counters'] = kitchens.sum(lambda r: ( + constraints["kitchen_counters"] = kitchens.all( + lambda r: ( + wallcounter.related_to(r).count().in_range(1, 2) + * island.related_to(r).count().in_range(0, 1) + ) + ) - # try to fill 40-60% of kitchen floorplan with countertops (additive with typical furniture incentive) - ( - countertops.related_to(r).volume(dims=2) - / r.volume(dims=2).clamp_min(1) # avoid div by 0 - ).hinge(0.4, 0.6).minimize(weight=10) + + if params["has_kitchen_barstools"]: + constraints["kitchen_barchairs"] = kitchens.all( + lambda r: ( + barchairs.related_to(island.related_to(r), cu.front_against) + .count() + .in_range(0, 4) + ) + ) - # cluster countertops together - countertops.related_to(r).sum( - lambda c: countertops.related_to(r).mean(lambda c2: - c.distance(c2) + score_terms["kitchen_counters"] = kitchens.mean( + lambda r: ( + # try to fill 40-60% of kitchen floorplan with countertops (additive with typical furniture incentive) + ( + countertops.related_to(r).volume(dims=2) + / r.volume(dims=2).clamp_min(1) # avoid div by 0 ) - ).minimize(weight=3) + .hinge(0.4, 0.6) + .minimize(weight=10) + + + # cluster countertops together + countertops.related_to(r) + .mean(lambda c: countertops.related_to(r).mean(lambda c2: c.distance(c2))) + .minimize(weight=3) + ) + ) - )) + constraints["kitchen_island_placement"] = kitchens.all( + lambda r: wallcounter.related_to(r).all( + lambda t: (t.distance(island.related_to(r)).in_range(0.7, 3)) + ) + * island.related_to(r).all( + lambda t: ( + t.distance(wallcounter.related_to(r)).in_range(0.7, 3) + * (t.distance(r, cu.walltags) > 2) + ) + ) + ) - constraints['kitchen_island_placement'] = kitchens.all(lambda r: - wallcounter.related_to(r).all(lambda t: ( - t.distance(island.related_to(r)).in_range(0.7, 3) - )) * - island.related_to(r).all(lambda t: ( - t.distance(wallcounter.related_to(r)).in_range(0.7, 3) * - (t.distance(r, cu.walltags) > 2) - )) + score_terms["kitchen_island_placement"] = kitchens.mean( + lambda r: ( + island.mean( + lambda t: ( + cl.angle_alignment_cost(t, wallcounter) + + cl.angle_alignment_cost(t, r, cu.walltags) + ) + ).minimize(weight=1) + + island.distance(r, cu.walltags).hinge(3, 1e7).minimize(weight=10) + + wallcounter.mean( + lambda t: cl.focus_score(t, island.related_to(r)).minimize(weight=5) + ) + ) ) - score_terms['kitchen_island_placement'] = kitchens.sum(lambda r: ( - island.sum(lambda t: ( - cl.angle_alignment_cost(t, wallcounter) + - cl.angle_alignment_cost(t, r, cu.walltags) - )).minimize(weight=1) + - island.distance(r, cu.walltags).hinge(3, 1e7).minimize(weight=10) + - wallcounter.sum(lambda t: - cl.focus_score(t, island.related_to(r)).minimize(weight=5) + sink_flush_on_counter = cl.StableAgainst( + cu.bottom, {Subpart.SupportSurface}, margin=0.001 + ) + cl.StableAgainst(cu.back, cu.walltags, margin=0.1) + kitchen_sink = obj[Semantics.Sink][table_decorations.SinkFactory].related_to( + countertops, sink_flush_on_counter + ) + constraints["kitchen_sink"] = kitchens.all( + lambda r: ( + # those sinks can be on either type of counter + kitchen_sink.related_to(wallcounter.related_to(r)).count().in_range(0, 1) + * kitchen_sink.related_to(island.related_to(r)) + .count() + .in_range(0, 1) # 
island sinks dont need to be against wall + * countertops.related_to(r).all( + lambda c: ( + kitchen_sink.related_to(c).all( + lambda s: s.distance(c, cu.side).in_range(0.05, 0.2) + ) + ) + ) ) - )) + ) - sink_flush_on_counter = cl.StableAgainst(cu.bottom, {Subpart.SupportSurface}, margin=0.001) - sink_against_wall = cl.StableAgainst(cu.back, cu.walltags, margin=0.1) - kitchen_sink = ( - obj[Semantics.Sink][table_decorations.SinkFactory] - .related_to(countertops, sink_flush_on_counter) + score_terms["kitchen_sink"] = kitchens.mean( + lambda r: ( + countertops.mean( + lambda c: kitchen_sink.related_to(c).mean( + lambda s: ( + (s.volume(dims=2) / c.volume(dims=2)) + .hinge(0.2, 0.4) + .minimize(weight=10) + ) + ) + ) + + island.related_to(r).mean( + lambda isl: ( # sinks on islands must be near to edge and oriented outwards + kitchen_sink.related_to(isl).mean( + lambda s: ( + cl.angle_alignment_cost(s, isl, cu.side).minimize(weight=10) + + cl.distance(s, isl, cu.side) + .hinge(0.05, 0.07) + .minimize(weight=10) + ) + ) + ) + ) + ) ) - constraints['kitchen_sink'] = kitchens.all(lambda r: ( - # those sinks can be on either type of counter - kitchen_sink.related_to(wallcounter.related_to(r)).count().in_range(0, 1) - * kitchen_sink.related_to(island.related_to(r)).count().in_range(0, 1) # island sinks dont need to be against wall + kitchen_appliances = obj[Semantics.KitchenAppliance] + kitchen_appliances_big = kitchen_appliances.related_to( + kitchens, cu.on_floor + ).related_to(kitchens, cu.against_wall) + microwaves = kitchen_appliances[appliances.MicrowaveFactory].related_to( + wallcounter, cu.on + ) - * countertops.related_to(r).all(lambda c: ( - kitchen_sink.related_to(c).all( - lambda s: s.distance(c, cu.side).in_range(0.05, 0.2) + constraints["kitchen_appliance"] = kitchens.all( + lambda r: ( + kitchen_appliances_big[appliances.DishwasherFactory] + .related_to(r) + .count() + .in_range(0, 1) + * kitchen_appliances_big[appliances.BeverageFridgeFactory] + .related_to(r) + .count() + .in_range(0, 1) + * ( + kitchen_appliances_big[appliances.OvenFactory].related_to(r).count() + == 1 ) - )) - )) - - score_terms['kitchen_sink'] = kitchens.sum(lambda r: ( + * (wallfurn[shelves.KitchenCabinetFactory].related_to(r).count() >= 0) + * (microwaves.related_to(wallcounter.related_to(r)).count().in_range(0, 1)) + ) + ) - countertops.sum(lambda c: kitchen_sink.related_to(c).sum(lambda s: ( - (s.volume(dims=2) / c.volume(dims=2)).hinge(0.2, 0.4).minimize(weight=10) - ))) + score_terms["kitchen_appliance"] = kitchens.mean( + lambda r: ( + kitchen_appliances.mean( + lambda t: ( + t.distance(wallcounter.related_to(r)).minimize(weight=1) + + cl.accessibility_cost(t, r, dist=1).minimize(weight=10) + + cl.accessibility_cost( + t, furniture.related_to(r), dist=1 + ).minimize(weight=10) + + t.distance(island.related_to(r)) + .hinge(0.7, 1e7) + .minimize(weight=10) + ) + ) + ) + ) - + island.related_to(r).sum(lambda isl:( # sinks on islands must be near to edge and oriented outwards - kitchen_sink.related_to(isl).sum(lambda s: ( - cl.angle_alignment_cost(s, isl, cu.side).minimize(weight=10) - + cl.distance(s, isl, cu.side).hinge(0.05, 0.07).minimize(weight=10) - )) - )) + def obj_on_counter(r): + return obj.related_to(countertops.related_to(r), cu.on) - )) + constraints["kitchen_objects"] = kitchens.all( + lambda r: ( + (obj_on_counter(r)[Semantics.KitchenCounterItem].count() >= 0) + * ( + obj[Semantics.FoodPantryItem] + .related_to(storage.related_to(r), cu.on) + .count() + >= 0 + ) + * island.related_to(r).all( 
+ lambda t: ( + obj[Semantics.TableDisplayItem] + .related_to(t, cu.ontop) + .count() + .in_range(0, 4) + ) + ) + ) + ) - kitchen_appliances = obj[Semantics.KitchenAppliance] - kitchen_appliances_big = kitchen_appliances.related_to(kitchens, cu.on_floor).related_to(kitchens, cu.against_wall) - microwaves = kitchen_appliances[appliances.MicrowaveFactory].related_to(wallcounter, cu.on) - - constraints['kitchen_appliance'] = kitchens.all(lambda r: ( - - kitchen_appliances_big[appliances.DishwasherFactory].related_to(r).count().in_range(0, 1) - * kitchen_appliances_big[appliances.BeverageFridgeFactory].related_to(r).count().in_range(0, 1) - * (kitchen_appliances_big[appliances.OvenFactory].related_to(r).count() == 1) - - * (wallfurn[shelves.KitchenCabinetFactory].related_to(r).count() >= 0) - - * (microwaves.related_to(wallcounter.related_to(r)).count().in_range(0, 1)) - )) - - score_terms['kitchen_appliance'] = kitchens.sum(lambda r: ( - kitchen_appliances.sum(lambda t: ( - t.distance(wallcounter.related_to(r)).minimize(weight=1) - + cl.accessibility_cost(t, r, dist=1).minimize(weight=10) - + cl.accessibility_cost(t, furniture.related_to(r), dist=1).minimize(weight=10) - + t.distance(island.related_to(r)).hinge(0.7, 1e7).minimize(weight=10) - )) - )) - - obj_on_counter = lambda r: obj.related_to(countertops.related_to(r), cu.on) - constraints['kitchen_objects'] = kitchens.all(lambda r: ( - - (obj_on_counter(r)[Semantics.KitchenCounterItem].count() >= 0) - - * (obj[Semantics.FoodPantryItem].related_to(storage.related_to(r), cu.on).count() >= 0) - - * island.related_to(r).all(lambda t: ( - obj[Semantics.TableDisplayItem].related_to(t, cu.ontop).count().in_range(0, 4) - )) - )) - - score_terms['kitchen_objects'] = kitchens.sum(lambda r: ( - ( - obj.related_to(wallcounter, cu.on) - .sum(lambda t: t.distance(r, cu.walltags)) - .minimize(weight=3) + score_terms["kitchen_objects"] = kitchens.mean( + lambda r: ( + ( + obj.related_to(wallcounter, cu.on) + .mean(lambda t: t.distance(r, cu.walltags)) + .minimize(weight=3) + ) + + cl.center_stable_surface_dist( + obj.related_to(island.related_to(r), cu.ontop) + ).minimize(weight=1) ) - + cl.center_stable_surface_dist( - obj.related_to(island.related_to(r), cu.ontop) - ).minimize(weight=1) - )) + ) # disabled for now bc tertiary - #constraints['kitchen_appliance_objects'] = kitchens.all(lambda r: ( + # constraints['kitchen_appliance_objects'] = kitchens.all(lambda r: ( # wallfurn[appliances.DishwasherFactory].related_to(r).all(lambda r: ( # (obj[Semantics.Cookware].related_to(r, cu.on).count() >= 0) * # (obj[Semantics.Dishware].related_to(r, cu.on).count() >= 0 @@ -493,315 +652,432 @@ def home_constraints(): # wallfurn[appliances.OvenFactory].related_to(r).all(lambda r: ( # (obj[Semantics.Cookware].related_to(r, cu.on).count() >= 0) # )) - #))) + # ))) closet_kitchen = closets.related_to(kitchens, cl.RoomNeighbour()) - constraints['closet_kitchen'] = closet_kitchen.all(lambda r: ( - obj[Semantics.FoodPantryItem].related_to(storage.related_to(r), cu.on).count() >= 0 - )) - score_terms['closet_kitchen'] = closet_kitchen.sum(lambda r: ( - storage.related_to(r).count().maximize(weight=2) + - obj[Semantics.FoodPantryItem].related_to(storage.related_to(r), cu.on).count().maximize(weight=5) - )) - - #score_terms['kitchen_table'] # todo diningtable or hightop - - #endregion - - #region LIVINGROOMS - + constraints["closet_kitchen"] = closet_kitchen.all( + lambda r: ( + obj[Semantics.FoodPantryItem] + .related_to(storage.related_to(r), cu.on) + .count() + >= 0 + ) 
+ ) + score_terms["closet_kitchen"] = closet_kitchen.mean( + lambda r: ( + storage.related_to(r).count().maximize(weight=2) + + obj[Semantics.FoodPantryItem] + .related_to(storage.related_to(r), cu.on) + .count() + .maximize(weight=5) + ) + ) + + # score_terms['kitchen_table'] # todo diningtable or hightop + + # endregion + + # region LIVINGROOMS + livingrooms = rooms[Semantics.LivingRoom].excludes(cu.room_types) sofas = furniture[seating.SofaFactory] tvstands = wallfurn[shelves.TVStandFactory] coffeetables = furniture[tables.CoffeeTableFactory] - sofa_back_near_wall = cl.StableAgainst(cu.back, cu.walltags, margin=uniform(0.1, 0.3)) - sofa_side_near_wall = cl.StableAgainst(cu.side, cu.walltags, margin=uniform(0.1, 0.3)) - freestanding = lambda o, r: ( - o - .related_to(r) - .related_to(r, -sofa_back_near_wall) - #.related_to(r, -cu.side_against_wall) - ) - - constraints['sofa'] = livingrooms.all(lambda r: ( - #sofas.related_to(r).count().in_range(2, 3) - sofas.related_to(r, sofa_back_near_wall).count().in_range(2, 4) - #* sofas.related_to(r, sofa_side_near_wall).count().in_range(0, 1) - - * freestanding(sofas, r).all(lambda t: ( # frustrum infront of freestanding sofa must directly contain tvstand - cl.accessibility_cost(t, tvstands.related_to(r), dist=3) > 0.7 - )) - - * sofas.all(lambda t: ( - cl.accessibility_cost(t, furniture.related_to(r), dist=2).in_range(0, 0.5) - * cl.accessibility_cost(t, r, dist=1).in_range(0, 0.5) - )) - - #* ( # allow a storage object behind non-wall sofas - # storage.related_to(r) - # .related_to(freestanding(sofas, r)) - # .count().in_range(0, 1) - #) - )) - - constraints['sofa_positioning'] = rooms.all(lambda r: (sofas.all(lambda s: ( - (cl.accessibility_cost(s, rooms, dist=3) < 0.5) - * (cl.focus_score(s, tvstands.related_to(r)) > 0.5) # must face or perpendicular to TVStand - )))) - - score_terms['sofa'] = livingrooms.sum(lambda r: ( - - sofas.volume().maximize(weight=10) - - + sofas.related_to(r).sum(lambda t: ( - - t.distance(sofas.related_to(r)).hinge(0, 1).minimize(weight=1) - + t.distance(tvstands.related_to(r)).hinge(2, 3).minimize(weight=5) - - + cl.focus_score(t, tvstands.related_to(r)).maximize(weight=5) - + cl.angle_alignment_cost(t, tvstands.related_to(r), cu.front).minimize(weight=1) - + cl.focus_score(t, coffeetables.related_to(r)).maximize(weight=2) - - + cl.accessibility_cost(t, r, dist=3).minimize(weight=3) - )) - - + freestanding(sofas, r).sum(lambda t: ( - cl.angle_alignment_cost(t, tvstands.related_to(r)).minimize(weight=5) - + cl.angle_alignment_cost(t, r, cu.walltags).minimize(weight=3) - + cl.center_stable_surface_dist(t).minimize(weight=0.5) - )) - )) - - tvs = obj[appliances.TVFactory].related_to(tvstands, cu.ontop) - - if params['has_tv']: - constraints['tv'] = livingrooms.all(lambda r: ( - tvstands.related_to(r).all(lambda t: ( - (tvs.related_to(t).count() == 1) - - * tvs.related_to(t).all(lambda tv: - cl.accessibility_cost(tv, r, dist=1).in_range(0, 0.1) + sofa_back_near_wall = cl.StableAgainst( + cu.back, cu.walltags, margin=uniform(0.1, 0.3) + ) + cl.StableAgainst(cu.side, cu.walltags, margin=uniform(0.1, 0.3)) + + def freestanding(o, r): + return o.related_to(r).related_to(r, -sofa_back_near_wall) + + constraints["sofa"] = livingrooms.all( + lambda r: ( + # sofas.related_to(r).count().in_range(2, 3) + sofas.related_to(r, sofa_back_near_wall).count().in_range(2, 4) + # * sofas.related_to(r, sofa_side_near_wall).count().in_range(0, 1) + * freestanding(sofas, r).all( + lambda t: ( # frustrum infront of freestanding sofa must 
directly contain tvstand + cl.accessibility_cost(t, tvstands.related_to(r), dist=3) > 0.7 ) - )) - )) - - score_terms['tvstand'] = rooms.all(lambda r: (tvstands.sum(lambda stand: ( - tvs.related_to(stand).volume().maximize(weight=1) - - + stand.distance(window).maximize(weight=1) # penalize being very close to window. avoids tv blocking window. - + cl.accessibility_cost(stand, furniture).minimize(weight=3) + ) + * sofas.all( + lambda t: ( + cl.accessibility_cost(t, furniture.related_to(r), dist=2).in_range( + 0, 0.5 + ) + * cl.accessibility_cost(t, r, dist=1).in_range(0, 0.5) + ) + ) + # * ( # allow a storage object behind non-wall sofas + # storage.related_to(r) + # .related_to(freestanding(sofas, r)) + # .count().in_range(0, 1) + # ) + ) + ) - + cl.center_stable_surface_dist(stand).minimize(weight=5) # center tvstand against wall (also tries to do vertical & floor but those are constrained) - + cl.center_stable_surface_dist(tvs.related_to(stand)).minimize(weight=1) - )))) + constraints["sofa_positioning"] = rooms.all( + lambda r: ( + sofas.all( + lambda s: ( + (cl.accessibility_cost(s, rooms, dist=3) < 0.5) + * ( + cl.focus_score(s, tvstands.related_to(r)) > 0.5 + ) # must face or perpendicular to TVStand + ) + ) + ) + ) - constraints['livingroom'] = livingrooms.all(lambda r: ( - storage.related_to(r).count().in_range(1, 5) + score_terms["sofa"] = livingrooms.mean( + lambda r: ( + sofas.volume().maximize(weight=10) + + sofas.related_to(r).mean( + lambda t: ( + t.distance(sofas.related_to(r)).hinge(0, 1).minimize(weight=1) + + t.distance(tvstands.related_to(r)).hinge(2, 3).minimize(weight=5) + + cl.focus_score(t, tvstands.related_to(r)).maximize(weight=5) + + cl.angle_alignment_cost( + t, tvstands.related_to(r), cu.front + ).minimize(weight=1) + + cl.focus_score(t, coffeetables.related_to(r)).maximize(weight=2) + + cl.accessibility_cost(t, r, dist=3).minimize(weight=3) + ) + ) + + freestanding(sofas, r).mean( + lambda t: ( + cl.angle_alignment_cost(t, tvstands.related_to(r)).minimize( + weight=5 + ) + + cl.angle_alignment_cost(t, r, cu.walltags).minimize(weight=3) + + cl.center_stable_surface_dist(t).minimize(weight=0.5) + ) + ) + ) + ) - * tvstands.related_to(r).count().equals(1) + tvs = obj[appliances.TVFactory].related_to(tvstands, cu.ontop) - * ( # allow sidetables next to any sofa - sidetable.related_to(r) - .related_to(sofas.related_to(r), cu.side_by_side) - .count().in_range(0, 2) + if params["has_tv"]: + constraints["tv"] = livingrooms.all( + lambda r: ( + tvstands.related_to(r).all( + lambda t: ( + (tvs.related_to(t).count() == 1) + * tvs.related_to(t).all( + lambda tv: cl.accessibility_cost(tv, r, dist=1).in_range( + 0, 0.1 + ) + ) + ) + ) + ) ) - * desks.related_to(r).count().in_range(0, 1) - * coffeetables.related_to(r).count().in_range(0, 1) - * coffeetables.related_to(r).all(lambda t: ( - (obj[Semantics.OfficeShelfItem].related_to(t, cu.on).count().in_range(0, 3)) - )) - - * ( - rugs - .related_to(r) - #.related_to(furniture.related_to(r), cu.side_by_side) - .count().in_range(0, 2) + score_terms["tvstand"] = rooms.all( + lambda r: ( + tvstands.mean( + lambda stand: ( + tvs.related_to(stand).volume().maximize(weight=1) + + stand.distance(window).maximize( + weight=1 + ) # penalize being very close to window. avoids tv blocking window. 
+ + cl.accessibility_cost(stand, furniture).minimize(weight=3) + + cl.center_stable_surface_dist(stand).minimize( + weight=5 + ) # center tvstand against wall (also tries to do vertical & floor but those are constrained) + + cl.center_stable_surface_dist(tvs.related_to(stand)).minimize( + weight=1 + ) + ) + ) ) - )) + ) - score_terms['livingroom'] = livingrooms.sum(lambda r: ( + constraints["livingroom"] = livingrooms.all( + lambda r: ( + storage.related_to(r).count().in_range(1, 5) + * tvstands.related_to(r).count().equals(1) + * ( # allow sidetables next to any sofa + sidetable.related_to(r) + .related_to(sofas.related_to(r), cu.side_by_side) + .count() + .in_range(0, 2) + ) + * desks.related_to(r).count().in_range(0, 1) + * coffeetables.related_to(r).count().in_range(0, 1) + * coffeetables.related_to(r).all( + lambda t: ( + obj[Semantics.OfficeShelfItem] + .related_to(t, cu.on) + .count() + .in_range(0, 3) + ) + ) + * ( + rugs.related_to(r) + # .related_to(furniture.related_to(r), cu.side_by_side) + .count() + .in_range(0, 2) + ) + ) + ) - coffeetables.related_to(r).sum(lambda t: ( + score_terms["livingroom"] = livingrooms.mean( + lambda r: ( + coffeetables.related_to(r).mean( + lambda t: ( + # ideal coffeetable-to-tv distance according to google + t.distance(sofas.related_to(r)).hinge(0.45, 0.6).minimize(weight=5) + + cl.angle_alignment_cost( + t, sofas.related_to(r), cu.front + ).minimize(weight=5) + + cl.focus_score(sofas.related_to(r), t).maximize(weight=5) + ) + ) + ) + ) - # ideal coffeetable-to-tv distance according to google - t.distance(sofas.related_to(r)).hinge(0.45, 0.6).minimize(weight=5) + constraints["livingroom_objects"] = livingrooms.all( + lambda r: ( + storage.all( + lambda t: ( + obj[Semantics.OfficeShelfItem].related_to(t, cu.on).count() >= 0 + ) + ) + * coffeetables.all( + lambda t: ( + obj[Semantics.TableDisplayItem] + .related_to(t, cu.ontop) + .count() + .in_range(0, 1) + * (obj[Semantics.OfficeShelfItem].related_to(t, cu.on).count() >= 0) + ) + ) + ) + ) - + cl.angle_alignment_cost(t, sofas.related_to(r), cu.front).minimize(weight=5) - + cl.focus_score(sofas.related_to(r), t).maximize(weight=5) - )) - )) + # endregion + # region DININGROOMS - constraints['livingroom_objects'] = livingrooms.all(lambda r: ( - storage.all(lambda t: ( - (obj[Semantics.OfficeShelfItem].related_to(t, cu.on).count() >= 0) - )) * - coffeetables.all(lambda t: ( - obj[Semantics.TableDisplayItem].related_to(t, cu.ontop).count().in_range(0, 1) * - (obj[Semantics.OfficeShelfItem].related_to(t, cu.on).count() >= 0) - )) - )) + diningtables = furniture[Semantics.Table][tables.TableDiningFactory] + diningchairs = furniture[Semantics.Chair][seating.ChairFactory] + constraints["dining_chairs"] = rooms.all( + lambda r: ( + diningtables.related_to(r).all( + lambda t: ( + diningchairs.related_to(r) + .related_to(t, cu.front_against) + .count() + .in_range(3, 6) + ) + ) + ) + ) - #endregion + score_terms["dining_chairs"] = rooms.all( + lambda r: ( + diningchairs.related_to(r).count().maximize(weight=5) + + diningchairs.related_to(r) + .mean(lambda t: t.distance(diningchairs.related_to(r))) + .maximize(weight=3) + # cl.reflectional_asymmetry(diningchairs.related_to(r), diningtables.related_to(r)).minimize(weight=1) + # cl.rotational_asymmetry(diningchairs.related_to(r)).minimize(weight=1) + ) + ) - #region DININGROOMS + constraints["dining_table_objects"] = rooms.all( + lambda r: ( + diningtables.related_to(r).all( + lambda t: ( + obj[Semantics.TableDisplayItem] + .related_to(t, cu.ontop) + .count() 
+ .in_range(0, 2) + * (obj[Semantics.Utensils].related_to(t, cu.ontop).count() >= 0) + * ( + obj[Semantics.Dishware] + .related_to(t, cu.ontop) + .count() + .in_range(0, 2) + ) + ) + ) + ) + ) - diningtables = furniture[Semantics.Table][tables.TableDiningFactory] - diningchairs = furniture[Semantics.Chair][seating.ChairFactory] - constraints['dining_chairs'] = rooms.all(lambda r: ( - diningtables.related_to(r).all(lambda t: ( - diningchairs.related_to(r).related_to(t, cu.front_against).count().in_range(3, 6) - )) - )) - - score_terms['dining_chairs'] = rooms.all(lambda r: ( - diningchairs.related_to(r).count().maximize(weight=5) + - diningchairs.related_to(r).sum(lambda t: t.distance(diningchairs.related_to(r))).maximize(weight=3) - #cl.reflectional_asymmetry(diningchairs.related_to(r), diningtables.related_to(r)).minimize(weight=1) - #cl.rotational_asymmetry(diningchairs.related_to(r)).minimize(weight=1) - )) - - constraints['dining_table_objects'] = rooms.all(lambda r: ( - diningtables.related_to(r).all(lambda t: ( - obj[Semantics.TableDisplayItem].related_to(t, cu.ontop).count().in_range(0, 2) * - (obj[Semantics.Utensils].related_to(t, cu.ontop).count() >= 0) * - (obj[Semantics.Dishware].related_to(t, cu.ontop).count().in_range(0, 2)) - )) - )) - - score_terms['dining_table_objects'] = rooms.sum(lambda r: ( - cl.center_stable_surface_dist( - obj[Semantics.TableDisplayItem] - .related_to(diningtables.related_to(r), cu.ontop) - ).minimize(weight=1) - )) + score_terms["dining_table_objects"] = rooms.mean( + lambda r: ( + cl.center_stable_surface_dist( + obj[Semantics.TableDisplayItem].related_to( + diningtables.related_to(r), cu.ontop + ) + ).minimize(weight=1) + ) + ) diningrooms = rooms[Semantics.DiningRoom].excludes(cu.room_types) - constraints['diningroom'] = diningrooms.all(lambda r: ( - (diningtables.related_to(r).count() == 1) * - storage.related_to(r).all(lambda t: ( - (obj[Semantics.Dishware].related_to(t, cu.on).count() >= 0) * - (obj[Semantics.OfficeShelfItem].related_to(t, cu.on).count().in_range(0, 5)) - )) - )) - score_terms['diningroom'] = diningrooms.sum(lambda r: ( - - diningtables.related_to(r).distance(r, cu.walltags).maximize(weight=10) - + cl.angle_alignment_cost(diningtables.related_to(r), r, cu.walltags).minimize(weight=10) - + cl.center_stable_surface_dist(diningtables.related_to(r)).minimize(weight=1) - )) - #endregion - - #region BATHROOMS + constraints["diningroom"] = diningrooms.all( + lambda r: ( + (diningtables.related_to(r).count() == 1) + * storage.related_to(r).all( + lambda t: ( + (obj[Semantics.Dishware].related_to(t, cu.on).count() >= 0) + * ( + obj[Semantics.OfficeShelfItem] + .related_to(t, cu.on) + .count() + .in_range(0, 5) + ) + ) + ) + ) + ) + score_terms["diningroom"] = diningrooms.mean( + lambda r: ( + diningtables.related_to(r).distance(r, cu.walltags).maximize(weight=10) + + cl.angle_alignment_cost( + diningtables.related_to(r), r, cu.walltags + ).minimize(weight=10) + + cl.center_stable_surface_dist(diningtables.related_to(r)).minimize( + weight=1 + ) + ) + ) + # endregion + + # region BATHROOMS bathrooms = rooms[Semantics.Bathroom].excludes(cu.room_types) toilet = wallfurn[bathroom.ToiletFactory] bathtub = wallfurn[bathroom.BathtubFactory] sink = wallfurn[bathroom.StandingSinkFactory] hardware = obj[bathroom.HardwareFactory].related_to(bathrooms, cu.against_wall) - constraints['bathroom'] = bathrooms.all(lambda r: ( - - mirror.related_to(r).related_to(r, cu.flush_wall).count().equals(1) * - sink.related_to(r).count().equals(1) * - 
toilet.related_to(r).count().equals(1) * - - storage.related_to(r).all(lambda t: ( - (obj[Semantics.BathroomItem].related_to(t, cu.on).count() >= 0) - )) - )) - - score_terms['toilet'] = rooms.all(lambda r: ( - toilet.distance(doors).maximize(weight=1) + - toilet.distance(furniture).maximize(weight=1) + - toilet.distance(sink).maximize(weight=1) + - cl.accessibility_cost(toilet, furniture, dist=2).minimize(weight=10) - )) - - constraints['bathtub'] = bathrooms.all(lambda r: ( - bathtub.related_to(r).count().in_range(0, 1) * - hardware.related_to(r).count().in_range(1, 4) - )) - score_terms['bathtub'] = bathrooms.all(lambda r: ( - - bathtub.sum(lambda t: t.distance(hardware)).minimize(weight=0.2) - + sink.sum(lambda t: t.distance(hardware)).minimize(weight=0.2) - - + hardware.sum(lambda t: ( - t.distance(rooms, cu.floortags) - .hinge(0.5, 1) - .minimize(weight=15) - )) + constraints["bathroom"] = bathrooms.all( + lambda r: ( + mirror.related_to(r).related_to(r, cu.flush_wall).count().equals(1) + * sink.related_to(r).count().equals(1) + * toilet.related_to(r).count().equals(1) + * storage.related_to(r).all( + lambda t: ( + obj[Semantics.BathroomItem].related_to(t, cu.on).count() >= 0 + ) + ) + ) + ) - )) + score_terms["toilet"] = rooms.all( + lambda r: ( + toilet.distance(doors).maximize(weight=1) + + toilet.distance(furniture).maximize(weight=1) + + toilet.distance(sink).maximize(weight=1) + + cl.accessibility_cost(toilet, furniture, dist=2).minimize(weight=10) + ) + ) - score_terms['bathroom'] = ( - mirror.related_to(bathrooms).distance(sink).minimize(weight=0.2) - + cl.accessibility_cost(mirror, furniture, cu.down_dir).maximize(weight=3) + constraints["bathtub"] = bathrooms.all( + lambda r: ( + bathtub.related_to(r).count().in_range(0, 1) + * hardware.related_to(r).count().in_range(1, 4) + ) + ) + score_terms["bathtub"] = bathrooms.all( + lambda r: ( + bathtub.mean(lambda t: t.distance(hardware)).minimize(weight=0.2) + + sink.mean(lambda t: t.distance(hardware)).minimize(weight=0.2) + + hardware.mean( + lambda t: ( + t.distance(rooms, cu.floortags).hinge(0.5, 1).minimize(weight=15) + ) + ) + ) ) - #endregion - #region MISC OBJECTS + score_terms["bathroom"] = mirror.related_to(bathrooms).distance(sink).minimize( + weight=0.2 + ) + cl.accessibility_cost(mirror, furniture, cu.down_dir).maximize(weight=3) + # endregion + + # region MISC OBJECTS - if params['has_aquarium_tank']: + if params["has_aquarium_tank"]: - aqtank = lambda r: obj[decor.AquariumTankFactory].related_to(storage.related_to(r), cu.ontop) + def aqtank(r): + return obj[decor.AquariumTankFactory].related_to( + storage.related_to(r), cu.ontop + ) - constraints['aquarium_tank'] = ( - aqtank(rooms).count().in_range(0, 1) + constraints["aquarium_tank"] = aqtank(rooms).count().in_range(0, 1) + score_terms["aquarium_tank"] = rooms.all( + lambda r: ( + aqtank(r).distance(r, cu.walltags).hinge(0.05, 0.1).minimize(weight=1) + ) ) - score_terms['aquarium_tank'] = rooms.all(lambda r: ( - aqtank(r).distance(r, cu.walltags).hinge(0.05, 0.1).minimize(weight=1) - )) - if params['has_birthday_balloons']: - balloons = obj[wall_decorations.BalloonFactory].related_to(rooms, cu.against_wall) - constraints['birthday_balloons'] = ( + if params["has_birthday_balloons"]: + balloons = obj[wall_decorations.BalloonFactory].related_to( + rooms, cu.against_wall + ) + constraints["birthday_balloons"] = ( balloons.related_to(rooms, cu.against_wall).count().in_range(0, 3) ) - score_terms['birthday_balloons'] = rooms.all(lambda r: ( - balloons.sum(lambda b: 
b.distance(r, cu.floortags).hinge(1.6, 2.5).minimize(weight=1)) - )) + score_terms["birthday_balloons"] = rooms.all( + lambda r: ( + balloons.mean( + lambda b: b.distance(r, cu.floortags) + .hinge(1.6, 2.5) + .minimize(weight=1) + ) + ) + ) - if params['has_cocktail_tables']: - + if params["has_cocktail_tables"]: cocktail_table = ( furniture[tables.TableCocktailFactory] .related_to(rooms, cu.on_floor) .related_to(rooms, cu.against_wall) ) - constraints['cocktail_tables'] = diningrooms.all(lambda r: ( - cocktail_table.related_to(r).count().in_range(0, 3) - *( - barchairs.related_to(cocktail_table.related_to(r), cu.front_against) - .count().in_range(0, 4) + constraints["cocktail_tables"] = diningrooms.all( + lambda r: ( + cocktail_table.related_to(r).count().in_range(0, 3) + * ( + barchairs.related_to(cocktail_table.related_to(r), cu.front_against) + .count() + .in_range(0, 4) + ) + * ( + obj[tableware.WineglassFactory] + .related_to(cocktail_table.related_to(r), cu.ontop) + .count() + .in_range(0, 4) + ) ) - * ( - obj[tableware.WineglassFactory] - .related_to(cocktail_table.related_to(r), cu.ontop) - .count().in_range(0, 4) - ) - )) - score_terms['cocktail_tables'] = diningrooms.sum(lambda r: ( - cocktail_table.related_to(r).sum(lambda t: ( - - t.distance(r, cu.walltags).hinge(0.5, 1).minimize(weight=1) - + t.distance(cocktail_table.related_to(r)).hinge(1, 2).minimize(weight=1) - - + barchairs.related_to(t).sum( - lambda c: c.distance(barchairs.related_to(t)) - ).maximize(weight=1) - )) - )) - - #endregion + ) + score_terms["cocktail_tables"] = diningrooms.mean( + lambda r: ( + cocktail_table.related_to(r).mean( + lambda t: ( + t.distance(r, cu.walltags).hinge(0.5, 1).minimize(weight=1) + + t.distance(cocktail_table.related_to(r)) + .hinge(1, 2) + .minimize(weight=1) + + barchairs.related_to(t) + .mean(lambda c: c.distance(barchairs.related_to(t))) + .maximize(weight=1) + ) + ) + ) + ) + + # endregion return cl.Problem( constraints=constraints, score_terms=score_terms, ) -all_constraint_funcs = [ - home_constraints -] + +all_constraint_funcs = [home_constraints] diff --git a/infinigen_examples/util/constraint_util.py b/infinigen_examples/util/constraint_util.py index 5345a586a..1dc75ce23 100644 --- a/infinigen_examples/util/constraint_util.py +++ b/infinigen_examples/util/constraint_util.py @@ -7,31 +7,41 @@ import numpy as np from infinigen.core import tags as t -from infinigen import assets as a -from infinigen.core.constraints import ( - constraint_language as cl, - example_solver, - usage_lookup -) +from infinigen.core.constraints import constraint_language as cl room_types = { - t.Semantics.Kitchen, - t.Semantics.Bedroom, - t.Semantics.LivingRoom, - t.Semantics.Closet, - t.Semantics.Hallway, - t.Semantics.Bathroom, - t.Semantics.Garage, - t.Semantics.Balcony, - t.Semantics.DiningRoom, - t.Semantics.Utility, + t.Semantics.Kitchen, + t.Semantics.Bedroom, + t.Semantics.LivingRoom, + t.Semantics.Closet, + t.Semantics.Hallway, + t.Semantics.Bathroom, + t.Semantics.Garage, + t.Semantics.Balcony, + t.Semantics.DiningRoom, + t.Semantics.Utility, t.Semantics.Staircase, } all_sides = {t.Subpart.Bottom, t.Subpart.Top, t.Subpart.Front, t.Subpart.Back} -walltags = {t.Subpart.Wall, t.Subpart.Visible, -t.Subpart.SupportSurface, -t.Subpart.Ceiling} -floortags = {t.Subpart.SupportSurface, t.Subpart.Visible, -t.Subpart.Wall, -t.Subpart.Ceiling} -ceilingtags = {t.Subpart.Visible, t.Subpart.Ceiling, -t.Subpart.Wall, -t.Subpart.SupportSurface} +walltags = { + t.Subpart.Wall, + t.Subpart.Visible, + 
-t.Subpart.SupportSurface, + -t.Subpart.Ceiling, +} +floortags = { + t.Subpart.SupportSurface, + t.Subpart.Visible, + -t.Subpart.Wall, + -t.Subpart.Ceiling, +} +ceilingtags = { + t.Subpart.Visible, + t.Subpart.Ceiling, + -t.Subpart.Wall, + -t.Subpart.SupportSurface, +} front_dir = np.array([0, 1, 0]) back_dir = np.array([0, -1, 0]) @@ -42,7 +52,13 @@ top = {t.Subpart.Top, -t.Subpart.Back, -t.Subpart.Bottom, -t.Subpart.Front} side = {-t.Subpart.Top, -t.Subpart.Bottom, -t.Subpart.Back, -t.Subpart.SupportSurface} front = {t.Subpart.Front, -t.Subpart.Top, -t.Subpart.Bottom, -t.Subpart.Back} -leftright = {-t.Subpart.Top, -t.Subpart.Bottom, -t.Subpart.Back, -t.Subpart.Front, -t.Subpart.SupportSurface} +leftright = { + -t.Subpart.Top, + -t.Subpart.Bottom, + -t.Subpart.Back, + -t.Subpart.Front, + -t.Subpart.SupportSurface, +} on_floor = cl.StableAgainst(bottom, floortags, margin=0.01) flush_wall = cl.StableAgainst(back, walltags, margin=0.02) @@ -54,10 +70,12 @@ ontop = cl.StableAgainst(bottom, top) on = cl.StableAgainst(bottom, {t.Subpart.SupportSurface}) -front_against = cl.StableAgainst(front, side, margin=0.05, check_z=False) #check_z=False +front_against = cl.StableAgainst( + front, side, margin=0.05, check_z=False +) # check_z=False leftright_leftright = cl.StableAgainst(leftright, leftright, margin=0.05) side_by_side = cl.StableAgainst(side, side) back_to_back = cl.StableAgainst(back, back) -variable_room = t.Variable('room') -variable_obj = t.Variable('obj') \ No newline at end of file +variable_room = t.Variable("room") +variable_obj = t.Variable("obj") diff --git a/infinigen_examples/util/generate_indoors_util.py b/infinigen_examples/util/generate_indoors_util.py index ea41810f1..13cbb01ce 100644 --- a/infinigen_examples/util/generate_indoors_util.py +++ b/infinigen_examples/util/generate_indoors_util.py @@ -8,51 +8,47 @@ import typing import bpy -from mathutils import Vector - import gin import numpy as np -from numpy.random import uniform, normal, randint -import trimesh - -from infinigen.terrain import Terrain, hidden_in_viewport -from infinigen.terrain.utils import Mesh -from infinigen.assets.materials import invisible_to_camera - -from infinigen.core.constraints import ( - constraint_language as cl, - reasoning as r, - usage_lookup -) +from mathutils import Vector +from numpy.random import uniform from infinigen.assets import weather +from infinigen.assets.materials import invisible_to_camera from infinigen.assets.scatters import grass, pebbles +from infinigen.core import tags as t +from infinigen.core.constraints import constraint_language as cl +from infinigen.core.constraints import reasoning as r +from infinigen.core.constraints import usage_lookup from infinigen.core.placement import density, split_in_view -from infinigen.core.util import (blender as butil, pipeline) +from infinigen.core.util import blender as butil +from infinigen.core.util import pipeline from infinigen.core.util.camera import points_inview -from infinigen.core import tags as t +from infinigen.terrain import Terrain, hidden_in_viewport +from infinigen.terrain.utils import Mesh from . 
import constraint_util as cu logger = logging.getLogger(__name__) logger.setLevel(logging.INFO) + def within_bbox_2d(verts, bbox): return ( - (verts[:, 0] > bbox[0][0]) & - (verts[:, 0] < bbox[1][0]) & - (verts[:, 1] > bbox[0][1]) & - (verts[:, 1] < bbox[1][1]) + (verts[:, 0] > bbox[0][0]) + & (verts[:, 0] < bbox[1][0]) + & (verts[:, 1] > bbox[0][1]) + & (verts[:, 1] < bbox[1][1]) ) + def create_outdoor_backdrop( - terrain: Terrain, - house_bbox: tuple, - cam, + terrain: Terrain, + house_bbox: tuple, + cam, p: pipeline.RandomStageExecutor, - params: dict + params: dict, ): - all_vertices = [] for name in terrain.terrain_objs: if name not in hidden_in_viewport: @@ -66,98 +62,110 @@ def create_outdoor_backdrop( else: height = all_vertices[all_mask, 2].max() - extra_zoff = uniform(0, 4) # deliberately float above the terrain. + extra_zoff = uniform(0, 4) # deliberately float above the terrain. height += extra_zoff for obj in terrain.terrain_objs.values(): obj.location[2] -= height butil.apply_transform(obj, loc=True) - main_terrain = bpy.data.objects['OpaqueTerrain'] + main_terrain = bpy.data.objects["OpaqueTerrain"] verts = np.zeros(3 * len(main_terrain.data.vertices), float) - main_terrain.data.vertices.foreach_get('co', verts) + main_terrain.data.vertices.foreach_get("co", verts) verts = verts.reshape(-1, 3) mask = within_bbox_2d(verts, house_bbox) - with butil.ViewportMode(main_terrain, mode='EDIT'): - bpy.ops.mesh.select_mode(use_extend=False, use_expand=False, type='FACE') - bpy.ops.mesh.select_all(action='DESELECT') + with butil.ViewportMode(main_terrain, mode="EDIT"): + bpy.ops.mesh.select_mode(use_extend=False, use_expand=False, type="FACE") + bpy.ops.mesh.select_all(action="DESELECT") split_in_view.select_vertmask(main_terrain, mask) - with butil.ViewportMode(main_terrain, mode='EDIT'): + with butil.ViewportMode(main_terrain, mode="EDIT"): bpy.ops.mesh.select_more() - bpy.ops.mesh.delete(type='VERT') - - p.run_stage('fancy_clouds', weather.kole_clouds.add_kole_clouds) - - terrain_inview, *_ = split_in_view.split_inview(main_terrain, verbose=True, outofview=False, - print_areas=True, cam=cam, vis_margin=2, dist_max=params['near_distance'], hide_render=True, - suffix='inview') + bpy.ops.mesh.delete(type="VERT") + + p.run_stage("fancy_clouds", weather.kole_clouds.add_kole_clouds) + + terrain_inview, *_ = split_in_view.split_inview( + main_terrain, + verbose=True, + outofview=False, + print_areas=True, + cam=cam, + vis_margin=2, + dist_max=params["near_distance"], + hide_render=True, + suffix="inview", + ) def add_grass(target): - select_max = params.get('grass_select_max', 0.5) - selection = density.placement_mask(normal_dir=(0, 0, 1), scale=0.1, return_scalar=True, - select_thresh=uniform(select_max / 2, select_max)) + select_max = params.get("grass_select_max", 0.5) + selection = density.placement_mask( + normal_dir=(0, 0, 1), + scale=0.1, + return_scalar=True, + select_thresh=uniform(select_max / 2, select_max), + ) grass.apply(target, selection=selection) - p.run_stage('grass', add_grass, terrain_inview) + p.run_stage("grass", add_grass, terrain_inview) def add_rocks(target): - selection = density.placement_mask(scale=0.15, select_thresh=0.5, normal_thresh=0.7, return_scalar=True) + selection = density.placement_mask( + scale=0.15, select_thresh=0.5, normal_thresh=0.7, return_scalar=True + ) _, rock_col = pebbles.apply(target, selection=selection) return rock_col - p.run_stage('rocks', add_rocks, terrain_inview) + p.run_stage("rocks", add_rocks, terrain_inview) return height -def 
place_cam_overhead(cam: bpy.types.Object, bbox: tuple[np.array]): - butil.spawn_point_cloud('place_cam_overhead', bbox) +def place_cam_overhead(cam: bpy.types.Object, bbox: tuple[np.array]): + butil.spawn_point_cloud("place_cam_overhead", bbox) mins, maxs = bbox cam.location = (maxs + mins) / 2 cam.rotation_euler = (0, 0, 0) - for cam_dist in np.exp(np.linspace(-1., 5.5, 500)): + for cam_dist in np.exp(np.linspace(-1.0, 5.5, 500)): cam.location[-1] = cam_dist bpy.context.view_layer.update() inview = points_inview(bbox, cam.children[0]) if inview.all(): for area in bpy.context.screen.areas: - if area.type == 'VIEW_3D': - area.spaces.active.region_3d.view_perspective = 'CAMERA' + if area.type == "VIEW_3D": + area.spaces.active.region_3d.view_perspective = "CAMERA" break return def overhead_view(cam, room_name): - room_name = room_name.split('.')[0] - + room_name = room_name.split(".")[0] + for o in bpy.data.objects: - if '.exterior' in o.name: + if ".exterior" in o.name: o.hide_viewport = True o.hide_render = True - elif '.ceiling' in o.name: + elif ".ceiling" in o.name: invisible_to_camera.apply(o) - floor = bpy.data.objects[room_name + '.floor'] + floor = bpy.data.objects[room_name + ".floor"] cam.location = floor.location + Vector((0, 0, 10)) cam.rotation_euler = (0, 0, 0) -def hide_other_rooms(state, rooms_split, keep_rooms: list[str]): +def hide_other_rooms(state, rooms_split, keep_rooms: list[str]): for col in rooms_split.values(): for o in col.objects: - if any( - roomname.split('.')[0] in o.name - for roomname in keep_rooms - ): + if any(roomname.split(".")[0] in o.name for roomname in keep_rooms): continue o.hide_viewport = True o.hide_render = True hide_cutters = [ - o + o for k, os in state.objs.items() - if t.Semantics.Cutter in os.tags and not any( + if t.Semantics.Cutter in os.tags + and not any( rel.target_name == roomname for rel in os.relations for roomname in keep_rooms @@ -169,8 +177,9 @@ def hide_other_rooms(state, rooms_split, keep_rooms: list[str]): o.hide_viewport = True bpy.context.scene.render.film_transparent = True + def apply_greedy_restriction( - stages: dict[str, r.Domain], + stages: dict[str, r.Domain], filter_tags: set[str], var: t.Variable, scope_domain: r.Domain = None, @@ -179,14 +188,18 @@ def apply_greedy_restriction( for k, d in stages.items(): if scope_domain is not None and not d.intersects(scope_domain): continue - stages[k], match = r.domain_tag_substitute(d, var, r.Domain(filter_tags).with_tags(var), return_match=True) - logger.info(f'{apply_greedy_restriction.__name__} restricting {k=} to {filter_tags=} for {var=}') - + stages[k], match = r.domain_tag_substitute( + d, var, r.Domain(filter_tags).with_tags(var), return_match=True + ) + logger.info( + f"{apply_greedy_restriction.__name__} restricting {k=} to {filter_tags=} for {var=}" + ) + + @gin.configurable def restrict_solving( stages: dict[str, r.Domain], problem: cl.Problem, - # typically provided by gin restrict_parent_rooms: set[str] = None, restrict_parent_objs: set[str] = None, @@ -196,7 +209,6 @@ def restrict_solving( solve_max_parent_obj: int = None, consgraph_filters: typing.Iterable[str] = None, ): - """Restricts solving to a subset of the full house or constraint graph. 
Parameters @@ -219,8 +231,12 @@ def restrict_solving( """ obj_domain = r.Domain({t.Semantics.Object}) - primary_obj_domain = r.Domain({t.Semantics.Object}, [(-cl.AnyRelation(), obj_domain)]) - secondary_obj_domain = r.Domain({t.Semantics.Object}, [(cl.AnyRelation(), obj_domain)]) + primary_obj_domain = r.Domain( + {t.Semantics.Object}, [(-cl.AnyRelation(), obj_domain)] + ) + secondary_obj_domain = r.Domain( + {t.Semantics.Object}, [(cl.AnyRelation(), obj_domain)] + ) if restrict_parent_rooms is not None: apply_greedy_restriction(stages, restrict_parent_rooms, cu.variable_room) @@ -229,22 +245,30 @@ def restrict_solving( apply_greedy_restriction(stages, restrict_parent_objs, cu.variable_obj) if restrict_child_primary is not None: - restrict_child_primary = t.to_tag_set(restrict_child_primary, fac_context=usage_lookup._factory_lookup) + restrict_child_primary = t.to_tag_set( + restrict_child_primary, fac_context=usage_lookup._factory_lookup + ) for k, d in stages.items(): if d.intersects(primary_obj_domain): - logger.info(f'restrict_solving applying restrict_child_primary, limiting {k} to objects satisfying {restrict_child_primary}') + logger.info( + f"restrict_solving applying restrict_child_primary, limiting {k} to objects satisfying {restrict_child_primary}" + ) stages[k] = d.intersection(r.Domain(restrict_child_primary)) - + if restrict_child_secondary is not None: - restrict_child_secondary = t.to_tag_set(restrict_child_secondary, fac_context=usage_lookup._factory_lookup) + restrict_child_secondary = t.to_tag_set( + restrict_child_secondary, fac_context=usage_lookup._factory_lookup + ) for k, d in stages.items(): if d.intersects(secondary_obj_domain): - logger.info(f'restrict_solving applying restrict_child_secondary, limiting {k} to objects satisfying {restrict_child_primary}') + logger.info( + f"restrict_solving applying restrict_child_secondary, limiting {k} to objects satisfying {restrict_child_primary}" + ) stages[k] = d.intersection(r.Domain(restrict_child_secondary)) - + quantity_limits = { cu.variable_room: solve_max_rooms, - cu.variable_obj: solve_max_parent_obj + cu.variable_obj: solve_max_parent_obj, } if consgraph_filters is not None: @@ -253,13 +277,16 @@ def restrict_solving( assert isinstance(consgraph_filters, typing.Iterable) old_counts = (len(problem.constraints), len(problem.score_terms)) - filter = lambda d: { - k: v for k, v in d.items() - if any(fi in k for fi in consgraph_filters) - } + def filter(d): + return { + k: v for k, v in d.items() if any(fi in k for fi in consgraph_filters) + } + problem = cl.Problem(filter(problem.constraints), filter(problem.score_terms)) - + new_counts = (len(problem.constraints), len(problem.score_terms)) - logger.info(f'restrict_solving filtered consgraph from {old_counts=} {new_counts=} using {consgraph_filters=}') + logger.info( + f"restrict_solving filtered consgraph from {old_counts=} {new_counts=} using {consgraph_filters=}" + ) - return stages, problem, quantity_limits \ No newline at end of file + return stages, problem, quantity_limits diff --git a/infinigen_examples/util/test_utils.py b/infinigen_examples/util/test_utils.py index 2892bd7f4..2bd11f589 100644 --- a/infinigen_examples/util/test_utils.py +++ b/infinigen_examples/util/test_utils.py @@ -4,26 +4,24 @@ # Authors: Alexander Raistrick -from pathlib import Path import importlib -import pdb +from pathlib import Path import gin -import bpy -from infinigen.core import surface -from infinigen.core.util import blender as butil, math as mutil -from infinigen.core import 
init +from infinigen.core import init, surface from infinigen.core.constraints.example_solver.room import constants +from infinigen.core.util import math as mutil + -def setup_gin(configs_folder, configs=None, overrides=None): +def setup_gin(configs_folders, configs, overrides=None): gin.clear_config() init.apply_gin_configs( - configs_folder=Path(configs_folder), + config_folders=configs_folders, configs=configs, overrides=overrides, skip_unknown=True, - finalize_config=False + finalize_config=False, ) surface.registry.initialize_from_gin() gin.unlock_config() @@ -33,28 +31,26 @@ def setup_gin(configs_folder, configs=None, overrides=None): def import_item(name): - *path_parts, name = name.split('.') + *path_parts, name = name.split(".") with gin.unlock_config(): - try: - return importlib.import_module('.' + name, '.'.join(path_parts)) + return importlib.import_module("." + name, ".".join(path_parts)) except ModuleNotFoundError: - mod = importlib.import_module('.'.join(path_parts)) + mod = importlib.import_module(".".join(path_parts)) return getattr(mod, name) + def load_txt_list(path: Path, skip_sharp=True): - path = Path(path) pathabs = path.absolute() if not pathabs.exists(): - raise FileNotFoundError(f'{path=} resolved to {pathabs=} which does not exist') + raise FileNotFoundError(f"{path=} resolved to {pathabs=} which does not exist") res = pathabs.read_text().splitlines() res = [ - f.lstrip('#').lstrip(' ') - for f in res if - (not f.startswith('#') or not skip_sharp) - and len(f) > 0 + f.lstrip("#").lstrip(" ") + for f in res + if (not f.startswith("#") or not skip_sharp) and len(f) > 0 ] return res diff --git a/log.txt b/log.txt deleted file mode 100644 index 517b8103e..000000000 --- a/log.txt +++ /dev/null @@ -1,18262 +0,0 @@ -commit a2880ed6024ee66476a7c5f0311973e1a368f8f2 -Author: pvl-bot -Date: Sun Jun 16 23:16:33 2024 -0700 - - Add 3 lines to infinigen/datagen/configs/export.gin. Contributed as part of Infinigen-Indoors by David Yan. - -commit 5799d04378407f7c38e4c4e14feca96057d74042 -Author: pvl-bot -Date: Sun Jun 16 23:16:33 2024 -0700 - - Add 10 lines to infinigen/datagen/configs/indoor_background_configs.gin. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit d8d29f06702a503c687e78c424688b59ed7e06c5 -Author: pvl-bot -Date: Sun Jun 16 23:16:33 2024 -0700 - - Add 2 lines to infinigen/assets/decor/aquarium_tank.py. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit 967cd920797cefda6946ebea9a4b712aab5d12d2 -Author: pvl-bot -Date: Sun Jun 16 23:16:33 2024 -0700 - - Add 15 lines to infinigen/assets/decor/aquarium_tank.py. Contributed as part of Infinigen-Indoors by Meenal Parakh. - -commit a53846ff5697191fdf31b1e867e577173f7757d4 -Author: pvl-bot -Date: Sun Jun 16 23:16:33 2024 -0700 - - Add 97 lines to infinigen/assets/decor/aquarium_tank.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit ad7e685d457575dea212ae9db19258176adab396 -Author: pvl-bot -Date: Sun Jun 16 23:16:33 2024 -0700 - - Add 1 lines to infinigen/assets/decor/__init__.py. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit d6d5db4594fa201ad6f96ad577a219f40bf31505 -Author: pvl-bot -Date: Sun Jun 16 23:16:33 2024 -0700 - - Add 6 lines to infinigen/assets/windows/__init__.py. Contributed as part of Infinigen-Indoors by Hongyu Wen. - -commit b95f42aeb0e432f8fb1c85aab97088caa011e777 -Author: pvl-bot -Date: Sun Jun 16 23:16:33 2024 -0700 - - Add 2 lines to infinigen/assets/windows/window.py. 
Contributed as part of Infinigen-Indoors by David Yan.
- -commit 68b4d91eb3cae215eb0e39a4f64e276d41ece306 -Author: pvl-bot -Date: Sun Jun 16 23:16:31 2024 -0700 - - Add 80 lines to infinigen/assets/elements/nature_shelf_trinkets/generate.py. Contributed as part of Infinigen-Indoors by Stamatis Alexandropoulos. - -commit 9a174c4e5584fea243383c3ce19b2f1bcf6492eb -Author: pvl-bot -Date: Sun Jun 16 23:16:31 2024 -0700 - - Add 4 lines to infinigen/assets/elements/doors/casing.py. Contributed as part of Infinigen-Indoors by Karhan Kaan Kayan. - -commit 09a38e9b593111d8790e266485174aebfb2b3fcd -Author: pvl-bot -Date: Sun Jun 16 23:16:30 2024 -0700 - - Add 58 lines to infinigen/assets/elements/doors/casing.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit dfbf582c656f7c11976b9bd77079f49afdc91928 -Author: pvl-bot -Date: Sun Jun 16 23:16:30 2024 -0700 - - Add 4 lines to infinigen/assets/elements/doors/base.py. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit e823115f6b324a16775df3cc563d9d3c83ccbf80 -Author: pvl-bot -Date: Sun Jun 16 23:16:30 2024 -0700 - - Add 5 lines to infinigen/assets/elements/doors/base.py. Contributed as part of Infinigen-Indoors by Karhan Kaan Kayan. - -commit 67cb84726faf578bd2c415376dfe5dcfd08289ae -Author: pvl-bot -Date: Sun Jun 16 23:16:30 2024 -0700 - - Add 208 lines to infinigen/assets/elements/doors/base.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit f40a580f07936e4ef7a317f04cf6d80d4bf983e3 -Author: pvl-bot -Date: Sun Jun 16 23:16:30 2024 -0700 - - Add 92 lines to infinigen/assets/elements/doors/panel.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit b988978d154121e172003b9443f2864f8e141ce2 -Author: pvl-bot -Date: Sun Jun 16 23:16:30 2024 -0700 - - Add 55 lines to infinigen/assets/elements/doors/lite.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit 18592ec491d767ed7d0a1f190e5750db5ccf414a -Author: pvl-bot -Date: Sun Jun 16 23:16:30 2024 -0700 - - Add 80 lines to infinigen/assets/elements/doors/louver.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit 664c4a4d48c23840bbb4abfc5363610097a541b0 -Author: pvl-bot -Date: Sun Jun 16 23:16:30 2024 -0700 - - Add 33 lines to infinigen/assets/elements/doors/__init__.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit 23295ac304f69ba0e02409b51a5f3aceda16e26f -Author: pvl-bot -Date: Sun Jun 16 23:16:30 2024 -0700 - - Add 3 lines to infinigen/assets/elements/staircases/spiral.py. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit ee6412ee2b226388442f382ddec68a975c930ac7 -Author: pvl-bot -Date: Sun Jun 16 23:16:30 2024 -0700 - - Add 4 lines to infinigen/assets/elements/staircases/spiral.py. Contributed as part of Infinigen-Indoors by Karhan Kaan Kayan. - -commit 644c10af55d2ef92db16bfeed1b763bf7fd244ef -Author: pvl-bot -Date: Sun Jun 16 23:16:30 2024 -0700 - - Add 55 lines to infinigen/assets/elements/staircases/spiral.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit b635d284d52ab1f1805a8abf37f720288aff248e -Author: pvl-bot -Date: Sun Jun 16 23:16:30 2024 -0700 - - Add 3 lines to infinigen/assets/elements/staircases/curved.py. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit e188fc64f29830bfd155b7323f7f19e113d3db7d -Author: pvl-bot -Date: Sun Jun 16 23:16:30 2024 -0700 - - Add 4 lines to infinigen/assets/elements/staircases/curved.py. Contributed as part of Infinigen-Indoors by Karhan Kaan Kayan. 
- -commit c0bcbe19e692117b9b3ee5be25066972b51a6c3a -Author: pvl-bot -Date: Sun Jun 16 23:16:30 2024 -0700 - - Add 60 lines to infinigen/assets/elements/staircases/curved.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit e7605ec909ac5ccf26f085cd0bfa21b6a36edf8d -Author: pvl-bot -Date: Sun Jun 16 23:16:30 2024 -0700 - - Add 3 lines to infinigen/assets/elements/staircases/straight.py. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit 79d2823a8ce6b20259cd0c026028873d93023998 -Author: pvl-bot -Date: Sun Jun 16 23:16:30 2024 -0700 - - Add 4 lines to infinigen/assets/elements/staircases/straight.py. Contributed as part of Infinigen-Indoors by Karhan Kaan Kayan. - -commit 01ecc4b07a8f014ff1539dfb12986d2450a38b93 -Author: pvl-bot -Date: Sun Jun 16 23:16:30 2024 -0700 - - Add 5 lines to infinigen/assets/elements/staircases/straight.py. Contributed as part of Infinigen-Indoors by Pvl Bot. - -commit 9edd3de55126c59dd961725c75fb669276697856 -Author: pvl-bot -Date: Sun Jun 16 23:16:30 2024 -0700 - - Add 592 lines to infinigen/assets/elements/staircases/straight.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit 65e63943c4c7477ffe866263c9846ee0c7951396 -Author: pvl-bot -Date: Sun Jun 16 23:16:30 2024 -0700 - - Add 32 lines to infinigen/assets/elements/staircases/cantilever.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit 662d39f85213136ab71d588ee9bfdf653bcf274d -Author: pvl-bot -Date: Sun Jun 16 23:16:30 2024 -0700 - - Add 18 lines to infinigen/assets/elements/staircases/__init__.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit c8217b58eadc8960102fa3b92d07bdf1a35dd1e4 -Author: pvl-bot -Date: Sun Jun 16 23:16:30 2024 -0700 - - Add 145 lines to infinigen/assets/elements/staircases/l_shaped.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit ebb5aad01803bcbe8226094b31e5ee999e30468d -Author: pvl-bot -Date: Sun Jun 16 23:16:30 2024 -0700 - - Add 33 lines to infinigen/assets/elements/staircases/generate.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit a21b6e90d804c65eaccc64aff5f0c7d3659b5833 -Author: pvl-bot -Date: Sun Jun 16 23:16:30 2024 -0700 - - Add 3 lines to infinigen/assets/elements/staircases/u_shaped.py. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit 0e6c023542651027401aa2715c84e86c08030d31 -Author: pvl-bot -Date: Sun Jun 16 23:16:30 2024 -0700 - - Add 4 lines to infinigen/assets/elements/staircases/u_shaped.py. Contributed as part of Infinigen-Indoors by Karhan Kaan Kayan. - -commit d0dbb0e43aefb3c4eba6b03bf2fa255fc960279a -Author: pvl-bot -Date: Sun Jun 16 23:16:30 2024 -0700 - - Add 151 lines to infinigen/assets/elements/staircases/u_shaped.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit 45e790e917ebafe2a640f9d42c551fc64ca38ab7 -Author: pvl-bot -Date: Sun Jun 16 23:16:30 2024 -0700 - - Add 6 lines to infinigen/assets/elements/warehouses/__init__.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit 6095f72899b84b5ad800a7c18d19a467d4f2fa5d -Author: pvl-bot -Date: Sun Jun 16 23:16:30 2024 -0700 - - Add 2 lines to infinigen/assets/elements/warehouses/pallet.py. Contributed as part of Infinigen-Indoors by Pvl Bot. - -commit d73fa6d20268beeb8c4c0c8149da937d9e05c0fa -Author: pvl-bot -Date: Sun Jun 16 23:16:30 2024 -0700 - - Add 85 lines to infinigen/assets/elements/warehouses/pallet.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. 
- -commit 403907546b33290bc72d3bb856b9a3c411695a56 -Author: pvl-bot -Date: Sun Jun 16 23:16:30 2024 -0700 - - Add 2 lines to infinigen/assets/elements/warehouses/rack.py. Contributed as part of Infinigen-Indoors by Pvl Bot. - -commit b239d307e18bf464a8eca2bbe9c95dbb5899eb5f -Author: pvl-bot -Date: Sun Jun 16 23:16:30 2024 -0700 - - Add 166 lines to infinigen/assets/elements/warehouses/rack.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit 1e34fd0a6657cdd155178275e5f55fd2749b3b2a -Author: pvl-bot -Date: Sun Jun 16 23:16:30 2024 -0700 - - Add 123 lines to infinigen/assets/elements/pillars.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit fe82da6c9595e6cfabb5fdfbffe89b1099ac8a5a -Author: pvl-bot -Date: Sun Jun 16 23:16:30 2024 -0700 - - Add 1 lines to infinigen/assets/elements/rug.py. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit 04df9e273de641025e9a389a808fb5f0710140c6 -Author: pvl-bot -Date: Sun Jun 16 23:16:30 2024 -0700 - - Add 5 lines to infinigen/assets/elements/rug.py. Contributed as part of Infinigen-Indoors by Meenal Parakh. - -commit 5217e1d9d3512f1e36c2ca3f4bd7e70c0cce54d0 -Author: pvl-bot -Date: Sun Jun 16 23:16:30 2024 -0700 - - Add 58 lines to infinigen/assets/elements/rug.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit ea5ee3624aebaaee3d8f02d3d452e36d990dddea -Author: pvl-bot -Date: Sun Jun 16 23:16:29 2024 -0700 - - Add 1 lines to infinigen/assets/elements/__init__.py. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit 20196bdbb1954245ca79a66ed3974510e6a94456 -Author: pvl-bot -Date: Sun Jun 16 23:16:29 2024 -0700 - - Add 9 lines to infinigen/assets/elements/__init__.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit 8666934337e5f38686a12de490cf394362a85738 -Author: pvl-bot -Date: Sun Jun 16 23:16:29 2024 -0700 - - Add 8 lines to infinigen/assets/utils/uv.py. Contributed as part of Infinigen-Indoors by Pvl Bot. - -commit f3618de2748eebac61457c3fc2a7eb6aa7fea9a9 -Author: pvl-bot -Date: Sun Jun 16 23:16:29 2024 -0700 - - Add 193 lines to infinigen/assets/utils/uv.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit a31095485520b02dc5ece45ecd70c26a51b71c65 -Author: pvl-bot -Date: Sun Jun 16 23:16:29 2024 -0700 - - Add 43 lines to infinigen/assets/utils/extract_nodegroup_parts.py. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit dd6f5c0abad11a70d927bdd1648248d34452721d -Author: pvl-bot -Date: Sun Jun 16 23:16:29 2024 -0700 - - Add 53 lines to infinigen/assets/utils/autobevel.py. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit 0fe354525ad8d697030e48a8d4b8170cc5978a7e -Author: pvl-bot -Date: Sun Jun 16 23:16:29 2024 -0700 - - Add 142 lines to infinigen/assets/utils/shapes.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit c19ae3b741e9abcd6f57c2c7b4a9a85ad0e7c978 -Author: pvl-bot -Date: Sun Jun 16 23:16:29 2024 -0700 - - Add 87 lines to infinigen/assets/utils/bbox_from_mesh.py. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit fd4750ea9cdd640d1bead4123862fc58e702c813 -Author: pvl-bot -Date: Sun Jun 16 23:16:29 2024 -0700 - - Add 3 lines to infinigen/assets/wall_decorations/balloon.py. Contributed as part of Infinigen-Indoors by Meenal Parakh. - -commit 70dec85102d7e06a7c184f874d65f4a5d1b9a1c3 -Author: pvl-bot -Date: Sun Jun 16 23:16:29 2024 -0700 - - Add 75 lines to infinigen/assets/wall_decorations/balloon.py. 
Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit 827c956af9ffe26163a15f3ce028882451a1b63f -Author: pvl-bot -Date: Sun Jun 16 23:16:29 2024 -0700 - - Add 1 lines to infinigen/assets/wall_decorations/skirting_board.py. Contributed as part of Infinigen-Indoors by Pvl Bot. - -commit f1196a2bbea49300cdf930048952fd2348510dd5 -Author: pvl-bot -Date: Sun Jun 16 23:16:29 2024 -0700 - - Add 18 lines to infinigen/assets/wall_decorations/skirting_board.py. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit 8408175e69922f09dfae7303e3910cdd65cbb3fb -Author: pvl-bot -Date: Sun Jun 16 23:16:29 2024 -0700 - - Add 67 lines to infinigen/assets/wall_decorations/skirting_board.py. Contributed as part of Infinigen-Indoors by Yiming Zuo. - -commit 0842cb85156afa5cd2134b40789e999760b484eb -Author: pvl-bot -Date: Sun Jun 16 23:16:29 2024 -0700 - - Add 200 lines to infinigen/assets/wall_decorations/skirting_board.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit 73e73f8501fb83b2124e07f31e21474d3032679c -Author: pvl-bot -Date: Sun Jun 16 23:16:29 2024 -0700 - - Add 4 lines to infinigen/assets/wall_decorations/wall_art.py. Contributed as part of Infinigen-Indoors by Meenal Parakh. - -commit b774582124d00c9f5189389641081efe1e62010d -Author: pvl-bot -Date: Sun Jun 16 23:16:29 2024 -0700 - - Add 13 lines to infinigen/assets/wall_decorations/wall_art.py. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit 0e3c27bcca6f5784c424b557c2c12f8f28eb6e43 -Author: pvl-bot -Date: Sun Jun 16 23:16:29 2024 -0700 - - Add 73 lines to infinigen/assets/wall_decorations/wall_art.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit 9b5e875b969e9cd171c2bb134765b4ed03c7cebf -Author: pvl-bot -Date: Sun Jun 16 23:16:29 2024 -0700 - - Add 8 lines to infinigen/assets/wall_decorations/__init__.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit c8b575819e9c53cada27d62494de47834a00824d -Author: pvl-bot -Date: Sun Jun 16 23:16:29 2024 -0700 - - Add 126 lines to infinigen/assets/wall_decorations/wall_shelf.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit 339c93bafe702eaf0cf8178de5e134c2d6cc9f86 -Author: pvl-bot -Date: Sun Jun 16 23:16:29 2024 -0700 - - Add 14 lines to infinigen/assets/wall_decorations/range_hood.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit dcb850eb3e113c62b97dabdb10fb2c017c53cc4b -Author: pvl-bot -Date: Sun Jun 16 23:16:29 2024 -0700 - - Add 20 lines to infinigen/assets/wall_decorations/range_hood.py. Contributed as part of Infinigen-Indoors by Meenal Parakh. - -commit 610416aee0efe06f47b8b19a2354e24b64220d1a -Author: pvl-bot -Date: Sun Jun 16 23:16:29 2024 -0700 - - Add 157 lines to infinigen/assets/wall_decorations/range_hood.py. Contributed as part of Infinigen-Indoors by Yiming Zuo. - -commit 214f6613e5a7b14c9e10d678cf4915f9794c2463 -Author: pvl-bot -Date: Sun Jun 16 23:16:29 2024 -0700 - - Add 2 lines to infinigen/assets/organizer/basket.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit b7222b97d93b94bd856259eea4cdfa7a071b205c -Author: pvl-bot -Date: Sun Jun 16 23:16:29 2024 -0700 - - Add 304 lines to infinigen/assets/organizer/basket.py. Contributed as part of Infinigen-Indoors by Beining Han. - -commit b0a60402c9290eef410323fc7f5bffc7020520fb -Author: pvl-bot -Date: Sun Jun 16 23:16:29 2024 -0700 - - Add 9 lines to infinigen/assets/organizer/__init__.py. Contributed as part of Infinigen-Indoors by Beining Han. 
- -commit e86fd25ea089d46cbf1b9e718a458b55c64bb331 -Author: pvl-bot -Date: Sun Jun 16 23:16:29 2024 -0700 - - Add 3 lines to infinigen/assets/organizer/hook.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit 2a2cc9419a8e7644666ba2192b92bbae84ebbb4f -Author: pvl-bot -Date: Sun Jun 16 23:16:29 2024 -0700 - - Add 384 lines to infinigen/assets/organizer/hook.py. Contributed as part of Infinigen-Indoors by Beining Han. - -commit 8b1ad27d5042c2af6ac6150766bb4f6c19297ec1 -Author: pvl-bot -Date: Sun Jun 16 23:16:29 2024 -0700 - - Add 2 lines to infinigen/assets/organizer/plate_rack.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit d21911b57df052d0635368e842fa1018838d656c -Author: pvl-bot -Date: Sun Jun 16 23:16:29 2024 -0700 - - Add 333 lines to infinigen/assets/organizer/plate_rack.py. Contributed as part of Infinigen-Indoors by Beining Han. - -commit fc36d0de783f0fd1fedbf095f233f1af3d28f3f5 -Author: pvl-bot -Date: Sun Jun 16 23:16:29 2024 -0700 - - Add 1 lines to infinigen/assets/appliances/oven.py. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit 2e5601f935772a8b6705c1acc0ade2fbb19a0ed4 -Author: pvl-bot -Date: Sun Jun 16 23:16:29 2024 -0700 - - Add 32 lines to infinigen/assets/appliances/oven.py. Contributed as part of Infinigen-Indoors by David Yan. - -commit de924b79140f35fe02778cee2bc3e4de444e7e47 -Author: pvl-bot -Date: Sun Jun 16 23:16:29 2024 -0700 - - Add 48 lines to infinigen/assets/appliances/oven.py. Contributed as part of Infinigen-Indoors by Meenal Parakh. - -commit 8ec2597f07d4ca21d58a8139fa50929f2017208a -Author: pvl-bot -Date: Sun Jun 16 23:16:28 2024 -0700 - - Add 172 lines to infinigen/assets/appliances/oven.py. Contributed as part of Infinigen-Indoors by Zeyu Ma. - -commit d9bee11e8a63829f6693af71c8e58120733807dc -Author: pvl-bot -Date: Sun Jun 16 23:16:28 2024 -0700 - - Add 336 lines to infinigen/assets/appliances/oven.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit b8f867ba511a7a7567c904e322ac732f68ecc807 -Author: pvl-bot -Date: Sun Jun 16 23:16:28 2024 -0700 - - Add 626 lines to infinigen/assets/appliances/oven.py. Contributed as part of Infinigen-Indoors by Hongyu Wen. - -commit 2b64c425c2d3febcb39841bec831b101731ed4de -Author: pvl-bot -Date: Sun Jun 16 23:16:28 2024 -0700 - - Add 4 lines to infinigen/assets/appliances/tv.py. Contributed as part of Infinigen-Indoors by Karhan Kaan Kayan. - -commit e67d3495bc2924f81722e6b0da320e5ae09c355b -Author: pvl-bot -Date: Sun Jun 16 23:16:28 2024 -0700 - - Add 5 lines to infinigen/assets/appliances/tv.py. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit 4799521049fb41d543dd5d1200b00c841d6d7f85 -Author: pvl-bot -Date: Sun Jun 16 23:16:28 2024 -0700 - - Add 21 lines to infinigen/assets/appliances/tv.py. Contributed as part of Infinigen-Indoors by Meenal Parakh. - -commit 25992cd5d74711dd80d77009fb519c0d90a638b9 -Author: pvl-bot -Date: Sun Jun 16 23:16:28 2024 -0700 - - Add 191 lines to infinigen/assets/appliances/tv.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit 0265f6788749baf227f69a7664ea8e64df10a864 -Author: pvl-bot -Date: Sun Jun 16 23:16:28 2024 -0700 - - Add 1 lines to infinigen/assets/appliances/__init__.py. Contributed as part of Infinigen-Indoors by Pvl Bot. - -commit bfa4527c8801880c0a5c30b07e9d53c10c0c7182 -Author: pvl-bot -Date: Sun Jun 16 23:16:28 2024 -0700 - - Add 2 lines to infinigen/assets/appliances/__init__.py. Contributed as part of Infinigen-Indoors by Hongyu Wen. 
- -commit 81758e1b71406d33a2f7fc22fd5ba3c4ff2fef55 -Author: pvl-bot -Date: Sun Jun 16 23:16:28 2024 -0700 - - Add 2 lines to infinigen/assets/appliances/__init__.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit 85b8c3ff277b555afcefc7c5e966319ab77db9b0 -Author: pvl-bot -Date: Sun Jun 16 23:16:28 2024 -0700 - - Add 26 lines to infinigen/assets/appliances/microwave.py. Contributed as part of Infinigen-Indoors by Zeyu Ma. - -commit 379bf0e8ea6906515d9ff926f0ecc5acc01cd336 -Author: pvl-bot -Date: Sun Jun 16 23:16:28 2024 -0700 - - Add 41 lines to infinigen/assets/appliances/microwave.py. Contributed as part of Infinigen-Indoors by Meenal Parakh. - -commit d681b3936397cc62f6492d82c099652536f49164 -Author: pvl-bot -Date: Sun Jun 16 23:16:28 2024 -0700 - - Add 118 lines to infinigen/assets/appliances/microwave.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit d774272384c322edbeea24f9114c5992f0cb37e7 -Author: pvl-bot -Date: Sun Jun 16 23:16:28 2024 -0700 - - Add 262 lines to infinigen/assets/appliances/microwave.py. Contributed as part of Infinigen-Indoors by Hongyu Wen. - -commit 374cbbaac1c4eea9c6d91e7ba2ce1d36319d4c36 -Author: pvl-bot -Date: Sun Jun 16 23:16:28 2024 -0700 - - Add 37 lines to infinigen/assets/appliances/dishwasher.py. Contributed as part of Infinigen-Indoors by Zeyu Ma. - -commit fd831e3cbaccad056d94cbb3143ae7659471023a -Author: pvl-bot -Date: Sun Jun 16 23:16:28 2024 -0700 - - Add 57 lines to infinigen/assets/appliances/dishwasher.py. Contributed as part of Infinigen-Indoors by Meenal Parakh. - -commit f04c5ed58c8c41f3b76df5376a34149d1b8cbeab -Author: pvl-bot -Date: Sun Jun 16 23:16:28 2024 -0700 - - Add 191 lines to infinigen/assets/appliances/dishwasher.py. Contributed as part of Infinigen-Indoors by Hongyu Wen. - -commit dfbf77ac8e0e5772687d11ea72705c5791ce8fb0 -Author: pvl-bot -Date: Sun Jun 16 23:16:28 2024 -0700 - - Add 644 lines to infinigen/assets/appliances/dishwasher.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit 07d44cc326a5973bdd5274807e18686205e994e4 -Author: pvl-bot -Date: Sun Jun 16 23:16:28 2024 -0700 - - Add 1 lines to infinigen/assets/appliances/beverage_fridge.py. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit 81af6f1ba6fab5799409f6d16490dc95488cc88f -Author: pvl-bot -Date: Sun Jun 16 23:16:28 2024 -0700 - - Add 35 lines to infinigen/assets/appliances/beverage_fridge.py. Contributed as part of Infinigen-Indoors by Zeyu Ma. - -commit d1b0d664f5fc10fe7bd9a2bde80f39261e998085 -Author: pvl-bot -Date: Sun Jun 16 23:16:28 2024 -0700 - - Add 42 lines to infinigen/assets/appliances/beverage_fridge.py. Contributed as part of Infinigen-Indoors by Meenal Parakh. - -commit cc2306691abc1134c6dc47fccf6d3d0f0c4fee2c -Author: pvl-bot -Date: Sun Jun 16 23:16:28 2024 -0700 - - Add 223 lines to infinigen/assets/appliances/beverage_fridge.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit 9dfe85ec2044f8d6c7036b4e29ee61f4ab2d442b -Author: pvl-bot -Date: Sun Jun 16 23:16:28 2024 -0700 - - Add 434 lines to infinigen/assets/appliances/beverage_fridge.py. Contributed as part of Infinigen-Indoors by Hongyu Wen. - -commit c9a029331d47dfab5af97347a031ade62b0896a5 -Author: pvl-bot -Date: Sun Jun 16 23:16:28 2024 -0700 - - Add 1 lines to infinigen/assets/shelves/doors.py. Contributed as part of Infinigen-Indoors by Alexander Raistrick. 
- -commit 315fa23a929e3d19ecbf9d7ba7abe40aab81764f -Author: pvl-bot -Date: Sun Jun 16 23:16:28 2024 -0700 - - Add 2 lines to infinigen/assets/shelves/doors.py. Contributed as part of Infinigen-Indoors by Pvl Bot. - -commit b7a4432f6f585f385a8b62aeb5827c03474663e4 -Author: pvl-bot -Date: Sun Jun 16 23:16:28 2024 -0700 - - Add 6 lines to infinigen/assets/shelves/doors.py. Contributed as part of Infinigen-Indoors by Yiming Zuo. - -commit d92e73376f735bcf8d0c197783d94695d9198ab4 -Author: pvl-bot -Date: Sun Jun 16 23:16:28 2024 -0700 - - Add 6 lines to infinigen/assets/shelves/doors.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit ce87ddafd34eabd3636d0a4fda3c51b0fa180332 -Author: pvl-bot -Date: Sun Jun 16 23:16:28 2024 -0700 - - Add 724 lines to infinigen/assets/shelves/doors.py. Contributed as part of Infinigen-Indoors by Beining Han. - -commit 34d20a32898db715b2010b0ab81d6d0055b9c488 -Author: pvl-bot -Date: Sun Jun 16 23:16:28 2024 -0700 - - Add 3 lines to infinigen/assets/shelves/kitchen_cabinet.py. Contributed as part of Infinigen-Indoors by Pvl Bot. - -commit 7115cd353c1bbac5725b96a51dbcfc605fcfbdb2 -Author: pvl-bot -Date: Sun Jun 16 23:16:28 2024 -0700 - - Add 7 lines to infinigen/assets/shelves/kitchen_cabinet.py. Contributed as part of Infinigen-Indoors by David Yan. - -commit cc1a3aeca3404dcd57ae3001e7790dcfc12e8653 -Author: pvl-bot -Date: Sun Jun 16 23:16:28 2024 -0700 - - Add 9 lines to infinigen/assets/shelves/kitchen_cabinet.py. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit 4f577be8db059becd357584229140c0044fe694b -Author: pvl-bot -Date: Sun Jun 16 23:16:28 2024 -0700 - - Add 13 lines to infinigen/assets/shelves/kitchen_cabinet.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit 96e55c262af4b0b7aaae608f1b9d12109b7d549b -Author: pvl-bot -Date: Sun Jun 16 23:16:28 2024 -0700 - - Add 16 lines to infinigen/assets/shelves/kitchen_cabinet.py. Contributed as part of Infinigen-Indoors by Yiming Zuo. - -commit 6774437197583f4ecd75f4295faf05c9fd487bd5 -Author: pvl-bot -Date: Sun Jun 16 23:16:27 2024 -0700 - - Add 280 lines to infinigen/assets/shelves/kitchen_cabinet.py. Contributed as part of Infinigen-Indoors by Beining Han. - -commit 6cdc8add656110b67780bacad7a80c59168a1f41 -Author: pvl-bot -Date: Sun Jun 16 23:16:27 2024 -0700 - - Add 1 lines to infinigen/assets/shelves/single_cabinet.py. Contributed as part of Infinigen-Indoors by Pvl Bot. - -commit 57f309a93ce2982385f274c02232f8da5537a1c5 -Author: pvl-bot -Date: Sun Jun 16 23:16:27 2024 -0700 - - Add 3 lines to infinigen/assets/shelves/single_cabinet.py. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit ab10ebe25105e3c4cb2b9021ae58f929c4e348e6 -Author: pvl-bot -Date: Sun Jun 16 23:16:27 2024 -0700 - - Add 7 lines to infinigen/assets/shelves/single_cabinet.py. Contributed as part of Infinigen-Indoors by David Yan. - -commit 807b5ae03ffee1915c9b70879e9ae22716eebfc7 -Author: pvl-bot -Date: Sun Jun 16 23:16:27 2024 -0700 - - Add 9 lines to infinigen/assets/shelves/single_cabinet.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit 746130cc91509a702372fb73bafe87fffc82d89a -Author: pvl-bot -Date: Sun Jun 16 23:16:27 2024 -0700 - - Add 215 lines to infinigen/assets/shelves/single_cabinet.py. Contributed as part of Infinigen-Indoors by Beining Han. - -commit 34ce110ad247a979ff084aeb157632f7b918bfc2 -Author: pvl-bot -Date: Sun Jun 16 23:16:27 2024 -0700 - - Add 2 lines to infinigen/assets/shelves/triangle_shelf.py. 
Contributed as part of Infinigen-Indoors by Pvl Bot. - -commit dd191be0b76fcd7d89dc78c7303f2c4b8d5113d9 -Author: pvl-bot -Date: Sun Jun 16 23:16:27 2024 -0700 - - Add 2 lines to infinigen/assets/shelves/triangle_shelf.py. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit 5a83521268033ec429ffb602eacb6228ba614663 -Author: pvl-bot -Date: Sun Jun 16 23:16:27 2024 -0700 - - Add 340 lines to infinigen/assets/shelves/triangle_shelf.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit 349792226fdbc18192ce20105d4157119b77272a -Author: pvl-bot -Date: Sun Jun 16 23:16:27 2024 -0700 - - Add 526 lines to infinigen/assets/shelves/triangle_shelf.py. Contributed as part of Infinigen-Indoors by Beining Han. - -commit 5d8cfa5c8b79e23da3069153e05c039a0869df4d -Author: pvl-bot -Date: Sun Jun 16 23:16:27 2024 -0700 - - Add 2 lines to infinigen/assets/shelves/simple_desk.py. Contributed as part of Infinigen-Indoors by Pvl Bot. - -commit eb7a35d058bba82496ea4279711b47241ad3560b -Author: pvl-bot -Date: Sun Jun 16 23:16:27 2024 -0700 - - Add 11 lines to infinigen/assets/shelves/simple_desk.py. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit 426d5616410ee2debb5f755ae3ea227e39b245b0 -Author: pvl-bot -Date: Sun Jun 16 23:16:27 2024 -0700 - - Add 43 lines to infinigen/assets/shelves/simple_desk.py. Contributed as part of Infinigen-Indoors by Meenal Parakh. - -commit 927ccead91e369b8d8b898cec3335f0ed227db9a -Author: pvl-bot -Date: Sun Jun 16 23:16:27 2024 -0700 - - Add 211 lines to infinigen/assets/shelves/simple_desk.py. Contributed as part of Infinigen-Indoors by Beining Han. - -commit de585591a0a25d94f4fcd33305b3a88e0eb3b6a9 -Author: pvl-bot -Date: Sun Jun 16 23:16:27 2024 -0700 - - Add 154 lines to infinigen/assets/shelves/countertop.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit d380d10d68483cd2f6f18bbd73611d287571e8f9 -Author: pvl-bot -Date: Sun Jun 16 23:16:27 2024 -0700 - - Add 2 lines to infinigen/assets/shelves/utils.py. Contributed as part of Infinigen-Indoors by Pvl Bot. - -commit 92786223272dca3624d78db73fc085e30338ce2d -Author: pvl-bot -Date: Sun Jun 16 23:16:27 2024 -0700 - - Add 7 lines to infinigen/assets/shelves/utils.py. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit d4608181ffc6d32bcd000cc37b6716b94d5b5b19 -Author: pvl-bot -Date: Sun Jun 16 23:16:27 2024 -0700 - - Add 52 lines to infinigen/assets/shelves/utils.py. Contributed as part of Infinigen-Indoors by Beining Han. - -commit 7bd931a05212199cf33b3febbf4b0c44871f111d -Author: pvl-bot -Date: Sun Jun 16 23:16:27 2024 -0700 - - Add 2 lines to infinigen/assets/shelves/cabinet.py. Contributed as part of Infinigen-Indoors by Pvl Bot. - -commit 784999d4ce19cbe68f3219156bb5b5c06f2d4ef8 -Author: pvl-bot -Date: Sun Jun 16 23:16:27 2024 -0700 - - Add 264 lines to infinigen/assets/shelves/cabinet.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit 2d3cd29f4c5df2915a6e0d58357f0531a2bd0209 -Author: pvl-bot -Date: Sun Jun 16 23:16:27 2024 -0700 - - Add 737 lines to infinigen/assets/shelves/cabinet.py. Contributed as part of Infinigen-Indoors by Beining Han. - -commit b8416ef5fd478a78c17d3f4ba5267b9db0d2da38 -Author: pvl-bot -Date: Sun Jun 16 23:16:27 2024 -0700 - - Add 1 lines to infinigen/assets/shelves/kitchen_space.py. Contributed as part of Infinigen-Indoors by David Yan. 
- -commit c37d776faa5b6c34a1a62aec0f103b8d30215341 -Author: pvl-bot -Date: Sun Jun 16 23:16:27 2024 -0700 - - Add 1 lines to infinigen/assets/shelves/kitchen_space.py. Contributed as part of Infinigen-Indoors by Meenal Parakh. - -commit 32e51aa4e39729b894318f63fc78759754525116 -Author: pvl-bot -Date: Sun Jun 16 23:16:27 2024 -0700 - - Add 2 lines to infinigen/assets/shelves/kitchen_space.py. Contributed as part of Infinigen-Indoors by Pvl Bot. - -commit e160187fe604413e3832a5b3a90612563a2786cc -Author: pvl-bot -Date: Sun Jun 16 23:16:27 2024 -0700 - - Add 5 lines to infinigen/assets/shelves/kitchen_space.py. Contributed as part of Infinigen-Indoors by Stamatis Alexandropoulos. - -commit 21fe61b9328ede5c760bdce63e96bb9c5e96af4f -Author: pvl-bot -Date: Sun Jun 16 23:16:27 2024 -0700 - - Add 6 lines to infinigen/assets/shelves/kitchen_space.py. Contributed as part of Infinigen-Indoors by Karhan Kaan Kayan. - -commit 13f9825b0a65293f5494151a2011856dcdbff28b -Author: pvl-bot -Date: Sun Jun 16 23:16:27 2024 -0700 - - Add 68 lines to infinigen/assets/shelves/kitchen_space.py. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit 9404ff0db2554c8e83f9f1531b912a9f6a1e7ef8 -Author: pvl-bot -Date: Sun Jun 16 23:16:27 2024 -0700 - - Add 137 lines to infinigen/assets/shelves/kitchen_space.py. Contributed as part of Infinigen-Indoors by Yiming Zuo. - -commit d33d7dc817b892a52c8aaa4aed6c890b003b7aae -Author: pvl-bot -Date: Sun Jun 16 23:16:27 2024 -0700 - - Add 1 lines to infinigen/assets/shelves/__init__.py. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit 3c99cc40045179181c93a675c84d9598ef74b704 -Author: pvl-bot -Date: Sun Jun 16 23:16:27 2024 -0700 - - Add 1 lines to infinigen/assets/shelves/__init__.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit 089236016e23143ef0203da5084441a19a6879b7 -Author: pvl-bot -Date: Sun Jun 16 23:16:27 2024 -0700 - - Add 8 lines to infinigen/assets/shelves/__init__.py. Contributed as part of Infinigen-Indoors by Beining Han. - -commit 4fcf6d2b11ba4bf754d5bdcbf0f168457b50359f -Author: pvl-bot -Date: Sun Jun 16 23:16:27 2024 -0700 - - Add 1 lines to infinigen/assets/shelves/large_shelf.py. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit 051d5676fcbc3d18491bae10975674913a7a7977 -Author: pvl-bot -Date: Sun Jun 16 23:16:27 2024 -0700 - - Add 1 lines to infinigen/assets/shelves/large_shelf.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit 3827bdf2cb56f77cb9231d1a35e8e2225ef2b29d -Author: pvl-bot -Date: Sun Jun 16 23:16:27 2024 -0700 - - Add 2 lines to infinigen/assets/shelves/large_shelf.py. Contributed as part of Infinigen-Indoors by Pvl Bot. - -commit db936bbe8740e8639ed913de8791e568f7670ffb -Author: pvl-bot -Date: Sun Jun 16 23:16:26 2024 -0700 - - Add 612 lines to infinigen/assets/shelves/large_shelf.py. Contributed as part of Infinigen-Indoors by Beining Han. - -commit b065b73beb885bda17926a45be88a1cfed855266 -Author: pvl-bot -Date: Sun Jun 16 23:16:26 2024 -0700 - - Add 1 lines to infinigen/assets/shelves/simple_bookcase.py. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit ba6dc0c4458fb68ee2c641bf774426165e08f25f -Author: pvl-bot -Date: Sun Jun 16 23:16:26 2024 -0700 - - Add 2 lines to infinigen/assets/shelves/simple_bookcase.py. Contributed as part of Infinigen-Indoors by Pvl Bot. 
- -commit e1fab8bbeaa6e15eaa83e32f2f0ef017db1ab6a9 -Author: pvl-bot -Date: Sun Jun 16 23:16:26 2024 -0700 - - Add 159 lines to infinigen/assets/shelves/simple_bookcase.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit 0f92df4aaccf410323c45b22fd89c24a7d7019de -Author: pvl-bot -Date: Sun Jun 16 23:16:26 2024 -0700 - - Add 356 lines to infinigen/assets/shelves/simple_bookcase.py. Contributed as part of Infinigen-Indoors by Beining Han. - -commit dfe9857289f6a1714c6291b9ab584e7785eef8a4 -Author: pvl-bot -Date: Sun Jun 16 23:16:26 2024 -0700 - - Add 1 lines to infinigen/assets/shelves/drawers.py. Contributed as part of Infinigen-Indoors by Pvl Bot. - -commit 25f3196595889e639620068731645520c741895d -Author: pvl-bot -Date: Sun Jun 16 23:16:26 2024 -0700 - - Add 1 lines to infinigen/assets/shelves/drawers.py. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit 37370c336faf3ab069927173000e7bb3925027a5 -Author: pvl-bot -Date: Sun Jun 16 23:16:26 2024 -0700 - - Add 4 lines to infinigen/assets/shelves/drawers.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit baeebb9fe2c85dae745a3e44b16bfd7b712176c7 -Author: pvl-bot -Date: Sun Jun 16 23:16:26 2024 -0700 - - Add 410 lines to infinigen/assets/shelves/drawers.py. Contributed as part of Infinigen-Indoors by Beining Han. - -commit 90269abe640d3956f2a01b8ab742b37042269655 -Author: pvl-bot -Date: Sun Jun 16 23:16:26 2024 -0700 - - Add 2 lines to infinigen/assets/shelves/cell_shelf.py. Contributed as part of Infinigen-Indoors by Pvl Bot. - -commit 08824a214e568656d16cddb704b9cf8dd1bb0845 -Author: pvl-bot -Date: Sun Jun 16 23:16:26 2024 -0700 - - Add 3 lines to infinigen/assets/shelves/cell_shelf.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit af3b2672c245d8f8e364e765fe417a824c9e5f4a -Author: pvl-bot -Date: Sun Jun 16 23:16:26 2024 -0700 - - Add 9 lines to infinigen/assets/shelves/cell_shelf.py. Contributed as part of Infinigen-Indoors by David Yan. - -commit f04b47fb35ac82e1a2ab2bc94abe40843e777df1 -Author: pvl-bot -Date: Sun Jun 16 23:16:26 2024 -0700 - - Add 36 lines to infinigen/assets/shelves/cell_shelf.py. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit afc2fbc10eb643bc29fc880ef56a98cffbc88ffd -Author: pvl-bot -Date: Sun Jun 16 23:16:26 2024 -0700 - - Add 850 lines to infinigen/assets/shelves/cell_shelf.py. Contributed as part of Infinigen-Indoors by Beining Han. - -commit c82980a2394e23e9e49c93776f27e64967ee4a15 -Author: pvl-bot -Date: Sun Jun 16 23:16:26 2024 -0700 - - Add 6 lines to infinigen/assets/bathroom/toilet.py. Contributed as part of Infinigen-Indoors by Meenal Parakh. - -commit 10beed29137fe07be80d492be288e6c79a632ab5 -Author: pvl-bot -Date: Sun Jun 16 23:16:26 2024 -0700 - - Add 286 lines to infinigen/assets/bathroom/toilet.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit 25e5980e2c04f6281c86c3f4bf7026bc8dcfa711 -Author: pvl-bot -Date: Sun Jun 16 23:16:26 2024 -0700 - - Add 11 lines to infinigen/assets/bathroom/hardware.py. Contributed as part of Infinigen-Indoors by Meenal Parakh. - -commit a17feca0bbf006a8542ebf7b7d038e8f5d3659c7 -Author: pvl-bot -Date: Sun Jun 16 23:16:26 2024 -0700 - - Add 110 lines to infinigen/assets/bathroom/hardware.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit ad6bfec63e6e5ee2b301230df1f5c9f11d5f761f -Author: pvl-bot -Date: Sun Jun 16 23:16:26 2024 -0700 - - Add 13 lines to infinigen/assets/bathroom/bathtub.py. 
Contributed as part of Infinigen-Indoors by Meenal Parakh. - -commit d49a0a4ff75e9d1bc668e355279867e9675b56c0 -Author: pvl-bot -Date: Sun Jun 16 23:16:26 2024 -0700 - - Add 40 lines to infinigen/assets/bathroom/bathtub.py. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit 8013bd9f2114b3651c6510c5292a6b8eaaa6be9b -Author: pvl-bot -Date: Sun Jun 16 23:16:26 2024 -0700 - - Add 236 lines to infinigen/assets/bathroom/bathtub.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit 279e32494c2478cb90217a94ea605890bb59ea98 -Author: pvl-bot -Date: Sun Jun 16 23:16:26 2024 -0700 - - Add 8 lines to infinigen/assets/bathroom/__init__.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit debc696443bab1780457f9bddbe11281b8278002 -Author: pvl-bot -Date: Sun Jun 16 23:16:26 2024 -0700 - - Add 5 lines to infinigen/assets/bathroom/bathroom_sink.py. Contributed as part of Infinigen-Indoors by Meenal Parakh. - -commit 227a01d1219eaf2c6fe3d874296c7bc855f79db3 -Author: pvl-bot -Date: Sun Jun 16 23:16:26 2024 -0700 - - Add 138 lines to infinigen/assets/bathroom/bathroom_sink.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit 49ddc761ef592c24afa073110929264624adb507 -Author: pvl-bot -Date: Sun Jun 16 23:16:26 2024 -0700 - - Add 6 lines to infinigen/assets/clothes/shirt.py. Contributed as part of Infinigen-Indoors by Meenal Parakh. - -commit 268a5a1143fdfff604f9f4805fc3641697a22318 -Author: pvl-bot -Date: Sun Jun 16 23:16:26 2024 -0700 - - Add 65 lines to infinigen/assets/clothes/shirt.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit f2cff82c97098b5b5ad97d9cdb719bb526ab0ee1 -Author: pvl-bot -Date: Sun Jun 16 23:16:26 2024 -0700 - - Add 9 lines to infinigen/assets/clothes/__init__.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit c44d205f50af4d2a6b59e554930f1f9909206c8b -Author: pvl-bot -Date: Sun Jun 16 23:16:26 2024 -0700 - - Add 6 lines to infinigen/assets/clothes/towel.py. Contributed as part of Infinigen-Indoors by Meenal Parakh. - -commit 0bd622a3b3d22f8ad306a46f95cf5eb516bad5a4 -Author: pvl-bot -Date: Sun Jun 16 23:16:26 2024 -0700 - - Add 117 lines to infinigen/assets/clothes/towel.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit 1da9534a168e4d9cbeb7b890eb1bf2dede4f134c -Author: pvl-bot -Date: Sun Jun 16 23:16:26 2024 -0700 - - Add 6 lines to infinigen/assets/clothes/blanket.py. Contributed as part of Infinigen-Indoors by Meenal Parakh. - -commit 4cbf5b737505da2094fa487bbb132395b06740cc -Author: pvl-bot -Date: Sun Jun 16 23:16:26 2024 -0700 - - Add 73 lines to infinigen/assets/clothes/blanket.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit 358952c780386d3e781d8d8a181d984472f9258b -Author: pvl-bot -Date: Sun Jun 16 23:16:26 2024 -0700 - - Add 6 lines to infinigen/assets/clothes/pants.py. Contributed as part of Infinigen-Indoors by Meenal Parakh. - -commit 71be8d430897865bc395745e86fddee4844b6469 -Author: pvl-bot -Date: Sun Jun 16 23:16:26 2024 -0700 - - Add 60 lines to infinigen/assets/clothes/pants.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit 04f9c87338fc11cf1e51faf34086894d3f4cf01a -Author: pvl-bot -Date: Sun Jun 16 23:16:26 2024 -0700 - - Add 2 lines to infinigen/assets/materials/woods/wood_old.py. Contributed as part of Infinigen-Indoors by Alexander Raistrick. 
- -commit 29fe4d06acf7a2a939df8ee43f58613afa989a7c -Author: pvl-bot -Date: Sun Jun 16 23:16:25 2024 -0700 - - Add 3 lines to infinigen/assets/materials/woods/wood_old.py. Contributed as part of Infinigen-Indoors by Pvl Bot. - -commit 2f2d8de76790e0cd3680d4a9f3571825586b2ed2 -Author: pvl-bot -Date: Sun Jun 16 23:16:25 2024 -0700 - - Add 21 lines to infinigen/assets/materials/woods/wood_old.py. Contributed as part of Infinigen-Indoors by Mingzhe Wang. - -commit e32838a35a025ab3cf247396ea093f04261be5d8 -Author: pvl-bot -Date: Sun Jun 16 23:16:25 2024 -0700 - - Add 50 lines to infinigen/assets/materials/woods/wood_old.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit 270216a50b244df40a91790bfd101ef705065868 -Author: pvl-bot -Date: Sun Jun 16 23:16:25 2024 -0700 - - Add 15 lines to infinigen/assets/materials/woods/wood_tile.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit 69c9c5cbfb2a45e8ebc23d0a9db15c18b7b3aae7 -Author: pvl-bot -Date: Sun Jun 16 23:16:25 2024 -0700 - - Add 12 lines to infinigen/assets/materials/woods/hexagon_wood_tile.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit ed6fb7f2182f299215b79cbf56fd9e9559a14cde -Author: pvl-bot -Date: Sun Jun 16 23:16:25 2024 -0700 - - Add 18 lines to infinigen/assets/materials/woods/composite_wood_tile.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit 1b8d1fa021ea40e8e66e5c0ebc70f89827afc08f -Author: pvl-bot -Date: Sun Jun 16 23:16:25 2024 -0700 - - Add 22 lines to infinigen/assets/materials/woods/non_wood_tile.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit 5fdad3f7345bff430745b5636addf9eb74609473 -Author: pvl-bot -Date: Sun Jun 16 23:16:25 2024 -0700 - - Add 14 lines to infinigen/assets/materials/woods/square_wood_tile.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit 87cb2917af93261415e1287b41f56d4114138079 -Author: pvl-bot -Date: Sun Jun 16 23:16:25 2024 -0700 - - Add 6 lines to infinigen/assets/materials/woods/wood.py. Contributed as part of Infinigen-Indoors by Yiming Zuo. - -commit 182bc877b764dc7cf01219841e5c02a4ee1998ea -Author: pvl-bot -Date: Sun Jun 16 23:16:25 2024 -0700 - - Add 7 lines to infinigen/assets/materials/woods/wood.py. Contributed as part of Infinigen-Indoors by Pvl Bot. - -commit 62d5b7c3c606c680d0354d8c6634142e9d96fc44 -Author: pvl-bot -Date: Sun Jun 16 23:16:25 2024 -0700 - - Add 133 lines to infinigen/assets/materials/woods/wood.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit 8814ffb16eaa9388e2b94ba25848803e484270fa -Author: pvl-bot -Date: Sun Jun 16 23:16:25 2024 -0700 - - Add 12 lines to infinigen/assets/materials/woods/staggered_wood_tile.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit 86dbcb903bcb1e89949575f36ad7d3aacae7bae8 -Author: pvl-bot -Date: Sun Jun 16 23:16:25 2024 -0700 - - Add 14 lines to infinigen/assets/materials/woods/crossed_wood_tile.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit 230bf146b4172180487ad2f7b9cb2e50a71c2977 -Author: pvl-bot -Date: Sun Jun 16 23:16:25 2024 -0700 - - Add 3 lines to infinigen/assets/materials/woods/tiled_wood.py. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit f1ec5bf90e94173b0ff4968f6595917efeae255e -Author: pvl-bot -Date: Sun Jun 16 23:16:25 2024 -0700 - - Add 11 lines to infinigen/assets/materials/woods/tiled_wood.py. Contributed as part of Infinigen-Indoors by Beining Han. 
- -commit dd0493e9c617a1454769d11c42e7546f1f2b9711 -Author: pvl-bot -Date: Sun Jun 16 23:16:25 2024 -0700 - - Add 58 lines to infinigen/assets/materials/woods/tiled_wood.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit 6a5e880802f69cf527aa0408b4ed7959f270fc5b -Author: pvl-bot -Date: Sun Jun 16 23:16:25 2024 -0700 - - Add 116 lines to infinigen/assets/materials/woods/tiled_wood.py. Contributed as part of Infinigen-Indoors by Yiming Zuo. - -commit 9109359d3becc8acaddbb635e0f5a279b0cdccf5 -Author: pvl-bot -Date: Sun Jun 16 23:16:25 2024 -0700 - - Add 63 lines to infinigen/assets/materials/stone_and_concrete/concrete.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit 25bc986e937c6dc04fe1448cd6c7bc6d4093fc2a -Author: pvl-bot -Date: Sun Jun 16 23:16:25 2024 -0700 - - Add 179 lines to infinigen/assets/materials/stone_and_concrete/concrete.py. Contributed as part of Infinigen-Indoors by Yiming Zuo. - -commit 6b8a4f2172bcbaeb5d86d030cadf587940996bc0 -Author: pvl-bot -Date: Sun Jun 16 23:16:25 2024 -0700 - - Add 20 lines to infinigen/assets/materials/metal/brushed_metal.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit e3d2916af692425917bcf36b290d1fef9ebef25c -Author: pvl-bot -Date: Sun Jun 16 23:16:25 2024 -0700 - - Add 67 lines to infinigen/assets/materials/metal/brushed_metal.py. Contributed as part of Infinigen-Indoors by Yiming Zuo. - -commit 434a7339f13889e9a700164f7a469dee7b5e7313 -Author: pvl-bot -Date: Sun Jun 16 23:16:25 2024 -0700 - - Add 20 lines to infinigen/assets/materials/metal/grained_and_polished_metal.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit 9776d5acb7109f62b18fe624f76215b305947f6c -Author: pvl-bot -Date: Sun Jun 16 23:16:25 2024 -0700 - - Add 59 lines to infinigen/assets/materials/metal/grained_and_polished_metal.py. Contributed as part of Infinigen-Indoors by Yiming Zuo. - -commit 49651d254f283c736ac078d74b57004ce5374269 -Author: pvl-bot -Date: Sun Jun 16 23:16:25 2024 -0700 - - Add 56 lines to infinigen/assets/materials/metal/__init__.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit cd97d6110b0c621d6b8654eefe3762483d2add75 -Author: pvl-bot -Date: Sun Jun 16 23:16:25 2024 -0700 - - Add 26 lines to infinigen/assets/materials/metal/hammered_metal.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit 99d5fbf2d48830bc5f81feae3e3d86bde2a1c4c7 -Author: pvl-bot -Date: Sun Jun 16 23:16:25 2024 -0700 - - Add 55 lines to infinigen/assets/materials/metal/hammered_metal.py. Contributed as part of Infinigen-Indoors by Yiming Zuo. - -commit fa2a7bd0676d76068a85a7143d16379db24cb4b7 -Author: pvl-bot -Date: Sun Jun 16 23:16:25 2024 -0700 - - Add 33 lines to infinigen/assets/materials/metal/metal_basic.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit 4a0e6b1601bba78408cee8f9ed122fda0a4310ff -Author: pvl-bot -Date: Sun Jun 16 23:16:25 2024 -0700 - - Add 19 lines to infinigen/assets/materials/metal/galvanized_metal.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit dc1cfcdaf9cb5fba5ab15962259364cc488cd1ba -Author: pvl-bot -Date: Sun Jun 16 23:16:25 2024 -0700 - - Add 49 lines to infinigen/assets/materials/metal/galvanized_metal.py. Contributed as part of Infinigen-Indoors by Yiming Zuo. - -commit 35a7a0d73b252ada8051b81b273122b4ec1bfc39 -Author: pvl-bot -Date: Sun Jun 16 23:16:25 2024 -0700 - - Add 1 lines to infinigen/assets/materials/plastics/plastic_rough.py. Contributed as part of Infinigen-Indoors by Alexander Raistrick. 
- -commit 08b4e3f3792d80185d9553b327cecb79a5c52bff -Author: pvl-bot -Date: Sun Jun 16 23:16:25 2024 -0700 - - Add 13 lines to infinigen/assets/materials/plastics/plastic_rough.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit 01212dcf30bdd702a07563056f65cfd3b5cb0f37 -Author: pvl-bot -Date: Sun Jun 16 23:16:25 2024 -0700 - - Add 69 lines to infinigen/assets/materials/plastics/plastic_rough.py. Contributed as part of Infinigen-Indoors by Yiming Zuo. - -commit 7028289d869e51f2b5b401a9e094bbc55f550dd9 -Author: pvl-bot -Date: Sun Jun 16 23:16:25 2024 -0700 - - Add 2 lines to infinigen/assets/materials/plastics/plastic_translucent.py. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit 8998f3f6ae9a438e01ef41b9815d683cf557f4a9 -Author: pvl-bot -Date: Sun Jun 16 23:16:25 2024 -0700 - - Add 7 lines to infinigen/assets/materials/plastics/plastic_translucent.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit 422618540da609c99c90b5ec936834d812d2d07a -Author: pvl-bot -Date: Sun Jun 16 23:16:24 2024 -0700 - - Add 35 lines to infinigen/assets/materials/plastics/plastic_translucent.py. Contributed as part of Infinigen-Indoors by Yiming Zuo. - -commit 25f92e17c68283fa4ebd148ae969bb240e189b53 -Author: pvl-bot -Date: Sun Jun 16 23:16:24 2024 -0700 - - Add 4 lines to infinigen/assets/materials/leather_and_fabrics/coarse_knit_fabric.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit 8d76a819c133e746c39b54cb2d37d12b9bcf86de -Author: pvl-bot -Date: Sun Jun 16 23:16:24 2024 -0700 - - Add 266 lines to infinigen/assets/materials/leather_and_fabrics/coarse_knit_fabric.py. Contributed as part of Infinigen-Indoors by Meenal Parakh. - -commit 6ab9c9405173b17a62ee63c7ac1b13b5ab0439bc -Author: pvl-bot -Date: Sun Jun 16 23:16:24 2024 -0700 - - Add 8 lines to infinigen/assets/materials/leather_and_fabrics/leather.py. Contributed as part of Infinigen-Indoors by Meenal Parakh. - -commit 0536a0f7b08c398cffe0ecef394e39fc7f479c51 -Author: pvl-bot -Date: Sun Jun 16 23:16:24 2024 -0700 - - Add 28 lines to infinigen/assets/materials/leather_and_fabrics/leather.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit a72d93218ed3a1e92f48c79f370145a3e2024e6d -Author: pvl-bot -Date: Sun Jun 16 23:16:24 2024 -0700 - - Add 74 lines to infinigen/assets/materials/leather_and_fabrics/leather.py. Contributed as part of Infinigen-Indoors by Yiming Zuo. - -commit de22aa971664c751581e37eaf12089bc07e1f638 -Author: pvl-bot -Date: Sun Jun 16 23:16:24 2024 -0700 - - Add 4 lines to infinigen/assets/materials/leather_and_fabrics/general_fabric.py. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit ecf69bb1f2934d38c8dfa8bb66c6c17d5b540b11 -Author: pvl-bot -Date: Sun Jun 16 23:16:24 2024 -0700 - - Add 41 lines to infinigen/assets/materials/leather_and_fabrics/general_fabric.py. Contributed as part of Infinigen-Indoors by Yiming Zuo. - -commit 044649819bcbde3c61687c1106cdcbd841388bc8 -Author: pvl-bot -Date: Sun Jun 16 23:16:24 2024 -0700 - - Add 125 lines to infinigen/assets/materials/leather_and_fabrics/general_fabric.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit 94bd36e7ad9555aa74e6e48bad8bd6e80b97b623 -Author: pvl-bot -Date: Sun Jun 16 23:16:24 2024 -0700 - - Add 4 lines to infinigen/assets/materials/leather_and_fabrics/lined_fabric.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. 
Per-file attribution history (Infinigen-Indoors release): a long run of commits authored by pvl-bot, all dated Sun Jun 16 2024, each with a message of the form "Add N lines to <file>. Contributed as part of Infinigen-Indoors by <contributor>." The files credited in this span cover infinigen/assets/materials/ (leather_and_fabrics, wear_tear, tile, text, plaster, shelf_shaders, and others), infinigen/assets/tables/, infinigen/assets/tableware/, infinigen/assets/material_assignments.py, infinigen/assets/color_fits.py, infinigen/core/constraints/ (constraint_language, example_solver, reasoning, evaluator), infinigen/core/tagging.py, infinigen/core/tags.py, infinigen/core/placement/path_finding.py, infinigen/core/nodes/shader_utils.py, infinigen/core/util/bevelling.py, and infinigen/tools/ (results, config, perceptual). Contributors credited: Alexander Raistrick, Lingjie Mei, Meenal Parakh, Yiming Zuo, Karhan Kaan Kayan, Zeyu Ma, Hongyu Wen, David Yan, Stamatis Alexandropoulos, Beining Han, and Lahav Lipson.
- -commit 3262add86904d8f21f31365c54055ff3839b11fd -Author: pvl-bot -Date: Sun Jun 16 23:16:15 2024 -0700 - - Add 12 lines to infinigen/tools/perceptual/rename.py. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit 65a5a561b822f224e79b03d1fb76bf63450bc310 -Author: pvl-bot -Date: Sun Jun 16 23:16:15 2024 -0700 - - Add 13 lines to infinigen/tools/perceptual/rename.py. Contributed as part of Infinigen-Indoors by Karhan Kaan Kayan. - -commit dc15981b2a1cea10bcea46baf540675008996ccf -Author: pvl-bot -Date: Sun Jun 16 23:16:15 2024 -0700 - - Add 187 lines to infinigen/tools/perceptual/create_pairs.py. Contributed as part of Infinigen-Indoors by Karhan Kaan Kayan. - -commit 4b94c4073e6493683f2a8760fd503b15ba38a355 -Author: pvl-bot -Date: Sun Jun 16 23:16:15 2024 -0700 - - Add 124 lines to infinigen/tools/convert_displacement.py. Contributed as part of Infinigen-Indoors by David Yan. - -commit 757024b09ce3cd182b64b8adcec9da02beffb638 -Author: pvl-bot -Date: Sun Jun 16 23:16:15 2024 -0700 - - Add 32 lines to infinigen/tools/isaac_sim.py. Contributed as part of Infinigen-Indoors by Beining Han. - -commit 36b10aa2d5f157746a58eab317d0f04ef5cc242e -Author: pvl-bot -Date: Sun Jun 16 23:16:15 2024 -0700 - - Add 139 lines to infinigen/tools/isaac_sim.py. Contributed as part of Infinigen-Indoors by David Yan. - -commit 9bed07285779baa685d0446a0ecbf4f003e85ce9 -Author: pvl-bot -Date: Sun Jun 16 23:16:15 2024 -0700 - - Add 83 lines to infinigen/tools/indoor_profile.py. Contributed as part of Infinigen-Indoors by David Yan. - -commit 2b09762b5cadff70822cf8d226d600c991e9c276 -Author: pvl-bot -Date: Sun Jun 16 23:16:15 2024 -0700 - - Add 63 lines to scripts/eevee_render.py. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit 618739d0000fb680f8ac74b3d2009d6e8ddb0de5 -Author: pvl-bot -Date: Sun Jun 16 23:16:15 2024 -0700 - - Add 11 lines to scripts/rebuttal.sh. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit 2705f90c1b22e3b4ad340c18f4c52a3434f5b1d4 -Author: pvl-bot -Date: Sun Jun 16 23:16:15 2024 -0700 - - Add 72 lines to scripts/rebuttal.sh. Contributed as part of Infinigen-Indoors by Pvl Bot. - -commit 4852bbac5f3d23fa41b9afd852c796ff93b58f8e -Author: pvl-bot -Date: Sun Jun 16 23:16:15 2024 -0700 - - Add 8 lines to scripts/indoor.sh. Contributed as part of Infinigen-Indoors by Beining Han. - -commit 3696c69adefd7551b4dc05ef618e39e0976b5132 -Author: pvl-bot -Date: Sun Jun 16 23:16:15 2024 -0700 - - Add 36 lines to scripts/rebuttal_retry_render.sh. Contributed as part of Infinigen-Indoors by Pvl Bot. - -commit 62a1a4fec2ff9feb7ad64781a7e6cae330b2386d -Author: pvl-bot -Date: Sun Jun 16 23:16:15 2024 -0700 - - Add 2 lines to docs/HelloRoom.md. Contributed as part of Infinigen-Indoors by Pvl Bot. - -commit 95533f59846161e86190fd2ebf81c70e0736d870 -Author: pvl-bot -Date: Sun Jun 16 23:16:15 2024 -0700 - - Add 125 lines to docs/HelloRoom.md. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit 3160f6188c5cbe8b5d339207a0ca92c699da2de6 -Author: pvl-bot -Date: Sun Jun 16 23:16:15 2024 -0700 - - Add 1 lines to docs/ExportingToSimulators.md. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit 699f3ae821fd3687068ea11833ad9cd5c68d8959 -Author: pvl-bot -Date: Sun Jun 16 23:16:15 2024 -0700 - - Add 32 lines to docs/ExportingToSimulators.md. Contributed as part of Infinigen-Indoors by David Yan. 
- -commit 9f486f85ff5bd58c20d51c068c4382ae0e19986b -Author: pvl-bot -Date: Sun Jun 16 23:16:15 2024 -0700 - - Add 1 lines to tests/assets/list_indoor_materials.txt. Contributed as part of Infinigen-Indoors by Zeyu Ma. - -commit 6e20cbe65894225c5767da3a080e38df01a8d3bd -Author: pvl-bot -Date: Sun Jun 16 23:16:15 2024 -0700 - - Add 1 lines to tests/assets/list_indoor_materials.txt. Contributed as part of Infinigen-Indoors by Stamatis Alexandropoulos. - -commit 05cd77cffd7678711516111f8b2f887693cb2e47 -Author: pvl-bot -Date: Sun Jun 16 23:16:15 2024 -0700 - - Add 1 lines to tests/assets/list_indoor_materials.txt. Contributed as part of Infinigen-Indoors by Karhan Kaan Kayan. - -commit a1bfe6d288def89442a667957e0dce7c08c6fe51 -Author: pvl-bot -Date: Sun Jun 16 23:16:15 2024 -0700 - - Add 3 lines to tests/assets/list_indoor_materials.txt. Contributed as part of Infinigen-Indoors by Meenal Parakh. - -commit ac5cb276f32fc01a36bdfb9201165da86c359f6a -Author: pvl-bot -Date: Sun Jun 16 23:16:15 2024 -0700 - - Add 12 lines to tests/assets/list_indoor_materials.txt. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit 36ee4ea749d40aa56ceb2b81743e61e32297ba14 -Author: pvl-bot -Date: Sun Jun 16 23:16:15 2024 -0700 - - Add 17 lines to tests/assets/list_indoor_materials.txt. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit 877e6688541fe6932124ff8d621dfe26f69dfaf2 -Author: pvl-bot -Date: Sun Jun 16 23:16:15 2024 -0700 - - Add 7 lines to tests/assets/list_displaced_materials.txt. Contributed as part of Infinigen-Indoors by David Yan. - -commit fdf533d6bb65cbaece0a6b813193965b4a17205e -Author: pvl-bot -Date: Sun Jun 16 23:16:15 2024 -0700 - - Add 7 lines to tests/assets/test_materials_basic.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit 125472b563eb95b76a7ccdc06539cc3c1d0e40ec -Author: pvl-bot -Date: Sun Jun 16 23:16:15 2024 -0700 - - Add 40 lines to tests/assets/test_materials_basic.py. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit 8da48f0f045bef1a9f85c1d9ed9ef9eafc8376c8 -Author: pvl-bot -Date: Sun Jun 16 23:16:15 2024 -0700 - - Add 1 lines to tests/assets/list_indoor_meshes.txt. Contributed as part of Infinigen-Indoors by Stamatis Alexandropoulos. - -commit b15a70034bf2688c53ed38085e7dbc0b8ae49640 -Author: pvl-bot -Date: Sun Jun 16 23:16:15 2024 -0700 - - Add 1 lines to tests/assets/list_indoor_meshes.txt. Contributed as part of Infinigen-Indoors by Pvl Bot. - -commit 2082175925a9365625e5bbd089446456a6043d05 -Author: pvl-bot -Date: Sun Jun 16 23:16:15 2024 -0700 - - Add 1 lines to tests/assets/list_indoor_meshes.txt. Contributed as part of Infinigen-Indoors by Beining Han. - -commit 6db1fbcd3f0b658dbbfb81d2a512805b428f982f -Author: pvl-bot -Date: Sun Jun 16 23:16:15 2024 -0700 - - Add 3 lines to tests/assets/list_indoor_meshes.txt. Contributed as part of Infinigen-Indoors by Karhan Kaan Kayan. - -commit e811301b630d04ab5779b31691dd63e8787e7fbe -Author: pvl-bot -Date: Sun Jun 16 23:16:15 2024 -0700 - - Add 28 lines to tests/assets/list_indoor_meshes.txt. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit b8e0d257ae54e6a6dd12aa0c5e358a0aa42597f1 -Author: pvl-bot -Date: Sun Jun 16 23:16:15 2024 -0700 - - Add 66 lines to tests/assets/list_indoor_meshes.txt. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit aecacaa0f695c4f5dde9b4932c2f7af9a306c4bf -Author: pvl-bot -Date: Sun Jun 16 23:16:14 2024 -0700 - - Add 1 lines to tests/assets/test_meshes_basic.py. 
Contributed as part of Infinigen-Indoors by Pvl Bot. - -commit 38db10ef300d9dd30344fca1c972038a59cdc10e -Author: pvl-bot -Date: Sun Jun 16 23:16:14 2024 -0700 - - Add 89 lines to tests/assets/test_meshes_basic.py. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit a6c9bfe47d85fb65fac8ea2ef02befb3fbaa6610 -Author: pvl-bot -Date: Sun Jun 16 23:16:14 2024 -0700 - - Add 4 lines to tests/assets/test_placeholders.py. Contributed as part of Infinigen-Indoors by Pvl Bot. - -commit 94f7d2b0044462d5091eb8aaada511a9ecb2f482 -Author: pvl-bot -Date: Sun Jun 16 23:16:14 2024 -0700 - - Add 7 lines to tests/assets/test_placeholders.py. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit 8d051a01c0446760ffd9d84a0b36ef8411307972 -Author: pvl-bot -Date: Sun Jun 16 23:16:14 2024 -0700 - - Add 57 lines to tests/assets/test_placeholders.py. Contributed as part of Infinigen-Indoors by David Yan. - -commit 9bbb94aedab2b9d80a99575b380eee26a1b9468f -Author: pvl-bot -Date: Sun Jun 16 23:16:14 2024 -0700 - - Add 21 lines to tests/solver/test_greedy_stages.py. Contributed as part of Infinigen-Indoors by Pvl Bot. - -commit 48d39929f807b7b9fe9161ff57a5c904defedc28 -Author: pvl-bot -Date: Sun Jun 16 23:16:14 2024 -0700 - - Add 286 lines to tests/solver/test_greedy_stages.py. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit cd6d04fc548b5507328c506220d999f181a74e19 -Author: pvl-bot -Date: Sun Jun 16 23:16:14 2024 -0700 - - Add 44 lines to tests/solver/test_state_def.py. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit 0e2fe0f9f6feead7ee9fb60669a5fe2d2b1fd30d -Author: pvl-bot -Date: Sun Jun 16 23:16:14 2024 -0700 - - Add 120 lines to tests/solver/test_greedy_substitutions.py. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit 202e3c39073f21064b02c12f2353815f766d98b2 -Author: pvl-bot -Date: Sun Jun 16 23:16:14 2024 -0700 - - Add 471 lines to tests/solver/test_greedy_partition.py. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit 852cfa452f0e4a8b25e7756cdc0ea408176618b0 -Author: pvl-bot -Date: Sun Jun 16 23:16:14 2024 -0700 - - Add 5 lines to tests/solver/test_asset_surfaces.py. Contributed as part of Infinigen-Indoors by Pvl Bot. - -commit 5f032e1af206c1c3ae9cc5518e865f42838ea8b0 -Author: pvl-bot -Date: Sun Jun 16 23:16:14 2024 -0700 - - Add 56 lines to tests/solver/test_asset_surfaces.py. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit c1f6ced9ef5d8853a248e45526985f7c926f05d7 -Author: pvl-bot -Date: Sun Jun 16 23:16:14 2024 -0700 - - Add 83 lines to tests/solver/test_constraint_evaluator.py. Contributed as part of Infinigen-Indoors by Pvl Bot. - -commit e0b4e342488190622fc1ad5bbfeaa4ff73cc52a1 -Author: pvl-bot -Date: Sun Jun 16 23:16:14 2024 -0700 - - Add 218 lines to tests/solver/test_constraint_evaluator.py. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit f080965f5e1af641872fb2bde4d7229d7b50bb1f -Author: pvl-bot -Date: Sun Jun 16 23:16:14 2024 -0700 - - Add 679 lines to tests/solver/test_constraint_evaluator.py. Contributed as part of Infinigen-Indoors by Karhan Kaan Kayan. - -commit 6baf8e5fb4f0683e9c6ef185163336db9d050785 -Author: pvl-bot -Date: Sun Jun 16 23:16:14 2024 -0700 - - Add 6 lines to tests/solver/test_stable_against.py. Contributed as part of Infinigen-Indoors by Pvl Bot. 
- -commit 662dbf04c3915cdfec063ae05aa5b9f3ecd10cdc -Author: pvl-bot -Date: Sun Jun 16 23:16:14 2024 -0700 - - Add 74 lines to tests/solver/test_stable_against.py. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit ebc6fdcca9cbd9b7173dd3d3f7f10421f81247ec -Author: pvl-bot -Date: Sun Jun 16 23:16:14 2024 -0700 - - Add 84 lines to tests/solver/test_stable_against.py. Contributed as part of Infinigen-Indoors by Karhan Kaan Kayan. - -commit 22b6f554ff6cb02ac63a55d131dea5842b51a3ed -Author: pvl-bot -Date: Sun Jun 16 23:16:14 2024 -0700 - - Add 29 lines to tests/constraints/test_constraint_domain.py. Contributed as part of Infinigen-Indoors by Pvl Bot. - -commit 71b77e1fb2848a82ccc6befafbb107f28471d5c3 -Author: pvl-bot -Date: Sun Jun 16 23:16:14 2024 -0700 - - Add 170 lines to tests/constraints/test_constraint_domain.py. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit 8e1ea377cc33439e2394a36f68170c1807b29e3e -Author: pvl-bot -Date: Sun Jun 16 23:16:14 2024 -0700 - - Add 13 lines to tests/constraints/test_constraint_relations.py. Contributed as part of Infinigen-Indoors by Pvl Bot. - -commit 2c13da8ddfa26450038aefe01ddbc82095df6d4f -Author: pvl-bot -Date: Sun Jun 16 23:16:14 2024 -0700 - - Add 83 lines to tests/constraints/test_constraint_relations.py. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit 8e3e746291e9d206be336e80b392a4b94831d242 -Author: pvl-bot -Date: Sun Jun 16 23:16:14 2024 -0700 - - Add 77 lines to tests/constraints/test_reldom.py. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit 0c723cdc2bc2c2deba9a01c21f22697c7153fc85 -Author: pvl-bot -Date: Sun Jun 16 23:16:14 2024 -0700 - - Add 18 lines to tests/constraints/test_tagset_operations.py. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit 35b393f202154e59710ed4f8ebf778c2229e2e01 -Author: pvl-bot -Date: Sun Jun 16 23:16:14 2024 -0700 - - Add 25 lines to tests/constraints/test_tagset_operations.py. Contributed as part of Infinigen-Indoors by Pvl Bot. - -commit 7dc61ce5765e74391129836829bf99c4526e8e2e -Author: pvl-bot -Date: Sun Jun 16 23:16:14 2024 -0700 - - Add 15 lines to tests/constraints/test_tags.py. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit 33b8f9d33f4af56a0059a0ef4897ab69c38306fa -Author: pvl-bot -Date: Sun Jun 16 23:16:14 2024 -0700 - - Add 43 lines to tests/constraints/test_constraint_language.py. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit 992a1a7c2d3ed371fdd5681c7391868f6aa2b843 -Author: pvl-bot -Date: Sun Jun 16 23:16:14 2024 -0700 - - Add 10 lines to tests/constraints/test_constraint_bounding.py. Contributed as part of Infinigen-Indoors by David Yan. - -commit 49df4a176209f3ebe004adbffae8f632684381f4 -Author: pvl-bot -Date: Sun Jun 16 23:16:14 2024 -0700 - - Add 27 lines to tests/constraints/test_constraint_bounding.py. Contributed as part of Infinigen-Indoors by Pvl Bot. - -commit ee2950ffc77b28dfeb3ee25be16ae76459203314 -Author: pvl-bot -Date: Sun Jun 16 23:16:14 2024 -0700 - - Add 154 lines to tests/constraints/test_constraint_bounding.py. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit 9b03e4165f17be3e0bfa1675ead1980c9157aaaa -Author: pvl-bot -Date: Sun Jun 16 23:16:14 2024 -0700 - - Add 15 lines to tests/core/test_tagging.py. Contributed as part of Infinigen-Indoors by Pvl Bot. 
- -commit 5318cf79dda4173a7c982720bab2993fc1692173 -Author: pvl-bot -Date: Sun Jun 16 23:16:14 2024 -0700 - - Add 82 lines to tests/core/test_tagging.py. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit d01a5d270c151af34ba440f7341b0f770fcea207 -Author: pvl-bot -Date: Sun Jun 16 23:16:14 2024 -0700 - - Add 39 lines to tests/core/test_gins.py. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit e6dd3c610de11316ac90e72c487692893f5a5a15 -Author: pvl-bot -Date: Sun Jun 16 23:16:14 2024 -0700 - - Add 28 lines to tests/tools/test_export.py. Contributed as part of Infinigen-Indoors by David Yan. - -commit 843d90397d39b2e494500b8327d8c5c4b7dd2a7c -Author: pvl-bot -Date: Sun Jun 16 23:16:14 2024 -0700 - - Add 52 lines to tests/tools/test_export.py. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit aed7c77b255edc60065effa0cf9eb08cbb17f022 -Author: pvl-bot -Date: Sun Jun 16 23:16:14 2024 -0700 - - Add 7 lines to tests/list_displaced_materials.txt. Contributed as part of Infinigen-Indoors by David Yan. - -commit 7a93f7205e08c2dbbaf7bd6d7f4270fb43e3e5de -Author: pvl-bot -Date: Sun Jun 16 23:16:13 2024 -0700 - - Add 35 lines to tests/material_balls.txt. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit 13f280963796bb7207651997d1f53acff9fa2d49 -Author: pvl-bot -Date: Sun Jun 16 23:16:13 2024 -0700 - - Add 3 lines to infinigen_examples/configs_indoor/disable/no_objects.gin. Contributed as part of Infinigen-Indoors by Pvl Bot. - -commit 428c51532435454b2c5c154cc7f11cd91d1d395e -Author: pvl-bot -Date: Sun Jun 16 23:16:13 2024 -0700 - - Add 2 lines to infinigen_examples/configs_indoor/disable/no_details.gin. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit d2fb7a324f5187906cbc9c1a064dad866cca5345 -Author: pvl-bot -Date: Sun Jun 16 23:16:13 2024 -0700 - - Add 7 lines to infinigen_examples/configs_indoor/disable/no_details.gin. Contributed as part of Infinigen-Indoors by Pvl Bot. - -commit 88bf8ec0ea810ba2d36ef6e85a94c1996841a8a1 -Author: pvl-bot -Date: Sun Jun 16 23:16:13 2024 -0700 - - Add 1 lines to infinigen_examples/configs_indoor/studio.gin. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit dbd9d60f7a2b4e70fe41ae940c6087565479fb3a -Author: pvl-bot -Date: Sun Jun 16 23:16:13 2024 -0700 - - Add 7 lines to infinigen_examples/configs_indoor/multistory.gin. Contributed as part of Infinigen-Indoors by Pvl Bot. - -commit 82d9f85836625a41e6daf71a0e06a77f5149fade -Author: pvl-bot -Date: Sun Jun 16 23:16:13 2024 -0700 - - Add 7 lines to infinigen_examples/configs_indoor/multistory.gin. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit d0531a71f35682af71fe08839ac2e98af444e4f5 -Author: pvl-bot -Date: Sun Jun 16 23:16:13 2024 -0700 - - Add 7 lines to infinigen_examples/configs_indoor/overhead.gin. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit b8d11d80341482b5feb857933df993ccbdb8a34d -Author: pvl-bot -Date: Sun Jun 16 23:16:13 2024 -0700 - - Add 8 lines to infinigen_examples/configs_indoor/overhead.gin. Contributed as part of Infinigen-Indoors by Pvl Bot. - -commit 8ff32f1b8f2bb865db6eab51f3ba9967af487227 -Author: pvl-bot -Date: Sun Jun 16 23:16:13 2024 -0700 - - Add 19 lines to infinigen_examples/configs_indoor/fast_solve.gin. Contributed as part of Infinigen-Indoors by Alexander Raistrick. 
- -commit 82ceea367391f619e680eebfeae5b32870d506bd -Author: pvl-bot -Date: Sun Jun 16 23:16:13 2024 -0700 - - Add 4 lines to infinigen_examples/configs_indoor/topview.gin. Contributed as part of Infinigen-Indoors by Pvl Bot. - -commit 8d9ba331e5c2054f874a9129988070bc8280220e -Author: pvl-bot -Date: Sun Jun 16 23:16:13 2024 -0700 - - Add 4 lines to infinigen_examples/configs_indoor/topview.gin. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit 3d403c4644bcf455ea157ae494a4b18869031b80 -Author: pvl-bot -Date: Sun Jun 16 23:16:13 2024 -0700 - - Add 2 lines to infinigen_examples/configs_indoor/export_upload.gin. Contributed as part of Infinigen-Indoors by David Yan. - -commit 7b89026662f8161f5da8c99d6d58bc8b32d5ea0b -Author: pvl-bot -Date: Sun Jun 16 23:16:13 2024 -0700 - - Add 2 lines to infinigen_examples/configs_indoor/singleroom.gin. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit d9be42ac856eb1f0ae977cdddcb8021191aab744 -Author: pvl-bot -Date: Sun Jun 16 23:16:13 2024 -0700 - - Add 1 lines to infinigen_examples/configs_indoor/base.gin. Contributed as part of Infinigen-Indoors by Zeyu Ma. - -commit 478badcddee79412359dd81d166a1ba7afa3c5d1 -Author: pvl-bot -Date: Sun Jun 16 23:16:13 2024 -0700 - - Add 6 lines to infinigen_examples/configs_indoor/base.gin. Contributed as part of Infinigen-Indoors by Pvl Bot. - -commit 2717fe91df5be267d07fd2efe689364c8982a33a -Author: pvl-bot -Date: Sun Jun 16 23:16:13 2024 -0700 - - Add 58 lines to infinigen_examples/configs_indoor/base.gin. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit bf64297829c4da6e7f86c6ac7911a58ef07560a3 -Author: pvl-bot -Date: Sun Jun 16 23:16:13 2024 -0700 - - Add 1 lines to infinigen_examples/util/constraint_util.py. Contributed as part of Infinigen-Indoors by David Yan. - -commit 737265cf42087b551f2133e7a6a14cc36a751c11 -Author: pvl-bot -Date: Sun Jun 16 23:16:13 2024 -0700 - - Add 62 lines to infinigen_examples/util/constraint_util.py. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit ccb851d7cf43fa769ce8cc33ab9b2687f6b122c9 -Author: pvl-bot -Date: Sun Jun 16 23:16:13 2024 -0700 - - Add 2 lines to infinigen_examples/util/generate_indoors_util.py. Contributed as part of Infinigen-Indoors by Pvl Bot. - -commit a1c4834c601099c572b5ccbaec69b4605e13f2e2 -Author: pvl-bot -Date: Sun Jun 16 23:16:13 2024 -0700 - - Add 5 lines to infinigen_examples/util/generate_indoors_util.py. Contributed as part of Infinigen-Indoors by Zeyu Ma. - -commit f4ba54c94235d9aa715a491cec9dd2d4ebd6a0ab -Author: pvl-bot -Date: Sun Jun 16 23:16:13 2024 -0700 - - Add 258 lines to infinigen_examples/util/generate_indoors_util.py. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit 3a7e3a7750a01cb4db3409e9c4ba624c3525e419 -Author: pvl-bot -Date: Sun Jun 16 23:16:13 2024 -0700 - - Add 1 lines to infinigen_examples/util/test_utils.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit c6cdfa71852761ddbcd30332c7af12258b165ddb -Author: pvl-bot -Date: Sun Jun 16 23:16:13 2024 -0700 - - Add 2 lines to infinigen_examples/util/test_utils.py. Contributed as part of Infinigen-Indoors by Zeyu Ma. - -commit 16711191bc293f451ad0561a37877340ea2bc7d2 -Author: pvl-bot -Date: Sun Jun 16 23:16:13 2024 -0700 - - Add 57 lines to infinigen_examples/util/test_utils.py. Contributed as part of Infinigen-Indoors by Alexander Raistrick. 
- -commit d01430919464dd048c3cae264ef7a30da737c8b4 -Author: pvl-bot -Date: Sun Jun 16 23:16:13 2024 -0700 - - Add 1 lines to infinigen_examples/indoor_constraint_examples.py. Contributed as part of Infinigen-Indoors by David Yan. - -commit f053a032b24963a46e69561caaebb8c0516f2de0 -Author: pvl-bot -Date: Sun Jun 16 23:16:13 2024 -0700 - - Add 1 lines to infinigen_examples/indoor_constraint_examples.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit a7f5c07e33c48f3894fb7e6b7eed96ef2b5a45b4 -Author: pvl-bot -Date: Sun Jun 16 23:16:13 2024 -0700 - - Add 2 lines to infinigen_examples/indoor_constraint_examples.py. Contributed as part of Infinigen-Indoors by Pvl Bot. - -commit b03147cd6fff0d0e76f4cd82b4963be3728f15d5 -Author: pvl-bot -Date: Sun Jun 16 23:16:13 2024 -0700 - - Add 2 lines to infinigen_examples/indoor_constraint_examples.py. Contributed as part of Infinigen-Indoors by Karhan Kaan Kayan. - -commit b97b816b14df8ed6ea73cb087b52166373d9332f -Author: pvl-bot -Date: Sun Jun 16 23:16:13 2024 -0700 - - Add 19 lines to infinigen_examples/indoor_constraint_examples.py. Contributed as part of Infinigen-Indoors by Stamatis Alexandropoulos. - -commit 6014f883ac56ba254ad39909db0f0cd1a2a6e1ff -Author: pvl-bot -Date: Sun Jun 16 23:16:13 2024 -0700 - - Add 865 lines to infinigen_examples/indoor_constraint_examples.py. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit 6c789c103d9cff2491973cbafd0dd5ad5df91322 -Author: pvl-bot -Date: Sun Jun 16 23:16:13 2024 -0700 - - Add 203 lines to infinigen_examples/generate_material_balls.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit e46952295de3b90d5aa0aaf58452c4fd17ce5a39 -Author: pvl-bot -Date: Sun Jun 16 23:16:13 2024 -0700 - - Add 6 lines to infinigen_examples/indoor_asset_semantics.py. Contributed as part of Infinigen-Indoors by David Yan. - -commit 3bd77582713236871d15fcfcaf3f880308f39c82 -Author: pvl-bot -Date: Sun Jun 16 23:16:13 2024 -0700 - - Add 332 lines to infinigen_examples/indoor_asset_semantics.py. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit e46aefc5bee3d3979609803ca30430e2ee9bf3f0 -Author: pvl-bot -Date: Sun Jun 16 23:16:12 2024 -0700 - - Add 45 lines to infinigen_examples/asset_parameters.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit f4eeb4bfe1272599af4e16c1c3483a4aaae7a3de -Author: pvl-bot -Date: Sun Jun 16 23:16:12 2024 -0700 - - Add 1 lines to infinigen_examples/generate_indoors.py. Contributed as part of Infinigen-Indoors by Stamatis Alexandropoulos. - -commit 9bca4587dff99a094a4c16047fb18207ebdc3dbe -Author: pvl-bot -Date: Sun Jun 16 23:16:12 2024 -0700 - - Add 1 lines to infinigen_examples/generate_indoors.py. Contributed as part of Infinigen-Indoors by Yiming Zuo. - -commit 24b000109867afe807c832393bccd5c5a5902182 -Author: pvl-bot -Date: Sun Jun 16 23:16:12 2024 -0700 - - Add 1 lines to infinigen_examples/generate_indoors.py. Contributed as part of Infinigen-Indoors by David Yan. - -commit 56b88a113f79a0e2091304375f857a3c3bdbf93b -Author: pvl-bot -Date: Sun Jun 16 23:16:12 2024 -0700 - - Add 3 lines to infinigen_examples/generate_indoors.py. Contributed as part of Infinigen-Indoors by Karhan Kaan Kayan. - -commit 9bb58746a6391a0104b0012e2c80096da51b87eb -Author: pvl-bot -Date: Sun Jun 16 23:16:12 2024 -0700 - - Add 12 lines to infinigen_examples/generate_indoors.py. Contributed as part of Infinigen-Indoors by Pvl Bot. 
- -commit ba87ded72811420f12918238a7684481d091c2e0 -Author: pvl-bot -Date: Sun Jun 16 23:16:12 2024 -0700 - - Add 14 lines to infinigen_examples/generate_indoors.py. Contributed as part of Infinigen-Indoors by Zeyu Ma. - -commit 1b5ffe310d7ff6cc2bfbde6306aa7c3bd7834a5d -Author: pvl-bot -Date: Sun Jun 16 23:16:12 2024 -0700 - - Add 38 lines to infinigen_examples/generate_indoors.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit d33eb1b151fb23562266b4cbe8a9ce6a41262896 -Author: pvl-bot -Date: Sun Jun 16 23:16:12 2024 -0700 - - Add 356 lines to infinigen_examples/generate_indoors.py. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit 22b9acfbfa7d47c985ee1027428c4e81f690f3c2 -Author: pvl-bot -Date: Sun Jun 16 23:16:12 2024 -0700 - - Add 1 lines to infinigen_examples/generate_asset_parameters.py. Contributed as part of Infinigen-Indoors by Alexander Raistrick. - -commit d1268dfd23aec66495f90bd11a72e3846ff8b0f9 -Author: pvl-bot -Date: Sun Jun 16 23:16:12 2024 -0700 - - Add 328 lines to infinigen_examples/generate_asset_parameters.py. Contributed as part of Infinigen-Indoors by Lingjie Mei. - -commit 356dc9c98404f24bb06501c33a974f6422e27b95 -Author: pvl-bot -Date: Sun Jun 16 23:16:12 2024 -0700 - - Improvements to exporting.py, contributed by David Yan as part of Infinigen Indoors - -commit b3fa5e9471c29b0cdef00206aa9863a6bf4223e4 -Author: pvl-bot -Date: Sun Jun 16 23:16:12 2024 -0700 - - Changes to existing files contributed as part of Infinigen-Indoors. Contributed by all authors - we were unable to create correct per-author commits for edits to existing files. - -commit 001797f5419dc02fe6e39a10e7e496fe907b49f3 -Author: pvl-bot -Date: Sun Jun 16 23:16:12 2024 -0700 - - Add files that were renamed/deleted as part of Infinigen Indoors - -commit f98b9575c73c322f1cbcc2dd48d26bbe8d6239f6 -Author: Alexander Raistrick -Date: Sun May 26 13:16:44 2024 -0400 - - Fix typos in export documentation - -commit f5bcba8de47623da9b715348cf95822445e0e9a7 -Merge: 830891c3a 857c9be95 -Author: Alex Raistrick -Date: Sun May 26 12:29:33 2024 -0400 - - Merge pull request #245 from princeton-vl/rc_1.3.3 - - Merge v1.3.1 - v1.3.3 - -commit 857c9be957fa14a40a75371a2aa5b64aecedc21f -Author: Alexander Raistrick -Date: Sat May 25 19:01:59 2024 -0400 - - Fix unit-test failures for empty materials - -commit e0a6e254c11a920e078867a1b488e4e68102d8d3 -Author: Alexander Raistrick -Date: Sat May 25 17:29:35 2024 -0400 - - v1.3.3 - Camera bugfix, pass all tests - -commit 03f603d030f3f8571808b96180514149c11a64fa -Author: Alexander Raistrick -Date: Sat May 25 17:08:27 2024 -0400 - - Add purge_empty_materials to every geonodes apply to fix emptymat tests - -commit 113230a056826d07e8de2a24f7087975ef85c48d -Author: Alexander Raistrick -Date: Sat May 25 17:08:07 2024 -0400 - - Add pytest timeout, reorganize tests/test_meshes_basic.txt to pass - -commit 7e3d6b31321a5f651a63eb615b442e7423b0eb03 -Author: Alexander Raistrick -Date: Tue Apr 23 13:49:06 2024 -0400 - - revert fps change - -commit dfc2b2ab1a8df700575f6d518685932c26ce396a -Author: Alexander Raistrick -Date: Tue Mar 5 13:47:31 2024 -0500 - - Add METAL to devices list - -commit 7d821161cfa6aac2e8c1ad317784b5e38bdb410e -Author: Zeyu Ma <31351547+mazeyu@users.noreply.github.com> -Date: Tue Mar 5 11:00:04 2024 -0500 - - decrease sss radius - -commit 7538de1b97846fd3eab729f213b79b8768e13a42 -Author: Zeyu Ma <31351547+mazeyu@users.noreply.github.com> -Date: Tue Mar 5 10:57:38 2024 -0500 - - multi cam fix - - * fix - - * fix placement - -commit 
3078b0cfa02c2bd35877ba9aaec300b8125fe1c9 -Author: Zeyu Ma <31351547+mazeyu@users.noreply.github.com> -Date: Sun Feb 18 14:21:06 2024 -0500 - - refactor camera selection syntax and prevent water in all frames - -commit 3c96b3b99c6e00413a3a125804c18b2e455d63f6 -Author: Alexander Raistrick -Date: Sun May 5 14:58:33 2024 -0400 - - Bugfix snakes & manage_jobs (v1.3.2) - - * Move glowingrocks to rocks to prevent circular/far-reaching imports - - * Update OcMesher - - * Fix white materials on snakes, add unit test for empty materials on any asseet - - * Fix denoising always on - - * Add slurm_partition to manage_jobs - - * Update formats_all, make release.py work with just a folder of videos - -commit 35506e8821830bd34af04eaef44a28f9f6932d8b -Author: Alex Raistrick -Date: Mon Feb 12 12:45:37 2024 -0500 - - Smbclient commandline utility - - * Add commandline bindings to smbclient.py - - * Add mapfunc for slurm launch of smb_client, WIP fix ls with glob at end - - * Fix recursive glob ls and download - - * Add exclude flag to eliminate downloading blends - - * Add verbose, ratelimit - -commit dd5d560cf0495b51b5991d9a8788d6a5ee4e7e7f -Author: DavidYan-1 -Date: Sun Feb 4 15:37:34 2024 -0500 - - Texture-baked exporting (princeton-vl/infinigen_internal/#103) - - * Full Scene Exporter - - * Regex Tweaks for Integration Testing - - * Refactor and optimize export - - * Move exporting README to docs/ folder and add more caveats - - * Path handling tweaks for exporter - - * Move export to infinigen.tools.export - - * Tweak docs - - * Add slurm scheduling to generate_individual_assets - - * Tweak generate_individual_assets - - * Small Refactor - - * Glass Export and other features - - Glass, Individual object, .obj vertex col export and fixes UV overrwrite - - * Add --export option to generate_individual_assets, use a slurm job array per factory not for the whole set - - * Add docs on generating & exporting individual assets - - * Export Optimizations and Bugfixes - - * Remove Unused Args - - * Typo / import fixes - - * Add --export option to generate_individual_assets, use a slurm job array per factory not for the whole set - - * Fix kwargs - - --------- - - Co-authored-by: Alexander Raistrick - -commit e4a42d7cf9084c271f908070c559395747ec27fb -Author: Alex Raistrick -Date: Wed Feb 7 09:12:11 2024 -0500 - - Render improvements (v1.3.1) (princeton_vl/infinigen_internal/#107) - - * Attempt to fix white snakes and volume_bounces=0 bugs - - * Make videos 4sec not 8, tweak slurm_1h, tweak terrain resolution, speed up cameras for video - - * Fix --use_existing for scenes started with --specific_seed - - * Increase opengl time, decrease render sample quality - - * Noisify camera motions - - * Add overhead.gin - - * Tweak video length and cam seped - - * Drop fps to 16 - - * Reorganize cycles configuration, overhaul enable_gpu to be more robust and only ever use one device type - - * Bump version to 1.3.1, remove scripts/launch - - * Tweak ratios, fix typo - - * Fix noshortrender typo, rename conf to noisy_video - - * Changelog - -commit 830891c3ac988f6be02ded26388b1572a5463de3 -Merge: 18be26c9b 80ac24c68 -Author: Alex Raistrick -Date: Wed Apr 24 22:34:04 2024 -0400 - - Merge pull request #122 from princeton-vl/rc_1.2.6 - - Bugfix v1.2.6 - Fix duplicate configs, CUDA_VISIBLE_DEVICES & more - -commit 80ac24c681ace3f811d1eec7363f53f4d3725ae3 -Author: Alexander Raistrick -Date: Wed Apr 24 13:22:09 2024 -0400 - - Increment version & changelog for v1.2.6 - -commit 4216ac371f3caab32666a0e3cc79f75ab0afe00f -Author: 
Alexander Raistrick -Date: Wed Apr 24 15:03:31 2024 -0400 - - Unify other creature interfaces - -commit 069ad8b12f932d44619928d4d54c08a85c5f9235 -Author: Alexander Raistrick -Date: Wed Apr 24 13:07:40 2024 -0400 - - Attempt to fix dynamic hair for #215, raise NotImplementedError for dynamic hair for now - -commit e7b3ef04df314b62e94047fa30321dbd48c054aa -Author: Alexander Raistrick -Date: Wed Apr 24 11:52:41 2024 -0400 - - Fix submitit emulator improperly following CUDA_VISIBLE_DEVICES (#212) - -commit 7177237a64ad00bddb32fd81e516ef7ef37d3d29 -Author: Alexander Raistrick -Date: Wed Apr 24 11:22:05 2024 -0400 - - Add trycatch for flowvis install - -commit 43244c9e26c6498182a1128cdf8f45115a467bba -Author: Alexander Raistrick -Date: Wed Apr 24 00:19:44 2024 -0400 - - Avoid duplicated configs by skipping sample_scene_spec when --configs specifies an option: - -commit f54f536420a759e56da3b4a11af9ed10806c4d1e -Author: Alexander Raistrick -Date: Tue Apr 23 23:34:27 2024 -0400 - - Implement mutually exclusive folders for scene_types to prevent common error case - -commit 18be26c9b4a7b375442d23569b737d8e2169e372 -Author: Zeyu Ma <31351547+mazeyu@users.noreply.github.com> -Date: Tue Apr 2 11:16:34 2024 -0700 - - Bugfix v1.2.5 - Terrain bugfix for multi-task command; Terrain.bounds and Terrain.populated_bounds parameter - - * fix reinitializing terrain - - * add bounds and populated_bounds for terrain - -commit 5132903cd68704367d1c44c841e5163158e0f33d -Author: Alex Raistrick -Date: Sat Mar 9 16:08:10 2024 -0500 - - Update Installation.md - -commit 331b4e5e8b3d84f8a31f395c8a344df20844195e -Author: Alexander Raistrick -Date: Tue Mar 5 15:24:17 2024 -0500 - - Hotfix v1.2.4: Fix TreeFactory(season='winter'), fix join_objects ignoring empty meshes - -commit 66a449399f2f38d63e4b9aad689c02a4479f14df -Author: Zhangir Azerbayev <59542043+zhangir-azerbayev@users.noreply.github.com> -Date: Wed Feb 28 15:51:32 2024 -0500 - - Update Installation.md (#192) - -commit e8687f4ab5e809be28778fe42e26d26b414cca6a -Author: Alex Raistrick -Date: Mon Jan 15 13:44:40 2024 -0500 - - Update Installation.md - -commit 40a261506252f17107ff7398d787a3feb13a53d1 -Author: Alex Raistrick -Date: Fri Dec 29 09:49:57 2023 -0500 - - Hotfix v1.2.3 - - * Fix misplaced opengl path, tweak installation - - * Tweak launch scripts - - * Fix underspecified child python paths - - * Fix cleanup except_crashed - - * Fix underspecified child python paths - - * Remove cd worldgen typo - - * Fix leftover objects in tree generation - - * Change version to 1.2.3 not 1.2.0.3 - -commit 0c622a1788235e2270a73262463383ff1fade70a -Merge: 48be1cde4 e23073612 -Author: Alex Raistrick -Date: Sun Dec 17 15:24:59 2023 -0500 - - Merge pull request #184 from princeton-vl/hotfix_v1.2.0.2 - - Fix hello world crash - -commit e23073612e4d953d0c3a6f3639262e566a69ab9a -Author: Alexander Raistrick -Date: Sun Dec 17 13:45:58 2023 -0500 - - Fix hello world crash - -commit 48be1cde4f44e79b237fdf0170fbf28084f80704 -Merge: 873fedd61 ff086b6cb -Author: Alex Raistrick -Date: Sat Dec 16 13:27:44 2023 -0500 - - Merge pull request #182 from princeton-vl/hotfix_v1.2.0.1 - - Hotfix helloworld, data download, smooth shading - -commit ff086b6cb6c19b99bf01f1ddf53a0c156df4b720 -Author: Alexander Raistrick -Date: Sat Dec 16 12:36:17 2023 -0500 - - Hotfix helloworld, data download, smooth shading - -commit 873fedd6108e356c701112c8e67f300e4fe69d02 -Author: Zeyu Ma <31351547+mazeyu@users.noreply.github.com> -Date: Thu Dec 14 23:40:11 2023 -0500 - - Update .gitmodules - -commit 
fcd7fc265e5a7c25b56e431e4ed57c3b5320fb42 -Author: Zeyu Ma -Date: Tue Dec 12 14:35:23 2023 -0500 - - Integrate OcMesher - -commit e4765d06e4794e45e166066be9a12fe188155285 -Author: pvl-bot -Date: Tue Dec 12 14:22:19 2023 -0500 - - Fix typos - -commit a31950f9ada0345fbf7812e78ad0f5598d170ff1 -Author: Alexander Raistrick -Date: Tue Dec 5 13:26:52 2023 -0500 - - Remove build/publish gh actions for now - -commit 998ba6bf657cd99710a8a17d1e2cf57c60b9e9a8 -Author: Alexander Raistrick -Date: Wed Nov 29 17:16:20 2023 -0500 - - Fix snakes, import crash, CI failures - - * Fix snake material, logging bug, fix debug crash - - * Add trycatches to prevent marching_cubes crash when not installed - - * Fix linting - - * Update changelog - -commit 71f254a7b31d2abc4ff02388a65d9aca5ca068e7 -Merge: e9313a618 04d8e573f -Author: Alexander Raistrick -Date: Tue Nov 28 15:48:14 2023 -0500 - - Merge branch 'main' into rc_1.1.1 - -commit 04d8e573f68b95981cc206ce858126bec0c5a57d -Author: pvl-bot <136786582+pvl-bot@users.noreply.github.com> -Date: Fri Nov 24 13:29:53 2023 -0500 - - Update dependencies, loosen version reqs, remove unused dependencies - -commit e9313a618220bfe02bc2b90f863143c64ac67687 -Author: pvl-bot -Date: Wed Oct 25 21:23:34 2023 -0400 - - Transpiler v2.6.5 - revert Ignore Reroutes to fix incorrect linkage bug - -commit 24242098961519c0678ff3757d4a9736358c4778 -Author: Alexander Raistrick -Date: Mon Oct 23 21:23:57 2023 -0400 - - Add infinigen.launch_blender helper, remove $BLENDER, fix test, update docs - -commit 2f025c5ff055b2538080fbceba36fb76e7da5a91 -Author: Alexander Raistrick -Date: Mon Oct 16 23:45:03 2023 -0400 - - WIP revised install instructions - -commit 091d6ec5ba61cab9db97851ccd92f3fdce005dd7 -Merge: ef75fe7fa a8ba86a39 -Author: Alexander Raistrick -Date: Mon Oct 16 17:01:07 2023 -0400 - - Merge branch 'main' into rc_1.1.1 - -commit a8ba86a394f8757586b4fb15252a3282e56a91de -Merge: 0f208a804 359f08e7c -Author: Alex Raistrick -Date: Mon Oct 16 15:58:16 2023 -0400 - - Merge pull request #87 from princeton-vl/rc_1.0.4 - - v1.0.4 - Pregenerated download tools, ground truth updates, render throughput improvements - -commit 359f08e7cfdf6caa3acf84bbb1bdaf6fbfe9a91d -Author: Lahav Lipson -Date: Mon Oct 16 15:44:15 2023 -0400 - - Update docs and download_pregenerated_data.py (princeton-vl/infinigen_internal/#90) - - * Update docs and download_pregenerated_data.py - - * Update GroundTruthAnnotations.md - - * Update GroundTruthAnnotations.md - - * Update GroundTruthAnnotations.md - - * Update GroundTruthAnnotations.md - - * Update GroundTruthAnnotations.md - - * Update GroundTruthAnnotations.md - - * Update GroundTruthAnnotations.md - - * Fix hello_world example - - --------- - - Co-authored-by: Alexander Raistrick - -commit a646f3513d5512911fb5488537a8a724b57b79f3 -Author: Alexander Raistrick -Date: Sun Oct 15 18:54:25 2023 -0400 - - Make download script interactive, revise data docs - -commit 02cd5b6045b22ec772706257d0722d2f6b13b57f -Author: Lahav Lipson -Date: Sun Oct 15 17:35:19 2023 -0400 - - Refactor bounding_boxes_3d.py segmentation_lookup.py (#89) - -commit 79244928803532393f7f572c6f03540c0d9d1138 -Author: Alexander Raistrick -Date: Sun Oct 15 14:35:00 2023 -0400 - - Debug render pipeline getting stuck due to backlog limiters - -commit ef75fe7fa1d9155f365e0ac8344d6fe35d38f9b0 -Author: Alexander Raistrick -Date: Sat Oct 14 13:07:00 2023 -0400 - - Fix pip install with cuda, build.yml, terrain test, mark which tests are ci - -commit 6780f819cc2ba25e9298148ce35d0aa9ec02195c -Merge: f7889fb3c 
3664df8bf -Author: pvl-bot -Date: Sun Oct 15 00:09:21 2023 -0400 - - Merge remote-tracking branch 'origin/fan' into rc_1.1.1 - -commit f7889fb3c95d5ec915b929b75a332a6e539e8087 -Author: Alexander Raistrick -Date: Sat Oct 14 23:50:54 2023 -0400 - - Fix GeneratingIndividualAssets - -commit b3d9d6f14c9d617d75a397dea6c96f95934adc46 -Author: lahavlipson -Date: Sat Oct 14 22:10:52 2023 -0400 - - Optical flow / ground truth fixes and refactor - - * Refactor. - - * Scaling no longer needed for optical_flow_warp.py - - * Update compile_opengl.sh - - * Refactor optical_flow_warp.py - -commit ced0e871a740785bb0c366607b215244b836d6c0 -Merge: 9310e7b7e d2246f493 -Author: Alexander Raistrick -Date: Sat Oct 14 22:38:49 2023 -0400 - - Merge remote-tracking branch 'origin/rc_1.0.4' into rc_1.1.1 - -commit d2246f493679fa973271155179b826c5956dfec7 -Author: pvl-bot -Date: Sat Oct 14 21:28:29 2023 -0400 - - Update CHANGELOG, add copyright comments - -commit b2d4e47ecf325d5fd2346b8041b3ebc966046071 -Author: lahavlipson -Date: Sat Oct 14 21:10:14 2023 -0400 - - Infer buffer_size = 2 x image_size - -commit 90b87310787b99f4bde004e0b8cd045442a762ff -Author: lahavlipson -Date: Sat Oct 14 20:53:10 2023 -0400 - - Update flow scale. - -commit 93c0bef3280fccca042ad5cfe75301a32385abda -Author: Alexander Raistrick -Date: Sat Oct 14 18:36:08 2023 -0400 - - Create dataset download tool, pregenerated dataset docs - -commit f1a27a93b858e9dee32b389e0694b98ce50f9485 -Author: Alexander Raistrick -Date: Fri Oct 13 15:55:12 2023 -0400 - - Bugfix tag segmentation, render throughput, upload cleanup, crashing on symlinks - - * Revert monocular video fineterrain/populate ordering - - * Fix jobs getting stuck due to max_stuck_at_numdone settings - - * Fix upload cleanup - - * Fix crashing on symlinks - - Bugfix tag segmentation, render throughput, upload cleanup, crashing on symlinks - - * Revert monocular video fineterrain/populate ordering - - * Fix jobs getting stuck due to max_stuck_at_numdone settings - - * Fix upload cleanup - - * Fix crashing on symlinks - -commit b7b431f90d106a11154a7d9d27b8835505e97e2c -Author: lahavlipson -Date: Fri Oct 13 19:08:02 2023 -0400 - - optical_flow_warp.py bug fix. - -commit 75994298ccc60e24aae838ac16b51a15172bde42 -Author: Alexander Raistrick -Date: Fri Oct 13 18:05:30 2023 -0400 - - Allow RCLONE prefix via ENVVAR, allow more general prefixes - -commit f94eebda675cdc8748319fa7265e881d13c38de8 -Author: lahavlipson -Date: Thu Oct 12 20:36:38 2023 -0400 - - Refactor. - -commit 6a49d6fe586175537dc98459f0d470b82731579e -Author: Alexander Raistrick -Date: Fri Oct 13 17:14:04 2023 -0400 - - Standardize image suffix format across the whole repo - -commit 98acdb8e1e147bda9fc295460ef3db695ad1751a -Author: Lahav Lipson -Date: Thu Oct 12 15:12:05 2023 -0400 - - Ground truth scripts update oct5 (princeton-vl/infingen_internal/#86) - - * Bug fix when rendering instance ids. Remove indents in Objects.json. Save instance_ids to Objects.json. - - * Update optical_flow_warp.py - - * Update depth_to_normals.py and requirements.txt - - * Update depth_to_normals.py - - * Update rigid_warp.png - - * Update instructions. - - * Update segmentation and bounding box scripts to work w/o tags. - - * Update comments. Use get_frame_path. 
- -commit fdef6f268bd47324f561d23ecdd8c48e48c84f4a -Author: Alexander Raistrick -Date: Wed Oct 11 16:06:16 2023 -0400 - - Implement retar for release - -commit 3e855cba2d6d91639a459793fdc81d818d3f7e26 -Author: Alexander Raistrick -Date: Sat Oct 7 18:53:27 2023 -0400 - - Foolproof interpolation specification, handle mask and compressed npz cases - -commit b69b87d1701b95a080a3318903fc56d999cdd1fc -Author: Alexander Raistrick -Date: Wed Oct 4 15:33:14 2023 -0400 - - Implement frames folder reformat, update torch dataset, make dataset mostly standalone - -commit a188fe18dfb21e75e25be837e0e9c612cc180e29 -Author: Alexander Raistrick -Date: Sun Sep 17 13:41:04 2023 -0400 - - Data release toolkit - - * Implement format fixer - * torch dataset example - - * Tar-by-tar version of data toolkit, - * resize groundtruth, - * optimize jsons, - * fill missing poses with dummy dicts - -commit 9310e7b7e440bdc84b5671a65baceb0f7db068d9 -Author: Alexander Raistrick -Date: Wed Oct 11 14:28:00 2023 -0400 - - Update landlab and numpy, fix bnurbs import when using minimal install - -commit 0306641b6899a825f698a29649bc9aa874c7e937 -Author: Zeyu Ma -Date: Tue Oct 10 03:23:08 2023 -0400 - - terrain tests - -commit 67d9b17ae9b72024a684e4e84210474c5a5cad39 -Author: Alexander Raistrick -Date: Mon Oct 9 17:39:07 2023 -0400 - - Add INFINIGEN_MINIMAL_INSTALL flag to disable ALL compiling when doing interactive blender install - -commit 464a3f7a66c42f123481c9b6c9d53c679f9f994c -Author: Alexander Raistrick -Date: Fri Sep 1 15:22:53 2023 -0400 - - Update docs, fix runtime issues in pip installation - - * Update docs, add json to packaging, fix .soil file loading - - * Update docs to remove all references to old installation strategy - - * Move tools back to infinigen/tools, update all python invocations to be -m calls for compatibility with pip-only installs - - * Dont require wandb, dont even import it unless explicitly enabled - - * Fix erroneous gin logging, disable non-warning logging for all child packages, make all code use a non-root logger - - * Make fluid installation a fully separate post-install step, and dont attempt to init the module unless it will be used - - * Import ordering fix - - * Add cibuildwheel config, fix python -m build crash - - * Fix infinite runtime on hello world blendergt - - * Install script for interactive blender install - - * Move color util to fix circular import - - Fix rebase typos, fix torch_dataset imports, fix interactive_blender install - -commit fd3592344ef4f0f9b8d3cdd4feb10d2588190263 -Author: David Yan -Date: Wed Aug 30 23:13:33 2023 -0400 - - ast rendering fix - -commit 593876afb822da756b42fd56618233e08826dcd8 -Author: David Yan -Date: Wed Aug 30 22:21:29 2023 -0400 - - bpy module multiprocessing fix - -commit 6b59ddae188de20cff361802489f24eb9a460ae2 -Author: DavidYan-1 -Date: Mon Aug 28 15:44:03 2023 -0400 - - Update docker for pip-installed bpy - - * docker fixes - - * wsl docker fix - - * remove opengl compilation from docker-run - - * docker editable install - - * dockerfile type - -commit cba91ca596c46cd8f78a693b1b2650ff2b35cf1e -Author: Alexander Raistrick -Date: Mon Aug 21 12:31:47 2023 -0400 - - Working pip installation - - * Update test import mechanism to use better specified paths - - * Move nonessential tools out of the infinigen package dir - - * Dont build terrain etc during tests - - * temp commit txt package data - - * Fix tests and runtime errors when running as a package - - * Refactor config loading, fix relative paths in config, html, json pallette - - * Move 
examples to infinigen_examples, test execute_tasks, make all config imports relative - - * Add manifest.in, simplify pyproject.toml - - * Convert surface registry to relative importlib style - - Remove old docs, single-source the package version - - Fix misc test warnings, disable egregiously slow single asset tests - - * Final painstaking fix for pyproject.toml to include all compiled files & data files - -commit eaaa8162a1647442e511bc2e1a35f1a2c061d8e5 -Author: DavidYan-1 -Date: Mon Aug 21 15:09:36 2023 -0400 - - Makefile switch back to rm -rf - -commit c367b6bdf5106a9eb4820123ea6d9201f8719586 -Author: pvl-bot -Date: Sat Aug 19 15:47:39 2023 -0400 - - Docker build & editor config from "Various docker fixes (#22)" - - Co-authored-by: datashaman - -commit 887cd553df405e37ed82ff19c95a207eef13f5d8 -Author: Alex Raistrick -Date: Fri Aug 18 12:24:53 2023 -0400 - - Fix everything that didnt pass tests / checks - - * Fix non-compiling code found by linting - - * Fix non fatal linting errors - -commit 0228fde529258a2b366450d75ae2db78f133ef58 -Author: Alex Raistrick -Date: Thu Aug 17 17:44:34 2023 -0400 - - Unit tests - - * Set up pytest - - * asset tests - - * material tests - - * hello_world tests - - * Iterate github workflows - - * Update tests - - * fix gh actions - - * Remove installation checks for now - - * Ignore dependencies folder - - * Test improvements - - * Fix unit test commit pyproject toml - -commit c4a14d6f89fa40a00d5e2f575946c0d851eba0c5 -Author: Alex Raistrick -Date: Mon Aug 14 15:17:47 2023 -0400 - - Create setup.py and configs, minimize dependencies - - * Disable blending.py, make pallette requirements optional - - * Move marching cubes into toplevel setup.py, remove all python command invocations from CMake - - * Add pyproject.toml, convert bnurbs cythonize to toplevel setup.py - - * Leave terrain/opengl as independent install scripts - - * Move version to a txt file - - * Install flip fluids into pip bpy's addons folder - - * Reduce commits via better __init__.pys - - * Use pip install -e . 
in setup.py, update infinigen_gpl pointer - - * Move remainder of install.sh to setup.py, move version to __init__.__version__, add build commands to Makefile - - * Only run build_deps in the right steps, add options to disable terrain etc - - * Move scripts to examples - - * Tweak setup.py - - * add tabulate req - -commit f1c3e24dd8dd51b21d07b8a8c806279a04fdf58a -Author: pvl-bot -Date: Sat Aug 12 17:47:33 2023 -0400 - - Fix all imports and paths - -commit 6265e9a6bef5a98c16f41be7f551f0339cfceec9 -Author: pvl-bot -Date: Sat Aug 12 15:04:22 2023 -0400 - - Reorganize the entire repo (breaks imports) - -commit 3c767644952044b96bbe4415e91d50f8ab696854 -Author: David Yan -Date: Sat Aug 12 14:40:47 2023 -0400 - - Update to 3.6, fix installation, fix all existing asset code - - * working install.sh - - * assorted 3.6 compatibility fixes - - * fixed transfer attribute and added kernerlizer nodes.mix code - - * group input value fixes + index -> name specifiy - - * rename asset_grid and fixed blender path - - * bird fixes + revert geometrynodes.py transfer_attr - - * tiger, snake, transfer_attr compatibility fixes - - * chameleon and sculpt transfer attribute updates - - * Coconut Tree Fixes - - * Tree Fix - - * Assorted Fixes - - * Fish, Bird, Flowering plant fixes - - * Dragonfly, Chameleon fixes + making more assetfactories discoverable - - * Ivy, Lichen, Treeflower Fixes - -commit 1100f52b49f474a602bfd2b14b9f4729d44691e6 -Author: Alexander Raistrick -Date: Sat Aug 12 14:38:50 2023 -0400 - - Initial buggy 3.5 fixes - - * pip-based install script - - * Update docs and manage_datagen_jobs to use conda python - - * Copy over 3.5 nodegroup interface fixes - - * Fix duplicate Value nodegroup input kwargs - - * Implement compatibility mapping functions to catch old code using no-longer-support blender nodes - - * Handle both commandline formats in argparse - - * Update transfer attributes to ignore hidden attrs - - * Fix all Msample default_value interfaces in mingzhe materials, fix remaining StoreNamed - - * patched butil and mesh tools - - * blender_internal_attr - -commit e393b667ab89de5498ff2ce374b8756a18e9ada0 -Author: lahavlipson -Date: Tue Sep 5 10:49:33 2023 -0400 - - Fix opengl not writing ground truth for second stereo image - - * GT bug fixes sep4 (princeton-vl/infinigen_internal/#81) - - * Misc bug fixes. - - * More bug fixes. 
- - ---------------- - - Co-authored-by: Alexander Raistrick - -commit dd9569b8329da70596cb7576301c1942e936a630 -Author: Alexander Raistrick -Date: Sun Sep 3 16:59:56 2023 -0400 - - manage_datagen_jobs refactor and new features - - * splt into many files - - * add max_queued_tasks - - * add cleanup except_logs - - * add finalize_tasks list, move upload - - * Aggressively cancel jobs when siblings crash - -commit b26f5654804870d9c22d09a237daa9a517edc498 -Author: Alexander Raistrick -Date: Sun Sep 17 13:41:04 2023 -0400 - - Data release toolkit - - * Implement format fixer - * torch dataset example - - * Tar-by-tar version of data toolkit, - * resize groundtruth, - * optimize jsons, - * fill missing poses with dummy dicts - -commit 4489715eaab60a3fbebfad512b046df3c3e7967e -Author: pvl-bot -Date: Wed Oct 4 09:50:15 2023 -0400 - - v1.0.4 - Rendering tools improvements, ground truth optimization - -commit 420664cc448ab93b8c887877a64be4581368dafd -Author: Alexander Raistrick -Date: Sat Sep 30 17:43:46 2023 -0400 - - Upload checking, fix opengl default resolution - - * Enforce that user tells upload what to do with every single file, no missing or accidentally omitted files are possible - - * Fix upload fine, disable camera placeholder to bring back forests, Refine cleanup, make camviews mandatory, opengl default to 720p - -commit 6907ac787061d3cdf5e89a2b6ce1e5b123e331ea -Author: DavidYan-1 -Date: Mon Oct 2 16:03:29 2023 -0400 - - test fixes + remove pytest from integration testing script - -commit 39bf2bffd1db2a54a0c3b62d959eba1f00bc5c38 -Author: Alexander Raistrick -Date: Sun Sep 17 23:22:00 2023 -0400 - - Manage datagen jobs refactor and latency improvements - - * Refactor and reorder upload, do metadata/thumbnail last as a sign of completion - - * Add max_stuck_at_step limiter, refactor state tracking, bring back jobs.log, cleanup/refactor some parts - - * Add command upload, add slurm_cpuheavy, tweak other configs, remove fineterrain by default - -commit 9694693de5ebca754cf8ce9a8840806b79cf8686 -Author: lahavlipson -Date: Tue Sep 5 10:49:33 2023 -0400 - - Fix opengl not writing ground truth for second stereo image - - * GT bug fixes sep4 (princeton-vl/infinigen_internal/#81) - - * Misc bug fixes. - - * More bug fixes. - - ---------------- - - Co-authored-by: Alexander Raistrick - -commit 03e27a735b1445cc16a50b55c8c3ecf32e0c4a1f -Author: Lahav Lipson -Date: Sun Sep 3 19:26:05 2023 -0400 - - Opengl updates sep3 (princeton-vl/infinigen_internal/#80) - - * Save view size and camera parameters to single npz file. - - * Ignore CURVES objects, for now. - - * Remove unused code. - -commit 3648e8571f2f762dbea5982919f6936518531154 -Author: Lahav Lipson -Date: Sun Sep 3 19:25:36 2023 -0400 - - Reduce storage costs & make segmentation masks easier/faster to use (#77) - - * Add compress_masks.py - - * Call compress_masks.py in opengl_uuid.sh - - * Teensy fix. - - * Teensy fix. - -commit 6a1e2190999f77e217feee70da03943fe5875f44 -Author: Alexander Raistrick -Date: Sun Sep 3 16:59:56 2023 -0400 - - manage_datagen_jobs refactor and new features - - * splt into many files - - * add max_queued_tasks - - * add cleanup except_logs - - * add finalize_tasks list, move upload - - * Aggressively cancel jobs when siblings crash - - * Fix queueing - - * Bugfixes - -commit 32e11751eb1cd68f1b0e3de50cd3a96c95930186 -Author: lahavlipson -Date: Fri Aug 25 18:13:56 2023 -0400 - - Fix missing range-check bug. 
- -commit fab93f4e819084b54b150105f1b80f369e1f7c7a -Author: Alexander Raistrick -Date: Thu Aug 24 16:20:34 2023 -0400 - - Visual & Pipeline config improvements - - * Fix upload_util - - * Tune visual configs, move ocean to experimental - - * Deduplicate slurm_account settings, allow random choice of accounts - - * Print banned nodes to verify they are working on startup - - * Add commit hash and resolve paths in run_pipeline.sh - - * Tune caustics and glowing rocks chance/strength - - * Move rain to experimental due to no motion blur - -commit 51c73075b7ead5ff2a8effeb62fdb3f494beb88f -Author: Lahav Lipson -Date: Thu Aug 24 16:17:09 2023 -0400 - - 96 bit instance ids (princeton-vl/infinigen_internal/#71) - - * Update exporting.py to save 3 32-bit ints for instances. - - * Save instance ids as HxWx3 array. - - * Update instance segmentation visualization. - -commit 774346e211935b0e84eeb1228ddf5ad979e75082 -Author: lahavlipson -Date: Sun Jul 9 00:57:58 2023 -0400 - - Only load vertex indices for current frame. - - * Save mesh bugfix - untested - - * Fix compilation error. - - * Bug fix. - -commit 0f208a8044c38a797f3383d7c7f7f7425154f25b -Author: Kaiyu Yang -Date: Mon Sep 4 08:06:38 2023 -1000 - - Update ImplementingAssets.md (#142) - -commit 6919bfbbb3342041504ff1ef986b036857e27783 -Author: pvl-bot <136786582+pvl-bot@users.noreply.github.com> -Date: Mon Aug 28 16:56:11 2023 -0400 - - Hotfix highpoly terrain mesh not shown in fine.blend (#139) - - * optimize_terrain_diskusage_flag - - * no redundant glb saving - - * Disable optimimze_terrain_diskusage unless using high_quality_terrain - - --------- - - Co-authored-by: Zeyu Ma - -commit 3664df8bf5b7ff45f3911be29bd56358689fd44e -Author: Lingjie Mei -Date: Fri Aug 25 14:16:19 2023 -0400 - - Make deformed trees work again. 
- -commit 0ab7cd7d2507115f3228aafc06126a4d4332a9e7 -Author: Lingjie Mei -Date: Thu Aug 24 22:01:15 2023 -0400 - - Cherry-pick from e2a7 - -commit b166f66f32d789481daff0d4a01e2fd2b9e57906 -Author: Lingjie Mei -Date: Thu Aug 24 21:01:28 2023 -0400 - - Cherry-pick from e2a7 - -commit d38346baeff7b30140c1b253ca4349125837f74d -Author: Lingjie Mei -Date: Thu Aug 24 21:01:10 2023 -0400 - - Cherry-pick from 986d3 - -commit 7e2975b239d6da2ad6af05457df19db06bdf755a -Author: Lingjie Mei -Date: Thu Aug 24 20:58:55 2023 -0400 - - Cherry pick from b63c9b - -commit e5d15b76c7a46a17f03bd2809d073f0e8ea03eca -Author: Lingjie Mei -Date: Thu Aug 24 20:53:29 2023 -0400 - - Cherry pick from 14e12f - -commit f26a82e0ae5fc0b08a7d2d0cff6b9830e41d2d8f -Author: pvl-bot -Date: Wed Aug 16 11:32:02 2023 -0400 - - Hotfix flip_fluid loading, update github templates - -commit 4ae5d20c410da9ced0424e3ec69e5a5701f30ae4 -Author: pvl-bot -Date: Tue Aug 15 18:41:53 2023 -0400 - - Hotfix opencv version #130 - -commit a1edc13ce3639384cf7afd9d45b7ec6060cfee2c -Merge: b387b5a60 366836bca -Author: pvl-bot <136786582+pvl-bot@users.noreply.github.com> -Date: Tue Aug 15 17:05:46 2023 -0400 - - Merge pull request #132 from princeton-vl/develop - - v1.0.3 - Fluid code release, implementing assets documentation, render tool improvements, integration tests - -commit 366836bca07917cc312b9a7c3b3b4e5818a1ef8c -Author: pvl-bot -Date: Mon Aug 14 13:34:26 2023 -0400 - - v1.0.3 - Fluid code release, implementing assets documentation, render tool improvements, integration tests - -commit a124321d82f0e9917baa35c9a291d4dd37c6dae6 -Author: Karhan Kaan Kayan -Date: Tue Aug 8 12:19:47 2023 -0400 - - Fluid documentation (princeton-vl/infinigen_internal/#61) - - * Add hello world scene - - * Config tweaks - - * fix the camera bug and water not simulating - - * res fix - - --------- - - Co-authored-by: pvl-bot - -commit c534cd87da56afe1a00d07a30595c17186082b95 -Author: Alex Raistrick -Date: Tue Aug 8 11:13:28 2023 -0400 - - Fluid Refactor (princeton-vl/infinigen_internal/#60) - - * Acknowledge FLIP-Fluids - - * Deduplicate configs - - * Remove fluid-specific logic from camera funcs, move to scene_type_fluidsim - - * Move river invocation from core.py to compose_scene run_stage calls - - * Remove FLIP caustics from release - - * Move fire scenecomp and Cached class wrappers under fluid/ - - * Change on_the_fly to use run_stage - - * Move installation to tools/install, make FLIP installation optional and update GeneratingFluidSimulations.md - - * Deduplicate river configs, add example commands - - * Only unhide assets needed for fire sim, and unhide once done - - * Move enable parent cols to butil contextmanager - - * Remove unnecessary --blender_path - - * Typofixes - - * Fix accidentally deleted config fields in new river configs - - * Fix finding placeholders for river calls - - * Fix impl typo - - * Cleanup camera selection varname, unused kwargs - - * Catch modulenotfound errors - -commit 18152ea60abab61fa226a67180be5249c9469922 -Author: Karhan Kaan Kaan -Date: Mon Aug 7 10:54:09 2023 -0400 - - Add fluid code-release changes in worldgen/tools/submit_asset_cache.py - -commit f295b204bd0124ddfc11355029ab55e98910f28d -Author: Karhan Kaan Kaan -Date: Mon Aug 7 10:54:09 2023 -0400 - - Add fluid code-release changes in worldgen/tools/scripts/render_video_fire.sh - -commit d870745a74329f443aa66cf7c0942e41ea17f697 -Author: Karhan Kaan Kaan -Date: Mon Aug 7 10:54:09 2023 -0400 - - Add fluid code-release changes in worldgen/tools/scripts/render_river_video.sh - 
-commit 3f5bc04819280bdf9c8580f4a57057d4736b7021 -Author: Karhan Kaan Kaan -Date: Mon Aug 7 10:54:09 2023 -0400 - - Add fluid code-release changes in worldgen/tools/pipeline_configs/slurm_fluid.gin - -commit 70bd6f97dde1ee0b15c14d4453bd92e62f53d6a5 -Author: Karhan Kaan Kaan -Date: Mon Aug 7 10:54:09 2023 -0400 - - Add fluid code-release changes in worldgen/tools/pipeline_configs/monocular_video_river.gin - -commit 419baf3623de30d1220944d04f0ff7751eddb6ea -Author: Karhan Kaan Kaan -Date: Mon Aug 7 10:54:09 2023 -0400 - - Add fluid code-release changes in worldgen/surfaces/templates/waterfall_material.py - -commit f3634a288ba7559554194a238ea1ff18aa5bb895 -Author: Karhan Kaan Kaan -Date: Mon Aug 7 10:54:09 2023 -0400 - - Add fluid code-release changes in worldgen/surfaces/templates/smoke_material.py - -commit bf3d97e03ed3aaa1966584d15d477afc92153009 -Author: Karhan Kaan Kaan -Date: Mon Aug 7 10:54:09 2023 -0400 - - Add fluid code-release changes in worldgen/surfaces/templates/river_water.py - -commit 77c36a5e93a4e79eefb7b7bf9327795a10759aa3 -Author: Karhan Kaan Kaan -Date: Mon Aug 7 10:54:09 2023 -0400 - - Add fluid code-release changes in worldgen/surfaces/templates/new_whitewater.py - -commit d684e5693a72f34a174c2362c1253f2addd8725d -Author: Karhan Kaan Kaan -Date: Mon Aug 7 10:54:09 2023 -0400 - - Add fluid code-release changes in worldgen/surfaces/templates/blackbody_shader.py - -commit 8108cd9d7fcab8c6830379f64098cb99dc03eaf7 -Author: Karhan Kaan Kaan -Date: Mon Aug 7 10:54:09 2023 -0400 - - Add fluid code-release changes in worldgen/fluid/unit_tests.py - -commit 4ce86c1592b922db31da3637df491112ad84f3a6 -Author: Karhan Kaan Kaan -Date: Mon Aug 7 10:54:09 2023 -0400 - - Add fluid code-release changes in worldgen/fluid/run_tests.py - -commit 701defdda8ee1905d105e92b5f1ee8a6af109e5a -Author: Karhan Kaan Kaan -Date: Mon Aug 7 10:54:09 2023 -0400 - - Add fluid code-release changes in worldgen/fluid/run_asset_cache.py - -commit f5c2a739afb2337e02d35cd7cf3e37c266e6db3f -Author: Karhan Kaan Kaan -Date: Mon Aug 7 10:54:09 2023 -0400 - - Add fluid code-release changes in worldgen/fluid/liquid_particle_material.py - -commit 9107c1873b41f27df0f424fe8a8b5c5c44c5671c -Author: Karhan Kaan Kaan -Date: Mon Aug 7 10:54:09 2023 -0400 - - Add fluid code-release changes in worldgen/fluid/generate.py - -commit a129c88170d062c639192395fb2d36386419c15f -Author: Karhan Kaan Kaan -Date: Mon Aug 7 10:54:09 2023 -0400 - - Add fluid code-release changes in worldgen/fluid/fluid.py - -commit af0a1d6930b05afb0d65a89f4680a95685e248ad -Author: Karhan Kaan Kaan -Date: Mon Aug 7 10:54:09 2023 -0400 - - Add fluid code-release changes in worldgen/fluid/flip_init.py - -commit d2b67ae694a7a3c7b8b878f8e810a51103100848 -Author: Karhan Kaan Kaan -Date: Mon Aug 7 10:54:09 2023 -0400 - - Add fluid code-release changes in worldgen/fluid/flip_fluid.py - -commit a33529ee7f2247d0efa1495e69f5f15b06c3dbfb -Author: Karhan Kaan Kaan -Date: Mon Aug 7 10:54:09 2023 -0400 - - Add fluid code-release changes in worldgen/fluid/duplication_geomod.py - -commit 993087f4684e8f7e72fd9c5cc85a5806d2f79132 -Author: Karhan Kaan Kaan -Date: Mon Aug 7 10:54:09 2023 -0400 - - Add fluid code-release changes in worldgen/fluid/bounding_box.py - -commit 26022c1d6726bf2ef09ae4f1a75ed305e0d405d9 -Author: Karhan Kaan Kaan -Date: Mon Aug 7 10:54:09 2023 -0400 - - Add fluid code-release changes in worldgen/fluid/asset_cache.py - -commit 9fac0fbcae1cd1483cf6cf735bea38bf51509b6b -Author: Karhan Kaan Kaan -Date: Mon Aug 7 10:54:09 2023 -0400 - - Add fluid code-release 
changes in worldgen/fluid/__init__.py - -commit 612527eb14b91f363d28841f362faa26616fd3f0 -Author: Karhan Kaan Kaan -Date: Mon Aug 7 10:54:09 2023 -0400 - - Add fluid code-release changes in worldgen/config/use_on_the_fly_fire.gin - -commit 85d1f789a17b9dd1b3ab834ce4c716bdd04af595 -Author: Karhan Kaan Kaan -Date: Mon Aug 7 10:54:09 2023 -0400 - - Add fluid code-release changes in worldgen/config/use_cached_fire.gin - -commit 891e0f57204a42cea612e04fd7487653e6fdfbcb -Author: Karhan Kaan Kaan -Date: Mon Aug 7 10:54:09 2023 -0400 - - Add fluid code-release changes in worldgen/config/trailer_river.gin - -commit c17a58aedd17586158a524f5388d4230a923b5b7 -Author: Karhan Kaan Kaan -Date: Mon Aug 7 10:54:08 2023 -0400 - - Add fluid code-release changes in worldgen/config/scene_types/tilted_river.gin - -commit e80503a64566d8b888aab242ee45928a9910d233 -Author: Karhan Kaan Kaan -Date: Mon Aug 7 10:54:08 2023 -0400 - - Add fluid code-release changes in worldgen/config/scene_types/simulated_river.gin - -commit 5ee57e4905b76fa1f639cb49a3a9b24f16b81438 -Author: Karhan Kaan Kaan -Date: Mon Aug 7 10:54:08 2023 -0400 - - Add fluid code-release changes in worldgen/util/blender.py - -commit 6487c2a2742abfdb877aa832f855245ffb6f7e04 -Author: Karhan Kaan Kaan -Date: Mon Aug 7 10:54:08 2023 -0400 - - Add fluid code-release changes in worldgen/tools/util/cleanup.py - -commit ca83ec8c4ca3174dc8150909b20150ed2b61a6d4 -Author: Karhan Kaan Kaan -Date: Mon Aug 7 10:54:08 2023 -0400 - - Add fluid code-release changes in worldgen/tools/asset_grid.py - -commit 7115b597e4ce1f92348a32795660bdc5cec9350a -Author: Karhan Kaan Kaan -Date: Mon Aug 7 10:54:08 2023 -0400 - - Add fluid code-release changes in worldgen/terrain/core.py - -commit eddd10571f893099777f3cfdcfff1b6a58843312 -Author: Karhan Kaan Kaan -Date: Mon Aug 7 10:54:08 2023 -0400 - - Add fluid code-release changes in worldgen/surfaces/templates/spider_plant.py - -commit 2b9481d5fdcb23d860ea2996c2339aea8be50cc5 -Author: Karhan Kaan Kaan -Date: Mon Aug 7 10:54:08 2023 -0400 - - Add fluid code-release changes in worldgen/surfaces/templates/snake_plant.py - -commit a98fdd9bf52fc14eb2c51540906a2c1540aabb84 -Author: Karhan Kaan Kaan -Date: Mon Aug 7 10:54:08 2023 -0400 - - Add fluid code-release changes in worldgen/rendering/render.py - -commit 7b7c845a152262d59dca63d2337227c34c95c963 -Author: Karhan Kaan Kaan -Date: Mon Aug 7 10:54:08 2023 -0400 - - Add fluid code-release changes in worldgen/placement/placement.py - -commit e687f0b8edce3007acfc956d0320a4ff483b2fe8 -Author: Karhan Kaan Kaan -Date: Mon Aug 7 10:54:08 2023 -0400 - - Add fluid code-release changes in worldgen/placement/camera.py - -commit 3e685b6cf20f55d57e858b5c011faccbe6d67c9f -Author: Karhan Kaan Kaan -Date: Mon Aug 7 10:54:08 2023 -0400 - - Add fluid code-release changes in worldgen/placement/animation_policy.py - -commit 3388c5d532504b663bcd54fdf076875088b5c907 -Author: Karhan Kaan Kaan -Date: Mon Aug 7 10:54:08 2023 -0400 - - Add fluid code-release changes in worldgen/generate.py - -commit 4903651982349fdfe841c41d16f15472386553a1 -Author: Karhan Kaan Kaan -Date: Mon Aug 7 10:54:08 2023 -0400 - - Add fluid code-release changes in worldgen/core.py - -commit f94a2d90349131fa858bce93c72655fdbedc0d18 -Author: Karhan Kaan Kaan -Date: Mon Aug 7 10:54:08 2023 -0400 - - Add fluid code-release changes in worldgen/config/scene_types/desert.gin - -commit 8141784a3e977820e0dde22b108d50aefc8017df -Author: Karhan Kaan Kaan -Date: Mon Aug 7 10:54:08 2023 -0400 - - Add fluid code-release changes in 
worldgen/config/scene_types/arctic.gin - -commit 521da2ed314d95631b5e2d4f1f902600ce85e59d -Author: Karhan Kaan Kaan -Date: Mon Aug 7 10:54:08 2023 -0400 - - Add fluid code-release changes in worldgen/config/base.gin - -commit bcddd46c8330ea4fdda80b563e55796959624144 -Author: Karhan Kaan Kaan -Date: Mon Aug 7 10:54:08 2023 -0400 - - Add fluid code-release changes in worldgen/assets/trees/generate.py - -commit d8a17ef130abd790f0bca36334db93fe88d9c256 -Author: Karhan Kaan Kaan -Date: Mon Aug 7 10:54:08 2023 -0400 - - Add fluid code-release changes in worldgen/assets/trees/__init__.py - -commit b3133b4504a844d4fad43ac754057cac789e206b -Author: Karhan Kaan Kaan -Date: Mon Aug 7 10:54:08 2023 -0400 - - Add fluid code-release changes in worldgen/assets/small_plants/spider_plant.py - -commit 27c70b91874b74bc66123c7bdad4772f80b35c5f -Author: Karhan Kaan Kaan -Date: Mon Aug 7 10:54:08 2023 -0400 - - Add fluid code-release changes in worldgen/assets/small_plants/snake_plant.py - -commit 630a951ef43f1fa34e5979e2d46fc87158921d33 -Author: Karhan Kaan Kaan -Date: Mon Aug 7 10:54:08 2023 -0400 - - Add fluid code-release changes in worldgen/assets/creatures/genomes/carnivore.py - -commit b47c24f778eeabb7c1c3d80e92acd1bfda916a3f -Author: Karhan Kaan Kaan -Date: Mon Aug 7 10:54:08 2023 -0400 - - Add fluid code-release changes in worldgen/assets/creatures/__init__.py - -commit f167d5a594b1d855d82641e5a01af965f378ae73 -Author: Karhan Kaan Kaan -Date: Mon Aug 7 10:54:08 2023 -0400 - - Add fluid code-release changes in worldgen/assets/cactus/generate.py - -commit 2cf1a545fa717826e08b37d0aa9d0b9e0f8077b1 -Author: Karhan Kaan Kaan -Date: Mon Aug 7 10:54:08 2023 -0400 - - Add fluid code-release changes in worldgen/assets/cactus/__init__.py - -commit fc1d9b2c5433f4bc7334e606648a25ad0ca7e4d0 -Author: Karhan Kaan Kaan -Date: Mon Aug 7 10:54:08 2023 -0400 - - Add fluid code-release changes in worldgen/assets/boulder.py - -commit a2c9960d0d33660892616c27644cac0be19961da -Author: Karhan Kaan Kaan -Date: Mon Aug 7 10:54:08 2023 -0400 - - Add fluid code-release changes in process_mesh/blender_object.hpp - -commit 91b0edaa6e93dd54cf2aa968d0d839dbd69c13bb -Author: Karhan Kaan Kaan -Date: Mon Aug 7 10:54:08 2023 -0400 - - Add fluid code-release changes in process_mesh/blender_object.cpp - -commit bfa2ab7f82b3a08b128a07f164b02b26b03f127d -Author: Karhan Kaan Kaan -Date: Mon Aug 7 10:54:08 2023 -0400 - - Add fluid code-release changes in install.sh - -commit bd69644a3b322f3af941a0aeed74022f14f33340 -Author: Alex Raistrick -Date: Tue Aug 8 17:30:28 2023 -0400 - - Implementing Assets Documentation (princeton-vl/infinigen_internal/#64) - - * Documentation draft - - * Make terrain optional, remove terrain dependence from camera logic - - * Fix config typos - - * Improve intro and blender UI setup - - * Added transpiler tutorial - - * Finish minimal implementing assets docs - - * Downsize images, add new doc to README - - * Fix image hyperlinks - - * Add testing script addition - - * Fix non-terrain version - -commit 6e17b5c798cc2f71cc35b53e6f48a9ba439038e6 -Author: Alex Raistrick -Date: Tue Aug 1 17:53:31 2023 -0400 - - Rendering improvements (princeton-vl/infinigen_internal/#39) - - * Tweak render_video_final - - * Remove random config choosing from core.py - - * Create tools/pipeline_configs/base.gin, move scenetype distribution configs into it - - * Create noshortrender config to test on IONIC - - * Implement slurm niceness override, add it to render_video_final.sh - - * Only include camera 0 in parse_video output - - * Read 
slurm partition from ENVVAR by default - - * Fix config postprocess - - * Fix slurm envvar - - * Typo fixes - - * Use roundrobin by default - - * Rendering tweaks - - * Change trailer.gin to video.gin with 720p res - - * Fix niceness - - * Set exclude_nodes list via envvar, move niceness configs into slurm.gin - - * Create render_video_720p.sh, start off experimental.gin but more needs adding - - * Add dryrun options - - * Fix --override vs --overrides - - * Move legacy task.Fine - - * Retool upload func - - * Add slurm_1h and stereo config - - * Rendering & typo fixes - - * Update render script and slurm.gin mem amounts - - * Fix excluded gpus - - * Add queues stats to wandb, add pandas to requirements.txt - - * Fix num_concurrent reset 24h later - - * Dont keep working on scenes which have had a fatal crash - - * Add new timeout message to error parsing - - * Fix overly nested upload dirs - - * Add thread limit to local jobs - -commit 887eb82f099ba48a59f894b8eaa50ce52fbbb7da -Author: pvl-bot -Date: Sat Aug 5 15:18:52 2023 -0400 - - Docker overhaul (#65) - - Co-authored-by: datashaman - Co-authored-by: David Yan - -commit a27e62059b2fa17f88f7a073dae5218f98d79143 -Author: pvl-bot -Date: Sat Aug 5 15:11:54 2023 -0400 - - Remove docker code attributed to pvl-bot, to be recommitted with github - co-authorship - -commit 62350548c029b2aa8cfe9a67fd9a8da29c8799ed -Author: David Yan -Date: Tue Aug 1 17:55:53 2023 -0400 - - Profiling & Testing (princeton-vl/infinigen_internal/#28) - - * basic framework - - * step times - - * multiple file fix - - * multiple file fix - - * more detailed logs - - * asset stat reporting - - * memory stats - - * fixed brightness test - - * improved formatting - - * noise estimation using PSNR - - * grayscale working - - * single image noise estimation - - * asset step memory accuracy - - * switched to opengl_gt - - * fixed inaccurate mem diffs - - * output formatting - - * increased sampling and changed noise estimation method - - * blender opengl gt combined config - - * fixed opengl+blender config - - * fixed timedeltas > 1 day breaking - - * further ground truth testing - - * aggregate tag segmentation stats - - * obj segmentation stats - - * cleaned up gt comparison (not working still) and reordered config - - * named tables - - * more table titles + readability - - * removed old copyright - - * deleted extraneous file - - * copyright - - * made internal representation of stage times more flexible - - * Delete logs.log - - * save data csv - - * object and instance counting - - * general testing usability improvements - -commit b387b5a6002b2fd2898e78a664b5754ca01e7081 -Merge: 6d0d34a11 a3a5b57ff -Author: pvl-bot <136786582+pvl-bot@users.noreply.github.com> -Date: Fri Jul 28 18:22:29 2023 -0400 - - Merge pull request #115 from princeton-vl/develop - - v1.0.2 - New documentation, plant improvements, disk and reproducibility improvements - -commit a3a5b57ffee494121a50514a8e331d4c2a42dbd5 -Author: araistrick -Date: Fri Jul 21 01:14:26 2023 -0400 - - v1.0.2 - New documentation, plant improvements, disk and reproducibility improvements - -commit 587fe9828306a8ea1ae1eacdf786700a4c87f1a3 -Author: Alex Raistrick -Date: Fri Jul 21 03:01:02 2023 -0400 - - Refactored & Expanded Documentation (#22) - - * Move existing docs + Installation and HelloWorld into docs/ folder - - * Search entire config/ and pipeline_config/ dirs, allow specifying with .gin prefix - - * Organize worldgen/tools/pipeline_configs - - * Organize worldgen/configs - - * Initial commandline documentation - - 
* generate.py config tweaks - - * Add help strings to all manage_datagen_jobs args - - * CommandlineOptions documentation - - * Reorganize - - * Fix typos - -commit ee36d886b346b5f26ee265051ba743f16f481766 -Author: Alex Raistrick -Date: Wed Jul 19 19:17:07 2023 -0400 - - Copy in terrain onthefly diskusage improvements, update infinigen_gpl to add .gitignore - - Co-authored-by: Zeyu Ma - -commit 0ff02d6fc3d65048e5ce4e320cdcc0200d6efc01 -Author: Zeyu Ma <31351547+mazeyu@users.noreply.github.com> -Date: Thu Jul 20 07:16:47 2023 +0800 - - Reproducibility of asset placement - - * mesh_to_sdf as included code - - * replace mix with explicit formula - - * use relative path of mesh_to_sdf - - * comment out opengl - - * add pyrender req which is prereq of mesh_to_sdf - - * uncommenting back - - * use float in mix - - * trimesh force version - - * face ordering - - Add MIT license for mesh_to_sdf - -commit 18685454f5c411cd2eda6247e1a35a08e4afe75c -Author: Beining Han -Date: Wed Jul 19 13:35:17 2023 -0400 - - Add spider plant and snake plant - -commit d42a77b4126696ec8d3dda004057039892cc45ba -Author: zuoym15 -Date: Wed Jul 19 13:18:45 2023 -0400 - - move over tree branching code - - * move over tree branching code - - * performance bug fix - -commit ccee42c14a7bb3b084019565c92c00639d8c1530 -Author: araistrick -Date: Thu Jul 13 12:04:59 2023 -0400 - - Typo-fixes by tms-gvd (princeton-vl/infinigen#76) - - Co-authored-by: tms-gvd - -commit dbb6d1c63a37712a71a606ddd18609546a8790e7 -Author: araistrick -Date: Mon Jul 10 04:24:41 2023 -0400 - - Refactor rigging, fix IKs, smooth before remesh, fix running, add end trim (Fixes princeton-vl/infinigen#89) - - Copy in import blend devscript - -commit a28da5ad71eda6d616e47c27ed3242b49750ed7c -Author: araistrick -Date: Sun Jul 9 02:10:11 2023 -0400 - - Hide culled placeholders to minimize effects of white cube bug (princeton-vl/infinigen#86) - -commit 209c803f752484fc8a2310645e2448364371f0cf -Author: araistrick -Date: Sun Jul 9 01:55:50 2023 -0400 - - Clarify crashed.txt (princeton-vl/infinigen#95) - -commit 6d0d34a115ed5f9e453fa010c1f4a9038e9dc5c3 -Author: lahavlipson -Date: Mon Jul 3 15:43:58 2023 -0400 - - Revert "Update requirements.txt" - - This reverts commit 941880a4a383ffb15bdc7b288fb1fa3b5f7f6ec1. - -commit aa22d4d1d4cb30de12f6aac453e6f49b87b787db -Author: Lahav Lipson -Date: Mon Jul 3 01:22:06 2023 -0400 - - Gt utilities (#83) - - * Update GT utilities. - - * Overhaul GT visualization for blender's built-in GT - - * Refactor. - - * Flip camera pose axes to be consistent with computer vision. - - * Save camera parameters during render step, not during GT. - - * Fix bug with blender's built-in segmentation masks. - - * Flatten object data json. Remove redundant information. - - * Make built-in GT metadata and OpenGL metadata consistent. - - * Misc. - - * Update GroundTruthAnnotations.md - - * Update GroundTruthAnnotations.md - - * Update GroundTruthAnnotations.md - - * Update README.md - - * Update requirements.txt - - * Update GroundTruthAnnotations.md - - * Update GroundTruthAnnotations.md - - * Update GroundTruthAnnotations.md - -commit 97b8a415f8b4733c4fc0ec6bdd93dfd4aa4c4a44 -Author: Lahav Lipson -Date: Sat Jul 1 00:37:36 2023 -0400 - - Remove old files, update file headers. 
(#82) - -commit e0b75a8b4359003ee98bdf742c03247e158c0024 -Author: pvl-bot -Date: Fri Jun 30 03:53:40 2023 -0400 - - Infinigen v1.0.1 - BSD-3 license, expanded ground-truth docs, show line-credits, miscellaneous fixes - -commit 3040c22ba751ac28d8acca3972d846f269971ffe -Author: Zeyu Ma -Date: Wed Jun 28 22:39:39 2023 -0400 - - Code separation - -commit 1ebed6765fd185c7757766bdd40a856f3f57fbe5 -Author: pvl-bot -Date: Wed Jun 28 18:26:21 2023 -0700 - - Add acknowledgements - -commit aeb7fd0556b7aa2c34e17d53a9b26741156904a2 -Author: pvl-bot -Date: Sun Jun 18 00:46:48 2023 -0400 - - Switch to BSD 3-Clause License - -commit e52bf9f0045dae127d403132a8ea8a2900aa2a9a -Author: Soney Mathew -Date: Thu Jun 22 05:54:30 2023 +1000 - - Update README.md (#2) - - Blender documentation says it's `-noaudio` instead of `--noaudio` (Tested in MacOS Ventura 13.3.1) - -commit e059b8cf310c5d94c34deb0679f13f9da3291606 -Author: Jordan Hubbard -Date: Tue Jun 20 21:24:59 2023 -0700 - - Change submodule paths. - -commit 313a05d780d4dd4f358ec1a4c462caa8268e2594 -Author: Pvl-bot -Date: Fri Jun 30 03:11:45 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/terrain/land_process/erosion.py, fix SurfaceTypes as final commit - - Commit made automatically to show authorship. This version of the code is not usable. - -commit e08a6184b09a17d326cc64d51259173e527598a2 -Author: Zeyu Ma -Date: Fri Jun 30 03:11:45 2023 -0400 - - Add 66 lines last edited by Zeyu Ma in worldgen/terrain/land_process/erosion.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 56329e9f3a058bafd27c136cf596fefa1850e0f7 -Author: Pvl-bot -Date: Fri Jun 30 03:11:45 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/terrain/land_process/snowfall.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 83151be13cca1c77428c9adcf7616cc4079a02b8 -Author: Zeyu Ma -Date: Fri Jun 30 03:11:45 2023 -0400 - - Add 73 lines last edited by Zeyu Ma in worldgen/terrain/land_process/snowfall.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 4cc24cce7554ea3ad4212cc06197c2c7e13f4d22 -Author: Pvl-bot -Date: Fri Jun 30 03:11:44 2023 -0400 - - Add 13 lines last edited by Pvl-bot in worldgen/terrain/source/cpu/meshing/cube_spherical_mesher.cpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 2a21f2d3569beeb00e3de6d75f6ec2071951621b -Author: Zeyu Ma -Date: Fri Jun 30 03:11:44 2023 -0400 - - Add 1240 lines last edited by Zeyu Ma in worldgen/terrain/source/cpu/meshing/cube_spherical_mesher.cpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 0ddfffb512c7675c64bdc679678782a35dcb95a4 -Author: Pvl-bot -Date: Fri Jun 30 03:11:44 2023 -0400 - - Add 14 lines last edited by Pvl-bot in worldgen/terrain/source/cpu/meshing/frontview_spherical_mesher.cpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 4707b1c20d8e7fb574f2404b7e9034ed5c622ba9 -Author: Zeyu Ma -Date: Fri Jun 30 03:11:44 2023 -0400 - - Add 958 lines last edited by Zeyu Ma in worldgen/terrain/source/cpu/meshing/frontview_spherical_mesher.cpp - - Commit made automatically to show authorship. This version of the code is not usable. 
- -commit d1c09900b0dadada0328749b12b6143ae765c18b -Author: Pvl-bot -Date: Fri Jun 30 03:11:44 2023 -0400 - - Add 14 lines last edited by Pvl-bot in worldgen/terrain/source/cpu/meshing/visibility_test.cpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 9abbce21271db762a66be81a22bdb14f7c081c46 -Author: Zeyu Ma -Date: Fri Jun 30 03:11:44 2023 -0400 - - Add 729 lines last edited by Zeyu Ma in worldgen/terrain/source/cpu/meshing/visibility_test.cpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 9a8ec0fb964c3a6544e94c7d3ce71090067ed1d7 -Author: Lahav Lipson -Date: Fri Jun 30 03:11:44 2023 -0400 - - Add 7 lines last edited by Lahav Lipson in worldgen/terrain/source/cpu/meshing/utils.cpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit b47a3811069f8abea32db6cae584856b8f5d7ba7 -Author: Pvl-bot -Date: Fri Jun 30 03:11:44 2023 -0400 - - Add 13 lines last edited by Pvl-bot in worldgen/terrain/source/cpu/meshing/utils.cpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit c21bd16dac9cf7b67ddfde98a0a162b4a3895451 -Author: Zeyu Ma -Date: Fri Jun 30 03:11:44 2023 -0400 - - Add 274 lines last edited by Zeyu Ma in worldgen/terrain/source/cpu/meshing/utils.cpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit cea5de37431e445f4f2fdc2e95e75784cba7317a -Author: Pvl-bot -Date: Fri Jun 30 03:11:44 2023 -0400 - - Add 12 lines last edited by Pvl-bot in worldgen/terrain/source/cpu/meshing/uniform_mesher.cpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit b38a82b4ccf4fdf45bac85db661515bda519c7f1 -Author: Zeyu Ma -Date: Fri Jun 30 03:11:44 2023 -0400 - - Add 310 lines last edited by Zeyu Ma in worldgen/terrain/source/cpu/meshing/uniform_mesher.cpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 40c8dea09dc35dbb4de73958ffc561bb1bb649ec -Author: Pvl-bot -Date: Fri Jun 30 03:11:44 2023 -0400 - - Add 9 lines last edited by Pvl-bot in worldgen/terrain/source/cpu/elements/mountains.cpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 56a12c342b8523f6a86b372ea1109523a1805736 -Author: Zeyu Ma -Date: Fri Jun 30 03:11:44 2023 -0400 - - Add 16 lines last edited by Zeyu Ma in worldgen/terrain/source/cpu/elements/mountains.cpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 06cbc998082ca92b962eca2b5a85e338c047cf50 -Author: Pvl-bot -Date: Fri Jun 30 03:11:44 2023 -0400 - - Add 9 lines last edited by Pvl-bot in worldgen/terrain/source/cpu/elements/ground.cpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 25cddd8a2b273f470ab8de2e721280f8a6b3147d -Author: Zeyu Ma -Date: Fri Jun 30 03:11:44 2023 -0400 - - Add 21 lines last edited by Zeyu Ma in worldgen/terrain/source/cpu/elements/ground.cpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit cb401d0e9d1f1438dab5aa305a9e007c92cb94e2 -Author: Pvl-bot -Date: Fri Jun 30 03:11:44 2023 -0400 - - Add 9 lines last edited by Pvl-bot in worldgen/terrain/source/cpu/elements/warped_rocks.cpp - - Commit made automatically to show authorship. This version of the code is not usable. 
- -commit f27d0dca660d3516f9d2e58ab85946b37a218863 -Author: Zeyu Ma -Date: Fri Jun 30 03:11:44 2023 -0400 - - Add 21 lines last edited by Zeyu Ma in worldgen/terrain/source/cpu/elements/warped_rocks.cpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit dc0c3adf1e0a4ed5d16d15bf6f9f5168c47174d2 -Author: Pvl-bot -Date: Fri Jun 30 03:11:44 2023 -0400 - - Add 9 lines last edited by Pvl-bot in worldgen/terrain/source/cpu/elements/atmosphere.cpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 73c750c1816ba0524d01b7dcb63be6c8497dcebf -Author: Zeyu Ma -Date: Fri Jun 30 03:11:44 2023 -0400 - - Add 17 lines last edited by Zeyu Ma in worldgen/terrain/source/cpu/elements/atmosphere.cpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 6821006fc83806e39251694bad9c6c19d0eb37cf -Author: Pvl-bot -Date: Fri Jun 30 03:11:44 2023 -0400 - - Add 8 lines last edited by Pvl-bot in worldgen/terrain/source/cpu/elements/waterbody.cpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 3be2bd470c4f47e20ec3d96645102006eb67e42a -Author: Zeyu Ma -Date: Fri Jun 30 03:11:44 2023 -0400 - - Add 19 lines last edited by Zeyu Ma in worldgen/terrain/source/cpu/elements/waterbody.cpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 946c4a9e411297a0ce1471aeab017e544405f3d3 -Author: Pvl-bot -Date: Fri Jun 30 03:11:44 2023 -0400 - - Add 8 lines last edited by Pvl-bot in worldgen/terrain/source/cpu/elements/voronoi_rocks.cpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 7dffb8b4688ef14d0107fd5b84bf5e0355362fa3 -Author: Zeyu Ma -Date: Fri Jun 30 03:11:44 2023 -0400 - - Add 21 lines last edited by Zeyu Ma in worldgen/terrain/source/cpu/elements/voronoi_rocks.cpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 188697887a228ea66d5750114a9cb827f75e5339 -Author: Lahav Lipson -Date: Fri Jun 30 03:11:44 2023 -0400 - - Add 1 lines last edited by Lahav Lipson in worldgen/terrain/source/cpu/elements/landtiles.cpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit db368388b5d16488045028f1a6e082cadae40103 -Author: Pvl-bot -Date: Fri Jun 30 03:11:44 2023 -0400 - - Add 9 lines last edited by Pvl-bot in worldgen/terrain/source/cpu/elements/landtiles.cpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 4cba457063c3cd8bbbd2daa5c2e8c982c0a982f1 -Author: Zeyu Ma -Date: Fri Jun 30 03:11:44 2023 -0400 - - Add 20 lines last edited by Zeyu Ma in worldgen/terrain/source/cpu/elements/landtiles.cpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 76e261bdb94169aac159a6f4e0a5b312ccdaf9f8 -Author: Pvl-bot -Date: Fri Jun 30 03:11:44 2023 -0400 - - Add 9 lines last edited by Pvl-bot in worldgen/terrain/source/cpu/elements/header.h - - Commit made automatically to show authorship. This version of the code is not usable. - -commit d7fa201a6c9fe53d34dcc2d8cc6903466e73be6f -Author: Zeyu Ma -Date: Fri Jun 30 03:11:44 2023 -0400 - - Add 22 lines last edited by Zeyu Ma in worldgen/terrain/source/cpu/elements/header.h - - Commit made automatically to show authorship. This version of the code is not usable. 
- -commit a9089cc6278d5a6e9c33bea66c1935d9b02719a0 -Author: Lahav Lipson -Date: Fri Jun 30 03:11:44 2023 -0400 - - Add 5 lines last edited by Lahav Lipson in worldgen/terrain/source/cpu/elements/upsidedown_mountains.cpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 6de8d76e7b953c3bd61fe9c2daa71ef6516de826 -Author: Pvl-bot -Date: Fri Jun 30 03:11:44 2023 -0400 - - Add 9 lines last edited by Pvl-bot in worldgen/terrain/source/cpu/elements/upsidedown_mountains.cpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 501fd9f085f5a5ddee03a6601dd7427f1aafc9ca -Author: Zeyu Ma -Date: Fri Jun 30 03:11:44 2023 -0400 - - Add 12 lines last edited by Zeyu Ma in worldgen/terrain/source/cpu/elements/upsidedown_mountains.cpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit f9bd2e742b066121ba679c79321724f2b0e4d368 -Author: Pvl-bot -Date: Fri Jun 30 03:11:44 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/terrain/source/cpu/elements/core.cpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 003aee9267b4655462ee861e656fc545d3eb6969 -Author: Zeyu Ma -Date: Fri Jun 30 03:11:43 2023 -0400 - - Add 41 lines last edited by Zeyu Ma in worldgen/terrain/source/cpu/elements/core.cpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 5199a2384c3c0d9337ca247896b93098531dd9b4 -Author: Pvl-bot -Date: Fri Jun 30 03:11:43 2023 -0400 - - Add 28 lines last edited by Pvl-bot in worldgen/terrain/source/cpu/surfaces/ice.cpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 369fb806f6f2924864a11621081bcf26f3b19449 -Author: Pvl-bot -Date: Fri Jun 30 03:11:43 2023 -0400 - - Add 29 lines last edited by Pvl-bot in worldgen/terrain/source/cpu/surfaces/cobble_stone.cpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 937752205c60d041230c2abb468ce0c6de1fe1ff -Author: Pvl-bot -Date: Fri Jun 30 03:11:43 2023 -0400 - - Add 30 lines last edited by Pvl-bot in worldgen/terrain/source/cpu/surfaces/stone.cpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 50a4ed6ecaaed4a61c22ed98740db8b9ea45c8e8 -Author: Pvl-bot -Date: Fri Jun 30 03:11:43 2023 -0400 - - Add 29 lines last edited by Pvl-bot in worldgen/terrain/source/cpu/surfaces/sandstone.cpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 436b443751fce727a585d01209e1257fd61953fa -Author: Pvl-bot -Date: Fri Jun 30 03:11:43 2023 -0400 - - Add 29 lines last edited by Pvl-bot in worldgen/terrain/source/cpu/surfaces/sand.cpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 33ff252a4fe5196f03bc6bdf317f401ce791eb81 -Author: Pvl-bot -Date: Fri Jun 30 03:11:43 2023 -0400 - - Add 27 lines last edited by Pvl-bot in worldgen/terrain/source/cpu/surfaces/header.h - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 40365f0c6490369e7ea59f66ebdfd744fef47013 -Author: Pvl-bot -Date: Fri Jun 30 03:11:43 2023 -0400 - - Add 29 lines last edited by Pvl-bot in worldgen/terrain/source/cpu/surfaces/dirt.cpp - - Commit made automatically to show authorship. This version of the code is not usable. 
- -commit 7b1da1c7f848e7ec15a78d82868102624a6f44b1 -Author: Pvl-bot -Date: Fri Jun 30 03:11:43 2023 -0400 - - Add 29 lines last edited by Pvl-bot in worldgen/terrain/source/cpu/surfaces/soil.cpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit ec0e1b753212309f8a824c4640e8d87c58dc5cb0 -Author: Pvl-bot -Date: Fri Jun 30 03:11:43 2023 -0400 - - Add 28 lines last edited by Pvl-bot in worldgen/terrain/source/cpu/surfaces/mud.cpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit dc6d2011cc54d37ad55ee2b194165ca72060d0e7 -Author: Pvl-bot -Date: Fri Jun 30 03:11:43 2023 -0400 - - Add 27 lines last edited by Pvl-bot in worldgen/terrain/source/cpu/surfaces/snow.cpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 53bc800b7f3654f1c510e10cf5f4b07c12b85ffa -Author: Pvl-bot -Date: Fri Jun 30 03:11:43 2023 -0400 - - Add 30 lines last edited by Pvl-bot in worldgen/terrain/source/cpu/surfaces/chunkyrock.cpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 1e842865fa7dddc06e2aacba8f00fe78d70b9f87 -Author: Pvl-bot -Date: Fri Jun 30 03:11:43 2023 -0400 - - Add 29 lines last edited by Pvl-bot in worldgen/terrain/source/cpu/surfaces/mountain.cpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 0d759200f27e0b8072fcc4bb93ea32c6300a46e4 -Author: Pvl-bot -Date: Fri Jun 30 03:11:43 2023 -0400 - - Add 28 lines last edited by Pvl-bot in worldgen/terrain/source/cpu/surfaces/cracked_ground.cpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 86237c404769cbaff4c36e96f9fd291341348082 -Author: Pvl-bot -Date: Fri Jun 30 03:11:43 2023 -0400 - - Add 161 lines last edited by Pvl-bot in worldgen/terrain/source/cpu/soil_machine/soil/rockgravelpebblessand.soil - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 05fb751ddb5dc49c61c836168255a58f9baa4932 -Author: Pvl-bot -Date: Fri Jun 30 03:11:43 2023 -0400 - - Add 63 lines last edited by Pvl-bot in worldgen/terrain/source/cpu/soil_machine/soil/default.soil - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 9f395cd3332cb94a40d489e9bdf3fb868fd60e91 -Author: Pvl-bot -Date: Fri Jun 30 03:11:43 2023 -0400 - - Add 60 lines last edited by Pvl-bot in worldgen/terrain/source/cpu/soil_machine/soil/sand.soil - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 5c4808672fcf1c6a699d99512456a5f48f2f455b -Author: Pvl-bot -Date: Fri Jun 30 03:11:43 2023 -0400 - - Add 118 lines last edited by Pvl-bot in worldgen/terrain/source/cpu/soil_machine/soil/rockgravelpebbles.soil - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 44cd7b9090c6d05ceade24602bb01b5b527d35da -Author: Pvl-bot -Date: Fri Jun 30 03:11:43 2023 -0400 - - Add 108 lines last edited by Pvl-bot in worldgen/terrain/source/cpu/soil_machine/soil/rocksand.soil - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 245370e0bd9546556d3665f91d09f5b4bf405422 -Author: Pvl-bot -Date: Fri Jun 30 03:11:43 2023 -0400 - - Add 114 lines last edited by Pvl-bot in worldgen/terrain/source/cpu/soil_machine/particle/particle.h - - Commit made automatically to show authorship. This version of the code is not usable. 
- -commit f90c35d48af8c56ff3470dbc8bc705ee403e7de7 -Author: Pvl-bot -Date: Fri Jun 30 03:11:43 2023 -0400 - - Add 156 lines last edited by Pvl-bot in worldgen/terrain/source/cpu/soil_machine/particle/wind.h - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 1eef059900f6be5f89847d4af43478b5f7e6aaf6 -Author: Pvl-bot -Date: Fri Jun 30 03:11:43 2023 -0400 - - Add 385 lines last edited by Pvl-bot in worldgen/terrain/source/cpu/soil_machine/particle/water.h - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 8faa28325cdaec81c0eb6b4e9b2b5eb0d43be3ff -Author: Pvl-bot -Date: Fri Jun 30 03:11:43 2023 -0400 - - Add 84 lines last edited by Pvl-bot in worldgen/terrain/source/cpu/soil_machine/include/distribution.h - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 6cc1edd468079585ab0de8f1bc4d6a44696686b9 -Author: Pvl-bot -Date: Fri Jun 30 03:11:43 2023 -0400 - - Add 2586 lines last edited by Pvl-bot in worldgen/terrain/source/cpu/soil_machine/include/FastNoiseLite.h - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 237997aa1ede3e3a95afeca06dd95ce74128ca8b -Author: Pvl-bot -Date: Fri Jun 30 03:11:43 2023 -0400 - - Add 360 lines last edited by Pvl-bot in worldgen/terrain/source/cpu/soil_machine/include/vertexpool.h - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 8c6dc05fc2e8b2e8af80f29faf08c3a13678a4c6 -Author: Pvl-bot -Date: Fri Jun 30 03:11:42 2023 -0400 - - Add 247 lines last edited by Pvl-bot in worldgen/terrain/source/cpu/soil_machine/io.h - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 6e093438c2b6ea9b0910510f96934918b11196c3 -Author: Pvl-bot -Date: Fri Jun 30 03:11:42 2023 -0400 - - Add 122 lines last edited by Pvl-bot in worldgen/terrain/source/cpu/soil_machine/surface.h - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 272c4958492945d6d1fe798877a905006758786e -Author: Pvl-bot -Date: Fri Jun 30 03:11:42 2023 -0400 - - Add 627 lines last edited by Pvl-bot in worldgen/terrain/source/cpu/soil_machine/layermap.h - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 4ca6cc2e51c3a83d93e5220a0c587f14042c5473 -Author: Pvl-bot -Date: Fri Jun 30 03:11:42 2023 -0400 - - Add 79 lines last edited by Pvl-bot in worldgen/terrain/source/cpu/soil_machine/SoilMachine.cpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit caab2aac7a4ffda7e8b9d223aaffc59d64931ba2 -Author: Pvl-bot -Date: Fri Jun 30 03:11:42 2023 -0400 - - Add 7 lines last edited by Pvl-bot in worldgen/terrain/source/cpu/utils/FastNoiseLite.cpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 72ee992a0b1b1d792e046f2d3a2b286dc0a9763b -Author: Zeyu Ma -Date: Fri Jun 30 03:11:42 2023 -0400 - - Add 21 lines last edited by Zeyu Ma in worldgen/terrain/source/cpu/utils/FastNoiseLite.cpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 464ddf83726a026930eb91b8b746159d9e0832ac -Author: Pvl-bot -Date: Fri Jun 30 03:11:42 2023 -0400 - - Add 9 lines last edited by Pvl-bot in worldgen/terrain/source/cuda/elements/atmosphere.cu - - Commit made automatically to show authorship. This version of the code is not usable. 
- -commit f9e5114f247364dd9b701376aec0a480128328e0 -Author: Zeyu Ma -Date: Fri Jun 30 03:11:42 2023 -0400 - - Add 39 lines last edited by Zeyu Ma in worldgen/terrain/source/cuda/elements/atmosphere.cu - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 9b5f4a57a59d4d716923958136177ed35924a962 -Author: Pvl-bot -Date: Fri Jun 30 03:11:42 2023 -0400 - - Add 9 lines last edited by Pvl-bot in worldgen/terrain/source/cuda/elements/ground.cu - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 3fd179b7acc87988ba75396908d3f998c993f8a9 -Author: Zeyu Ma -Date: Fri Jun 30 03:11:42 2023 -0400 - - Add 47 lines last edited by Zeyu Ma in worldgen/terrain/source/cuda/elements/ground.cu - - Commit made automatically to show authorship. This version of the code is not usable. - -commit cbb2f994cbda949af95f03622a2506a589624fc1 -Author: Pvl-bot -Date: Fri Jun 30 03:11:42 2023 -0400 - - Add 8 lines last edited by Pvl-bot in worldgen/terrain/source/cuda/elements/upsidedown_mountains.cu - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 1596141a911ab2bb592ce6da75168016c3afc605 -Author: Lahav Lipson -Date: Fri Jun 30 03:11:42 2023 -0400 - - Add 12 lines last edited by Lahav Lipson in worldgen/terrain/source/cuda/elements/upsidedown_mountains.cu - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 0d7985a4e18e6729246b25360ea432a920e80bb2 -Author: Zeyu Ma -Date: Fri Jun 30 03:11:42 2023 -0400 - - Add 31 lines last edited by Zeyu Ma in worldgen/terrain/source/cuda/elements/upsidedown_mountains.cu - - Commit made automatically to show authorship. This version of the code is not usable. - -commit b80bc4085034c2b9a17275a443df6b5e1a97f736 -Author: Pvl-bot -Date: Fri Jun 30 03:11:42 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/terrain/source/cuda/elements/core.cu - - Commit made automatically to show authorship. This version of the code is not usable. - -commit dd603c9b6e678603089fcb519af06e97b10dbc27 -Author: Zeyu Ma -Date: Fri Jun 30 03:11:42 2023 -0400 - - Add 54 lines last edited by Zeyu Ma in worldgen/terrain/source/cuda/elements/core.cu - - Commit made automatically to show authorship. This version of the code is not usable. - -commit c871c3cf4cf14620876b47b1c8f1177dab8c87b4 -Author: Pvl-bot -Date: Fri Jun 30 03:11:42 2023 -0400 - - Add 9 lines last edited by Pvl-bot in worldgen/terrain/source/cuda/elements/voronoi_rocks.cu - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 5f5f1f4e8c80fff433e7e3dac98a53e18b4f83fa -Author: Zeyu Ma -Date: Fri Jun 30 03:11:42 2023 -0400 - - Add 54 lines last edited by Zeyu Ma in worldgen/terrain/source/cuda/elements/voronoi_rocks.cu - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 4e9343756cf3fe0469dd0b256a39d6901396088e -Author: Pvl-bot -Date: Fri Jun 30 03:11:42 2023 -0400 - - Add 9 lines last edited by Pvl-bot in worldgen/terrain/source/cuda/elements/waterbody.cu - - Commit made automatically to show authorship. This version of the code is not usable. - -commit abf703e63e345150d1176e9b152cdf3e6c6c1c0d -Author: Zeyu Ma -Date: Fri Jun 30 03:11:42 2023 -0400 - - Add 50 lines last edited by Zeyu Ma in worldgen/terrain/source/cuda/elements/waterbody.cu - - Commit made automatically to show authorship. This version of the code is not usable. 
- -commit 2282d9f18673239d2440b87376b26b949ead7d0a -Author: Lahav Lipson -Date: Fri Jun 30 03:11:42 2023 -0400 - - Add 2 lines last edited by Lahav Lipson in worldgen/terrain/source/cuda/elements/landtiles.cu - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 1dd4d713dd7d3ce04025a337054c1b02ac0c7264 -Author: Pvl-bot -Date: Fri Jun 30 03:11:42 2023 -0400 - - Add 9 lines last edited by Pvl-bot in worldgen/terrain/source/cuda/elements/landtiles.cu - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 0d19659d5381bb7b28b58dc5c1056c1ca694eda2 -Author: Zeyu Ma -Date: Fri Jun 30 03:11:42 2023 -0400 - - Add 49 lines last edited by Zeyu Ma in worldgen/terrain/source/cuda/elements/landtiles.cu - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 4eea05fe37a778b1e8b95c09d20280fdf23a28cc -Author: Pvl-bot -Date: Fri Jun 30 03:11:42 2023 -0400 - - Add 10 lines last edited by Pvl-bot in worldgen/terrain/source/cuda/elements/header.h - - Commit made automatically to show authorship. This version of the code is not usable. - -commit ffd50b367e3fdc83a4006e86de4115a240ba3d7d -Author: Zeyu Ma -Date: Fri Jun 30 03:11:42 2023 -0400 - - Add 22 lines last edited by Zeyu Ma in worldgen/terrain/source/cuda/elements/header.h - - Commit made automatically to show authorship. This version of the code is not usable. - -commit a0fbbf7e7d494f3db734aa5fa7a3e7a86d5a39a6 -Author: Pvl-bot -Date: Fri Jun 30 03:11:42 2023 -0400 - - Add 10 lines last edited by Pvl-bot in worldgen/terrain/source/cuda/elements/warped_rocks.cu - - Commit made automatically to show authorship. This version of the code is not usable. - -commit d423c2cb8cc94e397ec7cd42440bd974f9b866f2 -Author: Zeyu Ma -Date: Fri Jun 30 03:11:42 2023 -0400 - - Add 50 lines last edited by Zeyu Ma in worldgen/terrain/source/cuda/elements/warped_rocks.cu - - Commit made automatically to show authorship. This version of the code is not usable. - -commit c1c39bf80c8aee2ceee76085c0736ec8f80f3029 -Author: Pvl-bot -Date: Fri Jun 30 03:11:42 2023 -0400 - - Add 9 lines last edited by Pvl-bot in worldgen/terrain/source/cuda/elements/mountains.cu - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 729397bdc13ef52a0c79feb62084d95d48e29856 -Author: Zeyu Ma -Date: Fri Jun 30 03:11:42 2023 -0400 - - Add 36 lines last edited by Zeyu Ma in worldgen/terrain/source/cuda/elements/mountains.cu - - Commit made automatically to show authorship. This version of the code is not usable. - -commit b73e2318b9b37e5a4781201b5ceaf98f29b7fdac -Author: Pvl-bot -Date: Fri Jun 30 03:11:41 2023 -0400 - - Add 64 lines last edited by Pvl-bot in worldgen/terrain/source/cuda/surfaces/chunkyrock.cu - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 2d3f11fcbadecfc4081a2dc10154e4f3a0c6883b -Author: Pvl-bot -Date: Fri Jun 30 03:11:41 2023 -0400 - - Add 58 lines last edited by Pvl-bot in worldgen/terrain/source/cuda/surfaces/sand.cu - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 74cc178655a18a4e8f6437e9940d5116fe232559 -Author: Pvl-bot -Date: Fri Jun 30 03:11:41 2023 -0400 - - Add 55 lines last edited by Pvl-bot in worldgen/terrain/source/cuda/surfaces/ice.cu - - Commit made automatically to show authorship. This version of the code is not usable. 
[git log listing of roughly 250 automatically generated authorship commits, all dated Fri Jun 30 2023 and authored by Pvl-bot, Zeyu Ma, Lahav Lipson, Alexander Raistrick, Lingjie Mei, Hei Law, Jia Deng, Yihan Wang, Karhan Kayan, Hongyu Wen, and David Yan. Each commit message has the form "Add <N> lines last edited by <author> in worldgen/<path>", followed by the note "Commit made automatically to show authorship. This version of the code is not usable." The files covered fall under worldgen/terrain/ (CUDA and common surface/element sources, assets, elements, meshers, surface kernels, and utils), worldgen/util/, worldgen/placement/, and worldgen/tools/.]
- -commit bed7df582a34fc21ad31bb5bbf45e570338c4e9b -Author: Lahav Lipson -Date: Fri Jun 30 03:11:31 2023 -0400 - - Add 48 lines last edited by Lahav Lipson in worldgen/tools/util/submitit_emulator.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 00182ebfe08685c4f032d47ae5a9a08c4147477d -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:31 2023 -0400 - - Add 205 lines last edited by Alexander Raistrick in worldgen/tools/util/submitit_emulator.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit ebe043ff1affb0125f208cdf8046aa17fc184ae2 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:31 2023 -0400 - - Add 1 lines last edited by Alexander Raistrick in worldgen/tools/util/cleanup.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit f0d078c7189acf2031d15e43e5f8b53a39b387a1 -Author: Zeyu Ma -Date: Fri Jun 30 03:11:31 2023 -0400 - - Add 2 lines last edited by Zeyu Ma in worldgen/tools/util/cleanup.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 6227474d3e683453f0fe763abf823c4d495af2f9 -Author: Pvl-bot -Date: Fri Jun 30 03:11:31 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/tools/util/cleanup.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 4fce49c6f76d6a1e5829dcab4c33cfff8fd4ad2c -Author: Lahav Lipson -Date: Fri Jun 30 03:11:31 2023 -0400 - - Add 26 lines last edited by Lahav Lipson in worldgen/tools/util/cleanup.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 310bd59de24a6a5b929b922c58e6f00b6f481393 -Author: Pvl-bot -Date: Fri Jun 30 03:11:31 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/tools/util/google_drive_client.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 25c6636d75cde3d067455f39683c08fd9bdf76e8 -Author: Lahav Lipson -Date: Fri Jun 30 03:11:31 2023 -0400 - - Add 17 lines last edited by Lahav Lipson in worldgen/tools/util/google_drive_client.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 8780d296a177e91f12d430d7afaeff7d44c21b33 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:31 2023 -0400 - - Add 4 lines last edited by Alexander Raistrick in worldgen/tools/util/show_gpu_table.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit ee6e23b85481c92b55a899c90ab785c95874aea5 -Author: Pvl-bot -Date: Fri Jun 30 03:11:31 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/tools/util/show_gpu_table.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 82cf3acaccda4a67b013d3f8ce81fbcff59bdec3 -Author: Lahav Lipson -Date: Fri Jun 30 03:11:31 2023 -0400 - - Add 58 lines last edited by Lahav Lipson in worldgen/tools/util/show_gpu_table.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit e03f4ee23839c5b2ccb1886cf90139536a3b1f31 -Author: Pvl-bot -Date: Fri Jun 30 03:11:31 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/tools/util/upload_util.py - - Commit made automatically to show authorship. This version of the code is not usable. 
- -commit da47341be52fa9269168df2b18e03917b2d9ea71 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:31 2023 -0400 - - Add 114 lines last edited by Alexander Raistrick in worldgen/tools/util/upload_util.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit d4204525783da9944de8b324abf6d55dea32000c -Author: Pvl-bot -Date: Fri Jun 30 03:11:31 2023 -0400 - - Add 3 lines last edited by Pvl-bot in worldgen/tools/ground_truth/bounding_boxes_3d.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit fb1c6062e6854da6e2b1897ff9e948a32fca1dac -Author: Lahav Lipson -Date: Fri Jun 30 03:11:31 2023 -0400 - - Add 128 lines last edited by Lahav Lipson in worldgen/tools/ground_truth/bounding_boxes_3d.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit bf2e16cf84ea1767fe2db5aa899043cea6537814 -Author: Pvl-bot -Date: Fri Jun 30 03:11:31 2023 -0400 - - Add 3 lines last edited by Pvl-bot in worldgen/tools/ground_truth/rigid_warp.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 5b5820c110a64179c3ee4471e2584de19379ce75 -Author: Lahav Lipson -Date: Fri Jun 30 03:11:31 2023 -0400 - - Add 71 lines last edited by Lahav Lipson in worldgen/tools/ground_truth/rigid_warp.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 14356c73528dbd25dba55cf1765b0d20aad231b4 -Author: Pvl-bot -Date: Fri Jun 30 03:11:31 2023 -0400 - - Add 1 lines last edited by Pvl-bot in worldgen/tools/ground_truth/segmentation_lookup.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 7bd87435b3d3f351264cc26964fe97f89c19652d -Author: Lahav Lipson -Date: Fri Jun 30 03:11:31 2023 -0400 - - Add 112 lines last edited by Lahav Lipson in worldgen/tools/ground_truth/segmentation_lookup.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit b6339eb7e74a287d9f942ff110d5e36396b2ef1e -Author: Pvl-bot -Date: Fri Jun 30 03:11:31 2023 -0400 - - Add 3 lines last edited by Pvl-bot in worldgen/tools/ground_truth/optical_flow_warp.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit b4969f7d0d6fbfe4b84668934de01f8b7a24ce49 -Author: Lahav Lipson -Date: Fri Jun 30 03:11:31 2023 -0400 - - Add 47 lines last edited by Lahav Lipson in worldgen/tools/ground_truth/optical_flow_warp.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 7047e2ba8c5e2e25bafd8f53e58bc8bbdd3c1e61 -Author: Pvl-bot -Date: Fri Jun 30 03:11:31 2023 -0400 - - Add 7 lines last edited by Pvl-bot in worldgen/tools/export/export.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit cd713d252d8d5bc99373f9386866c5c86e81db22 -Author: David Yan -Date: Fri Jun 30 03:11:31 2023 -0400 - - Add 298 lines last edited by David Yan in worldgen/tools/export/export.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit f52a24db5021621de0dad157af6737d7d2d1568f -Author: Pvl-bot -Date: Fri Jun 30 03:11:31 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/tools/dev/params_parser.py - - Commit made automatically to show authorship. This version of the code is not usable. 
- -commit 9eaf36ec6d0674095af00377d974119c5c22a449 -Author: Zeyu Ma -Date: Fri Jun 30 03:11:30 2023 -0400 - - Add 43 lines last edited by Zeyu Ma in worldgen/tools/dev/params_parser.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 5267ee392fc6acc14c402a7e756d4bcac9a96c0e -Author: Pvl-bot -Date: Fri Jun 30 03:11:30 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/tools/dev/landtile_viewer.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 9c03b35df3cfde7422288acdb681a9857f99aeaa -Author: Zeyu Ma -Date: Fri Jun 30 03:11:30 2023 -0400 - - Add 34 lines last edited by Zeyu Ma in worldgen/tools/dev/landtile_viewer.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit b63d362e30570d4a5c675ba7d47ff537c630cdf4 -Author: Pvl-bot -Date: Fri Jun 30 03:11:30 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/tools/palette/palette.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 1e5d6b7e6490175774557f5d1f49eb866dfe20db -Author: Lingjie Mei -Date: Fri Jun 30 03:11:30 2023 -0400 - - Add 12 lines last edited by Lingjie Mei in worldgen/tools/palette/palette.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit d1df11c61f005f64584e618fdcb719d62ecb2412 -Author: Zeyu Ma -Date: Fri Jun 30 03:11:30 2023 -0400 - - Add 99 lines last edited by Zeyu Ma in worldgen/tools/palette/palette.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit a363493017d476d8f550a7a75bf839dd411ea721 -Author: Pvl-bot -Date: Fri Jun 30 03:11:30 2023 -0400 - - Add 1 lines last edited by Pvl-bot in worldgen/tools/pipeline_configs/local_256GB.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 2722260c66efa202ca74c954125239a2c0813d4f -Author: Lahav Lipson -Date: Fri Jun 30 03:11:30 2023 -0400 - - Add 5 lines last edited by Lahav Lipson in worldgen/tools/pipeline_configs/local_256GB.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit f38f050b7e7b991112dc70b36bcc1f1d4d6539e8 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:30 2023 -0400 - - Add 35 lines last edited by Alexander Raistrick in worldgen/tools/pipeline_configs/local_256GB.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 6977eab608a3cfdb46ad724d96f29a34be40e16e -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:30 2023 -0400 - - Add 6 lines last edited by Alexander Raistrick in worldgen/tools/pipeline_configs/local_16GB.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 37005f39db0103e8978e6da2d171e56851ebfb07 -Author: Zeyu Ma -Date: Fri Jun 30 03:11:30 2023 -0400 - - Add 4 lines last edited by Zeyu Ma in worldgen/tools/pipeline_configs/cuda_terrain.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 6eb9fcb89bb62684144b2e2559f94eb159831b30 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:30 2023 -0400 - - Add 2 lines last edited by Alexander Raistrick in worldgen/tools/pipeline_configs/stereo_video.gin - - Commit made automatically to show authorship. This version of the code is not usable. 
- -commit fda5805ac8684eeb2ae851647fa7f528e0f59db6 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:30 2023 -0400 - - Add 5 lines last edited by Alexander Raistrick in worldgen/tools/pipeline_configs/local_128GB.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 26fb0a355b8746136af96fbc07c0fc3ccc9fd4fa -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:30 2023 -0400 - - Add 3 lines last edited by Alexander Raistrick in worldgen/tools/pipeline_configs/asset_demo.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit c3ce2900eff969e39e074407efa5dc69d9ee41b6 -Author: Lahav Lipson -Date: Fri Jun 30 03:11:30 2023 -0400 - - Add 1 lines last edited by Lahav Lipson in worldgen/tools/pipeline_configs/stereo.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 718eb794a7288c1954b9c6b0e5114209a9ce5be4 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:30 2023 -0400 - - Add 10 lines last edited by Alexander Raistrick in worldgen/tools/pipeline_configs/stereo.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit b6670a890627c407b96178ae62b68cecc4b7fcd4 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:30 2023 -0400 - - Add 5 lines last edited by Alexander Raistrick in worldgen/tools/pipeline_configs/local_64GB.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 1b55b251cca8b02c58085649023533f30f9828d7 -Author: Pvl-bot -Date: Fri Jun 30 03:11:30 2023 -0400 - - Add 1 lines last edited by Pvl-bot in worldgen/tools/pipeline_configs/monocular.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 45a8d71ea618286cea9bf2b70a62dd2208ca4bbb -Author: Lahav Lipson -Date: Fri Jun 30 03:11:30 2023 -0400 - - Add 8 lines last edited by Lahav Lipson in worldgen/tools/pipeline_configs/monocular.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit f2ca8ada5a5d4b6ef6dbe00bc3433cd43555a7fa -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:30 2023 -0400 - - Add 8 lines last edited by Alexander Raistrick in worldgen/tools/pipeline_configs/monocular.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 6901c26a95dabbe5c262f019aa0629c6f146eb95 -Author: Lahav Lipson -Date: Fri Jun 30 03:11:30 2023 -0400 - - Add 2 lines last edited by Lahav Lipson in worldgen/tools/pipeline_configs/blender_gt.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 7655583efc525fefecf5e326dc69458e28dafea1 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:30 2023 -0400 - - Add 3 lines last edited by Alexander Raistrick in worldgen/tools/pipeline_configs/blender_gt.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 7254ce423eb9ebc13c9277d265ab55be18f2e53f -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:30 2023 -0400 - - Add 4 lines last edited by Alexander Raistrick in worldgen/tools/pipeline_configs/monocular_flow.gin - - Commit made automatically to show authorship. This version of the code is not usable. 
- -commit b005f8b93e7e17a5ba682ac99c645d1dd1756c96 -Author: Pvl-bot -Date: Fri Jun 30 03:11:30 2023 -0400 - - Add 1 lines last edited by Pvl-bot in worldgen/tools/pipeline_configs/slurm.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 1246ea48a4a670612808029109ae3313e0507c29 -Author: Lahav Lipson -Date: Fri Jun 30 03:11:30 2023 -0400 - - Add 28 lines last edited by Lahav Lipson in worldgen/tools/pipeline_configs/slurm.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 73fbe7c44af1b2b4193fe4a0268ac321e45e89f9 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:30 2023 -0400 - - Add 58 lines last edited by Alexander Raistrick in worldgen/tools/pipeline_configs/slurm.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit f84978fecdf46041a66ef24f9649f3e249171f2f -Author: Pvl-bot -Date: Fri Jun 30 03:11:30 2023 -0400 - - Add 1 lines last edited by Pvl-bot in worldgen/tools/pipeline_configs/stereo_1h_jobs.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit db91f830c86eaaf97f370abdfad01ec604dd96b7 -Author: Zeyu Ma -Date: Fri Jun 30 03:11:30 2023 -0400 - - Add 2 lines last edited by Zeyu Ma in worldgen/tools/pipeline_configs/stereo_1h_jobs.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 633614af2f0d3673002442254964621cf92660c5 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:30 2023 -0400 - - Add 11 lines last edited by Alexander Raistrick in worldgen/tools/pipeline_configs/stereo_1h_jobs.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 3e7de65403621102303402d84a2af89413a96712 -Author: Lahav Lipson -Date: Fri Jun 30 03:11:30 2023 -0400 - - Add 2 lines last edited by Lahav Lipson in worldgen/tools/pipeline_configs/opengl_gt.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 5b3879ee532a6b98924857a6d0c8b46f8aa8bd32 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:30 2023 -0400 - - Add 4 lines last edited by Alexander Raistrick in worldgen/tools/pipeline_configs/opengl_gt.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit fa8189908a59a007b2f664e96f5d4fc3e1ee346a -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:30 2023 -0400 - - Add 17 lines last edited by Alexander Raistrick in worldgen/tools/pipeline_configs/monocular_video.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit b347c94a3ced3313023ff91e4adcdf5ea0816898 -Author: Pvl-bot -Date: Fri Jun 30 03:11:30 2023 -0400 - - Add 2 lines last edited by Pvl-bot in worldgen/tools/compile_opengl.sh - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 4d9d863fa804367782efa435b6886d0135e608fb -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:30 2023 -0400 - - Add 10 lines last edited by Alexander Raistrick in worldgen/tools/compile_opengl.sh - - Commit made automatically to show authorship. This version of the code is not usable. - -commit cd1a01c327668961df34fed88a12b019a755cb2f -Author: Yihan Wang -Date: Fri Jun 30 03:11:30 2023 -0400 - - Add 2 lines last edited by Yihan Wang in worldgen/tools/asset_grid.py - - Commit made automatically to show authorship. This version of the code is not usable. 
- -commit c05865f99ceb29f2f2a088fa5e95d86945a29f9a -Author: Pvl-bot -Date: Fri Jun 30 03:11:30 2023 -0400 - - Add 7 lines last edited by Pvl-bot in worldgen/tools/asset_grid.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 85d63a82a9eece6ab070ad0d1eaedb7d3639ae0b -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:29 2023 -0400 - - Add 118 lines last edited by Alexander Raistrick in worldgen/tools/asset_grid.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 8cd15f0f5bd16dac9b313737ff08ccb964160f92 -Author: Lingjie Mei -Date: Fri Jun 30 03:11:29 2023 -0400 - - Add 204 lines last edited by Lingjie Mei in worldgen/tools/asset_grid.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit f96cd6186ef353a841dc0ffa8eddf548e9d8af3f -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:29 2023 -0400 - - Add 1 lines last edited by Alexander Raistrick in worldgen/tools/generate_terrain_assets.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit c108731140abe296846a0bd2b6989da921c1dbea -Author: Pvl-bot -Date: Fri Jun 30 03:11:29 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/tools/generate_terrain_assets.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 1a6004970fc585412f492ec2ac2d12511a5c5c29 -Author: Zeyu Ma -Date: Fri Jun 30 03:11:29 2023 -0400 - - Add 76 lines last edited by Zeyu Ma in worldgen/tools/generate_terrain_assets.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 85b90a55e816bd19f2f33b037d6a82446295325d -Author: Pvl-bot -Date: Fri Jun 30 03:11:29 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/tools/cancel_jobs.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit a55d2ae7f0210e51653f9c494e49f06549142f9f -Author: Lahav Lipson -Date: Fri Jun 30 03:11:29 2023 -0400 - - Add 22 lines last edited by Lahav Lipson in worldgen/tools/cancel_jobs.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 809fa9e8ee14c5836981c9b702601b86617e328d -Author: Lahav Lipson -Date: Fri Jun 30 03:11:29 2023 -0400 - - Add 23 lines last edited by Lahav Lipson in worldgen/tools/template.html - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 733da481d61a73799405cba6fa6f8b2ab934db10 -Author: Hei Law -Date: Fri Jun 30 03:11:29 2023 -0400 - - Add 37 lines last edited by Hei Law in worldgen/tools/template.html - - Commit made automatically to show authorship. This version of the code is not usable. - -commit c5a03edf6c52d7c4bd535ec06eef81787d2e4aef -Author: Pvl-bot -Date: Fri Jun 30 03:11:29 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/tools/kernelize_surfaces.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 1305f23f35c2388d7dc4deca2808bf6a5ad01ce2 -Author: Zeyu Ma -Date: Fri Jun 30 03:11:29 2023 -0400 - - Add 30 lines last edited by Zeyu Ma in worldgen/tools/kernelize_surfaces.py - - Commit made automatically to show authorship. This version of the code is not usable. 
- -commit 25f237ab3d162d4cf88e40c8b9a8534e664ec721 -Author: Pvl-bot -Date: Fri Jun 30 03:11:29 2023 -0400 - - Add 5 lines last edited by Pvl-bot in worldgen/tools/summarize.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit de86cd7466eceea4ebe7f8198482e05e0f04d510 -Author: Lahav Lipson -Date: Fri Jun 30 03:11:29 2023 -0400 - - Add 151 lines last edited by Lahav Lipson in worldgen/tools/summarize.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit c737f78639f7a6a0134f9603023400aa463431e7 -Author: Pvl-bot -Date: Fri Jun 30 03:11:29 2023 -0400 - - Add 9 lines last edited by Pvl-bot in worldgen/tools/manage_datagen_jobs.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 97f5bdcb7ab58d3ed669eb6f5eed289a607b0f1b -Author: Zeyu Ma -Date: Fri Jun 30 03:11:29 2023 -0400 - - Add 34 lines last edited by Zeyu Ma in worldgen/tools/manage_datagen_jobs.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 84fbb516c6136cee72dbb65c5f01109a9be596e0 -Author: Hei Law -Date: Fri Jun 30 03:11:29 2023 -0400 - - Add 67 lines last edited by Hei Law in worldgen/tools/manage_datagen_jobs.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 1327421eec2e8ed6d9be6bc1142275410fd18c8e -Author: Lahav Lipson -Date: Fri Jun 30 03:11:29 2023 -0400 - - Add 212 lines last edited by Lahav Lipson in worldgen/tools/manage_datagen_jobs.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 1413fd14f328c843f73e19661c923c2afc46f5db -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:29 2023 -0400 - - Add 719 lines last edited by Alexander Raistrick in worldgen/tools/manage_datagen_jobs.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 41ce5efcc16b7e95e9b1bd330d1ad4e234eb349a -Author: Zeyu Ma -Date: Fri Jun 30 03:11:29 2023 -0400 - - Add 1 lines last edited by Zeyu Ma in worldgen/assets/grassland/grass_tuft.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit a6367dcee3d7fa1461434b966a017d7734938607 -Author: Yiming Zuo -Date: Fri Jun 30 03:11:29 2023 -0400 - - Add 1 lines last edited by Yiming Zuo in worldgen/assets/grassland/grass_tuft.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 828739a7f06bde25102962087a49ea44fc9d1e1b -Author: Yihan Wang -Date: Fri Jun 30 03:11:29 2023 -0400 - - Add 3 lines last edited by Yihan Wang in worldgen/assets/grassland/grass_tuft.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit f8fafdd121a45b34dc08c7fa097313f6ce30d51d -Author: Pvl-bot -Date: Fri Jun 30 03:11:29 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/grassland/grass_tuft.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit add953855b65019cbed6c00a1310040d6e0641e2 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:29 2023 -0400 - - Add 88 lines last edited by Alexander Raistrick in worldgen/assets/grassland/grass_tuft.py - - Commit made automatically to show authorship. This version of the code is not usable. 
- -commit ff4c7ab90be54e90df9266286711cc20ca2b9375 -Author: Pvl-bot -Date: Fri Jun 30 03:11:29 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/grassland/dandelion.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 712811a3c6fbbcf572423c41bea6ca5214227f16 -Author: Yiming Zuo -Date: Fri Jun 30 03:11:29 2023 -0400 - - Add 11 lines last edited by Yiming Zuo in worldgen/assets/grassland/dandelion.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit d1aa204da9ed93f71d0e3a99572c91c78359c5dc -Author: Yihan Wang -Date: Fri Jun 30 03:11:29 2023 -0400 - - Add 17 lines last edited by Yihan Wang in worldgen/assets/grassland/dandelion.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit bd246c44612d31a5a073bfbff7192c4a238b4410 -Author: Beining Han -Date: Fri Jun 30 03:11:29 2023 -0400 - - Add 636 lines last edited by Beining Han in worldgen/assets/grassland/dandelion.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 57acf128c6b1e9d287bd01d7b6bfe7d9d2b193cd -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:29 2023 -0400 - - Add 1 lines last edited by Alexander Raistrick in worldgen/assets/grassland/flowerplant.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 7bb69c857227dfbf66f6c6442c731aaf17c69e97 -Author: Lingjie Mei -Date: Fri Jun 30 03:11:29 2023 -0400 - - Add 1 lines last edited by Lingjie Mei in worldgen/assets/grassland/flowerplant.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit bb2a5c61e220dcac04f081f1311ba1778f5ba09d -Author: Lahav Lipson -Date: Fri Jun 30 03:11:29 2023 -0400 - - Add 2 lines last edited by Lahav Lipson in worldgen/assets/grassland/flowerplant.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 2073684656f6a7801bf8b20fcf317ceefeadb645 -Author: Pvl-bot -Date: Fri Jun 30 03:11:29 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/grassland/flowerplant.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 091332d8dcff3e3c9472c43238f99f9c9fd34501 -Author: Yihan Wang -Date: Fri Jun 30 03:11:29 2023 -0400 - - Add 23 lines last edited by Yihan Wang in worldgen/assets/grassland/flowerplant.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 144978fa943144734c42a0386f5b034614649b86 -Author: Beining Han -Date: Fri Jun 30 03:11:29 2023 -0400 - - Add 578 lines last edited by Beining Han in worldgen/assets/grassland/flowerplant.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 59adba075dd6c0cf41ca2bdf81d55f23a2a01456 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:29 2023 -0400 - - Add 1 lines last edited by Alexander Raistrick in worldgen/assets/grassland/__init__.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 96c7d57ce224df624fd1d7c6f9349263f1c48a52 -Author: Beining Han -Date: Fri Jun 30 03:11:29 2023 -0400 - - Add 1 lines last edited by Beining Han in worldgen/assets/grassland/__init__.py - - Commit made automatically to show authorship. This version of the code is not usable. 
- -commit 21b90d6d5203163e8455a54dbe096561865ab448 -Author: Pvl-bot -Date: Fri Jun 30 03:11:29 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/grassland/__init__.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 55987196f3c41271d99923de018ba1896acd89cb -Author: Pvl-bot -Date: Fri Jun 30 03:11:29 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/fruits/surfaces/coconuthairy_surface.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit f05e8ccabd61417ff7656c7bd552d6600bec9a9c -Author: Yiming Zuo -Date: Fri Jun 30 03:11:29 2023 -0400 - - Add 80 lines last edited by Yiming Zuo in worldgen/assets/fruits/surfaces/coconuthairy_surface.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit e10f35a199be71126a912e2207c458f35d2a74be -Author: Pvl-bot -Date: Fri Jun 30 03:11:28 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/fruits/surfaces/blackberry_surface.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 85a762235d8fbf7a95f65044c44d97080550fb90 -Author: Yiming Zuo -Date: Fri Jun 30 03:11:28 2023 -0400 - - Add 107 lines last edited by Yiming Zuo in worldgen/assets/fruits/surfaces/blackberry_surface.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 54b57ced9e69da6a65b084df4c373afcd6da480d -Author: Pvl-bot -Date: Fri Jun 30 03:11:28 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/fruits/surfaces/durian_surface.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 143adc3d8129058ed8f88fc5499a4e3bc9ff36d4 -Author: Yiming Zuo -Date: Fri Jun 30 03:11:28 2023 -0400 - - Add 110 lines last edited by Yiming Zuo in worldgen/assets/fruits/surfaces/durian_surface.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 354fbaef665951117680bddfd77e788eb7678167 -Author: Pvl-bot -Date: Fri Jun 30 03:11:28 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/fruits/surfaces/surface_utils.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 88d40e6b151640a53ed44da426a6ae8738a34900 -Author: Yiming Zuo -Date: Fri Jun 30 03:11:28 2023 -0400 - - Add 50 lines last edited by Yiming Zuo in worldgen/assets/fruits/surfaces/surface_utils.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit d7188b16e740fb10699bfe15cfda96109a5a89cb -Author: Pvl-bot -Date: Fri Jun 30 03:11:28 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/fruits/surfaces/apple_surface.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit ecd0738a7b338a60f24793f57d8292fe3a28ef63 -Author: Yiming Zuo -Date: Fri Jun 30 03:11:28 2023 -0400 - - Add 85 lines last edited by Yiming Zuo in worldgen/assets/fruits/surfaces/apple_surface.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 0c2e13ecf8a5ed27f47f028e0975aec9fa60c33c -Author: Pvl-bot -Date: Fri Jun 30 03:11:28 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/fruits/surfaces/starfruit_surface.py - - Commit made automatically to show authorship. This version of the code is not usable. 
- -commit 789d38e7c8191649916f1f8ee96cae1f75717a45 -Author: Yiming Zuo -Date: Fri Jun 30 03:11:28 2023 -0400 - - Add 76 lines last edited by Yiming Zuo in worldgen/assets/fruits/surfaces/starfruit_surface.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 56bb6cb18fdea93c3fcc4c0973bc9d5a5ed549a9 -Author: Pvl-bot -Date: Fri Jun 30 03:11:28 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/fruits/surfaces/strawberry_surface.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit ef536d3c0b83c3354e9d6abc7e9673d76b34ad59 -Author: Yiming Zuo -Date: Fri Jun 30 03:11:28 2023 -0400 - - Add 96 lines last edited by Yiming Zuo in worldgen/assets/fruits/surfaces/strawberry_surface.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 926da6b1bbf98cebd6bb37d87a1d79892d67f85c -Author: Pvl-bot -Date: Fri Jun 30 03:11:28 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/fruits/surfaces/coconutgreen_surface.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit cc2bcdcce487c9e4cadbf98f2a2d0567d2038d40 -Author: Yiming Zuo -Date: Fri Jun 30 03:11:28 2023 -0400 - - Add 93 lines last edited by Yiming Zuo in worldgen/assets/fruits/surfaces/coconutgreen_surface.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 9dd49b82f4c618cb7e0ec17011ab7452f1270656 -Author: Pvl-bot -Date: Fri Jun 30 03:11:28 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/fruits/surfaces/pineapple_surface.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 83c40183fdfb0dc8f920f3547b056d2299da9e64 -Author: Yiming Zuo -Date: Fri Jun 30 03:11:28 2023 -0400 - - Add 174 lines last edited by Yiming Zuo in worldgen/assets/fruits/surfaces/pineapple_surface.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit dce4a7271d06275ae92315a38783146f07428aab -Author: Lahav Lipson -Date: Fri Jun 30 03:11:28 2023 -0400 - - Add 1 lines last edited by Lahav Lipson in worldgen/assets/fruits/compositional_fruit.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit ebb4f2d5748bf6959d785e0777399c2941b1bd27 -Author: Pvl-bot -Date: Fri Jun 30 03:11:28 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/fruits/compositional_fruit.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 796df833c2f17e3c21b26aff9b21ed9d4d5383b3 -Author: Yiming Zuo -Date: Fri Jun 30 03:11:28 2023 -0400 - - Add 61 lines last edited by Yiming Zuo in worldgen/assets/fruits/compositional_fruit.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit dbbfb420c03e0586eb9dd7cb3bd82af509307b03 -Author: Lahav Lipson -Date: Fri Jun 30 03:11:28 2023 -0400 - - Add 1 lines last edited by Lahav Lipson in worldgen/assets/fruits/apple.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 1792c3ae3dbfd2b98be0993731b9db30972c5f94 -Author: Pvl-bot -Date: Fri Jun 30 03:11:28 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/fruits/apple.py - - Commit made automatically to show authorship. This version of the code is not usable. 
- -commit e6a18486386c20c683c22e736ef26657f5c05860 -Author: Yiming Zuo -Date: Fri Jun 30 03:11:28 2023 -0400 - - Add 92 lines last edited by Yiming Zuo in worldgen/assets/fruits/apple.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit b9e960c9392b2f1368dea76a31b855aff04412a4 -Author: Lahav Lipson -Date: Fri Jun 30 03:11:28 2023 -0400 - - Add 1 lines last edited by Lahav Lipson in worldgen/assets/fruits/strawberry.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit b0d54be1d52590d2c3dc17e0b91c70604c30d022 -Author: Pvl-bot -Date: Fri Jun 30 03:11:28 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/fruits/strawberry.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit cc5cf22a982167d0932f1851a88f9e988cb9c8ef -Author: Yiming Zuo -Date: Fri Jun 30 03:11:28 2023 -0400 - - Add 95 lines last edited by Yiming Zuo in worldgen/assets/fruits/strawberry.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 81fb223ea7a93ba2b43bab7bcc02f1276bab8368 -Author: Pvl-bot -Date: Fri Jun 30 03:11:28 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/fruits/stem_lib.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 29988239c302a2df95f36fd3a03ae8c92b633d5e -Author: Yiming Zuo -Date: Fri Jun 30 03:11:28 2023 -0400 - - Add 617 lines last edited by Yiming Zuo in worldgen/assets/fruits/stem_lib.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit d78bc6b2da1b8f7a2a28b55a6cad1fb8cfcba11d -Author: Lahav Lipson -Date: Fri Jun 30 03:11:28 2023 -0400 - - Add 2 lines last edited by Lahav Lipson in worldgen/assets/fruits/coconutgreen.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit b3d5199277dbbc68aea57b25b60e2734079ef028 -Author: Pvl-bot -Date: Fri Jun 30 03:11:28 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/fruits/coconutgreen.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit b41d0bc8ab60335da71c05ae4e541cd23208c1b1 -Author: Yiming Zuo -Date: Fri Jun 30 03:11:28 2023 -0400 - - Add 100 lines last edited by Yiming Zuo in worldgen/assets/fruits/coconutgreen.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 64a47ed0b0a1528e6e6e4cb97798fccef1e41693 -Author: Pvl-bot -Date: Fri Jun 30 03:11:28 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/fruits/cross_section_lib.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 2761b2043f61865997b484ef93f671302fd194a6 -Author: Yiming Zuo -Date: Fri Jun 30 03:11:28 2023 -0400 - - Add 251 lines last edited by Yiming Zuo in worldgen/assets/fruits/cross_section_lib.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 68a799e8f9ea4528cb6d6affa8b953528a6d0b6f -Author: Lahav Lipson -Date: Fri Jun 30 03:11:28 2023 -0400 - - Add 2 lines last edited by Lahav Lipson in worldgen/assets/fruits/general_fruit.py - - Commit made automatically to show authorship. This version of the code is not usable. 
- -commit c308a98a8d398638e03002eed9740d2433ee8c83 -Author: Pvl-bot -Date: Fri Jun 30 03:11:28 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/fruits/general_fruit.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit e0fa15e74ff68710635eabc1d78c3227598d5d7e -Author: Yiming Zuo -Date: Fri Jun 30 03:11:28 2023 -0400 - - Add 167 lines last edited by Yiming Zuo in worldgen/assets/fruits/general_fruit.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit d2cdf77edef61f136f73c9b942f8ba9bb00f9b87 -Author: Lahav Lipson -Date: Fri Jun 30 03:11:28 2023 -0400 - - Add 1 lines last edited by Lahav Lipson in worldgen/assets/fruits/starfruit.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 930803251ec0ec00cf2e4103912bf965923afa57 -Author: Pvl-bot -Date: Fri Jun 30 03:11:28 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/fruits/starfruit.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 2fce798df6aa40980b7813a5b92106c020831009 -Author: Yiming Zuo -Date: Fri Jun 30 03:11:28 2023 -0400 - - Add 85 lines last edited by Yiming Zuo in worldgen/assets/fruits/starfruit.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 6efbcb3cafcf4eaba9730960ef71ba6999afab4e -Author: Lahav Lipson -Date: Fri Jun 30 03:11:28 2023 -0400 - - Add 1 lines last edited by Lahav Lipson in worldgen/assets/fruits/durian.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit a9223b901fa0b4dd288341c489886ec4e8dd8388 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:27 2023 -0400 - - Add 4 lines last edited by Alexander Raistrick in worldgen/assets/fruits/durian.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 6b2d08340b40744089e6120290cdd4731d6f24c5 -Author: Pvl-bot -Date: Fri Jun 30 03:11:27 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/fruits/durian.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit ec52cedb338f51d9625b3e8c79125764c619c4d8 -Author: Yiming Zuo -Date: Fri Jun 30 03:11:27 2023 -0400 - - Add 92 lines last edited by Yiming Zuo in worldgen/assets/fruits/durian.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit c47928124f932564136750aecdbedc0f389e6fb7 -Author: Lahav Lipson -Date: Fri Jun 30 03:11:27 2023 -0400 - - Add 1 lines last edited by Lahav Lipson in worldgen/assets/fruits/blackberry.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit bb77306aa20424e6f1c201577a3612203140ec7c -Author: Pvl-bot -Date: Fri Jun 30 03:11:27 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/fruits/blackberry.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit b9cd9c0d6ea3ddfbc70d722b47eecb46e8ab0fdb -Author: Yiming Zuo -Date: Fri Jun 30 03:11:27 2023 -0400 - - Add 74 lines last edited by Yiming Zuo in worldgen/assets/fruits/blackberry.py - - Commit made automatically to show authorship. This version of the code is not usable. 
- -commit b4fd1a69b1b67cbdd27344ee3c91013b8105a3e1 -Author: Lahav Lipson -Date: Fri Jun 30 03:11:27 2023 -0400 - - Add 9 lines last edited by Lahav Lipson in worldgen/assets/fruits/__init__.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 3f0ea4da02f6f5ca1de6ae420a51888c33d8ec6c -Author: Pvl-bot -Date: Fri Jun 30 03:11:27 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/fruits/fruit_utils.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 0e26afc719509198ec25a9ac239c4d987ab36a2d -Author: Yiming Zuo -Date: Fri Jun 30 03:11:27 2023 -0400 - - Add 690 lines last edited by Yiming Zuo in worldgen/assets/fruits/fruit_utils.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit a7193831a907b9e99605f64e768e16e8f40a3c30 -Author: Lahav Lipson -Date: Fri Jun 30 03:11:27 2023 -0400 - - Add 1 lines last edited by Lahav Lipson in worldgen/assets/fruits/pineapple.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit e29aca0f3a1dac6c1f858332378d96b567b371e9 -Author: Pvl-bot -Date: Fri Jun 30 03:11:27 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/fruits/pineapple.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit ed7708823e60dba77264fd8d6d1e0c7c53247218 -Author: Yiming Zuo -Date: Fri Jun 30 03:11:27 2023 -0400 - - Add 107 lines last edited by Yiming Zuo in worldgen/assets/fruits/pineapple.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 2db6cae024b6961cdd16b302b0610b7892e71e48 -Author: Lahav Lipson -Date: Fri Jun 30 03:11:27 2023 -0400 - - Add 1 lines last edited by Lahav Lipson in worldgen/assets/fruits/coconuthairy.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 15f1215d9c5a4715e4b94f1aa635b86d8533149f -Author: Pvl-bot -Date: Fri Jun 30 03:11:27 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/fruits/coconuthairy.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit b97b5231595d2adae686ba10d10734ff5baddba1 -Author: Yiming Zuo -Date: Fri Jun 30 03:11:27 2023 -0400 - - Add 72 lines last edited by Yiming Zuo in worldgen/assets/fruits/coconuthairy.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit d541f3af606a07f033dc6d61f54f65bf7d50f11d -Author: Pvl-bot -Date: Fri Jun 30 03:11:27 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/fruits/seed_lib.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 64df98d039152ea22dfc7be40a1bd4eec0ec25c8 -Author: Yiming Zuo -Date: Fri Jun 30 03:11:27 2023 -0400 - - Add 59 lines last edited by Yiming Zuo in worldgen/assets/fruits/seed_lib.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit e724e581221d3ff9cf150a024df95762fd49d2ab -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:27 2023 -0400 - - Add 2 lines last edited by Alexander Raistrick in worldgen/assets/monocot/growth.py - - Commit made automatically to show authorship. This version of the code is not usable. 
Automated authorship commits (one per author per file), all dated Fri Jun 30 03:11 2023 -0400. Each commit is titled "Add N lines last edited by <author> in <file>" and carries the note "Commit made automatically to show authorship. This version of the code is not usable." Lines last edited in each file, by author:

worldgen/assets/monocot/growth.py: Yihan Wang 3, Pvl-bot 6, Lingjie Mei 205
worldgen/assets/monocot/grasses.py: Pvl-bot 6, Yihan Wang 9, Alexander Raistrick 9, Lingjie Mei 230
worldgen/assets/monocot/banana.py: Yihan Wang 3, Pvl-bot 6, Lingjie Mei 142
worldgen/assets/monocot/tussock.py: Yihan Wang 1, Pvl-bot 5, Lingjie Mei 35
worldgen/assets/monocot/pinecone.py: Yihan Wang 2, Pvl-bot 6, Lingjie Mei 71
worldgen/assets/monocot/agave.py: Alexander Raistrick 1, Yihan Wang 2, Pvl-bot 6, Lingjie Mei 61
worldgen/assets/monocot/generate.py: Alexander Raistrick 1, Yihan Wang 5, Pvl-bot 6, Lingjie Mei 46
worldgen/assets/monocot/__init__.py: Lingjie Mei 9
worldgen/assets/monocot/veratrum.py: Pvl-bot 6, Yihan Wang 6, Lingjie Mei 128
worldgen/assets/monocot/kelp.py: Pvl-bot 6, Lingjie Mei 105
worldgen/assets/corals/base.py: Pvl-bot 6, Lingjie Mei 20
worldgen/assets/corals/tentacles.py: Zeyu Ma 1, Alexander Raistrick 1, Pvl-bot 6, Yihan Wang 9, Lingjie Mei 98
worldgen/assets/corals/generate.py: Yihan Wang 4, Pvl-bot 6, Alexander Raistrick 18, Lingjie Mei 154
worldgen/assets/corals/__init__.py: Pvl-bot 6, Lingjie Mei 12
worldgen/assets/corals/elkhorn.py: Alexander Raistrick 1, Yihan Wang 2, Pvl-bot 6, Lingjie Mei 136
worldgen/assets/corals/diff_growth.py: Yihan Wang 2, Pvl-bot 6, Lingjie Mei 107
worldgen/assets/corals/laplacian.py: Yihan Wang 2, Pvl-bot 6, Lingjie Mei 23
worldgen/assets/corals/reaction_diffusion.py: Yihan Wang 2, Pvl-bot 6, Lingjie Mei 74
worldgen/assets/corals/star.py: Yihan Wang 2, Alexander Raistrick 4, Pvl-bot 6, Lingjie Mei 116
worldgen/assets/corals/fan.py: Alexander Raistrick 1, Yihan Wang 2, Pvl-bot 6, Lingjie Mei 83
worldgen/assets/corals/tree.py: Yihan Wang 2, Pvl-bot 6, Lingjie Mei 149
worldgen/assets/corals/tube.py: Yihan Wang 2, Pvl-bot 6, Lingjie Mei 71
worldgen/assets/trees/utils/materials.py: Pvl-bot 6, Alejandro Newell 245
worldgen/assets/trees/utils/mesh.py: Pvl-bot 6, Alexander Raistrick 8, Alejandro Newell 300
worldgen/assets/trees/utils/geometrynodes.py: Lingjie Mei 4, Pvl-bot 6, Yiming Zuo 57, Alexander Raistrick 269, Alejandro Newell 369
worldgen/assets/trees/utils/helper.py: Alexander Raistrick 5, Pvl-bot 6, Yiming Zuo 9, Alejandro Newell 222
worldgen/assets/trees/treeconfigs.py: Lingjie Mei 2, Pvl-bot 6, Alejandro Newell 30, Yiming Zuo 49, Alexander Raistrick 689
worldgen/assets/trees/generate.py: Lahav Lipson 1, Zeyu Ma 2, Yihan Wang 3, Lingjie Mei 4, Pvl-bot 6, Yiming Zuo 93, Alexander Raistrick 305
worldgen/assets/trees/__init__.py: Alexander Raistrick 1
worldgen/assets/trees/tree_flower.py: Alexander Raistrick 2, Lahav Lipson 4, Pvl-bot 9, Yiming Zuo 586
worldgen/assets/trees/tree.py: Pvl-bot 6, Alejandro Newell 38, Yiming Zuo 70, Lingjie Mei 90, Alexander Raistrick 261
worldgen/assets/leaves/leaf_maple.py: Lahav Lipson 2, Alexander Raistrick 4, Pvl-bot 6, Yiming Zuo 785
worldgen/assets/leaves/leaf_v2.py: Lahav Lipson 2, Pvl-bot 6, Alexander Raistrick 106, Yiming Zuo 889
worldgen/assets/leaves/leaf_ginko.py: Lahav Lipson 2, Alexander Raistrick 4, Pvl-bot 6, Yiming Zuo 516
worldgen/assets/leaves/leaf_pine.py: Lahav Lipson 2, Alexander Raistrick 2, Pvl-bot 6, Yiming Zuo 365
worldgen/assets/leaves/leaf_broadleaf.py: Lahav Lipson 2, Alexander Raistrick 4, Pvl-bot 6, Yiming Zuo 752
worldgen/assets/leaves/leaf.py: Lahav Lipson 2, Pvl-bot 6, Yiming Zuo 7, Alejandro Newell 28, Alexander Raistrick 112
worldgen/assets/leaves/leaf_wrapped.py: Pvl-bot 6, Yiming Zuo 178
worldgen/assets/mushroom/growth.py: Alexander Raistrick 1, Pvl-bot 6, Lingjie Mei 57
worldgen/assets/mushroom/stem.py: Yihan Wang 4, Pvl-bot 6, Lingjie Mei 119
worldgen/assets/mushroom/generate.py: Alexander Raistrick 1, Yihan Wang 2, Pvl-bot 6, Lingjie Mei 100
worldgen/assets/mushroom/__init__.py: Lingjie Mei 2
worldgen/assets/mushroom/cap.py: Yihan Wang 2, Pvl-bot 6, Lingjie Mei 374
worldgen/assets/small_plants/num_leaf_grass.py: Alexander Raistrick 1, Lahav Lipson 4, Pvl-bot 6, Beining Han 182
worldgen/assets/small_plants/fern.py: Lahav Lipson 2, Pvl-bot 6, Alexander Raistrick 19, Beining Han 734
worldgen/assets/small_plants/leaf_heart.py: Lahav Lipson 2, Pvl-bot 6, Beining Han 73
worldgen/assets/small_plants/leaf_general.py: Lahav Lipson 2, Pvl-bot 5, Beining Han 85
worldgen/assets/small_plants/__init__.py: Beining Han 2
worldgen/assets/small_plants/succulent.py: Pvl-bot 6, Yihan Wang 6, Alexander Raistrick 13, Beining Han 505
worldgen/assets/cactus/base.py: Pvl-bot 6, Lingjie Mei 21
worldgen/assets/cactus/pricky_pear.py: Yihan Wang 2, Pvl-bot 6, Lingjie Mei 62
worldgen/assets/cactus/columnar.py: Yihan Wang 2, Pvl-bot 6, Lingjie Mei 92
worldgen/assets/cactus/spike.py: Zeyu Ma 1, Alexander Raistrick 5, Pvl-bot 6, Yihan Wang 8, Lingjie Mei 98
worldgen/assets/cactus/generate.py: Alexander Raistrick 2, Yihan Wang 3, Pvl-bot 6, Lingjie Mei 89
worldgen/assets/cactus/kalidium.py: Yihan Wang 2, Alexander Raistrick 2, Pvl-bot 6, Lingjie Mei 88
worldgen/assets/cactus/__init__.py: Pvl-bot 6, Lingjie Mei 6
worldgen/assets/cactus/globular.py: Yihan Wang 2, Pvl-bot 6, Lingjie Mei 48
worldgen/assets/insects/assembled/dragonfly.py: Pvl-bot 6, Alexander Raistrick 71, Yiming Zuo 240
worldgen/assets/insects/parts/head/dragonfly_head.py: Pvl-bot 6, Yiming Zuo 178
worldgen/assets/insects/parts/eye/dragonfly_eye.py: Pvl-bot 6, Yiming Zuo 69
worldgen/assets/insects/parts/tail/dragonfly_tail.py: Pvl-bot 6, Yiming Zuo 378
worldgen/assets/insects/parts/antenna/dragonfly_antenna.py: Pvl-bot 6, Yiming Zuo 28
worldgen/assets/insects/parts/leg/dragonfly_leg.py: Pvl-bot 6, Yiming Zuo 186
worldgen/assets/insects/parts/wing/dragonfly_wing.py: Pvl-bot 6, Yiming Zuo 280
worldgen/assets/insects/parts/body/dragonfly_body.py: Pvl-bot 6, Yiming Zuo 225
worldgen/assets/insects/parts/mouth/dragonfly_mouth.py: Pvl-bot 6, Yiming Zuo 67
worldgen/assets/insects/parts/hair/principled_hair.py: Pvl-bot 6, Yiming Zuo 31
worldgen/assets/insects/utils/geom_utils.py: Pvl-bot 6, Yiming Zuo 829
worldgen/assets/insects/utils/shader_utils.py: Pvl-bot 6, Yiming Zuo 87
worldgen/assets/utils/object.py: Alexander Raistrick 2, Pvl-bot 6, Lingjie Mei 101
worldgen/assets/utils/decorate.py: Pvl-bot 6, Alexander Raistrick 6, Lingjie Mei 239
worldgen/assets/utils/physics.py: Pvl-bot 6, Lingjie Mei 43
worldgen/assets/utils/shortest_path.py: Alexander Raistrick 3, Pvl-bot 8, Lingjie Mei 35
worldgen/assets/utils/__init__.py: Pvl-bot 5
worldgen/assets/utils/tag.py: Yihan Wang 4, Pvl-bot 6, Lahav Lipson 139
worldgen/assets/utils/mesh.py: Pvl-bot 6, Lingjie Mei 65
worldgen/assets/utils/diff_growth.py: Pvl-bot 6, Lingjie Mei 81
worldgen/assets/utils/nodegroup.py: Pvl-bot 6, Lingjie Mei 84
worldgen/assets/utils/draw.py: Pvl-bot 6, Lingjie Mei 155
worldgen/assets/utils/laplacian.py: Pvl-bot 6, Lingjie Mei 96
worldgen/assets/utils/reaction_diffusion.py: Pvl-bot 6, Lingjie Mei 69
worldgen/assets/utils/misc.py: Pvl-bot 6, Lingjie Mei 52
worldgen/assets/deformed_trees/base.py: Alexander Raistrick 1, Pvl-bot 6, Lingjie Mei 54
worldgen/assets/deformed_trees/hollow.py: Alexander Raistrick 1, Yihan Wang 2, Pvl-bot 6, Lingjie Mei 68
- -commit c78d553503bfd20989ab5fb97d245284231f96df -Author: Yihan Wang -Date: Fri Jun 30 03:11:20 2023 -0400 - - Add 1 lines last edited by Yihan Wang in worldgen/assets/deformed_trees/generate.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit c0b701697da307653db0d04f0d5acc18637683df -Author: Pvl-bot -Date: Fri Jun 30 03:11:20 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/deformed_trees/generate.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit f5478819e4f2d64af778fde85c74b9f2f141a237 -Author: Lingjie Mei -Date: Fri Jun 30 03:11:20 2023 -0400 - - Add 18 lines last edited by Lingjie Mei in worldgen/assets/deformed_trees/generate.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 5838146649aaa36a618f12c55f12786659126725 -Author: Lingjie Mei -Date: Fri Jun 30 03:11:20 2023 -0400 - - Add 5 lines last edited by Lingjie Mei in worldgen/assets/deformed_trees/__init__.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 937adca4fe3f79142315143889da4b84966e02d0 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:20 2023 -0400 - - Add 1 lines last edited by Alexander Raistrick in worldgen/assets/deformed_trees/fallen.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit fc3961b0120d74039ef4f3458d73e37ec0945b79 -Author: Yihan Wang -Date: Fri Jun 30 03:11:20 2023 -0400 - - Add 2 lines last edited by Yihan Wang in worldgen/assets/deformed_trees/fallen.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 04c0a997f324c0a4d3a1f5a02ec4bd9ea2b57e39 -Author: Pvl-bot -Date: Fri Jun 30 03:11:20 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/deformed_trees/fallen.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit b54f153677ab77883c62e27b3b61ce02b7113715 -Author: Lingjie Mei -Date: Fri Jun 30 03:11:20 2023 -0400 - - Add 76 lines last edited by Lingjie Mei in worldgen/assets/deformed_trees/fallen.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 187bd18fe0537ed550ebe452cdb4c41239f78e53 -Author: Yihan Wang -Date: Fri Jun 30 03:11:20 2023 -0400 - - Add 2 lines last edited by Yihan Wang in worldgen/assets/deformed_trees/truncated.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit faad74edef4054fd62adf72c611ba2aea0e85d79 -Author: Pvl-bot -Date: Fri Jun 30 03:11:20 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/deformed_trees/truncated.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit d3d40dcce054193c20eb08bdb4b44e23c71e5610 -Author: Lingjie Mei -Date: Fri Jun 30 03:11:20 2023 -0400 - - Add 35 lines last edited by Lingjie Mei in worldgen/assets/deformed_trees/truncated.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit c991e0848f739c7875084c315806a9c97ac8df79 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:20 2023 -0400 - - Add 1 lines last edited by Alexander Raistrick in worldgen/assets/deformed_trees/rotten.py - - Commit made automatically to show authorship. This version of the code is not usable. 
- -commit 2cabd7f412274922aff7515000818ee85391221f -Author: Yihan Wang -Date: Fri Jun 30 03:11:20 2023 -0400 - - Add 2 lines last edited by Yihan Wang in worldgen/assets/deformed_trees/rotten.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit ed6cd70e7ef5666a5bb761d04fbb6f533b66b7c2 -Author: Pvl-bot -Date: Fri Jun 30 03:11:20 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/deformed_trees/rotten.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 5823a9e8ca1bc3cef7ae93e6f44ea8b67f75ea5a -Author: Lingjie Mei -Date: Fri Jun 30 03:11:20 2023 -0400 - - Add 81 lines last edited by Lingjie Mei in worldgen/assets/deformed_trees/rotten.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit df779c3cce33bb74dd911a99b06395a5de07287a -Author: Pvl-bot -Date: Fri Jun 30 03:11:20 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/creatures/util/creature_parser.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit bfe0436df208d4f89f76f7961c88ea6b82d3c90d -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:19 2023 -0400 - - Add 135 lines last edited by Alexander Raistrick in worldgen/assets/creatures/util/creature_parser.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 18fce8601a024d39fcb6a29f3cd6b10c941ffc3c -Author: Karhan Kayan -Date: Fri Jun 30 03:11:19 2023 -0400 - - Add 4 lines last edited by Karhan Kayan in worldgen/assets/creatures/util/geonode_part.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit e666c6814e592c698677a7c4d1831546efc656d8 -Author: Pvl-bot -Date: Fri Jun 30 03:11:19 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/creatures/util/geonode_part.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 7ef45ed84dd6f79aedf1a5de7934a5f7002a187f -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:19 2023 -0400 - - Add 98 lines last edited by Alexander Raistrick in worldgen/assets/creatures/util/geonode_part.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 97646d353737cd34114a5fa32d48f8f3e7589bf6 -Author: Pvl-bot -Date: Fri Jun 30 03:11:19 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/creatures/util/join_smoothing.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 63b793d36d4e36c8d7e3756f434a362c3038d9c7 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:19 2023 -0400 - - Add 217 lines last edited by Alexander Raistrick in worldgen/assets/creatures/util/join_smoothing.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit a71a6331ad69dc659075abf06f6fd4412ce63978 -Author: Pvl-bot -Date: Fri Jun 30 03:11:19 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/creatures/util/tree.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit e8fd58873deb24f7a8ed493ecb8a7eb471d7b1a3 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:19 2023 -0400 - - Add 62 lines last edited by Alexander Raistrick in worldgen/assets/creatures/util/tree.py - - Commit made automatically to show authorship. This version of the code is not usable. 
- -commit a22533f323ad7b53b308e6a621dcda6209d5f424 -Author: Pvl-bot -Date: Fri Jun 30 03:11:19 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/creatures/util/part_util.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit c2b138966cbc46e3b870272f928e11547bcb01c3 -Author: Hongyu Wen -Date: Fri Jun 30 03:11:19 2023 -0400 - - Add 43 lines last edited by Hongyu Wen in worldgen/assets/creatures/util/part_util.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 7472ff532d6d6d42f4dfdf5b2014f4ca48b127c0 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:19 2023 -0400 - - Add 169 lines last edited by Alexander Raistrick in worldgen/assets/creatures/util/part_util.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 74c5e265676e620f94c440aca5a847d1a7d950f7 -Author: Pvl-bot -Date: Fri Jun 30 03:11:19 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/creatures/parts/crustacean/antenna.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit af1dfdf39d2c5297d2c474c7710f001d3ad1580a -Author: Lingjie Mei -Date: Fri Jun 30 03:11:19 2023 -0400 - - Add 52 lines last edited by Lingjie Mei in worldgen/assets/creatures/parts/crustacean/antenna.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 6449159f7f35a479690faa870c7f7a83341b7d39 -Author: Pvl-bot -Date: Fri Jun 30 03:11:19 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/creatures/parts/crustacean/fin.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 8170a0134bde00f4fb0e4a34dc766009e425f59b -Author: Lingjie Mei -Date: Fri Jun 30 03:11:19 2023 -0400 - - Add 29 lines last edited by Lingjie Mei in worldgen/assets/creatures/parts/crustacean/fin.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 67a12ce06e6c4314526718660deebf6fbb3d61eb -Author: Pvl-bot -Date: Fri Jun 30 03:11:19 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/creatures/parts/crustacean/leg.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 5ad9f97f11f4a689311abe74fee1a34cd2252723 -Author: Lingjie Mei -Date: Fri Jun 30 03:11:19 2023 -0400 - - Add 78 lines last edited by Lingjie Mei in worldgen/assets/creatures/parts/crustacean/leg.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 8735b715042fbd973075d7992eaf2828ce88faf4 -Author: Pvl-bot -Date: Fri Jun 30 03:11:19 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/creatures/parts/crustacean/eye.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit cb9350f3e12111a0e8a3de5ddf20c42b10c8179c -Author: Lingjie Mei -Date: Fri Jun 30 03:11:19 2023 -0400 - - Add 33 lines last edited by Lingjie Mei in worldgen/assets/creatures/parts/crustacean/eye.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 0b1476aead15787ab55e79bde8e93d59bb4f280f -Author: Pvl-bot -Date: Fri Jun 30 03:11:19 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/creatures/parts/crustacean/tail.py - - Commit made automatically to show authorship. This version of the code is not usable. 
- -commit 40304e10c3b4eb7b90a1271dd55af4ae1c5651cf -Author: Lingjie Mei -Date: Fri Jun 30 03:11:19 2023 -0400 - - Add 82 lines last edited by Lingjie Mei in worldgen/assets/creatures/parts/crustacean/tail.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit a24a50efa21a0af1d9f245dd5d7f6948328e6c32 -Author: Pvl-bot -Date: Fri Jun 30 03:11:19 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/creatures/parts/crustacean/body.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 85f58288e621af812bcdbcdd58b8df3f45a68e64 -Author: Lingjie Mei -Date: Fri Jun 30 03:11:19 2023 -0400 - - Add 271 lines last edited by Lingjie Mei in worldgen/assets/creatures/parts/crustacean/body.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 7ba8722f71ed0a521c967547c07ed8583a68c826 -Author: Pvl-bot -Date: Fri Jun 30 03:11:19 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/creatures/parts/crustacean/claw.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 34955ea00f3ba0d5b1581b8f2501892eb24a2354 -Author: Lingjie Mei -Date: Fri Jun 30 03:11:19 2023 -0400 - - Add 166 lines last edited by Lingjie Mei in worldgen/assets/creatures/parts/crustacean/claw.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 79b45f9c23991ff5a3dd6bacf157ca172940a60c -Author: Pvl-bot -Date: Fri Jun 30 03:11:19 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/creatures/parts/utils/draw.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 4c5f4fb376b7b96e008036c8c25ee8100b2b03ba -Author: Lingjie Mei -Date: Fri Jun 30 03:11:19 2023 -0400 - - Add 56 lines last edited by Lingjie Mei in worldgen/assets/creatures/parts/utils/draw.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 9672da989fb1ce89718d2ff86f69880ebf0dd809 -Author: Lahav Lipson -Date: Fri Jun 30 03:11:19 2023 -0400 - - Add 3 lines last edited by Lahav Lipson in worldgen/assets/creatures/parts/fin_old.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit d8ba795f7406232ad0126686c4d7765d6f5c299b -Author: Pvl-bot -Date: Fri Jun 30 03:11:18 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/creatures/parts/fin_old.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 983e237798bdab9702b484636de3a2a58fdfd62e -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:18 2023 -0400 - - Add 105 lines last edited by Alexander Raistrick in worldgen/assets/creatures/parts/fin_old.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 49ebf25061994944110dfbda08ecb12045445dcd -Author: Yihan Wang -Date: Fri Jun 30 03:11:18 2023 -0400 - - Add 2 lines last edited by Yihan Wang in worldgen/assets/creatures/parts/ridged_fin.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit faa6619b19bf3afa6cfaeb6c08fb091587d72f59 -Author: Pvl-bot -Date: Fri Jun 30 03:11:18 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/creatures/parts/ridged_fin.py - - Commit made automatically to show authorship. This version of the code is not usable. 
- -commit b7ff89d5a57bb283ab911957f9a97d561a7ae86e -Author: Mingzhe Wang -Date: Fri Jun 30 03:11:18 2023 -0400 - - Add 193 lines last edited by Mingzhe Wang in worldgen/assets/creatures/parts/ridged_fin.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit bbff5f927415b8029c3fbdfbae6f2c755ceb7c1f -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:18 2023 -0400 - - Add 275 lines last edited by Alexander Raistrick in worldgen/assets/creatures/parts/ridged_fin.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 7c286ca38486722766cab9e2fbb34d6621693161 -Author: Lahav Lipson -Date: Fri Jun 30 03:11:18 2023 -0400 - - Add 2 lines last edited by Lahav Lipson in worldgen/assets/creatures/parts/foot.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 77cf4868bf846df45e26e8d7b6888070bec39a13 -Author: Pvl-bot -Date: Fri Jun 30 03:11:18 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/creatures/parts/foot.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 1b9b91a5108366c095090c4762c906bd27168580 -Author: Beining Han -Date: Fri Jun 30 03:11:18 2023 -0400 - - Add 127 lines last edited by Beining Han in worldgen/assets/creatures/parts/foot.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 34d6fa5de7a137632e9bdd06b7b902cee576a4cf -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:18 2023 -0400 - - Add 159 lines last edited by Alexander Raistrick in worldgen/assets/creatures/parts/foot.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit b60d2d6ec7e16b4dd6468f451f8663140d0a278b -Author: Pvl-bot -Date: Fri Jun 30 03:11:18 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/creatures/parts/eye_new.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 6b21294f55c3060253984063da01384e390cda93 -Author: Mingzhe Wang -Date: Fri Jun 30 03:11:18 2023 -0400 - - Add 2425 lines last edited by Mingzhe Wang in worldgen/assets/creatures/parts/eye_new.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 96f78c135b08bf1f158abcce8613a3ab0d9dbd1f -Author: Karhan Kayan -Date: Fri Jun 30 03:11:18 2023 -0400 - - Add 2 lines last edited by Karhan Kayan in worldgen/assets/creatures/parts/head_detail.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit fc10e38b3d9dfc74c696dc475bb7b45eb7153d4d -Author: Pvl-bot -Date: Fri Jun 30 03:11:18 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/creatures/parts/head_detail.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit cce5f0e7f2d055c652bd0723a1e078cc2a75df72 -Author: Lahav Lipson -Date: Fri Jun 30 03:11:18 2023 -0400 - - Add 6 lines last edited by Lahav Lipson in worldgen/assets/creatures/parts/head_detail.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 724a60d242b0dc1e271c51bcc1eb38d142e0a425 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:18 2023 -0400 - - Add 189 lines last edited by Alexander Raistrick in worldgen/assets/creatures/parts/head_detail.py - - Commit made automatically to show authorship. This version of the code is not usable. 
- -commit accf12cbadd0332f1cedde7a5e58d3b762609f02 -Author: Pvl-bot -Date: Fri Jun 30 03:11:18 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/creatures/parts/head.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 22a9ad6181148caf18e511a83206713161af5ec0 -Author: Lahav Lipson -Date: Fri Jun 30 03:11:18 2023 -0400 - - Add 6 lines last edited by Lahav Lipson in worldgen/assets/creatures/parts/head.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 41f90d29b964741008240dbc787b3f4206a9d722 -Author: Beining Han -Date: Fri Jun 30 03:11:18 2023 -0400 - - Add 14 lines last edited by Beining Han in worldgen/assets/creatures/parts/head.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit f6395e08f8bdc07c517583a94ff1959a74589227 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:18 2023 -0400 - - Add 597 lines last edited by Alexander Raistrick in worldgen/assets/creatures/parts/head.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 21e885144f94872f30ddbcae47231c087cd554d6 -Author: Lahav Lipson -Date: Fri Jun 30 03:11:18 2023 -0400 - - Add 5 lines last edited by Lahav Lipson in worldgen/assets/creatures/parts/leg.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit b15bc841f202f4f9309b77c3f2e873a8c9492a7f -Author: Pvl-bot -Date: Fri Jun 30 03:11:17 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/creatures/parts/leg.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 5490afa859ed5a4a93f43a5823e001c858cf7e4a -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:17 2023 -0400 - - Add 263 lines last edited by Alexander Raistrick in worldgen/assets/creatures/parts/leg.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 8d65e47e8e06e26489850acfbd6961ca9a78ef1f -Author: Lahav Lipson -Date: Fri Jun 30 03:11:17 2023 -0400 - - Add 3 lines last edited by Lahav Lipson in worldgen/assets/creatures/parts/wings.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 7a01c9f0c208ea6c41498b07eba259bce06f3b85 -Author: Pvl-bot -Date: Fri Jun 30 03:11:17 2023 -0400 - - Add 8 lines last edited by Pvl-bot in worldgen/assets/creatures/parts/wings.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit b4943cb5e7ecab70388fe866597f239d79b6a18a -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:17 2023 -0400 - - Add 301 lines last edited by Alexander Raistrick in worldgen/assets/creatures/parts/wings.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 1847dd9ca523d267493f87da2a6c38002b0d9ce8 -Author: Beining Han -Date: Fri Jun 30 03:11:17 2023 -0400 - - Add 359 lines last edited by Beining Han in worldgen/assets/creatures/parts/wings.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit b4e3f77200e1327b3172f5c4a090b97616c9239f -Author: Lahav Lipson -Date: Fri Jun 30 03:11:17 2023 -0400 - - Add 3 lines last edited by Lahav Lipson in worldgen/assets/creatures/parts/eye.py - - Commit made automatically to show authorship. This version of the code is not usable. 
- -commit af1f2543a1e97bf31bb5790ef4a2fe807fbb0224 -Author: Pvl-bot -Date: Fri Jun 30 03:11:17 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/creatures/parts/eye.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 9283f2d36414c5a94211b92b920230102d0ecbfa -Author: Mingzhe Wang -Date: Fri Jun 30 03:11:17 2023 -0400 - - Add 6 lines last edited by Mingzhe Wang in worldgen/assets/creatures/parts/eye.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit f651188d061f06b386db5dbf92be5b765a9886d9 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:17 2023 -0400 - - Add 153 lines last edited by Alexander Raistrick in worldgen/assets/creatures/parts/eye.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 3280342de4602422f00095f857647c8711f97a38 -Author: Lahav Lipson -Date: Fri Jun 30 03:11:17 2023 -0400 - - Add 2 lines last edited by Lahav Lipson in worldgen/assets/creatures/parts/tail.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 2a390f83c026e2eb6f40b1decc5b26e79ae17538 -Author: Pvl-bot -Date: Fri Jun 30 03:11:17 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/creatures/parts/tail.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit a01ae86f567a34b7243a4f841adff26e039f398e -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:17 2023 -0400 - - Add 47 lines last edited by Alexander Raistrick in worldgen/assets/creatures/parts/tail.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 780beca5b505ac70a52fa10f2aa0e1054d847fb5 -Author: Hongyu Wen -Date: Fri Jun 30 03:11:17 2023 -0400 - - Add 1 lines last edited by Hongyu Wen in worldgen/assets/creatures/parts/__init__.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 49e28a4d95a4ffc7cb79c472d7a3a2675fae575e -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:17 2023 -0400 - - Add 4 lines last edited by Alexander Raistrick in worldgen/assets/creatures/parts/__init__.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 2173db844a8ff4bb082221c4223b07a79a5c35e0 -Author: Pvl-bot -Date: Fri Jun 30 03:11:17 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/creatures/parts/chameleon.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 01e97ac0313d1c501c4dc2d914ac290da503aab3 -Author: Hongyu Wen -Date: Fri Jun 30 03:11:17 2023 -0400 - - Add 1592 lines last edited by Hongyu Wen in worldgen/assets/creatures/parts/chameleon.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit d4456b504acf69e9c2903a69b0206ca21a77e5d5 -Author: Lahav Lipson -Date: Fri Jun 30 03:11:17 2023 -0400 - - Add 4 lines last edited by Lahav Lipson in worldgen/assets/creatures/parts/hoof.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 184ddca5d357d37ce9e934820d27fc947dd2a095 -Author: Pvl-bot -Date: Fri Jun 30 03:11:17 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/creatures/parts/hoof.py - - Commit made automatically to show authorship. This version of the code is not usable. 
- -commit ac45fbc25665ea2f4d0e454fc490a38e07bda557 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:17 2023 -0400 - - Add 21 lines last edited by Alexander Raistrick in worldgen/assets/creatures/parts/hoof.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 4fb3b14cedcd691836bdcacb8be0077744db375e -Author: Hongyu Wen -Date: Fri Jun 30 03:11:17 2023 -0400 - - Add 177 lines last edited by Hongyu Wen in worldgen/assets/creatures/parts/hoof.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit ab74d593a0d9d9f3b3bbfa81538e2c9f50cd0b1c -Author: Lahav Lipson -Date: Fri Jun 30 03:11:17 2023 -0400 - - Add 4 lines last edited by Lahav Lipson in worldgen/assets/creatures/parts/body.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 23ffb961a957cb4d9dd8cedc72ae9bc754df02f8 -Author: Pvl-bot -Date: Fri Jun 30 03:11:17 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/creatures/parts/body.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 220abbcc91f97cc14c112d4e54eb3bcb96f50114 -Author: Beining Han -Date: Fri Jun 30 03:11:17 2023 -0400 - - Add 14 lines last edited by Beining Han in worldgen/assets/creatures/parts/body.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 5e94c9368bf736a84d7754dc5fdd7f781891381b -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:17 2023 -0400 - - Add 199 lines last edited by Alexander Raistrick in worldgen/assets/creatures/parts/body.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 71e19b570e967b549a93195dd3c7bf8a6539a45a -Author: Hongyu Wen -Date: Fri Jun 30 03:11:16 2023 -0400 - - Add 368 lines last edited by Hongyu Wen in worldgen/assets/creatures/parts/beak.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit e1c34beb283aff36576821650a83b1a282d93ae5 -Author: Yihan Wang -Date: Fri Jun 30 03:11:16 2023 -0400 - - Add 2 lines last edited by Yihan Wang in worldgen/assets/creatures/parts/horn.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit f82afae24e139b74b62f20691b468ea8188b542f -Author: Pvl-bot -Date: Fri Jun 30 03:11:16 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/creatures/parts/horn.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit f692b4d67d65e46d8c4027441bb2aca9e40ab63b -Author: Hongyu Wen -Date: Fri Jun 30 03:11:16 2023 -0400 - - Add 90 lines last edited by Hongyu Wen in worldgen/assets/creatures/parts/horn.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 8783dc34b56a8a5584d5091ac0a1b53f63d9e4bb -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:16 2023 -0400 - - Add 175 lines last edited by Alexander Raistrick in worldgen/assets/creatures/parts/horn.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 814803614f4507fd480b6ddd83969a6c08a50530 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:16 2023 -0400 - - Add 4 lines last edited by Alexander Raistrick in worldgen/assets/creatures/parts/reptile_detail.py - - Commit made automatically to show authorship. This version of the code is not usable. 
- -commit eb64ca465a7b6eff0d9853dc66f0bc7582a18780 -Author: Pvl-bot -Date: Fri Jun 30 03:11:16 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/creatures/parts/reptile_detail.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 59f6bbf54e5e9fb2e60d2b6e5a55045a8558c8ef -Author: Hongyu Wen -Date: Fri Jun 30 03:11:16 2023 -0400 - - Add 1386 lines last edited by Hongyu Wen in worldgen/assets/creatures/parts/reptile_detail.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 27456f3c02880d1471a5a67097e8ddbd74e40232 -Author: Lahav Lipson -Date: Fri Jun 30 03:11:16 2023 -0400 - - Add 3 lines last edited by Lahav Lipson in worldgen/assets/creatures/parts/generic_nurbs.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit d803963f6925a1df879187f5f86a9e00cb6e27e3 -Author: Pvl-bot -Date: Fri Jun 30 03:11:16 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/creatures/parts/generic_nurbs.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 2f56825bf85cbe3187d00cc73cb37a9d8ad8d2d9 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:16 2023 -0400 - - Add 155 lines last edited by Alexander Raistrick in worldgen/assets/creatures/parts/generic_nurbs.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 990f031486170b964981bce11db5e362a4420a0c -Author: Lahav Lipson -Date: Fri Jun 30 03:11:16 2023 -0400 - - Add 3 lines last edited by Lahav Lipson in worldgen/assets/creatures/genomes/carnivore.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 5ce5e20c404a37536f19d4317f24dcbea18f6b93 -Author: Pvl-bot -Date: Fri Jun 30 03:11:16 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/creatures/genomes/carnivore.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit e350c0b1c1d21796c0ca9582d07a14f3e5173120 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:16 2023 -0400 - - Add 214 lines last edited by Alexander Raistrick in worldgen/assets/creatures/genomes/carnivore.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 0a96bf67949087bbe45f29fcea2387af4d6849f8 -Author: Pvl-bot -Date: Fri Jun 30 03:11:16 2023 -0400 - - Add 8 lines last edited by Pvl-bot in worldgen/assets/creatures/genomes/reptile.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 9544466aed78499a38fbb86f07b92e2147365a04 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:16 2023 -0400 - - Add 126 lines last edited by Alexander Raistrick in worldgen/assets/creatures/genomes/reptile.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 266ef52c97ed1b4270325ddc00fcf3a09d39a0a2 -Author: Hongyu Wen -Date: Fri Jun 30 03:11:16 2023 -0400 - - Add 410 lines last edited by Hongyu Wen in worldgen/assets/creatures/genomes/reptile.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 69dd9eaf7b2b2bdfb3748f747c5e8822a7633cc1 -Author: Lahav Lipson -Date: Fri Jun 30 03:11:16 2023 -0400 - - Add 2 lines last edited by Lahav Lipson in worldgen/assets/creatures/genomes/herbivore.py - - Commit made automatically to show authorship. This version of the code is not usable. 
- -commit e3f0e29ece4835464bc5e4c512a3c57d4e68701b -Author: Pvl-bot -Date: Fri Jun 30 03:11:16 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/creatures/genomes/herbivore.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit ba271b8b06376f45b828853b18a09ddf84507cf5 -Author: Hongyu Wen -Date: Fri Jun 30 03:11:16 2023 -0400 - - Add 8 lines last edited by Hongyu Wen in worldgen/assets/creatures/genomes/herbivore.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 0f2178e5f0e654f08baeb935bf2a3dcc7eaa8e7c -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:16 2023 -0400 - - Add 219 lines last edited by Alexander Raistrick in worldgen/assets/creatures/genomes/herbivore.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit a778e8aeac7ab38bf95eaebd85b3a9abd869c092 -Author: Lahav Lipson -Date: Fri Jun 30 03:11:16 2023 -0400 - - Add 1 lines last edited by Lahav Lipson in worldgen/assets/creatures/genomes/beetle.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit f6133be082f9d057e67f0f4ab4fcfa02316e19b5 -Author: Pvl-bot -Date: Fri Jun 30 03:11:16 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/creatures/genomes/beetle.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 5c2922899ae6fb48e5d709bf9c455a0dc96d4500 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:16 2023 -0400 - - Add 193 lines last edited by Alexander Raistrick in worldgen/assets/creatures/genomes/beetle.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 4e5f7959236bc96435ffe75c5ffef9f764c91559 -Author: Lahav Lipson -Date: Fri Jun 30 03:11:16 2023 -0400 - - Add 2 lines last edited by Lahav Lipson in worldgen/assets/creatures/genomes/bird.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 9ab62f63fe519cd51c2ae1ca302bbb3cf16e925b -Author: Hongyu Wen -Date: Fri Jun 30 03:11:15 2023 -0400 - - Add 2 lines last edited by Hongyu Wen in worldgen/assets/creatures/genomes/bird.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 160e451fbf43423d452f9c67ea2771f2c543d38c -Author: Zeyu Ma -Date: Fri Jun 30 03:11:15 2023 -0400 - - Add 4 lines last edited by Zeyu Ma in worldgen/assets/creatures/genomes/bird.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit c210e18356612770a4f193fab0724bbd2291f0e4 -Author: Pvl-bot -Date: Fri Jun 30 03:11:15 2023 -0400 - - Add 8 lines last edited by Pvl-bot in worldgen/assets/creatures/genomes/bird.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 92dcb39ab83f6017a1e1a501f014b11d271fbd3a -Author: Beining Han -Date: Fri Jun 30 03:11:15 2023 -0400 - - Add 100 lines last edited by Beining Han in worldgen/assets/creatures/genomes/bird.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 30cefb8c88c4d208ad066a96c356309d064ec4bf -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:15 2023 -0400 - - Add 221 lines last edited by Alexander Raistrick in worldgen/assets/creatures/genomes/bird.py - - Commit made automatically to show authorship. This version of the code is not usable. 
- -commit 75fd21564a589447c43035b838130a6fa00c852e -Author: Pvl-bot -Date: Fri Jun 30 03:11:15 2023 -0400 - - Add 8 lines last edited by Pvl-bot in worldgen/assets/creatures/genomes/fish.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit f0d582c14803cdb87a60f1845dd6cd19c5256cf8 -Author: Mingzhe Wang -Date: Fri Jun 30 03:11:15 2023 -0400 - - Add 31 lines last edited by Mingzhe Wang in worldgen/assets/creatures/genomes/fish.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 36d58f373d3256a3fd4b2004f63664b553a0933d -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:15 2023 -0400 - - Add 289 lines last edited by Alexander Raistrick in worldgen/assets/creatures/genomes/fish.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 69fb6d85e4c2c333e45c26140f824300d4b27d07 -Author: Zeyu Ma -Date: Fri Jun 30 03:11:15 2023 -0400 - - Add 4 lines last edited by Zeyu Ma in worldgen/assets/creatures/genomes/crustacean.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit bbe1fec3619ee7462162c0857d88fc0242bfad51 -Author: Pvl-bot -Date: Fri Jun 30 03:11:15 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/creatures/genomes/crustacean.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit a0f510fb557acfe471866895485d1ae059b17861 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:15 2023 -0400 - - Add 7 lines last edited by Alexander Raistrick in worldgen/assets/creatures/genomes/crustacean.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 6e944b5177b9a1a28f337922c9c43b3b5ce14296 -Author: Lingjie Mei -Date: Fri Jun 30 03:11:15 2023 -0400 - - Add 302 lines last edited by Lingjie Mei in worldgen/assets/creatures/genomes/crustacean.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 77ff1a7324ea33ad68069f109a47378650b5f354 -Author: Pvl-bot -Date: Fri Jun 30 03:11:15 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/creatures/tools/dev_script_save_nurbs_handles.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 232f96dc30719dde69a5e0959c1bd1d834210c25 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:15 2023 -0400 - - Add 37 lines last edited by Alexander Raistrick in worldgen/assets/creatures/tools/dev_script_save_nurbs_handles.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 777eef102df53be31e7aeafce685ca20c15048a0 -Author: Pvl-bot -Date: Fri Jun 30 03:11:15 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/creatures/animation/driver_wiggle.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 0ee1defacf98977bed1d5dc0d99ce5846c7ac804 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:15 2023 -0400 - - Add 51 lines last edited by Alexander Raistrick in worldgen/assets/creatures/animation/driver_wiggle.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit cb961960e4dea162e324483d8caabdfbd5da9d7f -Author: Pvl-bot -Date: Fri Jun 30 03:11:15 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/creatures/animation/curve_slither.py - - Commit made automatically to show authorship. 
This version of the code is not usable. - -commit 116797cd802970951bdce3a5f3ca0dd62adcde6d -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:15 2023 -0400 - - Add 208 lines last edited by Alexander Raistrick in worldgen/assets/creatures/animation/curve_slither.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit f82f3dce8834597deadbfaf2ac64984767326df2 -Author: Pvl-bot -Date: Fri Jun 30 03:11:15 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/creatures/animation/idle.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 2119a9bfff11dcff2401b914d4c641b5d77215e0 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:15 2023 -0400 - - Add 147 lines last edited by Alexander Raistrick in worldgen/assets/creatures/animation/idle.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 85acc7cab889620bdbf267c9310cd99bdd3605db -Author: Pvl-bot -Date: Fri Jun 30 03:11:15 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/creatures/animation/run_cycle.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 819d8249e40e0daac502a0665400edde593bad3a -Author: Hongyu Wen -Date: Fri Jun 30 03:11:15 2023 -0400 - - Add 142 lines last edited by Hongyu Wen in worldgen/assets/creatures/animation/run_cycle.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 337fa9b0679f3f1ba984779642f1e52ce1dfccf2 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:15 2023 -0400 - - Add 185 lines last edited by Alexander Raistrick in worldgen/assets/creatures/animation/run_cycle.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 7cc5299abf9d9e23f501dce3d06382e641af8a2f -Author: Pvl-bot -Date: Fri Jun 30 03:11:15 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/creatures/animation/driver_repeated.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 581b3570e3ae34d7f8e10a5bfb92c751b09662a7 -Author: Lingjie Mei -Date: Fri Jun 30 03:11:15 2023 -0400 - - Add 41 lines last edited by Lingjie Mei in worldgen/assets/creatures/animation/driver_repeated.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit cae6979bd8a30edda68702745d8aa97b1a9d9163 -Author: Pvl-bot -Date: Fri Jun 30 03:11:15 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/creatures/nodegroups/math.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 7015e0fe8eada1ca631ca8a18bc1dffa6be3d8a0 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:15 2023 -0400 - - Add 197 lines last edited by Alexander Raistrick in worldgen/assets/creatures/nodegroups/math.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit afbdf4fad5af6952ad7d1ef1ae126fb5f149a517 -Author: Pvl-bot -Date: Fri Jun 30 03:11:15 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/creatures/nodegroups/curve.py - - Commit made automatically to show authorship. This version of the code is not usable. 
- -commit 18660518145487759a126605d34f361e67b2e11b -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:15 2023 -0400 - - Add 364 lines last edited by Alexander Raistrick in worldgen/assets/creatures/nodegroups/curve.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 388590e7445d3b0e5df176f4e96f4378a3bee7ed -Author: Pvl-bot -Date: Fri Jun 30 03:11:15 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/creatures/nodegroups/geometry.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit bf5f977b989b51f8ab1067cd6bb501747c027daa -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:15 2023 -0400 - - Add 131 lines last edited by Alexander Raistrick in worldgen/assets/creatures/nodegroups/geometry.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 568cfb7f988abe73c033b8cc09884a5260eff572 -Author: Pvl-bot -Date: Fri Jun 30 03:11:15 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/creatures/nodegroups/attach.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 703c735d941edcb4fc2490cf2240bcc51aab95d9 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:15 2023 -0400 - - Add 228 lines last edited by Alexander Raistrick in worldgen/assets/creatures/nodegroups/attach.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 871f3aacf36c45fc86892a36f1c96df0ad15a771 -Author: Pvl-bot -Date: Fri Jun 30 03:11:15 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/creatures/nodegroups/shader.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit a032cb3f81478f0090d466506dff6fdc1b3a9ed4 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:14 2023 -0400 - - Add 180 lines last edited by Alexander Raistrick in worldgen/assets/creatures/nodegroups/shader.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit e6b76009f64e8d8fc2a4e3acc13cbb744ad912a1 -Author: Pvl-bot -Date: Fri Jun 30 03:11:14 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/creatures/nodegroups/hair.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit e9b0436aea4e60bcefa5ff1f9862da5316172c78 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:14 2023 -0400 - - Add 288 lines last edited by Alexander Raistrick in worldgen/assets/creatures/nodegroups/hair.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit e0ba75a6943c7df0a362dec5213c7ffc8456a2c1 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:14 2023 -0400 - - Add 1 lines last edited by Alexander Raistrick in worldgen/assets/creatures/nodegroups/__init__.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 8af483da9847d95fcead545a1319bca7dbc5e76c -Author: Pvl-bot -Date: Fri Jun 30 03:11:14 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/assets/creatures/nodegroups/sculpt_v1.py - - Commit made automatically to show authorship. This version of the code is not usable. 
- -commit ab80b51d6b3ecb335a139d00b6cb762f66939ad1 -Author: Pvl-bot -Date: Fri Jun 30 03:11:08 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/surfaces/scatters/lichen.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 97ae73a84be1ea985395dd6578a0a56b21a968f7 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:07 2023 -0400 - - Add 19 lines last edited by Alexander Raistrick in worldgen/surfaces/scatters/lichen.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 082d9afda3cc34bf7821226ebbe1b81fd35a4377 -Author: Lingjie Mei -Date: Fri Jun 30 03:11:07 2023 -0400 - - Add 90 lines last edited by Lingjie Mei in worldgen/surfaces/scatters/lichen.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit f1bda4c09519c36df4ce2414d157067f5602a678 -Author: Zeyu Ma -Date: Fri Jun 30 03:11:07 2023 -0400 - - Add 1 lines last edited by Zeyu Ma in worldgen/surfaces/scatters/ground_twigs.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 0b37dd3a2648476278a1a0153a93214d8a2ad860 -Author: Pvl-bot -Date: Fri Jun 30 03:11:07 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/surfaces/scatters/ground_twigs.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 4fbf52731c8a475cf33631c83f6f3512e9d360d6 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:07 2023 -0400 - - Add 33 lines last edited by Alexander Raistrick in worldgen/surfaces/scatters/ground_twigs.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 2be81b718de7a94d0d8e7e1eac1915119bce680e -Author: Pvl-bot -Date: Fri Jun 30 03:11:07 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/surfaces/scatters/seaweed.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit cc56eb820f432a2d0409cb153d9504ef02abbfc1 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:07 2023 -0400 - - Add 10 lines last edited by Alexander Raistrick in worldgen/surfaces/scatters/seaweed.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit a577887772586274c9684a0534b6ae8bf5b7a921 -Author: Lingjie Mei -Date: Fri Jun 30 03:11:07 2023 -0400 - - Add 15 lines last edited by Lingjie Mei in worldgen/surfaces/scatters/seaweed.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 936530475c45f7d004b3572194215a0c3e0ca5fe -Author: Pvl-bot -Date: Fri Jun 30 03:11:07 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/surfaces/scatters/urchin.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit be1b1900296a03d64eb6acc2fc102bb0653029ec -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:07 2023 -0400 - - Add 12 lines last edited by Alexander Raistrick in worldgen/surfaces/scatters/urchin.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 5bc5245bd27d0e939ebce04b5d7e05199096720f -Author: Lingjie Mei -Date: Fri Jun 30 03:11:07 2023 -0400 - - Add 17 lines last edited by Lingjie Mei in worldgen/surfaces/scatters/urchin.py - - Commit made automatically to show authorship. This version of the code is not usable. 
- -commit 4532bdf203d1b64455bf9d54ab4acef5d80720d4 -Author: Pvl-bot -Date: Fri Jun 30 03:11:07 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/surfaces/scatters/ground_leaves.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 2bb902609d94036c4cae7c68493e1fd4c5d32add -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:07 2023 -0400 - - Add 18 lines last edited by Alexander Raistrick in worldgen/surfaces/scatters/ground_leaves.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 11bfcc8068c4a2b0d78a9764420389fe81860fd2 -Author: Pvl-bot -Date: Fri Jun 30 03:11:07 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/surfaces/scatters/mushroom.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit d95a29b2cf66b2b247b826c88dbb68cfa3603c3f -Author: Lingjie Mei -Date: Fri Jun 30 03:11:07 2023 -0400 - - Add 86 lines last edited by Lingjie Mei in worldgen/surfaces/scatters/mushroom.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 5dfeab892a9b0f8113246e4dde1557c08eae7d39 -Author: Pvl-bot -Date: Fri Jun 30 03:11:07 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/surfaces/templates/twocolorz.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit e1de6bc9d0b478cab950dd3d29f90ac39aa063a2 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:07 2023 -0400 - - Add 9 lines last edited by Alexander Raistrick in worldgen/surfaces/templates/twocolorz.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 2626efd66d831c81e0ddbc47ec92f48de6d8c5a5 -Author: Mingzhe Wang -Date: Fri Jun 30 03:11:07 2023 -0400 - - Add 58 lines last edited by Mingzhe Wang in worldgen/surfaces/templates/twocolorz.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 7dcb954913c20e805c5799593997ac4f70d71d98 -Author: Pvl-bot -Date: Fri Jun 30 03:11:07 2023 -0400 - - Add 7 lines last edited by Pvl-bot in worldgen/surfaces/templates/lava.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit a832b811c59b8e494e1fab2ee2605f43ef37820f -Author: Mingzhe Wang -Date: Fri Jun 30 03:11:07 2023 -0400 - - Add 21 lines last edited by Mingzhe Wang in worldgen/surfaces/templates/lava.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 19130a7275d2bb3153b631839433c45f57a471b6 -Author: Ankit Goyal -Date: Fri Jun 30 03:11:07 2023 -0400 - - Add 211 lines last edited by Ankit Goyal in worldgen/surfaces/templates/lava.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 5de0ce1e3adb376b56002a241fdbc24781134623 -Author: Zeyu Ma -Date: Fri Jun 30 03:11:07 2023 -0400 - - Add 222 lines last edited by Zeyu Ma in worldgen/surfaces/templates/lava.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 2bf5593b668deb69f66d9a50fcb5e4b5efbfb13f -Author: Lahav Lipson -Date: Fri Jun 30 03:11:07 2023 -0400 - - Add 1 lines last edited by Lahav Lipson in worldgen/surfaces/templates/dirt.py - - Commit made automatically to show authorship. This version of the code is not usable. 
- -commit 84572ca988b1418c201c4e3db11c24b5b274fff5 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:07 2023 -0400 - - Add 2 lines last edited by Alexander Raistrick in worldgen/surfaces/templates/dirt.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 95c450f91b5db1647d2840242a95072edfcca3c4 -Author: Pvl-bot -Date: Fri Jun 30 03:11:07 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/surfaces/templates/dirt.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 7df0a4da59d281c8dfb6f76ba7450fdfb5719ae9 -Author: Ankit Goyal -Date: Fri Jun 30 03:11:07 2023 -0400 - - Add 22 lines last edited by Ankit Goyal in worldgen/surfaces/templates/dirt.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit c286e2d5f4f33db3460e7ff9f28d0ed861c637b7 -Author: Mingzhe Wang -Date: Fri Jun 30 03:11:07 2023 -0400 - - Add 69 lines last edited by Mingzhe Wang in worldgen/surfaces/templates/dirt.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 8714d43a0147c72f3ea900b3dd2749fd7e595a71 -Author: Zeyu Ma -Date: Fri Jun 30 03:11:07 2023 -0400 - - Add 193 lines last edited by Zeyu Ma in worldgen/surfaces/templates/dirt.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 6f33279f4543e5c4e1aa8670ba18854df6c52677 -Author: Pvl-bot -Date: Fri Jun 30 03:11:07 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/surfaces/templates/scale.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit eb663783aee861d323ce1eb681815dc8aa449690 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:07 2023 -0400 - - Add 9 lines last edited by Alexander Raistrick in worldgen/surfaces/templates/scale.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit da71467a56ce43daffdadb4c58a76aeb18cd8ab9 -Author: Mingzhe Wang -Date: Fri Jun 30 03:11:07 2023 -0400 - - Add 340 lines last edited by Mingzhe Wang in worldgen/surfaces/templates/scale.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 40561175de38a7bec5e882faf781b30fb73bbd28 -Author: Pvl-bot -Date: Fri Jun 30 03:11:07 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/surfaces/templates/spot_sparse_attr.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 993046f404af7f6a2cd2a5701eff21f93ea5e9e9 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:07 2023 -0400 - - Add 17 lines last edited by Alexander Raistrick in worldgen/surfaces/templates/spot_sparse_attr.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit db7ae05407c59f260f199fae57f187ed82c948e6 -Author: Mingzhe Wang -Date: Fri Jun 30 03:11:07 2023 -0400 - - Add 123 lines last edited by Mingzhe Wang in worldgen/surfaces/templates/spot_sparse_attr.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 0e8f8602df3be85d094a5925591f3541be5d9cd3 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:07 2023 -0400 - - Add 3 lines last edited by Alexander Raistrick in worldgen/surfaces/templates/ice.py - - Commit made automatically to show authorship. This version of the code is not usable. 
- -commit 62b8d858941f61f6c1118e5ef1abe1332004ef69 -Author: Ankit Goyal -Date: Fri Jun 30 03:11:07 2023 -0400 - - Add 5 lines last edited by Ankit Goyal in worldgen/surfaces/templates/ice.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit c410b11f1c89a32f8123b1050044df124a825c50 -Author: Pvl-bot -Date: Fri Jun 30 03:11:07 2023 -0400 - - Add 5 lines last edited by Pvl-bot in worldgen/surfaces/templates/ice.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 13ea07fc4ea3ecf1d48a67a7469734e5b806acee -Author: Zeyu Ma -Date: Fri Jun 30 03:11:07 2023 -0400 - - Add 24 lines last edited by Zeyu Ma in worldgen/surfaces/templates/ice.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 94f70bf246b2e06c4887315b742c6dc45d61f896 -Author: Hongyu Wen -Date: Fri Jun 30 03:11:07 2023 -0400 - - Add 78 lines last edited by Hongyu Wen in worldgen/surfaces/templates/ice.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 8ba7562b155bdc69318fa23f29a56a02c3db6eb7 -Author: Mingzhe Wang -Date: Fri Jun 30 03:11:07 2023 -0400 - - Add 3 lines last edited by Mingzhe Wang in worldgen/surfaces/templates/sandstone.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 8868ad4c51fba965fe6c328231d00fbcf542e957 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:07 2023 -0400 - - Add 6 lines last edited by Alexander Raistrick in worldgen/surfaces/templates/sandstone.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 3ed71e51a5107ec50e498c30fbe220511f538fa2 -Author: Pvl-bot -Date: Fri Jun 30 03:11:07 2023 -0400 - - Add 8 lines last edited by Pvl-bot in worldgen/surfaces/templates/sandstone.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 0823da266f77c0d267824a7892d00377352d25c7 -Author: Zeyu Ma -Date: Fri Jun 30 03:11:06 2023 -0400 - - Add 105 lines last edited by Zeyu Ma in worldgen/surfaces/templates/sandstone.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit c8f4f0f63cc26c9602ade5b9d7e9e38cbd87cab9 -Author: Ankit Goyal -Date: Fri Jun 30 03:11:06 2023 -0400 - - Add 495 lines last edited by Ankit Goyal in worldgen/surfaces/templates/sandstone.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit c40941d34c7058aca12acfdcb4cd9e6f2d34c099 -Author: Pvl-bot -Date: Fri Jun 30 03:11:06 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/surfaces/templates/nose.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 2ac07cb2577a26bb36169cc172df8dafbfd0f275 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:06 2023 -0400 - - Add 37 lines last edited by Alexander Raistrick in worldgen/surfaces/templates/nose.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit e53fe486f1db0f37be55d2e8780c7ba532978c91 -Author: Lahav Lipson -Date: Fri Jun 30 03:11:06 2023 -0400 - - Add 1 lines last edited by Lahav Lipson in worldgen/surfaces/templates/two_color_spots.py - - Commit made automatically to show authorship. This version of the code is not usable. 
- -commit 3bb8a57868c9eaea39f6eb9ca1257e082051f5b9 -Author: Pvl-bot -Date: Fri Jun 30 03:11:06 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/surfaces/templates/two_color_spots.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 249dc2bbde40649de6a4785b88abe871d6f6306f -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:06 2023 -0400 - - Add 12 lines last edited by Alexander Raistrick in worldgen/surfaces/templates/two_color_spots.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 19ee97418a082358d5ced514913f59b0bbec5de7 -Author: Mingzhe Wang -Date: Fri Jun 30 03:11:06 2023 -0400 - - Add 105 lines last edited by Mingzhe Wang in worldgen/surfaces/templates/two_color_spots.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 43b3ee6b5cd2fbc574ce4d7bc511b36fad6e8a14 -Author: Pvl-bot -Date: Fri Jun 30 03:11:06 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/surfaces/templates/atmosphere_light_haze.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 56aac172c81d12c31ebb5d809f53ee0a9fcd471e -Author: Zeyu Ma -Date: Fri Jun 30 03:11:06 2023 -0400 - - Add 8 lines last edited by Zeyu Ma in worldgen/surfaces/templates/atmosphere_light_haze.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 3417c2aa9131ebd82182a185b12b72fc11813794 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:06 2023 -0400 - - Add 18 lines last edited by Alexander Raistrick in worldgen/surfaces/templates/atmosphere_light_haze.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 21500bc972b5b35cf73b9071f851cdb0e9d23a35 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:06 2023 -0400 - - Add 1 lines last edited by Alexander Raistrick in worldgen/surfaces/templates/mud.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 1bf7a504ae12793322d6ec04a7aba7b72a698a13 -Author: Pvl-bot -Date: Fri Jun 30 03:11:06 2023 -0400 - - Add 5 lines last edited by Pvl-bot in worldgen/surfaces/templates/mud.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit fc20e0bf5ee92225fa459d80a00b19d17f3e7b30 -Author: Ankit Goyal -Date: Fri Jun 30 03:11:06 2023 -0400 - - Add 7 lines last edited by Ankit Goyal in worldgen/surfaces/templates/mud.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit edb49f03b0cd77ab65db3eb7c0d96e5ba4421414 -Author: Zeyu Ma -Date: Fri Jun 30 03:11:06 2023 -0400 - - Add 15 lines last edited by Zeyu Ma in worldgen/surfaces/templates/mud.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 77e3401b3ced84c4b51a137714e3b17fa8a5f3a7 -Author: Mingzhe Wang -Date: Fri Jun 30 03:11:06 2023 -0400 - - Add 119 lines last edited by Mingzhe Wang in worldgen/surfaces/templates/mud.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 07b27d9bfe975292cc60443495097fe6837344a4 -Author: Pvl-bot -Date: Fri Jun 30 03:11:06 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/surfaces/templates/eyeball.py - - Commit made automatically to show authorship. This version of the code is not usable. 
- -commit 52ad554eaa91da7ce2f60edbbc662946c939e8c5 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:06 2023 -0400 - - Add 33 lines last edited by Alexander Raistrick in worldgen/surfaces/templates/eyeball.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit ed8e663b99b4f81dcf84a408fe236a15d22c4381 -Author: Mingzhe Wang -Date: Fri Jun 30 03:11:06 2023 -0400 - - Add 56 lines last edited by Mingzhe Wang in worldgen/surfaces/templates/eyeball.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 1f67e417f3d66a99e546e3a5d5fdd1d5abaa87cc -Author: Pvl-bot -Date: Fri Jun 30 03:11:06 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/surfaces/templates/chunkyrock.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 0cad2be337fb4cadf91292be2915e72fa11185b5 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:06 2023 -0400 - - Add 7 lines last edited by Alexander Raistrick in worldgen/surfaces/templates/chunkyrock.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit c147245fe19241b43988c97bfefeee1ebf28e509 -Author: Zeyu Ma -Date: Fri Jun 30 03:11:06 2023 -0400 - - Add 67 lines last edited by Zeyu Ma in worldgen/surfaces/templates/chunkyrock.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 5728ad511e31ef25eeb02a6a3080b6e5253843f3 -Author: Mingzhe Wang -Date: Fri Jun 30 03:11:06 2023 -0400 - - Add 74 lines last edited by Mingzhe Wang in worldgen/surfaces/templates/chunkyrock.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 742c549d36b74d674447f2ada6a95ae41563e1af -Author: Pvl-bot -Date: Fri Jun 30 03:11:06 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/surfaces/templates/snake_scale.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit f28ced2bc3859a0bf89fedc14768cb996ae9acdb -Author: Hongyu Wen -Date: Fri Jun 30 03:11:06 2023 -0400 - - Add 272 lines last edited by Hongyu Wen in worldgen/surfaces/templates/snake_scale.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit f83f1cec4c515866c2bb9c77c0fe1d02e03f1abd -Author: Pvl-bot -Date: Fri Jun 30 03:11:06 2023 -0400 - - Add 7 lines last edited by Pvl-bot in worldgen/surfaces/templates/bark_birch.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 7b285c9fb5e04913ae9a448ee1b64893695c49d0 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:06 2023 -0400 - - Add 14 lines last edited by Alexander Raistrick in worldgen/surfaces/templates/bark_birch.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 5bb02f63430703016e71623952b24834c3fc5006 -Author: Yiming Zuo -Date: Fri Jun 30 03:11:06 2023 -0400 - - Add 190 lines last edited by Yiming Zuo in worldgen/surfaces/templates/bark_birch.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit efdf2677c9c41123c872a076758ba4280454fb7b -Author: Lahav Lipson -Date: Fri Jun 30 03:11:06 2023 -0400 - - Add 1 lines last edited by Lahav Lipson in worldgen/surfaces/templates/stone.py - - Commit made automatically to show authorship. This version of the code is not usable. 
- -commit d60f0449b764de403b8543058323d9173b8ca374 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:06 2023 -0400 - - Add 2 lines last edited by Alexander Raistrick in worldgen/surfaces/templates/stone.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit fce03dfa1b4cb99e59818b71789d3d64be864048 -Author: Pvl-bot -Date: Fri Jun 30 03:11:06 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/surfaces/templates/stone.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 7ed0c1f3053b80273abe477e83b660f7452d8522 -Author: Mingzhe Wang -Date: Fri Jun 30 03:11:06 2023 -0400 - - Add 12 lines last edited by Mingzhe Wang in worldgen/surfaces/templates/stone.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 244d6efe25d0514e6e195becff30e070d81dd895 -Author: Ankit Goyal -Date: Fri Jun 30 03:11:06 2023 -0400 - - Add 18 lines last edited by Ankit Goyal in worldgen/surfaces/templates/stone.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit ded02940ca08a46ff9e4ca8a0dcd03ec67359436 -Author: Zeyu Ma -Date: Fri Jun 30 03:11:06 2023 -0400 - - Add 263 lines last edited by Zeyu Ma in worldgen/surfaces/templates/stone.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit fe4158ef4686b509c3d6005b6ce1f49d0919b314 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:06 2023 -0400 - - Add 1 lines last edited by Alexander Raistrick in worldgen/surfaces/templates/cracked_ground.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit c5b890bb626f6511c20b8fedfc92f3e0a64782c1 -Author: Ankit Goyal -Date: Fri Jun 30 03:11:06 2023 -0400 - - Add 5 lines last edited by Ankit Goyal in worldgen/surfaces/templates/cracked_ground.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 593a02ca5c485a72f414dee2fbd754e4967f4d8d -Author: Pvl-bot -Date: Fri Jun 30 03:11:06 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/surfaces/templates/cracked_ground.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 840366f9a8eb0606b5a6bebb3c089dc7b33036ba -Author: Zeyu Ma -Date: Fri Jun 30 03:11:06 2023 -0400 - - Add 18 lines last edited by Zeyu Ma in worldgen/surfaces/templates/cracked_ground.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 245302a8d663bbfc711323978f3ddb9f9cd792ad -Author: Yiming Zuo -Date: Fri Jun 30 03:11:05 2023 -0400 - - Add 166 lines last edited by Yiming Zuo in worldgen/surfaces/templates/cracked_ground.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 0cd93371efd089e8c14af843a91c5ab9cbf90a9a -Author: Pvl-bot -Date: Fri Jun 30 03:11:05 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/surfaces/templates/simple_brownish.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 5ced5fe6e09b268998565717d8d5f219ffd0bb37 -Author: Beining Han -Date: Fri Jun 30 03:11:05 2023 -0400 - - Add 27 lines last edited by Beining Han in worldgen/surfaces/templates/simple_brownish.py - - Commit made automatically to show authorship. This version of the code is not usable. 
- -commit 858d97c8a72e52e297652000e1abad40a07d56bd -Author: Pvl-bot -Date: Fri Jun 30 03:11:05 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/surfaces/templates/bone.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit a06ab6344ed2f1d54011379b6307572728d25acf -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:05 2023 -0400 - - Add 20 lines last edited by Alexander Raistrick in worldgen/surfaces/templates/bone.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 85b9b0bf74988ea77f6ec7f6aa95b2d0188017fe -Author: Yihan Wang -Date: Fri Jun 30 03:11:05 2023 -0400 - - Add 60 lines last edited by Yihan Wang in worldgen/surfaces/templates/bone.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 748223bf29b0841828f95ec341419241b3b6b7e8 -Author: Pvl-bot -Date: Fri Jun 30 03:11:05 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/surfaces/templates/tongue.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 66a0c18ca1d974ddedd541f5d7ec3f111076771a -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:05 2023 -0400 - - Add 29 lines last edited by Alexander Raistrick in worldgen/surfaces/templates/tongue.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 22406b9b177770902b224ea76e9969167822ef13 -Author: Pvl-bot -Date: Fri Jun 30 03:11:05 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/surfaces/templates/soil.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 2258f02d77ad425ff35f57d744e4f69cb70afd5c -Author: Zeyu Ma -Date: Fri Jun 30 03:11:05 2023 -0400 - - Add 141 lines last edited by Zeyu Ma in worldgen/surfaces/templates/soil.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 92203a38b2e23960266ee0ba88ce1156e3ece72b -Author: Ankit Goyal -Date: Fri Jun 30 03:11:05 2023 -0400 - - Add 182 lines last edited by Ankit Goyal in worldgen/surfaces/templates/soil.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit a60c5268b60e9e800f6aac12ee0cdda508875e02 -Author: Lahav Lipson -Date: Fri Jun 30 03:11:05 2023 -0400 - - Add 1 lines last edited by Lahav Lipson in worldgen/surfaces/templates/slimy.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 1b0af9b19ddaf011fc8ac10d89dc83f10471673f -Author: Pvl-bot -Date: Fri Jun 30 03:11:05 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/surfaces/templates/slimy.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit ea1f5013b2ba07b276f9a7054860d5315bbd4001 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:05 2023 -0400 - - Add 14 lines last edited by Alexander Raistrick in worldgen/surfaces/templates/slimy.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 47c34d55910595d556dd70bc662578b4f7f20781 -Author: Mingzhe Wang -Date: Fri Jun 30 03:11:05 2023 -0400 - - Add 106 lines last edited by Mingzhe Wang in worldgen/surfaces/templates/slimy.py - - Commit made automatically to show authorship. This version of the code is not usable. 
- -commit 33c4a6b23dafcd49649540d51efd55ab25318200 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:05 2023 -0400 - - Add 2 lines last edited by Alexander Raistrick in worldgen/surfaces/templates/snake_shaders.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 4b88edd6b309959551c8b62c2d221a883e8b30e7 -Author: Pvl-bot -Date: Fri Jun 30 03:11:05 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/surfaces/templates/snake_shaders.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 397201770a117f013ca3f0bc31a6207d976e7a0e -Author: Hongyu Wen -Date: Fri Jun 30 03:11:05 2023 -0400 - - Add 297 lines last edited by Hongyu Wen in worldgen/surfaces/templates/snake_shaders.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 429f92562367d14d3b0ed5401e573b428a3956bb -Author: Pvl-bot -Date: Fri Jun 30 03:11:05 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/surfaces/templates/aluminumdisp2tut.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 12973dec061d29d63d767c68b2326a1f93ff4043 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:05 2023 -0400 - - Add 6 lines last edited by Alexander Raistrick in worldgen/surfaces/templates/aluminumdisp2tut.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit cae0380c81ed2daa2098145454e79f8d5ebfb72c -Author: Mingzhe Wang -Date: Fri Jun 30 03:11:05 2023 -0400 - - Add 192 lines last edited by Mingzhe Wang in worldgen/surfaces/templates/aluminumdisp2tut.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit c2aa38b56e3249c3b98fe64707988f4264ff1e96 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:05 2023 -0400 - - Add 1 lines last edited by Alexander Raistrick in worldgen/surfaces/templates/reptile_brown_circle_attr.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 283f914b642e8d8b601ecf32e26577e63fb7dabc -Author: Pvl-bot -Date: Fri Jun 30 03:11:05 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/surfaces/templates/reptile_brown_circle_attr.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 336a46056b01c412ff7f102c362e894915b8316b -Author: Mingzhe Wang -Date: Fri Jun 30 03:11:05 2023 -0400 - - Add 299 lines last edited by Mingzhe Wang in worldgen/surfaces/templates/reptile_brown_circle_attr.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit e1a4701812225deb0d3f8304a365e818e9971c42 -Author: Pvl-bot -Date: Fri Jun 30 03:11:05 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/surfaces/templates/face_size_visualizer.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit bd794a3d92040ae93f13f556b50982f04f4c9464 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:05 2023 -0400 - - Add 41 lines last edited by Alexander Raistrick in worldgen/surfaces/templates/face_size_visualizer.py - - Commit made automatically to show authorship. This version of the code is not usable. 
- -commit 9063d366099d7ea59448bc6f144628f7d2dc8650 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:05 2023 -0400 - - Add 5 lines last edited by Alexander Raistrick in worldgen/surfaces/templates/bird.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit c94bc446ac855c8b944834d72b73cdcd14a07367 -Author: Pvl-bot -Date: Fri Jun 30 03:11:05 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/surfaces/templates/bird.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit d8e7c2968ed0c0b0c03c55f5e2244fae1440ee56 -Author: Beining Han -Date: Fri Jun 30 03:11:05 2023 -0400 - - Add 45 lines last edited by Beining Han in worldgen/surfaces/templates/bird.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit af30523c8bbaa2f983a1423aa22898fd02d616af -Author: Mingzhe Wang -Date: Fri Jun 30 03:11:05 2023 -0400 - - Add 452 lines last edited by Mingzhe Wang in worldgen/surfaces/templates/bird.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit daf294c3a329028db79fc941e215223e87dae9f0 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:05 2023 -0400 - - Add 1 lines last edited by Alexander Raistrick in worldgen/surfaces/templates/__init__.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 21288c5052b103f3df8d97238aebc456749919a0 -Author: Lahav Lipson -Date: Fri Jun 30 03:11:05 2023 -0400 - - Add 3 lines last edited by Lahav Lipson in worldgen/surfaces/templates/sand.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit cc2923879bda135284e6578ece95126c978e62f0 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:05 2023 -0400 - - Add 3 lines last edited by Alexander Raistrick in worldgen/surfaces/templates/sand.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit c83d7d421ca4057ca8f819fea714d4e3e8414735 -Author: Pvl-bot -Date: Fri Jun 30 03:11:05 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/surfaces/templates/sand.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 515dff4e6868079abeabf58d3d45bf038a46633f -Author: Zeyu Ma -Date: Fri Jun 30 03:11:05 2023 -0400 - - Add 126 lines last edited by Zeyu Ma in worldgen/surfaces/templates/sand.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit b11ed0fcf5101230cc173c8664013ef3597d3490 -Author: Pvl-bot -Date: Fri Jun 30 03:11:05 2023 -0400 - - Add 5 lines last edited by Pvl-bot in worldgen/surfaces/templates/bark.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 59277ff73496b1c9fdd704d44f94a2cd1aec0afc -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:05 2023 -0400 - - Add 7 lines last edited by Alexander Raistrick in worldgen/surfaces/templates/bark.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 08506e74726ee08318a3247d6be378847234fba5 -Author: Mingzhe Wang -Date: Fri Jun 30 03:11:05 2023 -0400 - - Add 123 lines last edited by Mingzhe Wang in worldgen/surfaces/templates/bark.py - - Commit made automatically to show authorship. This version of the code is not usable. 
- -commit 7626580c07a4a85722ddc4bd9a95257beaabaec5 -Author: Pvl-bot -Date: Fri Jun 30 03:11:05 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/surfaces/templates/fishfin.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit ccd4336df8833556a28c990fad30c6a118c3d89b -Author: Mingzhe Wang -Date: Fri Jun 30 03:11:05 2023 -0400 - - Add 262 lines last edited by Mingzhe Wang in worldgen/surfaces/templates/fishfin.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 46fbc73a8dab688e229df7bb082ac980a78aad44 -Author: Pvl-bot -Date: Fri Jun 30 03:11:04 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/surfaces/templates/three_color_spots.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit d72789c9ac18f94d85d9e31b90ccfde70888c654 -Author: Lahav Lipson -Date: Fri Jun 30 03:11:04 2023 -0400 - - Add 6 lines last edited by Lahav Lipson in worldgen/surfaces/templates/three_color_spots.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 5f6e7a46447a82ece85257bd78043f850a5f9280 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:04 2023 -0400 - - Add 18 lines last edited by Alexander Raistrick in worldgen/surfaces/templates/three_color_spots.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 86393c66f85da428b5fe0a58bbb000172998642d -Author: Mingzhe Wang -Date: Fri Jun 30 03:11:04 2023 -0400 - - Add 161 lines last edited by Mingzhe Wang in worldgen/surfaces/templates/three_color_spots.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 6c6c24ebf3ae52a4b87c2fb4a61b9faa2b4067c4 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:04 2023 -0400 - - Add 1 lines last edited by Alexander Raistrick in worldgen/surfaces/templates/reptile_two_color_attr.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 4e79be179d48280c9fa3e2c0de80b158e5a3e0cf -Author: Pvl-bot -Date: Fri Jun 30 03:11:04 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/surfaces/templates/reptile_two_color_attr.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 78972001d77851b00d95bcf2310788b93c07ad0e -Author: Mingzhe Wang -Date: Fri Jun 30 03:11:04 2023 -0400 - - Add 234 lines last edited by Mingzhe Wang in worldgen/surfaces/templates/reptile_two_color_attr.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit a02cc9fb7ec8b77fce9507c010441f9265dce633 -Author: Pvl-bot -Date: Fri Jun 30 03:11:04 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/surfaces/templates/simple_whitish.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 971b77a06e38acda64c494cd0ae75c7cf0f7f69b -Author: Beining Han -Date: Fri Jun 30 03:11:04 2023 -0400 - - Add 28 lines last edited by Beining Han in worldgen/surfaces/templates/simple_whitish.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 71b9d1a3bdf2d5a00c3a12c7bf1c3acafe03a4dd -Author: Pvl-bot -Date: Fri Jun 30 03:11:04 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/surfaces/templates/simple_greenery.py - - Commit made automatically to show authorship. This version of the code is not usable. 
- -commit 74e199d97d72f29a21b19ec86664eb23cc6ba062 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:04 2023 -0400 - - Add 38 lines last edited by Alexander Raistrick in worldgen/surfaces/templates/simple_greenery.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 2ff5d9681c5afe16cfffa50a9425c717390e4c43 -Author: Pvl-bot -Date: Fri Jun 30 03:11:04 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/surfaces/templates/giraffe_attr.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit a8c0a6953766bca0f750f116519e65b00934ac51 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:04 2023 -0400 - - Add 15 lines last edited by Alexander Raistrick in worldgen/surfaces/templates/giraffe_attr.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 76c33014a23d6d267821d31e34e1234dcb25db41 -Author: Mingzhe Wang -Date: Fri Jun 30 03:11:04 2023 -0400 - - Add 81 lines last edited by Mingzhe Wang in worldgen/surfaces/templates/giraffe_attr.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 6297622c1fbff5264ae12c317140199728961720 -Author: Hongyu Wen -Date: Fri Jun 30 03:11:04 2023 -0400 - - Add 70 lines last edited by Hongyu Wen in worldgen/surfaces/templates/beak.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 0193a43b3d6c0e863e08cd93958cbdaa8ac2cec0 -Author: Pvl-bot -Date: Fri Jun 30 03:11:04 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/surfaces/templates/wood.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 3f80c8ce4e0b229b5e786949706c5e9d2566ac46 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:04 2023 -0400 - - Add 6 lines last edited by Alexander Raistrick in worldgen/surfaces/templates/wood.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit e4b2ac26e109e38377804a59f193c99f080b56be -Author: Mingzhe Wang -Date: Fri Jun 30 03:11:04 2023 -0400 - - Add 77 lines last edited by Mingzhe Wang in worldgen/surfaces/templates/wood.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit f55642e377dd22fc6bdc8dfde73f6d898574dfc5 -Author: Pvl-bot -Date: Fri Jun 30 03:11:04 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/surfaces/templates/tiger_attr.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 7cd2fd5e127a043a28ec216c43097685a6302e86 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:04 2023 -0400 - - Add 47 lines last edited by Alexander Raistrick in worldgen/surfaces/templates/tiger_attr.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit bc2a84190bbe4418be476e0a543b65dd4909c015 -Author: Mingzhe Wang -Date: Fri Jun 30 03:11:04 2023 -0400 - - Add 104 lines last edited by Mingzhe Wang in worldgen/surfaces/templates/tiger_attr.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 99bc849ddc856c8c1e653877481860d79efe5550 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:04 2023 -0400 - - Add 1 lines last edited by Alexander Raistrick in worldgen/surfaces/templates/reptile_gray_attr.py - - Commit made automatically to show authorship. This version of the code is not usable. 
- -commit d7d7783347361f5b0ecc762c490af4c9be899eec -Author: Pvl-bot -Date: Fri Jun 30 03:11:04 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/surfaces/templates/reptile_gray_attr.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 6f0e0fee6d8d667d59e65eec2237ff13e70402ed -Author: Mingzhe Wang -Date: Fri Jun 30 03:11:04 2023 -0400 - - Add 175 lines last edited by Mingzhe Wang in worldgen/surfaces/templates/reptile_gray_attr.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 9f4c37e6bd03f81c5262dbb2f9e7ac20268362be -Author: Lahav Lipson -Date: Fri Jun 30 03:11:04 2023 -0400 - - Add 3 lines last edited by Lahav Lipson in worldgen/surfaces/templates/mountain.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit a0c0388acdd8b4845ece1657542c9332d46f86d5 -Author: Pvl-bot -Date: Fri Jun 30 03:11:04 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/surfaces/templates/mountain.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 215bcbce239f37a6b83658d9f1e7eebdfda95bfc -Author: Ankit Goyal -Date: Fri Jun 30 03:11:04 2023 -0400 - - Add 11 lines last edited by Ankit Goyal in worldgen/surfaces/templates/mountain.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 37ac68295b2722fc5a124048e60cafc6f8776032 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:04 2023 -0400 - - Add 16 lines last edited by Alexander Raistrick in worldgen/surfaces/templates/mountain.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 7fde8bf5d46fa12ac131979f599534d85c5a1d1e -Author: Zeyu Ma -Date: Fri Jun 30 03:11:04 2023 -0400 - - Add 280 lines last edited by Zeyu Ma in worldgen/surfaces/templates/mountain.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit f37ac2d07b2bc4d28b9aea57037ac66f8a8f1129 -Author: Pvl-bot -Date: Fri Jun 30 03:11:04 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/surfaces/templates/fishbody.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 6c4c2aee94c0f0174d4ad45858506260138e7583 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:04 2023 -0400 - - Add 9 lines last edited by Alexander Raistrick in worldgen/surfaces/templates/fishbody.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit d058d72f82c478a67481cd0aa063ddbe13862154 -Author: Mingzhe Wang -Date: Fri Jun 30 03:11:04 2023 -0400 - - Add 1026 lines last edited by Mingzhe Wang in worldgen/surfaces/templates/fishbody.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 4da7a8ca09262e4b37a6a7fb41d9ab3e51bdbbfd -Author: Pvl-bot -Date: Fri Jun 30 03:11:04 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/surfaces/templates/grass_blade_texture.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 6de507903f46aa30f0184ca31cc0ff4779983673 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:04 2023 -0400 - - Add 19 lines last edited by Alexander Raistrick in worldgen/surfaces/templates/grass_blade_texture.py - - Commit made automatically to show authorship. This version of the code is not usable. 
- -commit efeaf45d739da469db923c7505b357b00551c473 -Author: Lahav Lipson -Date: Fri Jun 30 03:11:04 2023 -0400 - - Add 196 lines last edited by Lahav Lipson in worldgen/surfaces/templates/grass_blade_texture.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 908178dd4138fcc72f09b4590801290ea696a0ef -Author: Pvl-bot -Date: Fri Jun 30 03:11:04 2023 -0400 - - Add 4 lines last edited by Pvl-bot in worldgen/surfaces/templates/snow.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 5692341f99740a41e7c1cb18b8ac0eaefc7b0093 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:04 2023 -0400 - - Add 18 lines last edited by Alexander Raistrick in worldgen/surfaces/templates/snow.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 0c81bc41c66e3e2177ed9d3fc2ed5e9569422faf -Author: Zeyu Ma -Date: Fri Jun 30 03:11:04 2023 -0400 - - Add 112 lines last edited by Zeyu Ma in worldgen/surfaces/templates/snow.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 0135eba9709d7ed0c757de65b4aa55969f1cae47 -Author: Pvl-bot -Date: Fri Jun 30 03:11:04 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/surfaces/templates/fish_eye_shader.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 9ae7d54351b333d3656719d5a783495f0ca37309 -Author: Mingzhe Wang -Date: Fri Jun 30 03:11:04 2023 -0400 - - Add 207 lines last edited by Mingzhe Wang in worldgen/surfaces/templates/fish_eye_shader.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 9545c617180e22f16e1afce3ac29bdcf5d9a6909 -Author: Pvl-bot -Date: Fri Jun 30 03:11:04 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/surfaces/templates/chitin.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 97c40b03f7243aca18a01f781f742bb044a614cc -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:03 2023 -0400 - - Add 93 lines last edited by Alexander Raistrick in worldgen/surfaces/templates/chitin.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 2f75fb0e44e09e08ca36ae119c652f85fdd21dea -Author: Mingzhe Wang -Date: Fri Jun 30 03:11:03 2023 -0400 - - Add 125 lines last edited by Mingzhe Wang in worldgen/surfaces/templates/chitin.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 01928f544135739e9566d6664b2f27d5b192c765 -Author: Pvl-bot -Date: Fri Jun 30 03:11:03 2023 -0400 - - Add 7 lines last edited by Pvl-bot in worldgen/surfaces/templates/bark_random.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 9fcef67c55ac1a2ca7e4fcaef844982b9c5f4a29 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:03 2023 -0400 - - Add 23 lines last edited by Alexander Raistrick in worldgen/surfaces/templates/bark_random.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit af0bc2a1327c4199003a20a7b8626df0051cb32a -Author: Yiming Zuo -Date: Fri Jun 30 03:11:03 2023 -0400 - - Add 519 lines last edited by Yiming Zuo in worldgen/surfaces/templates/bark_random.py - - Commit made automatically to show authorship. This version of the code is not usable. 
- -commit 040e2dff6750c57e077743c6c928065ec706e1e6 -Author: Pvl-bot -Date: Fri Jun 30 03:11:03 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/surfaces/templates/horn.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 3ec32ee0468d6447970588c5a7c67166c93eee63 -Author: Yihan Wang -Date: Fri Jun 30 03:11:03 2023 -0400 - - Add 78 lines last edited by Yihan Wang in worldgen/surfaces/templates/horn.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit c612986c76dc68196dc0e3c26399b41517910e8b -Author: Pvl-bot -Date: Fri Jun 30 03:11:03 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/surfaces/templates/succulent.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit d1a0dc6aa9d8a421d189d60fdb2f9b14a121c06f -Author: Beining Han -Date: Fri Jun 30 03:11:03 2023 -0400 - - Add 314 lines last edited by Beining Han in worldgen/surfaces/templates/succulent.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 2dd7e80bcc27009bccf3f40414005795e10bafb3 -Author: Lingjie Mei -Date: Fri Jun 30 03:11:03 2023 -0400 - - Add 2 lines last edited by Lingjie Mei in worldgen/surfaces/templates/water.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 3d3423b0f0a2e2ac047d3b6df7fbea3ce5c6fd5f -Author: Pvl-bot -Date: Fri Jun 30 03:11:03 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/surfaces/templates/water.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 1e6e0995b60347cfc3b009c9470694523696c502 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:03 2023 -0400 - - Add 36 lines last edited by Alexander Raistrick in worldgen/surfaces/templates/water.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit f21f836c13a4e35ac2b3c5027e4e86f31caaf0ae -Author: Zeyu Ma -Date: Fri Jun 30 03:11:03 2023 -0400 - - Add 204 lines last edited by Zeyu Ma in worldgen/surfaces/templates/water.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit eeef5630194972d9c269ee0314fdfa97b117f31f -Author: Pvl-bot -Date: Fri Jun 30 03:11:03 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/surfaces/templates/basic_bsdf.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 1226702aed4bd2cbce0553a97374a570bfba269a -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:03 2023 -0400 - - Add 31 lines last edited by Alexander Raistrick in worldgen/surfaces/templates/basic_bsdf.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 7f7f25cc54096e7ae79b51c216e97f8e4f7c7858 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:03 2023 -0400 - - Add 1 lines last edited by Alexander Raistrick in worldgen/surfaces/templates/cobble_stone.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 0e3f0ae88f8633643ecc4864dadcbb39e3ae0ae4 -Author: Ankit Goyal -Date: Fri Jun 30 03:11:03 2023 -0400 - - Add 2 lines last edited by Ankit Goyal in worldgen/surfaces/templates/cobble_stone.py - - Commit made automatically to show authorship. This version of the code is not usable. 
- -commit 460612be65d44b99f0049d4be110f0e11970b07e -Author: Pvl-bot -Date: Fri Jun 30 03:11:03 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/surfaces/templates/cobble_stone.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit d9d67b502254cb77f0708c6551c9ba7749b6f97c -Author: Mingzhe Wang -Date: Fri Jun 30 03:11:03 2023 -0400 - - Add 25 lines last edited by Mingzhe Wang in worldgen/surfaces/templates/cobble_stone.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 1413534216d53a4263d8e88a5ce5a0bab2a2caae -Author: Zeyu Ma -Date: Fri Jun 30 03:11:03 2023 -0400 - - Add 122 lines last edited by Zeyu Ma in worldgen/surfaces/templates/cobble_stone.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 5769658ce26a28a0ab12aac34011eef9c6b6fba8 -Author: Lahav Lipson -Date: Fri Jun 30 03:11:03 2023 -0400 - - Add 1 lines last edited by Lahav Lipson in worldgen/surfaces/dev_script.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 1efad5e7a4e69ae1e53c51870e503375f19668f3 -Author: Pvl-bot -Date: Fri Jun 30 03:11:03 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/surfaces/dev_script.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 7e592e14b2a5f591a14405723d66a7ed3e0cd15b -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:03 2023 -0400 - - Add 37 lines last edited by Alexander Raistrick in worldgen/surfaces/dev_script.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 519c638ffb2b05ebd27d0cdb9236700bb3bf3390 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:03 2023 -0400 - - Add 5 lines last edited by Alexander Raistrick in worldgen/surfaces/surface_utils.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit e75d140c70512ca99fb3bbd05b0555dc0e1666b4 -Author: Pvl-bot -Date: Fri Jun 30 03:11:03 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/surfaces/surface_utils.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 787864c5e5c618f597bd828843b6ddb5dc280262 -Author: Lingjie Mei -Date: Fri Jun 30 03:11:03 2023 -0400 - - Add 47 lines last edited by Lingjie Mei in worldgen/surfaces/surface_utils.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit e1aa4e2a11de017e39640fb78aceb0a3218f37f8 -Author: Mingzhe Wang -Date: Fri Jun 30 03:11:03 2023 -0400 - - Add 124 lines last edited by Mingzhe Wang in worldgen/surfaces/surface_utils.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit fc8a12578b8156ab998f31e21a4dc79db038b5a4 -Author: Pvl-bot -Date: Fri Jun 30 03:11:03 2023 -0400 - - Add 6 lines last edited by Pvl-bot in worldgen/surfaces/surface_mixing_dev_script.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 3a2fc31dea210913b544d3d0aaab709ccacb072c -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:03 2023 -0400 - - Add 12 lines last edited by Alexander Raistrick in worldgen/surfaces/surface_mixing_dev_script.py - - Commit made automatically to show authorship. This version of the code is not usable. 
- -commit 19a7ee1060584d7f241c5fb29ae54047275198a5 -Author: Lahav Lipson -Date: Fri Jun 30 03:11:03 2023 -0400 - - Add 28 lines last edited by Lahav Lipson in worldgen/surfaces/surface_mixing_dev_script.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 72092a0958836a3b84683370fe4c764da74fd211 -Author: Mingzhe Wang -Date: Fri Jun 30 03:11:03 2023 -0400 - - Add 1 lines last edited by Mingzhe Wang in worldgen/surfaces/surface.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 602eedebb5f2cb9a173a310e5bb171b2ba4ade0d -Author: Karhan Kayan -Date: Fri Jun 30 03:11:03 2023 -0400 - - Add 4 lines last edited by Karhan Kayan in worldgen/surfaces/surface.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 17221888b1acce0dee2340771a4d711163359870 -Author: Hongyu Wen -Date: Fri Jun 30 03:11:03 2023 -0400 - - Add 8 lines last edited by Hongyu Wen in worldgen/surfaces/surface.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 9418ed9624d8961a5589431f09ba582f700dd275 -Author: Pvl-bot -Date: Fri Jun 30 03:11:03 2023 -0400 - - Add 8 lines last edited by Pvl-bot in worldgen/surfaces/surface.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 22a3e4d3629494903b83beefa72878d0f078b218 -Author: Zeyu Ma -Date: Fri Jun 30 03:11:03 2023 -0400 - - Add 16 lines last edited by Zeyu Ma in worldgen/surfaces/surface.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 8d6e3ccad7eeed58ed576616dd6291071cee8a90 -Author: Lahav Lipson -Date: Fri Jun 30 03:11:03 2023 -0400 - - Add 25 lines last edited by Lahav Lipson in worldgen/surfaces/surface.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 9a830bd197aa891aac1a18e3c779a0e86af0090f -Author: Lingjie Mei -Date: Fri Jun 30 03:11:03 2023 -0400 - - Add 111 lines last edited by Lingjie Mei in worldgen/surfaces/surface.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit d461618db13bc656c894329bd653d03c5bc38880 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:02 2023 -0400 - - Add 234 lines last edited by Alexander Raistrick in worldgen/surfaces/surface.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 61d0fba2d0d8602817ac584f1165f56aa83267fc -Author: Zeyu Ma -Date: Fri Jun 30 03:11:02 2023 -0400 - - Add 3 lines last edited by Zeyu Ma in worldgen/config/scene_types/plain.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 039b1ffc5988328766ab7a6d4371a152aa170f92 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:02 2023 -0400 - - Add 25 lines last edited by Alexander Raistrick in worldgen/config/scene_types/plain.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 788dfc9893d61a82856926b0684a13e1e0d6374d -Author: Zeyu Ma -Date: Fri Jun 30 03:11:02 2023 -0400 - - Add 26 lines last edited by Zeyu Ma in worldgen/config/scene_types/cave.gin - - Commit made automatically to show authorship. This version of the code is not usable. 
- -commit 945f507be7b670fa1c16bac91fd1f6a302a501a8 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:02 2023 -0400 - - Add 35 lines last edited by Alexander Raistrick in worldgen/config/scene_types/cave.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 631747f074cbb4246f84090a20cdf145536c3905 -Author: Zeyu Ma -Date: Fri Jun 30 03:11:02 2023 -0400 - - Add 13 lines last edited by Zeyu Ma in worldgen/config/scene_types/river.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 05d26f6f3b8a146fcaccfdce4938c50068cbe081 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:02 2023 -0400 - - Add 28 lines last edited by Alexander Raistrick in worldgen/config/scene_types/river.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 9885969f9866e0f175f104d8007cc92f41a3ee68 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:02 2023 -0400 - - Add 9 lines last edited by Alexander Raistrick in worldgen/config/scene_types/cliff.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit a337da3df6bfe7d79b6df1ad644cf7dd4f832d3e -Author: Zeyu Ma -Date: Fri Jun 30 03:11:02 2023 -0400 - - Add 14 lines last edited by Zeyu Ma in worldgen/config/scene_types/cliff.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit ec49d709edc69ed456fba6950384ac90bf4eaa69 -Author: Lingjie Mei -Date: Fri Jun 30 03:11:02 2023 -0400 - - Add 2 lines last edited by Lingjie Mei in worldgen/config/scene_types/coral_reef.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit e1e49c691d538d52e19f2949dafd6971aeb32d44 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:02 2023 -0400 - - Add 9 lines last edited by Alexander Raistrick in worldgen/config/scene_types/coral_reef.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit d33be963192e5e8eb8cbe19f96d36e69bc8dc887 -Author: Pvl-bot -Date: Fri Jun 30 03:11:02 2023 -0400 - - Add 1 lines last edited by Pvl-bot in worldgen/config/scene_types/desert.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit fa2800f9b13bc0d16575c1323c442b3fd710b1b3 -Author: Lingjie Mei -Date: Fri Jun 30 03:11:02 2023 -0400 - - Add 1 lines last edited by Lingjie Mei in worldgen/config/scene_types/desert.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 2830c81623c2354f92607839107e70b9b56943f1 -Author: Zeyu Ma -Date: Fri Jun 30 03:11:02 2023 -0400 - - Add 18 lines last edited by Zeyu Ma in worldgen/config/scene_types/desert.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 5a75d33a7ead9d3d3342825470bb5639de5060d5 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:02 2023 -0400 - - Add 25 lines last edited by Alexander Raistrick in worldgen/config/scene_types/desert.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 6227818ba1e563e6ec80a926da1bb4c677b83306 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:02 2023 -0400 - - Add 4 lines last edited by Alexander Raistrick in worldgen/config/scene_types/snowy_mountain.gin - - Commit made automatically to show authorship. This version of the code is not usable. 
- -commit ace50ece997286cf87fcad3be54520fee3d1be47 -Author: Zeyu Ma -Date: Fri Jun 30 03:11:02 2023 -0400 - - Add 31 lines last edited by Zeyu Ma in worldgen/config/scene_types/snowy_mountain.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 57b1c3e114abff81fc147ce31f553bedc44b1127 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:02 2023 -0400 - - Add 13 lines last edited by Alexander Raistrick in worldgen/config/scene_types/arctic.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 3bde811030dc78f6ed769af01fd3b03c9416dced -Author: Zeyu Ma -Date: Fri Jun 30 03:11:02 2023 -0400 - - Add 28 lines last edited by Zeyu Ma in worldgen/config/scene_types/arctic.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit db282685ad7ae5dcc80c66bdd3a2f2b91ce5e60c -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:02 2023 -0400 - - Add 11 lines last edited by Alexander Raistrick in worldgen/config/scene_types/coast.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit adbf76e58a503f8f75b745883026341a5f822549 -Author: Zeyu Ma -Date: Fri Jun 30 03:11:02 2023 -0400 - - Add 32 lines last edited by Zeyu Ma in worldgen/config/scene_types/coast.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 29fa1c020cfa6e01d3efcbde633dc75c0d5e8a5d -Author: Zeyu Ma -Date: Fri Jun 30 03:11:02 2023 -0400 - - Add 3 lines last edited by Zeyu Ma in worldgen/config/scene_types/forest.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 27e04967cea3d6fb39f54d0f5501c7acda64d06e -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:02 2023 -0400 - - Add 55 lines last edited by Alexander Raistrick in worldgen/config/scene_types/forest.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit c0f1b1f8a8889b60c001c6b463f8d738528306f0 -Author: Lingjie Mei -Date: Fri Jun 30 03:11:02 2023 -0400 - - Add 8 lines last edited by Lingjie Mei in worldgen/config/scene_types/under_water.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 36d0faa13320b747cc89c6f95e52735df3d50715 -Author: Zeyu Ma -Date: Fri Jun 30 03:11:02 2023 -0400 - - Add 13 lines last edited by Zeyu Ma in worldgen/config/scene_types/under_water.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 86c88747f903cff6b31e60807b32f34c7d0585e8 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:02 2023 -0400 - - Add 63 lines last edited by Alexander Raistrick in worldgen/config/scene_types/under_water.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit d56d700da699e4f6b79a459dd51678c5080f7c34 -Author: Zeyu Ma -Date: Fri Jun 30 03:11:02 2023 -0400 - - Add 5 lines last edited by Zeyu Ma in worldgen/config/scene_types/mountain.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 203a92b35e1de4ca94c7803aa6755004aefecaaa -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:02 2023 -0400 - - Add 8 lines last edited by Alexander Raistrick in worldgen/config/scene_types/mountain.gin - - Commit made automatically to show authorship. This version of the code is not usable. 
- -commit 7d991ca45b30bd310de1c5210ac9f082b3dd5671 -Author: Lingjie Mei -Date: Fri Jun 30 03:11:02 2023 -0400 - - Add 2 lines last edited by Lingjie Mei in worldgen/config/scene_types/kelp_forest.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 737db271662d3b143363c5b283e704d17d931ffc -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:02 2023 -0400 - - Add 22 lines last edited by Alexander Raistrick in worldgen/config/scene_types/kelp_forest.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 74cdc70dd13b00c8a469f444da46ae081748c192 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:02 2023 -0400 - - Add 7 lines last edited by Alexander Raistrick in worldgen/config/scene_types/canyon.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 9766ddaa080b9d7259d46a001fb9a56fdd5dde38 -Author: Zeyu Ma -Date: Fri Jun 30 03:11:02 2023 -0400 - - Add 8 lines last edited by Zeyu Ma in worldgen/config/scene_types/canyon.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit cf12a5f35d26493711fb2843779c7972022ea2d9 -Author: Zeyu Ma -Date: Fri Jun 30 03:11:02 2023 -0400 - - Add 50 lines last edited by Zeyu Ma in worldgen/config/palette/water.json - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 1f2d6c98947c54dfb13bd306c9c86a69d5be43bf -Author: Zeyu Ma -Date: Fri Jun 30 03:11:01 2023 -0400 - - Add 50 lines last edited by Zeyu Ma in worldgen/config/palette/sandstone.json - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 8a857f4b2ff4427d9f3b842903ec0dda0a30c6c3 -Author: Zeyu Ma -Date: Fri Jun 30 03:11:01 2023 -0400 - - Add 50 lines last edited by Zeyu Ma in worldgen/config/palette/desert.json - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 6c0f48349c2af0c09071f4c3c7dd2f7eaee18ab7 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:01 2023 -0400 - - Add 2 lines last edited by Alexander Raistrick in worldgen/config/simple.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 1da542585359653c70ed081ae71e82d7f61a20af -Author: Pvl-bot -Date: Fri Jun 30 03:11:01 2023 -0400 - - Add 3 lines last edited by Pvl-bot in worldgen/config/simple.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 3adc2dfd329ab3f1d6e789812f056010965e75a5 -Author: Lahav Lipson -Date: Fri Jun 30 03:11:01 2023 -0400 - - Add 1 lines last edited by Lahav Lipson in worldgen/config/no_assets.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit b398dae1ac9625f1f2f61c1fc37a55de0ed1fa1f -Author: Zeyu Ma -Date: Fri Jun 30 03:11:01 2023 -0400 - - Add 1 lines last edited by Zeyu Ma in worldgen/config/no_assets.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit d5647b3a843f87a5d2550db37d851f69aaa5506c -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:01 2023 -0400 - - Add 45 lines last edited by Alexander Raistrick in worldgen/config/no_assets.gin - - Commit made automatically to show authorship. This version of the code is not usable. 
- -commit 655d87435e7e500a54e1c50bad5d2fddcc2e0ed3 -Author: Zeyu Ma -Date: Fri Jun 30 03:11:01 2023 -0400 - - Add 10 lines last edited by Zeyu Ma in worldgen/config/reuse_terrain_assets.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 79785b66a09027e4dbffb17a9fdaa59ee30bad9f -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:01 2023 -0400 - - Add 1 lines last edited by Alexander Raistrick in worldgen/config/high_quality_terrain.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 9215f1a267408f4a8fddc8099e767b1e189a3a10 -Author: Zeyu Ma -Date: Fri Jun 30 03:11:01 2023 -0400 - - Add 4 lines last edited by Zeyu Ma in worldgen/config/high_quality_terrain.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit b54522163492f4457cb5067ba2496c3a2a8f380e -Author: Pvl-bot -Date: Fri Jun 30 03:11:01 2023 -0400 - - Add 1 lines last edited by Pvl-bot in worldgen/config/stereo_training.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit c1bc8163ae4ae1e0d306510d65631b6287b9992d -Author: Lahav Lipson -Date: Fri Jun 30 03:11:01 2023 -0400 - - Add 1 lines last edited by Lahav Lipson in worldgen/config/stereo_training.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit c9befacf759ddd9db6b8777c1521786ed55d4cc4 -Author: Zeyu Ma -Date: Fri Jun 30 03:11:01 2023 -0400 - - Add 7 lines last edited by Zeyu Ma in worldgen/config/stereo_training.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 063da5f62336f2d98686a05363a352a095abc37f -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:01 2023 -0400 - - Add 17 lines last edited by Alexander Raistrick in worldgen/config/stereo_training.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 6390ed70dc5b055754a0e5ee540aab869fa68538 -Author: Hei Law -Date: Fri Jun 30 03:11:01 2023 -0400 - - Add 1 lines last edited by Hei Law in worldgen/config/base.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 0489251219c0b234e622babbfab9660a845dd316 -Author: Zeyu Ma -Date: Fri Jun 30 03:11:01 2023 -0400 - - Add 32 lines last edited by Zeyu Ma in worldgen/config/base.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit ba85630aecb42a3de64a2b46cfbc2c6f36e394f9 -Author: Lahav Lipson -Date: Fri Jun 30 03:11:01 2023 -0400 - - Add 34 lines last edited by Lahav Lipson in worldgen/config/base.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 34964cbb63dbd4af6f0658aa6f5119a12537c0cc -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:01 2023 -0400 - - Add 145 lines last edited by Alexander Raistrick in worldgen/config/base.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit af684d282f645a7c7702b85849a106395fba8c97 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:01 2023 -0400 - - Add 9 lines last edited by Alexander Raistrick in worldgen/config/asset_demo.gin - - Commit made automatically to show authorship. This version of the code is not usable. 
- -commit 2ddc977810d21f81a8e1a3cb56c38b57072511a4 -Author: Lingjie Mei -Date: Fri Jun 30 03:11:01 2023 -0400 - - Add 3 lines last edited by Lingjie Mei in worldgen/config/base_surface_registry.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit fafb646b1636620244837b7d1f71b9ac1d03764b -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:01 2023 -0400 - - Add 31 lines last edited by Alexander Raistrick in worldgen/config/base_surface_registry.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit f4ca48f85f0490066f4646332c95cbbc9bcc6989 -Author: Zeyu Ma -Date: Fri Jun 30 03:11:01 2023 -0400 - - Add 33 lines last edited by Zeyu Ma in worldgen/config/base_surface_registry.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 2e142d087789f5e02c1ccd19980af4bbb7751167 -Author: Zeyu Ma -Date: Fri Jun 30 03:11:01 2023 -0400 - - Add 4 lines last edited by Zeyu Ma in worldgen/config/fast_terrain_assets.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 3f095f0f12c5d8c34efd554a15626411ea890d73 -Author: Zeyu Ma -Date: Fri Jun 30 03:11:01 2023 -0400 - - Add 3 lines last edited by Zeyu Ma in worldgen/config/monocular.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 67509281d99cd5c0ffe339508859674e9cdb3e22 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:01 2023 -0400 - - Add 5 lines last edited by Alexander Raistrick in worldgen/config/no_particles.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit abca0c4c5b8e106311abc9fb5466948084963fb7 -Author: Pvl-bot -Date: Fri Jun 30 03:11:01 2023 -0400 - - Add 8 lines last edited by Pvl-bot in worldgen/config/dev.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 9c9f060e13f1cbcca893462893391e386a4409fe -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:01 2023 -0400 - - Add 8 lines last edited by Alexander Raistrick in worldgen/config/dev.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 855bc9ac0c52f274b4122af550515b1690a93b5f -Author: Pvl-bot -Date: Fri Jun 30 03:11:01 2023 -0400 - - Add 4 lines last edited by Pvl-bot in worldgen/config/no_creatures.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 08b10e8f2615df1878ce7cd5ec54a8a24ffdade6 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:01 2023 -0400 - - Add 2 lines last edited by Alexander Raistrick in worldgen/config/no_rocks.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 15b15727441cec53467c0d9b5b1bd499d5ef8991 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:00 2023 -0400 - - Add 20 lines last edited by Alexander Raistrick in worldgen/config/trailer.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 6298ef51aca1e810abf7cd8749737698700f9e89 -Author: Pvl-bot -Date: Fri Jun 30 03:11:00 2023 -0400 - - Add 1 lines last edited by Pvl-bot in worldgen/config/natural.gin - - Commit made automatically to show authorship. This version of the code is not usable. 
- -commit 5b9d22d3d6b98dd205fd652a2d90db6e5716e7da -Author: Zeyu Ma -Date: Fri Jun 30 03:11:00 2023 -0400 - - Add 2 lines last edited by Zeyu Ma in worldgen/config/natural.gin - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 613e519dcb519c090bffa24063d6a533b739c47c -Author: Pvl-bot -Date: Fri Jun 30 03:11:00 2023 -0400 - - Add 3 lines last edited by Pvl-bot in worldgen/asset_demo.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit e97a625d33a0014d832cc8d1354b9447627fceea -Author: Zeyu Ma -Date: Fri Jun 30 03:11:00 2023 -0400 - - Add 26 lines last edited by Zeyu Ma in worldgen/asset_demo.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 657bce9c8ca1a0180843c79d384ca3d1418e5510 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:00 2023 -0400 - - Add 181 lines last edited by Alexander Raistrick in worldgen/asset_demo.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 6a733761646cf0848bf72b0e3cd752704e26edc9 -Author: Hei Law -Date: Fri Jun 30 03:11:00 2023 -0400 - - Add 2 lines last edited by Hei Law in worldgen/generate.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 18e94e177032cf3c1aab409e74ecf08bf6f37fdc -Author: Pvl-bot -Date: Fri Jun 30 03:11:00 2023 -0400 - - Add 4 lines last edited by Pvl-bot in worldgen/generate.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit dedda5be5a670f67c61449302babd02f62f08a1b -Author: Lahav Lipson -Date: Fri Jun 30 03:11:00 2023 -0400 - - Add 11 lines last edited by Lahav Lipson in worldgen/generate.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 7a1084c19eae959d2312f964d4103e513ace7b3f -Author: Lingjie Mei -Date: Fri Jun 30 03:11:00 2023 -0400 - - Add 17 lines last edited by Lingjie Mei in worldgen/generate.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit c1c4311793cdb1a6def1d22aeb77e3e38a2a9d05 -Author: Zeyu Ma -Date: Fri Jun 30 03:11:00 2023 -0400 - - Add 28 lines last edited by Zeyu Ma in worldgen/generate.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit ecdfbd13f2b06c2bd2a9e31d6b70d03e2e0dd579 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:00 2023 -0400 - - Add 347 lines last edited by Alexander Raistrick in worldgen/generate.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 9a4fd05ae7a7951f2d2f0525953f2e19ca1171c9 -Author: Hei Law -Date: Fri Jun 30 03:11:00 2023 -0400 - - Add 3 lines last edited by Hei Law in worldgen/core.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 9d1537514b0ec24c430437872d808fe202a56715 -Author: Pvl-bot -Date: Fri Jun 30 03:11:00 2023 -0400 - - Add 4 lines last edited by Pvl-bot in worldgen/core.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 58b17f6cc3e86623659f7a3ffedb474579fce608 -Author: Yihan Wang -Date: Fri Jun 30 03:11:00 2023 -0400 - - Add 4 lines last edited by Yihan Wang in worldgen/core.py - - Commit made automatically to show authorship. This version of the code is not usable. 
- -commit 46bb4a6dee1bed4041130ee991cab67a71ead1c5 -Author: Lingjie Mei -Date: Fri Jun 30 03:11:00 2023 -0400 - - Add 22 lines last edited by Lingjie Mei in worldgen/core.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit b8d90df1927b59384e6d199ffab2280a1c9f5ef9 -Author: Zeyu Ma -Date: Fri Jun 30 03:11:00 2023 -0400 - - Add 44 lines last edited by Zeyu Ma in worldgen/core.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 686d6284b1b0b1452c1e6dbabe4b2dd2de7a0998 -Author: Lahav Lipson -Date: Fri Jun 30 03:11:00 2023 -0400 - - Add 52 lines last edited by Lahav Lipson in worldgen/core.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit e788179213b27220067ca34d68672cf9452d4142 -Author: Alexander Raistrick -Date: Fri Jun 30 03:11:00 2023 -0400 - - Add 303 lines last edited by Alexander Raistrick in worldgen/core.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit f858026d982604f494ca3fb22d2f6ac4580c4a3d -Author: Pvl-bot -Date: Fri Jun 30 03:11:00 2023 -0400 - - Add 2 lines last edited by Pvl-bot in process_mesh/glsl/spine.geom - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 3341e95014a3c5bc1b4c51b7f7f48bc72178706c -Author: Lahav Lipson -Date: Fri Jun 30 03:11:00 2023 -0400 - - Add 52 lines last edited by Lahav Lipson in process_mesh/glsl/spine.geom - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 5f8f2185306f6b4458f92ad15fec978c2de9df96 -Author: Pvl-bot -Date: Fri Jun 30 03:11:00 2023 -0400 - - Add 4 lines last edited by Pvl-bot in process_mesh/glsl/spine.frag - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 7265b275589673f1513bfa09869d3b6c26a7ac56 -Author: Lahav Lipson -Date: Fri Jun 30 03:11:00 2023 -0400 - - Add 5 lines last edited by Lahav Lipson in process_mesh/glsl/spine.frag - - Commit made automatically to show authorship. This version of the code is not usable. - -commit d57fec664363384998510a2a5a879fd516128098 -Author: Pvl-bot -Date: Fri Jun 30 03:11:00 2023 -0400 - - Add 4 lines last edited by Pvl-bot in process_mesh/glsl/next_wings.vert - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 198d1187ce803fdb258762bee5ba8ae898ca1e92 -Author: Lahav Lipson -Date: Fri Jun 30 03:11:00 2023 -0400 - - Add 50 lines last edited by Lahav Lipson in process_mesh/glsl/next_wings.vert - - Commit made automatically to show authorship. This version of the code is not usable. - -commit b67778daa2d706ec9f4e6c1841e1aa4765bf690c -Author: Pvl-bot -Date: Fri Jun 30 03:11:00 2023 -0400 - - Add 4 lines last edited by Pvl-bot in process_mesh/glsl/hair.vert - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 5e5d5d4fce9db382546316be88b23e7f63f7093c -Author: Lahav Lipson -Date: Fri Jun 30 03:11:00 2023 -0400 - - Add 32 lines last edited by Lahav Lipson in process_mesh/glsl/hair.vert - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 74cea049d5a5d8c8a34f7ceb4d6f7406bd0522bf -Author: Pvl-bot -Date: Fri Jun 30 03:10:59 2023 -0400 - - Add 4 lines last edited by Pvl-bot in process_mesh/glsl/wings.vert - - Commit made automatically to show authorship. This version of the code is not usable. 
- -commit a91a473044483be90e83c9cf6c363024915167a2 -Author: Lahav Lipson -Date: Fri Jun 30 03:10:59 2023 -0400 - - Add 49 lines last edited by Lahav Lipson in process_mesh/glsl/wings.vert - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 5e2d127c785d1948df149ae13cbacfdde09dd5eb -Author: Pvl-bot -Date: Fri Jun 30 03:10:59 2023 -0400 - - Add 4 lines last edited by Pvl-bot in process_mesh/glsl/wings.frag - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 433bc23e983a24cbce091894b2bae5427ca93bd2 -Author: Lahav Lipson -Date: Fri Jun 30 03:10:59 2023 -0400 - - Add 56 lines last edited by Lahav Lipson in process_mesh/glsl/wings.frag - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 10c35d608b2e2744c07c5210bac4887c906f4c32 -Author: Pvl-bot -Date: Fri Jun 30 03:10:59 2023 -0400 - - Add 2 lines last edited by Pvl-bot in process_mesh/glsl/hair.geom - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 82c2104c4951da65d98360b4aea2717b20b2c8f0 -Author: Lahav Lipson -Date: Fri Jun 30 03:10:59 2023 -0400 - - Add 100 lines last edited by Lahav Lipson in process_mesh/glsl/hair.geom - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 17e5dbf900de45f5dcd4c6cba76db2dcbc915a11 -Author: Pvl-bot -Date: Fri Jun 30 03:10:59 2023 -0400 - - Add 2 lines last edited by Pvl-bot in process_mesh/glsl/wings.geom - - Commit made automatically to show authorship. This version of the code is not usable. - -commit f1fd1f887e8a52e97aa7ab1001859b9430987ccc -Author: Lahav Lipson -Date: Fri Jun 30 03:10:59 2023 -0400 - - Add 104 lines last edited by Lahav Lipson in process_mesh/glsl/wings.geom - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 157d46939eb30b3467ef44a515a54da23af073cd -Author: Pvl-bot -Date: Fri Jun 30 03:10:59 2023 -0400 - - Add 4 lines last edited by Pvl-bot in process_mesh/glsl/hair.frag - - Commit made automatically to show authorship. This version of the code is not usable. - -commit ee01661ad067fb4d1c58d052a913cc0355120f33 -Author: Lahav Lipson -Date: Fri Jun 30 03:10:59 2023 -0400 - - Add 57 lines last edited by Lahav Lipson in process_mesh/glsl/hair.frag - - Commit made automatically to show authorship. This version of the code is not usable. - -commit c0091f92d399cf66f35ac471e66cf0c0581367ca -Author: Pvl-bot -Date: Fri Jun 30 03:10:59 2023 -0400 - - Add 14 lines last edited by Pvl-bot in process_mesh/utils.hpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 65bb67d8b6b27b3661e237280ebff0634bb1a6f9 -Author: Lahav Lipson -Date: Fri Jun 30 03:10:59 2023 -0400 - - Add 28 lines last edited by Lahav Lipson in process_mesh/utils.hpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit a0d6c08a5fb5f88dde2146322931c0a812f31178 -Author: Pvl-bot -Date: Fri Jun 30 03:10:59 2023 -0400 - - Add 36 lines last edited by Pvl-bot in process_mesh/main.cpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 1737461c9614c32418dc3a6a2380620deb132acd -Author: Lahav Lipson -Date: Fri Jun 30 03:10:59 2023 -0400 - - Add 466 lines last edited by Lahav Lipson in process_mesh/main.cpp - - Commit made automatically to show authorship. This version of the code is not usable. 
- -commit 6de4841646e9918984dc3bbf5136d287e57abc6c -Author: Alexander Raistrick -Date: Fri Jun 30 03:10:59 2023 -0400 - - Add 1 lines last edited by Alexander Raistrick in process_mesh/CMakeLists.txt - - Commit made automatically to show authorship. This version of the code is not usable. - -commit ebc5d53b491d2a435fee5541f3f12a1f0baddc43 -Author: Lahav Lipson -Date: Fri Jun 30 03:10:59 2023 -0400 - - Add 63 lines last edited by Lahav Lipson in process_mesh/CMakeLists.txt - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 4078562f8255760aa1b0559b5459c653ad2e9f39 -Author: Pvl-bot -Date: Fri Jun 30 03:10:59 2023 -0400 - - Add 17 lines last edited by Pvl-bot in process_mesh/load_blender_mesh.cpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit ed074bd1d01007e92e140e5af1cfbb93e781b478 -Author: Lahav Lipson -Date: Fri Jun 30 03:10:59 2023 -0400 - - Add 90 lines last edited by Lahav Lipson in process_mesh/load_blender_mesh.cpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit e25bfeb0360d491ce0fd2ff74e026c1ccb21db2f -Author: Pvl-bot -Date: Fri Jun 30 03:10:59 2023 -0400 - - Add 9 lines last edited by Pvl-bot in process_mesh/buffer_arrays.hpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 8d6197cf3755ad019a0d08b43bd48da723d44ea5 -Author: Lahav Lipson -Date: Fri Jun 30 03:10:59 2023 -0400 - - Add 117 lines last edited by Lahav Lipson in process_mesh/buffer_arrays.hpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit be730db2ae4b1753f5aa7760f8b11dc28bc365fc -Author: Pvl-bot -Date: Fri Jun 30 03:10:59 2023 -0400 - - Add 10 lines last edited by Pvl-bot in process_mesh/string_tools.cpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 5be114b4f28a03eb0d1a39ee298e16d9f193aea9 -Author: Lahav Lipson -Date: Fri Jun 30 03:10:59 2023 -0400 - - Add 31 lines last edited by Lahav Lipson in process_mesh/string_tools.cpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 9e9058a7111f13c301c21ec8f277480a0b54cbcc -Author: Pvl-bot -Date: Fri Jun 30 03:10:59 2023 -0400 - - Add 12 lines last edited by Pvl-bot in process_mesh/camera_view.hpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 028cc4f75d2a3515021c5bf7765cb3a794058d24 -Author: Lahav Lipson -Date: Fri Jun 30 03:10:59 2023 -0400 - - Add 24 lines last edited by Lahav Lipson in process_mesh/camera_view.hpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit a817f1abedff14fab35fb2b3164f3b55d501c7a9 -Author: Pvl-bot -Date: Fri Jun 30 03:10:59 2023 -0400 - - Add 14 lines last edited by Pvl-bot in process_mesh/io.hpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit bff6f301f8941a02e5e6f4be91dd518f51644331 -Author: Lahav Lipson -Date: Fri Jun 30 03:10:59 2023 -0400 - - Add 47 lines last edited by Lahav Lipson in process_mesh/io.hpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit d7ca7a0294fb19407ca230daae14c2ccc6f2ffdc -Author: Pvl-bot -Date: Fri Jun 30 03:10:59 2023 -0400 - - Add 12 lines last edited by Pvl-bot in process_mesh/camera_view.cpp - - Commit made automatically to show authorship. 
This version of the code is not usable. - -commit 79bd829cbf4cd202b5cde8010de202d4e325e9ff -Author: Lahav Lipson -Date: Fri Jun 30 03:10:59 2023 -0400 - - Add 116 lines last edited by Lahav Lipson in process_mesh/camera_view.cpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 9620bf42cf051b6fc541f266f724f9a2c704aa91 -Author: Pvl-bot -Date: Fri Jun 30 03:10:59 2023 -0400 - - Add 6 lines last edited by Pvl-bot in process_mesh/show.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 1a318ea77d09e7184064e932e5b80c8492d45cd5 -Author: Lahav Lipson -Date: Fri Jun 30 03:10:59 2023 -0400 - - Add 50 lines last edited by Lahav Lipson in process_mesh/show.py - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 76614a208ddd8a34684f4879a09602426389e4dc -Author: Pvl-bot -Date: Fri Jun 30 03:10:58 2023 -0400 - - Add 288 lines last edited by Pvl-bot in process_mesh/shader.cpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 7be3e848014e4b10cb158aa54e391c90be8296f8 -Author: Pvl-bot -Date: Fri Jun 30 03:10:58 2023 -0400 - - Add 15 lines last edited by Pvl-bot in process_mesh/blender_object.hpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 0d721679bf34c69af0e6b3ed8ec71b98f680a732 -Author: Lahav Lipson -Date: Fri Jun 30 03:10:58 2023 -0400 - - Add 68 lines last edited by Lahav Lipson in process_mesh/blender_object.hpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 0317ee1a97004a364718866bb3f572f339cea12c -Author: Pvl-bot -Date: Fri Jun 30 03:10:58 2023 -0400 - - Add 15 lines last edited by Pvl-bot in process_mesh/utils.cpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 3d0e5f41c695f86d5b4248f01d4d6d5ba4f7abb6 -Author: Lahav Lipson -Date: Fri Jun 30 03:10:58 2023 -0400 - - Add 152 lines last edited by Lahav Lipson in process_mesh/utils.cpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 8bfe62e7a2af4d06601ff0d392cfe423c98e78f9 -Author: Lahav Lipson -Date: Fri Jun 30 03:10:58 2023 -0400 - - Add 11 lines last edited by Lahav Lipson in process_mesh/load_blender_mesh.hpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit fe7356b98a39e9595a5eea3d39e2bb41c45b4244 -Author: Pvl-bot -Date: Fri Jun 30 03:10:58 2023 -0400 - - Add 24 lines last edited by Pvl-bot in process_mesh/load_blender_mesh.hpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit c1846b7d90912cf9fa8ccb7cc844dce5a6e733d4 -Author: Pvl-bot -Date: Fri Jun 30 03:10:58 2023 -0400 - - Add 77 lines last edited by Pvl-bot in process_mesh/shader.hpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit ae41e261f6ca62b1436a33cdc7eb5f0d9515e542 -Author: Pvl-bot -Date: Fri Jun 30 03:10:58 2023 -0400 - - Add 9 lines last edited by Pvl-bot in process_mesh/string_tools.hpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 23f38b7cd4fef89ef757c84d15e33f62de35e2ab -Author: Lahav Lipson -Date: Fri Jun 30 03:10:58 2023 -0400 - - Add 9 lines last edited by Lahav Lipson in process_mesh/string_tools.hpp - - Commit made automatically to show authorship. 
This version of the code is not usable. - -commit ca1611bccfab7ca8612793a09735d8d1961ac6c7 -Author: Pvl-bot -Date: Fri Jun 30 03:10:58 2023 -0400 - - Add 7 lines last edited by Pvl-bot in process_mesh/buffer_arrays.cpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 409785366db24f3cd6bb6a55145d625885d7a3eb -Author: Lahav Lipson -Date: Fri Jun 30 03:10:58 2023 -0400 - - Add 127 lines last edited by Lahav Lipson in process_mesh/buffer_arrays.cpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 4ce3c92de4d366f3c1d469162a97e014cd5fd9e2 -Author: Pvl-bot -Date: Fri Jun 30 03:10:58 2023 -0400 - - Add 20 lines last edited by Pvl-bot in process_mesh/io.cpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 9c81b0b573afd65976da018560a9171c8cdb0879 -Author: Lahav Lipson -Date: Fri Jun 30 03:10:58 2023 -0400 - - Add 64 lines last edited by Lahav Lipson in process_mesh/io.cpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit a9b664c47ae75815726dd7de420b27d8d5df96dd -Author: Pvl-bot -Date: Fri Jun 30 03:10:58 2023 -0400 - - Add 15 lines last edited by Pvl-bot in process_mesh/blender_object.cpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit 7135764600c01a5d71dc8b8de66b40b3744e4998 -Author: Lahav Lipson -Date: Fri Jun 30 03:10:58 2023 -0400 - - Add 185 lines last edited by Lahav Lipson in process_mesh/blender_object.cpp - - Commit made automatically to show authorship. This version of the code is not usable. - -commit fd99ac2314e85e2b7b5c9ebffb9107e2087b1902 -Author: Pvl-bot -Date: Fri Jun 30 03:10:58 2023 -0400 - - Initial commit - - Commit made automatically to show authorship. This version of the code is not usable. 
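The pyproject.toml and setup.py diffs below trim the core dependency list, move the terrain and ground-truth-visualization packages into optional `[terrain]` and `[vis]` extras, and leave the native build steps gated behind `INFINIGEN_*` environment variables. As a minimal usage sketch (the extra names and the environment flag are taken directly from the diff lines below; this is illustrative, not the project's canonical install documentation):

```bash
# Core editable install only: the slimmed-down default dependency set.
pip install -e .

# Opt back into the optional dependency groups split out in pyproject.toml.
pip install -e ".[terrain,vis]"

# Skip the native terrain/customgt build entirely, mirroring the
# INFINIGEN_MINIMAL_INSTALL check in setup.py.
INFINIGEN_MINIMAL_INSTALL=True pip install -e .
```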
diff --git a/pyproject.toml b/pyproject.toml index 5722067da..55b620f17 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -23,25 +23,16 @@ classifiers = [ requires-python = "==3.10.*" dependencies = [ "bpy==3.6.0", - "einops", - "flow_vis", - "frozendict", - "geomdl", + "frozendict", # TODO remove + "geomdl", # used only for creatures "gin_config>=0.5.0", "imageio", - "ipython", - "json5", - "landlab>=2.6.0", "matplotlib", "networkx", "numpy<2", "opencv-python", "pandas", "psutil", - "pycparser==2.22", - "pyrender", - "python-fcl", - "Rtree", "scikit-image", "scikit-learn", "scipy", @@ -49,12 +40,23 @@ dependencies = [ "submitit", "tqdm", "trimesh", - "vnoise", - "zarr", - "networkx", + + # used by trimesh, we could specify "trimesh[easy]" but this brings more packages + "python-fcl", + "rtree", ] [project.optional-dependencies] +terrain = [ + "landlab>=2.6.0", + "pyrender", +] +vis = [ + "einops", + "flow_vis", + "numba", # for ground truth visuals + "pyglet<2" # for trimesh_scene.show() +] dev = [ "pytest", "pytest-ordering", @@ -65,11 +67,7 @@ dev = [ "ruff", "isort", "tabulate", # for integration test results -] - -vis = [ - "numba", # for ground truth visuals - "pyglet<2" # for trimesh_scene.show() + "pre-commit" ] wandb = [ @@ -100,7 +98,7 @@ exclude = [ "terrain/lib/**/*.so", # created by terrain compilation "terrain/lib/**/*.o", # created by terrain compilation "datagen/customgt/build/customgt", # created during opengl compilation - "assets/creatures/parts/nurbs_data/*.npy", # stores creature nurbs centroids data, ideally will be deprecated + "assets/objects/creatures/parts/nurbs_data/*.npy", # stores creature nurbs centroids data, ideally will be deprecated ] [tool.setuptools.dynamic] @@ -128,34 +126,25 @@ test-extras = ["dev"] test-command = "pytest tests" [tool.ruff] -lint.select = [ - "E9", - "F63", - "F7", - "F82" - #"E", - #"F", - #"B", - #"C901", -] target-version = "py310" -line-length = 120 # will be reduced to 88 to match black soon exclude = [ - "*/dependencies/" -] -lint.ignore = [ - "E402", # we have lots of sys.path and opencv statements that must preceed imports, ignore for now - "E501", # ignore line length for now - "E701", # disable multi line statements until we enable black - "F401", # unused imports are sometimes necessary for gin to work - "E712", # some people think `if x == True` is more explicit, not going to force it + # exclude known submodules + "*/dependencies/", + "infinigen/OcMesher", + "infinigen/infinigen_gpl", ] -lint.mccabe.max-complexity = 10 +[tool.ruff.lint] + +select = ["E", "I", "F"] +ignore = [ + "E501", # over-length lines, formatter should handle it + "E741", # ambigous variable name +] [tool.ruff.lint.per-file-ignores] -"__init__.py" = [] +"__init__.py" = ["F401"] "infinigen/assets/*" = ["F841"] #transpiler currently creates lots of unused variables [tool.pytype] diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index 6686c552b..000000000 --- a/requirements.txt +++ /dev/null @@ -1,23 +0,0 @@ -gin-config>=0.5.0 -tqdm -opencv-python -matplotlib -scipy -imageio -scikit-image==0.19.3 -submitit -frozendict -flow_vis -trimesh -einops -geomdl -numpy==1.26.2 -wandb -jinja2 -shapely -landlab==2.7.0 -scikit-learn -psutil -pyrender -pytest -pandas diff --git a/scripts/eevee_render.py b/scripts/eevee_render.py index 3a75e0345..882b94ce5 100644 --- a/scripts/eevee_render.py +++ b/scripts/eevee_render.py @@ -6,58 +6,72 @@ import argparse from pathlib import Path -from infinigen.core.util import blender as butil -from 
infinigen.core.rendering.render import enable_gpu -import mathutils + import bpy +import mathutils + +from infinigen.core.rendering.render import enable_gpu +from infinigen.core.util import blender as butil + def get_override(area_type, region_type): - for area in bpy.context.screen.areas: - if area.type == area_type: - for region in area.regions: - if region.type == region_type: - override = {'area': area, 'region': region} + for area in bpy.context.screen.areas: + if area.type == area_type: + for region in area.regions: + if region.type == region_type: + override = {"area": area, "region": region} return override - #error message if the area or region wasn't found - raise RuntimeError("Wasn't able to find", region_type," in area ", area_type, - "\n Make sure it's open while executing script.") - + # error message if the area or region wasn't found + raise RuntimeError( + "Wasn't able to find", + region_type, + " in area ", + area_type, + "\n Make sure it's open while executing script.", + ) def process(scene_folder: Path): butil.clear_scene() - bpy.ops.wm.open_mainfile(filepath=str(scene_folder/'scene.blend')) + bpy.ops.wm.open_mainfile(filepath=str(scene_folder / "scene.blend")) - for o in butil.get_collection('ceiling').objects: + for o in butil.get_collection("ceiling").objects: o.active_material.use_backface_culling = True - for o in butil.get_collection('wall').objects: + for o in butil.get_collection("wall").objects: o.active_material.use_backface_culling = True - bpy.ops.object.light_add(type='SUN') + bpy.ops.object.light_add(type="SUN") light = bpy.context.active_object light.rotation_euler = (-0.7, 0.1, 0.22) light.data.energy = 5 - - room = next(o for o in butil.get_collection('floor').objects if not o.hide_render) + + room = next(o for o in butil.get_collection("floor").objects if not o.hide_render) cam = bpy.context.scene.camera t = mathutils.Matrix.Translation(room.location) s = mathutils.Matrix.Scale(1.5, 4) - cam.matrix_world = t @ s @ mathutils.Euler((0.42, 0, 0.2)).to_matrix().to_4x4() @ t.inverted() @ cam.matrix_world + cam.matrix_world = ( + t + @ s + @ mathutils.Euler((0.42, 0, 0.2)).to_matrix().to_4x4() + @ t.inverted() + @ cam.matrix_world + ) - bpy.context.scene.render.filepath = str(scene_folder/'Image_EEVEE') + bpy.context.scene.render.filepath = str(scene_folder / "Image_EEVEE") enable_gpu() - bpy.context.scene.render.engine = 'BLENDER_EEVEE' + bpy.context.scene.render.engine = "BLENDER_EEVEE" bpy.ops.render.render(write_still=True) - butil.save_blend(scene_folder/'eevee.blend') + butil.save_blend(scene_folder / "eevee.blend") + parser = argparse.ArgumentParser() -parser.add_argument('input_folder', type=Path) +parser.add_argument("input_folder", type=Path) args = parser.parse_args() for p in args.input_folder.iterdir(): - if not (p/'scene.blend').exists(): - print(f'{p=} has no scene.blend') + if not (p / "scene.blend").exists(): + print(f"{p=} has no scene.blend") continue - process(p) \ No newline at end of file + process(p) diff --git a/setup.py b/setup.py index 48feae128..ea421ccd0 100644 --- a/setup.py +++ b/setup.py @@ -7,42 +7,41 @@ # Acknowledgement: This file draws inspiration from https://github.com/pytorch/pytorch/blob/main/setup.py -from pathlib import Path +import os import subprocess import sys -import os - -from setuptools import setup, find_packages, Extension +from pathlib import Path import numpy from Cython.Build import cythonize +from setuptools import Extension, setup cwd = Path(__file__).parent str_true = "True" -MINIMAL_INSTALL = 
os.environ.get('INFINIGEN_MINIMAL_INSTALL') == str_true -BUILD_TERRAIN = os.environ.get('INFINIGEN_INSTALL_TERRAIN', str_true) == str_true -BUILD_OPENGL = os.environ.get('INFINIGEN_INSTALL_CUSTOMGT', "False") == str_true +MINIMAL_INSTALL = os.environ.get("INFINIGEN_MINIMAL_INSTALL") == str_true +BUILD_TERRAIN = os.environ.get("INFINIGEN_INSTALL_TERRAIN", str_true) == str_true +BUILD_OPENGL = os.environ.get("INFINIGEN_INSTALL_CUSTOMGT", "False") == str_true dont_build_steps = ["clean", "egg_info", "dist_info", "sdist", "--help"] -is_build_step = not any(x in sys.argv[1] for x in dont_build_steps) +is_build_step = not any(x in sys.argv[1] for x in dont_build_steps) + def ensure_submodules(): # Inspired by https://github.com/pytorch/pytorch/blob/main/setup.py - with (cwd/'.gitmodules').open() as f: + with (cwd / ".gitmodules").open() as f: submodule_folders = [ - cwd/line.split("=", 1)[1].strip() + cwd / line.split("=", 1)[1].strip() for line in f.readlines() if line.strip().startswith("path") ] if any(not p.exists() or not any(p.iterdir()) for p in submodule_folders): subprocess.run( - ["git", "submodule", "update", "--init", "--recursive"], - cwd=cwd, - check=True - ) + ["git", "submodule", "update", "--init", "--recursive"], cwd=cwd, check=True + ) + ensure_submodules() @@ -50,30 +49,32 @@ def ensure_submodules(): # theirs seems to not exclude dist_info but this causes duplicate compiling in my tests if is_build_step and not MINIMAL_INSTALL: if BUILD_TERRAIN: - subprocess.run(['make', 'terrain'], cwd=cwd, check=True) + subprocess.run(["make", "terrain"], cwd=cwd, check=True) if BUILD_OPENGL: - subprocess.run(['make', 'customgt'], cwd=cwd, check=True) + subprocess.run(["make", "customgt"], cwd=cwd, check=True) cython_extensions = [] if not MINIMAL_INSTALL: - cython_extensions.append(Extension( - name="bnurbs", - sources=["infinigen/assets/creatures/util/geometry/cpp_utils/bnurbs.pyx"], - include_dirs=[numpy.get_include()] - )) + cython_extensions.append( + Extension( + name="bnurbs", + sources=["infinigen/assets/utils/geometry/cpp_utils/bnurbs.pyx"], + include_dirs=[numpy.get_include()], + ) + ) if BUILD_TERRAIN: cython_extensions.append( Extension( name="infinigen.terrain.marching_cubes", - sources=["infinigen/terrain/marching_cubes/_marching_cubes_lewiner_cy.pyx"], - include_dirs=[numpy.get_include()] + sources=[ + "infinigen/terrain/marching_cubes/_marching_cubes_lewiner_cy.pyx" + ], + include_dirs=[numpy.get_include()], ) ) setup( - ext_modules=[ - *cythonize(cython_extensions) - ] + ext_modules=[*cythonize(cython_extensions)] # other opts come from pyproject.toml ) diff --git a/tests/assets/list_displaced_materials.txt b/tests/assets/list_displaced_materials.txt index 4444025e9..09e97ef56 100644 --- a/tests/assets/list_displaced_materials.txt +++ b/tests/assets/list_displaced_materials.txt @@ -1,5 +1,5 @@ -infinigen.assets.materials.leather_and_fabrics.fabric -infinigen.assets.materials.leather_and_fabrics.leather +infinigen.assets.materials.fabrics.fabric +infinigen.assets.materials.fabrics.leather infinigen.assets.materials.metal.grained_and_polished_metal infinigen.assets.materials.metal.hammered_metal infinigen.assets.materials.stone_and_concrete.concrete diff --git a/tests/assets/list_indoor_materials.txt b/tests/assets/list_indoor_materials.txt index 49875dc23..1b602059d 100644 --- a/tests/assets/list_indoor_materials.txt +++ b/tests/assets/list_indoor_materials.txt @@ -1,35 +1,34 @@ -infinigen.assets.materials.fabrics -infinigen.assets.materials.leather 
-infinigen.assets.materials.sofa_fabric -infinigen.assets.materials.coarse_knit_fabric -infinigen.assets.materials.fine_knit_fabric -infinigen.assets.materials.lined_fabric -infinigen.assets.materials.brushed_metal -infinigen.assets.materials.galvanized_metal -infinigen.assets.materials.grained_and_polished_metal -infinigen.assets.materials.hammered_metal -infinigen.assets.materials.metal_basic -infinigen.assets.materials.concrete -infinigen.assets.materials.tiled_wood infinigen.assets.materials.art -infinigen.assets.materials.ArtRug infinigen.assets.materials.ArtFabric +infinigen.assets.materials.ArtRug infinigen.assets.materials.brick +infinigen.assets.materials.brushed_metal +infinigen.assets.materials.bumpy_rubber_floor infinigen.assets.materials.ceramic +infinigen.assets.materials.coarse_knit_fabric +infinigen.assets.materials.concrete +infinigen.assets.materials.fabrics.fabric_random +infinigen.assets.materials.fabrics.leather +infinigen.assets.materials.fabrics.velvet +infinigen.assets.materials.fine_knit_fabric +infinigen.assets.materials.galvanized_metal infinigen.assets.materials.glass +infinigen.assets.materials.grained_and_polished_metal +infinigen.assets.materials.hammered_metal infinigen.assets.materials.hardwood_floor -infinigen.assets.materials.leather_and_fabrics.leather -infinigen.assets.materials.leather_and_fabrics.velvet +infinigen.assets.materials.leather +infinigen.assets.materials.lined_fabric infinigen.assets.materials.marble_regular infinigen.assets.materials.marble_voronoi +infinigen.assets.materials.metal_basic infinigen.assets.materials.metal.metal_basic infinigen.assets.materials.mirror infinigen.assets.materials.plaster infinigen.assets.materials.plastic infinigen.assets.materials.rug +infinigen.assets.materials.sofa_fabric infinigen.assets.materials.text infinigen.assets.materials.tile -infinigen.assets.materials.wood -infinigen.assets.materials.wood_old infinigen.assets.materials.tiled_wood -infinigen.assets.materials.bumpy_rubber_floor +infinigen.assets.materials.wood +infinigen.assets.materials.wood_old \ No newline at end of file diff --git a/tests/assets/list_indoor_meshes.txt b/tests/assets/list_indoor_meshes.txt index 61f4cc886..2236fde8d 100644 --- a/tests/assets/list_indoor_meshes.txt +++ b/tests/assets/list_indoor_meshes.txt @@ -1,100 +1,100 @@ -infinigen.assets.appliances.BeverageFridgeFactory -infinigen.assets.appliances.DishwasherFactory -infinigen.assets.appliances.MicrowaveFactory -infinigen.assets.appliances.OvenFactory -infinigen.assets.appliances.MonitorFactory -infinigen.assets.appliances.TVFactory +infinigen.assets.objects.appliances.BeverageFridgeFactory +infinigen.assets.objects.appliances.DishwasherFactory +infinigen.assets.objects.appliances.MicrowaveFactory +infinigen.assets.objects.appliances.OvenFactory +infinigen.assets.objects.appliances.MonitorFactory +infinigen.assets.objects.appliances.TVFactory -infinigen.assets.bathroom.BathroomSinkFactory -infinigen.assets.bathroom.BathtubFactory -infinigen.assets.bathroom.HardwareFactory -infinigen.assets.bathroom.ToiletFactory +infinigen.assets.objects.bathroom.BathroomSinkFactory +infinigen.assets.objects.bathroom.BathtubFactory +infinigen.assets.objects.bathroom.HardwareFactory +infinigen.assets.objects.bathroom.ToiletFactory -infinigen.assets.clothes.BlanketFactory -infinigen.assets.clothes.PantsFactory -infinigen.assets.clothes.ShirtFactory -infinigen.assets.clothes.TowelFactory -infinigen.assets.decor.AquariumTankFactory +infinigen.assets.objects.clothes.BlanketFactory 
+infinigen.assets.objects.clothes.PantsFactory +infinigen.assets.objects.clothes.ShirtFactory +infinigen.assets.objects.clothes.TowelFactory +infinigen.assets.objects.decor.AquariumTankFactory -infinigen.assets.elements.doors.GlassPanelDoorFactory -infinigen.assets.elements.doors.LiteDoorFactory -infinigen.assets.elements.doors.LouverDoorFactory -infinigen.assets.elements.doors.PanelDoorFactory -infinigen.assets.elements.staircases.CantileverStaircaseFactory -infinigen.assets.elements.staircases.CurvedStaircaseFactory -infinigen.assets.elements.staircases.LShapedStaircaseFactory -infinigen.assets.elements.staircases.SpiralStaircaseFactory -infinigen.assets.elements.staircases.StraightStaircaseFactory -infinigen.assets.elements.staircases.UShapedStaircaseFactory -infinigen.assets.elements.warehouses.RackFactory -infinigen.assets.elements.warehouses.PalletFactory -infinigen.assets.elements.RugFactory -infinigen.assets.elements.NatureShelfTrinketsFactory +infinigen.assets.objects.elements.doors.GlassPanelDoorFactory +infinigen.assets.objects.elements.doors.LiteDoorFactory +infinigen.assets.objects.elements.doors.LouverDoorFactory +infinigen.assets.objects.elements.doors.PanelDoorFactory +infinigen.assets.objects.elements.staircases.CantileverStaircaseFactory +infinigen.assets.objects.elements.staircases.CurvedStaircaseFactory +infinigen.assets.objects.elements.staircases.LShapedStaircaseFactory +infinigen.assets.objects.elements.staircases.SpiralStaircaseFactory +infinigen.assets.objects.elements.staircases.StraightStaircaseFactory +infinigen.assets.objects.elements.staircases.UShapedStaircaseFactory +infinigen.assets.objects.elements.warehouses.RackFactory +infinigen.assets.objects.elements.warehouses.PalletFactory +infinigen.assets.objects.elements.RugFactory +infinigen.assets.objects.elements.NatureShelfTrinketsFactory -infinigen.assets.lighting.CeilingLightFactory -infinigen.assets.lighting.LampFactory -infinigen.assets.lighting.DeskLampFactory -infinigen.assets.lighting.FloorLampFactory -infinigen.assets.lighting.ceiling_classic_lamp.CeilingClassicLampFactory +infinigen.assets.objects.lamp.CeilingLightFactory +infinigen.assets.objects.lamp.LampFactory +infinigen.assets.objects.lamp.DeskLampFactory +infinigen.assets.objects.lamp.FloorLampFactory +infinigen.assets.objects.lamp.ceiling_classic_lamp.CeilingClassicLampFactory -infinigen.assets.seating.chairs.BarChairFactory -infinigen.assets.seating.chairs.ChairFactory -infinigen.assets.seating.chairs.OfficeChairFactory -infinigen.assets.seating.BedFactory -infinigen.assets.seating.BedFrameFactory -infinigen.assets.seating.MattressFactory -infinigen.assets.seating.PillowFactory -infinigen.assets.seating.SofaFactory -infinigen.assets.seating.ArmChairFactory +infinigen.assets.objects.seating.chairs.BarChairFactory +infinigen.assets.objects.seating.chairs.ChairFactory +infinigen.assets.objects.seating.chairs.OfficeChairFactory +infinigen.assets.objects.seating.BedFactory +infinigen.assets.objects.seating.BedFrameFactory +infinigen.assets.objects.seating.MattressFactory +infinigen.assets.objects.seating.PillowFactory +infinigen.assets.objects.seating.SofaFactory +infinigen.assets.objects.seating.ArmChairFactory -infinigen.assets.shelves.SingleCabinetFactory -infinigen.assets.shelves.KitchenCabinetFactory -infinigen.assets.shelves.CellShelfFactory -infinigen.assets.shelves.LargeShelfFactory -infinigen.assets.shelves.SimpleBookcaseFactory -infinigen.assets.shelves.SimpleDeskFactory -infinigen.assets.shelves.TriangleShelfFactory 
-infinigen.assets.shelves.KitchenSpaceFactory -infinigen.assets.shelves.KitchenIslandFactory -infinigen.assets.shelves.TVStandFactory +infinigen.assets.objects.shelves.SingleCabinetFactory +infinigen.assets.objects.shelves.KitchenCabinetFactory +infinigen.assets.objects.shelves.CellShelfFactory +infinigen.assets.objects.shelves.LargeShelfFactory +infinigen.assets.objects.shelves.SimpleBookcaseFactory +infinigen.assets.objects.shelves.SimpleDeskFactory +infinigen.assets.objects.shelves.TriangleShelfFactory +infinigen.assets.objects.shelves.KitchenSpaceFactory +infinigen.assets.objects.shelves.KitchenIslandFactory +infinigen.assets.objects.shelves.TVStandFactory -infinigen.assets.table_decorations.BookColumnFactory -infinigen.assets.table_decorations.BookFactory -infinigen.assets.table_decorations.BookStackFactory -infinigen.assets.table_decorations.SinkFactory -infinigen.assets.table_decorations.TapFactory -infinigen.assets.table_decorations.VaseFactory +infinigen.assets.objects.table_decorations.BookColumnFactory +infinigen.assets.objects.table_decorations.BookFactory +infinigen.assets.objects.table_decorations.BookStackFactory +infinigen.assets.objects.table_decorations.SinkFactory +infinigen.assets.objects.table_decorations.TapFactory +infinigen.assets.objects.table_decorations.VaseFactory -infinigen.assets.tables.TableCocktailFactory -infinigen.assets.tables.TableDiningFactory +infinigen.assets.objects.tables.TableCocktailFactory +infinigen.assets.objects.tables.TableDiningFactory -infinigen.assets.tableware.BottleFactory -infinigen.assets.tableware.BowlFactory -infinigen.assets.tableware.CanFactory -infinigen.assets.tableware.ChopsticksFactory -infinigen.assets.tableware.CupFactory -infinigen.assets.tableware.FoodBagFactory -infinigen.assets.tableware.FoodBoxFactory -infinigen.assets.tableware.ForkFactory -infinigen.assets.tableware.FruitContainerFactory -infinigen.assets.tableware.JarFactory -infinigen.assets.tableware.KnifeFactory -infinigen.assets.tableware.LidFactory -infinigen.assets.tableware.PanFactory -infinigen.assets.tableware.PlateFactory -infinigen.assets.tableware.PotFactory -infinigen.assets.tableware.SpoonFactory -infinigen.assets.tableware.WineglassFactory -infinigen.assets.tableware.PlantContainerFactory -infinigen.assets.tableware.LargePlantContainerFactory +infinigen.assets.objects.tableware.BottleFactory +infinigen.assets.objects.tableware.BowlFactory +infinigen.assets.objects.tableware.CanFactory +infinigen.assets.objects.tableware.ChopsticksFactory +infinigen.assets.objects.tableware.CupFactory +infinigen.assets.objects.tableware.FoodBagFactory +infinigen.assets.objects.tableware.FoodBoxFactory +infinigen.assets.objects.tableware.ForkFactory +infinigen.assets.objects.tableware.FruitContainerFactory +infinigen.assets.objects.tableware.JarFactory +infinigen.assets.objects.tableware.KnifeFactory +infinigen.assets.objects.tableware.LidFactory +infinigen.assets.objects.tableware.PanFactory +infinigen.assets.objects.tableware.PlateFactory +infinigen.assets.objects.tableware.PotFactory +infinigen.assets.objects.tableware.SpoonFactory +infinigen.assets.objects.tableware.WineglassFactory +infinigen.assets.objects.tableware.PlantContainerFactory +infinigen.assets.objects.tableware.LargePlantContainerFactory -infinigen.assets.wall_decorations.WallArtFactory -infinigen.assets.wall_decorations.BalloonFactory -infinigen.assets.wall_decorations.MirrorFactory +infinigen.assets.objects.wall_decorations.WallArtFactory +infinigen.assets.objects.wall_decorations.BalloonFactory 
+infinigen.assets.objects.wall_decorations.MirrorFactory -infinigen.assets.windows.WindowFactory +infinigen.assets.objects.windows.WindowFactory -infinigen.assets.organizer.basket.BasketBaseFactory -infinigen.assets.organizer.plate_rack.PlateOnRackBaseFactory +infinigen.assets.objects.organizer.basket.BasketBaseFactory +infinigen.assets.objects.organizer.plate_rack.PlateOnRackBaseFactory diff --git a/tests/assets/list_nature_meshes.txt b/tests/assets/list_nature_meshes.txt index f6780e5b7..a16b9dc2a 100644 --- a/tests/assets/list_nature_meshes.txt +++ b/tests/assets/list_nature_meshes.txt @@ -1,119 +1,108 @@ - -# Helper factories, not intended to be used directly +# infinigen.assets.objects.cactus.KalidiumCactusFactory # slow, fails 120sec timeout +# infinigen.assets.objects.corals.BrainCoralFactory # slow, fails 120sec timeout +# infinigen.assets.objects.creatures.CrustaceanFactory # slow, fails 120sec timeout +# infinigen.assets.objects.creatures.JellyfishFactory +# infinigen.assets.objects.creatures.SnakeFactory +# infinigen.assets.objects.tropic_plants.CoconutTreeFactory +# infinigen.assets.objects.tropic_plants.LeafPalmPlantFactory +# infinigen.assets.objects.tropic_plants.LeafPalmTreeFactory +# infinigen.assets.objects.tropic_plants.PalmTreeFactory +#AntSwarmFactory +#BoidSwarmFactory +#ChameleonFactory +#FanCoralFactory +#FrogFactory #FruitFactoryGeneralFruit #GenericTreeFactory - -# Factories which arent fully tested/integrated in current nature generate code -#FrogFactory -#ChameleonFactory +#HerbivoreFactory +#infinigen.assets.objects.creatures.CrabFactory # slow +#infinigen.assets.objects.creatures.DragonflyFactory +#infinigen.assets.objects.creatures.LobsterFactory # slow +#infinigen.assets.objects.creatures.SpinyLobsterFactory # slow +#infinigen.assets.objects.trees.TreeFactory # slow, TODO test with no leaves #LeafFactoryIvy #LizardFactory #OctopusFactory -#AntSwarmFactory -#BoidSwarmFactory -# infinigen.assets.tropic_plants.PalmTreeFactory -# infinigen.assets.creatures.JellyfishFactory -# infinigen.assets.tropic_plants.LeafPalmPlantFactory -# infinigen.assets.tropic_plants.LeafPalmTreeFactory -# infinigen.assets.tropic_plants.CoconutTreeFactory - -# currently a special exception from the "no unapplied geonodes" rule, or else the animation cant play -#infinigen.assets.creatures.DragonflyFactory - -# Slow factories - shouldnt be tested in full in CI -#HerbivoreFactory -#infinigen.assets.trees.TreeFactory # slow, TODO test with no leaves -#FanCoralFactory -#infinigen.assets.creatures.CrabFactory # slow -#infinigen.assets.creatures.LobsterFactory # slow -#infinigen.assets.creatures.SpinyLobsterFactory # slow #ReedMonocotFactory -# infinigen.assets.cactus.KalidiumCactusFactory # slow, fails 120sec timeout -# infinigen.assets.corals.BrainCoralFactory # slow, fails 120sec timeout -# infinigen.assets.creatures.CrustaceanFactory # slow, fails 120sec timeout -# infinigen.assets.creatures.SnakeFactory - -infinigen.assets.cactus.CactusFactory -infinigen.assets.cactus.ColumnarCactusFactory -infinigen.assets.cactus.GlobularCactusFactory - -infinigen.assets.cactus.PrickyPearCactusFactory -infinigen.assets.corals.BushCoralFactory -infinigen.assets.corals.CauliflowerCoralFactory -infinigen.assets.corals.CoralFactory -infinigen.assets.corals.ElkhornCoralFactory -infinigen.assets.corals.HoneycombCoralFactory -infinigen.assets.corals.LeatherCoralFactory -infinigen.assets.corals.StarCoralFactory -infinigen.assets.corals.TableCoralFactory -infinigen.assets.corals.TubeCoralFactory 
-infinigen.assets.corals.TwigCoralFactory -infinigen.assets.creatures.BeetleFactory -infinigen.assets.creatures.BirdFactory -infinigen.assets.creatures.CarnivoreFactory -infinigen.assets.creatures.FishFactory -infinigen.assets.creatures.FlyingBirdFactory -infinigen.assets.debris.LichenFactory -infinigen.assets.debris.MossFactory -infinigen.assets.debris.PineNeedleFactory -infinigen.assets.fruits.FruitFactoryApple -infinigen.assets.fruits.FruitFactoryBlackberry -infinigen.assets.fruits.FruitFactoryCoconutgreen -infinigen.assets.fruits.FruitFactoryCoconuthairy -infinigen.assets.fruits.FruitFactoryCompositional -infinigen.assets.fruits.FruitFactoryDurian -infinigen.assets.fruits.FruitFactoryPineapple -infinigen.assets.fruits.FruitFactoryStarfruit -infinigen.assets.fruits.FruitFactoryStrawberry -infinigen.assets.grassland.FlowerFactory -infinigen.assets.grassland.FlowerPlantFactory -infinigen.assets.grassland.GrassTuftFactory -infinigen.assets.leaves.LeafFactory -infinigen.assets.leaves.LeafFactoryBroadleaf -infinigen.assets.leaves.LeafFactoryGinko -infinigen.assets.leaves.LeafFactoryMaple -infinigen.assets.leaves.LeafFactoryPine -infinigen.assets.leaves.LeafFactoryV2 -infinigen.assets.lighting.CausticsLampFactory -infinigen.assets.mollusk.AugerFactory -infinigen.assets.mollusk.ClamFactory -infinigen.assets.mollusk.ConchFactory -infinigen.assets.mollusk.MolluskFactory -infinigen.assets.mollusk.MusselFactory -infinigen.assets.mollusk.NautilusFactory -infinigen.assets.mollusk.ScallopFactory -infinigen.assets.mollusk.VoluteFactory -infinigen.assets.monocot.AgaveMonocotFactory -infinigen.assets.monocot.BananaMonocotFactory -infinigen.assets.monocot.GrassesMonocotFactory -infinigen.assets.monocot.KelpMonocotFactory -infinigen.assets.monocot.MaizeMonocotFactory -infinigen.assets.monocot.MonocotFactory -infinigen.assets.monocot.PineconeFactory -infinigen.assets.monocot.TaroMonocotFactory -infinigen.assets.monocot.TussockMonocotFactory -infinigen.assets.monocot.VeratrumMonocotFactory -infinigen.assets.monocot.WheatEarMonocotFactory -infinigen.assets.monocot.WheatMonocotFactory -infinigen.assets.mushroom.MushroomFactory -infinigen.assets.rocks.BlenderRockFactory -infinigen.assets.rocks.BoulderFactory -infinigen.assets.rocks.GlowingRocksFactory -infinigen.assets.small_plants.FernFactory -infinigen.assets.small_plants.SnakePlantFactory -infinigen.assets.small_plants.SpiderPlantFactory -infinigen.assets.small_plants.SucculentFactory -infinigen.assets.trees.BushFactory -infinigen.assets.trees.TreeFlowerFactory -infinigen.assets.tropic_plants.LeafBananaTreeFactory -infinigen.assets.tropic_plants.PlantBananaTreeFactory -infinigen.assets.underwater.SeaweedFactory -infinigen.assets.underwater.UrchinFactory -infinigen.assets.weather.AltocumulusFactory -infinigen.assets.weather.CloudFactory -infinigen.assets.weather.CumulonimbusFactory -infinigen.assets.weather.CumulusFactory -infinigen.assets.weather.DustMoteFactory -infinigen.assets.weather.RaindropFactory -infinigen.assets.weather.SnowflakeFactory -infinigen.assets.weather.StratocumulusFactory \ No newline at end of file +infinigen.assets.objects.cactus.CactusFactory +infinigen.assets.objects.cactus.ColumnarCactusFactory +infinigen.assets.objects.cactus.GlobularCactusFactory +infinigen.assets.objects.cactus.PrickyPearCactusFactory +infinigen.assets.objects.cloud.AltocumulusFactory +infinigen.assets.objects.cloud.CloudFactory +infinigen.assets.objects.cloud.CumulonimbusFactory +infinigen.assets.objects.cloud.CumulusFactory 
+infinigen.assets.objects.cloud.StratocumulusFactory +infinigen.assets.objects.corals.BushCoralFactory +infinigen.assets.objects.corals.CauliflowerCoralFactory +infinigen.assets.objects.corals.CoralFactory +infinigen.assets.objects.corals.ElkhornCoralFactory +infinigen.assets.objects.corals.HoneycombCoralFactory +infinigen.assets.objects.corals.LeatherCoralFactory +infinigen.assets.objects.corals.StarCoralFactory +infinigen.assets.objects.corals.TableCoralFactory +infinigen.assets.objects.corals.TubeCoralFactory +infinigen.assets.objects.corals.TwigCoralFactory +infinigen.assets.objects.creatures.BeetleFactory +infinigen.assets.objects.creatures.BirdFactory +infinigen.assets.objects.creatures.CarnivoreFactory +infinigen.assets.objects.creatures.FishFactory +infinigen.assets.objects.creatures.FlyingBirdFactory +infinigen.assets.objects.fruits.FruitFactoryApple +infinigen.assets.objects.fruits.FruitFactoryBlackberry +infinigen.assets.objects.fruits.FruitFactoryCoconutgreen +infinigen.assets.objects.fruits.FruitFactoryCoconuthairy +infinigen.assets.objects.fruits.FruitFactoryCompositional +infinigen.assets.objects.fruits.FruitFactoryDurian +infinigen.assets.objects.fruits.FruitFactoryPineapple +infinigen.assets.objects.fruits.FruitFactoryStarfruit +infinigen.assets.objects.fruits.FruitFactoryStrawberry +infinigen.assets.objects.grassland.FlowerFactory +infinigen.assets.objects.grassland.FlowerPlantFactory +infinigen.assets.objects.grassland.GrassTuftFactory +infinigen.assets.objects.leaves.LeafFactory +infinigen.assets.objects.leaves.LeafFactoryBroadleaf +infinigen.assets.objects.leaves.LeafFactoryGinko +infinigen.assets.objects.leaves.LeafFactoryMaple +infinigen.assets.objects.leaves.LeafFactoryPine +infinigen.assets.objects.leaves.LeafFactoryV2 +infinigen.assets.objects.mollusk.AugerFactory +infinigen.assets.objects.mollusk.ClamFactory +infinigen.assets.objects.mollusk.ConchFactory +infinigen.assets.objects.mollusk.MolluskFactory +infinigen.assets.objects.mollusk.MusselFactory +infinigen.assets.objects.mollusk.NautilusFactory +infinigen.assets.objects.mollusk.ScallopFactory +infinigen.assets.objects.mollusk.VoluteFactory +infinigen.assets.objects.monocot.AgaveMonocotFactory +infinigen.assets.objects.monocot.BananaMonocotFactory +infinigen.assets.objects.monocot.GrassesMonocotFactory +infinigen.assets.objects.monocot.KelpMonocotFactory +infinigen.assets.objects.monocot.MaizeMonocotFactory +infinigen.assets.objects.monocot.MonocotFactory +infinigen.assets.objects.monocot.PineconeFactory +infinigen.assets.objects.monocot.TaroMonocotFactory +infinigen.assets.objects.monocot.TussockMonocotFactory +infinigen.assets.objects.monocot.VeratrumMonocotFactory +infinigen.assets.objects.monocot.WheatEarMonocotFactory +infinigen.assets.objects.monocot.WheatMonocotFactory +infinigen.assets.objects.mushroom.MushroomFactory +infinigen.assets.objects.particles.DustMoteFactory +infinigen.assets.objects.particles.LichenFactory +infinigen.assets.objects.particles.MossFactory +infinigen.assets.objects.particles.PineNeedleFactory +infinigen.assets.objects.particles.RaindropFactory +infinigen.assets.objects.particles.SnowflakeFactory +infinigen.assets.objects.rocks.BlenderRockFactory +infinigen.assets.objects.rocks.BoulderFactory +infinigen.assets.objects.rocks.GlowingRocksFactory +infinigen.assets.objects.small_plants.FernFactory +infinigen.assets.objects.small_plants.SnakePlantFactory +infinigen.assets.objects.small_plants.SpiderPlantFactory +infinigen.assets.objects.small_plants.SucculentFactory 
+infinigen.assets.objects.trees.BushFactory +infinigen.assets.objects.trees.TreeFlowerFactory +infinigen.assets.objects.tropic_plants.LeafBananaTreeFactory +infinigen.assets.objects.tropic_plants.PlantBananaTreeFactory +infinigen.assets.objects.underwater.SeaweedFactory +infinigen.assets.objects.underwater.UrchinFactory \ No newline at end of file diff --git a/tests/assets/test_materials_basic.py b/tests/assets/test_materials_basic.py index ff654c17b..0b50e5fb5 100644 --- a/tests/assets/test_materials_basic.py +++ b/tests/assets/test_materials_basic.py @@ -4,21 +4,17 @@ # Authors: Alexander Raistrick -from pathlib import Path -import importlib -import pytest import bpy -import gin +import pytest from infinigen.core.util import blender as butil - -from infinigen_examples.util.test_utils import (setup_gin, load_txt_list, import_item) +from infinigen_examples.util.test_utils import import_item, load_txt_list, setup_gin def check_material_runs(pathspec): butil.clear_scene() - bpy.ops.mesh.primitive_ico_sphere_add(radius=.8, subdivisions=5) + bpy.ops.mesh.primitive_ico_sphere_add(radius=0.8, subdivisions=5) asset = bpy.context.active_object mat = import_item(pathspec) @@ -27,21 +23,26 @@ def check_material_runs(pathspec): mat.apply(asset) # should not crash for input LIST of objects - bpy.ops.mesh.primitive_ico_sphere_add(radius=.8, subdivisions=5) + bpy.ops.mesh.primitive_ico_sphere_add(radius=0.8, subdivisions=5) asset2 = bpy.context.active_object - mat.apply([asset, asset2]) - - + mat.apply([asset, asset2]) @pytest.mark.nature -@pytest.mark.parametrize('pathspec', load_txt_list('tests/assets/list_nature_materials.txt')) +@pytest.mark.parametrize( + "pathspec", load_txt_list("tests/assets/list_nature_materials.txt") +) def test_nature_material_runs(pathspec, **kwargs): - setup_gin('infinigen_examples/configs_nature') + setup_gin("infinigen_examples/configs_nature", ["base_nature.gin"]) check_material_runs(pathspec) -@pytest.mark.parametrize('pathspec', load_txt_list('tests/assets/list_indoor_materials.txt')) +@pytest.mark.parametrize( + "pathspec", load_txt_list("tests/assets/list_indoor_materials.txt") +) def test_indoor_material_runs(pathspec, **kwargs): - setup_gin('infinigen_examples/configs_indoor') + setup_gin( + ["infinigen_examples/configs_indoor", "infinigen_examples/configs_nature"], + ["base_indoors.gin"], + ) check_material_runs(pathspec) diff --git a/tests/assets/test_meshes_basic.py b/tests/assets/test_meshes_basic.py index f6a3a2e95..9ad36574a 100644 --- a/tests/assets/test_meshes_basic.py +++ b/tests/assets/test_meshes_basic.py @@ -6,85 +6,91 @@ from pathlib import Path -import pytest import bpy -import gin -from math import prod +import pytest +from infinigen.core import tagging from infinigen.core.util import blender as butil -from infinigen.core import tagging, tags as t +from infinigen_examples.util.test_utils import import_item, load_txt_list, setup_gin -from infinigen_examples.util.test_utils import ( - setup_gin, - import_item, - load_txt_list, -) def check_factory_runs(fac_class, seed1=0, seed2=0, distance_m=50): - butil.clear_scene() fac = fac_class(seed1) asset = fac.spawn_asset(seed2, distance=distance_m) if not isinstance(asset, bpy.types.Object): - raise ValueError(f'{asset.name=} had {type(asset)=}') - - if tuple(asset.location) != (0,0,0): - raise ValueError(f'{asset.location=}') - if tuple(asset.rotation_euler) != (0,0,0): - raise ValueError(f'{asset.rotation_euler=}') - if tuple(asset.scale) != (1,1,1): - raise ValueError(f'{asset.scale=}') - - # currently, 
assets may have objects as '.children'. + raise ValueError(f"{asset.name=} had {type(asset)=}") + + if tuple(asset.location) != (0, 0, 0): + raise ValueError(f"{asset.location=}") + if tuple(asset.rotation_euler) != (0, 0, 0): + raise ValueError(f"{asset.rotation_euler=}") + if tuple(asset.scale) != (1, 1, 1): + raise ValueError(f"{asset.scale=}") + + # currently, assets may have objects as '.children'. # This will eventually be removed except well-documented special cases for o in butil.iter_object_tree(asset): - for i, slot in enumerate(o.material_slots): if slot.material is None: - raise ValueError(f'In {asset.name=} {slot=} had {slot.material=}') + raise ValueError(f"In {asset.name=} {slot=} had {slot.material=}") for mod in asset.modifiers: - if ( - mod.type != 'NODES' - and mod.type != 'SUBSURF' - ): - # currently we allow unapplied non-modifiers for things like time-based deformation on - # seaweed etc. NODES and SUBSURF should still always be applied. + if mod.type != "NODES" and mod.type != "SUBSURF": + # currently we allow unapplied non-modifiers for things like time-based deformation on + # seaweed etc. NODES and SUBSURF should still always be applied. continue - raise ValueError(f'In {asset.name=} {o.name=} had unapplied modifier {mod.name=} {mod.type=} ') - - if o.type != 'MESH': + raise ValueError( + f"In {asset.name=} {o.name=} had unapplied modifier {mod.name=} {mod.type=} " + ) + + if o.type != "MESH": continue if o.data is None: - raise ValueError(f'In {asset.name=} {o.name=} had {o.data=}') + raise ValueError(f"In {asset.name=} {o.name=} had {o.data=}") if len(o.data.vertices) <= 2: - raise ValueError(f'{asset.name=} had {len(o.data.vertices)} vertices, usually indicates failed operation') - + raise ValueError( + f"{asset.name=} had {len(o.data.vertices)} vertices, usually indicates failed operation" + ) + if tagging.COMBINED_ATTR_NAME in o.data.attributes: attr = o.data.attributes[tagging.COMBINED_ATTR_NAME] - if attr.domain != 'FACE': - raise ValueError(f'In {asset.name=} had {attr.domain=} for {attr.name=}. Should be FACE') + if attr.domain != "FACE": + raise ValueError( + f"In {asset.name=} had {attr.domain=} for {attr.name=}. 
Should be FACE" + ) - # some objects like the older LeafFactory - #if len(o.data.polygons) < 2: + # some objects like the older LeafFactory + # if len(o.data.polygons) < 2: # raise ValueError(f'{asset.name=} had {len(o.data.polygons)} polygons, usually indicates failed operation') for attr in o.data.attributes: if attr.name.startswith(tagging.PREFIX): - raise ValueError(f'In {asset.name}, {o.name=} had un-merged tag-system tag {attr.name=}, need to call {tagging.tag_system.relabel_obj}') + raise ValueError( + f"In {asset.name}, {o.name=} had un-merged tag-system tag {attr.name=}, need to call {tagging.tag_system.relabel_obj}" + ) + @pytest.mark.nature -@pytest.mark.parametrize('pathspec', load_txt_list(Path(__file__).parent/'list_nature_meshes.txt')) +@pytest.mark.parametrize( + "pathspec", load_txt_list(Path(__file__).parent / "list_nature_meshes.txt") +) def test_nature_factory_runs(pathspec, **kwargs): - setup_gin('infinigen_examples/configs_nature') + setup_gin("infinigen_examples/configs_nature", configs=["base_nature.gin"]) fac_class = import_item(pathspec) check_factory_runs(fac_class, **kwargs) -@pytest.mark.parametrize('pathspec', load_txt_list(Path(__file__).parent/'list_indoor_meshes.txt')) + +@pytest.mark.parametrize( + "pathspec", load_txt_list(Path(__file__).parent / "list_indoor_meshes.txt") +) def test_indoor_factory_runs(pathspec, **kwargs): - setup_gin('infinigen_examples/configs_indoor') + setup_gin( + ["infinigen_examples/configs_indoor", "infinigen_examples/configs_nature"], + configs=["base_indoors.gin"], + ) fac_class = import_item(pathspec) check_factory_runs(fac_class, **kwargs) diff --git a/tests/assets/test_placeholders.py b/tests/assets/test_placeholders.py index b1b03f3e5..d7f9f4e70 100644 --- a/tests/assets/test_placeholders.py +++ b/tests/assets/test_placeholders.py @@ -3,23 +3,14 @@ # Authors: David Yan -from collections import OrderedDict -import bpy +import numpy as np import pytest -from infinigen.core.constraints import ( - usage_lookup, - constraint_language as cl -) - -from infinigen.core.constraints.example_solver.geometry import dof - -from infinigen_examples.indoor_asset_semantics import home_asset_usage - +from infinigen.core import tags as t +from infinigen.core.constraints import usage_lookup from infinigen.core.util import blender as butil -from infinigen.core import tagging, tags as t -import numpy as np +from infinigen_examples.indoor_asset_semantics import home_asset_usage def get_real_placeholder_facs(): @@ -28,6 +19,7 @@ def get_real_placeholder_facs(): pholder_facs = usage_lookup.factories_for_usage({t.Semantics.RealPlaceholder}) return sorted(list(pholder_facs), key=lambda x: x.__name__) + def get_asset_facs(): used_as = home_asset_usage() usage_lookup.initialize_from_dict(used_as) @@ -35,34 +27,41 @@ def get_asset_facs(): return sorted(list(asset_facs), key=lambda x: x.__name__) -@pytest.mark.skip # TODO re-enable. Too many assets fail this -@pytest.mark.parametrize('fac', get_real_placeholder_facs()) +@pytest.mark.skip # TODO re-enable. 
Too many assets fail this +@pytest.mark.parametrize("fac", get_real_placeholder_facs()) def test_real_placeholders(fac): butil.clear_scene() - placeholder = fac(0).spawn_placeholder(0, loc=(0,0,0), rot=(0,0,0)) + placeholder = fac(0).spawn_placeholder(0, loc=(0, 0, 0), rot=(0, 0, 0)) asset = fac(0).spawn_asset(0) - assert np.abs(placeholder.dimensions.x - asset.dimensions.x) <= 0.05 * np.abs(asset.dimensions.x), "X dimension of placeholder not within 5 percent of mesh" - assert np.abs(placeholder.dimensions.y - asset.dimensions.y) <= 0.05 * np.abs(asset.dimensions.y), "Y dimension of placeholder not within 5 percent of mesh" - assert np.abs(placeholder.dimensions.z - asset.dimensions.z) <= 0.05 * np.abs(asset.dimensions.z), "Z dimension of placeholder not within 5 percent of mesh" - asset_min_corner = np.array(asset.bound_box[0]) # loXloYloZ https://blender.stackexchange.com/questions/32283/what-are-all-values-in-bound-box - asset_max_corner = np.array(asset.bound_box[6]) # hiXhiYhiZ + assert np.abs(placeholder.dimensions.x - asset.dimensions.x) <= 0.05 * np.abs( + asset.dimensions.x + ), "X dimension of placeholder not within 5 percent of mesh" + assert np.abs(placeholder.dimensions.y - asset.dimensions.y) <= 0.05 * np.abs( + asset.dimensions.y + ), "Y dimension of placeholder not within 5 percent of mesh" + assert np.abs(placeholder.dimensions.z - asset.dimensions.z) <= 0.05 * np.abs( + asset.dimensions.z + ), "Z dimension of placeholder not within 5 percent of mesh" + asset_min_corner = np.array( + asset.bound_box[0] + ) # loXloYloZ https://blender.stackexchange.com/questions/32283/what-are-all-values-in-bound-box + asset_max_corner = np.array(asset.bound_box[6]) # hiXhiYhiZ ph_min_corner = np.array(placeholder.bound_box[0]) ph_max_corner = np.array(placeholder.bound_box[6]) for i in range(3): - assert asset_min_corner[i] <= ph_max_corner[i] and asset_min_corner[i] >= ph_min_corner[i], "Asset not completely contained within placeholder" - assert asset_max_corner[i] <= ph_max_corner[i] and asset_max_corner[i] >= ph_min_corner[i], "Asset not completely contained within placeholder" + assert ( + asset_min_corner[i] <= ph_max_corner[i] + and asset_min_corner[i] >= ph_min_corner[i] + ), "Asset not completely contained within placeholder" + assert ( + asset_max_corner[i] <= ph_max_corner[i] + and asset_max_corner[i] >= ph_min_corner[i] + ), "Asset not completely contained within placeholder" -@pytest.mark.parametrize('fac', get_asset_facs()) +@pytest.mark.parametrize("fac", get_asset_facs()) def test_generated_placeholders(fac): butil.clear_scene() - fac(0).spawn_placeholder(0, loc=(0,0,0), rot=(0,0,0)) + fac(0).spawn_placeholder(0, loc=(0, 0, 0), rot=(0, 0, 0)) fac(0).spawn_asset(0) return - - - - - - - diff --git a/tests/constraints/test_constraint_bounding.py b/tests/constraints/test_constraint_bounding.py index d65d552c2..c75d73639 100644 --- a/tests/constraints/test_constraint_bounding.py +++ b/tests/constraints/test_constraint_bounding.py @@ -2,38 +2,33 @@ # This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory # of this source tree. 
-# Authors: +# Authors: # - Alexander Raistrick: primary author # - David Yan: bounding for inequalities / expressions -from itertools import chain -from functools import partial -from pprint import pprint import pytest -import numpy as np -from infinigen.core.constraints import ( - constraint_language as cl, - reasoning as r -) from infinigen.core import tags as t +from infinigen.core.constraints import constraint_language as cl +from infinigen.core.constraints import reasoning as r + def test_bound_eq(): bound1 = r.Bound(r.Domain(set())) bound2 = r.Bound(r.Domain(set())) assert bound1 == bound2 -def test_constant(): +def test_constant(): expr = cl.constant(1) * cl.constant(2) + cl.constant(3) < cl.constant(3) assert r.is_constant(expr) + def test_bounds_simple(): - furniture = cl.tagged(cl.scene(), tags={t.Semantics.Furniture}) count = cl.count(furniture) - + bounds = r.constraint_bounds(cl.in_range(count, 1, 5)) assert bounds == [r.Bound(r.Domain({t.Semantics.Furniture}), 1, 5)] @@ -48,7 +43,8 @@ def test_bounds_simple(): assert r.constraint_bounds(3 >= count) == upper assert r.constraint_bounds(1 <= count) == lower -@pytest.mark.skip # no longer supported for timebeing + +@pytest.mark.skip # no longer supported for timebeing def test_bounds_compound(): chair = cl.tagged(cl.scene(), tags={t.Semantics.Chair}) table = cl.tagged(cl.scene(), tags={t.Semantics.Table}) @@ -60,14 +56,16 @@ def test_bounds_compound(): assert bounds == [ r.Bound(r.Domain({t.Semantics.Chair}), high=11), - r.Bound(r.Domain({t.Semantics.Chair}), low=5) + r.Bound(r.Domain({t.Semantics.Chair}), low=5), ] - bounds2 = r.constraint_bounds(cl.in_range(cl.count(chair), cl.count(table), cl.count(table) * 3), scene_state) + bounds2 = r.constraint_bounds( + cl.in_range(cl.count(chair), cl.count(table), cl.count(table) * 3), scene_state + ) assert bounds2 == [r.Bound(r.Domain({t.Semantics.Chair}), 4, 12)] + def test_bounds_and(): - tags = {t.Semantics.Furniture} furniture = cl.tagged(cl.scene(), tags=tags) count = cl.count(furniture) @@ -79,18 +77,18 @@ def test_bounds_and(): r.Bound(r.Domain(tags), low=2), ] -def test_bounds_multilevel(): +def test_bounds_multilevel(): furniture = cl.tagged(cl.scene(), tags={t.Semantics.Furniture}) sofa = cl.tagged(furniture, tags={t.Semantics.Seating}) cons = cl.count(sofa) <= 3 - + assert r.constraint_bounds(cons) == [ r.Bound(r.Domain({t.Semantics.Furniture, t.Semantics.Seating}), high=3) -] + ] -def test_bounds_arithmetic(): +def test_bounds_arithmetic(): tags = {t.Semantics.Furniture} furniture = cl.tagged(cl.scene(), tags=tags) count = cl.count(furniture) @@ -99,84 +97,75 @@ def test_bounds_arithmetic(): bounds = r.constraint_bounds(cons) assert bounds == [r.Bound(r.Domain(tags), low=0, high=4)] -def test_bounds_domain_AnyRelation(): +def test_bounds_domain_AnyRelation(): bedrooms = cl.scene().tagged({t.Semantics.Bedroom}) beds = cl.scene().tagged({t.Semantics.Bed}) - all_bedrooms_beds = bedrooms.all(lambda r: - cl.related_to(beds, r, cl.SupportedBy()) - .count().in_range(1, 2) + all_bedrooms_beds = bedrooms.all( + lambda r: cl.related_to(beds, r, cl.SupportedBy()).count().in_range(1, 2) ) - + bd = r.Domain({t.Semantics.Bedroom}) bed_in_room = r.Domain({t.Semantics.Bed}, relations=[(cl.SupportedBy(), bd)]) res = r.constraint_bounds(all_bedrooms_beds) assert res == [r.Bound(bed_in_room, low=1, high=2)] -def test_bounds_forall(): +def test_bounds_forall(): rooms = cl.scene().tagged(t.Semantics.Room) furniture = cl.scene().tagged(t.Semantics.Furniture) small_obj = 
cl.scene().tagged(t.Semantics.OfficeShelfItem) rel = cl.SupportedBy() - - c = rooms.all(lambda room: ( - furniture.related_to(room, rel).count().in_range(1, 2) * - furniture.related_to(room, rel).all(lambda stor: - small_obj.related_to(stor, rel).count().in_range(5, 10) + + c = rooms.all( + lambda room: ( + furniture.related_to(room, rel).count().in_range(1, 2) + * furniture.related_to(room, rel).all( + lambda stor: small_obj.related_to(stor, rel).count().in_range(5, 10) + ) ) - )) + ) bounds = r.constraint_bounds(c) - furn_room = r.Domain({t.Semantics.Furniture}, relations=[(rel, r.Domain({t.Semantics.Room}))]) - item_furn_room = r.Domain({t.Semantics.OfficeShelfItem}, relations=[(rel, furn_room)]) + furn_room = r.Domain( + {t.Semantics.Furniture}, relations=[(rel, r.Domain({t.Semantics.Room}))] + ) + item_furn_room = r.Domain( + {t.Semantics.OfficeShelfItem}, relations=[(rel, furn_room)] + ) assert bounds == [ r.Bound(furn_room, 1, 2), r.Bound(item_furn_room, 5, 10), ] -def test_bound_implied_rel(): +def test_bound_implied_rel(): s = cl.scene() against = cl.StableAgainst(set(), set()) cons = ( - s.related_to(s, cl.AnyRelation()) - .related_to(s, against) - .count().in_range(1, 3) + s.related_to(s, cl.AnyRelation()).related_to(s, against).count().in_range(1, 3) ) bounds = r.constraint_bounds(cons) - assert bounds == [ - r.Bound( - r.Domain(set(), [(against, r.Domain())]), - low=1, high=3 - ) - ] + assert bounds == [r.Bound(r.Domain(set(), [(against, r.Domain())]), low=1, high=3)] cons = ( - s.related_to(s, against) - .related_to(s, cl.AnyRelation()) - .count().in_range(1, 3) + s.related_to(s, against).related_to(s, cl.AnyRelation()).count().in_range(1, 3) ) bounds = r.constraint_bounds(cons) - assert bounds == [ - r.Bound( - r.Domain(set(), [(against, r.Domain())]), - low=1, high=3 - ) - ] + assert bounds == [r.Bound(r.Domain(set(), [(against, r.Domain())]), low=1, high=3)] -def test_bound_implied_rel_forall(): +def test_bound_implied_rel_forall(): s = cl.scene() - + rel = cl.Touching() all_dom = r.Domain() @@ -188,4 +177,6 @@ def test_bound_implied_rel_forall(): bounds = r.constraint_bounds(cons) - assert bounds[0].domain == r.Domain({t.Semantics.OfficeShelfItem}, [(rel, r.Domain())]) \ No newline at end of file + assert bounds[0].domain == r.Domain( + {t.Semantics.OfficeShelfItem}, [(rel, r.Domain())] + ) diff --git a/tests/constraints/test_constraint_domain.py b/tests/constraints/test_constraint_domain.py index de24559e4..6712b1bde 100644 --- a/tests/constraints/test_constraint_domain.py +++ b/tests/constraints/test_constraint_domain.py @@ -4,49 +4,45 @@ # Authors: Alexander Raistrick -import logging -from infinigen.core.constraints import ( - reasoning as r, - constraint_language as cl -) -from infinigen.core.constraints.constraint_language import Semantics from infinigen.core import tags as t +from infinigen.core.constraints import constraint_language as cl +from infinigen.core.constraints import reasoning as r +from infinigen.core.constraints.constraint_language import Semantics -from infinigen_examples.util import constraint_util as cu def test_domain_obj(): - furniture = r.Domain({t.Semantics.Furniture}) - + sofas = r.Domain({t.Semantics.Furniture, t.Semantics.Seating}) assert sofas.implies(furniture) assert not furniture.implies(sofas) - furniture_in_livingroom = r.Domain({t.Semantics.Furniture}, relations=[(cl.SupportedBy(), r.Domain({t.Semantics.LivingRoom}))]) + furniture_in_livingroom = r.Domain( + {t.Semantics.Furniture}, + relations=[(cl.SupportedBy(), 
r.Domain({t.Semantics.LivingRoom}))], + ) assert not furniture.implies(furniture_in_livingroom) assert furniture_in_livingroom.implies(furniture) - furniture_in_bathroom = r.Domain({t.Semantics.Furniture}, relations=[(cl.SupportedBy(), r.Domain({t.Semantics.Bathroom}))]) + furniture_in_bathroom = r.Domain( + {t.Semantics.Furniture}, + relations=[(cl.SupportedBy(), r.Domain({t.Semantics.Bathroom}))], + ) assert not furniture_in_livingroom.implies(furniture_in_bathroom) assert not furniture_in_bathroom.implies(furniture_in_livingroom) -def test_domain_implies_complex(): +def test_domain_implies_complex(): against_wall = cl.StableAgainst( - child_tags={t.Subpart.Back}, - parent_tags={t.Subpart.Wall, t.Subpart.Interior}, - margin=0 + child_tags={t.Subpart.Back}, + parent_tags={t.Subpart.Wall, t.Subpart.Interior}, + margin=0, ) d = r.Domain( - tags={Semantics.Storage}, - relations=[ - ( - against_wall, - r.Domain(tags={Semantics.Room}, relations=[]) - ) - ] + tags={Semantics.Storage}, + relations=[(against_wall, r.Domain(tags={Semantics.Room}, relations=[]))], ) assert d.implies(d) @@ -54,55 +50,66 @@ def test_domain_implies_complex(): assert not r.Domain(set(), d.relations).implies(d) assert d.implies(r.Domain({t.Semantics.Storage})) assert d.implies(r.Domain(set(), d.relations)) - + # "storage related any way to any thing" is less specific generalize_relation = r.Domain( - {t.Semantics.Storage}, - relations=[(cl.AnyRelation(), r.Domain())]) + {t.Semantics.Storage}, relations=[(cl.AnyRelation(), r.Domain())] + ) assert d.implies(generalize_relation) assert not generalize_relation.implies(d) - # "storage related any way but specifically to bedroom" + # "storage related any way but specifically to bedroom" # is both more and less specific so not a subset either way different_relation = r.Domain( - {t.Semantics.Storage}, - relations=[(cl.AnyRelation(), r.Domain({t.Semantics.Room, t.Semantics.Bedroom}))] + {t.Semantics.Storage}, + relations=[ + (cl.AnyRelation(), r.Domain({t.Semantics.Room, t.Semantics.Bedroom})) + ], ) assert not d.implies(different_relation) assert not different_relation.implies(d) # "storage against a bedroom wall" is more specific against_bedroom_wall = r.Domain( - {t.Semantics.Storage}, - relations=[(against_wall, r.Domain({t.Semantics.Room, t.Semantics.Bedroom}))] + {t.Semantics.Storage}, + relations=[(against_wall, r.Domain({t.Semantics.Room, t.Semantics.Bedroom}))], ) assert against_bedroom_wall.implies(d) assert not d.implies(against_bedroom_wall) -def test_domain_var_substitute(): - var = t.Variable('x') - start = r.Domain({t.Subpart.Interior, var}, relations=[(cl.AnyRelation(), r.Domain())]) - subfor = r.Domain({t.Semantics.Room, t.Semantics.Bedroom}, relations=[(cl.Touching(), r.Domain({t.Semantics.Furniture}))]) +def test_domain_var_substitute(): + var = t.Variable("x") + start = r.Domain( + {t.Subpart.Interior, var}, relations=[(cl.AnyRelation(), r.Domain())] + ) + subfor = r.Domain( + {t.Semantics.Room, t.Semantics.Bedroom}, + relations=[(cl.Touching(), r.Domain({t.Semantics.Furniture}))], + ) assert r.domain_tag_substitute(start, var, subfor) == r.Domain( {t.Semantics.Room, t.Semantics.Bedroom, t.Subpart.Interior}, - relations=[ - (cl.Touching(), r.Domain({t.Semantics.Furniture})) - ] + relations=[(cl.Touching(), r.Domain({t.Semantics.Furniture}))], ) - start2 = r.Domain({t.Subpart.Interior, var}, relations=[(cl.AnyRelation(), r.Domain({t.Semantics.Lighting}))]) + start2 = r.Domain( + {t.Subpart.Interior, var}, + relations=[(cl.AnyRelation(), 
r.Domain({t.Semantics.Lighting}))], + ) assert r.domain_tag_substitute(start2, var, subfor) == r.Domain( {t.Semantics.Room, t.Semantics.Bedroom, t.Subpart.Interior}, relations=[ - (cl.AnyRelation(), r.Domain({t.Semantics.Lighting})), # not implied so gets kept - (cl.Touching(), r.Domain({t.Semantics.Furniture})) - ] + ( + cl.AnyRelation(), + r.Domain({t.Semantics.Lighting}), + ), # not implied so gets kept + (cl.Touching(), r.Domain({t.Semantics.Furniture})), + ], ) -def test_domain_intersect_tags(): +def test_domain_intersect_tags(): obj_types = {Semantics.Object, Semantics.Room, Semantics.Cutter} obj = cl.scene()[Semantics.Object].excludes(obj_types) room = cl.scene()[Semantics.Room].excludes(obj_types) @@ -116,19 +123,24 @@ def test_domain_intersect_tags(): assert not ld.intersects(md) assert not ld.intersects(md) + def test_domain_construction_complex(): - dom = r.Domain() dom.add_relation(cl.AnyRelation(), r.Domain({t.Semantics.Object}, [])) - dom.add_relation(cl.StableAgainst(), r.Domain({t.Semantics.Object, t.Variable('room')}, [])) + dom.add_relation( + cl.StableAgainst(), r.Domain({t.Semantics.Object, t.Variable("room")}, []) + ) dom.add_relation(-cl.AnyRelation(), r.Domain({t.Semantics.Room}, [])) - assert dom.relations[0] == (cl.StableAgainst(), r.Domain({t.Semantics.Object, t.Variable('room')}, [])) + assert dom.relations[0] == ( + cl.StableAgainst(), + r.Domain({t.Semantics.Object, t.Variable("room")}, []), + ) assert dom.relations[1] == (-cl.AnyRelation(), r.Domain({t.Semantics.Room}, [])) assert len(dom.relations) == 2 -def test_domain_construction_complex_2(): +def test_domain_construction_complex_2(): rd1 = (cl.AnyRelation(), r.Domain({t.Semantics.Room, t.Semantics.DiningRoom})) rd2 = (cl.StableAgainst(), r.Domain({t.Semantics.Room})) @@ -137,14 +149,14 @@ def test_domain_construction_complex_2(): dom = r.Domain() dom.add_relation(*rd1) dom.add_relation(*rd2) - + print("DOM RESULT", dom) assert dom.relations == [ (cl.StableAgainst(), r.Domain({t.Semantics.Room, t.Semantics.DiningRoom})) ] -def test_domain_satisfies(): +def test_domain_satisfies(): a = r.Domain({t.Semantics.Object, -t.Semantics.Room}) b = r.Domain({t.Semantics.Object, t.Semantics.Room}) assert not b.satisfies(a) @@ -155,7 +167,9 @@ def test_domain_satisfies(): a.add_relation(cl.StableAgainst(), r.Domain({t.Semantics.Room})) assert not b.satisfies(a) - b.add_relation(cl.StableAgainst(), r.Domain({t.Semantics.Room, t.Semantics.DiningRoom})) + b.add_relation( + cl.StableAgainst(), r.Domain({t.Semantics.Room, t.Semantics.DiningRoom}) + ) assert b.satisfies(a) a.add_relation(-cl.AnyRelation(), r.Domain({t.Semantics.Object})) @@ -164,36 +178,49 @@ def test_domain_satisfies(): b.add_relation(cl.StableAgainst(), r.Domain({t.Semantics.Object})) assert not b.satisfies(a) -def test_domain_satisfies_2(): +def test_domain_satisfies_2(): res_dom = r.Domain( - {Semantics.Object, Semantics.Storage, -Semantics.Room}, [ + {Semantics.Object, Semantics.Storage, -Semantics.Room}, + [ ( cl.StableAgainst( - {t.Subpart.Bottom, -t.Subpart.Top, -t.Subpart.Back, -t.Subpart.Front}, - {t.Subpart.Visible, t.Subpart.SupportSurface, -t.Subpart.Ceiling, -t.Subpart.Wall} - ), - r.Domain({Semantics.DiningRoom, Semantics.Room, -Semantics.Object}, []) + { + t.Subpart.Bottom, + -t.Subpart.Top, + -t.Subpart.Back, + -t.Subpart.Front, + }, + { + t.Subpart.Visible, + t.Subpart.SupportSurface, + -t.Subpart.Ceiling, + -t.Subpart.Wall, + }, + ), + r.Domain({Semantics.DiningRoom, Semantics.Room, -Semantics.Object}, []), ), - ( - -cl.AnyRelation(), - 
r.Domain({Semantics.Object, -Semantics.Room}, []) - ) - ] - ) + (-cl.AnyRelation(), r.Domain({Semantics.Object, -Semantics.Room}, [])), + ], + ) filter_dom = r.Domain( - {Semantics.Object, -Semantics.Room}, + {Semantics.Object, -Semantics.Room}, [ ( cl.StableAgainst( - {}, - {t.Subpart.SupportSurface, t.Subpart.Visible, -t.Subpart.Ceiling, -t.Subpart.Wall} - ), - r.Domain({Semantics.DiningRoom, Semantics.Room, -Semantics.Object}, []) + {}, + { + t.Subpart.SupportSurface, + t.Subpart.Visible, + -t.Subpart.Ceiling, + -t.Subpart.Wall, + }, + ), + r.Domain({Semantics.DiningRoom, Semantics.Room, -Semantics.Object}, []), ), - (-cl.AnyRelation(), r.Domain({Semantics.Object, -Semantics.Room}, [])) - ] + (-cl.AnyRelation(), r.Domain({Semantics.Object, -Semantics.Room}, [])), + ], ) - assert res_dom.satisfies(filter_dom) \ No newline at end of file + assert res_dom.satisfies(filter_dom) diff --git a/tests/constraints/test_constraint_language.py b/tests/constraints/test_constraint_language.py index e12f1ca70..e5da111a7 100644 --- a/tests/constraints/test_constraint_language.py +++ b/tests/constraints/test_constraint_language.py @@ -4,23 +4,20 @@ # Authors: Alexander Raistrick +from infinigen.core.constraints import constraint_language as cl from infinigen_examples import indoor_constraint_examples as ex -from infinigen.core.constraints import ( - constraint_language as cl, - reasoning as r -) -def test_residential(): +def test_residential(): cons = ex.home_constraints() assert isinstance(cons, cl.Node) assert isinstance(repr(cons), str) - -def test_operators_simple(): + +def test_operators_simple(): val = cl.constant(value=1) - assert hasattr(val, '__add__') + assert hasattr(val, "__add__") comp = cl.constant(1) + cl.constant(2) assert isinstance(comp, cl.Expression) @@ -31,13 +28,13 @@ def test_operators_simple(): assert isinstance(comp, cl.Expression) assert comp() is True -def test_operators_cast(): +def test_operators_cast(): comp = cl.constant(1) + 2 assert isinstance(comp, cl.ScalarOperatorExpression) assert comp() == 3 -def test_associative_construction(): +def test_associative_construction(): comp = cl.constant(1) + cl.constant(2) + cl.constant(3) - assert len(list(comp.traverse())) == 4 # 1 for additions, 3 for constants \ No newline at end of file + assert len(list(comp.traverse())) == 4 # 1 for additions, 3 for constants diff --git a/tests/constraints/test_constraint_relations.py b/tests/constraints/test_constraint_relations.py index f1ddf37ee..873668a55 100644 --- a/tests/constraints/test_constraint_relations.py +++ b/tests/constraints/test_constraint_relations.py @@ -4,17 +4,12 @@ # Authors: Alexander Raistrick -from pprint import pprint -from infinigen.core.constraints import ( - constraint_language as cl, - reasoning as r -) from infinigen.core import tags as t -from infinigen_examples.indoor_constraint_examples import home_constraints +from infinigen.core.constraints import constraint_language as cl -def test_relation_implies_trivial(): +def test_relation_implies_trivial(): assert not cl.StableAgainst(set(), set()).implies(cl.Touching()) sf = cl.SupportedBy({t.Subpart.SupportSurface}) @@ -27,70 +22,100 @@ def test_relation_implies_trivial(): assert not cl.AnyRelation().implies(sf) assert not cl.AnyRelation().implies(sfi) + def require_intersects(a: cl.Relation, b: cl.Relation, truth): assert a.intersects(b) == truth assert b.intersects(a) == truth + example = cl.StableAgainst( - {t.Subpart.Top, -t.Subpart.Bottom}, - {t.Semantics.Object, -t.Subpart.Top} + {t.Subpart.Top, 
-t.Subpart.Bottom}, {t.Semantics.Object, -t.Subpart.Top} ) -def test_relations_intersects_unrestricted(): +def test_relations_intersects_unrestricted(): unrestricted = cl.AnyRelation() require_intersects(example, unrestricted, True) require_intersects(example, -unrestricted, False) require_intersects(-example, unrestricted, True) - + + def test_relation_intersects_mismatched_type(): mismatch_type = cl.Touching(example.child_tags, example.parent_tags) require_intersects(example, mismatch_type, False) require_intersects(example, -mismatch_type, True) require_intersects(-example, mismatch_type, True) - + + def test_relation_intersects_superset(): superset = cl.StableAgainst({t.Subpart.Top}, {t.Semantics.Object}) require_intersects(example, superset, True) - require_intersects(example, -superset, True) # Top-Bot,Obj-Top AND NOT(Top,Obj) permits Top+Bot_Obj+Top - require_intersects(-example, superset, False) # Top,Obj AND NOT(Top-Bot,Obj-Top) False + require_intersects( + example, -superset, True + ) # Top-Bot,Obj-Top AND NOT(Top,Obj) permits Top+Bot_Obj+Top + require_intersects( + -example, superset, False + ) # Top,Obj AND NOT(Top-Bot,Obj-Top) False + def test_relation_intersects_subset(): subset = cl.StableAgainst( - {t.Subpart.Top, -t.Subpart.Bottom, t.Subpart.SupportSurface}, - {t.Semantics.Object, -t.Subpart.Top, -t.Subpart.Side} + {t.Subpart.Top, -t.Subpart.Bottom, t.Subpart.SupportSurface}, + {t.Semantics.Object, -t.Subpart.Top, -t.Subpart.Side}, ) require_intersects(example, subset, True) - require_intersects(example, -subset, False) # Top-Bot,Obj-Top AND NOT Top-Bot+Sup,Obj-Top-Side - require_intersects(-example, subset, True) # Top-Bot+Sup,Obj-Top-Side AND NOT Top-Bot,Obj-Top + require_intersects( + example, -subset, False + ) # Top-Bot,Obj-Top AND NOT Top-Bot+Sup,Obj-Top-Side + require_intersects( + -example, subset, True + ) # Top-Bot+Sup,Obj-Top-Side AND NOT Top-Bot,Obj-Top + def test_relation_intersects_intersecting(): - inter = cl.StableAgainst({t.Subpart.Top, t.Subpart.Visible}, {t.Semantics.Object, t.Semantics.Furniture}) + inter = cl.StableAgainst( + {t.Subpart.Top, t.Subpart.Visible}, {t.Semantics.Object, t.Semantics.Furniture} + ) require_intersects(example, inter, True) - require_intersects(example, -inter, True) # Top-Bot_Obj-Top AND NOT Top+Vis_Obj+Furn. Yes, Top-Bot-Vis_Obj-Top-Furn - require_intersects(-example, inter, True) # Top+Vis_Obj+Furn AND NOT Top-Bot_Obj-Top. - + require_intersects( + example, -inter, True + ) # Top-Bot_Obj-Top AND NOT Top+Vis_Obj+Furn. Yes, Top-Bot-Vis_Obj-Top-Furn + require_intersects( + -example, inter, True + ) # Top+Vis_Obj+Furn AND NOT Top-Bot_Obj-Top. + + def test_relation_intersects_contradict_child(): - contradict_child = cl.StableAgainst({t.Subpart.Top, t.Subpart.Bottom}, {t.Semantics.Object}) + contradict_child = cl.StableAgainst( + {t.Subpart.Top, t.Subpart.Bottom}, {t.Semantics.Object} + ) require_intersects(example, contradict_child, False) require_intersects(example, -contradict_child, True) - require_intersects(-example, contradict_child, True) # Top+Bot,Obj AND NOT Top-Bot,Obj-Top = Top+Bot,Obj? + require_intersects( + -example, contradict_child, True + ) # Top+Bot,Obj AND NOT Top-Bot,Obj-Top = Top+Bot,Obj? 
+ def test_relation_intersects_contradict_parent(): - contradict_parent = cl.StableAgainst({t.Subpart.Top}, {t.Semantics.Object, t.Subpart.Top}) + contradict_parent = cl.StableAgainst( + {t.Subpart.Top}, {t.Semantics.Object, t.Subpart.Top} + ) require_intersects(example, contradict_parent, False) require_intersects(example, -contradict_parent, True) require_intersects(-example, contradict_parent, True) -def test_relation_difference(): +def test_relation_difference(): assert t.difference( - {t.Semantics.Object, -t.Subpart.Top}, - {t.Semantics.Object, t.Subpart.Bottom} - ) == {t.Semantics.Object, -t.Subpart.Top, -t.Subpart.Bottom} + {t.Semantics.Object, -t.Subpart.Top}, {t.Semantics.Object, t.Subpart.Bottom} + ) == { + t.Semantics.Object, + -t.Subpart.Top, + -t.Subpart.Bottom, + } refine = cl.StableAgainst(set(), {t.Semantics.Object, t.Subpart.Bottom}) assert example.difference(refine) == cl.StableAgainst( - {t.Subpart.Top, -t.Subpart.Bottom}, - {t.Semantics.Object, -t.Subpart.Top, -t.Subpart.Bottom} + {t.Subpart.Top, -t.Subpart.Bottom}, + {t.Semantics.Object, -t.Subpart.Top, -t.Subpart.Bottom}, ) diff --git a/tests/constraints/test_reldom.py b/tests/constraints/test_reldom.py index 202364f52..4819cf6e6 100644 --- a/tests/constraints/test_reldom.py +++ b/tests/constraints/test_reldom.py @@ -4,74 +4,112 @@ # Authors: Alexander Raistrick -import copy -import pytest - -from pprint import pprint from infinigen.core import tags as t +from infinigen.core.constraints import constraint_language as cl +from infinigen.core.constraints import reasoning as r -from infinigen.core.constraints import ( - constraint_language as cl, - reasoning as r, -) def test_reldom_compatible_floorwall(): - room = r.Domain({t.Semantics.Room, -t.Semantics.Object}, []) nofloorrel = ( - -cl.StableAgainst({}, {t.Subpart.SupportSurface, t.Subpart.Visible, -t.Subpart.Ceiling, -t.Subpart.Wall}), - room + -cl.StableAgainst( + {}, + { + t.Subpart.SupportSurface, + t.Subpart.Visible, + -t.Subpart.Ceiling, + -t.Subpart.Wall, + }, + ), + room, ) against = cl.StableAgainst( - {t.Subpart.Back, -t.Subpart.Top, -t.Subpart.Front}, - {t.Subpart.Visible, t.Subpart.Wall, -t.Subpart.SupportSurface, -t.Subpart.Ceiling} + {t.Subpart.Back, -t.Subpart.Top, -t.Subpart.Front}, + { + t.Subpart.Visible, + t.Subpart.Wall, + -t.Subpart.SupportSurface, + -t.Subpart.Ceiling, + }, ) wallrel = (against, room) assert r.reldom_compatible(nofloorrel, wallrel) assert r.reldom_compatible(wallrel, nofloorrel) -def test_reldom_compatible_negation(): +def test_reldom_compatible_negation(): nofloorrel = ( - -cl.StableAgainst({}, {t.Subpart.SupportSurface, t.Subpart.Visible, -t.Subpart.Ceiling, -t.Subpart.Wall}), - r.Domain({t.Semantics.Room, -t.Semantics.Object}, []) + -cl.StableAgainst( + {}, + { + t.Subpart.SupportSurface, + t.Subpart.Visible, + -t.Subpart.Ceiling, + -t.Subpart.Wall, + }, + ), + r.Domain({t.Semantics.Room, -t.Semantics.Object}, []), ) on = cl.StableAgainst( - {t.Subpart.Bottom, -t.Subpart.Front, -t.Subpart.Top, -t.Subpart.Back}, - {t.Subpart.SupportSurface, t.Subpart.Visible, -t.Subpart.Wall, -t.Subpart.Ceiling} + {t.Subpart.Bottom, -t.Subpart.Front, -t.Subpart.Top, -t.Subpart.Back}, + { + t.Subpart.SupportSurface, + t.Subpart.Visible, + -t.Subpart.Wall, + -t.Subpart.Ceiling, + }, ) - specific_floorrel = (on, r.Domain({t.Semantics.Room, -t.Semantics.Object}, [])) + specific_floorrel = (on, r.Domain({t.Semantics.Room, -t.Semantics.Object}, [])) assert r.reldom_compatible(specific_floorrel, specific_floorrel) assert not 
r.reldom_compatible(nofloorrel, specific_floorrel) assert not r.reldom_compatible(specific_floorrel, nofloorrel) -def test_reldom_intersects(): +def test_reldom_intersects(): onroom = ( cl.StableAgainst( - {t.Subpart.Bottom, -t.Subpart.Front, -t.Subpart.Top, -t.Subpart.Back}, - {t.Subpart.SupportSurface, t.Subpart.Visible, -t.Subpart.Wall, -t.Subpart.Ceiling} - ), - r.Domain({t.Semantics.Room, -t.Semantics.Object}, []) + {t.Subpart.Bottom, -t.Subpart.Front, -t.Subpart.Top, -t.Subpart.Back}, + { + t.Subpart.SupportSurface, + t.Subpart.Visible, + -t.Subpart.Wall, + -t.Subpart.Ceiling, + }, + ), + r.Domain({t.Semantics.Room, -t.Semantics.Object}, []), ) onlivingroom = ( cl.StableAgainst( - {t.Subpart.Bottom, -t.Subpart.Front, -t.Subpart.Top, -t.Subpart.Back}, - {t.Subpart.SupportSurface, t.Subpart.Visible, -t.Subpart.Wall, -t.Subpart.Ceiling} - ), - r.Domain({t.Semantics.LivingRoom, t.Semantics.Room, -t.Semantics.Object, -t.Semantics.Bedroom, -t.Semantics.DiningRoom}, []) + {t.Subpart.Bottom, -t.Subpart.Front, -t.Subpart.Top, -t.Subpart.Back}, + { + t.Subpart.SupportSurface, + t.Subpart.Visible, + -t.Subpart.Wall, + -t.Subpart.Ceiling, + }, + ), + r.Domain( + { + t.Semantics.LivingRoom, + t.Semantics.Room, + -t.Semantics.Object, + -t.Semantics.Bedroom, + -t.Semantics.DiningRoom, + }, + [], + ), ) assert r.reldom_intersects(onroom, onlivingroom) -def test_reldom_negative_contradict(): - a = (-cl.AnyRelation(), r.Domain({t.Semantics.Object, -t.Semantics.Room}, [])) - assert r.reldom_compatible(a, a) \ No newline at end of file +def test_reldom_negative_contradict(): + a = (-cl.AnyRelation(), r.Domain({t.Semantics.Object, -t.Semantics.Room}, [])) + assert r.reldom_compatible(a, a) diff --git a/tests/constraints/test_tags.py b/tests/constraints/test_tags.py index 5d11f6867..2162e9d52 100644 --- a/tests/constraints/test_tags.py +++ b/tests/constraints/test_tags.py @@ -6,10 +6,10 @@ from infinigen.core import tags as t -def test_implies(): +def test_implies(): assert t.implies(set(), set()) assert t.implies({t.Subpart.Wall}, {t.Subpart.Wall}) assert t.implies({t.Subpart.Wall}, set()) - assert t.implies({t.Semantics.Room, t.Variable('room')}, {t.Semantics.Room}) \ No newline at end of file + assert t.implies({t.Semantics.Room, t.Variable("room")}, {t.Semantics.Room}) diff --git a/tests/constraints/test_tagset_operations.py b/tests/constraints/test_tagset_operations.py index a35eb1a63..02911b4e4 100644 --- a/tests/constraints/test_tagset_operations.py +++ b/tests/constraints/test_tagset_operations.py @@ -4,11 +4,10 @@ # Authors: Alexander Raistrick -from infinigen.core.constraints import constraint_language as cl from infinigen.core import tags as t + def test_tagset_operations(): - example = {t.Subpart.Side, -t.Subpart.Bottom} assert t.implies(example, example) assert not t.contradiction(example) @@ -39,5 +38,6 @@ def test_tagset_operations(): assert not t.implies(intersect_neg, example) assert not t.contradiction(example.union(intersect_neg)) - assert not t.implies({t.Subpart.Top, -t.Subpart.Bottom}, {t.Subpart.Top, t.Subpart.Bottom}) - \ No newline at end of file + assert not t.implies( + {t.Subpart.Top, -t.Subpart.Bottom}, {t.Subpart.Top, t.Subpart.Bottom} + ) diff --git a/tests/core/test_execute_tasks.py b/tests/core/test_execute_tasks.py index 0ddb01e9c..0d36d2789 100644 --- a/tests/core/test_execute_tasks.py +++ b/tests/core/test_execute_tasks.py @@ -5,38 +5,31 @@ # Authors: Alexander Raistrick from pathlib import Path -from types import SimpleNamespace -import logging -import importlib 
-import pytest import bpy -import gin -from infinigen_examples import generate_nature from infinigen.core import execute_tasks from infinigen.core.placement import camera -from infinigen.core import init - from infinigen_examples.util.test_utils import setup_gin -def test_compose_cube(): - setup_gin('infinigen_examples/configs_nature') +def test_compose_cube(): + setup_gin("infinigen_examples/configs_nature", configs=["base_nature.gin"]) def compose_cube(output_folder, scene_seed, **params): - camera_rigs = camera.spawn_camera_rigs() + camera.spawn_camera_rigs() bpy.ops.mesh.primitive_cube_add() - output = Path('/tmp/test_compose_cube') + output = Path("/tmp/test_compose_cube") output.mkdir(exist_ok=True) execute_tasks.execute_tasks( compose_cube, + populate_scene_func=None, input_folder=None, output_folder=output, - task='coarse populate', + task="coarse populate", scene_seed=0, frame_range=[0, 100], - camera_id=(0, 0) + camera_id=(0, 0), ) diff --git a/tests/core/test_gins.py b/tests/core/test_gins.py index 8a26da057..c2c7d5564 100644 --- a/tests/core/test_gins.py +++ b/tests/core/test_gins.py @@ -4,36 +4,29 @@ # Authors: Alexander Raistrick -from pathlib import Path -from types import SimpleNamespace -import logging -import importlib import pytest -import bpy -import gin - -from infinigen_examples import generate_nature -from infinigen.core import execute_tasks -from infinigen.core.placement import camera -from infinigen.core import init +import infinigen from infinigen_examples.util.test_utils import setup_gin -nature_folder = 'infinigen_examples/configs_nature' -nature_gins = [p.name for p in (init.repo_root()/nature_folder).glob('**/*.gin')] +nature_folder = "infinigen_examples/configs_nature" +nature_gins = [p.name for p in (infinigen.repo_root() / nature_folder).glob("**/*.gin")] + -@pytest.mark.parametrize('extra_gin', sorted(nature_gins)) +@pytest.mark.parametrize("extra_gin", sorted(nature_gins)) def test_gins_load_nature(extra_gin): # gin must successfully load the config without crashing # common failures are misspellings of config fields, renamed functions, etc setup_gin(nature_folder, configs=[extra_gin]) -indoor_folder = 'infinigen_examples/configs_indoor' -indoor_gins = [p.name for p in (init.repo_root()/indoor_folder).glob('**/*.gin')] -@pytest.mark.parametrize('extra_gin', sorted(indoor_gins)) +indoor_folder = "infinigen_examples/configs_indoor" +indoor_gins = [p.name for p in (infinigen.repo_root() / indoor_folder).glob("**/*.gin")] + + +@pytest.mark.parametrize("extra_gin", sorted(indoor_gins)) def test_gins_load_indoor(extra_gin): # gin must successfully load the config without crashing # common failures are misspellings of config fields, renamed functions, etc - setup_gin(indoor_folder, configs=[extra_gin]) + setup_gin([indoor_folder, nature_folder], configs=[extra_gin]) diff --git a/tests/core/test_tagging.py b/tests/core/test_tagging.py index bfe140170..4560dec41 100644 --- a/tests/core/test_tagging.py +++ b/tests/core/test_tagging.py @@ -4,30 +4,36 @@ # Authors: Alexander Raistrick +import bpy +import numpy as np + +from infinigen.core import surface, tagging +from infinigen.core import tags as t from infinigen.core.util import blender as butil -from infinigen.core import tagging, surface, tags as t -import numpy as np -import bpy def test_tagging_basic(): - tagging.tag_system.clear() butil.clear_scene() - + cube = butil.spawn_cube() - tag = t.StringTag('cubey_tag') + tag = t.StringTag("cubey_tag") tag_name = tag.desc tagging.tag_object(cube, tag) - assert len([n 
for n in cube.data.attributes.keys() if n.startswith(tagging.PREFIX)]) == 0 + assert ( + len([n for n in cube.data.attributes.keys() if n.startswith(tagging.PREFIX)]) + == 0 + ) assert list(tagging.tag_system.tag_dict.keys()) == [tag_name] tagint_attr = cube.data.attributes.get(tagging.COMBINED_ATTR_NAME) assert tagint_attr is not None - assert tagint_attr.domain == 'FACE' + assert tagint_attr.domain == "FACE" - tagint_vals = surface.read_attr_data(cube, tagging.COMBINED_ATTR_NAME, domain='FACE') + tagint_vals = surface.read_attr_data( + cube, tagging.COMBINED_ATTR_NAME, domain="FACE" + ) cubey_tag_int = tagging.tag_system.tag_dict.get(tag_name) assert cubey_tag_int == 1 @@ -37,61 +43,61 @@ def test_tagging_basic(): assert np.all(tagint_vals == cubey_tag_int) assert tagging.tagged_face_mask(cube, tag).all() - halftag = t.StringTag('last_half') + halftag = t.StringTag("last_half") mask = np.arange(n_poly) >= (n_poly // 2) tagging.tag_object(cube, halftag, mask) - combined_half_name = tag.desc + '.' + halftag.desc + combined_half_name = tag.desc + "." + halftag.desc assert list(tagging.tag_system.tag_dict.keys()) == [tag_name, combined_half_name] assert np.all(tagging.tagged_face_mask(cube, halftag) == mask) assert np.all(tagging.tagged_face_mask(cube, {tag, halftag}) == mask) assert np.all(tagging.tagged_face_mask(cube, tag) == np.ones(n_poly, dtype=bool)) - with butil.ViewportMode(cube, mode='EDIT'): + with butil.ViewportMode(cube, mode="EDIT"): butil.select(cube) - bpy.ops.mesh.quads_convert_to_tris(quad_method='BEAUTY', ngon_method='BEAUTY') + bpy.ops.mesh.quads_convert_to_tris(quad_method="BEAUTY", ngon_method="BEAUTY") assert tagging.tagged_face_mask(cube, halftag).sum() == 2 * mask.sum() -def get_canonical_tag_cube(): +def get_canonical_tag_cube(): cube = butil.spawn_cube() - with butil.ViewportMode(cube, mode='EDIT'): + with butil.ViewportMode(cube, mode="EDIT"): butil.select(cube) - bpy.ops.mesh.quads_convert_to_tris(quad_method='BEAUTY', ngon_method='BEAUTY') + bpy.ops.mesh.quads_convert_to_tris(quad_method="BEAUTY", ngon_method="BEAUTY") tagging.tag_canonical_surfaces(cube) return cube + def test_tag_canonical(): - tagging.tag_system.clear() butil.clear_scene() - cube = get_canonical_tag_cube() - + cube = get_canonical_tag_cube() + for tag in tagging.CANONICAL_TAGS: mask = tagging.tagged_face_mask(cube, tag) - assert mask.sum() == 2 # expect 2 triangles forming every side of the cube - + assert mask.sum() == 2 # expect 2 triangles forming every side of the cube + idx1, idx2 = np.where(mask)[0] norm1 = cube.data.polygons[idx1].normal norm2 = cube.data.polygons[idx2].normal assert norm1 == norm2 -def test_tag_canonical_negated(): +def test_tag_canonical_negated(): tagging.tag_system.clear() butil.clear_scene() - cube = get_canonical_tag_cube() + cube = get_canonical_tag_cube() assert len(cube.data.polygons) == 12 assert tagging.tagged_face_mask(cube, t.Subpart.Top).sum() == 2 all_but_top = tagging.tagged_face_mask(cube, -t.Subpart.Top) - assert all_but_top.sum() == 10 # 4*2 side triangles, 2 bottom triangles + assert all_but_top.sum() == 10 # 4*2 side triangles, 2 bottom triangles side = tagging.tagged_face_mask(cube, {-t.Subpart.Top, -t.Subpart.Bottom}) - assert side.sum() == 8 # 4 sides, 2 triangles \ No newline at end of file + assert side.sum() == 8 # 4 sides, 2 triangles diff --git a/tests/datagen/test_manage_jobs.py b/tests/datagen/test_manage_jobs.py new file mode 100644 index 000000000..a4481ab43 --- /dev/null +++ b/tests/datagen/test_manage_jobs.py @@ -0,0 +1,67 @@ +import 
subprocess +from pathlib import Path + +import gin +import pytest + +import infinigen +from infinigen.core.init import apply_gin_configs +from infinigen.datagen import manage_jobs + +conf = infinigen.repo_root() / "infinigen/datagen/configs" +assert conf.exists() + +compute_platforms = [x.name for x in (conf / "compute_platform").glob("*.gin")] +data_schema = [x.name for x in (conf / "data_schema").glob("*.gin")] + + +@pytest.mark.parametrize("compute_platform", sorted(compute_platforms)) +def test_load_gin_compute_platform(compute_platform): + gin.clear_config() + + apply_gin_configs( + config_folders=Path("infinigen/datagen/configs"), + configs=[compute_platform, "monocular.gin"], + overrides=[], + mandatory_folders=manage_jobs.mandatory_exclusive_configs, + mutually_exclusive_folders=manage_jobs.mandatory_exclusive_configs, + ) + + +@pytest.mark.parametrize("data_schema", sorted(data_schema)) +def test_load_gin_data_schema(data_schema): + gin.clear_config() + + apply_gin_configs( + config_folders=Path("infinigen/datagen/configs"), + configs=["local_256GB.gin", data_schema], + overrides=[], + mandatory_folders=manage_jobs.mandatory_exclusive_configs, + mutually_exclusive_folders=manage_jobs.mandatory_exclusive_configs, + ) + + +def test_dryrun_hello_world(tmp_path): + cmd = ( + f"python -m infinigen.datagen.manage_jobs --output_folder {tmp_path}/hello_world --num_scenes 1 --specific_seed 0 " + "--configs desert.gin simple.gin --pipeline_configs local_16GB.gin monocular.gin blender_gt.gin " + "--pipeline_overrides LocalScheduleHandler.use_gpu=False" + ) + + cmd += " --overrides execute_tasks.dryrun=True" + res = subprocess.run(cmd, shell=True, check=True) + assert res.returncode == 0 + + +def test_dryrun_hello_room(tmp_path): + cmd = ( + f"python -m infinigen.datagen.manage_jobs --output_folder {tmp_path}/hello_room --num_scenes 1 --specific_seed 0 " + "--pipeline_configs local_256GB.gin monocular.gin blender_gt.gin indoor_background_configs.gin " + "--configs singleroom.gin " + "--pipeline_overrides get_cmd.driver_script='infinigen_examples.generate_indoors' LocalScheduleHandler.use_gpu=False " + "--overrides compose_indoors.restrict_single_supported_roomtype=True" + ) + + cmd += " execute_tasks.dryrun=True" + res = subprocess.run(cmd, shell=True, check=True) + assert res.returncode == 0 diff --git a/tests/integration/manual_integration_check.py b/tests/integration/manual_integration_check.py index 0cce0f720..2f81450d4 100644 --- a/tests/integration/manual_integration_check.py +++ b/tests/integration/manual_integration_check.py @@ -4,26 +4,25 @@ # Authors: David Yan, Lahav Lipson (SLURM job parsing) +import argparse +import json +import math import os +import re +import subprocess import sys -import argparse -import shutil -import cv2 -import numpy as np -import pandas as pd -from tabulate import tabulate from collections import defaultdict -import re -import statistics from dataclasses import dataclass from datetime import datetime, timedelta from pathlib import Path -import subprocess -import math -from scipy.signal import convolve2d -from skimage.restoration import estimate_sigma + +import cv2 +import numpy as np +import pandas as pd from skimage.metrics import structural_similarity -import json +from skimage.restoration import estimate_sigma +from tabulate import tabulate + @dataclass class Job: @@ -42,7 +41,7 @@ def end_time(self): return self.start_time + self.time_elapsed def __lt__(self, other): - return (int(self.job_id) < int(other.job_id)) + return int(self.job_id) < 
int(other.job_id) def __str__(self): if self.req_memory is not None: @@ -51,97 +50,140 @@ def __str__(self): return f"{self.job_id} {self.name.ljust(60+73)} {self.current_status.ljust(10)}" -sacct_line_regex = re.compile(r"([0-9]+) +(\S+) +(\S+) +([0-9]+) +([A-Z_]+) +(node[0-9]+) +(\S+).*").fullmatch +sacct_line_regex = re.compile( + r"([0-9]+) +(\S+) +(\S+) +([0-9]+) +([A-Z_]+) +(node[0-9]+) +(\S+).*" +).fullmatch MEM_FACTOR = {"G": 1, "M": 1e3, "K": 1e6} -pd.set_option('display.max_rows', None) -pd.set_option('display.max_columns', None) -pd.options.display.width=None +pd.set_option("display.max_rows", None) +pd.set_option("display.max_columns", None) +pd.options.display.width = None -suffixes = ['B', 'KB', 'MB', 'GB', 'TB', 'PB'] +suffixes = ["B", "KB", "MB", "GB", "TB", "PB"] all_data = defaultdict(dict) -''' +""" The following function s attributed to Sridhar Ratnakumar from Stack Overflow at https://stackoverflow.com/a/1094933 and is licensed under CC-BY-SA 4.0 (https://creativecommons.org/licenses/by-sa/4.0/deed.en#ref-appropriate-credit). David Yan used this code WITHOUT modification. -''' -def sizeof_fmt(num, suffix="B"): +""" + + +def sizeof_fmt(num, suffix="B"): for unit in ("", "Ki", "Mi", "Gi", "Ti", "Pi", "Ei", "Zi"): if abs(num) < 1024.0: return f"{num:3.1f} {unit}{suffix}" num /= 1024.0 return f"{num:.1f}Yi{suffix}" -''' + +""" The following function s attributed to FObersteiner from Stack Overflow at https://stackoverflow.com/a/64662985 and is licensed under CC-BY-SA 4.0 (https://creativecommons.org/licenses/by-sa/4.0/deed.en#ref-appropriate-credit). David Yan used this code WITHOUT modification. -''' +""" + + def td_to_str(td): """ convert a timedelta object td to a string in HH:MM:SS format. """ - if (pd.isnull(td)): + if pd.isnull(td): return td hours, remainder = divmod(td.total_seconds(), 3600) minutes, seconds = divmod(remainder, 60) - return f'{int(hours):02}:{int(minutes):02}:{int(seconds):02}' + return f"{int(hours):02}:{int(minutes):02}:{int(seconds):02}" def parse_sacct_line(line): if sacct_line_regex(line) is None: return - job_id, job_name, resources, elapsed_raw, current_status, node, start_time = sacct_line_regex(line).groups() - request = dict(e.split('=') for e in resources.split(',')) - start_time = datetime.strptime(start_time, '%Y-%m-%dT%H:%M:%S') + job_id, job_name, resources, elapsed_raw, current_status, node, start_time = ( + sacct_line_regex(line).groups() + ) + request = dict(e.split("=") for e in resources.split(",")) + start_time = datetime.strptime(start_time, "%Y-%m-%dT%H:%M:%S") elapsed = timedelta(seconds=int(elapsed_raw)) - return Job(job_id=job_id, name=job_name, req_memory=request['mem'], cpu=request['cpu'], gpu=request.get('gpu', '0'), current_status=current_status, node=node, start_time=start_time, time_elapsed=elapsed) - -def parse_scene_log(scene_path, step_times, asset_time_data, poly_data, asset_mem_data, obj_created_data, instance_created_data): + return Job( + job_id=job_id, + name=job_name, + req_memory=request["mem"], + cpu=request["cpu"], + gpu=request.get("gpu", "0"), + current_status=current_status, + node=node, + start_time=start_time, + time_elapsed=elapsed, + ) + + +def parse_scene_log( + scene_path, + step_times, + asset_time_data, + poly_data, + asset_mem_data, + obj_created_data, + instance_created_data, +): log_folder = os.path.join(scene_path, "logs") - coarse_folder = os.path.join(scene_path , "coarse") + coarse_folder = os.path.join(scene_path, "coarse") fine_folder = next(Path(scene_path).glob("fine*")) - seed = 
Path(scene_path).stem + seed = Path(scene_path).stem scene_times = [] - if (os.path.isdir(log_folder)): - for filepath in Path(log_folder).glob('*.err'): + if os.path.isdir(log_folder): + for filepath in Path(log_folder).glob("*.err"): step = "" for stepName in step_times: - if filepath.stem.startswith(stepName): + if filepath.stem.startswith(stepName): step = stepName break - else: continue + else: + continue errFile = open(filepath) text = errFile.read() - if "[MAIN TOTAL] finished in" not in text: continue - search = re.search(r'\[MAIN TOTAL\] finished in ([0-9]+):([0-9]+):([0-9]+)', text) + if "[MAIN TOTAL] finished in" not in text: + continue + search = re.search( + r"\[MAIN TOTAL\] finished in ([0-9]+):([0-9]+):([0-9]+)", text + ) d = None - if search is None: - search = re.search(r'\[MAIN TOTAL\] finished in ([0-9]) day.*, ([0-9]+):([0-9]+):([0-9]+)', text) - d,h,m,s = search.group(1,2,3,4) + if search is None: + search = re.search( + r"\[MAIN TOTAL\] finished in ([0-9]) day.*, ([0-9]+):([0-9]+):([0-9]+)", + text, + ) + d, h, m, s = search.group(1, 2, 3, 4) + else: + h, m, s = search.group(1, 2, 3) + if d is None: + step_timedelta = timedelta(hours=int(h), minutes=int(m), seconds=int(s)) else: - h,m,s = search.group(1,2,3) - if d is None: - step_timedelta = timedelta(hours=int(h),minutes=int(m),seconds=int(s)) - else: - step_timedelta = timedelta(days=int(d), hours=int(h),minutes=int(m),seconds=int(s)) + step_timedelta = timedelta( + days=int(d), hours=int(h), minutes=int(m), seconds=int(s) + ) step_times[step].append(step_timedelta) scene_times.append(step_timedelta) all_data[seed]["[" + step + "] Step Time"] = step_timedelta # parse times < 1 day - for name, h, m, s in re.findall(r'\[INFO\] \| \[(.*?)\] finished in ([0-9]+):([0-9]+):([0-9]+)', text): + for name, h, m, s in re.findall( + r"\[INFO\] \| \[(.*?)\] finished in ([0-9]+):([0-9]+):([0-9]+)", text + ): timedelta_obj = timedelta(hours=int(h), minutes=int(m), seconds=int(s)) - if (name == "MAIN TOTAL"): continue + if name == "MAIN TOTAL": + continue else: - if (timedelta_obj.total_seconds() < 1): continue + if timedelta_obj.total_seconds() < 1: + continue instance_dict = {} instance_dict["stage_timedelta"] = timedelta_obj - instance_dict["step_timedelta"] = step_timedelta - instance_dict["step_name"] = stepName # should be same for every instance of a given stage - instance_dict["seed"] = seed + instance_dict["step_timedelta"] = step_timedelta + instance_dict["step_name"] = ( + stepName # should be same for every instance of a given stage + ) + instance_dict["seed"] = seed stage_key = "[" + stepName + "] " + name asset_time_data[stage_key].append(instance_dict) if stage_key in all_data[seed]: @@ -150,16 +192,25 @@ def parse_scene_log(scene_path, step_times, asset_time_data, poly_data, asset_me all_data[seed]["[time] " + stage_key] = timedelta_obj # parse times > 1 day - for name, d, h, m, s in re.findall(r'\[INFO\] \| \[(.*?)\] finished in ([0-9]) day.*, ([0-9]+):([0-9]+):([0-9]+)', text): - timedelta_obj = timedelta(days=int(d), hours=int(h),minutes=int(m),seconds=int(s)) - if (name == "MAIN TOTAL"): continue + for name, d, h, m, s in re.findall( + r"\[INFO\] \| \[(.*?)\] finished in ([0-9]) day.*, ([0-9]+):([0-9]+):([0-9]+)", + text, + ): + timedelta_obj = timedelta( + days=int(d), hours=int(h), minutes=int(m), seconds=int(s) + ) + if name == "MAIN TOTAL": + continue else: - if (timedelta_obj.total_seconds() < 1): continue + if timedelta_obj.total_seconds() < 1: + continue instance_dict = {} 
instance_dict["stage_timedelta"] = timedelta_obj - instance_dict["step_timedelta"] = step_timedelta - instance_dict["step_name"] = stepName # should be same for every instance of a given stage - instance_dict["seed"] = seed + instance_dict["step_timedelta"] = step_timedelta + instance_dict["step_name"] = ( + stepName # should be same for every instance of a given stage + ) + instance_dict["seed"] = seed stage_key = "[" + stepName + "] " + name asset_time_data[stage_key].append(instance_dict) if stage_key in all_data[seed]: @@ -172,14 +223,14 @@ def parse_scene_log(scene_path, step_times, asset_time_data, poly_data, asset_me for asset_step in asset_time_data: for stage_instance in asset_time_data[asset_step]: - if (stage_instance["seed"] == seed): + if stage_instance["seed"] == seed: stage_instance["scene_time"] = scene_time - + coarse_poly = os.path.join(coarse_folder, "polycounts.txt") fine_poly = os.path.join(fine_folder, "polycounts.txt") if os.path.isfile(coarse_poly) and os.path.isfile(fine_poly): - coarse_text = open(coarse_poly).read().replace(',', '') - fine_text = open(fine_poly).read().replace(',', '') + coarse_text = open(coarse_poly).read().replace(",", "") + fine_text = open(fine_poly).read().replace(",", "") for faces, tris in re.findall("Faces:([0-9]+)Tris:([0-9]+)", coarse_text): poly_data["[Coarse] Faces"].append(int(faces)) poly_data["[Coarse] Tris"].append(int(tris)) @@ -193,123 +244,214 @@ def parse_scene_log(scene_path, step_times, asset_time_data, poly_data, asset_me all_data[seed]["[Polys] [Fine] Tris"] = int(tris) coarse_stage_df = pd.read_csv(os.path.join(coarse_folder, "pipeline_coarse.csv")) - coarse_stage_df["mem_delta"] = coarse_stage_df[coarse_stage_df['ran']==True]['mem_at_finish'].diff() - coarse_stage_df["obj_delta"] = coarse_stage_df[coarse_stage_df['ran']==True]['obj_count'].diff() - coarse_stage_df["instance_delta"] = coarse_stage_df[coarse_stage_df['ran']==True]['instance_count'].diff() + coarse_stage_df["mem_delta"] = coarse_stage_df[coarse_stage_df["ran"] is True][ + "mem_at_finish" + ].diff() + coarse_stage_df["obj_delta"] = coarse_stage_df[coarse_stage_df["ran"] is True][ + "obj_count" + ].diff() + coarse_stage_df["instance_delta"] = coarse_stage_df[coarse_stage_df["ran"] is True][ + "instance_count" + ].diff() for index, row in coarse_stage_df.iterrows(): - if row["mem_delta"] == 0 or math.isnan(float(row["mem_delta"])) or row["ran"] == False: continue + if ( + row["mem_delta"] == 0 + or math.isnan(float(row["mem_delta"])) + or row["ran"] is False + ): + continue asset_mem_data["[Coarse] " + row["name"]].append(row["mem_delta"]) obj_created_data["[Coarse] " + row["name"]].append(row["obj_delta"]) instance_created_data["[Coarse] " + row["name"]].append(row["instance_delta"]) - all_data[seed]["[Memory] [Coarse] " + row["name"]] = sizeof_fmt(row["mem_delta"]) + all_data[seed]["[Memory] [Coarse] " + row["name"]] = sizeof_fmt( + row["mem_delta"] + ) all_data[seed]["[Objects Generated] [Coarse] " + row["name"]] = row["obj_delta"] - all_data[seed]["[Instances Generated] [Coarse] " + row["name"]] = row["instance_delta"] - - fine_stage_df = pd.read_csv(os.path.join(fine_folder, "pipeline_fine.csv")) # this is supposed to be coarse folder - fine_stage_df["mem_delta"] = fine_stage_df[fine_stage_df['ran']]['mem_at_finish'].diff() - fine_stage_df["obj_delta"] = fine_stage_df[fine_stage_df['ran']]['obj_count'].diff() - fine_stage_df["instance_delta"] = fine_stage_df[fine_stage_df['ran']]['instance_count'].diff() + all_data[seed]["[Instances Generated] [Coarse] " 
+ row["name"]] = row[ + "instance_delta" + ] + + fine_stage_df = pd.read_csv( + os.path.join(fine_folder, "pipeline_fine.csv") + ) # this is supposed to be coarse folder + fine_stage_df["mem_delta"] = fine_stage_df[fine_stage_df["ran"]][ + "mem_at_finish" + ].diff() + fine_stage_df["obj_delta"] = fine_stage_df[fine_stage_df["ran"]]["obj_count"].diff() + fine_stage_df["instance_delta"] = fine_stage_df[fine_stage_df["ran"]][ + "instance_count" + ].diff() for index, row in fine_stage_df.iterrows(): - if row["mem_delta"] == 0 or math.isnan(float(row["mem_delta"])) or row["ran"] == False: continue + if ( + row["mem_delta"] == 0 + or math.isnan(float(row["mem_delta"])) + or row["ran"] is False + ): + continue asset_mem_data["[Fine] " + row["name"]].append(row["mem_delta"]) obj_created_data["[Fine] " + row["name"]].append(row["obj_delta"]) instance_created_data["[Fine] " + row["name"]].append(row["instance_delta"]) all_data[seed]["[Memory] [Fine] " + row["name"]] = sizeof_fmt(row["mem_delta"]) all_data[seed]["[Objects Generated] [Fine] " + row["name"]] = row["obj_delta"] - all_data[seed]["[Instances Generated] [Fine] " + row["name"]] = row["instance_delta"] - + all_data[seed]["[Instances Generated] [Fine] " + row["name"]] = row[ + "instance_delta" + ] + + def test_generation(dir): completed_seeds = os.path.join(dir, "finished_seeds.txt") num_lines = sum(1 for _ in open(completed_seeds)) num_scenes = len(next(os.walk(dir))[1]) - 1 - print(f'{num_lines}/{num_scenes} succeeded scenes') - # assert num_lines >= 0.8 * int(num_scenes), "Over 20% of scenes did not complete" + print(f"{num_lines}/{num_scenes} succeeded scenes") + + +# assert num_lines >= 0.8 * int(num_scenes), "Over 20% of scenes did not complete" + def make_stats(data_df): stats = pd.DataFrame() - stats['mean'] = data_df.mean(axis=1) - stats['median'] = data_df.min(axis=1) - stats['90%'] = data_df.quantile(0.9, axis=1) - stats['95%'] = data_df.quantile(0.95, axis=1) - stats['99%'] = data_df.quantile(0.99, axis=1) + stats["mean"] = data_df.mean(axis=1) + stats["median"] = data_df.min(axis=1) + stats["90%"] = data_df.quantile(0.9, axis=1) + stats["95%"] = data_df.quantile(0.95, axis=1) + stats["99%"] = data_df.quantile(0.99, axis=1) return stats + def test_logs(dir): - print('') - asset_time_data = defaultdict(list) # data for individual asset stages + print("") + asset_time_data = defaultdict(list) # data for individual asset stages asset_mem_data = defaultdict(list) obj_created_data = defaultdict(list) instance_created_data = defaultdict(list) - - step_times = {"fineterrain" : [], "coarse" : [], "populate" : [], "rendershort" : [], "blendergt": []} - poly_data = {"[Coarse] Faces" : [], "[Coarse] Tris" : [], \ - "[Fine] Faces" : [], "[Fine] Tris" : []} + step_times = { + "fineterrain": [], + "coarse": [], + "populate": [], + "rendershort": [], + "blendergt": [], + } + poly_data = { + "[Coarse] Faces": [], + "[Coarse] Tris": [], + "[Fine] Faces": [], + "[Fine] Tris": [], + } completed_seeds = os.path.join(dir, "finished_seeds.txt") num_lines = sum(1 for _ in open(completed_seeds)) for scene in os.listdir(dir): - if scene not in open(completed_seeds).read(): continue + if scene not in open(completed_seeds).read(): + continue scene_path = os.path.join(dir, scene) - parse_scene_log(scene_path, step_times, asset_time_data, poly_data, asset_mem_data, obj_created_data, instance_created_data) - - step_df = pd.DataFrame.from_dict(step_times, orient='index') + parse_scene_log( + scene_path, + step_times, + asset_time_data, + poly_data, + 
asset_mem_data, + obj_created_data, + instance_created_data, + ) + + step_df = pd.DataFrame.from_dict(step_times, orient="index") step_stats = make_stats(step_df) for column in step_stats: - step_stats[column] = step_stats[column].dt.round('1s').map(lambda x: td_to_str(x)) + step_stats[column] = ( + step_stats[column].dt.round("1s").map(lambda x: td_to_str(x)) + ) print("Time Logs by Step") - print(tabulate(step_stats, headers='keys', tablefmt='fancy_grid')) - + print(tabulate(step_stats, headers="keys", tablefmt="fancy_grid")) + asset_stats = defaultdict(list) for asset_name in asset_time_data: - asset_times = pd.Series(instance["stage_timedelta"] for instance in asset_time_data[asset_name]) - chance = float(len(asset_time_data[asset_name]))/float(num_lines) - step_times = pd.Series(instance["step_timedelta"] for instance in asset_time_data[asset_name]) - scene_times = pd.Series(instance["scene_time"] for instance in asset_time_data[asset_name]) + asset_times = pd.Series( + instance["stage_timedelta"] for instance in asset_time_data[asset_name] + ) + chance = float(len(asset_time_data[asset_name])) / float(num_lines) + step_times = pd.Series( + instance["step_timedelta"] for instance in asset_time_data[asset_name] + ) + scene_times = pd.Series( + instance["scene_time"] for instance in asset_time_data[asset_name] + ) step_percent = asset_times.sum() / step_times.sum() * 100 scene_percent = asset_times.sum() / scene_times.sum() * 100 - asset_stats[asset_name] =\ - [asset_times.mean().round(freq ='s'), asset_times.median().round(freq ='s'),\ - asset_times.quantile(0.9).round(freq ='s'), asset_times.quantile(0.95).round(freq ='s'),\ - asset_times.quantile(0.99).round(freq ='s'), asset_times.sum().round(freq ='s'), \ - scene_percent, step_percent, asset_time_data[asset_name][0]["step_name"], chance] - - asset_stats = pd.DataFrame.from_dict(asset_stats, orient='index') - asset_stats.columns = ["mean time", "median time", "90%", "95%", "99%", "total time", "% of scene time", "% of step time", "step", "chance of ocurring"] - + asset_stats[asset_name] = [ + asset_times.mean().round(freq="s"), + asset_times.median().round(freq="s"), + asset_times.quantile(0.9).round(freq="s"), + asset_times.quantile(0.95).round(freq="s"), + asset_times.quantile(0.99).round(freq="s"), + asset_times.sum().round(freq="s"), + scene_percent, + step_percent, + asset_time_data[asset_name][0]["step_name"], + chance, + ] + + asset_stats = pd.DataFrame.from_dict(asset_stats, orient="index") + asset_stats.columns = [ + "mean time", + "median time", + "90%", + "95%", + "99%", + "total time", + "% of scene time", + "% of step time", + "step", + "chance of occurring", + ] + for column in asset_stats: if column in ["mean time", "median time", "90%", "95%", "99%", "total time"]: asset_stats[column] = asset_stats[column].apply(td_to_str) - - print("\nTime Logs by Asset Stage") - print(tabulate(asset_stats.sort_values("% of scene time", ascending=False), headers='keys', tablefmt='fancy_grid')) - assset_mem_df = pd.DataFrame.from_dict(asset_mem_data, orient='index') + print("\nTime Logs by Asset Stage") + print( + tabulate( + asset_stats.sort_values("% of scene time", ascending=False), + headers="keys", + tablefmt="fancy_grid", + ) + ) + + assset_mem_df = pd.DataFrame.from_dict(asset_mem_data, orient="index") assset_mem_stats = make_stats(assset_mem_df) - assset_mem_stats = assset_mem_stats.sort_values("mean", ascending=False) + assset_mem_stats = assset_mem_stats.sort_values("mean", ascending=False) print("\nMemory Usage by Asset 
Stage") - print(tabulate(assset_mem_stats.applymap(lambda x: sizeof_fmt(x)), headers='keys', tablefmt='fancy_grid')) - - obj_created_df = pd.DataFrame.from_dict(obj_created_data, orient='index') + print( + tabulate( + assset_mem_stats.applymap(lambda x: sizeof_fmt(x)), + headers="keys", + tablefmt="fancy_grid", + ) + ) + + obj_created_df = pd.DataFrame.from_dict(obj_created_data, orient="index") obj_created_stats = make_stats(obj_created_df) - obj_created_stats = obj_created_stats.sort_values("mean", ascending=False) + obj_created_stats = obj_created_stats.sort_values("mean", ascending=False) print("\nObjects Generated by Asset Stage") - print(tabulate(obj_created_stats, headers='keys', tablefmt='fancy_grid')) + print(tabulate(obj_created_stats, headers="keys", tablefmt="fancy_grid")) - instance_created_df = pd.DataFrame.from_dict(instance_created_data, orient='index') + instance_created_df = pd.DataFrame.from_dict(instance_created_data, orient="index") instance_created_stats = make_stats(instance_created_df) - instance_created_stats = instance_created_stats.sort_values("mean", ascending=False) + instance_created_stats = instance_created_stats.sort_values("mean", ascending=False) print("\nInstances Generated by Asset Stage") - print(tabulate(instance_created_stats, headers='keys', tablefmt='fancy_grid')) + print(tabulate(instance_created_stats, headers="keys", tablefmt="fancy_grid")) - poly_df = pd.DataFrame.from_dict(poly_data, orient='index') + poly_df = pd.DataFrame.from_dict(poly_data, orient="index") poly_stats = make_stats(poly_df) print("\nPolycount Statistics") - print(tabulate(poly_stats, headers='keys', tablefmt='fancy_grid')) + print(tabulate(poly_stats, headers="keys", tablefmt="fancy_grid")) def test_step_memory(dir, days): days_since = int(days) - sacct_start_date = (datetime.now() - timedelta(days=days_since)).strftime('%Y-%m-%d') + sacct_start_date = (datetime.now() - timedelta(days=days_since)).strftime( + "%Y-%m-%d" + ) sacct_command = f"sacct --starttime {sacct_start_date} -u {os.environ['USER']} --noheader -o jobid,jobname%80,AllocTRES%80,ElapsedRaw,stat%30,NodeList,Start,MaxRSS" print(f"Running + {sacct_command}") sacct_output = subprocess.check_output(sacct_command.split()).decode() @@ -321,68 +463,84 @@ def test_step_memory(dir, days): for sacct_line in sacct_output.splitlines(): parsed_job = parse_sacct_line(sacct_line) - if (parsed_job is None): + if parsed_job is None: continue for name in re.findall(f"{Path(dir).stem}_([^ _]+)_.*", parsed_job.name): if name in seeds: if parsed_job.job_id in mem_dict: max_memory = mem_dict[parsed_job.job_id] - parsed_job.max_memory_gb = float(max_memory[:-1]) / MEM_FACTOR[max_memory[-1]] + parsed_job.max_memory_gb = ( + float(max_memory[:-1]) / MEM_FACTOR[max_memory[-1]] + ) relevant_started_jobs.append(parsed_job) - - step_mem = {"fineterrain" : [], "coarse" : [], "populate" : [], "rendershort" : [], "blendergt": []} + + step_mem = { + "fineterrain": [], + "coarse": [], + "populate": [], + "rendershort": [], + "blendergt": [], + } for job in relevant_started_jobs: for step in step_mem: - if (step in job.name): + if step in job.name: step_mem[step].append(job.max_memory_gb) - - step_mem_df = pd.DataFrame.from_dict(step_mem, orient='index') + + step_mem_df = pd.DataFrame.from_dict(step_mem, orient="index") step_mem_stats = make_stats(step_mem_df) print("\nMemory Usage by Step") - print(tabulate(step_mem_stats, headers='keys', tablefmt='fancy_grid')) - + print(tabulate(step_mem_stats, headers="keys", tablefmt="fancy_grid")) + def 
test_brightness(dir): - print('') + print("") completed_seeds = os.path.join(dir, "finished_seeds.txt") num_lines = sum(1 for _ in open(completed_seeds)) numDark = 0 for scene in os.listdir(dir): - for filepath in Path(os.path.join(dir,scene)).rglob('Image*.png'): - im = cv2.imread(str(filepath), cv2.IMREAD_GRAYSCALE) #https://stackoverflow.com/a/52514730 + for filepath in Path(os.path.join(dir, scene)).rglob("Image*.png"): + im = cv2.imread( + str(filepath), cv2.IMREAD_GRAYSCALE + ) # https://stackoverflow.com/a/52514730 meanPercent = np.mean(im) * 100 / 255 if meanPercent < 5: - numDark+=1 + numDark += 1 all_data[scene]["Dark"] = True print(f"{scene} is dark") else: all_data[scene]["Dark"] = False - + print(f"{numDark}/{num_lines} images are dark") def test_noise(dir): noise_dict = {} for scene in os.listdir(dir): - for filepath in Path(os.path.join(dir,scene)).rglob('Image*.png'): + for filepath in Path(os.path.join(dir, scene)).rglob("Image*.png"): img = cv2.imread(str(filepath)) sigma = estimate_sigma(img, channel_axis=-1, average_sigmas=True) noise_dict[scene] = sigma all_data[scene]["Noise Index"] = sigma - break # there will be up to two renders but they're the same - - noise_df = pd.DataFrame.from_dict(noise_dict, orient='index') + break # there will be up to two renders but they're the same + + noise_df = pd.DataFrame.from_dict(noise_dict, orient="index") noise_df.columns = ["Noise Estimate"] print("\nNoise in Rendered Images") - print(tabulate(noise_df.sort_values("Noise Estimate", ascending=False), headers='keys', tablefmt='fancy_grid')) + print( + tabulate( + noise_df.sort_values("Noise Estimate", ascending=False), + headers="keys", + tablefmt="fancy_grid", + ) + ) def test_gt(dir): completed_seeds = os.path.join(dir, "finished_seeds.txt") seeds = open(completed_seeds).read() - print('') - + print("") + tag_seg_data = defaultdict(list) pix_sum_seg = 0 @@ -392,33 +550,38 @@ def test_gt(dir): similarity = {} for scene in os.listdir(dir): - if scene not in seeds: continue - scene_path = os.path.join(dir,scene) - blender_gt_search = list(Path(scene_path).glob('frames*')) - opengl_gt_search = list(Path(scene_path).glob('opengl*')) - - if not blender_gt_search or not opengl_gt_search: continue - - blender_gt_folder = blender_gt_search[0] #should only be one occurrence of each + if scene not in seeds: + continue + scene_path = os.path.join(dir, scene) + blender_gt_search = list(Path(scene_path).glob("frames*")) + opengl_gt_search = list(Path(scene_path).glob("opengl*")) + + if not blender_gt_search or not opengl_gt_search: + continue + + blender_gt_folder = blender_gt_search[ + 0 + ] # should only be one occurrence of each opengl_gt_folder = opengl_gt_search[0] - blender_depth_search = list(Path(blender_gt_folder).glob('Depth*.npy')) - opengl_depth_search = list((opengl_gt_folder).glob('Depth*.npy')) - - if blender_depth_search and opengl_depth_search: + blender_depth_search = list(Path(blender_gt_folder).glob("Depth*.npy")) + opengl_depth_search = list((opengl_gt_folder).glob("Depth*.npy")) + if blender_depth_search and opengl_depth_search: blender_depth = np.load(blender_depth_search[0]) opengl_depth = np.load(opengl_depth_search[0]) - opengl_depth[opengl_depth == np.inf] = 10*10 - opengl_depth = cv2.resize(opengl_depth, dsize= (blender_depth.shape[1], blender_depth.shape[0])) + opengl_depth[opengl_depth == np.inf] = 10 * 10 + opengl_depth = cv2.resize( + opengl_depth, dsize=(blender_depth.shape[1], blender_depth.shape[0]) + ) score, diff = structural_similarity(blender_depth, 
opengl_depth, full=True) similarity[scene] = score * 100 - + fine_folder = next(Path(scene_path).glob("fine*")) tags = json.load(open(os.path.join(fine_folder, "MaskTag.json"))) - tag_seg_search = list((opengl_gt_folder).glob('TagSegmentation*.npy')) + tag_seg_search = list((opengl_gt_folder).glob("TagSegmentation*.npy")) if tag_seg_search: tag_seg = np.load(tag_seg_search[0]) @@ -431,52 +594,87 @@ def test_gt(dir): all_data[scene]["[Tag Seg. Percent] " + tag] = 0 else: tag_seg_data[tag].append(tag_seg_dict[tags[tag]]) - all_data[scene]["[Tag Seg. Percent] " + tag] = float(tag_seg_dict[tags[tag]])/float(tag_seg.shape[0] * tag_seg.shape[1]) * 100 + all_data[scene]["[Tag Seg. Percent] " + tag] = ( + float(tag_seg_dict[tags[tag]]) + / float(tag_seg.shape[0] * tag_seg.shape[1]) + * 100 + ) - obj_json_search = list((blender_gt_folder).glob('Objects*.json')) - obj_seg_search = list((blender_gt_folder).glob('ObjectSegmentation*.npy')) + obj_json_search = list((blender_gt_folder).glob("Objects*.json")) + obj_seg_search = list((blender_gt_folder).glob("ObjectSegmentation*.npy")) - if (obj_json_search and obj_seg_search): + if obj_json_search and obj_seg_search: obj_seg = np.load(obj_seg_search[0]) objects = json.load(open(obj_json_search[0])) pix_sum_obj += obj_seg.shape[0] * obj_seg.shape[1] index, counts = np.unique(obj_seg, return_counts=True) obj_seg_dict = dict(zip(index, counts)) for obj in objects.keys(): - concise_obj = obj.split('(')[0].split(':')[-1].split('.')[0] + concise_obj = obj.split("(")[0].split(":")[-1].split(".")[0] if objects[obj]["object_index"] not in obj_seg_dict.keys(): - obj_seg_data[obj].append(0) - all_data[scene]["[Obj Seg. Percent] " + concise_obj] = 0 + obj_seg_data[obj].append(0) + all_data[scene]["[Obj Seg. Percent] " + concise_obj] = 0 else: obj_seg_data[obj].append(obj_seg_dict[objects[obj]["object_index"]]) - concise_obj = obj.split('(')[0].split(':')[-1].split('.')[0] - all_data[scene]["[Obj Seg. Percent] " + concise_obj] = float((obj_seg_dict[objects[obj]["object_index"]]))/float(obj_seg.shape[0] * obj_seg.shape[1]) * 100 + concise_obj = obj.split("(")[0].split(":")[-1].split(".")[0] + all_data[scene]["[Obj Seg. 
Percent] " + concise_obj] = ( + float((obj_seg_dict[objects[obj]["object_index"]])) + / float(obj_seg.shape[0] * obj_seg.shape[1]) + * 100 + ) assert len(similarity) != 0 - similarity_df = pd.DataFrame.from_dict(similarity, orient='index') + similarity_df = pd.DataFrame.from_dict(similarity, orient="index") print("Comparison checking not fully working, proceed with caution") similarity_df.columns = ["Similarity Score (%)"] - print(tabulate(similarity_df, headers='keys', tablefmt='fancy_grid')) + print(tabulate(similarity_df, headers="keys", tablefmt="fancy_grid")) - tag_seg_data_df = pd.DataFrame.from_dict(tag_seg_data, orient='index') + tag_seg_data_df = pd.DataFrame.from_dict(tag_seg_data, orient="index") tag_seg_stats = pd.DataFrame() - tag_seg_stats['Percent of Pixels'] = tag_seg_data_df.sum(axis=1).map(lambda x: float(x)/float(pix_sum_seg) * 100) + tag_seg_stats["Percent of Pixels"] = tag_seg_data_df.sum(axis=1).map( + lambda x: float(x) / float(pix_sum_seg) * 100 + ) print("\nTag Segmentation Pixel Sources") - print("Percent of untagged pixels: " + str(100 - tag_seg_stats["Percent of Pixels"].sum())) - print(tabulate(tag_seg_stats.sort_values("Percent of Pixels", ascending=False), headers='keys', tablefmt='fancy_grid')) - - - obj_seg_data_df = pd.DataFrame.from_dict(obj_seg_data, orient='index') + print( + "Percent of untagged pixels: " + + str(100 - tag_seg_stats["Percent of Pixels"].sum()) + ) + print( + tabulate( + tag_seg_stats.sort_values("Percent of Pixels", ascending=False), + headers="keys", + tablefmt="fancy_grid", + ) + ) + + obj_seg_data_df = pd.DataFrame.from_dict(obj_seg_data, orient="index") obj_seg_stats = pd.DataFrame() - obj_seg_stats['Percent of Pixels'] = obj_seg_data_df.sum(axis=1).map(lambda x: float(x)/float(pix_sum_obj) * 100) - obj_seg_stats = obj_seg_stats.groupby(obj_seg_stats.index.str.split('(').str[0]).sum() - obj_seg_stats = obj_seg_stats.groupby(obj_seg_stats.index.str.split(':').str[-1]).sum() - obj_seg_stats = obj_seg_stats.groupby(obj_seg_stats.index.str.split('.').str[0]).sum() + obj_seg_stats["Percent of Pixels"] = obj_seg_data_df.sum(axis=1).map( + lambda x: float(x) / float(pix_sum_obj) * 100 + ) + obj_seg_stats = obj_seg_stats.groupby( + obj_seg_stats.index.str.split("(").str[0] + ).sum() + obj_seg_stats = obj_seg_stats.groupby( + obj_seg_stats.index.str.split(":").str[-1] + ).sum() + obj_seg_stats = obj_seg_stats.groupby( + obj_seg_stats.index.str.split(".").str[0] + ).sum() print("\nObject Segmentation Pixel Sources") - print("Percent of untagged pixels: " + str(100 - obj_seg_stats["Percent of Pixels"].sum())) - print(tabulate(obj_seg_stats.sort_values("Percent of Pixels", ascending=False), headers='keys', tablefmt='fancy_grid')) + print( + "Percent of untagged pixels: " + + str(100 - obj_seg_stats["Percent of Pixels"].sum()) + ) + print( + tabulate( + obj_seg_stats.sort_values("Percent of Pixels", ascending=False), + headers="keys", + tablefmt="fancy_grid", + ) + ) def dir_path(string): @@ -485,10 +683,11 @@ def dir_path(string): else: raise NotADirectoryError(string) + def make_args(): parser = argparse.ArgumentParser() - parser.add_argument('-d', '--dir', type=dir_path) - parser.add_argument('-t', '--time', type=int, default=None) + parser.add_argument("-d", "--dir", type=dir_path) + parser.add_argument("-t", "--time", type=int, default=None) args = parser.parse_args() return args @@ -499,49 +698,49 @@ def main(dir, time): os.mkdir(f"{dir}/test_results") if os.path.exists(f"{dir}/test_results/test_logs.log"): 
os.remove(f"{dir}/test_results/test_logs.log") - sys.stdout = open(f"{dir}/test_results/test_logs.log", 'w') + sys.stdout = open(f"{dir}/test_results/test_logs.log", "w") try: print("\nTesting scene success rate") test_generation(dir) - except Exception as e: + except Exception as e: print(e) try: print("\nTesting logs") test_logs(dir) - except Exception as e: + except Exception as e: print(e) - + if time is None: print("\nNo slurm time arg provided, skipping scene memory stats") else: try: print("\nTesting step memory") test_step_memory(dir, time) - except Exception as e: + except Exception as e: print(e) try: print("\nTesting scene brightness") test_brightness(dir) - except Exception as e: + except Exception as e: print(e) try: print("\nTesting scene noise") test_noise(dir) - except Exception as e: + except Exception as e: print(e) try: test_gt(dir) - except Exception as e: + except Exception as e: print(e) - data_df = pd.DataFrame.from_dict(all_data, orient='index') + data_df = pd.DataFrame.from_dict(all_data, orient="index") data_df.to_csv(f"{dir}/test_results/data.csv") - -if __name__ == '__main__': + + +if __name__ == "__main__": args = make_args() main(args.dir, args.time) - diff --git a/tests/list_displaced_materials.txt b/tests/list_displaced_materials.txt index 4444025e9..09e97ef56 100644 --- a/tests/list_displaced_materials.txt +++ b/tests/list_displaced_materials.txt @@ -1,5 +1,5 @@ -infinigen.assets.materials.leather_and_fabrics.fabric -infinigen.assets.materials.leather_and_fabrics.leather +infinigen.assets.materials.fabrics.fabric +infinigen.assets.materials.fabrics.leather infinigen.assets.materials.metal.grained_and_polished_metal infinigen.assets.materials.metal.hammered_metal infinigen.assets.materials.stone_and_concrete.concrete diff --git a/tests/solver/test_asset_surfaces.py b/tests/solver/test_asset_surfaces.py index 94b7c0c3a..f8b8e249e 100644 --- a/tests/solver/test_asset_surfaces.py +++ b/tests/solver/test_asset_surfaces.py @@ -5,57 +5,53 @@ # Authors: Alexander Raistrick import bpy -import pytest - -from infinigen.core.constraints import ( - usage_lookup, - constraint_language as cl -) - -from infinigen.core.constraints.example_solver.geometry import dof +from infinigen.core import tagging +from infinigen.core import tags as t +from infinigen.core.constraints import usage_lookup +from infinigen.core.util import blender as butil from infinigen_examples.indoor_constraint_examples import home_asset_usage -from infinigen.core.util import blender as butil -from infinigen.core import tagging, tags as t def test_canonical_planes_real_placeholders(): - used_as = home_asset_usage() usage_lookup.initialize_from_dict(used_as) - + pholder_facs = usage_lookup.factories_for_usage({t.Semantics.RealPlaceholder}) asset_facs = usage_lookup.factories_for_usage({t.Semantics.AssetAsPlaceholder}) test_facs = pholder_facs.union(asset_facs) test_facs.intersection_update( - usage_lookup.factories_for_usage({t.Semantics.Storage}) - .union(usage_lookup.factories_for_usage({t.Semantics.Seating})) + usage_lookup.factories_for_usage({t.Semantics.Storage}).union( + usage_lookup.factories_for_usage({t.Semantics.Seating}) + ) ) for fac in test_facs: butil.clear_scene() if fac in pholder_facs: - obj = fac(0).spawn_placeholder(0, loc=(0,0,0), rot=(0,0,0)) + obj = fac(0).spawn_placeholder(0, loc=(0, 0, 0), rot=(0, 0, 0)) elif fac in asset_facs: - obj = fac(0).spawn_asset(0, loc=(0,0,0), rot=(0,0,0)) - else: + obj = fac(0).spawn_asset(0, loc=(0, 0, 0), rot=(0, 0, 0)) + else: raise ValueError() - 
with butil.ViewportMode(obj, mode='EDIT'): + with butil.ViewportMode(obj, mode="EDIT"): butil.select(obj) - bpy.ops.mesh.select_all(action='SELECT') - bpy.ops.mesh.quads_convert_to_tris(quad_method='BEAUTY', ngon_method='BEAUTY') + bpy.ops.mesh.select_all(action="SELECT") + bpy.ops.mesh.quads_convert_to_tris( + quad_method="BEAUTY", ngon_method="BEAUTY" + ) tagging.tag_canonical_surfaces(obj) obj_tags = tagging.union_object_tags(obj) - for tag in [t.Subpart.Back, t.Subpart.Bottom]:#, t.Subpart.SupportSurface]: + for tag in [t.Subpart.Back, t.Subpart.Bottom]: # , t.Subpart.SupportSurface]: mask = tagging.tagged_face_mask(obj, {tag}) if mask.sum() == 0: obj_tags = tagging.union_object_tags(obj) raise ValueError( - f'{obj.name=} has nothing tagged for {tag=}. {obj_tags=}' - ) \ No newline at end of file + f"{obj.name=} has nothing tagged for {tag=}. {obj_tags=}" + ) diff --git a/tests/solver/test_constraint_evaluator.py b/tests/solver/test_constraint_evaluator.py index 567ac6450..e08840b74 100644 --- a/tests/solver/test_constraint_evaluator.py +++ b/tests/solver/test_constraint_evaluator.py @@ -1,38 +1,33 @@ # Copyright (c) Princeton University. # This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. -# Authors: Karhan Kayan -from itertools import chain from functools import partial -from time import time -import sys -import os -import bpy -import pytest +# Authors: Karhan Kayan +import bpy import numpy as np +import pytest from mathutils import Vector -from infinigen.core.constraints import ( - usage_lookup, - example_solver as solver, - constraint_language as cl -) +from infinigen.assets.objects.seating.chairs import ChairFactory +from infinigen.assets.objects.tables.dining_table import TableDiningFactory +from infinigen.assets.utils.bbox_from_mesh import bbox_mesh_from_hipoly +from infinigen.core import tagging +from infinigen.core import tags as t +from infinigen.core.constraints import constraint_language as cl +from infinigen.core.constraints import usage_lookup from infinigen.core.constraints.evaluator import evaluate from infinigen.core.constraints.evaluator.node_impl import node_impls -from infinigen.core.constraints.example_solver.state_def import state_from_dummy_scene, State, ObjectState +from infinigen.core.constraints.example_solver.state_def import ( + ObjectState, + State, + state_from_dummy_scene, +) from infinigen.core.util import blender as butil -from infinigen.core.constraints.example_solver.propose_discrete import lookup_generator -from infinigen.core import tagging, tags as t -import infinigen.core.constraints.example_solver.moves.addition as addition - -from infinigen.assets.tables.dining_table import TableDiningFactory -from infinigen.assets.seating.chairs import ChairFactory -from infinigen.assets.utils.bbox_from_mesh import bbox_mesh_from_hipoly - from infinigen_examples.indoor_asset_semantics import home_asset_usage from infinigen_examples.indoor_constraint_examples import home_constraints + def test_home_constraints_implemented(): butil.clear_scene() @@ -43,42 +38,46 @@ def test_home_constraints_implemented(): continue assert node.__class__ in node_impls + def make_chair_table(): butil.clear_scene() col = butil.get_collection("indoor_scene_test") chairs = butil.get_collection("chair") - tables = butil.get_collection("table") + tables = butil.get_collection("table") col.children.link(chairs) col.children.link(tables) - chair = butil.spawn_cube(size=2, location=(0, 0, 0), name='chair1') + 
chair = butil.spawn_cube(size=2, location=(0, 0, 0), name="chair1") butil.put_in_collection(chair, chairs) - table = butil.spawn_cube(size=2, location=(3, 0, 0), name='table1') + table = butil.spawn_cube(size=2, location=(3, 0, 0), name="table1") butil.put_in_collection(table, tables) return col + def test_parse_scene(): butil.clear_scene() state = state_from_dummy_scene(make_chair_table()) - assert state.objs['chair1'].tags == {t.Semantics.Chair, t.SpecificObject('chair1')} - assert state.objs['table1'].tags == {t.Semantics.Table, t.SpecificObject('table1')} + assert state.objs["chair1"].tags == {t.Semantics.Chair, t.SpecificObject("chair1")} + assert state.objs["table1"].tags == {t.Semantics.Table, t.SpecificObject("table1")} + def test_eval_node(): butil.clear_scene() state = state_from_dummy_scene(make_chair_table()) eval = partial(evaluate.evaluate_node, state=state) - + scene = cl.scene() assert eval(scene) == {"chair1", "table1"} - assert eval(scene.tagged({t.Semantics.Chair})) == {'chair1'} + assert eval(scene.tagged({t.Semantics.Chair})) == {"chair1"} assert eval(scene.tagged({t.Semantics.Seating})) == set() assert eval(scene.tagged({t.Semantics.Chair}).count()) == 1 + def test_min_dist(): butil.clear_scene() @@ -115,27 +114,28 @@ def test_min_dist(): butil.clear_scene() + def test_accessibility_monotonicity(): butil.clear_scene() scores = [] - for dist in [1,1.5,2,2.5]: + for dist in [1, 1.5, 2, 2.5]: butil.clear_scene() obj_states = {} col = butil.get_collection("indoor_scene_test") chairs = butil.get_collection("chair") - tables = butil.get_collection("table") + tables = butil.get_collection("table") col.children.link(chairs) col.children.link(tables) - chair = butil.spawn_cube(size=2, location=(0, 0, 0), name='chair1') + chair = butil.spawn_cube(size=2, location=(0, 0, 0), name="chair1") chair.rotation_euler = (0, 0, 0.1) butil.put_in_collection(chair, chairs) - table = butil.spawn_cube(size=2, location=(2+dist, 0, 0), name='table1') + table = butil.spawn_cube(size=2, location=(2 + dist, 0, 0), name="table1") butil.put_in_collection(table, tables) - obj_states[chair.name] = ObjectState(chair, tags= {t.Semantics.Chair}) - obj_states[table.name] = ObjectState(table, tags= {t.Semantics.Table}) + obj_states[chair.name] = ObjectState(chair, tags={t.Semantics.Chair}) + obj_states[table.name] = ObjectState(table, tags={t.Semantics.Table}) state = State(objs=obj_states) @@ -157,55 +157,59 @@ def test_accessibility_monotonicity(): assert np.all(np.diff(scores) < 0) + def test_accessibility_side(): - butil.clear_scene() - obj_states = {} - col = butil.get_collection("indoor_scene_test") - chairs = butil.get_collection("chair") - tables = butil.get_collection("table") - col.children.link(chairs) - col.children.link(tables) + butil.clear_scene() + obj_states = {} + col = butil.get_collection("indoor_scene_test") + chairs = butil.get_collection("chair") + tables = butil.get_collection("table") + col.children.link(chairs) + col.children.link(tables) - chair = butil.spawn_cube(size=2, location=(0, 0, 0), name='chair1') - butil.put_in_collection(chair, chairs) + chair = butil.spawn_cube(size=2, location=(0, 0, 0), name="chair1") + butil.put_in_collection(chair, chairs) - table = butil.spawn_cube(size=2, location=(0, 2, 0), name='table1') - butil.put_in_collection(table, tables) + table = butil.spawn_cube(size=2, location=(0, 2, 0), name="table1") + butil.put_in_collection(table, tables) - obj_states[chair.name] = ObjectState(chair, tags= {t.Semantics.Chair}) - obj_states[table.name] = 
ObjectState(table, tags= {t.Semantics.Table}) + obj_states[chair.name] = ObjectState(chair, tags={t.Semantics.Chair}) + obj_states[table.name] = ObjectState(table, tags={t.Semantics.Table}) - state = State(objs=obj_states) + state = State(objs=obj_states) - tagging.tag_canonical_surfaces(chair) - tagging.tag_canonical_surfaces(table) + tagging.tag_canonical_surfaces(chair) + tagging.tag_canonical_surfaces(table) - constraints = [] - score_terms = [] - scene = cl.scene() - chair = cl.tagged(scene, {t.Semantics.Chair}) - table = cl.tagged(scene, {t.Semantics.Table}) + constraints = [] + score_terms = [] + scene = cl.scene() + chair = cl.tagged(scene, {t.Semantics.Chair}) + table = cl.tagged(scene, {t.Semantics.Table}) + + score_terms += [cl.accessibility_cost(chair, table)] + problem = cl.Problem(constraints, score_terms) + res = evaluate.evaluate_problem(problem, state).loss() + print("nonaccessibility scores", res) + assert np.isclose(res, 0) - score_terms += [cl.accessibility_cost(chair, table)] - problem = cl.Problem(constraints, score_terms) - res = evaluate.evaluate_problem(problem, state).loss() - print("nonaccessibility scores", res) - assert np.isclose(res, 0) def test_accessibility_angle(): butil.clear_scene() scores = [] - for angle in [0, np.pi/4, np.pi/2, np.pi]: + for angle in [0, np.pi / 4, np.pi / 2, np.pi]: butil.clear_scene() obj_states = {} - chair = butil.spawn_cube(size=2, location=(0, 0, 0), name='chair1') + chair = butil.spawn_cube(size=2, location=(0, 0, 0), name="chair1") - table = butil.spawn_sphere(radius = 1, location=(4*np.cos(angle), 4*np.sin(angle), 0), name='table1') + table = butil.spawn_sphere( + radius=1, location=(4 * np.cos(angle), 4 * np.sin(angle), 0), name="table1" + ) print(table.location) - obj_states[chair.name] = ObjectState(chair, tags= {t.Semantics.Chair}) - obj_states[table.name] = ObjectState(table, tags= {t.Semantics.Table}) + obj_states[chair.name] = ObjectState(chair, tags={t.Semantics.Chair}) + obj_states[table.name] = ObjectState(table, tags={t.Semantics.Table}) state = State(objs=obj_states) @@ -225,6 +229,7 @@ def test_accessibility_angle(): print("nonaccessibility scores", scores) assert scores == sorted(scores, reverse=True) + def test_accessibility_volume(): butil.clear_scene() scores = [] @@ -232,11 +237,11 @@ def test_accessibility_volume(): butil.clear_scene() obj_states = {} - chair = butil.spawn_cube(size=2, location=(0, 0, 0), name='chair1') - table = butil.spawn_sphere(radius=volume, location=(6, 0, 0), name='table1') + chair = butil.spawn_cube(size=2, location=(0, 0, 0), name="chair1") + table = butil.spawn_sphere(radius=volume, location=(6, 0, 0), name="table1") - obj_states[chair.name] = ObjectState(chair, tags= {t.Semantics.Chair}) - obj_states[table.name] = ObjectState(table, tags= {t.Semantics.Table}) + obj_states[chair.name] = ObjectState(chair, tags={t.Semantics.Chair}) + obj_states[table.name] = ObjectState(table, tags={t.Semantics.Table}) state = State(objs=obj_states) @@ -256,6 +261,7 @@ def test_accessibility_volume(): print("nonaccessibility scores", scores) assert scores == sorted(scores) + # def test_accessibility_speed(): # scores = [] # butil.clear_scene() @@ -284,7 +290,7 @@ def test_accessibility_volume(): # problem = cl.Problem(constraints, score_terms) # s = time() # res = evaluate.evaluate_problem(problem, state).score() -# print(time() - s) +# print(time() - s) # scores.append(res) # print("nonaccessibility scores", scores) # assert scores == sorted(scores) @@ -293,17 +299,17 @@ def 
test_accessibility_volume(): def test_angle_alignment(): butil.clear_scene() scores = [] - for angle in np.linspace(0, np.pi/2, 5): + for angle in np.linspace(0, np.pi / 2, 5): butil.clear_scene() obj_states = {} - chair = butil.spawn_cube(size=1, location=(-3, 0, 0), name='chair1') - table = butil.spawn_sphere(radius = 1, location=(0,0, 0), name='table1') + chair = butil.spawn_cube(size=1, location=(-3, 0, 0), name="chair1") + table = butil.spawn_sphere(radius=1, location=(0, 0, 0), name="table1") # rotate chair by angle in z direction chair.rotation_euler = (0, 0, angle) - obj_states[chair.name] = ObjectState(chair, tags= {t.Semantics.Chair}) - obj_states[table.name] = ObjectState(table, tags= {t.Semantics.Table}) + obj_states[chair.name] = ObjectState(chair, tags={t.Semantics.Chair}) + obj_states[table.name] = ObjectState(table, tags={t.Semantics.Table}) state = State(objs=obj_states) @@ -324,175 +330,161 @@ def test_angle_alignment(): print("angle_alignment costs", scores) assert scores == sorted(scores) + def test_angle_alignment_multiple_objects(): butil.clear_scene() scores = [] - - for angle in np.linspace(0, np.pi/2, 5): + + for angle in np.linspace(0, np.pi / 2, 5): butil.clear_scene() obj_states = {} - - chair = butil.spawn_cube(size=1, location=(-3, 0, 0), name='chair1') - table1 = butil.spawn_sphere(radius=1, location=(0, 0, 0), name='table1') - table2 = butil.spawn_sphere(radius=1, location=(3, 0, 0), name='table2') - + + chair = butil.spawn_cube(size=1, location=(-3, 0, 0), name="chair1") + table1 = butil.spawn_sphere(radius=1, location=(0, 0, 0), name="table1") + table2 = butil.spawn_sphere(radius=1, location=(3, 0, 0), name="table2") + # Rotate chair by angle in z direction chair.rotation_euler = (0, 0, angle) - + obj_states[chair.name] = ObjectState(chair, tags={t.Semantics.Chair}) obj_states[table1.name] = ObjectState(table1, tags={t.Semantics.Table}) obj_states[table2.name] = ObjectState(table2, tags={t.Semantics.Table}) - + state = State(objs=obj_states) - + tagging.tag_canonical_surfaces(chair) tagging.tag_canonical_surfaces(table1) tagging.tag_canonical_surfaces(table2) - + constraints = [] score_terms = [] scene = cl.scene() - + chair = cl.tagged(scene, {t.Semantics.Chair}) tables = cl.tagged(scene, {t.Semantics.Table}) - + score_terms += [cl.angle_alignment_cost(chair, tables)] - + problem = cl.Problem(constraints, score_terms) res = evaluate.evaluate_problem(problem, state).loss() scores.append(res) - + print("angle_alignment costs (multiple objects):", scores) assert scores == sorted(scores) + def test_angle_alignment_multiple_objects_varying_positions(): butil.clear_scene() scores = [] - + for i in range(5): butil.clear_scene() obj_states = {} - - chair = butil.spawn_cube(size=1, location=(-3, 0, 0), name='chair') - - table_positions = [ - (0, 0, 0), - (3, 0, 0), - (0, 3, 0), - (-3, 2, 0), - (3, 3, 0) - ] - + + chair = butil.spawn_cube(size=1, location=(-3, 0, 0), name="chair") + + table_positions = [(0, 0, 0), (3, 0, 0), (0, 3, 0), (-3, 2, 0), (3, 3, 0)] + tables = [] - for j, pos in enumerate(table_positions[:i+1], start=1): - table = butil.spawn_sphere(radius=1, location=pos, name=f'table{j}') + for j, pos in enumerate(table_positions[: i + 1], start=1): + table = butil.spawn_sphere(radius=1, location=pos, name=f"table{j}") tables.append(table) obj_states[table.name] = ObjectState(table, tags={t.Semantics.Table}) - + obj_states[chair.name] = ObjectState(chair, tags={t.Semantics.Chair}) - + state = State(objs=obj_states) - + 
tagging.tag_canonical_surfaces(chair) for table in tables: tagging.tag_canonical_surfaces(table) - + constraints = [] score_terms = [] scene = cl.scene() - + chair_obj = cl.tagged(scene, {t.Semantics.Chair}) table_objs = cl.tagged(scene, {t.Semantics.Table}) - + score_terms += [cl.angle_alignment_cost(chair_obj, table_objs)] - + problem = cl.Problem(constraints, score_terms) res = evaluate.evaluate_problem(problem, state).loss() scores.append(res) - + print("angle_alignment costs (multiple objects, varying positions):", scores) assert scores == sorted(scores) + def test_angle_alignment_multipolygon_projection(): butil.clear_scene() scores = [] - + for i in range(5): butil.clear_scene() obj_states = {} - - chair = butil.spawn_cube(size=1, location=(-3, 0, 0), name='chair') - + + chair = butil.spawn_cube(size=1, location=(-3, 0, 0), name="chair") + # Create a complex object that may result in a multipolygon projection - table_verts = [ - (-1, -1, 0), - (1, -1, 0), - (1, 1, 0), - (-1, 1, 0), - (0, 0, 1) - ] - table_faces = [ - (0, 1, 2, 3), - (0, 1, 4), - (1, 2, 4), - (2, 3, 4), - (3, 0, 4) - ] - + table_verts = [(-1, -1, 0), (1, -1, 0), (1, 1, 0), (-1, 1, 0), (0, 0, 1)] + table_faces = [(0, 1, 2, 3), (0, 1, 4), (1, 2, 4), (2, 3, 4), (3, 0, 4)] + table_mesh = bpy.data.meshes.new(name="TableMesh") table_obj = bpy.data.objects.new(name="Table", object_data=table_mesh) - + scene = bpy.context.scene scene.collection.objects.link(table_obj) - + table_mesh.from_pydata(table_verts, [], table_faces) table_mesh.update() - + table_obj.location = (0, 0, 0) - + # Rotate the table object based on the iteration - chair.rotation_euler = (0, 0, i*np.pi/10) - + chair.rotation_euler = (0, 0, i * np.pi / 10) + obj_states[chair.name] = ObjectState(chair, tags={t.Semantics.Chair}) obj_states[table_obj.name] = ObjectState(table_obj, tags={t.Semantics.Table}) - + state = State(objs=obj_states) - + tagging.tag_canonical_surfaces(chair) tagging.tag_canonical_surfaces(table_obj) - + constraints = [] score_terms = [] scene = cl.scene() - + chair_obj = cl.tagged(scene, {t.Semantics.Chair}) table_objs = cl.tagged(scene, {t.Semantics.Table}) - + score_terms += [cl.angle_alignment_cost(chair_obj, table_objs)] - + problem = cl.Problem(constraints, score_terms) res = evaluate.evaluate_problem(problem, state).loss() scores.append(res) - + print("angle_alignment costs (multipolygon projection):", scores) assert sorted(scores) == scores + def test_angle_alignment_tagged(): butil.clear_scene() obj_states = {} - chair = butil.spawn_cube(size=2, location=(5, 0, 0), name='chair1') - table = butil.spawn_cube(size=2, location=(0,0, 0), name='table1') + chair = butil.spawn_cube(size=2, location=(5, 0, 0), name="chair1") + table = butil.spawn_cube(size=2, location=(0, 0, 0), name="table1") - obj_states[chair.name] = ObjectState(chair, tags= {t.Semantics.Chair}) - obj_states[table.name] = ObjectState(table, tags= {t.Semantics.Table}) + obj_states[chair.name] = ObjectState(chair, tags={t.Semantics.Chair}) + obj_states[table.name] = ObjectState(table, tags={t.Semantics.Table}) state = State(objs=obj_states) tagging.tag_canonical_surfaces(chair) tagging.tag_canonical_surfaces(table) - table.rotation_euler[2] = np.pi/2 + table.rotation_euler[2] = np.pi / 2 constraints = [] score_terms = [] @@ -500,20 +492,22 @@ def test_angle_alignment_tagged(): chair = cl.tagged(scene, {t.Semantics.Chair}) table = cl.tagged(scene, {t.Semantics.Table}) - score_terms += [cl.angle_alignment_cost(chair, table, others_tags={t.Subpart.Front})] + score_terms += [ 
+ cl.angle_alignment_cost(chair, table, others_tags={t.Subpart.Front}) + ] problem = cl.Problem(constraints, score_terms) res = evaluate.evaluate_problem(problem, state).loss() - + assert np.isclose(res, 0.5, atol=1e-3) butil.clear_scene() obj_states = {} - chair = butil.spawn_cube(size=2, location=(5, 0, 0), name='chair1') - table = butil.spawn_cube(size=2, location=(0,0, 0), name='table1') + chair = butil.spawn_cube(size=2, location=(5, 0, 0), name="chair1") + table = butil.spawn_cube(size=2, location=(0, 0, 0), name="table1") - obj_states[chair.name] = ObjectState(chair, tags= {t.Semantics.Chair}) - obj_states[table.name] = ObjectState(table, tags= {t.Semantics.Table}) + obj_states[chair.name] = ObjectState(chair, tags={t.Semantics.Chair}) + obj_states[table.name] = ObjectState(table, tags={t.Semantics.Table}) state = State(objs=obj_states) @@ -526,27 +520,29 @@ def test_angle_alignment_tagged(): chair = cl.tagged(scene, {t.Semantics.Chair}) table = cl.tagged(scene, {t.Semantics.Table}) - score_terms += [cl.angle_alignment_cost(chair, table, others_tags={t.Subpart.Front})] + score_terms += [ + cl.angle_alignment_cost(chair, table, others_tags={t.Subpart.Front}) + ] problem = cl.Problem(constraints, score_terms) res = evaluate.evaluate_problem(problem, state).loss() - + assert np.isclose(res, 0, atol=1e-3) def test_focus_score(): butil.clear_scene() scores = [] - for angle in np.linspace(0, np.pi/2, 5): + for angle in np.linspace(0, np.pi / 2, 5): butil.clear_scene() obj_states = {} - chair = butil.spawn_cube(size=1, location=(-3, 0, 0), name='chair1') - table = butil.spawn_sphere(radius = 1, location=(0,0, 0), name='table1') + chair = butil.spawn_cube(size=1, location=(-3, 0, 0), name="chair1") + table = butil.spawn_sphere(radius=1, location=(0, 0, 0), name="table1") # rotate chair by angle in z direction chair.rotation_euler = (0, 0, angle) - obj_states[chair.name] = ObjectState(chair, tags= {t.Semantics.Chair}) - obj_states[table.name] = ObjectState(table, tags= {t.Semantics.Table}) + obj_states[chair.name] = ObjectState(chair, tags={t.Semantics.Chair}) + obj_states[table.name] = ObjectState(table, tags={t.Semantics.Table}) state = State(objs=obj_states) @@ -567,22 +563,26 @@ def test_focus_score(): print("focus_score costs", scores) assert scores == sorted(scores) + @pytest.mark.skip def test_viol_amounts(): butil.clear_scene() def mk_state(n): - butil.clear_scene() obj_states = {} for i in range(n): - chair = butil.spawn_cube(size=1, location=(-3, 0, 0), name=f'chair{i}') - obj_states[chair.name] = ObjectState(chair, tags={t.Semantics.Furniture, t.Semantics.Chair}) + chair = butil.spawn_cube(size=1, location=(-3, 0, 0), name=f"chair{i}") + obj_states[chair.name] = ObjectState( + chair, tags={t.Semantics.Furniture, t.Semantics.Chair} + ) + + return State(objs=obj_states) - return State(objs=obj_states) - - cons = cl.Problem([cl.scene().tagged(t.Semantics.Furniture).count().in_range(1, 3)], []) + cons = cl.Problem( + [cl.scene().tagged(t.Semantics.Furniture).count().in_range(1, 3)], [] + ) assert evaluate.evaluate_problem(cons, mk_state(0))[1] == 1 assert evaluate.evaluate_problem(cons, mk_state(1))[1] == 0 assert evaluate.evaluate_problem(cons, mk_state(3))[1] == 0 @@ -607,17 +607,17 @@ def mk_state(n): assert evaluate.evaluate_problem(cons, mk_state(3))[1] == 0 assert evaluate.evaluate_problem(cons, mk_state(4))[1] == 1 assert evaluate.evaluate_problem(cons, mk_state(6))[1] == 3 - -def test_min_dist_tagged(): + +def test_min_dist_tagged(): butil.clear_scene() obj_states = {} - chair 
= butil.spawn_cube(size=2, location=(0, 0, 2), name='chair1') - table = butil.spawn_cube(size=10, location=(0,0, 0), name='table1') + chair = butil.spawn_cube(size=2, location=(0, 0, 2), name="chair1") + table = butil.spawn_cube(size=10, location=(0, 0, 0), name="table1") - obj_states[chair.name] = ObjectState(chair, tags= {t.Semantics.Chair}) - obj_states[table.name] = ObjectState(table, tags= {t.Semantics.Table}) + obj_states[chair.name] = ObjectState(chair, tags={t.Semantics.Chair}) + obj_states[table.name] = ObjectState(table, tags={t.Semantics.Table}) state = State(objs=obj_states) @@ -630,8 +630,6 @@ def test_min_dist_tagged(): chair = cl.tagged(scene, {t.Semantics.Chair}) table = cl.tagged(scene, {t.Semantics.Table}) - - score_terms += [cl.distance(chair, table, others_tags={t.Subpart.Front})] problem = cl.Problem(constraints, score_terms) res = evaluate.evaluate_problem(problem, state).loss() @@ -669,19 +667,19 @@ def test_table(): butil.clear_scene() gen = TableDiningFactory(0) - obj = bbox_mesh_from_hipoly(gen, 0, use_pholder=False) + obj = bbox_mesh_from_hipoly(gen, 0, use_pholder=False) # if fac in pholder_facs: # obj = fac(0).spawn_placeholder(0, loc=(0,0,0), rot=(0,0,0)) # elif fac in asset_facs: # obj = fac(0).spawn_asset(0, loc=(0,0,0), rot=(0,0,0)) - # else: + # else: # raise ValueError() - with butil.ViewportMode(obj, mode='EDIT'): + with butil.ViewportMode(obj, mode="EDIT"): butil.select(obj) - bpy.ops.mesh.select_all(action='SELECT') - bpy.ops.mesh.quads_convert_to_tris(quad_method='BEAUTY', ngon_method='BEAUTY') + bpy.ops.mesh.select_all(action="SELECT") + bpy.ops.mesh.quads_convert_to_tris(quad_method="BEAUTY", ngon_method="BEAUTY") tagging.tag_canonical_surfaces(obj) tagging.extract_tagged_faces(obj, {t.Subpart.Front}) @@ -700,23 +698,23 @@ def test_table(): # f'{obj.name=} has nothing tagged for {tag=}. {obj_tags=}' # ) + def test_reflection_asymmetry(): - """ - create a bunch of chairs and a table. The chairs are reflected along the long part of the table. + create a bunch of chairs and a table. The chairs are reflected along the long part of the table. 
check that the asymmetry score is 0 """ scores = [] butil.clear_scene() obj_states = {} - table = bbox_mesh_from_hipoly(TableDiningFactory(0), 0, use_pholder=False) - obj_states[table.name] = ObjectState(table, tags= {t.Semantics.Table}) + table = bbox_mesh_from_hipoly(TableDiningFactory(0), 0, use_pholder=False) + obj_states[table.name] = ObjectState(table, tags={t.Semantics.Table}) chairs = [] for i in range(4): - chair = bbox_mesh_from_hipoly(ChairFactory(0), i, use_pholder=False) - obj_states[chair.name] = ObjectState(chair, tags= {t.Semantics.Chair}) + chair = bbox_mesh_from_hipoly(ChairFactory(0), i, use_pholder=False) + obj_states[chair.name] = ObjectState(chair, tags={t.Semantics.Chair}) chairs.append(chair) # tagging.tag_canonical_surfaces(chair) # tagging.extract_tagged_faces(chair, {t.Semantics.Front}) @@ -725,10 +723,10 @@ def test_reflection_asymmetry(): chairs[2].location = (-1, 1, 0) chairs[3].location = (-1, -1, 0) - chairs[0].rotation_euler = (0, 0, -np.pi/2) - chairs[1].rotation_euler = (0, 0, np.pi/2) - chairs[2].rotation_euler = (0, 0, -np.pi/2) - chairs[3].rotation_euler = (0, 0, np.pi/2) + chairs[0].rotation_euler = (0, 0, -np.pi / 2) + chairs[1].rotation_euler = (0, 0, np.pi / 2) + chairs[2].rotation_euler = (0, 0, -np.pi / 2) + chairs[3].rotation_euler = (0, 0, np.pi / 2) bpy.context.view_layer.update() # chairs[0].rotation_euler = (0, 0, np.pi) @@ -763,7 +761,7 @@ def test_reflection_asymmetry(): res = evaluate.evaluate_problem(problem, state).loss() scores.append(res) - #assert the asymmetry increases if we rotate chair 0 + # assert the asymmetry increases if we rotate chair 0 chairs[0].rotation_euler = (0, 0, 0) bpy.context.view_layer.update() score_terms = [] @@ -773,7 +771,7 @@ def test_reflection_asymmetry(): scores.append(res) print("asymmetry scores", scores) - #assert monotonocity + # assert monotonocity assert scores == sorted(scores) # assert it is strict assert (scores[0] < scores[1]) and scores[1] < scores[2] @@ -782,7 +780,7 @@ def test_reflection_asymmetry(): @pytest.mark.skip def test_rotation_asymmetry(): """ - create a bunch of chairs. The chairs are rotationally symmetric and then perturbed. + create a bunch of chairs. The chairs are rotationally symmetric and then perturbed. 
""" scores = [] butil.clear_scene() @@ -790,24 +788,24 @@ def test_rotation_asymmetry(): chairs = [] for i in range(6): - chair = bbox_mesh_from_hipoly(ChairFactory(0), i, use_pholder=False) - obj_states[chair.name] = ObjectState(chair, tags= {t.Semantics.Chair}) + chair = bbox_mesh_from_hipoly(ChairFactory(0), i, use_pholder=False) + obj_states[chair.name] = ObjectState(chair, tags={t.Semantics.Chair}) chairs.append(chair) - circle_locations_rotations = [((2*np.cos(i*np.pi/3), 2*np.sin(i*np.pi/3), 0),i*np.pi/3) for i in range(6)] + circle_locations_rotations = [ + ((2 * np.cos(i * np.pi / 3), 2 * np.sin(i * np.pi / 3), 0), i * np.pi / 3) + for i in range(6) + ] np.random.shuffle(circle_locations_rotations) # put the chairs in a circle for i in range(6): chairs[i].location = circle_locations_rotations[i][0] chairs[i].rotation_euler = (0, 0, circle_locations_rotations[i][1]) - - bpy.context.view_layer.update() state = State(objs=obj_states) - constraints = [] score_terms = [] scene = cl.scene() @@ -817,7 +815,7 @@ def test_rotation_asymmetry(): problem = cl.Problem(constraints, score_terms) res = evaluate.evaluate_problem(problem, state).loss() scores.append(res) - assert np.isclose(res,0, atol=1e-2) + assert np.isclose(res, 0, atol=1e-2) # assert the asymmetry increases as we gradually move one chair from the circle chairs[0].location += Vector(np.random.rand(3)) @@ -828,7 +826,7 @@ def test_rotation_asymmetry(): res = evaluate.evaluate_problem(problem, state).loss() scores.append(res) - #assert the asymmetry increases if we rotate chair 0 + # assert the asymmetry increases if we rotate chair 0 chairs[0].rotation_euler = (0, 0, np.random.rand(1)) bpy.context.view_layer.update() score_terms = [] @@ -852,21 +850,21 @@ def test_rotation_asymmetry(): # assert it is strict assert (scores[0] < scores[1]) and scores[1] < scores[2] and scores[2] < scores[3] print("asymmetry scores", scores) - + def test_coplanarity(): butil.clear_scene() obj_states = {} - chair1 = butil.spawn_cube(size=2, location=(0, 0, 0), name='chair1') - chair2 = butil.spawn_cube(size=2, location=(4, 0, 0), name='chair2') - chair3 = butil.spawn_cube(size=2, location=(8, 0, 0), name='chair3') - chair4 = butil.spawn_cube(size=2, location=(12, 0, 0), name='chair4') + chair1 = butil.spawn_cube(size=2, location=(0, 0, 0), name="chair1") + chair2 = butil.spawn_cube(size=2, location=(4, 0, 0), name="chair2") + chair3 = butil.spawn_cube(size=2, location=(8, 0, 0), name="chair3") + chair4 = butil.spawn_cube(size=2, location=(12, 0, 0), name="chair4") - obj_states[chair1.name] = ObjectState(chair1, tags= {t.Semantics.Chair}) - obj_states[chair2.name] = ObjectState(chair2, tags= {t.Semantics.Chair}) - obj_states[chair3.name] = ObjectState(chair3, tags= {t.Semantics.Chair}) - obj_states[chair4.name] = ObjectState(chair4, tags= {t.Semantics.Chair}) + obj_states[chair1.name] = ObjectState(chair1, tags={t.Semantics.Chair}) + obj_states[chair2.name] = ObjectState(chair2, tags={t.Semantics.Chair}) + obj_states[chair3.name] = ObjectState(chair3, tags={t.Semantics.Chair}) + obj_states[chair4.name] = ObjectState(chair4, tags={t.Semantics.Chair}) state = State(objs=obj_states) @@ -882,7 +880,7 @@ def test_coplanarity(): score_terms += [cl.coplanarity_cost(chairs)] problem = cl.Problem(constraints, score_terms) - res1= evaluate.evaluate_problem(problem, state).loss() + res1 = evaluate.evaluate_problem(problem, state).loss() # print(res1) assert np.isclose(res1, 0, atol=1e-2) @@ -898,7 +896,7 @@ def test_coplanarity(): # print(res2) assert res2 > 
res1 - chair3.rotation_euler = (0, 0, np.pi/6) + chair3.rotation_euler = (0, 0, np.pi / 6) bpy.context.view_layer.update() state = State(objs=obj_states) score_terms = [] @@ -913,27 +911,27 @@ def test_coplanarity(): score_terms = [] score_terms += [cl.coplanarity_cost(chairs)] problem = cl.Problem(constraints, score_terms) - res4= evaluate.evaluate_problem(problem, state).loss() + res4 = evaluate.evaluate_problem(problem, state).loss() assert res4 > res3 def test_evaluate_problem_scalar_ops(): - state = State(objs={}) one = cl.constant(1) two = cl.constant(2) three = cl.constant(3) - e = lambda x: evaluate.evaluate_problem(cl.Problem({}, {repr(x): x}), state).loss() + def e(x): + return evaluate.evaluate_problem(cl.Problem({}, {repr(x): x}), state).loss() assert e(two) == 2 assert e(one + two) == 3 assert e(one - two) == -1 assert e(two * three) == 6 - assert e(two / three) == 2/3 - assert e(two ** three) == 8 - + assert e(two / three) == 2 / 3 + assert e(two**three) == 8 + assert e(two == two) == 1 assert e(two == one) == 0 assert e(two >= two) == 1 @@ -949,17 +947,19 @@ def test_evaluate_problem_scalar_ops(): assert e(cl.max_expr(one, two)) == 2 assert e(cl.min_expr(one, two)) == 1 - + assert e(one.clamp_min(two)) == 2 assert e(two.clamp_max(one)) == 1 assert e(-one) == -1 assert e((-one).abs()) == 1 -def test_evaluate_hinge(): +def test_evaluate_hinge(): state = State(objs={}) - e = lambda x: evaluate.evaluate_problem(cl.Problem({}, {repr(x): x}), state).loss() + + def e(x): + return evaluate.evaluate_problem(cl.Problem({}, {repr(x): x}), state).loss() one = cl.constant(1) two = cl.constant(2) @@ -971,7 +971,8 @@ def test_evaluate_hinge(): assert e(cl.hinge(one, 2, 3)) == 1 assert e(cl.hinge(two, 0, 1.5)) == 0.5 -if __name__ == '__main__': + +if __name__ == "__main__": # test_min_dist() # test_min_dist_tagged() # test_reflection_asymmetry() diff --git a/tests/solver/test_greedy_partition.py b/tests/solver/test_greedy_partition.py index 2518af262..e5806a4f0 100644 --- a/tests/solver/test_greedy_partition.py +++ b/tests/solver/test_greedy_partition.py @@ -5,49 +5,48 @@ # Authors: Alexander Raistrick import copy -import pytest -from pprint import pprint +import pytest +from test_greedy_substitutions import make_dummy_state from infinigen.core import tags as t - -from infinigen.core.constraints import ( - constraint_language as cl, - reasoning as r, - usage_lookup, - evaluator -) +from infinigen.core.constraints import constraint_language as cl +from infinigen.core.constraints import evaluator, usage_lookup +from infinigen.core.constraints import reasoning as r from infinigen.core.constraints.example_solver import ( - state_def, greedy, propose_discrete, + state_def, ) - -from infinigen_examples import indoor_constraint_examples as ex, generate_indoors +from infinigen_examples import generate_indoors +from infinigen_examples import indoor_constraint_examples as ex from infinigen_examples.util import constraint_util as cu -from test_greedy_substitutions import make_dummy_state def test_partition_basecase_irrelevant(): cons = cl.scene()[t.Semantics.TableDisplayItem].count().in_range(0, 1) res, relevant = greedy.filter_constraints(cons, r.Domain({t.Semantics.Room}, [])) assert not relevant + def test_basecase_relevant(): cons = cl.scene()[t.Semantics.Room].count().in_range(0, 1) filter = r.Domain({t.Semantics.Room}, []) res, relevant = greedy.filter_constraints(cons, filter) assert relevant -def test_partition_collapse_binop(): - cons = ( - 
cl.scene()[t.Semantics.Furniture].count().in_range(0, 1) * - cl.scene()[t.Semantics.Room].count().in_range(0, 1) - ) +def test_partition_collapse_binop(): + cons = cl.scene()[t.Semantics.Furniture].count().in_range(0, 1) * cl.scene()[ + t.Semantics.Room + ].count().in_range(0, 1) - assert not greedy.filter_constraints(cons.operands[0], r.Domain({t.Semantics.Room}, []))[1] - assert greedy.filter_constraints(cons.operands[1], r.Domain({t.Semantics.Room}, []))[1] + assert not greedy.filter_constraints( + cons.operands[0], r.Domain({t.Semantics.Room}, []) + )[1] + assert greedy.filter_constraints( + cons.operands[1], r.Domain({t.Semantics.Room}, []) + )[1] res, relevant = greedy.filter_constraints(cons, r.Domain({t.Semantics.Room}, [])) assert relevant @@ -59,120 +58,155 @@ def test_partition_collapse_binop(): def test_partition_eliminate_irrelevant(): - scene = cl.scene() firstpart = scene[t.Semantics.Furniture].count().in_range(0, 1) - secondpart = scene[t.Semantics.Furniture].all(lambda f: ( - scene[t.Semantics.Chair].related_to(f, cl.AnyRelation()).count().in_range(0, 1) - )) + secondpart = scene[t.Semantics.Furniture].all( + lambda f: ( + scene[t.Semantics.Chair] + .related_to(f, cl.AnyRelation()) + .count() + .in_range(0, 1) + ) + ) cons = firstpart * secondpart - assert not greedy.filter_constraints(secondpart, r.Domain({t.Semantics.Furniture}, []))[1] + assert not greedy.filter_constraints( + secondpart, r.Domain({t.Semantics.Furniture}, []) + )[1] res, relevant = greedy.filter_constraints(cons, r.Domain({t.Semantics.Furniture})) assert relevant assert r.expr_equal(res, firstpart) + def test_greedy_partition_bathroom(): usage_lookup.initialize_from_dict(ex.home_asset_usage()) prob = ex.home_constraints() stages = generate_indoors.default_greedy_stages() - bath_cons = prob.constraints['bathroom'] + bath_cons = prob.constraints["bathroom"] - on_floor = stages['on_floor'] - on_floor_any = r.domain_tag_substitute(on_floor, cu.variable_room, r.Domain()) + on_floor = stages["on_floor"] + on_floor_any = r.domain_tag_substitute(on_floor, cu.variable_room, r.Domain()) assert greedy.filter_constraints(bath_cons, on_floor_any)[1] - on_bathroom = r.domain_tag_substitute(on_floor, cu.variable_room, r.Domain({t.Semantics.Bathroom})) + on_bathroom = r.domain_tag_substitute( + on_floor, cu.variable_room, r.Domain({t.Semantics.Bathroom}) + ) assert greedy.filter_constraints(bath_cons, on_bathroom)[1] -def test_greedy_partition_multilevel(): +def test_greedy_partition_multilevel(): usage_lookup.initialize_from_dict(ex.home_asset_usage()) - prob = ex.home_constraints() + ex.home_constraints() stages = generate_indoors.default_greedy_stages() - bathroom = cl.scene()[{t.Semantics.Room, t.Semantics.Bathroom}].excludes(cu.room_types) + bathroom = cl.scene()[{t.Semantics.Room, t.Semantics.Bathroom}].excludes( + cu.room_types + ) storage = cl.scene()[t.Semantics.Storage] bath_cons_1 = storage.related_to(bathroom, cu.on_floor).count().in_range(0, 1) - on_hallway = r.domain_tag_substitute(stages['on_floor'], cu.variable_room, r.Domain({t.Semantics.Hallway})) + on_hallway = r.domain_tag_substitute( + stages["on_floor"], cu.variable_room, r.Domain({t.Semantics.Hallway}) + ) assert not greedy.filter_constraints(bath_cons_1, on_hallway)[1] - bath_cons_2 = bathroom.all(lambda r: storage.related_to(r, cu.on_floor).count().in_range(0, 1)) + bath_cons_2 = bathroom.all( + lambda r: storage.related_to(r, cu.on_floor).count().in_range(0, 1) + ) assert not greedy.filter_constraints(bath_cons_2, on_hallway)[1] - bath_cons_3 
= bathroom.all(lambda r: ( - storage.related_to(r).all( - lambda s: cl.scene()[t.Semantics.Object].related_to(s).count().in_range(0, 1) + bath_cons_3 = bathroom.all( + lambda r: ( + storage.related_to(r).all( + lambda s: cl.scene()[t.Semantics.Object] + .related_to(s) + .count() + .in_range(0, 1) + ) ) - )) + ) assert not greedy.filter_constraints(bath_cons_3, on_hallway)[1] -def test_greedy_partition_bathroom_nofalsepositive(): +def test_greedy_partition_bathroom_nofalsepositive(): usage_lookup.initialize_from_dict(ex.home_asset_usage()) prob = ex.home_constraints() stages = generate_indoors.default_greedy_stages() - bath_cons = prob.constraints['bathroom'] + bath_cons = prob.constraints["bathroom"] - on_hallway = r.domain_tag_substitute(stages['on_floor'], cu.variable_room, r.Domain({t.Semantics.Hallway})) + on_hallway = r.domain_tag_substitute( + stages["on_floor"], cu.variable_room, r.Domain({t.Semantics.Hallway}) + ) assert not greedy.filter_constraints(bath_cons, on_hallway)[1] + def test_greedy_partition_plants(): usage_lookup.initialize_from_dict(ex.home_asset_usage()) prob = ex.home_constraints() stages = generate_indoors.default_greedy_stages() - plant_cons = prob.constraints['plants'] + plant_cons = prob.constraints["plants"] - on_floor = stages['on_floor'] - on_floor_any = r.domain_tag_substitute(on_floor, cu.variable_room, r.Domain()) + on_floor = stages["on_floor"] + on_floor_any = r.domain_tag_substitute(on_floor, cu.variable_room, r.Domain()) assert greedy.filter_constraints(plant_cons, on_floor_any)[1] - on_bathroom = r.domain_tag_substitute(on_floor, cu.variable_room, r.Domain({t.Semantics.Bathroom})) + on_bathroom = r.domain_tag_substitute( + on_floor, cu.variable_room, r.Domain({t.Semantics.Bathroom}) + ) assert greedy.filter_constraints(plant_cons, on_bathroom)[1] -@pytest.mark.skip # filter_constraints development has been abandoned until a later date -def test_objects_on_generic_obj(): +@pytest.mark.skip # filter_constraints development has been abandoned until a later date +def test_objects_on_generic_obj(): usage_lookup.initialize_from_dict(ex.home_asset_usage()) stages = generate_indoors.default_greedy_stages() - on_obj = stages['on_obj'] + on_obj = stages["on_obj"] on_obj = r.domain_tag_substitute(on_obj, cu.variable_room, r.Domain()) on_obj = r.domain_tag_substitute( - on_obj, cu.variable_obj, r.Domain({t.SpecificObject('thatchair'), t.Semantics.Chair}) + on_obj, + cu.variable_obj, + r.Domain({t.SpecificObject("thatchair"), t.Semantics.Chair}), ) print("ON_OBJ_FILTER", on_obj) bathroom = cl.scene()[t.Semantics.Room, t.Semantics.Bathroom] storage = cl.scene()[t.Semantics.Object, t.Semantics.Storage] - prob = bathroom.all(lambda r: - storage.related_to(r).all(lambda s: ( - cl.scene()[t.Semantics.Object].related_to(s).count().in_range(0, 1) - )) + prob = bathroom.all( + lambda r: storage.related_to(r).all( + lambda s: ( + cl.scene()[t.Semantics.Object].related_to(s).count().in_range(0, 1) + ) + ) ) cons, relevant = greedy.filter_constraints(prob, on_obj) assert not relevant -@pytest.mark.skip # filter_constraints development has been abandoned until a later date -def test_on_obj_coverage(): - cons = cl.scene()[t.Semantics.Room].all(lambda r: ( - cl.scene()[t.Semantics.Storage].related_to(r).all(lambda s: ( - cl.scene()[t.Semantics.Object].related_to(s).count().in_range(0, 1) - )) - )) +@pytest.mark.skip # filter_constraints development has been abandoned until a later date +def test_on_obj_coverage(): + cons = cl.scene()[t.Semantics.Room].all( + lambda r: ( + 
cl.scene()[t.Semantics.Storage] + .related_to(r) + .all( + lambda s: ( + cl.scene()[t.Semantics.Object].related_to(s).count().in_range(0, 1) + ) + ) + ) + ) obj_in_bathroom = r.domain_tag_substitute( - generate_indoors.default_greedy_stages()['on_obj'], - cu.variable_room, - r.Domain({t.Semantics.Bathroom}) + generate_indoors.default_greedy_stages()["on_obj"], + cu.variable_room, + r.Domain({t.Semantics.Bathroom}), ) obj_in_bathroom = r.domain_tag_substitute( obj_in_bathroom, cu.variable_obj, r.Domain({t.Semantics.Storage}) @@ -181,36 +215,34 @@ def test_on_obj_coverage(): res, relevant = greedy.filter_constraints(cons, obj_in_bathroom) assert relevant -@pytest.mark.skip # filter_constraints development has been abandoned until a later date -def test_only_bathcons_coverage(): +@pytest.mark.skip # filter_constraints development has been abandoned until a later date +def test_only_bathcons_coverage(): usage_lookup.initialize_from_dict(ex.home_asset_usage()) prob = ex.home_constraints() stages = generate_indoors.default_greedy_stages() - bath_cons = prob.constraints['bathroom'] + bath_cons = prob.constraints["bathroom"] dom = r.domain_tag_substitute( - stages['on_floor'], cu.variable_room, r.Domain({t.Semantics.Bathroom}) + stages["on_floor"], cu.variable_room, r.Domain({t.Semantics.Bathroom}) ) assert greedy.filter_constraints(bath_cons, dom)[1] dom = r.domain_tag_substitute( - stages['on_wall'], cu.variable_room, r.Domain({t.Semantics.Bathroom}) + stages["on_wall"], cu.variable_room, r.Domain({t.Semantics.Bathroom}) ) assert greedy.filter_constraints(bath_cons, dom)[1] dom = r.domain_tag_substitute( - stages['on_obj'], cu.variable_room, r.Domain({t.Semantics.Bathroom}) - ) - dom = r.domain_tag_substitute( - dom, cu.variable_obj, r.Domain({t.Semantics.Storage}) + stages["on_obj"], cu.variable_room, r.Domain({t.Semantics.Bathroom}) ) + dom = r.domain_tag_substitute(dom, cu.variable_obj, r.Domain({t.Semantics.Storage})) assert greedy.filter_constraints(bath_cons, dom)[1] + @pytest.fixture def precompute_all_coverage(): - usage_lookup.initialize_from_dict(ex.home_asset_usage()) prob = ex.home_constraints() stages = generate_indoors.default_greedy_stages() @@ -219,7 +251,6 @@ def precompute_all_coverage(): score_coverage = {k: set() for k in prob.score_terms.keys()} for k, filter in stages.items(): - for roomtype in cu.room_types: room_filter = r.domain_tag_substitute( copy.deepcopy(filter), cu.variable_room, r.Domain({roomtype}) @@ -227,7 +258,7 @@ def precompute_all_coverage(): # eliminate the var, assume any object is fine, most generous possible assumption room_filter = r.domain_tag_substitute( - room_filter, cu.variable_obj, r.Domain() + room_filter, cu.variable_obj, r.Domain() ) for name, cons in prob.constraints.items(): if greedy.filter_constraints(cons, room_filter)[1]: @@ -238,32 +269,32 @@ def precompute_all_coverage(): return cons_coverage, score_coverage -@pytest.mark.skip # filter_constraints development has been abandoned until a later date -def test_specific_coverage(precompute_all_coverage): +@pytest.mark.skip # filter_constraints development has been abandoned until a later date +def test_specific_coverage(precompute_all_coverage): cons_coverage, _ = precompute_all_coverage - assert cons_coverage['bathroom'] == { - ('on_floor', t.Semantics.Bathroom), - ('on_wall', t.Semantics.Bathroom), - ('on_obj', t.Semantics.Bathroom), + assert cons_coverage["bathroom"] == { + ("on_floor", t.Semantics.Bathroom), + ("on_wall", t.Semantics.Bathroom), + ("on_obj", t.Semantics.Bathroom), } - 
assert cons_coverage['diningroom'] == { - ('on_floor', t.Semantics.DiningRoom), - ('on_wall', t.Semantics.DiningRoom), - ('on_obj', t.Semantics.DiningRoom), + assert cons_coverage["diningroom"] == { + ("on_floor", t.Semantics.DiningRoom), + ("on_wall", t.Semantics.DiningRoom), + ("on_obj", t.Semantics.DiningRoom), } - assert cons_coverage['livingroom'] == { - ('on_floor', t.Semantics.LivingRoom), - ('on_wall', t.Semantics.LivingRoom), - ('on_obj', t.Semantics.LivingRoom), + assert cons_coverage["livingroom"] == { + ("on_floor", t.Semantics.LivingRoom), + ("on_wall", t.Semantics.LivingRoom), + ("on_obj", t.Semantics.LivingRoom), } -@pytest.mark.skip # filter_constraints development has been abandoned until a later date + +@pytest.mark.skip # filter_constraints development has been abandoned until a later date def test_greedy_partition_coverage(precompute_all_coverage): - cons_coverage, score_coverage = precompute_all_coverage for k, v in cons_coverage.items(): @@ -273,29 +304,29 @@ def test_greedy_partition_coverage(precompute_all_coverage): if len(score_coverage[k]) == 0: raise ValueError(f"Score term {k} has no coverage") + def get_on_diningroom_stage(): usage_lookup.initialize_from_dict(ex.home_asset_usage()) stages = generate_indoors.default_greedy_stages() on_diningroom = r.domain_tag_substitute( - stages['on_floor'], - cu.variable_room, - r.Domain({t.Semantics.DiningRoom, t.Semantics.Room}) + stages["on_floor"], + cu.variable_room, + r.Domain({t.Semantics.DiningRoom, t.Semantics.Room}), ) return on_diningroom -@pytest.mark.skip # filter_constraints development has been abandoned until a later date -def test_greedy_partition_diningroom(): +@pytest.mark.skip # filter_constraints development has been abandoned until a later date +def test_greedy_partition_diningroom(): on_diningroom = get_on_diningroom_stage() prob = ex.home_constraints() - diningroom = prob.constraints['diningroom'] - - + diningroom = prob.constraints["diningroom"] + for node in diningroom.traverse(): if isinstance(node, cl.item): print(node) - res, relevant = greedy.filter_constraints(diningroom, on_diningroom) + res, relevant = greedy.filter_constraints(diningroom, on_diningroom) assert relevant print("FILTER", on_diningroom) @@ -304,17 +335,17 @@ def test_greedy_partition_diningroom(): assert isinstance(res, cl.ForAll) assert res.pred.__class__ is not cl.constant -@pytest.mark.skip # filter_constraints development has been abandoned until a later date -def test_diningroom_bounds_active(): +@pytest.mark.skip # filter_constraints development has been abandoned until a later date +def test_diningroom_bounds_active(): usage_lookup.initialize_from_dict(ex.home_asset_usage()) stages = generate_indoors.default_greedy_stages() on_diningroom = r.domain_tag_substitute( - stages['on_floor'], cu.variable_room, r.Domain({t.Semantics.DiningRoom}) + stages["on_floor"], cu.variable_room, r.Domain({t.Semantics.DiningRoom}) ) prob = ex.home_constraints() - diningroom = prob.constraints['diningroom'] + diningroom = prob.constraints["diningroom"] bounds_before_preproc = r.constraint_bounds(diningroom) bounds = propose_discrete.preproc_bounds( @@ -323,63 +354,61 @@ def test_diningroom_bounds_active(): assert len(bounds) > 0 -@pytest.mark.skip # filter_constraints development has been abandoned until a later date -def test_partition_keep_constants(): - cons = (cl.scene()[t.Semantics.Room].count() * 2) +@pytest.mark.skip # filter_constraints development has been abandoned until a later date +def test_partition_keep_constants(): + cons = 
cl.scene()[t.Semantics.Room].count() * 2 res, relevant = greedy.filter_constraints(cons, r.Domain({t.Semantics.Room}, [])) assert relevant assert r.expr_equal(res, cons) -@pytest.mark.skip # filter_constraints development has been abandoned until a later date -def test_multiroom_viol(): - state = make_dummy_state({ - (t.Semantics.Room,): 3 - }) +@pytest.mark.skip # filter_constraints development has been abandoned until a later date +def test_multiroom_viol(): + state = make_dummy_state({(t.Semantics.Room,): 3}) - state.objs['room_0'].tags.add(t.Semantics.DiningRoom) + state.objs["room_0"].tags.add(t.Semantics.DiningRoom) - cons = cl.scene()[t.Semantics.Room].all(lambda r: - cl.scene()[{t.Semantics.Object, t.Semantics.Storage}].related_to(r, cu.on_floor).count() == 1 + cons = cl.scene()[t.Semantics.Room].all( + lambda r: cl.scene()[{t.Semantics.Object, t.Semantics.Storage}] + .related_to(r, cu.on_floor) + .count() + == 1 ) - prob = cl.Problem({'storage': cons}, {}) + prob = cl.Problem({"storage": cons}, {}) on_diningroom = get_on_diningroom_stage() prob, relevant = greedy.filter_constraints(prob, on_diningroom) assert relevant - print("PRED", prob.constraints['storage'].pred) - print("OBJS", prob.constraints['storage'].objs) + print("PRED", prob.constraints["storage"].pred) + print("OBJS", prob.constraints["storage"].objs) result = evaluator.evaluate_problem(prob, state) - assert result.viol_count() == 1 # only one room is relevant, so only one violation applies for this stage + assert ( + result.viol_count() == 1 + ) # only one room is relevant, so only one violation applies for this stage - state.objs['stor_1'] = state_def.ObjectState( + state.objs["stor_1"] = state_def.ObjectState( obj=None, generator=None, tags={t.Semantics.Storage}, relations=[ - state_def.RelationState( - relation=cl.StableAgainst(), - target_name='room_0' - ) - ] + state_def.RelationState(relation=cl.StableAgainst(), target_name="room_0") + ], ) result = evaluator.evaluate_problem(prob, state) - assert result.viol_count() == 0 # only one room is relevant, and it has an obj + assert result.viol_count() == 0 # only one room is relevant, and it has an obj + @pytest.mark.skip def test_forall_furnroom(): - scene = cl.scene() rooms = scene[t.Semantics.Room] furniture = scene[t.Semantics.Furniture] - cons = rooms.all( - lambda r: furniture.related_to(r).count().in_range(0, 1) - ) + cons = rooms.all(lambda r: furniture.related_to(r).count().in_range(0, 1)) room = r.Domain({t.Semantics.Room}, []) furn = r.Domain({t.Semantics.Furniture}, []) @@ -397,16 +426,14 @@ def test_forall_furnroom(): res, rel = greedy.filter_constraints(cons, furn_no_room) assert not rel + @pytest.mark.skip def test_forall_narrow_pred(): - scene = cl.scene() rooms = scene[t.Semantics.Room] furniture = scene[t.Semantics.Furniture] - cons = rooms.all( - lambda r: furniture.related_to(r).count().in_range(0, 1) - ) + cons = rooms.all(lambda r: furniture.related_to(r).count().in_range(0, 1)) room = r.Domain({t.Semantics.Room}, []) stor = r.Domain({t.Semantics.Furniture}, []) @@ -424,23 +451,20 @@ def test_forall_narrow_pred(): res, rel = greedy.filter_constraints(cons, stor_no_room) assert not rel + @pytest.mark.skip def test_forall_narrow_loopvar(): - scene = cl.scene() rooms = scene[t.Semantics.Room] furniture = scene[t.Semantics.Furniture] - cons = rooms.all( - lambda r: furniture.related_to(r).count().in_range(0, 1) - ) + cons = rooms.all(lambda r: furniture.related_to(r).count().in_range(0, 1)) droom = r.Domain({t.Semantics.Room, 
t.Semantics.DiningRoom}, []) furn = r.Domain({t.Semantics.Furniture}, []) furn_room = furn.with_relation(cl.AnyRelation(), droom) furn_no_room = furn.with_relation(-cl.AnyRelation(), droom) - cons_narrow = r.FilterByDomain(rooms, droom).all( lambda r: furniture.related_to(r).count().in_range(0, 1) ) @@ -457,15 +481,12 @@ def test_forall_narrow_loopvar(): res, rel = greedy.filter_constraints(cons, furn_no_room) assert not rel + @pytest.mark.skip def test_forall_sumconst(): - scene = cl.scene() rooms = scene[t.Semantics.Room] - furniture = scene[t.Semantics.Furniture] + scene[t.Semantics.Furniture] sumcons = rooms.sum(lambda r: cl.constant(1)) assert greedy.filter_constraints(sumcons, r.Domain({t.Semantics.Room}))[1] - - - diff --git a/tests/solver/test_greedy_stages.py b/tests/solver/test_greedy_stages.py index 4e319384e..d74e21a76 100644 --- a/tests/solver/test_greedy_stages.py +++ b/tests/solver/test_greedy_stages.py @@ -4,34 +4,28 @@ # Authors: Alexander Raistrick -import logging - from pprint import pprint + import pytest +from infinigen.assets.objects.tableware import PlantContainerFactory from infinigen.core import tags as t -from infinigen.core.constraints import ( - checks, - constraint_language as cl, - reasoning as r, - usage_lookup, - evaluator -) +from infinigen.core.constraints import checks, evaluator, usage_lookup +from infinigen.core.constraints import constraint_language as cl +from infinigen.core.constraints import reasoning as r from infinigen.core.constraints.example_solver import ( - propose_discrete, greedy, - state_def + propose_discrete, + state_def, ) -from infinigen_examples import indoor_constraint_examples as ex, generate_indoors +from infinigen.core.util import blender as butil +from infinigen_examples import generate_indoors +from infinigen_examples import indoor_constraint_examples as ex from infinigen_examples.util import constraint_util as cu -from infinigen.assets.tableware import PlantContainerFactory - -from infinigen.core.util import blender as butil -@pytest.mark.parametrize('key', generate_indoors.default_greedy_stages().keys()) +@pytest.mark.parametrize("key", generate_indoors.default_greedy_stages().keys()) def test_stages_relations(key): - pprint(generate_indoors.default_greedy_stages()) v = generate_indoors.default_greedy_stages()[key] @@ -39,24 +33,26 @@ def test_stages_relations(key): assert not v.is_recursive() if len(v.relations) != 0: - if all(isinstance(r, cl.NegatedRelation) for r, _ in v.relations): - raise ValueError(f"Stage {key} has no positive relation, {[r for r, _ in v.relations]}") - #if any(isinstance(r, cl.AnyRelation) for r, _ in v.relations): + raise ValueError( + f"Stage {key} has no positive relation, {[r for r, _ in v.relations]}" + ) + # if any(isinstance(r, cl.AnyRelation) for r, _ in v.relations): # raise ValueError(f"Stage {key} has an AnyRelation which is underspecified, {v}") -#@pytest.mark.parametrize('key', generate_indoors.default_greedy_stages().keys()) -#@pytest.mark.parametrize('roomtype', cu.room_types) -#def test_stage_bound_roomsubs(key: str, roomtype: t.Semantics): -# + +# @pytest.mark.parametrize('key', generate_indoors.default_greedy_stages().keys()) +# @pytest.mark.parametrize('roomtype', cu.room_types) +# def test_stage_bound_roomsubs(key: str, roomtype: t.Semantics): +# # stages = generate_indoors.default_greedy_stages() # stage = stages[key] # stage = r.domain_tag_substitute(stage, t.Variable('room'), r.Domain({roomtype})) # -# bounds = r.constraint_bounds(ex.home_constraints()) +# bounds = 
r.constraint_bounds(ex.home_constraints()) -def test_validate_bounds(): +def test_validate_bounds(): bounds = r.constraint_bounds(ex.home_constraints()) for b in bounds: @@ -69,239 +65,322 @@ def test_validate_bounds(): if rel.parent_tags == set(): raise ValueError(f"GeometryRelation with empty parent_tags in {b=}") + def test_validate_stages(): stages = generate_indoors.default_greedy_stages() - wall = stages['on_wall'] - floor = stages['on_floor'] + wall = stages["on_wall"] + floor = stages["on_floor"] assert not wall.intersects(floor) - onobj = stages['obj_ontop_obj'] + onobj = stages["obj_ontop_obj"] assert not onobj.intersects(floor) checks.validate_stages(stages) -def test_example_intersects(): +def test_example_intersects(): on_wall_complex = cl.StableAgainst( - {-t.Subpart.Top, t.Subpart.Back, -t.Subpart.Front}, - {t.Subpart.Wall, t.Subpart.Visible, -t.Subpart.Ceiling, -t.Subpart.SupportSurface} + {-t.Subpart.Top, t.Subpart.Back, -t.Subpart.Front}, + { + t.Subpart.Wall, + t.Subpart.Visible, + -t.Subpart.Ceiling, + -t.Subpart.SupportSurface, + }, ) on_wall_simple = cl.StableAgainst({}, {t.Subpart.Wall}) assert on_wall_simple.intersects(on_wall_complex) dom = r.Domain( - {t.Semantics.WallDecoration, t.Semantics.Object}, - relations=[ - (on_wall_complex, r.Domain({t.Semantics.Room}, [])) - ] + {t.Semantics.WallDecoration, t.Semantics.Object}, + relations=[(on_wall_complex, r.Domain({t.Semantics.Room}, []))], ) - filter = generate_indoors.default_greedy_stages()['on_wall'] + filter = generate_indoors.default_greedy_stages()["on_wall"] assert propose_discrete.active_for_stage(dom, filter) - -def test_contradiction_fail(): - prob = cl.Problem(constraints=[ - cl.scene()[{t.Semantics.Object, -t.Semantics.Object}].count().in_range(1, 3) - ], score_terms=[]) + +def test_contradiction_fail(): + prob = cl.Problem( + constraints=[ + cl.scene()[{t.Semantics.Object, -t.Semantics.Object}].count().in_range(1, 3) + ], + score_terms=[], + ) with pytest.raises(ValueError): checks.check_contradictory_domains(prob) + def get_walldec(): return r.Domain( - {t.Semantics.WallDecoration, t.Semantics.Object, -t.Semantics.Room}, - [( - cl.StableAgainst( - {-t.Subpart.Front, -t.Subpart.Top, t.Subpart.Back}, - {-t.Subpart.SupportSurface, -t.Subpart.Ceiling, t.Subpart.Visible, t.Subpart.Wall} - ), - r.Domain({t.Semantics.Room, -t.Semantics.Object}, []) - )] + {t.Semantics.WallDecoration, t.Semantics.Object, -t.Semantics.Room}, + [ + ( + cl.StableAgainst( + {-t.Subpart.Front, -t.Subpart.Top, t.Subpart.Back}, + { + -t.Subpart.SupportSurface, + -t.Subpart.Ceiling, + t.Subpart.Visible, + t.Subpart.Wall, + }, + ), + r.Domain({t.Semantics.Room, -t.Semantics.Object}, []), + ) + ], ) + def test_example_walldec(): - dom = get_walldec() stages = generate_indoors.default_greedy_stages() - assert not propose_discrete.active_for_stage(dom, stages['on_ceiling']) - assert not propose_discrete.active_for_stage(dom, stages['on_floor']) + assert not propose_discrete.active_for_stage(dom, stages["on_ceiling"]) + assert not propose_discrete.active_for_stage(dom, stages["on_floor"]) - assert t.satisfies(dom.tags, stages['on_wall'].tags) - print("ONWALL", stages['on_wall']) - - assert propose_discrete.active_for_stage(dom, stages['on_wall']) + assert t.satisfies(dom.tags, stages["on_wall"].tags) + print("ONWALL", stages["on_wall"]) + + assert propose_discrete.active_for_stage(dom, stages["on_wall"]) -def test_example_floorwall(): +def test_example_floorwall(): on = cl.StableAgainst( - {t.Subpart.Bottom, -t.Subpart.Front, 
-t.Subpart.Top, -t.Subpart.Back}, - {t.Subpart.SupportSurface, t.Subpart.Visible, -t.Subpart.Wall, -t.Subpart.Ceiling} + {t.Subpart.Bottom, -t.Subpart.Front, -t.Subpart.Top, -t.Subpart.Back}, + { + t.Subpart.SupportSurface, + t.Subpart.Visible, + -t.Subpart.Wall, + -t.Subpart.Ceiling, + }, ) against = cl.StableAgainst( - {t.Subpart.Back, -t.Subpart.Top, -t.Subpart.Front}, - {t.Subpart.Visible, t.Subpart.Wall, -t.Subpart.SupportSurface, -t.Subpart.Ceiling} + {t.Subpart.Back, -t.Subpart.Top, -t.Subpart.Front}, + { + t.Subpart.Visible, + t.Subpart.Wall, + -t.Subpart.SupportSurface, + -t.Subpart.Ceiling, + }, ) dom = r.Domain( - {t.Semantics.Storage, t.Semantics.Furniture, t.Semantics.Object, -t.Semantics.Room}, + { + t.Semantics.Storage, + t.Semantics.Furniture, + t.Semantics.Object, + -t.Semantics.Room, + }, [ (on, r.Domain({t.Semantics.Room, -t.Semantics.Object}, [])), - (against, r.Domain({t.Semantics.Room, -t.Semantics.Object}, [])) - ] + (against, r.Domain({t.Semantics.Room, -t.Semantics.Object}, [])), + ], ) stages = generate_indoors.default_greedy_stages() - assert propose_discrete.active_for_stage(dom, stages['on_floor']) - assert not propose_discrete.active_for_stage(dom, stages['on_wall']) + assert propose_discrete.active_for_stage(dom, stages["on_floor"]) + assert not propose_discrete.active_for_stage(dom, stages["on_wall"]) -def test_example_secondary(): - - floorwall_furn = r.Domain({t.Semantics.Furniture, t.Semantics.Storage, t.Semantics.Object}, [ - (cu.on_floor, r.Domain({t.Semantics.Room, -t.Semantics.Object}, [])), - (cu.against_wall, r.Domain({t.Semantics.Room, -t.Semantics.Object}, [])) - ]) - dom = r.Domain({t.FromGenerator(PlantContainerFactory), t.Semantics.Object, -t.Semantics.Room}, [ - (cl.StableAgainst({t.Subpart.Bottom}, {t.Subpart.Top}), floorwall_furn) - ]) +def test_example_secondary(): + floorwall_furn = r.Domain( + {t.Semantics.Furniture, t.Semantics.Storage, t.Semantics.Object}, + [ + (cu.on_floor, r.Domain({t.Semantics.Room, -t.Semantics.Object}, [])), + (cu.against_wall, r.Domain({t.Semantics.Room, -t.Semantics.Object}, [])), + ], + ) + dom = r.Domain( + {t.FromGenerator(PlantContainerFactory), t.Semantics.Object, -t.Semantics.Room}, + [(cl.StableAgainst({t.Subpart.Bottom}, {t.Subpart.Top}), floorwall_furn)], + ) stages = generate_indoors.default_greedy_stages() - on_obj = stages['obj_ontop_obj'] + on_obj = stages["obj_ontop_obj"] assert propose_discrete.active_for_stage(dom, on_obj) -def test_example_sideobj(): +def test_example_sideobj(): anyroom = r.Domain({t.Semantics.Room, -t.Semantics.Object}, []) - objonroom = r.Domain({t.Semantics.Object, t.Semantics.Table, -t.Semantics.Room}, [ - (cu.on_floor, anyroom) - ]) + objonroom = r.Domain( + {t.Semantics.Object, t.Semantics.Table, -t.Semantics.Room}, + [(cu.on_floor, anyroom)], + ) - dom = r.Domain({t.Semantics.Object, t.Semantics.Chair, -t.Semantics.Room}, [ - (cu.front_against, objonroom), - (cu.on_floor, anyroom) - ]) + dom = r.Domain( + {t.Semantics.Object, t.Semantics.Chair, -t.Semantics.Room}, + [(cu.front_against, objonroom), (cu.on_floor, anyroom)], + ) stages = generate_indoors.default_greedy_stages() - assert propose_discrete.active_for_stage(dom, stages['side_obj']) - assert not propose_discrete.active_for_stage(dom, stages['on_floor']) + assert propose_discrete.active_for_stage(dom, stages["side_obj"]) + assert not propose_discrete.active_for_stage(dom, stages["on_floor"]) -def test_example_monitor(): - desk = r.Domain({t.Semantics.Object, t.Semantics.Desk, -t.Semantics.Room}, [ - (cu.on_floor, 
r.Domain({t.Semantics.Room, -t.Semantics.Object}, [])), - (cu.against_wall, r.Domain({t.Semantics.Room, -t.Semantics.Object}, [])) - ]) +def test_example_monitor(): + desk = r.Domain( + {t.Semantics.Object, t.Semantics.Desk, -t.Semantics.Room}, + [ + (cu.on_floor, r.Domain({t.Semantics.Room, -t.Semantics.Object}, [])), + (cu.against_wall, r.Domain({t.Semantics.Room, -t.Semantics.Object}, [])), + ], + ) - monitor = r.Domain({t.Semantics.Object, t.Semantics.Chair, -t.Semantics.Room}, [ # chair vs other tags doesnt matter - (cu.ontop, desk), - (cu.against_wall, r.Domain({t.Semantics.Room, -t.Semantics.Object}, [])) - ]) + monitor = r.Domain( + {t.Semantics.Object, t.Semantics.Chair, -t.Semantics.Room}, + [ # chair vs other tags doesnt matter + (cu.ontop, desk), + (cu.against_wall, r.Domain({t.Semantics.Room, -t.Semantics.Object}, [])), + ], + ) stages = generate_indoors.default_greedy_stages() - assert propose_discrete.active_for_stage(monitor, stages['obj_ontop_obj']) - assert not propose_discrete.active_for_stage(monitor, stages['on_wall']) + assert propose_discrete.active_for_stage(monitor, stages["obj_ontop_obj"]) + assert not propose_discrete.active_for_stage(monitor, stages["on_wall"]) -def test_example_on_obj(): - not_others = {-t.Semantics.LivingRoom, -t.Semantics.Hallway, -t.Semantics.Closet, -t.Semantics.Balcony, -t.Semantics.Staircase, -t.Semantics.Garage, -t.Semantics.DiningRoom, -t.Semantics.Utility, -t.Semantics.Bathroom, -t.Semantics.Kitchen} - bedroom_storage = r.Domain({t.Semantics.Object, t.Semantics.Furniture, t.Semantics.Storage}, [ - (cu.on_floor, r.Domain({t.Semantics.Bedroom, t.Semantics.Room}.union(not_others), [])), - (cu.against_wall, r.Domain({t.Semantics.Bedroom, t.Semantics.Room}.union(not_others), [])) - ]) +def test_example_on_obj(): + not_others = { + -t.Semantics.LivingRoom, + -t.Semantics.Hallway, + -t.Semantics.Closet, + -t.Semantics.Balcony, + -t.Semantics.Staircase, + -t.Semantics.Garage, + -t.Semantics.DiningRoom, + -t.Semantics.Utility, + -t.Semantics.Bathroom, + -t.Semantics.Kitchen, + } + bedroom_storage = r.Domain( + {t.Semantics.Object, t.Semantics.Furniture, t.Semantics.Storage}, + [ + ( + cu.on_floor, + r.Domain({t.Semantics.Bedroom, t.Semantics.Room}.union(not_others), []), + ), + ( + cu.against_wall, + r.Domain({t.Semantics.Bedroom, t.Semantics.Room}.union(not_others), []), + ), + ], + ) - obj = r.Domain({t.Semantics.OfficeShelfItem, t.Semantics.Object}, [ - (cu.on, bedroom_storage) - ]) + obj = r.Domain( + {t.Semantics.OfficeShelfItem, t.Semantics.Object}, [(cu.on, bedroom_storage)] + ) - onfloor = generate_indoors.default_greedy_stages()['on_floor'] - dining = r.domain_tag_substitute(onfloor, cu.variable_room, r.Domain({t.Semantics.DiningRoom})) + onfloor = generate_indoors.default_greedy_stages()["on_floor"] + dining = r.domain_tag_substitute( + onfloor, cu.variable_room, r.Domain({t.Semantics.DiningRoom}) + ) assert not propose_discrete.active_for_stage(obj, dining) -def test_active_incorrect_room(): - onfloor = generate_indoors.default_greedy_stages()['on_floor'] - dining = r.domain_tag_substitute(onfloor, cu.variable_room, r.Domain({t.Semantics.DiningRoom})) +def test_active_incorrect_room(): + onfloor = generate_indoors.default_greedy_stages()["on_floor"] + dining = r.domain_tag_substitute( + onfloor, cu.variable_room, r.Domain({t.Semantics.DiningRoom}) + ) - sofa = r.Domain({t.Semantics.Object, t.Semantics.Seating, -t.Semantics.Room}, [ - (cu.on_floor, r.Domain({t.Semantics.LivingRoom, -t.Semantics.DiningRoom, -t.Semantics.Object}, [])) - 
]) + sofa = r.Domain( + {t.Semantics.Object, t.Semantics.Seating, -t.Semantics.Room}, + [ + ( + cu.on_floor, + r.Domain( + { + t.Semantics.LivingRoom, + -t.Semantics.DiningRoom, + -t.Semantics.Object, + }, + [], + ), + ) + ], + ) assert not propose_discrete.active_for_stage(sofa, dining) -def test_stage_intersect_table(): - onfloor = generate_indoors.default_greedy_stages()['on_floor'] - onfloor_dining = r.domain_tag_substitute(onfloor, cu.variable_room, r.Domain({t.Semantics.DiningRoom, t.SpecificObject('diningroom01')})) +def test_stage_intersect_table(): + onfloor = generate_indoors.default_greedy_stages()["on_floor"] + onfloor_dining = r.domain_tag_substitute( + onfloor, + cu.variable_room, + r.Domain({t.Semantics.DiningRoom, t.SpecificObject("diningroom01")}), + ) - dining = r.Domain({t.Semantics.Room, t.Semantics.DiningRoom, -t.Semantics.Object}, []) - table = r.Domain({t.Semantics.Object, t.Semantics.Table, -t.Semantics.Room}, [ - (cu.on_floor, dining) - ]) + dining = r.Domain( + {t.Semantics.Room, t.Semantics.DiningRoom, -t.Semantics.Object}, [] + ) + table = r.Domain( + {t.Semantics.Object, t.Semantics.Table, -t.Semantics.Room}, + [(cu.on_floor, dining)], + ) inter = onfloor_dining.intersection(table) assert len(inter.relations) == 2 assert inter.relations[0][0].__class__ is cl.StableAgainst assert inter.relations[1][0].__class__ is cl.NegatedRelation -def test_obj_on_ceilinglight(): +def test_obj_on_ceilinglight(): bounds = r.constraint_bounds(ex.home_constraints()) - ceilinglight = r.Domain({t.Semantics.Object, t.Semantics.Lighting, -t.Semantics.Room}, [ - (cu.hanging, r.Domain({t.Semantics.Room, -t.Semantics.Object}, [])) - ]) + ceilinglight = r.Domain( + {t.Semantics.Object, t.Semantics.Lighting, -t.Semantics.Room}, + [(cu.hanging, r.Domain({t.Semantics.Room, -t.Semantics.Object}, []))], + ) active_bounds = [ - b for b in bounds - if propose_discrete.active_for_stage(ceilinglight, b.domain) + b for b in bounds if propose_discrete.active_for_stage(ceilinglight, b.domain) ] assert active_bounds == [] + def test_greedy_partition_home(): usage_lookup.initialize_from_dict(ex.home_asset_usage()) prob = ex.home_constraints() checks.check_problem_greedy_coverage(prob, generate_indoors.default_greedy_stages()) + def test_contradiction_home(): prob = ex.home_constraints() checks.check_contradictory_domains(prob) -@pytest.mark.parametrize('rtype', sorted(cu.room_types, key=lambda x: x.name)) -def test_room_has_viols_at_init(rtype): +@pytest.mark.parametrize("rtype", sorted(cu.room_types, key=lambda x: x.name)) +def test_room_has_viols_at_init(rtype): prob = ex.home_constraints() ostate_name = str(rtype) - state = state_def.State({ - ostate_name: state_def.ObjectState( - obj=butil.spawn_cube(), generator=None, tags={rtype, t.Semantics.Room}, relations=[] - ) - }) + state = state_def.State( + { + ostate_name: state_def.ObjectState( + obj=butil.spawn_cube(), + generator=None, + tags={rtype, t.Semantics.Room}, + relations=[], + ) + } + ) active_count = greedy.update_active_flags(state, {cu.variable_room: ostate_name}) print("active", rtype, active_count) assert active_count > 0 - filter = generate_indoors.default_greedy_stages()['on_floor'] - filter = r.domain_tag_substitute(filter, cu.variable_room, r.Domain({rtype, t.Semantics.Room})) + filter = generate_indoors.default_greedy_stages()["on_floor"] + filter = r.domain_tag_substitute( + filter, cu.variable_room, r.Domain({rtype, t.Semantics.Room}) + ) result = evaluator.evaluate_problem(prob, state, filter) assert result.viol_count() > 0 - - 
- - - - - - - - \ No newline at end of file diff --git a/tests/solver/test_greedy_substitutions.py b/tests/solver/test_greedy_substitutions.py index c499628ba..ee75fff4e 100644 --- a/tests/solver/test_greedy_substitutions.py +++ b/tests/solver/test_greedy_substitutions.py @@ -5,73 +5,65 @@ # Authors: Alexander Raistrick from infinigen.core import tags as t +from infinigen.core.constraints import constraint_language as cl +from infinigen.core.constraints import reasoning as r +from infinigen.core.constraints.example_solver import greedy, state_def -from infinigen.core.constraints import ( - constraint_language as cl, - reasoning as r -) -from infinigen.core.constraints.example_solver import ( - state_def, - greedy -) def make_dummy_state(type_counts: dict[tuple[t.Tag], int]): - objs = {} for tags, count in type_counts.items(): for i in range(count): - name = '_'.join([t.value for t in tags]) + f'_{i}' + name = "_".join([t.value for t in tags]) + f"_{i}" objs[name] = state_def.ObjectState( obj=None, generator=None, tags=set(tags).union([t.SpecificObject(name)]), - relations=[] + relations=[], ) - return state_def.State( - objs=objs - ) + return state_def.State(objs=objs) -def test_substitutions_no_vars(): - state = make_dummy_state({ - (t.Semantics.Room,): 3, - }) - var_dom = r.Domain( - {t.Semantics.Room}, - [] +def test_substitutions_no_vars(): + state = make_dummy_state( + { + (t.Semantics.Room,): 3, + } ) + var_dom = r.Domain({t.Semantics.Room}, []) + subs = list(greedy.substitutions(var_dom, state)) assert len(subs) == 1 -def test_substitutions_simple(): - state = make_dummy_state({ - (t.Semantics.Room,): 3, - }) - var_dom = r.Domain( - {t.Semantics.Room, t.Variable('room')}, - [] +def test_substitutions_simple(): + state = make_dummy_state( + { + (t.Semantics.Room,): 3, + } ) + var_dom = r.Domain({t.Semantics.Room, t.Variable("room")}, []) + subs = list(greedy.substitutions(var_dom, state)) assert len(subs) == 3 - assert t.SpecificObject('room_0') in subs[0].tags - assert t.SpecificObject('room_1') in subs[1].tags - assert t.SpecificObject('room_2') in subs[2].tags + assert t.SpecificObject("room_0") in subs[0].tags + assert t.SpecificObject("room_1") in subs[1].tags + assert t.SpecificObject("room_2") in subs[2].tags -def test_substitutions_child(): - state = make_dummy_state({ - (t.Semantics.Room,): 4, - }) +def test_substitutions_child(): + state = make_dummy_state( + { + (t.Semantics.Room,): 4, + } + ) var_dom = r.Domain( {t.Semantics.Object}, - [ - (cl.AnyRelation(), r.Domain({t.Semantics.Room, t.Variable('room')}, [])) - ] + [(cl.AnyRelation(), r.Domain({t.Semantics.Room, t.Variable("room")}, []))], ) subs = list(greedy.substitutions(var_dom, state)) @@ -79,42 +71,39 @@ def test_substitutions_child(): def test_substitutions_child_complex(): + state = make_dummy_state( + { + (t.Semantics.Room,): 4, + } + ) - state = make_dummy_state({ - (t.Semantics.Room,): 4, - }) - - state.objs['obj_0'] = state_def.ObjectState( + state.objs["obj_0"] = state_def.ObjectState( obj=None, generator=None, - tags={t.Semantics.Object, t.SpecificObject('obj_0')}, + tags={t.Semantics.Object, t.SpecificObject("obj_0")}, relations=[ - state_def.RelationState( - relation=cl.Touching(), target_name='room_0' - ) - ] + state_def.RelationState(relation=cl.Touching(), target_name="room_0") + ], ) - state.objs['obj_1'] = state_def.ObjectState( + state.objs["obj_1"] = state_def.ObjectState( obj=None, generator=None, - tags={t.Semantics.Object, t.SpecificObject('obj_1')}, + tags={t.Semantics.Object, 
         relations=[
-            state_def.RelationState(
-                relation=cl.Touching(), target_name='room_1'
-            )
-        ]
+            state_def.RelationState(relation=cl.Touching(), target_name="room_1")
+        ],
     )
     var_dom = r.Domain(
-        {t.Semantics.Object, t.Variable('obj')},
+        {t.Semantics.Object, t.Variable("obj")},
         [
-            (cl.AnyRelation(), r.Domain({t.Semantics.Room, t.Variable('room')}, [])),
-        ]
+            (cl.AnyRelation(), r.Domain({t.Semantics.Room, t.Variable("room")}, [])),
+        ],
     )
     subs = list(greedy.substitutions(var_dom, state))
     print(subs)
     assert len(subs) == 2
-    assert len([s for s in subs if t.SpecificObject('obj_0') in s.tags]) == 1
-    assert len([s for s in subs if t.SpecificObject('obj_1') in s.tags]) == 1
\ No newline at end of file
+    assert len([s for s in subs if t.SpecificObject("obj_0") in s.tags]) == 1
+    assert len([s for s in subs if t.SpecificObject("obj_1") in s.tags]) == 1
diff --git a/tests/solver/test_stable_against.py b/tests/solver/test_stable_against.py
index 005574260..27679655d 100644
--- a/tests/solver/test_stable_against.py
+++ b/tests/solver/test_stable_against.py
@@ -3,55 +3,47 @@
 # Authors: Karhan Kayan
-import bpy
-from itertools import chain
-from functools import partial
+
+import bpy
 # import pytest
 import numpy as np
-import sys
-import os
-
-from infinigen.core.constraints.example_solver.geometry import dof, parse_scene, planes, stability, validity
 from mathutils import Vector
-from infinigen.core.constraints import (
-    usage_lookup,
-    example_solver as solver,
-    constraint_language as cl
-)
-from infinigen.core import tagging, tags as t
+from infinigen.core import tagging
+from infinigen.core import tags as t
+from infinigen.core.constraints import constraint_language as cl
+from infinigen.core.constraints.example_solver import state_def
+from infinigen.core.constraints.example_solver.geometry import parse_scene, validity
 from infinigen.core.util import blender as butil
-from infinigen.core.constraints.example_solver import (
-    state_def
-)
+
 def make_scene(loc2):
     """Create a scene with a table and a cup, and return the state."""
     butil.clear_scene()
     objs = {}
-    table = butil.spawn_cube(scale=(5, 5, 1), name='table')
-    cup = butil.spawn_cube(scale=(1, 1, 1), name='cup', location=loc2)
+    table = butil.spawn_cube(scale=(5, 5, 1), name="table")
+    cup = butil.spawn_cube(scale=(1, 1, 1), name="cup", location=loc2)
     for o in [table, cup]:
         butil.apply_transform(o)
         parse_scene.preprocess_obj(o)
         tagging.tag_canonical_surfaces(o)
-    assert table.scale == Vector((1,1,1))
-    assert cup.location != Vector((0,0,0))
+    assert table.scale == Vector((1, 1, 1))
+    assert cup.location != Vector((0, 0, 0))
     bpy.context.view_layer.update()
-    objs['table'] = state_def.ObjectState(table)
-    objs['cup'] = state_def.ObjectState(cup)
-    objs['cup'].relations.append(
+    objs["table"] = state_def.ObjectState(table)
+    objs["cup"] = state_def.ObjectState(cup)
+    objs["cup"].relations.append(
         state_def.RelationState(
             cl.StableAgainst({t.Subpart.Bottom}, {t.Subpart.Top}),
-            target_name='table',
+            target_name="table",
             child_plane_idx=0,
-            parent_plane_idx=0
+            parent_plane_idx=0,
         )
     )
@@ -59,106 +51,104 @@ def make_scene(loc2):
     return state_def.State(objs=objs)
-def test_stable_against():
+def test_stable_against():
     # too low, intersects ground
-    assert not validity.check_post_move_validity(make_scene((0, 0, 0.5)), 'cup')
+    assert not validity.check_post_move_validity(make_scene((0, 0, 0.5)), "cup")
     # exactly touches surface
-    assert validity.check_post_move_validity(make_scene((0, 0, 1)), 'cup')
+    assert validity.check_post_move_validity(make_scene((0, 0, 1)), "cup")
     # underneath
-    assert not validity.check_post_move_validity(make_scene((0, 0, -3)), 'cup')
+    assert not validity.check_post_move_validity(make_scene((0, 0, -3)), "cup")
     # exactly at corner
-    assert validity.check_post_move_validity(make_scene((2, 2, 1)), 'cup')
+    assert validity.check_post_move_validity(make_scene((2, 2, 1)), "cup")
    # slightly over corner
-    assert not validity.check_post_move_validity(make_scene((2.1, 2.1, 1)), 'cup')
+    assert not validity.check_post_move_validity(make_scene((2.1, 2.1, 1)), "cup")
     # farr away
-    assert not validity.check_post_move_validity(make_scene((4, 4, 0.5)), 'cup')
+    assert not validity.check_post_move_validity(make_scene((4, 4, 0.5)), "cup")
+
 def test_horizontal_stability():
     butil.clear_scene()
     objs = {}
-    table = butil.spawn_cube(name='table')
-    table.dimensions = (4,10,2)
+    table = butil.spawn_cube(name="table")
+    table.dimensions = (4, 10, 2)
-    chair1 = butil.spawn_cube(name='chair1')
-    chair1.dimensions = (2,2,3)
-    chair1.location = (3,3,0)
+    chair1 = butil.spawn_cube(name="chair1")
+    chair1.dimensions = (2, 2, 3)
+    chair1.location = (3, 3, 0)
-    chair2 = butil.spawn_cube(name='chair2')
-    chair2.dimensions = (2,2,3)
-    chair2.location = (3,-3,0)
+    chair2 = butil.spawn_cube(name="chair2")
+    chair2.dimensions = (2, 2, 3)
+    chair2.location = (3, -3, 0)
-    chair3 = butil.spawn_cube(name='chair3')
-    chair3.dimensions = (2,2,3)
-    chair3.location = (-3,3,0)
+    chair3 = butil.spawn_cube(name="chair3")
+    chair3.dimensions = (2, 2, 3)
+    chair3.location = (-3, 3, 0)
-    chair4 = butil.spawn_cube(name='chair4')
-    chair4.dimensions = (2,2,3)
-    chair4.location = (-3,-3,0)
+    chair4 = butil.spawn_cube(name="chair4")
+    chair4.dimensions = (2, 2, 3)
+    chair4.location = (-3, -3, 0)
     for o in [table, chair1, chair2, chair3, chair4]:
         butil.apply_transform(o)
         parse_scene.preprocess_obj(o)
         tagging.tag_canonical_surfaces(o)
     with butil.SelectObjects([table, chair1, chair2, chair3, chair4]):
         # rotate
-        bpy.ops.transform.rotate(value=np.pi/4, orient_axis='Z', orient_type='GLOBAL')
+        bpy.ops.transform.rotate(value=np.pi / 4, orient_axis="Z", orient_type="GLOBAL")
     # butil.save_blend('test.blend')
     bpy.context.view_layer.update()
-
-
-    objs['table'] = state_def.ObjectState(table)
-    objs['chair1'] = state_def.ObjectState(chair1)
-    objs['chair2'] = state_def.ObjectState(chair2)
-    objs['chair3'] = state_def.ObjectState(chair3)
-    objs['chair4'] = state_def.ObjectState(chair4)
-    objs['chair1'].relations.append(
+    objs["table"] = state_def.ObjectState(table)
+    objs["chair1"] = state_def.ObjectState(chair1)
+    objs["chair2"] = state_def.ObjectState(chair2)
+    objs["chair3"] = state_def.ObjectState(chair3)
+    objs["chair4"] = state_def.ObjectState(chair4)
+    objs["chair1"].relations.append(
         state_def.RelationState(
             cl.StableAgainst({t.Subpart.Back}, {t.Subpart.Front}, check_z=False),
-            target_name='table',
+            target_name="table",
             child_plane_idx=0,
-            parent_plane_idx=0
+            parent_plane_idx=0,
         )
     )
-    objs['chair2'].relations.append(
+    objs["chair2"].relations.append(
         state_def.RelationState(
             cl.StableAgainst({t.Subpart.Back}, {t.Subpart.Front}, check_z=False),
-            target_name='table',
+            target_name="table",
             child_plane_idx=0,
-            parent_plane_idx=0
+            parent_plane_idx=0,
         )
     )
-    objs['chair3'].relations.append(
+    objs["chair3"].relations.append(
         state_def.RelationState(
             cl.StableAgainst({t.Subpart.Front}, {t.Subpart.Back}, check_z=False),
-            target_name='table',
+            target_name="table",
             child_plane_idx=0,
-            parent_plane_idx=0
+            parent_plane_idx=0,
         )
     )
-    objs['chair4'].relations.append(
+    objs["chair4"].relations.append(
         state_def.RelationState(
             cl.StableAgainst({t.Subpart.Front}, {t.Subpart.Back}, check_z=False),
-            target_name='table',
+            target_name="table",
             child_plane_idx=0,
-            parent_plane_idx=0
+            parent_plane_idx=0,
         )
     )
     state = state_def.State(objs=objs)
-    assert validity.check_post_move_validity(state, 'chair1')
-    assert validity.check_post_move_validity(state, 'chair2')
-    assert validity.check_post_move_validity(state, 'chair3')
-    assert validity.check_post_move_validity(state, 'chair4')
+    assert validity.check_post_move_validity(state, "chair1")
+    assert validity.check_post_move_validity(state, "chair2")
+    assert validity.check_post_move_validity(state, "chair3")
+    assert validity.check_post_move_validity(state, "chair4")
     # butil.save_blend('test.blend')
-
-if __name__ == '__main__':
-    test_horizontal_stability()
\ No newline at end of file
+if __name__ == "__main__":
+    test_horizontal_stability()
diff --git a/tests/solver/test_state_def.py b/tests/solver/test_state_def.py
index 8b03fd67b..e57de90dd 100644
--- a/tests/solver/test_state_def.py
+++ b/tests/solver/test_state_def.py
@@ -4,41 +4,21 @@
 # Authors: Alexander Raistrick
-from itertools import chain
-from functools import partial
 import json
 # import pytest
-import bpy
-import numpy as np
-import sys
-import os
-
-from infinigen.core.constraints.example_solver.geometry import dof, parse_scene, planes, stability, validity
 from mathutils import Vector
-
-from infinigen.core.constraints import (
-    usage_lookup,
-    example_solver as solver,
-    constraint_language as cl
-)
-from infinigen.core import tagging, tags as t
-from infinigen.core.util import blender as butil
-from infinigen.core.constraints.example_solver import (
-    state_def
-)
-
 from test_stable_against import make_scene
-def test_state_to_json(tmp_path):
+def test_state_to_json(tmp_path):
     state = make_scene(Vector((1, 0, 0)))
-
-    path = tmp_path/'state.json'
+
+    path = tmp_path / "state.json"
     state.to_json(path)
     with path.open() as json_file:
         state_json = json.load(json_file)
-    assert sorted(list(state_json['objs'].keys())) == ['cup', 'table']
-    assert len(state_json['objs']['cup']['relations']) == 1
\ No newline at end of file
+    assert sorted(list(state_json["objs"].keys())) == ["cup", "table"]
+    assert len(state_json["objs"]["cup"]["relations"]) == 1
diff --git a/tests/test_terrain_basic.py b/tests/test_terrain_basic.py
index 968fd9b4f..bcb0d49ec 100644
--- a/tests/test_terrain_basic.py
+++ b/tests/test_terrain_basic.py
@@ -3,45 +3,44 @@
 # Authors: Zeyu Ma
-from pathlib import Path
-import pytest
 import bpy
 import gin
-from infinigen.terrain import Terrain
+import pytest
+
 from infinigen.core.surface import registry
 from infinigen.core.util.organization import Task
+from infinigen.terrain import Terrain
+from infinigen_examples.util.test_utils import setup_gin
-from infinigen_examples.util.test_utils import (
-    setup_gin,
-)
-@pytest.mark.skip_for_ci
+@pytest.mark.skip
 @pytest.mark.nature
 def test_terrain_runs():
-
     setup_gin(
-        'infinigen_examples/configs_nature', configs=['fast_terrain_assets'],
+        "infinigen_examples/configs_nature",
+        configs=["base_nature.gin", "fast_terrain_assets"],
         overrides=[
-            'scene.caves_chance=1',
-            'scene.landtiles_chance=1',
-            'scene.ground_chance=1',
-            'scene.warped_rocks_chance=1',
-            'scene.voronoi_rocks_chance=1',
-            'scene.voronoi_grains_chance=0',
-            'scene.upsidedown_mountains_chance=1',
-            'scene.waterbody_chance=1',
-            'scene.volcanos_chance=0',
-            'scene.ground_ice_chance=0',
+            "scene.caves_chance=1",
+            "scene.landtiles_chance=1",
+            "scene.ground_chance=1",
+            "scene.warped_rocks_chance=1",
+            "scene.voronoi_rocks_chance=1",
+            "scene.voronoi_grains_chance=0",
+            "scene.upsidedown_mountains_chance=1",
+            "scene.waterbody_chance=1",
+            "scene.volcanos_chance=0",
+            "scene.ground_ice_chance=0",
         ],
     )
-    bpy.ops.preferences.addon_enable(module='add_mesh_extra_objects')
-    bpy.ops.preferences.addon_enable(module='ant_landscape')
+    bpy.ops.preferences.addon_enable(module="add_mesh_extra_objects")
+    bpy.ops.preferences.addon_enable(module="ant_landscape")
-    terrain = Terrain(0, registry, task=Task.Coarse, on_the_fly_asset_folder="/tmp/terrain_tests")
+    terrain = Terrain(
+        0, registry, task=Task.Coarse, on_the_fly_asset_folder="/tmp/terrain_tests"
+    )
     terrain.coarse_terrain()
     gin.clear_config()
-    gin.unlock_config()
\ No newline at end of file
+    gin.unlock_config()
diff --git a/tests/tools/test_export.py b/tests/tools/test_export.py
index 92bb76183..a3ffbb72a 100644
--- a/tests/tools/test_export.py
+++ b/tests/tools/test_export.py
@@ -4,24 +4,20 @@
 # Authors: Alexander Raistrick
-import copy
-
-import pytest
 import bpy
+import pytest
-from infinigen.tools import export
-
-from infinigen.assets.mollusk import MolluskFactory
+from infinigen.assets.objects.mollusk import MolluskFactory
 from infinigen.core.util import blender as butil
-from infinigen.tools.export import triangulate_meshes
+from infinigen.tools import export
 TEST_FORMATS = ["obj", "usdc", "fbx", "ply", "usdc"]
 TEST_IMAGE_RES = 32
+
 @pytest.mark.parametrize("format", TEST_FORMATS)
 def test_export_one_obj(format, tmp_path):
-
     butil.clear_scene()
     asset = MolluskFactory(0).spawn_asset(0)
     file = export.export_single_obj(asset, tmp_path, format, image_res=TEST_IMAGE_RES)
@@ -33,18 +29,20 @@ def test_export_one_obj(format, tmp_path):
     butil.clear_scene()
     new_obj = butil.import_mesh(file)
-    if format == 'usdc':
-        assert num_objs + 1 == len(bpy.data.objects) #usdc import generates extra "world" prim
+    if format == "usdc":
+        assert num_objs + 1 == len(
+            bpy.data.objects
+        )  # usdc import generates extra "world" prim
     else:
         assert num_objs == len(bpy.data.objects)
     assert len(new_obj.data.polygons) == asset_polys
-
+
     # TODO David Yan add other guarantees (count objects, count/names of materials, any others)
+
 @pytest.mark.parametrize("format", TEST_FORMATS)
 def test_export_curr_scene(format, tmp_path):
-
     butil.clear_scene()
     asset1 = MolluskFactory(0).spawn_asset(0)
     asset2 = MolluskFactory(0).spawn_asset(1)
@@ -53,7 +51,7 @@ def test_export_curr_scene(format, tmp_path):
     file = export.export_curr_scene(tmp_path, format, image_res=TEST_IMAGE_RES)
     assert file.suffix == f".{format}"
-
+
     num_objs = len(bpy.data.objects)
     poly_count1 = len(asset1.data.polygons)
     poly_count2 = len(asset2.data.polygons)
@@ -62,19 +60,22 @@ def test_export_curr_scene(format, tmp_path):
     butil.import_mesh(file)
     total_polys = 0
     for obj in bpy.data.objects:
-        if obj.name == 'World':
+        if obj.name == "World":
             continue
         total_polys += len(obj.data.polygons)
-
+
     assert total_polys == poly_count1 + poly_count2
-    if format == 'usdc':
-        assert num_objs + 1 == len(bpy.data.objects) #usdc import generates extra "world" prim
-    elif format == 'ply':
+    if format == "usdc":
+        assert num_objs + 1 == len(
+            bpy.data.objects
+        )  # usdc import generates extra "world" prim
+    elif format == "ply":
         assert len(bpy.data.objects) == 1
     else:
         assert num_objs == len(bpy.data.objects)
     # TODO David Yan add other guarantees (count objects, count/names of materials, any others)
-# TODO test all export.py features, including individual export, transparent mats, instances
\ No newline at end of file
+
+# TODO test all export.py features, including individual export, transparent mats, instances
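The closing TODO above leaves per-object export, transparent materials, and instancing untested. As a rough, non-authoritative sketch of one possible follow-up (checking that material slots survive an export round-trip), reusing only fixtures that already appear in `tests/tools/test_export.py`; the assumption that every exporter preserves material-slot counts is hypothetical and may need to be relaxed per format:

```python
# Sketch only, not part of the diff above. Assumes exporters keep one material slot
# per original slot, which is an unverified guarantee; relax or drop the check if a
# format legitimately merges or discards slots.
import bpy
import pytest

from infinigen.assets.objects.mollusk import MolluskFactory
from infinigen.core.util import blender as butil
from infinigen.tools import export

TEST_IMAGE_RES = 32


@pytest.mark.parametrize("format", ["obj", "usdc"])
def test_export_preserves_material_slots(format, tmp_path):
    butil.clear_scene()
    asset = MolluskFactory(0).spawn_asset(0)
    n_slots_before = len(asset.material_slots)

    file = export.export_curr_scene(tmp_path, format, image_res=TEST_IMAGE_RES)

    # Re-import into a clean scene and compare slot counts on the imported meshes.
    butil.clear_scene()
    butil.import_mesh(file)
    imported_meshes = [o for o in bpy.data.objects if o.type == "MESH"]
    assert any(len(o.material_slots) == n_slots_before for o in imported_meshes)
```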